==> mlpack-2.2.5/.appveyor.yml <==
clone_depth: 10

environment:
  matrix:
    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
      VSVER: Visual Studio 14 2015 Win64
    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
      VSVER: Visual Studio 15 2017 Win64

configuration: Release
os: Visual Studio 2015

install:
  - ps: nuget install boost -o "${env:APPVEYOR_BUILD_FOLDER}" -Version 1.60.0
  - ps: nuget install boost_unit_test_framework-vc140 -o "${env:APPVEYOR_BUILD_FOLDER}" -Version 1.60.0
  - ps: nuget install boost_program_options-vc140 -o "${env:APPVEYOR_BUILD_FOLDER}" -Version 1.60.0
  - ps: nuget install boost_random-vc140 -o "${env:APPVEYOR_BUILD_FOLDER}" -Version 1.60.0
  - ps: nuget install boost_serialization-vc140 -o "${env:APPVEYOR_BUILD_FOLDER}" -Version 1.60.0
  - ps: nuget install boost_math_c99-vc140 -o "${env:APPVEYOR_BUILD_FOLDER}" -Version 1.60.0
  - ps: nuget install OpenBLAS -o "${env:APPVEYOR_BUILD_FOLDER}"

build_script:
  - mkdir boost_libs
  - ps: cp C:\projects\mlpack\boost_program_options-vc140.1.60.0.0\lib\native\address-model-64\lib\*.* C:\projects\mlpack\boost_libs\
  - ps: cp C:\projects\mlpack\boost_math_c99-vc140.1.60.0.0\lib\native\address-model-64\lib\*.* C:\projects\mlpack\boost_libs\
  - ps: cp C:\projects\mlpack\boost_random-vc140.1.60.0.0\lib\native\address-model-64\lib\*.* C:\projects\mlpack\boost_libs\
  - ps: cp C:\projects\mlpack\boost_serialization-vc140.1.60.0.0\lib\native\address-model-64\lib\*.* C:\projects\mlpack\boost_libs\
  - ps: cp C:\projects\mlpack\boost_unit_test_framework-vc140.1.60.0.0\lib\native\address-model-64\lib\*.* C:\projects\mlpack\boost_libs\
  - if not exist armadillo.tar.xz appveyor DownloadFile "http://sourceforge.net/projects/arma/files/armadillo-7.800.2.tar.xz" -FileName armadillo.tar.xz
  - 7z x armadillo.tar.xz -so | 7z x -si -ttar > nul
  - cd armadillo-7.800.2 && mkdir build && cd build
  - cmake -G "Visual Studio 14 2015 Win64" -DBLAS_LIBRARY:FILEPATH="%APPVEYOR_BUILD_FOLDER%/OpenBLAS.0.2.14.1/lib/native/lib/x64/libopenblas.dll.a" -DLAPACK_LIBRARY:FILEPATH="%APPVEYOR_BUILD_FOLDER%/OpenBLAS.0.2.14.1/lib/native/lib/x64/libopenblas.dll.a" -DCMAKE_PREFIX:FILEPATH="%APPVEYOR_BUILD_FOLDER%/armadillo" -DBUILD_SHARED_LIBS=OFF ..
  - '"C:\Program Files (x86)\MSBuild\14.0\Bin\MSBuild.exe" "C:\projects\mlpack\armadillo-7.800.2\build\armadillo.sln" /m /verbosity:quiet /p:Configuration=Release;Platform=x64'
  - cd C:\projects\mlpack && mkdir build && cd build
  - cmake -G "Visual Studio 14 2015 Win64" -DBLAS_LIBRARY:FILEPATH="%APPVEYOR_BUILD_FOLDER%/OpenBLAS.0.2.14.1/lib/native/lib/x64/libopenblas.dll.a" -DLAPACK_LIBRARY:FILEPATH="%APPVEYOR_BUILD_FOLDER%/OpenBLAS.0.2.14.1/lib/native/lib/x64/libopenblas.dll.a" -DARMADILLO_INCLUDE_DIR="C:/projects/mlpack/armadillo-7.800.2/include" -DARMADILLO_LIBRARY:FILEPATH="C:\projects\mlpack\armadillo-7.800.2\build\Debug\armadillo.lib" -DBOOST_INCLUDEDIR:PATH="C:\projects\mlpack\boost.1.60.0.0\lib\native\include" -DBOOST_LIBRARYDIR:PATH="C:\projects\mlpack\boost_libs" -DDEBUG=ON -DPROFILE=ON ..
  - '"C:\Program Files (x86)\MSBuild\14.0\Bin\MSBuild.exe" "C:\projects\mlpack\build\mlpack.sln" /m /verbosity:minimal /nologo /p:BuildInParallel=true /p:Configuration=Release;Platform=x64'
  - 7z a mlpack-windows-no-libs.zip "%APPVEYOR_BUILD_FOLDER%\build\Release\*.exe"
  - 7z a mlpack-windows.zip "%APPVEYOR_BUILD_FOLDER%\build\Release\*.*" "%APPVEYOR_BUILD_FOLDER%/OpenBLAS.0.2.14.1/lib/native/lib/x64/*.*"

artifacts:
  - path: '**\*.zip'

notifications:
  - provider: Email
    to:
      - mlpack-git@lists.mlpack.org
    on_build_success: true
    on_build_failure: true
    on_build_status_changed: true

cache:
  - packages -> **\packages.config
  - armadillo.tar.xz -> .appveyor.yml

# All plans have a maximum build job execution time of 60 minutes. But right
# now, the machine takes 30 minutes to build the code and at least 50 minutes
# to run all tests.
#
# test_script:
#   - '"C:\projects\mlpack\build\Release\mlpack_test.exe" -p'

==> mlpack-2.2.5/.gitignore <==
build*
src/mlpack/core/util/gitversion.hpp
src/mlpack/core/util/arma_config.hpp

==> mlpack-2.2.5/.travis.yml <==
language: cpp

before_install:
  - sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
  - sudo apt-add-repository -y ppa:comp-phys/stable
  - sudo add-apt-repository -y ppa:boost-latest/ppa
  - sudo add-apt-repository -y "deb http://us.archive.ubuntu.com/ubuntu trusty main universe"
  - sudo apt-get update -qq
  - sudo apt-get install -qq g++-4.8 libarmadillo-dev libarmadillo4 libboost1.54-all-dev
  - export CC=gcc-4.8
  - export CXX=g++-4.8
  - printenv
  - sudo cp .travis/config.hpp /usr/include/armadillo_bits/config.hpp

install:
  - mkdir build && cd build && cmake -DDEBUG=OFF -DPROFILE=OFF .. && make -j3

script:
  - travis_wait 30 ./bin/mlpack_test -p

notifications:
  email:
    - mlpack-git@lists.mlpack.org
  irc:
    - "chat.freenode.net#mlpack"

==> mlpack-2.2.5/.travis/config.hpp <==
// Copyright (C) 2008-2012 NICTA (www.nicta.com.au)
// Copyright (C) 2008-2012 Conrad Sanderson
//
// This file is part of the Armadillo C++ library.
// It is provided without any warranty of fitness
// for any purpose. You can redistribute this file
// and/or modify it under the terms of the GNU
// Lesser General Public License (LGPL) as published
// by the Free Software Foundation, either version 3
// of the License or (at your option) any later version.
// (see http://www.opensource.org/licenses for more info)

#if !defined(ARMA_USE_LAPACK)
#define ARMA_USE_LAPACK
//// Uncomment the above line if you have LAPACK or a high-speed replacement for LAPACK,
//// such as Intel's MKL, AMD's ACML, or the Accelerate framework.
//// LAPACK is required for matrix decompositions (eg. SVD) and matrix inverse.
#endif

#if !defined(ARMA_USE_BLAS)
#define ARMA_USE_BLAS
//// Uncomment the above line if you have BLAS or a high-speed replacement for BLAS,
//// such as GotoBLAS, Intel's MKL, AMD's ACML, or the Accelerate framework.
//// BLAS is used for matrix multiplication.
//// Without BLAS, matrix multiplication will still work, but might be slower.
#endif

#define ARMA_USE_WRAPPER
//// Comment out the above line if you prefer to directly link with LAPACK and/or BLAS (eg. -llapack -lblas)
//// instead of linking indirectly with LAPACK and/or BLAS via Armadillo's run-time wrapper library.
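////
//// For example, with the wrapper disabled, a program built against this
//// configuration would be linked directly against the underlying libraries
//// instead (an illustrative command line only; exact library names and
//// flags depend on the system):
////
////   g++ prog.cpp -o prog -O2 -llapack -lblas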
// #define ARMA_BLAS_CAPITALS //// Uncomment the above line if your BLAS and LAPACK libraries have capitalised function names (eg. ACML on 64-bit Windows) #define ARMA_BLAS_UNDERSCORE //// Uncomment the above line if your BLAS and LAPACK libraries have function names with a trailing underscore. //// Conversely, comment it out if the function names don't have a trailing underscore. // #define ARMA_BLAS_LONG //// Uncomment the above line if your BLAS and LAPACK libraries use "long" instead of "int" // #define ARMA_BLAS_LONG_LONG //// Uncomment the above line if your BLAS and LAPACK libraries use "long long" instead of "int" // #define ARMA_USE_TBB_ALLOC //// Uncomment the above line if you want to use Intel TBB scalable_malloc() and scalable_free() instead of standard new[] and delete[] // #define ARMA_USE_MKL_ALLOC //// Uncomment the above line if you want to use Intel MKL mkl_malloc() and mkl_free() instead of standard new[] and delete[] /* #undef ARMA_USE_ATLAS */ #define ARMA_ATLAS_INCLUDE_DIR / //// If you're using ATLAS and the compiler can't find cblas.h and/or clapack.h //// uncomment the above define and specify the appropriate include directory. //// Make sure the directory has a trailing / #define ARMA_64BIT_WORD //// Uncomment the above line if you require matrices/vectors capable of holding more than 4 billion elements. //// Your machine and compiler must have support for 64 bit integers (eg. via "long" or "long long") #if !defined(ARMA_USE_CXX11) #define ARMA_USE_CXX11 //// Uncomment the above line if you have a C++ compiler that supports the C++11 standard //// This will enable additional features, such as use of initialiser lists #endif #if !defined(ARMA_USE_HDF5) /* #undef ARMA_USE_HDF5 */ //// Uncomment the above line if you want the ability to save and load matrices stored in the HDF5 format; //// the hdf5.h header file must be available on your system and you will need to link with the hdf5 library (eg. -lhdf5) #endif #if !defined(ARMA_MAT_PREALLOC) #define ARMA_MAT_PREALLOC 16 #endif //// This is the number of preallocated elements used by matrices and vectors; //// it must be an integer that is at least 1. //// If you mainly use lots of very small vectors (eg. <= 4 elements), //// change the number to the size of your vectors. #if !defined(ARMA_SPMAT_CHUNKSIZE) #define ARMA_SPMAT_CHUNKSIZE 256 #endif //// This is the minimum increase in the amount of memory (in terms of elements) allocated by a sparse matrix; //// it must be an integer that is at least 1. //// The minimum recommended size is 16. // #define ARMA_NO_DEBUG //// Uncomment the above line if you want to disable all run-time checks. //// This will result in faster code, but you first need to make sure that your code runs correctly! //// We strongly recommend to have the run-time checks enabled during development, //// as this greatly aids in finding mistakes in your code, and hence speeds up development. //// We recommend that run-time checks be disabled _only_ for the shipped version of your program. // #define ARMA_EXTRA_DEBUG //// Uncomment the above line if you want to see the function traces of how Armadillo evaluates expressions. //// This is mainly useful for debugging of the library. 
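//// As a quick sanity check of the configuration above, a small test program
//// (illustrative only, not part of this header) can print what is in effect:
////
////   #include <armadillo>
////   #include <iostream>
////   int main() {
////     std::cout << arma::arma_version::as_string() << std::endl;
////     std::cout << sizeof(arma::uword) << std::endl;  // 8 when ARMA_64BIT_WORD is set
////     return 0;
////   }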
// #define ARMA_USE_BOOST // #define ARMA_USE_BOOST_DATE #if !defined(ARMA_DEFAULT_OSTREAM) #define ARMA_DEFAULT_OSTREAM std::cout #endif #define ARMA_PRINT_LOGIC_ERRORS #define ARMA_PRINT_RUNTIME_ERRORS //#define ARMA_PRINT_HDF5_ERRORS #define ARMA_HAVE_STD_ISFINITE #define ARMA_HAVE_STD_ISINF #define ARMA_HAVE_STD_ISNAN #define ARMA_HAVE_STD_SNPRINTF #define ARMA_HAVE_LOG1P #define ARMA_HAVE_GETTIMEOFDAY #if defined(ARMA_DONT_USE_LAPACK) #undef ARMA_USE_LAPACK #endif #if defined(ARMA_DONT_USE_BLAS) #undef ARMA_USE_BLAS #endif #if defined(ARMA_DONT_USE_ATLAS) #undef ARMA_USE_ATLAS #undef ARMA_ATLAS_INCLUDE_DIR #endif #if defined(ARMA_DONT_PRINT_LOGIC_ERRORS) #undef ARMA_PRINT_LOGIC_ERRORS #endif #if defined(ARMA_DONT_PRINT_RUNTIME_ERRORS) #undef ARMA_PRINT_RUNTIME_ERRORS #endif mlpack-2.2.5/CMake/000077500000000000000000000000001315013601400137415ustar00rootroot00000000000000mlpack-2.2.5/CMake/ARMA_FindACML.cmake000066400000000000000000000021331315013601400170570ustar00rootroot00000000000000# - Find AMD's ACML library (no includes) which provides optimised BLAS and LAPACK functions # This module defines # ACML_LIBRARIES, the libraries needed to use ACML. # ACML_FOUND, If false, do not try to use ACML. # also defined, but not for general use are # ACML_LIBRARY, where to find the ACML library. set(ACML_NAMES ${ACML_NAMES} acml) find_library(ACML_LIBRARY NAMES ${ACML_NAMES} PATHS /usr/lib64 /usr/lib /usr/*/lib64 /usr/*/lib /usr/*/gfortran64/lib/ /usr/*/gfortran32/lib/ /usr/local/lib64 /usr/local/lib /opt/lib64 /opt/lib /opt/*/lib64 /opt/*/lib /opt/*/gfortran64/lib/ /opt/*/gfortran32/lib/ ) if (ACML_LIBRARY) set(ACML_LIBRARIES ${ACML_LIBRARY}) set(ACML_FOUND "YES") else () set(ACML_FOUND "NO") endif () if (ACML_FOUND) if (NOT ACML_FIND_QUIETLY) message(STATUS "Found the ACML library: ${ACML_LIBRARIES}") endif () else () if (ACML_FIND_REQUIRED) message(FATAL_ERROR "Could not find the ACML library") endif () endif () # Deprecated declarations. get_filename_component (NATIVE_ACML_LIB_PATH ${ACML_LIBRARY} PATH) mark_as_advanced( ACML_LIBRARY ) mlpack-2.2.5/CMake/ARMA_FindACMLMP.cmake000066400000000000000000000022551315013601400173210ustar00rootroot00000000000000# - Find AMD's ACMLMP library (no includes) which provides optimised and parallelised BLAS and LAPACK functions # This module defines # ACMLMP_LIBRARIES, the libraries needed to use ACMLMP. # ACMLMP_FOUND, If false, do not try to use ACMLMP. # also defined, but not for general use are # ACMLMP_LIBRARY, where to find the ACMLMP library. set(ACMLMP_NAMES ${ACMLMP_NAMES} acml_mp) find_library(ACMLMP_LIBRARY NAMES ${ACMLMP_NAMES} PATHS /usr/lib64 /usr/lib /usr/*/lib64 /usr/*/lib /usr/*/gfortran64_mp/lib/ /usr/*/gfortran32_mp/lib/ /usr/local/lib64 /usr/local/lib /opt/lib64 /opt/lib /opt/*/lib64 /opt/*/lib /opt/*/gfortran64_mp/lib/ /opt/*/gfortran32_mp/lib/ ) if (ACMLMP_LIBRARY) set(ACMLMP_LIBRARIES ${ACMLMP_LIBRARY}) set(ACMLMP_FOUND "YES") else () set(ACMLMP_FOUND "NO") endif () if (ACMLMP_FOUND) if (NOT ACMLMP_FIND_QUIETLY) message(STATUS "Found the ACMLMP library: ${ACMLMP_LIBRARIES}") endif () else () if (ACMLMP_FIND_REQUIRED) message(FATAL_ERROR "Could not find the ACMLMP library") endif () endif () # Deprecated declarations. 
get_filename_component (NATIVE_ACMLMP_LIB_PATH ${ACMLMP_LIBRARY} PATH) mark_as_advanced( ACMLMP_LIBRARY ) mlpack-2.2.5/CMake/ARMA_FindARPACK.cmake000066400000000000000000000015031315013601400173040ustar00rootroot00000000000000# - Try to find ARPACK # Once done this will define # # ARPACK_FOUND - system has ARPACK # ARPACK_LIBRARY - Link this to use ARPACK find_library(ARPACK_LIBRARY NAMES arpack PATHS /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ) if (ARPACK_LIBRARY) set(ARPACK_FOUND YES) else () # Search for PARPACK. find_library(ARPACK_LIBRARY NAMES parpack PATHS /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ) if (ARPACK_LIBRARY) set(ARPACK_FOUND YES) else () set(ARPACK_FOUND NO) endif () endif () if (ARPACK_FOUND) if (NOT ARPACK_FIND_QUIETLY) message(STATUS "Found an ARPACK library: ${ARPACK_LIBRARY}") endif () else () if (ARPACK_FIND_REQUIRED) message(FATAL_ERROR "Could not find an ARPACK library") endif () endif () mlpack-2.2.5/CMake/ARMA_FindBLAS.cmake000066400000000000000000000021161315013601400170650ustar00rootroot00000000000000# - Find a BLAS library (no includes) # This module defines # BLAS_LIBRARIES, the libraries needed to use BLAS. # BLAS_FOUND, If false, do not try to use BLAS. # also defined, but not for general use are # BLAS_LIBRARY, where to find the BLAS library. set(BLAS_NAMES ${BLAS_NAMES} blas) # Find the ATLAS version preferentially. find_library(BLAS_LIBRARY NAMES ${BLAS_NAMES} PATHS /usr/lib64/atlas /usr/lib/atlas /usr/local/lib64/atlas /usr/local/lib/atlas NO_DEFAULT_PATH) find_library(BLAS_LIBRARY NAMES ${BLAS_NAMES} PATHS /usr/lib64/atlas /usr/lib/atlas /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ) if (BLAS_LIBRARY) set(BLAS_LIBRARIES ${BLAS_LIBRARY}) set(BLAS_FOUND "YES") else () set(BLAS_FOUND "NO") endif () if (BLAS_FOUND) if (NOT BLAS_FIND_QUIETLY) message(STATUS "Found BLAS: ${BLAS_LIBRARIES}") endif () else () if (BLAS_FIND_REQUIRED) message(FATAL_ERROR "Could not find BLAS") endif () endif () # Deprecated declarations. get_filename_component (NATIVE_BLAS_LIB_PATH ${BLAS_LIBRARY} PATH) mark_as_advanced( BLAS_LIBRARY ) mlpack-2.2.5/CMake/ARMA_FindCBLAS.cmake000066400000000000000000000023551315013601400171750ustar00rootroot00000000000000# - Find CBLAS (includes and library) # This module defines # CBLAS_INCLUDE_DIR # CBLAS_LIBRARIES # CBLAS_FOUND # also defined, but not for general use are # CBLAS_LIBRARY, where to find the library. find_path(CBLAS_INCLUDE_DIR cblas.h /usr/include/atlas/ /usr/local/include/atlas/ /usr/include/ /usr/local/include/ ) set(CBLAS_NAMES ${CBLAS_NAMES} cblas) find_library(CBLAS_LIBRARY NAMES ${CBLAS_NAMES} PATHS /usr/lib64/atlas-sse3 /usr/lib64/atlas /usr/lib64 /usr/local/lib64/atlas /usr/local/lib64 /usr/lib/atlas-sse3 /usr/lib/atlas-sse2 /usr/lib/atlas-sse /usr/lib/atlas-3dnow /usr/lib/atlas /usr/lib /usr/local/lib/atlas /usr/local/lib ) if (CBLAS_LIBRARY AND CBLAS_INCLUDE_DIR) set(CBLAS_LIBRARIES ${CBLAS_LIBRARY}) set(CBLAS_FOUND "YES") else () set(CBLAS_FOUND "NO") endif () if (CBLAS_FOUND) if (NOT CBLAS_FIND_QUIETLY) message(STATUS "Found a CBLAS library: ${CBLAS_LIBRARIES}") endif () else () if (CBLAS_FIND_REQUIRED) message(FATAL_ERROR "Could not find a CBLAS library") endif () endif () # Deprecated declarations. 
set (NATIVE_CBLAS_INCLUDE_PATH ${CBLAS_INCLUDE_DIR} ) get_filename_component (NATIVE_CBLAS_LIB_PATH ${CBLAS_LIBRARY} PATH) mark_as_advanced( CBLAS_LIBRARY CBLAS_INCLUDE_DIR ) mlpack-2.2.5/CMake/ARMA_FindCLAPACK.cmake000066400000000000000000000025471315013601400174120ustar00rootroot00000000000000# - Find a version of CLAPACK (includes and library) # This module defines # CLAPACK_INCLUDE_DIR # CLAPACK_LIBRARIES # CLAPACK_FOUND # also defined, but not for general use are # CLAPACK_LIBRARY, where to find the library. find_path(CLAPACK_INCLUDE_DIR clapack.h /usr/include/atlas/ /usr/local/include/atlas/ /usr/include/ /usr/local/include/ ) set(CLAPACK_NAMES ${CLAPACK_NAMES} lapack_atlas) set(CLAPACK_NAMES ${CLAPACK_NAMES} clapack) find_library(CLAPACK_LIBRARY NAMES ${CLAPACK_NAMES} PATHS /usr/lib64/atlas-sse3 /usr/lib64/atlas /usr/lib64 /usr/local/lib64/atlas /usr/local/lib64 /usr/lib/atlas-sse3 /usr/lib/atlas-sse2 /usr/lib/atlas-sse /usr/lib/atlas-3dnow /usr/lib/atlas /usr/lib /usr/local/lib/atlas /usr/local/lib ) if (CLAPACK_LIBRARY AND CLAPACK_INCLUDE_DIR) set(CLAPACK_LIBRARIES ${CLAPACK_LIBRARY}) set(CLAPACK_FOUND "YES") else () set(CLAPACK_FOUND "NO") endif () if (CLAPACK_FOUND) if (NOT CLAPACK_FIND_QUIETLY) message(STATUS "Found a CLAPACK library: ${CLAPACK_LIBRARIES}") endif () else () if (CLAPACK_FIND_REQUIRED) message(FATAL_ERROR "Could not find a CLAPACK library") endif () endif () # Deprecated declarations. set (NATIVE_CLAPACK_INCLUDE_PATH ${CLAPACK_INCLUDE_DIR} ) get_filename_component (NATIVE_CLAPACK_LIB_PATH ${CLAPACK_LIBRARY} PATH) mark_as_advanced( CLAPACK_LIBRARY CLAPACK_INCLUDE_DIR ) mlpack-2.2.5/CMake/ARMA_FindLAPACK.cmake000066400000000000000000000022131315013601400172750ustar00rootroot00000000000000# - Find a LAPACK library (no includes) # This module defines # LAPACK_LIBRARIES, the libraries needed to use LAPACK. # LAPACK_FOUND, If false, do not try to use LAPACK. # also defined, but not for general use are # LAPACK_LIBRARY, where to find the LAPACK library. set(LAPACK_NAMES ${LAPACK_NAMES} lapack) # Check ATLAS paths preferentially, using this necessary hack (I love CMake). find_library(LAPACK_LIBRARY NAMES ${LAPACK_NAMES} PATHS /usr/lib64/atlas /usr/lib/atlas /usr/local/lib64/atlas /usr/local/lib/atlas NO_DEFAULT_PATH) find_library(LAPACK_LIBRARY NAMES ${LAPACK_NAMES} PATHS /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ) if (LAPACK_LIBRARY) set(LAPACK_LIBRARIES ${LAPACK_LIBRARY}) set(LAPACK_FOUND "YES") else () set(LAPACK_FOUND "NO") endif () if (LAPACK_FOUND) if (NOT LAPACK_FIND_QUIETLY) message(STATUS "Found LAPACK: ${LAPACK_LIBRARIES}") endif () else () if (LAPACK_FIND_REQUIRED) message(FATAL_ERROR "Could not find LAPACK") endif () endif () # Deprecated declarations. get_filename_component (NATIVE_LAPACK_LIB_PATH ${LAPACK_LIBRARY} PATH) mark_as_advanced( LAPACK_LIBRARY ) mlpack-2.2.5/CMake/ARMA_FindMKL.cmake000066400000000000000000000027461315013601400170000ustar00rootroot00000000000000# - Find the MKL libraries (no includes) # This module defines # MKL_LIBRARIES, the libraries needed to use Intel's implementation of BLAS & LAPACK. # MKL_FOUND, If false, do not try to use MKL. 
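#
# A typical use from a project's CMakeLists.txt might look like the following
# (an illustrative sketch; "foo" is a hypothetical target, and only the
# variables documented above are assumed):
#
#   include(ARMA_FindMKL)
#   if (MKL_FOUND)
#     target_link_libraries(foo ${MKL_LIBRARIES})
#   endif ()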
set(MKL_NAMES ${MKL_NAMES} mkl_lapack) set(MKL_NAMES ${MKL_NAMES} mkl_intel_thread) set(MKL_NAMES ${MKL_NAMES} mkl_core) set(MKL_NAMES ${MKL_NAMES} guide) set(MKL_NAMES ${MKL_NAMES} mkl) set(MKL_NAMES ${MKL_NAMES} iomp5) #set(MKL_NAMES ${MKL_NAMES} pthread) if(CMAKE_SIZEOF_VOID_P EQUAL 8) set(MKL_NAMES ${MKL_NAMES} mkl_intel_lp64) else() set(MKL_NAMES ${MKL_NAMES} mkl_intel) endif() foreach (MKL_NAME ${MKL_NAMES}) find_library(${MKL_NAME}_LIBRARY NAMES ${MKL_NAME} PATHS /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib /opt/intel/lib/intel64 /opt/intel/lib/ia32 /opt/intel/mkl/lib/lib64 /opt/intel/mkl/lib/intel64 /opt/intel/mkl/lib/ia32 /opt/intel/mkl/lib /opt/intel/*/mkl/lib/intel64 /opt/intel/*/mkl/lib/ia32/ /opt/mkl/*/lib/em64t /opt/mkl/*/lib/32 /opt/intel/mkl/*/lib/em64t /opt/intel/mkl/*/lib/32 ) set(TMP_LIBRARY ${${MKL_NAME}_LIBRARY}) if(TMP_LIBRARY) set(MKL_LIBRARIES ${MKL_LIBRARIES} ${TMP_LIBRARY}) endif() endforeach() if (MKL_LIBRARIES) set(MKL_FOUND "YES") else () set(MKL_FOUND "NO") endif () if (MKL_FOUND) if (NOT MKL_FIND_QUIETLY) message(STATUS "Found MKL libraries: ${MKL_LIBRARIES}") endif () else () if (MKL_FIND_REQUIRED) message(FATAL_ERROR "Could not find MKL libraries") endif () endif () # mark_as_advanced(MKL_LIBRARY) mlpack-2.2.5/CMake/ARMA_FindOpenBLAS.cmake000066400000000000000000000017721315013601400177160ustar00rootroot00000000000000# - Find the OpenBLAS library (no includes) # This module defines # OpenBLAS_LIBRARIES, the libraries needed to use OpenBLAS. # OpenBLAS_FOUND, If false, do not try to use OpenBLAS. # also defined, but not for general use are # OpenBLAS_LIBRARY, where to find the OpenBLAS library. set(OpenBLAS_NAMES ${OpenBLAS_NAMES} openblas) find_library(OpenBLAS_LIBRARY NAMES ${OpenBLAS_NAMES} PATHS /lib64 /lib /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ) if (OpenBLAS_LIBRARY) set(OpenBLAS_LIBRARIES ${OpenBLAS_LIBRARY}) set(OpenBLAS_FOUND "YES") else () set(OpenBLAS_FOUND "NO") endif () if (OpenBLAS_FOUND) if (NOT OpenBLAS_FIND_QUIETLY) message(STATUS "Found the OpenBLAS library: ${OpenBLAS_LIBRARIES}") endif () else () if (OpenBLAS_FIND_REQUIRED) message(FATAL_ERROR "Could not find the OpenBLAS library") endif () endif () # Deprecated declarations. get_filename_component (NATIVE_OpenBLAS_LIB_PATH ${OpenBLAS_LIBRARY} PATH) mark_as_advanced( OpenBLAS_LIBRARY ) mlpack-2.2.5/CMake/CXX11.cmake000066400000000000000000000042261315013601400155530ustar00rootroot00000000000000# This is cloned from # https://github.com/nitroshare/CXX11-CMake-Macros # until C++11 support finally hits CMake stable (should be 3.1, I think). # Copyright (c) 2013 Nathan Osman # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # Determines whether or not the compiler supports C++11 macro(check_for_cxx11_compiler _VAR) message(STATUS "Checking for C++11 compiler") set(${_VAR}) if((MSVC AND (MSVC14)) OR (CMAKE_COMPILER_IS_GNUCXX AND NOT ${CMAKE_CXX_COMPILER_VERSION} VERSION_LESS 4.6) OR (CMAKE_CXX_COMPILER_ID STREQUAL "Clang" AND NOT ${CMAKE_CXX_COMPILER_VERSION} VERSION_LESS 3.1) OR (CMAKE_CXX_COMPILER_ID STREQUAL "Intel" AND NOT ${CMAKE_CXX_COMPILER_VERSION} VERSION_LESS 12.0)) set(${_VAR} 1) message(STATUS "Checking for C++11 compiler - available") else() message(STATUS "Checking for C++11 compiler - unavailable") endif() endmacro() # Sets the appropriate flag to enable C++11 support macro(enable_cxx11) if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL "Intel") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") endif() endmacro() mlpack-2.2.5/CMake/CreateArmaConfigInfo.cmake000066400000000000000000000060241315013601400207130ustar00rootroot00000000000000# Using the CMake tools to create the file arma_config.hpp, which contains # information on the Armadillo configuration when mlpack was compiled. This # assumes ${ARMADILLO_INCLUDE_DIR} is set. In addition, we must be careful to # avoid overwriting arma_config.hpp with the exact same information, because # this may trigger a new complete rebuild, which is undesired. if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/arma_config.hpp") file(READ "${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/arma_config.hpp" OLD_FILE_CONTENTS) else() set(OLD_FILE_CONTENTS "") endif() # If we are using Armadillo 5+, ARMA_64BIT_WORD is implicitly enabled. set(ARMA_HAS_64BIT_WORD 0) # This may be unnecessary. if(NOT (${ARMADILLO_VERSION_MAJOR} LESS 5)) # ARMA_64BIT_WORD is only set if we are on a 64-bit system. if (CMAKE_SIZEOF_VOID_P EQUAL 8) set(ARMA_HAS_64BIT_WORD 1) else () set(ARMA_HAS_64BIT_WORD 0) endif () else() # Otherwise, we'll need to open the config.hpp we are using and inspect the # setting of ARMA_64BIT_WORD. if(EXISTS "${ARMADILLO_INCLUDE_DIR}/armadillo_bits/config.hpp") file(READ "${ARMADILLO_INCLUDE_DIR}/armadillo_bits/config.hpp" ARMA_CONFIG) # Extract ARMA_64BIT_WORD. string(REGEX MATCH "[\r\n][ ]*#define ARMA_64BIT_WORD" ARMA_HAS_64BIT_WORD_PRE "${ARMA_CONFIG}") string(LENGTH "${ARMA_HAS_64BIT_WORD_PRE}" ARMA_HAS_64BIT_WORD) else() # Assumes ARMA_64BIT_WORD is not set. message(WARNING "Armadillo configuration file (${ARMADILLO_INCLUDE_DIR}/armadillo_bits/config.hpp) does not exist!") endif() endif() # Now use the value we gathered to generate the new file contents. if(ARMA_HAS_64BIT_WORD EQUAL 0) set(ARMA_64BIT_WORD_DEFINE "#define MLPACK_ARMA_NO64BIT_WORD") else() set(ARMA_64BIT_WORD_DEFINE "#define MLPACK_ARMA_64BIT_WORD") endif() set(NEW_FILE_CONTENTS "/** * @file arma_config.hpp * * This is an autogenerated file which contains the configuration of Armadillo * at the time mlpack was built. If you modify anything in here by hand, your * warranty is void, your house may catch fire, and we're not going to call the * police when your program segfaults so hard that robbers come to your house * and take everything you own. 
If you do decide, against better judgment, to * modify anything at all in this file, and you are reporting a bug, be * absolutely certain to mention that you've done something stupid in this file * first. * * In short: don't touch this file. */ #ifndef MLPACK_CORE_UTIL_ARMA_CONFIG_HPP #define MLPACK_CORE_UTIL_ARMA_CONFIG_HPP ${ARMA_64BIT_WORD_DEFINE} #endif ") # Did the contents of the file change at all? If not, don't write it. if(NOT "${OLD_FILE_CONTENTS}" STREQUAL "${NEW_FILE_CONTENTS}") # We have a reason to write the new file. message(STATUS "Regenerating arma_config.hpp.") file(REMOVE "${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/arma_config.hpp") file(WRITE "${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/arma_config.hpp" "${NEW_FILE_CONTENTS}") endif() mlpack-2.2.5/CMake/CreateGitVersionHeader.cmake000066400000000000000000000022441315013601400212730ustar00rootroot00000000000000# Using the CMake tools to create the gitversion.hpp, which just contains # the implementation of GetVersion() assuming that we are working inside of a # git repository. find_package(Git) execute_process(COMMAND ${GIT_EXECUTABLE} rev-parse --short HEAD WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} OUTPUT_VARIABLE NEW_GIT_REVISION OUTPUT_STRIP_TRAILING_WHITESPACE) # Get the current version, if it exists. if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/gitversion.hpp) file(READ ${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/gitversion.hpp _OLD_GITVERSION_CONTENTS) string(REGEX REPLACE ".*return \"mlpack git-([0-9a-f]+)\".*" "\\1" OLD_GIT_REVISION ${_OLD_GITVERSION_CONTENTS}) else() set(OLD_GIT_REVISION "notfound") endif() if("${OLD_GIT_REVISION}" STREQUAL "${NEW_GIT_REVISION}") message(STATUS "gitversion.hpp is already up to date.") else() # Remove the old version. file(REMOVE ${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/gitversion.hpp) file(WRITE ${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/gitversion.hpp "return \"mlpack git-${NEW_GIT_REVISION}\";\n") message(STATUS "Updated gitversion.hpp.") endif() mlpack-2.2.5/CMake/FindArmadillo.cmake000066400000000000000000000353331315013601400174570ustar00rootroot00000000000000# - Find Armadillo # Find the Armadillo C++ library # # Using Armadillo: # find_package(Armadillo REQUIRED) # include_directories(${ARMADILLO_INCLUDE_DIRS}) # add_executable(foo foo.cc) # target_link_libraries(foo ${ARMADILLO_LIBRARIES}) # This module sets the following variables: # ARMADILLO_FOUND - set to true if the library is found # ARMADILLO_INCLUDE_DIRS - list of required include directories # ARMADILLO_LIBRARIES - list of libraries to be linked # ARMADILLO_VERSION_MAJOR - major version number # ARMADILLO_VERSION_MINOR - minor version number # ARMADILLO_VERSION_PATCH - patch version number # ARMADILLO_VERSION_STRING - version number as a string (ex: "1.0.4") # ARMADILLO_VERSION_NAME - name of the version (ex: "Antipodean Antileech") #============================================================================= # Copyright 2011 Clement Creusot # # Distributed under the OSI-approved BSD License (the "License"); # see accompanying file Copyright.txt for details. # # This software is distributed WITHOUT ANY WARRANTY; without even the # implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the License for more information. #============================================================================= # (To distribute this file outside of CMake, substitute the full # License text for the above reference.) 
find_path(ARMADILLO_INCLUDE_DIR NAMES armadillo PATHS "$ENV{ProgramFiles}/Armadillo/include" ) if(ARMADILLO_INCLUDE_DIR) # ------------------------------------------------------------------------ # Extract version information from # ------------------------------------------------------------------------ # WARNING: Early releases of Armadillo didn't have the arma_version.hpp file. # (e.g. v.0.9.8-1 in ubuntu maverick packages (2001-03-15)) # If the file is missing, set all values to 0 set(ARMADILLO_VERSION_MAJOR 0) set(ARMADILLO_VERSION_MINOR 0) set(ARMADILLO_VERSION_PATCH 0) set(ARMADILLO_VERSION_NAME "EARLY RELEASE") if(EXISTS "${ARMADILLO_INCLUDE_DIR}/armadillo_bits/arma_version.hpp") # Read and parse armdillo version header file for version number file(READ "${ARMADILLO_INCLUDE_DIR}/armadillo_bits/arma_version.hpp" _armadillo_HEADER_CONTENTS) string(REGEX REPLACE ".*#define ARMA_VERSION_MAJOR ([0-9]+).*" "\\1" ARMADILLO_VERSION_MAJOR "${_armadillo_HEADER_CONTENTS}") string(REGEX REPLACE ".*#define ARMA_VERSION_MINOR ([0-9]+).*" "\\1" ARMADILLO_VERSION_MINOR "${_armadillo_HEADER_CONTENTS}") string(REGEX REPLACE ".*#define ARMA_VERSION_PATCH ([0-9]+).*" "\\1" ARMADILLO_VERSION_PATCH "${_armadillo_HEADER_CONTENTS}") # WARNING: The number of spaces before the version name is not one. string(REGEX REPLACE ".*#define ARMA_VERSION_NAME\ +\"([0-9a-zA-Z\ _-]+)\".*" "\\1" ARMADILLO_VERSION_NAME "${_armadillo_HEADER_CONTENTS}") endif() set(ARMADILLO_VERSION_STRING "${ARMADILLO_VERSION_MAJOR}.${ARMADILLO_VERSION_MINOR}.${ARMADILLO_VERSION_PATCH}") endif () #====================== # Determine what support libraries are being used, and whether or not we need to # link against them. We need to look in config.hpp. set(SUPPORT_INCLUDE_DIRS "") set(SUPPORT_LIBRARIES "") set(ARMA_NEED_LIBRARY true) # Assume true. if(EXISTS "${ARMADILLO_INCLUDE_DIR}/armadillo_bits/config.hpp") file(READ "${ARMADILLO_INCLUDE_DIR}/armadillo_bits/config.hpp" _armadillo_CONFIG_CONTENTS) # ARMA_USE_WRAPPER string(REGEX MATCH "\r?\n[\t ]*#define[ \t]+ARMA_USE_WRAPPER[ \t]*\r?\n" ARMA_USE_WRAPPER "${_armadillo_CONFIG_CONTENTS}") # ARMA_USE_LAPACK string(REGEX MATCH "\r?\n[\t ]*#if[\t ]+!defined[(]ARMA_USE_LAPACK[)][\t ]*\r?\n[\t ]*#define[ \t]+ARMA_USE_LAPACK[ \t]*\r?\n" ARMA_USE_LAPACK "${_armadillo_CONFIG_CONTENTS}") # ARMA_USE_BLAS string(REGEX MATCH "\r?\n[\t ]*#if[\t ]+!defined[(]ARMA_USE_BLAS[)][\t ]*\r?\n[\t ]*#define[ \t]+ARMA_USE_BLAS[ \t]*\r?\n" ARMA_USE_BLAS "${_armadillo_CONFIG_CONTENTS}") # ARMA_USE_ARPACK # ARMA_USE_ARPACK string(REGEX MATCH "\r?\n[\t ]*#if[\t ]+!defined[(]ARMA_USE_ARPACK[)][\t ]*\r?\n[\t ]*#define[ \t]+ARMA_USE_ARPACK[ \t]*\r?\n" ARMA_USE_ARPACK "${_armadillo_CONFIG_CONTENTS}") # Look for #define ARMA_USE_HDF5. string(REGEX MATCH "\r?\n[\t ]*#if[\t ]+!defined[(]ARMA_USE_HDF5[)][\t ]*\r?\n[\t ]*#define[ \t]+ARMA_USE_HDF5[ \t]*\r?\n" ARMA_USE_HDF5 "${_armadillo_CONFIG_CONTENTS}") # If we aren't wrapping, things get a little more complex. if("${ARMA_USE_WRAPPER}" STREQUAL "") set(ARMA_NEED_LIBRARY false) message(STATUS "ARMA_USE_WRAPPER is not defined, so all dependencies of " "Armadillo must be manually linked.") set(HAVE_LAPACK false) set(HAVE_BLAS false) # Search for LAPACK/BLAS (or replacement). 
if ((NOT "${ARMA_USE_LAPACK}" STREQUAL "") AND (NOT "${ARMA_USE_BLAS}" STREQUAL "")) # In order of preference: MKL, ACML, OpenBLAS, ATLAS set(MKL_FIND_QUIETLY true) include(ARMA_FindMKL) set(ACMLMP_FIND_QUIETLY true) include(ARMA_FindACMLMP) set(ACML_FIND_QUIETLY true) include(ARMA_FindACML) if (MKL_FOUND) message(STATUS "Using MKL for LAPACK/BLAS: ${MKL_LIBRARIES}") set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${MKL_LIBRARIES}") set(HAVE_LAPACK true) set(HAVE_BLAS true) elseif (ACMLMP_FOUND) message(STATUS "Using multi-core ACML libraries for LAPACK/BLAS: ${ACMLMP_LIBRARIES}") set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${ACMLMP_LIBRARIES}") set(HAVE_LAPACK true) set(HAVE_BLAS true) elseif (ACML_FOUND) message(STATUS "Using ACML for LAPACK/BLAS: ${ACML_LIBRARIES}") set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${ACML_LIBRARIES}") set(HAVE_LAPACK true) set(HAVE_BLAS true) endif () endif () # If we haven't found BLAS, try. if (NOT "${ARMA_USE_BLAS}" STREQUAL "" AND NOT HAVE_BLAS) # Search for BLAS. set(OpenBLAS_FIND_QUIETLY true) include(ARMA_FindOpenBLAS) set(CBLAS_FIND_QUIETLY true) include(ARMA_FindCBLAS) set(BLAS_FIND_QUIETLY true) include(ARMA_FindBLAS) if (OpenBLAS_FOUND) # Warn if ATLAS is found also. if (CBLAS_FOUND) message(STATUS "Warning: both OpenBLAS and ATLAS have been found; " "ATLAS will not be used.") endif () message(STATUS "Using OpenBLAS for BLAS: ${OpenBLAS_LIBRARIES}") set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${OpenBLAS_LIBRARIES}") set(HAVE_BLAS true) elseif (CBLAS_FOUND) message(STATUS "Using ATLAS for BLAS: ${CBLAS_LIBRARIES}") set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${CBLAS_LIBRARIES}") set(SUPPORT_INCLUDE_DIRS "${SUPPORT_INCLUDE_DIRS}" "${CBLAS_INCLUDE_DIR}") set(HAVE_BLAS true) elseif (BLAS_FOUND) message(STATUS "Using standard BLAS: ${BLAS_LIBRARIES}") set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${BLAS_LIBRARIES}") set(HAVE_BLAS true) endif () endif () # If we haven't found LAPACK, try. if (NOT "${ARMA_USE_LAPACK}" STREQUAL "" AND NOT HAVE_LAPACK) # Search for LAPACK. set(CLAPACK_FIND_QUIETLY true) include(ARMA_FindCLAPACK) set(LAPACK_FIND_QUIETLY true) include(ARMA_FindLAPACK) # Only use ATLAS if OpenBLAS isn't being used. if (CLAPACK_FOUND AND NOT OpenBLAS_FOUND) message(STATUS "Using ATLAS for LAPACK: ${CLAPACK_LIBRARIES}") set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${CLAPACK_LIBRARIES}") set(SUPPORT_INCLUDE_DIRS "${SUPPORT_INCLUDE_DIRS}" "${CLAPACK_INCLUDE_DIR}") set(HAVE_LAPACK true) elseif (LAPACK_FOUND) message(STATUS "Using standard LAPACK: ${LAPACK_LIBRARIES}") set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${LAPACK_LIBRARIES}") set(HAVE_LAPACK true) endif () endif () if (NOT "${ARMA_USE_LAPACK}" STREQUAL "" AND NOT HAVE_LAPACK) message(FATAL_ERROR "Cannot find LAPACK library, but ARMA_USE_LAPACK is " "set. Try specifying LAPACK libraries manually by setting the " "LAPACK_LIBRARY variable.") endif () if (NOT "${ARMA_USE_BLAS}" STREQUAL "" AND NOT HAVE_BLAS) message(FATAL_ERROR "Cannot find BLAS library, but ARMA_USE_BLAS is set. " "Try specifying BLAS libraries manually by setting the BLAS_LIBRARY " "variable.") endif () # Search for ARPACK (or replacement). if (NOT "${ARMA_USE_ARPACK}" STREQUAL "") # Use Armadillo ARPACK-finding procedure. set(ARPACK_FIND_QUIETLY true) include(ARMA_FindARPACK) if (NOT ARPACK_FOUND) message(FATAL_ERROR "ARMA_USE_ARPACK is defined in " "armadillo_bits/config.hpp, but ARPACK cannot be found. 
Try " "specifying ARPACK_LIBRARY.") endif () set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${ARPACK_LIBRARY}") endif () # Search for HDF5 (or replacement). if (NOT "${ARMA_USE_HDF5}" STREQUAL "") find_package(HDF5 QUIET) if(NOT HDF5_FOUND) # On Debian systems, the HDF5 package has been split into multiple # packages so that it is co-installable. But this may mean that the # include files are hidden somewhere very odd that the FindHDF5.cmake # script will not find. Thus, we'll also quickly check pkgconfig to see # if there is information on what to use there. find_package(PkgConfig) if (PKG_CONFIG_FOUND) pkg_check_modules(HDF5 hdf5) # But using pkgconfig is a little weird because HDF5_LIBRARIES won't # be filled with exact library paths, like the other scripts. So # instead what we get is HDF5_LIBRARY_DIRS which is the equivalent of # what we'd pass to -L. if (HDF5_FOUND) # I'm not sure what I think of doing this here... link_directories("${HDF5_LIBRARY_DIRS}") endif() endif() endif() if(NOT HDF5_FOUND) # We tried but didn't find it. message(FATAL_ERROR "Armadillo HDF5 support is enabled, but HDF5 " "cannot be found on the system. Consider disabling HDF5 support.") endif() set(SUPPORT_INCLUDE_DIRS "${SUPPORT_INCLUDE_DIRS}" "${HDF5_INCLUDE_DIRS}") set(SUPPORT_LIBRARIES "${SUPPORT_LIBRARIES}" "${HDF5_LIBRARIES}") endif () else() # Some older versions still require linking against HDF5 since they did not # wrap libhdf5. This was true for versions older than 4.300. if(NOT "${ARMA_USE_HDF5}" STREQUAL "" AND "${ARMADILLO_VERSION_STRING}" VERSION_LESS "4.300.0") message(STATUS "Armadillo HDF5 support is enabled and manual linking is " "required.") # We have HDF5 support and need to link against HDF5. find_package(HDF5) if(NOT HDF5_FOUND) # On Debian systems, the HDF5 package has been split into multiple # packages so that it is co-installable. But this may mean that the # include files are hidden somewhere very odd that the FindHDF5.cmake # script will not find. Thus, we'll also quickly check pkgconfig to see # if there is information on what to use there. find_package(PkgConfig) if (PKG_CONFIG_FOUND) pkg_check_modules(HDF5 hdf5) # But using pkgconfig is a little weird because HDF5_LIBRARIES won't # be filled with exact library paths, like the other scripts. So # instead what we get is HDF5_LIBRARY_DIRS which is the equivalent of # what we'd pass to -L. if (HDF5_FOUND) # I'm not sure what I think of doing this here... link_directories("${HDF5_LIBRARY_DIRS}") endif() endif() endif() if(NOT HDF5_FOUND) # We tried but didn't find it. message(FATAL_ERROR "Armadillo HDF5 support is enabled, but HDF5 " "cannot be found on the system. Consider disabling HDF5 support.") endif() set(SUPPORT_INCLUDE_DIRS "${HDF5_INCLUDE_DIRS}") set(SUPPORT_LIBRARIES "${HDF5_LIBRARIES}") endif() # Versions between 4.300 and 4.500 did successfully wrap HDF5, but didn't have good support for setting the include directory correctly. if(NOT "${ARMA_USE_HDF5}" STREQUAL "" AND "${ARMADILLO_VERSION_STRING}" VERSION_GREATER "4.299.0" AND "${ARMADILLO_VERSION_STRING}" VERSION_LESS "4.450.0") message(STATUS "Armadillo HDF5 support is enabled and include " "directories must be found.") find_package(HDF5) if(NOT HDF5_FOUND) # On Debian systems, the HDF5 package has been split into multiple # packages so that it is co-installable. But this may mean that the # include files are hidden somewhere very odd that the FindHDF5.cmake # script will not find. 
Thus, we'll also quickly check pkgconfig to see # if there is information on what to use there. find_package(PkgConfig) if (PKG_CONFIG_FOUND) pkg_check_modules(HDF5 hdf5) endif() endif() if(NOT HDF5_FOUND) # We tried but didn't find it. message(FATAL_ERROR "Armadillo HDF5 support is enabled, but HDF5 " "cannot be found on the system. Consider disabling HDF5 support.") endif() set(SUPPORT_INCLUDE_DIRS "${HDF5_INCLUDE_DIRS}") endif() endif() else() message(FATAL_ERROR "${ARMADILLO_INCLUDE_DIR}/armadillo_bits/config.hpp not " "found! Cannot determine what to link against.") endif() if (ARMA_NEED_LIBRARY) # UNIX paths are standard, no need to write. find_library(ARMADILLO_LIBRARY NAMES armadillo PATHS "$ENV{ProgramFiles}/Armadillo/lib" "$ENV{ProgramFiles}/Armadillo/lib64" "$ENV{ProgramFiles}/Armadillo" ) # Checks 'REQUIRED', 'QUIET' and versions. include(FindPackageHandleStandardArgs) find_package_handle_standard_args(Armadillo REQUIRED_VARS ARMADILLO_LIBRARY ARMADILLO_INCLUDE_DIR VERSION_VAR ARMADILLO_VERSION_STRING) # version_var fails with cmake < 2.8.4. else () # Checks 'REQUIRED', 'QUIET' and versions. include(FindPackageHandleStandardArgs) find_package_handle_standard_args(Armadillo REQUIRED_VARS ARMADILLO_INCLUDE_DIR VERSION_VAR ARMADILLO_VERSION_STRING) endif () if (ARMADILLO_FOUND) # Also include support include directories. set(ARMADILLO_INCLUDE_DIRS ${ARMADILLO_INCLUDE_DIR} ${SUPPORT_INCLUDE_DIRS}) # Also include support libraries to link against. if (ARMA_NEED_LIBRARY) set(ARMADILLO_LIBRARIES ${ARMADILLO_LIBRARY} ${SUPPORT_LIBRARIES}) else () set(ARMADILLO_LIBRARIES ${SUPPORT_LIBRARIES}) endif () message(STATUS "Armadillo libraries: ${ARMADILLO_LIBRARIES}") endif () # Hide internal variables mark_as_advanced( ARMADILLO_INCLUDE_DIR ARMADILLO_LIBRARY) #====================== mlpack-2.2.5/CMake/FindBfd.cmake000066400000000000000000000037021315013601400162410ustar00rootroot00000000000000# - Try to find libbfd # Once done this will define # # LIBBFD_FOUND - system has libbfd # LIBBFD_INCLUDE_DIRS - the libbfd include directory # LIBBFD_LIBRARIES - Link these to use libbfd # LIBBFD_DEFINITIONS - Compiler switches required for using libbfd # # Based on: # # Copyright (c) 2008 Bernhard Walle # # Redistribution and use is allowed according to the terms of the New # BSD license. # For details see the accompanying COPYING-CMAKE-SCRIPTS file. # if (LIBBFD_LIBRARIES AND LIBBFD_INCLUDE_DIRS) set (LIBBFD_FIND_QUIETLY TRUE) endif () find_path (LIBBFD_INCLUDE_DIRS NAMES bfd.h dis-asm.h PATHS /usr/include /usr/local/include /opt/local/include /opt/include ENV CPATH) # Ugly, yes ugly... 
find_library (LIBBFD_BFD_LIBRARY NAMES bfd PATHS /usr/lib /usr/lib64 /usr/local/lib /usr/local/lib64 /usr/include /opt/local/lib /opt/usr/lib64 ENV LIBRARY_PATH ENV LD_LIBRARY_PATH) #find_library (LIBBFD_IBERTY_LIBRARY # NAMES # iberty # PATHS # /usr/lib # /usr/lib64 # /usr/local/lib # /usr/local/lib64 # /usr/include # /opt/local/lib # /opt/usr/lib64 # ENV LIBRARY_PATH # ENV LD_LIBRARY_PATH) #find_library (LIBBFD_OPCODES_LIBRARY # NAMES # opcodes # PATHS # /usr/lib # /usr/lib64 # /usr/local/lib # /usr/local/lib64 # /usr/include # /opt/local/lib # /opt/usr/lib64 # ENV LIBRARY_PATH # ENV LD_LIBRARY_PATH) include (FindPackageHandleStandardArgs) # handle the QUIETLY and REQUIRED arguments and set LIBBFD_FOUND to TRUE if all listed variables are TRUE FIND_PACKAGE_HANDLE_STANDARD_ARGS(LIBBFD DEFAULT_MSG LIBBFD_BFD_LIBRARY # LIBBFD_IBERTY_LIBRARY # LIBBFD_OPCODES_LIBRARY LIBBFD_INCLUDE_DIRS) set(LIBBFD_LIBRARIES "${LIBBFD_BFD_LIBRARY}") mark_as_advanced(LIBBFD_INCLUDE_DIRS LIBBFD_LIBRARIES)mlpack-2.2.5/CMake/FindLibDL.cmake000066400000000000000000000015331315013601400164740ustar00rootroot00000000000000# - Try to find libdl # Once done this will define # # LIBDL_FOUND - system has libdl # LIBDL_INCLUDE_DIRS - the libdl include directory # LIBDL_LIBRARIES - Link these to use libdl # LIBDL_NEEDS_UNDERSCORE - If extern "C" symbols are prefixed (BSD/Apple) # find_path (LIBDL_INCLUDE_DIRS NAMES dlfcn.h) find_library (LIBDL_LIBRARIES NAMES dl) include (FindPackageHandleStandardArgs) FIND_PACKAGE_HANDLE_STANDARD_ARGS(LibDL DEFAULT_MSG LIBDL_LIBRARIES LIBDL_INCLUDE_DIRS) set(CMAKE_REQUIRED_LIBRARIES dl) include(CheckCSourceRuns) CHECK_C_SOURCE_RUNS("#include #include void testfunc() {} int main() { testfunc(); if (dlsym(0, \"_testfunc\") != (void*)0) { return EXIT_SUCCESS; } else { return EXIT_FAILURE; } }" LIBDL_NEEDS_UNDERSCORE) mark_as_advanced(LIBDL_INCLUDE_DIRS LIBDL_LIBRARIES LIBDL_NEEDS_UNDERSCORE)mlpack-2.2.5/CMake/FindMathJax.cmake000066400000000000000000000010271315013601400171000ustar00rootroot00000000000000# Find the MathJax package. # Once done this will define # # MATHJAX_FOUND - system has MathJax # MATHJAX_JS_PATH - path to MathJax.js # MATHJAX_PATH - path to the MathJax root directory find_file (MATHJAX_JS_PATH NAMES MathJax.js PATHS ${MATHJAX_ROOT} /usr/share/javascript/mathjax/ /usr/local/share/javascript/mathjax/) get_filename_component (MATHJAX_PATH ${MATHJAX_JS_PATH} DIRECTORY) FIND_PACKAGE_HANDLE_STANDARD_ARGS(MathJax DEFAULT_MSG MATHJAX_JS_PATH) mark_as_advanced (MATHJAX_JS_PATH) mlpack-2.2.5/CMake/FindMatlabMex.cmake000066400000000000000000000070011315013601400174140ustar00rootroot00000000000000# This module looks for mex, the MATLAB compiler. # The following variables are defined when the script completes: # MATLAB_MEX: location of mex compiler # MATLAB_ROOT: root of MATLAB installation # MATLABMEX_FOUND: 0 if not found, 1 if found set(MATLABMEX_FOUND 0) if(WIN32) # This is untested but taken from the older FindMatlab.cmake script as well as # the modifications by Ramon Casero and Tom Doel for Gerardus. # Search for a version of Matlab available, starting from the most modern one # to older versions. 
  foreach(MATVER "7.20" "7.19" "7.18" "7.17" "7.16" "7.15" "7.14" "7.13"
      "7.12" "7.11" "7.10" "7.9" "7.8" "7.7" "7.6" "7.5" "7.4")
    if((NOT DEFINED MATLAB_ROOT)
        OR ("${MATLAB_ROOT}" STREQUAL "")
        OR ("${MATLAB_ROOT}" STREQUAL "/registry"))
      get_filename_component(MATLAB_ROOT
          "[HKEY_LOCAL_MACHINE\\SOFTWARE\\MathWorks\\MATLAB\\${MATVER};MATLABROOT]"
          ABSOLUTE)
      set(MATLAB_VERSION ${MATVER})
    endif()
  endforeach()

  find_program(MATLAB_MEX
    mex
    ${MATLAB_ROOT}/bin
  )
else()
  # Check if this is a Mac.
  if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
    # This code is untested but taken from the older FindMatlab.cmake script as
    # well as the modifications by Ramon Casero and Tom Doel for Gerardus.
    set(LIBRARY_EXTENSION .dylib)

    # If this is a Mac and the attempts to find MATLAB_ROOT have so far failed,
    # we look in the applications folder.
    if((NOT DEFINED MATLAB_ROOT) OR ("${MATLAB_ROOT}" STREQUAL ""))
      # Search for a version of Matlab available, starting from the most modern
      # one to older versions.
      foreach(MATVER "R2013b" "R2013a" "R2012b" "R2012a" "R2011b" "R2011a"
          "R2010b" "R2010a" "R2009b" "R2009a" "R2008b")
        if((NOT DEFINED MATLAB_ROOT) OR ("${MATLAB_ROOT}" STREQUAL ""))
          if(EXISTS /Applications/MATLAB_${MATVER}.app)
            set(MATLAB_ROOT /Applications/MATLAB_${MATVER}.app)
          endif()
        endif()
      endforeach()
    endif()

    find_program(MATLAB_MEX
      mex
      PATHS ${MATLAB_ROOT}/bin
    )
  else()
    # On a Linux system.  The goal is to find MATLAB_ROOT.
    set(LIBRARY_EXTENSION .so)

    find_program(MATLAB_MEX_POSSIBLE_LINK
      mex
      PATHS
        ${MATLAB_ROOT}/bin
        /opt/matlab/bin
        /usr/local/matlab/bin
        $ENV{HOME}/matlab/bin
        # Now all the versions
        /opt/matlab/[rR]20[0-9][0-9][abAB]/bin
        /usr/local/matlab/[rR]20[0-9][0-9][abAB]/bin
        /opt/matlab-[rR]20[0-9][0-9][abAB]/bin
        /opt/matlab_[rR]20[0-9][0-9][abAB]/bin
        /usr/local/matlab-[rR]20[0-9][0-9][abAB]/bin
        /usr/local/matlab_[rR]20[0-9][0-9][abAB]/bin
        $ENV{HOME}/matlab/[rR]20[0-9][0-9][abAB]/bin
        $ENV{HOME}/matlab-[rR]20[0-9][0-9][abAB]/bin
        $ENV{HOME}/matlab_[rR]20[0-9][0-9][abAB]/bin
    )

    get_filename_component(MATLAB_MEX "${MATLAB_MEX_POSSIBLE_LINK}" REALPATH)
    get_filename_component(MATLAB_BIN_ROOT "${MATLAB_MEX}" PATH)
    # Strip ./bin/.
    get_filename_component(MATLAB_ROOT "${MATLAB_BIN_ROOT}" PATH)
  endif()
endif()

if(NOT EXISTS "${MATLAB_MEX}" AND "${MatlabMex_FIND_REQUIRED}")
  message(FATAL_ERROR
      "Could not find MATLAB mex compiler; try specifying MATLAB_ROOT.")
else()
  if(EXISTS "${MATLAB_MEX}")
    message(STATUS "Found MATLAB mex compiler: ${MATLAB_MEX}")
    message(STATUS "MATLAB root: ${MATLAB_ROOT}")
    set(MATLABMEX_FOUND 1)
  endif()
endif()

mark_as_advanced(
  MATLAB_MEX
  MATLABMEX_FOUND
  MATLAB_ROOT
)

==> mlpack-2.2.5/CMake/GenerateDoxyfile.cmake <==
# We need to modify the Doxyfile slightly.  We'll copy the Doxyfile into the
# build directory, update the location of the source, and then run Doxygen and
# it will generate the documentation into the build directory.

# First, read the Doxyfile in as a variable.
file(READ "${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile" DOXYFILE_CONTENTS)

# Now, modify all the "INPUT" paths.  I've written each of these out by
# hand.  If more are added, they'll need to be added here too.
string(REPLACE "./src/mlpack"
       "${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack"
       DOXYFILE_AUXVAR "${DOXYFILE_CONTENTS}"
)
string(REPLACE "./doc/guide"
       "${CMAKE_CURRENT_SOURCE_DIR}/doc/guide"
       DOXYFILE_CONTENTS "${DOXYFILE_AUXVAR}"
)
string(REPLACE "./doc/tutorials"
       "${CMAKE_CURRENT_SOURCE_DIR}/doc/tutorials"
       DOXYFILE_AUXVAR "${DOXYFILE_CONTENTS}"
)
string(REPLACE "./doc/policies"
       "${CMAKE_CURRENT_SOURCE_DIR}/doc/policies"
       DOXYFILE_CONTENTS "${DOXYFILE_AUXVAR}"
)
string(REPLACE "./doc/doxygen/footer.html"
       "${CMAKE_CURRENT_SOURCE_DIR}/doc/doxygen/footer.html"
       DOXYFILE_AUXVAR "${DOXYFILE_CONTENTS}"
)
string(REPLACE "./doc/doxygen/extra-stylesheet.css"
       "${CMAKE_CURRENT_SOURCE_DIR}/doc/doxygen/extra-stylesheet.css"
       DOXYFILE_CONTENTS "${DOXYFILE_AUXVAR}")

# Change the STRIP_FROM_PATH so that it works right even in the build
# directory; otherwise, every file will have the full path in it.
string(REGEX REPLACE "(STRIP_FROM_PATH[ ]*=) ./"
       "\\1 ${CMAKE_CURRENT_SOURCE_DIR}/" DOXYFILE_AUXVAR
       ${DOXYFILE_CONTENTS})

# Apply the MathJax option.  If the option is specified, we change the NO to
# YES.  Otherwise, it's off by default, so we needn't modify anything.
if (MATHJAX)
  string(REGEX REPLACE "(USE_MATHJAX[ ]*=) NO" "\\1 YES" DOXYFILE_CONTENTS
      ${DOXYFILE_AUXVAR})

  # Include the path to MathJax.  If we couldn't find the MathJax package,
  # we will use MathJax at the MathJax Content Delivery Network.
  if (MATHJAX_FOUND)
    string(CONCAT DOXYFILE_AUXVAR ${DOXYFILE_CONTENTS}
        "\nMATHJAX_RELPATH = ${MATHJAX_PATH}")
    set(DOXYFILE_CONTENTS ${DOXYFILE_AUXVAR})
  endif()
else ()
  set(DOXYFILE_CONTENTS ${DOXYFILE_AUXVAR})
endif ()

# Save the Doxyfile to its new location.
file(WRITE "${DESTDIR}/Doxyfile" "${DOXYFILE_CONTENTS}")

==> mlpack-2.2.5/CMake/GeneratePkgConfig.cmake <==
# As input the following variables should be set:
#
#   MLPACK_SOURCE_DIR: directory containing mlpack sources.
#
# And our goal in this file is to generate/configure mlpack.pc.

# First, we need to extract the version string.
if (NOT EXISTS "${CMAKE_BINARY_DIR}/include/mlpack/core/util/version.hpp")
  message(FATAL_ERROR "Cannot open "
      "${CMAKE_BINARY_DIR}/include/mlpack/core/util/version.hpp to extract "
      "version!")
endif ()

file(READ "${CMAKE_BINARY_DIR}/include/mlpack/core/util/version.hpp"
    VERSION_HPP_CONTENTS)
string(REGEX REPLACE ".*#define MLPACK_VERSION_MAJOR ([0-9]+).*" "\\1"
    MLPACK_VERSION_MAJOR "${VERSION_HPP_CONTENTS}")
string(REGEX REPLACE ".*#define MLPACK_VERSION_MINOR ([0-9]+).*" "\\1"
    MLPACK_VERSION_MINOR "${VERSION_HPP_CONTENTS}")
string(REGEX REPLACE ".*#define MLPACK_VERSION_PATCH [\"]?([0-9x]+)[\"]?.*"
    "\\1" MLPACK_VERSION_PATCH "${VERSION_HPP_CONTENTS}")

set(MLPACK_VERSION_STRING
    "${MLPACK_VERSION_MAJOR}.${MLPACK_VERSION_MINOR}.${MLPACK_VERSION_PATCH}")

configure_file(
    ${CMAKE_BINARY_DIR}/CMake/mlpack.pc.in.partial
    ${CMAKE_BINARY_DIR}/lib/pkgconfig/mlpack.pc
    @ONLY)

==> mlpack-2.2.5/CMake/LICENSE.txt <==
The ARMA_*.cmake files in this directory are from the Armadillo project, and
are generally written by Conrad Sanderson and other Armadillo contributors.
The Armadillo project, which is used heavily in mlpack, can be found at
http://arma.sourceforge.net/

Those files are subject to the terms of the Mozilla Public License, v. 2.0.
A copy of the license may be obtained at http://mozilla.org/MPL/2.0/.
mlpack-2.2.5/CMake/NewCXX11.cmake000066400000000000000000000007601315013601400162240ustar00rootroot00000000000000# This file should be incorporated into the main CMakeLists.txt when CMake 3.1 # becomes the minimum required version (we should at least wait until late 2016 # or early 2017 for this). target_compile_features(mlpack PUBLIC cxx_decltype cxx_alias_templates cxx_auto_type cxx_lambdas cxx_constexpr cxx_rvalue_references cxx_static_assert cxx_template_template_parameters cxx_delegating_constructors cxx_variadic_templates cxx_nullptr cxx_noexcept ) mlpack-2.2.5/CMake/TargetDistclean.cmake000066400000000000000000000015321315013601400200210ustar00rootroot00000000000000# add custom target distclean # cleans and removes cmake generated files etc. # Jan Woetzel 04/2003 # # taken from http://cmake.org/pipermail/cmake/2003-June/003953.html # hate at http://itk.org/Bug/view.php?id=6647 # yacked and brought out of 2003 by rcurtin if (UNIX) # since it's unix-specific we will use bash add_custom_target (distclean @echo cleaning ${FASTLIB_SOURCE_DIR} for source distribution) add_custom_command(TARGET distclean COMMAND make ARGS clean COMMAND find ARGS ${FASTLIB_SOURCE_DIR} -iname CMakeCache.txt -delete COMMAND find ARGS ${FASTLIB_SOURCE_DIR} -iname cmake_install.cmake -delete COMMAND find ARGS ${FASTLIB_SOURCE_DIR} -iname Makefile -delete COMMAND find ARGS ${FASTLIB_SOURCE_DIR} -depth -type d -iname CMakeFiles -exec rm -rf {} \; COMMAND rm ARGS -rf bin lib include VERBATIM ) endif() mlpack-2.2.5/CMake/allexec2man.sh000077500000000000000000000011501315013601400164700ustar00rootroot00000000000000#!/bin/bash # # Convert all of the executables in this directory that are not tests to man # pages in the given directory. # # Usage: # allexec2man.sh /full/path/of/exec2man.sh output_directory/ # # For the executable 'cheese', the file 'cheese.1.gz' will be created in the # output directory. exec2man="$1" outdir="$2" mkdir -p "$outdir" for program in `find . -perm /u=x,g=x,o=x | \ grep -v '[.]$' | \ grep -v '_test$' | \ sed 's|^./||'`; do echo "Generating man page for $program..."; "$1" "$program" "$outdir/$program.1" gzip -f "$outdir/$program.1" done mlpack-2.2.5/CMake/exec2man.sh000077500000000000000000000065451315013601400160140ustar00rootroot00000000000000#!/bin/bash # Convert the output of an MLPACK executable into a man page. This assumes that # the CLI subsystem is used to output help, that the executable is properly # documented, and that the program is run in the directory that the executable # is in. Usually, this is used by CMake on Linux/UNIX systems to generate the # man pages. # # Usage: # exec2man.sh executable_name output_file_name # # No warranties... # # @author Ryan Curtin name="$1" output="$2" # Generate the synopsis. # First, required options. reqoptions=`./"$name" -h | \ awk '/Required options:/,/Options:/' | \ grep '^ --' | \ sed 's/^ --/--/' | \ sed 's/^--[A-Za-z0-9_-]* (\(-[A-Za-z0-9]\))/\1/' | \ sed 's/\(^-[A-Za-z0-9]\) [^\[].*/\1/' | \ sed 's/\(^-[A-Za-z0-9] \[[A-Za-z0-9]*\]\) .*/\1/' | \ sed 's/\(^--[A-Za-z0-9_-]*\) [^[].*/\1/' | \ sed 's/\(^--[A-Za-z0-9_-]* \[[A-Za-z0-9]*\]\) [^[].*/\1/' | \ tr '\n' ' ' | \ sed 's/\[//g' | \ sed 's/\]//g'` # Then, regular options. 
options=`./"$name" -h | \ awk '/Options:/,/For further information,/' | \ grep '^ --' | \ sed 's/^ --/--/' | \ grep -v -- '--help' | \ grep -v -- '--info' | \ grep -v -- '--verbose' | \ sed 's/^--[A-Za-z0-9_-]* (\(-[A-Za-z0-9]\))/\1/' | \ sed 's/\(^-[A-Za-z0-9]\) [^\[].*/\1/' | \ sed 's/\(^-[A-Za-z0-9] \[[A-Za-z0-9]*\]\) .*/\1/' | \ sed 's/\(^--[A-Za-z0-9_-]*\) [^[].*/\1/' | \ sed 's/\(^--[A-Za-z0-9_-]* \[[A-Za-z0-9]*\]\) [^[].*/\1/' | \ tr '\n' ' ' | \ sed 's/\[//g' | \ sed 's/\]//g' | \ sed 's/\(-[A-Za-z0-9]\)\( [^a-z]\)/\[\1\]\2/g' | \ sed 's/\(--[A-Za-z0-9_-]*\)\( [^a-z]\)/\[\1\]\2/g' | \ sed 's/\(-[A-Za-z0-9] [a-z]*\) /\[\1\] /g' | \ sed 's/\(--[A-Za-z0-9_-]* [a-z]*\) /\[\1\] /g'` synopsis="$name [-h] [-v] $reqoptions $options"; # Preview the whole thing first. #./$name -h | \ # awk -v syn="$synopsis" \ # '{ if (NR == 1) print "NAME\n '$name' - "tolower($0)"\nSYNOPSIS\n "syn" \nDESCRIPTION\n" ; else print } ' | \ # sed '/^[^ ]/ y/qwertyuiopasdfghjklzxcvbnm:/QWERTYUIOPASDFGHJKLZXCVBNM /' | \ # txt2man -T -P mlpack -t $name -d 1 # Now do it. # The awk script is a little ugly, but it is meant to format parameters # correctly so that the entire description of the parameter is on one line (this # helps avoid 'man' warnings). # The sed line at the end removes accidental macros from the output, replacing # single-quotes at the beginning of a line with the troff escape code \(aq. ./"$name" -h | \ sed 's/^For further information/Additional Information\n\n For further information/' | \ sed 's/^consult the documentation/ consult the documentation/' | \ sed 's/^distribution of MLPACK./ distribution of MLPACK./' | \ awk -v syn="$synopsis" \ '{ if (NR == 1) print "NAME\n '"$name"' - "tolower($0)"\nSYNOPSIS\n "syn" \nDESCRIPTION\n" ; else print } ' | \ sed '/^[^ ]/ y/qwertyuiopasdfghjklzxcvbnm:/QWERTYUIOPASDFGHJKLZXCVBNM /' | \ sed 's/ / /g' | \ awk '/NAME/,/REQUIRED OPTIONS/ { print; } /ADDITIONAL INFORMATION/,0 { print; } /REQUIRED OPTIONS/,/ADDITIONAL INFORMATION/ { if (!/REQUIRED_OPTIONS/ && !/OPTIONS/ && !/ADDITIONAL INFORMATION/) { if (/ --/) { printf "\n" } sub(/^[ ]*/, ""); sub(/ [ ]*/, " "); printf "%s ", $0; } else { if (!/REQUIRED OPTIONS/ && !/ADDITIONAL INFORMATION/) { print "\n"$0; } } }' | \ sed 's/ ADDITIONAL INFORMATION/\n\nADDITIONAL INFORMATION/' | \ txt2man -P mlpack -t "$name" -d 1 | \ sed "s/^'/\\\\(aq/" > "$output" mlpack-2.2.5/CMake/mlpack.pc.in000066400000000000000000000003021315013601400161340ustar00rootroot00000000000000Name: mlpack Description: scalable C++ machine learning library URL: http://www.mlpack.org/ Version: @MLPACK_VERSION_STRING@ Cflags: @MLPACK_INCLUDE_DIRS_STRING@ Libs: @MLPACK_LIBRARIES_STRING@ mlpack-2.2.5/CMake/mlpack_coverage.in000077500000000000000000000106121315013601400174160ustar00rootroot00000000000000#!/bin/bash # This script gets the test coverage for mlpack_test. test_case="ALL" gcov_loc="" token="" clean=true current_log_file=`date +'%Y.%h.%d:%H:%M:%S-coverage.log'` current_coverage_file=`date +'%Y.%h.%d:%H:%M:%S-coverage.info'` max_cov_count=50000 # default directories root_dir="../" # Extract arguments. 
for i in "$@" do case $i in -h|--help) echo "Usage: mlpack_coverage --help|-h" echo " mlpack_coverage [-r=test_suite] [-g=gcov_tool_location]" echo " [--token=coveralls_token]" echo "Optional parameters:" echo " -n|--no_test Do not run test before coverage computation" echo " -r|--run_test Run tests with specific test suite" echo " --no_clean Do not remove existing gcda file" echo " -g|--gcov_tool_location Gcov location if not default" echo " -t|--token Upload to coveralls with given token" echo " --max_cov_count Max line coverage count (default 50000)" echo " --root_dir Set the root directory from which gcov will be called. (default ../)" exit 0 shift ;; -n|--no_test) test_case="" shift ;; -r=*|--run_test=*) test_case="${i#*=}" shift # past argument=value ;; --no_clean) clean=false shift ;; -g=*|--gcov_tool_location=*) gcov_loc="${i#*=}" shift # past argument=value ;; -t=*|--token=*) token="${i#*=}" shift # past argument=value ;; --max_cov_count) max_cov_count="${i#*=}" shift ;; --root_dir=*) root_dir="${i#*=}" shift ;; *) # unknown option ;; esac done if [ "$clean" = true ]; then echo "Deleting existing coverage data..." find ./ -name "*.gcda" -type f -delete fi # Initial pass. echo "Generating primary coverage report." [[ -d ./coveragehistory/ ]] || mkdir coveragehistory lcov -b . -c -i -d ./ -o .coverage.wtest.base > ./coveragehistory/$current_log_file # Run the tests. if [ "$test_case" = "ALL" ]; then echo "Running all the tests..." @CMAKE_BINARY_DIR@/bin/mlpack_test elif ! [ "$test_case" = "" ]; then echo "Running test suite: $test_case" @CMAKE_BINARY_DIR@/bin/mlpack_test --run_test=$test_case fi # Generate coverage based on executed tests. echo "Computing coverage..." if [ "$gcov_loc" = "" ]; then lcov -b . -c -d ./ -o .coverage.wtest.run >> ./coveragehistory/$current_log_file else lcov -b . -c -d ./ -o .coverage.wtest.run --gcov-tool=$gcov_loc >> ./coveragehistory/$current_log_file fi echo "Filtering coverage files..." # Clear negative entries in coverage file sed -E 's/-([0-9]+)/$max_cov_count/g' -i .coverage.wtest.run # Merge coverage tracefiles. lcov -a .coverage.wtest.base -a .coverage.wtest.run -o .coverage.total >> ./coveragehistory/$current_log_file # Filtering, extracting project files. lcov -e .coverage.total "@CMAKE_CURRENT_SOURCE_DIR@/src/mlpack/*" -o .coverage.total.filtered >> ./coveragehistory/$current_log_file # Filtering, removing test-files and main.cpp. lcov -r .coverage.total.filtered "@CMAKE_CURRENT_SOURCE_DIR@/src/mlpack/*/*_main.cpp" -o .coverage.total.filtered >> ./coveragehistory/$current_log_file lcov -r .coverage.total.filtered "@CMAKE_CURRENT_SOURCE_DIR@/src/mlpack/tests/*" -o .coverage.total.filtered >> ./coveragehistory/$current_log_file # Remove untestable files. lcov -r .coverage.total.filtered "@CMAKE_CURRENT_SOURCE_DIR@/src/mlpack/core/util/gitversion.hpp" -o .coverage.total.filtered >> ./coveragehistory/$current_log_file lcov -r .coverage.total.filtered "@CMAKE_CURRENT_SOURCE_DIR@/src/mlpack/core/util/arma_config.hpp" -o .coverage.total.filtered >> ./coveragehistory/$current_log_file # Extra: Replace /build/ with /src/ to unify directories. cat .coverage.total.filtered > .coverage.total # Extra: Clear up previous data, create html folder. if [[ -d ./coverage/ ]] ; then rm -rf ./coverage/* else mkdir coverage fi # Step 9: Generate webpage. genhtml -o ./coverage/ .coverage.total # Extra: Preserve coverage file in coveragehistory folder. 
coverage_file=$current_coverage_file cp .coverage.total ./coveragehistory/$current_coverage_file # Clean temporary coverage files. #rm .coverage.* # Upload the result to coveralls if a token is provided. if ! [ "$token" = "" ]; then cpp-coveralls -n -r $root_dir -b $root_dir -l ./coveragehistory/$current_coverage_file -t "$token" --max-cov-count $max_cov_count fi mlpack-2.2.5/CMakeLists.txt000066400000000000000000000522451315013601400155270ustar00rootroot00000000000000cmake_minimum_required(VERSION 2.8.5) project(mlpack C CXX) # First, define all the compilation options. Debugging and profiling are off by # default; developers will likely want to enable them. option(DEBUG "Compile with debugging information." OFF) option(PROFILE "Compile with profiling information." OFF) option(ARMA_EXTRA_DEBUG "Compile with extra Armadillo debugging symbols." OFF) option(MATLAB_BINDINGS "Compile MATLAB bindings if MATLAB is found." OFF) option(TEST_VERBOSE "Run test cases with verbose output." OFF) option(BUILD_TESTS "Build tests." ON) option(BUILD_CLI_EXECUTABLES "Build command-line executables." ON) option(BUILD_SHARED_LIBS "Compile shared libraries (if OFF, static libraries are compiled)." ON) option(BUILD_WITH_COVERAGE "Build with support for code coverage tools (gcc only)." OFF) option(MATHJAX "Use MathJax for HTML Doxygen output (disabled by default)." OFF) option(FORCE_CXX11 "Don't check that the compiler supports C++11, just assume it. Make sure to specify any necessary flag to enable C++11 as part of CXXFLAGS." OFF) enable_testing() # Ensure that we have a C++11 compiler. In newer versions of CMake, this is # done with target_compile_features() when the mlpack library target is added in # src/mlpack/CMakeLists.txt. if ((${CMAKE_MAJOR_VERSION} LESS 3 OR (${CMAKE_MAJOR_VERSION} EQUAL 3 AND ${CMAKE_MINOR_VERSION} LESS 1)) AND NOT FORCE_CXX11) # Older versions of CMake do not support target_compile_features(), so we have # to use something kind of hacky. include(CMake/CXX11.cmake) check_for_cxx11_compiler(HAS_CXX11) if(NOT HAS_CXX11) message(FATAL_ERROR "No C++11 compiler available!") endif() enable_cxx11() endif () # Otherwise, we may have to set the C++11 mode after the mlpack target is # defined. # Include modules in the CMake directory. set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/CMake") # This is as yet unused. #option(PGO "Use profile-guided optimization if not a debug build" ON) # Set the CFLAGS and CXXFLAGS depending on the options the user specified. # Only GCC-like compilers support -Wextra, and other compilers give tons of # output for -Wall, so we only enable -Wall and -Wextra on GCC-like compilers. if(CMAKE_COMPILER_IS_GNUCC OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -ftemplate-depth=1000") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Wextra") endif() # These support libraries are used if we need to link against something # specific. This list is a subset of MLPACK_LIBRARIES. set(COMPILER_SUPPORT_LIBRARIES "") # If using clang, we have to link against libc++ depending on the # OS (at least on some systems). Further, gcc sometimes optimizes calls to # math.h functions, making -lm unnecessary with gcc, but it may still be # necessary with clang. if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") if (APPLE) # Detect the OS X version: use '/usr/bin/sw_vers -productVersion' to # extract V from '10.V.x'.
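# (For example, sw_vers prints a string like "10.8.4" -- the value here is illustrative -- and the REGEX REPLACE below keeps only the minor version, "8".)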
exec_program(/usr/bin/sw_vers ARGS -productVersion OUTPUT_VARIABLE MACOSX_VERSION_RAW) string(REGEX REPLACE "10\\.([0-9]+).*" "\\1" MACOSX_VERSION "${MACOSX_VERSION_RAW}") # OS X Lion (10.7) and OS X Mountain Lion (10.8) don't automatically # select the right stdlib. if(${MACOSX_VERSION} LESS 9) set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -stdlib=libc++") set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -stdlib=libc++") set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -stdlib=libc++") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++") endif() endif() # Link everything with -lm. set(COMPILER_SUPPORT_LIBRARIES ${COMPILER_SUPPORT_LIBRARIES} "m") set(MLPACK_LIBRARIES ${MLPACK_LIBRARIES} "m") endif() # Set up the build for test coverage. if(BUILD_WITH_COVERAGE) # Currently, coverage only works with GNU g++. if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") # Find gcov and lcov. find_program(GCOV gcov) find_program(LCOV lcov) if(NOT GCOV) message(FATAL_ERROR "gcov not found! gcov is required when BUILD_WITH_COVERAGE=ON.") endif() set(MLPACK_LIBRARIES ${MLPACK_LIBRARIES} "supc++") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage -fno-inline -fno-inline-small-functions -fno-default-inline -fprofile-arcs -fkeep-inline-functions") message(STATUS "Adding debug compile options for code coverage.") # Remove optimizations for better line coverage. set(DEBUG ON) if(LCOV) configure_file(CMake/mlpack_coverage.in mlpack_coverage @ONLY) add_custom_target(mlpack_coverage DEPENDS mlpack_test COMMAND ${PROJECT_BINARY_DIR}/mlpack_coverage) else() message(WARNING "'lcov' not found; local coverage report is disabled. " "Install 'lcov' and rerun cmake to generate local coverage report.") endif() else() message(FATAL_ERROR "BUILD_WITH_COVERAGE only works with the GNU toolchain.") endif() endif() # For clock_gettime(). if (UNIX AND NOT APPLE) set(MLPACK_LIBRARIES ${MLPACK_LIBRARIES} "rt") endif () # Debugging CFLAGS. Turn optimizations off; turn debugging symbols on. if(DEBUG) if (NOT MSVC) add_definitions(-DDEBUG) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -ftemplate-backtrace-limit=0") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99 -g -O0") else() set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /bigobj") endif() # mlpack uses its own mlpack::backtrace class, based on the Binary File # Descriptor library (libbfd) and the Linux dynamic loader (libdl); a more # portable version may come in the future. if(CMAKE_SYSTEM_NAME STREQUAL "Linux") find_package(Bfd) find_package(LibDL) if(LIBBFD_FOUND AND LIBDL_FOUND) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -rdynamic") set(MLPACK_INCLUDE_DIRS ${MLPACK_INCLUDE_DIRS} ${LIBBFD_INCLUDE_DIRS} ${LIBDL_INCLUDE_DIRS}) set(MLPACK_LIBRARIES ${MLPACK_LIBRARIES} ${LIBBFD_LIBRARIES} ${LIBDL_LIBRARIES}) add_definitions(-DHAS_BFD_DL) else() message(WARNING "libBFD and/or libDL could not be found!") endif() endif() else() add_definitions(-DARMA_NO_DEBUG) add_definitions(-DNDEBUG) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99 -O3") endif() # Profiling CFLAGS. Turn profiling information on. if(CMAKE_COMPILER_IS_GNUCC AND PROFILE) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -pg") set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -pg") endif() # If the user asked for running test cases with verbose output, turn that on. if(TEST_VERBOSE) add_definitions(-DTEST_VERBOSE) endif() # If the user asked for extra Armadillo debugging output, turn that on.
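# (ARMA_EXTRA_DEBUG makes Armadillo emit low-level debugging output about its internal operations at runtime; it is extremely verbose and mainly useful when debugging mlpack itself.)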
if(ARMA_EXTRA_DEBUG) add_definitions(-DARMA_EXTRA_DEBUG) endif() # Now, find the libraries we need to compile against. Several variables can be # set to manually specify the directory in which each of these libraries # resides. # ARMADILLO_LIBRARY - location of libarmadillo.so / armadillo.lib # ARMADILLO_INCLUDE_DIR - directory containing the armadillo header # ARMADILLO_INCLUDE_DIRS - directories necessary for Armadillo includes # BOOST_ROOT - root of Boost installation # BOOST_INCLUDEDIR - include directory for Boost # BOOST_LIBRARYDIR - library directory for Boost # MATHJAX_ROOT - root of MathJax installation find_package(Armadillo 4.200.0 REQUIRED) # If Armadillo was compiled without ARMA_64BIT_WORD and we are on a 64-bit # system (where size_t will be 64 bits), suggest to the user that they should # compile Armadillo with 64-bit words. Note that with Armadillo 5.000.0 and # newer, ARMA_64BIT_WORD is enabled by default. if(CMAKE_SIZEOF_VOID_P EQUAL 8) # Check the version, to see if ARMA_64BIT_WORD is enabled by default. set(ARMA_HAS_64BIT_WORD 0) if(NOT (${ARMADILLO_VERSION_MAJOR} LESS 5)) set(ARMA_HAS_64BIT_WORD 1) else() # Can we open the configuration file? If not, issue a warning. if(NOT EXISTS "${ARMADILLO_INCLUDE_DIR}/armadillo_bits/config.hpp") message(WARNING "Armadillo configuration file " "(${ARMADILLO_INCLUDE_DIR}/armadillo_bits/config.hpp) does not exist!") else() # We are on a 64-bit system. Does Armadillo have ARMA_64BIT_WORD enabled? file(READ "${ARMADILLO_INCLUDE_DIR}/armadillo_bits/config.hpp" ARMA_CONFIG) string(REGEX MATCH "[\r\n][ ]*#define ARMA_64BIT_WORD" ARMA_HAS_64BIT_WORD_PRE "${ARMA_CONFIG}") string(LENGTH "${ARMA_HAS_64BIT_WORD_PRE}" ARMA_HAS_64BIT_WORD) endif() endif() if(ARMA_HAS_64BIT_WORD EQUAL 0) message(WARNING "This is a 64-bit system, but Armadillo was compiled " "without 64-bit index support. Consider recompiling Armadillo with " "ARMA_64BIT_WORD to enable 64-bit indices (large matrix support). " "mlpack will still work without ARMA_64BIT_WORD defined, but will not " "scale to matrices with more than 4 billion elements.") endif() else() # If we are on a 32-bit system, we must manually specify the size of the word # to be 32 bits, since otherwise Armadillo will produce a warning that it is # disabling 64-bit support. if (CMAKE_SIZEOF_VOID_P EQUAL 4) add_definitions(-DARMA_32BIT_WORD) endif () endif() # On Windows, Armadillo should be using LAPACK and BLAS but we still need to # link against them. We don't want to use the FindLAPACK or FindBLAS modules # because then we are required to have a FORTRAN compiler (argh!), so we will # try to find LAPACK and BLAS ourselves, using a slightly modified variant of # the script Armadillo uses to find these. if (WIN32) find_library(LAPACK_LIBRARY NAMES lapack liblapack lapack_win32_MT lapack_win32 PATHS "C:/Program Files/Armadillo" PATH_SUFFIXES "examples/lib_win32/") if (NOT LAPACK_LIBRARY) message(FATAL_ERROR "Cannot find LAPACK library (.lib)!") endif () find_library(BLAS_LIBRARY NAMES blas libblas blas_win32_MT blas_win32 PATHS "C:/Program Files/Armadillo" PATH_SUFFIXES "examples/lib_win32/") if (NOT BLAS_LIBRARY) message(FATAL_ERROR "Cannot find BLAS library (.lib)!") endif () # Piggyback LAPACK and BLAS linking into the Armadillo link. set(ARMADILLO_LIBRARIES ${ARMADILLO_LIBRARIES} ${BLAS_LIBRARY} ${LAPACK_LIBRARY}) endif () # Include directories for the previous dependencies.
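# (Everything found so far is accumulated into MLPACK_INCLUDE_DIRS and MLPACK_LIBRARIES; these lists are used when building every mlpack target, and later when generating the pkg-config file.)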
set(MLPACK_INCLUDE_DIRS ${MLPACK_INCLUDE_DIRS} ${ARMADILLO_INCLUDE_DIRS}) set(MLPACK_LIBRARIES ${MLPACK_LIBRARIES} ${ARMADILLO_LIBRARIES}) # Unfortunately, this configuration variable is necessary and will need to be # updated as time goes on and new versions are released. set(Boost_ADDITIONAL_VERSIONS "1.49.0" "1.50.0" "1.51.0" "1.52.0" "1.53.0" "1.54.0" "1.55.0") find_package(Boost 1.49 COMPONENTS program_options unit_test_framework serialization REQUIRED ) link_directories(${Boost_LIBRARY_DIRS}) # In Visual Studio, automatic linking is performed, so we don't need to worry # about it. Clear the list of libraries to link against and let Visual Studio # handle it. if (MSVC) link_directories(${Boost_LIBRARY_DIRS}) set(Boost_LIBRARIES "") endif () set(MLPACK_INCLUDE_DIRS ${MLPACK_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS}) set(MLPACK_LIBRARIES ${MLPACK_LIBRARIES} ${Boost_LIBRARIES}) set(MLPACK_LIBRARY_DIRS ${MLPACK_LIBRARY_DIRS} ${Boost_LIBRARY_DIRS}) # For the Boost testing framework (will have no effect on non-testing # executables). This specifies to Boost that we are dynamically linking to the # Boost test library. add_definitions(-DBOOST_TEST_DYN_LINK) # Detect OpenMP support in the compiler. If the compiler supports OpenMP, the # necessary flags are added to the C and C++ compile flags, and the HAS_OPENMP # definition is added for compilation. # # This way we can skip calls to functions defined in omp.h with code like: # #ifdef HAS_OPENMP # { # ... openMP code here ... # } # #endif find_package(OpenMP) if (OPENMP_FOUND) add_definitions(-DHAS_OPENMP) set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") else () # Disable warnings about unknown OpenMP pragmas. set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unknown-pragmas") endif () # Create a 'distclean' target in case the user is using an in-source build for # some reason. include(CMake/TargetDistclean.cmake OPTIONAL) include_directories(${CMAKE_SOURCE_DIR}) include_directories(${MLPACK_INCLUDE_DIRS}) # On Windows, things end up under Debug/ or Release/. if (WIN32) set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}) set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}) set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}) else () # If not on Windows, put them under more standard UNIX-like places. This is # necessary, otherwise they would all end up in # ${CMAKE_BINARY_DIR}/src/mlpack/methods/... or somewhere else random like # that. set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib/) set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin/) set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib/) endif () # Determine whether or not this is a git repository, so that we can set the # version number if necessary. find_package(Git) set (USING_GIT "NO") if (GIT_FOUND) # Run 'git rev-parse HEAD' to find out if this is a working copy. If the # return code is not 0, then it isn't.
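# (A nonzero return code simply means this is a release tarball rather than a git clone; in that case gitversion.hpp is not generated and the release version number baked into the source is used instead.)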
execute_process(COMMAND ${GIT_EXECUTABLE} rev-parse HEAD WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} OUTPUT_VARIABLE MLPACK_TMP_REV_INFO ERROR_VARIABLE MLPACK_TMP_REV_INFO_ERROR RESULT_VARIABLE MLPACK_TMP_REV_INFO_RESULT OUTPUT_STRIP_TRAILING_WHITESPACE) if (${MLPACK_TMP_REV_INFO_RESULT} EQUAL 0) set (USING_GIT "YES") add_definitions(-DMLPACK_GIT_VERSION) include(CMake/CreateGitVersionHeader.cmake) add_custom_target(mlpack_gitversion ALL COMMAND ${CMAKE_COMMAND} -P CMake/CreateGitVersionHeader.cmake WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} COMMENT "Updating gitversion.hpp (if necessary)") # Add gitversion.hpp to the list of sources. set(MLPACK_SRCS ${MLPACK_SRCS} "${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/gitversion.hpp") endif () endif () # Create a target to generate arma_config.hpp, which is used to warn the user # when they are linking against mlpack with an inconsistent Armadillo # configuration. include(CMake/CreateArmaConfigInfo.cmake) add_custom_target(mlpack_arma_config ALL COMMAND ${CMAKE_COMMAND} -D ARMADILLO_INCLUDE_DIR="${ARMADILLO_INCLUDE_DIR}" -D ARMADILLO_VERSION_MAJOR="${ARMADILLO_VERSION_MAJOR}" -P CMake/CreateArmaConfigInfo.cmake WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} COMMENT "Updating arma_config.hpp (if necessary)") set(MLPACK_SRCS ${MLPACK_SRCS} "${CMAKE_CURRENT_SOURCE_DIR}/src/mlpack/core/util/arma_config.hpp") # Recurse into the rest of the project. add_subdirectory(src/mlpack) # If we need to keep gitversion.hpp up to date, then make sure the mlpack target # depends on it. if (USING_GIT STREQUAL "YES") add_dependencies(mlpack mlpack_gitversion) endif () # Make the mlpack target depend on mlpack_arma_config (we couldn't do this # before the add_subdirectory() call because the mlpack target didn't exist # before that). add_dependencies(mlpack mlpack_arma_config) # Make a target to generate the documentation. If Doxygen isn't installed, this # target will simply be unavailable. find_package(Doxygen) if (DOXYGEN_FOUND) if (MATHJAX) find_package(MathJax) if (NOT MATHJAX_FOUND) message(STATUS "Using MathJax from the MathJax Content Delivery Network. " "Formulas will not render without an internet connection.") endif () endif () # Preprocess the Doxyfile. This is done before 'make doc'. add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/Doxyfile PRE_BUILD COMMAND ${CMAKE_COMMAND} -D DESTDIR="${CMAKE_BINARY_DIR}" -D MATHJAX="${MATHJAX}" -D MATHJAX_FOUND="${MATHJAX_FOUND}" -D MATHJAX_PATH="${MATHJAX_PATH}" -P "${CMAKE_CURRENT_SOURCE_DIR}/CMake/GenerateDoxyfile.cmake" WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile" COMMENT "Creating Doxyfile to generate Doxygen documentation" ) # Generate documentation. add_custom_target(doc COMMAND "${DOXYGEN_EXECUTABLE}" "${CMAKE_BINARY_DIR}/Doxyfile" DEPENDS "${CMAKE_BINARY_DIR}/Doxyfile" WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" COMMENT "Generating API documentation with Doxygen" ) install(DIRECTORY ${CMAKE_BINARY_DIR}/doc/html DESTINATION share/doc/mlpack COMPONENT doc OPTIONAL ) endif () # Make a target to generate the man page documentation, but only if we are on a # UNIX-like system. if (UNIX) find_program(TXT2MAN txt2man) # It's not a requirement that we make man pages. if (NOT TXT2MAN) message(WARNING "txt2man not found; man pages will not be generated.") else () # We have the tools. We can make them.
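# (allexec2man.sh, defined in CMake/allexec2man.sh earlier in this tree, loops over every executable in bin/ whose name does not end in '_test', runs exec2man.sh on each one, and gzips the resulting man pages into the given output directory, here share/man.)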
add_custom_target(man ALL ${CMAKE_CURRENT_SOURCE_DIR}/CMake/allexec2man.sh ${CMAKE_CURRENT_SOURCE_DIR}/CMake/exec2man.sh ${CMAKE_BINARY_DIR}/share/man WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/bin DEPENDS mlpack_adaboost mlpack_approx_kfn mlpack_kfn mlpack_knn mlpack_krann mlpack_cf mlpack_decision_stump mlpack_decision_tree mlpack_det mlpack_emst mlpack_fastmks mlpack_gmm_train mlpack_gmm_probability mlpack_gmm_generate mlpack_hmm_generate mlpack_hmm_loglik mlpack_hmm_train mlpack_hmm_viterbi mlpack_hoeffding_tree mlpack_kernel_pca mlpack_kmeans mlpack_lars mlpack_linear_regression mlpack_local_coordinate_coding mlpack_logistic_regression mlpack_lsh mlpack_mean_shift mlpack_nbc mlpack_nca mlpack_nmf mlpack_pca mlpack_perceptron mlpack_radical mlpack_range_search mlpack_softmax_regression mlpack_sparse_coding COMMENT "Generating man pages from built executables." ) # Set the rules to install the documentation. install(DIRECTORY ${CMAKE_BINARY_DIR}/share/man/ DESTINATION share/man/man1/) endif () endif () # Create the pkg-config file, if we have pkg-config. find_package(PkgConfig) if (PKG_CONFIG_FOUND) # mlpack.pc must be generated as a separate target, otherwise it is possible # that the given version could be out of date. We don't need to worry about # the library or include directories changing, because CMake will re-run this # portion of the code whenever any of those changes. But the version must be # re-extracted every time the library is built. # So, we have to parse our list of library directories, libraries, and include # directories in order to get the correct line to give to pkg-config. # Next, adapt the list of include directories. foreach (incldir ${MLPACK_INCLUDE_DIRS}) # Filter out some obviously unnecessary directories. if (NOT "${incldir}" STREQUAL "/usr/include") set(MLPACK_INCLUDE_DIRS_STRING "${MLPACK_INCLUDE_DIRS_STRING} -I${incldir}") endif () endforeach () # Add the install directory too. set(MLPACK_INCLUDE_DIRS_STRING "${MLPACK_INCLUDE_DIRS_STRING} -I${CMAKE_INSTALL_PREFIX}/include/") # Create the list of link directories. set(MLPACK_LIBRARIES_LIST) foreach (linkdir ${MLPACK_LIBRARY_DIRS}) list(APPEND MLPACK_LIBRARIES_LIST "-L${linkdir}") endforeach () foreach(lib ${MLPACK_LIBRARIES}) string(SUBSTRING "${lib}" 0 1 first) if ("${first}" STREQUAL "/") # We need to split the directory and the library. string(REGEX REPLACE "(.*/)[^/]*$" "\\1" library_dir "${lib}") string(REGEX REPLACE ".*/lib([^/]*).so.*$" "\\1" library_name "${lib}") list(APPEND MLPACK_LIBRARIES_LIST "-L${library_dir}") list(APPEND MLPACK_LIBRARIES_LIST "-l${library_name}") else () list(APPEND MLPACK_LIBRARIES_LIST "-l${lib}") endif () endforeach () # Don't forget to add mlpack as a dependency too. list(APPEND MLPACK_LIBRARIES_LIST "-L${CMAKE_INSTALL_PREFIX}/lib/") list(APPEND MLPACK_LIBRARIES_LIST "-lmlpack") # Filter duplicate dependencies and directories. list(REMOVE_DUPLICATES MLPACK_LIBRARIES_LIST) # Filter out known unnecessary directories. list(REMOVE_ITEM MLPACK_LIBRARIES_LIST "-L/usr/lib" "-L/usr/lib/" "-L/usr/lib/x86_64-linux-gnu" "-L/usr/lib/x86_64-linux-gnu/" "-L/usr/lib/i386-linux-gnu" "-L/usr/lib/i386-linux-gnu/") string(REPLACE ";" " " MLPACK_LIBRARIES_STRING "${MLPACK_LIBRARIES_LIST}") # Do first stage of configuration. 
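# (The version string is deliberately left as the literal placeholder @MLPACK_VERSION_STRING@ at this stage; the 'pkgconfig' target below substitutes the real version at build time via GeneratePkgConfig.cmake, so that mlpack.pc always carries the version of the library that was actually built.)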
set(MLPACK_VERSION_STRING "@MLPACK_VERSION_STRING@") configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/CMake/mlpack.pc.in ${CMAKE_BINARY_DIR}/CMake/mlpack.pc.in.partial @ONLY) add_custom_target(pkgconfig ALL ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_SOURCE_DIR}/CMake/GeneratePkgConfig.cmake" DEPENDS mlpack_headers COMMENT "Generating mlpack.pc (pkg-config) file.") # Do we need a different directory? install(FILES ${CMAKE_CURRENT_BINARY_DIR}/lib/pkgconfig/mlpack.pc DESTINATION lib/pkgconfig/) endif () mlpack-2.2.5/COPYRIGHT.txt000066400000000000000000000124261315013601400150770ustar00rootroot00000000000000Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: mlpack Upstream-Contact: Ryan Curtin Source: http://www.mlpack.org/ git://github.com/mlpack/mlpack.git Files: * Copyright: Copyright 2008-2017, Ryan Curtin Copyright 2008-2013, Bill March Copyright 2008-2012, Dongryeol Lee Copyright 2008-2013, Nishant Mehta Copyright 2008-2013, Parikshit Ram Copyright 2010-2012, James Cline Copyright 2010-2013, Sterling Peet Copyright 2011-2012, Matthew Amidon Copyright 2011-2012, Neil Slagle Copyright 2011, Ajinkya Kale Copyright 2011, Vlad Grantcharov Copyright 2011, Noah Kauffman Copyright 2012, Rajendran Mohan Copyright 2012, Trironk Kiatkungwanglai Copyright 2012, Patrick Mason Copyright 2013-2017, Marcus Edel Copyright 2013, Mudit Raj Gupta Copyright 2013, Sumedh Ghaisas Copyright 2014, Michael Fox Copyright 2014, Ryan Birmingham Copyright 2014, Siddharth Agrawal Copyright 2014, Saheb Motiani Copyright 2014, Yash Vadalia Copyright 2014, Abhishek Laddha Copyright 2014, Vahab Akbarzadeh Copyright 2014, Andrew Wells Copyright 2014, Zhihao Lou Copyright 2014, Udit Saxena Copyright 2014-2015, Stephen Tu Copyright 2014-2015, Jaskaran Singh Copyright 2015, Shangtong Zhang Copyright 2015, Hritik Jain Copyright 2015, Vladimir Glazachev Copyright 2015, QiaoAn Chen Copyright 2015, Janzen Brewer Copyright 2015, Trung Dinh Copyright 2015-2017, Tham Ngap Wei Copyright 2015, Grzegorz Krajewski Copyright 2015, Joseph Mariadassou Copyright 2015, Pavel Zhigulin Copyright 2016, Andy Fang Copyright 2016, Barak Pearlmutter Copyright 2016, Ivari Horm Copyright 2016, Dhawal Arora Copyright 2016, Alexander Leinoff Copyright 2016, Palash Ahuja Copyright 2016, Yannis Mentekidis Copyright 2016, Ranjan Mondal Copyright 2016, Mikhail Lozhnikov Copyright 2016, Marcos Pividori Copyright 2016, Keon Kim Copyright 2016, Nilay Jain Copyright 2016, Peter Lehner Copyright 2016, Anuraj Kanodia Copyright 2016, Ivan Georgiev Copyright 2016, Shikhar Bhardwaj Copyright 2016, Yashu Seth Copyright 2016, Mike Izbicki Copyright 2017, Sudhanshu Ranjan Copyright 2017, Piyush Jaiswal Copyright 2017, Dinesh Raj Copyright 2017, Lakshya Agrawal Copyright 2017, Praveen Ch License: BSD-3-clause All rights reserved. . Redistribution and use of mlpack in source and binary forms, with or without modification, are permitted provided that the following conditions are met: . 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. . 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. . 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. . 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. mlpack-2.2.5/Doxyfile000066400000000000000000000227571315013601400145040ustar00rootroot00000000000000# Doxyfile 1.4.7 #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- PROJECT_NAME = mlpack PROJECT_NUMBER = 2.2.5 OUTPUT_DIRECTORY = ./doc CREATE_SUBDIRS = NO OUTPUT_LANGUAGE = English USE_WINDOWS_ENCODING = NO BRIEF_MEMBER_DESC = YES REPEAT_BRIEF = YES ABBREVIATE_BRIEF = "The $name class" \ "The $name widget" \ "The $name file" \ is \ provides \ specifies \ contains \ represents \ a \ an \ the ALWAYS_DETAILED_SEC = YES INLINE_INHERITED_MEMB = NO FULL_PATH_NAMES = YES STRIP_FROM_PATH = ./ STRIP_FROM_INC_PATH = SHORT_NAMES = NO JAVADOC_AUTOBRIEF = YES MULTILINE_CPP_IS_BRIEF = NO DETAILS_AT_TOP = YES INHERIT_DOCS = YES SEPARATE_MEMBER_PAGES = NO TAB_SIZE = 2 ALIASES = OPTIMIZE_OUTPUT_FOR_C = NO OPTIMIZE_OUTPUT_JAVA = NO BUILTIN_STL_SUPPORT = NO DISTRIBUTE_GROUP_DOC = NO SUBGROUPING = YES #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- EXTRACT_ALL = YES EXTRACT_PRIVATE = NO EXTRACT_STATIC = YES EXTRACT_LOCAL_CLASSES = NO EXTRACT_LOCAL_METHODS = NO HIDE_UNDOC_MEMBERS = NO HIDE_UNDOC_CLASSES = NO HIDE_FRIEND_COMPOUNDS = YES HIDE_IN_BODY_DOCS = NO INTERNAL_DOCS = YES CASE_SENSE_NAMES = YES HIDE_SCOPE_NAMES = YES SHOW_INCLUDE_FILES = NO INLINE_INFO = YES SORT_MEMBER_DOCS = YES SORT_BRIEF_DOCS = YES SORT_BY_SCOPE_NAME = YES SORT_MEMBERS_CTORS_1ST = YES GENERATE_TODOLIST = NO GENERATE_TESTLIST = NO GENERATE_BUGLIST = YES GENERATE_DEPRECATEDLIST= NO ENABLED_SECTIONS = MAX_INITIALIZER_LINES = 30 SHOW_USED_FILES = YES SHOW_DIRECTORIES = YES FILE_VERSION_FILTER = #--------------------------------------------------------------------------- # configuration options related to warning and progress messages #--------------------------------------------------------------------------- QUIET = NO WARNINGS = YES WARN_IF_UNDOCUMENTED = YES WARN_IF_DOC_ERROR = YES WARN_NO_PARAMDOC = YES WARN_FORMAT = "$file:$line: $text" WARN_LOGFILE = #--------------------------------------------------------------------------- # configuration options related to the input files #--------------------------------------------------------------------------- INPUT = ./src/mlpack \ ./doc/guide \ ./doc/tutorials \ ./doc/policies FILE_PATTERNS = *.hpp \ *.cpp \ *.txt RECURSIVE = YES EXCLUDE = EXCLUDE_SYMLINKS = YES EXCLUDE_PATTERNS = */build/* \ */test/* \ */arma_extend/* \ */boost_backport/* \ */.svn/* \ *_impl.cc \ *_impl.h \ *_impl.hpp \ *.cpp \ *.cc \ *_test.cc EXAMPLE_PATH = EXAMPLE_PATTERNS = * 
EXAMPLE_RECURSIVE = NO IMAGE_PATH = INPUT_FILTER = FILTER_PATTERNS = FILTER_SOURCE_FILES = NO #--------------------------------------------------------------------------- # configuration options related to source browsing #--------------------------------------------------------------------------- SOURCE_BROWSER = YES INLINE_SOURCES = NO STRIP_CODE_COMMENTS = YES REFERENCED_BY_RELATION = YES REFERENCES_RELATION = YES REFERENCES_LINK_SOURCE = YES USE_HTAGS = NO VERBATIM_HEADERS = YES #--------------------------------------------------------------------------- # configuration options related to the alphabetical class index #--------------------------------------------------------------------------- ALPHABETICAL_INDEX = YES COLS_IN_ALPHA_INDEX = 1 IGNORE_PREFIX = #--------------------------------------------------------------------------- # configuration options related to the HTML output #--------------------------------------------------------------------------- GENERATE_HTML = YES HTML_OUTPUT = html HTML_FILE_EXTENSION = .html HTML_HEADER = HTML_FOOTER = ./doc/doxygen/footer.html HTML_STYLESHEET = HTML_EXTRA_STYLESHEET = ./doc/doxygen/extra-stylesheet.css HTML_ALIGN_MEMBERS = YES GENERATE_HTMLHELP = NO CHM_FILE = HHC_LOCATION = GENERATE_CHI = NO BINARY_TOC = NO TOC_EXPAND = NO DISABLE_INDEX = NO ENUM_VALUES_PER_LINE = 1 GENERATE_TREEVIEW = NO TREEVIEW_WIDTH = 250 USE_MATHJAX = NO MATHJAX_FORMAT = SVG MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols #--------------------------------------------------------------------------- # configuration options related to the LaTeX output #--------------------------------------------------------------------------- GENERATE_LATEX = YES LATEX_OUTPUT = latex LATEX_CMD_NAME = latex MAKEINDEX_CMD_NAME = makeindex COMPACT_LATEX = NO PAPER_TYPE = letter EXTRA_PACKAGES = amsmath amssymb mathrsfs LATEX_HEADER = PDF_HYPERLINKS = NO USE_PDFLATEX = NO LATEX_BATCHMODE = NO LATEX_HIDE_INDICES = NO FORMULA_FONTSIZE = 50 #--------------------------------------------------------------------------- # configuration options related to the RTF output #--------------------------------------------------------------------------- GENERATE_RTF = NO RTF_OUTPUT = rtf COMPACT_RTF = NO RTF_HYPERLINKS = NO RTF_STYLESHEET_FILE = RTF_EXTENSIONS_FILE = #--------------------------------------------------------------------------- # configuration options related to the man page output #--------------------------------------------------------------------------- GENERATE_MAN = YES MAN_OUTPUT = man MAN_EXTENSION = .3 MAN_LINKS = NO #--------------------------------------------------------------------------- # configuration options related to the XML output #--------------------------------------------------------------------------- GENERATE_XML = NO XML_OUTPUT = xml XML_SCHEMA = XML_DTD = XML_PROGRAMLISTING = YES #--------------------------------------------------------------------------- # configuration options for the AutoGen Definitions output #--------------------------------------------------------------------------- GENERATE_AUTOGEN_DEF = NO #--------------------------------------------------------------------------- # configuration options related to the Perl module output #--------------------------------------------------------------------------- GENERATE_PERLMOD = NO PERLMOD_LATEX = NO PERLMOD_PRETTY = YES PERLMOD_MAKEVAR_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the preprocessor 
#--------------------------------------------------------------------------- ENABLE_PREPROCESSING = YES MACRO_EXPANSION = YES EXPAND_ONLY_PREDEF = NO SEARCH_INCLUDES = YES INCLUDE_PATH = INCLUDE_FILE_PATTERNS = PREDEFINED = EXPAND_AS_DEFINED = SKIP_FUNCTION_MACROS = YES #--------------------------------------------------------------------------- # Configuration::additions related to external references #--------------------------------------------------------------------------- TAGFILES = GENERATE_TAGFILE = ALLEXTERNALS = NO EXTERNAL_GROUPS = YES PERL_PATH = /usr/bin/perl #--------------------------------------------------------------------------- # Configuration options related to the dot tool #--------------------------------------------------------------------------- CLASS_DIAGRAMS = YES HIDE_UNDOC_RELATIONS = YES HAVE_DOT = YES CLASS_GRAPH = YES COLLABORATION_GRAPH = NO GROUP_GRAPHS = YES UML_LOOK = NO TEMPLATE_RELATIONS = YES INCLUDE_GRAPH = YES INCLUDED_BY_GRAPH = YES CALL_GRAPH = NO CALLER_GRAPH = NO GRAPHICAL_HIERARCHY = YES DIRECTORY_GRAPH = YES DOT_IMAGE_FORMAT = png # Hack dark color support in through the dot path. Kind of cheating... DOT_PATH = dot -Gbgcolor=black DOTFILE_DIRS = MAX_DOT_GRAPH_WIDTH = 800 MAX_DOT_GRAPH_HEIGHT = 600 MAX_DOT_GRAPH_DEPTH = 1000 DOT_TRANSPARENT = NO DOT_MULTI_TARGETS = NO GENERATE_LEGEND = YES DOT_CLEANUP = YES #--------------------------------------------------------------------------- # Configuration::additions related to the search engine #--------------------------------------------------------------------------- SEARCHENGINE = YES mlpack-2.2.5/HISTORY.md000066400000000000000000000361531315013601400144540ustar00rootroot00000000000000### mlpack 2.2.5 ###### 2017-08-25 * Compilation fix for some systems (#1082). * Fix PARAM_INT_OUT() (#1100). ### mlpack 2.2.4 ###### 2017-07-18 * Speed and memory improvements for DBSCAN. --single_mode can now be used for situations where previously RAM usage was too high. * Fix bug in CF causing incorrect recommendations. ### mlpack 2.2.3 ###### 2017-05-24 * Bug fix for --predictions_file in mlpack_decision_tree program. ### mlpack 2.2.2 ###### 2017-05-04 * Install backwards-compatibility mlpack_allknn and mlpack_allkfn programs; note they are deprecated and will be removed in mlpack 3.0.0 (#992). * Fix RStarTree bug that surfaced on OS X only (#964). * Small fixes for MiniBatchSGD and SGD and tests. ### mlpack 2.2.1 ###### 2017-04-13 * Compilation fix for mlpack_nca and mlpack_test on older Armadillo versions (#984). ### mlpack 2.2.0 ###### 2017-03-21 * Bugfix for mlpack_knn program (#816). * Add decision tree implementation in methods/decision_tree/. This is very similar to a C4.5 tree learner. * Add DBSCAN implementation in methods/dbscan/. * Add support for multidimensional discrete distributions (#810, #830). * Better output for Log::Debug/Log::Info/Log::Warn/Log::Fatal for Armadillo objects (#895, #928). * Refactor categorical CSV loading with boost::spirit for faster loading (#681). ### mlpack 2.1.1 ###### 2016-12-22 * HMMs now use random initialization; this should fix some convergence issues (#828). * HMMs now initialize emissions according to the distribution of observations (#833). * Minor fix for formatted output (#814). * Fix DecisionStump to properly work with any input type. ### mlpack 2.1.0 ###### 2016-10-31 * Fixed CoverTree to properly handle single-point datasets. * Fixed a bug in CosineTree (and thus QUIC-SVD) that caused split failures for some datasets (#717). 
* Added mlpack_preprocess_describe program, which can be used to print statistics on a given dataset (#742). * Fix prioritized recursion for k-furthest-neighbor search (mlpack_kfn and the KFN class), leading to orders-of-magnitude speedups in some cases. * Bump minimum required version of Armadillo to 4.200.0. * Added simple Gradient Descent optimizer, found in src/mlpack/core/optimizers/gradient_descent/ (#792). * Added approximate furthest neighbor search algorithms QDAFN and DrusillaSelect in src/mlpack/methods/approx_kfn/, with command-line program mlpack_approx_kfn. ### mlpack 2.0.3 ###### 2016-07-21 * Added multiprobe LSH (#691). The parameter 'T' to LSHSearch::Search() can now be used to control the number of extra bins that are probed, as can the -T (--num_probes) option to mlpack_lsh. * Added the Hilbert R tree to src/mlpack/core/tree/rectangle_tree/ (#664). It can be used as the typedef HilbertRTree, and it is now an option in the mlpack_knn, mlpack_kfn, mlpack_range_search, and mlpack_krann command-line programs. * Added the mlpack_preprocess_split and mlpack_preprocess_binarize programs, which can be used for preprocessing code (#650, #666). * Added OpenMP support to LSHSearch and mlpack_lsh (#700). ### mlpack 2.0.2 ###### 2016-06-20 * Added the function LSHSearch::Projections(), which returns an arma::cube with each projection table in a slice (#663). Instead of Projection(i), you should now use Projections().slice(i). * A new constructor has been added to LSHSearch that creates objects using projection tables provided in an arma::cube (#663). * Handle zero-variance dimensions in DET (#515). * Add MiniBatchSGD optimizer (src/mlpack/core/optimizers/minibatch_sgd/) and allow its use in mlpack_logistic_regression and mlpack_nca programs. * Add better backtrace support from Grzegorz Krajewski for Log::Fatal messages when compiled with debugging and profiling symbols. This requires libbfd and libdl to be present during compilation. * CosineTree test fix from Mikhail Lozhnikov (#358). * Fixed HMM initial state estimation (#600). * Changed versioning macros __MLPACK_VERSION_MAJOR, __MLPACK_VERSION_MINOR, and __MLPACK_VERSION_PATCH to MLPACK_VERSION_MAJOR, MLPACK_VERSION_MINOR, and MLPACK_VERSION_PATCH. The old names will remain in place until mlpack 3.0.0. * Renamed mlpack_allknn, mlpack_allkfn, and mlpack_allkrann to mlpack_knn, mlpack_kfn, and mlpack_krann. The mlpack_allknn, mlpack_allkfn, and mlpack_allkrann programs will remain as copies until mlpack 3.0.0. * Add --random_initialization option to mlpack_hmm_train, for use when no labels are provided. * Add --kill_empty_clusters option to mlpack_kmeans and KillEmptyClusters policy for the KMeans class (#595, #596). ### mlpack 2.0.1 ###### 2016-02-04 * Fix CMake to properly detect when MKL is being used with Armadillo. * Minor parameter handling fixes to mlpack_logistic_regression (#504, #505). * Properly install arma_config.hpp. * Memory handling fixes for Hoeffding tree code. * Add functions that allow changing training-time parameters to HoeffdingTree class. * Fix infinite loop in sparse coding test. * Documentation spelling fixes (#501). * Properly handle covariances for Gaussians with large condition number (#496), preventing GMMs from filling with NaNs during training (and also HMMs that use GMMs). * CMake fixes for finding LAPACK and BLAS as Armadillo dependencies when ATLAS is used. * CMake fix for projects using mlpack's CMake configuration from elsewhere (#512). 
### mlpack 2.0.0 ###### 2015-12-24 * Removed overclustering support from k-means because it is not well-tested, may be buggy, and is (I think) unused. If this was support you were using, open a bug or get in touch with us; it would not be hard for us to reimplement it. * Refactored KMeans to allow different types of Lloyd iterations. * Added implementations of k-means: Elkan's algorithm, Hamerly's algorithm, Pelleg-Moore's algorithm, and the DTNN (dual-tree nearest neighbor) algorithm. * Significant acceleration of LRSDP via the use of accu(a % b) instead of trace(a * b). * Added MatrixCompletion class (matrix_completion), which performs nuclear norm minimization to fill unknown values of an input matrix. * No more dependence on Boost.Random; now we use C++11 STL random support. * Add softmax regression, contributed by Siddharth Agrawal and QiaoAn Chen. * Changed NeighborSearch, RangeSearch, FastMKS, LSH, and RASearch API; these classes now take the query sets in the Search() method, instead of in the constructor. * Use OpenMP, if available. For now OpenMP support is only available in the DET training code. * Add support for predicting new test point values to LARS and the command-line 'lars' program. * Add serialization support for Perceptron and LogisticRegression. * Refactor SoftmaxRegression to predict into an arma::Row object, and add a softmax_regression program. * Refactor LSH to allow loading and saving of models. * ToString() is removed entirely (#487). * Add --input_model_file and --output_model_file options to appropriate machine learning algorithms. * Rename all executables to start with an "mlpack" prefix (#229). * Add HoeffdingTree and mlpack_hoeffding_tree, an implementation of the streaming decision tree methodology from Domingos and Hulten in 2000. ### mlpack 1.0.12 ###### 2015-01-07 * Switch to 3-clause BSD license (from LGPL). ### mlpack 1.0.11 ###### 2014-12-11 * Proper handling of dimension calculation in PCA. * Load parameter vectors properly for LinearRegression models. * Linker fixes for AugLagrangian specializations under Visual Studio. * Add support for observation weights to LinearRegression. * MahalanobisDistance<> now takes root of the distance by default and therefore satisfies the triangle inequality (TakeRoot now defaults to true). * Better handling of optional Armadillo HDF5 dependency. * Fixes for numerous intermittent test failures. * math::RandomSeed() now sets the random seed for recent (>=3.930) Armadillo versions. * Handle Newton method convergence better for SparseCoding::OptimizeDictionary() and make maximum iterations a parameter. * Known bug: CosineTree construction may fail in some cases on i386 systems (#358). ### mlpack 1.0.10 ###### 2014-08-29 * Bugfix for NeighborSearch regression which caused very slow allknn/allkfn. Speeds are now restored to approximately 1.0.8 speeds, with significant improvement for the cover tree (#347). * Detect dependencies correctly when ARMA_USE_WRAPPER is not being defined (i.e., libarmadillo.so does not exist). * Bugfix for compilation under Visual Studio (#348). ### mlpack 1.0.9 ###### 2014-07-28 * GMM initialization is now safer and provides a working GMM when constructed with only the dimensionality and number of Gaussians (#301). * Check for division by 0 in Forward-Backward Algorithm in HMMs (#301). * Fix MaxVarianceNewCluster (used when re-initializing clusters for k-means) (#301). * Fixed implementation of Viterbi algorithm in HMM::Predict() (#303). 
* Significant speedups for dual-tree algorithms using the cover tree (#235, #314) including a faster implementation of FastMKS. * Fix for LRSDP optimizer so that it compiles and can be used (#312). * CF (collaborative filtering) now expects users and items to be zero-indexed, not one-indexed (#311). * CF::GetRecommendations() API change: now requires the number of recommendations as the first parameter. The number of users in the local neighborhood should be specified with CF::NumUsersForSimilarity(). * Removed incorrect PeriodicHRectBound (#58). * Refactor LRSDP into LRSDP class and standalone function to be optimized (#305). * Fix for centering in kernel PCA (#337). * Added simulated annealing (SA) optimizer, contributed by Zhihao Lou. * HMMs now support initial state probabilities; these can be set in the constructor, trained, or set manually with HMM::Initial() (#302). * Added Nyström method for kernel matrix approximation by Marcus Edel. * Kernel PCA now supports using Nyström method for approximation. * Ball trees now work with dual-tree algorithms, via the BallBound<> bound structure (#307); fixed by Yash Vadalia. * The NMF class is now AMF<>, and supports far more types of factorizations, by Sumedh Ghaisas. * A QUIC-SVD implementation has returned, written by Siddharth Agrawal and based on older code from Mudit Gupta. * Added perceptron and decision stump by Udit Saxena (these are weak learners for an eventual AdaBoost class). * Sparse autoencoder added by Siddharth Agrawal. ### mlpack 1.0.8 ###### 2014-01-06 * Memory leak in NeighborSearch index-mapping code fixed (#298). * GMMs can be trained using the existing model as a starting point by specifying an additional boolean parameter to GMM::Estimate() (#296). * Logistic regression implementation added in methods/logistic_regression (see also #293). * L-BFGS optimizer now returns its function via Function(). * Version information is now obtainable via mlpack::util::GetVersion() or the __MLPACK_VERSION_MAJOR, __MLPACK_VERSION_MINOR, and __MLPACK_VERSION_PATCH macros (#297). * Fix typos in allkfn and allkrann output. ### mlpack 1.0.7 ###### 2013-10-04 * Cover tree support for range search (range_search), rank-approximate nearest neighbors (allkrann), minimum spanning tree calculation (emst), and FastMKS (fastmks). * Dual-tree FastMKS implementation added and tested. * Added collaborative filtering package (cf) that can provide recommendations when given users and items. * Fix for correctness of Kernel PCA (kernel_pca) (#270). * Speedups for PCA and Kernel PCA (#198). * Fix for correctness of Neighborhood Components Analysis (NCA) (#279). * Minor speedups for dual-tree algorithms. * Fix for Naive Bayes Classifier (nbc) (#269). * Added a ridge regression option to LinearRegression (linear_regression) (#286). * Gaussian Mixture Models (gmm::GMM<>) now support arbitrary covariance matrix constraints (#283). * MVU (mvu) removed because it is known to not work (#183). * Minor updates and fixes for kernels (in mlpack::kernel). ### mlpack 1.0.6 ###### 2013-06-13 * Minor bugfix so that FastMKS gets built. ### mlpack 1.0.5 ###### 2013-05-01 * Speedups of cover tree traversers (#235). * Addition of rank-approximate nearest neighbors (RANN), found in src/mlpack/methods/rann/. * Addition of fast exact max-kernel search (FastMKS), found in src/mlpack/methods/fastmks/. * Fix for EM covariance estimation; this should improve GMM training time. * More parameters for GMM estimation. 
* Force GMM and GaussianDistribution covariance matrices to be positive definite, so that training converges much more often. * Add parameter for the tolerance of the Baum-Welch algorithm for HMM training. * Fix for compilation with the clang compiler. * Fix for k-furthest-neighbor search. ### mlpack 1.0.4 ###### 2013-02-08 * Force minimum Armadillo version to 2.4.2. * Better output of class types to streams; a class with a ToString() method implemented can be sent to a stream with operator<<. * Change return type of GMM::Estimate() to double (#257). * Style fixes for k-means and RADICAL. * Handle size_t support correctly with Armadillo 3.6.2 (#258). * Add locality-sensitive hashing (LSH), found in src/mlpack/methods/lsh/. * Better tests for SGD (stochastic gradient descent) and NCA (neighborhood components analysis). ### mlpack 1.0.3 ###### 2012-09-16 * Remove internal sparse matrix support because Armadillo 3.4.0 now includes it. When using Armadillo versions older than 3.4.0, sparse matrix support is not available. * NCA (neighborhood components analysis) now supports an arbitrary optimizer (#245), including stochastic gradient descent (#249). ### mlpack 1.0.2 ###### 2012-08-15 * Added density estimation trees, found in src/mlpack/methods/det/. * Added non-negative matrix factorization, found in src/mlpack/methods/nmf/. * Added experimental cover tree implementation, found in src/mlpack/core/tree/cover_tree/ (#157). * Better reporting of boost::program_options errors (#225). * Fix for timers on Windows (#212, #211). * Fix for allknn and allkfn output (#204). * Sparse coding dictionary initialization is now a template parameter (#220). ### mlpack 1.0.1 ###### 2012-03-03 * Added kernel principal components analysis (kernel PCA), found in src/mlpack/methods/kernel_pca/ (#74). * Fix for Lovasz-Theta AugLagrangian tests (#182). * Fixes for allknn output (#185, #186). * Added range search executable (#192). * Adapted citations in documentation to BiBTeX; no citations in -h output (#195). * Stop use of 'const char*' and prefer 'std::string' (#176). * Support seeds for random numbers (#177). ### mlpack 1.0.0 ###### 2011-12-17 * Initial release. See any resolved tickets numbered less than #196 or execute this query: http://www.mlpack.org/trac/query?status=closed&milestone=mlpack+1.0.0 mlpack-2.2.5/LICENSE.txt000066400000000000000000000042761315013601400146110ustar00rootroot00000000000000mlpack is provided without any warranty of fitness for any purpose. You can redistribute the library and/or modify it under the terms of the 3-clause BSD license. The text of the 3-clause BSD license is contained below. mlpack contains some reproductions of the source code of Armadillo, which is licensed under the Mozilla Public License v2.0 (MPL2). This code is found in src/mlpack/core/arma_extend/ and more details on the licensing are available there. mlpack also contains some reproductions of the source code of Boost, which is licensed under the Boost Software License, version 1.0. This code is found in src/mlpack/core/boost_backport/ and more details on the licensing are available there. ---- Copyright (c) 2007-2016, mlpack contributors (see COPYRIGHT.txt) All rights reserved. Redistribution and use of mlpack in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. mlpack-2.2.5/README.md000066400000000000000000000216341315013601400142460ustar00rootroot00000000000000**mlpack** is an intuitive, fast, scalable C++ machine learning library, meant to be a machine learning analog to LAPACK. It aims to implement a wide array of machine learning methods and functions as a "swiss army knife" for machine learning researchers. 0. Contents ----------- 1. [Introduction](#1-introduction) 2. [Citation details](#2-citation-details) 3. [Dependencies](#3-dependencies) 4. [Building mlpack from source](#4-building-mlpack-from-source) 5. [Running mlpack programs](#5-running-mlpack-programs) 6. [Further documentation](#6-further-documentation) 7. [Bug reporting](#7-bug-reporting) 1. Introduction --------------- The mlpack website can be found at http://www.mlpack.org and contains numerous tutorials and extensive documentation. This README serves as a guide for what mlpack is, how to install it, how to run it, and where to find more documentation. The website should be consulted for further information: - [mlpack homepage](http://www.mlpack.org/) - [Tutorials](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=tutorials.html) - [Development Site (Github)](http://www.github.com/mlpack/mlpack/) - [API documentation](http://www.mlpack.org/docs/mlpack-git/doxygen.php) 2. Citation details ------------------- If you use mlpack in your research or software, please cite mlpack using the citation below (given in BiBTeX format): @article{mlpack2013, title = {{mlpack}: A Scalable {C++} Machine Learning Library}, author = {Curtin, Ryan R. and Cline, James R. and Slagle, Neil P. and March, William B. and Ram, P. and Mehta, Nishant A. and Gray, Alexander G.}, journal = {Journal of Machine Learning Research}, volume = {14}, pages = {801--805}, year = {2013} } Citations are beneficial for the growth and improvement of mlpack. 3. Dependencies --------------- mlpack has the following dependencies: Armadillo >= 4.200.0 Boost (program_options, math_c99, unit_test_framework, serialization, spirit) CMake >= 2.8.5 All of those should be available in your distribution's package manager. If not, you will have to compile each of them by hand. See the documentation for each of those packages for more information. If you are compiling Armadillo by hand, ensure that LAPACK and BLAS are enabled. 4. 
Building mlpack from source ------------------------------ This section discusses how to build mlpack from source. However, mlpack is in the repositories of many Linux distributions and so it may be easier to use the package manager for your system. For example, on Ubuntu, you can install mlpack with the following command: $ sudo apt-get install libmlpack-dev There are some other useful pages to consult in addition to this section: - [Building mlpack From Source](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=build.html) - [Building mlpack Under Windows](https://github.com/mlpack/mlpack/wiki/WindowsBuild) mlpack uses CMake as a build system and allows several flexible build configuration options. One can consult any of numerous CMake tutorials for further documentation, but this tutorial should be enough to get mlpack built and installed. First, unpack the mlpack source and change into the unpacked directory. Here we use mlpack-x.y.z where x.y.z is the version. $ tar -xzf mlpack-x.y.z.tar.gz $ cd mlpack-x.y.z Then, make a build directory. The directory can have any name, not just 'build', but 'build' is sufficient. $ mkdir build $ cd build The next step is to run CMake to configure the project. Running CMake is the equivalent of running `./configure` with autotools. If you run CMake with no options, it will configure the project to build with no debugging symbols and no profiling information: $ cmake ../ You can specify options to compile with debugging information and profiling information: $ cmake -D DEBUG=ON -D PROFILE=ON ../ Options are specified with the -D flag. A list of allowed options: DEBUG=(ON/OFF): compile with debugging symbols PROFILE=(ON/OFF): compile with profiling symbols ARMA_EXTRA_DEBUG=(ON/OFF): compile with extra Armadillo debugging symbols BOOST_ROOT=(/path/to/boost/): path to root of boost installation ARMADILLO_INCLUDE_DIR=(/path/to/armadillo/include/): path to Armadillo headers ARMADILLO_LIBRARY=(/path/to/armadillo/libarmadillo.so): Armadillo library Other tools can also be used to configure CMake, but those are not documented here. Once CMake is configured, building the library is as simple as typing 'make'. This will build all library components as well as 'mlpack_test'. $ make You can specify individual components which you want to build, if you do not want to build everything in the library: $ make mlpack_pca mlpack_knn mlpack_kfn If the build fails and you cannot figure out why, register an account on Github and submit an issue; the mlpack developers will quickly help you figure it out: [mlpack on Github](https://www.github.com/mlpack/mlpack/) Alternately, mlpack help can be found in IRC at `#mlpack` on irc.freenode.net. If you wish to install mlpack to `/usr/local/include/mlpack/`, `/usr/local/lib/`, and `/usr/local/bin/` once it has been built, make sure you have root privileges (or write permissions to those three directories), and simply type $ make install You can now run the executables by name; you can link against mlpack with `-lmlpack` and the mlpack headers are found in `/usr/local/include/mlpack/`. If running the programs (e.g. `$ mlpack_knn -h`) gives an error of the form error while loading shared libraries: libmlpack.so.2: cannot open shared object file: No such file or directory then be sure that the runtime linker is searching the directory where `libmlpack.so` was installed (probably `/usr/local/lib/` unless you set it manually).
One way to do this, on Linux, is to ensure that the `LD_LIBRARY_PATH` environment variable includes the directory that contains `libmlpack.so`. Using bash, this can be set easily: export LD_LIBRARY_PATH="/usr/local/lib/:$LD_LIBRARY_PATH" (or whatever directory `libmlpack.so` is installed in.) 5. Running mlpack programs -------------------------- After building mlpack, the executables will reside in `build/bin/`. You can call them from there, or you can install the library and (depending on system settings) they should be added to your PATH and you can call them directly. The documentation below assumes the executables are in your PATH. Consider the 'mlpack_knn' program, which finds the k nearest neighbors in a reference dataset of all the points in a query set. That is, we have a query and a reference dataset. For each point in the query dataset, we wish to know the k points in the reference dataset which are closest to the given query point. Alternately, if the query and reference datasets are the same, the problem can be stated more simply: for each point in the dataset, we wish to know the k nearest points to that point. Each mlpack program has extensive help documentation which details what the method does, what the parameters are, and how to use them: $ mlpack_knn --help Running `mlpack_knn` on one dataset (that is, the query and reference datasets are the same) and finding the 5 nearest neighbors is very simple: $ mlpack_knn -r dataset.csv -n neighbors_out.csv -d distances_out.csv -k 5 -v The `-v (--verbose)` flag is optional; it gives informational output. It is not unique to `mlpack_knn` but is available in all mlpack programs. Verbose output also gives timing output at the end of the program, which can be very useful. 6. Further documentation ------------------------ The documentation given here is only a fraction of the available documentation for mlpack. If doxygen is installed, you can type `make doc` to build the documentation locally. Alternately, up-to-date documentation is available for older versions of mlpack: - [mlpack homepage](http://www.mlpack.org/) - [Tutorials](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=tutorials.html) - [Development Site (Github)](https://www.github.com/mlpack/mlpack/) - [API documentation](http://www.mlpack.org/docs/mlpack-git/doxygen.php) 7. Bug reporting ---------------- (see also [mlpack help](http://www.mlpack.org/help.html)) If you find a bug in mlpack or have any problems, numerous routes are available for help. Github is used for bug tracking, and can be found at https://github.com/mlpack/mlpack/. It is easy to register an account and file a bug there, and the mlpack development team will try to quickly resolve your issue. In addition, mailing lists are available. The mlpack discussion list is available at [mlpack discussion list](https://lists.cc.gatech.edu/mailman/listinfo/mlpack) and the git commit list is available at [commit list](https://lists.cc.gatech.edu/mailman/listinfo/mlpack-git) Lastly, the IRC channel ```#mlpack``` on Freenode can be used to get help. mlpack-2.2.5/UPDATING.txt000066400000000000000000000030511315013601400147340ustar00rootroot00000000000000mlpack uses semantic versioning for its versioning conventions (http://semver.org). Because of the complexity and huge API of mlpack, it is worth elaborating on precisely when and how backwards compatibility will be broken. This will, of course, happen, as mlpack developers settle on increasingly effective abstractions for machine learning algorithms.
* The command-line programs, bindings, and top-level classes for each machine learning algorithm, as well as the code in core/, are considered the "public API". So, for instance, the mlpack_linear_regression program, LinearRegression<>, and any bindings for LinearRegression<> are considered the "public API"; additionally, core utilities like data::Load() and data::Save() are considered "public". * Support classes for machine learning algorithms are considered the "private API". An example might be the mlpack::kmeans::MaxVarianceNewCluster class. This is a support class for mlpack::kmeans::KMeans<> and generally isn't used by end users. Thus, with this relatively simple definition of "public API" and "private API", we can provide a simple versioning scheme based completely on the semantic versioning guidelines: ---- Given a version number MAJOR.MINOR.PATCH, increment the: MAJOR version when you make incompatible public API changes, MINOR version when you add public API functionality in a backwards-compatible manner or make incompatible private API changes, and PATCH version when you make backwards-compatible bug fixes or documentation updates. ---- mlpack-2.2.5/doc/000077500000000000000000000000001315013601400135265ustar00rootroot00000000000000mlpack-2.2.5/doc/doxygen/000077500000000000000000000000001315013601400152035ustar00rootroot00000000000000mlpack-2.2.5/doc/doxygen/extra-stylesheet.css000066400000000000000000000002241315013601400212250ustar00rootroot00000000000000/* Additional CSS styles for the html output */ /* Fix the size of inline formulas */ img.formulaInl { vertical-align: middle; height: 15pt; } mlpack-2.2.5/doc/doxygen/footer.html000066400000000000000000000006531315013601400173730ustar00rootroot00000000000000 mlpack-2.2.5/doc/doxygen/stylesheet.css000066400000000000000000000307451315013601400201170ustar00rootroot00000000000000/* The standard CSS for doxygen */ body, table, div, p, dl { font-family: Lucida Grande, Verdana, Geneva, Arial, sans-serif; font-size: 12px; } /* @group Heading Levels */ h1 { font-size: 150%; color: #ffffff; } .title { font-size: 150%; font-weight: bold; margin: 10px 2px; color: #ffffff; } h2 { font-size: 120%; color: #ffffff; } h3 { font-size: 100%; color: #ffffff; } dt { font-weight: bold; } div.multicol { -moz-column-gap: 1em; -webkit-column-gap: 1em; -moz-column-count: 3; -webkit-column-count: 3; } p.startli, p.startdd, p.starttd { margin-top: 2px; } p.endli { margin-bottom: 0px; } p.enddd { margin-bottom: 4px; } p.endtd { margin-bottom: 2px; } /* @end */ caption { font-weight: bold; } span.legend { font-size: 70%; text-align: center; } h3.version { font-size: 90%; text-align: center; } div.qindex, div.navtab{ background-color: #000000; border: 1px solid #333333; text-align: center; margin: 2px; padding: 2px; } div.qindex, div.navpath { width: 100%; line-height: 140%; } div.navtab { margin-right: 15px; } /* @group Link Styling */ a { color: #BB2222; font-weight: normal; text-decoration: none; } .contents a:visited { color: #BB2222; } a:hover { text-decoration: underline; } a.qindex { font-weight: bold; } a.qindexHL { font-weight: bold; background-color: #9CAFD4; color: #ffffff; border: 1px double #869DCA; } .contents a.qindexHL:visited { color: #ffffff; } a.el { font-weight: bold; } a.elRef { } a.code { color: #BB2222; } a.codeRef { color: #BB2222; } /* @end */ dl.el { margin-left: -1cm; } .fragment { font-family: monospace, fixed; font-size: 105%; } pre.fragment { border: 5px solid #1D1D1D; background-color: #2D2D2D; padding: 10px 10px 10px 10px; 
page-break-before: avoid; overflow: auto; word-wrap: break-word; font-size: 90%; margin-left: 1.75em; margin-right: 1.75em; margin-top: 1em; margin-bottom: 1em; color: #ffffff; } div.ah { background-color: black; font-weight: bold; color: #ffffff; margin-bottom: 3px; margin-top: 3px; padding: 0.2em; border: solid thin #333; } div.groupHeader { margin-left: 16px; margin-top: 12px; font-weight: bold; } div.groupText { margin-left: 16px; font-style: italic; } body { background: #000000; color: #808080; margin: 0; } div.contents { margin-top: 10px; margin-left: 10px; margin-right: 5px; } td.indexkey { background-color: #000000; font-weight: bold; border: 1px solid #333333; margin: 2px 0px 2px 0; padding: 2px 10px; } td.indexvalue { background-color: #000000; border: 1px solid #333333; padding: 2px 10px; margin: 2px 0px; } tr.memlist { background-color: #EEF1F7; } p.formulaDsp { text-align: center; } img.formulaDsp { } img.formulaInl { vertical-align: middle; } div.center { text-align: center; margin-top: 0px; margin-bottom: 0px; padding: 0px; } div.center img { border: 0px; } address.footer { text-align: right; padding-right: 12px; } img.footer { border: 0px; vertical-align: middle; } /* @group Code Colorization */ span.keyword { color: #FF0000; } span.keywordtype { color: #FF00FF; } span.keywordflow { color: #800080; } span.comment { color: #00FFFF; } span.preprocessor { color: #808080; } span.stringliteral { color: #FFFF00; } span.charliteral { color: #FFFF00; } span.vhdldigit { color: #FFFF00; } span.vhdlchar { color: #FFFF00; } span.vhdlkeyword { color: #FF0000; } span.vhdllogic { color: #FF0000; } /* @end */ /* .search { color: #003399; font-weight: bold; } form.search { margin-bottom: 0px; margin-top: 0px; } input.search { font-size: 75%; color: #000080; font-weight: normal; background-color: #e8eef2; } */ td.tiny { font-size: 75%; } .dirtab { padding: 4px; border-collapse: collapse; border: 1px solid #A3B4D7; } th.dirtab { background: #EBEFF6; font-weight: bold; } hr { height: 0px; border: none; border-top: 3px solid #BB2222; } hr.footer { height: 1px; } /* @group Member Descriptions */ table.memberdecls { border-spacing: 0px; padding: 0px; } .mdescLeft, .mdescRight, .memItemLeft, .memItemRight, .memTemplItemLeft, .memTemplItemRight, .memTemplParams { background-color: #000000; border: none; margin: 4px; padding: 1px 0 0 8px; } .mdescLeft, .mdescRight { padding: 0px 8px 4px 8px; color: #555; } .memItemLeft, .memItemRight, .memTemplParams { border-top: 1px solid #333333; } .memItemLeft, .memTemplItemLeft { white-space: nowrap; } .memItemRight { width: 100%; } .memTemplParams { color: #FFFFFF; white-space: nowrap; } /* @end */ /* @group Member Details */ /* Styles for detailed member documentation */ .memtemplate { color: #FFFFFF; font-weight: bold; margin-left: 8px; font-family: Andalo Mono, Courier New, Courier, Lucida Typewrite, fixed; } .memnav { background-color: #000000; border: 1px solid #333333; text-align: center; margin: 2px; margin-right: 15px; padding: 2px; } .mempage { width: 100%; } .memitem { padding: 0; margin-bottom: 10px; margin-right: 5px; } .memname { white-space: nowrap; font-weight: bold; margin-left: 6px; font-family: Andale Mono, Courier New, Courier, Lucida Typewriter, fixed; } .memproto { border-top: 1px solid #808080; border-left: 1px solid #808080; border-right: 1px solid #808080; padding: 6px 0px 6px 0px; color: #FFFFFF; font-weight: bold; } .memdoc { border-bottom: 1px solid #808080; border-left: 1px solid #808080; border-right: 1px solid #808080; border-top: 
1px solid #333333; padding: 2px 5px; } .paramkey { text-align: right; } .paramtype { white-space: nowrap; color: #808080; font-family: Andale Mono, Courier New, Courier, Lucida Typewriter, fixed; } .paramname { color: #BB2222; white-space: nowrap; font-family: Andale Mono, Courier New, Courier, Lucida Typewriter, fixed; } .paramname em { font-style: normal; } .params, .retval, .exception, .tparams { border-spacing: 6px 2px; } .params .paramname, .retval .paramname { font-weight: bold; vertical-align: top; } .params .paramtype { font-style: italic; vertical-align: top; } .params .paramdir { font-family: "courier new",courier,monospace; vertical-align: top; } /* @end */ /* @group Directory (tree) */ /* for the tree view */ .ftvtree { font-family: sans-serif; margin: 0px; } /* these are for tree view when used as main index */ .directory { font-size: 9pt; font-weight: bold; margin: 5px; } .directory h3 { margin: 0px; margin-top: 1em; font-size: 11pt; } /* The following two styles can be used to replace the root node title with an image of your choice. Simply uncomment the next two styles, specify the name of your image and be sure to set 'height' to the proper pixel height of your image. */ /* .directory h3.swap { height: 61px; background-repeat: no-repeat; background-image: url("yourimage.gif"); } .directory h3.swap span { display: none; } */ .directory > h3 { margin-top: 0; } .directory p { margin: 0px; white-space: nowrap; } .directory div { display: none; margin: 0px; } .directory img { vertical-align: -30%; } /* these are for tree view when not used as main index */ .directory-alt { font-size: 100%; font-weight: bold; } .directory-alt h3 { margin: 0px; margin-top: 1em; font-size: 11pt; } .directory-alt > h3 { margin-top: 0; } .directory-alt p { margin: 0px; white-space: nowrap; } .directory-alt div { display: none; margin: 0px; } .directory-alt img { vertical-align: -30%; } /* @end */ div.dynheader { margin-top: 8px; } address { font-style: normal; color: #2A3D61; } table.doxtable { border-collapse: collapse; } table.doxtable td, table.doxtable th { border: 1px solid #2D4068; padding: 3px 7px 2px; } table.doxtable th { background-color: #374F7F; color: #FFFFFF; font-size: 110%; padding-bottom: 4px; padding-top: 5px; text-align: left; } .tabsearch { top: 0px; left: 10px; height: 36px; background-image: url('tab_b.png'); z-index: 101; overflow: hidden; font-size: 13px; } .navpath ul { font-size: 11px; background: #000000; color: #8AA0CC; border-bottom: 1px solid #333333; overflow: hidden; margin: 0px; padding-top: 0.25em; padding-bottom: 0.25em; padding-left: 0.5em; padding-right: 0; border-left: 1px solid #333333; } .navpath li { list-style-type: none; float: left; padding-right: 0.5em; color: #364D7C; border-right: 1px solid #333333; padding-left: 0.5em; } .navpath li.navelem a { display: block; text-decoration: none; outline: none; } .navpath li.navelem a:hover { color:#FFFFFF; } .navpath li.footer { list-style-type: none; float: right; padding-left: 10px; padding-right: 15px; background-image: none; background-repeat: no-repeat; background-position: right; color: #364D7C; font-size: 8pt; } div.summary { float: right; font-size: 8pt; padding-right: 5px; width: 50%; text-align: right; } div.summary a { white-space: nowrap; } div.ingroups { font-size: 8pt; padding-left: 5px; width: 50%; text-align: left; } div.ingroups a { white-space: nowrap; } div.header { background-color: #000000; margin: 0px; border-bottom: 1px solid #333333; } div.headertitle { padding: 5px 5px 5px 10px; } dl { 
padding: 0 0 0 10px; } dl.note, dl.warning, dl.attention, dl.pre, dl.post, dl.invariant, dl.deprecated, dl.todo, dl.test, dl.bug { border-left: 4px solid; padding: 0 0 0 6px; } dl.note { border-color: #D0C000; } dl.warning, dl.attention { border-color: #FF0000; } dl.pre, dl.post, dl.invariant { border-color: #00D000; } dl.deprecated { border-color: #505050; } dl.todo { border-color: #00C0E0; } dl.test { border-color: #3030E0; } dl.bug { border-color: #C08050; } #projectlogo { text-align: center; vertical-align: bottom; border-collapse: separate; } #projectlogo img { border: 0px none; } #projectname { font: 300% Tahoma, Arial, sans-serif; margin: 0px; padding: 2px 0px; } #projectbrief { font: 120% Tahoma, Arial, sans-serif; margin: 0px; padding: 0px; } #projectnumber { font: 50% Tahoma, Arial,sans-serif; margin: 0px; padding: 0px; } #titlearea { padding: 0px; margin: 0px; width: 100%; border-bottom: 1px solid #808080; } .image { text-align: center; } .dotgraph { text-align: center; } .mscgraph { text-align: center; } .caption { font-weight: bold; } /** tab list at top of page */ .tabs, .tabs2, .tabs3 { background-image: none !important; background: #000000; border-left: 1px solid #333333; border-right: 1px solid #333333; border-bottom: 1px solid #333333; min-height: 1.5em; } .tablist li { background-image: none !important; background: #000000; border-right: 1px solid #333333; height: auto !important; padding-bottom: 0.25em; padding-top: 0.25em; line-height: 1em !important; } .tablist li.current { background: #BB2222; } .tablist li.current a { background-image: none !important; text-shadow: none; color: #ffffff; } .tablist a { background-image: none !important; text-shadow: none; color: #ffffff; font-weight: bold; } .tablist li:hover { background: #333333; } .tablist li.current:hover { background: #BB2222 !important; } /*** * For trac-doxygen; these rules won't apply otherwise. */ div.tabs span { background-image: none !important; background: transparent !important; height: auto !important; padding-bottom: 0.25em; padding-top: 0.25em; line-height: 1em !important; } div.tabs a { background-image: none !important; background: transparent !important; border-bottom: none !important; font-size: 100% !important; } div.tabs span { padding-bottom: 0.25em; padding-top: 0.25em; color: #ffffff !important; } div.tabs li:hover { background: #333333; } div.tabs li.current:hover { background: #BB2222 !important; } div.tabs li.current { background: #BB2222 !important; } div.tabs li { border-right: 1px solid #333333; } div.tabs ul { display: inline; font-size: 100%; padding-top: 0em; } /* I want the menus to display directly below the Trac menu. */ #content { padding-top: 0px; margin-top: 0px; } div.tabs { margin-bottom: 0px; background-image: none; } div.nav { border-bottom: 1px solid #808080; } /*** Fix the weird size of the menus */ #mainnav { font-size: 100% !important; } div#main div.nav { min-height: 1em !important; /* We must have the right height for the menus. */ border-bottom: 1px solid #333333; /* The plugin was giving a blue border. */ } mlpack-2.2.5/doc/guide/000077500000000000000000000000001315013601400146235ustar00rootroot00000000000000mlpack-2.2.5/doc/guide/build.hpp000066400000000000000000000120661315013601400164400ustar00rootroot00000000000000/*! @page build Building mlpack From Source @section buildintro Introduction This document discusses how to build mlpack from source. 
However, mlpack is in the repositories of many Linux distributions and so it may be easier to use the package manager for your system. For example, on Ubuntu, you can install mlpack with the following command: @code $ sudo apt-get install libmlpack-dev @endcode If mlpack is not available in your system's package manager, then you can follow this document for how to compile and install mlpack from source. mlpack uses CMake as a build system and allows several flexible build configuration options. One can consult any of numerous CMake tutorials for further documentation, but this tutorial should be enough to get mlpack built and installed on most Linux and UNIX-like systems (including OS X). If you want to build mlpack on Windows, see Keon's excellent tutorial, or the guide on the mlpack wiki (https://github.com/mlpack/mlpack/wiki/WindowsBuild). @section download Downloading mlpack Download the latest mlpack release (mlpack-2.2.5) from the mlpack website: http://www.mlpack.org/ @section builddir Creating Build Directory Once the mlpack source is unpacked, you should create a build directory. @code $ cd mlpack-2.2.5 $ mkdir build @endcode The directory can have any name, not just 'build', but 'build' is sufficient. @section dep Dependencies of mlpack mlpack depends on the following libraries, which need to be installed on the system and have headers present: - Armadillo >= 4.200.0 (with LAPACK support) - Boost (math_c99, program_options, serialization, unit_test_framework, heap, spirit) >= 1.49 In Ubuntu and Debian, you can get all of these dependencies through apt: @code # apt-get install libboost-math-dev libboost-program-options-dev libboost-test-dev libboost-serialization-dev libarmadillo-dev binutils-dev @endcode On Fedora, Red Hat, or CentOS, these same dependencies can be obtained via dnf: @code # dnf install boost-devel boost-test boost-program-options boost-math armadillo-devel binutils-devel @endcode @section config Configuring CMake Running CMake is the equivalent of running `./configure` with autotools. If you are working with the svn trunk version of mlpack and run CMake with no options, it will configure the project to build with debugging symbols and profiling information. If you are working with a release of mlpack, running CMake with no options will configure the project to build without debugging or profiling information (for speed). @code $ cd build $ cmake ../ @endcode You can manually specify options to compile with or without debugging information and profiling information (i.e. as fast as possible): @code $ cd build $ cmake -D DEBUG=OFF -D PROFILE=OFF ../ @endcode The full list of options mlpack allows: - DEBUG=(ON/OFF): compile with debugging symbols (default ON in svn trunk, OFF in releases) - PROFILE=(ON/OFF): compile with profiling symbols (default ON in svn trunk, OFF in releases) - ARMA_EXTRA_DEBUG=(ON/OFF): compile with extra Armadillo debugging symbols (default OFF) - BUILD_TESTS=(ON/OFF): compile the \c mlpack_test program (default ON) - BUILD_CLI_EXECUTABLES=(ON/OFF): compile the mlpack command-line executables (i.e. \c mlpack_knn, \c mlpack_kfn, \c mlpack_logistic_regression, etc.) (default ON) - TEST_VERBOSE=(ON/OFF): run test cases in \c mlpack_test with verbose output (default OFF) Each option can be specified to CMake with the '-D' flag. Other tools can also be used to configure CMake, but those are not documented here. @section build Building mlpack Once CMake is configured, building the library is as simple as typing 'make'. This will build all library components as well as 'mlpack_test'.
@code $ make Scanning dependencies of target mlpack [ 1%] Building CXX object src/mlpack/CMakeFiles/mlpack.dir/core/optimizers/aug_lagrangian/aug_lagrangian_test_functions.cpp.o <...> @endcode You can specify individual components which you want to build, if you do not want to build everything in the library: @code $ make mlpack_pca mlpack_knn mlpack_kfn @endcode One particular component of interest is mlpack_test, which runs the mlpack test suite. You can build this component with @code $ make mlpack_test @endcode and then run all of the tests, or an individual test suite: @code $ bin/mlpack_test $ bin/mlpack_test -t KNNTest @endcode If the build fails and you cannot figure out why, register an account on Github and submit an issue; the mlpack developers will quickly help you figure it out: http://mlpack.org/ http://github.com/mlpack/mlpack Alternately, mlpack help can be found in IRC at \#mlpack on irc.freenode.net. @section install Installing mlpack If you wish to install mlpack to /usr/include/mlpack/ and /usr/lib/ and /usr/bin/, once it has built, make sure you have root privileges (or write permissions to those three directories), and simply type @code # make install @endcode You can now run the executables by name; you can link against mlpack with -lmlpack, and the mlpack headers are found in /usr/include/mlpack/. */ mlpack-2.2.5/doc/guide/formats.hpp000066400000000000000000000335301315013601400170130ustar00rootroot00000000000000/*! @page formatdoc File formats in mlpack @section formatintro Introduction mlpack supports a wide variety of data and model formats for use in both its command-line programs and in C++ programs using mlpack via the mlpack::data::Load() function. This tutorial discusses the formats that are supported and how to use them. @section formattypes Supported dataset types Datasets in mlpack are represented internally as sparse or dense numeric matrices (specifically, as \c arma::mat or \c arma::sp_mat or similar). This means that when datasets are loaded from file, they must be converted to a suitable numeric representation. Therefore, in general, datasets on disk should contain only numeric features in order to be loaded successfully by mlpack. The types of datasets that mlpack can load are roughly the same as the types of matrices that Armadillo can load. However, the load functionality that mlpack provides only supports loading dense datasets. When datasets are loaded by mlpack, the file's type is detected using the file's extension. mlpack supports the following file types: - csv (comma-separated values), denoted by .csv or .txt - tsv (tab-separated values), denoted by .tsv, .csv, or .txt - ASCII (raw ASCII, with space-separated values), denoted by .txt - Armadillo ASCII (Armadillo's text format with a header), denoted by .txt - PGM, denoted by .pgm - PPM, denoted by .ppm - Armadillo binary, denoted by .bin - Raw binary, denoted by .bin <b>(note: this will be loaded as one-dimensional data, which is likely not what is desired)</b> - HDF5, denoted by .hdf, .hdf5, .h5, or .he5 (note: HDF5 must be enabled in the Armadillo configuration) - ARFF, denoted by .arff (note: this is not supported by all mlpack command-line programs; see \ref formatcat ) Datasets that are loaded by mlpack should be stored with one row for one point and one column for one dimension.
Therefore, a dataset with three two-dimensional points \f$(0, 1)\f$, \f$(3, 1)\f$, and \f$(5, -5)\f$ would be stored in a csv file as: \code 0, 1 3, 1 5, -5 \endcode As noted earlier, the format is automatically detected at load time. Therefore, a dataset can be loaded in many ways: \code $ mlpack_logistic_regression -t dataset.csv -v [INFO ] Loading 'dataset.csv' as CSV data. Size is 32 x 37749. ... $ mlpack_logistic_regression -t dataset.txt -v [INFO ] Loading 'dataset.txt' as raw ASCII formatted data. Size is 32 x 37749. ... $ mlpack_logistic_regression -t dataset.h5 -v [INFO ] Loading 'dataset.h5' as HDF5 data. Size is 32 x 37749. ... \endcode Similarly, the format to save to is detected by the extension of the given filename. @section formatcpp Loading simple matrices in C++ When C++ is being written, the mlpack::data::Load() and mlpack::data::Save() functions are used to load and save datasets, respectively. These functions should be preferred over the built-in Armadillo \c .load() and \c .save() functions. Matrices in mlpack are column-major, meaning that each column should correspond to a point in the dataset and each row should correspond to a dimension; for more information, see \ref matrices . This is at odds with how the data is stored in files; therefore, a transposition is required during load and save. The mlpack::data::Load() and mlpack::data::Save() functions do this automatically (unless otherwise specified), which is why they are preferred over the Armadillo functions. To load a matrix from file, the call is straightforward. After creating a matrix object, the data can be loaded: \code arma::mat dataset; // The data will be loaded into this matrix. mlpack::data::Load("dataset.csv", dataset); \endcode Saving matrices is equally straightforward. The code below generates a random matrix with 10 points in 3 dimensions and saves it to a file as HDF5. \code // 3 dimensions (rows), with 10 points (columns). arma::mat dataset = arma::randu(3, 10); mlpack::data::Save("dataset.h5", dataset); \endcode As with the command-line programs, the type of data to be loaded is automatically detected from the filename extension. For more details, see the mlpack::data::Load() and mlpack::data::Save() documentation. @section sparseload Dealing with sparse matrices As mentioned earlier, support for loading sparse matrices in mlpack is not available at this time. To use a sparse matrix with mlpack code, you will have to write a C++ program instead of using any of the command-line tools, because the command-line tools all use dense datasets internally. (There is one exception: the \c mlpack_cf program, for collaborative filtering, loads sparse coordinate lists.) In addition, the \c mlpack::data::Load() function does not support loading any sparse format; so the best idea is to use undocumented Armadillo functionality to load coordinate lists. Suppose you have a coordinate list file like the one below: \code $ cat cl.csv 0 0 0.332 1 3 3.126 4 4 1.333 \endcode This represents a 5x5 matrix with three nonzero elements. We can load this using Armadillo: \code arma::sp_mat matrix; matrix.load("cl.csv", arma::coord_ascii); matrix = matrix.t(); // We must transpose after load! \endcode The transposition after loading is necessary if the coordinate list is in row-major format (that is, if each row in the matrix represents a point and each column represents a feature). Be sure that the matrix you use with mlpack methods has points as columns and features as rows! See \ref matrices for more information. 
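Saving a sparse matrix back to a coordinate list works the same way, in reverse. Below is a minimal sketch of the round trip; it assumes that Armadillo's \c coord_ascii format is supported for saving as well as loading (true of recent Armadillo versions):

\code
arma::sp_mat matrix;
matrix.load("cl.csv", arma::coord_ascii);
matrix = matrix.t(); // Transpose so points are columns, as mlpack expects.

// ... use the matrix with mlpack code here ...

// Transpose back to row-major form before saving the coordinate list.
arma::sp_mat output = matrix.t();
output.save("cl-out.csv", arma::coord_ascii);
\endcode

This keeps the on-disk convention (one row per point) consistent with the file that was originally loaded.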
@section formatcat Categorical features and command line programs In some situations it is useful to represent data not just as a numeric matrix but also as categorical data (i.e. with numeric but unordered categories). This support is useful for, e.g., decision trees and other models that support categorical features. Categorical data might look like this (in CSV format): \code 0, 1, "true", 3 5, -2, "false", 5 2, 2, "true", 4 3, -1, "true", 3 4, 4, "not sure", 0 0, 7, "false", 6 \endcode In the example above, the third dimension (which takes values "true", "false", and "not sure") is categorical. mlpack can load and work with this data, but the strings must be mapped to numbers, because all datasets in mlpack are represented by Armadillo matrix objects. From the perspective of an mlpack command-line program, this support is transparent; mlpack will attempt to load the data file, and if it detects entries in the file that are not numeric, it will map them to numbers and then print, for each dimension, the number of mappings. For instance, if we run the \c mlpack_hoeffding_tree program (which supports categorical data) on the dataset above (stored as dataset.csv), we receive this output during loading: \code $ mlpack_hoeffding_tree -t dataset.csv -l dataset.labels.csv -v [INFO ] Loading 'dataset.csv' as CSV data. Size is 6 x 4. [INFO ] 0 mappings in dimension 0. [INFO ] 0 mappings in dimension 1. [INFO ] 3 mappings in dimension 2. [INFO ] 0 mappings in dimension 3. ... \endcode Currently, only the \c mlpack_hoeffding_tree program supports loading categorical data, and this is also the only program that supports loading an ARFF dataset. @section formatcatcpp Categorical features and C++ When writing C++, loading categorical data is slightly more tricky: the mappings from strings to integers must be preserved. This is the purpose of the mlpack::data::DatasetInfo class, which stores these mappings and can be used at load and save time to apply and un-apply the mappings. When loading a dataset with categorical data, the overload of mlpack::data::Load() that takes an mlpack::data::DatasetInfo object should be used. An example is below: \code arma::mat dataset; // Load into this matrix. mlpack::data::DatasetInfo info; // Store information about dataset in this. // Load the ARFF dataset. mlpack::data::Load("dataset.arff", dataset, info); \endcode After this load completes, the \c info object will hold the information about the mappings necessary to load the dataset. It is possible to re-use the \c DatasetInfo object to load another dataset with the same mappings. This is useful when, for instance, both a training and test set are being loaded, and it is necessary that the mappings from strings to integers for categorical features are identical. An example is given below. \code arma::mat trainingData; // Load training data into this matrix. arma::mat testData; // Load test data into this matrix. mlpack::data::DatasetInfo info; // This will store the mappings. // Load the training data, and create the mappings in the 'info' object. mlpack::data::Load("training_data.arff", trainingData, info); // Load the test data, but re-use the 'info' object with the already initialized // mappings. This means that the same mappings will be applied to the test set. mlpack::data::Load("test_data.arff", testData, info); \endcode When saving data, pass the same DatasetInfo object it was loaded with in order to unmap the categorical features correctly.
The example below demonstrates this functionality: it loads the dataset, increments all non-categorical features by 1, and then saves the dataset with the same DatasetInfo it was loaded with. \code arma::mat dataset; // Load data into this matrix. mlpack::data::DatasetInfo info; // This will store the mappings. // Load the dataset. mlpack::data::Load("dataset.tsv", dataset, info); // Loop over all features, and add 1 to all non-categorical features. for (size_t i = 0; i < info.Dimensionality(); ++i) { // The Type() function returns whether or not the data is numeric or // categorical. if (info.Type(i) != mlpack::data::Datatype::categorical) dataset.row(i) += 1.0; } // Save the modified dataset using the same DatasetInfo. mlpack::data::Save("dataset-new.tsv", dataset, info); \endcode There is more functionality to the DatasetInfo class; for more information, see the mlpack::data::DatasetInfo documentation. @section formatmodels Loading and saving models Using \c boost::serialization, mlpack is able to load and save machine learning models with ease. These models can currently be saved in three formats: - binary (.bin); this is not human-readable, but it is small - text (.txt); this is sort of human-readable and relatively small - xml (.xml); this is human-readable but very verbose and large The type of file to save is determined by the given file extension, as with the other loading and saving functionality in mlpack. Below is an example where a dataset stored as TSV and labels stored as ASCII text are used to train a logistic regression model, which is then saved to model.xml. \code $ mlpack_logistic_regression -t training_dataset.tsv -l training_labels.txt \ > -M model.xml \endcode Many mlpack command-line programs have support for loading and saving models through the \c --input_model_file (\c -m) and \c --output_model_file (\c -M) options; for more information, see the documentation for each program (accessible by passing \c --help as a parameter). @section formatmodelscpp Loading and saving models in C++ mlpack uses the \c boost::serialization library internally to perform loading and saving of models, and provides convenience overloads of mlpack::data::Load() and mlpack::data::Save() to load and save these models. To be serializable, a class must implement the method \code template<typename Archive> void Serialize(Archive& ar, const unsigned int version); \endcode \note For more information on this method and how it works, see the boost::serialization documentation at http://www.boost.org/libs/serialization/doc/. Note that mlpack uses a \c Serialize() method and not a \c serialize() method, and also mlpack uses the mlpack::data::CreateNVP() method instead of \c BOOST_SERIALIZATION_NVP(); this is for coherence with the mlpack style guidelines, and is done via a particularly complex bit of template metaprogramming in src/mlpack/core/data/serialization_shim.hpp (read that file if you want your head to hurt!). \note Examples of Serialize() methods can be found in most classes; one fairly straightforward example is found \ref mlpack::math::Range::Serialize() "in the mlpack::math::Range class". A more complex example is found \ref mlpack::tree::BinarySpaceTree::Serialize() "in the mlpack::tree::BinarySpaceTree class". Using the mlpack::data::Load() and mlpack::data::Save() functions is easy if the type being saved has a \c Serialize() method implemented: simply call either function with a filename, a name for the object to save, and the object itself.
The example below, for instance, creates an mlpack::math::Range object and saves it as range.txt. Then, that range is loaded from file into another mlpack::math::Range object. \code // Create range and save it. mlpack::math::Range r(0.0, 5.0); mlpack::data::Save("range.txt", "range", r); // Load into new range. mlpack::math::Range newRange; mlpack::data::Load("range.txt", "range", newRange); \endcode It is important to be sure that you load the appropriate type; if you save, for instance, an mlpack::regression::LogisticRegression object and attempt to load it as an mlpack::math::Range object, the load will fail and an exception will be thrown. (When the object is saved as binary (.bin), it is possible that the load will not fail, but instead load with mangled data, which is perhaps even worse!) @section formatfinal Final notes If the examples here are unclear, it would be worth looking into the ways that mlpack::data::Load() and mlpack::data::Save() are used in the code. Some example files that may be useful to this end: - src/mlpack/methods/logistic_regression/logistic_regression_main.cpp - src/mlpack/methods/hoeffding_trees/hoeffding_tree_main.cpp - src/mlpack/methods/neighbor_search/knn_main.cpp If you are interested in adding support for more data types to mlpack, it would be preferable to add the support upstream to Armadillo instead, so that may be a better direction to go first. Then very little code modification for mlpack will be necessary. */ mlpack-2.2.5/doc/guide/iodoc.hpp000066400000000000000000000116121315013601400164320ustar00rootroot00000000000000/*! @page iodoc mlpack Input and Output @section iointro Introduction mlpack provides the following: - mlpack::Log, for debugging / informational / warning / fatal output - mlpack::CLI, for parsing command line options Each of those classes is well-documented, and that documentation should be consulted for further reference. @section simplelog Simple Logging Example mlpack has four logging levels: - Log::Debug - Log::Info - Log::Warn - Log::Fatal Output to Log::Debug does not show (and has no performance penalty) when mlpack is compiled without debugging symbols. Output to Log::Info is only shown when the program is run with the --verbose (or -v) flag. Log::Warn is always shown, and Log::Fatal will throw a std::runtime_error exception, but only once a newline is sent to it. If mlpack was compiled with debugging symbols, Log::Fatal will always throw a std::runtime_error exception and print a backtrace. Here is a simple example, and its output: @code #include <mlpack/core.hpp> using namespace mlpack; int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); Log::Debug << "Compiled with debugging symbols." << std::endl; Log::Info << "Some test informational output." << std::endl; Log::Warn << "A warning!" << std::endl; Log::Fatal << "Program has crashed." << std::endl; Log::Warn << "Made it!" << std::endl; } @endcode With debugging output, the compilation flags -g -rdynamic, and --verbose, the following is shown: @code [DEBUG] Compiled with debugging symbols. [INFO ] Some test informational output. [WARN ] A warning! [FATAL] [bt]: (1) /absolute/path/to/file/example.cpp:6: function() [FATAL] Program has crashed. terminate called after throwing an instance of 'std::runtime_error' what(): fatal error; see Log::Fatal output Aborted @endcode With debugging output and --verbose, but without the -g -rdynamic compilation flags, the following is shown: @code [DEBUG] Compiled with debugging symbols. [INFO ] Some test informational output. [WARN ] A warning!
[FATAL] Cannot give backtrace because program was compiled without: -g -rdynamic [FATAL] For a backtrace, recompile with: -g -rdynamic. [FATAL] Program has crashed. terminate called after throwing an instance of 'std::runtime_error' what(): fatal error; see Log::Fatal output Aborted @endcode The last warning is not reached, because Log::Fatal terminates the program. Without debugging symbols and without --verbose, the following is shown: @code $ ./main [WARN ] A warning! [FATAL] Program has crashed. terminate called after throwing an instance of 'std::runtime_error' what(): fatal error; see Log::Fatal output Aborted @endcode These four outputs can be very useful for both providing informational output and debugging output for your mlpack program. @section simplecli Simple CLI Example Through the mlpack::CLI object, command-line parameters can be easily added with the PROGRAM_INFO, PARAM_INT, PARAM_DOUBLE, PARAM_STRING, and PARAM_FLAG macros. Here is a sample use of those macros, extracted from methods/pca/pca_main.cpp. @code #include <mlpack/core.hpp> // Document program. PROGRAM_INFO("Principal Components Analysis", "This program performs principal " "components analysis on the given dataset. It will transform the data " "onto its principal components, optionally performing dimensionality " "reduction by ignoring the principal components with the smallest " "eigenvalues."); // Parameters for program. PARAM_STRING_REQ("input_file", "Input dataset to perform PCA on.", ""); PARAM_STRING_REQ("output_file", "Output dataset to perform PCA on.", ""); PARAM_INT("new_dimensionality", "Desired dimensionality of output dataset.", "", 0); using namespace mlpack; int main(int argc, char** argv) { // Parse commandline. CLI::ParseCommandLine(argc, argv); ... } @endcode Documentation is automatically generated using those macros, and when the program is run with --help the following is displayed: @code $ mlpack_pca --help Principal Components Analysis This program performs principal components analysis on the given dataset. It will transform the data onto its principal components, optionally performing dimensionality reduction by ignoring the principal components with the smallest eigenvalues. Required options: --input_file [string] Input dataset to perform PCA on. --output_file [string] Output dataset to perform PCA on. Options: --help (-h) Default help info. --info [string] Get help on a specific module or option. Default value ''. --new_dimensionality [int] Desired dimensionality of output dataset. Default value 0. --verbose (-v) Display informational messages and the full list of parameters and timers at the end of execution. @endcode The mlpack::CLI documentation can be consulted for further and complete documentation. */ mlpack-2.2.5/doc/guide/matrices.hpp000066400000000000000000000034551315013601400171500ustar00rootroot00000000000000/*! @page matrices Matrices in mlpack @section matintro Introduction mlpack uses Armadillo matrices for matrix support. Armadillo is a fast C++ matrix library which makes use of advanced template techniques to provide the fastest possible matrix operations. Documentation on Armadillo can be found on their website: http://arma.sourceforge.net/docs.html Nonetheless, there are a few further caveats for mlpack Armadillo usage. @section format Column-major Matrices Armadillo matrices are stored in a column-major format; this means that in memory, each column is located in a contiguous block.
This means that, for the vast majority of machine learning methods, it is faster to store observations as columns and dimensions as rows. This is counter to most standard machine learning texts! This has major implications for linear algebra. For instance, the covariance of a matrix is typically @f$ C = X^T X @f$ but for a column-wise matrix, it is @f$ C = X X^T @f$ and this is very important to keep in mind! If your mlpack code is not working, this may be a factor in why. @section loading Loading Matrices mlpack provides a data::Load() and data::Save() function, which should be used instead of Armadillo's loading and saving functions. Most machine learning data is stored in row-major format; a CSV, for example, will generally have one observation per line and each column will correspond to a dimension. The data::Load() and data::Save() functions transpose the matrix upon loading, meaning that the following CSV: @code $ cat data.csv 3,3,3,3,0 3,4,4,3,0 3,4,4,3,0 3,3,4,3,0 3,6,4,3,0 2,4,4,3,0 2,4,4,1,0 3,3,3,2,0 3,4,4,2,0 3,4,4,2,0 3,3,4,2,0 3,6,4,2,0 2,4,4,2,0 @endcode is actually loaded with 5 rows and 13 columns, not 13 rows and 5 columns like the CSV is written. This is important to remember! */ mlpack-2.2.5/doc/guide/sample.hpp000066400000000000000000000055611315013601400166240ustar00rootroot00000000000000/*! @page sample Simple Sample mlpack Programs @section sampleintro Introduction On this page, several simple mlpack examples are contained, in increasing order of complexity. If you compile from the command-line, be sure that your compiler is in C++11 mode. With gcc and clang, this can be accomplished by adding the @c -std=c++11 option. @section covariance Covariance Computation A simple program to compute the covariance of a data matrix ("data.csv"), assuming that the data is already centered, and save it to file. @code // Includes all relevant components of mlpack. #include <mlpack/core.hpp> // Convenience. using namespace mlpack; int main() { // First, load the data. arma::mat data; // Use data::Load() which transposes the matrix. data::Load("data.csv", data, true); // Now compute the covariance. We assume that the data is already centered. // Remember, because the matrix is column-major, the covariance operation is // transposed. arma::mat cov = data * trans(data) / data.n_cols; // Save the output. data::Save("cov.csv", cov, true); } @endcode @section nn Nearest Neighbor This simple program uses the mlpack::neighbor::NeighborSearch object to find the nearest neighbor of each point in a dataset using the L1 metric, and then print the index of the neighbor and the distance of it to stdout. @code #include <mlpack/core.hpp> #include <mlpack/methods/neighbor_search/neighbor_search.hpp> using namespace mlpack; using namespace mlpack::neighbor; // NeighborSearch and NearestNeighborSort using namespace mlpack::metric; // ManhattanDistance int main() { // Load the data from data.csv (hard-coded). arma::mat data; data::Load("data.csv", data, true); // Use templates to specify that we want a NeighborSearch object which uses // the Manhattan distance. NeighborSearch<NearestNeighborSort, ManhattanDistance> nn(data); // Create the object we will store the nearest neighbors in. arma::Mat<size_t> neighbors; arma::mat distances; // We need to store the distance too. // Compute the neighbors. nn.Search(1, neighbors, distances); // Write each neighbor and distance using Log.
for (size_t i = 0; i < neighbors.n_elem; ++i) { std::cout << "Nearest neighbor of point " << i << " is point " << neighbors[i] << " and the distance is " << distances[i] << ".\n"; } } @endcode @section other Other examples For more complex examples, it is useful to refer to the main executables: - methods/neighbor_search/knn_main.cpp - methods/neighbor_search/kfn_main.cpp - methods/emst/emst_main.cpp - methods/radical/radical_main.cpp - methods/nca/nca_main.cpp - methods/naive_bayes/nbc_main.cpp - methods/pca/pca_main.cpp - methods/lars/lars_main.cpp - methods/linear_regression/linear_regression_main.cpp - methods/gmm/gmm_main.cpp - methods/kmeans/kmeans_main.cpp */ mlpack-2.2.5/doc/guide/timer.hpp000066400000000000000000000032451315013601400164600ustar00rootroot00000000000000/*! @page timer mlpack Timers @section timerintro Introduction mlpack provides a simple timer interface for the timing of machine learning methods. The results of any timers used during the program are displayed in the output by the mlpack::CLI object when --verbose is given: @code $ mlpack_knn -r dataset.csv -n neighbors_out.csv -d distances_out.csv -k 5 -v <...> [INFO ] Program timers: [INFO ] computing_neighbors: 0.010650s [INFO ] loading_data: 0.002567s [INFO ] saving_data: 0.001115s [INFO ] total_time: 0.149816s [INFO ] tree_building: 0.000534s @endcode @section usingtimer Timer API The mlpack::Timer class provides three simple methods: @code void Timer::Start(const char* name); void Timer::Stop(const char* name); timeval Timer::Get(const char* name); @endcode Each timer is given a name, and is referenced by that name. You can call \c Timer::Start() and \c Timer::Stop() multiple times for a particular timer name, and the result will be the sum of the runs of the timer. Note that \c Timer::Stop() must be called before \c Timer::Start() is called again, otherwise a std::runtime_error exception will be thrown. A "total_time" timer is run by default for each mlpack program. @section example Timer Example Below is a very simple example of timer usage in code. @code #include <mlpack/core.hpp> using namespace mlpack; int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); // Start a timer. Timer::Start("some_timer"); // Do some things. DoSomeStuff(); // Stop the timer. Timer::Stop("some_timer"); } @endcode If the --verbose flag was given to this executable, the resultant time that "some_timer" ran for would be shown. */ mlpack-2.2.5/doc/guide/version.hpp000066400000000000000000000017351315013601400170270ustar00rootroot00000000000000/*! @page verinfo mlpack version information @section vercode mlpack versions in code mlpack provides a couple of convenience macros and functions to get the version of mlpack. More information (and straightforward code) can be found in src/mlpack/core/util/version.hpp. The following three macros provide major, minor, and patch versions of mlpack (i.e. for mlpack-x.y.z, 'x' is the major version, 'y' is the minor version, and 'z' is the patch version): @code MLPACK_VERSION_MAJOR MLPACK_VERSION_MINOR MLPACK_VERSION_PATCH @endcode In addition, the function \c mlpack::util::GetVersion() returns the mlpack version as a string (for instance, "mlpack 1.0.8"). @section verex mlpack executable versions Each mlpack executable supports the \c --version (or \c -V ) option, which will print the version of mlpack used. If the version is not an official release but instead from svn trunk, the version will be "mlpack trunk" (and may have a revision number appended to "trunk").
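For instance, the short program below prints the version information; this is a minimal sketch that uses only the \c mlpack::util::GetVersion() function and the three macros documented above (it assumes that mlpack/core.hpp pulls in the version header; if not, include mlpack/core/util/version.hpp directly):

@code
#include <mlpack/core.hpp>
#include <iostream>

int main()
{
  // Print the full version string, e.g. "mlpack 2.2.5".
  std::cout << mlpack::util::GetVersion() << std::endl;

  // Print the individual version components.
  std::cout << MLPACK_VERSION_MAJOR << "." << MLPACK_VERSION_MINOR << "."
      << MLPACK_VERSION_PATCH << std::endl;
}
@endcode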
*/ mlpack-2.2.5/doc/policies/000077500000000000000000000000001315013601400153355ustar00rootroot00000000000000mlpack-2.2.5/doc/policies/elemtype.hpp000066400000000000000000000026671315013601400177050ustar00rootroot00000000000000/*! @page elem The ElemType policy in mlpack @section overview Overview \b mlpack algorithms should be as generic as possible. Often this means allowing arbitrary metrics or kernels to be used, but this also means allowing any type of data point to be used. This means that \b mlpack classes should support \c float, \c double, and other observation types. Some algorithms support this through the use of a \c MatType template parameter; others will have their own template parameter, \c ElemType. The \c ElemType template parameter can take any value that can be used by Armadillo (or, specifically, classes like \c arma::Mat<> and others); this encompasses the types - \c double - \c float - \c int - \c unsigned int - \c std::complex<float> - \c std::complex<double> and other primitive numeric types. Note that Armadillo does not support some integer types for functionality such as matrix decompositions or other more advanced linear algebra. This means that when these integer types are used, some algorithms may fail with Armadillo error messages indicating that those types cannot be used. @section developers A note for developers If the class has a \c MatType template parameter, \c ElemType can be easily defined as below: @code typedef typename MatType::elem_type ElemType; @endcode and otherwise a template parameter with the name \c ElemType can be used. It is generally a good idea to expose the element type somehow for use by other classes. */ mlpack-2.2.5/doc/policies/kernels.hpp000066400000000000000000000143331315013601400175150ustar00rootroot00000000000000/*! @page kernels The KernelType policy in mlpack @section kerneltoc Table of Contents - \ref kerneltype - \ref kerneltraits - \ref kernellist @section kerneltype Introduction to the KernelType policy `Kernel methods' make up a large class of machine learning techniques. Each of these methods is characterized by its dependence on a \b kernel \b function. In rough terms, a kernel function is a general notion of similarity between two points, with its value large when objects are similar and its value small when objects are dissimilar (note that this is not the only interpretation of what a kernel is). A kernel (or `Mercer kernel') \f$\mathcal{K}(\cdot, \cdot)\f$ takes two objects as input and returns some sort of similarity value. The specific details and properties of kernels are outside the scope of this documentation; for a better introduction to kernels and kernel methods, there are numerous other resources available, including Eric Kim's tutorial (http://www.eric-kim.net/eric-kim-net/posts/1/kernel_trick.html). mlpack implements a number of kernel methods and, accordingly, each of these methods allows arbitrary kernels to be used via the \c KernelType template parameter. Like the \ref metrics "MetricType policy", the requirements are quite simple: a class implementing the \c KernelType policy must have - an \c Evaluate() function - a default constructor. The signature of the \c Evaluate() function is straightforward: @code template<typename VecTypeA, typename VecTypeB> double Evaluate(const VecTypeA& a, const VecTypeB& b); @endcode The function takes two vector arguments, \c a and \c b, and returns a \c double that is the evaluation of the kernel between the two arguments.
So, for a particular kernel \f$\mathcal{K}(\cdot, \cdot)\f$, the \c Evaluate() function should return \f$\mathcal{K}(a, b)\f$. The arguments \c a and \c b, of types \c VecTypeA and \c VecTypeB, respectively, will be an Armadillo-like vector type (usually \c arma::vec, \c arma::sp_vec, or similar). In general it should be valid to assume that \c VecTypeA is a class with the same API as \c arma::vec. Note that for kernels that do not hold any state, the \c Evaluate() method can be marked as \c static. Overall, the \c KernelType template policy is quite simple (much like the \ref metrics "MetricType policy"). Below is an example kernel class, which outputs \c 1 if the vectors are close and \c 0 otherwise. @code class ExampleKernel { // Default constructor is required. ExampleKernel() { } // The example kernel holds no state, so we can mark Evaluate() as static. template<typename VecTypeA, typename VecTypeB> static double Evaluate(const VecTypeA& a, const VecTypeB& b) { // Get how far apart the vectors are (using the Euclidean distance). const double distance = arma::norm(a - b); if (distance < 0.05) // Less than 0.05 distance is "close". return 1; else return 0; } }; @endcode Then, this kernel may be easily used inside of mlpack algorithms. For instance, the code below runs kernel PCA (\c mlpack::kpca::KernelPCA) on a random dataset using the \c ExampleKernel. The results are saved to a file called \c results.csv. (Note that this is simply an example to demonstrate usage, and this example kernel isn't actually likely to be useful in practice.) @code #include <mlpack/core.hpp> #include <mlpack/methods/kernel_pca/kernel_pca.hpp> #include "example_kernel.hpp" // Contains the ExampleKernel class. using namespace mlpack; using namespace mlpack::kpca; using namespace arma; int main() { // Generate the random dataset; 10 dimensions, 5000 points. mat dataset = randu<mat>(10, 5000); // Instantiate the KernelPCA object with the ExampleKernel kernel type. KernelPCA<ExampleKernel> kpca; // The dataset will be transformed using kernel PCA with the example kernel to // contain only 2 dimensions. kpca.Apply(dataset, 2); // Save the results to 'results.csv'. data::Save("results.csv", dataset); } @endcode @section kerneltraits The KernelTraits trait class Some algorithms that use kernels can specialize if the kernel fulfills certain conditions. An example of a condition might be that the kernel is shift-invariant or that the kernel is normalized. In the case of fast max-kernel search (mlpack::fastmks::FastMKS), the computation can be accelerated if the kernel is normalized. For this reason, the \c KernelTraits trait class exists. This allows a kernel to specify via a \c const \c static \c bool when these types of conditions are satisfied. **Note that a KernelTraits class is not required,** but may be helpful. The \c KernelTraits trait class is a template class that takes a \c KernelType as a parameter, and exposes \c const \c static \c bool values that depend on the kernel. Setting these values is achieved by specialization. The code below provides an example, specializing \c KernelTraits for the \c ExampleKernel from earlier: @code template<> class KernelTraits<ExampleKernel> { public: //! The example kernel is normalized (K(x, x) = 1 for all x). const static bool IsNormalized = true; }; @endcode At this time, there is only one kernel trait that is used in mlpack code: - \c IsNormalized (defaults to \c false): if \f$ K(x, x) = 1 \; \forall x \f$, then the kernel is normalized and this should be set to true.
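To see how such a trait might be consumed, below is a minimal sketch of a generic function that skips the self-kernel evaluations when the kernel is normalized, while computing the kernel-induced distance \f$ \sqrt{K(a, a) + K(b, b) - 2 K(a, b)} \f$. The name \c KernelDistance() is hypothetical (it is not part of mlpack), and this sketch assumes that \c KernelTraits lives in the \c mlpack::kernel namespace, as it does for the built-in kernels:

@code
#include <mlpack/core.hpp>
#include <cmath>

// Hypothetical helper: the distance induced by a kernel's inner product.
template<typename KernelType>
double KernelDistance(KernelType& kernel,
                      const arma::vec& a,
                      const arma::vec& b)
{
  if (mlpack::kernel::KernelTraits<KernelType>::IsNormalized)
  {
    // K(x, x) = 1 for all x, so the self-kernel terms are already known.
    return std::sqrt(2.0 - 2.0 * kernel.Evaluate(a, b));
  }
  else
  {
    return std::sqrt(kernel.Evaluate(a, a) + kernel.Evaluate(b, b) -
        2.0 * kernel.Evaluate(a, b));
  }
}
@endcode

This mirrors the idea behind mlpack::metric::IPMetric, which induces a metric from a kernel in exactly this way.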
@section kernellist List of kernels and classes that use a \c KernelType mlpack comes with a number of pre-written kernels that satisfy the \c KernelType policy: - mlpack::kernel::LinearKernel - mlpack::kernel::ExampleKernel -- an example kernel with more documentation - mlpack::kernel::GaussianKernel - mlpack::kernel::HyperbolicTangentKernel - mlpack::kernel::EpanechnikovKernel - mlpack::kernel::CosineDistance - mlpack::kernel::LaplacianKernel - mlpack::kernel::PolynomialKernel - mlpack::kernel::TriangularKernel - mlpack::kernel::SphericalKernel - mlpack::kernel::PSpectrumStringKernel -- operates on strings, not vectors These kernels (or a custom kernel) may be used in a variety of mlpack methods: - mlpack::kpca::KernelPCA - kernel principal components analysis - mlpack::fastmks::FastMKS - fast max-kernel search - mlpack::kernel::NystroemMethod - the Nystroem method for sampling - mlpack::metric::IPMetric - a metric built on a kernel */ mlpack-2.2.5/doc/policies/metrics.hpp000066400000000000000000000102161315013601400175140ustar00rootroot00000000000000/*! @page metrics The MetricType policy in mlpack Many machine learning methods operate with some sort of metric, and often, this metric can be any arbitrary metric. For instance, consider the problem of nearest neighbor search; one can find the nearest neighbor of a point with respect to the standard Euclidean distance, or the Manhattan (city-block) distance. The actual search techniques, though, remain the same. And this is true of many machine learning methods: the specific metric that is used can be any valid metric. mlpack algorithms, when possible, allow the use of an arbitrary metric via the use of the \c MetricType template parameter. Any metric passed as a \c MetricType template parameter will need to have - an \c Evaluate function - a default constructor. The signature of the \c Evaluate function is straightforward: @code template<typename VecTypeA, typename VecTypeB> double Evaluate(const VecTypeA& a, const VecTypeB& b); @endcode The function takes two vector arguments, \c a and \c b, and returns a \c double that is the evaluation of the metric between the two arguments. So, for a particular metric \f$d(\cdot, \cdot)\f$, the \c Evaluate() function should return \f$d(a, b)\f$. The arguments \c a and \c b, of types \c VecTypeA and \c VecTypeB, respectively, will be an Armadillo-like vector type (usually \c arma::vec, \c arma::sp_vec, or similar). In general it should be valid to assume that \c VecTypeA is a class with the same API as \c arma::vec. Note that for metrics that do not hold any state, the \c Evaluate() method can be marked as \c static. Overall, the \c MetricType template policy is quite simple (much like the \ref kernels "KernelType policy"). Below is an example metric class, which implements the L2 distance: @code class ExampleMetric { // Default constructor is required. ExampleMetric() { } // The example metric holds no state, so we can mark Evaluate() as static. template<typename VecTypeA, typename VecTypeB> static double Evaluate(const VecTypeA& a, const VecTypeB& b) { // Return the L2 norm of the difference between the points, which is the // same as the L2 distance. return arma::norm(a - b); } }; @endcode Then, this metric can easily be used inside of other mlpack algorithms. For example, the code below runs range search on a random dataset with the \c ExampleMetric, by instantiating a \c mlpack::range::RangeSearch object that uses the \c ExampleMetric. Then, the number of results is printed.
The \c RangeSearch class takes three template parameters: \c MetricType, \c MatType, and \c TreeType. (All three have defaults, so we will just leave \c MatType and \c TreeType to their defaults.)

@code
#include <mlpack/core.hpp>
#include <mlpack/methods/range_search/range_search.hpp>
#include "example_metric.hpp" // A file that contains ExampleMetric.

using namespace mlpack;
using namespace mlpack::range;
using namespace std;

int main()
{
  // Create a random dataset with 10 dimensions and 5000 points.
  arma::mat data = arma::randu<arma::mat>(10, 5000);

  // Instantiate the RangeSearch object with the ExampleMetric.
  RangeSearch<ExampleMetric> rs(data);

  // These vectors will store the results.
  vector<vector<size_t>> neighbors;
  vector<vector<double>> distances;

  // Create a random 10-dimensional query point.
  arma::vec query = arma::randu<arma::vec>(10);

  // Find those points with distance (according to ExampleMetric) between 1
  // and 2 from the query point.
  rs.Search(query, math::Range(1.0, 2.0), neighbors, distances);

  // Now, print the number of points inside the desired range.  We know that
  // neighbors and distances will have length 1, since there was only one
  // query point.
  cout << neighbors[0].size() << " points within the range [1.0, 2.0] of the "
      << "query point!" << endl;
}
@endcode

mlpack comes with a number of pre-written metrics that satisfy the \c MetricType policy:

 - mlpack::metric::ManhattanDistance
 - mlpack::metric::EuclideanDistance
 - mlpack::metric::ChebyshevDistance
 - mlpack::metric::MahalanobisDistance
 - mlpack::metric::LMetric (for arbitrary L-metrics)
 - mlpack::metric::IPMetric (requires a \ref kernels KernelType parameter)

*/
mlpack-2.2.5/doc/policies/trees.hpp000066400000000000000000001141521315013601400171740ustar00rootroot00000000000000/*!

@page trees The TreeType policy in mlpack

@section treeintro Introduction

Trees are an important data structure in mlpack and are used in a number of the machine learning algorithms that mlpack implements. Often, the use of trees can allow significant acceleration of an algorithm; this is generally done by pruning away large parts of the tree during computation.

Most mlpack algorithms that use trees are not tied to a specific tree but instead allow the user to choose a tree via the \c TreeType template parameter. Any tree passed as a \c TreeType template parameter will need to implement a certain set of functions. In addition, a tree may optionally specify some traits about itself with the \c TreeTraits trait class.

This document aims to clarify the abstractions underlying mlpack trees, list and describe the required functionality of the \c TreeType policy, and point users towards existing types of trees. A table of contents is below:

 - \ref treeintro
 - \ref whatistree
 - \ref treetype_template_params
 - \ref treetype_api
 - \ref treetype_rigorous
   - \ref treetype_rigorous_template
   - \ref treetype_rigorous_constructor
   - \ref treetype_rigorous_basic
   - \ref treetype_rigorous_complex
   - \ref treetype_rigorous_serialization
 - \ref treetype_traits
 - \ref treetype_more

Although this document is long, there may still be errors and unclear areas. If you are having trouble understanding anything, please get in touch on Github or on the mailing list and someone will help you (and possibly update the documentation afterwards).

@section whatistree What is a tree?

In mlpack, we assume that we have some sort of data matrix, which might be sparse or dense (that is, it could be of type \c arma::mat or \c arma::sp_mat, or any variant that implements the Armadillo API). This data matrix corresponds to a collection of points in some space (usually a Euclidean space).
A tree is a way of organizing this data matrix in a hierarchical manner---so, points that are nearby should lie in similar nodes. We can rigorously define what a tree is, using the definition of **space tree** introduced in the following paper:

@quote
R.R. Curtin, W.B. March, P. Ram, D.V. Anderson, A.G. Gray, and C.L. Isbell Jr., "Tree-independent dual-tree algorithms," in Proceedings of the 30th International Conference on Machine Learning (ICML '13), pp. 1435--1443, 2013.
@endquote

The definition is:

A **space tree** on a dataset \f$ S \in \mathcal{R}^{N \times d} \f$ is an undirected, connected, acyclic, rooted simple graph with the following properties:

 - Each node (or vertex) holds a number of points (possibly zero) and is connected to one parent node and a number of child nodes (possibly zero).
 - There is one node in every space tree with no parent; this is the root node of the tree.
 - Each point in \f$S\f$ is contained in at least one node.
 - Each node corresponds to some subset of \f$\mathcal{R}^d\f$ that contains each point in the node and also the subsets that correspond to each child of the node.

This is really a quite straightforward definition: a tree is hierarchical, and each node corresponds to some region of the input space. Each node may have some number of children, and may hold some number of points. However, there is an important terminology distinction to make: the term **points held by a node** has a different meaning than the term **descendant points held by a node**. The points held in a node are just that---points held only in the node. The descendant points of a node are the combination of the points held in a node with the points held in the node's children and the points held in the node's children's children (and so forth). For the purposes of clarity in all discussions about trees, care is taken to differentiate the terms "descendant point" and "point".

Now, it's also important to note that a node does not *need* to hold any points, and that a node *can* hold the same points as its children (or its parent). Some types of trees do this. For instance, each node in the cover tree holds only one point, and may have a child that holds the same point. As another example, the \f$kd\f$-tree holds its points only in the leaves (at the bottom of the tree). More information on space trees can be found in either the "Tree-independent dual-tree algorithms" paper or any of the related literature.

So there is a huge amount of possible variety in the types of trees that can fall into the class of *space trees*. Therefore, it's important to treat them abstractly, and the \c TreeType policy allows us to do just that. All we need to remember is that a node in a tree can be represented as the combination of some points held in the node, some child nodes, and some geometric structure that represents the space that all of the descendant points fall into (this is a restatement of the fourth part of the definition).

@section treetype_template_params Template parameters required by the TreeType policy

Most everything in mlpack is decomposed into a series of configurable template parameters, and trees are no exception.
In order to ease usage of high-level mlpack algorithms, each \c TreeType itself must be a template class taking three parameters:

 - \c MetricType -- the underlying metric that the tree will be built on (see \ref metrics "the MetricType policy documentation")
 - \c StatisticType -- holds any auxiliary information that individual algorithms may need
 - \c MatType -- the type of the matrix used to represent the data

The reason that these three template parameters are necessary is so that each \c TreeType can be used as a template template parameter, which can radically simplify the required syntax for instantiating mlpack algorithms. By using template template parameters, a user needs only to write

@code
// The RangeSearch class takes a MetricType and a TreeType template parameter.
// This code instantiates RangeSearch with the ManhattanDistance and a
// QuadTree.  Note that the QuadTree itself is a template, and takes a
// MetricType, StatisticType, and MatType, just like the policy requires.

// This example ignores the constructor parameters, for the sake of simplicity.
RangeSearch<ManhattanDistance, QuadTree> rs(...);
@endcode

as opposed to the far more complicated alternative, where the user must specify the values of each template parameter of the tree type:

@code
// This is a much worse alternative, where the user must specify the template
// arguments of their tree.
RangeSearch<ManhattanDistance,
            QuadTree<ManhattanDistance, EmptyStatistic, arma::mat>> rs(...);
@endcode

Unfortunately, the price to pay for this user convenience is that *every* \c TreeType must have three template parameters, and they must be in exactly that order. Fortunately, there is an additional benefit: we are guaranteed that the tree is built using the same metric as the method (that is, a user can't specify different metric types to the algorithm and to the tree, which they can without template template parameters). There are two important notes about this:

 - Not every possible input of MetricType, StatisticType, and/or MatType necessarily need to be valid or work correctly for each type of tree. For instance, the QuadTree is limited to Euclidean metrics and will not work otherwise. Either compile-time static checks or detailed documentation can help keep users from using invalid combinations of template arguments.

 - Some types of trees have more template parameters than just these three. One example is the generalized binary space tree, where the bounding shape of each node is easily made into a fourth template parameter (the \c BinarySpaceTree class calls this the \c BoundType parameter), and the procedure used to split a node is easily made into a fifth template parameter (the \c BinarySpaceTree class calls this the \c SplitType parameter). However, the syntax of template template parameters *requires* that the class only has the correct number of template parameters---no more, no less. Fortunately, C++11 allows template typedefs, which can be used to provide partial specialization of template classes:

@code
// This is the definition of the BinarySpaceTree class, which has five template
// parameters.
template<typename MetricType,
         typename StatisticType,
         typename MatType,
         typename BoundType,
         typename SplitType>
class BinarySpaceTree;

// The 'using' keyword gives us a template typedef, so we can define the
// MeanSplitKDTree template class, which has three parameters and is a valid
// TreeType policy class.
template<typename MetricType, typename StatisticType, typename MatType>
using MeanSplitKDTree = BinarySpaceTree<MetricType,
                                        StatisticType,
                                        MatType,
                                        bound::HRectBound<MetricType>,
                                        MeanSplit>;
@endcode

Now, the \c MeanSplitKDTree class has only three template parameters and can be used as a \c TreeType policy class in various mlpack algorithms.
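For instance, a sketch of how the typedef might then be used (following the same simplified form as the \c RangeSearch examples above, with constructor parameters beyond an assumed pre-loaded data matrix \c data elided):

@code
// MeanSplitKDTree can now be passed anywhere a TreeType template template
// parameter is expected, just like any built-in three-parameter tree type.
RangeSearch<ManhattanDistance, MeanSplitKDTree> rs(data);
@endcode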
Many types of trees in mlpack have more than three template parameters and rely on template typedefs to provide simplified \c TreeType interfaces.

@section treetype_api The TreeType API

As a result of the definition of *space tree* in the previous section, a simplified API presents itself quite easily. However, more complex functionality is often necessary in mlpack, so this leads to more functions being necessary for a class to satisfy the \c TreeType policy. Combining this with the template parameters required for trees given in the previous section gives us the complete API required for a class implementing the \c TreeType policy. Below is the minimal set of functions required with minor documentation for each function. (More extensive documentation and explanation is given afterwards.)

@code
// The three template parameters will be supplied by the user, and are detailed
// in the previous section.
template<typename MetricType,
         typename StatisticType,
         typename MatType>
class ExampleTree
{
 public:
  //////////////////////
  //// Constructors ////
  //////////////////////

  // This batch constructor does not modify the dataset, and builds the entire
  // tree using a default-constructed MetricType.
  ExampleTree(const MatType& data);

  // This batch constructor does not modify the dataset, and builds the entire
  // tree using the given MetricType.
  ExampleTree(const MatType& data, MetricType& metric);

  // Initialize the tree from a given boost::serialization archive.  SFINAE
  // (the second argument) is necessary to ensure that the archive is loading,
  // not saving.
  template<typename Archive>
  ExampleTree(
      Archive& ar,
      const typename boost::enable_if<typename Archive::is_loading>::type* = 0);

  // Release any resources held by the tree.
  ~ExampleTree();

  // ///////////////////////// //
  // // Basic functionality // //
  // ///////////////////////// //

  // Get the dataset that the tree is built on.
  const MatType& Dataset();

  // Get the metric that the tree is built with.
  MetricType& Metric();

  // Get/modify the StatisticType for this node.
  StatisticType& Stat();

  // Return the parent of the node, or NULL if this is the root.
  ExampleTree* Parent();

  // Return the number of children held by the node.
  size_t NumChildren();

  // Return the i'th child held by the node.
  ExampleTree& Child(const size_t i);

  // Return the number of points held in the node.
  size_t NumPoints();

  // Return the index of the i'th point held in the node.
  size_t Point(const size_t i);

  // Return the number of descendant nodes of this node.
  size_t NumDescendantNodes();

  // Return the i'th descendant node of this node.
  ExampleTree& DescendantNode(const size_t i);

  // Return the number of descendant points of this node.
  size_t NumDescendants();

  // Return the index of the i'th descendant point of this node.
  size_t Descendant(const size_t i);

  // Store the center of the bounding region of the node in the given vector.
  void Center(arma::vec& center);

  // ///////////////////////////////////////////////// //
  // // More complex distance-related functionality // //
  // ///////////////////////////////////////////////// //

  // Return the distance between the center of this node and the center of
  // its parent.
  double ParentDistance();

  // Return an upper bound on the furthest possible distance between the
  // center of the node and any point held in the node.
  double FurthestPointDistance();

  // Return an upper bound on the furthest possible distance between the
  // center of the node and any descendant point of the node.
  double FurthestDescendantDistance();

  // Return a lower bound on the minimum distance between the center and any
  // edge of the node's bounding shape.
  double MinimumBoundDistance();

  // Return a lower bound on the minimum distance between the given point and
  // the node.
  template<typename VecType>
  double MinDistance(VecType& point);

  // Return a lower bound on the minimum distance between the given node and
  // this node.
  double MinDistance(ExampleTree& otherNode);

  // Return an upper bound on the maximum distance between the given point and
  // the node.
  template<typename VecType>
  double MaxDistance(VecType& point);

  // Return an upper bound on the maximum distance between the given node and
  // this node.
  double MaxDistance(ExampleTree& otherNode);

  // Return the combined results of MinDistance() and MaxDistance().
  template<typename VecType>
  math::Range RangeDistance(VecType& point);

  // Return the combined results of MinDistance() and MaxDistance().
  math::Range RangeDistance(ExampleTree& otherNode);

  // //////////////////////////////////// //
  // // Serialization (loading/saving) // //
  // //////////////////////////////////// //

  // Return a string representation of the tree.
  std::string ToString() const;

  // Serialize the tree (load from the given archive / save to the given
  // archive, depending on its type).
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int version);

 protected:
  // A default constructor; only meant to be used by boost::serialization.
  // This must be protected so that boost::serialization will work; it does
  // not need to return a valid tree.
  ExampleTree();

  // Friend access must be given for the default constructor.
  friend class boost::serialization::access;
};
@endcode

Although this is significantly more complex than the four-item definition of *space tree* might suggest, it turns out many of these methods are not difficult to implement for most reasonable tree types. It is also important to realize that this is a *minimum* API; you may implement more complex tree types at your leisure (and you may include more template parameters too, though you will have to use template typedefs to provide versions with three parameters; see \ref treetype_template_params "the previous section").

Before diving into the detailed documentation for each function, let us consider a few important points about the implications of this API:

 - **Trees are not default-constructible** and should not (in general) provide a default constructor. This helps prevent invalid trees. In general, any instantiated mlpack object should be valid and ready to use---and a tree built on no points is not valid or ready to use.

 - **Trees only need to provide batch constructors.** Although many tree types do have algorithms for incremental insertions, in mlpack this is not required because the tree-based algorithms that mlpack implements generally assume fully-built, non-modifiable trees. For this purpose, batch construction is perfectly sufficient. (It's also worth pointing out that for some types of trees, like kd-trees, the cost of a handful of insertions often outweighs the cost of completely rebuilding the tree.)

 - **Trees must provide a number of distance bounding functions.** The utility of trees generally stems from the ability to place quick bounds on distance-related quantities. For instance, if all the descendant points of a node are bounded by a ball of radius \f$\lambda\f$ and the center of the node is a point \f$c\f$, then the minimum distance between some point \f$p\f$ and any descendant point of the node is equal to the distance between \f$p\f$ and \f$c\f$ minus the radius \f$\lambda\f$: \f$d(p, c) - \lambda\f$.
This is a fast calculation, and (usually) provides a decent bound on the minimum distance between \f$p\f$ and any descendant point of the node.

 - **Trees need to be able to be serialized.** mlpack uses the boost::serialization library for saving and loading objects. Trees---which can be a part of machine learning models---therefore must have the ability to be saved and loaded. Making this all work requires a protected constructor (part of the API) and generally makes it impossible to hold references instead of pointers internally, because if a tree is loaded from a file then it must own the dataset it is built on and the metric it uses (this also means that a destructor must exist for freeing these resources).

Now, we can consider each part of the API more rigorously.

@section treetype_rigorous Rigorous API documentation

This section is divided into five parts:

 - \ref treetype_rigorous_template
 - \ref treetype_rigorous_constructor
 - \ref treetype_rigorous_basic
 - \ref treetype_rigorous_complex
 - \ref treetype_rigorous_serialization

@subsection treetype_rigorous_template Template parameters

\ref treetype_template_params "An earlier section" discussed the three different template parameters that are required by the \c TreeType policy.

The \ref metrics "MetricType policy" provides one method that will be useful for tree building and other operations:

@code
// This function is required by the MetricType policy.
// Evaluate the metric between two points (which may be of different types).
template<typename VecTypeA, typename VecTypeB>
double Evaluate(const VecTypeA& a, const VecTypeB& b);
@endcode

Note that this method is not necessarily static, so a \c MetricType object should be held internally and its \c Evaluate() method should be called whenever the distance between two points is required. **It is generally a bad idea to hardcode any distance calculation in your tree.** This will make the tree unable to generalize to arbitrary metrics. If your tree must depend on certain assumptions holding about the metric (i.e. the metric is a Euclidean metric), then make that clear in the documentation of the tree, so users do not try to use the tree with an inappropriate metric.

The second template parameter, \c StatisticType, is for auxiliary information that is required by certain algorithms. For instance, consider an algorithm which repeatedly uses the variance of the descendant points of a node. It might be tempting to add a \c Variance() method to the required \c TreeType API, but this quickly leads to code bloat (after all, the API already has quite enough functions as it is). Instead, it is better to create a \c StatisticType class which provides the \c Variance() method, and then call \c Stat().Variance() when the variance is required. This also holds true for cached data members.

Each node should have its own instance of a \c StatisticType class. The \c StatisticType must provide the following constructor:

@code
// This constructor is required by the StatisticType policy.
template<typename TreeType>
StatisticType(TreeType& node);
@endcode

This constructor should be called with \c (*this) after the node is constructed (usually, this ends up being the last line in the constructor of a node).

The last template parameter is the \c MatType parameter. This is generally \c arma::mat or \c arma::sp_mat, but could be any Armadillo type, including matrices that hold data points of different precisions (such as \c float or even \c int).
It generally suffices to write \c MatType assuming that \c arma::mat will be used, since the vast majority of the time this will be what is used.

@subsection treetype_rigorous_constructor Constructors and destructors

The \c TreeType API requires at least three constructors. Technically, it does not *require* a destructor, but almost certainly your tree class will be doing some memory management internally and should have one (though not always).

The first two constructors are variations of the same idea:

@code
// This batch constructor does not modify the dataset, and builds the entire
// tree using a default-constructed MetricType.
ExampleTree(const MatType& data);

// This batch constructor does not modify the dataset, and builds the entire
// tree using the given MetricType.
ExampleTree(const MatType& data, MetricType& metric);
@endcode

All that is required here is that a constructor is available that takes a dataset and optionally an instantiated metric. If no metric is provided, then it should be assumed that the \c MetricType class has a default constructor and a default-constructed metric should be used. The constructor *must* return a valid, fully-constructed, ready-to-use tree that satisfies the definition of *space tree* that was \ref whatistree "given earlier". It is possible to implement both these constructors as one by using \c boost::optional.

The third constructor requires the tree to be initializable from a \c boost::serialization archive:

@code
// Initialize the tree from a given boost::serialization archive.  SFINAE (the
// second argument) is necessary to ensure that the archive is loading, not
// saving.
template<typename Archive>
ExampleTree(
    Archive& ar,
    const typename boost::enable_if<typename Archive::is_loading>::type* = 0);
@endcode

This has implications on how the tree must be stored. In this case, the dataset is *not yet loaded* and therefore the tree **may be required to have ownership of the data matrix**. This means that realistically the most reasonable way to represent the data matrix internally in a tree class is not with a reference but instead with a pointer. If this is true, then a destructor will be required:

@code
// Release any resources held by the tree.
~ExampleTree();
@endcode

and, if the data matrix is represented internally with a pointer, this destructor will need to release the memory for the data matrix (in the case that the tree was created via \c boost::serialization ).

Note that these constructors are not necessarily the only constructors that a \c TreeType implementation can provide. One important example of when more constructors are useful is when the tree rearranges points internally; this might be desired for the sake of speed or memory optimization. But to do this with the required constructors would necessarily incur a copy of the data matrix, because the user will pass a \c "const MatType&". One alternate solution is to provide a constructor which takes an rvalue reference to a \c MatType:

@code
ExampleTree(MatType&& data);
@endcode

(and another overload that takes an instantiated metric), and then the user can use \c std::move() to build the tree without copying the data matrix, although the data matrix will be modified:

@code
ExampleTree exTree(std::move(dataset));
@endcode

It is, of course, possible to add even more constructors if desired.

@subsection treetype_rigorous_basic Basic tree functionality

The basic functionality of a class implementing the \c TreeType API is quite straightforward and intuitive.

@code
// Get the dataset that the tree is built on.
const MatType& Dataset();
@endcode

This should return a \c const reference to the dataset the tree is built on. The fact that this function is required essentially means that each node in the tree must store a pointer to the dataset (this is not the only option, but it is the most obvious option).

@code
// Get the metric that the tree is built with.
MetricType& Metric();
@endcode

Each node must also store an instantiated metric or a pointer to one (note that this is required even for metrics that have no state and have a \c static \c Evaluate() function).

@code
// Get/modify the StatisticType for this node.
StatisticType& Stat();
@endcode

As discussed earlier, each node must hold a \c StatisticType; this is accessible through the \c Stat() function.

@code
// Return the parent of the node, or NULL if this is the root.
ExampleTree* Parent();

// Return the number of children held by the node.
size_t NumChildren();

// Return the i'th child held by the node.
ExampleTree& Child(const size_t i);

// Return the number of points held in the node.
size_t NumPoints();

// Return the index of the i'th point held in the node.
size_t Point(const size_t i);

// Return the number of descendant nodes of this node.
size_t NumDescendantNodes();

// Return the i'th descendant node of this node.
ExampleTree& DescendantNode(const size_t i);

// Return the number of descendant points of this node.
size_t NumDescendants();

// Return the index of the i'th descendant point of this node.
size_t Descendant(const size_t i);
@endcode

These functions are all fairly self-explanatory. Most algorithms will use the \c Parent(), \c Child(), \c NumChildren(), \c Point(), and \c NumPoints() functions, so care should be taken when implementing those functions to ensure they will be efficient. Note that \c Point() and \c Descendant() should return indices of points, so the actual points can be accessed by calling \c "Dataset().col(Point(i))" for some index \c i (or something similar).

An important note about the \c Descendant() function is that each descendant point should be unique. So if a node holds the point with index 6 and it has one child that holds the points with indices 6 and 7, then \c NumDescendants() should return 2, not 3. The ordering in which the descendants are returned can be arbitrary; so, \c Descendant(0) can return 6 \b or 7, and \c Descendant(1) should return the other index.

@code
// Store the center of the bounding region of the node in the given vector.
void Center(arma::vec& center);
@endcode

The last function, \c Center(), should calculate the center of the bounding shape and store it in the given vector. So, for instance, if the tree is a ball tree, then the center is simply the center of the ball. Algorithm writers would be wise to try and avoid the use of \c Center() if possible, since it will necessarily cost a copy of a vector.

@subsection treetype_rigorous_complex Complex tree functionality and bounds

A node in a tree should also be able to calculate various distance-related bounds; these are particularly useful in tree-based algorithms. Note that any of these bounds does not necessarily need to be maximally tight; generally it is more important that each bound can be easily calculated.

Details on each bounding function that the \c TreeType API requires are given below.

@code
// Return the distance between the center of this node and the center of
// its parent.
double ParentDistance();
@endcode

Remember that each node corresponds to some region in the space that the dataset lies in.
For most tree types this shape is often something geometrically simple: a ball, a cone, a hyperrectangle, a slice, or something similar. The \c ParentDistance() function should return the distance between the center of this node's region and the center of the parent node's region.

In practice this bound is often used in dual-tree (or single-tree) algorithms to place an easy \c MinDistance() (or \c MaxDistance() ) bound for a child node; the parent's \c MinDistance() (or \c MaxDistance() ) function is called and then adjusted with \c ParentDistance() to provide a possibly loose but efficient bound on what the result of \c MinDistance() (or \c MaxDistance() ) would be with the child.

@code
// Return an upper bound on the furthest possible distance between the
// center of the node and any point held in the node.
double FurthestPointDistance();

// Return an upper bound on the furthest possible distance between the
// center of the node and any descendant point of the node.
double FurthestDescendantDistance();
@endcode

It is often very useful to be able to bound the radius of a node, which is effectively what \c FurthestDescendantDistance() does. Often it is easiest to simply calculate and cache the furthest descendant distance at tree construction time. Some trees, such as the cover tree, are able to give guarantees that the points held in the node will necessarily be closer than the descendant points; therefore, the \c FurthestPointDistance() function is also useful. It is permissible to simply have \c FurthestPointDistance() return the result of \c FurthestDescendantDistance(), and that will still be a valid bound, but depending on the type of tree it may be possible to have \c FurthestPointDistance() return a tighter bound.

@code
// Return a lower bound on the minimum distance between the center and any
// edge of the node's bounding shape.
double MinimumBoundDistance();
@endcode

This is, admittedly, a somewhat complex and weird quantity. It is one of the less important bounding functions, so it is valid to simply return 0...

The bound is a bound on the minimum distance between the center of the node and any edge of the shape that bounds all of the descendants of the node. So, if the bounding shape is a ball (as in a ball tree or a cover tree), then \c MinimumBoundDistance() should just return the radius of the ball. If the bounding shape is a hypercube (as in a generalized octree), then \c MinimumBoundDistance() should return the side length divided by two. If the bounding shape is a hyperrectangle (as in a kd-tree or a spill tree), then \c MinimumBoundDistance() should return half the side length of the hyperrectangle's smallest side.

@code
// Return a lower bound on the minimum distance between the given point and
// the node.
template<typename VecType>
double MinDistance(VecType& point);

// Return a lower bound on the minimum distance between the given node and
// this node.
double MinDistance(ExampleTree& otherNode);

// Return an upper bound on the maximum distance between the given point and
// the node.
template<typename VecType>
double MaxDistance(VecType& point);

// Return an upper bound on the maximum distance between the given node and
// this node.
double MaxDistance(ExampleTree& otherNode);

// Return the combined results of MinDistance() and MaxDistance().
template<typename VecType>
math::Range RangeDistance(VecType& point);

// Return the combined results of MinDistance() and MaxDistance().
math::Range RangeDistance(ExampleTree& otherNode);
@endcode

These six functions are almost without a doubt the most important functionality of a tree. Therefore, it is preferable that these methods be implemented as efficiently as possible, as they may potentially be called many millions of times in a tree-based algorithm. It is also preferable that these bounds be as tight as possible. In tree-based algorithms, these are used for pruning away work, and tighter bounds mean that more pruning is possible.

Of these six functions, there are only really two bounds that are desired here: the *minimum distance* between a node and an object, and the *maximum distance* between a node and an object. The object may be either a vector (usually \c arma::vec ) or another tree node.

Consider the first case, where the object is a vector. The result of \c MinDistance() needs to be less than or equal to the true minimum distance, which could be calculated as below:

@code
// We assume that we have a vector 'vec', and a tree node 'node'.
double trueMinDist = DBL_MAX;
for (size_t i = 0; i < node.NumDescendants(); ++i)
{
  const double dist = node.Metric().Evaluate(vec,
      node.Dataset().col(node.Descendant(i)));
  if (dist < trueMinDist)
    trueMinDist = dist;
}
// At the end of the loop, trueMinDist will hold the true minimum distance
// between 'vec' and any descendant point of 'node'.
@endcode

Often the bounding shape of a node will allow a quick calculation that will make a reasonable bound. For instance, if the node's bounding shape is a ball with radius \c r and center \c ctr, the calculation is simply \c "(node.Metric().Evaluate(vec, ctr) - r)". Usually a good \c MinDistance() or \c MaxDistance() function will make only one call to the \c Evaluate() function of the metric.

The \c RangeDistance() function allows a way for both bounds to be calculated at once. It is possible to implement this as a call to \c MinDistance() followed by a call to \c MaxDistance(), but this may incur more metric \c Evaluate() calls than necessary. Often calculating both bounds at once can be more efficient and can be done with fewer \c Evaluate() calls than calling both \c MinDistance() and \c MaxDistance().

@subsection treetype_rigorous_serialization Serialization

The last two public functions that the \c TreeType API requires are related to serialization and printing.

@code
// Return a string representation of the tree.
std::string ToString() const;
@endcode

There are few restrictions on the precise way that the \c ToString() function should operate, but generally it should behave similarly to the \c ToString() function in other mlpack methods. Generally, a user will call \c ToString() when they want to inspect the object and see what it looks like. For a tree, printing the entire tree may be way more information than the user was expecting, so it may be a better option to print either only the node itself or the node plus one or two levels of children.

@code
// Serialize the tree (load from the given archive / save to the given
// archive, depending on its type).
template<typename Archive>
void Serialize(Archive& ar, const unsigned int version);

protected:
// A default constructor; only meant to be used by boost::serialization.  This
// must be protected so that boost::serialization will work; it does not need
// to return a valid tree.
ExampleTree();

// Friend access must be given for the default constructor.
friend class boost::serialization::access;
@endcode

On the other hand, the specifics of the functionality required for the \c Serialize() function are somewhat more difficult. The \c Serialize() function will be called either when a tree is being saved to disk or loaded from disk. The \c boost::serialization documentation is fairly comprehensive, but when writing a \c Serialize() method for mlpack trees you should use \c data::CreateNVP() instead of \c BOOST_SERIALIZATION_NVP(). This is because mlpack classes implement \c Serialize() instead of \c serialize() in order to conform to the mlpack style guidelines, and making this work requires some interesting shim code, which is hidden inside of \c data::CreateNVP(). It may be useful to look at other \c Serialize() methods contained in other mlpack classes as an example.

An important note is that it is very difficult to use references with \c boost::serialization, because \c Serialize() may be called at any time during the object's lifetime, and references cannot be re-seated. In general this will require the use of pointers, which then require manual memory management. Therefore, be careful that \c Serialize() (and the tree's destructor) properly handle memory management!

@section treetype_traits The TreeTraits trait class

Some tree-based algorithms can specialize if the tree fulfills certain conditions. For instance, if the regions represented by two sibling nodes cannot overlap, an algorithm may be able to perform a simpler computation. Based on this reasoning, the \c TreeTraits trait class (much like the mlpack::kernel::KernelTraits class) exists in order to allow a tree to specify (via a \c const \c static \c bool) when these types of conditions are satisfied. **Note that a TreeTraits class is not required,** but may be helpful.

The \c TreeTraits trait class is a template class that takes a \c TreeType as a parameter, and exposes \c const \c static \c bool values that depend on the tree. Setting these values is achieved by specialization. The code below shows the default \c TreeTraits values (these are the values that will be used if no specialization is provided for a given \c TreeType).

@code
template<typename TreeType>
class TreeTraits
{
 public:
  // This is true if the subspaces represented by the children of a node can
  // overlap.
  static const bool HasOverlappingChildren = true;

  // This is true if Point(0) is the centroid of the node.
  static const bool FirstPointIsCentroid = false;

  // This is true if the points contained in the first child of a node
  // (Child(0)) are also contained in that node.
  static const bool HasSelfChildren = false;

  // This is true if the tree rearranges points in the dataset when it is
  // built.
  static const bool RearrangesDataset = false;

  // This is true if the tree always has only two children.
  static const bool BinaryTree = false;
};
@endcode

An example specialization for the \ref mlpack::tree::KDTree class is given below. Note that \ref mlpack::tree::KDTree is itself a template class (like every class satisfying the \c TreeType policy), so we are specializing to a template parameter.

@code
template<typename MetricType, typename StatisticType, typename MatType>
class TreeTraits<KDTree<MetricType, StatisticType, MatType>>
{
 public:
  // The regions represented by the two children of a node may not overlap.
  static const bool HasOverlappingChildren = false;

  // There is no guarantee that the first point of a node is the centroid.
  static const bool FirstPointIsCentroid = false;

  // Points are not contained at multiple levels (only at the leaves).
static const bool HasSelfChildren = false; // Points are rearranged during the building of the tree. static const bool RearrangesDataset = true; // The tree is always binary. static const bool BinaryTree = true; }; @endcode Currently, the traits available are each of the five detailed above. For more information, see the \ref mlpack::tree::TreeTraits documentation. @section treetype_more A list of trees in mlpack and more information mlpack contains several ready-to-use implementations of trees that satisfy the TreeType policy API: - mlpack::tree::KDTree - mlpack::tree::MeanSplitKDTree - mlpack::tree::BallTree - mlpack::tree::MeanSplitBallTree - mlpack::tree::RTree - mlpack::tree::RStarTree - mlpack::tree::StandardCoverTree Often, these are template typedefs of more flexible tree classes: - mlpack::tree::BinarySpaceTree -- binary trees, such as the KD-tree and ball tree - mlpack::tree::RectangleTree -- the R tree and variants - mlpack::tree::CoverTree -- the cover tree and variants */ mlpack-2.2.5/doc/tutorials/000077500000000000000000000000001315013601400155545ustar00rootroot00000000000000mlpack-2.2.5/doc/tutorials/README.md000066400000000000000000000053011315013601400170320ustar00rootroot00000000000000 ## Tutorials Tutorials for mlpack can be found [here : mlpack tutorials](http://www.mlpack.org/tutorials.html). ### General mlpack tutorials These tutorials introduce the basic concepts of working with mlpack, aimed at developers who want to use and contribute to mlpack but are not sure where to start. * [Building mlpack from source](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=build.html) * [File Formats in mlpack](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=formatdoc.html) * [Matrices in mlpack](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=matrices.html) * [mlpack input and output](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=iodoc.html) * [mlpack timers](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=timer.html) * [Simple sample mlpack programs](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=sample.html) ### Method-specific tutorials These tutorials introduce the various methods mlpack offers, aimed at users who want to get started quickly. These tutorials start with simple examples and progress to complex, extensible uses. * [NeighborSearch tutorial (mlpack_knn / mlpack_kfn)](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=nstutorial.html) * [LinearRegression tutorial (mlpack_linear_regression)](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=lrtutorial.html) * [RangeSearch tutorial (mlpack_range_search)](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=rstutorial.html) * [Density Estimation Trees tutorial (mlpack_det)](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=dettutorial.html) * [K-Means tutorial (mlpack_kmeans)](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=kmtutorial.html) * [FastMKS tutorial (mlpack_fastmks)](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=fmkstutorial.html) * [Euclidean Minimum Spanning Trees tutorial (mlpack_emst)](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=emst_tutorial.html) * [Alternating Matrix Factorization Tutorial](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=amftutorial.html) * [Collaborative Filtering Tutorial](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=cftutorial.html) ### Policy Class Documentation mlpack uses templates to achieve its genericity and flexibility. 
Some of the template types used by mlpack are common across multiple machine learning algorithms. The links below provide documentation for some of these common types.

* [The MetricType policy in mlpack](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=metrics.html)
* [The KernelType policy in mlpack](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=kernels.html)
* [The TreeType policy in mlpack](http://www.mlpack.org/docs/mlpack-git/doxygen.php?doc=trees.html)
mlpack-2.2.5/doc/tutorials/amf/000077500000000000000000000000001315013601400163175ustar00rootroot00000000000000mlpack-2.2.5/doc/tutorials/amf/amf.txt000066400000000000000000000201661315013601400176300ustar00rootroot00000000000000/*!

@file amf.txt
@author Sumedh Ghaisas
@brief Tutorial for how to use the AMF class.

@page amftutorial Alternating Matrix Factorization tutorial.

@section intro_amftut Introduction to Alternating Matrix Factorization

Alternating matrix factorization decomposes a matrix V in the form \f$ V \approx WH \f$, where W is called the basis matrix and H is called the encoding matrix. V is taken to be of size n x m, and the obtained W is n x r and H is r x m. The size r is called the rank of the factorization. Factorization is done by alternately calculating W and H respectively while holding the other matrix constant.

\b mlpack provides:

 - a \ref amf_amftut "simple C++ interface" to perform Alternating Matrix Factorization

@section toc_amftut Table of Contents

A list of all the sections this tutorial contains.

 - \ref intro_amftut
 - \ref toc_amftut
 - \ref amf_amftut
   - \ref t_policy_amftut
   - \ref init_rule_amftut
   - \ref update_rule_amftut
   - \ref nmf_amftut
   - \ref svd_amftut
 - \ref further_doc_amftut

@section amf_amftut The 'AMF' class

The AMF class is templatized with 3 parameters: the first contains the policy used to determine when the algorithm has converged; the second contains the initialization rule for the W and H matrices; the last contains the update rule to be used during each iteration. This templatization allows the user to try various update rules, initialization rules, and termination policies (including ones not supplied with mlpack) for factorization.

The class provides the following method that performs factorization:

@code
template<typename MatType>
double Apply(const MatType& V,
             const size_t r,
             arma::mat& W,
             arma::mat& H);
@endcode

@subsection t_policy_amftut Using different termination policies

The AMF implementation comes with different termination policies to support many implemented algorithms. Every termination policy implements the following method, which returns the status of convergence.

@code
bool IsConverged(arma::mat& W, arma::mat& H)
@endcode

A list of all the termination policies:

 - \ref mlpack::amf::SimpleResidueTermination
 - \ref mlpack::amf::SimpleToleranceTermination
 - \ref mlpack::amf::ValidationRMSETermination

In SimpleResidueTermination, the termination decision depends on two factors: the value of the residue and the number of iterations. If the current value of the residue drops below the threshold or the number of iterations goes beyond the threshold, a positive termination signal is passed to AMF.

In SimpleToleranceTermination, the termination criterion is met when the improvement in the residue value drops below the given tolerance. To accommodate spikes, a certain number of successive residue drops are accepted. A secondary termination criterion terminates the algorithm when the iteration count goes beyond the threshold.

ValidationRMSETermination divides the data into two sets: a training set and a validation set.
Entries of the validation set are nullified in the input matrix. The termination criterion is met when the improvement in the validation set RMSE value drops below the given tolerance. To accommodate spikes, a certain number of successive validation RMSE drops are accepted; this upper limit on successive drops can be adjusted with \c reverseStepCount. A secondary termination criterion terminates the algorithm when the iteration count goes above the threshold. Though this termination policy is a better measure of convergence than the above two termination policies, it may cause an overhead in performance.

On the other hand, \ref mlpack::amf::CompleteIncrementalTermination "CompleteIncrementalTermination" and \ref mlpack::amf::IncompleteIncrementalTermination "IncompleteIncrementalTermination" are just wrapper classes for other termination policies. These policies are used when AMF is applied with \ref mlpack::amf::SVDCompleteIncrementalLearning "SVDCompleteIncrementalLearning" and \ref mlpack::amf::SVDIncompleteIncrementalLearning "SVDIncompleteIncrementalLearning" respectively.

@subsection init_rule_amftut Using different initialization policies

The AMF class comes with two initialization policies:

 - \ref mlpack::amf::RandomInitialization "RandomInitialization"
 - \ref mlpack::amf::RandomAcolInitialization "RandomAcolInitialization"

RandomInitialization initializes the matrices W and H with a random uniform distribution, while RandomAcolInitialization initializes the W matrix by averaging p randomly chosen columns of V. In the case of RandomAcolInitialization, p is a template parameter.

To implement their own initialization policy, users need to define the following function in their class.

@code
template<typename MatType>
inline static void Initialize(const MatType& V,
                              const size_t r,
                              arma::mat& W,
                              arma::mat& H)
@endcode

@subsection update_rule_amftut Using different update rules

AMF supports the following update rules:

 - \ref mlpack::amf::NMFALSUpdate "NMFALSUpdate"
 - \ref mlpack::amf::NMFMultiplicativeDistanceUpdate "NMFMultiplicativeDistanceUpdate"
 - \ref mlpack::amf::NMFMultiplicativeDivergenceUpdate "NMFMultiplicativeDivergenceUpdate"
 - \ref mlpack::amf::SVDBatchLearning "SVDBatchLearning"
 - \ref mlpack::amf::SVDIncompleteIncrementalLearning "SVDIncompleteIncrementalLearning"
 - \ref mlpack::amf::SVDCompleteIncrementalLearning "SVDCompleteIncrementalLearning"

Non-Negative Matrix Factorization can be achieved with NMFALSUpdate, NMFMultiplicativeDistanceUpdate, or NMFMultiplicativeDivergenceUpdate. NMFALSUpdate implements a simple Alternating Least Squares optimization, while the other rules implement algorithms given in the paper 'Algorithms for Non-negative Matrix Factorization'.

The remaining update rules perform Singular Value Decomposition of the matrix V. This SVD factorization is optimized for use by Collaborative Filtering. This use of SVD factorizers for Collaborative Filtering is described in the paper 'A Guide to Singular Value Decomposition' by Chih-Chao Ma. For further details about the algorithms refer to the respective class documentation.

@subsection nmf_amftut Using Non-Negative Matrix Factorization with AMF

The use of AMF for Non-Negative Matrix Factorization is simple. The AMF module defines \ref mlpack::amf::NMFALSFactorizer "NMFALSFactorizer", which can be used directly without knowing the internal structure of AMF.
For example:

@code
#include <iostream>
#include <mlpack/core.hpp>
#include <mlpack/methods/amf/amf.hpp>

using namespace std;
using namespace arma;
using namespace mlpack::amf;

int main()
{
  NMFALSFactorizer nmf;
  mat W, H;
  mat V = randu<mat>(100, 100);
  const size_t r = 10; // Rank of the factorization (chosen arbitrarily here).
  double residue = nmf.Apply(V, r, W, H);
  return 0;
}
@endcode

NMFALSFactorizer uses SimpleResidueTermination, which is most preferred with Non-Negative Matrix factorizers. Initialization of W and H in NMFALSFactorizer is random. The Apply function returns the residue obtained by comparing the constructed matrix W * H with the original matrix V.

@subsection svd_amftut Using Singular Value Decomposition with AMF

The AMF implementation supports the following SVD factorizers:

 - \ref mlpack::amf::SVDBatchFactorizer "SVDBatchFactorizer"
 - \ref mlpack::amf::SparseSVDBatchFactorizer "SparseSVDBatchFactorizer"
 - \ref mlpack::amf::SVDIncompleteIncrementalFactorizer "SVDIncompleteIncrementalFactorizer"
 - \ref mlpack::amf::SparseSVDIncompleteIncrementalFactorizer "SparseSVDIncompleteIncrementalFactorizer"
 - \ref mlpack::amf::SVDCompleteIncrementalFactorizer "SVDCompleteIncrementalFactorizer"
 - \ref mlpack::amf::SparseSVDCompleteIncrementalFactorizer "SparseSVDCompleteIncrementalFactorizer"

The sparse versions of the factorizers can be used with Armadillo's sparse matrix support. These specialized implementations boost runtime performance when the matrix to be factorized is relatively sparse.

@code
#include <mlpack/core.hpp>
#include <mlpack/methods/amf/amf.hpp>

using namespace std;
using namespace arma;
using namespace mlpack::amf;

int main()
{
  // Create a random sparse matrix (10% nonzero entries).
  sp_mat V = sprandu<sp_mat>(100, 100, 0.1);
  mat W, H;
  SparseSVDBatchFactorizer svd;
  const size_t r = 10; // Rank of the factorization (chosen arbitrarily here).
  double residue = svd.Apply(V, r, W, H);
}
@endcode

@section further_doc_amftut Further documentation

For further documentation on the AMF class, consult the \ref mlpack::amf::AMF "complete API documentation".

*/
mlpack-2.2.5/doc/tutorials/approx_kfn/000077500000000000000000000000001315013601400177235ustar00rootroot00000000000000mlpack-2.2.5/doc/tutorials/approx_kfn/approx_kfn.txt000066400000000000000000001050021315013601400226310ustar00rootroot00000000000000/*!

@file approx_kfn.txt
@author Ryan Curtin
@brief Tutorial for how to use approximate furthest neighbor search in mlpack.

@page akfntutorial Approximate furthest neighbor search (mlpack_approx_kfn) tutorial

@section intro_akfntut Introduction

\b mlpack implements multiple strategies for approximate furthest neighbor search in its \c mlpack_approx_kfn and \c mlpack_kfn programs (each program corresponds to different techniques). This tutorial discusses what problems these algorithms solve and how to use each of the techniques that \b mlpack implements.

\b mlpack implements five approximate furthest neighbor search algorithms:

 - brute-force search (in \c mlpack_kfn)
 - single-tree search (in \c mlpack_kfn)
 - dual-tree search (in \c mlpack_kfn)
 - query-dependent approximate furthest neighbor (QDAFN) (in \c mlpack_approx_kfn)
 - DrusillaSelect (in \c mlpack_approx_kfn)

These methods are described in the following papers:

@code
@inproceedings{curtin2013tree,
  title={Tree-Independent Dual-Tree Algorithms},
  author={Curtin, Ryan R. and March, William B. and Ram, Parikshit and
      Anderson, David V. and Gray, Alexander G.
      and Isbell Jr., Charles L.},
  booktitle={Proceedings of The 30th International Conference on Machine
      Learning (ICML '13)},
  pages={1435--1443},
  year={2013}
}
@endcode

@code
@incollection{pagh2015approximate,
  title={Approximate furthest neighbor in high dimensions},
  author={Pagh, Rasmus and Silvestri, Francesco and Sivertsen, Johan and
      Skala, Matthew},
  booktitle={Similarity Search and Applications},
  pages={3--14},
  year={2015},
  publisher={Springer}
}
@endcode

@code
@incollection{curtin2016fast,
  title={Fast approximate furthest neighbors with data-dependent candidate
      selection},
  author={Curtin, Ryan R., and Gardner, Andrew B.},
  booktitle={Similarity Search and Applications},
  pages={221--235},
  year={2016},
  publisher={Springer}
}
@endcode

The problem of furthest neighbor search is simple, and is the opposite of the much-more-studied nearest neighbor search problem. Given a set of reference points \f$R\f$ (the set in which we are searching), and a set of query points \f$Q\f$ (the set of points for which we want the furthest neighbor), our goal is to return the \f$k\f$ furthest neighbors for each query point in \f$Q\f$:

\f[
\operatorname{k-argmax}_{p_r \in R} d(p_q, p_r).
\f]

In order to solve this problem, \b mlpack provides a number of interfaces.

 - two \ref cli_akfntut "simple command-line executables" to calculate approximate furthest neighbors
 - a simple \ref cpp_qdafn_akfntut "C++ class for QDAFN"
 - a simple \ref cpp_ds_akfntut "C++ class for DrusillaSelect"
 - a simple \ref cpp_kfn_akfntut "C++ class for tree-based and brute-force" search

@section toc_akfntut Table of Contents

A list of all the sections this tutorial contains.

 - \ref intro_akfntut
 - \ref toc_akfntut
 - \ref which_akfntut
 - \ref cli_akfntut
   - \ref cli_ex1_akfntut
   - \ref cli_ex2_akfntut
   - \ref cli_ex3_akfntut
   - \ref cli_ex4_akfntut
   - \ref cli_ex5_akfntut
   - \ref cli_ex6_akfntut
   - \ref cli_ex7_akfntut
   - \ref cli_ex8_akfntut
   - \ref cli_final_akfntut
 - \ref cpp_ds_akfntut
   - \ref cpp_ex1_ds_akfntut
   - \ref cpp_ex2_ds_akfntut
   - \ref cpp_ex3_ds_akfntut
   - \ref cpp_ex4_ds_akfntut
   - \ref cpp_ex5_ds_akfntut
 - \ref cpp_qdafn_akfntut
   - \ref cpp_ex1_qdafn_akfntut
   - \ref cpp_ex2_qdafn_akfntut
   - \ref cpp_ex3_qdafn_akfntut
   - \ref cpp_ex4_qdafn_akfntut
   - \ref cpp_ex5_qdafn_akfntut
 - \ref cpp_ns_akfntut
   - \ref cpp_ex1_ns_akfntut
   - \ref cpp_ex2_ns_akfntut
   - \ref cpp_ex3_ns_akfntut
   - \ref cpp_ex4_ns_akfntut
 - \ref further_doc_akfntut

@section which_akfntut Which algorithm should be used?

There are three algorithms for furthest neighbor search that \b mlpack implements, and each is suited to a different setting. Below is some basic guidance on what should be used. Note that the question of "which algorithm should be used" is a very difficult question to answer, so the guidance below is just that---guidance---and may not be right for a particular problem.

 - \c DrusillaSelect is very fast and will perform extremely well for datasets with outliers or datasets with structure (like low-dimensional datasets embedded in high dimensions)
 - \c QDAFN is a random approach and therefore should be well-suited for datasets with little to no structure
 - The tree-based approaches (the \c KFN class and the \c mlpack_kfn program) are best suited for low-dimensional datasets, and are most effective when very small levels of approximation are desired, or when exact results are desired.
   - Dual-tree search is most useful when the query set is large and structured (like for all-furthest-neighbor search).
   - Single-tree search is more useful when the query set is small.
@section cli_akfntut Command-line 'mlpack_approx_kfn' and 'mlpack_kfn'

\b mlpack provides two command-line programs to solve approximate furthest neighbor search:

 - \c mlpack_approx_kfn, for the QDAFN and DrusillaSelect approaches
 - \c mlpack_kfn, for exact and approximate tree-based approaches

These two programs allow a large number of algorithms to be used to find approximate furthest neighbors. Note that the \c mlpack_kfn program is also documented by the \ref cli_nstut section of the \ref nstutorial page, as it shares options with the \c mlpack_knn program.

Below are several examples of how the \c mlpack_approx_kfn and \c mlpack_kfn programs might be used. The first examples focus on the \c mlpack_approx_kfn program, and the last few show how \c mlpack_kfn can be used to produce approximate results.

@subsection cli_ex1_akfntut Calculate 5 furthest neighbors with default options

Here we have a query dataset \c queries.csv and a reference dataset \c refs.csv, and we wish to find the 5 furthest neighbors of every query point in the reference dataset. We may do that with the \c mlpack_approx_kfn program, using the default \c DrusillaSelect algorithm with default parameters.

@code
$ mlpack_approx_kfn -q queries.csv -r refs.csv -v -k 5 -n n.csv -d d.csv
[INFO ] Loading 'refs.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Building DrusillaSelect model...
[INFO ] Model built.
[INFO ] Loading 'queries.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Searching for 5 furthest neighbors with DrusillaSelect...
[INFO ] Search complete.
[INFO ] Saving CSV data to 'n.csv'.
[INFO ] Saving CSV data to 'd.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ]   algorithm: ds
[INFO ]   calculate_error: false
[INFO ]   distances_file: d.csv
[INFO ]   exact_distances_file: ""
[INFO ]   help: false
[INFO ]   info: ""
[INFO ]   input_model_file: ""
[INFO ]   k: 5
[INFO ]   neighbors_file: n.csv
[INFO ]   num_projections: 5
[INFO ]   num_tables: 5
[INFO ]   output_model_file: ""
[INFO ]   query_file: queries.csv
[INFO ]   reference_file: refs.csv
[INFO ]   verbose: true
[INFO ]   version: false
[INFO ]
[INFO ] Program timers:
[INFO ]   drusilla_select_construct: 0.000342s
[INFO ]   drusilla_select_search: 0.000780s
[INFO ]   loading_data: 0.010689s
[INFO ]   saving_data: 0.005585s
[INFO ]   total_time: 0.018592s
@endcode

Convenient timers for parts of the program operation are printed. The results, saved in \c n.csv and \c d.csv, indicate the furthest neighbors and distances for each query point. The row of the output file indicates the query point that the results are for. The neighbors are listed from furthest to nearest; so, the 4th element in the 3rd row of \c d.csv indicates the distance between the 3rd query point in \c queries.csv and its approximate 4th furthest neighbor. Similarly, the same element in \c n.csv indicates the index of the approximate 4th furthest neighbor (with respect to \c refs.csv).

@subsection cli_ex2_akfntut Specifying algorithm parameters for DrusillaSelect

The \c -p (\c --num_projections) and \c -t (\c --num_tables) parameters affect the running of the \c DrusillaSelect algorithm and the QDAFN algorithm. Specifically, larger values for each of these parameters will search more possible candidate furthest neighbors and produce better results (at the cost of runtime). More details on how each of these parameters works are available in the original papers, the \b mlpack source, or the documentation given by \c --help.
In the example below, we run \c DrusillaSelect to find 4 furthest neighbors
using 10 tables and 2 points in each table.  The neighbor indices will be
written to \c n.csv and the corresponding distances to \c d.csv.

@code
$ mlpack_approx_kfn -q queries.csv -r refs.csv -v -k 4 -n n.csv -d d.csv -t 10 -p 2
[INFO ] Loading 'refs.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Building DrusillaSelect model...
[INFO ] Model built.
[INFO ] Loading 'queries.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Searching for 4 furthest neighbors with DrusillaSelect...
[INFO ] Search complete.
[INFO ] Saving CSV data to 'n.csv'.
[INFO ] Saving CSV data to 'd.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ]   algorithm: ds
[INFO ]   calculate_error: false
[INFO ]   distances_file: d.csv
[INFO ]   exact_distances_file: ""
[INFO ]   help: false
[INFO ]   info: ""
[INFO ]   input_model_file: ""
[INFO ]   k: 4
[INFO ]   neighbors_file: n.csv
[INFO ]   num_projections: 2
[INFO ]   num_tables: 10
[INFO ]   output_model_file: ""
[INFO ]   query_file: queries.csv
[INFO ]   reference_file: refs.csv
[INFO ]   verbose: true
[INFO ]   version: false
[INFO ]
[INFO ] Program timers:
[INFO ]   drusilla_select_construct: 0.000645s
[INFO ]   drusilla_select_search: 0.000551s
[INFO ]   loading_data: 0.008518s
[INFO ]   saving_data: 0.003734s
[INFO ]   total_time: 0.014019s
@endcode

@subsection cli_ex3_akfntut Using QDAFN instead of DrusillaSelect

The algorithm to be used for approximate furthest neighbor search can be
specified with the \c --algorithm (\c -a) option to the \c mlpack_approx_kfn
program.  Below, we use the QDAFN algorithm instead of the default.  We leave
the \c -p and \c -t options at their defaults---even though QDAFN often
requires more tables and points to get the same quality of results.

@code
$ mlpack_approx_kfn -q queries.csv -r refs.csv -v -k 3 -n n.csv -d d.csv -a qdafn
[INFO ] Loading 'refs.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Building QDAFN model...
[INFO ] Model built.
[INFO ] Loading 'queries.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Searching for 3 furthest neighbors with QDAFN...
[INFO ] Search complete.
[INFO ] Saving CSV data to 'n.csv'.
[INFO ] Saving CSV data to 'd.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ]   algorithm: qdafn
[INFO ]   calculate_error: false
[INFO ]   distances_file: d.csv
[INFO ]   exact_distances_file: ""
[INFO ]   help: false
[INFO ]   info: ""
[INFO ]   input_model_file: ""
[INFO ]   k: 3
[INFO ]   neighbors_file: n.csv
[INFO ]   num_projections: 5
[INFO ]   num_tables: 5
[INFO ]   output_model_file: ""
[INFO ]   query_file: queries.csv
[INFO ]   reference_file: refs.csv
[INFO ]   verbose: true
[INFO ]   version: false
[INFO ]
[INFO ] Program timers:
[INFO ]   loading_data: 0.008380s
[INFO ]   qdafn_construct: 0.003399s
[INFO ]   qdafn_search: 0.000886s
[INFO ]   saving_data: 0.002253s
[INFO ]   total_time: 0.015465s
@endcode

@subsection cli_ex4_akfntut Printing results quality with exact distances

The \c mlpack_approx_kfn program can calculate the quality of the results if
the \c --calculate_error (\c -e) flag is specified.  Below we use the program
with its default parameters and calculate the error, which is displayed in the
output.  The error is only calculated for the furthest neighbor, not all k;
therefore, in this example we have set \c -k to \c 1.

@code
$ mlpack_approx_kfn -q queries.csv -r refs.csv -v -k 1 -e
[INFO ] Loading 'refs.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Building DrusillaSelect model...
[INFO ] Model built.
[INFO ] Loading 'queries.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Searching for 1 furthest neighbors with DrusillaSelect...
[INFO ] Search complete.
[INFO ] Calculating exact distances...
[INFO ] 28891 node combinations were scored.
[INFO ] 37735 base cases were calculated.
[INFO ] Calculation complete.
[INFO ] Average error: 1.08417.
[INFO ] Maximum error: 1.28712.
[INFO ] Minimum error: 1.
[INFO ]
[INFO ] Execution parameters:
[INFO ]   algorithm: ds
[INFO ]   calculate_error: true
[INFO ]   distances_file: ""
[INFO ]   exact_distances_file: ""
[INFO ]   help: false
[INFO ]   info: ""
[INFO ]   input_model_file: ""
[INFO ]   k: 1
[INFO ]   neighbors_file: ""
[INFO ]   num_projections: 5
[INFO ]   num_tables: 5
[INFO ]   output_model_file: ""
[INFO ]   query_file: queries.csv
[INFO ]   reference_file: refs.csv
[INFO ]   verbose: true
[INFO ]   version: false
[INFO ]
[INFO ] Program timers:
[INFO ]   computing_neighbors: 0.001476s
[INFO ]   drusilla_select_construct: 0.000309s
[INFO ]   drusilla_select_search: 0.000495s
[INFO ]   loading_data: 0.008462s
[INFO ]   total_time: 0.011670s
[INFO ]   tree_building: 0.000202s
@endcode

Note that the output includes three lines indicating the error:

@code
[INFO ] Average error: 1.08417.
[INFO ] Maximum error: 1.28712.
[INFO ] Minimum error: 1.
@endcode

In this case, a minimum error of 1 indicates an exact result, and over the
entire query set the algorithm has returned a furthest neighbor candidate with
maximum error 1.28712.

@subsection cli_ex5_akfntut Using cached exact distances for quality results

However, for large datasets, calculating the error may take a long time,
because the exact furthest neighbors must be calculated.  Therefore, if the
exact furthest neighbor distances are already known, they may be passed in
with the \c --exact_distances_file (\c -x) option in order to avoid the
calculation.  In the example below, we assume \c exact.csv contains the exact
furthest neighbor distances.  We run the \c qdafn algorithm in this example.
Note that the \c -e option must be specified for the \c -x option to have any
effect.

@code
$ mlpack_approx_kfn -q queries.csv -r refs.csv -k 1 -e -x exact.csv -n n.csv -v -a qdafn
[INFO ] Loading 'refs.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Building QDAFN model...
[INFO ] Model built.
[INFO ] Loading 'queries.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Searching for 1 furthest neighbors with QDAFN...
[INFO ] Search complete.
[INFO ] Loading 'exact.csv' as raw ASCII formatted data.  Size is 1 x 1000.
[INFO ] Average error: 1.06914.
[INFO ] Maximum error: 1.67407.
[INFO ] Minimum error: 1.
[INFO ] Saving CSV data to 'n.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ]   algorithm: qdafn
[INFO ]   calculate_error: true
[INFO ]   distances_file: ""
[INFO ]   exact_distances_file: exact.csv
[INFO ]   help: false
[INFO ]   info: ""
[INFO ]   input_model_file: ""
[INFO ]   k: 1
[INFO ]   neighbors_file: n.csv
[INFO ]   num_projections: 5
[INFO ]   num_tables: 5
[INFO ]   output_model_file: ""
[INFO ]   query_file: queries.csv
[INFO ]   reference_file: refs.csv
[INFO ]   verbose: true
[INFO ]   version: false
[INFO ]
[INFO ] Program timers:
[INFO ]   loading_data: 0.010348s
[INFO ]   qdafn_construct: 0.000318s
[INFO ]   qdafn_search: 0.000793s
[INFO ]   saving_data: 0.000259s
[INFO ]   total_time: 0.012254s
@endcode

@subsection cli_ex6_akfntut Using tree-based approximation with mlpack_kfn

The \c mlpack_kfn program allows specifying a desired approximation level with
the \c --epsilon (\c -e) option.  The parameter must be greater than or equal
to 0 and less than 1.  A setting of 0 indicates exact search.
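To be concrete about what this approximation level means: let \f$p_r^*\f$ be
the true furthest neighbor of a query point \f$p_q\f$, and let
\f$\hat{p}_r\f$ be the candidate returned by the search.  Then, stated here as
a summary of the relative-error convention for furthest neighbor search
(rather than as a formal guarantee quoted from the original documentation), a
parameter value of \f$\epsilon\f$ means that

\f[
d(p_q, \hat{p}_r) \ge (1 - \epsilon) \, d(p_q, p_r^*),
\f]

which is why \f$\epsilon = 0\f$ corresponds to exact search and values near 1
permit very loose results.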
The example below runs dual-tree furthest neighbor search (the default
algorithm) with the approximation parameter set to 0.5.

@code
$ mlpack_kfn -q queries.csv -r refs.csv -v -k 3 -e 0.5 -n n.csv -d d.csv
[INFO ] Loading 'refs.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Loaded reference data from 'refs.csv' (3x1000).
[INFO ] Building reference tree...
[INFO ] Tree built.
[INFO ] Loading 'queries.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Loaded query data from 'queries.csv' (3x1000).
[INFO ] Searching for 3 neighbors with dual-tree kd-tree search...
[INFO ] 1611 node combinations were scored.
[INFO ] 13938 base cases were calculated.
[INFO ] 1611 node combinations were scored.
[INFO ] 13938 base cases were calculated.
[INFO ] Search complete.
[INFO ] Saving CSV data to 'n.csv'.
[INFO ] Saving CSV data to 'd.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ]   algorithm: dual_tree
[INFO ]   distances_file: d.csv
[INFO ]   epsilon: 0.5
[INFO ]   help: false
[INFO ]   info: ""
[INFO ]   input_model_file: ""
[INFO ]   k: 3
[INFO ]   leaf_size: 20
[INFO ]   naive: false
[INFO ]   neighbors_file: n.csv
[INFO ]   output_model_file: ""
[INFO ]   percentage: 1
[INFO ]   query_file: queries.csv
[INFO ]   random_basis: false
[INFO ]   reference_file: refs.csv
[INFO ]   seed: 0
[INFO ]   single_mode: false
[INFO ]   tree_type: kd
[INFO ]   true_distances_file: ""
[INFO ]   true_neighbors_file: ""
[INFO ]   verbose: true
[INFO ]   version: false
[INFO ]
[INFO ] Program timers:
[INFO ]   computing_neighbors: 0.000442s
[INFO ]   loading_data: 0.008060s
[INFO ]   saving_data: 0.002850s
[INFO ]   total_time: 0.012667s
[INFO ]   tree_building: 0.000251s
@endcode

Note that the format of the output files \c d.csv and \c n.csv is the same as
for \c mlpack_approx_kfn.

@subsection cli_ex7_akfntut Different algorithms with 'mlpack_kfn'

The \c mlpack_kfn program offers a large number of different algorithms that
can be used.  The \c --algorithm (\c -a) option may be used to specify four
different algorithm types: \c naive (brute-force search), \c single_tree
(single-tree search), \c dual_tree (dual-tree search, the default), and
\c greedy ("defeatist" greedy search, which goes to one leaf node of the tree
then terminates).  The example below uses single-tree search to find
approximate neighbors with epsilon set to 0.1.

@code
$ mlpack_kfn -q queries.csv -r refs.csv -v -k 3 -e 0.1 -n n.csv -d d.csv -a single_tree
[INFO ] Loading 'refs.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Loaded reference data from 'refs.csv' (3x1000).
[INFO ] Building reference tree...
[INFO ] Tree built.
[INFO ] Loading 'queries.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Loaded query data from 'queries.csv' (3x1000).
[INFO ] Searching for 3 neighbors with single-tree kd-tree search...
[INFO ] 13240 node combinations were scored.
[INFO ] 15924 base cases were calculated.
[INFO ] Search complete.
[INFO ] Saving CSV data to 'n.csv'.
[INFO ] Saving CSV data to 'd.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ]   algorithm: single_tree
[INFO ]   distances_file: d.csv
[INFO ]   epsilon: 0.1
[INFO ]   help: false
[INFO ]   info: ""
[INFO ]   input_model_file: ""
[INFO ]   k: 3
[INFO ]   leaf_size: 20
[INFO ]   naive: false
[INFO ]   neighbors_file: n.csv
[INFO ]   output_model_file: ""
[INFO ]   percentage: 1
[INFO ]   query_file: queries.csv
[INFO ]   random_basis: false
[INFO ]   reference_file: refs.csv
[INFO ]   seed: 0
[INFO ]   single_mode: false
[INFO ]   tree_type: kd
[INFO ]   true_distances_file: ""
[INFO ]   true_neighbors_file: ""
[INFO ]   verbose: true
[INFO ]   version: false
[INFO ]
[INFO ] Program timers:
[INFO ]   computing_neighbors: 0.000850s
[INFO ]   loading_data: 0.007858s
[INFO ]   saving_data: 0.003445s
[INFO ]   total_time: 0.013084s
[INFO ]   tree_building: 0.000250s
@endcode

@subsection cli_ex8_akfntut Saving a model for later use

The \c mlpack_approx_kfn and \c mlpack_kfn programs both allow models to be
saved and loaded for future use.  The \c --output_model_file (\c -M) option
allows specifying where to save a model, and the \c --input_model_file
(\c -m) option allows a model to be loaded instead of trained.  So, if you
specify \c --input_model_file then you do not need to specify
\c --reference_file (\c -r), \c --num_projections (\c -p), or
\c --num_tables (\c -t).

The example below saves a model with 10 projections and 5 tables.  Note that
neither \c --query_file (\c -q) nor \c -k is specified; this run only builds
the model and saves it to \c model.bin.

@code
$ mlpack_approx_kfn -r refs.csv -t 5 -p 10 -v -M model.bin
[INFO ] Loading 'refs.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Building DrusillaSelect model...
[INFO ] Model built.
[INFO ]
[INFO ] Execution parameters:
[INFO ]   algorithm: ds
[INFO ]   calculate_error: false
[INFO ]   distances_file: ""
[INFO ]   exact_distances_file: ""
[INFO ]   help: false
[INFO ]   info: ""
[INFO ]   input_model_file: ""
[INFO ]   k: 0
[INFO ]   neighbors_file: ""
[INFO ]   num_projections: 10
[INFO ]   num_tables: 5
[INFO ]   output_model_file: model.bin
[INFO ]   query_file: ""
[INFO ]   reference_file: refs.csv
[INFO ]   verbose: true
[INFO ]   version: false
[INFO ]
[INFO ] Program timers:
[INFO ]   drusilla_select_construct: 0.000321s
[INFO ]   loading_data: 0.004700s
[INFO ]   total_time: 0.007320s
@endcode

Now, with the model saved, we can run approximate furthest neighbor search on
a query set using the saved model:

@code
$ mlpack_approx_kfn -m model.bin -q queries.csv -k 3 -d d.csv -n n.csv -v
[INFO ] Loading 'queries.csv' as CSV data.  Size is 3 x 1000.
[INFO ] Searching for 3 furthest neighbors with DrusillaSelect...
[INFO ] Search complete.
[INFO ] Saving CSV data to 'n.csv'.
[INFO ] Saving CSV data to 'd.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ]   algorithm: ds
[INFO ]   calculate_error: false
[INFO ]   distances_file: d.csv
[INFO ]   exact_distances_file: ""
[INFO ]   help: false
[INFO ]   info: ""
[INFO ]   input_model_file: model.bin
[INFO ]   k: 3
[INFO ]   neighbors_file: n.csv
[INFO ]   num_projections: 5
[INFO ]   num_tables: 5
[INFO ]   output_model_file: ""
[INFO ]   query_file: queries.csv
[INFO ]   reference_file: ""
[INFO ]   verbose: true
[INFO ]   version: false
[INFO ]
[INFO ] Program timers:
[INFO ]   drusilla_select_search: 0.000878s
[INFO ]   loading_data: 0.004599s
[INFO ]   saving_data: 0.003006s
[INFO ]   total_time: 0.009234s
@endcode

These options work in the same way for both the \c mlpack_approx_kfn and
\c mlpack_kfn programs.
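The same save-and-reuse workflow is available from C++ via \b mlpack's
serialization support.  The sketch below is only a hedged illustration (the
file name and serialization object name are hypothetical, and we assume here
that the \c DrusillaSelect class is serializable, as suggested by the
command-line program's ability to save its model): it builds a model, saves
it, and later loads it into another object before searching.

@code
#include <mlpack/core.hpp>
#include <mlpack/methods/approx_kfn/drusilla_select.hpp>

using namespace mlpack;
using namespace mlpack::neighbor;

void SaveAndReuse(const arma::mat& dataset, const arma::mat& queries)
{
  // Build the model with 5 tables and 10 projections, then save it.
  DrusillaSelect<> ds(dataset, 5, 10);
  data::Save("ds-model.bin", "ds_model", ds);

  // Later---possibly in a different program---load the model back.  The
  // construction parameters here are placeholders; Load() will overwrite the
  // model's state.
  DrusillaSelect<> ds2(dataset, 5, 10);
  data::Load("ds-model.bin", "ds_model", ds2);

  // Search with the loaded model.
  arma::Mat<size_t> neighbors;
  arma::mat distances;
  ds2.Search(queries, 3, neighbors, distances);
}
@endcode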
@subsection cli_final_akfntut Final command-line program notes

Both the \c mlpack_kfn and \c mlpack_approx_kfn programs contain numerous
options not fully documented in these short examples.  You can run each
program with the \c --help (\c -h) option for more information.

@section cpp_ds_akfntut DrusillaSelect C++ class

\b mlpack provides a simple \c DrusillaSelect C++ class that can be used
inside of C++ programs to perform approximate furthest neighbor search.  The
class has only one template parameter---\c MatType---which specifies the type
of matrix to be used.  That means the class can be used with either dense data
(of type \c arma::mat) or sparse data (of type \c arma::sp_mat).

The following examples show simple usage of this class.

@subsection cpp_ex1_ds_akfntut Approximate furthest neighbors with defaults

The code below builds a \c DrusillaSelect model with default options on the
matrix \c dataset, then queries for the approximate furthest neighbor of every
point in the \c queries matrix.

@code
#include <mlpack/methods/approx_kfn/drusilla_select.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::mat dataset;
// The query set.
extern arma::mat queries;

// Construct the model with defaults.
DrusillaSelect<> ds(dataset);

// Query the model, putting output into the following two matrices.
arma::mat distances;
arma::Mat<size_t> neighbors;
ds.Search(queries, 1, neighbors, distances);
@endcode

At the end of this code, both the \c distances and \c neighbors matrices will
have number of columns equal to the number of columns in the \c queries
matrix.  So, each column of the \c distances and \c neighbors matrices holds
the distances or neighbors of the corresponding column in the \c queries
matrix.

@subsection cpp_ex2_ds_akfntut Custom numbers of tables and projections

The following example constructs a DrusillaSelect model with 10 tables and 5
projections.  Once that is done it performs the same task as the previous
example.

@code
#include <mlpack/methods/approx_kfn/drusilla_select.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::mat dataset;
// The query set.
extern arma::mat queries;

// Construct the model with custom parameters.
DrusillaSelect<> ds(dataset, 10, 5);

// Query the model, putting output into the following two matrices.
arma::mat distances;
arma::Mat<size_t> neighbors;
ds.Search(queries, 1, neighbors, distances);
@endcode

@subsection cpp_ex3_ds_akfntut Accessing the candidate set

The \c DrusillaSelect algorithm merely scans the reference set and extracts a
number of points that will be queried in a brute-force fashion when the
\c Search() method is called.  We can access this set with the
\c CandidateSet() method.  The code below prints the fifth point of the
candidate set.

@code
#include <mlpack/methods/approx_kfn/drusilla_select.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::mat dataset;

// Construct the model with custom parameters.
DrusillaSelect<> ds(dataset, 10, 5);

// Print the fifth point of the candidate set.
std::cout << ds.CandidateSet().col(4).t();
@endcode

@subsection cpp_ex4_ds_akfntut Retraining on a new reference set

It is possible to retrain a \c DrusillaSelect model with new parameters or
with a new reference set.  This is functionally equivalent to creating a new
model.  The example code below creates a first \c DrusillaSelect model using 3
tables and 10 projections, and then retrains this with the same reference set
using 10 tables and 3 projections.

@code
#include <mlpack/methods/approx_kfn/drusilla_select.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::mat dataset;

// Construct the model with initial parameters.
DrusillaSelect<> ds(dataset, 3, 10);

// Now retrain with different parameters.
ds.Train(dataset, 10, 3);
@endcode

@subsection cpp_ex5_ds_akfntut Running on sparse data

We can set the template parameter for \c DrusillaSelect to \c arma::sp_mat in
order to perform furthest neighbor search on sparse data.  The code below
creates a \c DrusillaSelect model using 4 tables and 6 projections with sparse
input data, then searches for 3 approximate furthest neighbors.

@code
#include <mlpack/methods/approx_kfn/drusilla_select.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::sp_mat dataset;
// The query dataset.
extern arma::sp_mat querySet;

// Construct the model on sparse data.
DrusillaSelect<arma::sp_mat> ds(dataset, 4, 6);

// Search on query data.
arma::Mat<size_t> neighbors;
arma::mat distances;
ds.Search(querySet, 3, neighbors, distances);
@endcode

@section cpp_qdafn_akfntut QDAFN C++ class

\b mlpack also provides a standalone simple \c QDAFN class for furthest
neighbor search.  The API for this class is virtually identical to the
\c DrusillaSelect class, and also has one template parameter to specify the
type of matrix to be used (dense, sparse, or other).

The following subsections demonstrate usage of the \c QDAFN class in the same
way as the previous section's examples for \c DrusillaSelect.

@subsection cpp_ex1_qdafn_akfntut Approximate furthest neighbors with defaults

The code below builds a \c QDAFN model with default options on the matrix
\c dataset, then queries for the approximate furthest neighbor of every point
in the \c queries matrix.

@code
#include <mlpack/methods/approx_kfn/qdafn.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::mat dataset;
// The query set.
extern arma::mat queries;

// Construct the model with defaults.
QDAFN<> qd(dataset);

// Query the model, putting output into the following two matrices.
arma::mat distances;
arma::Mat<size_t> neighbors;
qd.Search(queries, 1, neighbors, distances);
@endcode

At the end of this code, both the \c distances and \c neighbors matrices will
have number of columns equal to the number of columns in the \c queries
matrix.  So, each column of the \c distances and \c neighbors matrices holds
the distances or neighbors of the corresponding column in the \c queries
matrix.

@subsection cpp_ex2_qdafn_akfntut Custom numbers of tables and projections

The following example constructs a QDAFN model with 15 tables and 30
projections.  Once that is done it performs the same task as the previous
example.

@code
#include <mlpack/methods/approx_kfn/qdafn.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::mat dataset;
// The query set.
extern arma::mat queries;

// Construct the model with custom parameters.
QDAFN<> qdafn(dataset, 15, 30);

// Query the model, putting output into the following two matrices.
arma::mat distances;
arma::Mat<size_t> neighbors;
qdafn.Search(queries, 1, neighbors, distances);
@endcode

@subsection cpp_ex3_qdafn_akfntut Accessing the candidate set

The \c QDAFN algorithm scans the reference set, extracting points that have
been projected onto random directions.  Each random direction corresponds to a
single table.  The \c QDAFN class stores these points as a vector of matrices,
which can be accessed with the \c CandidateSet() method.  The code below
prints the fifth point of the candidate set of the third table.

@code
#include <mlpack/methods/approx_kfn/qdafn.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::mat dataset;

// Construct the model with custom parameters.
QDAFN<> qdafn(dataset, 10, 5);

// Print the fifth point of the candidate set of the third table.
std::cout << qdafn.CandidateSet(2).col(4).t();
@endcode

@subsection cpp_ex4_qdafn_akfntut Retraining on a new reference set

It is possible to retrain a \c QDAFN model with new parameters or with a new
reference set.  This is functionally equivalent to creating a new model.  The
example code below creates a first \c QDAFN model using 3 tables and 10
projections, and then retrains this with the same reference set using 10
tables and 3 projections.

@code
#include <mlpack/methods/approx_kfn/qdafn.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::mat dataset;

// Construct the model with initial parameters.
QDAFN<> qdafn(dataset, 3, 10);

// Now retrain with different parameters.
qdafn.Train(dataset, 10, 3);
@endcode

@subsection cpp_ex5_qdafn_akfntut Running on sparse data

We can set the template parameter for \c QDAFN to \c arma::sp_mat in order to
perform furthest neighbor search on sparse data.  The code below creates a
\c QDAFN model using 20 tables and 60 projections with sparse input data, then
searches for 3 approximate furthest neighbors.

@code
#include <mlpack/methods/approx_kfn/qdafn.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::sp_mat dataset;
// The query dataset.
extern arma::sp_mat querySet;

// Construct the model on sparse data.
QDAFN<arma::sp_mat> qdafn(dataset, 20, 60);

// Search on query data.
arma::Mat<size_t> neighbors;
arma::mat distances;
qdafn.Search(querySet, 3, neighbors, distances);
@endcode

@section cpp_ns_akfntut KFN C++ class

The extensive \c NeighborSearch class also provides a way to search for
approximate furthest neighbors using a different, tree-based technique.  For
full documentation on this class, see the
\ref nstutorial "NeighborSearch tutorial".  The \c KFN class is a convenient
typedef of the \c NeighborSearch class that can be used to perform the
furthest neighbors task with kd-trees.

In the following subsections, the \c KFN class is used in short code examples.

@subsection cpp_ex1_ns_akfntut Simple furthest neighbors example

The \c KFN class has construction semantics similar to \c DrusillaSelect and
\c QDAFN.  The example below constructs a \c KFN object (which will build the
tree on the reference set), but note that the third parameter to the
constructor allows us to specify our desired level of approximation.  In this
example we choose epsilon = 0.05.  Then, the code searches for 3 approximate
furthest neighbors.

@code
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

using namespace mlpack::neighbor;

// The reference dataset.
extern arma::mat dataset;
// The query set.
extern arma::mat querySet;

// Construct the object, performing the default dual-tree search with
// approximation level epsilon = 0.05.
KFN kfn(dataset, DUAL_TREE_MODE, 0.05);

// Search for approximate furthest neighbors.
arma::Mat<size_t> neighbors;
arma::mat distances;
kfn.Search(querySet, 3, neighbors, distances);
@endcode

@subsection cpp_ex2_ns_akfntut Retraining on a new reference set

Like the \c QDAFN and \c DrusillaSelect classes, the \c KFN class is capable
of retraining on a new reference set.  The code below demonstrates this.

@code
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

using namespace mlpack::neighbor;

// The original reference set we train on.
extern arma::mat dataset;
// The new reference set we retrain on.
extern arma::mat newDataset;

// Construct the object with approximation level 0.1.
KFN kfn(dataset, DUAL_TREE_MODE, 0.1);

// Retrain on the new reference set.
kfn.Train(newDataset);
@endcode

@subsection cpp_ex3_ns_akfntut Searching in single-tree mode

The particular mode to be used in search can be specified in the constructor.
In this example, we use single-tree search (as opposed to the default of
dual-tree search).

@code
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

using namespace mlpack::neighbor;

// The reference set.
extern arma::mat dataset;
// The query set.
extern arma::mat querySet;

// Construct the object with approximation level 0.25 and in single tree
// search mode.
KFN kfn(dataset, SINGLE_TREE_MODE, 0.25);

// Search for 5 approximate furthest neighbors.
arma::Mat<size_t> neighbors;
arma::mat distances;
kfn.Search(querySet, 5, neighbors, distances);
@endcode

@subsection cpp_ex4_ns_akfntut Searching in brute-force mode

If desired, brute-force search ("naive search") can be used to find the
furthest neighbors; however, the result will not be approximate---it will be
exact (since every possibility will be considered).  The code below performs
exact furthest neighbor search by using the \c KFN class in brute-force mode.

@code
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

using namespace mlpack::neighbor;

// The reference set.
extern arma::mat dataset;
// The query set.
extern arma::mat querySet;

// Construct the object in brute-force mode.  We can leave the approximation
// parameter to its default (0) since brute-force will provide exact results.
KFN kfn(dataset, NAIVE_MODE);

// Perform the search for 2 furthest neighbors.
arma::Mat<size_t> neighbors;
arma::mat distances;
kfn.Search(querySet, 2, neighbors, distances);
@endcode

@section further_doc_akfntut Further documentation

For further documentation on the approximate furthest neighbor facilities
offered by \b mlpack, consult the following documentation:

 - \ref nstutorial
 - \ref mlpack::neighbor::QDAFN "QDAFN class documentation"
 - \ref mlpack::neighbor::DrusillaSelect "DrusillaSelect class documentation"
 - \ref mlpack::neighbor::NeighborSearch "NeighborSearch class documentation"

*/

/*!
@file cf.txt
@author Ryan Curtin
@brief Tutorial for how to use the CF class and program.

@page cftutorial Collaborative filtering tutorial

@section intro_cftut Introduction

Collaborative filtering is an increasingly popular approach for recommender
systems.  A typical formulation of the problem is as follows: there are
\f$n\f$ users and \f$m\f$ items, and each user has rated some of the items.
We want to provide each user with a recommendation for an item they have not
rated yet, which they are likely to rate highly.  In another formulation, we
may want to predict a user's rating of an item.  This type of problem has been
considered extensively, especially in the context of the Netflix prize.  The
winning approach for the Netflix prize was a collaborative filtering approach
which utilized matrix decomposition.  More information on their approach can
be found in the following paper:

@code
@article{koren2009matrix,
  title={Matrix factorization techniques for recommender systems},
  author={Koren, Yehuda and Bell, Robert and Volinsky, Chris},
  journal={Computer},
  number={8},
  pages={30--37},
  year={2009},
  publisher={IEEE}
}
@endcode

The key to this approach is that the data is represented as an incomplete
matrix \f$V \in \Re^{n \times m}\f$, where \f$V_{ij}\f$ represents user
\f$i\f$'s rating of item \f$j\f$, if that rating exists.  The task, then, is
to complete the entries of the matrix.
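As a small, hypothetical illustration of this setup (the ratings here are
invented for exposition), suppose \f$n = 3\f$ users have rated \f$m = 2\f$
items, with two ratings missing.  Writing unknown entries as \f$?\f$, the data
forms the incomplete matrix

\f[
V = \left( \begin{array}{cc} ? & 4 \\ 5 & 1 \\ 2 & ? \end{array} \right),
\f]

and the collaborative filtering task is to infer plausible values for the
\f$?\f$ entries.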
In the matrix factorization framework, the matrix \f$V\f$ is assumed to be
low-rank and decomposed into components as \f$V \approx WH\f$ according to
some heuristic.

In order to solve problems of this form, \b mlpack provides:

 - a \ref cli_cftut "simple command-line interface" to perform collaborative
   filtering
 - a \ref cf_cftut "simple C++ interface" to perform collaborative filtering
 - an \ref cpp_cftut "extensible C++ interface" for implementing new
   collaborative filtering techniques

@section toc_cftut Table of Contents

 - \ref intro_cftut
 - \ref toc_cftut
 - \ref cli_cftut
   - \ref cli_input_format
   - \ref ex1_cf_cli
   - \ref ex1a_cf_cli
   - \ref ex1b_cf_cli
   - \ref ex2_cf_cli
   - \ref ex3_cf_cli
   - \ref ex4_cf_cli
   - \ref ex5_cf_cli
 - \ref cf_cftut
   - \ref ex1_cf_cpp
   - \ref ex2_cf_cpp
   - \ref ex3_cf_cpp
   - \ref ex4_cf_cpp
 - \ref cpp_cftut
 - \ref further_doc_cftut

@section cli_cftut The 'mlpack_cf' program

\b mlpack provides a command-line program, \c mlpack_cf, which is used to
perform collaborative filtering on a given dataset.  It can provide
neighborhood-based recommendations for users.  The algorithm used for matrix
factorization is configurable, and the parameters of each algorithm are also
configurable.

The following examples detail usage of the \c mlpack_cf program.  Note that
you can get documentation on all the possible parameters by typing:

@code
$ mlpack_cf --help
@endcode

@subsection cli_input_format Input format for mlpack_cf

The input file for the \c mlpack_cf program is specified with the
\c --training_file or \c -t option.  This file is a coordinate-format sparse
matrix, similar to the Matrix Market (MM) format.  The first coordinate is the
user id; the second coordinate is the item id; and the third coordinate is the
rating.  So, for instance, a dataset with 3 users and 2 items, and ratings
between 1 and 5, might look like the following:

@code
$ cat dataset.csv
0, 1, 4
1, 0, 5
1, 1, 1
2, 0, 2
@endcode

This dataset has four ratings: user 0 has rated item 1 with a rating of 4;
user 1 has rated item 0 with a rating of 5; user 1 has rated item 1 with a
rating of 1; and user 2 has rated item 0 with a rating of 2.  Note that the
user and item indices start from 0, and the identifiers must be numeric
indices, and not names.

The file does not necessarily need to be a CSV; it can be any supported
storage format, as long as it is a coordinate-format file of the form
specified above.  For more information on mlpack file formats, see the
documentation for mlpack::data::Load().

@subsection ex1_cf_cli mlpack_cf with default parameters

In this example, we have a dataset from MovieLens and we want to use
\c mlpack_cf with the default parameters, which will provide 5 recommendations
for each user; we wish to save the results in the file
\c recommendations.csv.  Assuming that our dataset is in the file
\c MovieLens-100k.csv and it is in the correct format, we may use the
\c mlpack_cf executable as below:

@code
$ mlpack_cf -t MovieLens-100k.csv -v -o recommendations.csv
@endcode

The \c -v option provides verbose output, and may be omitted if desired.  Now,
for each user, we have recommendations in \c recommendations.csv:

@code
$ head recommendations.csv
317,422,482,356,495
116,120,180,6,327
312,49,116,99,236
312,116,99,236,285
55,190,317,194,63
171,209,180,175,95
208,0,94,87,57
99,97,0,203,172
257,99,180,287,0
171,203,172,209,88
@endcode

So, for user 0, the top 5 recommended items that user 0 has not rated are
items 317, 422, 482, 356, and 495.
For user 5, the recommendations are on the sixth line: 171, 209, 180, 175, 95.

The \c mlpack_cf program can be built into a larger recommendation framework,
with a preprocessing step that can turn user information and item information
into numeric IDs, and a postprocessing step that can map these numeric IDs
back to the original information.

@subsection ex1a_cf_cli Saving mlpack_cf models

The \c mlpack_cf program is able to save a particular model for later loading.
Saving a model can be done with the \c --output_model_file or \c -M option.
The example below builds a CF model on the \c MovieLens-100k.csv dataset, and
then saves the model to the file \c cf-model.xml for later usage.

@code
$ mlpack_cf -t MovieLens-100k.csv -M cf-model.xml -v
@endcode

The models can also be saved as \c .bin or \c .txt; the \c .xml format
provides a human-inspectable format (though the models tend to be quite
complex and may be difficult to read).  These models can then be re-used to
provide specific recommendations for certain users, or other tasks.

@subsection ex1b_cf_cli Loading mlpack_cf models

Instead of training a model, the \c mlpack_cf program can also load a model to
provide recommendations, using the \c --input_model_file or \c -m option.  For
instance, the example below will load the model from \c cf-model.xml and then
generate recommendations for each user in the dataset, saving the results to
\c recommendations.csv.

@code
$ mlpack_cf -m cf-model.xml -v -o recommendations.csv
@endcode

@subsection ex2_cf_cli Specifying rank of mlpack_cf decomposition

By default, the matrix factorizations in the \c mlpack_cf program decompose
the data matrix into two matrices \f$W\f$ and \f$H\f$ with rank two.  Often,
this default parameter is not correct, and it makes sense to use a higher-rank
decomposition.  The rank can be specified with the \c --rank or \c -R
parameter:

@code
$ mlpack_cf -t MovieLens-100k.csv -R 10 -v
@endcode

In the example above, the data matrix will be decomposed into two matrices of
rank 10.  In general, higher-rank decompositions will take longer, but will
give more accurate predictions.

@subsection ex3_cf_cli mlpack_cf with single-user recommendation

In the previous two examples, the output file \c recommendations.csv contains
one line for each user in the input dataset.  But often, recommendations may
only be desired for a few users.  In that case, we can assemble a file of
query users, with one user per line:

@code
$ cat query.csv
0
17
31
@endcode

Now, if we run the \c mlpack_cf executable with this query file, we will
obtain recommendations for users 0, 17, and 31:

@code
$ mlpack_cf -t MovieLens-100k.csv -R 10 -q query.csv -o recommendations.csv
$ cat recommendations.csv
474,356,317,432,473
510,172,204,483,182
0,120,236,257,126
@endcode

@subsection ex4_cf_cli mlpack_cf with non-default factorizer

The \c --algorithm (or \c -a ) parameter controls the factorizer that is used.
Several options are available:

 - \c 'NMF': non-negative matrix factorization; see mlpack::amf::AMF<>
 - \c 'SVDBatch': SVD batch factorization
 - \c 'SVDIncompleteIncremental': incomplete incremental SVD
 - \c 'SVDCompleteIncremental': complete incremental SVD
 - \c 'RegSVD': regularized SVD; see mlpack::svd::RegularizedSVD

The default factorizer is \c 'NMF'.
The example below uses the 'RegSVD' factorizer:

@code
$ mlpack_cf -t MovieLens-100k.csv -R 10 -q query.csv -a RegSVD -o recommendations.csv
@endcode

@subsection ex5_cf_cli mlpack_cf with non-default neighborhood size

The \c mlpack_cf program produces recommendations using a neighborhood:
similar users in the query user's neighborhood will be averaged to produce
predictions.  The size of this neighborhood is controlled with the
\c --neighborhood (or \c -n ) option.  An example using a neighborhood with 10
similar users is below:

@code
$ mlpack_cf -t MovieLens-100k.csv -R 10 -q query.csv -a RegSVD -n 10
@endcode

@section cf_cftut The 'CF' class

The \c CF class in \b mlpack offers a simple, flexible API for performing
collaborative filtering for recommender systems within C++ applications.  In
the constructor, the \c CF class takes a coordinate-list dataset and
decomposes the matrix according to the specified \c FactorizerType template
parameter.

Then, the \c GetRecommendations() function may be called to obtain
recommendations for certain users (or all users), and the \c W() and \c H()
matrices may be accessed to perform other computations.

The data which the \c CF constructor takes should be an Armadillo matrix
(\c arma::mat ) with three rows.  The first row corresponds to users; the
second row corresponds to items; the third row corresponds to the rating.
This is a coordinate list format, like the format the \c mlpack_cf program
takes.  The data::Load() function can be used to load data.

The following examples detail a few ways that the \c CF class can be used.

@subsection ex1_cf_cpp CF with default parameters

This example constructs the \c CF object with default parameters and obtains
recommendations for each user, storing the output in the \c recommendations
matrix.

@code
#include <mlpack/methods/cf/cf.hpp>

using namespace mlpack::cf;

// The coordinate list of ratings that we have.
extern arma::mat data;
// The size of the neighborhood to use to get recommendations.
extern size_t neighborhood;
// The rank of the decomposition.
extern size_t rank;

// Build the CF object and perform the decomposition.
// The constructor takes a default-constructed factorizer, which, by default,
// is of type amf::NMFALSFactorizer.
CF cf(data, amf::NMFALSFactorizer(), neighborhood, rank);

// Store the results in this object.
arma::Mat<size_t> recommendations;

// Get 5 recommendations for all users.
cf.GetRecommendations(5, recommendations);
@endcode

@subsection ex2_cf_cpp CF with other factorizers

\b mlpack provides a number of existing factorizers which can be used in place
of the default mlpack::amf::NMFALSFactorizer (which is non-negative matrix
factorization with alternating least squares update rules).  These include:

 - mlpack::amf::SVDBatchFactorizer
 - mlpack::amf::SVDCompleteIncrementalFactorizer
 - mlpack::amf::SVDIncompleteIncrementalFactorizer
 - mlpack::amf::NMFALSFactorizer
 - mlpack::svd::RegularizedSVD
 - mlpack::svd::QUIC_SVD

The amf::AMF<> class has many other possibilities than those listed here; it
is a framework for alternating matrix factorization techniques.  See the
\ref amf::AMF<> "class documentation" or \ref amftutorial "tutorial on AMF"
for more information.

The use of another factorizer is straightforward; the example from the
previous section is adapted below to use svd::RegularizedSVD:

@code
#include <mlpack/methods/cf/cf.hpp>
#include <mlpack/methods/regularized_svd/regularized_svd.hpp>

using namespace mlpack::cf;

// The coordinate list of ratings that we have.
extern arma::mat data;
// The size of the neighborhood to use to get recommendations.
extern size_t neighborhood;
// The rank of the decomposition.
extern size_t rank;

// Build the CF object and perform the decomposition.
CF cf(data, svd::RegularizedSVD(), neighborhood, rank);

// Store the results in this object.
arma::Mat<size_t> recommendations;

// Get 5 recommendations for all users.
cf.GetRecommendations(5, recommendations);
@endcode

@subsection ex3_cf_cpp Predicting individual user/item ratings

The \c Predict() method can be used to predict the rating of an item by a
certain user, using the same neighborhood-based approach as the
\c GetRecommendations() function or the \c mlpack_cf program.  Below is an
example of the use of that function.  The example below will obtain the
predicted rating for item 50 by user 12.

@code
#include <mlpack/methods/cf/cf.hpp>

using namespace mlpack::cf;

// The coordinate list of ratings that we have.
extern arma::mat data;
// The size of the neighborhood to use to get recommendations.
extern size_t neighborhood;
// The rank of the decomposition.
extern size_t rank;

// Build the CF object and perform the decomposition.
// The constructor takes a default-constructed factorizer, which, by default,
// is of type amf::NMFALSFactorizer.
CF cf(data, amf::NMFALSFactorizer(), neighborhood, rank);

const double prediction = cf.Predict(12, 50); // User 12, item 50.
@endcode

@subsection ex4_cf_cpp Other operations with the W and H matrices

Sometimes, the raw decomposed W and H matrices can be useful.  The example
below obtains these matrices, and multiplies them against each other to obtain
a reconstructed data matrix with no missing values.

@code
#include <mlpack/methods/cf/cf.hpp>

using namespace mlpack::cf;

// The coordinate list of ratings that we have.
extern arma::mat data;
// The size of the neighborhood to use to get recommendations.
extern size_t neighborhood;
// The rank of the decomposition.
extern size_t rank;

// Build the CF object and perform the decomposition.
// The constructor takes a default-constructed factorizer, which, by default,
// is of type amf::NMFALSFactorizer.
CF cf(data, amf::NMFALSFactorizer(), neighborhood, rank);

// References to W and H matrices.
const arma::mat& W = cf.W();
const arma::mat& H = cf.H();

// Multiply the matrices together.
arma::mat reconstructed = W * H;
@endcode

@section cpp_cftut Template parameters for the 'CF' class

The \c CF class takes the \c FactorizerType as a template parameter to some of
its constructors and to the \c Train() function.  The \c FactorizerType class
defines the algorithm used for matrix factorization.  There are a number of
existing factorizers that can be used in \b mlpack; these were detailed in the
\ref ex2_cf_cpp "'other factorizers' example" of the previous section.

The \c FactorizerType class must implement one of the two following methods:

 - \c "Apply(arma::mat& data, const size_t rank, arma::mat& W, arma::mat& H);"
 - \c "Apply(arma::sp_mat& data, const size_t rank, arma::mat& W, arma::mat&
   H);"

The difference between these two methods is whether \c arma::mat or
\c arma::sp_mat is used as input.  If \c arma::mat is used, then the data
matrix is a coordinate list with three rows, as in the constructor to the
\c CF class.  If \c arma::sp_mat is used, then a sparse matrix is passed with
the number of rows equal to the number of items and the number of columns
equal to the number of users, and each nonzero element in the matrix
corresponds to a non-missing rating.
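To make these requirements concrete, below is a minimal, hypothetical skeleton
of a custom factorizer (the name \c RandomFactorizer and its trivial "fill
with random values" strategy are invented purely for illustration---a real
factorizer would iteratively minimize reconstruction error on the observed
entries).  It implements the \c arma::sp_mat form of \c Apply(), so it would
receive the items-by-users sparse rating matrix described above.

@code
#include <mlpack/core.hpp>

class RandomFactorizer
{
 public:
  // Sparse form of Apply(): 'data' has one row per item and one column per
  // user.  On return, W (items x rank) and H (rank x users) must hold the
  // decomposition.
  void Apply(arma::sp_mat& data,
             const size_t rank,
             arma::mat& W,
             arma::mat& H)
  {
    W.randu(data.n_rows, rank);
    H.randu(rank, data.n_cols);
  }
};
@endcode

Because this factorizer implements the sparse-matrix overload, the traits
mechanism described next can be left at its default for this class.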
The method that the factorizer implements is specified via the
\c FactorizerTraits class, which is a template metaprogramming traits class:

@code
template<typename FactorizerType>
struct FactorizerTraits
{
  /**
   * If true, then the passed data matrix is used for factorizer.Apply().
   * Otherwise, it is modified into a form suitable for factorization.
   */
  static const bool UsesCoordinateList = false;
};
@endcode

If \c FactorizerTraits<FactorizerType>::UsesCoordinateList is \c true, then
\c CF will try to call \c Apply() with an \c arma::mat object.  Otherwise,
\c CF will try to call \c Apply() with an \c arma::sp_mat object.  Specifying
the value of \c UsesCoordinateList is straightforward; provide this
specialization of the \c FactorizerTraits class (here, \c MyFactorizer stands
in for the name of your factorizer):

@code
template<>
struct FactorizerTraits<MyFactorizer>
{
  static const bool UsesCoordinateList = true; // Set your value here.
};
@endcode

The \c Apply() function also takes a reference to the matrices \c W and \c H.
When the \c Apply() function returns, the input data matrix should be
decomposed into these two matrices.  \c W should have number of rows equal to
the number of items and number of columns equal to the \c rank parameter, and
\c H should have number of rows equal to the \c rank parameter, and number of
columns equal to the number of users.

The \ref mlpack::amf::AMF "amf::AMF<> class" can be used as a base for
factorizers that alternate between updating \c W and updating \c H.  A useful
reference is the \ref amftutorial "AMF tutorial".

@section further_doc_cftut Further documentation

Further documentation for the \c CF class may be found in the
\ref mlpack::cf::CF "complete API documentation".  In addition, more
information on the \c AMF class of factorizers may be found in its
\ref mlpack::amf::AMF "complete API documentation".

*/

/*!
@file det.txt
@author Parikshit Ram
@brief Tutorial for how to perform density estimation with Density Estimation
Trees (DET).

@page dettutorial Density Estimation Tree (DET) tutorial

@section intro_det_tut Introduction

DETs perform the unsupervised task of density estimation using decision trees.
Using a trained density estimation tree (DET), the density at any particular
point can be estimated very quickly (O(log n) time, where n is the number of
points the tree is built on).

The details of this work are presented in the following paper:

@code
@inproceedings{ram2011density,
  title={Density estimation trees},
  author={Ram, P. and Gray, A.G.},
  booktitle={Proceedings of the 17th ACM SIGKDD International Conference on
      Knowledge Discovery and Data Mining},
  pages={627--635},
  year={2011},
  organization={ACM}
}
@endcode

\b mlpack provides:

 - a \ref cli_det_tut "simple command-line executable" to perform density
   estimation and related analyses using DETs
 - a \ref dtree_det_tut "generic C++ class (DTree)" which provides various
   functionality for the DETs
 - a set of functions in the namespace \ref dtutils_det_tut "mlpack::det" to
   perform cross-validation for the task of density estimation with DETs

@section toc_det_tut Table of Contents

A list of all the sections this tutorial contains.
 - \ref intro_det_tut
 - \ref toc_det_tut
 - \ref cli_det_tut
   - \ref cli_ex1_de_tut
   - \ref cli_ex2_de_test_tut
   - \ref cli_ex4_de_vi_tut
   - \ref cli_ex6_de_save
   - \ref cli_ex7_de_load
 - \ref dtree_det_tut
   - \ref dtree_pub_func_det_tut
 - \ref dtutils_det_tut
   - \ref dtutils_util_funcs
 - \ref further_doc_det_tut

@section cli_det_tut Command-Line mlpack_det

The command line arguments of this program can be viewed using the \c -h
option:

@code
$ mlpack_det -h
Density Estimation With Density Estimation Trees

  This program performs a number of functions related to Density Estimation
  Trees.  The optimal Density Estimation Tree (DET) can be trained on a set of
  data (specified by --training_file or -t) using cross-validation (with
  number of folds specified by --folds).  This trained density estimation tree
  may then be saved to a model file with the --output_model_file (-M) option.

  The variable importances of each dimension may be saved with the --vi_file
  (-i) option, and the density estimates on each training point may be saved
  to the file specified with the --training_set_estimates_file (-e) option.

  This program also can provide density estimates for a set of test points,
  specified in the --test_file (-T) file.  The density estimation tree used
  for this task will be the tree that was trained on the given training
  points, or a tree stored in the file given with the --input_model_file (-m)
  parameter.  The density estimates for the test points may be saved into the
  file specified with the --test_set_estimates_file (-E) option.

Options:

  --folds (-f) [int]            The number of folds of cross-validation to
                                perform for the estimation (0 is LOOCV)
                                Default value 10.
  --help (-h)                   Default help info.
  --info [string]               Get help on a specific module or option.
                                Default value ''.
  --input_model_file (-m) [string]
                                File containing already trained density
                                estimation tree.  Default value ''.
  --max_leaf_size (-L) [int]    The maximum size of a leaf in the unpruned,
                                fully grown DET.  Default value 10.
  --min_leaf_size (-l) [int]    The minimum size of a leaf in the unpruned,
                                fully grown DET.  Default value 5.
  --output_model_file (-M) [string]
                                File to save trained density estimation tree
                                to.  Default value ''.
  --test_file (-T) [string]     A set of test points to estimate the density
                                of.  Default value ''.
  --test_set_estimates_file (-E) [string]
                                The file in which to output the estimates on
                                the test set from the final optimally pruned
                                tree.  Default value ''.
  --training_file (-t) [string] The data set on which to build a density
                                estimation tree.  Default value ''.
  --training_set_estimates_file (-e) [string]
                                The file in which to output the density
                                estimates on the training set from the final
                                optimally pruned tree.  Default value ''.
  --verbose (-v)                Display informational messages and the full
                                list of parameters and timers at the end of
                                execution.
  --version (-V)                Display the version of mlpack.
  --vi_file (-i) [string]       The file to output the variable importance
                                values for each feature.  Default value ''.

For further information, including relevant papers, citations, and theory,
consult the documentation found at http://www.mlpack.org or included with your
distribution of mlpack.
@endcode

@subsection cli_ex1_de_tut Plain-vanilla density estimation

We can just train a DET on the provided data set \e S.  Like all datasets
\b mlpack uses, the data should be row-major (\b mlpack transposes data when
it is loaded; internally, the data is column-major -- see \ref matrices
"this page" for more information).
@code
$ mlpack_det -t dataset.csv -v
@endcode

By default, \c mlpack_det performs 10-fold cross-validation (using the
\f$\alpha\f$-pruning regularization for decision trees).  To perform LOOCV
(leave-one-out cross-validation), which can provide better results but will
take longer, use the following command:

@code
$ mlpack_det -t dataset.csv -f 0 -v
@endcode

To perform k-fold cross-validation, use \c -f \c k (or \c --folds \c k).

There are certain other options available for training.  For example, in the
construction of the initial tree, you can specify the maximum and minimum leaf
sizes.  By default, they are 10 and 5 respectively; you can set them using the
\c -L (\c --max_leaf_size) and the \c -l (\c --min_leaf_size) options.

@code
$ mlpack_det -t dataset.csv -L 20 -l 10
@endcode

In case you want to output the density estimates at the points in the training
set, use the \c -e (\c --training_set_estimates_file) option to specify the
output file to which the estimates will be saved.  In the example below, the
first line in \c density_estimates.txt will correspond to the density at the
first point in the training set.

@code
$ mlpack_det -t dataset.csv -e density_estimates.txt -v
@endcode

@subsection cli_ex2_de_test_tut Estimation on a test set

Often, it is useful to train a density estimation tree on a training set and
then obtain density estimates from the learned estimator for a separate set of
test points.  The \c -T (\c --test_file) option allows specification of a set
of test points, and the \c -E (\c --test_set_estimates_file) option allows
specification of the file into which the test set estimates are saved.

@code
$ mlpack_det -t dataset.csv -T test_points.csv -E test_density_estimates.txt -v
@endcode

@subsection cli_ex4_de_vi_tut Computing the variable importance

The variable importance (with respect to density estimation) of the different
features in the data set can be obtained by using the \c -i (\c --vi_file )
option.  This outputs the absolute (as opposed to relative) variable
importance of all the features into the specified file.

@code
$ mlpack_det -t dataset.csv -i variable_importance.txt -v
@endcode

@subsection cli_ex6_de_save Saving trained DETs

The \c mlpack_det program is capable of saving a trained DET to a file for
later usage.  The \c --output_model_file or \c -M option allows specification
of the file to save to.  In the example below, a DET trained on \c dataset.csv
is saved to the file \c det.xml.

@code
$ mlpack_det -t dataset.csv -M det.xml -v
@endcode

@subsection cli_ex7_de_load Loading trained DETs

A saved DET can be used to perform any of the functionality in the examples
above.  A saved DET is loaded with the \c --input_model_file or \c -m option.
The example below loads a saved DET from \c det.xml and outputs density
estimates on the dataset \c test_dataset.csv into the file \c estimates.csv.

@code
$ mlpack_det -m det.xml -T test_dataset.csv -E estimates.csv -v
@endcode

@section dtree_det_tut The 'DTree' class

This class implements density estimation trees.  Below is a simple example
which initializes a density estimation tree.

@code
#include <mlpack/methods/det/dtree.hpp>

using namespace mlpack::det;

// The dataset matrix, on which to learn the density estimation tree.
extern arma::Mat<double> data;

// Initialize the tree.  This function also creates and saves the bounding box
// of the data.  Note that it does not actually build the tree.
DTree<> det(data);
@endcode

@subsection dtree_pub_func_det_tut Public Functions

The function \c Grow() greedily grows the tree, adding new points to the tree.
Note that the points in the dataset will be reordered.  This should only be
run on a tree which has not already been built.  In general, it is more useful
to use the \c Trainer() function found in \ref dtutils_det_tut.

@code
// This keeps track of the data during the shuffle that occurs while growing
// the tree.
arma::Col<size_t> oldFromNew(data.n_cols);
for (size_t i = 0; i < data.n_cols; i++)
  oldFromNew[i] = i;

// This function grows the tree down to the leaves.  It returns the current
// minimum value of the regularization parameter alpha.
size_t maxLeafSize = 10;
size_t minLeafSize = 5;

double alpha = det.Grow(data, oldFromNew, false, maxLeafSize, minLeafSize);
@endcode

Note that the alternate volume regularization should not be used (see ticket
#238).

To estimate the density at a given query point, use the following code.

@code
// For a given query, you can obtain the density estimate.
extern arma::Col<double> query;
extern DTree<>* det;
double estimate = det->ComputeValue(&query);
@endcode

The \b variable \b importance of each feature for the given DET can be
computed as follows.

@code
// The data matrix and density estimation tree.
extern arma::mat data;
extern DTree<>* det;

// The variable importances will be saved into this vector.
arma::Col<double> varImps;

// You can obtain the variable importance from the current tree.
det->ComputeVariableImportance(varImps);
@endcode

@section dtutils_det_tut 'namespace mlpack::det'

The functions in this namespace allow the user to perform tasks with the
'DTree' class.  Most importantly, the \c Trainer() function allows the full
training of a density estimation tree with cross-validation.  There are also
utility functions which allow printing of leaf membership and variable
importance.

@subsection dtutils_util_funcs Utility Functions

The code below details how to train a density estimation tree with
cross-validation.

@code
#include <mlpack/methods/det/dt_utils.hpp>

using namespace mlpack::det;

// The dataset matrix, on which to learn the density estimation tree.
extern arma::Mat<double> data;

// The number of folds for cross-validation.
const size_t folds = 10; // Set folds = 0 for LOOCV.

const size_t maxLeafSize = 10;
const size_t minLeafSize = 5;

// Train the density estimation tree with cross-validation.
DTree<>* dtree_opt = Trainer(data, folds, false, maxLeafSize, minLeafSize);
@endcode

Note that the alternate volume regularization should be set to false because
it has known bugs (see #238).

To print the class membership of leaves in the tree into a file, see the
following code.

@code
extern arma::Mat<size_t> labels;
extern DTree<>* det;
const size_t numClasses = 3; // The number of classes must be known.

extern std::string leafClassMembershipFile;

PrintLeafMembership(det, data, labels, numClasses, leafClassMembershipFile);
@endcode

Note that you can find the number of classes with \c max(labels) \c + \c 1.
The variable importance can also be printed to a file in a similar manner.

@code
extern DTree<>* det;

extern std::string variableImportanceFile;
const size_t numFeatures = data.n_rows;

PrintVariableImportance(det, numFeatures, variableImportanceFile);
@endcode

@section further_doc_det_tut Further Documentation

For further documentation on the DTree class, consult the
\ref mlpack::det::DTree "complete API documentation".

*/

-----
this option is not available in DET right now; see #238!
-----

@subsection cli_alt_reg_tut Alternate DET regularization

The usual regularized error \f$R_\alpha(t)\f$ of a node \f$t\f$ is given by:
\f$R_\alpha(t) = R(t) + \alpha |\tilde{t}|\f$ where

\f[
R(t) = -\frac{|t|^2}{N^2 V(t)}.
\f]

\f$V(t)\f$ is the volume of the node \f$t\f$ and \f$\tilde{t}\f$ is the set of
leaves in the subtree rooted at \f$t\f$.

For the purposes of density estimation, there is a different form of
regularization: instead of penalizing the number of leaves in the subtree, we
penalize the sum of the inverse of the volumes of the leaves.  With this
regularization, very small volume nodes are discouraged unless the data
actually warrants it.  Thus,

\f[
R_\alpha'(t) = R(t) + \alpha I_v(\tilde{t})
\f]

where

\f[
I_v(\tilde{t}) = \sum_{l \in \tilde{t}} \frac{1}{V(l)}.
\f]

To use this form of regularization, use the \c -R flag.

@code
$ mlpack_det -t dataset.csv -R -v
@endcode

/*!
@file emst.txt
@author Bill March
@brief Tutorial for the Euclidean Minimum Spanning Tree algorithm.

@page emst_tutorial EMST Tutorial

@section intro_emsttut Introduction

The Euclidean Minimum Spanning Tree problem appears widely in machine learning
and data mining applications.  Given a set \f$S\f$ of points in
\f$\mathbf{R}^d\f$, our task is to compute the lowest-weight spanning tree in
the complete graph on \f$S\f$ with edge weights given by the Euclidean
distance between points.

Among other applications, the EMST can be used to compute hierarchical
clusterings of data.  A single-linkage clustering can be obtained from the
EMST by deleting all edges longer than a given cluster length.  This technique
is also referred to as a Friends-of-Friends clustering in the astronomy
literature.

\b mlpack includes an implementation of Dual-Tree Boruvka which uses
\f$kd\f$-trees by default; this is the empirically and theoretically fastest
EMST algorithm.  In addition, the implementation supports the use of different
trees via templates.  For more details, see the following paper:

@code
@inproceedings{march2010fast,
  title={Fast {E}uclidean minimum spanning tree: algorithm, analysis, and
      applications},
  author={March, William B. and Ram, Parikshit and Gray, Alexander G.},
  booktitle={Proceedings of the 16th ACM SIGKDD International Conference on
      Knowledge Discovery and Data Mining (KDD '10)},
  pages={603--612},
  year={2010},
  organization={ACM}
}
@endcode

\b mlpack provides:

 - a \ref cli_emsttut "simple command-line executable" to compute the EMST of
   a given data set
 - a \ref dtb_emsttut "simple C++ interface" to compute the EMST

@section toc_emsttut Table of Contents

A list of all the sections this tutorial contains.

 - \ref intro_emsttut
 - \ref toc_emsttut
 - \ref cli_emsttut
 - \ref dtb_emsttut
 - \ref further_doc_emsttut

@section cli_emsttut Command-Line 'EMST'

The \c mlpack_emst executable in \b mlpack will compute the EMST of a given
set of points and store the resulting edge list to a file.  The output file
contains an edge list representation of the MST in an \f$(n - 1) \times 3\f$
matrix, where the first and second columns are labels of points and the third
column is the edge weight.  The edges are sorted in order of increasing
weight.

Below are several examples of simple usage (and the resultant output).  The
\c -v option is used so that verbose output is given.  Further documentation
on each individual option can be found by typing

@code
$ mlpack_emst --help
@endcode

@code
$ mlpack_emst --input_file=dataset.csv --output_file=edge_list.csv -v
[INFO ] Reading in data.
[INFO ] Loading 'dataset.csv' as CSV data.
[INFO ] Data read, building tree.
[INFO ] Tree built, running algorithm.
[INFO ] 4 edges found so far.
[INFO ] 5 edges found so far.
[INFO ] Total spanning tree length: 1002.45
[INFO ] Saving CSV data to 'edge_list.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_file: dataset.csv
[INFO ] leaf_size: 1
[INFO ] naive: false
[INFO ] output_file: edge_list.csv
[INFO ] verbose: true
[INFO ]
[INFO ] Program timers:
[INFO ] emst/mst_computation: 0.000179s
[INFO ] emst/tree_building: 0.000061s
[INFO ] total_time: 0.052641s
@endcode

The code performs at most \f$\log N\f$ iterations for \f$N\f$ data points. It
will print an update on the number of MST edges found after each iteration.
Convenient program timers are given for different parts of the calculation at
the bottom of the output, as well as the parameters the simulation was run
with.

@code
$ cat dataset.csv
0, 0
1, 1
3, 3
0.5, 0
1000, 0
1001, 0

$ cat edge_list.csv
0.0000000000e+00,3.0000000000e+00,5.0000000000e-01
4.0000000000e+00,5.0000000000e+00,1.0000000000e+00
1.0000000000e+00,3.0000000000e+00,1.1180339887e+00
1.0000000000e+00,2.0000000000e+00,2.8284271247e+00
2.0000000000e+00,4.0000000000e+00,9.9700451353e+02
@endcode

The input points are labeled 0-5. The output tells us that the MST connects
point 0 to point 3, point 4 to point 5, point 1 to point 3, point 1 to point 2,
and point 2 to point 4, with the corresponding edge weights given in the third
column. The total length of the MST is also given in the verbose output.

Note that it is also possible to compute the EMST using a naive
(\f$O(N^2)\f$) algorithm for timing and comparison purposes, using the
\c --naive option.

@section dtb_emsttut The 'DualTreeBoruvka' class

The 'DualTreeBoruvka' class contains our implementation of the Dual-Tree
Boruvka algorithm. The class has two constructors: the first takes the data
set, constructs the tree (where the type of tree constructed is the TreeType
template parameter), and computes the MST. The second takes a data set and an
already constructed tree.

The class provides one method that performs the MST computation:

@code
void ComputeMST(arma::mat& results);
@endcode

This method stores the computed MST in the matrix \c results in the format
given above.
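For convenience, a minimal usage sketch is below. The variable names are
illustrative; the sketch assumes a column-major data matrix and the default
tree type.

@code
#include <mlpack/methods/emst/dtb.hpp>

using namespace mlpack::emst;

// The dataset for which the EMST will be computed (column-major; one point
// per column).
extern arma::mat dataset;

// Constructing the object builds the tree on the dataset.
DualTreeBoruvka<> dtb(dataset);

// Compute the MST; the edge list is stored into 'results' in the format
// described earlier (point labels and edge weight, in order of increasing
// weight).
arma::mat results;
dtb.ComputeMST(results);
@endcode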
@section further_doc_emsttut Further documentation

For further documentation on the DualTreeBoruvka class, consult the
\ref mlpack::emst::DualTreeBoruvka "complete API documentation".

*/

/*!
@file fastmks.txt
@author Ryan Curtin
@brief Tutorial for how to use FastMKS in mlpack.

@page fmkstutorial Fast max-kernel search tutorial (fastmks)

@section intro_fmkstut Introduction

The FastMKS algorithm (fast exact max-kernel search) is a recent algorithm
proposed in the following papers:

@code
@inproceedings{curtin2013fast,
  title={Fast Exact Max-Kernel Search},
  author={Curtin, Ryan R. and Ram, Parikshit and Gray, Alexander G.},
  booktitle={Proceedings of the 2013 SIAM International Conference on Data
      Mining (SDM '13)},
  year={2013},
  pages={1--9}
}

@article{curtin2014dual,
  author = {Curtin, Ryan R. and Ram, Parikshit},
  title = {Dual-tree fast exact max-kernel search},
  journal = {Statistical Analysis and Data Mining},
  volume = {7},
  number = {4},
  publisher = {Wiley Subscription Services, Inc., A Wiley Company},
  issn = {1932-1872},
  url = {http://dx.doi.org/10.1002/sam.11218},
  doi = {10.1002/sam.11218},
  pages = {229--253},
  year = {2014},
}
@endcode

Given a set of query points \f$Q\f$ and a set of reference points \f$R\f$, the
FastMKS algorithm is a fast dual-tree (or single-tree) algorithm which finds

\f[
\arg\max_{p_r \in R} K(p_q, p_r)
\f]

for all points \f$p_q \in Q\f$ and for some Mercer kernel \f$K(\cdot,
\cdot)\f$. A Mercer kernel is a kernel that is positive semidefinite; these
are the classes of kernels that can be used with the kernel trick. In short,
the positive semidefiniteness of a Mercer kernel means that any kernel matrix
(or Gram matrix) created on a dataset must be positive semidefinite.

The FastMKS algorithm builds trees on the datasets \f$Q\f$ and \f$R\f$ in such
a way that explicit representation of the points in the kernel space is
unnecessary, by using cover trees (\ref mlpack::tree::CoverTree). This allows
the algorithm to be run, for instance, on string kernels, where there is no
sensible explicit representation. The \b mlpack implementation allows any type
of tree that does not require an explicit representation to be used. For more
details, see the paper.

At the time of this writing there is no other fast algorithm for exact
max-kernel search. \b mlpack implements both single-tree and dual-tree fast
max-kernel search.

\b mlpack provides:

- a \ref cli_fmkstut "simple command-line executable" to run FastMKS
- a \ref fastmks_fmkstut "C++ interface" to run FastMKS

@section toc_fmkstut Table of Contents

A list of all the sections this tutorial contains.

- \ref intro_fmkstut
- \ref toc_fmkstut
- \ref cli_fmkstut
- \ref cli_ex1_fmkstut
- \ref cli_ex2_fmkstut
- \ref cli_ex3_fmkstut
- \ref cli_ex4_fmkstut
- \ref cli_ex5_fmkstut
- \ref cli_ex6_fmkstut
- \ref cli_ex7_fmkstut
- \ref fastmks_fmkstut
- \ref fastmks_ex1_fmkstut
- \ref fastmks_ex2_fmkstut
- \ref fastmks_ex3_fmkstut
- \ref fastmks_ex4_fmkstut
- \ref writing_kernel_fmkstut
- \ref custom_tree_fmkstut
- \ref objects_fmkstut
- \ref further_doc_fmkstut

@section cli_fmkstut Command-line FastMKS (mlpack_fastmks)

\b mlpack provides a command-line program, \c mlpack_fastmks, which is used to
perform FastMKS on a given query and reference dataset. It supports numerous
different types of kernels:

- \ref mlpack::kernel::LinearKernel "linear kernel"
- \ref mlpack::kernel::PolynomialKernel "polynomial kernel"
- \ref mlpack::kernel::CosineDistance "cosine distance"
- \ref mlpack::kernel::GaussianKernel "Gaussian kernel"
- \ref mlpack::kernel::EpanechnikovKernel "Epanechnikov kernel"
- \ref mlpack::kernel::TriangularKernel "triangular kernel"
- \ref mlpack::kernel::HyperbolicTangentKernel "hyperbolic tangent kernel"

Note that when a shift-invariant kernel is used, the results will be the same
as nearest neighbor search, so @ref nstutorial "KNN" may be a better option. A
shift-invariant kernel is a kernel that depends only on the distance between
the two input points. The \ref mlpack::kernel::GaussianKernel "Gaussian
kernel", \ref mlpack::kernel::EpanechnikovKernel "Epanechnikov kernel", and
\ref mlpack::kernel::TriangularKernel "triangular kernel" are instances of
shift-invariant kernels. The paper contains more details on this situation.
The \c mlpack_fastmks executable still provides these kernels as options,
though.
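To make the shift-invariance argument concrete, here is a short restatement of
that observation: a shift-invariant kernel can be written as
\f$K(p_q, p_r) = f(\| p_q - p_r \|)\f$ for some function \f$f\f$, and for the
shift-invariant kernels named above, \f$f\f$ is decreasing in the distance.
Therefore

\f[
\arg\max_{p_r \in R} K(p_q, p_r) = \arg\min_{p_r \in R} \| p_q - p_r \|,
\f]

which is exactly the nearest neighbor of \f$p_q\f$ in \f$R\f$.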
The following examples detail usage of the \c mlpack_fastmks program. Note
that you can get documentation on all the possible parameters by typing:

@code
$ mlpack_fastmks --help
@endcode

@subsection cli_ex1_fmkstut FastMKS with a linear kernel on one dataset

If only one dataset is specified (with \c -r or \c --reference_file), the
reference dataset is taken to be both the query and reference datasets. The
example below finds the 4 maximum kernels of each point in dataset.csv, using
the default linear kernel.

@code
$ mlpack_fastmks -r dataset.csv -k 4 -v -p products.csv -i indices.csv
@endcode

When the operation completes, the values of the kernels are saved in
products.csv and the indices of the points which give the maximum kernels are
saved in indices.csv.

@code
$ head indices.csv
762,910,863,890
762,910,426,568
910,762,863,426
762,910,863,426
863,910,614,762
762,863,910,614
762,910,488,568
762,910,863,426
910,762,863,426
863,762,910,614
@endcode

@code
$ head products.csv
1.6221652894e+00,1.5998743443e+00,1.5898890769e+00,1.5406789753e+00
1.3387953449e+00,1.3317349486e+00,1.2966613184e+00,1.2774493620e+00
1.6386110476e+00,1.6332029753e+00,1.5952629124e+00,1.5887195330e+00
1.0917545803e+00,1.0820878726e+00,1.0668992636e+00,1.0419838050e+00
1.2272441028e+00,1.2169643942e+00,1.2104597963e+00,1.2067780154e+00
1.5720962456e+00,1.5618504956e+00,1.5609069923e+00,1.5235605095e+00
1.3655478674e+00,1.3548593212e+00,1.3311547298e+00,1.3250728881e+00
2.0119149744e+00,2.0043668067e+00,1.9847289214e+00,1.9298280046e+00
1.1586923205e+00,1.1494586097e+00,1.1274872962e+00,1.1248172766e+00
4.4789820372e-01,4.4618539778e-01,4.4200024852e-01,4.3989721792e-01
@endcode

We can see in this example that for point 0, the point with maximum kernel
value is point 762, with a kernel value of 1.622165. For point 3, the point
with third largest kernel value is point 863, with a kernel value of 1.0669.

@subsection cli_ex2_fmkstut FastMKS on a reference and query dataset

The query points may be different from the reference points. To specify a
different query set, the \c -q (or \c --query_file) option is used, as in the
example below.

@code
$ mlpack_fastmks -q query_set.csv -r reference_set.csv -k 5 -i indices.csv \
> -p products.csv
@endcode

@subsection cli_ex3_fmkstut FastMKS with a different kernel

The \c mlpack_fastmks program offers more than just the linear kernel. Valid
options are \c 'linear', \c 'polynomial', \c 'cosine', \c 'gaussian',
\c 'epanechnikov', \c 'triangular' and \c 'hyptan' (the hyperbolic tangent
kernel). Note that the hyperbolic tangent kernel is provably not a Mercer
kernel but is positive semidefinite on most datasets and is commonly used as a
kernel. Note also that the Gaussian kernel and other shift-invariant kernels
give the same results as nearest neighbor search (see \ref nstutorial).

The kernel to use is specified with the \c -K (or \c --kernel) option. The
example below uses the cosine similarity as a kernel.

@code
$ mlpack_fastmks -r dataset.csv -k 5 -K cosine -i indices.csv -p products.csv -v
@endcode

@subsection cli_ex4_fmkstut Using single-tree search or naive search

In some cases, it may be useful to not use the dual-tree FastMKS algorithm.
Instead you can specify the \c --single option, indicating that a tree should
be built only on the reference set, and then the queries should be processed in
a linear scan (instead of in a tree). Alternately, the \c -N (or \c --naive)
option makes the program not build trees at all and instead use brute-force
search to find the solutions.
The example below uses single-tree search on two datasets with the linear
kernel.

@code
$ mlpack_fastmks -q query_set.csv -r reference_set.csv --single -k 5 \
> -p products.csv -i indices.csv -K linear
@endcode

The example below uses naive search on one dataset.

@code
$ mlpack_fastmks -r reference_set.csv -k 5 -N -p products.csv -i indices.csv
@endcode

@subsection cli_ex5_fmkstut Parameters for alternate kernels

Many of the alternate kernel choices have parameters which can be chosen; these
are detailed in this section.

- \b \c -w (\c --bandwidth): this sets the bandwidth of the kernel, and is
  applicable to the \c 'gaussian', \c 'epanechnikov', and \c 'triangular'
  kernels. This is the "spread" of the kernel.

- \b \c -d (\c --degree): this sets the degree of the polynomial kernel (the
  power to which the result is raised). It is only applicable to the
  \c 'polynomial' kernel.

- \b \c -o (\c --offset): this sets the offset of the kernel, for the
  \c 'polynomial' and \c 'hyptan' kernel. See
  \ref mlpack::kernel::PolynomialKernel "the polynomial kernel documentation"
  and \ref mlpack::kernel::HyperbolicTangentKernel "the hyperbolic tangent
  kernel documentation" for more information.

- \b \c -s (\c --scale): this sets the scale of the kernel, and is only
  applicable to the \c 'hyptan' kernel. See
  \ref mlpack::kernel::HyperbolicTangentKernel "the hyperbolic tangent kernel
  documentation" for more information.

@subsection cli_ex6_fmkstut Saving a FastMKS model/tree

The \c mlpack_fastmks program also supports saving a model built on a reference
dataset (this model includes the tree, the kernel, and the search parameters).
The \c --output_model_file or \c -M option allows one to save these parameters
to disk for later usage. An example is below:

@code
$ mlpack_fastmks -r reference_set.csv -K cosine -M fastmks_model.xml
@endcode

This example builds a tree on the dataset in \c reference_set.csv using the
cosine similarity kernel, and saves the resulting model to
\c fastmks_model.xml. This model may then be used in later calls to the
\c mlpack_fastmks program.

@subsection cli_ex7_fmkstut Loading a FastMKS model for further searches

Supposing that a FastMKS model has been saved with the \c --output_model_file
or \c -M parameter, that model can then be later loaded in subsequent calls to
the \c mlpack_fastmks program, using the \c --input_model_file or \c -m option.
For instance, with a model saved in \c fastmks_model.xml and a query set in
\c query_set.csv, we can find 3 max-kernel candidates, saving to \c indices.csv
and \c kernels.csv:

@code
$ mlpack_fastmks -m fastmks_model.xml -k 3 -i indices.csv -p kernels.csv
@endcode

Loading a model as opposed to building a model is advantageous because the
reference tree is already built. So, among other situations, this could be
useful in the setting where many different query sets (or many different values
of k) will be used.

Note that the kernel cannot be changed in a saved model without rebuilding the
model entirely.

@section fastmks_fmkstut The 'FastMKS' class

The \c FastMKS<> class offers a simple API for use within C++ applications, and
allows further flexibility in kernel choice and tree type choice. However,
\c FastMKS<> has no default template parameter for the kernel type -- that must
be manually specified.
Choices that \b mlpack provides include:

- \ref mlpack::kernel::LinearKernel
- \ref mlpack::kernel::PolynomialKernel
- \ref mlpack::kernel::CosineDistance
- \ref mlpack::kernel::GaussianKernel
- \ref mlpack::kernel::EpanechnikovKernel
- \ref mlpack::kernel::TriangularKernel
- \ref mlpack::kernel::HyperbolicTangentKernel
- \ref mlpack::kernel::LaplacianKernel
- \ref mlpack::kernel::PSpectrumStringKernel

The following examples use kernels from that list. Writing your own kernel is
detailed in \ref writing_kernel_fmkstut "the next section". Remember that when
you are using the C++ interface, the data matrices must be column-major. See
\ref matrices for more information.

@subsection fastmks_ex1_fmkstut FastMKS on one dataset

Given only a reference dataset, the following code will run FastMKS with k set
to 5.

@code
#include <mlpack/core.hpp>
#include <mlpack/methods/fastmks/fastmks.hpp>

using namespace mlpack::fastmks;
using namespace mlpack::kernel;

// The reference dataset, which is column-major.
extern arma::mat data;

// This will initialize the FastMKS object with the linear kernel with default
// options: K(x, y) = x^T y. The tree is built in the constructor.
FastMKS<LinearKernel> f(data);

// The results will be stored in these matrices.
arma::Mat<size_t> indices;
arma::mat products;

// Run FastMKS.
f.Search(5, indices, products);
@endcode

@subsection fastmks_ex2_fmkstut FastMKS with a query and reference dataset

In this setting we have both a query and reference dataset. We search for 10
maximum kernels.

@code
#include <mlpack/core.hpp>
#include <mlpack/methods/fastmks/fastmks.hpp>

using namespace mlpack::fastmks;
using namespace mlpack::kernel;

// The reference and query datasets, which are column-major.
extern arma::mat referenceData;
extern arma::mat queryData;

// This will initialize the FastMKS object with the triangular kernel with
// default options (bandwidth of 1). The reference tree is built in the
// constructor.
FastMKS<TriangularKernel> f(referenceData);

// The results will be stored in these matrices.
arma::Mat<size_t> indices;
arma::mat products;

// Run FastMKS. The query tree is built during the call to Search().
f.Search(queryData, 10, indices, products);
@endcode

@subsection fastmks_ex3_fmkstut FastMKS with an initialized kernel

Often, kernels have parameters which need to be specified. \c FastMKS<> has
constructors which take initialized kernels. Note that temporary kernels
cannot be passed as an argument. The example below initializes a
\c PolynomialKernel object and then runs FastMKS with a query and reference
dataset.

@code
#include <mlpack/core.hpp>
#include <mlpack/methods/fastmks/fastmks.hpp>

using namespace mlpack::fastmks;
using namespace mlpack::kernel;

// The reference and query datasets, which are column-major.
extern arma::mat referenceData;
extern arma::mat queryData;

// Initialize the polynomial kernel with degree of 3 and offset of 2.5.
PolynomialKernel pk(3.0, 2.5);

// Create the FastMKS object with the initialized kernel.
FastMKS<PolynomialKernel> f(referenceData, pk);

// The results will be stored in these matrices.
arma::Mat<size_t> indices;
arma::mat products;

// Run FastMKS.
f.Search(queryData, 10, indices, products);
@endcode

The syntax for running FastMKS with one dataset and an initialized kernel is
very similar:

@code
f.Search(10, indices, products);
@endcode

@subsection fastmks_ex4_fmkstut FastMKS with an already-created tree

By default, \c FastMKS<> uses the cover tree datastructure (see
\ref mlpack::tree::CoverTree). Sometimes, it is useful to modify the
parameters of the cover tree. In this scenario, a tree must be built outside
of the constructor, and then passed to the appropriate \c FastMKS<>
constructor. An example on just a reference dataset is shown below, where the
base of the cover tree is modified.
We also use an instantiated kernel, but because we are building our own tree,
we must use \ref mlpack::metric::IPMetric "IPMetric" so that our tree is built
on the metric induced by our kernel function.

@code
#include <mlpack/core.hpp>
#include <mlpack/methods/fastmks/fastmks.hpp>

using namespace mlpack;
using namespace mlpack::fastmks;
using namespace mlpack::kernel;
using namespace mlpack::metric;

// The reference dataset, which is column-major.
extern arma::mat data;

// Initialize the polynomial kernel with a degree of 4 and offset of 2.0.
PolynomialKernel pk(4.0, 2.0);

// Create the metric induced by this kernel (because a kernel is not a metric
// and we can't build a tree on a kernel alone).
IPMetric<PolynomialKernel> metric(pk);

// Now build a tree on the reference dataset using the instantiated metric and
// the custom base of 1.5 (default is 1.3). We have to be sure to use the right
// type here -- FastMKS needs the FastMKSStat object as the tree's
// StatisticType.
typedef tree::CoverTree<IPMetric<PolynomialKernel>, tree::FirstPointIsRoot,
    FastMKSStat> TreeType; // Convenience typedef.
TreeType* tree = new TreeType(data, metric, 1.5);

// Now initialize FastMKS with that statistic. We don't need to specify the
// TreeType template parameter since we are still using the default. We don't
// need to pass the kernel because that is contained in the tree.
FastMKS<PolynomialKernel> f(tree);

// The results will be stored in these matrices.
arma::Mat<size_t> indices;
arma::mat products;

// Run FastMKS.
f.Search(10, indices, products);
@endcode

The syntax is similar for the case where different query and reference datasets
are given; but trees for both need to be built in the manner specified above.
Be sure to build both trees using the same metric (or at least a metric with
the exact same parameters).

@code
f.Search(queryTree, 10, indices, products);
@endcode

@section writing_kernel_fmkstut Writing a custom kernel for FastMKS

While \b mlpack provides some number of kernels in the mlpack::kernel
namespace, it is easy to create a custom kernel. To satisfy the KernelType
policy, a class must implement the following methods:

@code
// Empty constructor is required.
KernelType();

// Evaluate the kernel between two points.
template<typename VecType>
double Evaluate(const VecType& a, const VecType& b);
@endcode

The template parameter \c VecType is helpful (but not necessary) so that the
kernel can be used with both sparse and dense matrices (\c arma::sp_mat and
\c arma::mat).

@section custom_tree_fmkstut Using other tree types for FastMKS

The use of the cover tree (see \ref mlpack::tree::CoverTree "CoverTree") is not
necessary for FastMKS, although it is the default tree type. A different type
of tree can be specified with the TreeType template parameter. However, the
tree type is required to have \ref mlpack::fastmks::FastMKSStat "FastMKSStat"
as the StatisticType, and for FastMKS to work, the tree must be built only on
kernel evaluations (or distance evaluations in the kernel space via
\ref mlpack::metric::IPMetric "IPMetric::Evaluate()").

Below is an example where a custom tree class, \c CustomTree, is used as the
tree type for FastMKS. In this example FastMKS is only run on one dataset.

@code
#include <mlpack/methods/fastmks/fastmks.hpp>
#include "custom_tree.hpp"

using namespace mlpack;
using namespace mlpack::fastmks;
using namespace mlpack::tree;

// The dataset that FastMKS will be run on.
extern arma::mat data;

// The custom tree type. We'll assume that the first template parameter is the
// statistic type.
typedef CustomTree<FastMKSStat> TreeType;

// The FastMKS constructor will create the tree.
FastMKS<kernel::LinearKernel, TreeType> f(data);

// These will hold the results.
arma::Mat<size_t> indices;
arma::mat products;

// Run FastMKS.
f.Search(5, indices, products);
@endcode

@section objects_fmkstut Running FastMKS on objects

FastMKS has a lot of utility on objects which are not representable in some
sort of metric space. These objects might be strings, graphs, models, or other
objects. For these types of objects, questions based on distance don't really
make sense. One good example is with strings. The question "how far is 'dog'
from 'Taki Inoue'?" simply doesn't make sense. We can't have a centroid of the
terms 'Fritz', 'E28', and 'popsicle'.

However, what we can do is define some sort of kernel on these objects. These
kernels generally correspond to some similarity measure, with one example being
the p-spectrum string kernel (see \ref mlpack::kernel::PSpectrumStringKernel).
Using that, we can say "how similar is 'dog' to 'Taki Inoue'?" and get an
actual numerical result by evaluating K('dog', 'Taki Inoue') (where K is our
p-spectrum string kernel).

The only requirement on these kernels is that they are positive definite
kernels (or Mercer kernels). For more information on those details, refer to
the FastMKS paper.

Remember that FastMKS is a tree-based method. But trees like the binary space
tree require centroids -- and as we said earlier, centroids often don't make
sense with these types of objects. Therefore, we need a type of tree which is
built \b exclusively on points in the dataset -- those are points which we can
evaluate our kernel function on. The cover tree is one example of a type of
tree satisfying this condition; its construction will only call the kernel
function on two points that are in the dataset.

But, we have one more problem. The \c CoverTree class is built on \c arma::mat
objects (dense matrices). Our objects, however, are not necessarily
representable in a column of a matrix. To use the example we have been using,
strings cannot be represented easily in a matrix because they may all have
different lengths.

The way to work around this problem is to create a "fake" data matrix which
simply holds indices to objects. A good example of how to do this is detailed
in the documentation for the
\ref mlpack::kernel::PSpectrumStringKernel "PSpectrumStringKernel". In short,
the trick is to make each data matrix one-dimensional and containing linear
indices:

@code
arma::mat data = "0 1 2 3 4 5 6 7 8";
@endcode

Then, when \c Evaluate() is called on the kernel function, the parameters will
be two one-dimensional vectors that simply contain indices to objects. The
example below details the process a little better:

@code
// This function evaluates the kernel on two Objects (in this example, its
// implementation is not important; the only important thing is that the
// function exists).
double ObjectKernel::Evaluate(const Object& a, const Object& b) const;

template<typename VecType>
double ObjectKernel::Evaluate(const VecType& a, const VecType& b) const
{
  // Extract the indices from the vectors.
  const size_t indexA = size_t(a[0]);
  const size_t indexB = size_t(b[0]);

  // Assume that 'objects' is an array (or std::vector or other container)
  // holding Objects.
  const Object& objectA = objects[indexA];
  const Object& objectB = objects[indexB];

  // Now call the function that does the actual evaluation on the objects and
  // return its result.
  return Evaluate(objectA, objectB);
}
@endcode

As written earlier, the documentation for
\ref mlpack::kernel::PSpectrumStringKernel "PSpectrumStringKernel" is a good
place to consult for further reference on this.
That kernel uses two-dimensional indices; one dimension represents the index of
the string, and the other represents whether it is referring to the query set
or the reference set. If your kernel is meant to work on separate query and
reference sets, that strategy should be considered.

@section further_doc_fmkstut Further documentation

For further documentation on the FastMKS class, consult the
\ref mlpack::fastmks::FastMKS "complete API documentation".

*/

/*!
@file kmeans.txt
@author Ryan Curtin
@brief Tutorial for how to use k-means in mlpack.

@page kmtutorial K-Means tutorial (kmeans)

@section intro_kmtut Introduction

The popular k-means algorithm for clustering has been around since the late
1950s, and the standard algorithm was proposed by Stuart Lloyd in 1957. Given
a set of points \f$ X \f$, k-means clustering aims to partition each point
\f$ x_i \f$ into a cluster \f$ c_j \f$ (where \f$ j \le k \f$ and \f$ k \f$,
the number of clusters, is a parameter). The partitioning is done to minimize
the objective function

\f[
\sum_{j = 1}^{k} \sum_{x_i \in c_j} \| x_i - \mu_j \|^2
\f]

where \f$\mu_j\f$ is the centroid of cluster \f$c_j\f$. The standard algorithm
is a two-step algorithm:

- \b Assignment \b step. Each point \f$x_i\f$ in \f$X\f$ is assigned to the
  cluster whose centroid it is closest to.
- \b Update \b step. Using the new cluster assignments, the centroids of each
  cluster are recalculated.

The algorithm has converged when no more assignment changes are happening with
each iteration. However, this algorithm can get stuck in local minima of the
objective function and is particularly sensitive to the initial cluster
assignments. Also, situations can arise where the algorithm will never
converge but reaches steady state -- for instance, one point may be changing
between two cluster assignments.

There is vast literature on the k-means algorithm and its uses, as well as
strategies for choosing initial points effectively and keeping the algorithm
from converging in local minima. \b mlpack does implement some of these,
notably the Bradley-Fayyad algorithm (see the reference below) for choosing
refined initial points. Importantly, the C++ \c KMeans class makes it very
easy to improve the k-means algorithm in a modular way.

@code
@inproceedings{bradley1998refining,
  title={Refining initial points for k-means clustering},
  author={Bradley, Paul S. and Fayyad, Usama M.},
  booktitle={Proceedings of the Fifteenth International Conference on Machine
      Learning (ICML 1998)},
  volume={66},
  year={1998}
}
@endcode

\b mlpack provides:

- a \ref cli_kmtut "simple command-line executable" to run k-means
- a \ref kmeans_kmtut "simple C++ interface" to run k-means
- a \ref kmeans_template_kmtut "generic, extensible, and powerful C++ class"
  for complex usage

@section toc_kmtut Table of Contents

A list of all the sections this tutorial contains.
- \ref intro_kmtut
- \ref toc_kmtut
- \ref cli_kmtut
- \ref cli_ex1_kmtut
- \ref cli_ex2_kmtut
- \ref cli_ex3_kmtut
- \ref cli_ex4_kmtut
- \ref cli_ex6_kmtut
- \ref cli_ex7_kmtut
- \ref kmeans_kmtut
- \ref kmeans_ex1_kmtut
- \ref kmeans_ex2_kmtut
- \ref kmeans_ex3_kmtut
- \ref kmeans_ex5_kmtut
- \ref kmeans_ex6_kmtut
- \ref kmeans_ex7_kmtut
- \ref kmeans_template_kmtut
- \ref kmeans_metric_kmtut
- \ref kmeans_initial_partition_kmtut
- \ref kmeans_empty_cluster_kmtut
- \ref kmeans_lloyd_kmtut
- \ref further_doc_kmtut

@section cli_kmtut Command-Line 'kmeans'

\b mlpack provides a command-line executable, \c mlpack_kmeans, to allow easy
execution of the k-means algorithm on data. Complete documentation of the
executable can be found by typing

@code
$ mlpack_kmeans --help
@endcode

As of October 2014, support for overclustering has been removed due to bugs and
lack of usage. If you were using this support, or are interested in it, please
file a bug or get in touch with the \b mlpack developers in some way so that
the support can be re-implemented.

Below are several examples demonstrating simple use of the \c mlpack_kmeans
executable.

@subsection cli_ex1_kmtut Simple k-means clustering

We want to find 5 clusters using the points in the file dataset.csv. By
default, if any of the clusters end up empty, that cluster will be
reinitialized to contain the point furthest from the cluster with maximum
variance. The cluster assignments of each point will be stored in
assignments.csv. Each row in assignments.csv will correspond to the row in
dataset.csv.

@code
$ mlpack_kmeans -c 5 -i dataset.csv -v -o assignments.csv
@endcode

@subsection cli_ex2_kmtut Saving the resulting centroids

Sometimes it is useful to save the centroids of the clusters found by k-means;
one example might be for plotting the points. The \c -C (\c --centroid_file)
option allows specification of a file into which the centroids will be saved
(one centroid per line, if it is a CSV or other text format).

@code
$ mlpack_kmeans -c 5 -i dataset.csv -v -o assignments.csv -C centroids.csv
@endcode

@subsection cli_ex3_kmtut Allowing empty clusters

If you would like to allow empty clusters to exist, instead of reinitializing
them, simply specify the \c -e (\c --allow_empty_clusters) option. Note that
when you save your clusters, some of the clusters may be filled with NaNs.
This is expected behavior -- if a cluster has no points, the concept of a
centroid makes no sense.

@code
$ mlpack_kmeans -c 5 -i dataset.csv -v -o assignments.csv -e -C centroids.csv
@endcode

@subsection cli_ex4_kmtut Limiting the maximum number of iterations

As mentioned earlier, the k-means algorithm can often fail to converge. In
such a situation, it may be useful to stop the algorithm by way of limiting the
maximum number of iterations. This can be done with the \c -m
(\c --max_iterations) parameter, which is set to 1000 by default. If the
maximum number of iterations is 0, the algorithm will run until convergence --
or potentially forever. The example below sets a maximum of 250 iterations.

@code
$ mlpack_kmeans -c 5 -i dataset.csv -v -o assignments.csv -m 250
@endcode

@subsection cli_ex6_kmtut Using Bradley-Fayyad "refined start"

The method proposed by Bradley and Fayyad in their paper "Refining initial
points for k-means clustering" is implemented in \b mlpack. This strategy
samples points from the dataset and runs k-means clustering on those points
multiple times, saving the resulting clusters.
Then, k-means clustering is run on those clusters, yielding the original number
of clusters. The centroids of those resulting clusters are used as initial
centroids for k-means clustering on the entire dataset.

This technique generally gives better initial points than the default random
partitioning, but depending on the parameters, it can take much longer. This
initialization technique is enabled with the \c -r (\c --refined_start) option.
The \c -S (\c --samplings) parameter controls how many samplings of the dataset
are performed, and the \c -p (\c --percentage) parameter controls how much of
the dataset is randomly sampled for each sampling (it must be between 0.0 and
1.0). For more information on the refined start technique, see the paper
referenced in the introduction of this tutorial.

The example below performs k-means clustering, giving 5 clusters, using the
refined start technique, sampling 10% of the dataset 25 times to produce the
initial centroids.

@code
$ mlpack_kmeans -c 5 -i dataset.csv -v -o assignments.csv -r -S 25 -p 0.1
@endcode

@subsection cli_ex7_kmtut Using different k-means algorithms

The \c mlpack_kmeans program implements six different strategies for
clustering; each of these gives the exact same results, but will have different
runtimes. The particular algorithm to use can be specified with the \c -a or
\c --algorithm option. The choices are:

- \c naive: the standard Lloyd iteration; takes \f$O(kN)\f$ time per
  iteration.
- \c pelleg-moore: the 'blacklist' algorithm, which builds a kd-tree on the
  data. This can be fast when k is small and the dimensionality is reasonably
  low.
- \c elkan: Elkan's algorithm for k-means, which maintains upper and lower
  distance bounds between each point and each centroid. This can be very
  fast, but it does not scale well to the case of large N or k, and uses a lot
  of memory.
- \c hamerly: Hamerly's algorithm is a variant of Elkan's algorithm that
  handles memory usage much better and thus can operate with much larger
  datasets than Elkan's algorithm.
- \c dualtree: The dual-tree algorithm for k-means builds a kd-tree on both
  the centroids and the points in order to prune away as much work as
  possible. This algorithm is most effective when both N and k are large.
- \c dualtree-covertree: This is the dual-tree algorithm using cover trees
  instead of kd-trees. It satisfies the runtime guarantees specified in the
  dual-tree k-means paper.

In general, the \c naive algorithm will be much slower than the others on
datasets that are larger than tiny.

The example below uses the \c dualtree algorithm to perform k-means clustering
with 5 clusters on the dataset in \c dataset.csv, using the initial centroids
in \c initial_centroids.csv, saving the resulting cluster assignments to
\c assignments.csv:

@code
$ mlpack_kmeans -i dataset.csv -c 5 -v -I initial_centroids.csv -a dualtree \
> -o assignments.csv
@endcode

@section kmeans_kmtut The 'KMeans' class

The \c KMeans<> class (with default template parameters) provides a simple way
to run k-means clustering using \b mlpack in C++. The default template
parameters for \c KMeans<> will initialize cluster assignments randomly and
disallow empty clusters. When an empty cluster is encountered, the point
furthest from the cluster with maximum variance is set to the centroid of the
empty cluster.
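To connect the API back to the objective function given in the introduction,
below is a small illustrative sketch (not an mlpack function) that computes the
k-means objective value for a finished clustering; it assumes \c data,
\c assignments, and \c centroids filled in as in the examples of the following
subsections.

@code
// Illustrative helper (not part of mlpack): compute the k-means objective,
// i.e., the sum of squared distances from each point to its assigned centroid.
double KMeansObjective(const arma::mat& data,
                       const arma::Row<size_t>& assignments,
                       const arma::mat& centroids)
{
  double objective = 0.0;
  for (size_t i = 0; i < data.n_cols; ++i)
  {
    // Squared Euclidean distance between point i and its centroid.
    const arma::vec diff = data.col(i) - centroids.col(assignments[i]);
    objective += arma::dot(diff, diff);
  }
  return objective;
}
@endcode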
@subsection kmeans_ex1_kmtut Running k-means and getting cluster assignments

The simplest way to use the \c KMeans<> class is to pass in a dataset and a
number of clusters, and receive the cluster assignments in return. Note that
the dataset must be column-major -- that is, one column corresponds to one
point. See \ref matrices "the matrices guide" for more information.

@code
#include <mlpack/methods/kmeans/kmeans.hpp>

using namespace mlpack::kmeans;

// The dataset we are clustering.
extern arma::mat data;
// The number of clusters we are getting.
extern size_t clusters;

// The assignments will be stored in this vector.
arma::Row<size_t> assignments;

// Initialize with the default arguments.
KMeans<> k;
k.Cluster(data, clusters, assignments);
@endcode

Now, the vector \c assignments holds the cluster assignments of each point in
the dataset.

@subsection kmeans_ex2_kmtut Running k-means and getting centroids of clusters

Often it is useful to not only have the cluster assignments, but also the
centroids of each cluster. Another overload of \c Cluster() makes this easily
possible:

@code
#include <mlpack/methods/kmeans/kmeans.hpp>

using namespace mlpack::kmeans;

// The dataset we are clustering.
extern arma::mat data;
// The number of clusters we are getting.
extern size_t clusters;

// The assignments will be stored in this vector.
arma::Row<size_t> assignments;
// The centroids will be stored in this matrix.
arma::mat centroids;

// Initialize with the default arguments.
KMeans<> k;
k.Cluster(data, clusters, assignments, centroids);
@endcode

Note that the centroids matrix has columns equal to the number of clusters and
rows equal to the dimensionality of the dataset. Each column represents the
centroid of the according cluster -- \c centroids.col(0) represents the
centroid of the first cluster.

@subsection kmeans_ex3_kmtut Limiting the maximum number of iterations

The first argument to the constructor allows specification of the maximum
number of iterations. This is useful because often, the k-means algorithm does
not converge, and is terminated after a number of iterations. Setting this
parameter to 0 indicates that the algorithm will run until convergence -- note
that in some cases, convergence may never happen. The default maximum number
of iterations is 1000.

@code
// The first argument is the maximum number of iterations. Here we set it to
// 500 iterations.
KMeans<> k(500);
@endcode

Then you can run \c Cluster() as normal.

@subsection kmeans_ex5_kmtut Setting initial cluster assignments

If you have an initial guess for the cluster assignments for each point, you
can fill the assignments vector with the guess and then pass an extra boolean
(initialAssignmentGuess) as true to the \c Cluster() method. Below are
examples for either overload of \c Cluster().

@code
#include <mlpack/methods/kmeans/kmeans.hpp>

using namespace mlpack::kmeans;

// The dataset we are clustering on.
extern arma::mat dataset;
// The number of clusters we are obtaining.
extern size_t clusters;

// A vector pre-filled with initial assignment guesses.
extern arma::Row<size_t> assignments;

KMeans<> k;

// The boolean set to true indicates that our assignments vector is filled with
// initial guesses.
k.Cluster(dataset, clusters, assignments, true);
@endcode

@code
#include <mlpack/methods/kmeans/kmeans.hpp>

using namespace mlpack::kmeans;

// The dataset we are clustering on.
extern arma::mat dataset;
// The number of clusters we are obtaining.
extern size_t clusters;

// A vector pre-filled with initial assignment guesses.
extern arma::Row<size_t> assignments;

// This will hold the centroids of the finished clusters.
arma::mat centroids;

KMeans<> k;

// The boolean set to true indicates that our assignments vector is filled with
// initial guesses.
k.Cluster(dataset, clusters, assignments, centroids, true);
@endcode

@note
If you have a heuristic or algorithm which makes initial guesses, a more
elegant solution is to create a new class fulfilling the
InitialPartitionPolicy template policy. See
\ref kmeans_initial_partition_kmtut "the section about changing the initial
partitioning strategy" for more details.

@par
@note
If you set the InitialPartitionPolicy parameter to something other than the
default but give an initial cluster assignment guess, the
InitialPartitionPolicy will not be used to initialize the algorithm. See
\ref kmeans_initial_partition_kmtut "the section about changing the initial
partitioning strategy" for more details.

@subsection kmeans_ex6_kmtut Setting initial cluster centroids

Just as important as the ability to make initial cluster assignment guesses is
the ability to make initial cluster centroid guesses without having to assign
each point in the dataset to an initial cluster. This is similar to the
previous section, but now you must pass two extra booleans -- the first
(initialAssignmentGuess) as false, indicating that there are not initial
cluster assignment guesses, and the second (initialCentroidGuess) as true,
indicating that the centroids matrix is filled with initial centroid guesses.

This, of course, only works with the overload of \c Cluster() that takes a
matrix to put the resulting centroids in. Below is an example.

@code
#include <mlpack/methods/kmeans/kmeans.hpp>

using namespace mlpack::kmeans;

// The dataset we are clustering on.
extern arma::mat dataset;
// The number of clusters we are obtaining.
extern size_t clusters;

// A matrix pre-filled with guesses for the initial cluster centroids.
extern arma::mat centroids;

// This will be filled with the final cluster assignments for each point.
arma::Row<size_t> assignments;

KMeans<> k;

// Remember, the first boolean indicates that we are not giving initial
// assignment guesses, and the second boolean indicates that we are giving
// initial centroid guesses.
k.Cluster(dataset, clusters, assignments, centroids, false, true);
@endcode

@note
If you have a heuristic or algorithm which makes initial guesses, a more
elegant solution is to create a new class fulfilling the
InitialPartitionPolicy template policy. See
\ref kmeans_initial_partition_kmtut "the section about changing the initial
partitioning strategy" for more details.

@par
@note
If you set the InitialPartitionPolicy parameter to something other than the
default but give an initial cluster centroid guess, the
InitialPartitionPolicy will not be used to initialize the algorithm. See
\ref kmeans_initial_partition_kmtut "the section about changing the initial
partitioning strategy" for more details.

@subsection kmeans_ex7_kmtut Running sparse k-means

The \c Cluster() function can work on both sparse and dense matrices, so all of
the above examples can be used with sparse matrices instead, if the fifth
template parameter is modified. Below is a simple example. Note that the
centroids are returned as a dense matrix, because the centroids of collections
of sparse points are not generally sparse.

@code
// The sparse dataset.
extern arma::sp_mat sparseDataset;
// The number of clusters.
extern size_t clusters;

// The assignments will be stored in this vector.
arma::Row<size_t> assignments;
// The centroids of each cluster will be stored in this dense matrix.
arma::mat centroids;

// We must change the fifth (and last) template parameter.
KMeans<metric::EuclideanDistance, SampleInitialization, MaxVarianceNewCluster,
       NaiveKMeans, arma::sp_mat> k;
k.Cluster(sparseDataset, clusters, assignments, centroids);
@endcode

@section kmeans_template_kmtut Template parameters for the 'KMeans' class

The \c KMeans<> class also takes five template parameters, which can be
modified to change the behavior of the k-means algorithm:

- \c MetricType: controls the distance metric used for clustering (by
  default, the squared Euclidean distance is used)
- \c InitialPartitionPolicy: the method by which initial clusters are set; by
  default, \ref mlpack::kmeans::SampleInitialization "SampleInitialization" is
  used
- \c EmptyClusterPolicy: the action taken when an empty cluster is
  encountered; by default,
  \ref mlpack::kmeans::MaxVarianceNewCluster "MaxVarianceNewCluster" is used
- \c LloydStepType: this defines the strategy used to make a single Lloyd
  iteration; by default this is the typical Lloyd iteration specified in
  \ref mlpack::kmeans::NaiveKMeans "NaiveKMeans"
- \c MatType: type of data matrix to use for clustering

The class is defined like below:

@code
template<
  typename DistanceMetric = mlpack::metric::SquaredEuclideanDistance,
  typename InitialPartitionPolicy = SampleInitialization,
  typename EmptyClusterPolicy = MaxVarianceNewCluster,
  template<class, class> class LloydStepType = NaiveKMeans,
  typename MatType = arma::mat
>
class KMeans;
@endcode

In the following sections, each policy is described further, with examples of
how to modify them.

@subsection kmeans_metric_kmtut Changing the distance metric used for k-means

Most machine learning algorithms in \b mlpack support modifying the distance
metric, and \c KMeans<> is no exception. Similar to
\ref mlpack::neighbor::NeighborSearch "NeighborSearch" (see
\ref metric_type_doc_nstut "the section in the NeighborSearch tutorial"), any
class in mlpack::metric can be given as an argument. The
mlpack::metric::LMetric class is a good example implementation.

A class fulfilling the MetricType policy must provide the following two
functions:

@code
// Empty constructor is required.
MetricType();

// Compute the distance between two points.
template<typename VecType>
double Evaluate(const VecType& a, const VecType& b);
@endcode

Most of the standard metrics that could be used are stateless and therefore the
\c Evaluate() method is implemented statically. However, there are metrics,
such as the Mahalanobis distance (mlpack::metric::MahalanobisDistance), that
store state. To this end, an instantiated MetricType object is stored within
the \c KMeans class.

The example below shows how to pass an instantiated MahalanobisDistance in the
constructor.

@code
// The initialized Mahalanobis distance.
extern mlpack::metric::MahalanobisDistance distance;

// We keep the default arguments for the maximum number of iterations, but pass
// our instantiated metric.
KMeans<mlpack::metric::MahalanobisDistance> k(1000, distance);
@endcode

@note
While the MetricType policy only requires two methods, one of which is an
empty constructor, more can always be added.
mlpack::metric::MahalanobisDistance also has constructors with parameters,
because it is a stateful metric.

@subsection kmeans_initial_partition_kmtut Changing the initial partitioning strategy used for k-means

There have been many initial cluster strategies for k-means proposed in the
literature. Fortunately, the \c KMeans<> class makes it very easy to implement
one of these methods and plug it in without needing to modify the existing
algorithm code at all.
By default, the \c KMeans<> class uses mlpack::kmeans::SampleInitialization,
which randomly samples points as initial centroids. However, writing a new
policy is simple; it only needs to implement the following functions:

@code
// Empty constructor is required.
InitialPartitionPolicy();

// Only *one* of the following two functions is required! You should implement
// whichever you find more convenient to implement.

// This function is called to initialize the clusters and returns centroids.
template<typename MatType>
void Cluster(MatType& data,
             const size_t clusters,
             arma::mat& centroids);

// This function is called to initialize the clusters and returns individual
// point assignments. The centroids will then be calculated from the given
// assignments.
template<typename MatType>
void Cluster(MatType& data,
             const size_t clusters,
             arma::Row<size_t>& assignments);
@endcode

The templatization of the \c Cluster() function allows both dense and sparse
matrices to be passed in. If the desired policy does not work with sparse (or
dense) matrices, then the method can be written specifically for one type of
matrix -- however, be warned that if you try to use \c KMeans with that policy
and the wrong type of matrix, you will get many ugly compilation errors!

@code
// The Cluster() function specialized for dense matrices.
void Cluster(arma::mat& data,
             const size_t clusters,
             arma::Row<size_t>& assignments);
@endcode

Note that only one of the two possible \c Cluster() functions is required.
This is because sometimes it is easier to express an initial partitioning
policy as something that returns point assignments, and sometimes it is easier
to express the policy as something that returns centroids. The KMeans<> class
will use whichever of these two functions is given; if both are given, the
overload that returns centroids will be preferred.

One alternative to the default SampleInitialization policy is the RefinedStart
policy, which is an implementation of the Bradley and Fayyad approach for
finding initial points detailed in "Refined initial points for k-means
clustering" and other places in this document. Another option is the
RandomPartition class, which randomly assigns points to clusters, but this may
not work very well for most settings. See the documentation for
mlpack::kmeans::RefinedStart and mlpack::kmeans::RandomPartition for more
information.

If the \c Cluster() method returns point assignments instead of centroids, then
valid initial assignments must be returned for every point in the dataset.

As with the MetricType template parameter, an initialized
InitialPartitionPolicy can be passed to the constructor of \c KMeans as a
fourth argument.
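As an illustration of this policy, below is a minimal sketch of a hypothetical
custom InitialPartitionPolicy (this class is not part of \b mlpack) which
assigns points to clusters in round-robin order:

@code
// A hypothetical initial partitioning policy, shown only to illustrate the
// interface: point i is assigned to cluster (i mod clusters).
class RoundRobinInitialization
{
 public:
  // Empty constructor, as the policy requires.
  RoundRobinInitialization() { }

  // Return initial point assignments; the centroids will then be computed
  // from these by the KMeans class.
  template<typename MatType>
  void Cluster(MatType& data,
               const size_t clusters,
               arma::Row<size_t>& assignments)
  {
    assignments.set_size(data.n_cols);
    for (size_t i = 0; i < data.n_cols; ++i)
      assignments[i] = i % clusters;
  }
};
@endcode

Such a class could then be given as the InitialPartitionPolicy template
argument (the second template parameter) of \c KMeans<>.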
@subsection kmeans_empty_cluster_kmtut Changing the action taken when an empty cluster is encountered

Sometimes, during clustering, a situation will arise where a cluster has no
points in it. The \c KMeans class allows easy customization of the action to
be taken when this occurs. By default, the point furthest from the centroid of
the cluster with maximum variance is taken as the centroid of the empty
cluster; this is implemented in the mlpack::kmeans::MaxVarianceNewCluster
class. Another alternative choice is the mlpack::kmeans::AllowEmptyClusters
class, which simply allows empty clusters to persist.

A custom policy can be written and it must implement the following methods:

@code
// Empty constructor is required.
EmptyClusterPolicy();

// This function is called when an empty cluster is encountered. emptyCluster
// indicates the cluster which is empty, and then the clusterCounts and
// assignments are meant to be modified by the function. The function should
// return the number of modified points.
template<typename MatType>
size_t EmptyCluster(const MatType& data,
                    const size_t emptyCluster,
                    const MatType& centroids,
                    arma::Col<size_t>& clusterCounts,
                    arma::Row<size_t>& assignments);
@endcode

The \c EmptyCluster() function is called for each cluster that is empty at each
iteration of the algorithm. As with InitialPartitionPolicy, the
\c EmptyCluster() function does not need to be generalized to support both
dense and sparse matrices -- but usage with the wrong type of matrix will cause
compilation errors.

Like the other template parameters to \c KMeans, EmptyClusterPolicy
implementations that have state can be passed to the constructor of \c KMeans
as a fifth argument. See the kmeans::KMeans documentation for further details.

@subsection kmeans_lloyd_kmtut The LloydStepType template parameter

The internal algorithm used for a single step of the k-means algorithm can
easily be changed; \b mlpack implements several existing classes that satisfy
the \c LloydStepType policy:

- mlpack::kmeans::NaiveKMeans
- mlpack::kmeans::ElkanKMeans
- mlpack::kmeans::HamerlyKMeans
- mlpack::kmeans::PellegMooreKMeans
- mlpack::kmeans::DualTreeKMeans

Note that the \c LloydStepType policy is itself a template template parameter,
and must accept two template parameters of its own:

- \c MetricType: the type of metric to use
- \c MatType: the type of data matrix to use

The \c LloydStepType policy also mandates three functions:

- a constructor:
  \c "LloydStepType(const MatType& dataset, MetricType& metric);"
- an \c Iterate() function:

@code
/**
 * Run a single iteration of the Lloyd algorithm, updating the given centroids
 * into the newCentroids matrix. If any cluster is empty (that is, if any
 * cluster has no points assigned to it), then the centroid associated with
 * that cluster may be filled with invalid data (it will be corrected later).
 *
 * @param centroids Current cluster centroids.
 * @param newCentroids New cluster centroids.
 * @param counts Number of points in each cluster at the end of the iteration.
 */
double Iterate(const arma::mat& centroids,
               arma::mat& newCentroids,
               arma::Col<size_t>& counts);
@endcode

- a function to get the number of distance calculations:

@code
size_t DistanceCalculations() const { return distanceCalculations; }
@endcode

Note that \c Iterate() does not need to return valid centroids if the cluster
is empty. This is because \c EmptyClusterPolicy will handle the empty
centroid. This behavior can be used to avoid small amounts of computation.

For examples, see the five aforementioned implementations of classes that
satisfy the \c LloydStepType policy.

@section further_doc_kmtut Further documentation

For further documentation on the KMeans class, consult the
\ref mlpack::kmeans::KMeans "complete API documentation".

*/

/*!
@file linear_regression.txt
@author James Cline
@brief Tutorial for how to use the LinearRegression class.
@page lrtutorial Linear/ridge regression tutorial (mlpack_linear_regression)

@section intro_lrtut Introduction

Linear regression and ridge regression are simple machine learning techniques
that aim to estimate the parameters of a linear model. Assuming we have
\f$n\f$ \b predictor points \f$\mathbf{x_i}, 0 \le i < n\f$ of dimensionality
\f$d\f$ and \f$n\f$ responses \f$y_i, 0 \le i < n\f$, we are trying to estimate
the best fit for \f$\beta_i, 0 \le i \le d\f$ in the linear model

\f[
y_i = \beta_0 + \displaystyle\sum_{j = 1}^{d} \beta_j x_{ij}
\f]

for each predictor \f$\mathbf{x_i}\f$ and response \f$y_i\f$. If we take each
predictor \f$\mathbf{x_i}\f$ as a row in the matrix \f$\mathbf{X}\f$ and each
response \f$y_i\f$ as an entry of the vector \f$\mathbf{y}\f$, we can represent
the model in vector form:

\f[
\mathbf{y} = \mathbf{X} \mathbf{\beta} + \beta_0
\f]

The result of this method is the vector \f$\mathbf{\beta}\f$, including the
offset term (or intercept term) \f$\beta_0\f$.

\b mlpack provides:

- a \ref cli_lrtut "simple command-line executable" to perform linear
  regression or ridge regression
- a \ref linreg_lrtut "simple C++ interface" to perform linear regression or
  ridge regression

@section toc_lrtut Table of Contents

A list of all the sections this tutorial contains.

- \ref intro_lrtut
- \ref toc_lrtut
- \ref cli_lrtut
- \ref cli_ex1_lrtut
- \ref cli_ex2_lrtut
- \ref cli_ex3_lrtut
- \ref cli_ex4_lrtut
- \ref linreg_lrtut
- \ref linreg_ex1_lrtut
- \ref linreg_ex2_lrtut
- \ref linreg_ex3_lrtut
- \ref linreg_ex4_lrtut
- \ref linreg_ex5_lrtut
- \ref further_doc_lrtut

@section cli_lrtut Command-Line 'mlpack_linear_regression'

The simplest way to perform linear regression or ridge regression in \b mlpack
is to use the \c mlpack_linear_regression executable. This program will
perform linear regression and place the resultant coefficients into one file.

The output file holds a vector of coefficients in increasing order of
dimension; that is, the offset term (\f$\beta_0\f$), the coefficient for
dimension 1 (\f$\beta_1\f$), then dimension 2 (\f$\beta_2\f$), and so forth.
This executable can also predict the \f$y\f$ values of a second dataset based
on the computed coefficients.

Below are several examples of simple usage (and the resultant output). The
\c -v option is used so that verbose output is given. Further documentation on
each individual option can be found by typing

@code
$ mlpack_linear_regression --help
@endcode

@subsection cli_ex1_lrtut One file, generating the function coefficients

@code
$ mlpack_linear_regression --training_file dataset.csv -v -M lr.xml
[INFO ] Loading 'dataset.csv' as CSV data. Size is 2 x 5.
[INFO ]
[INFO ] Execution parameters:
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_model_file: ""
[INFO ] lambda: 0
[INFO ] output_model_file: lr.xml
[INFO ] output_predictions: predictions.csv
[INFO ] test_file: ""
[INFO ] training_file: dataset.csv
[INFO ] training_responses: ""
[INFO ] verbose: true
[INFO ] version: false
[INFO ]
[INFO ] Program timers:
[INFO ] load_regressors: 0.000263s
[INFO ] loading_data: 0.000220s
[INFO ] regression: 0.000392s
[INFO ] total_time: 0.001920s
@endcode

Convenient program timers are given for different parts of the calculation at
the bottom of the output, as well as the parameters the simulation was run
with.
Now, if we look at the output model file, which is \c lr.xml,

@code
$ cat dataset.csv
0,0
1,1
2,2
3,3
4,4

$ cat lr.xml
<?xml version="1.0" encoding="utf-8"?>
<boost_serialization>
<linearRegressionModel>
  <parameters>
    <n_rows>2</n_rows>
    <n_cols>1</n_cols>
    <n_elem>2</n_elem>
    <vec_state>1</vec_state>
    <item>-3.97205464519563669e-16</item>
    <item>1.00000000000000022e+00</item>
  </parameters>
  <lambda>0.00000000000000000e+00</lambda>
  <intercept>1</intercept>
</linearRegressionModel>
</boost_serialization>
@endcode

As you can see, the function for this input is \f$f(y)=0+1x_1\f$. We can see
that the model we have trained catches this; in the \c parameters section of
\c lr.xml, we can see that there are two elements, which are (approximately) 0
and 1. The first element corresponds to the intercept 0, and the second
element corresponds to the coefficient 1 for the variable \f$x_1\f$. Note that
in this example, the regressors for the dataset are the second column. That
is, the dataset is one-dimensional, and the last column has the \f$y\f$ values,
or responses, for each row. You can specify these responses in a separate file
if you want, using the \c --training_responses, or \c -r, option.

@subsection cli_ex2_lrtut Compute model and predict at the same time

@code
$ mlpack_linear_regression --training_file dataset.csv --test_file predict.csv \
> -v
[INFO ] Loading 'dataset.csv' as CSV data. Size is 2 x 5.
[INFO ] Loading 'predict.csv' as raw ASCII formatted data. Size is 1 x 3.
[INFO ] Saving CSV data to 'predictions.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_model_file: ""
[INFO ] lambda: 0
[INFO ] output_model_file: ""
[INFO ] output_predictions: predictions.csv
[INFO ] test_file: predict.csv
[INFO ] training_file: dataset.csv
[INFO ] training_responses: ""
[INFO ] verbose: true
[INFO ] version: false
[INFO ]
[INFO ] Program timers:
[INFO ] load_regressors: 0.000371s
[INFO ] load_test_points: 0.000229s
[INFO ] loading_data: 0.000491s
[INFO ] prediction: 0.000075s
[INFO ] regression: 0.000449s
[INFO ] saving_data: 0.000186s
[INFO ] total_time: 0.002731s

$ cat dataset.csv
0,0
1,1
2,2
3,3
4,4

$ cat predict.csv
2
3
4

$ cat predictions.csv
2.0000000000e+00
3.0000000000e+00
4.0000000000e+00
@endcode

We used the same dataset, so we got the same parameters. The key thing to note
about the \c predict.csv dataset is that it has the same dimensionality as the
dataset used to create the model, one. If the model generating dataset has
\f$d\f$ dimensions, so must the dataset we want to predict for.

@subsection cli_ex3_lrtut Prediction using a precomputed model

@code
$ mlpack_linear_regression --input_model_file lr.xml --test_file predict.csv -v
[INFO ] Loading 'predict.csv' as raw ASCII formatted data. Size is 1 x 3.
[INFO ] Saving CSV data to 'predictions.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_model_file: lr.xml
[INFO ] lambda: 0
[INFO ] output_model_file: ""
[INFO ] output_predictions: predictions.csv
[INFO ] test_file: predict.csv
[INFO ] training_file: ""
[INFO ] training_responses: ""
[INFO ] verbose: true
[INFO ] version: false
[INFO ]
[INFO ] Program timers:
[INFO ] load_model: 0.000264s
[INFO ] load_test_points: 0.000186s
[INFO ] loading_data: 0.000157s
[INFO ] prediction: 0.000098s
[INFO ] saving_data: 0.000157s
[INFO ] total_time: 0.001688s

$ cat lr.xml
<?xml version="1.0" encoding="utf-8"?>
<boost_serialization>
<linearRegressionModel>
  <parameters>
    <n_rows>2</n_rows>
    <n_cols>1</n_cols>
    <n_elem>2</n_elem>
    <vec_state>1</vec_state>
    <item>-3.97205464519563669e-16</item>
    <item>1.00000000000000022e+00</item>
  </parameters>
  <lambda>0.00000000000000000e+00</lambda>
  <intercept>1</intercept>
</linearRegressionModel>
</boost_serialization>

$ cat predict.csv
2
3
4

$ cat predictions.csv
2.0000000000e+00
3.0000000000e+00
4.0000000000e+00
@endcode

@subsection cli_ex4_lrtut Using ridge regression

Sometimes, the input matrix of predictors has a covariance matrix that is not
invertible, or the system is overdetermined.
In this case, ridge regression is useful: it adds a regularization term to the
covariance matrix to make it invertible.  Ridge regression is a standard
technique and documentation for the mathematics behind it can be found anywhere
on the Internet.  In short, the covariance matrix

\f[ \mathbf{X}' \mathbf{X} \f]

is replaced with

\f[ \mathbf{X}' \mathbf{X} + \lambda \mathbf{I} \f]

where \f$\mathbf{I}\f$ is the identity matrix.  So, a \f$\lambda\f$ parameter
greater than zero should be specified to perform ridge regression, using the
\c --lambda (or \c -l) option.  An example is given below.

@code
$ mlpack_linear_regression --training_file dataset.csv -v --lambda 0.5 -M lr.xml
[INFO ] Loading 'dataset.csv' as CSV data. Size is 2 x 5.
[INFO ]
[INFO ] Execution parameters:
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_model_file: ""
[INFO ] lambda: 0.5
[INFO ] output_model_file: lr.xml
[INFO ] output_predictions: predictions.csv
[INFO ] test_file: ""
[INFO ] training_file: dataset.csv
[INFO ] training_responses: ""
[INFO ] verbose: true
[INFO ] version: false
[INFO ]
[INFO ] Program timers:
[INFO ] load_regressors: 0.000210s
[INFO ] loading_data: 0.000170s
[INFO ] regression: 0.000332s
[INFO ] total_time: 0.001835s
@endcode

Further documentation on options can be found by using the \c --help option.

@section linreg_lrtut The 'LinearRegression' class

The 'LinearRegression' class is a simple implementation of linear regression.
Using the LinearRegression class is very simple.  It has two available
constructors; one for generating a model from a matrix of predictors and a
vector of responses, and an empty constructor for a model whose parameters will
be set later (for instance, by loading a saved model with \c data::Load()).
The class provides one method that performs computation:

@code
void Predict(const arma::mat& points, arma::vec& predictions);
@endcode

Once you have generated or loaded a model, you can call this method and pass it
a matrix of data points to predict values for using the model.  The second
parameter, predictions, will be modified to contain the predicted values
corresponding to each column of the points matrix.

@subsection linreg_ex1_lrtut Generating a model

@code
#include <mlpack/methods/linear_regression/linear_regression.hpp>

using namespace mlpack::regression;

arma::mat data; // The dataset itself.
arma::vec responses; // The responses, one value for each data point (column) in data.

// Regress.
LinearRegression lr(data, responses);

// Get the parameters, or coefficients.
arma::vec parameters = lr.Parameters();
@endcode

@subsection linreg_ex2_lrtut Setting a model

Assuming you already have a model and do not need to create one, this is how
you would set the parameters for a LinearRegression instance.

@code
arma::vec parameters; // Your model.
LinearRegression lr; // Create a new LinearRegression instance or reuse one.
lr.Parameters() = parameters; // Set the model.
@endcode

@subsection linreg_ex3_lrtut Load a model from a file

If you have a generated model in a file somewhere you would like to load and
use, you can use \c data::Load() to load it.

@code
std::string filename; // The path and name of your file.
LinearRegression lr;
data::Load(filename, "lr_model", lr);
@endcode

@subsection linreg_ex4_lrtut Prediction

Once you have generated or loaded a model using one of the aforementioned
methods, you can predict values for a dataset.

@code
LinearRegression lr; // Load or generate your model.

// The dataset we want to predict on; each column is a data point.
arma::mat points;
// This will store the predictions; one element for each point.
arma::vec predictions;

lr.Predict(points, predictions); // Predict.
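// (Illustrative addition, not part of the original example: each entry of
// 'predictions' lines up with the corresponding column of 'points', so --
// assuming 'points' is non-empty -- the first predicted value can be
// inspected like this.)
Log::Info << "Prediction for the first point: " << predictions(0) << "."
    << std::endl;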
// Now, the vector 'predictions' will contain the predicted values.
@endcode

@subsection linreg_ex5_lrtut Setting lambda for ridge regression

As discussed in \ref cli_ex4_lrtut, ridge regression is useful when the
covariance of the predictors is not invertible.  The standard constructor can
be used to set a value of lambda:

@code
#include <mlpack/methods/linear_regression/linear_regression.hpp>

using namespace mlpack::regression;

arma::mat data; // The dataset itself.
arma::vec responses; // The responses, one value for each data point (column) in data.

// Regress, with a lambda of 0.5.
LinearRegression lr(data, responses, 0.5);

// Get the parameters, or coefficients.
arma::vec parameters = lr.Parameters();
@endcode

In addition, the \c Lambda() function can be used to get or modify the lambda
value:

@code
LinearRegression lr;
lr.Lambda() = 0.5;
Log::Info << "Lambda is " << lr.Lambda() << "." << std::endl;
@endcode

@section further_doc_lrtut Further documentation

For further documentation on the LinearRegression class, consult the \ref
mlpack::regression::LinearRegression "complete API documentation".

*/
mlpack-2.2.5/doc/tutorials/neighbor_search/000077500000000000000000000000001315013601400206765ustar00rootroot00000000000000mlpack-2.2.5/doc/tutorials/neighbor_search/neighbor_search.txt000066400000000000000000000371171315013601400245700ustar00rootroot00000000000000/*!
@file neighbor_search.txt
@author Ryan Curtin
@brief Tutorial for how to use the NeighborSearch class.

@page nstutorial NeighborSearch tutorial (k-nearest-neighbors)

@section intro_nstut Introduction

Nearest-neighbors search is a common machine learning task.  In this setting,
we have a \b query and a \b reference dataset.  For each point in the \b query
dataset, we wish to know the \f$k\f$ points in the \b reference dataset which
are closest to the given query point.  Alternately, if the query and reference
datasets are the same, the problem can be stated more simply: for each point in
the dataset, we wish to know the \f$k\f$ nearest points to that point.

\b mlpack provides:

 - a \ref cli_nstut "simple command-line executable" to run nearest-neighbors search (and furthest-neighbors search)
 - a \ref knn_nstut "simple C++ interface" to perform nearest-neighbors search (and furthest-neighbors search)
 - a \ref neighborsearch_nstut "generic, extensible, and powerful C++ class (NeighborSearch)" for complex usage

@section toc_nstut Table of Contents

A list of all the sections this tutorial contains.

 - \ref intro_nstut
 - \ref toc_nstut
 - \ref cli_nstut
 - \ref cli_ex1_nstut
 - \ref cli_ex2_nstut
 - \ref cli_ex3_nstut
 - \ref knn_nstut
 - \ref knn_ex1_nstut
 - \ref knn_ex2_nstut
 - \ref knn_ex3_nstut
 - \ref neighborsearch_nstut
 - \ref sort_policy_doc_nstut
 - \ref metric_type_doc_nstut
 - \ref mat_type_doc_nstut
 - \ref tree_type_doc_nstut
 - \ref traverser_type_doc_nstut
 - \ref further_doc_nstut

@section cli_nstut Command-Line 'mlpack_knn'

The simplest way to perform nearest-neighbors search in \b mlpack is to use the
\c mlpack_knn executable.  This program will perform nearest-neighbors search
and place the resultant neighbors into one file and the resultant distances
into another.  The output files are organized such that the first row
corresponds to the nearest neighbors of the first query point, with the first
column corresponding to the nearest neighbor, and so forth.

Below are several examples of simple usage (and the resultant output).  The
\c -v option is used so that output is given.
Further documentation on each individual option can be found by typing @code $ mlpack_knn --help @endcode @subsection cli_ex1_nstut One dataset, 5 nearest neighbors @code $ mlpack_knn -r dataset.csv -n neighbors_out.csv -d distances_out.csv -k 5 -v [INFO ] Loading 'dataset.csv' as CSV data. Size is 3 x 1000. [INFO ] Loaded reference data from 'dataset.csv' (3 x 1000). [INFO ] Building reference tree... [INFO ] Tree built. [INFO ] Searching for 5 nearest neighbors with dual-tree kd-tree search... [INFO ] 18412 node combinations were scored. [INFO ] 54543 base cases were calculated. [INFO ] Search complete. [INFO ] Saving CSV data to 'neighbors_out.csv'. [INFO ] Saving CSV data to 'distances_out.csv'. [INFO ] [INFO ] Execution parameters: [INFO ] distances_file: distances_out.csv [INFO ] help: false [INFO ] info: "" [INFO ] input_model_file: "" [INFO ] k: 5 [INFO ] leaf_size: 20 [INFO ] naive: false [INFO ] neighbors_file: neighbors_out.csv [INFO ] output_model_file: "" [INFO ] query_file: "" [INFO ] random_basis: false [INFO ] reference_file: dataset.csv [INFO ] seed: 0 [INFO ] single_mode: false [INFO ] tree_type: kd [INFO ] verbose: true [INFO ] version: false [INFO ] [INFO ] Program timers: [INFO ] computing_neighbors: 0.108968s [INFO ] loading_data: 0.006495s [INFO ] saving_data: 0.003843s [INFO ] total_time: 0.126036s [INFO ] tree_building: 0.003442s @endcode Convenient program timers are given for different parts of the calculation at the bottom of the output, as well as the parameters the simulation was run with. Now, if we look at the output files: @code $ head neighbors_out.csv 862,344,224,43,885 703,499,805,639,450 867,472,972,380,601 397,319,277,443,323 840,827,865,38,438 732,876,751,492,616 563,222,569,985,940 361,97,928,437,79 547,695,419,961,716 982,113,689,843,634 $ head distances_out.csv 5.986076164057e-02,7.664920518084e-02,1.116050961847e-01,1.155595474371e-01,1.169810085522e-01 7.532635022982e-02,1.012564715841e-01,1.127846944644e-01,1.209584396720e-01,1.216543647014e-01 7.659571546879e-02,1.014588981948e-01,1.025114621511e-01,1.128082429187e-01,1.131659758673e-01 2.079405647909e-02,4.710724516732e-02,7.597622408419e-02,9.171977778898e-02,1.037033340864e-01 7.082206779700e-02,9.002355499742e-02,1.044181406406e-01,1.093149568834e-01,1.139700558608e-01 5.688056488896e-02,9.478072514474e-02,1.085637706630e-01,1.114177921451e-01,1.139370265105e-01 7.882260880455e-02,9.454474078041e-02,9.724494179950e-02,1.023829575445e-01,1.066927013814e-01 7.005321598247e-02,9.131417221561e-02,9.498248889074e-02,9.897964162308e-02,1.121202216165e-01 5.295654132754e-02,5.509877761894e-02,8.108227366619e-02,9.785461174861e-02,1.043968140367e-01 3.992859920333e-02,4.471418646159e-02,7.346053904990e-02,9.181982339584e-02,9.843075910782e-02 @endcode So, the nearest neighbor to point 0 is point 862, with a distance of 5.986076164057e-02. The second nearest neighbor to point 0 is point 344, with a distance of 7.664920518084e-02. The third nearest neighbor to point 5 is point 751, with a distance of 1.085637706630e-01. @subsection cli_ex2_nstut Query and reference dataset, 10 nearest neighbors @code $ mlpack_knn -q query_dataset.csv -r reference_dataset.csv \ > -n neighbors_out.csv -d distances_out.csv -k 10 -v [INFO ] Loading 'reference_dataset.csv' as CSV data. Size is 3 x 1000. [INFO ] Loaded reference data from 'reference_dataset.csv' (3 x 1000). [INFO ] Building reference tree... [INFO ] Tree built. [INFO ] Loading 'query_dataset.csv' as CSV data. Size is 3 x 50. 
[INFO ] Loaded query data from 'query_dataset.csv' (3x50).
[INFO ] Searching for 10 nearest neighbors with dual-tree kd-tree search...
[INFO ] Building query tree...
[INFO ] Tree built.
[INFO ] Search complete.
[INFO ] Saving CSV data to 'neighbors_out.csv'.
[INFO ] Saving CSV data to 'distances_out.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ] distances_file: distances_out.csv
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_model_file: ""
[INFO ] k: 10
[INFO ] leaf_size: 20
[INFO ] naive: false
[INFO ] neighbors_file: neighbors_out.csv
[INFO ] output_model_file: ""
[INFO ] query_file: query_dataset.csv
[INFO ] random_basis: false
[INFO ] reference_file: reference_dataset.csv
[INFO ] seed: 0
[INFO ] single_mode: false
[INFO ] tree_type: kd
[INFO ] verbose: true
[INFO ] version: false
[INFO ]
[INFO ] Program timers:
[INFO ] computing_neighbors: 0.022589s
[INFO ] loading_data: 0.003572s
[INFO ] saving_data: 0.000755s
[INFO ] total_time: 0.032197s
[INFO ] tree_building: 0.002590s
@endcode

@subsection cli_ex3_nstut One dataset, 3 nearest neighbors, leaf size of 15 points

@code
$ mlpack_knn -r dataset.csv -n neighbors_out.csv -d distances_out.csv -k 3 -l 15 -v
[INFO ] Loading 'dataset.csv' as CSV data. Size is 3 x 1000.
[INFO ] Loaded reference data from 'dataset.csv' (3 x 1000).
[INFO ] Building reference tree...
[INFO ] Tree built.
[INFO ] Searching for 3 nearest neighbors with dual-tree kd-tree search...
[INFO ] 19692 node combinations were scored.
[INFO ] 36263 base cases were calculated.
[INFO ] Search complete.
[INFO ] Saving CSV data to 'neighbors_out.csv'.
[INFO ] Saving CSV data to 'distances_out.csv'.
[INFO ]
[INFO ] Execution parameters:
[INFO ] distances_file: distances_out.csv
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_model_file: ""
[INFO ] k: 3
[INFO ] leaf_size: 15
[INFO ] naive: false
[INFO ] neighbors_file: neighbors_out.csv
[INFO ] output_model_file: ""
[INFO ] query_file: ""
[INFO ] random_basis: false
[INFO ] reference_file: dataset.csv
[INFO ] seed: 0
[INFO ] single_mode: false
[INFO ] tree_type: kd
[INFO ] verbose: true
[INFO ] version: false
[INFO ]
[INFO ] Program timers:
[INFO ] computing_neighbors: 0.059020s
[INFO ] loading_data: 0.002791s
[INFO ] saving_data: 0.002369s
[INFO ] total_time: 0.069277s
[INFO ] tree_building: 0.002713s
@endcode

Further documentation on options can be found by using the --help option.

@section knn_nstut The 'KNN' class

The 'KNN' class is, specifically, a typedef of the more extensible
NeighborSearch class, querying for nearest neighbors using the Euclidean
distance.

@code
typedef NeighborSearch<NearestNeighborSort, metric::EuclideanDistance> KNN;
@endcode

Using the KNN class is particularly simple; first, the object must be
constructed and given a dataset.  Then, the \c Search() method is run, and two
matrices are returned: one which holds the indices of the nearest neighbors,
and one which holds the distances of the nearest neighbors.  These are of the
same structure as the output --neighbors_file and --distances_file for the CLI
interface (see above).  A handful of examples of simple usage of the KNN class
are given below.

@subsection knn_ex1_nstut 5 nearest neighbors on a single dataset

@code
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

using namespace mlpack::neighbor;

// Our dataset matrix, which is column-major.
extern arma::mat data;

KNN a(data);

// The matrices we will store output in.
arma::Mat<size_t> resultingNeighbors;
arma::mat resultingDistances;

a.Search(5, resultingNeighbors, resultingDistances);
@endcode

The output of the search is stored in resultingNeighbors and
resultingDistances.
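As a short illustrative addition (not part of the original example), the
results can be unpacked directly: column \c i of each output matrix corresponds
to query point \c i, and row \c j holds its \c j'th nearest neighbor.

@code
// Print the nearest neighbor of each point and the distance to it, using the
// 'resultingNeighbors' and 'resultingDistances' matrices computed above.
for (size_t i = 0; i < resultingNeighbors.n_cols; ++i)
{
  Log::Info << "Nearest neighbor of point " << i << " is point "
      << resultingNeighbors(0, i) << ", at distance "
      << resultingDistances(0, i) << "." << std::endl;
}
@endcode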
@subsection knn_ex2_nstut 10 nearest neighbors on a query and reference dataset

@code
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

using namespace mlpack::neighbor;

// Our dataset matrices, which are column-major.
extern arma::mat queryData, referenceData;

KNN a(referenceData);

// The matrices we will store output in.
arma::Mat<size_t> resultingNeighbors;
arma::mat resultingDistances;

a.Search(queryData, 10, resultingNeighbors, resultingDistances);
@endcode

@subsection knn_ex3_nstut Naive (exhaustive) search for 6 nearest neighbors on one dataset

This example uses the O(n^2) naive search (not the tree-based search).

@code
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

using namespace mlpack::neighbor;

// Our dataset matrix, which is column-major.
extern arma::mat dataset;

KNN a(dataset, true);

// The matrices we will store output in.
arma::Mat<size_t> resultingNeighbors;
arma::mat resultingDistances;

a.Search(6, resultingNeighbors, resultingDistances);
@endcode

Needless to say, naive search can be very slow...

@section neighborsearch_nstut The extensible 'NeighborSearch' class

The NeighborSearch class is very extensible, having the following template
arguments:

@code
template<typename SortPolicy = NearestNeighborSort,
         typename MetricType = mlpack::metric::EuclideanDistance,
         typename MatType = arma::mat,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType = tree::KDTree,
         template<typename RuleType> class TraversalType =
             TreeType<MetricType,
                      NeighborSearchStat<SortPolicy>,
                      MatType>::template DualTreeTraverser>
class NeighborSearch;
@endcode

By choosing different components for each of these template classes, a very
arbitrary neighbor searching object can be constructed.  Note that each of
these template parameters has a default, so it is not necessary to specify each
one.

@subsection sort_policy_doc_nstut SortPolicy policy class

The SortPolicy template parameter allows specification of how the
NeighborSearch object will decide which points are to be searched for.  The
mlpack::neighbor::NearestNeighborSort class is a well-documented example.  A
custom SortPolicy class must implement the same methods which
NearestNeighborSort does:

@code
static size_t SortDistance(const arma::vec& list, double newDistance);

static bool IsBetter(const double value, const double ref);

template<typename TreeType>
static double BestNodeToNodeDistance(const TreeType* queryNode,
                                     const TreeType* referenceNode);

template<typename TreeType>
static double BestPointToNodeDistance(const arma::vec& queryPoint,
                                      const TreeType* referenceNode);

static const double WorstDistance();

static const double BestDistance();
@endcode

The mlpack::neighbor::FurthestNeighborSort class is another implementation,
which is used to create the 'KFN' typedef class, which finds the furthest
neighbors, as opposed to the nearest neighbors.

@subsection metric_type_doc_nstut MetricType policy class

The MetricType policy class allows the neighbor search to take place in any
arbitrary metric space.  The mlpack::metric::LMetric class is a good example
implementation.  A MetricType class must provide the following functions:

@code
// Empty constructor is required.
MetricType();

// Compute the distance between two points.
template<typename VecType>
double Evaluate(const VecType& a, const VecType& b);
@endcode

Internally, the NeighborSearch class keeps an instantiated MetricType class
(which can be given in the constructor).  This is useful for a metric like the
Mahalanobis distance (mlpack::metric::MahalanobisDistance), which must store
state (the covariance matrix).  Therefore, you can write a non-static
MetricType class and use it seamlessly with NeighborSearch.  For more
information on the MetricType policy, see the documentation
\ref metrics "here".
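For example, here is a minimal sketch (not part of the original tutorial; the
\c covariance and \c dataset variables are hypothetical) of instantiating a
stateful metric.  An instance like this can then be handed to the NeighborSearch
constructor; consult the constructor documentation for the exact argument list
of your mlpack version.

@code
// Build a Mahalanobis distance whose state is a precomputed covariance matrix.
metric::MahalanobisDistance<> m;
m.Covariance() = covariance; // 'covariance' is assumed to be computed already.

// The metric can be evaluated directly between two points of 'dataset'.
const double d = m.Evaluate(dataset.col(0), dataset.col(1));
@endcode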
@subsection mat_type_doc_nstut MatType policy class The MatType template parameter specifies the type of data matrix used. This type must implement the same operations as an Armadillo matrix, and so standard choices are @c arma::mat and @c arma::sp_mat. @subsection tree_type_doc_nstut TreeType policy class The NeighborSearch class allows great extensibility in the selection of the type of tree used for search. This type must follow the typical mlpack TreeType policy, documented \ref trees "here". Typical choices might include mlpack::tree::KDTree, mlpack::tree::BallTree, mlpack::tree::StandardCoverTree, mlpack::tree::RTree, or mlpack::tree::RStarTree. It is easily possible to make your own tree type for use with NeighborSearch; consult the \ref trees "TreeType documentation" for more details. An example of using the NeighborSearch class with a ball tree is given below. @code // Construct a NeighborSearch object with ball bounds. NeighborSearch< NearestNeighborSort, metric::EuclideanDistance, arma::mat, tree::BallTree > neighborSearch(dataset); @endcode @subsection traverser_type_doc_nstut TraverserType policy class The last template parameter the NeighborSearch class offers is the TraverserType class. The TraverserType class holds the strategy used to traverse the trees in either single-tree or dual-tree search mode. By default, it is set to use the default traverser of the given @c TreeType (which is the member @c TreeType::DualTreeTraverser). This class must implement the following two methods: @code // Instantiate with a given RuleType. TraverserType(RuleType& rule); // Traverse with two trees. void Traverse(TreeType& queryNode, TreeType& referenceNode); @endcode The RuleType class provides the following functions for use in the traverser: @code // Evaluate the base case between two points. double BaseCase(const size_t queryIndex, const size_t referenceIndex); // Score the two nodes to see if they can be pruned, returning DBL_MAX if they // can be pruned. double Score(TreeType& queryNode, TreeType& referenceNode); @endcode Note also that any traverser given must satisfy the definition of a pruning dual-tree traversal given in the paper "Tree-independent dual-tree algorithms". @section further_doc_nstut Further documentation For further documentation on the NeighborSearch class, consult the \ref mlpack::neighbor::NeighborSearch "complete API documentation". */ mlpack-2.2.5/doc/tutorials/range_search/000077500000000000000000000000001315013601400201755ustar00rootroot00000000000000mlpack-2.2.5/doc/tutorials/range_search/range_search.txt000066400000000000000000000320461315013601400233640ustar00rootroot00000000000000/*! @file range_search.txt @author Ryan Curtin @brief Tutorial for how to use the RangeSearch class. @page rstutorial RangeSearch tutorial (mlpack_range_search) @section intro_rstut Introduction Range search is a simple machine learning task which aims to find all the neighbors of a point that fall into a certain range of distances. In this setting, we have a \b query and a \b reference dataset. Given a certain range, for each point in the \b query dataset, we wish to know all points in the \b reference dataset which have distances within that given range to the given query point. Alternately, if the query and reference datasets are the same, the problem can be stated more simply: for each point in the dataset, we wish to know all points which have distance in the given range to that point. 
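To make the task concrete, here is a brief brute-force sketch of the definition
(an illustrative addition, not part of the tutorial; \c query, \c reference,
\c lower, and \c upper are hypothetical variables).  The tree-based RangeSearch
class described below produces the same result far more efficiently.

@code
// For each query point, collect every reference point whose Euclidean distance
// to it falls in [lower, upper].  Both matrices are column-major.
std::vector<std::vector<size_t> > neighbors(query.n_cols);
for (size_t i = 0; i < query.n_cols; ++i)
  for (size_t j = 0; j < reference.n_cols; ++j)
  {
    const double d = arma::norm(query.col(i) - reference.col(j), 2);
    if (d >= lower && d <= upper)
      neighbors[i].push_back(j); // Point j is in range for query point i.
  }
@endcode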
\b mlpack provides:

 - a \ref cli_rstut "simple command-line executable" to run range search
 - a \ref rs_rstut "simple C++ interface" to perform range search
 - a \ref rs_ext_rstut "generic, extensible, and powerful C++ class (RangeSearch)" for complex usage

@section toc_rstut Table of Contents

A list of all the sections this tutorial contains.

 - \ref intro_rstut
 - \ref toc_rstut
 - \ref cli_rstut
 - \ref cli_ex1_rstut
 - \ref cli_ex2_rstut
 - \ref cli_ex3_rstut
 - \ref rs_rstut
 - \ref rs_ex1_rstut
 - \ref rs_ex2_rstut
 - \ref rs_ex3_rstut
 - \ref rs_ext_rstut
 - \ref metric_type_doc_rstut
 - \ref mat_type_doc_rstut
 - \ref tree_type_doc_rstut
 - \ref further_doc_rstut

@section cli_rstut The 'mlpack_range_search' command-line executable

\b mlpack provides an executable, \c mlpack_range_search, which can be used to
perform range searches quickly and simply from the command-line.  This program
will perform the range search and place the resulting neighbor index list into
one file and their corresponding distances into another file.  These files are
organized such that the first row corresponds to the neighbors (or distances)
of the first query point, and the second row corresponds to the neighbors (or
distances) of the second query point, and so forth.  The neighbors of a
specific point are not arranged in any specific order.

Because a range search may return different numbers of points (including zero),
the output file is technically not a valid CSV and may not be loadable by other
programs.  Therefore, if you need the results in a certain format, it may be
better to use the \ref rs_rstut "C++ interface" to manually export the data in
the preferred format.

Below are several examples of simple usage (and the resultant output).  The
'-v' option is used so that output is given.  Further documentation on each
individual option can be found by typing

@code
$ mlpack_range_search --help
@endcode

@subsection cli_ex1_rstut One dataset, points with distance <= 0.076

@code
$ mlpack_range_search -r dataset.csv -n neighbors_out.csv -d distances_out.csv \
> -U 0.076 -v
[INFO ] Loading 'dataset.csv' as CSV data. Size is 3 x 1000.
[INFO ] Loaded reference data from 'dataset.csv' (3x1000).
[INFO ] Building reference tree...
[INFO ] Tree built.
[INFO ] Search for points in the range [0, 0.076] with dual-tree kd-tree search...
[INFO ] Search complete.
[INFO ]
[INFO ] Execution parameters:
[INFO ] distances_file: distances_out.csv
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_model_file: ""
[INFO ] leaf_size: 20
[INFO ] max: 0.076
[INFO ] min: 0
[INFO ] naive: false
[INFO ] neighbors_file: neighbors_out.csv
[INFO ] output_model_file: ""
[INFO ] query_file: ""
[INFO ] random_basis: false
[INFO ] reference_file: dataset.csv
[INFO ] seed: 0
[INFO ] single_mode: false
[INFO ] tree_type: kd
[INFO ] verbose: true
[INFO ] version: false
[INFO ]
[INFO ] Program timers:
[INFO ] loading_data: 0.005201s
[INFO ] range_search/computing_neighbors: 0.017110s
[INFO ] total_time: 0.033313s
[INFO ] tree_building: 0.002500s
@endcode

Convenient program timers are given for different parts of the calculation at
the bottom of the output, as well as the parameters the simulation was run
with.  Now, if we look at the output files:

@code
$ head neighbors_out.csv
862
703

397, 277, 319
840
732
361
547, 695
113, 982, 689

$ head distances_out.csv
0.0598608
0.0753264

0.0207941, 0.0759762, 0.0471072
0.0708221
0.0568806
0.0700532
0.0529565, 0.0550988
0.0447142, 0.0399286, 0.0734605
@endcode

We can see that only point 862 is within distance 0.076 of point 0.
We can also see that point 2 has no points within a distance of 0.076 -- that
line is empty.

@subsection cli_ex2_rstut Query and reference dataset, range [1.0, 1.5]

@code
$ mlpack_range_search -q query_dataset.csv -r reference_dataset.csv -n \
> neighbors_out.csv -d distances_out.csv -L 1.0 -U 1.5 -v
[INFO ] Loading 'reference_dataset.csv' as CSV data. Size is 3 x 1000.
[INFO ] Loaded reference data from 'reference_dataset.csv' (3x1000).
[INFO ] Building reference tree...
[INFO ] Tree built.
[INFO ] Loading 'query_dataset.csv' as CSV data. Size is 3 x 50.
[INFO ] Loaded query data from 'query_dataset.csv' (3x50).
[INFO ] Search for points in the range [1, 1.5] with dual-tree kd-tree search...
[INFO ] Building query tree...
[INFO ] Tree built.
[INFO ] Search complete.
[INFO ]
[INFO ] Execution parameters:
[INFO ] distances_file: distances_out.csv
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_model_file: ""
[INFO ] leaf_size: 20
[INFO ] max: 1.5
[INFO ] min: 1
[INFO ] naive: false
[INFO ] neighbors_file: neighbors_out.csv
[INFO ] output_model_file: ""
[INFO ] query_file: query_dataset.csv
[INFO ] random_basis: false
[INFO ] reference_file: reference_dataset.csv
[INFO ] seed: 0
[INFO ] single_mode: false
[INFO ] tree_type: kd
[INFO ] verbose: true
[INFO ] version: false
[INFO ]
[INFO ] Program timers:
[INFO ] loading_data: 0.006199s
[INFO ] range_search/computing_neighbors: 0.024427s
[INFO ] total_time: 0.045403s
[INFO ] tree_building: 0.003979s
@endcode

@subsection cli_ex3_rstut One dataset, range [0.7, 0.8], leaf size of 15 points

The \b mlpack implementation of range search is a dual-tree algorithm; when
\f$kd\f$-trees are used, the leaf size of the tree can be changed.  Depending
on the characteristics of the dataset, a larger or smaller leaf size can
provide faster computation.  The leaf size is modifiable through the
command-line interface, as shown below.

@code
$ mlpack_range_search -r dataset.csv -n neighbors_out.csv -d distances_out.csv \
> -L 0.7 -U 0.8 -l 15 -v
[INFO ] Loading 'dataset.csv' as CSV data. Size is 3 x 1000.
[INFO ] Loaded reference data from 'dataset.csv' (3x1000).
[INFO ] Building reference tree...
[INFO ] Tree built.
[INFO ] Search for points in the range [0.7, 0.8] with dual-tree kd-tree search...
[INFO ] Search complete.
[INFO ]
[INFO ] Execution parameters:
[INFO ] distances_file: distances_out.csv
[INFO ] help: false
[INFO ] info: ""
[INFO ] input_model_file: ""
[INFO ] leaf_size: 15
[INFO ] max: 0.8
[INFO ] min: 0.7
[INFO ] naive: false
[INFO ] neighbors_file: neighbors_out.csv
[INFO ] output_model_file: ""
[INFO ] query_file: ""
[INFO ] random_basis: false
[INFO ] reference_file: dataset.csv
[INFO ] seed: 0
[INFO ] single_mode: false
[INFO ] tree_type: kd
[INFO ] verbose: true
[INFO ] version: false
[INFO ]
[INFO ] Program timers:
[INFO ] loading_data: 0.006298s
[INFO ] range_search/computing_neighbors: 0.411041s
[INFO ] total_time: 0.539931s
[INFO ] tree_building: 0.004695s
@endcode

Further documentation on options can be found by using the --help option.

@section rs_rstut The 'RangeSearch' class

The 'RangeSearch' class is an extensible template class which allows a high
level of flexibility.  However, all of the template arguments have default
parameters, allowing a user to simply use 'RangeSearch<>' for simple usage
without worrying about the exact necessary template parameters.
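For instance, a default object needs nothing more than a dataset (a minimal
sketch; the examples below flesh this out):

@code
// All template parameters take their default values: Euclidean distance,
// arma::mat data, and kd-trees.
RangeSearch<> a(data);
@endcode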
The class bears many similarities to the \ref nstutorial "NeighborSearch"
class; usage generally consists of calling the constructor with one or two
datasets, and then calling the 'Search()' method to perform the actual range
search.

The 'Search()' method stores the results in two vector-of-vector objects.  This
is necessary because each query point may have a different number of neighbors
in the specified distance range.  The structure of those two objects is very
similar to the output files --neighbors_file and --distances_file for the CLI
interface (see above).  A handful of examples of simple usage of the
RangeSearch class are given below.

@subsection rs_ex1_rstut Distance less than 2.0 on a single dataset

@code
#include <mlpack/methods/range_search/range_search.hpp>

using namespace mlpack::range;

// Our dataset matrix, which is column-major.
extern arma::mat data;

RangeSearch<> a(data);

// The vector-of-vector objects we will store output in.
std::vector<std::vector<size_t> > resultingNeighbors;
std::vector<std::vector<double> > resultingDistances;

// The range we will use.
math::Range r(0.0, 2.0); // [0.0, 2.0].

a.Search(r, resultingNeighbors, resultingDistances);
@endcode

The output of the search is stored in resultingNeighbors and
resultingDistances.

@subsection rs_ex2_rstut Range [3.0, 4.0] on a query and reference dataset

@code
#include <mlpack/methods/range_search/range_search.hpp>

using namespace mlpack::range;

// Our dataset matrices, which are column-major.
extern arma::mat queryData, referenceData;

RangeSearch<> a(referenceData);

// The vector-of-vector objects we will store output in.
std::vector<std::vector<size_t> > resultingNeighbors;
std::vector<std::vector<double> > resultingDistances;

// The range we will use.
math::Range r(3.0, 4.0); // [3.0, 4.0].

a.Search(queryData, r, resultingNeighbors, resultingDistances);
@endcode

@subsection rs_ex3_rstut Naive (exhaustive) search for distance greater than 5.0 on one dataset

This example uses the O(n^2) naive search (not the tree-based search).

@code
#include <mlpack/methods/range_search/range_search.hpp>

using namespace mlpack::range;

// Our dataset matrix, which is column-major.
extern arma::mat dataset;

// The 'true' option indicates that we will use naive calculation.
RangeSearch<> a(dataset, true);

// The vector-of-vector objects we will store output in.
std::vector<std::vector<size_t> > resultingNeighbors;
std::vector<std::vector<double> > resultingDistances;

// The range we will use.  The upper bound is DBL_MAX.
math::Range r(5.0, DBL_MAX); // [5.0, inf).

a.Search(r, resultingNeighbors, resultingDistances);
@endcode

Needless to say, naive search can be very slow...

@section rs_ext_rstut The extensible 'RangeSearch' class

Similar to the \ref nstutorial "NeighborSearch class", the RangeSearch class is
very extensible, having the following template arguments:

@code
template<typename MetricType = metric::EuclideanDistance,
         typename MatType = arma::mat,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType = tree::KDTree>
class RangeSearch;
@endcode

By choosing different components for each of these template classes, a very
arbitrary range searching object can be constructed.

@subsection metric_type_doc_rstut MetricType policy class

The MetricType policy class allows the range search to take place in any
arbitrary metric space.  The mlpack::metric::LMetric class is a good example
implementation.  A MetricType class must provide the following functions:

@code
// Empty constructor is required.
MetricType();

// Compute the distance between two points.
template<typename VecType>
double Evaluate(const VecType& a, const VecType& b);
@endcode

Internally, the RangeSearch class keeps an instantiated MetricType class (which
can be given in the constructor).  This is useful for a metric like the
Mahalanobis distance (mlpack::metric::MahalanobisDistance), which must store
state (the covariance matrix).
Therefore, you can write a non-static MetricType class and use it seamlessly
with RangeSearch.

@subsection mat_type_doc_rstut MatType policy class

The MatType template parameter specifies the type of data matrix used.  This
type must implement the same operations as an Armadillo matrix, and so standard
choices are @c arma::mat and @c arma::sp_mat.

@subsection tree_type_doc_rstut TreeType policy class

The RangeSearch class also allows a custom tree to be used.  The TreeType
policy is also used elsewhere in mlpack and is documented more thoroughly
\ref trees "here".  Typical choices might include mlpack::tree::KDTree (the
default), mlpack::tree::BallTree, mlpack::tree::RTree, mlpack::tree::RStarTree,
or mlpack::tree::StandardCoverTree.  Below is an example that uses the
RangeSearch class with an R-tree:

@code
// Construct a RangeSearch object that uses R-trees.
RangeSearch<
    metric::EuclideanDistance,
    arma::mat,
    tree::RTree
> rangeSearch(dataset);
@endcode

For further information on trees, including how to write your own tree for use
with RangeSearch and other mlpack methods, see the
\ref trees "TreeType policy documentation".

@section further_doc_rstut Further documentation

For further documentation on the RangeSearch class, consult the \ref
mlpack::range::RangeSearch "complete API documentation".

*/
mlpack-2.2.5/doc/tutorials/tutorials.txt000066400000000000000000000022401315013601400203410ustar00rootroot00000000000000/*!
@file tutorials.txt
@author Ryan Curtin
@brief List of mlpack tutorials.

@page tutorials Tutorials

@section introd_tut Introductory Tutorials

These tutorials introduce the basic concepts of working with mlpack, aimed at
developers who want to use and contribute to mlpack but are not sure where to
start.

 - \ref build
 - \ref formatdoc
 - \ref matrices
 - \ref iodoc
 - \ref timer
 - \ref sample

@section method_tut Method-specific Tutorials

These tutorials introduce the various methods mlpack offers, aimed at users who
want to get started quickly.  These tutorials start with simple examples and
progress to complex, extensible uses.

 - \ref nstutorial
 - \ref lrtutorial
 - \ref rstutorial
 - \ref dettutorial
 - \ref kmtutorial
 - \ref fmkstutorial
 - \ref emst_tutorial
 - \ref amftutorial
 - \ref cftutorial
 - \ref akfntutorial

@section policy_tut Policy Class Documentation

mlpack uses templates to achieve its genericity and flexibility.  Some of the
template types used by mlpack are common across multiple machine learning
algorithms.  The links below provide documentation for some of these common
types.

 - \ref metrics
 - \ref kernels
 - \ref trees

*/
mlpack-2.2.5/src/000077500000000000000000000000001315013601400135505ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/000077500000000000000000000000001315013601400150175ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/CMakeLists.txt000066400000000000000000000062371315013601400175660ustar00rootroot00000000000000
if ((NOT FORCE_CXX11) AND (NOT (${CMAKE_MAJOR_VERSION} LESS 3 OR (${CMAKE_MAJOR_VERSION} EQUAL 3 AND ${CMAKE_MINOR_VERSION} LESS 1)))) # Use the newer C++11 checks. include(../../CMake/NewCXX11.cmake) endif () # Generate export symbols for Windows, instead of adding __declspec(dllimport) # and __declspec(dllexport) everywhere. However, those modifiers are still # necessary for global variables (of which there are a few in mlpack). set_target_properties(mlpack PROPERTIES WINDOWS_EXPORT_ALL_SYMBOLS ON) include(GenerateExportHeader) generate_export_header(mlpack EXPORT_FILE_NAME mlpack_export.hpp) if (NOT BUILD_SHARED_LIBS) add_definitions(-DMLPACK_STATIC_DEFINE) endif () target_link_libraries(mlpack ${MLPACK_LIBRARIES}) set_target_properties(mlpack PROPERTIES VERSION 2.2 SOVERSION 2 ) # Backtrace for Linux need those libs. if(CMAKE_SYSTEM_NAME STREQUAL "Linux") if(LIBBFD_FOUND AND LIBDL_FOUND AND DEBUG) target_link_libraries(mlpack ${LIBBFD_LIBRARIES}) target_link_libraries(mlpack ${LIBDL_LIBRARIES}) endif() endif() # Collect all header files in the library. file(GLOB_RECURSE INCLUDE_H_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.h) file(GLOB_RECURSE INCLUDE_HPP_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.hpp) set(INCLUDE_FILES ${INCLUDE_H_FILES} ${INCLUDE_HPP_FILES}) # Move all of these header files to /include/mlpack/ after the library # is built. First we have to create that directory though. add_custom_target(mlpack_headers) add_custom_command(TARGET mlpack_headers POST_BUILD COMMENT "Moving header files to include/mlpack/" COMMAND ${CMAKE_COMMAND} ARGS -E make_directory ${CMAKE_BINARY_DIR}/include/mlpack/ COMMAND ${CMAKE_COMMAND} ARGS -E copy ${CMAKE_CURRENT_BINARY_DIR}/mlpack_export.hpp ${CMAKE_BINARY_DIR}/include/mlpack) # Then copy each of the header files over to that directory. foreach(incl_file ${INCLUDE_FILES}) add_custom_command(TARGET mlpack_headers POST_BUILD COMMAND ${CMAKE_COMMAND} ARGS -E copy ${CMAKE_CURRENT_SOURCE_DIR}/${incl_file} ${CMAKE_BINARY_DIR}/include/mlpack/${incl_file}) endforeach() # At install time, we simply install that directory of header files we # collected to include/. install(DIRECTORY ${CMAKE_BINARY_DIR}/include/mlpack DESTINATION include) # Set generated executables to be installed. Unfortunately they must manually # be entered... install(TARGETS mlpack RUNTIME DESTINATION bin LIBRARY DESTINATION lib ARCHIVE DESTINATION lib) add_dependencies(mlpack mlpack_headers) mlpack-2.2.5/src/mlpack/core.hpp000066400000000000000000000227671315013601400164760ustar00rootroot00000000000000/** * @file core.hpp * * Include all of the base components required to write MLPACK methods, and the * main MLPACK Doxygen documentation. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_HPP #define MLPACK_CORE_HPP /** * @mainpage mlpack Documentation * * @section intro_sec Introduction * * mlpack is an intuitive, fast, scalable C++ machine learning library, meant to * be a machine learning analog to LAPACK. It aims to implement a wide array of * machine learning methods and function as a "swiss army knife" for machine * learning researchers. The mlpack development website can be found at * http://mlpack.org. 
* * mlpack uses the Armadillo C++ matrix library (http://arma.sourceforge.net) * for general matrix, vector, and linear algebra support. mlpack also uses the * program_options, math_c99, and unit_test_framework components of the Boost * library, and optionally uses libbfd and libdl to give backtraces when * compiled with debugging symbols on some platforms. * * @section howto How To Use This Documentation * * This documentation is API documentation similar to Javadoc. It isn't * necessarily a tutorial, but it does provide detailed documentation on every * namespace, method, and class. * * Each mlpack namespace generally refers to one machine learning method, so * browsing the list of namespaces provides some insight as to the breadth of * the methods contained in the library. * * To generate this documentation in your own local copy of mlpack, you can * simply use Doxygen, from the root directory of the project: * * @code * $ doxygen * @endcode * * @section executables Executables * * mlpack provides several executables so that mlpack methods can be used * without any need for knowledge of C++. These executables are all * self-documented, and that documentation can be accessed by running the * executables with the '-h' or '--help' flag. * * A full list of executables is given below: * * - mlpack_adaboost * - mlpack_approx_kfn * - mlpack_cf * - mlpack_decision_stump * - mlpack_decision_tree * - mlpack_det * - mlpack_emst * - mlpack_fastmks * - mlpack_gmm_train * - mlpack_gmm_generate * - mlpack_gmm_probability * - mlpack_hmm_train * - mlpack_hmm_loglik * - mlpack_hmm_viterbi * - mlpack_hmm_generate * - mlpack_hoeffding_tree * - mlpack_kernel_pca * - mlpack_kfn * - mlpack_kmeans * - mlpack_knn * - mlpack_krann * - mlpack_lars * - mlpack_linear_regression * - mlpack_local_coordinate_coding * - mlpack_logistic_regression * - mlpack_lsh * - mlpack_mean_shift * - mlpack_nbc * - mlpack_nca * - mlpack_pca * - mlpack_perceptron * - mlpack_radical * - mlpack_range_search * - mlpack_softmax_regression * - mlpack_sparse_coding * * @section tutorial Tutorials * * A few short tutorials on how to use mlpack are given below. * * - @ref build * - @ref matrices * - @ref iodoc * - @ref timer * - @ref sample * - @ref verinfo * * Tutorials on specific methods are also available. 
* * - @ref nstutorial * - @ref lrtutorial * - @ref rstutorial * - @ref dettutorial * - @ref emst_tutorial * - @ref kmtutorial * - @ref fmkstutorial * - @ref amftutorial * * @section methods Methods in mlpack * * The following methods are included in mlpack: * * - Density Estimation Trees - mlpack::det::DTree * - Euclidean Minimum Spanning Trees - mlpack::emst::DualTreeBoruvka * - Gaussian Mixture Models (GMMs) - mlpack::gmm::GMM * - Hidden Markov Models (HMMs) - mlpack::hmm::HMM * - Kernel PCA - mlpack::kpca::KernelPCA * - K-Means Clustering - mlpack::kmeans::KMeans * - Least-Angle Regression (LARS/LASSO) - mlpack::regression::LARS * - Local Coordinate Coding - mlpack::lcc::LocalCoordinateCoding * - Locality-Sensitive Hashing - mlpack::neighbor::LSHSearch * - Naive Bayes Classifier - mlpack::naive_bayes::NaiveBayesClassifier * - Neighborhood Components Analysis (NCA) - mlpack::nca::NCA * - Principal Components Analysis (PCA) - mlpack::pca::PCA * - RADICAL (ICA) - mlpack::radical::Radical * - Simple Least-Squares Linear Regression - * mlpack::regression::LinearRegression * - Sparse Coding - mlpack::sparse_coding::SparseCoding * - Tree-based neighbor search (KNN, KFN) - mlpack::neighbor::NeighborSearch * - Tree-based range search - mlpack::range::RangeSearch * * @section remarks Final Remarks * * mlpack contributors include: * * - Ryan Curtin * - James Cline * - Neil Slagle * - Matthew Amidon * - Vlad Grantcharov * - Ajinkya Kale * - Bill March * - Dongryeol Lee * - Nishant Mehta * - Parikshit Ram * - Rajendran Mohan * - Trironk Kiatkungwanglai * - Patrick Mason * - Chip Mappus * - Hua Ouyang * - Long Quoc Tran * - Noah Kauffman * - Guillermo Colon * - Wei Guan * - Ryan Riegel * - Nikolaos Vasiloglou * - Garry Boyer * - Andreas Löf * - Marcus Edel * - Mudit Raj Gupta * - Sumedh Ghaisas * - Michael Fox * - Ryan Birmingham * - Siddharth Agrawal * - Saheb Motiani * - Yash Vadalia * - Abhishek Laddha * - Vahab Akbarzadeh * - Andrew Wells * - Zhihao Lou * - Udit Saxena * - Stephen Tu * - Jaskaran Singh * - Shangtong Zhang * - Hritik Jain * - Vladimir Glazachev * - QiaoAn Chen * - Janzen Brewer * - Trung Dinh * - Tham Ngap Wei * - Grzegorz Krajewski * - Joseph Mariadassou * - Pavel Zhigulin * - Andy Fang * - Barak Pearlmutter * - Ivari Horm * - Dhawal Arora * - Alexander Leinoff * - Palash Ahuja * - Yannis Mentekidis * - Ranjan Mondal * - Mikhail Lozhnikov * - Marcos Pividori * - Keon Kim * - Nilay Jain * - Peter Lehner * - Anuraj Kanodia * - Ivan Georgiev * - Shikhar Bhardwaj * - Yashu Seth * - Mike Izbicki * - Sudhanshu Ranjan * - Piyush Jaiswal * - Dinesh Raj * - Lakshya Agrawal * - Vivek Pal * - Praveen Ch */ // First, include all of the prerequisites. #include // Now the core mlpack classes. #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include //mlpack::backtrace only for linux #ifdef HAS_BFD_DL #include #endif // Include kernel traits. #include #include #include #include #include #include #include #include #include #include #include // Use OpenMP if compiled with -DHAS_OPENMP. #ifdef HAS_OPENMP #include #endif // Use Armadillo's C++ version detection. #ifdef ARMA_USE_CXX11 #define MLPACK_USE_CX11 #endif #endif mlpack-2.2.5/src/mlpack/core/000077500000000000000000000000001315013601400157475ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/CMakeLists.txt000066400000000000000000000004231315013601400205060ustar00rootroot00000000000000# All we have to do is recurse into the subdirectories. 
set(DIRS
  arma_extend
  boost_backport
  data
  dists
  kernels
  math
  metrics
  optimizers
  tree
  util
)

foreach(dir ${DIRS})
  add_subdirectory(${dir})
endforeach()

set(MLPACK_SRCS ${MLPACK_SRCS} PARENT_SCOPE)
mlpack-2.2.5/src/mlpack/core/arma_extend/000077500000000000000000000000001315013601400202365ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/arma_extend/CMakeLists.txt000066400000000000000000000013231315013601400227750ustar00rootroot00000000000000# Define the files we need to compile.
# Anything not in this list will not be compiled into mlpack.
set(SOURCES
  arma_extend.hpp
  fn_ccov.hpp
  fn_ind2sub.hpp
  glue_ccov_meat.hpp
  glue_ccov_proto.hpp
  hdf5_misc.hpp
  op_ccov_meat.hpp
  op_ccov_proto.hpp
  operator_minus.hpp
  SpMat_extra_bones.hpp
  SpMat_extra_meat.hpp
  Mat_extra_bones.hpp
  Mat_extra_meat.hpp
  Cube_extra_bones.hpp
  Cube_extra_meat.hpp
)

# add directory name to sources
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

# Append sources (with directory name) to list of all mlpack sources (used at
# the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)
mlpack-2.2.5/src/mlpack/core/arma_extend/Cube_extra_bones.hpp000066400000000000000000000001661315013601400242210ustar00rootroot00000000000000//! Add a serialization operator.
template<typename Archive>
void serialize(Archive& ar, const unsigned int version);
mlpack-2.2.5/src/mlpack/core/arma_extend/Cube_extra_meat.hpp000066400000000000000000000021211315013601400240320ustar00rootroot00000000000000// Add a serialization operator.
template<typename eT>
template<typename Archive>
void Cube<eT>::serialize(Archive& ar, const unsigned int /* version */)
{
  using boost::serialization::make_nvp;
  using boost::serialization::make_array;

  const uword old_n_elem = n_elem;

  // This is accurate from Armadillo 3.6.0 onwards.
  // We can't use BOOST_SERIALIZATION_NVP() because of the access::rw() call.
  ar & make_nvp("n_rows", access::rw(n_rows));
  ar & make_nvp("n_cols", access::rw(n_cols));
  ar & make_nvp("n_elem_slice", access::rw(n_elem_slice));
  ar & make_nvp("n_slices", access::rw(n_slices));
  ar & make_nvp("n_elem", access::rw(n_elem));

  // mem_state will always be 0 on load, so we don't need to save it.
  if (Archive::is_loading::value)
  {
    // Don't free if local memory is being used.
    if (mem_state == 0 && mem != NULL && old_n_elem > arma_config::mat_prealloc)
    {
      memory::release(access::rw(mem));
    }

    access::rw(mem_state) = 0;

    // We also need to allocate the memory we're using.
    init_cold();
  }

  ar & make_array(access::rwp(mem), n_elem);
}
mlpack-2.2.5/src/mlpack/core/arma_extend/Mat_extra_bones.hpp000066400000000000000000000140641315013601400240660ustar00rootroot00000000000000// Copyright (C) 2008-2016 National ICT Australia (NICTA)
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
// -------------------------------------------------------------------
//
// Written by Conrad Sanderson - http://conradsanderson.id.au
// Written by Ryan Curtin

//! Add a serialization operator.
template<typename Archive>
void serialize(Archive& ar, const unsigned int version);

/**
 * These will help us refer to the proper vector / column types, only with
 * specifying the matrix type we want to use.
 */
typedef Col<eT> vec_type;
typedef Col<eT> col_type;
typedef Row<eT> row_type;

/*
 * Add row_col_iterator and row_col_const_iterator to arma::Mat.
 */

/*
 * row_col_iterator for Mat.
 * This iterator can return the row and column index of the entry it is
 * pointing to.  The functionality of this iterator is similar to sparse matrix
 * iterators.
 */
#if ARMA_VERSION_MAJOR < 4 || \
    (ARMA_VERSION_MAJOR == 4 && ARMA_VERSION_MINOR < 349)
class row_col_iterator;

class const_row_col_iterator
  {
  public:

  // empty constructor
  inline const_row_col_iterator();

  // constructs const iterator from other iterators
  inline const_row_col_iterator(const row_col_iterator& it);
  inline const_row_col_iterator(const const_row_iterator& it);
  inline const_row_col_iterator(const row_iterator& it);

  // constructs iterator with given row and col index
  inline const_row_col_iterator(const Mat<eT>& in_M, const uword row = 0, const uword col = 0);

  /*
   * Returns the value of the current position.
   */
  inline arma_hot const eT& operator*() const { return *current_pos; }

  /*
   * Increment and decrement operators for this iterator.
   */
  inline arma_hot const_row_col_iterator& operator++();
  inline arma_hot const_row_col_iterator  operator++(int);
  inline arma_hot const_row_col_iterator& operator--();
  inline arma_hot const_row_col_iterator  operator--(int);

  /*
   * Comparison operator with itself and other relevant iterators.
   */
  inline arma_hot bool operator==(const const_row_col_iterator& rhs) const;
  inline arma_hot bool operator!=(const const_row_col_iterator& rhs) const;
  inline arma_hot bool operator==(const row_col_iterator& rhs) const;
  inline arma_hot bool operator!=(const row_col_iterator& rhs) const;
  inline arma_hot bool operator==(const const_iterator& rhs) const;
  inline arma_hot bool operator!=(const const_iterator& rhs) const;
  inline arma_hot bool operator==(const iterator& rhs) const;
  inline arma_hot bool operator!=(const iterator& rhs) const;
  inline arma_hot bool operator==(const const_row_iterator& rhs) const;
  inline arma_hot bool operator!=(const const_row_iterator& rhs) const;
  inline arma_hot bool operator==(const row_iterator& rhs) const;
  inline arma_hot bool operator!=(const row_iterator& rhs) const;

  arma_inline uword row() const { return internal_row; }
  arma_inline uword col() const { return internal_col; }

  // So that we satisfy the STL iterator types.
  typedef std::bidirectional_iterator_tag iterator_category;
  typedef eT                              value_type;
  typedef uword                           difference_type; // not certain on this one
  typedef const eT*                       pointer;
  typedef const eT&                       reference;

  arma_aligned const Mat<eT>* M;
  arma_aligned const eT*      current_pos;
  arma_aligned uword          internal_col;
  arma_aligned uword          internal_row;
  };

class row_col_iterator
  {
  public:

  // empty constructor
  inline row_col_iterator();

  // constructs const iterator from other iterators
  inline row_col_iterator(const row_iterator& it);

  // constructs iterator with given row and col index
  inline row_col_iterator(Mat<eT>& in_M, const uword row = 0, const uword col = 0);

  /*
   * Returns the value of the current position.
   */
  inline arma_hot eT& operator*() const { return *current_pos; }

  /*
   * Increment and decrement operators for this iterator.
   */
  inline arma_hot row_col_iterator& operator++();
  inline arma_hot row_col_iterator  operator++(int);
  inline arma_hot row_col_iterator& operator--();
  inline arma_hot row_col_iterator  operator--(int);

  /*
   * Comparison operator with itself and other relevant iterators.
   */
  inline arma_hot bool operator==(const const_row_col_iterator& rhs) const;
  inline arma_hot bool operator!=(const const_row_col_iterator& rhs) const;
  inline arma_hot bool operator==(const row_col_iterator& rhs) const;
  inline arma_hot bool operator!=(const row_col_iterator& rhs) const;
  inline arma_hot bool operator==(const const_iterator& rhs) const;
  inline arma_hot bool operator!=(const const_iterator& rhs) const;
  inline arma_hot bool operator==(const iterator& rhs) const;
  inline arma_hot bool operator!=(const iterator& rhs) const;
  inline arma_hot bool operator==(const const_row_iterator& rhs) const;
  inline arma_hot bool operator!=(const const_row_iterator& rhs) const;
  inline arma_hot bool operator==(const row_iterator& rhs) const;
  inline arma_hot bool operator!=(const row_iterator& rhs) const;

  arma_inline uword row() const { return internal_row; }
  arma_inline uword col() const { return internal_col; }

  // So that we satisfy the STL iterator types.
  typedef std::bidirectional_iterator_tag iterator_category;
  typedef eT                              value_type;
  typedef uword                           difference_type; // not certain on this one
  typedef const eT*                       pointer;
  typedef const eT&                       reference;

  arma_aligned const Mat<eT>* M;
  arma_aligned eT*            current_pos;
  arma_aligned uword          internal_col;
  arma_aligned uword          internal_row;
  };

/*
 * Extra functions for Mat
 */
// begin for iterator row_col_iterator
inline const_row_col_iterator begin_row_col() const;
inline row_col_iterator begin_row_col();

// end for iterator row_col_iterator
inline const_row_col_iterator end_row_col() const;
inline row_col_iterator end_row_col();
#endif
mlpack-2.2.5/src/mlpack/core/arma_extend/Mat_extra_meat.hpp000066400000000000000000000250301315013601400237010ustar00rootroot00000000000000// Copyright (C) 2008-2016 National ICT Australia (NICTA)
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
// -------------------------------------------------------------------
//
// Written by Conrad Sanderson - http://conradsanderson.id.au
// Written by Ryan Curtin

// Add a serialization operator.
template<typename eT>
template<typename Archive>
void Mat<eT>::serialize(Archive& ar, const unsigned int /* version */)
{
  using boost::serialization::make_nvp;
  using boost::serialization::make_array;

  const uword old_n_elem = n_elem;

  // This is accurate from Armadillo 3.6.0 onwards.
  // We can't use BOOST_SERIALIZATION_NVP() because of the access::rw() call.
  ar & make_nvp("n_rows", access::rw(n_rows));
  ar & make_nvp("n_cols", access::rw(n_cols));
  ar & make_nvp("n_elem", access::rw(n_elem));
  ar & make_nvp("vec_state", access::rw(vec_state));

  // mem_state will always be 0 on load, so we don't need to save it.
  if (Archive::is_loading::value)
  {
    // Don't free if local memory is being used.
    if (mem_state == 0 && mem != NULL && old_n_elem > arma_config::mat_prealloc)
    {
      memory::release(access::rw(mem));
    }

    access::rw(mem_state) = 0;

    // We also need to allocate the memory we're using.
    init_cold();
  }

  ar & make_array(access::rwp(mem), n_elem);
}

#if ARMA_VERSION_MAJOR < 4 || \
    (ARMA_VERSION_MAJOR == 4 && ARMA_VERSION_MINOR < 349)
///////////////////////////////////////////////////////////////////////////////
// Mat::const_row_col_iterator implementation                                 //
///////////////////////////////////////////////////////////////////////////////

template<typename eT>
inline
Mat<eT>::const_row_col_iterator::const_row_col_iterator()
    : M(NULL), current_pos(NULL), internal_col(0), internal_row(0)
  {
  // Technically this iterator is invalid (it may not point to a real element)
  }

template<typename eT>
inline
Mat<eT>::const_row_col_iterator::const_row_col_iterator(const row_col_iterator& it)
    : M(it.M), current_pos(it.current_pos), internal_col(it.col()), internal_row(it.row())
  {
  // Nothing to do.
  }

template<typename eT>
inline
Mat<eT>::const_row_col_iterator::const_row_col_iterator(const const_row_iterator& it)
    : M(&it.M), current_pos(&it.M(it.row, it.col)), internal_col(it.col), internal_row(it.row)
  {
  // Nothing to do.
  }

template<typename eT>
inline
Mat<eT>::const_row_col_iterator::const_row_col_iterator(const row_iterator& it)
    : M(&it.M), current_pos(&it.M(it.row, it.col)), internal_col(it.col), internal_row(it.row)
  {
  // Nothing to do.
  }

template<typename eT>
inline
Mat<eT>::const_row_col_iterator::const_row_col_iterator(const Mat<eT>& in_M, const uword row, const uword col)
    : M(&in_M), current_pos(&in_M(row,col)), internal_col(col), internal_row(row)
  {
  // Nothing to do.
  }

template<typename eT>
inline typename Mat<eT>::const_row_col_iterator&
Mat<eT>::const_row_col_iterator::operator++()
  {
  current_pos++;
  internal_row++;

  // Check to see if we moved a column.
  if(internal_row == M->n_rows)
    {
    internal_col++;
    internal_row = 0;
    }

  return *this;
  }

template<typename eT>
inline typename Mat<eT>::const_row_col_iterator
Mat<eT>::const_row_col_iterator::operator++(int)
  {
  typename Mat<eT>::const_row_col_iterator temp(*this);

  ++(*this);

  return temp;
  }

template<typename eT>
inline typename Mat<eT>::const_row_col_iterator&
Mat<eT>::const_row_col_iterator::operator--()
  {
  if(internal_row > 0)
    {
    current_pos--;
    internal_row--;
    }
  else if(internal_col > 0)
    {
    current_pos--;
    internal_col--;
    internal_row = M->n_rows - 1;
    }

  return *this;
  }

template<typename eT>
inline typename Mat<eT>::const_row_col_iterator
Mat<eT>::const_row_col_iterator::operator--(int)
  {
  typename Mat<eT>::const_row_col_iterator temp(*this);

  --(*this);

  return temp;
  }

template<typename eT>
inline
bool
Mat<eT>::const_row_col_iterator::operator==(const const_row_col_iterator& rhs) const
  {
  return (rhs.current_pos == current_pos);
  }

template<typename eT>
inline
bool
Mat<eT>::const_row_col_iterator::operator!=(const const_row_col_iterator& rhs) const
  {
  return (rhs.current_pos != current_pos);
  }

template<typename eT>
inline
bool
Mat<eT>::const_row_col_iterator::operator==(const row_col_iterator& rhs) const
  {
  return (rhs.current_pos == current_pos);
  }

template<typename eT>
inline
bool
Mat<eT>::const_row_col_iterator::operator!=(const row_col_iterator& rhs) const
  {
  return (rhs.current_pos != current_pos);
  }

template<typename eT>
inline
bool
Mat<eT>::const_row_col_iterator::operator==(const const_iterator& rhs) const
  {
  return (rhs == current_pos);
  }

template<typename eT>
inline
bool
Mat<eT>::const_row_col_iterator::operator!=(const const_iterator& rhs) const
  {
  return (rhs != current_pos);
  }

template<typename eT>
inline
bool
Mat<eT>::const_row_col_iterator::operator==(const iterator& rhs) const
  {
  return (rhs == current_pos);
  }

template<typename eT>
inline
bool
Mat<eT>::const_row_col_iterator::operator!=(const iterator& rhs) const
  {
  return (rhs != current_pos);
  }

template<typename eT>
inline
bool
Mat<eT>::const_row_col_iterator::operator==(const const_row_iterator& rhs) const
  {
  return (&rhs.M(rhs.row, rhs.col) == current_pos);
  }

template<typename eT>
inline
bool
Mat::const_row_col_iterator::operator!=(const const_row_iterator& rhs) const { return (&rhs.M(rhs.row, rhs.col) != current_pos); } template inline bool Mat::const_row_col_iterator::operator==(const row_iterator& rhs) const { return (&rhs.M(rhs.row, rhs.col) == current_pos); } template inline bool Mat::const_row_col_iterator::operator!=(const row_iterator& rhs) const { return (&rhs.M(rhs.row, rhs.col) != current_pos); } /////////////////////////////////////////////////////////////////////////////// // Mat::row_col_iterator implementation // /////////////////////////////////////////////////////////////////////////////// template inline Mat::row_col_iterator::row_col_iterator() : M(NULL), current_pos(NULL), internal_col(0), internal_row(0) { // Technically this iterator is invalid (it may not point to a real element) } template inline Mat::row_col_iterator::row_col_iterator(const row_iterator& it) : M(&it.M), current_pos(&it.M(it.row, it.col)), internal_col(it.col), internal_row(it.row) { // Nothing to do. } template inline Mat::row_col_iterator::row_col_iterator(Mat& in_M, const uword row, const uword col) : M(&in_M), current_pos(&in_M(row,col)), internal_col(col), internal_row(row) { // Nothing to do. } template inline typename Mat::row_col_iterator& Mat::row_col_iterator::operator++() { current_pos++; internal_row++; // Check to see if we moved a column. if(internal_row == M->n_rows) { internal_col++; internal_row = 0; } return *this; } template inline typename Mat::row_col_iterator Mat::row_col_iterator::operator++(int) { typename Mat::row_col_iterator temp(*this); ++(*this); return temp; } template inline typename Mat::row_col_iterator& Mat::row_col_iterator::operator--() { if(internal_row != 0) { current_pos--; internal_row--; } else if(internal_col != 0) { current_pos--; internal_col--; internal_row = M->n_rows - 1; } return *this; } template inline typename Mat::row_col_iterator Mat::row_col_iterator::operator--(int) { typename Mat::row_col_iterator temp(*this); --(*this); return temp; } template inline bool Mat::row_col_iterator::operator==(const const_row_col_iterator& rhs) const { return (rhs.current_pos == current_pos); } template inline bool Mat::row_col_iterator::operator!=(const const_row_col_iterator& rhs) const { return (rhs.current_pos != current_pos); } template inline bool Mat::row_col_iterator::operator==(const row_col_iterator& rhs) const { return (rhs.current_pos == current_pos); } template inline bool Mat::row_col_iterator::operator!=(const row_col_iterator& rhs) const { return (rhs.current_pos != current_pos); } template inline bool Mat::row_col_iterator::operator==(const const_iterator& rhs) const { return (rhs == current_pos); } template inline bool Mat::row_col_iterator::operator!=(const const_iterator& rhs) const { return (rhs != current_pos); } template inline bool Mat::row_col_iterator::operator==(const iterator& rhs) const { return (rhs == current_pos); } template inline bool Mat::row_col_iterator::operator!=(const iterator& rhs) const { return (rhs != current_pos); } template inline bool Mat::row_col_iterator::operator==(const const_row_iterator& rhs) const { return (&rhs.M(rhs.row, rhs.col) == current_pos); } template inline bool Mat::row_col_iterator::operator!=(const const_row_iterator& rhs) const { return (&rhs.M(rhs.row, rhs.col) != current_pos); } template inline bool Mat::row_col_iterator::operator==(const row_iterator& rhs) const { return (&rhs.M(rhs.row, rhs.col) == current_pos); } template inline bool Mat::row_col_iterator::operator!=(const 
row_iterator& rhs) const { return (&rhs.M(rhs.row, rhs.col) != current_pos); } /////////////////////////////////////////////////////////////////////////////// // extended Mat functionality implementation // /////////////////////////////////////////////////////////////////////////////// template inline typename Mat::const_row_col_iterator Mat::begin_row_col() const { return const_row_col_iterator(*this); } template inline typename Mat::row_col_iterator Mat::begin_row_col() { return row_col_iterator(*this); } template inline typename Mat::const_row_col_iterator Mat::end_row_col() const { return ++const_row_col_iterator(*this, n_rows - 1, n_cols - 1); } template inline typename Mat::row_col_iterator Mat::end_row_col() { return ++row_col_iterator(*this, n_rows - 1, n_cols - 1); } #endif mlpack-2.2.5/src/mlpack/core/arma_extend/README.md000066400000000000000000000015651315013601400215240ustar00rootroot00000000000000The files in this directory are taken from newer versions of Armadillo in order to still support older versions of Armadillo. Therefore some files are licensed under the Mozilla Public License v2.0 (MPL2). These are the files under the MPL: - fn_ind2sub.hpp - SpMat_extra_bones.hpp - SpMat_extra_meat.hpp - operator_minus.hpp - hdf5_misc.hpp - Mat_extra_bones.hpp - Mat_extra_meat.hpp If you want a copy of mlpack without MPL code included, you will need to * Remove all of the above-listed files. * Remove the above-listed files from CMakeLists.txt. * Remove the above-listed files from arma_extend.hpp. * Modify the root CMakeLists.txt to require a sufficiently new version of * Armadillo that none of the above backports are required, by changing the line "find_package(Armadillo x.yyy.z REQUIRED)" to reference a sufficiently new version instead of x.yyy.z. mlpack-2.2.5/src/mlpack/core/arma_extend/SpMat_extra_bones.hpp000066400000000000000000000027071315013601400243720ustar00rootroot00000000000000// Copyright (C) 2008-2015 National ICT Australia (NICTA) // // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. // ------------------------------------------------------------------- // // Written by Conrad Sanderson - http://conradsanderson.id.au // Written by Ryan Curtin // Written by Matthew Amidon /** * Add a batch constructor for SpMat, if the version is older than 3.810.0, and * also a serialize() function for Armadillo. */ template void serialize(Archive& ar, const unsigned int version); /** * These will help us refer the proper vector / column types, only with * specifying the matrix type we want to use. 
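 *
 * For example, given SpMat<double>, col_type names SpCol<double> and
 * row_type names SpRow<double>, so generic code can simply write
 * typename MatType::col_type without spelling out the sparse type.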
*/ typedef SpCol vec_type; typedef SpCol col_type; typedef SpRow row_type; /* * Extra functions for SpMat * Adding definition of row_col_iterator to generalize with Mat::row_col_iterator */ #if ARMA_VERSION_MAJOR < 4 || \ (ARMA_VERSION_MAJOR == 4 && ARMA_VERSION_MINOR < 349) typedef iterator row_col_iterator; typedef const_iterator const_row_col_iterator; // begin for iterator row_col_iterator inline const_row_col_iterator begin_row_col() const; inline row_col_iterator begin_row_col(); // end for iterator row_col_iterator inline const_row_col_iterator end_row_col() const; inline row_col_iterator end_row_col(); #endif mlpack-2.2.5/src/mlpack/core/arma_extend/SpMat_extra_meat.hpp000066400000000000000000000045061315013601400242110ustar00rootroot00000000000000// Copyright (C) 2008-2015 National ICT Australia (NICTA) // // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. // ------------------------------------------------------------------- // // Written by Conrad Sanderson - http://conradsanderson.id.au // Written by Ryan Curtin // Written by Matthew Amidon /** * Add a serialization function. */ template template void SpMat::serialize(Archive& ar, const unsigned int /* version */) { using boost::serialization::make_nvp; using boost::serialization::make_array; // This is accurate from Armadillo 3.6.0 onwards. // We can't use BOOST_SERIALIZATION_NVP() because of the access::rw() call. ar & make_nvp("n_rows", access::rw(n_rows)); ar & make_nvp("n_cols", access::rw(n_cols)); ar & make_nvp("n_elem", access::rw(n_elem)); ar & make_nvp("n_nonzero", access::rw(n_nonzero)); ar & make_nvp("vec_state", access::rw(vec_state)); // Now we have to serialize the values, row indices, and column pointers. // If we are loading, we need to initialize space for these things. if (Archive::is_loading::value) { const uword new_n_nonzero = n_nonzero; // Save this; we're about to nuke it. init(n_rows, n_cols); // Allocate column pointers. mem_resize(new_n_nonzero); // Allocate storage. // These calls will set the sentinel values at the end of the storage and // column pointers, if necessary, so we don't need to worry about them. } ar & make_array(access::rwp(values), n_nonzero); ar & make_array(access::rwp(row_indices), n_nonzero); ar & make_array(access::rwp(col_ptrs), n_cols + 1); } #if ARMA_VERSION_MAJOR < 4 || \ (ARMA_VERSION_MAJOR == 4 && ARMA_VERSION_MINOR < 349) template inline typename SpMat::const_row_col_iterator SpMat::begin_row_col() const { return begin(); } template inline typename SpMat::row_col_iterator SpMat::begin_row_col() { return begin(); } template inline typename SpMat::const_row_col_iterator SpMat::end_row_col() const { return end(); } template inline typename SpMat::row_col_iterator SpMat::end_row_col() { return end(); } #endif mlpack-2.2.5/src/mlpack/core/arma_extend/arma_extend.hpp000066400000000000000000000041741315013601400232440ustar00rootroot00000000000000/*** * @file arma_extend.hpp * @author Ryan Curtin * * Include Armadillo extensions which currently are not part of the main * Armadillo codebase. * * This will allow the use of the ccov() function (which performs the same * function as cov(trans(X)) but without the cost of computing trans(X)). This * also gives sparse matrix support, if it is necessary. 
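 *
 * A minimal serialization sketch, assuming these extensions are active
 * (the file name and archive type are illustrative only; <fstream> and the
 * matching boost archive header must be included):
 *
 *   arma::mat m(3, 3, arma::fill::randu);
 *   std::ofstream ofs("matrix.txt");
 *   boost::archive::text_oarchive ar(ofs);
 *   ar & BOOST_SERIALIZATION_NVP(m);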
*/ #ifndef MLPACK_CORE_ARMA_EXTEND_ARMA_EXTEND_HPP #define MLPACK_CORE_ARMA_EXTEND_ARMA_EXTEND_HPP // Add batch constructor for sparse matrix (if version <= 3.810.0). #define ARMA_EXTRA_SPMAT_PROTO mlpack/core/arma_extend/SpMat_extra_bones.hpp #define ARMA_EXTRA_SPMAT_MEAT mlpack/core/arma_extend/SpMat_extra_meat.hpp // Add row_col_iterator and row_col_const_iterator for Mat. #define ARMA_EXTRA_MAT_PROTO mlpack/core/arma_extend/Mat_extra_bones.hpp #define ARMA_EXTRA_MAT_MEAT mlpack/core/arma_extend/Mat_extra_meat.hpp // Add boost serialization for Cube. #define ARMA_EXTRA_CUBE_PROTO mlpack/core/arma_extend/Cube_extra_bones.hpp #define ARMA_EXTRA_CUBE_MEAT mlpack/core/arma_extend/Cube_extra_meat.hpp // Manually set ARMA_{64,32}BIT_WORD for _WIN64 or win32 #if defined(_MSC_VER) #ifdef _WIN64 #define ARMA_64BIT_WORD #ifdef ARMA_32BIT_WORD #undef ARMA_32BIT_WORD #endif #else #define ARMA_32BIT_WORD #ifdef ARMA_64BIT_WORD #undef ARMA_64BIT_WORD #endif #endif #endif // Make sure that U64 and S64 support is enabled. #ifndef ARMA_USE_U64S64 #define ARMA_USE_U64S64 #endif // Include everything we'll need for serialize(). #include #include #include #include namespace arma { // u64/s64 #include "hdf5_misc.hpp" // ccov() #include "op_ccov_proto.hpp" #include "op_ccov_meat.hpp" #include "glue_ccov_proto.hpp" #include "glue_ccov_meat.hpp" #include "fn_ccov.hpp" // index to subscript and vice versa #include "fn_ind2sub.hpp" // inplace_reshape() #include "fn_inplace_reshape.hpp" // unary minus for sparse matrices #include "operator_minus.hpp" }; #endif mlpack-2.2.5/src/mlpack/core/arma_extend/fn_ccov.hpp000066400000000000000000000013101315013601400223610ustar00rootroot00000000000000//! \addtogroup fn_ccov //! @{ template inline const Op ccov(const Base& X, const uword norm_type = 0) { arma_extra_debug_sigprint(); arma_debug_check( (norm_type > 1), "ccov(): norm_type must be 0 or 1"); return Op(X.get_ref(), norm_type, 0); } template inline const Glue ccov(const Base& A, const Base& B, const uword norm_type = 0) { arma_extra_debug_sigprint(); arma_debug_check( (norm_type > 1), "ccov(): norm_type must be 0 or 1"); return Glue(A.get_ref(), B.get_ref(), norm_type); } //! @} mlpack-2.2.5/src/mlpack/core/arma_extend/fn_ind2sub.hpp000066400000000000000000000037251315013601400230070ustar00rootroot00000000000000// Copyright (C) 2008-2016 National ICT Australia (NICTA) // // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/.
// ------------------------------------------------------------------- // // Written by Conrad Sanderson - http://conradsanderson.id.au #if (ARMA_VERSION_MAJOR < 6 || \ (ARMA_VERSION_MAJOR == 6 && ARMA_VERSION_MINOR < 399)) inline uvec ind2sub(const SizeMat& s, const uword i) { arma_extra_debug_sigprint(); arma_debug_check( (i >= (s.n_rows * s.n_cols) ), "ind2sub(): index out of range" ); uvec out(2); out[0] = i % s.n_rows; out[1] = i / s.n_rows; return out; } inline uvec ind2sub(const SizeCube& s, const uword i) { arma_extra_debug_sigprint(); arma_debug_check( (i >= (s.n_rows * s.n_cols * s.n_slices) ), "ind2sub(): index out of range" ); const uword n_elem_slice = s.n_rows * s.n_cols; const uword slice = i / n_elem_slice; const uword j = i - (slice * n_elem_slice); const uword row = j % s.n_rows; const uword col = j / s.n_rows; uvec out(3); out[0] = row; out[1] = col; out[2] = slice; return out; } arma_inline uword sub2ind(const SizeMat& s, const uword row, const uword col) { arma_extra_debug_sigprint(); arma_debug_check( ((row >= s.n_rows) || (col >= s.n_cols)), "sub2ind(): subscript out of range" ); return uword(row + col*s.n_rows); } arma_inline uword sub2ind(const SizeCube& s, const uword row, const uword col, const uword slice) { arma_extra_debug_sigprint(); arma_debug_check( ((row >= s.n_rows) || (col >= s.n_cols) || (slice >= s.n_slices)), "sub2ind(): subscript out of range" ); return uword( (slice * s.n_rows * s.n_cols) + (col * s.n_rows) + row ); } #endif mlpack-2.2.5/src/mlpack/core/arma_extend/fn_inplace_reshape.hpp000066400000000000000000000014301315013601400245520ustar00rootroot00000000000000//! \addtogroup fn_inplace_reshape //! @{ /** * This does not handle column vectors or row vectors entirely correctly. You * should be able to do multiplication or other basic operations with the * resulting matrix, but it may have other problems. So if you are using this * on vectors (arma::Col<> or arma::Row<>), be careful, and be warned that * bizarre behavior may occur. */ template inline Mat& inplace_reshape(Mat& X, const uword new_n_rows, const uword new_n_cols) { arma_extra_debug_sigprint(); arma_debug_check((new_n_rows * new_n_cols) != X.n_elem, "inplace_reshape(): cannot add or remove elements"); access::rw(X.n_rows) = new_n_rows; access::rw(X.n_cols) = new_n_cols; return X; } //! @} mlpack-2.2.5/src/mlpack/core/arma_extend/glue_ccov_meat.hpp000066400000000000000000000056221315013601400237300ustar00rootroot00000000000000//! \addtogroup glue_cov //! @{ template inline void glue_ccov::direct_ccov(Mat& out, const Mat& A, const Mat& B, const uword norm_type) { arma_extra_debug_sigprint(); if(A.is_vec() && B.is_vec()) { arma_debug_check( (A.n_elem != B.n_elem), "ccov(): the number of elements in A and B must match" ); const eT* A_ptr = A.memptr(); const eT* B_ptr = B.memptr(); eT A_acc = eT(0); eT B_acc = eT(0); eT out_acc = eT(0); const uword N = A.n_elem; for(uword i=0; i 1) ? eT(N-1) : eT(1) ) : eT(N); out.set_size(1,1); out[0] = out_acc/norm_val; } else { arma_debug_assert_same_size(A, B, "ccov()"); const uword N = A.n_cols; const eT norm_val = (norm_type == 0) ? ( (N > 1) ? 
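// norm_type == 0 selects the unbiased normaliser (divide by N-1, clamped
// to 1 when there is only a single sample); norm_type == 1 divides by N.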
eT(N-1) : eT(1) ) : eT(N); out = A * trans(B); out -= (sum(A) * trans(sum(B))) / eT(N); out /= norm_val; } } template inline void glue_ccov::direct_ccov(Mat< std::complex >& out, const Mat< std::complex >& A, const Mat< std::complex >& B, const uword norm_type) { arma_extra_debug_sigprint(); typedef typename std::complex eT; if(A.is_vec() && B.is_vec()) { arma_debug_check( (A.n_elem != B.n_elem), "cov(): the number of elements in A and B must match" ); const eT* A_ptr = A.memptr(); const eT* B_ptr = B.memptr(); eT A_acc = eT(0); eT B_acc = eT(0); eT out_acc = eT(0); const uword N = A.n_elem; for(uword i=0; i 1) ? eT(N-1) : eT(1) ) : eT(N); out.set_size(1,1); out[0] = out_acc/norm_val; } else { arma_debug_assert_same_size(A, B, "ccov()"); const uword N = A.n_cols; const eT norm_val = (norm_type == 0) ? ( (N > 1) ? eT(N-1) : eT(1) ) : eT(N); out = A * trans(conj(B)); out -= (sum(A) * trans(conj(sum(B)))) / eT(N); out /= norm_val; } } template inline void glue_ccov::apply(Mat& out, const Glue& X) { arma_extra_debug_sigprint(); typedef typename T1::elem_type eT; const unwrap_check A_tmp(X.A, out); const unwrap_check B_tmp(X.B, out); const Mat& A = A_tmp.M; const Mat& B = B_tmp.M; const uword norm_type = X.aux_uword; if(&A != &B) { glue_ccov::direct_ccov(out, A, B, norm_type); } else { op_ccov::direct_ccov(out, A, norm_type); } } //! @} mlpack-2.2.5/src/mlpack/core/arma_extend/glue_ccov_proto.hpp000066400000000000000000000010571315013601400241430ustar00rootroot00000000000000//! \addtogroup glue_ccov //! @{ class glue_ccov { public: template inline static void direct_ccov(Mat& out, const Mat& A, const Mat& B, const uword norm_type); template inline static void direct_ccov(Mat< std::complex >& out, const Mat< std::complex >& A, const Mat< std::complex >& B, const uword norm_type); template inline static void apply(Mat& out, const Glue& X); }; //! @} mlpack-2.2.5/src/mlpack/core/arma_extend/hdf5_misc.hpp000066400000000000000000000020061315013601400226060ustar00rootroot00000000000000// Copyright (C) 2012-2013 National ICT Australia (NICTA) // // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. // ------------------------------------------------------------------- // // Written by Conrad Sanderson - http://conradsanderson.id.au // Written by Ryan Curtin // Written by Szabolcs Horvat // To hack in u64/s64 support to Armadillo when it is not compiled with // ARMA_64BIT_WORD. namespace hdf5_misc { #if defined(ARMA_USE_HDF5) #if !(defined(ARMA_64BIT_WORD) || defined(ARMA_USE_U64S64)) #if defined(ULLONG_MAX) template<> inline hid_t get_hdf5_type< long long >() { return H5Tcopy(H5T_NATIVE_LLONG); } template<> inline hid_t get_hdf5_type< unsigned long long >() { return H5Tcopy(H5T_NATIVE_ULLONG); } #endif #endif #endif } // namespace hdf5_misc mlpack-2.2.5/src/mlpack/core/arma_extend/op_ccov_meat.hpp000066400000000000000000000033151315013601400234070ustar00rootroot00000000000000//! \addtogroup op_cov //! @{ template inline void op_ccov::direct_ccov(Mat& out, const Mat& A, const uword norm_type) { arma_extra_debug_sigprint(); if(A.is_vec()) { if(A.n_rows == 1) { out = var(trans(A), norm_type); } else { out = var(A, norm_type); } } else { const uword N = A.n_cols; const eT norm_val = (norm_type == 0) ? ( (N > 1) ? 
eT(N-1) : eT(1) ) : eT(N); const Col acc = sum(A, 1); out = A * trans(A); out -= (acc * trans(acc)) / eT(N); out /= norm_val; } } template inline void op_ccov::direct_ccov(Mat< std::complex >& out, const Mat< std::complex >& A, const uword norm_type) { arma_extra_debug_sigprint(); typedef typename std::complex eT; if(A.is_vec()) { if(A.n_rows == 1) { const Mat tmp_mat = var(trans(A), norm_type); out.set_size(1,1); out[0] = tmp_mat[0]; } else { const Mat tmp_mat = var(A, norm_type); out.set_size(1,1); out[0] = tmp_mat[0]; } } else { const uword N = A.n_cols; const eT norm_val = (norm_type == 0) ? ( (N > 1) ? eT(N-1) : eT(1) ) : eT(N); const Col acc = sum(A, 1); out = A * trans(conj(A)); out -= (acc * trans(conj(acc))) / eT(N); out /= norm_val; } } template inline void op_ccov::apply(Mat& out, const Op& in) { arma_extra_debug_sigprint(); typedef typename T1::elem_type eT; const unwrap_check tmp(in.m, out); const Mat& A = tmp.M; const uword norm_type = in.aux_uword_a; op_ccov::direct_ccov(out, A, norm_type); } //! @} mlpack-2.2.5/src/mlpack/core/arma_extend/op_ccov_proto.hpp000066400000000000000000000007261315013601400236270ustar00rootroot00000000000000//! \addtogroup op_cov //! @{ class op_ccov { public: template inline static void direct_ccov(Mat& out, const Mat& X, const uword norm_type); template inline static void direct_ccov(Mat< std::complex >& out, const Mat< std::complex >& X, const uword norm_type); template inline static void apply(Mat& out, const Op& in); }; //! @} mlpack-2.2.5/src/mlpack/core/arma_extend/operator_minus.hpp000066400000000000000000000016471315013601400240250ustar00rootroot00000000000000// Copyright (C) 2008-2015 National ICT Australia (NICTA) // // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. // ------------------------------------------------------------------- // // Written by Conrad Sanderson - http://conradsanderson.id.au // Written by Ryan Curtin // Backport unary minus operator for sparse matrices to Armadillo 4.000 and // older. #if (ARMA_VERSION_MAJOR < 4) || \ (ARMA_VERSION_MAJOR == 4 && ARMA_VERSION_MINOR <= 0) template inline typename enable_if2 < is_arma_sparse_type::value && is_signed::value, SpOp >::result operator- (const T1& X) { arma_extra_debug_sigprint(); typedef typename T1::elem_type eT; return SpOp(X, eT(-1)); } #endif mlpack-2.2.5/src/mlpack/core/boost_backport/000077500000000000000000000000001315013601400207625ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/boost_backport/CMakeLists.txt000066400000000000000000000010061315013601400235170ustar00rootroot00000000000000# Define the files that we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES unordered_collections_load_imp.hpp unordered_collections_save_imp.hpp unordered_map.hpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # parent scope). 
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/boost_backport/LICENSE.txt000066400000000000000000000024721315013601400226120ustar00rootroot00000000000000Boost Software License - Version 1.0 - August 17th, 2003 Permission is hereby granted, free of charge, to any person or organization obtaining a copy of the software and accompanying documentation covered by this license (the "Software") to use, reproduce, display, distribute, execute, and transmit the Software, and to prepare derivative works of the Software, and to permit third-parties to whom the Software is furnished to do so, all subject to the following: The copyright notices in the Software and this entire statement, including the above license grant, this restriction and the following disclaimer, must be included in all copies of the Software, in whole or in part, and all derivative works of the Software, unless such copies or derivative works are solely in the form of machine-executable object code generated by a source language processor. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. mlpack-2.2.5/src/mlpack/core/boost_backport/README.md000066400000000000000000000014211315013601400222370ustar00rootroot00000000000000The files in this directory are taken from Boost 1.56.0 and Boost 1.61.0 in order to backport: * Serialization support for unordered_map (added in Boost 1.56). * Trigamma and polygamma function calculation for the gamma_distribution.hpp file (added in Boost 1.58.0, files taken from Boost 1.61.0) These files are licensed under the Boost Software License, available in LICENSE.txt in this directory. If you want a copy of mlpack without a dependence on the Boost Software License, then you will need to * remove this entire directory * remove the line "boost_backport" from src/mlpack/core/CMakeLists.txt * change the line "find_package(Boost x.yy" in the root CMakeLists.txt so that x is 1 and yy is at least 58. (That is, make mlpack require Boost 1.58 or newer). mlpack-2.2.5/src/mlpack/core/boost_backport/bernoulli.hpp000066400000000000000000000151511315013601400234710ustar00rootroot00000000000000 /////////////////////////////////////////////////////////////////////////////// // Copyright 2013 Nikhar Agrawal // Copyright 2013 Christopher Kormanyos // Copyright 2013 John Maddock // Copyright 2013 Paul Bristow // Distributed under the Boost // Software License, Version 1.0. 
(See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #ifndef _BOOST_BERNOULLI_B2N_2013_05_30_HPP_ #define _BOOST_BERNOULLI_B2N_2013_05_30_HPP_ #include "math_fwd.hpp" // Forward declarations from math_fwd namespace boost { namespace math{ template T unchecked_bernoulli_b2n(const std::size_t n); template T bernoulli_b2n(const int i, const Policy &pol); template T bernoulli_b2n(const int i); template OutputIterator bernoulli_b2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it, const Policy& pol); template OutputIterator bernoulli_b2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it); template T tangent_t2n(const int i, const Policy &pol); template T tangent_t2n(const int i); template OutputIterator tangent_t2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it, const Policy& pol); template OutputIterator tangent_t2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it); }} #include "detail/unchecked_bernoulli.hpp" #include "detail/bernoulli_details.hpp" namespace boost { namespace math { namespace detail { template OutputIterator bernoulli_number_imp(OutputIterator out, std::size_t start, std::size_t n, const Policy& pol, const mpl::int_& tag) { for(std::size_t i = start; (i <= max_bernoulli_b2n::value) && (i < start + n); ++i) { *out = unchecked_bernoulli_imp(i, tag); ++out; } for(std::size_t i = (std::max)(static_cast(max_bernoulli_b2n::value + 1), start); i < start + n; ++i) { // We must overflow: *out = (i & 1 ? 1 : -1) * policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(n)", 0, T(i), pol); ++out; } return out; } template OutputIterator bernoulli_number_imp(OutputIterator out, std::size_t start, std::size_t n, const Policy& pol, const mpl::int_<0>& tag) { for(std::size_t i = start; (i <= max_bernoulli_b2n::value) && (i < start + n); ++i) { *out = unchecked_bernoulli_imp(i, tag); ++out; } // // Short circuit return so we don't grab the mutex below unless we have to: // if(start + n <= max_bernoulli_b2n::value) return out; return get_bernoulli_numbers_cache().copy_bernoulli_numbers(out, start, n, pol); } } // namespace detail template inline T bernoulli_b2n(const int i, const Policy &pol) { typedef mpl::int_::value> tag_type; if(i < 0) return policies::raise_domain_error("boost::math::bernoulli_b2n<%1%>", "Index should be >= 0 but got %1%", T(i), pol); T result = static_cast(0); // The = 0 is just to silence compiler warnings :-( boost::math::detail::bernoulli_number_imp(&result, static_cast(i), 1u, pol, tag_type()); return result; } template inline T bernoulli_b2n(const int i) { return boost::math::bernoulli_b2n(i, policies::policy<>()); } template inline OutputIterator bernoulli_b2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it, const Policy& pol) { typedef mpl::int_::value> tag_type; if(start_index < 0) { *out_it = policies::raise_domain_error("boost::math::bernoulli_b2n<%1%>", "Index should be >= 0 but got %1%", T(start_index), pol); return ++out_it; } return boost::math::detail::bernoulli_number_imp(out_it, start_index, number_of_bernoullis_b2n, pol, tag_type()); } template inline OutputIterator bernoulli_b2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it) { return boost::math::bernoulli_b2n(start_index, number_of_bernoullis_b2n, out_it, policies::policy<>()); } template inline T tangent_t2n(const int i, const 
Policy &pol) { if(i < 0) return policies::raise_domain_error("boost::math::tangent_t2n<%1%>", "Index should be >= 0 but got %1%", T(i), pol); T result; boost::math::detail::get_bernoulli_numbers_cache().copy_tangent_numbers(&result, i, 1, pol); return result; } template inline T tangent_t2n(const int i) { return boost::math::tangent_t2n(i, policies::policy<>()); } template inline OutputIterator tangent_t2n(const int start_index, const unsigned number_of_tangent_t2n, OutputIterator out_it, const Policy& pol) { if(start_index < 0) { *out_it = policies::raise_domain_error("boost::math::tangent_t2n<%1%>", "Index should be >= 0 but got %1%", T(start_index), pol); return ++out_it; } return boost::math::detail::get_bernoulli_numbers_cache().copy_tangent_numbers(out_it, start_index, number_of_tangent_t2n, pol); } template inline OutputIterator tangent_t2n(const int start_index, const unsigned number_of_tangent_t2n, OutputIterator out_it) { return boost::math::tangent_t2n(start_index, number_of_tangent_t2n, out_it, policies::policy<>()); } } } // namespace boost::math #endif // _BOOST_BERNOULLI_B2N_2013_05_30_HPP_ mlpack-2.2.5/src/mlpack/core/boost_backport/boost_backport.hpp000066400000000000000000000024731315013601400245140ustar00rootroot00000000000000/** * @file boost_backport.hpp * @author Yannis Mentekidis * * Centralized control of what boost files to include. We have backported the * following boost functionality: * * * unordered_set serialization support (added in boost 1.56.0) * * trigamma and polygamma function evaluation (added in boost 1.58.0) * * If the detected boost version is greater than 1.58.0, we include the normal * serialization, polygamma and trigamma functions (not the backported ones). * For versions 1.56, 1.57 we include the normal serialization but the * backported polygamma and trigamma functions. * For all older versions we include the backported headers. */ #ifndef MLPACK_CORE_BOOST_BACKPORT_HPP #define MLPACK_CORE_BOOST_BACKPORT_HPP #include #if BOOST_VERSION < 105600 // Backported unordered_map. #include "mlpack/core/boost_backport/unordered_map.hpp" #else // Boost's version #include #endif #if BOOST_VERSION < 105800 // Backported trigamma and polygamma. #include "mlpack/core/boost_backport/trigamma.hpp" #include "mlpack/core/boost_backport/polygamma.hpp" #else // Boost's version. #include #include #endif #endif // MLPACK_CORE_BOOST_BACKPORT_HPP mlpack-2.2.5/src/mlpack/core/boost_backport/detail/000077500000000000000000000000001315013601400222245ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/boost_backport/detail/bernoulli_details.hpp000066400000000000000000000606371315013601400264510ustar00rootroot00000000000000/////////////////////////////////////////////////////////////////////////////// // Copyright 2013 John Maddock // Distributed under the Boost // Software License, Version 1.0. 
(See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #ifndef BOOST_MATH_BERNOULLI_DETAIL_HPP_ #define BOOST_MATH_BERNOULLI_DETAIL_HPP_ #include "../math_fwd.hpp" #include #include #include #include #ifdef BOOST_HAS_THREADS #ifndef BOOST_NO_CXX11_HDR_ATOMIC # include # define BOOST_MATH_ATOMIC_NS std #if ATOMIC_INT_LOCK_FREE == 2 typedef std::atomic atomic_counter_type; typedef int atomic_integer_type; #elif ATOMIC_SHORT_LOCK_FREE == 2 typedef std::atomic atomic_counter_type; typedef short atomic_integer_type; #elif ATOMIC_LONG_LOCK_FREE == 2 typedef std::atomic atomic_counter_type; typedef long atomic_integer_type; #elif ATOMIC_LLONG_LOCK_FREE == 2 typedef std::atomic atomic_counter_type; typedef long long atomic_integer_type; #else # define BOOST_MATH_NO_ATOMIC_INT #endif #else // BOOST_NO_CXX11_HDR_ATOMIC // // We need Boost.Atomic, but on any platform that supports auto-linking we do // not need to link against a separate library: // #define BOOST_ATOMIC_NO_LIB #include # define BOOST_MATH_ATOMIC_NS boost namespace boost{ namespace math{ namespace detail{ // // We need a type to use as an atomic counter: // #if BOOST_ATOMIC_INT_LOCK_FREE == 2 typedef boost::atomic atomic_counter_type; typedef int atomic_integer_type; #elif BOOST_ATOMIC_SHORT_LOCK_FREE == 2 typedef boost::atomic atomic_counter_type; typedef short atomic_integer_type; #elif BOOST_ATOMIC_LONG_LOCK_FREE == 2 typedef boost::atomic atomic_counter_type; typedef long atomic_integer_type; #elif BOOST_ATOMIC_LLONG_LOCK_FREE == 2 typedef boost::atomic atomic_counter_type; typedef long long atomic_integer_type; #else # define BOOST_MATH_NO_ATOMIC_INT #endif }}} // namespaces #endif // BOOST_NO_CXX11_HDR_ATOMIC #endif // BOOST_HAS_THREADS namespace boost{ namespace math{ namespace detail{ // // Asymptotic expansion for B2n due to // Luschny LogB3 formula (http://www.luschny.de/math/primes/bernincl.html) // template T b2n_asymptotic(int n) { BOOST_MATH_STD_USING const T nx = static_cast(n); const T nx2(nx * nx); const T approximate_log_of_bernoulli_bn = ((boost::math::constants::half() + nx) * log(nx)) + ((boost::math::constants::half() - nx) * log(boost::math::constants::pi())) + (((T(3) / 2) - nx) * boost::math::constants::ln_two()) + ((nx * (T(2) - (nx2 * 7) * (1 + ((nx2 * 30) * ((nx2 * 12) - 1))))) / (((nx2 * nx2) * nx2) * 2520)); return ((n / 2) & 1 ? 1 : -1) * (approximate_log_of_bernoulli_bn > tools::log_max_value() ? policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, nx, Policy()) : static_cast(exp(approximate_log_of_bernoulli_bn))); } template T t2n_asymptotic(int n) { BOOST_MATH_STD_USING // Just get B2n and convert to a Tangent number: T t2n = fabs(b2n_asymptotic(2 * n)) / (2 * n); T p2 = ldexp(T(1), n); if(tools::max_value() / p2 < t2n) return policies::raise_overflow_error("boost::math::tangent_t2n<%1%>(std::size_t)", 0, T(n), Policy()); t2n *= p2; p2 -= 1; if(tools::max_value() / p2 < t2n) return policies::raise_overflow_error("boost::math::tangent_t2n<%1%>(std::size_t)", 0, Policy()); t2n *= p2; return t2n; } // // We need to know the approximate value of /n/ which will // cause bernoulli_b2n(n) to return infinity - this allows // us to elude a great deal of runtime checking for values below // n, and only perform the full overflow checks when we know that we're // getting close to the point where our calculations will overflow. 
// We use Luschny's LogB3 formula (http://www.luschny.de/math/primes/bernincl.html) // to find the limit, and since we're dealing with the log of the Bernoulli numbers // we need only perform the calculation at double precision and not with T // (which may be a multiprecision type). The limit returned is within 1 of the true // limit for all the types tested. Note that although the code below is basically // the same as b2n_asymptotic above, it has been recast as a continuous real-valued // function as this makes the root finding go smoother/faster. It also omits the // sign of the Bernoulli number. // struct max_bernoulli_root_functor { max_bernoulli_root_functor(long long t) : target(static_cast(t)) {} double operator()(double n) { BOOST_MATH_STD_USING // Luschny LogB3(n) formula. const double nx2(n * n); const double approximate_log_of_bernoulli_bn = ((boost::math::constants::half() + n) * log(n)) + ((boost::math::constants::half() - n) * log(boost::math::constants::pi())) + (((double(3) / 2) - n) * boost::math::constants::ln_two()) + ((n * (2 - (nx2 * 7) * (1 + ((nx2 * 30) * ((nx2 * 12) - 1))))) / (((nx2 * nx2) * nx2) * 2520)); return approximate_log_of_bernoulli_bn - target; } private: double target; }; template inline std::size_t find_bernoulli_overflow_limit(const mpl::false_&) { long long t = lltrunc(boost::math::tools::log_max_value()); max_bernoulli_root_functor fun(t); boost::math::tools::equal_floor tol; boost::uintmax_t max_iter = boost::math::policies::get_max_root_iterations(); return static_cast(boost::math::tools::toms748_solve(fun, sqrt(double(t)), double(t), tol, max_iter).first) / 2; } template inline std::size_t find_bernoulli_overflow_limit(const mpl::true_&) { return max_bernoulli_index::value>::value; } template std::size_t b2n_overflow_limit() { // This routine is called at program startup if it's called at all: // that guarantees safe initialization of the static variable. typedef mpl::bool_<(bernoulli_imp_variant::value >= 1) && (bernoulli_imp_variant::value <= 3)> tag_type; static const std::size_t lim = find_bernoulli_overflow_limit(tag_type()); return lim; } // // The tangent numbers grow larger much more rapidly than the Bernoulli numbers do.... 
// so to compute the Bernoulli numbers from the tangent numbers, we need to avoid spurious // overflow in the calculation, we can do this by scaling all the tangent number by some scale factor: // template inline typename enable_if_c::is_specialized && (std::numeric_limits::radix == 2), T>::type tangent_scale_factor() { BOOST_MATH_STD_USING return ldexp(T(1), std::numeric_limits::min_exponent + 5); } template inline typename disable_if_c::is_specialized && (std::numeric_limits::radix == 2), T>::type tangent_scale_factor() { return tools::min_value() * 16; } // // Initializer: ensure all our constants are initialized prior to the first call of main: // template struct bernoulli_initializer { struct init { init() { // // We call twice, once to initialize our static table, and once to // initialize our dymanic table: // boost::math::bernoulli_b2n(2, Policy()); #ifndef BOOST_NO_EXCEPTIONS try{ #endif boost::math::bernoulli_b2n(max_bernoulli_b2n::value + 1, Policy()); #ifndef BOOST_NO_EXCEPTIONS } catch(const std::overflow_error&){} #endif boost::math::tangent_t2n(2, Policy()); } void force_instantiate()const{} }; static const init initializer; static void force_instantiate() { initializer.force_instantiate(); } }; template const typename bernoulli_initializer::init bernoulli_initializer::initializer; // // We need something to act as a cache for our calculated Bernoulli numbers. In order to // ensure both fast access and thread safety, we need a stable table which may be extended // in size, but which never reallocates: that way values already calculated may be accessed // concurrently with another thread extending the table with new values. // // Very very simple vector class that will never allocate more than once, we could use // boost::container::static_vector here, but that allocates on the stack, which may well // cause issues for the amount of memory we want in the extreme case... 
// template struct fixed_vector : private std::allocator { typedef unsigned size_type; typedef T* iterator; typedef const T* const_iterator; fixed_vector() : m_used(0) { std::size_t overflow_limit = 5 + b2n_overflow_limit >(); m_capacity = static_cast((std::min)(overflow_limit, static_cast(100000u))); m_data = this->allocate(m_capacity); } ~fixed_vector() { for(unsigned i = 0; i < m_used; ++i) this->destroy(&m_data[i]); this->deallocate(m_data, m_capacity); } T& operator[](unsigned n) { BOOST_ASSERT(n < m_used); return m_data[n]; } const T& operator[](unsigned n)const { BOOST_ASSERT(n < m_used); return m_data[n]; } unsigned size()const { return m_used; } unsigned size() { return m_used; } void resize(unsigned n, const T& val) { if(n > m_capacity) { BOOST_THROW_EXCEPTION(std::runtime_error("Exhausted storage for Bernoulli numbers.")); } for(unsigned i = m_used; i < n; ++i) new (m_data + i) T(val); m_used = n; } void resize(unsigned n) { resize(n, T()); } T* begin() { return m_data; } T* end() { return m_data + m_used; } T* begin()const { return m_data; } T* end()const { return m_data + m_used; } unsigned capacity()const { return m_capacity; } private: T* m_data; unsigned m_used, m_capacity; }; template class bernoulli_numbers_cache { public: bernoulli_numbers_cache() : m_overflow_limit((std::numeric_limits::max)()) #if defined(BOOST_HAS_THREADS) && !defined(BOOST_MATH_NO_ATOMIC_INT) , m_counter(0) #endif {} typedef fixed_vector container_type; void tangent(std::size_t m) { static const std::size_t min_overflow_index = b2n_overflow_limit() - 1; tn.resize(static_cast(m), T(0U)); BOOST_MATH_INSTRUMENT_VARIABLE(min_overflow_index); std::size_t prev_size = m_intermediates.size(); m_intermediates.resize(m, T(0U)); if(prev_size == 0) { m_intermediates[1] = tangent_scale_factor() /*T(1U)*/; tn[0U] = T(0U); tn[1U] = tangent_scale_factor()/* T(1U)*/; BOOST_MATH_INSTRUMENT_VARIABLE(tn[0]); BOOST_MATH_INSTRUMENT_VARIABLE(tn[1]); } for(std::size_t i = std::max(2, prev_size); i < m; i++) { bool overflow_check = false; if(i >= min_overflow_index && (boost::math::tools::max_value() / (i-1) < m_intermediates[1]) ) { std::fill(tn.begin() + i, tn.end(), boost::math::tools::max_value()); break; } m_intermediates[1] = m_intermediates[1] * (i-1); for(std::size_t j = 2; j <= i; j++) { overflow_check = (i >= min_overflow_index) && ( (boost::math::tools::max_value() / (i - j) < m_intermediates[j]) || (boost::math::tools::max_value() / (i - j + 2) < m_intermediates[j-1]) || (boost::math::tools::max_value() - m_intermediates[j] * (i - j) < m_intermediates[j-1] * (i - j + 2)) || ((boost::math::isinf)(m_intermediates[j])) ); if(overflow_check) { std::fill(tn.begin() + i, tn.end(), boost::math::tools::max_value()); break; } m_intermediates[j] = m_intermediates[j] * (i - j) + m_intermediates[j-1] * (i - j + 2); } if(overflow_check) break; // already filled the tn... 
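// The recurrence for index i is complete: m_intermediates[i] now holds
// the i'th tangent number (scaled by tangent_scale_factor()), so cache it.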
tn[static_cast(i)] = m_intermediates[i]; BOOST_MATH_INSTRUMENT_VARIABLE(i); BOOST_MATH_INSTRUMENT_VARIABLE(tn[static_cast(i)]); } } void tangent_numbers_series(const std::size_t m) { BOOST_MATH_STD_USING static const std::size_t min_overflow_index = b2n_overflow_limit() - 1; typename container_type::size_type old_size = bn.size(); tangent(m); bn.resize(static_cast(m)); if(!old_size) { bn[0] = 1; old_size = 1; } T power_two(ldexp(T(1), static_cast(2 * old_size))); for(std::size_t i = old_size; i < m; i++) { T b(static_cast(i * 2)); // // Not only do we need to take care to avoid spurious over/under flow in // the calculation, but we also need to avoid overflow altogether in case // we're calculating with a type where "bad things" happen in that case: // b = b / (power_two * tangent_scale_factor()); b /= (power_two - 1); bool overflow_check = (i >= min_overflow_index) && (tools::max_value() / tn[static_cast(i)] < b); if(overflow_check) { m_overflow_limit = i; while(i < m) { b = std::numeric_limits::has_infinity ? std::numeric_limits::infinity() : tools::max_value(); bn[static_cast(i)] = ((i % 2U) ? b : T(-b)); ++i; } break; } else { b *= tn[static_cast(i)]; } power_two = ldexp(power_two, 2); const bool b_neg = i % 2 == 0; bn[static_cast(i)] = ((!b_neg) ? b : T(-b)); } } template OutputIterator copy_bernoulli_numbers(OutputIterator out, std::size_t start, std::size_t n, const Policy& pol) { // // There are basically 3 thread safety options: // // 1) There are no threads (BOOST_HAS_THREADS is not defined). // 2) There are threads, but we do not have a true atomic integer type, // in this case we just use a mutex to guard against race conditions. // 3) There are threads, and we have an atomic integer: in this case we can // use the double-checked locking pattern to avoid thread synchronisation // when accessing values already in the cache. // // First off handle the common case for overflow and/or asymptotic expansion: // if(start + n > bn.capacity()) { if(start < bn.capacity()) { out = copy_bernoulli_numbers(out, start, bn.capacity() - start, pol); n -= bn.capacity() - start; start = static_cast(bn.capacity()); } if(start < b2n_overflow_limit() + 2u) { for(; n; ++start, --n) { *out = b2n_asymptotic(static_cast(start * 2U)); ++out; } } for(; n; ++start, --n) { *out = policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(start), pol); ++out; } return out; } #if !defined(BOOST_HAS_THREADS) // // Single threaded code, very simple: // if(start + n >= bn.size()) { std::size_t new_size = (std::min)((std::max)((std::max)(std::size_t(start + n), std::size_t(bn.size() + 20)), std::size_t(50)), std::size_t(bn.capacity())); tangent_numbers_series(new_size); } for(std::size_t i = (std::max)(std::size_t(max_bernoulli_b2n::value + 1), start); i < start + n; ++i) { *out = (i >= m_overflow_limit) ? policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(i), pol) : bn[i]; ++out; } #elif defined(BOOST_MATH_NO_ATOMIC_INT) // // We need to grab a mutex every time we get here, for both readers and writers: // boost::detail::lightweight_mutex::scoped_lock l(m_mutex); if(start + n >= bn.size()) { std::size_t new_size = (std::min)((std::max)((std::max)(std::size_t(start + n), std::size_t(bn.size() + 20)), std::size_t(50)), std::size_t(bn.capacity())); tangent_numbers_series(new_size); } for(std::size_t i = (std::max)(std::size_t(max_bernoulli_b2n::value + 1), start); i < start + n; ++i) { *out = (i >= m_overflow_limit) ? 
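// Entries at or beyond m_overflow_limit are not representable in T, so
// report overflow through the policy instead of reading the cache.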
policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(i), pol) : bn[i]; ++out; } #else // // Double-checked locking pattern, lets us access cached already cached values // without locking: // // Get the counter and see if we need to calculate more constants: // if(static_cast(m_counter.load(BOOST_MATH_ATOMIC_NS::memory_order_consume)) < start + n) { boost::detail::lightweight_mutex::scoped_lock l(m_mutex); if(static_cast(m_counter.load(BOOST_MATH_ATOMIC_NS::memory_order_consume)) < start + n) { if(start + n >= bn.size()) { std::size_t new_size = (std::min)((std::max)((std::max)(std::size_t(start + n), std::size_t(bn.size() + 20)), std::size_t(50)), std::size_t(bn.capacity())); tangent_numbers_series(new_size); } m_counter.store(static_cast(bn.size()), BOOST_MATH_ATOMIC_NS::memory_order_release); } } for(std::size_t i = (std::max)(static_cast(max_bernoulli_b2n::value + 1), start); i < start + n; ++i) { *out = (i >= m_overflow_limit) ? policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(i), pol) : bn[static_cast(i)]; ++out; } #endif return out; } template OutputIterator copy_tangent_numbers(OutputIterator out, std::size_t start, std::size_t n, const Policy& pol) { // // There are basically 3 thread safety options: // // 1) There are no threads (BOOST_HAS_THREADS is not defined). // 2) There are threads, but we do not have a true atomic integer type, // in this case we just use a mutex to guard against race conditions. // 3) There are threads, and we have an atomic integer: in this case we can // use the double-checked locking pattern to avoid thread synchronisation // when accessing values already in the cache. // // // First off handle the common case for overflow and/or asymptotic expansion: // if(start + n > bn.capacity()) { if(start < bn.capacity()) { out = copy_tangent_numbers(out, start, bn.capacity() - start, pol); n -= bn.capacity() - start; start = static_cast(bn.capacity()); } if(start < b2n_overflow_limit() + 2u) { for(; n; ++start, --n) { *out = t2n_asymptotic(static_cast(start)); ++out; } } for(; n; ++start, --n) { *out = policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(start), pol); ++out; } return out; } #if !defined(BOOST_HAS_THREADS) // // Single threaded code, very simple: // if(start + n >= bn.size()) { std::size_t new_size = (std::min)((std::max)((std::max)(start + n, std::size_t(bn.size() + 20)), std::size_t(50)), std::size_t(bn.capacity())); tangent_numbers_series(new_size); } for(std::size_t i = start; i < start + n; ++i) { if(i >= m_overflow_limit) *out = policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(i), pol); else { if(tools::max_value() * tangent_scale_factor() < tn[static_cast(i)]) *out = policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(i), pol); else *out = tn[static_cast(i)] / tangent_scale_factor(); } ++out; } #elif defined(BOOST_MATH_NO_ATOMIC_INT) // // We need to grab a mutex every time we get here, for both readers and writers: // boost::detail::lightweight_mutex::scoped_lock l(m_mutex); if(start + n >= bn.size()) { std::size_t new_size = (std::min)((std::max)((std::max)(start + n, std::size_t(bn.size() + 20)), std::size_t(50)), std::size_t(bn.capacity())); tangent_numbers_series(new_size); } for(std::size_t i = start; i < start + n; ++i) { if(i >= m_overflow_limit) *out = policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(i), pol); else { 
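// Undo the scaling applied when the tangent numbers were generated:
// first check that descaling cannot overflow, then divide the factor out.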
if(tools::max_value() * tangent_scale_factor() < tn[static_cast(i)]) *out = policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(i), pol); else *out = tn[static_cast(i)] / tangent_scale_factor(); } ++out; } #else // // Double-checked locking pattern, lets us access cached already cached values // without locking: // // Get the counter and see if we need to calculate more constants: // if(static_cast(m_counter.load(BOOST_MATH_ATOMIC_NS::memory_order_consume)) < start + n) { boost::detail::lightweight_mutex::scoped_lock l(m_mutex); if(static_cast(m_counter.load(BOOST_MATH_ATOMIC_NS::memory_order_consume)) < start + n) { if(start + n >= bn.size()) { std::size_t new_size = (std::min)((std::max)((std::max)(start + n, std::size_t(bn.size() + 20)), std::size_t(50)), std::size_t(bn.capacity())); tangent_numbers_series(new_size); } m_counter.store(static_cast(bn.size()), BOOST_MATH_ATOMIC_NS::memory_order_release); } } for(std::size_t i = start; i < start + n; ++i) { if(i >= m_overflow_limit) *out = policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(i), pol); else { if(tools::max_value() * tangent_scale_factor() < tn[static_cast(i)]) *out = policies::raise_overflow_error("boost::math::bernoulli_b2n<%1%>(std::size_t)", 0, T(i), pol); else *out = tn[static_cast(i)] / tangent_scale_factor(); } ++out; } #endif return out; } private: // // The caches for Bernoulli and tangent numbers, once allocated, // these must NEVER EVER reallocate as it breaks our thread // safety guarentees: // fixed_vector bn, tn; std::vector m_intermediates; // The value at which we know overflow has already occurred for the Bn: std::size_t m_overflow_limit; #if !defined(BOOST_HAS_THREADS) #elif defined(BOOST_MATH_NO_ATOMIC_INT) boost::detail::lightweight_mutex m_mutex; #else boost::detail::lightweight_mutex m_mutex; atomic_counter_type m_counter; #endif }; template inline bernoulli_numbers_cache& get_bernoulli_numbers_cache() { // // Force this function to be called at program startup so all the static variables // get initailzed then (thread safety). // bernoulli_initializer::force_instantiate(); static bernoulli_numbers_cache data; return data; } }}} #endif // BOOST_MATH_BERNOULLI_DETAIL_HPP mlpack-2.2.5/src/mlpack/core/boost_backport/detail/polygamma.hpp000066400000000000000000000542341315013601400247330ustar00rootroot00000000000000 /////////////////////////////////////////////////////////////////////////////// // Copyright 2013 Nikhar Agrawal // Copyright 2013 Christopher Kormanyos // Copyright 2014 John Maddock // Copyright 2013 Paul Bristow // Distributed under the Boost // Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #ifndef _BOOST_POLYGAMMA_DETAIL_2013_07_30_HPP_ #define _BOOST_POLYGAMMA_DETAIL_2013_07_30_HPP_ #include "../policy.hpp" #include "../bernoulli.hpp" #include #include #include #include #include #include #include #include #include #include #include #include #include // Forward declarations. namespace boost { namespace math { // Forward declaration of boost::math::polygamma. 
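// A hedged usage sketch (the values quoted are standard identities):
//   using boost::math::polygamma;
//   double t = polygamma(1, 0.5); // trigamma(1/2) == pi^2/2, ~4.9348
//   double d = polygamma(0, 1.0); // digamma(1) == -euler_gamma, ~-0.5772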
template inline typename tools::promote_args::type polygamma(const int n, T x, const Policy& pol); template inline typename tools::promote_args::type polygamma(const int n, T x); namespace policies { template inline int digits_base10(BOOST_MATH_EXPLICIT_TEMPLATE_TYPE(T)) { return boost::math::policies::digits() * 301 / 1000L; } } }} namespace boost { namespace math { namespace detail{ template T polygamma_atinfinityplus(const int n, const T& x, const Policy& pol, const char* function) // for large values of x such as for x> 400 { // See http://functions.wolfram.com/GammaBetaErf/PolyGamma2/06/02/0001/ BOOST_MATH_STD_USING // // sum == current value of accumulated sum. // term == value of current term to be added to sum. // part_term == value of current term excluding the Bernoulli number part // if(n + x == x) { // x is crazy large, just concentrate on the first part of the expression and use logs: if(n == 1) return 1 / x; T nlx = n * log(x); if((nlx < tools::log_max_value()) && (n < (int)max_factorial::value)) return ((n & 1) ? 1 : -1) * boost::math::factorial(n - 1) * pow(x, -n); else return ((n & 1) ? 1 : -1) * exp(boost::math::lgamma(T(n), pol) - n * log(x)); } T term, sum, part_term; T x_squared = x * x; // // Start by setting part_term to: // // (n-1)! / x^(n+1) // // which is common to both the first term of the series (with k = 1) // and to the leading part. // We can then get to the leading term by: // // part_term * (n + 2 * x) / 2 // // and to the first term in the series // (excluding the Bernoulli number) by: // // part_term n * (n + 1) / (2x) // // If either the factorial would overflow, // or the power term underflows, this just gets set to 0 and then we // know that we have to use logs for the initial terms: // part_term = ((n > (int)boost::math::max_factorial::value) && (T(n) * n > tools::log_max_value())) ? 
T(0) : static_cast(boost::math::factorial(n - 1, pol) * pow(x, -n - 1)); if(part_term == 0) { // Either n is very large, or the power term underflows, // set the initial values of part_term, term and sum via logs: part_term = static_cast(boost::math::lgamma(n, pol) - (n + 1) * log(x)); sum = exp(part_term + log(n + 2 * x) - boost::math::constants::ln_two()); part_term += log(T(n) * (n + 1)) - boost::math::constants::ln_two() - log(x); part_term = exp(part_term); } else { sum = part_term * (n + 2 * x) / 2; part_term *= (T(n) * (n + 1)) / 2; part_term /= x; } // // If the leading term is 0, so is the result: // if(sum == 0) return sum; for(unsigned k = 1;;) { term = part_term * boost::math::bernoulli_b2n(k, pol); sum += term; // // Normal termination condition: // if(fabs(term / sum) < tools::epsilon()) break; // // Increment our counter, and move part_term on to the next value: // ++k; part_term *= T(n + 2 * k - 2) * (n - 1 + 2 * k); part_term /= (2 * k - 1) * 2 * k; part_term /= x_squared; // // Emergency get out termination condition: // if(k > policies::get_max_series_iterations()) { return policies::raise_evaluation_error(function, "Series did not converge, closest value was %1%", sum, pol); } } if((n - 1) & 1) sum = -sum; return sum; } template T polygamma_attransitionplus(const int n, const T& x, const Policy& pol, const char* function) { // See: http://functions.wolfram.com/GammaBetaErf/PolyGamma2/16/01/01/0017/ // Use N = (0.4 * digits) + (4 * n) for target value for x: BOOST_MATH_STD_USING const int d4d = static_cast(0.4F * policies::digits_base10()); const int N = d4d + (4 * n); const int m = n; const int iter = N - itrunc(x); if(iter > (int)policies::get_max_series_iterations()) return policies::raise_evaluation_error(function, ("Exceeded maximum series evaluations evaluating at n = " + boost::lexical_cast(n) + " and x = %1%").c_str(), x, pol); const int minus_m_minus_one = -m - 1; T z(x); T sum0(0); T z_plus_k_pow_minus_m_minus_one(0); // Forward recursion to larger x, need to check for overflow first though: if(log(z + iter) * minus_m_minus_one > -tools::log_max_value()) { for(int k = 1; k <= iter; ++k) { z_plus_k_pow_minus_m_minus_one = pow(z, minus_m_minus_one); sum0 += z_plus_k_pow_minus_m_minus_one; z += 1; } sum0 *= boost::math::factorial(n); } else { for(int k = 1; k <= iter; ++k) { T log_term = log(z) * minus_m_minus_one + boost::math::lgamma(T(n + 1), pol); sum0 += exp(log_term); z += 1; } } if((n - 1) & 1) sum0 = -sum0; return sum0 + polygamma_atinfinityplus(n, z, pol, function); } template T polygamma_nearzero(int n, T x, const Policy& pol, const char* function) { BOOST_MATH_STD_USING // // If we take this expansion for polygamma: http://functions.wolfram.com/06.15.06.0003.02 // and substitute in this expression for polygamma(n, 1): http://functions.wolfram.com/06.15.03.0009.01 // we get an alternating series for polygamma when x is small in terms of zeta functions of // integer arguments (which are easy to evaluate, at least when the integer is even). // // In order to avoid spurious overflow, save the n! term for later, and rescale at the end: // T scale = boost::math::factorial(n, pol); // // "factorial_part" contains everything except the zeta function // evaluations in each term: // T factorial_part = 1; // // "prefix" is what we'll be adding the accumulated sum to, it will // be n! / z^(n+1), but since we're scaling by n! 
template <class T, class Policy>
T polygamma_nearzero(int n, T x, const Policy& pol, const char* function)
{
   BOOST_MATH_STD_USING
   //
   // If we take this expansion for polygamma: http://functions.wolfram.com/06.15.06.0003.02
   // and substitute in this expression for polygamma(n, 1): http://functions.wolfram.com/06.15.03.0009.01
   // we get an alternating series for polygamma when x is small in terms of zeta functions of
   // integer arguments (which are easy to evaluate, at least when the integer is even).
   //
   // In order to avoid spurious overflow, save the n! term for later, and rescale at the end:
   //
   T scale = boost::math::factorial<T>(n, pol);
   //
   // "factorial_part" contains everything except the zeta function
   // evaluations in each term:
   //
   T factorial_part = 1;
   //
   // "prefix" is what we'll be adding the accumulated sum to, it will
   // be n! / z^(n+1), but since we're scaling by n! it's just
   // 1 / z^(n+1) for now:
   //
   T prefix = pow(x, n + 1);
   if(prefix == 0)
      return boost::math::policies::raise_overflow_error<T>(function, 0, pol);
   prefix = 1 / prefix;
   //
   // First term in the series is necessarily < zeta(2) < 2, so
   // ignore the sum if it will have no effect on the result anyway:
   //
   if(prefix > 2 / policies::get_epsilon<T, Policy>())
      return ((n & 1) ? 1 : -1) *
         (tools::max_value<T>() / prefix < scale ? policies::raise_overflow_error<T>(function, 0, pol) : prefix * scale);
   //
   // As this is an alternating series we could accelerate it using
   //   "Convergence Acceleration of Alternating Series",
   //   Henri Cohen, Fernando Rodriguez Villegas, and Don Zagier, Experimental Mathematics, 1999.
   // In practice however, it appears not to make any difference to the number of terms
   // required except in some edge cases which are filtered out anyway before we get here.
   //
   T sum = prefix;
   for(unsigned k = 0;;)
   {
      // Get the k'th term:
      T term = factorial_part * boost::math::zeta(T(k + n + 1), pol);
      sum += term;
      // Termination condition:
      if(fabs(term) < fabs(sum * boost::math::policies::get_epsilon<T, Policy>()))
         break;
      //
      // Move on k and factorial_part:
      //
      ++k;
      factorial_part *= (-x * (n + k)) / k;
      //
      // Last chance exit:
      //
      if(k > policies::get_max_series_iterations<Policy>())
         return policies::raise_evaluation_error(function, "Series did not converge, best value is %1%", sum, pol);
   }
   //
   // We need to multiply by the scale, at each stage checking for overflow:
   //
   if(boost::math::tools::max_value<T>() / scale < sum)
      return boost::math::policies::raise_overflow_error<T>(function, 0, pol);
   sum *= scale;

   return n & 1 ? sum : T(-sum);
}
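//
// Editor's note (not part of the original header): the loop above evaluates
//
//   polygamma(n, x) = (-1)^(n+1) * n! * [ x^-(n+1)
//                       + SUM{k>=0} binomial(n+k, k) * (-x)^k * zeta(n+k+1) ]
//
// A minimal double-precision sketch using C++17's std::riemann_zeta and a
// fixed term count instead of the scaled, policy-checked loop above; the
// "editors_" name is ours and convergence requires small |x|:
//
inline double editors_polygamma_small_x_sketch(int n, double x)
{
   double term = 1.0;                                   // binomial(n+k,k) * (-x)^k at k == 0
   double sum = std::pow(x, -(n + 1));
   for(int k = 0; k < 50; ++k)
   {
      sum += term * std::riemann_zeta(double(n + k + 1));
      term *= -x * (n + k + 1) / (k + 1);               // advance the ratio to k+1
   }
   sum *= std::tgamma(double(n + 1));                   // restore the n! scale
   return (n & 1) ? sum : -sum;                         // (-1)^(n+1)
}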
//
// Helper function which figures out which slot our coefficient is in
// given an angle multiplier for the cosine term of power:
//
template <class Table>
typename Table::value_type::reference dereference_table(Table& table, unsigned row, unsigned power)
{
   return table[row][power / 2];
}

template <class T, class Policy>
T poly_cot_pi(int n, T x, T xc, const Policy& pol, const char* function)
{
   BOOST_MATH_STD_USING
   // Return n'th derivative of cot(pi*x) at x, these are simply
   // tabulated for up to n = 9, beyond that it is possible to
   // calculate coefficients as follows:
   //
   // The general form of each derivative is:
   //
   // pi^n * SUM{k=0, n} C[k,n] * cos^k(pi * x) * csc^(n+1)(pi * x)
   //
   // With constant C[0,1] = -1 and all other C[k,n] = 0;
   // Then for each k < n+1:
   //
   // C[k-1, n+1]  -= k * C[k, n];
   // C[k+1, n+1]  += (k-n-1) * C[k, n];
   //
   // Note that there are many different ways of representing this derivative thanks to
   // the many trigonometric identities available.  In particular, the sum of powers of
   // cosines could be replaced by a sum of cosine multiple angles, and indeed if you
   // plug the derivative into Mathematica this is the form it will give.  The two
   // forms are related via the Chebyshev polynomials of the first kind and
   // T_n(cos(x)) = cos(n x).  The polynomial form has the great advantage that
   // all the cosine terms are zero at half integer arguments - right where this
   // function has its minimum - thus avoiding cancellation error in this region.
   //
   // And finally, since every other term in the polynomials is zero, we can save
   // space by only storing the non-zero terms.  This greatly complicates
   // subscripting the tables in the calculation, but halves the storage space
   // (and complexity for that matter).
   //
   T s = fabs(x) < fabs(xc) ? boost::math::sin_pi(x, pol) : boost::math::sin_pi(xc, pol);
   T c = boost::math::cos_pi(x, pol);
   switch(n)
   {
   case 1:
      return -constants::pi<T>() / (s * s);
   case 2:
   {
      return 2 * constants::pi<T>() * constants::pi<T>() * c / boost::math::pow<3>(s, pol);
   }
   case 3:
   {
      int P[] = { -2, -4 };
      return boost::math::pow<3>(constants::pi<T>(), pol) * tools::evaluate_even_polynomial(P, c) / boost::math::pow<4>(s, pol);
   }
   case 4:
   {
      int P[] = { 16, 8 };
      return boost::math::pow<4>(constants::pi<T>(), pol) * c * tools::evaluate_even_polynomial(P, c) / boost::math::pow<5>(s, pol);
   }
   case 5:
   {
      int P[] = { -16, -88, -16 };
      return boost::math::pow<5>(constants::pi<T>(), pol) * tools::evaluate_even_polynomial(P, c) / boost::math::pow<6>(s, pol);
   }
   case 6:
   {
      int P[] = { 272, 416, 32 };
      return boost::math::pow<6>(constants::pi<T>(), pol) * c * tools::evaluate_even_polynomial(P, c) / boost::math::pow<7>(s, pol);
   }
   case 7:
   {
      int P[] = { -272, -2880, -1824, -64 };
      return boost::math::pow<7>(constants::pi<T>(), pol) * tools::evaluate_even_polynomial(P, c) / boost::math::pow<8>(s, pol);
   }
   case 8:
   {
      int P[] = { 7936, 24576, 7680, 128 };
      return boost::math::pow<8>(constants::pi<T>(), pol) * c * tools::evaluate_even_polynomial(P, c) / boost::math::pow<9>(s, pol);
   }
   case 9:
   {
      int P[] = { -7936, -137216, -185856, -31616, -256 };
      return boost::math::pow<9>(constants::pi<T>(), pol) * tools::evaluate_even_polynomial(P, c) / boost::math::pow<10>(s, pol);
   }
   case 10:
   {
      int P[] = { 353792, 1841152, 1304832, 128512, 512 };
      return boost::math::pow<10>(constants::pi<T>(), pol) * c * tools::evaluate_even_polynomial(P, c) / boost::math::pow<11>(s, pol);
   }
   case 11:
   {
      int P[] = { -353792, -9061376, -21253376, -8728576, -518656, -1024 };
      return boost::math::pow<11>(constants::pi<T>(), pol) * tools::evaluate_even_polynomial(P, c) / boost::math::pow<12>(s, pol);
   }
   case 12:
   {
      int P[] = { 22368256, 175627264, 222398464, 56520704, 2084864, 2048 };
      return boost::math::pow<12>(constants::pi<T>(), pol) * c * tools::evaluate_even_polynomial(P, c) / boost::math::pow<13>(s, pol);
   }
#ifndef BOOST_NO_LONG_LONG
   case 13:
   {
      long long P[] = { -22368256LL, -795300864LL, -2868264960LL, -2174832640LL, -357888000LL, -8361984LL, -4096 };
      return boost::math::pow<13>(constants::pi<T>(), pol) * tools::evaluate_even_polynomial(P, c) / boost::math::pow<14>(s, pol);
   }
   case 14:
   {
      long long P[] = { 1903757312LL, 21016670208LL, 41731645440LL, 20261765120LL, 2230947840LL, 33497088LL, 8192 };
      return boost::math::pow<14>(constants::pi<T>(), pol) * c * tools::evaluate_even_polynomial(P, c) / boost::math::pow<15>(s, pol);
   }
   case 15:
   {
      long long P[] = { -1903757312LL, -89702612992LL, -460858269696LL, -559148810240LL, -182172651520LL, -13754155008LL, -134094848LL, -16384 };
      return boost::math::pow<15>(constants::pi<T>(), pol) * tools::evaluate_even_polynomial(P, c) / boost::math::pow<16>(s, pol);
   }
   case 16:
   {
      long long P[] = { 209865342976LL, 3099269660672LL, 8885192097792LL, 7048869314560LL, 1594922762240LL, 84134068224LL, 536608768LL, 32768 };
      return boost::math::pow<16>(constants::pi<T>(), pol) * c * tools::evaluate_even_polynomial(P, c) / boost::math::pow<17>(s, pol);
   }
   case 17:
   {
      long long P[] = { -209865342976LL, -12655654469632LL, -87815735738368LL, -155964390375424LL, -84842998005760LL, -13684856848384LL, -511780323328LL, -2146926592LL, -65536 };
      return boost::math::pow<17>(constants::pi<T>(), pol) * tools::evaluate_even_polynomial(P, c) / boost::math::pow<18>(s, pol);
   }
   case 18:
   {
      long long P[] = { 29088885112832LL, 553753414467584LL, 2165206642589696LL, 2550316668551168LL, 985278548541440LL, 115620218667008LL, 3100738912256LL, 8588754944LL, 131072 };
      return boost::math::pow<18>(constants::pi<T>(), pol) * c * tools::evaluate_even_polynomial(P, c) / boost::math::pow<19>(s, pol);
   }
   case 19:
   {
      long long P[] = { -29088885112832LL, -2184860175433728LL, -19686087844429824LL, -48165109676113920LL, -39471306959486976LL, -11124607890751488LL, -965271355195392LL, -18733264797696LL, -34357248000LL, -262144 };
      return boost::math::pow<19>(constants::pi<T>(), pol) * tools::evaluate_even_polynomial(P, c) / boost::math::pow<20>(s, pol);
   }
   case 20:
   {
      long long P[] = { 4951498053124096LL, 118071834535526400LL, 603968063567560704LL, 990081991141490688LL, 584901762421358592LL, 122829335169859584LL, 7984436548730880LL, 112949304754176LL, 137433710592LL, 524288 };
      return boost::math::pow<20>(constants::pi<T>(), pol) * c * tools::evaluate_even_polynomial(P, c) / boost::math::pow<21>(s, pol);
   }
#endif
   }
   //
   // We'll have to compute the coefficients up to n,
   // complexity is O(n^2) which we don't worry about for now
   // as the values are computed once and then cached.
   // However, if the final evaluation would have too many
   // terms just bail out right away:
   //
   if((unsigned)n / 2u > policies::get_max_series_iterations<Policy>())
      return policies::raise_evaluation_error<T>(function, "The value of n is so large that we're unable to compute the result in reasonable time, best guess is %1%", 0, pol);
#ifdef BOOST_HAS_THREADS
   static boost::detail::lightweight_mutex m;
   boost::detail::lightweight_mutex::scoped_lock l(m);
#endif
   static std::vector<std::vector<T> > table(1, std::vector<T>(1, T(-1)));

   int index = n - 1;

   if(index >= (int)table.size())
   {
      for(int i = (int)table.size() - 1; i < index; ++i)
      {
         int offset = i & 1; // 1 if the first cos power is 0, otherwise 0.
         int sin_order = i + 2;  // order of the sin term
         int max_cos_order = sin_order - 1;  // largest order of the polynomial of cos terms
         int max_columns = (max_cos_order - offset) / 2;  // How many entries there are in the current row.
         int next_offset = offset ? 0 : 1;
         int next_max_columns = (max_cos_order + 1 - next_offset) / 2;  // How many entries there will be in the next row
         table.push_back(std::vector<T>(next_max_columns + 1, T(0)));

         for(int column = 0; column <= max_columns; ++column)
         {
            int cos_order = 2 * column + offset;  // order of the cosine term in entry "column"
            BOOST_ASSERT(column < (int)table[i].size());
            BOOST_ASSERT((cos_order + 1) / 2 < (int)table[i + 1].size());
            table[i + 1][(cos_order + 1) / 2] += ((cos_order - sin_order) * table[i][column]) / (sin_order - 1);
            if(cos_order)
               table[i + 1][(cos_order - 1) / 2] += (-cos_order * table[i][column]) / (sin_order - 1);
         }
      }
   }
   T sum = boost::math::tools::evaluate_even_polynomial(&table[index][0], c, table[index].size());
   if(index & 1)
      sum *= c;  // First coefficient is order 1, and really an odd polynomial.
   if(sum == 0)
      return sum;
   //
   // The remaining terms are computed using logs since the powers and factorials
   // get real large real quick:
   //
   T power_terms = n * log(boost::math::constants::pi<T>());
   if(s == 0)
      return sum * boost::math::policies::raise_overflow_error<T>(function, 0, pol);
   power_terms -= log(fabs(s)) * (n + 1);
   power_terms += boost::math::lgamma(T(n));
   power_terms += log(fabs(sum));

   if(power_terms > boost::math::tools::log_max_value<T>())
      return sum * boost::math::policies::raise_overflow_error<T>(function, 0, pol);

   return exp(power_terms) * ((s < 0) && ((n + 1) & 1) ? -1 : 1) * boost::math::sign(sum);
}
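//
// Editor's note (not part of the original header): for n == 1 the reflection
// machinery that poly_cot_pi() feeds (see polygamma_imp() below) reduces to
// the classical identity
//
//   trigamma(x) + trigamma(1 - x) == pi^2 / sin^2(pi * x)
//
// A tiny residual check via the public polygamma() declared at the top of
// this file; illustrative only, "editors_" name is ours, assumes <cmath>:
//
template <class T>
T editors_trigamma_reflection_residual(T x)
{
   T pi = boost::math::constants::pi<T>();
   T s = sin(pi * x);
   // Should be on the order of machine epsilon for non-integer x:
   return boost::math::polygamma(1, x) + boost::math::polygamma(1, 1 - x) - pi * pi / (s * s);
}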
template <class T, class Policy>
struct polygamma_initializer
{
   struct init
   {
      init()
      {
         // Forces initialization of our table of coefficients and mutex:
         boost::math::polygamma(30, T(-2.5f), Policy());
      }
      void force_instantiate()const{}
   };
   static const init initializer;
   static void force_instantiate()
   {
      initializer.force_instantiate();
   }
};

template <class T, class Policy>
const typename polygamma_initializer<T, Policy>::init polygamma_initializer<T, Policy>::initializer;

template <class T, class Policy>
inline T polygamma_imp(const int n, T x, const Policy &pol)
{
   BOOST_MATH_STD_USING
   static const char* function = "boost::math::polygamma<%1%>(int, %1%)";
   polygamma_initializer<T, Policy>::initializer.force_instantiate();
   if(n < 0)
      return policies::raise_domain_error<T>(function, "Order must be >= 0, but got %1%", static_cast<T>(n), pol);
   if(x < 0)
   {
      if(floor(x) == x)
      {
         //
         // Result is infinity if x is odd, and a pole error if x is even.
         //
         if(lltrunc(x) & 1)
            return policies::raise_overflow_error<T>(function, 0, pol);
         else
            return policies::raise_pole_error<T>(function, "Evaluation at negative integer %1%", x, pol);
      }
      T z = 1 - x;
      T result = polygamma_imp(n, z, pol) + constants::pi<T>() * poly_cot_pi(n, z, x, pol, function);
      return n & 1 ? T(-result) : result;
   }
   //
   // Limit for use of small-x-series is chosen
   // so that the series doesn't go too divergent
   // in the first few terms.  Ordinarily this
   // would mean setting the limit to ~ 1 / n,
   // but we can tolerate a small amount of divergence:
   //
   T small_x_limit = (std::min)(T(T(5) / n), T(0.25f));
   if(x < small_x_limit)
   {
      return polygamma_nearzero(n, x, pol, function);
   }
   else if(x > 0.4F * policies::digits_base10<T, Policy>() + 4.0f * n)
   {
      return polygamma_atinfinityplus(n, x, pol, function);
   }
   else if(x == 1)
   {
      return (n & 1 ? 1 : -1) * boost::math::factorial<T>(n, pol) * boost::math::zeta(T(n + 1), pol);
   }
   else if(x == 0.5f)
   {
      T result = (n & 1 ? 1 : -1) * boost::math::factorial<T>(n, pol) * boost::math::zeta(T(n + 1), pol);
      if(fabs(result) >= ldexp(tools::max_value<T>(), -n - 1))
         return boost::math::sign(result) * policies::raise_overflow_error<T>(function, 0, pol);
      result *= ldexp(T(1), n + 1) - 1;
      return result;
   }
   else
   {
      return polygamma_attransitionplus(n, x, pol, function);
   }
}

} } } // namespace boost::math::detail

#endif // _BOOST_POLYGAMMA_DETAIL_2013_07_30_HPP_
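// Editor's note (not part of the original sources): a minimal end-to-end
// usage sketch for the dispatch implemented above, shown as a comment so the
// header itself is unchanged.  It assumes the standalone Boost.Math header
// layout rather than this backport's include paths:
//
//   #include <boost/math/special_functions/polygamma.hpp>
//   #include <iostream>
//
//   int main()
//   {
//      // psi'(1) == pi^2/6 ~= 1.6449340668:
//      std::cout << boost::math::polygamma(1, 1.0) << "\n";
//      // A negative, non-integer argument exercises the reflection
//      // formula and poly_cot_pi():
//      std::cout << boost::math::polygamma(3, -2.5) << "\n";
//      return 0;
//   }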
mlpack-2.2.5/src/mlpack/core/boost_backport/detail/unchecked_bernoulli.hpp000066400000000000000000002230751315013601400267500ustar00rootroot00000000000000
///////////////////////////////////////////////////////////////////////////////
//  Copyright 2013 Nikhar Agrawal
//  Copyright 2013 Christopher Kormanyos
//  Copyright 2013 John Maddock
//  Copyright 2013 Paul Bristow
//  Distributed under the Boost Software License, Version 1.0.
//  (See accompanying file LICENSE_1_0.txt or copy at
//  http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_MATH_UNCHECKED_BERNOULLI_HPP_
#define BOOST_MATH_UNCHECKED_BERNOULLI_HPP_

#include "../math_fwd.hpp"
#include <limits>
#include <cmath>
#include <boost/math/policies/error_handling.hpp>
#include <boost/array.hpp>
#include <boost/mpl/int.hpp>
#include <boost/type_traits/is_convertible.hpp>

namespace boost { namespace math {

namespace detail {

template <int variant>
struct max_bernoulli_index
{
   BOOST_STATIC_CONSTANT(unsigned, value = 17);
};

template <>
struct max_bernoulli_index<1>
{
   BOOST_STATIC_CONSTANT(unsigned, value = 32);
};

template <>
struct max_bernoulli_index<2>
{
   BOOST_STATIC_CONSTANT(unsigned, value = 129);
};

template <>
struct max_bernoulli_index<3>
{
   BOOST_STATIC_CONSTANT(unsigned, value = 1156);
};

template <>
struct max_bernoulli_index<4>
{
   BOOST_STATIC_CONSTANT(unsigned, value = 11);
};

template <class T>
struct bernoulli_imp_variant
{
   static const unsigned value =
      (std::numeric_limits<T>::max_exponent == 128)
      && (std::numeric_limits<T>::radix == 2)
      && (std::numeric_limits<T>::digits <= std::numeric_limits<float>::digits)
      && (boost::is_convertible<float, T>::value) ? 1 :
      (
         (std::numeric_limits<T>::max_exponent == 1024)
         && (std::numeric_limits<T>::radix == 2)
         && (std::numeric_limits<T>::digits <= std::numeric_limits<double>::digits)
         && (boost::is_convertible<double, T>::value) ? 2 :
         (
            (std::numeric_limits<T>::max_exponent == 16384)
            && (std::numeric_limits<T>::radix == 2)
            && (std::numeric_limits<T>::digits <= std::numeric_limits<long double>::digits)
            && (boost::is_convertible<long double, T>::value) ? 3 : (!is_convertible<boost::int64_t, T>::value ? 4 : 0)
         )
      );
};
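//
// Editor's note (not part of the original header): for the usual builtin
// types the compile-time dispatch above typically selects
//
//   bernoulli_imp_variant<float>::value  == 1   (33-entry float table, B0..B64)
//   bernoulli_imp_variant<double>::value == 2   (130-entry double table)
//
// and, on platforms with an extended 80-bit or 128-bit long double,
// bernoulli_imp_variant<long double>::value == 3.  Types not convertible
// from boost::int64_t fall through to variant 4.  A compile-time sanity
// check might look like (illustrative only):
//
//   BOOST_STATIC_ASSERT(bernoulli_imp_variant<double>::value == 2);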
} // namespace detail

template <class T>
struct max_bernoulli_b2n : public detail::max_bernoulli_index<detail::bernoulli_imp_variant<T>::value>{};

namespace detail{

template <class T>
inline T unchecked_bernoulli_imp(std::size_t n, const mpl::int_<0>& )
{
   static const boost::array<boost::int64_t, 1 + max_bernoulli_b2n<T>::value> numerators =
   {{
      boost::int64_t(            +1LL),
      boost::int64_t(            +1LL),
      boost::int64_t(            -1LL),
      boost::int64_t(            +1LL),
      boost::int64_t(            -1LL),
      boost::int64_t(            +5LL),
      boost::int64_t(          -691LL),
      boost::int64_t(            +7LL),
      boost::int64_t(         -3617LL),
      boost::int64_t(        +43867LL),
      boost::int64_t(       -174611LL),
      boost::int64_t(       +854513LL),
      boost::int64_t(    -236364091LL),
      boost::int64_t(      +8553103LL),
      boost::int64_t(  -23749461029LL),
      boost::int64_t(+8615841276005LL),
      boost::int64_t(-7709321041217LL),
      boost::int64_t(+2577687858367LL)
   }};

   static const boost::array<boost::int64_t, 1 + max_bernoulli_b2n<T>::value> denominators =
   {{
      boost::int64_t(    1LL),
      boost::int64_t(    6LL),
      boost::int64_t(   30LL),
      boost::int64_t(   42LL),
      boost::int64_t(   30LL),
      boost::int64_t(   66LL),
      boost::int64_t( 2730LL),
      boost::int64_t(    6LL),
      boost::int64_t(  510LL),
      boost::int64_t(  798LL),
      boost::int64_t(  330LL),
      boost::int64_t(  138LL),
      boost::int64_t( 2730LL),
      boost::int64_t(    6LL),
      boost::int64_t(  870LL),
      boost::int64_t(14322LL),
      boost::int64_t(  510LL),
      boost::int64_t(    6LL)
   }};
   return T(numerators[n]) / denominators[n];
}

template <class T>
inline T unchecked_bernoulli_imp(std::size_t n, const mpl::int_<1>& )
{
   static const boost::array<T, 1 + max_bernoulli_b2n<T>::value> bernoulli_data =
   {{
      +1.00000000000000000000000000000000000000000F,
      +0.166666666666666666666666666666666666666667F,
      -0.0333333333333333333333333333333333333333333F,
      +0.0238095238095238095238095238095238095238095F,
      -0.0333333333333333333333333333333333333333333F,
      +0.0757575757575757575757575757575757575757576F,
      -0.253113553113553113553113553113553113553114F,
      +1.16666666666666666666666666666666666666667F,
      -7.09215686274509803921568627450980392156863F,
      +54.9711779448621553884711779448621553884712F,
      -529.124242424242424242424242424242424242424F,
      +6192.12318840579710144927536231884057971014F,
      -86580.2531135531135531135531135531135531136F,
      +1.42551716666666666666666666666666666666667e6F,
-2.72982310678160919540229885057471264367816e7F, +6.01580873900642368384303868174835916771401e8F, -1.51163157670921568627450980392156862745098e10F, +4.29614643061166666666666666666666666666667e11F, -1.37116552050883327721590879485616327721591e13F, +4.88332318973593166666666666666666666666667e14F, -1.92965793419400681486326681448632668144863e16F, +8.41693047573682615000553709856035437430786e17F, -4.03380718540594554130768115942028985507246e19F, +2.11507486380819916056014539007092198581560e21F, -1.20866265222965259346027311937082525317819e23F, +7.50086674607696436685572007575757575757576e24F, -5.03877810148106891413789303052201257861635e26F, +3.65287764848181233351104308429711779448622e28F, -2.84987693024508822262691464329106781609195e30F, +2.38654274996836276446459819192192149717514e32F, -2.13999492572253336658107447651910973926742e34F, +2.05009757234780975699217330956723102516667e36F, -2.09380059113463784090951852900279701847092e38F, }}; return bernoulli_data[n]; } template inline T unchecked_bernoulli_imp(std::size_t n, const mpl::int_<2>& ) { static const boost::array::value> bernoulli_data = {{ +1.00000000000000000000000000000000000000000, +0.166666666666666666666666666666666666666667, -0.0333333333333333333333333333333333333333333, +0.0238095238095238095238095238095238095238095, -0.0333333333333333333333333333333333333333333, +0.0757575757575757575757575757575757575757576, -0.253113553113553113553113553113553113553114, +1.16666666666666666666666666666666666666667, -7.09215686274509803921568627450980392156863, +54.9711779448621553884711779448621553884712, -529.124242424242424242424242424242424242424, +6192.12318840579710144927536231884057971014, -86580.2531135531135531135531135531135531136, +1.42551716666666666666666666666666666666667e6, -2.72982310678160919540229885057471264367816e7, +6.01580873900642368384303868174835916771401e8, -1.51163157670921568627450980392156862745098e10, +4.29614643061166666666666666666666666666667e11, -1.37116552050883327721590879485616327721591e13, +4.88332318973593166666666666666666666666667e14, -1.92965793419400681486326681448632668144863e16, +8.41693047573682615000553709856035437430786e17, -4.03380718540594554130768115942028985507246e19, +2.11507486380819916056014539007092198581560e21, -1.20866265222965259346027311937082525317819e23, +7.50086674607696436685572007575757575757576e24, -5.03877810148106891413789303052201257861635e26, +3.65287764848181233351104308429711779448622e28, -2.84987693024508822262691464329106781609195e30, +2.38654274996836276446459819192192149717514e32, -2.13999492572253336658107447651910973926742e34, +2.05009757234780975699217330956723102516667e36, -2.09380059113463784090951852900279701847092e38, +2.27526964884635155596492603527692645814700e40, -2.62577102862395760473030497361582020814490e42, +3.21250821027180325182047923042649852435219e44, -4.15982781667947109139170744952623589366896e46, +5.69206954820352800238834562191210586444805e48, -8.21836294197845756922906534686173330145509e50, +1.25029043271669930167323398297028955241772e53, -2.00155832332483702749253291988132987687242e55, +3.36749829153643742333966769033387530162196e57, -5.94709705031354477186604968440515408405791e59, +1.10119103236279775595641307904376916046305e62, -2.13552595452535011886583850190410656789733e64, +4.33288969866411924196166130593792062184514e66, -9.18855282416693282262005552155018971389604e68, +2.03468967763290744934550279902200200659751e71, -4.70038339580357310785752555350060606545967e73, +1.13180434454842492706751862577339342678904e76, 
-2.83822495706937069592641563364817647382847e78, +7.40642489796788506297508271409209841768797e80, -2.00964548027566044834656196727153631868673e83, +5.66571700508059414457193460305193569614195e85, -1.65845111541362169158237133743199123014950e88, +5.03688599504923774192894219151801548124424e90, -1.58614682376581863693634015729664387827410e93, +5.17567436175456269840732406825071225612408e95, -1.74889218402171173396900258776181591451415e98, +6.11605199949521852558245252642641677807677e100, -2.21227769127078349422883234567129324455732e103, +8.27227767987709698542210624599845957312047e105, -3.19589251114157095835916343691808148735263e108, +1.27500822233877929823100243029266798669572e111, -5.25009230867741338994028246245651754469199e113, +2.23018178942416252098692981988387281437383e116, -9.76845219309552044386335133989802393011669e118, +4.40983619784529542722726228748131691918758e121, -2.05085708864640888397293377275830154864566e124, +9.82144332797912771075729696020975210414919e126, -4.84126007982088805087891967099634127611305e129, +2.45530888014809826097834674040886903996737e132, -1.28069268040847475487825132786017857218118e135, +6.86761671046685811921018885984644004360924e137, -3.78464685819691046949789954163795568144895e140, +2.14261012506652915508713231351482720966602e143, -1.24567271371836950070196429616376072194583e146, +7.43457875510001525436796683940520613117807e148, -4.55357953046417048940633332233212748767721e151, +2.86121128168588683453638472510172325229190e154, -1.84377235520338697276882026536287854875414e157, +1.21811545362210466995013165065995213558174e160, -8.24821871853141215484818457296893447301419e162, +5.72258779378329433296516498142978615918685e165, -4.06685305250591047267679693831158655602196e168, +2.95960920646420500628752695815851870426379e171, -2.20495225651894575090311752273445984836379e174, +1.68125970728895998058311525151360665754464e177, -1.31167362135569576486452806355817153004431e180, +1.04678940094780380821832853929823089643829e183, -8.54328935788337077185982546299082774593270e185, +7.12878213224865423522884066771438224721245e188, -6.08029314555358993000847118686477458461988e191, +5.29967764248499239300942910043247266228490e194, -4.71942591687458626443646229013379911103761e197, +4.29284137914029810894168296541074669045521e200, -3.98767449682322074434477655542938795106651e203, +3.78197804193588827138944181161393327898220e206, -3.66142336836811912436858082151197348755196e209, +3.61760902723728623488554609298914089477541e212, -3.64707726451913543621383088655499449048682e215, +3.75087554364544090983452410104814189306842e218, -3.93458672964390282694891288533713429355657e221, +4.20882111481900820046571171111494898242731e224, -4.59022962206179186559802940573325591059371e227, +5.10317257726295759279198185106496768539760e230, -5.78227623036569554015377271242917142512200e233, +6.67624821678358810322637794412809363451080e236, -7.85353076444504163225916259639312444428230e239, +9.41068940670587255245443288258762485293948e242, -1.14849338734651839938498599206805592548354e246, +1.42729587428487856771416320087122499897180e249, -1.80595595869093090142285728117654560926719e252, +2.32615353076608052161297985184708876161736e255, -3.04957517154995947681942819261542593785327e258, +4.06858060764339734424012124124937318633684e261, -5.52310313219743616252320044093186392324280e264, +7.62772793964343924869949690204961215533859e267, -1.07155711196978863132793524001065396932667e271, +1.53102008959691884453440916153355334355847e274, -2.22448916821798346676602348865048510824835e277, 
+3.28626791906901391668189736436895275365183e280, -4.93559289559603449020711938191575963496999e283, +7.53495712008325067212266049779283956727824e286, -1.16914851545841777278088924731655041783900e290, +1.84352614678389394126646201597702232396492e293, -2.95368261729680829728014917350525183485207e296, +4.80793212775015697668878704043264072227967e299, -7.95021250458852528538243631671158693036798e302, +1.33527841873546338750122832017820518292039e306 }}; return bernoulli_data[n]; } template inline T unchecked_bernoulli_imp(std::size_t n, const mpl::int_<3>& ) { static const boost::array::value> bernoulli_data = {{ +1.00000000000000000000000000000000000000000L, +0.166666666666666666666666666666666666666667L, -0.0333333333333333333333333333333333333333333L, +0.0238095238095238095238095238095238095238095L, -0.0333333333333333333333333333333333333333333L, +0.0757575757575757575757575757575757575757576L, -0.253113553113553113553113553113553113553114L, +1.16666666666666666666666666666666666666667L, -7.09215686274509803921568627450980392156863L, +54.9711779448621553884711779448621553884712L, -529.124242424242424242424242424242424242424L, +6192.12318840579710144927536231884057971014L, -86580.2531135531135531135531135531135531136L, +1.42551716666666666666666666666666666666667E6L, -2.72982310678160919540229885057471264367816E7L, +6.01580873900642368384303868174835916771401E8L, -1.51163157670921568627450980392156862745098E10L, +4.29614643061166666666666666666666666666667E11L, -1.37116552050883327721590879485616327721591E13L, +4.88332318973593166666666666666666666666667E14L, -1.92965793419400681486326681448632668144863E16L, +8.41693047573682615000553709856035437430786E17L, -4.03380718540594554130768115942028985507246E19L, +2.11507486380819916056014539007092198581560E21L, -1.20866265222965259346027311937082525317819E23L, +7.50086674607696436685572007575757575757576E24L, -5.03877810148106891413789303052201257861635E26L, +3.65287764848181233351104308429711779448622E28L, -2.84987693024508822262691464329106781609195E30L, +2.38654274996836276446459819192192149717514E32L, -2.13999492572253336658107447651910973926742E34L, +2.05009757234780975699217330956723102516667E36L, -2.09380059113463784090951852900279701847092E38L, +2.27526964884635155596492603527692645814700E40L, -2.62577102862395760473030497361582020814490E42L, +3.21250821027180325182047923042649852435219E44L, -4.15982781667947109139170744952623589366896E46L, +5.69206954820352800238834562191210586444805E48L, -8.21836294197845756922906534686173330145509E50L, +1.25029043271669930167323398297028955241772E53L, -2.00155832332483702749253291988132987687242E55L, +3.36749829153643742333966769033387530162196E57L, -5.94709705031354477186604968440515408405791E59L, +1.10119103236279775595641307904376916046305E62L, -2.13552595452535011886583850190410656789733E64L, +4.33288969866411924196166130593792062184514E66L, -9.18855282416693282262005552155018971389604E68L, +2.03468967763290744934550279902200200659751E71L, -4.70038339580357310785752555350060606545967E73L, +1.13180434454842492706751862577339342678904E76L, -2.83822495706937069592641563364817647382847E78L, +7.40642489796788506297508271409209841768797E80L, -2.00964548027566044834656196727153631868673E83L, +5.66571700508059414457193460305193569614195E85L, -1.65845111541362169158237133743199123014950E88L, +5.03688599504923774192894219151801548124424E90L, -1.58614682376581863693634015729664387827410E93L, +5.17567436175456269840732406825071225612408E95L, -1.74889218402171173396900258776181591451415E98L, 
+6.11605199949521852558245252642641677807677E100L, -2.21227769127078349422883234567129324455732E103L, +8.27227767987709698542210624599845957312047E105L, -3.19589251114157095835916343691808148735263E108L, +1.27500822233877929823100243029266798669572E111L, -5.25009230867741338994028246245651754469199E113L, +2.23018178942416252098692981988387281437383E116L, -9.76845219309552044386335133989802393011669E118L, +4.40983619784529542722726228748131691918758E121L, -2.05085708864640888397293377275830154864566E124L, +9.82144332797912771075729696020975210414919E126L, -4.84126007982088805087891967099634127611305E129L, +2.45530888014809826097834674040886903996737E132L, -1.28069268040847475487825132786017857218118E135L, +6.86761671046685811921018885984644004360924E137L, -3.78464685819691046949789954163795568144895E140L, +2.14261012506652915508713231351482720966602E143L, -1.24567271371836950070196429616376072194583E146L, +7.43457875510001525436796683940520613117807E148L, -4.55357953046417048940633332233212748767721E151L, +2.86121128168588683453638472510172325229190E154L, -1.84377235520338697276882026536287854875414E157L, +1.21811545362210466995013165065995213558174E160L, -8.24821871853141215484818457296893447301419E162L, +5.72258779378329433296516498142978615918685E165L, -4.06685305250591047267679693831158655602196E168L, +2.95960920646420500628752695815851870426379E171L, -2.20495225651894575090311752273445984836379E174L, +1.68125970728895998058311525151360665754464E177L, -1.31167362135569576486452806355817153004431E180L, +1.04678940094780380821832853929823089643829E183L, -8.54328935788337077185982546299082774593270E185L, +7.12878213224865423522884066771438224721245E188L, -6.08029314555358993000847118686477458461988E191L, +5.29967764248499239300942910043247266228490E194L, -4.71942591687458626443646229013379911103761E197L, +4.29284137914029810894168296541074669045521E200L, -3.98767449682322074434477655542938795106651E203L, +3.78197804193588827138944181161393327898220E206L, -3.66142336836811912436858082151197348755196E209L, +3.61760902723728623488554609298914089477541E212L, -3.64707726451913543621383088655499449048682E215L, +3.75087554364544090983452410104814189306842E218L, -3.93458672964390282694891288533713429355657E221L, +4.20882111481900820046571171111494898242731E224L, -4.59022962206179186559802940573325591059371E227L, +5.10317257726295759279198185106496768539760E230L, -5.78227623036569554015377271242917142512200E233L, +6.67624821678358810322637794412809363451080E236L, -7.85353076444504163225916259639312444428230E239L, +9.41068940670587255245443288258762485293948E242L, -1.14849338734651839938498599206805592548354E246L, +1.42729587428487856771416320087122499897180E249L, -1.80595595869093090142285728117654560926719E252L, +2.32615353076608052161297985184708876161736E255L, -3.04957517154995947681942819261542593785327E258L, +4.06858060764339734424012124124937318633684E261L, -5.52310313219743616252320044093186392324280E264L, +7.62772793964343924869949690204961215533859E267L, -1.07155711196978863132793524001065396932667E271L, +1.53102008959691884453440916153355334355847E274L, -2.22448916821798346676602348865048510824835E277L, +3.28626791906901391668189736436895275365183E280L, -4.93559289559603449020711938191575963496999E283L, +7.53495712008325067212266049779283956727824E286L, -1.16914851545841777278088924731655041783900E290L, +1.84352614678389394126646201597702232396492E293L, -2.95368261729680829728014917350525183485207E296L, +4.80793212775015697668878704043264072227967E299L, 
-7.95021250458852528538243631671158693036798E302L, +1.33527841873546338750122832017820518292039E306L, #if LDBL_MAX_EXP == 16384 // Entries 260 - 600 http://www.wolframalpha.com/input/?i=TABLE[N[Bernoulli[i]%2C40]%2C+{i%2C258%2C600%2C2}] -2.277640649601959593875058983506938037019e309L, 3.945184036046326234163525556422667595884e312L, -6.938525772130602106071724989641405550473e315L, 1.238896367577564823729057820219210929986e319L, -2.245542599169309759499987966025604480745e322L, 4.131213176073842359732511639489669404266e325L, -7.713581346815269584960928069762882771369e328L, 1.461536066837669600638613788471335541313e332L, -2.809904606225532896862935642992712059631e335L, 5.480957121318876639512096994413992284327e338L, -1.084573284087686110518125291186079616320e342L, 2.176980775647663539729165173863716459962e345L, -4.431998786117553751947439433256752608068e348L, 9.150625657715535047417756278073770096073e351L, -1.915867353003157351316577579148683133613e355L, 4.067256303542212258698836003682016040629e358L, -8.754223791037736616228150209910348734629e361L, 1.910173688735533667244373747124109379826e365L, -4.225001320265091714631115064713174404607e368L, 9.471959352547827678466770796787503034505e371L, -2.152149973279986829719817376756088198573e375L, 4.955485775334221051344839716507812871361e378L, -1.156225941759134696630956889716381968142e382L, 2.733406597646137698610991926705098514017e385L, -6.546868135325176947099912523279938546333e388L, 1.588524912441221472814692121069821695547e392L, -3.904354800861715180218598151050191841308e395L, 9.719938686092045781827273411668132975319e398L, -2.450763621049522051234479737511375679283e402L, 6.257892098396815305085674126334317095277e405L, -1.618113552083806592527989531636955084420e409L, 4.236528795217618357348618613216833722648e412L, -1.123047068199051008086174989124136878992e416L, 3.013971787525654770217283559392286666886e419L, -8.188437573221553030375681429202969070420e422L, 2.251910591336716809153958146725775718707e426L, -6.268411292043789823075314151509139413399e429L, 1.765990845202322642693572112511312471527e433L, -5.035154436231331651259071296731160882240e436L, 1.452779356460483245253765356664402207266e440L, -4.241490890130137339052414960684151515166e443L, 1.252966001692427774088293833338841893293e447L, -3.744830047478272947978103227876747240343e450L, 1.132315806695710930595876001089232216024e454L, -3.463510845942701805991786197773934662578e457L, 1.071643382649675572086865465873916611537e461L, -3.353824475439933688957233489984711465335e464L, 1.061594257145875875963152734129803268488e468L, -3.398420969215528955528654193586189805265e471L, 1.100192502000434096206138068020551065890e475L, -3.601686379213993374332690210094863486472e478L, 1.192235170430164900533187239994513019475e482L, -3.990342751779668381699052942504119409180e485L, 1.350281800938769780891258894167663309221e489L, -4.619325443466054312873093650888507562249e492L, 1.597522243968586548227514639959727696694e496L, -5.584753729092155108530929002119620487652e499L, 1.973443623104646193229794524759543752089e503L, -7.048295441989615807045620880311201930244e506L, 2.544236702499719094591873151590280263560e510L, -9.281551595258615205927443367289948150345e513L, 3.421757163154453657766296828520235351572e517L, -1.274733639384538364282697627345068947433e521L, 4.798524805311016034711205886780460173566e524L, -1.825116948422858388787806917284878870034e528L, 7.013667442807288452441777981425055613982e531L, -2.723003862685989740898815670978399383114e535L, 1.068014853917260290630122222858884658850e539L, 
-4.231650952273697842269381683768681118533e542L, 1.693650052202594386658903598564772900388e546L, -6.846944855806453360616258582310883597678e549L, 2.795809132238082267120232174243715559601e553L, -1.153012972808983269106716828311318981951e557L, 4.802368854268746357511997492039592697149e560L, -2.019995255271910836389761734035403905781e564L, 8.580207235032617856059250643095019760968e567L, -3.680247942263468164408192134916355198549e571L, 1.593924457586765331397457407661306895942e575L, -6.970267175232643679233530367569943057501e578L, 3.077528087427698518703282907890556154309e582L, -1.371846760052887888926055417297342106614e586L, 6.173627360829553396851763207025505289166e589L, -2.804703130495506384463249394043486916669e593L, 1.286250900087150126167490951216207186092e597L, -5.954394420063617872366818601092036543220e600L, 2.782297785278756426177542270854984091406e604L, -1.312214674935307746141207680066262384215e608L, 6.246299145383554153167974732783934504370e611L, -3.000812007679574430883792565577444226490e615L, 1.454904877136007844493861746476079537075e619L, -7.118558521873800304612781121044077357278e622L, 3.514739820897817389472822276832677887997e626L, -1.751137068816377401163011262831890828437e630L, 8.803498091818800678575314081978951179602e633L, -4.465612911700593572269200981612564161010e637L, 2.285494565287530681465757798517033542888e641L, -1.180145168917737098025683613598595411329e645L, 6.147941849198393232663105284575149616925e648L, -3.231069156963603593233679426198974663352e652L, 1.713042725635435041806895849197608270935e656L, -9.161761363270648920537613435771882898051e659L, 4.942675965960539112005679080810117766825e663L, -2.689684712697383518131267222872386600031e667L, 1.476320014229917759615308193449511534656e671L, -8.173037740864781506597184122049453514594e674L, 4.563462313190521363235182420178784459580e678L, -2.569790015236158475703055501886439298708e682L, 1.459410219452119981958355737832022375085e686L, -8.358304882556983795372406183642486436653e689L, 4.827305091483557818593092377664570208355e693L, -2.811394311081493166793414157061950132403e697L, 1.651026863340675349245561261339568827739e701L, -9.776578579336866764167878646459810047899e704L, 5.837207965197521880181236529616560780535e708L, -3.513938957938032127105389702846371181520e712L, 2.132747371360190507595748444536911078788e716L, -1.305047363239192640729466563372665311602e720L, 8.050825342678337497636292798039996484780e723L, -5.006884161223862543665524155681082112689e727L, 3.139016066011452177570812014513491361235e731L, -1.983829535212711378291469356666001365873e735L, 1.263822427649676371257598052486237628698e739L, -8.115678659900522918802121684491754629503e742L, 5.252995164972075271667364371449050412435e746L, -3.427038125662404660056511738625477058135e750L, 2.253446011834352733279946306835940729858e754L, -1.493407341897034717876962786798831719683e758L, 9.974681322653365118752729509398728354442e761L, -6.714230142773850863927710112350816379426e765L, 4.554668668931723346600337564274944733530e769L, -3.113635386023220127834102980385275379533e773L, 2.144945411287666204679363498162954050208e777L, -1.488982121181387164932397544378555256016e781L, 1.041537218854627455352298173588983048748e785L, -7.341073881786613676177562822942175683993e788L, 5.213524272587199574980117351016322518428e792L, -3.730592531776514409283897139216167197989e796L, 2.689592876341877079083449497724049500175e800L, -1.953643788231947582529884602972233135002e804L, 1.429691073080500563348668321308878246277e808L, -1.054059177095488639836063073070536825675e812L, 
7.828919160938693948399336431565350676613e815L, -5.857884457184396382550955498026762014753e819L, 4.415401588264172474136969345712659422380e823L, -3.352573884181287635796498822858109969161e827L, 2.564210385719224000156548240934108974447e831L, -1.975534392116037602837941409848663077528e835L, 1.533062123975940045180943006948008486466e839L, -1.198306160488763291730059994812781226903e843L, 9.434034267770711698676321369174735725321e846L, -7.480619200038505368468483892246806488879e850L, 5.974161898439971564124576801455052907638e854L, -4.805125663714699771668630995361572639386e858L, 3.892332138028039952403812726744593073776e862L, -3.175276505779699340738548328810180869575e866L, 2.608608681939322393581069188271626122519e870L, -2.158148554392732439392868052394994052628e874L, 1.797993483301448477700600221980862686033e878L, -1.508407575089108597171576068862286462909e882L, 1.274273406242459482708930389008701147244e886L, -1.083950475353171986748233157909397370193e890L, 9.284292630726328432038470356821265395331e893L, -8.007012115449516364480417355063446317414e897L, 6.952871948429568933888979915833266241471e901L, -6.078828929473797621198666799700739891205e905L, 5.350908089710964244671334224708057812633e909L, -4.742168072503284973969982758434401589090e913L, 4.231149239401967697257534662010605751136e917L, -3.800684612827828851942743291026898158947e921L, 3.436984796314246158361599955909956583986e925L, -3.128930718993658356398482705317381808301e929L, // // 602-1300: http://www.wolframalpha.com/input/?i=TABLE[N[Bernoulli[i]%2C40]%2C+{i%2C602%2C1300%2C2}] 2.867524740577223817164663595437919813239e933L, -2.645462974939090580963101220449509725942e937L, 2.456800827789169780295419018499543141869e941L, -2.296690549725790064673528302231294870532e945L, 2.161174697699793265715182091764676666457e949L, -2.047023224586087259305754002882269123194e953L, 1.951604806042481282712736234132803700277e957L, -1.872785206668284042110390583158639495143e961L, 1.808847160923282257302788929692654262867e965L, -1.758427529634609613399327744595257497188e969L, 1.720468488019528147087036246754294757647e973L, -1.694180279355332648057740852839804839425e977L, 1.679013685251183870616469618951463869496e981L, -1.674640861433092946269144173974414945664e985L, 1.680943600147858322148767806987527412112e989L, -1.698008433134805056489370119323402510305e993L, 1.726128304411348354183882648263448448633e997L, -1.765810838736918108045764015629875016219e1001L, 1.817793526882665071123822455897912718293e1005L, -1.883066459765807128944897377914669600374e1009L, 1.962903588035940537938222992228124233567e1013L, -2.058903881920696086033171142046100185783e1017L, 2.173044241735786946064676598703393618281e1021L, -2.307746591425236218893160658331303115253e1025L, 2.465962312241418731528973526597433097256e1029L, -2.651278087802503406316742676403301581549e1033L, 2.868048395658440423778896607880692085708e1037L, -3.121561373094393453726645989392054731637e1041L, 3.418246710091027042099932753084126095820e1045L, -3.765936717592482928796920675282930034018e1049L, 4.174194967165213973474293718362757753877e1053L, -4.654731142471753017867105249805137855862e1057L, 5.221926310090434518253178454907900079787e1061L, -5.893500145664015254409680930288710794031e1065L, 6.691361332576333738130720616841706994101e1069L, -7.642695184575063524608775697714741180954e1073L, 8.781359617440634128952082759434723165820e1077L, -1.014968338800868135594698909567734048618e1082L, 1.180079105471061498849752479044520598414e1086L, -1.380162016721660241308046692646452732446e1090L, 
1.623685158291375662775444238282343536948e1094L, -1.921404880943289359290531906131400049399e1098L, 2.287040419533950152851434188305457266969e1102L, -2.738162880206032093123060939173765335255e1106L, 3.297371307848643161532227459901386725801e1110L, -3.993854689967542662299211323085023297602e1114L, 4.865474805885735467044047308902313673643e1118L, -5.961554732739027308247618738765152679497e1122L, 7.346627151757492821447573639763873833441e1126L, -9.105493288459908620636712748727395637965e1130L, 1.135007867626164861991621396462821975167e1135L, -1.422876214067403769204874786137232627418e1139L, 1.793912271573925309173135913914667878908e1143L, -2.274542916104231188526120123855259514144e1147L, 2.900273688809987694128857655036783261991e1151L, -3.719022795563122339874875448447744493398e1155L, 4.795753420982845153626611023078973364321e1159L, -6.218937220186281310109009529226561379773e1163L, 8.109611247999584815668395828940708619394e1167L, -1.063412316303440216539797215354141158589e1172L, 1.402214363674117662460496032135704328989e1176L, -1.859223235464558752766840772026058694872e1180L, 2.478828203789903637835992128856742276028e1184L, -3.323169416193176673655321536761413885767e1188L, 4.479640207312477092938541546776915956580e1192L, -6.071721672924085739424644485636889518799e1196L, 8.274698015123579607850404326757887762270e1200L, -1.133855131459773018024052539697784205966e1205L, 1.562146222050424344025824344480153248984e1209L, -2.163904570724750459592352173471446831752e1213L, 3.013703210722669908901286635073603018696e1217L, -4.219903244242308803914269531001720703294e1221L, 5.940703220571043642186808904696174833998e1225L, -8.408147464216029127243257448169774333631e1229L, 1.196419999747411909144144315499654470715e1234L, -1.711518922741148710381740436694440587059e1238L, 2.461434539630850545757453894977350505251e1242L, -3.558748530932574002484841810677232366801e1246L, 5.172525606281917297657859608800373729529e1250L, -7.557850217376323621984784308774476917753e1254L, 1.110141075986004209769735296234549704181e1259L, -1.639216556732622481406083885926912451281e1263L, 2.433138328152562628385514545400044125983e1267L, -3.630476645219033020888837165221286413171e1271L, 5.445289518636306992942604775585977779418e1275L, -8.209806424989072060381590985042272020067e1279L, 1.244209849774134691374848390346442737613e1284L, -1.895384488692308848372754844910263931874e1288L, 2.902272596647764894203369746806169285113e1292L, -4.466944174025026625137032739317650862593e1296L, 6.910485739507636504313238347702354354916e1300L, -1.074550085668784170644854815272144687769e1305L, 1.679419258904938802199084915274175753529e1309L, -2.638155207645646220849795321076977230763e1313L, 4.165284786632654168563096850610185378233e1317L, -6.609774274649031371770290191295685774584e1321L, 1.054194100570841329575393359295845860860e1326L, -1.689822316104196916970708778265725885275e1330L, 2.722340957904912685605914893019783431164e1334L, -4.407776313964403233676810178851005163725e1338L, 7.172436210641903635864868181569129834361e1342L, -1.172947440100495955246356688225986736990e1347L, 1.927745674072824377954824961348211728006e1351L, -3.184013467435655962214317208087993711563e1355L, 5.285045125125832341263897233405196808096e1359L, -8.815883582819232027207118521581424783107e1363L, 1.477818368424505276711779171224799759099e1368L, -2.489482576496570159333357550363134602876e1372L, 4.214292881345076419678976329218843808204e1376L, -7.169068531615459070909644981451297906220e1380L, 1.225513133750594558180516896275774441895e1385L, 
-2.105160827387119480607950260289853896637e1389L, 3.633787605672960549893307203363402915249e1393L, -6.302830804027849515239463308430185990705e1397L, 1.098521433860299633481449685364914115468e1402L, -1.923858597401607622723144320370279518600e1406L, 3.385512828549942051667348582951554570164e1410L, -5.986286250836771248147827011780631183980e1414L, 1.063572794668186370728928272374836554300e1419L, -1.898666684876492795233907174493757572290e1423L, 3.405627002840442789235393111726609930533e1427L, -6.137724140284450036591063946055819333244e1431L, 1.111411024660941507986132154479364267486e1436L, -2.022060876221034821890406900217875915949e1440L, 3.696248025817144690840539132103538834108e1444L, -6.788448439024998306316860676030442691610e1448L, 1.252615233049059554031883468823648511657e1453L, -2.322190433141265975888955985950824418729e1457L, 4.325200102353909846882217732999001735342e1461L, -8.093531903011880118699218269369570178812e1465L, 1.521558881878323790120983450270946857209e1470L, -2.873780311010933807686415826253380907421e1474L, 5.452903697278823304173192839252276211670e1478L, -1.039457922537509500320638240809547113575e1483L, 1.990610112724715126895008793014214505760e1487L, -3.829667853173777076954453401761025071562e1491L, 7.401624504283011888971231756333356050310e1495L, -1.437075122764477911733220492562365990710e1500L, 2.802940275035867428066581228962104019228e1504L, -5.491938363067613321364335249495394164430e1508L, 1.080961960603953462180593404647115933651e1513L, -2.137290931892412298654741768897581319007e1517L, 4.245031321673807283498263276791307370788e1521L, -8.469499523038763989328773224520912663309e1525L, 1.697421812794203793865032206191322699261e1530L, -3.417217332563937242285349373774004020539e1534L, 6.910378594841763785923780822895851271770e1538L, -1.403696282437585785557998429691459557649e1543L, 2.864060533055333035232343601021192111053e1547L, -5.869818290384811353182423286543086530728e1551L, 1.208359745327224593486268988808338456906e1556L, -2.498576742140453770373914215325521001990e1560L, 5.189311407347546310078739863704346083861e1564L, -1.082537954843916294257278789980768336964e1569L, 2.268238255751421312559806122980932952706e1573L, -4.773557403917983369065731568732198697502e1577L, 1.009019097334998841920279535262007639746e1582L, -2.142181266523235177327239693359275472557e1586L, 4.567814904130855969979178320003286614868e1590L, -9.782550516204803195398428611221899469345e1594L, 2.104180123097086948576304557651398411373e1599L, -4.545658958087323864004652894518442709646e1603L, 9.862563944609427542603740078470901803131e1607L, -2.149105846582226970866569209122813809019e1612L, 4.703235567543888152049628411354542509156e1616L, -1.033719212601584878353206879472796545848e1621L, 2.281767401903848796732740825793310514456e1625L, -5.058236070813950229238666252351966279306e1629L, 1.126112519657857205642546937554224492775e1634L, -2.517766761987679577706779689880657777343e1638L, 5.653225190181653388317503182908983211029e1642L, -1.274735955461074142223278576503188429497e1647L, 2.886578974679460464298863945016671299242e1651L, -6.564203307141426181809363135003467581753e1655L, 1.499036144473064593308260681782048262301e1660L, -3.437714715599902386917108442954580869236e1664L, 7.916830957072777234152907034541325149479e1668L, -1.830850567422571420661248197094782575285e1673L, 4.251778280827419894527511469762091846660e1677L, -9.915182507286989818033146623995507108134e1681L, 2.321878208636697663781227497233334385222e1686L, -5.459879022461660582811365437190884471726e1690L, 
1.289222044549922720398543474297554204559e1695L, -3.056819658344217799458557578658863826289e1699L, 7.277891759142725294172926258364455941365e1703L, -1.739928293433385104144012025546489673795e1708L, 4.176797408823713136137404972612780406904e1712L, -1.006788178307821554781930741698052910780e1717L, 2.436754569909644399766538111317379484511e1721L, -5.921896599028498715774458493117079340155e1725L, 1.445045688171565118619109316933316429671e1730L, -3.540547766876069233350621578795319652040e1734L, 8.710114552028472554054293344204504325978e1738L, -2.151484527880464463303897113553085899101e1743L, 5.335928195512405709733771642389502809087e1747L, -1.328726408335015910030370523083559660016e1752L, 3.322090527232917400247098823651437597786e1756L, -8.339387326241218096865362177688582376376e1760L, 2.101842203781264395369771906884644062395e1765L, -5.318704469415522036482913743767085545209e1769L, 1.351288005941730688647540059088127991581e1774L, -3.446853546858473171100748720136784228698e1778L, 8.827284762030783576089954173424852998700e1782L, -2.269642226090373319660782216907175419317e1787L, 5.858820683661708553422363777419430816755e1791L, -1.518385813684321665045387969920683656625e1796L, 3.950661327164595923092260035122668890334e1800L, -1.031976516347387969958181456058243183780e1805L, 2.706317892325103782207094286049104555552e1809L, -7.125140422584701175967252533378906957380e1813L, 1.883260203116768075569432925204868418472e1818L, -4.997193687108743666000994570700725873035e1822L, 1.331182722092654526185433799891693838871e1827L, -3.559930289076558484535632566755216035553e1831L, 9.557281027056970446117541983785660301558e1835L, -2.575805002229372523547972911961335317502e1840L, 6.969058431277067406841032797913179025984e1844L, -1.892842481279278678390672746902260183506e1849L, 5.160964211693777744707760614147460787285e1853L, -1.412602588198037643242529860614298968137e1858L, 3.881313379962387603749693387037174052146e1862L, -1.070542170988009009334148472388319844527e1867L, 2.964094312414144330805731101996829908435e1871L, -8.238350132106899955856124602934281976453e1875L, 2.298504171050560756192352106062598639825e1880L, -6.437303944649223478093890316531995121228e1884L, 1.809727811843121957353712606428292269805e1889L, -5.107047553992257935533518628886728031061e1893L, 1.446674478990385642488446075734631327506e1898L, -4.113513327511444762766719175770513771122e1902L, 1.174067517257431444028448391638451935667e1907L, -3.363630086409895071362533854123306097827e1911L, 9.672868956071838221096869293070568259792e1915L, -2.792101741911955365960369780457612630184e1920L, 8.089710604557382430162031502761771390568e1924L, -2.352650988877130983061761312962677887796e1929L, 6.867549079740051556501575104006222995568e1933L, -2.012161201632998475706904405535757516336e1938L, 5.917489529279588702317256137229398357271e1942L, -1.746718667239329545125902248821502764273e1947L, 5.175069416058975040990816515838893249437e1951L, -1.538913401594651457295303469904084052963e1956L, 4.593185746210984655636051293374195150815e1960L, -1.375981868450401919299150690829612124045e1965L, 4.137207965217520410530508053863759216958e1969L, -1.248518564582257710069294326648626362439e1974L, 3.781575291117895093413381897917341286951e1978L, -1.149575999691408110085856948595444100435e1983L, 3.507413095836612229403470531176947165451e1987L, -1.074032838410645352804690949680310176413e1992L, 3.300857202456564870338466973024760446263e1996L, -1.018149578840803516349758843017979498322e2001L, 3.151876950233613792531594490714752800621e2005L, 
-9.792574827376149360558532022944033224780e2009L, 3.053456145978161645823454710737904504036e2014L, -9.555442346102849014299990542596620094035e2018L, 3.001037449298122384017009412541525703002e2023L, -9.459120112371096268275049056229023773120e2027L, 2.992168042152196502453442556462819104060e2032L, -9.498922680869041470681858599915282791899e2036L, 3.026307717971075309746179763189393755074e2041L, -9.676079238806159594565350708123427510151e2045L, 3.104778286352798464772361361434013339088e2050L, -9.997786802782252742109475924344598057966e2054L, 3.230847952724856366943939804248186203776e2059L, -1.047769651900498931701604323213605884945e2064L, 3.409958102134053489747140426163802214042e2068L, -1.113687894644055086152064258459886518528e2073L, 3.650114509271160332136458711252217684956e2077L, -1.200536387553969483433239131469825141412e2082L, 3.962482337718333099498977337189304099484e2086L, -1.312441206957064803437100929905979391106e2091L, 4.362246723746013772563799740886664288515e2095L, -1.454975881895253548422481637083633839534e2100L, 4.869831412214692119172895822285084162147e2104L, -1.635618419512383251104125916207188960680e2109L, 5.512611314145041257838234038980389596534e2113L, -1.864392957231340288547618808749072127289e2118L, 6.327317613106621547060670091824665547127e2122L, -2.154772001506498703267302897994526372056e2127L, 7.363426139490286496267931634843475368903e2131L, -2.524950643808031915843604894357998905460e2136L, 8.687956390288096215918373666581638675156e2140L, -2.999656978200020459428228924242615592768e2145L, 1.039231328851609224822335039430898644149e2150L, -3.612742437616019936358910410005123924796e2154L, 1.260211309932738404790711574105022002093e2159L, -4.410916378453971105434385837025433805752e2163L, 1.549140617923265948720013792673729394719e2168L, -5.459173749226782924959103886664322964926e2172L, 1.930343307630952098252884031069043541182e2177L, -6.848749229218425353808144618581305978045e2181L, 2.438117138001365487681440577590059588102e2186L, -8.708873656769794358508423272379627581292e2190L, 3.121268068338199458891764932384819739714e2195L, -1.122430216307539309816165910733145404999e2200L, 4.049900779207199370582177687160985635615e2204L, -1.466167983141158219266077836130256565915e2209L, 5.325678718693772500250292767751070974887e2213L, -1.940955845102272053048140384364058448998e2218L, 7.097467198361219669927211698104447309186e2222L, -2.603968771680987683436428778397387110896e2227L, 9.585403285394812946713320044815117440444e2231L, -3.540176030547640510648455468270569908446e2236L, 1.311827683984025111744358347783996339730e2241L, -4.877124229155333857009747836542843294702e2245L, 1.819213075760490882591173222316749809951e2250L, -6.808221630329265915405178596748950929642e2254L, 2.556299969544109052724772800143396857058e2259L, -9.629763347675306704861859899230073979116e2263L, 3.639508580119285595844040783082958425575e2268L, -1.380037493555816309137481185927387732499e2273L, 5.249980712165216709135893538080020409581e2277L, -2.003737844109055078145975651407367170529e2282L, 7.672522280806944397358668566379646540213e2286L, -2.947454993639165318799389781921184991045e2291L, 1.135966912801707623489383623092951142963e2296L, -4.392293711194501621873299212059053651432e2300L, 1.703813210168560937608104155973968112409e2305L, -6.630636743874062041158387022015853902938e2309L, 2.588742636486379690203698247275411406029e2314L, -1.013959594068423546627946242481463893979e2319L, 3.984265821528043268586235974854766821078e2323L, -1.570614519682157047612769672066387881154e2328L, 
6.211297381339606877062824459742129064477e2332L, -2.464246931985476159686671650962783785426e2337L, 9.807833742601662212615240518855757197483e2341L, -3.916036434571217691317276306031837539092e2346L, 1.568566392975837368624727722120313955274e2351L, -6.302885887601142677858008037129298948063e2355L, 2.540704455306077495480843691828334210014e2360L, -1.027412480318234348899627142408950111875e2365L, 4.167823618450297116765978030480648316769e2369L, -1.696076602731914277275203926124423530377e2374L, 6.923904505633301788461482786634220738504e2378L, -2.835463065742506394026733592206185459035e2383L, 1.164828772275756526225951620927486307632e2388L, -4.800242878545012539781545966693324656699e2392L, 1.984381759611877246529319121941597679107e2397L, -8.228979942542641498511023600269641046627e2401L, 3.423130231367101727862739208673375060101e2406L, -1.428418168129733054582191895023094524495e2411L, 5.979153801634459282232521647160044877770e2415L, -2.510581926948409809562349588087762800160e2420L, 1.057443785053915411991029410076722022815e2425L, -4.467723713549428749678277264414266162837e2429L, 1.893474116528533144079731251913008472748e2434L, -8.049601965052954947260081891142509464888e2438L, 3.432648527503971149009691133946275281368e2443L, -1.468324699963694393989960228042259134294e2448L, // // 1302-1600: http://www.wolframalpha.com/input/?i=TABLE[N[Bernoulli[i]%2C40]%2C+{i%2C1302%2C1600%2C2}] 6.300146502435743791500010801885493871234e2452L, -2.711520667146768856688291798851999580833e2457L, 1.170595555513900137297344452318266434006e2462L, -5.069095411973246242900074508988493530542e2466L, 2.201819284807954055092117706033113168896e2471L, -9.593088725189386197503123561368325167085e2475L, 4.192362385909155628936230811010649614060e2480L, -1.837725836941968309866675158105812946762e2485L, 8.080201101491972605313807752565294881374e2489L, -3.563536075527215702966392543784039539240e2494L, 1.576361051321107275181955665159661781175e2499L, -6.994292466180175594372663323941761853364e2503L, 3.112744353537336702834647901141392426258e2508L, -1.389481328370627358752727485697345194612e2513L, 6.221134636655213696041740685131223999953e2517L, -2.793779613656947577224654924852010601105e2522L, 1.258399062987759035354039924686781081603e2527L, -5.685208194704131918461885165870560583895e2531L, 2.576167857759537340210434756292816456179e2536L, -1.170846052338591953257169251219597581763e2541L, 5.337296787116189575571202979672747140313e2545L, -2.440264475369219459038748840841422948951e2550L, 1.119037151526195093932933161706501865175e2555L, -5.146858829220973887154576240993607686435e2559L, 2.374259791963193693837576781321391741634e2564L, -1.098501215269400934956638118646657823799e2569L, 5.097500369683616795005376807036889542869e2573L, -2.372446971688020647583535886090779018865e2578L, 1.107430282014636546248612381377039463753e2583L, -5.184597227131050012643138079903381280471e2587L, 2.434392040100910394476893838832599310265e2592L, -1.146412753331162872665743308094817095949e2597L, 5.414578104816988124950636101250217797539e2601L, -2.564835392810685332173156758121489913946e2606L, 1.218495070518549208066544111736985586178e2611L, -5.805713573821806672815019495319510297824e2615L, 2.774298194574319430697819781128985128618e2620L, -1.329580186505564627453485444017911980430e2625L, 6.390545858902318479863947547243743500916e2629L, -3.080502542499571035376377703435361520427e2634L, 1.489236104239976282318361008292980814533e2639L, -7.220413839991892382038608955317126799684e2643L, 3.510874916591640642524021216241607185085e2648L, 
-1.712070118580404599831061485055269100525e2653L, 8.372956919832386730490070625622785478703e2657L, -4.106629146981883685523102256292669054596e2662L, 2.019945438530802964718619732330776495740e2667L, -9.964133277392242111939720494354938982970e2671L, 4.929278642971447854669801547226335041410e2676L, -2.445509657169810919463982615395074704130e2681L, 1.216734421265677299127016883839223226884e2686L, -6.071008437677720186241562251151490713584e2690L, 3.037824949882992896564570441252792097027e2695L, -1.524402878612630565501569310883356490225e2700L, 7.671320530781999359200097739951316234193e2704L, -3.871436167706734376478728954716915204399e2709L, 1.959313530432202158587932399068682252335e2714L, -9.944063618400630821320953821427307024297e2718L, 5.061161998202463346818982228476199873781e2723L, -2.583219090831132705328958245740715185448e2728L, 1.322193991367293532684189527174543501836e2733L, -6.786569982732483290873213417465458376706e2737L, 3.493212334804776543395067018414547811062e2742L, -1.803090099978261928508495412750404640933e2747L, 9.333100843930216567894508007158644926767e2751L, -4.844499031405982604449146511179496492045e2756L, 2.521648090959971240812330574936006906830e2761L, -1.316227870932708474838173333385377250286e2766L, 6.889488826832738674261056521130795910494e2770L, -3.616184242864384509259984293501533623932e2775L, 1.903356124758119137116543283603627028779e2780L, -1.004601544584640657081847200643996069583e2785L, 5.317043885597842225603585588404817559596e2789L, -2.821938866752488868682751438901900485500e2794L, 1.501842023003449590337997900945924161741e2799L, -8.014908048137216649348740300633172710524e2803L, 4.289126235121619907138036129192558937445e2808L, -2.301619137231461344870820700320913118444e2813L, 1.238485136850053215006962645111854705210e2818L, -6.682503731149007943059244518074044280490e2822L, 3.615572393938012932030234169574978859655e2827L, -1.961565108627429629104703146282982075623e2832L, 1.067123259692924564435881096382837264046e2837L, -5.821179870182035246401397327057170726418e2841L, 3.184127229476322727732208017279268211356e2846L, -1.746429902183019597973436257300843998825e2851L, 9.604873565299766333876882842813498685054e2855L, -5.296759978724702692134960752308186890356e2860L, 2.928906353338652198977536576170287112391e2865L, -1.623961162577704769945821804737884742792e2870L, 9.028574047002736235613238355032484299017e2874L, -5.033087486357905828950503441308068892610e2879L, 2.813325650062267479031371852434194635210e2884L, -1.576791132296320840138263753339056345362e2889L, 8.861258343945925667272164531504265693289e2893L, -4.993236404321511029440212686547068244002e2898L, 2.821192993950901287717082243608730217471e2903L, -1.598254169674379493385730199445427966752e2908L, 9.078617590346932363947095804057608979359e2912L, -5.170742114456472142154347566092068443393e2917L, 2.952866185102528847516095880416675972086e2922L, -1.690794578626103552690094140317813413244e2927L, 9.707168799669516048238542260085175133847e2931L, -5.587884732306715493795271931175883605707e2936L, 3.225179489154957423492905957887744116530e2941L, -1.866424419669188178697802576490431604300e2946L, 1.082967626854618222657109354056973072044e2951L, -6.300392007169862865282706277272018077291e2955L, 3.675066377245428685118763485986517510658e2960L, -2.149348371085132073107516253339849053182e2965L, 1.260349351812619395000600434630904474324e2970L, -7.409963623771231302980906971935254993610e2974L, 4.367980758467862686643231700861155889684e2979L, -2.581566823350789671250829457603555544100e2984L, 
1.529757357568342629912560827243282062227e2989L, -9.088595394263364554625061567617375176719e2993L, 5.413829169254585648363594604231030415354e2998L, -3.233288119606092759447005827969216281573e3003L, 1.936042437734875803183915765854038424658e3008L, -1.162289934202291715747729318797398221667e3013L, 6.995870350500567071550614251287615697508e3017L, -4.221776496490106417392945233048068288503e3022L, 2.554309239868912570382343877718991746122e3027L, -1.549440871550119801225143558087410562418e3032L, 9.423199525954784955533959981278992475051e3036L, -5.745689660772387668861183913170050552119e3041L, 3.512407521007240798565045328376471603253e3046L, -2.152708113797517364614914569890010876143e3051L, 1.322761289733739440340237168659770154654e3056L, -8.148777388506488753591136948542248584098e3060L, 5.032880858479326069741729004270784264612e3065L, -3.116396010103058126269735274818345780360e3070L, 1.934634831148214353514796782480703021435e3075L, -1.204077166243116651938489240924641810276e3080L, 7.513065583444964704795707060501161621868e3084L, -4.699873512563164914493150520500838535415e3089L, 2.947541197349762411713872934523813866703e3094L, -1.853262416286420077763886100673646141885e3099L, 1.168196427912100545575264493997591040800e3104L, -7.382362285873345348505276546404015842875e3108L, 4.677071041058096429847797962954927487730e3113L, -2.970642034084362431442183248944824506476e3118L, 1.891572688282564476274920103912259755482e3123L, -1.207509963440193713810418554061532113326e3128L, 7.727731208240101791845515599659441557781e3132L, -4.957988488048495669466804712012179891532e3137L, 3.188965862446236259925047956715566822864e3142L, -2.056286895821370106507670239256782411337e3147L, 1.329246918771714093479509313343886287414e3152L, -8.614188519577835653765633797787633659253e3156L, // // 1602-1900: http://www.wolframalpha.com/input/?i=TABLE[N[Bernoulli[i]%2C40]%2C+{i%2C1602%2C1900%2C2}] 5.596396533621874175909933615343145642161e3161L, -3.644908483469388437457938883454376864180e3166L, 2.379838409026860469990569665632800095988e3171L, -1.557720925267669865362152155022069166772e3176L, 1.022143420270029721682551084917730373739e3181L, -6.723767358891570842116651998814252095792e3185L, 4.433950491570308179905446963723780229747e3190L, -2.931196854668917448553150023532223509373e3195L, 1.942557068752664549549945921392100172355e3200L, -1.290553202978622786891265558106235068695e3205L, 8.595082329732118303768775883557789195136e3209L, -5.738453265222970049867280061719670658457e3214L, 3.840687915100689856736926915331157331684e3219L, -2.576862441955523551149886625900059307506e3224L, 1.733166107320377310388765047659987844208e3229L, -1.168569552450178559412843683052610870569e3234L, 7.898289836694980777809433306209459851871e3238L, -5.351485909164216694400535493924387979018e3243L, 3.634772439350395177931952925644409735777e3248L, -2.474801048002975145046569303233576339695e3253L, 1.689126939254790850063878942448569759390e3258L, -1.155691524500722774057997965355407962525e3263L, 7.926435404542361405718288670391575676323e3267L, -5.449654814183048796524718620178906854846e3272L, 3.755898589900254795894812942275711835138e3277L, -2.594843902682143854622514329649211211808e3282L, 1.797048752397789969347915328338360264536e3287L, -1.247551415074438712713815166107969504456e3292L, 8.681719521514448143910215886388510318746e3296L, -6.056203898213120922016159444227958572276e3301L, 4.234882876331814099029781995617143573641e3306L, -2.968432911643338866295929748049749932906e3311L, 2.085723508930484816454740610260790948864e3316L, 
-1.469023169879432026361623513301566735138e3321L, 1.037150346505052892302077637883522696572e3326L, -7.339977067836656769144838365069396168014e3330L, 5.206985412168234130596004552956337839140e3335L, -3.702673773319239583641029108403509825141e3340L, 2.639251227995760315076225206168354089692e3345L, -1.885736353072698581595150856674914203383e3350L, 1.350563292338261784288559687678302458996e3355L, -9.695749980998301526113046898985991802000e3359L, 6.977167462628398202151721319169989304520e3364L, -5.032768280399753942925624560483352299263e3369L, 3.638844963651800168080623511900705036698e3374L, -2.637228631269251606169613775399022890118e3379L, 1.915836351653767108720464847696767898597e3384L, -1.395064293615007319328267865803567670760e3389L, 1.018249052614943190644465556486933211307e3394L, -7.449662162606857550867922631658930320805e3398L, 5.463119632208085241594107781601567713991e3403L, -4.015736541676989144201935890497836963875e3408L, 2.958754190183866660901503059509579790900e3413L, -2.185096074054288399312733179064098492511e3418L, 1.617517444557020250864919655301189186103e3423L, -1.200170662015511746748935675940010250555e3428L, 8.925888349899029449015791684428724952411e3432L, -6.653851763691885517669938275618991145962e3437L, 4.971722031098457895973348076474071155918e3442L, -3.723500582577984967442020337848702786829e3447L, 2.795153783541721373364976034391375710110e3452L, -2.103141577212720698169118819883801186873e3457L, 1.586129575320959267959148073466004084241e3462L, -1.198988457279648730711646682156242973137e3467L, 9.084402368157025658430300252246526602197e3471L, -6.898927494435965163817354296023108913714e3476L, 5.251332286149361587885046891266325872375e3481L, -4.006442950956739933884502808470603581850e3486L, 3.063718202820270282280659950794978994604e3491L, -2.348215284130973783732145823834807395920e3496L, 1.803952490148087317330011096671019781340e3501L, -1.389022326803437345760911068933754707688e3506L, 1.071986115818329525986099441493200866389e3511L, -8.292085224650940719705699485423856363908e3515L, 6.428829064452939640541475198655560890344e3520L, -4.995654440302797445368056643032307686314e3525L, 3.890847042582299188849273838681034339406e3530L, -3.037288555751484681537442833929275697351e3535L, 2.376385803695694695338601696534348875191e3540L, -1.863527130251861900692886008704804849076e3545L, 1.464674913498036269270793715104706378182e3550L, -1.153804954579033578659954846698233083197e3555L, 9.109783835348935092264268296199541780964e3559L, -7.208869193983001804305451104827153729326e3564L, 5.717530734277611949162917337810749919265e3569L, -4.544970302634007326980094771330550661605e3574L, 3.621042850825283032134228901678636353355e3579L, -2.891447067949778492831490654980043715471e3584L, 2.314060419397710657435821461707043283167e3589L, -1.856140759923563235273220981623595304434e3594L, 1.492185412981476596273279338314204171587e3599L, -1.202290032627175365810126250991853594801e3604L, 9.708881154579770196658265042625239421053e3608L, -7.857809850747029705680072304049448493252e3613L, 6.373898598298513400228819113197728735438e3618L, -5.181780406472117449048907989647202286666e3623L, 4.222036621953044040518942750638183171221e3628L, -3.447728386429130175025813550845575613047e3633L, 2.821701521717856346224159586852612710800e3638L, -2.314488376711998526455043944505424906920e3643L, 1.902671298033180765286213227393060711096e3648L, -1.567603736821312488140289549008391847440e3653L, 1.294408945316538946551785312385509945367e3658L, -1.071194533081615830960091702262923009420e3663L, 
8.884351908108581551151252566466606126397e3667L, -7.384866682828103669170236267589653324531e3672L, 6.152023838008155718180876735217718355563e3677L, -5.136304310431705506236573876510219357975e3682L, 4.297736808124296434723193397876220759378e3687L, -3.603994887745884762510172194982172483480e3692L, 3.028884745605031552399167746007361297342e3697L, -2.551141302205187365552982635794121855138e3702L, 2.153467982869535549299173317536193051608e3707L, -1.821769476343602094059466497311600827296e3712L, 1.544537580582347892980177956984101211006e3717L, -1.312358705945937257247030754517293537539e3722L, 1.117518229297781388884979995402355617235e3727L, -9.536820860779441793021624381677086661097e3731L, 8.156400668831968026931547065507466530546e3736L, -6.990984948728184142718575396052260691181e3741L, 6.005124901126818071638224144541102727563e3746L, -5.169500241880947716732682089328427995109e3751L, 4.459815478235310026240134567325749844182e3756L, -3.855902253361684187081283218890336962427e3761L, 3.340988024176995223515640815937037040546e3766L, -2.901099226680215736735094376078800376829e3771L, 2.524573363444334459448089563912567842927e3776L, -2.201659455716348555524529213295341212492e3781L, 1.924190302190936448078364755844591374353e3786L, -1.685313186099770223843319514432495898517e3791L, 1.479268235966730475749985741048766689808e3796L, -1.301205702893883803117530921635013780575e3801L, 1.147035071153450453405384269242743907426e3806L, -1.013300250456366849150496776951686112298e3811L, 8.970761720605591762300958007557533865346e3815L, -7.958829781488943084496783248922217392838e3820L, 7.076146954685024795720193943027902028642e3825L, -6.304798526260409199660290516451546966159e3830L, 5.629519616664188107056583939722984509867e3835L, -5.037281594099054092767959480843344929292e3840L, 4.516946091316834843581919268794683123349e3845L, -4.058975118925834202620358386772092359951e3850L, 3.655187798978978909014603682039470653549e3855L, -3.298555903041546671060101785513812175322e3860L, 2.983031738662727912016882399515879119620e3865L, -2.703403043317732979516341931451317866898e3870L, 2.455170460800096241793872443768546335444e3875L, -2.234443928432490538417605502448376856290e3880L, 2.037854924078003280537856980560782325730e3885L, -1.862482033918775734840779765743099458137e3890L, // // 1902-2200: http://www.wolframalpha.com/input/?i=TABLE[N[Bernoulli[i]%2C40]%2C+{i%2C1902%2C2200%2C2}] 1.705787724951999960095629912416210969679e3895L, -1.565564556110550991891247404758895970376e3900L, 1.439889351869832939488618785632174464789e3905L, -1.327084102784257406218693901793045990520e3910L, 1.225682557296027075027021534960026145706e3915L, -1.134401635488994148555787301654561211982e3920L, 1.052116934052356802920509999705307165985e3925L, -9.778417073593082219082361206542342793584e3929L, 9.107088061888562704837019028349522303725e3934L, -8.499551364633102138471246155980056936129e3939L, 7.949082681085658044610890152056533167407e3944L, -7.449748809722797718736397140511396011691e3949L, 6.996307824769340144608141799981589288378e3954L, -6.584122718472954006131003060359621706243e3959L, 6.209086595833487707192492087176843233407e3964L, -5.867557793863165391821489909125720982339e3969L, 5.556303538475260373917478405626416604297e3974L, -5.272450955936249442242634142613834212778e3979L, 5.013444428433789818228792126117223030641e3984L, -4.777008429684552423800736200488532033034e3989L, 4.561115100786341787876705283291018781137e3994L, -4.363955932181992701667719449097126840439e3999L, 4.183917007557000586305945495258591147615e4004L, 
-4.019557342177353010692923286760895584096e4009L, 3.869589913635745758786275231296652917580e4014L, -3.732865038934070181861017140563175000872e4019L, 3.608355799736107390800162778737339576843e4024L, -3.495145258697474565347261083975193776541e4029L, 3.392415245050326563747729613872524362741e4034L, -3.299436517958948801426629481782413630714e4039L, 3.215560142306355508598119430378551642857e4044L, -3.140209934146377815556058799557727461298e4049L, 3.072875852591406752692761744649563131272e4054L, -3.013108231854799187724018548255922550991e4059L, 2.960512761914376268185064129600549308882e4064L, -2.914746139139036596123006476633770383901e4069L, 2.875512319506974985103149834921665445532e4074L, -2.842559316984704569380036093537576068104e4079L, 2.815676498441436148701483904115879856704e4084L, -2.794692334326268275058539147656334465534e4089L, 2.779472571396106785963004020814493340829e4094L, -2.769918800191406321625251621260024635680e4099L, 2.765967395840433013288935879837390099329e4104L, -2.767588816244119880300161388073836623878e4109L, 2.774787246856347651152278076466043136230e4114L, -2.787600586224957950622601135620189837948e4119L, 2.806100771288225169339048358106052817280e4124L, -2.830394446218080573456394167711739786431e4129L, 2.860623983452244712039094143642843717029e4134L, -2.896968870550611723525738907034588104300e4139L, 2.939647481737606306044335918078617963078e4144L, -2.988919258547518526076380181812161398808e4149L, 3.045087329976721023952450383837883029431e4154L, -3.108501609077197464748958150625867523408e4159L, 3.179562410123820875787052833975010965963e4164L, -3.258724638491880104953913719767939138170e4169L, 3.346502614347964869115073881474258766546e4174L, -3.443475601364631413158991572423086599816e4179L, 3.550294123121350747300886840907918182129e4184L, -3.667687162886053419715985091863398517145e4189L, 3.796470357354794420044278000297864085607e4194L, -3.937555311976846882455930574021795626971e4199L, 4.091960185075595842547638450930710467324e4204L, -4.260821710519620959138720129506770036460e4209L, 4.445408854703156440576808070360934740837e4214L, -4.647138333645908068599900650548418672065e4219L, 4.867592250805288922190809906525766574205e4224L, -5.108538156515551259475573296900660666192e4229L, 5.371951876776035157276013631113314852508e4234L, -5.660043513521220243900043448456234873940e4239L, 5.975287081834808618140945840817834710330e4244L, -6.320454323372684034118816565375206053746e4249L, 6.698653321371992324876559665938996023646e4254L, -7.113372643219128807424340495235606473967e4259L, 7.568531854202750881338746432078817214052e4264L, -8.068539383842553693076672384509126681464e4269L, 8.618358887685935324188596304168259394311e4274L, -9.223585437012291673660319256730398171887e4279L, 9.890533091606747031464718533600572123091e4284L, -1.062633567277107015128545384570274268438e4290L, 1.143906286231591191271274413511275981288e4295L, -1.233785411712565904499340744089870916842e4300L, 1.333307331840530219050170916015276125870e4305L, -1.443648758235403286296065629219598769529e4310L, 1.566147425967471851736562867318748510088e4315L, -1.702326086290842780634120184324081017286e4320L, 1.853920350455786350409148418966087344063e4325L, -2.022911043115598592197907512410632615740e4330L, 2.211561842992792253055716743938240466613e4335L, -2.422463130294011318178080247305407476096e4340L, 2.658583129381772791030436640519847627789e4345L, -2.923327636881988941081365085520742216540e4350L, 3.220609866329557159104267531058019683271e4355L, -3.554932228621330128152149026066400241546e4360L, 
3.931482212643167323798366327390058684499e4365L, -4.356244944221399578650235478583297389113e4370L, 4.836135498303121165971331625888490168138e4375L, -5.379154636371461359750682662639062606297e4380L, 5.994572359716861309678596804350346692501e4385L, -6.693144535124290060793936095397161934045e4390L, 7.487368894313509797084395689517008597061e4395L, -8.391787970609807810531578161564037339793e4400L, 9.423348062978921203475110312003096820035e4405L, -1.060182516651648405903017734022504884319e4411L, 1.195033105063952979885086754342706651656e4416L, -1.349591538868673992167798923586925758429e4421L, 1.527028315253291113905307092657539132480e4426L, -1.731065051510920640409442255224015234974e4431L, 1.966076741510092840076264635935585216200e4436L, -2.237214093245750681191361238831105906202e4441L, 2.550550094903891445719729187215253324232e4446L, -2.913255853313667303707651906277658164129e4451L, 3.333811847072394764285817140850092324169e4456L, -3.822262084288044913490118858492563410392e4461L, 4.390520310533864198186202368026630430120e4466L, -5.052739449335052080092114976206610871466e4471L, 5.825757966350870043117899492954521458799e4476L, -6.729639942938203582008846884575881320532e4481L, 7.788329466816396015493306357116312471970e4486L, -9.030444674469025073047417528762134025409e4491L, 1.049024263381993629167658236142000524752e4497L, -1.220879351508964912255081664072251573277e4502L, 1.423541151220109512749655991050110438471e4507L, -1.662940118618541616964708044356967429362e4512L, 1.946219185900482116137855064775635250366e4517L, -2.281995008842006909631764011781911322493e4522L, 2.680678198213108543648324254258111216040e4527L, -3.154866427472784086389609599207759103500e4532L, 3.719827710160801797530420206201570269720e4537L, -4.394095404360277919140027580071549980218e4542L, 5.200201854779615608741690339830306148442e4547L, -6.165584312943608652377791415603277251516e4552L, 7.323705248531382981433751104158852636445e4557L, -8.715439846124090647163930834760361817820e4562L, 1.039079696609215651011736087603304766850e4568L, -1.241105689556982425619608247473478857800e4573L, 1.485143079696380339521658550262280772546e4578L, -1.780437412164973637340821168154300094802e4583L, 2.138372099157518882088209435171770222745e4588L, -2.572985071149069551034276570909360759588e4593L, 3.101615379617643734762997559011097203354e4598L, -3.745713657616368229906151946770042703357e4603L, 4.531859496161940719835150033082561700677e4608L, -5.493040495326927998321538336584233566465e4613L, 6.670262730603009306595018122252730741798e4618L, -8.114581584793494903775255213273982440688e4623L, 9.889666561810883044159054730371102725871e4628L, -1.207504541653929734716275932570097623330e4634L, 1.477021377885843688233899471354959308782e4639L, -1.809984912147908767583043524070645821179e4644L, // // 2202-2320: http://www.wolframalpha.com/input/?i=TABLE[N[Bernoulli[i]%2C40]%2C+{i%2C2202%2C2320%2C2}] 2.222043594325228980916360265527780300093e4649L, -2.732869701246338361699515268224049951411e4654L, 3.367233945421922463553518272642397177145e4659L, -4.156377225041273602431272489314020150392e4664L, 5.139764368092890466235162431795350591151e4669L, -6.367329693760865476879589228002216011370e4674L, 7.902356742934106007362514378717026407839e4679L, -9.825176966314431712897976595483070301406e4684L, 1.223792760178593282435724837135946867088e4690L, -1.527068151452750404853140815207477555192e4695L, 1.908935682572268829496101580401263597905e4700L, -2.390593888616966248780378941331847473699e4705L, 2.999171106576893833644521002894489856321e4710L, 
-3.769440655453736670024798444784356437578e4715L, 4.746047769851891438576002047529258107351e4720L, -5.986405469241447720766576164546767533359e4725L, 7.564466155536872051712519119999711534616e4730L, -9.575641408047918720040356745796976488951e4735L, 1.214322951835035451699619713803395497423e4741L, -1.542682591979864353012093794301924196234e4746L, 1.963334539793192183270983986567556358603e4751L, -2.503148969013901182572118121398034622584e4756L, 3.197076711250102964526567664729089847162e4761L, -4.090653552025822488578293526174572934858e4766L, 5.243302769651520536759521264615159906699e4771L, -6.732697170903775309261288127044088674182e4776L, 8.660529543801770516930589210020128142543e4781L, -1.116015823611149634592870112730519454113e4787L, 1.440675306432920129218036927923030695520e4792L, -1.863078034853256227415397798026969938881e4797L, 2.413595413458810442409656314019115041699e4802L, -3.132317029597258599678590012779717945144e4807L, 4.072246763371584312534474102756137619716e4812L, -5.303577511521827157146305369181950467569e4817L, 6.919417518688636032335131253584331645491e4822L, -9.043473312934241153732087612484569398979e4827L, 1.184037400265044213826044590639924237359e4833L, -1.552956685415800894409743993367334099777e4838L, 2.040404893052952221581694807126473204625e4843L, -2.685565763841580219033402331219206776210e4848L, 3.540927057361929050327811875290025248120e4853L, -4.676912607538885419407656762767991163574e4858L, 6.188165903566760647569323704623433330229e4863L, -8.202087471895029964699042637255411806373e4868L, 1.089045274355389654614196651761310970580e4874L, -1.448524684976553869119447042300206226148e4879L, 1.930028100376784839502387280956424581974e4884L, -2.576074799096023589462128312524664980682e4889L, 3.444369635011990347297134928452972402038e4894L, -4.613354441299253694113609154769978684993e4899L, 6.189834306866879018555349507257537840922e4904L, -8.319470760665157534580593571258276368233e4909L, 1.120124240070996761986102680587384813245e4915L, -1.510740451399746828351090108638980398124e4920L, 2.041108231091323198877509959371257503819e4925L, -2.762447751447012472733302936575873838539e4930L, #endif }}; return bernoulli_data[n]; } template inline T unchecked_bernoulli_imp(std::size_t n, const mpl::int_<4>& ) { // // Special case added for multiprecision types that have no conversion from long long, // there are very few such types, but mpfr_class is one. 
//
   static const boost::array<boost::int32_t, 1 + max_bernoulli_b2n<T>::value> numerators =
   {{
      boost::int32_t(        +1LL),
      boost::int32_t(        +1LL),
      boost::int32_t(        -1LL),
      boost::int32_t(        +1LL),
      boost::int32_t(        -1LL),
      boost::int32_t(        +5LL),
      boost::int32_t(      -691LL),
      boost::int32_t(        +7LL),
      boost::int32_t(     -3617LL),
      boost::int32_t(    +43867LL),
      boost::int32_t(   -174611LL),
      boost::int32_t(   +854513LL),
   }};
   static const boost::array<boost::int32_t, 1 + max_bernoulli_b2n<T>::value> denominators =
   {{
      boost::int32_t(      1LL),
      boost::int32_t(      6LL),
      boost::int32_t(     30LL),
      boost::int32_t(     42LL),
      boost::int32_t(     30LL),
      boost::int32_t(     66LL),
      boost::int32_t(   2730LL),
      boost::int32_t(      6LL),
      boost::int32_t(    510LL),
      boost::int32_t(    798LL),
      boost::int32_t(    330LL),
      boost::int32_t(    138LL),
   }};
   return T(numerators[n]) / T(denominators[n]);
}

} // namespace detail

template <class T>
inline T unchecked_bernoulli_b2n(const std::size_t n)
{
   typedef mpl::int_<detail::bernoulli_imp_variant<T>::value> tag_type;

   return detail::unchecked_bernoulli_imp<T>(n, tag_type());
}

}} // namespaces

#endif // BOOST_MATH_UNCHECKED_BERNOULLI_HPP
mlpack-2.2.5/src/mlpack/core/boost_backport/math_fwd.hpp000066400000000000000000002030021315013601400232610ustar00rootroot00000000000000// math_fwd.hpp

// TODO revise completely for new distribution classes.

// Copyright Paul A. Bristow 2006.
// Copyright John Maddock 2006.

// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt
// or copy at http://www.boost.org/LICENSE_1_0.txt)

// Omnibus list of forward declarations of math special functions.

// IT = Integer type.
// RT = Real type (built-in floating-point types, float, double, long double) & User Defined Types
// AT = Integer or Real type

#ifndef BOOST_MATH_SPECIAL_MATH_FWD_HPP
#define BOOST_MATH_SPECIAL_MATH_FWD_HPP

#ifdef _MSC_VER
#pragma once
#endif

#include "policy.hpp"
#include <vector>
#include <boost/math/tools/promotion.hpp> // for argument promotion.
#include <boost/math/special_functions/detail/round_fwd.hpp>
#include <boost/mpl/comparison.hpp>
#include <boost/config/no_tr1/complex.hpp>

#define BOOST_NO_MACRO_EXPAND /**/

namespace boost { namespace math {

// Math functions (in roughly alphabetic order).

// Beta functions.
template <class RT1, class RT2>
typename tools::promote_args<RT1, RT2>::type
   beta(RT1 a, RT2 b); // Beta function (2 arguments).

template <class RT1, class RT2, class A>
typename tools::promote_args<RT1, RT2, A>::type
   beta(RT1 a, RT2 b, A x); // Beta function (3 arguments).

template <class RT1, class RT2, class RT3, class Policy>
typename tools::promote_args<RT1, RT2, RT3>::type
   beta(RT1 a, RT2 b, RT3 x, const Policy& pol); // Beta function (3 arguments).

template <class RT1, class RT2, class RT3>
typename tools::promote_args<RT1, RT2, RT3>::type
   betac(RT1 a, RT2 b, RT3 x);

template <class RT1, class RT2, class RT3, class Policy>
typename tools::promote_args<RT1, RT2, RT3>::type
   betac(RT1 a, RT2 b, RT3 x, const Policy& pol);

template <class RT1, class RT2, class RT3>
typename tools::promote_args<RT1, RT2, RT3>::type
   ibeta(RT1 a, RT2 b, RT3 x); // Incomplete beta function.

template <class RT1, class RT2, class RT3, class Policy>
typename tools::promote_args<RT1, RT2, RT3>::type
   ibeta(RT1 a, RT2 b, RT3 x, const Policy& pol); // Incomplete beta function.

template <class RT1, class RT2, class RT3>
typename tools::promote_args<RT1, RT2, RT3>::type
   ibetac(RT1 a, RT2 b, RT3 x); // Incomplete beta complement function.

template <class RT1, class RT2, class RT3, class Policy>
typename tools::promote_args<RT1, RT2, RT3>::type
   ibetac(RT1 a, RT2 b, RT3 x, const Policy& pol); // Incomplete beta complement function.

template <class T1, class T2, class T3, class T4>
typename tools::promote_args<T1, T2, T3, T4>::type
   ibeta_inv(T1 a, T2 b, T3 p, T4* py);

template <class T1, class T2, class T3, class T4, class Policy>
typename tools::promote_args<T1, T2, T3, T4>::type
   ibeta_inv(T1 a, T2 b, T3 p, T4* py, const Policy& pol);

template <class RT1, class RT2, class RT3>
typename tools::promote_args<RT1, RT2, RT3>::type
   ibeta_inv(RT1 a, RT2 b, RT3 p); // Incomplete beta inverse function.

template <class RT1, class RT2, class RT3, class Policy>
typename tools::promote_args<RT1, RT2, RT3>::type
   ibeta_inv(RT1 a, RT2 b, RT3 p, const Policy&); // Incomplete beta inverse function.

template <class RT1, class RT2, class RT3>
typename tools::promote_args<RT1, RT2, RT3>::type
   ibeta_inva(RT1 a, RT2 b, RT3 p); // Incomplete beta inverse function.
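// Usage sketch (illustrative only, not part of this header; assumes the
// implementation header <boost/math/special_functions/beta.hpp> is included):
// ibeta() is the regularized incomplete beta function I_x(a, b), and
// ibeta_inv() inverts it in x; the result type is computed from the argument
// types by tools::promote_args<>, so mixed float/double arguments promote to
// double:
//
//   double p = boost::math::ibeta(2.0, 3.0, 0.5);   // I_0.5(2, 3)
//   double x = boost::math::ibeta_inv(2.0, 3.0, p); // recovers x == 0.5
//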
template typename tools::promote_args::type ibeta_inva(RT1 a, RT2 b, RT3 p, const Policy&); // Incomplete beta inverse function. template typename tools::promote_args::type ibeta_invb(RT1 a, RT2 b, RT3 p); // Incomplete beta inverse function. template typename tools::promote_args::type ibeta_invb(RT1 a, RT2 b, RT3 p, const Policy&); // Incomplete beta inverse function. template typename tools::promote_args::type ibetac_inv(T1 a, T2 b, T3 q, T4* py); template typename tools::promote_args::type ibetac_inv(T1 a, T2 b, T3 q, T4* py, const Policy& pol); template typename tools::promote_args::type ibetac_inv(RT1 a, RT2 b, RT3 q); // Incomplete beta complement inverse function. template typename tools::promote_args::type ibetac_inv(RT1 a, RT2 b, RT3 q, const Policy&); // Incomplete beta complement inverse function. template typename tools::promote_args::type ibetac_inva(RT1 a, RT2 b, RT3 q); // Incomplete beta complement inverse function. template typename tools::promote_args::type ibetac_inva(RT1 a, RT2 b, RT3 q, const Policy&); // Incomplete beta complement inverse function. template typename tools::promote_args::type ibetac_invb(RT1 a, RT2 b, RT3 q); // Incomplete beta complement inverse function. template typename tools::promote_args::type ibetac_invb(RT1 a, RT2 b, RT3 q, const Policy&); // Incomplete beta complement inverse function. template typename tools::promote_args::type ibeta_derivative(RT1 a, RT2 b, RT3 x); // derivative of incomplete beta template typename tools::promote_args::type ibeta_derivative(RT1 a, RT2 b, RT3 x, const Policy& pol); // derivative of incomplete beta // Binomial: template T binomial_coefficient(unsigned n, unsigned k, const Policy& pol); template T binomial_coefficient(unsigned n, unsigned k); // erf & erfc error functions. template // Error function. typename tools::promote_args::type erf(RT z); template // Error function. typename tools::promote_args::type erf(RT z, const Policy&); template // Error function complement. typename tools::promote_args::type erfc(RT z); template // Error function complement. typename tools::promote_args::type erfc(RT z, const Policy&); template // Error function inverse. typename tools::promote_args::type erf_inv(RT z); template // Error function inverse. typename tools::promote_args::type erf_inv(RT z, const Policy& pol); template // Error function complement inverse. typename tools::promote_args::type erfc_inv(RT z); template // Error function complement inverse. 
typename tools::promote_args::type erfc_inv(RT z, const Policy& pol); // Polynomials: template typename tools::promote_args::type legendre_next(unsigned l, T1 x, T2 Pl, T3 Plm1); template typename tools::promote_args::type legendre_p(int l, T x); #if !BOOST_WORKAROUND(BOOST_MSVC, <= 1310) template typename boost::enable_if_c::value, typename tools::promote_args::type>::type legendre_p(int l, T x, const Policy& pol); #endif template typename tools::promote_args::type legendre_q(unsigned l, T x); #if !BOOST_WORKAROUND(BOOST_MSVC, <= 1310) template typename boost::enable_if_c::value, typename tools::promote_args::type>::type legendre_q(unsigned l, T x, const Policy& pol); #endif template typename tools::promote_args::type legendre_next(unsigned l, unsigned m, T1 x, T2 Pl, T3 Plm1); template typename tools::promote_args::type legendre_p(int l, int m, T x); template typename tools::promote_args::type legendre_p(int l, int m, T x, const Policy& pol); template typename tools::promote_args::type laguerre_next(unsigned n, T1 x, T2 Ln, T3 Lnm1); template typename tools::promote_args::type laguerre_next(unsigned n, unsigned l, T1 x, T2 Pl, T3 Plm1); template typename tools::promote_args::type laguerre(unsigned n, T x); template typename tools::promote_args::type laguerre(unsigned n, unsigned m, T x, const Policy& pol); template struct laguerre_result { typedef typename mpl::if_< policies::is_policy, typename tools::promote_args::type, typename tools::promote_args::type >::type type; }; template typename laguerre_result::type laguerre(unsigned n, T1 m, T2 x); template typename tools::promote_args::type hermite(unsigned n, T x); template typename tools::promote_args::type hermite(unsigned n, T x, const Policy& pol); template typename tools::promote_args::type hermite_next(unsigned n, T1 x, T2 Hn, T3 Hnm1); template std::complex::type> spherical_harmonic(unsigned n, int m, T1 theta, T2 phi); template std::complex::type> spherical_harmonic(unsigned n, int m, T1 theta, T2 phi, const Policy& pol); template typename tools::promote_args::type spherical_harmonic_r(unsigned n, int m, T1 theta, T2 phi); template typename tools::promote_args::type spherical_harmonic_r(unsigned n, int m, T1 theta, T2 phi, const Policy& pol); template typename tools::promote_args::type spherical_harmonic_i(unsigned n, int m, T1 theta, T2 phi); template typename tools::promote_args::type spherical_harmonic_i(unsigned n, int m, T1 theta, T2 phi, const Policy& pol); // Elliptic integrals: template typename tools::promote_args::type ellint_rf(T1 x, T2 y, T3 z); template typename tools::promote_args::type ellint_rf(T1 x, T2 y, T3 z, const Policy& pol); template typename tools::promote_args::type ellint_rd(T1 x, T2 y, T3 z); template typename tools::promote_args::type ellint_rd(T1 x, T2 y, T3 z, const Policy& pol); template typename tools::promote_args::type ellint_rc(T1 x, T2 y); template typename tools::promote_args::type ellint_rc(T1 x, T2 y, const Policy& pol); template typename tools::promote_args::type ellint_rj(T1 x, T2 y, T3 z, T4 p); template typename tools::promote_args::type ellint_rj(T1 x, T2 y, T3 z, T4 p, const Policy& pol); template typename tools::promote_args::type ellint_rg(T1 x, T2 y, T3 z); template typename tools::promote_args::type ellint_rg(T1 x, T2 y, T3 z, const Policy& pol); template typename tools::promote_args::type ellint_2(T k); template typename tools::promote_args::type ellint_2(T1 k, T2 phi); template typename tools::promote_args::type ellint_2(T1 k, T2 phi, const Policy& pol); template typename 
tools::promote_args::type ellint_1(T k); template typename tools::promote_args::type ellint_1(T1 k, T2 phi); template typename tools::promote_args::type ellint_1(T1 k, T2 phi, const Policy& pol); template typename tools::promote_args::type ellint_d(T k); template typename tools::promote_args::type ellint_d(T1 k, T2 phi); template typename tools::promote_args::type ellint_d(T1 k, T2 phi, const Policy& pol); template typename tools::promote_args::type jacobi_zeta(T1 k, T2 phi); template typename tools::promote_args::type jacobi_zeta(T1 k, T2 phi, const Policy& pol); template typename tools::promote_args::type heuman_lambda(T1 k, T2 phi); template typename tools::promote_args::type heuman_lambda(T1 k, T2 phi, const Policy& pol); namespace detail{ template struct ellint_3_result { typedef typename mpl::if_< policies::is_policy, typename tools::promote_args::type, typename tools::promote_args::type >::type type; }; } // namespace detail template typename detail::ellint_3_result::type ellint_3(T1 k, T2 v, T3 phi); template typename tools::promote_args::type ellint_3(T1 k, T2 v, T3 phi, const Policy& pol); template typename tools::promote_args::type ellint_3(T1 k, T2 v); // Factorial functions. // Note: not for integral types, at present. template struct max_factorial; template RT factorial(unsigned int); template RT factorial(unsigned int, const Policy& pol); template RT unchecked_factorial(unsigned int BOOST_MATH_APPEND_EXPLICIT_TEMPLATE_TYPE(RT)); template RT double_factorial(unsigned i); template RT double_factorial(unsigned i, const Policy& pol); template typename tools::promote_args::type falling_factorial(RT x, unsigned n); template typename tools::promote_args::type falling_factorial(RT x, unsigned n, const Policy& pol); template typename tools::promote_args::type rising_factorial(RT x, int n); template typename tools::promote_args::type rising_factorial(RT x, int n, const Policy& pol); // Gamma functions. 
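// Usage sketch (illustrative only; assumes
// <boost/math/special_functions/gamma.hpp> is included): tgamma() is the
// gamma function itself, with tgamma(n) == (n-1)! for positive integer n,
// while the lgamma() overload taking an int* returns log|gamma(z)| and
// reports the sign of gamma(z) separately:
//
//   double g = boost::math::tgamma(5.0);          // 24 == 4!
//   int sign = 0;
//   double l = boost::math::lgamma(-2.5, &sign);  // sign of gamma(-2.5) in 'sign'
//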
template typename tools::promote_args::type tgamma(RT z); template typename tools::promote_args::type tgamma1pm1(RT z); template typename tools::promote_args::type tgamma1pm1(RT z, const Policy& pol); template typename tools::promote_args::type tgamma(RT1 a, RT2 z); template typename tools::promote_args::type tgamma(RT1 a, RT2 z, const Policy& pol); template typename tools::promote_args::type lgamma(RT z, int* sign); template typename tools::promote_args::type lgamma(RT z, int* sign, const Policy& pol); template typename tools::promote_args::type lgamma(RT x); template typename tools::promote_args::type lgamma(RT x, const Policy& pol); template typename tools::promote_args::type tgamma_lower(RT1 a, RT2 z); template typename tools::promote_args::type tgamma_lower(RT1 a, RT2 z, const Policy&); template typename tools::promote_args::type gamma_q(RT1 a, RT2 z); template typename tools::promote_args::type gamma_q(RT1 a, RT2 z, const Policy&); template typename tools::promote_args::type gamma_p(RT1 a, RT2 z); template typename tools::promote_args::type gamma_p(RT1 a, RT2 z, const Policy&); template typename tools::promote_args::type tgamma_delta_ratio(T1 z, T2 delta); template typename tools::promote_args::type tgamma_delta_ratio(T1 z, T2 delta, const Policy&); template typename tools::promote_args::type tgamma_ratio(T1 a, T2 b); template typename tools::promote_args::type tgamma_ratio(T1 a, T2 b, const Policy&); template typename tools::promote_args::type gamma_p_derivative(T1 a, T2 x); template typename tools::promote_args::type gamma_p_derivative(T1 a, T2 x, const Policy&); // gamma inverse. template typename tools::promote_args::type gamma_p_inv(T1 a, T2 p); template typename tools::promote_args::type gamma_p_inva(T1 a, T2 p, const Policy&); template typename tools::promote_args::type gamma_p_inva(T1 a, T2 p); template typename tools::promote_args::type gamma_p_inv(T1 a, T2 p, const Policy&); template typename tools::promote_args::type gamma_q_inv(T1 a, T2 q); template typename tools::promote_args::type gamma_q_inv(T1 a, T2 q, const Policy&); template typename tools::promote_args::type gamma_q_inva(T1 a, T2 q); template typename tools::promote_args::type gamma_q_inva(T1 a, T2 q, const Policy&); // digamma: template typename tools::promote_args::type digamma(T x); template typename tools::promote_args::type digamma(T x, const Policy&); // trigamma: template typename tools::promote_args::type trigamma(T x); template typename tools::promote_args::type trigamma(T x, const Policy&); // polygamma: template typename tools::promote_args::type polygamma(int n, T x); template typename tools::promote_args::type polygamma(int n, T x, const Policy&); // Hypotenuse function sqrt(x ^ 2 + y ^ 2). template typename tools::promote_args::type hypot(T1 x, T2 y); template typename tools::promote_args::type hypot(T1 x, T2 y, const Policy&); // cbrt - cube root. template typename tools::promote_args::type cbrt(RT z); template typename tools::promote_args::type cbrt(RT z, const Policy&); // log1p is log(x + 1) template typename tools::promote_args::type log1p(T); template typename tools::promote_args::type log1p(T, const Policy&); // log1pmx is log(x + 1) - x template typename tools::promote_args::type log1pmx(T); template typename tools::promote_args::type log1pmx(T, const Policy&); // Exp (x) minus 1 functions. 
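// Usage sketch (illustrative only; assumes the implementation headers are
// included): log1p(x) and expm1(x) stay accurate where the naive forms
// log(1 + x) and exp(x) - 1 lose all significance for |x| below machine
// epsilon:
//
//   double a = boost::math::log1p(1e-20); // ~1e-20, but std::log(1.0 + 1e-20) == 0
//   double b = boost::math::expm1(1e-20); // ~1e-20, but std::exp(1e-20) - 1.0 == 0
//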
template typename tools::promote_args::type expm1(T); template typename tools::promote_args::type expm1(T, const Policy&); // Power - 1 template typename tools::promote_args::type powm1(const T1 a, const T2 z); template typename tools::promote_args::type powm1(const T1 a, const T2 z, const Policy&); // sqrt(1+x) - 1 template typename tools::promote_args::type sqrt1pm1(const T& val); template typename tools::promote_args::type sqrt1pm1(const T& val, const Policy&); // sinus cardinals: template typename tools::promote_args::type sinc_pi(T x); template typename tools::promote_args::type sinc_pi(T x, const Policy&); template typename tools::promote_args::type sinhc_pi(T x); template typename tools::promote_args::type sinhc_pi(T x, const Policy&); // inverse hyperbolics: template typename tools::promote_args::type asinh(T x); template typename tools::promote_args::type asinh(T x, const Policy&); template typename tools::promote_args::type acosh(T x); template typename tools::promote_args::type acosh(T x, const Policy&); template typename tools::promote_args::type atanh(T x); template typename tools::promote_args::type atanh(T x, const Policy&); namespace detail{ typedef mpl::int_<0> bessel_no_int_tag; // No integer optimisation possible. typedef mpl::int_<1> bessel_maybe_int_tag; // Maybe integer optimisation. typedef mpl::int_<2> bessel_int_tag; // Definite integer optimistaion. template struct bessel_traits { typedef typename mpl::if_< is_integral, typename tools::promote_args::type, typename tools::promote_args::type >::type result_type; typedef typename policies::precision::type precision_type; typedef typename mpl::if_< mpl::or_< mpl::less_equal >, mpl::greater > >, bessel_no_int_tag, typename mpl::if_< is_integral, bessel_int_tag, bessel_maybe_int_tag >::type >::type optimisation_tag; }; } // detail // Bessel functions: template typename detail::bessel_traits::result_type cyl_bessel_j(T1 v, T2 x, const Policy& pol); template typename detail::bessel_traits::result_type cyl_bessel_j_prime(T1 v, T2 x, const Policy& pol); template typename detail::bessel_traits >::result_type cyl_bessel_j(T1 v, T2 x); template typename detail::bessel_traits >::result_type cyl_bessel_j_prime(T1 v, T2 x); template typename detail::bessel_traits::result_type sph_bessel(unsigned v, T x, const Policy& pol); template typename detail::bessel_traits::result_type sph_bessel_prime(unsigned v, T x, const Policy& pol); template typename detail::bessel_traits >::result_type sph_bessel(unsigned v, T x); template typename detail::bessel_traits >::result_type sph_bessel_prime(unsigned v, T x); template typename detail::bessel_traits::result_type cyl_bessel_i(T1 v, T2 x, const Policy& pol); template typename detail::bessel_traits::result_type cyl_bessel_i_prime(T1 v, T2 x, const Policy& pol); template typename detail::bessel_traits >::result_type cyl_bessel_i(T1 v, T2 x); template typename detail::bessel_traits >::result_type cyl_bessel_i_prime(T1 v, T2 x); template typename detail::bessel_traits::result_type cyl_bessel_k(T1 v, T2 x, const Policy& pol); template typename detail::bessel_traits::result_type cyl_bessel_k_prime(T1 v, T2 x, const Policy& pol); template typename detail::bessel_traits >::result_type cyl_bessel_k(T1 v, T2 x); template typename detail::bessel_traits >::result_type cyl_bessel_k_prime(T1 v, T2 x); template typename detail::bessel_traits::result_type cyl_neumann(T1 v, T2 x, const Policy& pol); template typename detail::bessel_traits::result_type cyl_neumann_prime(T1 v, T2 x, const Policy& pol); template 
typename detail::bessel_traits >::result_type cyl_neumann(T1 v, T2 x); template typename detail::bessel_traits >::result_type cyl_neumann_prime(T1 v, T2 x); template typename detail::bessel_traits::result_type sph_neumann(unsigned v, T x, const Policy& pol); template typename detail::bessel_traits::result_type sph_neumann_prime(unsigned v, T x, const Policy& pol); template typename detail::bessel_traits >::result_type sph_neumann(unsigned v, T x); template typename detail::bessel_traits >::result_type sph_neumann_prime(unsigned v, T x); template typename detail::bessel_traits::result_type cyl_bessel_j_zero(T v, int m, const Policy& pol); template typename detail::bessel_traits >::result_type cyl_bessel_j_zero(T v, int m); template OutputIterator cyl_bessel_j_zero(T v, int start_index, unsigned number_of_zeros, OutputIterator out_it); template OutputIterator cyl_bessel_j_zero(T v, int start_index, unsigned number_of_zeros, OutputIterator out_it, const Policy&); template typename detail::bessel_traits::result_type cyl_neumann_zero(T v, int m, const Policy& pol); template typename detail::bessel_traits >::result_type cyl_neumann_zero(T v, int m); template OutputIterator cyl_neumann_zero(T v, int start_index, unsigned number_of_zeros, OutputIterator out_it); template OutputIterator cyl_neumann_zero(T v, int start_index, unsigned number_of_zeros, OutputIterator out_it, const Policy&); template std::complex >::result_type> cyl_hankel_1(T1 v, T2 x); template std::complex::result_type> cyl_hankel_1(T1 v, T2 x, const Policy& pol); template std::complex::result_type> cyl_hankel_2(T1 v, T2 x, const Policy& pol); template std::complex >::result_type> cyl_hankel_2(T1 v, T2 x); template std::complex::result_type> sph_hankel_1(T1 v, T2 x, const Policy& pol); template std::complex >::result_type> sph_hankel_1(T1 v, T2 x); template std::complex::result_type> sph_hankel_2(T1 v, T2 x, const Policy& pol); template std::complex >::result_type> sph_hankel_2(T1 v, T2 x); template typename tools::promote_args::type airy_ai(T x, const Policy&); template typename tools::promote_args::type airy_ai(T x); template typename tools::promote_args::type airy_bi(T x, const Policy&); template typename tools::promote_args::type airy_bi(T x); template typename tools::promote_args::type airy_ai_prime(T x, const Policy&); template typename tools::promote_args::type airy_ai_prime(T x); template typename tools::promote_args::type airy_bi_prime(T x, const Policy&); template typename tools::promote_args::type airy_bi_prime(T x); template T airy_ai_zero(int m); template T airy_ai_zero(int m, const Policy&); template OutputIterator airy_ai_zero( int start_index, unsigned number_of_zeros, OutputIterator out_it); template OutputIterator airy_ai_zero( int start_index, unsigned number_of_zeros, OutputIterator out_it, const Policy&); template T airy_bi_zero(int m); template T airy_bi_zero(int m, const Policy&); template OutputIterator airy_bi_zero( int start_index, unsigned number_of_zeros, OutputIterator out_it); template OutputIterator airy_bi_zero( int start_index, unsigned number_of_zeros, OutputIterator out_it, const Policy&); template typename tools::promote_args::type sin_pi(T x, const Policy&); template typename tools::promote_args::type sin_pi(T x); template typename tools::promote_args::type cos_pi(T x, const Policy&); template typename tools::promote_args::type cos_pi(T x); template int fpclassify BOOST_NO_MACRO_EXPAND(T t); template bool isfinite BOOST_NO_MACRO_EXPAND(T z); template bool isinf BOOST_NO_MACRO_EXPAND(T t); 
template bool isnan BOOST_NO_MACRO_EXPAND(T t); template bool isnormal BOOST_NO_MACRO_EXPAND(T t); template int signbit BOOST_NO_MACRO_EXPAND(T x); template int sign BOOST_NO_MACRO_EXPAND(const T& z); template typename tools::promote_args_permissive::type copysign BOOST_NO_MACRO_EXPAND(const T& x, const U& y); template typename tools::promote_args_permissive::type changesign BOOST_NO_MACRO_EXPAND(const T& z); // Exponential integrals: namespace detail{ template struct expint_result { typedef typename mpl::if_< policies::is_policy, typename tools::promote_args::type, typename tools::promote_args::type >::type type; }; } // namespace detail template typename tools::promote_args::type expint(unsigned n, T z, const Policy&); template typename detail::expint_result::type expint(T const z, U const u); template typename tools::promote_args::type expint(T z); // Zeta: template typename tools::promote_args::type zeta(T s, const Policy&); // Owen's T function: template typename tools::promote_args::type owens_t(T1 h, T2 a, const Policy& pol); template typename tools::promote_args::type owens_t(T1 h, T2 a); // Jacobi Functions: template typename tools::promote_args::type jacobi_elliptic(T k, U theta, V* pcn, V* pdn, const Policy&); template typename tools::promote_args::type jacobi_elliptic(T k, U theta, V* pcn = 0, V* pdn = 0); template typename tools::promote_args::type jacobi_sn(U k, T theta, const Policy& pol); template typename tools::promote_args::type jacobi_sn(U k, T theta); template typename tools::promote_args::type jacobi_cn(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_cn(T k, U theta); template typename tools::promote_args::type jacobi_dn(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_dn(T k, U theta); template typename tools::promote_args::type jacobi_cd(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_cd(T k, U theta); template typename tools::promote_args::type jacobi_dc(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_dc(T k, U theta); template typename tools::promote_args::type jacobi_ns(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_ns(T k, U theta); template typename tools::promote_args::type jacobi_sd(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_sd(T k, U theta); template typename tools::promote_args::type jacobi_ds(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_ds(T k, U theta); template typename tools::promote_args::type jacobi_nc(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_nc(T k, U theta); template typename tools::promote_args::type jacobi_nd(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_nd(T k, U theta); template typename tools::promote_args::type jacobi_sc(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_sc(T k, U theta); template typename tools::promote_args::type jacobi_cs(T k, U theta, const Policy& pol); template typename tools::promote_args::type jacobi_cs(T k, U theta); template typename tools::promote_args::type zeta(T s); // pow: template typename tools::promote_args::type pow(T base, const Policy& policy); template typename tools::promote_args::type pow(T base); // next: template typename tools::promote_args::type nextafter(const T&, const U&, const Policy&); template typename 
tools::promote_args::type nextafter(const T&, const U&); template typename tools::promote_args::type float_next(const T&, const Policy&); template typename tools::promote_args::type float_next(const T&); template typename tools::promote_args::type float_prior(const T&, const Policy&); template typename tools::promote_args::type float_prior(const T&); template typename tools::promote_args::type float_distance(const T&, const U&, const Policy&); template typename tools::promote_args::type float_distance(const T&, const U&); template typename tools::promote_args::type float_advance(T val, int distance, const Policy& pol); template typename tools::promote_args::type float_advance(const T& val, int distance); template typename tools::promote_args::type ulp(const T& val, const Policy& pol); template typename tools::promote_args::type ulp(const T& val); template typename tools::promote_args::type relative_difference(const T&, const U&); template typename tools::promote_args::type epsilon_difference(const T&, const U&); /* template T unchecked_bernoulli_b2n(const std::size_t n); template T bernoulli_b2n(const int i, const Policy &pol); template T bernoulli_b2n(const int i); template OutputIterator bernoulli_b2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it, const Policy& pol); template OutputIterator bernoulli_b2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it); template T tangent_t2n(const int i, const Policy &pol); template T tangent_t2n(const int i); template OutputIterator tangent_t2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it, const Policy& pol); template OutputIterator tangent_t2n(const int start_index, const unsigned number_of_bernoullis_b2n, OutputIterator out_it); */ } // namespace math } // namespace boost #ifdef BOOST_HAS_LONG_LONG #define BOOST_MATH_DETAIL_LL_FUNC(Policy)\ \ template \ inline T modf(const T& v, boost::long_long_type* ipart){ using boost::math::modf; return modf(v, ipart, Policy()); }\ \ template \ inline boost::long_long_type lltrunc(const T& v){ using boost::math::lltrunc; return lltrunc(v, Policy()); }\ \ template \ inline boost::long_long_type llround(const T& v){ using boost::math::llround; return llround(v, Policy()); }\ #else #define BOOST_MATH_DETAIL_LL_FUNC(Policy) #endif #define BOOST_MATH_DECLARE_SPECIAL_FUNCTIONS(Policy)\ \ BOOST_MATH_DETAIL_LL_FUNC(Policy)\ \ template \ inline typename boost::math::tools::promote_args::type \ beta(RT1 a, RT2 b) { return ::boost::math::beta(a, b, Policy()); }\ \ template \ inline typename boost::math::tools::promote_args::type \ beta(RT1 a, RT2 b, A x){ return ::boost::math::beta(a, b, x, Policy()); }\ \ template \ inline typename boost::math::tools::promote_args::type \ betac(RT1 a, RT2 b, RT3 x) { return ::boost::math::betac(a, b, x, Policy()); }\ \ template \ inline typename boost::math::tools::promote_args::type \ ibeta(RT1 a, RT2 b, RT3 x){ return ::boost::math::ibeta(a, b, x, Policy()); }\ \ template \ inline typename boost::math::tools::promote_args::type \ ibetac(RT1 a, RT2 b, RT3 x){ return ::boost::math::ibetac(a, b, x, Policy()); }\ \ template \ inline typename boost::math::tools::promote_args::type \ ibeta_inv(T1 a, T2 b, T3 p, T4* py){ return ::boost::math::ibeta_inv(a, b, p, py, Policy()); }\ \ template \ inline typename boost::math::tools::promote_args::type \ ibeta_inv(RT1 a, RT2 b, RT3 p){ return ::boost::math::ibeta_inv(a, b, p, Policy()); }\ \ template \ inline typename 
boost::math::tools::promote_args<T1, T2, T3>::type \
   ibetac_inv(T1 a, T2 b, T3 q, T4* py){ return ::boost::math::ibetac_inv(a, b, q, py, Policy()); }\
\
   template <class RT1, class RT2, class RT3>\
   inline typename boost::math::tools::promote_args<RT1, RT2, RT3>::type \
   ibeta_inva(RT1 a, RT2 b, RT3 p){ return ::boost::math::ibeta_inva(a, b, p, Policy()); }\
\
   template <class T1, class T2, class T3>\
   inline typename boost::math::tools::promote_args<T1, T2, T3>::type \
   ibetac_inva(T1 a, T2 b, T3 q){ return ::boost::math::ibetac_inva(a, b, q, Policy()); }\
\
   template <class RT1, class RT2, class RT3>\
   inline typename boost::math::tools::promote_args<RT1, RT2, RT3>::type \
   ibeta_invb(RT1 a, RT2 b, RT3 p){ return ::boost::math::ibeta_invb(a, b, p, Policy()); }\
\
   template <class T1, class T2, class T3>\
   inline typename boost::math::tools::promote_args<T1, T2, T3>::type \
   ibetac_invb(T1 a, T2 b, T3 q){ return ::boost::math::ibetac_invb(a, b, q, Policy()); }\
\
   template <class RT1, class RT2, class RT3>\
   inline typename boost::math::tools::promote_args<RT1, RT2, RT3>::type \
   ibetac_inv(RT1 a, RT2 b, RT3 q){ return ::boost::math::ibetac_inv(a, b, q, Policy()); }\
\
   template <class RT1, class RT2, class RT3>\
   inline typename boost::math::tools::promote_args<RT1, RT2, RT3>::type \
   ibeta_derivative(RT1 a, RT2 b, RT3 x){ return ::boost::math::ibeta_derivative(a, b, x, Policy()); }\
\
   template <class T> T binomial_coefficient(unsigned n, unsigned k){ return ::boost::math::binomial_coefficient<T, Policy>(n, k, Policy()); }\
\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type erf(RT z) { return ::boost::math::erf(z, Policy()); }\
\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type erfc(RT z){ return ::boost::math::erfc(z, Policy()); }\
\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type erf_inv(RT z) { return ::boost::math::erf_inv(z, Policy()); }\
\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type erfc_inv(RT z){ return ::boost::math::erfc_inv(z, Policy()); }\
\
   using boost::math::legendre_next;\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type \
   legendre_p(int l, T x){ return ::boost::math::legendre_p(l, x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type \
   legendre_q(unsigned l, T x){ return ::boost::math::legendre_q(l, x, Policy()); }\
\
   using ::boost::math::legendre_next;\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type \
   legendre_p(int l, int m, T x){ return ::boost::math::legendre_p(l, m, x, Policy()); }\
\
   using ::boost::math::laguerre_next;\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type \
   laguerre(unsigned n, T x){ return ::boost::math::laguerre(n, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::laguerre_result<T1, T2>::type \
   laguerre(unsigned n, T1 m, T2 x) { return ::boost::math::laguerre(n, m, x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type \
   hermite(unsigned n, T x){ return ::boost::math::hermite(n, x, Policy()); }\
\
   using boost::math::hermite_next;\
\
   template <class T1, class T2>\
   inline std::complex<typename boost::math::tools::promote_args<T1, T2>::type> \
   spherical_harmonic(unsigned n, int m, T1 theta, T2 phi){ return boost::math::spherical_harmonic(n, m, theta, phi, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type \
   spherical_harmonic_r(unsigned n, int m, T1 theta, T2 phi){ return ::boost::math::spherical_harmonic_r(n, m, theta, phi, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type \
   spherical_harmonic_i(unsigned n, int m, T1 theta, T2 phi){ return boost::math::spherical_harmonic_i(n, m, theta, phi, Policy()); }\
\
   template <class T1, class T2, class Policy>\
   inline typename boost::math::tools::promote_args<T1, T2>::type \
   spherical_harmonic_i(unsigned n, int m, T1 theta, T2 phi, const Policy& pol);\
\
   template <class T1, class T2, class T3>\
   inline typename boost::math::tools::promote_args<T1, T2, T3>::type \
   ellint_rf(T1 x, T2 y, T3 z){ return ::boost::math::ellint_rf(x, y, z, Policy()); }\
\
   template <class T1, class T2, class T3>\
   inline typename boost::math::tools::promote_args<T1, T2, T3>::type \
   ellint_rd(T1 x, T2 y, T3 z){ return ::boost::math::ellint_rd(x, y, z, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type \
   ellint_rc(T1 x, T2 y){ return ::boost::math::ellint_rc(x, y, Policy()); }\
\
   template <class T1, class T2, class T3, class T4>\
   inline typename boost::math::tools::promote_args<T1, T2, T3, T4>::type \
   ellint_rj(T1 x, T2 y, T3 z, T4 p){ return boost::math::ellint_rj(x, y, z, p, Policy()); }\
\
   template <class T1, class T2, class T3>\
   inline typename boost::math::tools::promote_args<T1, T2, T3>::type \
   ellint_rg(T1 x, T2 y, T3 z){ return ::boost::math::ellint_rg(x, y, z, Policy()); }\
\
   template <typename T>\
   inline typename boost::math::tools::promote_args<T>::type ellint_2(T k){ return boost::math::ellint_2(k, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type ellint_2(T1 k, T2 phi){ return boost::math::ellint_2(k, phi, Policy()); }\
\
   template <typename T>\
   inline typename boost::math::tools::promote_args<T>::type ellint_d(T k){ return boost::math::ellint_d(k, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type ellint_d(T1 k, T2 phi){ return boost::math::ellint_d(k, phi, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type jacobi_zeta(T1 k, T2 phi){ return boost::math::jacobi_zeta(k, phi, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type heuman_lambda(T1 k, T2 phi){ return boost::math::heuman_lambda(k, phi, Policy()); }\
\
   template <typename T>\
   inline typename boost::math::tools::promote_args<T>::type ellint_1(T k){ return boost::math::ellint_1(k, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type ellint_1(T1 k, T2 phi){ return boost::math::ellint_1(k, phi, Policy()); }\
\
   template <class T1, class T2, class T3>\
   inline typename boost::math::tools::promote_args<T1, T2, T3>::type ellint_3(T1 k, T2 v, T3 phi){ return boost::math::ellint_3(k, v, phi, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type ellint_3(T1 k, T2 v){ return boost::math::ellint_3(k, v, Policy()); }\
\
   using boost::math::max_factorial;\
   template <class RT>\
   inline RT factorial(unsigned int i) { return boost::math::factorial<RT>(i, Policy()); }\
   using boost::math::unchecked_factorial;\
   template <class RT>\
   inline RT double_factorial(unsigned i){ return boost::math::double_factorial<RT>(i, Policy()); }\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type falling_factorial(RT x, unsigned n){ return boost::math::falling_factorial(x, n, Policy()); }\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type rising_factorial(RT x, unsigned n){ return boost::math::rising_factorial(x, n, Policy()); }\
\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type tgamma(RT z){ return boost::math::tgamma(z, Policy()); }\
\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type tgamma1pm1(RT z){ return boost::math::tgamma1pm1(z, Policy()); }\
\
   template <class RT1, class RT2>\
   inline typename boost::math::tools::promote_args<RT1, RT2>::type tgamma(RT1 a, RT2 z){ return boost::math::tgamma(a, z, Policy()); }\
\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type lgamma(RT z, int* sign){ return boost::math::lgamma(z, sign, Policy()); }\
\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type lgamma(RT x){ return boost::math::lgamma(x, Policy()); }\
\
   template <class RT1, class RT2>\
   inline typename boost::math::tools::promote_args<RT1, RT2>::type tgamma_lower(RT1 a, RT2 z){ return boost::math::tgamma_lower(a, z, Policy()); }\
\
   template <class RT1, class RT2>\
   inline typename boost::math::tools::promote_args<RT1, RT2>::type gamma_q(RT1 a, RT2 z){ return boost::math::gamma_q(a, z, Policy()); }\
\
   template <class RT1, class RT2>\
   inline typename boost::math::tools::promote_args<RT1, RT2>::type gamma_p(RT1 a, RT2 z){ return boost::math::gamma_p(a, z, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type tgamma_delta_ratio(T1 z, T2 delta){ return boost::math::tgamma_delta_ratio(z, delta, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type tgamma_ratio(T1 a, T2 b) { return boost::math::tgamma_ratio(a, b, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type gamma_p_derivative(T1 a, T2 x){ return boost::math::gamma_p_derivative(a, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type gamma_p_inv(T1 a, T2 p){ return boost::math::gamma_p_inv(a, p, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type gamma_p_inva(T1 a, T2 p){ return boost::math::gamma_p_inva(a, p, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type gamma_q_inv(T1 a, T2 q){ return boost::math::gamma_q_inv(a, q, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type gamma_q_inva(T1 a, T2 q){ return boost::math::gamma_q_inva(a, q, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type digamma(T x){ return boost::math::digamma(x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type trigamma(T x){ return boost::math::trigamma(x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type polygamma(int n, T x){ return boost::math::polygamma(n, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type \
   hypot(T1 x, T2 y){ return boost::math::hypot(x, y, Policy()); }\
\
   template <class RT>\
   inline typename boost::math::tools::promote_args<RT>::type cbrt(RT z){ return boost::math::cbrt(z, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type log1p(T x){ return boost::math::log1p(x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type log1pmx(T x){ return boost::math::log1pmx(x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type expm1(T x){ return boost::math::expm1(x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::tools::promote_args<T1, T2>::type \
   powm1(const T1 a, const T2 z){ return boost::math::powm1(a, z, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type sqrt1pm1(const T& val){ return boost::math::sqrt1pm1(val, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type sinc_pi(T x){ return boost::math::sinc_pi(x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type sinhc_pi(T x){ return boost::math::sinhc_pi(x, Policy()); }\
\
   template<typename T>\
   inline typename boost::math::tools::promote_args<T>::type asinh(const T x){ return boost::math::asinh(x, Policy()); }\
\
   template<typename T>\
   inline typename boost::math::tools::promote_args<T>::type acosh(const T x){ return boost::math::acosh(x, Policy()); }\
\
   template<typename T>\
   inline typename boost::math::tools::promote_args<T>::type atanh(const T x){ return boost::math::atanh(x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type cyl_bessel_j(T1 v, T2 x)\
   { return boost::math::cyl_bessel_j(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type cyl_bessel_j_prime(T1 v, T2 x)\
   { return boost::math::cyl_bessel_j_prime(v, x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::detail::bessel_traits<T, T, Policy>::result_type sph_bessel(unsigned v, T x)\
   { return boost::math::sph_bessel(v, x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::detail::bessel_traits<T, T, Policy>::result_type sph_bessel_prime(unsigned v, T x)\
   { return boost::math::sph_bessel_prime(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type \
   cyl_bessel_i(T1 v, T2 x) { return boost::math::cyl_bessel_i(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type \
   cyl_bessel_i_prime(T1 v, T2 x) { return boost::math::cyl_bessel_i_prime(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type \
   cyl_bessel_k(T1 v, T2 x) { return boost::math::cyl_bessel_k(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type \
   cyl_bessel_k_prime(T1 v, T2 x) { return boost::math::cyl_bessel_k_prime(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type \
   cyl_neumann(T1 v, T2 x){ return boost::math::cyl_neumann(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type \
   cyl_neumann_prime(T1 v, T2 x){ return boost::math::cyl_neumann_prime(v, x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::detail::bessel_traits<T, T, Policy>::result_type \
   sph_neumann(unsigned v, T x){ return boost::math::sph_neumann(v, x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::detail::bessel_traits<T, T, Policy>::result_type \
   sph_neumann_prime(unsigned v, T x){ return boost::math::sph_neumann_prime(v, x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::detail::bessel_traits<T, T, Policy>::result_type cyl_bessel_j_zero(T v, int m)\
   { return boost::math::cyl_bessel_j_zero(v, m, Policy()); }\
\
   template <class T, class OutputIterator>\
   inline void cyl_bessel_j_zero(T v,\
                                 int start_index,\
                                 unsigned number_of_zeros,\
                                 OutputIterator out_it)\
   { boost::math::cyl_bessel_j_zero(v, start_index, number_of_zeros, out_it, Policy()); }\
\
   template <class T>\
   inline typename boost::math::detail::bessel_traits<T, T, Policy>::result_type cyl_neumann_zero(T v, int m)\
   { return boost::math::cyl_neumann_zero(v, m, Policy()); }\
\
   template <class T, class OutputIterator>\
   inline void cyl_neumann_zero(T v,\
                                int start_index,\
                                unsigned number_of_zeros,\
                                OutputIterator out_it)\
   { boost::math::cyl_neumann_zero(v, start_index, number_of_zeros, out_it, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type sin_pi(T x){ return boost::math::sin_pi(x); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type cos_pi(T x){ return boost::math::cos_pi(x); }\
\
   using boost::math::fpclassify;\
   using boost::math::isfinite;\
   using boost::math::isinf;\
   using boost::math::isnan;\
   using boost::math::isnormal;\
   using boost::math::signbit;\
   using boost::math::sign;\
   using boost::math::copysign;\
   using boost::math::changesign;\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type expint(T const& z, U const& u)\
   { return boost::math::expint(z, u, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type expint(T z){ return boost::math::expint(z, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type zeta(T s){ return boost::math::zeta(s, Policy()); }\
\
   template <class T>\
   inline T round(const T& v){ using boost::math::round; return round(v, Policy()); }\
\
   template <class T>\
   inline int iround(const T& v){ using boost::math::iround; return iround(v, Policy()); }\
\
   template <class T>\
   inline long lround(const T& v){ using boost::math::lround; return lround(v, Policy()); }\
\
   template <class T>\
   inline T trunc(const T& v){ using boost::math::trunc; return trunc(v, Policy()); }\
\
   template <class T>\
   inline int itrunc(const T& v){ using boost::math::itrunc; return itrunc(v, Policy()); }\
\
   template <class T>\
   inline long ltrunc(const T& v){ using boost::math::ltrunc; return ltrunc(v, Policy()); }\
\
   template <class T>\
   inline T modf(const T& v, T* ipart){ using boost::math::modf; return modf(v, ipart, Policy()); }\
\
   template <class T>\
   inline T modf(const T& v, int* ipart){ using boost::math::modf; return modf(v, ipart, Policy()); }\
\
   template <class T>\
   inline T modf(const T& v, long* ipart){ using boost::math::modf; return modf(v, ipart, Policy()); }\
\
   template <int N, class T>\
   inline typename boost::math::tools::promote_args<T>::type pow(T v){ return boost::math::pow<N>(v, Policy()); }\
\
   template <class T> T nextafter(const T& a, const T& b){ return boost::math::nextafter(a, b, Policy()); }\
   template <class T> T float_next(const T& a){ return boost::math::float_next(a, Policy()); }\
   template <class T> T float_prior(const T& a){ return boost::math::float_prior(a, Policy()); }\
   template <class T> T float_distance(const T& a, const T& b){ return boost::math::float_distance(a, b, Policy()); }\
   template <class T> T ulp(const T& a){ return boost::math::ulp(a, Policy()); }\
\
   template <class RT1, class RT2>\
   inline typename boost::math::tools::promote_args<RT1, RT2>::type owens_t(RT1 a, RT2 z){ return boost::math::owens_t(a, z, Policy()); }\
\
   template <class T1, class T2>\
   inline std::complex<typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type> cyl_hankel_1(T1 v, T2 x)\
   { return boost::math::cyl_hankel_1(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline std::complex<typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type> cyl_hankel_2(T1 v, T2 x)\
   { return boost::math::cyl_hankel_2(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline std::complex<typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type> sph_hankel_1(T1 v, T2 x)\
   { return boost::math::sph_hankel_1(v, x, Policy()); }\
\
   template <class T1, class T2>\
   inline std::complex<typename boost::math::detail::bessel_traits<T1, T2, Policy>::result_type> sph_hankel_2(T1 v, T2 x)\
   { return boost::math::sph_hankel_2(v, x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type jacobi_elliptic(T k, T theta, T* pcn, T* pdn)\
   { return boost::math::jacobi_elliptic(k, theta, pcn, pdn, Policy()); }\
\
   template <class U, class T>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_sn(U k, T theta)\
   { return boost::math::jacobi_sn(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_cn(T k, U theta)\
   { return boost::math::jacobi_cn(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_dn(T k, U theta)\
   { return boost::math::jacobi_dn(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_cd(T k, U theta)\
   { return boost::math::jacobi_cd(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_dc(T k, U theta)\
   { return boost::math::jacobi_dc(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_ns(T k, U theta)\
   { return boost::math::jacobi_ns(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_sd(T k, U theta)\
   { return boost::math::jacobi_sd(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_ds(T k, U theta)\
   { return boost::math::jacobi_ds(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_nc(T k, U theta)\
   { return boost::math::jacobi_nc(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_nd(T k, U theta)\
   { return boost::math::jacobi_nd(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_sc(T k, U theta)\
   { return boost::math::jacobi_sc(k, theta, Policy()); }\
\
   template <class T, class U>\
   inline typename boost::math::tools::promote_args<T, U>::type jacobi_cs(T k, U theta)\
   { return boost::math::jacobi_cs(k, theta, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type airy_ai(T x)\
   { return boost::math::airy_ai(x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type airy_bi(T x)\
   { return boost::math::airy_bi(x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type airy_ai_prime(T x)\
   { return boost::math::airy_ai_prime(x, Policy()); }\
\
   template <class T>\
   inline typename boost::math::tools::promote_args<T>::type airy_bi_prime(T x)\
   { return boost::math::airy_bi_prime(x, Policy()); }\
\
   template <class T>\
   inline T airy_ai_zero(int m)\
   { return boost::math::airy_ai_zero<T>(m, Policy()); }\
   template <class T, class OutputIterator>\
   OutputIterator airy_ai_zero(int start_index, unsigned number_of_zeros, OutputIterator out_it)\
   { return boost::math::airy_ai_zero<T>(start_index, number_of_zeros, out_it, Policy()); }\
\
   template <class T>\
   inline T airy_bi_zero(int m)\
   { return boost::math::airy_bi_zero<T>(m, Policy()); }\
   template <class T, class OutputIterator>\
   OutputIterator airy_bi_zero(int start_index, unsigned number_of_zeros, OutputIterator out_it)\
   { return boost::math::airy_bi_zero<T>(start_index, number_of_zeros, out_it, Policy()); }\
\
   template <class T>\
   T bernoulli_b2n(const int i)\
   { return boost::math::bernoulli_b2n<T>(i, Policy()); }\
   template <class T, class OutputIterator>\
   OutputIterator bernoulli_b2n(int start_index, unsigned number_of_bernoullis_b2n, OutputIterator out_it)\
   { return boost::math::bernoulli_b2n<T>(start_index, number_of_bernoullis_b2n, out_it, Policy()); }\
\
   template <class T>\
   T tangent_t2n(const int i)\
   { return boost::math::tangent_t2n<T>(i, Policy()); }\
   template <class T, class OutputIterator>\
   OutputIterator tangent_t2n(int start_index, unsigned number_of_bernoullis_b2n, OutputIterator out_it)\
   { return boost::math::tangent_t2n<T>(start_index, number_of_bernoullis_b2n, out_it, Policy()); }

#endif // BOOST_MATH_SPECIAL_MATH_FWD_HPP
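// Illustrative sketch (not part of the backported header): the wrapper
// functions above form the body of BOOST_MATH_DECLARE_SPECIAL_FUNCTIONS(Policy),
// which stamps out the whole special-function family bound to one fixed policy.
// The namespace name `mymath` and the digits10<10> accuracy choice below are
// assumptions for illustration only.
#if 0
#include <boost/math/special_functions.hpp>

namespace mymath
{
   typedef boost::math::policies::policy<
      boost::math::policies::digits10<10> > user_policy;
   BOOST_MATH_DECLARE_SPECIAL_FUNCTIONS(user_policy)
}

// mymath::erf(1.25) now forwards to boost::math::erf(1.25, user_policy()).
#endif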
mlpack-2.2.5/src/mlpack/core/boost_backport/policy.hpp000066400000000000000000001130521315013601400227740ustar00rootroot00000000000000
//  Copyright John Maddock 2007.
//  Use, modification and distribution are subject to the
//  Boost Software License, Version 1.0. (See accompanying file
//  LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_MATH_POLICY_HPP
#define BOOST_MATH_POLICY_HPP

#include <boost/math/tools/config.hpp>
#include <boost/mpl/if.hpp>
#include <boost/mpl/list.hpp>
#include <boost/mpl/contains.hpp>
#include <boost/mpl/find_if.hpp>
#include <boost/mpl/remove_if.hpp>
#include <boost/mpl/vector.hpp>
#include <boost/mpl/push_back.hpp>
#include <boost/mpl/at.hpp>
#include <boost/mpl/size.hpp>
#include <boost/mpl/comparison.hpp>
#include <boost/mpl/deref.hpp>
#include <boost/mpl/end.hpp>
#include <boost/type_traits/is_same.hpp>
#include <boost/static_assert.hpp>
#include <boost/assert.hpp>
//#include
//#include
#include <limits.h>
// Sadly we do need the .h versions of these to be sure of getting
// FLT_MANT_DIG etc.
#include <float.h>
#include <stdlib.h>
#include <stddef.h>
#include <limits>
namespace boost{ namespace math{

namespace tools{

template <class T>
BOOST_MATH_CONSTEXPR int digits(BOOST_MATH_EXPLICIT_TEMPLATE_TYPE(T)) BOOST_NOEXCEPT;
template <class T>
BOOST_MATH_CONSTEXPR T epsilon(BOOST_MATH_EXPLICIT_TEMPLATE_TYPE(T)) BOOST_MATH_NOEXCEPT(T);

} // Namespace tools

namespace policies{

//
// Define macros for our default policies, if they're not defined already:
//
// Special cases for exceptions disabled first:
//
#ifdef BOOST_NO_EXCEPTIONS
# ifndef BOOST_MATH_DOMAIN_ERROR_POLICY
#  define BOOST_MATH_DOMAIN_ERROR_POLICY errno_on_error
# endif
# ifndef BOOST_MATH_POLE_ERROR_POLICY
#  define BOOST_MATH_POLE_ERROR_POLICY errno_on_error
# endif
# ifndef BOOST_MATH_OVERFLOW_ERROR_POLICY
#  define BOOST_MATH_OVERFLOW_ERROR_POLICY errno_on_error
# endif
# ifndef BOOST_MATH_EVALUATION_ERROR_POLICY
#  define BOOST_MATH_EVALUATION_ERROR_POLICY errno_on_error
# endif
# ifndef BOOST_MATH_ROUNDING_ERROR_POLICY
#  define BOOST_MATH_ROUNDING_ERROR_POLICY errno_on_error
# endif
#endif
//
// Then the regular cases:
//
#ifndef BOOST_MATH_DOMAIN_ERROR_POLICY
#define BOOST_MATH_DOMAIN_ERROR_POLICY throw_on_error
#endif
#ifndef BOOST_MATH_POLE_ERROR_POLICY
#define BOOST_MATH_POLE_ERROR_POLICY throw_on_error
#endif
#ifndef BOOST_MATH_OVERFLOW_ERROR_POLICY
#define BOOST_MATH_OVERFLOW_ERROR_POLICY throw_on_error
#endif
#ifndef BOOST_MATH_EVALUATION_ERROR_POLICY
#define BOOST_MATH_EVALUATION_ERROR_POLICY throw_on_error
#endif
#ifndef BOOST_MATH_ROUNDING_ERROR_POLICY
#define BOOST_MATH_ROUNDING_ERROR_POLICY throw_on_error
#endif
#ifndef BOOST_MATH_UNDERFLOW_ERROR_POLICY
#define BOOST_MATH_UNDERFLOW_ERROR_POLICY ignore_error
#endif
#ifndef BOOST_MATH_DENORM_ERROR_POLICY
#define BOOST_MATH_DENORM_ERROR_POLICY ignore_error
#endif
#ifndef BOOST_MATH_INDETERMINATE_RESULT_ERROR_POLICY
#define BOOST_MATH_INDETERMINATE_RESULT_ERROR_POLICY ignore_error
#endif
#ifndef BOOST_MATH_DIGITS10_POLICY
#define BOOST_MATH_DIGITS10_POLICY 0
#endif
#ifndef BOOST_MATH_PROMOTE_FLOAT_POLICY
#define BOOST_MATH_PROMOTE_FLOAT_POLICY true
#endif
#ifndef BOOST_MATH_PROMOTE_DOUBLE_POLICY
#ifdef BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS
#define BOOST_MATH_PROMOTE_DOUBLE_POLICY false
#else
#define BOOST_MATH_PROMOTE_DOUBLE_POLICY true
#endif
#endif
#ifndef BOOST_MATH_DISCRETE_QUANTILE_POLICY
#define BOOST_MATH_DISCRETE_QUANTILE_POLICY integer_round_outwards
#endif
#ifndef BOOST_MATH_ASSERT_UNDEFINED_POLICY
#define BOOST_MATH_ASSERT_UNDEFINED_POLICY true
#endif
#ifndef BOOST_MATH_MAX_SERIES_ITERATION_POLICY
#define BOOST_MATH_MAX_SERIES_ITERATION_POLICY 1000000
#endif
#ifndef BOOST_MATH_MAX_ROOT_ITERATION_POLICY
#define BOOST_MATH_MAX_ROOT_ITERATION_POLICY 200
#endif

#if !defined(__BORLANDC__)

#define BOOST_MATH_META_INT(type, name, Default)\
   template <type N = Default> struct name : public boost::mpl::int_<N>{};\
   namespace detail{\
   template <type N>\
   char test_is_valid_arg(const name<N>*);\
   char test_is_default_arg(const name<Default>*);\
   template <class T> struct is_##name##_imp\
   {\
      template <type N> static char test(const name<N>*);\
      static double test(...);\
      BOOST_STATIC_CONSTANT(bool, value = sizeof(test(static_cast<T*>(0))) == 1);\
   };\
   }\
   template <class T> struct is_##name : public boost::mpl::bool_< ::boost::math::policies::detail::is_##name##_imp<T>::value>{};

#define BOOST_MATH_META_BOOL(name, Default)\
   template <bool N = Default> struct name : public boost::mpl::bool_<N>{};\
   namespace detail{\
   template <bool N>\
   char test_is_valid_arg(const name<N>*);\
   char test_is_default_arg(const name<Default>*);\
   template <class T> struct is_##name##_imp\
   {\
      template <bool N> static char test(const name<N>*);\
      static double test(...);\
      BOOST_STATIC_CONSTANT(bool, value = sizeof(test(static_cast<T*>(0))) == 1);\
   };\
   }\
   template <class T> struct is_##name : public boost::mpl::bool_< ::boost::math::policies::detail::is_##name##_imp<T>::value>{};

#else

#define BOOST_MATH_META_INT(Type, name, Default)\
   template <Type N = Default> struct name : public boost::mpl::int_<N>{};\
   namespace detail{\
   template <Type N>\
   char test_is_valid_arg(const name<N>*);\
   char test_is_default_arg(const name<Default>*);\
   template <class T> struct is_##name##_tester\
   {\
      template <Type N> static char test(const name<N>&);\
      static double test(...);\
   };\
   template <class T> struct is_##name##_imp\
   {\
      static T inst;\
      BOOST_STATIC_CONSTANT(bool, value = sizeof( ::boost::math::policies::detail::is_##name##_tester<T>::test(inst)) == 1);\
   };\
   }\
   template <class T> struct is_##name : public boost::mpl::bool_< ::boost::math::policies::detail::is_##name##_imp<T>::value>\
   {\
      template <class V> struct apply{ typedef is_##name<V> type; };\
   };

#define BOOST_MATH_META_BOOL(name, Default)\
   template <bool N = Default> struct name : public boost::mpl::bool_<N>{};\
   namespace detail{\
   template <bool N>\
   char test_is_valid_arg(const name<N>*);\
   char test_is_default_arg(const name<Default>*);\
   template <class T> struct is_##name##_tester\
   {\
      template <bool N> static char test(const name<N>&);\
      static double test(...);\
   };\
   template <class T> struct is_##name##_imp\
   {\
      static T inst;\
      BOOST_STATIC_CONSTANT(bool, value = sizeof( ::boost::math::policies::detail::is_##name##_tester<T>::test(inst)) == 1);\
   };\
   }\
   template <class T> struct is_##name : public boost::mpl::bool_< ::boost::math::policies::detail::is_##name##_imp<T>::value>\
   {\
      template <class V> struct apply{ typedef is_##name<V> type; };\
   };

#endif
//
// Begin by defining policy types for error handling:
//
enum error_policy_type
{
   throw_on_error = 0,
   errno_on_error = 1,
   ignore_error = 2,
   user_error = 3
};

BOOST_MATH_META_INT(error_policy_type, domain_error, BOOST_MATH_DOMAIN_ERROR_POLICY)
BOOST_MATH_META_INT(error_policy_type, pole_error, BOOST_MATH_POLE_ERROR_POLICY)
BOOST_MATH_META_INT(error_policy_type, overflow_error, BOOST_MATH_OVERFLOW_ERROR_POLICY)
BOOST_MATH_META_INT(error_policy_type, underflow_error, BOOST_MATH_UNDERFLOW_ERROR_POLICY)
BOOST_MATH_META_INT(error_policy_type, denorm_error, BOOST_MATH_DENORM_ERROR_POLICY)
BOOST_MATH_META_INT(error_policy_type, evaluation_error, BOOST_MATH_EVALUATION_ERROR_POLICY)
BOOST_MATH_META_INT(error_policy_type, rounding_error, BOOST_MATH_ROUNDING_ERROR_POLICY)
BOOST_MATH_META_INT(error_policy_type, indeterminate_result_error, BOOST_MATH_INDETERMINATE_RESULT_ERROR_POLICY)

//
// Policy types for internal promotion:
//
BOOST_MATH_META_BOOL(promote_float, BOOST_MATH_PROMOTE_FLOAT_POLICY)
BOOST_MATH_META_BOOL(promote_double, BOOST_MATH_PROMOTE_DOUBLE_POLICY)
BOOST_MATH_META_BOOL(assert_undefined, BOOST_MATH_ASSERT_UNDEFINED_POLICY)
//
// Policy types for discrete quantiles:
//
enum discrete_quantile_policy_type
{
   real,
   integer_round_outwards,
   integer_round_inwards,
   integer_round_down,
   integer_round_up,
   integer_round_nearest
};

BOOST_MATH_META_INT(discrete_quantile_policy_type, discrete_quantile, BOOST_MATH_DISCRETE_QUANTILE_POLICY)
//
// Precision:
//
BOOST_MATH_META_INT(int, digits10, BOOST_MATH_DIGITS10_POLICY)
BOOST_MATH_META_INT(int, digits2, 0)
//
// Iterations:
//
BOOST_MATH_META_INT(unsigned long, max_series_iterations, BOOST_MATH_MAX_SERIES_ITERATION_POLICY)
BOOST_MATH_META_INT(unsigned long, max_root_iterations, BOOST_MATH_MAX_ROOT_ITERATION_POLICY)
//
// Define the names for each possible policy:
//
#define BOOST_MATH_PARAMETER(name)\
   BOOST_PARAMETER_TEMPLATE_KEYWORD(name##_name)\
   BOOST_PARAMETER_NAME(name##_name)

struct default_policy{};

namespace detail{
//
// Trait to work out bits precision from digits10 and digits2:
//
template <class Digits10, class Digits2>
struct precision
{
   //
   // Now work out the precision:
   //
   typedef typename mpl::if_c<
      (Digits10::value == 0),
      digits2<0>,
      digits2<((Digits10::value + 1) * 1000L) / 301L>
   >::type digits2_type;
public:
#ifdef __BORLANDC__
   typedef typename mpl::if_c<
      (Digits2::value > ::boost::math::policies::detail::precision<Digits10, Digits2>::digits2_type::value),
      Digits2, digits2_type>::type type;
#else
   typedef typename mpl::if_c<
      (Digits2::value > digits2_type::value),
      Digits2, digits2_type>::type type;
#endif
};

template <class A, class B, bool b>
struct select_result
{
   typedef A type;
};
template <class A, class B>
struct select_result<A, B, false>
{
   typedef typename mpl::deref<B>::type type;
};

template <class Seq, class Pred, class DefaultType>
struct find_arg
{
private:
   typedef typename mpl::find_if<Seq, Pred>::type iter;
   typedef typename mpl::end<Seq>::type end_type;
public:
   typedef typename select_result<
      DefaultType, iter,
      ::boost::is_same<iter, end_type>::value>::type type;
};

double test_is_valid_arg(...);
double test_is_default_arg(...);
char test_is_valid_arg(const default_policy*);
char test_is_default_arg(const default_policy*);

template <class T> struct is_valid_policy_imp
{
   BOOST_STATIC_CONSTANT(bool, value = sizeof(::boost::math::policies::detail::test_is_valid_arg(static_cast<T*>(0))) == 1);
};

template <class T> struct is_default_policy_imp
{
   BOOST_STATIC_CONSTANT(bool, value = sizeof(::boost::math::policies::detail::test_is_default_arg(static_cast<T*>(0))) == 1);
};

template <class T> struct is_valid_policy
   : public mpl::bool_< ::boost::math::policies::detail::is_valid_policy_imp<T>::value>
{};

template <class T> struct is_default_policy
   : public mpl::bool_< ::boost::math::policies::detail::is_default_policy_imp<T>::value>
{
   template <class U> struct apply
   {
      typedef is_default_policy<U> type;
   };
};

template <class Seq, class T, int N>
struct append_N
{
   typedef typename mpl::push_back<Seq, T>::type new_seq;
   typedef typename append_N<new_seq, T, N-1>::type type;
};

template <class Seq, class T>
struct append_N<Seq, T, 0>
{
   typedef Seq type;
};

//
// Traits class to work out what template parameters our default
// policy<> class will have when modified for forwarding:
//
template <bool f, bool d>
struct default_args
{
   typedef promote_float<false> arg1;
   typedef promote_double<false> arg2;
};

template <>
struct default_args<false, false>
{
   typedef default_policy arg1;
   typedef default_policy arg2;
};

template <>
struct default_args<true, false>
{
   typedef promote_float<false> arg1;
   typedef default_policy arg2;
};

template <>
struct default_args<false, true>
{
   typedef promote_double<false> arg1;
   typedef default_policy arg2;
};

typedef default_args<BOOST_MATH_PROMOTE_FLOAT_POLICY, BOOST_MATH_PROMOTE_DOUBLE_POLICY>::arg1 forwarding_arg1;
typedef default_args<BOOST_MATH_PROMOTE_FLOAT_POLICY, BOOST_MATH_PROMOTE_DOUBLE_POLICY>::arg2 forwarding_arg2;

} // detail

//
// Now define the policy type with enough arguments to handle all
// the policies:
//
template <class A1 = default_policy,
          class A2 = default_policy,
          class A3 = default_policy,
          class A4 = default_policy,
          class A5 = default_policy,
          class A6 = default_policy,
          class A7 = default_policy,
          class A8 = default_policy,
          class A9 = default_policy,
          class A10 = default_policy,
          class A11 = default_policy,
          class A12 = default_policy,
          class A13 = default_policy>
struct policy
{
private:
   //
   // Validate all our arguments:
   //
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A1>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A2>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A3>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A4>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A5>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A6>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A7>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A8>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A9>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A10>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A11>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A12>::value);
   BOOST_STATIC_ASSERT(::boost::math::policies::detail::is_valid_policy<A13>::value);
   //
   // Typelist of the arguments:
   //
   typedef mpl::list<A1,A2,A3,A4,A5,A6,A7,A8,A9,A10,A11,A12,A13> arg_list;

public:
   typedef typename detail::find_arg<arg_list, is_domain_error<mpl::_1>, domain_error<> >::type domain_error_type;
   typedef typename detail::find_arg<arg_list, is_pole_error<mpl::_1>, pole_error<> >::type pole_error_type;
   typedef typename detail::find_arg<arg_list, is_overflow_error<mpl::_1>, overflow_error<> >::type overflow_error_type;
   typedef typename detail::find_arg<arg_list, is_underflow_error<mpl::_1>, underflow_error<> >::type underflow_error_type;
   typedef typename detail::find_arg<arg_list, is_denorm_error<mpl::_1>, denorm_error<> >::type denorm_error_type;
   typedef typename detail::find_arg<arg_list, is_evaluation_error<mpl::_1>, evaluation_error<> >::type evaluation_error_type;
   typedef typename detail::find_arg<arg_list, is_rounding_error<mpl::_1>, rounding_error<> >::type rounding_error_type;
   typedef typename detail::find_arg<arg_list, is_indeterminate_result_error<mpl::_1>, indeterminate_result_error<> >::type indeterminate_result_error_type;
private:
   //
   // Now work out the precision:
   //
   typedef typename detail::find_arg<arg_list, is_digits10<mpl::_1>, digits10<> >::type digits10_type;
   typedef typename detail::find_arg<arg_list, is_digits2<mpl::_1>, digits2<> >::type bits_precision_type;
public:
   typedef typename detail::precision<digits10_type, bits_precision_type>::type precision_type;
   //
   // Internal promotion:
   //
   typedef typename detail::find_arg<arg_list, is_promote_float<mpl::_1>, promote_float<> >::type promote_float_type;
   typedef typename detail::find_arg<arg_list, is_promote_double<mpl::_1>, promote_double<> >::type promote_double_type;
   //
   // Discrete quantiles:
   //
   typedef typename detail::find_arg<arg_list, is_discrete_quantile<mpl::_1>, discrete_quantile<> >::type discrete_quantile_type;
   //
   // Mathematically undefined properties:
   //
   typedef typename detail::find_arg<arg_list, is_assert_undefined<mpl::_1>, assert_undefined<> >::type assert_undefined_type;
   //
   // Max iterations:
   //
   typedef typename detail::find_arg<arg_list, is_max_series_iterations<mpl::_1>, max_series_iterations<> >::type max_series_iterations_type;
   typedef typename detail::find_arg<arg_list, is_max_root_iterations<mpl::_1>, max_root_iterations<> >::type max_root_iterations_type;
};
//
// These full specializations are defined to reduce the amount of
// template instantiations that have to take place when using the default
// policies, they have quite a large impact on compile times:
//
template <>
struct policy<default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy>
{
public:
   typedef domain_error<> domain_error_type;
   typedef pole_error<> pole_error_type;
   typedef overflow_error<> overflow_error_type;
   typedef underflow_error<> underflow_error_type;
   typedef denorm_error<> denorm_error_type;
   typedef evaluation_error<> evaluation_error_type;
   typedef rounding_error<> rounding_error_type;
   typedef indeterminate_result_error<> indeterminate_result_error_type;
#if BOOST_MATH_DIGITS10_POLICY == 0
   typedef digits2<> precision_type;
#else
   typedef detail::precision<digits10<>, digits2<> >::type precision_type;
#endif
   typedef promote_float<> promote_float_type;
   typedef promote_double<> promote_double_type;
   typedef discrete_quantile<> discrete_quantile_type;
   typedef assert_undefined<> assert_undefined_type;
   typedef max_series_iterations<> max_series_iterations_type;
   typedef max_root_iterations<> max_root_iterations_type;
};

template <>
struct policy<detail::forwarding_arg1, detail::forwarding_arg2, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy, default_policy>
{
public:
   typedef domain_error<> domain_error_type;
   typedef pole_error<> pole_error_type;
   typedef overflow_error<> overflow_error_type;
   typedef underflow_error<> underflow_error_type;
   typedef denorm_error<> denorm_error_type;
   typedef evaluation_error<> evaluation_error_type;
   typedef rounding_error<> rounding_error_type;
   typedef indeterminate_result_error<> indeterminate_result_error_type;
#if BOOST_MATH_DIGITS10_POLICY == 0
   typedef digits2<> precision_type;
#else
   typedef detail::precision<digits10<>, digits2<> >::type precision_type;
#endif
   typedef promote_float<false> promote_float_type;
   typedef promote_double<false> promote_double_type;
   typedef discrete_quantile<> discrete_quantile_type;
   typedef assert_undefined<> assert_undefined_type;
   typedef max_series_iterations<> max_series_iterations_type;
   typedef max_root_iterations<> max_root_iterations_type;
};

template <class Policy,
          class A1 = default_policy,
          class A2 = default_policy,
          class A3 = default_policy,
          class A4 = default_policy,
          class A5 = default_policy,
          class A6 = default_policy,
          class A7 = default_policy,
          class A8 = default_policy,
          class A9 = default_policy,
          class A10 = default_policy,
          class A11 = default_policy,
          class A12 = default_policy,
          class A13 = default_policy>
struct normalise
{
private:
   typedef mpl::list<A1,A2,A3,A4,A5,A6,A7,A8,A9,A10,A11,A12,A13> arg_list;
   typedef typename detail::find_arg<arg_list, is_domain_error<mpl::_1>, typename Policy::domain_error_type >::type domain_error_type;
   typedef typename detail::find_arg<arg_list, is_pole_error<mpl::_1>, typename Policy::pole_error_type >::type pole_error_type;
   typedef typename detail::find_arg<arg_list, is_overflow_error<mpl::_1>, typename Policy::overflow_error_type >::type overflow_error_type;
   typedef typename detail::find_arg<arg_list, is_underflow_error<mpl::_1>, typename Policy::underflow_error_type >::type underflow_error_type;
   typedef typename detail::find_arg<arg_list, is_denorm_error<mpl::_1>, typename Policy::denorm_error_type >::type denorm_error_type;
   typedef typename detail::find_arg<arg_list, is_evaluation_error<mpl::_1>, typename Policy::evaluation_error_type >::type evaluation_error_type;
   typedef typename detail::find_arg<arg_list, is_rounding_error<mpl::_1>, typename Policy::rounding_error_type >::type rounding_error_type;
   typedef typename detail::find_arg<arg_list, is_indeterminate_result_error<mpl::_1>, typename Policy::indeterminate_result_error_type >::type indeterminate_result_error_type;
   //
   // Now work out the precision:
   //
   typedef typename detail::find_arg<arg_list, is_digits10<mpl::_1>, digits10<> >::type digits10_type;
   typedef typename detail::find_arg<arg_list, is_digits2<mpl::_1>, typename Policy::precision_type >::type bits_precision_type;
   typedef typename detail::precision<digits10_type, bits_precision_type>::type precision_type;
   //
   // Internal promotion:
   //
   typedef typename detail::find_arg<arg_list, is_promote_float<mpl::_1>, typename Policy::promote_float_type >::type promote_float_type;
   typedef typename detail::find_arg<arg_list, is_promote_double<mpl::_1>, typename Policy::promote_double_type >::type promote_double_type;
   //
   // Discrete quantiles:
   //
   typedef typename detail::find_arg<arg_list, is_discrete_quantile<mpl::_1>, typename Policy::discrete_quantile_type >::type discrete_quantile_type;
   //
   // Mathematically undefined properties:
   //
   typedef typename detail::find_arg<arg_list, is_assert_undefined<mpl::_1>, typename Policy::assert_undefined_type >::type assert_undefined_type;
   //
   // Max iterations:
   //
   typedef typename detail::find_arg<arg_list, is_max_series_iterations<mpl::_1>, typename Policy::max_series_iterations_type>::type max_series_iterations_type;
   typedef typename detail::find_arg<arg_list, is_max_root_iterations<mpl::_1>, typename Policy::max_root_iterations_type>::type max_root_iterations_type;
   //
   // Define a typelist of the policies:
   //
   typedef mpl::vector<
      domain_error_type,
      pole_error_type,
      overflow_error_type,
      underflow_error_type,
      denorm_error_type,
      evaluation_error_type,
      rounding_error_type,
      indeterminate_result_error_type,
      precision_type,
      promote_float_type,
      promote_double_type,
      discrete_quantile_type,
      assert_undefined_type,
      max_series_iterations_type,
      max_root_iterations_type> result_list;
   //
   // Remove all the policies that are the same as the default:
   //
   typedef typename mpl::remove_if<result_list, detail::is_default_policy<mpl::_> >::type reduced_list;
   //
   // Pad out the list with defaults:
   //
   typedef typename detail::append_N<reduced_list, default_policy, (14 - mpl::size<reduced_list>::value)>::type result_type;
public:
   typedef policy<
      typename mpl::at<result_type, mpl::int_<0> >::type,
      typename mpl::at<result_type, mpl::int_<1> >::type,
      typename mpl::at<result_type, mpl::int_<2> >::type,
      typename mpl::at<result_type, mpl::int_<3> >::type,
      typename mpl::at<result_type, mpl::int_<4> >::type,
      typename mpl::at<result_type, mpl::int_<5> >::type,
      typename mpl::at<result_type, mpl::int_<6> >::type,
      typename mpl::at<result_type, mpl::int_<7> >::type,
      typename mpl::at<result_type, mpl::int_<8> >::type,
      typename mpl::at<result_type, mpl::int_<9> >::type,
      typename mpl::at<result_type, mpl::int_<10> >::type,
      typename mpl::at<result_type, mpl::int_<11> >::type,
      typename mpl::at<result_type, mpl::int_<12> >::type > type;
};
//
// Full specialisation to speed up compilation of the common case:
//
template <>
struct normalise<policy<>,
          promote_float<false>,
          promote_double<false>,
          discrete_quantile<>,
          assert_undefined<>,
          default_policy,
          default_policy,
          default_policy,
          default_policy,
          default_policy,
          default_policy,
          default_policy>
{
   typedef policy<detail::forwarding_arg1, detail::forwarding_arg2> type;
};

template <>
struct normalise<policy<detail::forwarding_arg1, detail::forwarding_arg2>,
          promote_float<false>,
          promote_double<false>,
          discrete_quantile<>,
          assert_undefined<>,
          default_policy,
          default_policy,
          default_policy,
          default_policy,
          default_policy,
          default_policy,
          default_policy>
{
   typedef policy<detail::forwarding_arg1, detail::forwarding_arg2> type;
};
inline BOOST_MATH_CONSTEXPR policy<> make_policy() BOOST_NOEXCEPT
{ return policy<>(); }

template <class A1>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1>::type make_policy(const A1&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1>::type result_type;
   return result_type();
}

template <class A1, class A2>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2>::type make_policy(const A1&, const A2&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2>::type result_type;
   return result_type();
}

template <class A1, class A2, class A3>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2, A3>::type make_policy(const A1&, const A2&, const A3&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2, A3>::type result_type;
   return result_type();
}

template <class A1, class A2, class A3, class A4>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2, A3, A4>::type make_policy(const A1&, const A2&, const A3&, const A4&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2, A3, A4>::type result_type;
   return result_type();
}

template <class A1, class A2, class A3, class A4, class A5>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2, A3, A4, A5>::type make_policy(const A1&, const A2&, const A3&, const A4&, const A5&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2, A3, A4, A5>::type result_type;
   return result_type();
}

template <class A1, class A2, class A3, class A4, class A5, class A6>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2, A3, A4, A5, A6>::type make_policy(const A1&, const A2&, const A3&, const A4&, const A5&, const A6&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2, A3, A4, A5, A6>::type result_type;
   return result_type();
}

template <class A1, class A2, class A3, class A4, class A5, class A6, class A7>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7>::type make_policy(const A1&, const A2&, const A3&, const A4&, const A5&, const A6&, const A7&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7>::type result_type;
   return result_type();
}

template <class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7, A8>::type make_policy(const A1&, const A2&, const A3&, const A4&, const A5&, const A6&, const A7&, const A8&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7, A8>::type result_type;
   return result_type();
}

template <class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7, A8, A9>::type make_policy(const A1&, const A2&, const A3&, const A4&, const A5&, const A6&, const A7&, const A8&, const A9&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7, A8, A9>::type result_type;
   return result_type();
}

template <class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9, class A10>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10>::type make_policy(const A1&, const A2&, const A3&, const A4&, const A5&, const A6&, const A7&, const A8&, const A9&, const A10&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10>::type result_type;
   return result_type();
}

template <class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9, class A10, class A11>
inline BOOST_MATH_CONSTEXPR typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11>::type make_policy(const A1&, const A2&, const A3&, const A4&, const A5&, const A6&, const A7&, const A8&, const A9&, const A10&, const A11&) BOOST_NOEXCEPT
{
   typedef typename normalise<policy<>, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11>::type result_type;
   return result_type();
}

//
// Traits class to handle internal promotion:
//
template <class Real, class Policy>
struct evaluation
{
   typedef Real type;
};

template <class Policy>
struct evaluation<float, Policy>
{
   typedef typename mpl::if_<typename Policy::promote_float_type, double, float>::type type;
};

template <class Policy>
struct evaluation<double, Policy>
{
   typedef typename mpl::if_<typename Policy::promote_double_type, long double, double>::type type;
};
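//
// Usage sketch (illustrative only, not part of this header): the
// make_policy() overloads above collapse any mix of policy arguments into
// the canonical policy<> type computed by normalise<>. The function name
// safe_tgamma below is hypothetical.
//
#if 0
#include <boost/math/special_functions/gamma.hpp>

double safe_tgamma(double x)
{
   using namespace boost::math::policies;
   // Ask for non-throwing behaviour on domain and overflow errors:
   return boost::math::tgamma(x,
      make_policy(domain_error<ignore_error>(), overflow_error<ignore_error>()));
}
#endif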
#ifdef BOOST_NO_LIMITS_COMPILE_TIME_CONSTANTS

template <class T>
struct basic_digits : public mpl::int_<0>{ };
template <>
struct basic_digits<float> : public mpl::int_<FLT_MANT_DIG>{ };
template <>
struct basic_digits<double> : public mpl::int_<DBL_MANT_DIG>{ };
template <>
struct basic_digits<long double> : public mpl::int_<LDBL_MANT_DIG>{ };

template <class Real, class Policy>
struct precision
{
   BOOST_STATIC_ASSERT( ::std::numeric_limits<Real>::radix == 2);
   typedef typename Policy::precision_type precision_type;
   typedef basic_digits<Real> digits_t;
   typedef typename mpl::if_<
      mpl::equal_to<digits_t, mpl::int_<0> >,
      // Possibly unknown precision:
      precision_type,
      typename mpl::if_<
         mpl::or_<mpl::equal_to<precision_type, mpl::int_<0> >, mpl::less_equal<digits_t, precision_type> >,
         // Default case, full precision for RealType:
         digits2< ::std::numeric_limits<Real>::digits>,
         // User customised precision:
         precision_type
      >::type
   >::type type;
};

template <class Policy>
struct precision<float, Policy>
{
   typedef digits2<FLT_MANT_DIG> type;
};
template <class Policy>
struct precision<double, Policy>
{
   typedef digits2<DBL_MANT_DIG> type;
};
template <class Policy>
struct precision<long double, Policy>
{
   typedef digits2<LDBL_MANT_DIG> type;
};

#else

template <class Real, class Policy>
struct precision
{
   BOOST_STATIC_ASSERT((::std::numeric_limits<Real>::radix == 2) || ((::std::numeric_limits<Real>::is_specialized == 0) || (::std::numeric_limits<Real>::digits == 0)));
#ifndef __BORLANDC__
   typedef typename Policy::precision_type precision_type;
   typedef typename mpl::if_c<
      ((::std::numeric_limits<Real>::is_specialized == 0) || (::std::numeric_limits<Real>::digits == 0)),
      // Possibly unknown precision:
      precision_type,
      typename mpl::if_c<
         ((::std::numeric_limits<Real>::digits <= precision_type::value)
         || (precision_type::value <= 0)),
         // Default case, full precision for RealType:
         digits2< ::std::numeric_limits<Real>::digits>,
         // User customised precision:
         precision_type
      >::type
   >::type type;
#else
   typedef typename Policy::precision_type precision_type;
   typedef mpl::int_< ::std::numeric_limits<Real>::digits> digits_t;
   typedef mpl::bool_< ::std::numeric_limits<Real>::is_specialized> spec_t;
   typedef typename mpl::if_<
      mpl::or_<mpl::equal_to<spec_t, mpl::false_>, mpl::equal_to<digits_t, mpl::int_<0> > >,
      // Possibly unknown precision:
      precision_type,
      typename mpl::if_<
         mpl::or_<mpl::equal_to<precision_type, mpl::int_<0> >, mpl::less_equal<digits_t, precision_type> >,
         // Default case, full precision for RealType:
         digits2< ::std::numeric_limits<Real>::digits>,
         // User customised precision:
         precision_type
      >::type
   >::type type;
#endif
};

#endif

#ifdef BOOST_MATH_USE_FLOAT128

template <class Policy>
struct precision<BOOST_MATH_FLOAT128_TYPE, Policy>
{
   typedef mpl::int_<113> type;
};

#endif

namespace detail{

template <class T, class Policy>
inline BOOST_MATH_CONSTEXPR int digits_imp(mpl::true_ const&) BOOST_NOEXCEPT
{
#ifndef BOOST_NO_LIMITS_COMPILE_TIME_CONSTANTS
   BOOST_STATIC_ASSERT( ::std::numeric_limits<T>::is_specialized);
#else
   BOOST_ASSERT(::std::numeric_limits<T>::is_specialized);
#endif
   typedef typename boost::math::policies::precision<T, Policy>::type p_t;
   return p_t::value;
}

template <class T, class Policy>
inline BOOST_MATH_CONSTEXPR int digits_imp(mpl::false_ const&) BOOST_NOEXCEPT
{
   return tools::digits<T>();
}

} // namespace detail

template <class T, class Policy>
inline BOOST_MATH_CONSTEXPR int digits(BOOST_MATH_EXPLICIT_TEMPLATE_TYPE(T)) BOOST_NOEXCEPT
{
   typedef mpl::bool_< std::numeric_limits<T>::is_specialized > tag_type;
   return detail::digits_imp<T, Policy>(tag_type());
}
template <class T, class Policy>
inline BOOST_MATH_CONSTEXPR int digits_base10(BOOST_MATH_EXPLICIT_TEMPLATE_TYPE(T)) BOOST_NOEXCEPT
{
   return boost::math::policies::digits<T, Policy>() * 301 / 1000L;
}

template <class Policy>
inline BOOST_MATH_CONSTEXPR unsigned long get_max_series_iterations() BOOST_NOEXCEPT
{
   typedef typename Policy::max_series_iterations_type iter_type;
   return iter_type::value;
}

template <class Policy>
inline BOOST_MATH_CONSTEXPR unsigned long get_max_root_iterations() BOOST_NOEXCEPT
{
   typedef typename Policy::max_root_iterations_type iter_type;
   return iter_type::value;
}

namespace detail{

template <class T, class Digits, class Small, class Default>
struct series_factor_calc
{
   static T get() BOOST_MATH_NOEXCEPT(T)
   {
      return ldexp(T(1.0), 1 - Digits::value);
   }
};

template <class T, class Digits>
struct series_factor_calc<T, Digits, mpl::true_, mpl::true_>
{
   static BOOST_MATH_CONSTEXPR T get() BOOST_MATH_NOEXCEPT(T)
   {
      return boost::math::tools::epsilon<T>();
   }
};
template <class T, class Digits>
struct series_factor_calc<T, Digits, mpl::true_, mpl::false_>
{
   BOOST_STATIC_CONSTANT(boost::uintmax_t, v = static_cast<boost::uintmax_t>(1u) << (Digits::value - 1));
   static BOOST_MATH_CONSTEXPR T get() BOOST_MATH_NOEXCEPT(T)
   {
      return 1 / static_cast<T>(v);
   }
};
template <class T, class Digits>
struct series_factor_calc<T, Digits, mpl::false_, mpl::true_>
{
   static BOOST_MATH_CONSTEXPR T get() BOOST_MATH_NOEXCEPT(T)
   {
      return boost::math::tools::epsilon<T>();
   }
};

template <class T, class Policy>
inline BOOST_MATH_CONSTEXPR T get_epsilon_imp(mpl::true_ const&) BOOST_MATH_NOEXCEPT(T)
{
#ifndef BOOST_NO_LIMITS_COMPILE_TIME_CONSTANTS
   BOOST_STATIC_ASSERT( ::std::numeric_limits<T>::is_specialized);
   BOOST_STATIC_ASSERT( ::std::numeric_limits<T>::radix == 2);
#else
   BOOST_ASSERT(::std::numeric_limits<T>::is_specialized);
   BOOST_ASSERT(::std::numeric_limits<T>::radix == 2);
#endif
   typedef typename boost::math::policies::precision<T, Policy>::type p_t;
   typedef mpl::bool_<p_t::value <= std::numeric_limits<boost::uintmax_t>::digits> is_small_int;
   typedef mpl::bool_<p_t::value >= std::numeric_limits<T>::digits> is_default_value;
   return series_factor_calc<T, p_t, is_small_int, is_default_value>::get();
}

template <class T, class Policy>
inline BOOST_MATH_CONSTEXPR T get_epsilon_imp(mpl::false_ const&) BOOST_MATH_NOEXCEPT(T)
{
   return tools::epsilon<T>();
}

} // namespace detail

template <class T, class Policy>
inline BOOST_MATH_CONSTEXPR T get_epsilon(BOOST_MATH_EXPLICIT_TEMPLATE_TYPE(T)) BOOST_MATH_NOEXCEPT(T)
{
   typedef mpl::bool_< (std::numeric_limits<T>::is_specialized && (std::numeric_limits<T>::radix == 2)) > tag_type;
   return detail::get_epsilon_imp<T, Policy>(tag_type());
}

namespace detail{

template <class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9, class A10, class A11, class A12, class A13>
char test_is_policy(const policy<A1,A2,A3,A4,A5,A6,A7,A8,A9,A10,A11,A12,A13>*);
double test_is_policy(...);

template <class P>
struct is_policy_imp
{
   BOOST_STATIC_CONSTANT(bool, value = (sizeof(::boost::math::policies::detail::test_is_policy(static_cast<P*>(0))) == 1));
};

}

template <class P>
struct is_policy : public mpl::bool_< ::boost::math::policies::detail::is_policy_imp<P>::value> {};
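//
// Sketch (illustrative only): the precision machinery above lets client code
// ask, at compile time, how many bits of working precision a policy selects
// for a type, and what the matching target epsilon is. The function name
// show_precision is hypothetical.
//
#if 0
#include <iostream>

void show_precision()
{
   using namespace boost::math::policies;
   typedef policy<digits10<5> > pol5;                 // request ~5 decimal digits
   std::cout << digits<double, pol5>() << "\n";       // bits chosen by precision<>
   std::cout << get_epsilon<double, pol5>() << "\n";  // epsilon used by series etc.
}
#endif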
//
// Helper traits class for distribution error handling:
//
template <class Policy>
struct constructor_error_check
{
   typedef typename Policy::domain_error_type domain_error_type;
   typedef typename mpl::if_c<
      (domain_error_type::value == throw_on_error) || (domain_error_type::value == user_error) || (domain_error_type::value == errno_on_error),
      mpl::true_,
      mpl::false_>::type type;
};

template <class Policy>
struct method_error_check
{
   typedef typename Policy::domain_error_type domain_error_type;
   typedef typename mpl::if_c<
      (domain_error_type::value == throw_on_error) && (domain_error_type::value != user_error),
      mpl::false_,
      mpl::true_>::type type;
};
//
// Does the Policy ever throw on error?
//
template <class Policy>
struct is_noexcept_error_policy
{
   typedef typename Policy::domain_error_type t1;
   typedef typename Policy::pole_error_type t2;
   typedef typename Policy::overflow_error_type t3;
   typedef typename Policy::underflow_error_type t4;
   typedef typename Policy::denorm_error_type t5;
   typedef typename Policy::evaluation_error_type t6;
   typedef typename Policy::rounding_error_type t7;
   typedef typename Policy::indeterminate_result_error_type t8;

   BOOST_STATIC_CONSTANT(bool, value =
      ((t1::value != throw_on_error) && (t1::value != user_error)
      && (t2::value != throw_on_error) && (t2::value != user_error)
      && (t3::value != throw_on_error) && (t3::value != user_error)
      && (t4::value != throw_on_error) && (t4::value != user_error)
      && (t5::value != throw_on_error) && (t5::value != user_error)
      && (t6::value != throw_on_error) && (t6::value != user_error)
      && (t7::value != throw_on_error) && (t7::value != user_error)
      && (t8::value != throw_on_error) && (t8::value != user_error)));
};

}}} // namespaces

#endif // BOOST_MATH_POLICY_HPP
mlpack-2.2.5/src/mlpack/core/boost_backport/polygamma.hpp000066400000000000000000000065301315013601400234650ustar00rootroot00000000000000
///////////////////////////////////////////////////////////////////////////////
//  Copyright 2013 Nikhar Agrawal
//  Copyright 2013 Christopher Kormanyos
//  Copyright 2014 John Maddock
//  Copyright 2013 Paul Bristow
//  Distributed under the Boost
//  Software License, Version 1.0. (See accompanying file
//  LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#ifndef _BOOST_POLYGAMMA_2013_07_30_HPP_
#define _BOOST_POLYGAMMA_2013_07_30_HPP_

#include "detail/polygamma.hpp"
#include "trigamma.hpp"
#include <boost/math/special_functions/digamma.hpp>

// Forward declarations
namespace boost { namespace math {

template <class T, class Policy>
inline typename tools::promote_args<T>::type trigamma(T x, const Policy&);

template <class T>
inline typename tools::promote_args<T>::type trigamma(T x);

}}

namespace boost { namespace math {

template <class T, class Policy>
inline typename tools::promote_args<T>::type polygamma(const int n, T x, const Policy& pol)
{
   //
   // Filter off special cases right at the start:
   //
   if(n == 0)
      return boost::math::digamma(x, pol);
   if(n == 1)
      return boost::math::trigamma(x, pol);
   //
   // We've found some standard library functions to misbehave if any FPU exception flags
   // are set prior to their call, this code will clear those flags, then reset them
   // on exit:
   //
   BOOST_FPU_EXCEPTION_GUARD
   //
   // The type of the result - the common type of T and U after
   // any integer types have been promoted to double:
   //
   typedef typename tools::promote_args<T>::type result_type;
   //
   // The type used for the calculation. This may be a wider type than
   // the result in order to ensure full precision:
   //
   typedef typename policies::evaluation<result_type, Policy>::type value_type;
   //
   // The type of the policy to forward to the actual implementation.
   // We disable promotion of float and double as that's [possibly]
   // happened already in the line above.  Also reset to the default
   // any policies we don't use (reduces code bloat if we're called
   // multiple times with differing policies we don't actually use).
   // Also normalise the type, again to reduce code bloat in case we're
   // called multiple times with functionally identical policies that happen
   // to be different types.
   //
   typedef typename policies::normalise<
      Policy,
      policies::promote_float<false>,
      policies::promote_double<false>,
      policies::discrete_quantile<>,
      policies::assert_undefined<> >::type forwarding_policy;
   //
   // Whew.  Now we can make the actual call to the implementation.
   // Arguments are explicitly cast to the evaluation type, and the result
   // passed through checked_narrowing_cast which handles things like overflow
   // according to the policy passed:
   //
   return policies::checked_narrowing_cast<result_type, forwarding_policy>(
      detail::polygamma_imp(n, static_cast<value_type>(x), forwarding_policy()),
      "boost::math::polygamma<%1%>(int, %1%)");
}

template <class T>
inline typename tools::promote_args<T>::type polygamma(const int n, T x)
{
   return boost::math::polygamma(n, x, policies::policy<>());
}

} } // namespace boost::math

#endif // _BOOST_BERNOULLI_2013_05_30_HPP_
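//
// Usage sketch (illustrative only, not part of the backport): polygamma(n, x)
// dispatches n == 0 to digamma() and n == 1 to trigamma(), so trigamma(1)
// and polygamma(1, 1) both return pi^2/6. The test function name is
// hypothetical.
//
#if 0
#include <boost/math/constants/constants.hpp>
#include <cassert>
#include <cmath>

void polygamma_smoke_test()
{
   const double t  = boost::math::trigamma(1.0);
   const double p1 = boost::math::polygamma(1, 1.0);
   const double pi = boost::math::constants::pi<double>();
   assert(std::fabs(t - p1) < 1e-12);        // same code path as trigamma
   assert(std::fabs(t - pi * pi / 6) < 1e-12);
}
#endif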
mlpack-2.2.5/src/mlpack/core/boost_backport/trigamma.hpp000066400000000000000000000513101315013601400232740ustar00rootroot00000000000000
//  (C) Copyright John Maddock 2006.
//  Use, modification and distribution are subject to the
//  Boost Software License, Version 1.0. (See accompanying file
//  LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_MATH_SF_TRIGAMMA_HPP
#define BOOST_MATH_SF_TRIGAMMA_HPP

#ifdef _MSC_VER
#pragma once
#endif

#include "math_fwd.hpp"
#include "polygamma.hpp"
#include <boost/math/tools/rational.hpp>
#include <boost/math/tools/series.hpp>
#include <boost/math/tools/promotion.hpp>
#include <boost/math/policies/error_handling.hpp>
#include <boost/math/constants/constants.hpp>
#include <boost/mpl/comparison.hpp>
#include <boost/math/tools/big_constant.hpp>

namespace boost{
namespace math{
namespace detail{

template <class T, class Policy>
T polygamma_imp(const int n, T x, const Policy &pol);

template <class T, class Policy>
T trigamma_prec(T x, const mpl::int_<53>*, const Policy&)
{
   // Max error in interpolated form: 3.736e-017
   static const T offset = BOOST_MATH_BIG_CONSTANT(T, 53, 2.1093254089355469);
   static const T P_1_2[] = {
      BOOST_MATH_BIG_CONSTANT(T, 53, -1.1093280605946045),
      BOOST_MATH_BIG_CONSTANT(T, 53, -3.8310674472619321),
      BOOST_MATH_BIG_CONSTANT(T, 53, -3.3703848401898283),
      BOOST_MATH_BIG_CONSTANT(T, 53, 0.28080574467981213),
      BOOST_MATH_BIG_CONSTANT(T, 53, 1.6638069578676164),
      BOOST_MATH_BIG_CONSTANT(T, 53, 0.64468386819102836),
   };
   static const T Q_1_2[] = {
      BOOST_MATH_BIG_CONSTANT(T, 53, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 53, 3.4535389668541151),
      BOOST_MATH_BIG_CONSTANT(T, 53, 4.5208926987851437),
      BOOST_MATH_BIG_CONSTANT(T, 53, 2.7012734178351534),
      BOOST_MATH_BIG_CONSTANT(T, 53, 0.64468798399785611),
      BOOST_MATH_BIG_CONSTANT(T, 53, -0.20314516859987728e-6),
   };
   // Max error in interpolated form: 1.159e-017
   static const T P_2_4[] = {
      BOOST_MATH_BIG_CONSTANT(T, 53, -0.13803835004508849e-7),
      BOOST_MATH_BIG_CONSTANT(T, 53, 0.50000049158540261),
      BOOST_MATH_BIG_CONSTANT(T, 53, 1.6077979838469348),
      BOOST_MATH_BIG_CONSTANT(T, 53, 2.5645435828098254),
      BOOST_MATH_BIG_CONSTANT(T, 53, 2.0534873203680393),
      BOOST_MATH_BIG_CONSTANT(T, 53, 0.74566981111565923),
   };
   static const T Q_2_4[] = {
      BOOST_MATH_BIG_CONSTANT(T, 53, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 53, 2.8822787662376169),
      BOOST_MATH_BIG_CONSTANT(T, 53, 4.1681660554090917),
      BOOST_MATH_BIG_CONSTANT(T, 53, 2.7853527819234466),
      BOOST_MATH_BIG_CONSTANT(T, 53, 0.74967671848044792),
      BOOST_MATH_BIG_CONSTANT(T, 53, -0.00057069112416246805),
   };
   // Maximum Deviation Found:                     6.896e-018
   // Expected Error Term :                       -6.895e-018
   // Maximum Relative Change in Control Points :  8.497e-004
   static const T P_4_inf[] = {
      static_cast<T>(0.68947581948701249e-17L),
      static_cast<T>(0.49999999999998975L),
      static_cast<T>(1.0177274392923795L),
      static_cast<T>(2.498208511343429L),
      static_cast<T>(2.1921221359427595L),
      static_cast<T>(1.5897035272532764L),
      static_cast<T>(0.40154388356961734L),
   };
   static const T Q_4_inf[] = {
      static_cast<T>(1.0L),
      static_cast<T>(1.7021215452463932L),
      static_cast<T>(4.4290431747556469L),
      static_cast<T>(2.9745631894384922L),
      static_cast<T>(2.3013614809773616L),
      static_cast<T>(0.28360399799075752L),
      static_cast<T>(0.022892987908906897L),
   };

   if(x <= 2)
   {
      return (offset + boost::math::tools::evaluate_polynomial(P_1_2, x) / tools::evaluate_polynomial(Q_1_2, x)) / (x * x);
   }
   else if(x <= 4)
   {
      T y = 1 / x;
      return (1 + tools::evaluate_polynomial(P_2_4, y) / tools::evaluate_polynomial(Q_2_4, y)) / x;
   }
   T y = 1 / x;
   return (1 + tools::evaluate_polynomial(P_4_inf, y) / tools::evaluate_polynomial(Q_4_inf, y)) / x;
}

template <class T, class Policy>
T trigamma_prec(T x, const mpl::int_<64>*, const Policy&)
{
   // Max error in interpolated form: 1.178e-020
   static const T offset_1_2 = BOOST_MATH_BIG_CONSTANT(T, 64, 2.109325408935546875);
   static const T P_1_2[] = {
      BOOST_MATH_BIG_CONSTANT(T, 64, -1.10932535608960258341),
      BOOST_MATH_BIG_CONSTANT(T, 64, -4.18793841543017129052),
      BOOST_MATH_BIG_CONSTANT(T, 64, -4.63865531898487734531),
      BOOST_MATH_BIG_CONSTANT(T, 64, -0.919832884430500908047),
      BOOST_MATH_BIG_CONSTANT(T, 64, 1.68074038333180423012),
      BOOST_MATH_BIG_CONSTANT(T, 64, 1.21172611429185622377),
      BOOST_MATH_BIG_CONSTANT(T, 64, 0.259635673503366427284),
   };
   static const T Q_1_2[] = {
      BOOST_MATH_BIG_CONSTANT(T, 64, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 64, 3.77521119359546982995),
      BOOST_MATH_BIG_CONSTANT(T, 64, 5.664338024578956321),
      BOOST_MATH_BIG_CONSTANT(T, 64, 4.25995134879278028361),
      BOOST_MATH_BIG_CONSTANT(T, 64, 1.62956638448940402182),
      BOOST_MATH_BIG_CONSTANT(T, 64, 0.259635512844691089868),
      BOOST_MATH_BIG_CONSTANT(T, 64, 0.629642219810618032207e-8),
   };
   // Max error in interpolated form: 3.912e-020
   static const T P_2_8[] = {
      BOOST_MATH_BIG_CONSTANT(T, 64, -0.387540035162952880976e-11),
      BOOST_MATH_BIG_CONSTANT(T, 64, 0.500000000276430504),
      BOOST_MATH_BIG_CONSTANT(T, 64, 3.21926880986360957306),
      BOOST_MATH_BIG_CONSTANT(T, 64, 10.2550347708483445775),
      BOOST_MATH_BIG_CONSTANT(T, 64, 18.9002075150709144043),
      BOOST_MATH_BIG_CONSTANT(T, 64, 21.0357215832399705625),
      BOOST_MATH_BIG_CONSTANT(T, 64, 13.4346512182925923978),
      BOOST_MATH_BIG_CONSTANT(T, 64, 3.98656291026448279118),
   };
   static const T Q_2_8[] = {
      BOOST_MATH_BIG_CONSTANT(T, 64, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 64, 6.10520430478613667724),
      BOOST_MATH_BIG_CONSTANT(T, 64, 18.475001060603645512),
      BOOST_MATH_BIG_CONSTANT(T, 64, 31.7087534567758405638),
      BOOST_MATH_BIG_CONSTANT(T, 64, 31.908814523890465398),
      BOOST_MATH_BIG_CONSTANT(T, 64, 17.4175479039227084798),
      BOOST_MATH_BIG_CONSTANT(T, 64, 3.98749106958394941276),
      BOOST_MATH_BIG_CONSTANT(T, 64, -0.000115917322224411128566),
   };
   // Maximum Deviation Found:                     2.635e-020
   // Expected Error Term :                        2.635e-020
   // Maximum Relative Change in Control Points :  1.791e-003
   static const T P_8_inf[] = {
      BOOST_MATH_BIG_CONSTANT(T, 64, -0.263527875092466899848e-19),
      BOOST_MATH_BIG_CONSTANT(T, 64, 0.500000000000000058145),
      BOOST_MATH_BIG_CONSTANT(T, 64, 0.0730121433777364138677),
      BOOST_MATH_BIG_CONSTANT(T, 64, 1.94505878379957149534),
      BOOST_MATH_BIG_CONSTANT(T, 64, 0.0517092358874932620529),
      BOOST_MATH_BIG_CONSTANT(T, 64, 1.07995383547483921121),
   };
   static const T Q_8_inf[] = {
      BOOST_MATH_BIG_CONSTANT(T, 64, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 64, -0.187309046577818095504),
      BOOST_MATH_BIG_CONSTANT(T, 64, 3.95255391645238842975),
      BOOST_MATH_BIG_CONSTANT(T, 64, -1.14743283327078949087),
      BOOST_MATH_BIG_CONSTANT(T, 64, 2.52989799376344914499),
      BOOST_MATH_BIG_CONSTANT(T, 64, -0.627414303172402506396),
      BOOST_MATH_BIG_CONSTANT(T, 64, 0.141554248216425512536),
   };

   if(x <= 2)
   {
      return (offset_1_2 + boost::math::tools::evaluate_polynomial(P_1_2, x) / tools::evaluate_polynomial(Q_1_2, x)) / (x * x);
   }
   else if(x <= 8)
   {
      T y = 1 / x;
      return (1 + tools::evaluate_polynomial(P_2_8, y) / tools::evaluate_polynomial(Q_2_8, y)) / x;
   }
   T y = 1 / x;
   return (1 + tools::evaluate_polynomial(P_8_inf, y) / tools::evaluate_polynomial(Q_8_inf, y)) / x;
}

template <class T, class Policy>
T trigamma_prec(T x, const mpl::int_<113>*, const Policy&)
{
   // Max error in interpolated form: 1.916e-035
   static const T P_1_2[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, -0.999999999999999082554457936871832533),
      BOOST_MATH_BIG_CONSTANT(T, 113, -4.71237311120865266379041700054847734),
      BOOST_MATH_BIG_CONSTANT(T, 113, -7.94125711970499027763789342500817316),
      BOOST_MATH_BIG_CONSTANT(T, 113, -5.74657746697664735258222071695644535),
      BOOST_MATH_BIG_CONSTANT(T, 113, -0.404213349456398905981223965160595687),
      BOOST_MATH_BIG_CONSTANT(T, 113, 2.47877781178642876561595890095758896),
      BOOST_MATH_BIG_CONSTANT(T, 113, 2.07714151702455125992166949812126433),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.858877899162360138844032265418028567),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.20499222604410032375789018837922397),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.0272103140348194747360175268778415049),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.0015764849020876949848954081173520686),
   };
   static const T Q_1_2[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 113, 4.71237311120863419878375031457715223),
      BOOST_MATH_BIG_CONSTANT(T, 113, 9.58619118655339853449127952145877467),
      BOOST_MATH_BIG_CONSTANT(T, 113, 11.0940067269829372437561421279054968),
      BOOST_MATH_BIG_CONSTANT(T, 113, 8.09075424749327792073276309969037885),
      BOOST_MATH_BIG_CONSTANT(T, 113, 3.87705890159891405185343806884451286),
      BOOST_MATH_BIG_CONSTANT(T, 113, 1.22758678701914477836330837816976782),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.249092040606385004109672077814668716),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.0295750413900655597027079600025569048),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.00157648490200498142247694709728858139),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.161264050344059471721062360645432809e-14),
   };
   // Max error in interpolated form: 8.958e-035
   static const T P_2_4[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, -2.55843734739907925764326773972215085),
      BOOST_MATH_BIG_CONSTANT(T, 113, -12.2830208240542011967952466273455887),
      BOOST_MATH_BIG_CONSTANT(T, 113, -23.9195022162767993526575786066414403),
      BOOST_MATH_BIG_CONSTANT(T, 113, -24.9256431504823483094158828285470862),
      BOOST_MATH_BIG_CONSTANT(T, 113, -14.7979122765478779075108064826412285),
      BOOST_MATH_BIG_CONSTANT(T, 113, -4.46654453928610666393276765059122272),
      BOOST_MATH_BIG_CONSTANT(T, 113, -0.0191439033405649675717082465687845002),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.515412052554351265708917209749037352),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.195378348786064304378247325360320038),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.0334761282624174313035014426794245393),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.002373665205942206348500250056602687),
   };
   static const T Q_2_4[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 113, 4.80098558454419907830670928248659245),
      BOOST_MATH_BIG_CONSTANT(T, 113, 9.99220727843170133895059300223445265),
      BOOST_MATH_BIG_CONSTANT(T, 113, 11.8896146167631330735386697123464976),
      BOOST_MATH_BIG_CONSTANT(T, 113, 8.96613256683809091593793565879092581),
      BOOST_MATH_BIG_CONSTANT(T, 113, 4.47254136149624110878909334574485751),
      BOOST_MATH_BIG_CONSTANT(T, 113, 1.48600982028196527372434773913633152),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.319570735766764237068541501137990078),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.0407358345787680953107374215319322066),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.00237366520593271641375755486420859837),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.239554887903526152679337256236302116e-15),
      BOOST_MATH_BIG_CONSTANT(T, 113, -0.294749244740618656265237072002026314e-17),
   };

   static const T y_offset_2_4 = BOOST_MATH_BIG_CONSTANT(T, 113, 3.558437347412109375);

   // Max error in interpolated form: 4.319e-035
   static const T P_4_8[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.166626112697021464248967707021688845e-16),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.499999999999997739552090249208808197),
      BOOST_MATH_BIG_CONSTANT(T, 113, 6.40270945019053817915772473771553187),
      BOOST_MATH_BIG_CONSTANT(T, 113, 41.3833374155000608013677627389343329),
      BOOST_MATH_BIG_CONSTANT(T, 113, 166.803341854562809335667241074035245),
      BOOST_MATH_BIG_CONSTANT(T, 113, 453.39964786925369319960722793414521),
      BOOST_MATH_BIG_CONSTANT(T, 113, 851.153712317697055375935433362983944),
      BOOST_MATH_BIG_CONSTANT(T, 113, 1097.70657567285059133109286478004458),
      BOOST_MATH_BIG_CONSTANT(T, 113, 938.431232478455316020076349367632922),
      BOOST_MATH_BIG_CONSTANT(T, 113, 487.268001604651932322080970189930074),
      BOOST_MATH_BIG_CONSTANT(T, 113, 119.953445242335730062471193124820659),
   };
   static const T Q_4_8[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 113, 12.4720855670474488978638945855932398),
      BOOST_MATH_BIG_CONSTANT(T, 113, 78.6093129753298570701376952709727391),
      BOOST_MATH_BIG_CONSTANT(T, 113, 307.470246050318322489781182863190127),
      BOOST_MATH_BIG_CONSTANT(T, 113, 805.140686101151538537565264188630079),
      BOOST_MATH_BIG_CONSTANT(T, 113, 1439.12019760292146454787601409644413),
      BOOST_MATH_BIG_CONSTANT(T, 113, 1735.6105285756048831268586001383127),
      BOOST_MATH_BIG_CONSTANT(T, 113, 1348.32500712856328019355198611280536),
      BOOST_MATH_BIG_CONSTANT(T, 113, 607.225985860570846699704222144650563),
      BOOST_MATH_BIG_CONSTANT(T, 113, 119.952317857277045332558673164517227),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.000140165918355036060868680809129436084),
   };
   // Maximum Deviation Found:                     2.867e-035
   // Expected Error Term :                        2.866e-035
   // Maximum Relative Change in Control Points :  2.662e-004
   static const T P_8_16[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, -0.184828315274146610610872315609837439e-19),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.500000000000000004122475157735807738),
      BOOST_MATH_BIG_CONSTANT(T, 113, 3.02533865247313349284875558880415875),
      BOOST_MATH_BIG_CONSTANT(T, 113, 13.5995927517457371243039532492642734),
      BOOST_MATH_BIG_CONSTANT(T, 113, 35.3132224283087906757037999452941588),
      BOOST_MATH_BIG_CONSTANT(T, 113, 67.1639424550714159157603179911505619),
      BOOST_MATH_BIG_CONSTANT(T, 113, 83.5767733658513967581959839367419891),
      BOOST_MATH_BIG_CONSTANT(T, 113, 71.073491212235705900866411319363501),
      BOOST_MATH_BIG_CONSTANT(T, 113, 35.8621515614725564575893663483998663),
      BOOST_MATH_BIG_CONSTANT(T, 113, 8.72152231639983491987779743154333318),
   };
   static const T Q_8_16[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 113, 5.71734397161293452310624822415866372),
      BOOST_MATH_BIG_CONSTANT(T, 113, 25.293404179620438179337103263274815),
      BOOST_MATH_BIG_CONSTANT(T, 113, 62.2619767967468199111077640625328469),
      BOOST_MATH_BIG_CONSTANT(T, 113, 113.955048909238993473389714972250235),
      BOOST_MATH_BIG_CONSTANT(T, 113, 130.807138328938966981862203944329408),
      BOOST_MATH_BIG_CONSTANT(T, 113, 102.423146902337654110717764213057753),
      BOOST_MATH_BIG_CONSTANT(T, 113, 44.0424772805245202514468199602123565),
      BOOST_MATH_BIG_CONSTANT(T, 113, 8.89898032477904072082994913461386099),
      BOOST_MATH_BIG_CONSTANT(T, 113, -0.0296627336872039988632793863671456398),
   };
   // Maximum Deviation Found:                     1.079e-035
   // Expected Error Term :                       -1.079e-035
   // Maximum Relative Change in Control Points :  7.884e-003
   static const T P_16_inf[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.0),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.500000000000000000000000000000087317),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.345625669885456215194494735902663968),
      BOOST_MATH_BIG_CONSTANT(T, 113, 9.62895499360842232127552650044647769),
      BOOST_MATH_BIG_CONSTANT(T, 113, 3.5936085382439026269301003761320812),
      BOOST_MATH_BIG_CONSTANT(T, 113, 49.459599118438883265036646019410669),
      BOOST_MATH_BIG_CONSTANT(T, 113, 7.77519237321893917784735690560496607),
      BOOST_MATH_BIG_CONSTANT(T, 113, 74.4536074488178075948642351179304121),
      BOOST_MATH_BIG_CONSTANT(T, 113, 2.75209340397069050436806159297952699),
      BOOST_MATH_BIG_CONSTANT(T, 113, 23.9292359711471667884504840186561598),
   };
   static const T Q_16_inf[] = {
      BOOST_MATH_BIG_CONSTANT(T, 113, 1.0),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.357918006437579097055656138920742037),
      BOOST_MATH_BIG_CONSTANT(T, 113, 19.1386039850709849435325005484512944),
      BOOST_MATH_BIG_CONSTANT(T, 113, 0.874349081464143606016221431763364517),
      BOOST_MATH_BIG_CONSTANT(T, 113, 98.6516097434855572678195488061432509),
      BOOST_MATH_BIG_CONSTANT(T, 113, -16.1051972833382893468655223662534306),
      BOOST_MATH_BIG_CONSTANT(T, 113, 154.316860216253720989145047141653727),
      BOOST_MATH_BIG_CONSTANT(T, 113, -40.2026880424378986053105969312264534),
      BOOST_MATH_BIG_CONSTANT(T, 113, 60.1679136674264778074736441126810223),
      BOOST_MATH_BIG_CONSTANT(T, 113, -13.3414844622256422644504472438320114),
      BOOST_MATH_BIG_CONSTANT(T, 113, 2.53795636200649908779512969030363442),
   };

   if(x <= 2)
   {
      return (2 + boost::math::tools::evaluate_polynomial(P_1_2, x) / tools::evaluate_polynomial(Q_1_2, x)) / (x * x);
   }
   else if(x <= 4)
   {
      return (y_offset_2_4 + boost::math::tools::evaluate_polynomial(P_2_4, x) / tools::evaluate_polynomial(Q_2_4, x)) / (x * x);
   }
   else if(x <= 8)
   {
      T y = 1 / x;
      return (1 + tools::evaluate_polynomial(P_4_8, y) / tools::evaluate_polynomial(Q_4_8, y)) / x;
   }
   else if(x <= 16)
   {
      T y = 1 / x;
      return (1 + tools::evaluate_polynomial(P_8_16, y) / tools::evaluate_polynomial(Q_8_16, y)) / x;
   }
   T y = 1 / x;
   return (1 + tools::evaluate_polynomial(P_16_inf, y) / tools::evaluate_polynomial(Q_16_inf, y)) / x;
}

template <class T, class Tag, class Policy>
T trigamma_imp(T x, const Tag* t, const Policy& pol)
{
   //
   // This handles reflection of negative arguments, and all our
   // error handling, then forwards to the T-specific approximation.
   //
   BOOST_MATH_STD_USING // ADL of std functions.

   T result = 0;
   //
   // Check for negative arguments and use reflection:
   //
   if(x <= 0)
   {
      // Reflect:
      T z = 1 - x;
      // Argument reduction for tan:
      if(floor(x) == x)
      {
         return policies::raise_pole_error<T>("boost::math::trigamma<%1%>(%1%)", 0, (1-x), pol);
      }
boost::math::sin_pi(x, pol) : boost::math::sin_pi(z, pol); return -trigamma_imp(z, t, pol) + boost::math::pow<2>(constants::pi()) / (s * s); } if(x < 1) { result = 1 / (x * x); x += 1; } return result + trigamma_prec(x, t, pol); } template T trigamma_imp(T x, const mpl::int_<0>*, const Policy& pol) { return polygamma_imp(1, x, pol); } // // Initializer: ensure all our constants are initialized prior to the first call of main: // template struct trigamma_initializer { struct init { init() { typedef typename policies::precision::type precision_type; do_init(mpl::bool_()); } void do_init(const mpl::true_&) { boost::math::trigamma(T(2.5), Policy()); } void do_init(const mpl::false_&){} void force_instantiate()const{} }; static const init initializer; static void force_instantiate() { initializer.force_instantiate(); } }; template const typename trigamma_initializer::init trigamma_initializer::initializer; } // namespace detail template inline typename tools::promote_args::type trigamma(T x, const Policy&) { typedef typename tools::promote_args::type result_type; typedef typename policies::evaluation::type value_type; typedef typename policies::precision::type precision_type; typedef typename mpl::if_< mpl::or_< mpl::less_equal >, mpl::greater > >, mpl::int_<0>, typename mpl::if_< mpl::less >, mpl::int_<53>, typename mpl::if_< mpl::less >, mpl::int_<64>, mpl::int_<113> >::type >::type >::type tag_type; typedef typename policies::normalise< Policy, policies::promote_float, policies::promote_double, policies::discrete_quantile<>, policies::assert_undefined<> >::type forwarding_policy; // Force initialization of constants: detail::trigamma_initializer::force_instantiate(); return policies::checked_narrowing_cast(detail::trigamma_imp( static_cast(x), static_cast(0), forwarding_policy()), "boost::math::trigamma<%1%>(%1%)"); } template inline typename tools::promote_args::type trigamma(T x) { return trigamma(x, policies::policy<>()); } } // namespace math } // namespace boost #endif mlpack-2.2.5/src/mlpack/core/boost_backport/unordered_collections_load_imp.hpp000066400000000000000000000052151315013601400277270ustar00rootroot00000000000000// Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #ifndef BOOST_SERIALIZATION_UNORDERED_COLLECTIONS_LOAD_IMP_HPP #define BOOST_SERIALIZATION_UNORDERED_COLLECTIONS_LOAD_IMP_HPP // MS compatible compilers support #pragma once #if defined(_MSC_VER) && (_MSC_VER >= 1020) # pragma once # pragma warning (disable : 4786) // too long name, harmless warning #endif /////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8 // unordered_collections_load_imp.hpp: serialization for loading stl collections // (C) Copyright 2002 Robert Ramey - http://www.rrsd.com . // (C) Copyright 2014 Jim Bell // Use, modification and distribution is subject to the Boost Software // License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // See http://www.boost.org for updates, documentation, and revision history. 
// helper function templates for serialization of collections #include #include // size_t #include // msvc 6.0 needs this for warning suppression #if defined(BOOST_NO_STDC_NAMESPACE) namespace std{ using ::size_t; } // namespace std #endif #include #include #include #include #include #include #include namespace boost{ namespace serialization { namespace stl { ////////////////////////////////////////////////////////////////////// // implementation of serialization for STL containers // template inline void load_unordered_collection(Archive & ar, Container &s) { s.clear(); collection_size_type count; collection_size_type bucket_count; boost::serialization::item_version_type item_version(0); boost::archive::library_version_type library_version( ar.get_library_version() ); // retrieve number of elements ar >> BOOST_SERIALIZATION_NVP(count); ar >> BOOST_SERIALIZATION_NVP(bucket_count); if(boost::archive::library_version_type(3) < library_version){ ar >> BOOST_SERIALIZATION_NVP(item_version); } s.rehash(bucket_count); InputFunction ifunc; while(count-- > 0){ ifunc(ar, s, item_version); } } } // namespace stl } // namespace serialization } // namespace boost #endif //BOOST_SERIALIZATION_UNORDERED_COLLECTIONS_LOAD_IMP_HPP mlpack-2.2.5/src/mlpack/core/boost_backport/unordered_collections_save_imp.hpp000066400000000000000000000061111315013601400277420ustar00rootroot00000000000000// Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #ifndef BOOST_SERIALIZATION_UNORDERED_COLLECTIONS_SAVE_IMP_HPP #define BOOST_SERIALIZATION_UNORDERED_COLLECTIONS_SAVE_IMP_HPP // MS compatible compilers support #pragma once #if defined(_MSC_VER) && (_MSC_VER >= 1020) # pragma once #endif /////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8 // hash_collections_save_imp.hpp: serialization for stl collections // (C) Copyright 2002 Robert Ramey - http://www.rrsd.com . // (C) Copyright 2014 Jim Bell // Use, modification and distribution is subject to the Boost Software // License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // See http://www.boost.org for updates, documentation, and revision history. // helper function templates for serialization of collections #include #include #include #include #include #include namespace boost{ namespace serialization { namespace stl { ////////////////////////////////////////////////////////////////////// // implementation of serialization for STL containers // template inline void save_unordered_collection(Archive & ar, const Container &s) { collection_size_type count(s.size()); const collection_size_type bucket_count(s.bucket_count()); const item_version_type item_version( version::value ); #if 0 /* should only be necessary to create archives of previous versions * which is not currently supported. 
So for now comment this out */ boost::archive::library_version_type library_version( ar.get_library_version() ); // retrieve number of elements ar << BOOST_SERIALIZATION_NVP(count); ar << BOOST_SERIALIZATION_NVP(bucket_count); if(boost::archive::library_version_type(3) < library_version){ // record number of elements // make sure the target type is registered so we can retrieve // the version when we load ar << BOOST_SERIALIZATION_NVP(item_version); } #else ar << BOOST_SERIALIZATION_NVP(count); ar << BOOST_SERIALIZATION_NVP(bucket_count); ar << BOOST_SERIALIZATION_NVP(item_version); #endif typename Container::const_iterator it = s.begin(); while(count-- > 0){ // note borland emits a no-op without the explicit namespace boost::serialization::save_construct_data_adl( ar, &(*it), boost::serialization::version< typename Container::value_type >::value ); ar << boost::serialization::make_nvp("item", *it++); } } } // namespace stl } // namespace serialization } // namespace boost #endif //BOOST_SERIALIZATION_UNORDERED_COLLECTIONS_SAVE_IMP_HPP mlpack-2.2.5/src/mlpack/core/boost_backport/unordered_map.hpp000066400000000000000000000137261315013601400243300ustar00rootroot00000000000000// Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #ifndef BOOST_SERIALIZATION_UNORDERED_MAP_HPP #define BOOST_SERIALIZATION_UNORDERED_MAP_HPP // MS compatible compilers support #pragma once #if defined(_MSC_VER) && (_MSC_VER >= 1020) # pragma once #endif /////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8 // serialization/unordered_map.hpp: // serialization for stl unordered_map templates // (C) Copyright 2002 Robert Ramey - http://www.rrsd.com . // (C) Copyright 2014 Jim Bell // Use, modification and distribution is subject to the Boost Software // License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // See http://www.boost.org for updates, documentation, and revision history. #include #include #include #include "unordered_collections_save_imp.hpp" #include "unordered_collections_load_imp.hpp" #include namespace boost { namespace serialization { namespace stl { // map input template struct archive_input_unordered_map { inline void operator()( Archive &ar, Container &s, const unsigned int v ){ typedef typename Container::value_type type; detail::stack_construct t(ar, v); // borland fails silently w/o full namespace ar >> boost::serialization::make_nvp("item", t.reference()); std::pair result = s.insert(t.reference()); // note: the following presumes that the map::value_type was NOT tracked // in the archive. This is the usual case, but here there is no way // to determine that. if(result.second){ ar.reset_object_address( & (result.first->second), & t.reference().second ); } } }; // multimap input template struct archive_input_unordered_multimap { inline void operator()( Archive &ar, Container &s, const unsigned int v ){ typedef typename Container::value_type type; detail::stack_construct t(ar, v); // borland fails silently w/o full namespace ar >> boost::serialization::make_nvp("item", t.reference()); typename Container::const_iterator result = s.insert(t.reference()); // note: the following presumes that the map::value_type was NOT tracked // in the archive. This is the usual case, but here there is no way // to determine that. 
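      // reset_object_address() tells the archive that the element it just
      // tracked at the stack temporary's address now lives inside the
      // container, so any pointer to it that is deserialized later resolves
      // to the container's copy.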
ar.reset_object_address( & result->second, & t.reference() ); } }; } // stl template< class Archive, class Key, class HashFcn, class EqualKey, class Allocator > inline void save( Archive & ar, const std::unordered_map< Key, HashFcn, EqualKey, Allocator > &t, const unsigned int /*file_version*/ ){ boost::serialization::stl::save_unordered_collection< Archive, std::unordered_map< Key, HashFcn, EqualKey, Allocator > >(ar, t); } template< class Archive, class Key, class HashFcn, class EqualKey, class Allocator > inline void load( Archive & ar, std::unordered_map< Key, HashFcn, EqualKey, Allocator > &t, const unsigned int /*file_version*/ ){ boost::serialization::stl::load_unordered_collection< Archive, std::unordered_map< Key, HashFcn, EqualKey, Allocator >, boost::serialization::stl::archive_input_unordered_map< Archive, std::unordered_map< Key, HashFcn, EqualKey, Allocator > > >(ar, t); } // split non-intrusive serialization function member into separate // non intrusive save/load member functions template< class Archive, class Key, class HashFcn, class EqualKey, class Allocator > inline void serialize( Archive & ar, std::unordered_map< Key, HashFcn, EqualKey, Allocator > &t, const unsigned int file_version ){ boost::serialization::split_free(ar, t, file_version); } // unordered_multimap template< class Archive, class Key, class HashFcn, class EqualKey, class Allocator > inline void save( Archive & ar, const std::unordered_multimap< Key, HashFcn, EqualKey, Allocator > &t, const unsigned int /*file_version*/ ){ boost::serialization::stl::save_unordered_collection< Archive, std::unordered_multimap< Key, HashFcn, EqualKey, Allocator > >(ar, t); } template< class Archive, class Key, class HashFcn, class EqualKey, class Allocator > inline void load( Archive & ar, std::unordered_multimap< Key, HashFcn, EqualKey, Allocator > &t, const unsigned int /*file_version*/ ){ boost::serialization::stl::load_unordered_collection< Archive, std::unordered_multimap< Key, HashFcn, EqualKey, Allocator >, boost::serialization::stl::archive_input_unordered_multimap< Archive, std::unordered_multimap< Key, HashFcn, EqualKey, Allocator > > >(ar, t); } // split non-intrusive serialization function member into separate // non intrusive save/load member functions template< class Archive, class Key, class HashFcn, class EqualKey, class Allocator > inline void serialize( Archive & ar, std::unordered_multimap< Key, HashFcn, EqualKey, Allocator > &t, const unsigned int file_version ){ boost::serialization::split_free(ar, t, file_version); } } // namespace serialization } // namespace boost #endif // BOOST_SERIALIZATION_UNORDERED_MAP_HPP mlpack-2.2.5/src/mlpack/core/data/000077500000000000000000000000001315013601400166605ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/data/CMakeLists.txt000066400000000000000000000014101315013601400214140ustar00rootroot00000000000000# Define the files that we need to compile. # Anything not in this list will not be compiled into mlpack. 
set(SOURCES
  dataset_mapper.hpp
  dataset_mapper_impl.hpp
  extension.hpp
  format.hpp
  load_csv.hpp
  load_csv.cpp
  load.hpp
  load_model_impl.hpp
  load_impl.hpp
  load.cpp
  load_arff.hpp
  load_arff_impl.hpp
  normalize_labels.hpp
  normalize_labels_impl.hpp
  save.hpp
  save_impl.hpp
  serialization_shim.hpp
  split_data.hpp
  imputer.hpp
  binarize.hpp
)

# add directory name to sources
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()
# Append sources (with directory name) to list of all mlpack sources (used at
# parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)

mlpack-2.2.5/src/mlpack/core/data/binarize.hpp

/**
 * @file binarize.hpp
 * @author Keon Kim
 *
 * Defines Binarize(), a utility function that sets values to 0 or 1 with
 * respect to a given threshold.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_DATA_BINARIZE_HPP
#define MLPACK_CORE_DATA_BINARIZE_HPP

#include

namespace mlpack {
namespace data {

/**
 * Given an input dataset and threshold, set values greater than the threshold
 * to 1 and values less than or equal to the threshold to 0. This overload
 * applies the change to all dimensions.
 *
 * @code
 * arma::Mat<double> input = loadData();
 * arma::Mat<double> output;
 * double threshold = 0.5;
 *
 * // Binarize the whole matrix. All values greater than 0.5 will be set to 1
 * // and the values less than or equal to 0.5 will become 0.
 * Binarize(input, output, threshold);
 * @endcode
 *
 * @param input Input matrix to binarize.
 * @param output Matrix to save the binarized data into.
 * @param threshold Threshold can be any number.
 */
template<typename T>
void Binarize(const arma::Mat<T>& input,
              arma::Mat<T>& output,
              const double threshold)
{
  output.copy_size(input);

  const int totalElems = static_cast<int>(input.n_elem);
  const T* inPtr = input.memptr();
  T* outPtr = output.memptr();

  #pragma omp parallel for
  for (int i = 0; i < totalElems; ++i)
  {
    if (inPtr[i] > threshold)
      outPtr[i] = 1;
    else
      outPtr[i] = 0;
  }
}

/**
 * Given an input dataset and threshold, set values greater than the threshold
 * to 1 and values less than or equal to the threshold to 0. This overload
 * takes a dimension and applies the change to the given dimension only.
 *
 * @code
 * arma::Mat<double> input = loadData();
 * arma::Mat<double> output;
 * double threshold = 0.5;
 * size_t dimension = 0;
 *
 * // Binarize the first dimension. All values in the first dimension greater
 * // than 0.5 will be set to 1 and the values less than or equal to 0.5 will
 * // become 0.
 * Binarize(input, output, threshold, dimension);
 * @endcode
 *
 * @param input Input matrix to binarize.
 * @param output Matrix to save the binarized data into.
 * @param threshold Threshold can be any number.
 * @param dimension Feature to apply the Binarize function.
*/ template void Binarize(const arma::Mat& input, arma::Mat& output, const double threshold, const size_t dimension) { output = input; const int totalCols = static_cast(input.n_cols); #pragma omp parallel for for (int i = 0; i < totalCols; ++i) { if (input(dimension, i) > threshold) output(dimension, i) = 1; else output(dimension, i) = 0; } } } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/dataset_mapper.hpp000066400000000000000000000144701315013601400223700ustar00rootroot00000000000000/** * @file dataset_mapper.hpp * @author Ryan Curtin * @author Keon Kim * * Defines the DatasetMapper class, which holds information about a dataset. * This is useful when the dataset contains categorical non-numeric features * that needs to be mapped to categorical numeric features. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_DATASET_INFO_HPP #define MLPACK_CORE_DATA_DATASET_INFO_HPP #include #include #include #include "map_policies/increment_policy.hpp" namespace mlpack { namespace data { /** * Auxiliary information for a dataset, including mappings to/from strings and * the datatype of each dimension. DatasetMapper objects are optionally * produced by data::Load(), and store the type of each dimension * (Datatype::numeric or Datatype::categorical) as well as mappings from strings * to unsigned integers and vice versa. * * @tparam PolicyType Mapping policy used to specify MapString(); */ template class DatasetMapper { public: /** * Create the DatasetMapper object with the given dimensionality. Note that * the dimensionality cannot be changed later; you will have to create a new * DatasetMapper object. */ explicit DatasetMapper(const size_t dimensionality = 0); /** * Create the DatasetMapper object with the given policy and dimensionality. * Note that the dimensionality cannot be changed later; you will have to * create a new DatasetMapper object. Policy can be modified by the modifier. */ explicit DatasetMapper(PolicyType& policy, const size_t dimensionality = 0); /** * Preprocessing: during a first pass of the data, pass the strings on to the * MapPolicy if they are needed. * * @param string String to map. * @param dimension Dimension to map for. */ template void MapFirstPass(const std::string& string, const size_t dimension); /** * Given the string and the dimension to which it belongs, return its numeric * mapping. If no mapping yet exists, the string is added to the list of * mappings for the given dimension. The dimension parameter refers to the * index of the dimension of the string (i.e. the row in the dataset). * * @tparam T Numeric type to map to (int/double/float/etc.). * @param string String to find/create mapping for. * @param dimension Index of the dimension of the string. */ template T MapString(const std::string& string, const size_t dimension); /** * Return the string that corresponds to a given value in a given dimension. * If the string is not a valid mapping in the given dimension, a * std::invalid_argument is thrown. * * @param value Mapped value for string. * @param dimension Dimension to unmap string from. */ const std::string& UnmapString(const size_t value, const size_t dimension); /** * Return the value that corresponds to a given string in a given dimension. 
   * If the value is not a valid mapping in the given dimension, a
   * std::invalid_argument is thrown.
   *
   * @param string Mapped string for value.
   * @param dimension Dimension to unmap string from.
   */
  typename PolicyType::MappedType UnmapValue(const std::string& string,
                                             const size_t dimension);

  /**
   * MapTokens turns a vector of strings into numeric variables and puts them
   * into a given matrix. It uses the mapping policy to store categorical
   * values in the maps. How it determines whether a value is categorical, and
   * how it stores the categorical value into the map and replaces it with the
   * numerical value, all depend on the mapping policy object's MapTokens()
   * function.
   *
   * @tparam eT Type of armadillo matrix.
   * @param tokens Vector of variables inside a dimension.
   * @param row Position of the given tokens.
   * @param matrix Matrix to save the data into.
   */
  template<typename eT>
  void MapTokens(const std::vector<std::string>& tokens,
                 size_t& row,
                 arma::Mat<eT>& matrix);

  //! Return the type of a given dimension (numeric or categorical).
  Datatype Type(const size_t dimension) const;

  //! Modify the type of a given dimension (be careful!).
  Datatype& Type(const size_t dimension);

  /**
   * Get the number of mappings for a particular dimension. If the dimension
   * is numeric, then this will return 0.
   */
  size_t NumMappings(const size_t dimension) const;

  /**
   * Get the dimensionality of the DatasetMapper object (that is, how many
   * dimensions it has information for). If this object was created by a call
   * to mlpack::data::Load(), then the dimensionality will be the same as the
   * number of rows (dimensions) in the dataset.
   */
  size_t Dimensionality() const;

  /**
   * Serialize the dataset information.
   */
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    ar & data::CreateNVP(types, "types");
    ar & data::CreateNVP(maps, "maps");
  }

  //! Return the policy of the mapper.
  const PolicyType& Policy() const;

  //! Modify the policy of the mapper (be careful!).
  PolicyType& Policy();

  //! Modify (Replace) the policy of the mapper with a new policy.
  void Policy(PolicyType&& policy);

 private:
  //! Types of each dimension.
  std::vector<Datatype> types;

  // BiMapType definition
  using BiMapType = boost::bimap<std::string, typename PolicyType::MappedType>;

  // Mappings from strings to integers.
  // Map entries will only exist for dimensions that are categorical.
  // MapType = map<dimension, pair<bimap<string, MappedType>, numMappings>>
  using MapType = std::unordered_map<size_t, std::pair<BiMapType, size_t>>;

  //! maps object stores string and numerical pairs.
  MapType maps;

  //! policy object tells the dataset mapper how the categorical values should
  //! be mapped to the maps object. It is used in MapString() and MapTokens().
  PolicyType policy;
};

// Use typedef to provide backward compatibility.
using DatasetInfo = DatasetMapper<IncrementPolicy>;

} // namespace data
} // namespace mlpack

#include "dataset_mapper_impl.hpp"

#endif

mlpack-2.2.5/src/mlpack/core/data/dataset_mapper_impl.hpp

/**
 * @file dataset_mapper_impl.hpp
 * @author Ryan Curtin
 * @author Keon Kim
 *
 * An implementation of the DatasetMapper class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_DATA_DATASET_INFO_IMPL_HPP
#define MLPACK_CORE_DATA_DATASET_INFO_IMPL_HPP

// In case it hasn't already been included.
#include "dataset_mapper.hpp"

namespace mlpack {
namespace data {

// Default constructor.
template inline DatasetMapper::DatasetMapper(const size_t dimensionality) : types(dimensionality, Datatype::numeric) { // Nothing to initialize here. } template inline DatasetMapper::DatasetMapper(PolicyType& policy, const size_t dimensionality) : types(dimensionality, Datatype::numeric), policy(std::move(policy)) { // Nothing to initialize here. } // Utility helper function to call MapFirstPass. template void CallMapFirstPass( PolicyType& policy, const std::string& string, const size_t dimension, std::vector& types, const typename std::enable_if::type* = 0) { policy.template MapFirstPass(string, dimension, types); } // Utility helper function that doesn't call anything. template void CallMapFirstPass( PolicyType& /* policy */, const std::string& /* string */, const size_t /* dimension */, std::vector& /* types */, const typename std::enable_if::type* = 0) { // Nothing to do here. } template template void DatasetMapper::MapFirstPass(const std::string& string, const size_t dimension) { // Call the correct overload (via SFINAE). CallMapFirstPass(policy, string, dimension, types); } // When we want to insert value into the map, we use the policy to map the // string. template template inline T DatasetMapper::MapString(const std::string& string, const size_t dimension) { return policy.template MapString(string, dimension, maps, types); } // Return the string corresponding to a value in a given dimension. template inline const std::string& DatasetMapper::UnmapString( const size_t value, const size_t dimension) { // Throw an exception if the value doesn't exist. if (maps[dimension].first.right.count(value) == 0) { std::ostringstream oss; oss << "DatasetMapper::UnmapString(): value '" << value << "' unknown for dimension " << dimension; throw std::invalid_argument(oss.str()); } return maps[dimension].first.right.at(value); } // Return the value corresponding to a string in a given dimension. template inline typename PolicyType::MappedType DatasetMapper::UnmapValue( const std::string& string, const size_t dimension) { // Throw an exception if the value doesn't exist. if (maps[dimension].first.left.count(string) == 0) { std::ostringstream oss; oss << "DatasetMapper::UnmapValue(): string '" << string << "' unknown for dimension " << dimension; throw std::invalid_argument(oss.str()); } return maps[dimension].first.left.at(string); } template template inline void DatasetMapper::MapTokens( const std::vector& tokens, size_t& row, arma::Mat& matrix) { return policy.template MapTokens(tokens, row, matrix, maps, types); } // Get the type of a particular dimension. template inline Datatype DatasetMapper::Type(const size_t dimension) const { if (dimension >= types.size()) { std::ostringstream oss; oss << "requested type of dimension " << dimension << ", but dataset only " << "has " << types.size() << " dimensions"; throw std::invalid_argument(oss.str()); } return types[dimension]; } template inline Datatype& DatasetMapper::Type(const size_t dimension) { if (dimension >= types.size()) types.resize(dimension + 1, Datatype::numeric); return types[dimension]; } template inline size_t DatasetMapper::NumMappings(const size_t dimension) const { return (maps.count(dimension) == 0) ? 
0 : maps.at(dimension).second; } template inline size_t DatasetMapper::Dimensionality() const { return types.size(); } template inline const PolicyType& DatasetMapper::Policy() const { return this->policy; } template inline PolicyType& DatasetMapper::Policy() { return this->policy; } template inline void DatasetMapper::Policy(PolicyType&& policy) { this->policy = std::forward(policy); } } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/extension.hpp000066400000000000000000000017161315013601400214120ustar00rootroot00000000000000/** * @file extension.hpp * @author Ryan Curtin * * Given a filename, extract its extension. This is used by data::Load() and * data::Save(). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_EXTENSION_HPP #define MLPACK_CORE_DATA_EXTENSION_HPP #include namespace mlpack { namespace data { inline std::string Extension(const std::string& filename) { const size_t ext = filename.rfind('.'); std::string extension; if (ext == std::string::npos) return extension; extension = filename.substr(ext + 1); std::transform(extension.begin(), extension.end(), extension.begin(), ::tolower); return extension; } } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/format.hpp000066400000000000000000000013401315013601400206570ustar00rootroot00000000000000/** * @file formats.hpp * @author Ryan Curtin * * Define the formats that can be used by mlpack's Load() and Save() mechanisms * via boost::serialization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_FORMATS_HPP #define MLPACK_CORE_DATA_FORMATS_HPP namespace mlpack { namespace data { //! Define the formats we can read through boost::serialization. enum format { autodetect, text, xml, binary }; } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/imputation_methods/000077500000000000000000000000001315013601400225745ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/data/imputation_methods/CMakeLists.txt000066400000000000000000000010021315013601400253250ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES custom_imputation.hpp listwise_deletion.hpp mean_imputation.hpp median_imputation.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/data/imputation_methods/custom_imputation.hpp000066400000000000000000000044611315013601400270750ustar00rootroot00000000000000/** * @file custom_imputation.hpp * @author Keon Kim * * Definition and Implementation of the empty CustomImputation class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_IMPUTE_STRATEGIES_CUSTOM_IMPUTATION_HPP #define MLPACK_CORE_DATA_IMPUTE_STRATEGIES_CUSTOM_IMPUTATION_HPP #include namespace mlpack { namespace data { /** * A simple custom imputation class * @tparam T Type of armadillo matrix */ template class CustomImputation { public: CustomImputation(T customValue): customValue(std::move(customValue)) { // nothing to initialize here } /** * Impute function searches through the input looking for mappedValue and * replaces it with the user-defined custom value of the given dimension. * The result is overwritten to the input, not creating any copy. Custom value * must be set when initializing the CustomImputation object. * * @param input Matrix that contains mappedValue. * @param mappedValue Value that the user wants to get rid of. * @param dimension Index of the dimension of the mappedValue. * @param columnMajor State of whether the input matrix is columnMajor or not. */ void Impute(arma::Mat& input, const T& mappedValue, const size_t dimension, const bool columnMajor = true) { // replace the target value to custom value if (columnMajor) { for (size_t i = 0; i < input.n_cols; ++i) { if (input(dimension, i) == mappedValue || std::isnan(input(dimension, i))) { input(dimension, i) = customValue; } } } else { for (size_t i = 0; i < input.n_rows; ++i) { if (input(i, dimension) == mappedValue || std::isnan(input(i, dimension))) { input(i, dimension) = customValue; } } } } private: //! A user-defined value that the user wants to replace missing values with. T customValue; }; // class CustomImputation } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/imputation_methods/listwise_deletion.hpp000066400000000000000000000041761315013601400270430ustar00rootroot00000000000000/** * @file listwise_deletion.hpp * @author Keon Kim * * Definition and Implementation of the empty ListwiseDeletion class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_IMPUTE_STRATEGIES_LISTWISE_DELETION_HPP #define MLPACK_CORE_DATA_IMPUTE_STRATEGIES_LISTWISE_DELETION_HPP #include namespace mlpack { namespace data { /** * A complete-case analysis to remove the values containing mappedValue. * Removes all data for a case that has one or more missing values. * @tparam T Type of armadillo matrix */ template class ListwiseDeletion { public: /** * Impute function searches through the input looking for mappedValue and * remove the whole row or column. The result is overwritten to the input. * * @param input Matrix that contains mappedValue. * @param mappedValue Value that the user wants to get rid of. * @param dimension Index of the dimension of the mappedValue. * @param columnMajor State of whether the input matrix is columnMajor or not. 
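   *
   * A minimal usage sketch (hypothetical data; assumes the missing entries in
   * dimension 1 were mapped to the value 0):
   *
   * @code
   * arma::mat input = loadData(); // hypothetical helper
   * ListwiseDeletion<double> deletion;
   * // Drop every column whose entry in dimension 1 is 0 or NaN.
   * deletion.Impute(input, 0, 1);
   * @endcode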
*/ void Impute(arma::Mat& input, const T& mappedValue, const size_t dimension, const bool columnMajor = true) { std::vector colsToKeep; if (columnMajor) { for (size_t i = 0; i < input.n_cols; ++i) { if (!(input(dimension, i) == mappedValue || std::isnan(input(dimension, i)))) { colsToKeep.push_back(i); } } input = input.cols(arma::uvec(colsToKeep)); } else { for (size_t i = 0; i < input.n_rows; ++i) { if (!(input(i, dimension) == mappedValue || std::isnan(input(i, dimension)))) { colsToKeep.push_back(i); } } input = input.rows(arma::uvec(colsToKeep)); } } }; // class ListwiseDeletion } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/imputation_methods/mean_imputation.hpp000066400000000000000000000056151315013601400265050ustar00rootroot00000000000000/** * @file mean_imputation.hpp * @author Keon Kim * * Definition and Implementation of the MeanImputation class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_IMPUTE_STRATEGIES_MEAN_IMPUTATION_HPP #define MLPACK_CORE_DATA_IMPUTE_STRATEGIES_MEAN_IMPUTATION_HPP #include namespace mlpack { namespace data { /** * A simple mean imputation class * @tparam T Type of armadillo matrix */ template class MeanImputation { public: /** * Impute function searches through the input looking for mappedValue and * replaces it with the mean of the given dimension. The result is overwritten * to the input matrix. * * @param input Matrix that contains mappedValue. * @param mappedValue Value that the user wants to get rid of. * @param dimension Index of the dimension of the mappedValue. * @param columnMajor State of whether the input matrix is columnMajor or not. */ void Impute(arma::Mat& input, const T& mappedValue, const size_t dimension, const bool columnMajor = true) { double sum = 0; size_t elems = 0; // excluding nan or missing target using PairType = std::pair; // dimensions and indexes are saved as pairs inside this vector. std::vector targets; // calculate number of elements and sum of them excluding mapped value or // nan. while doing that, remember where mappedValue or NaN exists. if (columnMajor) { for (size_t i = 0; i < input.n_cols; ++i) { if (input(dimension, i) == mappedValue || std::isnan(input(dimension, i))) { targets.emplace_back(dimension, i); } else { elems++; sum += input(dimension, i); } } } else { for (size_t i = 0; i < input.n_rows; ++i) { if (input(i, dimension) == mappedValue || std::isnan(input(i, dimension))) { targets.emplace_back(i, dimension); } else { elems++; sum += input(i, dimension); } } } if (elems == 0) Log::Fatal << "it is impossible to calculate mean; no valid elements in " << "the dimension" << std::endl; // calculate mean; const double mean = sum / elems; // Now replace the calculated mean to the missing variables // It only needs to loop through targets vector, not the whole matrix. for (const PairType& target : targets) { input(target.first, target.second) = mean; } } }; // class MeanImputation } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/imputation_methods/median_imputation.hpp000066400000000000000000000051521315013601400270160ustar00rootroot00000000000000/** * @file median_imputation.hpp * @author Keon Kim * * Definition and Implementation of the MedianImputation class. 
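 *
 * A minimal usage sketch (hypothetical data; assumes the missing entries in
 * dimension 0 were mapped to the value 0):
 *
 * @code
 * arma::mat input = loadData(); // hypothetical helper
 * MedianImputation<double> imputer;
 * // Replace every 0 (or NaN) in dimension 0 with that dimension's median.
 * imputer.Impute(input, 0, 0);
 * @endcode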
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_DATA_IMPUTE_STRATEGIES_MEDIAN_IMPUTATION_HPP
#define MLPACK_CORE_DATA_IMPUTE_STRATEGIES_MEDIAN_IMPUTATION_HPP

#include

namespace mlpack {
namespace data {

/**
 * This is a class implementation of simple median imputation. It replaces a
 * missing value with the middle value of the dimension (or the average of the
 * two middle values).
 * @tparam T Type of armadillo matrix
 */
template<typename T>
class MedianImputation
{
 public:
  /**
   * Impute function searches through the input looking for mappedValue and
   * replaces it with the median of the given dimension. The result is
   * overwritten to the input matrix.
   *
   * @param input Matrix that contains mappedValue.
   * @param mappedValue Value that the user wants to get rid of.
   * @param dimension Index of the dimension of the mappedValue.
   * @param columnMajor State of whether the input matrix is columnMajor or not.
   */
  void Impute(arma::Mat<T>& input,
              const T& mappedValue,
              const size_t dimension,
              const bool columnMajor = true)
  {
    using PairType = std::pair<size_t, size_t>;
    // dimensions and indexes are saved as pairs inside this vector.
    std::vector<PairType> targets;
    // good elements are kept inside this vector.
    std::vector<double> elemsToKeep;

    if (columnMajor)
    {
      for (size_t i = 0; i < input.n_cols; ++i)
      {
        if (input(dimension, i) == mappedValue ||
            std::isnan(input(dimension, i)))
        {
          targets.emplace_back(dimension, i);
        }
        else
        {
          elemsToKeep.push_back(input(dimension, i));
        }
      }
    }
    else
    {
      for (size_t i = 0; i < input.n_rows; ++i)
      {
        if (input(i, dimension) == mappedValue ||
            std::isnan(input(i, dimension)))
        {
          targets.emplace_back(i, dimension);
        }
        else
        {
          elemsToKeep.push_back(input(i, dimension));
        }
      }
    }

    // calculate median
    const double median = arma::median(arma::vec(elemsToKeep));

    for (const PairType& target : targets)
    {
      input(target.first, target.second) = median;
    }
  }
}; // class MedianImputation

} // namespace data
} // namespace mlpack

#endif

mlpack-2.2.5/src/mlpack/core/data/imputer.hpp

/**
 * @file imputer.hpp
 * @author Keon Kim
 *
 * Defines the Imputer class, a utility to replace missing variables in a
 * dataset.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_DATA_IMPUTER_HPP
#define MLPACK_CORE_DATA_IMPUTER_HPP

#include
#include "dataset_mapper.hpp"
#include "map_policies/missing_policy.hpp"
#include "map_policies/increment_policy.hpp"

namespace mlpack {
namespace data {

/**
 * Given a dataset of a particular datatype, replace a user-specified missing
 * value with a variable dependent on the StrategyType and MapperType.
 *
 * @tparam T Type of armadillo matrix used for imputation strategy.
 * @tparam MapperType DatasetMapper that is used to hold dataset information.
 * @tparam StrategyType Imputation strategy used.
 */
template<typename T, typename MapperType, typename StrategyType>
class Imputer
{
 public:
  Imputer(MapperType mapper, bool columnMajor = true):
      mapper(std::move(mapper)),
      columnMajor(columnMajor)
  {
    // Nothing to initialize here.
  }

  Imputer(MapperType mapper, StrategyType strategy, bool columnMajor = true):
      strategy(std::move(strategy)),
      mapper(std::move(mapper)),
      columnMajor(columnMajor)
  {
    // Nothing to initialize here.
  }

  /**
   * Given an input dataset, replace missing values of a dimension with the
   * given imputation strategy. This function does not produce an output
   * matrix, but overwrites the result into the input matrix.
   *
   * @param input Input dataset to apply imputation.
   * @param missingValue User-defined missing value; it can be anything.
   * @param dimension Dimension to apply the imputation.
   */
  void Impute(arma::Mat<T>& input,
              const std::string& missingValue,
              const size_t dimension)
  {
    T mappedValue = static_cast<T>(mapper.UnmapValue(missingValue, dimension));
    strategy.Impute(input, mappedValue, dimension, columnMajor);
  }

  //! Get the strategy.
  const StrategyType& Strategy() const { return strategy; }

  //! Modify the given strategy (be careful!).
  StrategyType& Strategy() { return strategy; }

  //! Get the mapper.
  const MapperType& Mapper() const { return mapper; }

  //! Modify the given mapper (be careful!).
  MapperType& Mapper() { return mapper; }

 private:
  // StrategyType
  StrategyType strategy;

  // DatasetMapperType
  MapperType mapper;

  // Save columnMajor as a member variable, since it is rarely changed.
  bool columnMajor;
}; // class Imputer

} // namespace data
} // namespace mlpack

#endif

mlpack-2.2.5/src/mlpack/core/data/load.cpp

/**
 * @file load.cpp
 * @author Tham Ngap Wei
 *
 * Explicit instantiations of load functions.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "load.hpp"
#include "load_impl.hpp"

namespace mlpack {
namespace data /** Functions to load and save matrices and models. */ {

template bool Load(const std::string&, arma::Mat&, const bool, const bool);
template bool Load(const std::string&, arma::Mat&, const bool, const bool);
template bool Load(const std::string&, arma::Mat&, const bool, const bool);
template bool Load(const std::string&, arma::Mat&, const bool, const bool);
template bool Load(const std::string&, arma::Mat&, const bool, const bool);

template bool Load(const std::string&, arma::Mat&, DatasetMapper&,
    const bool, const bool);
template bool Load(const std::string&, arma::Mat&, DatasetMapper&,
    const bool, const bool);
template bool Load(const std::string&, arma::Mat&, DatasetMapper&,
    const bool, const bool);
template bool Load(const std::string&, arma::Mat&, DatasetMapper&,
    const bool, const bool);

#ifndef _WIN32
template bool Load(const std::string&, arma::Mat&, DatasetMapper&,
    const bool, const bool);
#endif

} // namespace data
} // namespace mlpack

mlpack-2.2.5/src/mlpack/core/data/load.hpp

/**
 * @file load.hpp
 * @author Ryan Curtin
 *
 * Load an Armadillo matrix from file. This is necessary because Armadillo does
 * not transpose matrices on input, and it allows us to give better error
 * output.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
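 *
 * A short usage sketch (hypothetical filename and data):
 *
 * @code
 * arma::mat dataset;
 * // Load dataset.csv; throw on failure, and transpose to column-major form.
 * mlpack::data::Load("dataset.csv", dataset, true);
 * @endcode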
*/ #ifndef MLPACK_CORE_DATA_LOAD_HPP #define MLPACK_CORE_DATA_LOAD_HPP #include #include #include #include "format.hpp" #include "dataset_mapper.hpp" #include "load_csv.hpp" namespace mlpack { namespace data /** Functions to load and save matrices and models. */ { /** * Loads a matrix from file, guessing the filetype from the extension. This * will transpose the matrix at load time (unless the transpose parameter is set * to false). If the filetype cannot be determined, an error will be given. * * The supported types of files are the same as found in Armadillo: * * - CSV (csv_ascii), denoted by .csv, or optionally .txt * - TSV (raw_ascii), denoted by .tsv, .csv, or .txt * - ASCII (raw_ascii), denoted by .txt * - Armadillo ASCII (arma_ascii), also denoted by .txt * - PGM (pgm_binary), denoted by .pgm * - PPM (ppm_binary), denoted by .ppm * - Raw binary (raw_binary), denoted by .bin * - Armadillo binary (arma_binary), denoted by .bin * - HDF5, denoted by .hdf, .hdf5, .h5, or .he5 * * If the file extension is not one of those types, an error will be given. * This is preferable to Armadillo's default behavior of loading an unknown * filetype as raw_binary, which can have very confusing effects. * * If the parameter 'fatal' is set to true, a std::runtime_error exception will * be thrown if the matrix does not load successfully. The parameter * 'transpose' controls whether or not the matrix is transposed after loading. * In most cases, because data is generally stored in a row-major format and * mlpack requires column-major matrices, this should be left at its default * value of 'true'. * * @param filename Name of file to load. * @param matrix Matrix to load contents of file into. * @param fatal If an error should be reported as fatal (default false). * @param transpose If true, transpose the matrix after loading. * @return Boolean value indicating success or failure of load. */ template bool Load(const std::string& filename, arma::Mat& matrix, const bool fatal = false, const bool transpose = true); extern template bool Load(const std::string&, arma::Mat&, const bool, const bool); extern template bool Load(const std::string&, arma::Mat&, const bool, const bool); extern template bool Load(const std::string&, arma::Mat&, const bool, const bool); extern template bool Load(const std::string&, arma::Mat&, const bool, const bool); #ifndef _WIN32 extern template bool Load(const std::string&, arma::Mat&, const bool, const bool); #endif /** * Load a column vector from a file, guessing the filetype from the extension. * * The supported types of files are the same as found in Armadillo: * * - CSV (csv_ascii), denoted by .csv, or optionally .txt * - TSV (raw_ascii), denoted by .tsv, .csv, or .txt * - ASCII (raw_ascii), denoted by .txt * - Armadillo ASCII (arma_ascii), also denoted by .txt * - PGM (pgm_binary), denoted by .pgm * - PPM (ppm_binary), denoted by .ppm * - Raw binary (raw_binary), denoted by .bin * - Armadillo binary (arma_binary), denoted by .bin * - HDF5, denoted by .hdf, .hdf5, .h5, or .he5 * * If the file extension is not one of those types, an error will be given. * This is preferable to Armadillo's default behavior of loading an unknown * filetype as raw_binary, which can have very confusing effects. * * If the parameter 'fatal' is set to true, a std::runtime_error exception will * be thrown if the matrix does not load successfully. * * @param filename Name of file to load. * @param colvec Column vector to load contents of file into. 
* @param fatal If an error should be reported as fatal (default false). * @return Boolean value indicating success or failure of load. */ template bool Load(const std::string& filename, arma::Col& colvec, const bool fatal = false); /** * Load a row vector from a file, guessing the filetype from the extension. * * The supported types of files are the same as found in Armadillo: * * - CSV (csv_ascii), denoted by .csv, or optionally .txt * - TSV (raw_ascii), denoted by .tsv, .csv, or .txt * - ASCII (raw_ascii), denoted by .txt * - Armadillo ASCII (arma_ascii), also denoted by .txt * - PGM (pgm_binary), denoted by .pgm * - PPM (ppm_binary), denoted by .ppm * - Raw binary (raw_binary), denoted by .bin * - Armadillo binary (arma_binary), denoted by .bin * - HDF5, denoted by .hdf, .hdf5, .h5, or .he5 * * If the file extension is not one of those types, an error will be given. * This is preferable to Armadillo's default behavior of loading an unknown * filetype as raw_binary, which can have very confusing effects. * * If the parameter 'fatal' is set to true, a std::runtime_error exception will * be thrown if the matrix does not load successfully. * * @param filename Name of file to load. * @param colvec Column vector to load contents of file into. * @param fatal If an error should be reported as fatal (default false). * @return Boolean value indicating success or failure of load. */ template bool Load(const std::string& filename, arma::Row& colvec, const bool fatal = false); /** * Loads a matrix from a file, guessing the filetype from the extension and * mapping categorical features with a DatasetMapper object. This will * transpose the matrix (unless the transpose parameter is set to false). * This particular overload of Load() can only load text-based formats, such as * those given below: * * - CSV (csv_ascii), denoted by .csv, or optionally .txt * - TSV (raw_ascii), denoted by .tsv, .csv, or .txt * - ASCII (raw_ascii), denoted by .txt * * If the file extension is not one of those types, an error will be given. * This is preferable to Armadillo's default behavior of loading an unknown * filetype as raw_binary, which can have very confusing effects. * * If the parameter 'fatal' is set to true, a std::runtime_error exception will * be thrown if the matrix does not load successfully. The parameter * 'transpose' controls whether or not the matrix is transposed after loading. * In most cases, because data is generally stored in a row-major format and * mlpack requires column-major matrices, this should be left at its default * value of 'true'. * * The DatasetMapper object passed to this function will be re-created, so any * mappings from previous loads will be lost. * * @param filename Name of file to load. * @param matrix Matrix to load contents of file into. * @param info DatasetMapper object to populate with mappings and data types. * @param fatal If an error should be reported as fatal (default false). * @param transpose If true, transpose the matrix after loading. * @return Boolean value indicating success or failure of load. 
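 *
 * For example (hypothetical filename; info will hold the resulting mappings
 * and per-dimension types):
 *
 * @code
 * arma::mat dataset;
 * mlpack::data::DatasetInfo info;
 * mlpack::data::Load("dataset.csv", dataset, info, true);
 * @endcode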
*/ template bool Load(const std::string& filename, arma::Mat& matrix, DatasetMapper& info, const bool fatal = false, const bool transpose = true); extern template bool Load(const std::string&, arma::Mat&, DatasetMapper&, const bool, const bool); extern template bool Load(const std::string&, arma::Mat&, DatasetMapper&, const bool, const bool); extern template bool Load(const std::string&, arma::Mat&, DatasetMapper&, const bool, const bool); extern template bool Load(const std::string&, arma::Mat&, DatasetMapper&, const bool, const bool); extern template bool Load(const std::string&, arma::Mat&, DatasetMapper&, const bool, const bool); /** * Load a model from a file, guessing the filetype from the extension, or, * optionally, loading the specified format. If automatic extension detection * is used and the filetype cannot be determined, an error will be given. * * The supported types of files are the same as what is supported by the * boost::serialization library: * * - text, denoted by .txt * - xml, denoted by .xml * - binary, denoted by .bin * * The format parameter can take any of the values in the 'format' enum: * 'format::autodetect', 'format::text', 'format::xml', and 'format::binary'. * The autodetect functionality operates on the file extension (so, "file.txt" * would be autodetected as text). * * The name parameter should be specified to indicate the name of the structure * to be loaded. This should be the same as the name that was used to save the * structure (otherwise, the loading procedure will fail). * * If the parameter 'fatal' is set to true, then an exception will be thrown in * the event of load failure. Otherwise, the method will return false and the * relevant error information will be printed to Log::Warn. */ template bool Load(const std::string& filename, const std::string& name, T& t, const bool fatal = false, format f = format::autodetect); } // namespace data } // namespace mlpack // Include implementation of model-loading Load() overload. #include "load_model_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/data/load_arff.hpp000066400000000000000000000043411315013601400213100ustar00rootroot00000000000000/** * @file load_arff.hpp * @author Ryan Curtin * * Load an ARFF dataset. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_LOAD_ARFF_HPP #define MLPACK_CORE_DATA_LOAD_ARFF_HPP #include #include "dataset_mapper.hpp" #include namespace mlpack { namespace data { /** * A utility function to load an ARFF dataset as numeric features (that is, as * an Armadillo matrix without any modification). An exception will be thrown * if any features are non-numeric. */ template void LoadARFF(const std::string& filename, arma::Mat& matrix); /** * A utility function to load an ARFF dataset as numeric and categorical * features, using the DatasetInfo structure for mapping. An exception will be * thrown upon failure. * * A pre-existing DatasetInfo object can be passed in, but if the dimensionality * of the given DatasetInfo object (info.Dimensionality()) does not match the * dimensionality of the data, a std::invalid_argument exception will be thrown. * If an empty DatasetInfo object is given (constructed with the default * constructor or otherwise, so that info.Dimensionality() is 0), it will be set * to the right dimensionality. 
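 *
 * A minimal sketch of the intended usage (hypothetical filenames):
 *
 * @code
 * arma::mat train, test;
 * data::DatasetInfo info;
 * data::LoadARFF("train.arff", train, info);
 * // Reusing the same DatasetInfo gives the test set identical mappings, as
 * // explained below.
 * data::LoadARFF("test.arff", test, info);
 * @endcode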
* * This ability to pass in pre-existing DatasetInfo objects is very necessary * when, e.g., loading a test set after training. If the same DatasetInfo from * loading the training set is not used, then the test set may be loaded with * different mappings---which can cause horrible problems! * * @param filename Name of ARFF file to load. * @param matrix Matrix to load data into. * @param info DatasetInfo object; can be default-constructed or pre-existing * from another call to LoadARFF(). */ template void LoadARFF(const std::string& filename, arma::Mat& matrix, DatasetMapper& info); } // namespace data } // namespace mlpack // Include implementation. #include "load_arff_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/data/load_arff_impl.hpp000066400000000000000000000153311315013601400223320ustar00rootroot00000000000000/** * @file load_arff_impl.hpp * @author Ryan Curtin * * Load an ARFF dataset. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_LOAD_ARFF_IMPL_HPP #define MLPACK_CORE_DATA_LOAD_ARFF_IMPL_HPP // In case it hasn't been included yet. #include "load_arff.hpp" #include namespace mlpack { namespace data { template void LoadARFF(const std::string& filename, arma::Mat& matrix, DatasetMapper& info) { // First, open the file. std::ifstream ifs; ifs.open(filename); std::string line; size_t dimensionality = 0; std::vector types; size_t headerLines = 0; while (!ifs.eof()) { // Read the next line, then strip whitespace from either side. std::getline(ifs, line, '\n'); boost::trim(line); ++headerLines; // Is the first character a comment, or is the line empty? if (line[0] == '%' || line.empty()) continue; // Ignore this line. // If the first character is @, we are looking at @relation, @attribute, or // @data. if (line[0] == '@') { typedef boost::tokenizer> Tokenizer; std::string separators = " \t\%"; // Split on comments too. boost::escaped_list_separator sep("\\", separators, "\"{"); Tokenizer tok(line, sep); Tokenizer::iterator it = tok.begin(); // Get the annotation we are looking at. std::string annotation(*it); std::transform(annotation.begin(), annotation.end(), annotation.begin(), ::tolower); if (annotation == "@relation") { // We don't actually have anything to do with the name of the dataset. continue; } else if (annotation == "@attribute") { ++dimensionality; // We need to mark this dimension with its according type. ++it; // Ignore the dimension name. std::string dimType = *(++it); std::transform(dimType.begin(), dimType.end(), dimType.begin(), ::tolower); if (dimType == "numeric" || dimType == "integer" || dimType == "real") { types.push_back(false); // The feature is numeric. } else if (dimType == "string") { types.push_back(true); // The feature is categorical. } else if (dimType[0] == '{') { throw std::logic_error("list of ARFF values not yet supported"); } } else if (annotation == "@data") { // We are in the data section. So we can move out of this loop. break; } else { throw std::runtime_error("unknown ARFF annotation '" + (*tok.begin()) + "'"); } } } if (ifs.eof()) throw std::runtime_error("no @data section found"); // Reset the DatasetInfo object, if needed. 
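  // An empty mapper is simply resized to match the data; a pre-existing
  // mapper whose dimensionality disagrees with the file is an error.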
if (info.Dimensionality() == 0) { info = DatasetMapper(dimensionality); } else if (info.Dimensionality() != dimensionality) { std::ostringstream oss; oss << "data::LoadARFF(): given DatasetInfo has dimensionality " << info.Dimensionality() << ", but data has dimensionality " << dimensionality; throw std::invalid_argument(oss.str()); } for (size_t i = 0; i < types.size(); ++i) { if (types[i]) info.Type(i) = Datatype::categorical; else info.Type(i) = Datatype::numeric; } // We need to find out how many lines of data are in the file. std::streampos pos = ifs.tellg(); size_t row = 0; while (!ifs.eof()) { std::getline(ifs, line, '\n'); ++row; } // Uncount the EOF row. --row; // Since we've hit the EOF, we have to call clear() so we can seek again. ifs.clear(); ifs.seekg(pos); // Now, set the size of the matrix. matrix.set_size(dimensionality, row); // Now we are looking at the @data section. row = 0; while (!ifs.eof()) { std::getline(ifs, line, '\n'); boost::trim(line); // Each line of the @data section must be a CSV (except sparse data, which // we will handle later). So now we can tokenize the // CSV and parse it. The '?' representing a missing value is not allowed, // so if that occurs we throw an exception. We also throw an exception if // any piece of data does not match its type (categorical or numeric). // If the first character is {, it is sparse data, and we can just say this // is not handled for now... if (line[0] == '{') throw std::runtime_error("cannot yet parse sparse ARFF data"); // Tokenize the line. typedef boost::tokenizer> Tokenizer; boost::escaped_list_separator sep("\\", ",", "\""); Tokenizer tok(line, sep); size_t col = 0; std::stringstream token; for (Tokenizer::iterator it = tok.begin(); it != tok.end(); ++it) { // Check that we are not too many columns in. if (col >= matrix.n_rows) { std::stringstream error; error << "Too many columns in line " << (headerLines + row) << "."; throw std::runtime_error(error.str()); } // What should this token be? if (info.Type(col) == Datatype::categorical) { // Strip spaces before mapping. std::string token = *it; boost::trim(token); matrix(col, row) = info.template MapString(token, col); // We load transposed. } else if (info.Type(col) == Datatype::numeric) { // Attempt to read as numeric. token.clear(); token.str(*it); eT val = eT(0); token >> val; if (token.fail()) { // Check for NaN or inf. if (!arma::diskio::convert_naninf(val, token.str())) { // Okay, it's not NaN or inf. If it's '?', we issue a specific // error, otherwise we issue a general error. std::stringstream error; std::string tokenStr = token.str(); boost::trim(tokenStr); if (tokenStr == "?") error << "Missing values ('?') not supported, "; else error << "Parse error "; error << "at line " << (headerLines + row) << " token " << col << ": \"" << tokenStr << "\"."; throw std::runtime_error(error.str()); } } // If we made it to here, we have a value. matrix(col, row) = val; // We load transposed. } ++col; } ++row; } } } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/load_csv.cpp000066400000000000000000000061241315013601400211610ustar00rootroot00000000000000/** * @file load_csv.cpp * @author Tham Ngap Wei * * Implementation of LoadCSV functions. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
 */ #include "load_csv.hpp" namespace mlpack { namespace data { LoadCSV::LoadCSV(std::string file, bool fatal) : extension(Extension(file)), fatalIfOpenFail(fatal), fileName(std::move(file)), inFile(fileName) { CanOpen(); } bool LoadCSV::CanOpen() { if(!inFile.is_open()) { if(fatalIfOpenFail) { Log::Fatal << "Cannot open file '" << fileName << "'. " << std::endl; } else { Log::Warn << "Cannot open file '" << fileName << "'; load failed." << std::endl; } return false; } inFile.unsetf(std::ios::skipws); return true; } size_t LoadCSV::ColSize() { // boost::tokenizer or strtok could do the same thing; we use Boost.Spirit // here because it makes for a nice example. using namespace boost::spirit; using bsi_type = boost::spirit::istream_iterator; using iter_type = boost::iterator_range; inFile.clear(); inFile.seekg(0, std::ios::beg); // spirit::qi requires iterators to be at least forward iterators, but // std::istream_iterator is an input iterator, so we use // boost::spirit::istream_iterator to overcome this problem. bsi_type begin(inFile); bsi_type end; size_t col = 0; // Boost.Spirit parsers can work with "actions" (functors); when the parser // finds a matching target, the functor is executed. auto findColSize = [&col](iter_type){ ++col; }; // qi::char_ consumes one character. // qi::char_(",\r\n") only consumes a "," or "\r" or "\n" character. // * means the parser (e.g. qi::char_) may consume [0, any number] of // characters. // ~ means negate, so ~qi::char_(",\r\n") consumes anything except ",\r\n". // parser % "," parses a string like "1,2,3,apple" (note that there is no // trailing comma). // qi::raw restricts the automatic attribute conversion of Boost.Spirit; // without it, the parser will try to convert the match to std::string, // which may cause a memory allocation (if the small string optimization // fails). // After we wrap the parser with qi::raw, the attribute (the data accepted // by the functor) becomes a boost::iterator_range, which can save many // memory allocations. qi::parse(begin, end, qi::raw[*~qi::char_(",\r\n")][findColSize] % ","); return col; } size_t LoadCSV::RowSize() { inFile.clear(); inFile.seekg(0, std::ios::beg); size_t row = 0; std::string line; while(std::getline(inFile, line)) { ++row; } return row; } boost::spirit::qi::rule LoadCSV::CreateCharRule() const { using namespace boost::spirit; if(extension == "csv" || extension == "txt") { return qi::raw[*~qi::char_(",\r\n")]; } else { return qi::raw[*~qi::char_("\t\r\n")]; } } } // namespace data } // namespace mlpack mlpack-2.2.5/src/mlpack/core/data/load_csv.hpp000066400000000000000000000243151315013601400211700ustar00rootroot00000000000000/** * @file load_csv.hpp * @author ThamNgapWei * * This is a CSV parser, used to parse the CSV file format. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_LOAD_CSV_HPP #define MLPACK_CORE_DATA_LOAD_CSV_HPP #include #include #include #include #include #include #include "extension.hpp" #include "format.hpp" #include "dataset_mapper.hpp" namespace mlpack { namespace data { /** * Load the CSV file. This class uses boost::spirit to implement the parser; * please refer to the following link * http://theboostcpplibraries.com/boost.spirit for a quick review.
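 *
 * A minimal usage sketch (assuming the DatasetInfo typedef from
 * dataset_mapper.hpp; illustrative only):
 * @code
 * LoadCSV loader("data.csv");
 * arma::mat matrix;
 * data::DatasetInfo info;
 * loader.Load(matrix, info, true); // Parse transposed, as mlpack expects.
 * @endcode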
*/ class LoadCSV { public: explicit LoadCSV(std::string file, bool fatal = false); template void Load(arma::Mat &inout, DatasetMapper &infoSet, bool transpose = true) { if(!CanOpen()) { return; } if(transpose) { TranposeParse(inout, infoSet); } else { NonTranposeParse(inout, infoSet); } } size_t ColSize(); size_t RowSize(); /** * Peek at the file to determine the number of rows and columns in the matrix, * assuming a non-transposed matrix. This will also take a first pass over * the data for DatasetMapper, if MapPolicy::NeedsFirstPass is true. The info * object will be re-initialized with the correct dimensionality. * * @param rows Variable to be filled with the number of rows. * @param cols Variable to be filled with the number of columns. * @param info DatasetMapper object to use for first pass. */ template void GetMatrixSize(size_t& rows, size_t& cols, DatasetMapper& info) { // Take a pass through the file. If the DatasetMapper policy requires it, // we will pass everything string through MapString(). This might be useful // if, e.g., the MapPolicy needs to find which dimensions are numeric or // categorical. // Reset to the start of the file. inFile.clear(); inFile.seekg(0, std::ios::beg); rows = 0; cols = 0; // First, count the number of rows in the file (this is the dimensionality). std::string line; while (std::getline(inFile, line)) { ++rows; } info = DatasetMapper(rows); // Now, jump back to the beginning of the file. inFile.clear(); inFile.seekg(0, std::ios::beg); rows = 0; while (std::getline(inFile, line)) { ++rows; if (rows == 1) { // Extract the number of columns. auto findColSize = [&cols](iter_type) { ++cols; }; boost::spirit::qi::phrase_parse(line.begin(), line.end(), CreateCharRule()[findColSize] % ",", boost::spirit::ascii::space); } // I guess this is technically a second pass, but that's ok... still the // same idea... if (MapPolicy::NeedsFirstPass) { // In this case we must pass everything we parse to the MapPolicy. auto firstPassMap = [&](const iter_type& iter) { std::string str(iter.begin(), iter.end()); if (str == "\t") str.clear(); boost::trim(str); info.template MapFirstPass(std::move(str), rows - 1); }; // Now parse the line. boost::spirit::qi::phrase_parse(line.begin(), line.end(), CreateCharRule()[firstPassMap] % ",", boost::spirit::ascii::space); } } } template void GetTransposeMatrixSize(size_t& rows, size_t& cols, DatasetMapper& info) { // Take a pass through the file. If the DatasetMapper policy requires it, // we will pass everything string through MapString(). This might be useful // if, e.g., the MapPolicy needs to find which dimensions are numeric or // categorical. // Reset to the start of the file. inFile.clear(); inFile.seekg(0, std::ios::beg); rows = 0; cols = 0; std::string line; while (std::getline(inFile, line)) { ++cols; if (cols == 1) { // Extract the number of dimensions. auto findRowSize = [&rows](iter_type) { ++rows; }; boost::spirit::qi::phrase_parse(line.begin(), line.end(), CreateCharRule()[findRowSize] % ",", boost::spirit::ascii::space); // Now that we know the dimensionality, initialize the DatasetMapper. info = DatasetMapper(rows); } // If we need to do a first pass for the DatasetMapper, do it. if (MapPolicy::NeedsFirstPass) { size_t dim = 0; // In this case we must pass everything we parse to the MapPolicy. auto firstPassMap = [&](const iter_type& iter) { std::string str(iter.begin(), iter.end()); if (str == "\t") str.clear(); boost::trim(str); info.template MapFirstPass(std::move(str), dim++); }; // Now parse the line. 
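 // (Note: phrase_parse() uses ascii::space as the skipper here, so stray
 // whitespace around tokens is ignored.)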
boost::spirit::qi::phrase_parse(line.begin(), line.end(), CreateCharRule()[firstPassMap] % ",", boost::spirit::ascii::space); } } } private: using iter_type = boost::iterator_range; struct ElemParser { // Return an int_parser if the type T is integral. template static typename std::enable_if::value, boost::spirit::qi::int_parser>::type Parser() { return boost::spirit::qi::int_parser(); } // Return a real_parser if T is a floating point type. template static typename std::enable_if::value, boost::spirit::qi::real_parser>::type Parser() { return boost::spirit::qi::real_parser(); } }; bool CanOpen(); template void NonTranposeParse(arma::Mat &inout, DatasetMapper &infoSet) { using namespace boost::spirit; // Get the size of the matrix. size_t rows, cols; GetMatrixSize(rows, cols, infoSet); // Set up output matrix. inout.set_size(rows, cols); size_t row = 0; size_t col = 0; // Reset file position. std::string line; inFile.clear(); inFile.seekg(0, std::ios::beg); auto setCharClass = [&](iter_type const &iter) { std::string str(iter.begin(), iter.end()); if (str == "\t") { str.clear(); } boost::trim(str); inout(row, col++) = infoSet.template MapString(std::move(str), row); }; auto charRule = CreateCharRule(); while (std::getline(inFile, line)) { // Parse the tokens of a line (e.g. "1,2,3,4"); every time the parser // matches a token, it executes the setCharClass functor. const bool canParse = qi::phrase_parse(line.begin(), line.end(), charRule[setCharClass] % ",", ascii::space); if (!canParse) { throw std::runtime_error("LoadCSV cannot parse categories"); } ++row; col = 0; } } template void TranposeParse(arma::Mat &inout, DatasetMapper &infoSet) { // Get matrix size. This also initializes infoSet correctly. size_t rows, cols; GetTransposeMatrixSize(rows, cols, infoSet); // Set the matrix size. inout.set_size(rows, cols); TranposeParseImpl(inout, infoSet); } template bool TranposeParseImpl(arma::Mat& inout, DatasetMapper& infoSet) { using namespace boost::spirit; size_t row = 0; size_t col = 0; std::string line; inFile.clear(); inFile.seekg(0, std::ios::beg); auto setCharClass = [&](iter_type const &iter) { // All parsed values must be mapped.
std::string str(iter.begin(), iter.end()); if (str == "\t") str.clear(); boost::trim(str); inout(row, col) = infoSet.template MapString(std::move(str), row); ++row; }; auto charRule = CreateCharRule(); while (std::getline(inFile, line)) { row = 0; // Parse the tokens of a line; every matched token executes the // setCharClass functor. // Assemble the rule. const bool canParse = qi::phrase_parse(line.begin(), line.end(), charRule[setCharClass] % ",", ascii::space); if(!canParse) { throw std::runtime_error("LoadCSV cannot parse categories"); } ++col; } return true; } template boost::spirit::qi::rule CreateNumRule() const { using namespace boost::spirit; // elemParser will generate an integer or real parser based on T. auto elemParser = ElemParser::Parser(); // qi::skip specifies which characters to skip; here, elemParser parses int // or double values, and we use qi::skip to skip spaces. // qi::omit omits the attribute of a Spirit parser (the type that is passed // into actions (functors)); if it is not omitted, the attribute may combine // with other attributes and change the resulting attribute type. // Input like 2-200 or 2DM would make the parser fail, so we use the // look-ahead parser (&) to make sure that the next character is "," or end // of line (qi::eol) or end of input (qi::eoi). The look-ahead parser does // not consume any input or generate any attribute. if(extension == "csv" || extension == "txt") { return elemParser >> &(qi::lit(",") | qi::eol | qi::eoi); } else { return elemParser >> &(qi::lit("\t") | qi::eol | qi::eoi); } } boost::spirit::qi::rule CreateCharRule() const; std::string extension; bool fatalIfOpenFail; std::string fileName; std::ifstream inFile; }; } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/load_impl.hpp000066400000000000000000000273711315013601400213430ustar00rootroot00000000000000/** * @file load_impl.hpp * @author Ryan Curtin * * Implementation of the templatized Load() function defined in load.hpp. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_LOAD_IMPL_HPP #define MLPACK_CORE_DATA_LOAD_IMPL_HPP // In case it hasn't already been included.
#include "load.hpp" #include "load_csv.hpp" #include "extension.hpp" #include #include #include #include "load_csv.hpp" #include "load.hpp" #include "extension.hpp" #include #include #include #include "load_arff.hpp" namespace mlpack { namespace data { namespace details{ template std::vector ToTokens(Tokenizer &lineTok) { std::vector tokens; std::transform(std::begin(lineTok), std::end(lineTok), std::back_inserter(tokens), [&tokens](std::string const &str) { std::string trimmedToken(str); boost::trim(trimmedToken); return std::move(trimmedToken); }); return tokens; } inline void TransposeTokens(std::vector> const &input, std::vector &output, size_t index) { output.clear(); for(size_t i = 0; i != input.size(); ++i) { output.emplace_back(input[i][index]); } } } //namespace details template bool inline inplace_transpose(arma::Mat& X) { try { X = arma::trans(X); return false; } catch (std::bad_alloc&) { #if (ARMA_VERSION_MAJOR >= 4) || \ ((ARMA_VERSION_MAJOR == 3) && (ARMA_VERSION_MINOR >= 930)) arma::inplace_trans(X, "lowmem"); return true; #else Log::Fatal << "data::Load(): inplace_trans() is only available on Armadillo" << " 3.930 or higher. Ran out of memory to transpose matrix." << std::endl; return false; #endif } } // Load column vector. template bool Load(const std::string& filename, arma::Col& vec, const bool fatal) { return Load(filename, vec, fatal, false); } // Load row vector. template bool Load(const std::string& filename, arma::Row& rowvec, const bool fatal) { return Load(filename, rowvec, fatal, false); } template bool Load(const std::string& filename, arma::Mat& matrix, const bool fatal, const bool transpose) { Timer::Start("loading_data"); // Get the extension. std::string extension = Extension(filename); // Catch nonexistent files by opening the stream ourselves. std::fstream stream; #ifdef _WIN32 // Always open in binary mode on Windows. stream.open(filename.c_str(), std::fstream::in | std::fstream::binary); #else stream.open(filename.c_str(), std::fstream::in); #endif if (!stream.is_open()) { Timer::Stop("loading_data"); if (fatal) Log::Fatal << "Cannot open file '" << filename << "'. " << std::endl; else Log::Warn << "Cannot open file '" << filename << "'; load failed." << std::endl; return false; } bool unknownType = false; arma::file_type loadType; std::string stringType; if (extension == "csv" || extension == "tsv") { loadType = arma::diskio::guess_file_type(stream); if (loadType == arma::csv_ascii) { if (extension == "tsv") Log::Warn << "'" << filename << "' is comma-separated, not " "tab-separated!" << std::endl; stringType = "CSV data"; } else if (loadType == arma::raw_ascii) // .csv file can be tsv. { if (extension == "csv") { // We should issue a warning, but we don't want to issue the warning if // there is only one column in the CSV (since there will be no commas // anyway, and it will be detected as arma::raw_ascii). const std::streampos pos = stream.tellg(); std::string line; std::getline(stream, line, '\n'); boost::trim(line); // Reset stream position. stream.seekg(pos); // If there are no spaces or whitespace in the line, then we shouldn't // print the warning. if ((line.find(' ') != std::string::npos) || (line.find('\t') != std::string::npos)) { Log::Warn << "'" << filename << "' is not a standard csv file." << std::endl; } } stringType = "raw ASCII formatted data"; } else { unknownType = true; loadType = arma::raw_binary; // Won't be used; prevent a warning. 
stringType = ""; } } else if (extension == "txt") { // This could be raw ASCII or Armadillo ASCII (ASCII with size header). // We'll let Armadillo do its guessing (although we have to check if it is // arma_ascii ourselves) and see what we come up with. // This is taken from load_auto_detect() in diskio_meat.hpp const std::string ARMA_MAT_TXT = "ARMA_MAT_TXT"; //char* rawHeader = new char[ARMA_MAT_TXT.length() + 1]; std::string rawHeader(ARMA_MAT_TXT.length(), '\0'); std::streampos pos = stream.tellg(); stream.read(&rawHeader[0], std::streamsize(ARMA_MAT_TXT.length())); stream.clear(); stream.seekg(pos); // Reset stream position after peeking. if (rawHeader == ARMA_MAT_TXT) { loadType = arma::arma_ascii; stringType = "Armadillo ASCII formatted data"; } else // It's not arma_ascii. Now we let Armadillo guess. { loadType = arma::diskio::guess_file_type(stream); if (loadType == arma::raw_ascii) // Raw ASCII (space-separated). stringType = "raw ASCII formatted data"; else if (loadType == arma::csv_ascii) // CSV can be .txt too. stringType = "CSV data"; else // Unknown .txt... we will throw an error. unknownType = true; } } else if (extension == "bin") { // This could be raw binary or Armadillo binary (binary with header). We // will check to see if it is Armadillo binary. const std::string ARMA_MAT_BIN = "ARMA_MAT_BIN"; std::string rawHeader(ARMA_MAT_BIN.length(), '\0'); std::streampos pos = stream.tellg(); stream.read(&rawHeader[0], std::streamsize(ARMA_MAT_BIN.length())); stream.clear(); stream.seekg(pos); // Reset stream position after peeking. if (rawHeader == ARMA_MAT_BIN) { stringType = "Armadillo binary formatted data"; loadType = arma::arma_binary; } else // We can only assume it's raw binary. { stringType = "raw binary formatted data"; loadType = arma::raw_binary; } } else if (extension == "pgm") { loadType = arma::pgm_binary; stringType = "PGM data"; } else if (extension == "h5" || extension == "hdf5" || extension == "hdf" || extension == "he5") { #ifdef ARMA_USE_HDF5 loadType = arma::hdf5_binary; stringType = "HDF5 data"; #if ARMA_VERSION_MAJOR == 4 && \ (ARMA_VERSION_MINOR >= 300 && ARMA_VERSION_MINOR <= 400) Timer::Stop("loading_data"); if (fatal) Log::Fatal << "Attempted to load '" << filename << "' as HDF5 data, but " << "Armadillo 4.300.0 through Armadillo 4.400.1 are known to have " << "bugs and one of these versions is in use. Load failed." << std::endl; else Log::Warn << "Attempted to load '" << filename << "' as HDF5 data, but " << "Armadillo 4.300.0 through Armadillo 4.400.1 are known to have " << "bugs and one of these versions is in use. Load failed." << std::endl; return false; #endif #else Timer::Stop("loading_data"); if (fatal) Log::Fatal << "Attempted to load '" << filename << "' as HDF5 data, but " << "Armadillo was compiled without HDF5 support. Load failed." << std::endl; else Log::Warn << "Attempted to load '" << filename << "' as HDF5 data, but " << "Armadillo was compiled without HDF5 support. Load failed." << std::endl; return false; #endif } else // Unknown extension... { unknownType = true; loadType = arma::raw_binary; // Won't be used; prevent a warning. stringType = ""; } // Provide error if we don't know the type. if (unknownType) { Timer::Stop("loading_data"); if (fatal) Log::Fatal << "Unable to detect type of '" << filename << "'; " << "incorrect extension?" << std::endl; else Log::Warn << "Unable to detect type of '" << filename << "'; load failed." << " Incorrect extension?" 
<< std::endl; return false; } // Try to load the file; but if it's raw_binary, it could be a problem. if (loadType == arma::raw_binary) Log::Warn << "Loading '" << filename << "' as " << stringType << "; " << "but this may not be the actual filetype!" << std::endl; else Log::Info << "Loading '" << filename << "' as " << stringType << ". " << std::flush; // We can't use the stream if the type is HDF5. bool success; if (loadType != arma::hdf5_binary) success = matrix.load(stream, loadType); else success = matrix.load(filename, loadType); if (!success) { Log::Info << std::endl; Timer::Stop("loading_data"); if (fatal) Log::Fatal << "Loading from '" << filename << "' failed." << std::endl; else Log::Warn << "Loading from '" << filename << "' failed." << std::endl; return false; } else Log::Info << "Size is " << (transpose ? matrix.n_cols : matrix.n_rows) << " x " << (transpose ? matrix.n_rows : matrix.n_cols) << ".\n"; // Now transpose the matrix, if necessary. Armadillo loads HDF5 matrices // transposed, so we have to work around that. if (transpose && loadType != arma::hdf5_binary) { inplace_transpose(matrix); } else if (!transpose && loadType == arma::hdf5_binary) { inplace_transpose(matrix); } Timer::Stop("loading_data"); // Finally, return the success indicator. return success; } // Load with mappings. Unfortunately we have to implement this ourselves. template bool Load(const std::string& filename, arma::Mat& matrix, DatasetMapper& info, const bool fatal, const bool transpose) { // Get the extension and load as necessary. Timer::Start("loading_data"); // Get the extension. const std::string extension = Extension(filename); if (extension == "csv" || extension == "tsv" || extension == "txt") { LoadCSV loader(filename, fatal); loader.Load(matrix, info, transpose); } else if (extension == "arff") { Log::Info << "Loading '" << filename << "' as ARFF dataset. " << std::flush; try { LoadARFF(filename, matrix, info); // We transpose by default. So, un-transpose if necessary... if (!transpose) inplace_transpose(matrix); } catch (std::exception& e) { if (fatal) Log::Fatal << e.what() << std::endl; else Log::Warn << e.what() << std::endl; } } else { // The type is unknown. Timer::Stop("loading_data"); if (fatal) Log::Fatal << "Unable to detect type of '" << filename << "'; " << "incorrect extension?" << std::endl; else Log::Warn << "Unable to detect type of '" << filename << "'; load failed." << " Incorrect extension?" << std::endl; return false; } Log::Info << "Size is " << (transpose ? matrix.n_cols : matrix.n_rows) << " x " << (transpose ? matrix.n_rows : matrix.n_cols) << ".\n"; Timer::Stop("loading_data"); return true; } } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/load_model_impl.hpp000066400000000000000000000057371315013601400225250ustar00rootroot00000000000000/** * @file load_model_impl.hpp * @author Ryan Curtin * * Implementation of model-specific Load() function. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_LOAD_MODEL_IMPL_HPP #define MLPACK_CORE_DATA_LOAD_MODEL_IMPL_HPP // In case it hasn't already been included. 
#include "load.hpp" #include #include #include "extension.hpp" #include #include #include #include #include #include #include #include "serialization_shim.hpp" namespace mlpack { namespace data { // Load a model from file. template bool Load(const std::string& filename, const std::string& name, T& t, const bool fatal, format f) { if (f == format::autodetect) { std::string extension = Extension(filename); if (extension == "xml") f = format::xml; else if (extension == "bin") f = format::binary; else if (extension == "txt") f = format::text; else { if (fatal) Log::Fatal << "Unable to detect type of '" << filename << "'; incorrect" << " extension?" << std::endl; else Log::Warn << "Unable to detect type of '" << filename << "'; load " << "failed. Incorrect extension?" << std::endl; return false; } } // Now load the given format. std::ifstream ifs; #ifdef _WIN32 // Open non-text in binary mode on Windows. if (f == format::binary) ifs.open(filename, std::ifstream::in | std::ifstream::binary); else ifs.open(filename, std::ifstream::in); #else ifs.open(filename, std::ifstream::in); #endif if (!ifs.is_open()) { if (fatal) Log::Fatal << "Unable to open file '" << filename << "' to load object '" << name << "'." << std::endl; else Log::Warn << "Unable to open file '" << filename << "' to load object '" << name << "'." << std::endl; return false; } try { if (f == format::xml) { boost::archive::xml_iarchive ar(ifs); ar >> CreateNVP(t, name); } else if (f == format::text) { boost::archive::text_iarchive ar(ifs); ar >> CreateNVP(t, name); } else if (f == format::binary) { boost::archive::binary_iarchive ar(ifs); ar >> CreateNVP(t, name); } return true; } catch (boost::archive::archive_exception& e) { if (fatal) Log::Fatal << e.what() << std::endl; else Log::Warn << e.what() << std::endl; return false; } } } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/map_policies/000077500000000000000000000000001315013601400213245ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/data/map_policies/CMakeLists.txt000066400000000000000000000007201315013601400240630ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES increment_policy.hpp missing_policy.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/data/map_policies/datatype.hpp000066400000000000000000000017241315013601400236540ustar00rootroot00000000000000/** * @file missing_policy.hpp * @author Keon Kim * * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_MAP_POLICIES_DATATYPE_HPP #define MLPACK_CORE_DATA_MAP_POLICIES_DATATYPE_HPP #include namespace mlpack { namespace data { /** * The Datatype enum specifies the types of data mlpack algorithms can use. * The vast majority of mlpack algorithms can only use numeric data (i.e. * float/double/etc.), but some algorithms can use categorical data, specified * via this Datatype enum and the DatasetMapper class. 
 */ enum Datatype : bool /* bool is all the precision we need for two types */ { numeric = 0, categorical = 1 }; } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/map_policies/increment_policy.hpp000066400000000000000000000141631315013601400254050ustar00rootroot00000000000000/** * @file increment_policy.hpp * @author Keon Kim * * Default incrementing mapping policy for dataset info. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_MAP_POLICIES_INCREMENT_POLICY_HPP #define MLPACK_CORE_DATA_MAP_POLICIES_INCREMENT_POLICY_HPP #include #include #include #include namespace mlpack { namespace data { /** * IncrementPolicy is used as a helper class for DatasetMapper. It defines how * strings should be mapped. The purpose of this policy is to map an entire * dimension as categorical if one of the variables in that dimension turns * out to be categorical. * IncrementPolicy maps strings to incrementing unsigned integers (size_t). * The first string to be mapped will be mapped to 0, the next to 1 and so on. */ class IncrementPolicy { public: // typedef of MappedType using MappedType = size_t; //! We do need a first pass over the data to set the dimension types right. static const bool NeedsFirstPass = true; /** * Determine if the dimension is numeric or categorical. */ template void MapFirstPass(const std::string& string, const size_t dim, std::vector& types) { if (types[dim] == Datatype::categorical) { // No need to check; it's already categorical. return; } // Otherwise we need to attempt to read the value. If the read fails, the // dimension is categorical; otherwise we leave it at the default of // numeric. std::stringstream token; token.str(string); T val; token >> val; if (token.fail() || !token.eof()) { // Parsing failed; the dimension is categorical. types[dim] = Datatype::categorical; } } /** * Given the string and the dimension to which it belongs, and the maps * and types given by the DatasetMapper class, returns its numeric mapping. * If no mapping yet exists, the string is added to the list of mappings for * the given dimension. This function is used as a helper function for the * DatasetMapper class. * * @tparam MapType Type of unordered_map that contains mapped value pairs * @param string String to find/create mapping for. * @param dimension Index of the dimension of the string. * @param maps Unordered map given by the DatasetMapper. * @param types Vector containing the type information about each dimension. */ template T MapString(const std::string& string, const size_t dimension, MapType& maps, std::vector& types) { // If we are in a categorical dimension we already know we need to map. if (types[dimension] == Datatype::numeric) { // Check if this string needs to be mapped or if it can be read // directly as a number. This will be true if nothing else in this // dimension has yet been mapped, but this can't be read as a number. std::stringstream token; token.str(string); T val; token >> val; if (!token.fail() && token.eof()) { // We can return what we have. return val; } } // The token must be mapped. // If this condition is true, either we have no mapping for the given string // or we have no mappings for the given dimension at all. In either case, // we create a mapping.
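 // (Here maps[dimension].first is a string <-> value bimap, and
 // maps[dimension].second counts how many mappings exist in this dimension.)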
if (maps.count(dimension) == 0 || maps[dimension].first.left.count(string) == 0) { // This string does not exist yet. size_t& numMappings = maps[dimension].second; // Change type of the feature to categorical. if (numMappings == 0) types[dimension] = Datatype::categorical; typedef boost::bimap::value_type PairType; maps[dimension].first.insert(PairType(string, numMappings)); return T(numMappings++); } else { // This string already exists in the mapping. return maps[dimension].first.left.at(string); } } /** * MapTokens turns a vector of strings into numeric variables and puts them * into a given matrix. It is used as a helper function when trying to load * files. Each dimension's tokens are given to this function. If one of the * tokens turns out to be a string, all the tokens should be mapped using the * MapString() function. * * @tparam eT Type of armadillo matrix. * @tparam MapType Type of unordered_map that contains mapped value pairs. * @param tokens Vector of variables inside a dimension. * @param row Position of the given tokens. * @param matrix Matrix to save the data into. * @param maps Maps given by the DatasetMapper class. * @param types Types of each dimension given by the DatasetMapper class. */ template void MapTokens(const std::vector& tokens, size_t& row, arma::Mat& matrix, MapType& maps, std::vector& types) { auto notNumber = [](const std::string& str) { eT val(0); std::stringstream token; token.str(str); token >> val; return token.fail(); }; const bool notNumeric = std::any_of(std::begin(tokens), std::end(tokens), notNumber); if (notNumeric) { for (size_t i = 0; i != tokens.size(); ++i) { const eT val = static_cast(this->MapString(tokens[i], row, maps, types)); matrix.at(row, i) = val; } } else { std::stringstream token; for (size_t i = 0; i != tokens.size(); ++i) { token.str(tokens[i]); token >> matrix.at(row, i); token.clear(); } } } }; // class IncrementPolicy } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/map_policies/missing_policy.hpp000066400000000000000000000142061315013601400250700ustar00rootroot00000000000000/** * @file missing_policy.hpp * @author Keon Kim * * Missing map policy for dataset info. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_MAP_POLICIES_MISSING_POLICY_HPP #define MLPACK_CORE_DATA_MAP_POLICIES_MISSING_POLICY_HPP #include #include #include #include #include namespace mlpack { namespace data { /** * MissingPolicy is used as a helper class for DatasetMapper. It defines how * strings should be mapped. The purpose of this policy is to map all * user-defined missing variables into the maps, so that users can decide what * to do with the corrupted data. User-defined missing variables are given by * the missingSet. Note that MissingPolicy does not change the types of * features. */ class MissingPolicy { public: // typedef of MappedType using MappedType = double; MissingPolicy() { // Nothing to initialize here. } /** * Create the MissingPolicy object with the given missingSet. Note that the * missingSet cannot be changed later; you will have to create a new * MissingPolicy object. * * @param missingSet Set of strings that should be mapped. */ explicit MissingPolicy(std::set missingSet) : missingSet(std::move(missingSet)) { // Nothing to initialize here. } //!
This doesn't need a first pass over the data to set up. static const bool NeedsFirstPass = false; /** * There is nothing for us to do here, but this is required by the MapPolicy * type. */ template void MapFirstPass(const std::string& /* string */, const size_t /* dim */) { // Nothing to do. } /** * Given the string and the dimension to which it belongs, and the maps and * types given by the DatasetMapper class, returns its numeric mapping. If no * mapping yet exists and the string is included in the missingSet, the * string is added to the list of mappings for the given dimension. This * function is used as a helper function for the DatasetMapper class. * * @tparam MapType Type of unordered_map that contains mapped value pairs * @param string String to find/create mapping for. * @param dimension Index of the dimension of the string. * @param maps Unordered map given by the DatasetMapper. * @param types Vector containing the type information about each dimension. */ template T MapString(const std::string& string, const size_t dimension, MapType& maps, std::vector& /* types */) { static_assert(std::numeric_limits::has_quiet_NaN == true, "Cannot use MissingPolicy with types where has_quiet_NaN() is false!"); // If we can load the string then there is no need for mapping. std::stringstream token; token.str(string); T t; token >> t; // Could be sped up by only doing this if we need to. // If extraction of the value fails, or if it is a value that is supposed to // be mapped, then do mapping. if (token.fail() || !token.eof() || missingSet.find(string) != std::end(missingSet)) { // Everything is mapped to NaN. However we must still keep track of // everything that we have mapped, so we add it to the maps if needed. if (maps.count(dimension) == 0 || maps[dimension].first.left.count(string) == 0) { // This string does not exist yet. typedef boost::bimap::value_type PairType; maps[dimension].first.insert(PairType(string, std::numeric_limits::quiet_NaN())); maps[dimension].second++; } return std::numeric_limits::quiet_NaN(); } else { // We can just return the value that we read. return t; } } /** * MapTokens turns a vector of strings into numeric variables and puts them * into a given matrix. It is used as a helper function when trying to load * files. Each dimension's tokens are given to this function. If one of the * tokens turns out to be a string or one of the missingSet's variables, only * the token responsible for it should be mapped using the MapString() * function. * * @tparam eT Type of armadillo matrix. * @tparam MapType Type of unordered_map that contains mapped value pairs. * @param tokens Vector of variables inside a dimension. * @param row Position of the given tokens. * @param matrix Matrix to save the data into. * @param maps Maps given by the DatasetMapper class. * @param types Types of each dimension given by the DatasetMapper class. */ template void MapTokens(const std::vector& tokens, size_t& row, arma::Mat& matrix, MapType& maps, std::vector& types) { // MissingPolicy only allows matrices of type double, because it uses NaN. static_assert(std::is_same::value, "You must use a double type matrix in order to apply MissingPolicy"); std::stringstream token; for (size_t i = 0; i != tokens.size(); ++i) { token.str(tokens[i]); token >> matrix.at(row, i); // If the token is not a number, map it; or if it is a number but is // included in the missingSet, map it.
if (token.fail() || missingSet.find(tokens[i]) != std::end(missingSet)) { const eT val = static_cast(this->MapString(tokens[i], row, maps, types)); matrix.at(row, i) = val; } token.clear(); } } private: // Note that missingSet and maps are different. // missingSet specifies which value/string should be mapped and may be a // superset of 'maps'. std::set missingSet; }; // class MissingPolicy } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/normalize_labels.hpp000066400000000000000000000036541315013601400227230ustar00rootroot00000000000000/** * @file normalize_labels.hpp * @author Ryan Curtin * * Often labels are not given as {0, 1, 2, ...} but instead {1, 2, ...} or even * {-1, 1} or otherwise. The purpose of this function is to normalize labels to * {0, 1, 2, ...} and provide a mapping back to those labels. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_NORMALIZE_LABELS_HPP #define MLPACK_CORE_DATA_NORMALIZE_LABELS_HPP #include namespace mlpack { namespace data { /** * Given a set of labels of a particular datatype, convert them to unsigned * labels in the range [0, n) where n is the number of different labels. Also, * a reverse mapping from the new label to the old value is stored in the * 'mapping' vector. * * @param labelsIn Input labels of arbitrary datatype. * @param labels Vector that unsigned labels will be stored in. * @param mapping Reverse mapping to convert new labels back to old labels. */ template void NormalizeLabels(const RowType& labelsIn, arma::Row& labels, arma::Col& mapping); /** * Given a set of labels that have been mapped to the range [0, n), map them * back to the original labels given by the 'mapping' vector. * * @param labels Set of normalized labels to convert. * @param mapping Mapping to use to convert labels. * @param labelsOut Vector to store new labels in. */ template void RevertLabels(const arma::Row& labels, const arma::Col& mapping, arma::Row& labelsOut); } // namespace data } // namespace mlpack // Include implementation. #include "normalize_labels_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/data/normalize_labels_impl.hpp000066400000000000000000000055251315013601400237430ustar00rootroot00000000000000/** * @file normalize_labels_impl.hpp * @author Ryan Curtin * * Implementation of label normalization functions; these are useful for mapping * labels to the range [0, n). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_NORMALIZE_LABELS_IMPL_HPP #define MLPACK_CORE_DATA_NORMALIZE_LABELS_IMPL_HPP // In case it hasn't been included yet. #include "normalize_labels.hpp" namespace mlpack { namespace data { /** * Given a set of labels of a particular datatype, convert them to unsigned * labels in the range [0, n) where n is the number of different labels. Also, * a reverse mapping from the new label to the old value is stored in the * 'mapping' vector. * * @param labelsIn Input labels of arbitrary datatype. * @param labels Vector that unsigned labels will be stored in. 
* @param mapping Reverse mapping to convert new labels back to old labels. */ template void NormalizeLabels(const RowType& labelsIn, arma::Row& labels, arma::Col& mapping) { // Loop over the input labels, and develop the mapping. We'll first naively // resize the mapping to the maximum possible size, and then when we fill it, // we'll resize it back down to its actual size. mapping.set_size(labelsIn.n_elem); labels.set_size(labelsIn.n_elem); size_t curLabel = 0; for (size_t i = 0; i < labelsIn.n_elem; ++i) { bool found = false; for (size_t j = 0; j < curLabel; ++j) { // Is the label already in the list of labels we have seen? if (labelsIn[i] == mapping[j]) { labels[i] = j; found = true; break; } } // Do we need to add this new label? if (!found) { mapping[curLabel] = labelsIn[i]; labels[i] = curLabel; ++curLabel; } } // Resize mapping back down to necessary size. mapping.resize(curLabel); } /** * Given a set of labels that have been mapped to the range [0, n), map them * back to the original labels given by the 'mapping' vector. * * @param labels Set of normalized labels to convert. * @param mapping Mapping to use to convert labels. * @param labelsOut Vector to store new labels in. */ template void RevertLabels(const arma::Row& labels, const arma::Col& mapping, arma::Row& labelsOut) { // We already have the mapping, so we just need to loop over each element. labelsOut.set_size(labels.n_elem); for (size_t i = 0; i < labels.n_elem; ++i) labelsOut[i] = mapping[labels[i]]; } } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/save.hpp000066400000000000000000000074221315013601400203340ustar00rootroot00000000000000/** * @file save.hpp * @author Ryan Curtin * * Save an Armadillo matrix to file. This is necessary because Armadillo does * not transpose matrices upon saving, and it allows us to give better error * output. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_SAVE_HPP #define MLPACK_CORE_DATA_SAVE_HPP #include #include // Includes Armadillo. #include #include "format.hpp" namespace mlpack { namespace data /** Functions to load and save matrices. */ { /** * Saves a matrix to file, guessing the filetype from the extension. This * will transpose the matrix at save time. If the filetype cannot be * determined, an error will be given. * * The supported types of files are the same as found in Armadillo: * * - CSV (csv_ascii), denoted by .csv, or optionally .txt * - ASCII (raw_ascii), denoted by .txt * - Armadillo ASCII (arma_ascii), also denoted by .txt * - PGM (pgm_binary), denoted by .pgm * - PPM (ppm_binary), denoted by .ppm * - Raw binary (raw_binary), denoted by .bin * - Armadillo binary (arma_binary), denoted by .bin * - HDF5 (hdf5_binary), denoted by .hdf5, .hdf, .h5, or .he5 * * If the file extension is not one of those types, an error will be given. If * the 'fatal' parameter is set to true, a std::runtime_error exception will be * thrown upon failure. If the 'transpose' parameter is set to true, the matrix * will be transposed before saving. Generally, because mlpack stores matrices * in a column-major format and most datasets are stored on disk as row-major, * this parameter should be left at its default value of 'true'. * * @param filename Name of file to save to. 
* @param matrix Matrix to save into file. * @param fatal If an error should be reported as fatal (default false). * @param transpose If true, transpose the matrix before saving. * @return Boolean value indicating success or failure of save. */ template bool Save(const std::string& filename, const arma::Mat& matrix, const bool fatal = false, bool transpose = true); /** * Saves a model to file, guessing the filetype from the extension, or, * optionally, saving the specified format. If automatic extension detection is * used and the filetype cannot be determined, and error will be given. * * The supported types of files are the same as what is supported by the * boost::serialization library: * * - text, denoted by .txt * - xml, denoted by .xml * - binary, denoted by .bin * * The format parameter can take any of the values in the 'format' enum: * 'format::autodetect', 'format::text', 'format::xml', and 'format::binary'. * The autodetect functionality operates on the file extension (so, "file.txt" * would be autodetected as text). * * The name parameter should be specified to indicate the name of the structure * to be saved. If Load() is later called on the generated file, the name used * to load should be the same as the name used for this call to Save(). * * If the parameter 'fatal' is set to true, then an exception will be thrown in * the event of a save failure. Otherwise, the method will return false and the * relevant error information will be printed to Log::Warn. */ template bool Save(const std::string& filename, const std::string& name, T& t, const bool fatal = false, format f = format::autodetect); } // namespace data } // namespace mlpack // Include implementation. #include "save_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/data/save_impl.hpp000066400000000000000000000161511315013601400213540ustar00rootroot00000000000000/** * @file save_impl.hpp * @author Ryan Curtin * * Implementation of save functionality. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_SAVE_IMPL_HPP #define MLPACK_CORE_DATA_SAVE_IMPL_HPP // In case it hasn't already been included. #include "save.hpp" #include "extension.hpp" #include #include #include #include #include "serialization_shim.hpp" namespace mlpack { namespace data { template bool Save(const std::string& filename, const arma::Mat& matrix, const bool fatal, bool transpose) { Timer::Start("saving_data"); // First we will try to discriminate by file extension. std::string extension = Extension(filename); if (extension == "") { Timer::Stop("saving_data"); if (fatal) Log::Fatal << "No extension given with filename '" << filename << "'; " << "type unknown. Save failed." << std::endl; else Log::Warn << "No extension given with filename '" << filename << "'; " << "type unknown. Save failed." << std::endl; return false; } // Catch errors opening the file. std::fstream stream; #ifdef _WIN32 // Always open in binary mode on Windows. stream.open(filename.c_str(), std::fstream::out | std::fstream::binary); #else stream.open(filename.c_str(), std::fstream::out); #endif if (!stream.is_open()) { Timer::Stop("saving_data"); if (fatal) Log::Fatal << "Cannot open file '" << filename << "' for writing. " << "Save failed." 
<< std::endl; else Log::Warn << "Cannot open file '" << filename << "' for writing; save " << "failed." << std::endl; return false; } bool unknownType = false; arma::file_type saveType; std::string stringType; if (extension == "csv") { saveType = arma::csv_ascii; stringType = "CSV data"; } else if (extension == "txt") { saveType = arma::raw_ascii; stringType = "raw ASCII formatted data"; } else if (extension == "bin") { saveType = arma::arma_binary; stringType = "Armadillo binary formatted data"; } else if (extension == "pgm") { saveType = arma::pgm_binary; stringType = "PGM data"; } else if (extension == "h5" || extension == "hdf5" || extension == "hdf" || extension == "he5") { #ifdef ARMA_USE_HDF5 saveType = arma::hdf5_binary; stringType = "HDF5 data"; #else Timer::Stop("saving_data"); if (fatal) Log::Fatal << "Attempted to save HDF5 data to '" << filename << "', but " << "Armadillo was compiled without HDF5 support. Save failed." << std::endl; else Log::Warn << "Attempted to save HDF5 data to '" << filename << "', but " << "Armadillo was compiled without HDF5 support. Save failed." << std::endl; return false; #endif } else { unknownType = true; saveType = arma::raw_binary; // Won't be used; prevent a warning. stringType = ""; } // Provide error if we don't know the type. if (unknownType) { Timer::Stop("saving_data"); if (fatal) Log::Fatal << "Unable to determine format to save to from filename '" << filename << "'. Save failed." << std::endl; else Log::Warn << "Unable to determine format to save to from filename '" << filename << "'. Save failed." << std::endl; return false; } // Try to save the file. Log::Info << "Saving " << stringType << " to '" << filename << "'." << std::endl; // Transpose the matrix. If we are saving HDF5, Armadillo already transposes // this on save, so we don't need to. if ((transpose && saveType != arma::hdf5_binary) || (!transpose && saveType == arma::hdf5_binary)) { arma::Mat tmp = trans(matrix); // We can't save with streams for HDF5. const bool success = (saveType == arma::hdf5_binary) ? tmp.quiet_save(filename, saveType) : tmp.quiet_save(stream, saveType); if (!success) { Timer::Stop("saving_data"); if (fatal) Log::Fatal << "Save to '" << filename << "' failed." << std::endl; else Log::Warn << "Save to '" << filename << "' failed." << std::endl; return false; } } else { // We can't save with streams for HDF5. const bool success = (saveType == arma::hdf5_binary) ? matrix.quiet_save(filename, saveType) : matrix.quiet_save(stream, saveType); if (!success) { Timer::Stop("saving_data"); if (fatal) Log::Fatal << "Save to '" << filename << "' failed." << std::endl; else Log::Warn << "Save to '" << filename << "' failed." << std::endl; return false; } } Timer::Stop("saving_data"); // Finally return success. return true; } //! Save a model to file. template bool Save(const std::string& filename, const std::string& name, T& t, const bool fatal, format f) { if (f == format::autodetect) { std::string extension = Extension(filename); if (extension == "xml") f = format::xml; else if (extension == "bin") f = format::binary; else if (extension == "txt") f = format::text; else { if (fatal) Log::Fatal << "Unable to detect type of '" << filename << "'; incorrect" << " extension? (allowed: xml/bin/txt)" << std::endl; else Log::Warn << "Unable to detect type of '" << filename << "'; save " << "failed. Incorrect extension? (allowed: xml/bin/txt)" << std::endl; return false; } } // Open the file to save to. 
std::ofstream ofs; #ifdef _WIN32 if (f == format::binary) // Open non-text types in binary mode on Windows. ofs.open(filename, std::ofstream::out | std::ofstream::binary); else ofs.open(filename, std::ofstream::out); #else ofs.open(filename, std::ofstream::out); #endif if (!ofs.is_open()) { if (fatal) Log::Fatal << "Unable to open file '" << filename << "' to save object '" << name << "'." << std::endl; else Log::Warn << "Unable to open file '" << filename << "' to save object '" << name << "'." << std::endl; return false; } try { if (f == format::xml) { boost::archive::xml_oarchive ar(ofs); ar << CreateNVP(t, name); } else if (f == format::text) { boost::archive::text_oarchive ar(ofs); ar << CreateNVP(t, name); } else if (f == format::binary) { boost::archive::binary_oarchive ar(ofs); ar << CreateNVP(t, name); } return true; } catch (boost::archive::archive_exception& e) { if (fatal) Log::Fatal << e.what() << std::endl; else Log::Warn << e.what() << std::endl; return false; } } } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/data/serialization_shim.hpp000066400000000000000000000412171315013601400232730ustar00rootroot00000000000000/** * @file serialization_shim.hpp * @author Ryan Curtin * * This file contains the necessary shims to make boost.serialization work with * classes that have a Serialize() method (instead of a serialize() method). * * This allows our mlpack naming conventions to remain intact, and only costs a * small amount of ridiculous template metaprogramming. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_SERIALIZATION_SHIM_HPP #define MLPACK_CORE_UTIL_SERIALIZATION_SHIM_HPP #include #include #include namespace mlpack { namespace data { // This gives us a HasSerializeCheck type (where U is a function pointer) // we can use with SFINAE to catch when a type has a Serialize() function. HAS_MEM_FUNC(Serialize, HasSerializeCheck); // Don't call this with a non-class. HasSerializeFunction::value is true if the // type T has a static or non-static Serialize() function. template struct HasSerializeFunction { static const bool value = // Non-static version. HasSerializeCheck::value || // Static version. HasSerializeCheck::value; }; template struct HasSerialize { // We have to handle the case where T isn't a class... typedef char yes[1]; typedef char no [2]; template struct check; template static yes& chk( // This matches classes. check>::type*, typename boost::enable_if>::type*>*); template static no& chk(...); // This matches non-classes. static const bool value = (sizeof(chk(0)) == sizeof(yes)); }; // Declare the shims we need. template struct FirstShim; template struct FirstArrayShim; template struct FirstNormalArrayShim; template struct SecondShim; template struct SecondArrayShim; template struct SecondNormalArrayShim; template struct PointerShim; /** * Call this function to produce a name-value pair; this is similar to * BOOST_SERIALIZATION_NVP(), but should be used for types that have a * Serialize() function (or contain a type that has a Serialize() function) * instead of a serialize() function. The template type should be automatically * deduced, and the two boost::enable_if<> parameters are automatically deduced * too. 
So usage looks like * * @code * MyType t; * CreateNVP(t, "my_name_for_t"); * @endcode * * Note that the second parameter, 'name', must be a valid XML identifier. * * This function does not return a boost::serialization::nvp object, but * instead a shim type (FirstShim). * * This particular overload is used by classes that have a Serialize() function. * * @param t Object to create NVP (name-value pair) with. * @param name Name of object (must be a valid XML identifier). */ template inline FirstShim CreateNVP( T& t, const std::string& name, typename boost::enable_if>::type* = 0) { return FirstShim(t, name); } /** * Call this function to produce a name-value pair; this is similar to * BOOST_SERIALIZATION_NVP(), but should be used for types that have a * Serialize() function (or contain a type that has a Serialize() function) * instead of a serialize() function. The template type should be automatically * deduced, and the two boost::enable_if<> parameters are automatically deduced * too. So usage looks like * * @code * MyType t; * CreateNVP(t, "my_name_for_t"); * @endcode * * Note that the second parameter, 'name', must be a valid XML identifier. * * This particular overload is used by classes that do not have a Serialize() * function (so, no shim is necessary) or primitive types that aren't pointers. * * @param t Object to create NVP (name-value pair) with. * @param name Name of object (must be a valid XML identifier). */ template inline #ifndef BOOST_NO_FUNCTION_TEMPLATE_ORDERING const // Imitate the boost::serialization make_nvp() function. #endif boost::serialization::nvp CreateNVP( T& t, const std::string& name, typename boost::disable_if>::type* = 0, typename boost::disable_if>::type* = 0) { return boost::serialization::make_nvp(name.c_str(), t); } /** * Call this function to produce a name-value pair; this is similar to * BOOST_SERIALIZATION_NVP(), but should be used for types that have a * Serialize() function (or contain a type that has a Serialize() function) * instead of a serialize() function. The template type should be automatically * deduced, and the two boost::enable_if<> parameters are automatically deduced * too. So usage looks like * * @code * MyType t; * CreateNVP(t, "my_name_for_t"); * @endcode * * Note that the second parameter, 'name', must be a valid XML identifier. * * This particular overload is used by pointers to classes that have a * Serialize() function. * * @param t Object to create NVP (name-value pair) with. * @param name Name of object (must be a valid XML identifier). */ template inline #ifndef BOOST_NO_FUNCTION_TEMPLATE_ORDERING const #endif boost::serialization::nvp*> CreateNVP( T*& t, const std::string& name, typename boost::enable_if>::type* = 0) { return boost::serialization::make_nvp(name.c_str(), reinterpret_cast*&>(t)); } /** * Call this function to produce a name-value pair; this is similar to * BOOST_SERIALIZATION_NVP(), but should be used for types that have a * Serialize() function (or contain a type that has a Serialize() function) * instead of a serialize() function. The template type should be automatically * deduced, and the two boost::enable_if<> parameters are automatically deduced * too. So usage looks like * * @code * MyType t; * CreateNVP(t, "my_name_for_t"); * @endcode * * Note that the second parameter, 'name', must be a valid XML identifier. * * This particular overload is used by pointers to classes that do not have a * Serialize() function, or pointers to non-classes. * * @param t Object to create NVP (name-value pair) with. 
* @param name Name of object (must be a valid XML identifier). */ template inline #ifndef BOOST_NO_FUNCTION_TEMPLATE_ORDERING const #endif boost::serialization::nvp CreateNVP( T*& t, const std::string& name, typename boost::disable_if>::type* = 0) { return boost::serialization::make_nvp(name.c_str(), t); } /** * Call this function to produce a name-value pair for an array; this is similar * to boost::serialization::make_array(), but provides a nicer wrapper, allows * types that have a Serialize() function, and allows you to give a name to your * array. This particular overload is used by classes that have a Serialize() * function. */ template inline FirstArrayShim CreateArrayNVP( T* t, const size_t len, const std::string& name, typename boost::enable_if>::type* = 0) { return FirstArrayShim(t, len, name); } /** * Call this function to produce a name-value pair for an array; this is similar * to boost::serialization::make_array(), but provides a nicer wrapper, allows * types that have a Serialize() function, and allows you to give a name to your * array. This particular overload is used by classes that do not have a * Serialize() function or primitive types. */ template inline FirstNormalArrayShim CreateArrayNVP( T* t, const size_t len, const std::string& name, typename boost::disable_if>::type* = 0) { return FirstNormalArrayShim(t, len, name); } /** * The first shim: simply holds the object and its name. This shim's purpose is * to be caught by our overloads of operator<<, operator&, and operator>>, which * then creates a second shim. */ template struct FirstShim { //! Construct the first shim with the given object and name. FirstShim(T& t, const std::string& name) : t(t), name(name) { } T& t; const std::string& name; }; /** * A first shim for arrays. This shim's purpose is to be caught by our * overloads of operator<<, operator&, and operator>>, which then creates a * second shim. */ template struct FirstArrayShim { //! Construct the first shim with the given objects, length, and name. FirstArrayShim(T* t, const size_t len, const std::string& name) : t(t), len(len), name(name) { } T* t; const size_t len; const std::string& name; }; /** * A first shim for arrays without a Serialize() method. This shim's purpose is * to be caught by our overloads of operator<<, operator&, and operator>>, which * then creates a second shim. */ template struct FirstNormalArrayShim { //! Construct the first shim with the given objects, length, and name. FirstNormalArrayShim(T* t, const size_t len, const std::string& name) : t(t), len(len), name(name) { } T* t; const size_t len; const std::string& name; }; /** * The second shim: wrap the call to Serialize() inside of a serialize() * function, so that an archive type can call serialize() on a SecondShim object * and this gets forwarded correctly to our object's Serialize() function. */ template struct SecondShim { //! Construct the second shim. The name isn't necessary for this shim. SecondShim(T& t) : t(t) { } //! A wrapper for t.Serialize(). template void serialize(Archive& ar, const unsigned int version) { t.Serialize(ar, version); } T& t; }; /** * A shim for objects in an array; this is basically like the SecondShim, but * for arrays that hold objects that have Serialize() methods instead of * serialize() methods. */ template struct SecondArrayShim { //! Construct the shim. SecondArrayShim(T* t, const size_t len) : t(t), len(len) { } //! A wrapper for Serialize() for each element. 
template void serialize(Archive& ar, const unsigned int /* version */) { // Serialize each element, using the shims we already have. for (size_t i = 0; i < len; ++i) ar & CreateNVP(t[i], "item"); } T* t; const size_t len; }; /** * A shim for objects in an array which do not have a Serialize() function. * This is like the SecondShim class. */ template struct SecondNormalArrayShim { //! Construct the shim. SecondNormalArrayShim(T* t, const size_t len) : t(t), len(len) { } //! A wrapper for make_array(). template void serialize(Archive& ar, const unsigned int /* version */) { ar & boost::serialization::make_array(t, len); } T* t; const size_t len; }; /** * A shim for pointers. Only the type of this shim is used, so it is created * with a static_cast<>, and then later static_cast<>ed back to the original * type. */ template struct PointerShim : public T { }; /** * Catch when we call operator<< with a FirstShim object. In this case, we make * the second-level shim and use it. Note that this second-level shim can be * used as an lvalue, which is what's necessary for this whole thing to work. * The first-level shim can't be an lvalue (this is why we need two levels of * shims). */ template Archive& operator<<(Archive& ar, FirstShim t) { SecondShim sh(t.t); return (ar << boost::serialization::make_nvp(t.name.c_str(), sh)); } /** * Catch when we call operator& with a FirstShim object. In this case, we make * the second-level shim and use it. Note that this second-level shim can be * used as an lvalue, which is what's necessary for this whole thing to work. * The first-level shim can't be an lvalue (this is why we need two levels of * shims). */ template Archive& operator&(Archive& ar, FirstShim t) { SecondShim sh(t.t); return (ar & boost::serialization::make_nvp(t.name.c_str(), sh)); } /** * Catch when we call operator>> with a FirstShim object. In this case, we make * the second-level shim and use it. Note that this second-level shim can be * used as an lvalue, which is what's necessary for this whole thing to work. * The first-level shim can't be an lvalue (this is why we need two levels of * shims). */ template Archive& operator>>(Archive& ar, FirstShim t) { SecondShim sh(t.t); return (ar >> boost::serialization::make_nvp(t.name.c_str(), sh)); } /** * Catch when we call operator<< with a FirstArrayShim object. In this case, we * make the second-level array shim and use it. Note that this second-level * shim can be used as an lvalue, which is what's necessary for this whole thing * to work. The first-level shim can't be an lvalue (this is why we need two * levels of shims). */ template Archive& operator<<(Archive& ar, FirstArrayShim t) { SecondArrayShim sh(t.t, t.len); return (ar << boost::serialization::make_nvp(t.name.c_str(), sh)); } /** * Catch when we call operator& with a FirstArrayShim object. In this case, we * make the second-level array shim and use it. Note that this second-level * shim can be used as an lvalue, which is what's necessary for this whole thing * to work. The first-level shim can't be an lvalue (this is why we need two * levels of shims). */ template Archive& operator&(Archive& ar, FirstArrayShim t) { SecondArrayShim sh(t.t, t.len); return (ar & boost::serialization::make_nvp(t.name.c_str(), sh)); } /** * Catch when we call operator>> with a FirstArrayShim object. In this case, we * make the second-level array shim and use it. Note that this second-level * shim can be used as an lvalue, which is what's necessary for this whole thing * to work. 
The first-level shim can't be an lvalue (this is why we need two * levels of shims). */ template Archive& operator>>(Archive& ar, FirstArrayShim t) { SecondArrayShim sh(t.t, t.len); return (ar >> boost::serialization::make_nvp(t.name.c_str(), sh)); } /** * Catch when we call operator<< with a FirstNormalArrayShim object. In this * case, we make the second-level array shim and use it. Note that this * second-level shim can be used as an lvalue, which is necessary if we want to * use make_nvp() safely. The first-level shim can't be an lvalue (this is why * we need two levels of shims). */ template Archive& operator<<(Archive& ar, FirstNormalArrayShim t) { SecondNormalArrayShim sh(t.t, t.len); return (ar << boost::serialization::make_nvp(t.name.c_str(), sh)); } /** * Catch when we call operator& with a FirstNormalArrayShim object. In this * case, we make the second-level array shim and use it. Note that this * second-level shim can be used as an lvalue, which is necessary if we want to * use make_nvp() safely. The first-level shim can't be an lvalue (this is why * we need two levels of shims). */ template Archive& operator&(Archive& ar, FirstNormalArrayShim t) { SecondNormalArrayShim sh(t.t, t.len); return (ar & boost::serialization::make_nvp(t.name.c_str(), sh)); } /** * Catch when we call operator>> with a FirstNormalArrayShim object. In this * case, we make the second-level array shim and use it. Note that this * second-level shim can be used as an lvalue, which is necessary if we want to * use make_nvp() safely. The first-level shim can't be an lvalue (this is why * we need two levels of shims). */ template Archive& operator>>(Archive& ar, FirstNormalArrayShim t) { SecondNormalArrayShim sh(t.t, t.len); return (ar >> boost::serialization::make_nvp(t.name.c_str(), sh)); } } // namespace data } // namespace mlpack namespace boost { namespace serialization { /** * Catch a call to serialize() with a PointerShim, and call the Serialize() * function directly. */ template inline void serialize(Archive& ar, mlpack::data::PointerShim& t, const BOOST_PFTO unsigned int version) { T* tptr = reinterpret_cast(&t); tptr->Serialize(ar, version); } } // namespace serialization } // namespace boost #endif mlpack-2.2.5/src/mlpack/core/data/serialization_template_version.hpp000066400000000000000000000024041315013601400257060ustar00rootroot00000000000000/** * @file serialization_template_version.hpp * @author Ryan Curtin * * A better version of the BOOST_CLASS_VERSION() macro that supports templated * classes. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DATA_SERIALIZATION_TEMPLATE_VERSION_HPP #define MLPACK_CORE_DATA_SERIALIZATION_TEMPLATE_VERSION_HPP /** * Use this like BOOST_CLASS_VERSION(), but for templated classes. The first * argument is the signature for the template. 
Here is an example for math::Range:
 *
 * BOOST_TEMPLATE_CLASS_VERSION(template<typename eT>, math::Range<eT>, 1);
 */
#define BOOST_TEMPLATE_CLASS_VERSION(SIGNATURE, T, N) \
namespace boost { \
namespace serialization { \
SIGNATURE \
struct version<T> \
{ \
  typedef mpl::int_<N> type; \
  typedef mpl::integral_c_tag tag; \
  BOOST_STATIC_CONSTANT(int, value = version<T>::type::value); \
  BOOST_MPL_ASSERT(( \
      boost::mpl::less< \
          boost::mpl::int_<N>, \
          boost::mpl::int_<256> \
      > \
  )); \
}; \
} \
}

#endif
mlpack-2.2.5/src/mlpack/core/data/split_data.hpp000066400000000000000000000150071315013601400215200ustar00rootroot00000000000000/**
 * @file split_data.hpp
 * @author Tham Ngap Wei, Keon Kim
 *
 * Defines Split(), a utility function to split a dataset into a
 * training set and a test set.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_DATA_SPLIT_DATA_HPP
#define MLPACK_CORE_DATA_SPLIT_DATA_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace data {

/**
 * Given an input dataset and labels, split into a training set and test set.
 * Example usage below.  This overload places the split dataset into the four
 * output parameters given (trainData, testData, trainLabel, and testLabel).
 *
 * @code
 * arma::mat input = loadData();
 * arma::Row<size_t> label = loadLabel();
 * arma::mat trainData;
 * arma::mat testData;
 * arma::Row<size_t> trainLabel;
 * arma::Row<size_t> testLabel;
 * math::RandomSeed(100); // Set the seed if you like.
 *
 * // Split the dataset into a training and test set, with 30% of the data
 * // being held out for the test set.
 * Split(input, label, trainData, testData, trainLabel, testLabel, 0.3);
 * @endcode
 *
 * @param input Input dataset to split.
 * @param inputLabel Input labels to split.
 * @param trainData Matrix to store training data into.
 * @param testData Matrix to store test data into.
 * @param trainLabel Vector to store training labels into.
 * @param testLabel Vector to store test labels into.
 * @param testRatio Fraction of the dataset to use for the test set (between 0
 *     and 1).
 */
template<typename T, typename U>
void Split(const arma::Mat<T>& input,
           const arma::Row<U>& inputLabel,
           arma::Mat<T>& trainData,
           arma::Mat<T>& testData,
           arma::Row<U>& trainLabel,
           arma::Row<U>& testLabel,
           const double testRatio)
{
  const size_t testSize = static_cast<size_t>(input.n_cols * testRatio);
  const size_t trainSize = input.n_cols - testSize;
  trainData.set_size(input.n_rows, trainSize);
  testData.set_size(input.n_rows, testSize);
  trainLabel.set_size(trainSize);
  testLabel.set_size(testSize);

  const arma::Col<size_t> order = arma::shuffle(
      arma::linspace<arma::Col<size_t>>(0, input.n_cols - 1, input.n_cols));

  for (size_t i = 0; i != trainSize; ++i)
  {
    trainData.col(i) = input.col(order[i]);
    trainLabel(i) = inputLabel(order[i]);
  }
  for (size_t i = 0; i != testSize; ++i)
  {
    testData.col(i) = input.col(order[i + trainSize]);
    testLabel(i) = inputLabel(order[i + trainSize]);
  }
}

/**
 * Given an input dataset, split into a training set and test set.
 * Example usage below.  This overload places the split dataset into the two
 * output parameters given (trainData, testData).
 *
 * @code
 * arma::mat input = loadData();
 * arma::mat trainData;
 * arma::mat testData;
 * math::RandomSeed(100); // Set the seed if you like.
 *
 * // Split the dataset into a training and test set, with 30% of the data
 * // being held out for the test set.
 * Split(input, trainData, testData, 0.3);
 * @endcode
 *
 * @param input Input dataset to split.
* @param trainData Matrix to store training data into. * @param testData Matrix to store test data into. * @param testRatio Percentage of dataset to use for test set (between 0 and 1). */ template void Split(const arma::Mat& input, arma::Mat& trainData, arma::Mat& testData, const double testRatio) { const size_t testSize = static_cast(input.n_cols * testRatio); const size_t trainSize = input.n_cols - testSize; trainData.set_size(input.n_rows, trainSize); testData.set_size(input.n_rows, testSize); const arma::Col order = arma::shuffle(arma::linspace>(0, input.n_cols -1, input.n_cols)); for (size_t i = 0; i != trainSize; ++i) { trainData.col(i) = input.col(order[i]); } for (size_t i = 0; i != testSize; ++i) { testData.col(i) = input.col(order[i + trainSize]); } } /** * Given an input dataset and labels, split into a training set and test set. * Example usage below. This overload returns the split dataset as a std::tuple * with four elements: an arma::Mat containing the training data, an * arma::Mat containing the test data, an arma::Row containing the * training labels, and an arma::Row containing the test labels. * * @code * arma::mat input = loadData(); * arma::Row label = loadLabel(); * auto splitResult = Split(input, label, 0.2); * @endcode * * @param input Input dataset to split. * @param label Input labels to split. * @param testRatio Percentage of dataset to use for test set (between 0 and 1). * @return std::tuple containing trainData (arma::Mat), testData * (arma::Mat), trainLabel (arma::Row), and testLabel (arma::Row). */ template std::tuple, arma::Mat, arma::Row, arma::Row> Split(const arma::Mat& input, const arma::Row& inputLabel, const double testRatio) { arma::Mat trainData; arma::Mat testData; arma::Row trainLabel; arma::Row testLabel; Split(input, inputLabel, trainData, testData, trainLabel, testLabel, testRatio); return std::make_tuple(std::move(trainData), std::move(testData), std::move(trainLabel), std::move(testLabel)); } /** * Given an input dataset, split into a training set and test set. * Example usage below. This overload returns the split dataset as a std::tuple * with two elements: an arma::Mat containing the training data and an * arma::Mat containing the test data. * * @code * arma::mat input = loadData(); * auto splitResult = Split(input, 0.2); * @endcode * * @param input Input dataset to split. * @param testRatio Percentage of dataset to use for test set (between 0 and 1). * @return std::tuple containing trainData (arma::Mat) * and testData (arma::Mat). */ template std::tuple, arma::Mat> Split(const arma::Mat& input, const double testRatio) { arma::Mat trainData; arma::Mat testData; Split(input, trainData, testData, testRatio); return std::make_tuple(std::move(trainData), std::move(testData)); } } // namespace data } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/dists/000077500000000000000000000000001315013601400170755ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/dists/CMakeLists.txt000066400000000000000000000012701315013601400216350ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. 
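#
# For example, adding a hypothetical poisson_distribution.hpp /
# poisson_distribution.cpp pair to this directory would mean appending both
# file names to the SOURCES list below, exactly like the entries already
# there.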
set(SOURCES discrete_distribution.hpp discrete_distribution.cpp gaussian_distribution.hpp gaussian_distribution.cpp laplace_distribution.hpp laplace_distribution.cpp regression_distribution.hpp regression_distribution.cpp gamma_distribution.hpp gamma_distribution.cpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/dists/discrete_distribution.cpp000066400000000000000000000114251315013601400242050ustar00rootroot00000000000000/** * @file discrete_distribution.cpp * @author Ryan Curtin * * Implementation of DiscreteDistribution probability distribution. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "discrete_distribution.hpp" using namespace mlpack; using namespace mlpack::distribution; /** * Return a randomly generated observation according to the probability * distribution defined by this object. */ arma::vec DiscreteDistribution::Random() const { size_t dimension = probabilities.size(); arma::vec result(dimension); for (size_t d = 0; d < dimension; d++) { // Generate a random number. double randObs = math::Random(); double sumProb = 0; for (size_t obs = 0; obs < probabilities[d].n_elem; obs++) { if ((sumProb += probabilities[d][obs]) >= randObs) { result[d] = obs; break; } } if (sumProb > 1.0) { // This shouldn't happen. result[d] = probabilities[d].n_elem - 1; } } return result; } /** * Estimate the probability distribution directly from the given observations. */ void DiscreteDistribution::Train(const arma::mat& observations) { // Make sure the observations have same dimension as the probabilities. if (observations.n_rows != probabilities.size()) { throw std::invalid_argument("observations must have same dimensionality as " "the DiscreteDistribution object"); } // Get the dimension size of the distribution. const size_t dimensions = probabilities.size(); // Clear the old probabilities. for (size_t i = 0; i < dimensions; i++) probabilities[i].zeros(); // Iterate all the probabilities in each dimension for (size_t r = 0; r < observations.n_cols; ++r) { for (size_t i = 0; i < dimensions; ++i) { // Add the probability of each observation. The addition of 0.5 to the // observation is to turn the default flooring operation of the size_t // cast into a rounding observation. const size_t obs = size_t(observations(i, r) + 0.5); // Ensure that the observation is within the bounds. if (obs >= probabilities[i].n_elem) { std::ostringstream oss; oss << "observation " << r << " in dimension " << i << " (" << observations(i, r) << ") is invalid; must be in [0, " << probabilities[i].n_elem << "] for this distribution"; throw std::invalid_argument(oss.str()); } probabilities[i][obs]++; } } // Now normalize the distributions. for (size_t i = 0; i < dimensions; ++i) { double sum = accu(probabilities[i]); if (sum > 0) probabilities[i] /= sum; else // Force normalization. probabilities[i].fill(1.0 / probabilities[i].n_elem); } } /** * Estimate the probability distribution from the given observations when also * given probabilities that each observation is from this distribution. 
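 *
 * A rough usage sketch (the data here is purely illustrative): each column of
 * observations is one observation, and probObs[i] weights how much column i
 * contributes to the estimate.
 *
 * @code
 * DiscreteDistribution d(3); // One dimension, observations in {0, 1, 2}.
 * arma::mat observations("0 1 1 2");
 * arma::vec probObs("1.0 0.5 0.5 1.0");
 * d.Train(observations, probObs);
 * @endcode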
*/ void DiscreteDistribution::Train(const arma::mat& observations, const arma::vec& probObs) { // Make sure the observations have same dimension as the probabilities. if (observations.n_rows != probabilities.size()) { throw std::invalid_argument("observations must have same dimensionality as " "the DiscreteDistribution object"); } // Get the dimension size of the distribution. size_t dimensions = probabilities.size(); // Clear the old probabilities. for (size_t i = 0; i < dimensions; i++) probabilities[i].zeros(); // Ensure that the observation is within the bounds. for (size_t r = 0; r < observations.n_cols; r++) { for (size_t i = 0; i < dimensions; i++) { // Add the probability of each observation. The addition of 0.5 to the // observation is to turn the default flooring operation of the size_t cast // into a rounding observation. const size_t obs = size_t(observations(i, r) + 0.5); // Ensure that the observation is within the bounds. if (obs >= probabilities[i].n_elem) { std::ostringstream oss; oss << "observation " << r << " in dimension " << i << " (" << observations(i, r) << ") is invalid; must be in [0, " << probabilities[i].n_elem << "] for this distribution"; throw std::invalid_argument(oss.str()); } probabilities[i][obs] += probObs[r]; } } // Now normalize the distributions. for (size_t i = 0; i < dimensions; ++i) { double sum = accu(probabilities[i]); if (sum > 0) probabilities[i] /= sum; else // Force normalization. probabilities[i].fill(1.0 / probabilities[i].n_elem); } } mlpack-2.2.5/src/mlpack/core/dists/discrete_distribution.hpp000066400000000000000000000202101315013601400242020ustar00rootroot00000000000000/** * @file discrete_distribution.hpp * @author Ryan Curtin * * Implementation of the discrete distribution, where each discrete observation * has a given probability. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DISTRIBUTIONS_DISCRETE_DISTRIBUTION_HPP #define MLPACK_CORE_DISTRIBUTIONS_DISCRETE_DISTRIBUTION_HPP #include #include #include namespace mlpack { namespace distribution /** Probability distributions. */ { /** * A discrete distribution where the only observations are discrete * observations. This is useful (for example) with discrete Hidden Markov * Models, where observations are non-negative integers representing specific * emissions. * * No bounds checking is performed for observations, so if an invalid * observation is passed (i.e. observation > numObservations), a crash will * probably occur. * * This distribution only supports one-dimensional observations, so when passing * an arma::vec as an observation, it should only have one dimension * (vec.n_rows == 1). Any additional dimensions will simply be ignored. * * @note * This class, like every other class in mlpack, uses arma::vec to represent * observations. While a discrete distribution only has positive integers * (size_t) as observations, these can be converted to doubles (which is what * arma::vec holds). This distribution internally converts those doubles back * into size_t before comparisons. * @endnote */ class DiscreteDistribution { public: /** * Default constructor, which creates a distribution that has no observations. */ DiscreteDistribution() : probabilities(std::vector(1)){ /* Nothing to do. 
*/ }

  /**
   * Define the discrete distribution as having numObservations possible
   * observations.  The probability in each state will be set to (1 /
   * numObservations).
   *
   * @param numObservations Number of possible observations this distribution
   *    can have.
   */
  DiscreteDistribution(const size_t numObservations) :
      probabilities(std::vector<arma::vec>(1,
          arma::ones<arma::vec>(numObservations) / numObservations))
  { /* Nothing to do. */ }

  /**
   * Define the multidimensional discrete distribution as having
   * numObservations[i] possible observations in dimension i.  The probability
   * of each state in dimension i will be set to (1 / numObservations[i]).
   *
   * @param numObservations Number of possible observations this distribution
   *    can have in each dimension.
   */
  DiscreteDistribution(const arma::Col<size_t>& numObservations)
  {
    for (size_t i = 0; i < numObservations.n_elem; i++)
    {
      const size_t numObs = size_t(numObservations[i]);
      if (numObs == 0)
      {
        std::ostringstream oss;
        oss << "number of observations for dimension " << i << " is 0, but "
            << "must be greater than 0";
        throw std::invalid_argument(oss.str());
      }
      probabilities.push_back(arma::ones<arma::vec>(numObs) / numObs);
    }
  }

  /**
   * Define the multidimensional discrete distribution as having the given
   * probabilities for each observation.
   *
   * @param probabilities Probabilities of each possible observation.
   */
  DiscreteDistribution(const std::vector<arma::vec>& probabilities)
  {
    for (size_t i = 0; i < probabilities.size(); i++)
    {
      arma::vec temp = probabilities[i];
      double sum = accu(temp);
      if (sum > 0)
        this->probabilities.push_back(temp / sum);
      else
        this->probabilities.push_back(arma::ones<arma::vec>(temp.n_elem) /
            temp.n_elem);
    }
  }

  /**
   * Get the dimensionality of the distribution.
   */
  size_t Dimensionality() const { return probabilities.size(); }

  /**
   * Return the probability of the given observation.  If the observation is
   * greater than the number of possible observations, then a crash will
   * probably occur -- bounds checking is not performed.
   *
   * @param observation Observation to return the probability of.
   * @return Probability of the given observation.
   */
  double Probability(const arma::vec& observation) const
  {
    double probability = 1.0;
    // Ensure the observation has the same dimensionality as the distribution.
    if (observation.n_elem != probabilities.size())
    {
      Log::Debug << "DiscreteDistribution::Probability(): the observation "
          << "has dimensionality " << observation.n_elem << ", but the "
          << "distribution has dimensionality " << probabilities.size() << "."
          << std::endl;
      return probability;
    }

    for (size_t dimension = 0; dimension < observation.n_elem; dimension++)
    {
      // Adding 0.5 helps ensure that we cast the floating point to a size_t
      // correctly.
      const size_t obs = size_t(observation(dimension) + 0.5);

      // Ensure that the observation is within the bounds.
      if (obs >= probabilities[dimension].n_elem)
      {
        Log::Debug << "DiscreteDistribution::Probability(): received "
            << "observation " << obs << "; observation must be in [0, "
            << probabilities[dimension].n_elem << "] for this distribution."
            << std::endl;
      }
      probability *= probabilities[dimension][obs];
    }

    return probability;
  }

  /**
   * Return the log probability of the given observation.  If the observation
   * is greater than the number of possible observations, then a crash will
   * probably occur -- bounds checking is not performed.
   *
   * @param observation Observation to return the log probability of.
   * @return Log probability of the given observation.
   */
  double LogProbability(const arma::vec& observation) const
  {
    // TODO: consider storing log probabilities instead?
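    // Note: Probability() forms a product over dimensions, so for
    // high-dimensional observations the product can underflow to zero, making
    // this return -inf; summing per-dimension logs would be more robust.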
return log(Probability(observation));
  }

  /**
   * Return a randomly generated observation (one-dimensional vector; one
   * observation) according to the probability distribution defined by this
   * object.
   *
   * @return Random observation.
   */
  arma::vec Random() const;

  /**
   * Estimate the probability distribution directly from the given
   * observations.  If any of the observations is greater than
   * numObservations, a crash is likely to occur.
   *
   * @param observations List of observations.
   */
  void Train(const arma::mat& observations);

  /**
   * Estimate the probability distribution from the given observations, taking
   * into account the probability of each observation actually being from this
   * distribution.
   *
   * @param observations List of observations.
   * @param probabilities List of probabilities that each observation is
   *    actually from this distribution.
   */
  void Train(const arma::mat& observations, const arma::vec& probabilities);

  //! Modify the vector of probabilities for the given dimension.
  arma::vec& Probabilities(const size_t dim = 0) { return probabilities[dim]; }
  //! Return the vector of probabilities for the given dimension.
  const arma::vec& Probabilities(const size_t dim = 0) const
  { return probabilities[dim]; }

  /**
   * Serialize the distribution.
   */
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    // We serialize the vector manually since there seem to be some problems
    // with some boost versions.
    size_t dimensionality;
    dimensionality = probabilities.size();
    ar & data::CreateNVP(dimensionality, "dimensionality");

    if (Archive::is_loading::value)
    {
      probabilities.clear();
      probabilities.resize(dimensionality);
    }

    for (size_t i = 0; i < dimensionality; ++i)
    {
      std::ostringstream oss;
      oss << "probabilities" << i;
      ar & data::CreateNVP(probabilities[i], oss.str());
    }
  }

 private:
  //! The probabilities for each dimension; each arma::vec represents the
  //! probabilities for the observations in each dimension.
  std::vector<arma::vec> probabilities;
};

} // namespace distribution
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/dists/gamma_distribution.cpp000066400000000000000000000176411315013601400234710ustar00rootroot00000000000000/**
 * @file gamma_distribution.cpp
 * @author Yannis Mentekidis
 *
 * Implementation of the methods of GammaDistribution.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "gamma_distribution.hpp"
#include <boost/math/special_functions.hpp> // For digamma() and trigamma().

using namespace mlpack;
using namespace mlpack::distribution;

GammaDistribution::GammaDistribution(const size_t dimensionality)
{
  // Initialize distribution.
  alpha.zeros(dimensionality);
  beta.zeros(dimensionality);
}

GammaDistribution::GammaDistribution(const arma::mat& data, const double tol)
{
  Train(data, tol);
}

GammaDistribution::GammaDistribution(const arma::vec& alpha,
                                     const arma::vec& beta)
{
  if (beta.n_elem != alpha.n_elem)
    throw std::runtime_error("Alpha and beta vector dimensions mismatch.");

  this->alpha = alpha;
  this->beta = beta;
}

// Returns true if computation converged.
inline bool GammaDistribution::Converged(const double aOld,
                                         const double aNew,
                                         const double tol)
{
  return (std::abs(aNew - aOld) / aNew) < tol;
}

// Fits an alpha and beta parameter to each dimension of the data.
void GammaDistribution::Train(const arma::mat& rdata, const double tol)
{
  // If fittingSet is empty, nothing to do.
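  // (An empty fitting set arrives as a 0x0 matrix; comparing against
  // arma::size(arma::mat()) below is what detects that case.)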
if (arma::size(rdata) == arma::size(arma::mat())) return; // Use boost's definitions of digamma and tgamma, and std::log. using boost::math::digamma; using boost::math::trigamma; using std::log; // Calculate log(mean(x)) and mean(log(x)) of each dataset row. const arma::vec meanLogxVec = arma::mean(arma::log(rdata), 1); const arma::vec meanxVec = arma::mean(rdata, 1); const arma::vec logMeanxVec = arma::log(meanxVec); // Call the statistics-only GammaDistribution::Train() function to fit the // parameters. That function does all the work so we're done. Train(logMeanxVec, meanLogxVec, meanxVec, tol); } //Fits an alpha and beta parameter according to observation probabilities. void GammaDistribution::Train(const arma::mat& rdata, const arma::vec& probabilities, const double tol) { // If fittingSet is empty, nothing to do. if (arma::size(rdata) == arma::size(arma::mat())) return; arma::vec meanLogxVec(rdata.n_rows, arma::fill::zeros); arma::vec meanxVec(rdata.n_rows, arma::fill::zeros); arma::vec logMeanxVec(rdata.n_rows, arma::fill::zeros); for (size_t i = 0; i < rdata.n_cols; i++) { meanLogxVec += probabilities(i) * arma::log(rdata.col(i)); meanxVec += probabilities(i) * rdata.col(i); } double totProbability = arma::accu(probabilities); meanLogxVec /= totProbability; meanxVec /= totProbability; logMeanxVec = arma::log(meanxVec); // Call the statistics-only GammaDistribution::Train() function to fit the // parameters. That function does all the work so we're done. Train(logMeanxVec, meanLogxVec, meanxVec, tol); } // Fits an alpha and beta parameter to each dimension of the data. void GammaDistribution::Train(const arma::vec& logMeanxVec, const arma::vec& meanLogxVec, const arma::vec& meanxVec, const double tol) { // Use boost's definitions of digamma and tgamma, and std::log. using boost::math::digamma; using boost::math::trigamma; using std::log; // Number of dimensions of gamma distribution. size_t ndim = logMeanxVec.n_rows; // Sanity check - all vectors are same size. if (logMeanxVec.n_rows != meanLogxVec.n_rows || logMeanxVec.n_rows != meanxVec.n_rows) throw std::runtime_error("Statistic vectors must be of the same size."); // Allocate space for alphas and betas (Assume independent rows). alpha.set_size(ndim); beta.set_size(ndim); // Treat each dimension (i.e. row) independently. for (size_t row = 0; row < ndim; ++row) { // Statistics for this row. const double meanLogx = meanLogxVec(row); const double meanx = meanxVec(row); const double logMeanx = logMeanxVec(row); // Starting point for Generalized Newton. double aEst = 0.5 / (logMeanx - meanLogx); double aOld; // Newton's method: In each step, make an update to aEst. If value didn't // change much (abs(aNew - aEst) / aEst < tol), then stop. do { // Needed for convergence test. aOld = aEst; // Calculate new value for alpha. double nominator = meanLogx - logMeanx + log(aEst) - digamma(aEst); double denominator = pow(aEst, 2) * (1 / aEst - trigamma(aEst)); // Protect against division by 0. if (denominator == 0) throw std::logic_error("GammaDistribution::Train() attempted division" " by 0."); aEst = 1.0 / ((1.0 / aEst) + nominator / denominator); // Protect against nan values (aEst will be passed to logarithm). if (aEst <= 0) throw std::logic_error("GammaDistribution::Train(): estimated invalid " "negative value for parameter alpha!"); } while (!Converged(aEst, aOld, tol)); alpha(row) = aEst; beta(row) = meanx / aEst; } } // Returns the probability of the provided observations. 
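// In each dimension d (independent by assumption), the density used below is
//   f(x) = x^(alpha_d - 1) * exp(-x / beta_d) / (Gamma(alpha_d) * beta_d^alpha_d),
// and the per-dimension densities are multiplied across dimensions.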
void GammaDistribution::Probability(const arma::mat& observations, arma::vec& probabilities) const { size_t numObs = observations.n_cols; // Set all equal to 1 (multiplication neutral). probabilities.ones(numObs); // Compute denominator only once for each dimension. arma::vec denominators(alpha.n_elem); for (size_t d = 0; d < alpha.n_elem; ++d) denominators(d) = std::tgamma(alpha(d)) * std::pow(beta(d), alpha(d)); // Compute probability of each observation. for (size_t i = 0; i < numObs; ++i) { for (size_t d = 0; d < observations.n_rows; ++d) { // Compute probability using Multiplication Law. double factor = std::exp(-observations(d, i) / beta(d)); double numerator = std::pow(observations(d, i), alpha(d) - 1); probabilities(i) *= factor * numerator / denominators(d); } } } // Returns the probability of one observation (x) for one of the Gamma's // dimensions. double GammaDistribution::Probability(double x, size_t dim) const { return std::pow(x, alpha(dim) - 1) * std::exp(-x / beta(dim)) / (std::tgamma(alpha(dim)) * std::pow(beta(dim), alpha(dim))); } // Returns the log probability of the provided observations. void GammaDistribution::LogProbability(const arma::mat& observations, arma::vec& LogProbabilities) const { size_t numObs = observations.n_cols; // Set all equal to 0 (addition neutral). LogProbabilities.zeros(numObs); // Compute denominator only once for each dimension. arma::vec denominators(alpha.n_elem); for (size_t d = 0; d < alpha.n_elem; ++d) denominators(d) = std::tgamma(alpha(d)) * std::pow(beta(d), alpha(d)); // Compute probability of each observation. for (size_t i = 0; i < numObs; ++i) { for (size_t d = 0; d < observations.n_rows; ++d) { // Compute probability using Multiplication Law and Logarithm addition // property. double factor = std::exp(-observations(d, i) / beta(d)); double numerator = std::pow(observations(d, i), alpha(d) - 1); LogProbabilities(i) += std::log( numerator * factor / denominators(d)); } } } // Returns a gamma-random d-dimensional vector. arma::vec GammaDistribution::Random() const { arma::vec randVec(alpha.n_elem); for (size_t d = 0; d < alpha.n_elem; ++d) { std::gamma_distribution dist(alpha(d), beta(d)); // Use the mlpack random object. randVec(d) = dist(mlpack::math::randGen); } return randVec; } mlpack-2.2.5/src/mlpack/core/dists/gamma_distribution.hpp000066400000000000000000000174011315013601400234720ustar00rootroot00000000000000/** * @file gamma_distribution.hpp * @author Yannis Mentekidis * * Implementation of a Gamma distribution of multidimensional data that fits * gamma parameters (alpha, beta) to data. * The fitting is done independently for each dataset dimension (row), based on * the assumption each dimension is fully indepeendent. * * Based on "Estimating a Gamma Distribution" by Thomas P. Minka: * research.microsoft.com/~minka/papers/minka-gamma.pdf * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef _MLPACK_CORE_DISTRIBUTIONS_GAMMA_DISTRIBUTION_HPP #define _MLPACK_CORE_DISTRIBUTIONS_GAMMA_DISTRIBUTION_HPP #include #include #include namespace mlpack { namespace distribution { /** * This class represents the Gamma distribution. It supports training a Gamma * distribution on a given dataset and accessing the fitted alpha and beta * parameters. 
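 *
 * A brief usage sketch (the random data here is only illustrative):
 *
 * @code
 * arma::mat data = arma::randu<arma::mat>(3, 1000); // 3-d positive data.
 * GammaDistribution g(data); // Fits alpha and beta for each dimension.
 * arma::vec probs;
 * g.Probability(data, probs); // One density value per column of 'data'.
 * @endcode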
* * This class supports multidimensional Gamma distributions; however, it is * assumed that each dimension is independent; therefore, a multidimensional * Gamma distribution here may be seen as a set of independent * single-dimensional Gamma distributions---and the parameters are estimated * under this assumption. * * The estimation algorithm used can be found in the following paper: * * @code * @techreport{minka2002estimating, * title={Estimating a {G}amma distribution}, * author={Minka, Thomas P.}, * institution={Microsoft Research}, * address={Cambridge, U.K.}, * year={2002} * } * @endcode */ class GammaDistribution { public: /** * Construct the Gamma distribution with the given number of dimensions * (default 0); each parameter will be initialized to 0. * * @param dimensionality Number of dimensions. */ GammaDistribution(const size_t dimensionality = 0); /** * Construct the Gamma distribution, training on the given parameters. * * @param data Data to train the distribution on. * @param tol Convergence tolerance. This is *not* an absolute measure: * It will stop the approximation once the *change* in the value is * smaller than tol. */ GammaDistribution(const arma::mat& data, const double tol = 1e-8); /** * Construct the Gamma distribution given two vectors alpha and beta. * * @param alpha The vector of alphas, one per dimension. * @param beta The vector of betas, one per dimension. */ GammaDistribution(const arma::vec& alpha, const arma::vec& beta); /** * Destructor. */ ~GammaDistribution() {}; /** * This function trains (fits distribution parameters) to new data or the * dataset the object owns. * * @param rdata Reference data to fit parameters to. * @param tol Convergence tolerance. This is *not* an absolute measure: * It will stop the approximation once the *change* in the value is * smaller than tol. */ void Train(const arma::mat& rdata, const double tol = 1e-8); /** * Fits an alpha and beta parameter according to observation probabilities. * This method is not yet implemented. * * @param observations The reference data, one observation per column * @param probabilities The probability of each observation. One value per * column of the observations matrix. * @param tol Convergence tolerance. This is *not* an absolute measure: * It will stop the approximation once the *change* in the value is * smaller than tol. */ void Train(const arma::mat& observations, const arma::vec& probabilities, const double tol = 1e-8); /** * This function trains (fits distribution parameters) to a dataset with * pre-computed statistics logMeanx, meanLogx, meanx for each dimension. * * @param logMeanxVec Is each dimension's logarithm of the mean * (log(mean(x))). * @param meanLogxVec Is each dimension's mean of logarithms (mean(log(x))). * @param meanxVec Is each dimension's mean (mean(x)). * @param tol Convergence tolerance. This is *not* an absolute measure: * It will stop the approximation once the *change* in the value is * smaller than tol. */ void Train(const arma::vec& logMeanxVec, const arma::vec& meanLogxVec, const arma::vec& meanxVec, const double tol = 1e-8); /** * This function returns the probability of a group of observations. * * The probability of the value x is * * \frac{x^(\alpha - 1)}{\Gamma(\alpha) * \beta^\alpha} * e ^ * {-\frac{x}{\beta}} * * for one dimension. This implementation assumes each dimension is * independent, so the product rule is used. * * @param observations Matrix of observations, one per column. 
* @param probabilities column vector of probabilities, one per observation. */ void Probability(const arma::mat& observations, arma::vec& Probabilities) const; /* * This is a shortcut to the Probability(arma::mat&, arma::vec&) function * for when we want to evaluate only the probability of one dimension of the * gamma. * * @param x The 1-dimensional observation. * @param dim The dimension for which to calculate the probability */ double Probability(double x, size_t dim) const; /** * This function returns the logarithm of the probability of a group of * observations. * * The logarithm of the probability of a value x is * * log(\frac{x^(\alpha - 1)}{\Gamma(\alpha) * \beta^\alpha} * e ^ * {-\frac{x}{\beta}}) * * for one dimension. This implementation assumes each dimension is * independent, so the product rule is used. * * @param observations Matrix of observations, one per column. * @param logProbabilities column vector of log probabilities, one per * observation. */ void LogProbability(const arma::mat& observations, arma::vec& LogProbabilities) const; /** * This function returns an observation of this distribution */ arma::vec Random() const; // Access to Gamma distribution parameters. //! Get the alpha parameter of the given dimension. double Alpha(const size_t dim) const { return alpha[dim]; } //! Modify the alpha parameter of the given dimension. double& Alpha(const size_t dim) { return alpha[dim]; } //! Get the beta parameter of the given dimension. double Beta(const size_t dim) const { return beta[dim]; } //! Modify the beta parameter of the given dimension. double& Beta(const size_t dim) { return beta[dim]; } //! Get the dimensionality of the distribution. size_t Dimensionality() const { return alpha.n_elem; } private: //! Array of fitted alphas. arma::vec alpha; //! Array of fitted betas. arma::vec beta; /** * This is a small function that returns true if the update of alpha is * smaller than the tolerance ratio. * * @param aOld old value of parameter we want to estimate (alpha in our * case). * @param aNew new value of parameter (the value after 1 iteration from * aOld). * @param tol Convergence tolerance. Relative measure (see documentation of * GammaDistribution::Train). */ inline bool Converged(const double aOld, const double aNew, const double tol); }; } // namespace distributions. } // namespace mlpack. #endif mlpack-2.2.5/src/mlpack/core/dists/gaussian_distribution.cpp000066400000000000000000000131601315013601400242130ustar00rootroot00000000000000/** * @file gaussian_distribution.cpp * @author Ryan Curtin * @author Michael Fox * * Implementation of Gaussian distribution class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "gaussian_distribution.hpp" #include using namespace mlpack; using namespace mlpack::distribution; GaussianDistribution::GaussianDistribution(const arma::vec& mean, const arma::mat& covariance) : mean(mean) { Covariance(covariance); } void GaussianDistribution::Covariance(const arma::mat& covariance) { this->covariance = covariance; FactorCovariance(); } void GaussianDistribution::Covariance(arma::mat&& covariance) { this->covariance = std::move(covariance); FactorCovariance(); } void GaussianDistribution::FactorCovariance() { // On Armadillo < 4.500, the "lower" option isn't available. 
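  // The Cholesky factor computed here satisfies cov = covLower * covLower^T.
  // It is reused below to build invCov (from the inverse of the triangular
  // factor) and logDetCov (log|cov| = 2 * sum(log(diag(covLower)))), and
  // Random() uses it to draw samples as mean + covLower * z, z ~ N(0, I).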
#if (ARMA_VERSION_MAJOR < 4) || \ ((ARMA_VERSION_MAJOR == 4) && (ARMA_VERSION_MINOR < 500)) covLower = arma::chol(covariance).t(); // This is less efficient. #else covLower = arma::chol(covariance, "lower"); #endif // Comment from rcurtin: // // I think the use of the word "interpret" in the Armadillo documentation // about trimatl and trimatu is somewhat misleading. What the function will // actually do, when used in that context, is loop over the upper triangular // part of the matrix and set it all to 0, so this ends up actually just // burning cycles---also because the operator=() evaluates the expression and // strips the knowledge that it's a lower triangular matrix. So then the call // to .i() doesn't actually do anything smarter. // // But perusing fn_inv.hpp more closely, there is a specialization that will // work when called like this: inv(trimatl(covLower)), and will use LAPACK's // ?trtri functions. However, it will still set the upper triangular part to // 0 after the method. That last part is unnecessary, but baked into // Armadillo, so there's not really much that can be done about that without // discussion with the Armadillo maintainer. const arma::mat invCovLower = arma::inv(arma::trimatl(covLower)); invCov = invCovLower.t() * invCovLower; double sign = 0.; arma::log_det(logDetCov, sign, covLower); logDetCov *= 2; } double GaussianDistribution::LogProbability(const arma::vec& observation) const { const size_t k = observation.n_elem; const arma::vec diff = mean - observation; const arma::vec v = (diff.t() * invCov * diff); return -0.5 * k * log2pi - 0.5 * logDetCov - 0.5 * v(0); } arma::vec GaussianDistribution::Random() const { return covLower * arma::randn(mean.n_elem) + mean; } /** * Estimate the Gaussian distribution directly from the given observations. * * @param observations List of observations. */ void GaussianDistribution::Train(const arma::mat& observations) { if (observations.n_cols > 0) { mean.zeros(observations.n_rows); covariance.zeros(observations.n_rows, observations.n_rows); } else // This will end up just being empty. { // TODO(stephentu): why do we allow this case? why not throw an error? mean.zeros(0); covariance.zeros(0); return; } // Calculate the mean. for (size_t i = 0; i < observations.n_cols; i++) mean += observations.col(i); // Normalize the mean. mean /= observations.n_cols; // Now calculate the covariance. for (size_t i = 0; i < observations.n_cols; i++) { arma::vec obsNoMean = observations.col(i) - mean; covariance += obsNoMean * trans(obsNoMean); } // Finish estimating the covariance by normalizing, with the (1 / (n - 1)) so // that it is the unbiased estimator. covariance /= (observations.n_cols - 1); // Ensure that the covariance is positive definite. gmm::PositiveDefiniteConstraint::ApplyConstraint(covariance); FactorCovariance(); } /** * Estimate the Gaussian distribution from the given observations, taking into * account the probability of each observation actually being from this * distribution. */ void GaussianDistribution::Train(const arma::mat& observations, const arma::vec& probabilities) { if (observations.n_cols > 0) { mean.zeros(observations.n_rows); covariance.zeros(observations.n_rows, observations.n_rows); } else // This will end up just being empty. { // TODO(stephentu): same as above mean.zeros(0); covariance.zeros(0); return; } double sumProb = 0; // First calculate the mean, and save the sum of all the probabilities for // later normalization. 
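  // That is, this computes mean = (1 / sum_i p_i) * sum_i (p_i * x_i): the
  // probability-weighted average of the observations.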
for (size_t i = 0; i < observations.n_cols; i++) { mean += probabilities[i] * observations.col(i); sumProb += probabilities[i]; } if (sumProb == 0) { // Nothing in this Gaussian! At least set the covariance so that it's // invertible. covariance.diag() += 1e-50; FactorCovariance(); return; } // Normalize. if (sumProb > 0) mean /= sumProb; // Now find the covariance. for (size_t i = 0; i < observations.n_cols; i++) { arma::vec obsNoMean = observations.col(i) - mean; covariance += probabilities[i] * (obsNoMean * trans(obsNoMean)); } // This is probably biased, but I don't know how to unbias it. if (sumProb > 0) covariance /= sumProb; // Ensure that the covariance is positive definite. gmm::PositiveDefiniteConstraint::ApplyConstraint(covariance); FactorCovariance(); } mlpack-2.2.5/src/mlpack/core/dists/gaussian_distribution.hpp000066400000000000000000000135441315013601400242260ustar00rootroot00000000000000/** * @file gaussian_distribution.hpp * @author Ryan Curtin * @author Michael Fox * * Implementation of the Gaussian distribution. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_DISTRIBUTIONS_GAUSSIAN_DISTRIBUTION_HPP #define MLPACK_CORE_DISTRIBUTIONS_GAUSSIAN_DISTRIBUTION_HPP #include namespace mlpack { namespace distribution { /** * A single multivariate Gaussian distribution. */ class GaussianDistribution { private: //! Mean of the distribution. arma::vec mean; //! Positive definite covariance of the distribution. arma::mat covariance; //! Lower triangular factor of cov (e.g. cov = LL^T). arma::mat covLower; //! Cached inverse of covariance. arma::mat invCov; //! Cached logdet(cov). double logDetCov; //! log(2pi) static const constexpr double log2pi = 1.83787706640934533908193770912475883; public: /** * Default constructor, which creates a Gaussian with zero dimension. */ GaussianDistribution() { /* nothing to do */ } /** * Create a Gaussian distribution with zero mean and identity covariance with * the given dimensionality. */ GaussianDistribution(const size_t dimension) : mean(arma::zeros(dimension)), covariance(arma::eye(dimension, dimension)), covLower(arma::eye(dimension, dimension)), invCov(arma::eye(dimension, dimension)), logDetCov(0) { /* Nothing to do. */ } /** * Create a Gaussian distribution with the given mean and covariance. * * covariance is expected to be positive definite. */ GaussianDistribution(const arma::vec& mean, const arma::mat& covariance); // TODO(stephentu): do we want a (arma::vec&&, arma::mat&&) ctor? //! Return the dimensionality of this distribution. size_t Dimensionality() const { return mean.n_elem; } /** * Return the probability of the given observation. */ double Probability(const arma::vec& observation) const { return exp(LogProbability(observation)); } /** * Return the log probability of the given observation. */ double LogProbability(const arma::vec& observation) const; /** * Calculates the multivariate Gaussian probability density function for each * data point (column) in the given matrix. * * @param x List of observations. * @param probabilities Output probabilities for each input observation. 
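   *
   * A minimal usage sketch (the data here is only illustrative):
   *
   * @code
   * GaussianDistribution g(3); // Zero mean, identity covariance, 3 dims.
   * arma::mat x = arma::randn<arma::mat>(3, 100); // 100 observations.
   * arma::vec probs;
   * g.Probability(x, probs); // probs(i) is the density of x.col(i).
   * @endcode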
*/ void Probability(const arma::mat& x, arma::vec& probabilities) const { arma::vec logProbabilities; LogProbability(x, logProbabilities); probabilities = arma::exp(logProbabilities); } void LogProbability(const arma::mat& x, arma::vec& logProbabilities) const; /** * Return a randomly generated observation according to the probability * distribution defined by this object. * * @return Random observation from this Gaussian distribution. */ arma::vec Random() const; /** * Estimate the Gaussian distribution directly from the given observations. * * @param observations List of observations. */ void Train(const arma::mat& observations); /** * Estimate the Gaussian distribution from the given observations, taking into * account the probability of each observation actually being from this * distribution. */ void Train(const arma::mat& observations, const arma::vec& probabilities); /** * Return the mean. */ const arma::vec& Mean() const { return mean; } /** * Return a modifiable copy of the mean. */ arma::vec& Mean() { return mean; } /** * Return the covariance matrix. */ const arma::mat& Covariance() const { return covariance; } /** * Set the covariance. */ void Covariance(const arma::mat& covariance); void Covariance(arma::mat&& covariance); /** * Serialize the distribution. */ template void Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // We just need to serialize each of the members. ar & CreateNVP(mean, "mean"); ar & CreateNVP(covariance, "covariance"); ar & CreateNVP(covLower, "covLower"); ar & CreateNVP(invCov, "invCov"); ar & CreateNVP(logDetCov, "logDetCov"); } private: /** * This factors the covariance using arma::chol(). The function assumes that * the given matrix is factorizable via the Cholesky decomposition. If not, a * std::runtime_error will be thrown. */ void FactorCovariance(); }; /** * Calculates the multivariate Gaussian log probability density function for each * data point (column) in the given matrix * * @param x List of observations. * @param probabilities Output log probabilities for each input observation. */ inline void GaussianDistribution::LogProbability(const arma::mat& x, arma::vec& logProbabilities) const { // Column i of 'diffs' is the difference between x.col(i) and the mean. arma::mat diffs = x - (mean * arma::ones(x.n_cols)); // Now, we only want to calculate the diagonal elements of (diffs' * cov^-1 * // diffs). We just don't need any of the other elements. We can calculate // the right hand part of the equation (instead of the left side) so that // later we are referencing columns, not rows -- that is faster. const arma::mat rhs = -0.5 * invCov * diffs; arma::vec logExponents(diffs.n_cols); // We will now fill this. for (size_t i = 0; i < diffs.n_cols; i++) logExponents(i) = accu(diffs.unsafe_col(i) % rhs.unsafe_col(i)); const size_t k = x.n_rows; logProbabilities = -0.5 * k * log2pi - 0.5 * logDetCov + logExponents; } } // namespace distribution } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/dists/laplace_distribution.cpp000066400000000000000000000062121315013601400240020ustar00rootroot00000000000000/* * @file laplace_distribution.cpp * @author Zhihao Lou * * Implementation of Laplace distribution. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include #include "laplace_distribution.hpp" using namespace mlpack; using namespace mlpack::distribution; /** * Return the log probability of the given observation. */ double LaplaceDistribution::LogProbability(const arma::vec& observation) const { // Evaluate the PDF of the Laplace distribution to determine the log probability. return -log(2. * scale) - arma::norm(observation - mean, 2) / scale; } /** * Estimate the Laplace distribution directly from the given observations. * * @param observations List of observations. */ void LaplaceDistribution::Estimate(const arma::mat& observations) { // The maximum likelihood estimate of the mean is the median of the data for // the univariate case. See the short note "The Double Exponential // Distribution: Using Calculus to Find a Maximum Likelihood Estimator" by // R.M. Norton. // // But for the multivariate case, the derivation is slightly different. The // log-likelihood function is now // L(\theta) = -n ln 2 - \sum_{i = 1}^{n} (x_i - \theta)^T (x_i - \theta). // Differentiating with respect to the vector \theta gives // L'(\theta) = \sum_{i = 1}^{n} 2 (x_i - \theta) // which means that for an individual component \theta_k, // d / d\theta_k L(\theta) = \sum_{i = 1}^{n} 2 (x_ik - \theta_k) // which is zero when // \theta_k = (1 / n) \sum_{i = 1}^{n} x_ik // so L'(\theta) = 0 when \theta is the mean of the observations. I am not // 100% certain my calculus and linear algebra is right, but I think it is... mean = arma::mean(observations, 1); // The maximum likelihood estimate of the scale parameter is the mean // deviation from the mean. scale = 0.0; for (size_t i = 0; i < observations.n_cols; ++i) scale += arma::norm(observations.col(i) - mean, 2); scale /= observations.n_cols; } /** * Estimate the Laplace distribution directly from the given observations, * taking into account the probability of each observation actually being from * this distribution. */ void LaplaceDistribution::Estimate(const arma::mat& observations, const arma::vec& probabilities) { // I am not completely sure that this change results in a valid maximum // likelihood estimator given probabilities of points. mean.zeros(observations.n_rows); for (size_t i = 0; i < observations.n_cols; ++i) mean += observations.col(i) * probabilities(i); mean /= arma::accu(probabilities); // This the same formula as the previous function, but here we are multiplying // by the probability that the point is actually from this distribution. scale = 0.0; for (size_t i = 0; i < observations.n_cols; ++i) scale += probabilities(i) * arma::norm(observations.col(i) - mean, 2); scale /= arma::accu(probabilities); } mlpack-2.2.5/src/mlpack/core/dists/laplace_distribution.hpp000066400000000000000000000112061315013601400240060ustar00rootroot00000000000000/* * @file laplace.hpp * @author Zhihao Lou * * Laplace (double exponential) distribution used in SA. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #ifndef MLPACK_CORE_DISTRIBUTIONS_LAPLACE_DISTRIBUTION_HPP #define MLPACK_CORE_DISTRIBUTIONS_LAPLACE_DISTRIBUTION_HPP namespace mlpack { namespace distribution { /** * The multivariate Laplace distribution centered at 0 has pdf * * \f[ * f(x|\theta) = \frac{1}{2 \theta}\exp\left(-\frac{\|x - \mu\|}{\theta}\right) * \f] * * given scale parameter \f$\theta\f$ and mean \f$\mu\f$. This implementation * assumes a diagonal covariance, but a rewrite to support arbitrary covariances * is possible. * * See the following paper for more information on the non-diagonal-covariance * Laplace distribution and estimation techniques: * * @code * @article{eltoft2006multivariate, * title={{On the Multivariate Laplace Distribution}}, * author={Eltoft, Torbj\orn and Kim, Taesu and Lee, Te-Won}, * journal={IEEE Signal Processing Letters}, * volume={13}, * number={5}, * pages={300--304}, * year={2006} * } * @endcode * * Note that because of the diagonal covariance restriction, much of the algebra * in the paper above becomes simplified, and the PDF takes roughly the same * form as the univariate case. */ class LaplaceDistribution { public: /** * Default constructor, which creates a Laplace distribution with zero * dimension and zero scale parameter. */ LaplaceDistribution() : scale(0) { } /** * Construct the Laplace distribution with the given scale and dimensionality. * The mean is initialized to zero. * * @param dimensionality Dimensionality of distribution. * @param scale Scale of distribution. */ LaplaceDistribution(const size_t dimensionality, const double scale) : mean(arma::zeros(dimensionality)), scale(scale) { } /** * Construct the Laplace distribution with the given mean and scale parameter. * * @param mean Mean of distribution. * @param scale Scale of distribution. */ LaplaceDistribution(const arma::vec& mean, const double scale) : mean(mean), scale(scale) { } //! Return the dimensionality of this distribution. size_t Dimensionality() const { return mean.n_elem; } /** * Return the probability of the given observation. */ double Probability(const arma::vec& observation) const { return exp(LogProbability(observation)); } /** * Return the log probability of the given observation. */ double LogProbability(const arma::vec& observation) const; /** * Return a randomly generated observation according to the probability * distribution defined by this object. This is inlined for speed. * * @return Random observation from this Laplace distribution. */ arma::vec Random() const { arma::vec result(mean.n_elem); result.randu(); // Convert from uniform distribution to Laplace distribution. // arma::sign() does not exist in Armadillo < 3.920 so we have to do this // elementwise. for (size_t i = 0; i < result.n_elem; ++i) { if (result[i] < 0.5) result[i] = mean[i] + scale * std::log(1 + 2.0 * (result[i] - 0.5)); else result[i] = mean[i] - scale * std::log(1 - 2.0 * (result[i] - 0.5)); } return result; } /** * Estimate the Laplace distribution directly from the given observations. * * @param observations List of observations. */ void Estimate(const arma::mat& observations); /** * Estimate the Laplace distribution from the given observations, taking into * account the probability of each observation actually being from this * distribution. */ void Estimate(const arma::mat& observations, const arma::vec& probabilities); //! Return the mean. const arma::vec& Mean() const { return mean; } //! Modify the mean. arma::vec& Mean() { return mean; } //! Return the scale parameter. double Scale() const { return scale; } //! 
Modify the scale parameter.
  double& Scale() { return scale; }

  /**
   * Serialize the distribution.
   */
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    ar & data::CreateNVP(mean, "mean");
    ar & data::CreateNVP(scale, "scale");
  }

 private:
  //! Mean of the distribution.
  arma::vec mean;
  //! Scale parameter of the distribution.
  double scale;
};

} // namespace distribution
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/dists/regression_distribution.cpp
/**
 * @file regression_distribution.cpp
 * @author Michael Fox
 *
 * Implementation of conditional Gaussian distribution for HMM regression
 * (HMMR).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "regression_distribution.hpp"

using namespace mlpack;
using namespace mlpack::distribution;

/**
 * Estimate the regression distribution directly from the given observations.
 *
 * @param observations List of observations.
 */
void RegressionDistribution::Train(const arma::mat& observations)
{
  regression::LinearRegression lr(
      observations.rows(1, observations.n_rows - 1),
      (observations.row(0)).t(), 0, true);
  rf = lr;
  arma::vec fitted;
  lr.Predict(observations.rows(1, observations.n_rows - 1), fitted);
  err.Train(observations.row(0) - fitted.t());
}

/**
 * Estimate parameters using the provided observation weights.
 *
 * @param observations List of observations.
 * @param weights Probability that each observation is from this distribution.
 */
void RegressionDistribution::Train(const arma::mat& observations,
                                   const arma::vec& weights)
{
  regression::LinearRegression lr(
      observations.rows(1, observations.n_rows - 1),
      (observations.row(0)).t(), 0, true, weights);
  rf = lr;
  arma::vec fitted;
  lr.Predict(observations.rows(1, observations.n_rows - 1), fitted);
  err.Train(observations.row(0) - fitted.t(), weights);
}

/**
 * Evaluate the probability density function of a given observation.
 *
 * @param observation Point to evaluate the probability at.
 */
double RegressionDistribution::Probability(const arma::vec& observation) const
{
  arma::vec fitted;
  rf.Predict(observation.rows(1, observation.n_rows - 1), fitted);
  return err.Probability(observation(0) - fitted);
}

void RegressionDistribution::Predict(const arma::mat& points,
                                     arma::vec& predictions) const
{
  rf.Predict(points, predictions);
}
mlpack-2.2.5/src/mlpack/core/dists/regression_distribution.hpp
/**
 * @file regression_distribution.hpp
 * @author Michael Fox
 *
 * Implementation of conditional Gaussian distribution for HMM regression
 * (HMMR).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_DISTRIBUTIONS_REGRESSION_DISTRIBUTION_HPP
#define MLPACK_CORE_DISTRIBUTIONS_REGRESSION_DISTRIBUTION_HPP

#include
#include
#include

namespace mlpack {
namespace distribution {

/**
 * A class that represents a univariate conditionally Gaussian distribution.
* Can be used as an emission distribution with the hmm class to implement HMM * regression (HMMR) as described in * https://www.ima.umn.edu/preprints/January1994/1195.pdf * The hmm observations should have the dependent variable in the first row, * with the independent variables in the other rows. */ class RegressionDistribution { private: //! Regression function for representing conditional mean. regression::LinearRegression rf; //! Error distribution. GaussianDistribution err; public: /** * Default constructor, which creates a Gaussian with zero dimension. */ RegressionDistribution() { /* nothing to do */ } /** * Create a Conditional Gaussian distribution with conditional mean function * obtained by running RegressionFunction on predictors, responses. * * @param predictors Matrix of predictors (X). * @param responses Vector of responses (y). */ RegressionDistribution(const arma::mat& predictors, const arma::vec& responses) : rf(regression::LinearRegression(predictors, responses)) { err = GaussianDistribution(1); arma::mat cov(1, 1); cov(0, 0) = rf.ComputeError(predictors, responses); err.Covariance(std::move(cov)); } /** * Serialize the distribution. */ template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(rf, "rf"); ar & data::CreateNVP(err, "err"); } //! Return regression function. const regression::LinearRegression& Rf() const { return rf; } //! Modify regression function. regression::LinearRegression& Rf() { return rf; } //! Return error distribution. const GaussianDistribution& Err() const { return err; } //! Modify error distribution. GaussianDistribution& Err() { return err; } /** * Estimate the Gaussian distribution directly from the given observations. * * @param observations List of observations. */ void Train(const arma::mat& observations); /** * Estimate parameters using provided observation weights * * @param weights probability that given observation is from distribution */ void Train(const arma::mat& observations, const arma::vec& weights); /** * Evaluate probability density function of given observation * * @param observation point to evaluate probability at */ double Probability(const arma::vec& observation) const; /** * Evaluate log probability density function of given observation * * @param observation point to evaluate log probability at */ double LogProbability(const arma::vec& observation) const { return log(Probability(observation)); } /** * Calculate y_i for each data point in points. * * @param points the data points to calculate with. * @param predictions y, will contain calculated values on completion. */ void Predict(const arma::mat& points, arma::vec& predictions) const; //! Return the parameters (the b vector). const arma::vec& Parameters() const { return rf.Parameters(); } //! Return the dimensionality size_t Dimensionality() const { return rf.Parameters().n_elem; } }; } // namespace distribution } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/kernels/000077500000000000000000000000001315013601400174125ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/kernels/CMakeLists.txt000066400000000000000000000015301315013601400221510ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. 
set(SOURCES cosine_distance.hpp cosine_distance_impl.hpp epanechnikov_kernel.hpp epanechnikov_kernel_impl.hpp epanechnikov_kernel.cpp example_kernel.hpp gaussian_kernel.hpp hyperbolic_tangent_kernel.hpp kernel_traits.hpp laplacian_kernel.hpp linear_kernel.hpp polynomial_kernel.hpp pspectrum_string_kernel.hpp pspectrum_string_kernel_impl.hpp pspectrum_string_kernel.cpp spherical_kernel.hpp triangular_kernel.hpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/kernels/cosine_distance.hpp000066400000000000000000000034011315013601400232530ustar00rootroot00000000000000/** * @file cosine_distance.hpp * @author Ryan Curtin * * This implements the cosine distance (or cosine similarity) between two * vectors, which is a measure of the angle between the two vectors. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_KERNELS_COSINE_DISTANCE_HPP #define MLPACK_CORE_KERNELS_COSINE_DISTANCE_HPP #include #include namespace mlpack { namespace kernel { /** * The cosine distance (or cosine similarity). It is defined by * * @f[ * d(a, b) = \frac{a^T b}{|| a || || b ||} * @f] * * and this class assumes the standard L2 inner product. */ class CosineDistance { public: /** * Computes the cosine distance between two points. * * @param a First vector. * @param b Second vector. * @return d(a, b). */ template static double Evaluate(const VecTypeA& a, const VecTypeB& b); //! Serialize the class (there's nothing to save). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; //! Kernel traits for the cosine distance. template<> class KernelTraits { public: //! The cosine kernel is normalized: K(x, x) = 1 for all x. static const bool IsNormalized = true; //! The cosine kernel doesn't include a squared distance. static const bool UsesSquaredDistance = false; }; } // namespace kernel } // namespace mlpack // Include implementation. #include "cosine_distance_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/kernels/cosine_distance_impl.hpp000066400000000000000000000023261315013601400243010ustar00rootroot00000000000000/** * @file cosine_distance_impl.hpp * @author Ryan Curtin * * This implements the cosine distance. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_KERNELS_COSINE_DISTANCE_IMPL_HPP #define MLPACK_CORE_KERNELS_COSINE_DISTANCE_IMPL_HPP #include "cosine_distance.hpp" namespace mlpack { namespace kernel { template double CosineDistance::Evaluate(const VecTypeA& a, const VecTypeB& b) { // Since we are using the L2 inner product, this is easy. 
But we have to make
  // sure we aren't dividing by zero (if we are, then the cosine similarity is
  // defined to be 0: the cosine similarity is just a normalized dot product,
  // and if ||a|| or ||b|| is equal to 0, then a^T b is zero too).
  const double denominator = norm(a, 2) * norm(b, 2);
  if (denominator == 0.0)
    return 0;
  else
    return dot(a, b) / denominator;
}

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/kernels/epanechnikov_kernel.cpp
/**
 * @file epanechnikov_kernel.cpp
 * @author Neil Slagle
 *
 * Implementation of non-template Epanechnikov kernels.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "epanechnikov_kernel.hpp"
#include

using namespace mlpack;
using namespace mlpack::kernel;

/**
 * Compute the normalizer of this Epanechnikov kernel for the given dimension.
 *
 * @param dimension Dimension to calculate the normalizer for.
 */
double EpanechnikovKernel::Normalizer(const size_t dimension)
{
  return 2.0 * pow(bandwidth, (double) dimension) *
      std::pow(M_PI, dimension / 2.0) /
      (boost::math::tgamma(dimension / 2.0 + 1.0) * (dimension + 2.0));
}

/**
 * Evaluate the kernel not for two points but for a numerical value (the
 * distance between two points).
 */
double EpanechnikovKernel::Evaluate(const double distance) const
{
  return std::max(0.0, 1 - std::pow(distance, 2.0) * inverseBandwidthSquared);
}

/**
 * Evaluate the gradient of the kernel not for two points but for the distance
 * between two points.
 */
double EpanechnikovKernel::Gradient(const double distance) const
{
  if (std::abs(bandwidth) < std::abs(distance))
  {
    return 0;
  }
  else if (std::abs(bandwidth) > std::abs(distance))
  {
    return -2 * inverseBandwidthSquared * distance;
  }
  else
  {
    // The gradient doesn't exist.
    return arma::datum::nan;
  }
}

/**
 * Evaluate the gradient of the kernel not for two points but for the squared
 * distance between two points.
 */
double EpanechnikovKernel::GradientForSquaredDistance(
    const double distanceSquared) const
{
  double bandwidthSquared = bandwidth * bandwidth;
  if (distanceSquared < bandwidthSquared)
  {
    return -1 * inverseBandwidthSquared;
  }
  else if (distanceSquared > bandwidthSquared && distanceSquared >= 0)
  {
    return 0;
  }
  else
  {
    // The gradient doesn't exist.
    return arma::datum::nan;
  }
}
mlpack-2.2.5/src/mlpack/core/kernels/epanechnikov_kernel.hpp
/**
 * @file epanechnikov_kernel.hpp
 * @author Neil Slagle
 *
 * Definition of the Epanechnikov kernel.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_KERNELS_EPANECHNIKOV_KERNEL_HPP
#define MLPACK_CORE_KERNELS_EPANECHNIKOV_KERNEL_HPP

#include
#include

namespace mlpack {
namespace kernel {

/**
 * The Epanechnikov kernel, defined as
 *
 * @f[
 * K(x, y) = \max \{0, 1 - || x - y ||^2_2 / b^2 \}
 * @f]
 *
 * where @f$ b @f$ is the bandwidth of the kernel (defaults to 1.0).
 */
class EpanechnikovKernel
{
 public:
  /**
   * Instantiate the Epanechnikov kernel with the given bandwidth (default 1.0).
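   *
   * For example (a sketch; the vectors are arbitrary):
   *
   * @code
   * EpanechnikovKernel k(2.0);
   * arma::vec a(2), b(2);
   * a[0] = 0.0; a[1] = 0.0;
   * b[0] = 1.0; b[1] = 1.0;
   * const double v = k.Evaluate(a, b); // 1 - 2 / 4 = 0.5.
   * @endcode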
* * @param bandwidth Bandwidth of the kernel. */ EpanechnikovKernel(const double bandwidth = 1.0) : bandwidth(bandwidth), inverseBandwidthSquared(1.0 / (bandwidth * bandwidth)) { } /** * Evaluate the Epanechnikov kernel on the given two inputs. * * @tparam VecTypeA Type of first vector. * @tparam VecTypeB Type of second vector. * @param a One input vector. * @param b The other input vector. */ template double Evaluate(const VecTypeA& a, const VecTypeB& b) const; /** * Evaluate the Epanechnikov kernel given that the distance between the two * input points is known. */ double Evaluate(const double distance) const; /** * Evaluate the Gradient of Epanechnikov kernel * given that the distance between the two * input points is known. */ double Gradient(const double distance) const; /** * Evaluate the Gradient of Epanechnikov kernel * given that the squared distance between the two * input points is known. */ double GradientForSquaredDistance(const double distanceSquared) const; /** * Obtains the convolution integral [integral of K(||x-a||) K(||b-x||) dx] * for the two vectors. * * @tparam VecType Type of vector (arma::vec, arma::spvec should be expected). * @param a First vector. * @param b Second vector. * @return the convolution integral value. */ template double ConvolutionIntegral(const VecTypeA& a, const VecTypeB& b); /** * Compute the normalizer of this Epanechnikov kernel for the given dimension. * * @param dimension Dimension to calculate the normalizer for. */ double Normalizer(const size_t dimension); /** * Serialize the kernel. */ template void Serialize(Archive& ar, const unsigned int version); private: //! Bandwidth of the kernel. double bandwidth; //! Cached value of the inverse bandwidth squared (to speed up computation). double inverseBandwidthSquared; }; //! Kernel traits for the Epanechnikov kernel. template<> class KernelTraits { public: //! The Epanechnikov kernel is normalized: K(x, x) = 1 for all x. static const bool IsNormalized = true; //! The Epanechnikov kernel includes a squared distance. static const bool UsesSquaredDistance = true; }; } // namespace kernel } // namespace mlpack // Include implementation. #include "epanechnikov_kernel_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/kernels/epanechnikov_kernel_impl.hpp000066400000000000000000000057701315013601400251670ustar00rootroot00000000000000/** * @file epanechnikov_kernel_impl.hpp * @author Neil Slagle * * Implementation of template-based Epanechnikov kernel functions. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_KERNELS_EPANECHNIKOV_KERNEL_IMPL_HPP #define MLPACK_CORE_KERNELS_EPANECHNIKOV_KERNEL_IMPL_HPP // In case it hasn't already been included. #include "epanechnikov_kernel.hpp" #include #include namespace mlpack { namespace kernel { template inline double EpanechnikovKernel::Evaluate(const VecTypeA& a, const VecTypeB& b) const { return std::max(0.0, 1.0 - metric::SquaredEuclideanDistance::Evaluate(a, b) * inverseBandwidthSquared); } /** * Obtains the convolution integral [integral of K(||x-a||) K(||b-x||) dx] * for the two vectors. * * @tparam VecTypeA Type of first vector (arma::vec, arma::sp_vec should be * expected). * @tparam VecTypeB Type of second vector (arma::vec, arma::sp_vec). * @param a First vector. * @param b Second vector. 
* @return the convolution integral value. */ template double EpanechnikovKernel::ConvolutionIntegral(const VecTypeA& a, const VecTypeB& b) { double distance = sqrt(metric::SquaredEuclideanDistance::Evaluate(a, b)); if (distance >= 2.0 * bandwidth) return 0.0; double volumeSquared = std::pow(Normalizer(a.n_rows), 2.0); switch (a.n_rows) { case 1: return 1.0 / volumeSquared * (16.0 / 15.0 * bandwidth - 4.0 * distance * distance / (3.0 * bandwidth) + 2.0 * distance * distance * distance / (3.0 * bandwidth * bandwidth) - std::pow(distance, 5.0) / (30.0 * std::pow(bandwidth, 4.0))); break; case 2: return 1.0 / volumeSquared * ((2.0 / 3.0 * bandwidth * bandwidth - distance * distance) * asin(sqrt(1.0 - std::pow(distance / (2.0 * bandwidth), 2.0))) + sqrt(4.0 * bandwidth * bandwidth - distance * distance) * (distance / 6.0 + 2.0 / 9.0 * distance * std::pow(distance / bandwidth, 2.0) - distance / 72.0 * std::pow(distance / bandwidth, 4.0))); break; default: Log::Fatal << "EpanechnikovKernel::ConvolutionIntegral(): dimension " << a.n_rows << " not supported."; return -1.0; // This line will not execute. break; } } //! Serialize the kernel. template void EpanechnikovKernel::Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(bandwidth, "bandwidth"); ar & data::CreateNVP(inverseBandwidthSquared, "inverseBandwidthSquared"); } } // namespace kernel } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/kernels/example_kernel.hpp000066400000000000000000000127731315013601400231300ustar00rootroot00000000000000/** * @file example_kernel.hpp * @author Ryan Curtin * * This is an example kernel. If you are making your own kernel, follow the * outline specified in this file. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_KERNELS_EXAMPLE_KERNEL_HPP #define MLPACK_CORE_KERNELS_EXAMPLE_KERNEL_HPP #include namespace mlpack { /** * @brief Kernel functions. * * This namespace contains kernel functions, which evaluate some kernel function * @f$ K(x, y) @f$ for some arbitrary vectors @f$ x @f$ and @f$ y @f$ of the * same dimension. The single restriction on the function @f$ K(x, y) @f$ is * that it must satisfy Mercer's condition: * * @f[ * \int \int K(x, y) g(x) g(y) dx dy \ge 0 * @f] * * for all square integrable functions @f$ g(x) @f$. * * The kernels in this namespace all implement the KernelType policy. For more * information, see \ref kernels "The KernelType policy documentation". */ namespace kernel { /** * An example kernel function. This is not a useful kernel, but it implements * the two functions necessary to satisfy the Kernel policy (so that a class can * be used whenever an mlpack method calls for a `typename Kernel` template * parameter. * * All that is necessary is a constructor and an `Evaluate()` function. More * methods could be added; for instance, one useful idea is a constructor which * takes parameters for a kernel (for instance, the width of the Gaussian for a * Gaussian kernel). However, mlpack methods cannot count on these various * constructors existing, which is why most methods allow passing an * already-instantiated kernel object (and by default the method will construct * the kernel with the default constructor). 
So, for instance, * * @code * GaussianKernel k(5.0); * KernelPCA kpca(dataset, k); * @endcode * * will set up kernel PCA using a Gaussian kernel with a width of 5.0, but * * @code * KernelPCA kpca(dataset); * @endcode * * will create the kernel with the default constructor. It is important (but * not strictly mandatory) that your default constructor still gives a working * kernel. * * @note * Not all kernels require state. For instance, the regular dot product needs * no parameters. In that case, no local variables are necessary and * `Evaluate()` can (and should) be declared static. However, for greater * generalization, mlpack methods expect all kernels to require state and hence * must store instantiated kernel functions; this is why a default constructor * is necessary. * @endnote */ class ExampleKernel { public: /** * The default constructor, which takes no parameters. Because our simple * example kernel has no internal parameters that need to be stored, the * constructor does not need to do anything. For a more complex example, see * the GaussianKernel, which stores an internal parameter. */ ExampleKernel() { } /** * Evaluates the kernel function for two given vectors. In this case, because * our simple example kernel has no internal parameters, we can declare the * function static. For a more complex example which cannot be declared * static, see the GaussianKernel, which stores an internal parameter. * * @tparam VecTypeA Type of first vector (arma::vec, arma::sp_vec should be * expected). * @tparam VecTypeB Type of second vector (arma::vec, arma::sp_vec). * @param a First vector. * @param b Second vector. * @return K(a, b). */ template static double Evaluate(const VecTypeA& /* a */, const VecTypeB& /* b */) { return 0; } /** * Serializes the kernel. In this case, the kernel has no members, so we do * not need to do anything at all. */ template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } /** * Obtains the convolution integral [integral K(||x-a||)K(||b-x||)dx] * for the two vectors. In this case, because * our simple example kernel has no internal parameters, we can declare the * function static. For a more complex example which cannot be declared * static, see the GaussianKernel, which stores an internal parameter. * * @tparam VecTypeA Type of first vector (arma::vec, arma::sp_vec should be * expected). * @tparam VecTypeB Type of second vector (arma::vec, arma::sp_vec). * @param a First vector. * @param b Second vector. * @return the convolution integral value. */ template static double ConvolutionIntegral(const VecTypeA& /* a */, const VecTypeB& /* b */) { return 0; } /** * Obtains the normalizing volume for the kernel with dimension $dimension$. * In this case, because our simple example kernel has no internal parameters, * we can declare the function static. For a more complex example which * cannot be declared static, see the GaussianKernel, which stores an internal * parameter. * * @param dimension the dimension of the space. * @return the normalization constant. 
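 *
 * (For a real kernel, the normalizer generally depends on the dimension;
 * compare the Gaussian kernel's Normalizer() below, which returns
 * @f$ (\sqrt{2 \pi} \mu)^d @f$ for bandwidth @f$ \mu @f$ in dimension
 * @f$ d @f$.)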
*/ static double Normalizer() { return 0; } // Modified to remove unused variable "dimension" //static double Normalizer(size_t dimension=1) { return 0; } }; } // namespace kernel } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/kernels/gaussian_kernel.hpp000066400000000000000000000120131315013601400232720ustar00rootroot00000000000000/** * @file gaussian_kernel.hpp * @author Wei Guan * @author James Cline * @author Ryan Curtin * * Implementation of the Gaussian kernel (GaussianKernel). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_KERNELS_GAUSSIAN_KERNEL_HPP #define MLPACK_CORE_KERNELS_GAUSSIAN_KERNEL_HPP #include #include #include namespace mlpack { namespace kernel { /** * The standard Gaussian kernel. Given two vectors @f$ x @f$, @f$ y @f$, and a * bandwidth @f$ \mu @f$ (set in the constructor), * * @f[ * K(x, y) = \exp(-\frac{|| x - y ||^2}{2 \mu^2}). * @f] * * The implementation is all in the header file because it is so simple. */ class GaussianKernel { public: /** * Default constructor; sets bandwidth to 1.0. */ GaussianKernel() : bandwidth(1.0), gamma(-0.5) { } /** * Construct the Gaussian kernel with a custom bandwidth. * * @param bandwidth The bandwidth of the kernel (@f$\mu@f$). */ GaussianKernel(const double bandwidth) : bandwidth(bandwidth), gamma(-0.5 * pow(bandwidth, -2.0)) { } /** * Evaluation of the Gaussian kernel. This could be generalized to use any * distance metric, not the Euclidean distance, but for now, the Euclidean * distance is used. * * @tparam VecType Type of vector (likely arma::vec or arma::spvec). * @param a First vector. * @param b Second vector. * @return K(a, b) using the bandwidth (@f$\mu@f$) specified in the * constructor. */ template double Evaluate(const VecTypeA& a, const VecTypeB& b) const { // The precalculation of gamma saves us a little computation time. return exp(gamma * metric::SquaredEuclideanDistance::Evaluate(a, b)); } /** * Evaluation of the Gaussian kernel given the distance between two points. * * @param t The distance between the two points the kernel is evaluated on. * @return K(t) using the bandwidth (@f$\mu@f$) specified in the * constructor. */ double Evaluate(const double t) const { // The precalculation of gamma saves us a little computation time. return exp(gamma * std::pow(t, 2.0)); } /** * Evaluation of the gradient of Gaussian kernel * given the distance between two points. * * @param t The distance between the two points the kernel is evaluated on. * @return K(t) using the bandwidth (@f$\mu@f$) specified in the * constructor. */ double Gradient(const double t) const { return 2 * t * gamma * exp(gamma * std::pow(t, 2.0)); } /** * Evaluation of the gradient of Gaussian kernel * given the squared distance between two points. * * @param t The squared distance between the two points * @return K(t) using the bandwidth (@f$\mu@f$) specified in the * constructor. */ double GradientForSquaredDistance(const double t) const { return gamma * exp(gamma * t); } /** * Obtain the normalization constant of the Gaussian kernel. * * @param dimension * @return the normalization constant */ double Normalizer(const size_t dimension) { return pow(sqrt(2.0 * M_PI) * bandwidth, (double) dimension); } /** * Obtain a convolution integral of the Gaussian kernel. * * @param a First vector. 
* @param b Second vector.
   * @return The convolution integral.
   */
  template<typename VecTypeA, typename VecTypeB>
  double ConvolutionIntegral(const VecTypeA& a, const VecTypeB& b)
  {
    return Evaluate(sqrt(metric::SquaredEuclideanDistance::Evaluate(a, b) /
        2.0)) / (Normalizer(a.n_rows) * pow(2.0, (double) a.n_rows / 2.0));
  }

  //! Get the bandwidth.
  double Bandwidth() const { return bandwidth; }

  //! Modify the bandwidth.  This takes an argument because we must update the
  //! precalculated constant (gamma).
  void Bandwidth(const double bandwidth)
  {
    this->bandwidth = bandwidth;
    this->gamma = -0.5 * pow(bandwidth, -2.0);
  }

  //! Get the precalculated constant.
  double Gamma() const { return gamma; }

  //! Serialize the kernel.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    ar & data::CreateNVP(bandwidth, "bandwidth");
    ar & data::CreateNVP(gamma, "gamma");
  }

 private:
  //! Kernel bandwidth.
  double bandwidth;

  //! Precalculated constant depending on the bandwidth;
  //! @f$ \gamma = -\frac{1}{2 \mu^2} @f$.
  double gamma;
};

//! Kernel traits for the Gaussian kernel.
template<>
class KernelTraits<GaussianKernel>
{
 public:
  //! The Gaussian kernel is normalized: K(x, x) = 1 for all x.
  static const bool IsNormalized = true;
  //! The Gaussian kernel includes a squared distance.
  static const bool UsesSquaredDistance = true;
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/kernels/hyperbolic_tangent_kernel.hpp
/**
 * @file hyperbolic_tangent_kernel.hpp
 * @author Ajinkya Kale
 *
 * Implementation of the hyperbolic tangent kernel.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_KERNELS_HYPERBOLIC_TANGENT_KERNEL_HPP
#define MLPACK_CORE_KERNELS_HYPERBOLIC_TANGENT_KERNEL_HPP

#include

namespace mlpack {
namespace kernel {

/**
 * Hyperbolic tangent kernel.  For any two vectors @f$ x @f$, @f$ y @f$ and a
 * given scale @f$ s @f$ and offset @f$ t @f$,
 *
 * @f[
 * K(x, y) = \tanh(s \langle x, y \rangle + t)
 * @f]
 */
class HyperbolicTangentKernel
{
 public:
  /**
   * This constructor sets the default scale to 1.0 and offset to 0.0.
   */
  HyperbolicTangentKernel() : scale(1.0), offset(0.0)
  { }

  /**
   * Construct the hyperbolic tangent kernel with custom scale factor and
   * offset.
   *
   * @param scale Scaling factor for the inner product @f$ \langle x, y
   *     \rangle @f$.
   * @param offset Kernel offset.
   */
  HyperbolicTangentKernel(double scale, double offset) :
      scale(scale), offset(offset)
  { }

  /**
   * Evaluate the hyperbolic tangent kernel.  This evaluation uses Armadillo's
   * dot() function.
   *
   * @tparam VecTypeA Type of first vector (should be arma::vec or
   *     arma::sp_vec).
   * @tparam VecTypeB Type of second vector (arma::vec / arma::sp_vec).
   * @param a First vector.
   * @param b Second vector.
   * @return K(a, b).
   */
  template<typename VecTypeA, typename VecTypeB>
  double Evaluate(const VecTypeA& a, const VecTypeB& b)
  {
    return tanh(scale * arma::dot(a, b) + offset);
  }

  //! Get scale factor.
  double Scale() const { return scale; }
  //! Modify scale factor.
  double& Scale() { return scale; }

  //! Get offset for the kernel.
  double Offset() const { return offset; }
  //! Modify offset for the kernel.
  double& Offset() { return offset; }

  //! Serialize the kernel.
template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    ar & data::CreateNVP(scale, "scale");
    ar & data::CreateNVP(offset, "offset");
  }

 private:
  double scale;
  double offset;
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/kernels/kernel_traits.hpp
/**
 * @file kernel_traits.hpp
 * @author Ryan Curtin
 *
 * This provides the KernelTraits class, a template class to get information
 * about various kernels.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_KERNELS_KERNEL_TRAITS_HPP
#define MLPACK_CORE_KERNELS_KERNEL_TRAITS_HPP

namespace mlpack {
namespace kernel {

/**
 * This is a template class that can provide information about various
 * kernels.  By default, this class will provide the weakest possible
 * assumptions on kernels, and each kernel should override values as
 * necessary.  If a kernel doesn't need to override a value, then there's no
 * need to write a KernelTraits specialization for that class.
 */
template<typename KernelType>
class KernelTraits
{
 public:
  /**
   * If true, then the kernel is normalized: K(x, x) = K(y, y) = 1 for all x
   * and y.
   */
  static const bool IsNormalized = false;

  /**
   * If true, then the kernel includes a squared distance, ||x - y||^2.
   */
  static const bool UsesSquaredDistance = false;
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/kernels/laplacian_kernel.hpp
/**
 * @file laplacian_kernel.hpp
 * @author Ajinkya Kale
 *
 * Implementation of the Laplacian kernel (LaplacianKernel).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_KERNELS_LAPLACIAN_KERNEL_HPP
#define MLPACK_CORE_KERNELS_LAPLACIAN_KERNEL_HPP

#include

namespace mlpack {
namespace kernel {

/**
 * The standard Laplacian kernel.  Given two vectors @f$ x @f$, @f$ y @f$, and
 * a bandwidth @f$ \mu @f$ (set in the constructor),
 *
 * @f[
 * K(x, y) = \exp(-\frac{|| x - y ||}{\mu}).
 * @f]
 *
 * The implementation is all in the header file because it is so simple.
 */
class LaplacianKernel
{
 public:
  /**
   * Default constructor; sets bandwidth to 1.0.
   */
  LaplacianKernel() : bandwidth(1.0)
  { }

  /**
   * Construct the Laplacian kernel with a custom bandwidth.
   *
   * @param bandwidth The bandwidth of the kernel (@f$\mu@f$).
   */
  LaplacianKernel(double bandwidth) : bandwidth(bandwidth)
  { }

  /**
   * Evaluation of the Laplacian kernel.  This could be generalized to use any
   * distance metric, not the Euclidean distance, but for now, the Euclidean
   * distance is used.
   *
   * @tparam VecTypeA Type of first vector (likely arma::vec or arma::sp_vec).
   * @tparam VecTypeB Type of second vector (arma::vec / arma::sp_vec).
   * @param a First vector.
   * @param b Second vector.
   * @return K(a, b) using the bandwidth (@f$\mu@f$) specified in the
   *     constructor.
   */
  template<typename VecTypeA, typename VecTypeB>
  double Evaluate(const VecTypeA& a, const VecTypeB& b) const
  {
    // Unlike the Gaussian kernel, there is no precalculated constant here;
    // the exponent is just the Euclidean distance scaled by the bandwidth.
return exp(-metric::EuclideanDistance::Evaluate(a, b) / bandwidth);
  }

  /**
   * Evaluation of the Laplacian kernel given the distance between two points.
   *
   * @param t The distance between the two points the kernel should be
   *     evaluated on.
   * @return K(t) using the bandwidth (@f$\mu@f$) specified in the
   *     constructor.
   */
  double Evaluate(const double t) const
  {
    // The kernel is a simple exponential of the distance scaled by the
    // bandwidth.
    return exp(-t / bandwidth);
  }

  /**
   * Evaluation of the gradient of the Laplacian kernel given the distance
   * between two points.
   *
   * @param t The distance between the two points the kernel should be
   *     evaluated on.
   * @return K'(t), the derivative of the kernel with respect to the distance,
   *     using the bandwidth (@f$\mu@f$) specified in the constructor.
   */
  double Gradient(const double t) const
  {
    return exp(-t / bandwidth) / -bandwidth;
  }

  //! Get the bandwidth.
  double Bandwidth() const { return bandwidth; }
  //! Modify the bandwidth.
  double& Bandwidth() { return bandwidth; }

  //! Serialize the kernel.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    ar & data::CreateNVP(bandwidth, "bandwidth");
  }

 private:
  //! Kernel bandwidth.
  double bandwidth;
};

//! Kernel traits of the Laplacian kernel.
template<>
class KernelTraits<LaplacianKernel>
{
 public:
  //! The Laplacian kernel is normalized: K(x, x) = 1 for all x.
  static const bool IsNormalized = true;
  //! The Laplacian kernel doesn't include a squared distance.
  static const bool UsesSquaredDistance = false;
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/kernels/linear_kernel.hpp
/**
 * @file linear_kernel.hpp
 * @author Wei Guan
 * @author James Cline
 * @author Ryan Curtin
 *
 * Implementation of the linear kernel (just the standard dot product).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_KERNELS_LINEAR_KERNEL_HPP
#define MLPACK_CORE_KERNELS_LINEAR_KERNEL_HPP

#include

namespace mlpack {
namespace kernel {

/**
 * The simple linear kernel (dot product).  For any two vectors @f$ x @f$ and
 * @f$ y @f$,
 *
 * @f[
 * K(x, y) = x^T y
 * @f]
 *
 * This kernel has no parameters and therefore the evaluation can be static.
 */
class LinearKernel
{
 public:
  /**
   * This constructor does nothing; the linear kernel has no parameters to
   * store.
   */
  LinearKernel() { }

  /**
   * Simple evaluation of the dot product.  This evaluation uses Armadillo's
   * dot() function.
   *
   * @tparam VecTypeA Type of first vector (should be arma::vec or
   *     arma::sp_vec).
   * @tparam VecTypeB Type of second vector (arma::vec / arma::sp_vec).
   * @param a First vector.
   * @param b Second vector.
   * @return K(a, b).
   */
  template<typename VecTypeA, typename VecTypeB>
  static double Evaluate(const VecTypeA& a, const VecTypeB& b)
  {
    return arma::dot(a, b);
  }

  //! Serialize the kernel (it has no members... do nothing).
  template<typename Archive>
  void Serialize(Archive& /* ar */, const unsigned int /* version */) { }
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/kernels/polynomial_kernel.hpp
/**
 * @file polynomial_kernel.hpp
 * @author Ajinkya Kale
 *
 * Implementation of the polynomial kernel.
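 *
 * A brief usage sketch (the vectors are arbitrary):
 *
 * @code
 * PolynomialKernel k(3.0, 1.0);      // Degree 3, offset 1.
 * arma::vec a(2), b(2);
 * a[0] = 1.0; a[1] = 2.0;
 * b[0] = 3.0; b[1] = 4.0;
 * const double v = k.Evaluate(a, b); // (11 + 1)^3 = 1728.
 * @endcode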
*
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_KERNELS_POLYNOMIAL_KERNEL_HPP
#define MLPACK_CORE_KERNELS_POLYNOMIAL_KERNEL_HPP

#include

namespace mlpack {
namespace kernel {

/**
 * The simple polynomial kernel.  For any two vectors @f$ x @f$, @f$ y @f$,
 * @f$ degree @f$ and @f$ offset @f$,
 *
 * @f[
 * K(x, y) = (x^T * y + offset) ^ {degree}.
 * @f]
 */
class PolynomialKernel
{
 public:
  /**
   * Construct the polynomial kernel with the given degree and offset.  If the
   * arguments are omitted, the default degree is 2 and the default offset is
   * 0.
   *
   * @param degree Degree of the polynomial.
   * @param offset Offset of the dot product of the arguments.
   */
  PolynomialKernel(const double degree = 2.0, const double offset = 0.0) :
      degree(degree),
      offset(offset)
  { }

  /**
   * Evaluate the polynomial kernel for the given two vectors.  The dot
   * product is computed with Armadillo's dot() function.
   *
   * @tparam VecTypeA Type of first vector (should be arma::vec or
   *     arma::sp_vec).
   * @tparam VecTypeB Type of second vector (arma::vec / arma::sp_vec).
   * @param a First vector.
   * @param b Second vector.
   * @return K(a, b).
   */
  template<typename VecTypeA, typename VecTypeB>
  double Evaluate(const VecTypeA& a, const VecTypeB& b) const
  {
    return pow((arma::dot(a, b) + offset), degree);
  }

  //! Get the degree of the polynomial.
  const double& Degree() const { return degree; }
  //! Modify the degree of the polynomial.
  double& Degree() { return degree; }

  //! Get the offset of the dot product of the arguments.
  const double& Offset() const { return offset; }
  //! Modify the offset of the dot product of the arguments.
  double& Offset() { return offset; }

  //! Serialize the kernel.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    ar & data::CreateNVP(degree, "degree");
    ar & data::CreateNVP(offset, "offset");
  }

 private:
  //! The degree of the polynomial.
  double degree;
  //! The offset of the dot product of the arguments.
  double offset;
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/kernels/pspectrum_string_kernel.cpp
/**
 * @file pspectrum_string_kernel.cpp
 * @author Ryan Curtin
 *
 * Implementation of the p-spectrum string kernel, created for use with
 * FastMKS.  Instead of passing a data matrix to FastMKS which stores the
 * kernels, pass a one-dimensional data matrix (data vector) to FastMKS which
 * stores indices of strings; then, the actual strings are given to the
 * PSpectrumStringKernel at construction time, and the kernel knows to map the
 * indices to actual strings.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "pspectrum_string_kernel.hpp"

using namespace std;
using namespace mlpack;
using namespace mlpack::kernel;

/**
 * Initialize the PSpectrumStringKernel with the given string datasets.  For
 * more information on this, see the general class documentation.
 *
 * @param datasets Sets of string data.
 * @param p The length of substrings to search.
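 *
 * For instance (a sketch; the strings are arbitrary):
 *
 * @code
 * std::vector<std::vector<std::string> > datasets(1);
 * datasets[0].push_back("hello");
 * datasets[0].push_back("yellow");
 * PSpectrumStringKernel psk(datasets, 3); // Count substrings of length 3.
 * @endcode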
*/ mlpack::kernel::PSpectrumStringKernel::PSpectrumStringKernel( const std::vector >& datasets, const size_t p) : datasets(datasets), p(p) { // We have to assemble the counts of substrings. This is not a particularly // fast operation, unfortunately, but it only needs to be done once. Log::Info << "Assembling counts of substrings of length " << p << "." << std::endl; // Resize for number of datasets. counts.resize(datasets.size()); for (size_t dataset = 0; dataset < datasets.size(); ++dataset) { const std::vector& set = datasets[dataset]; // Resize for number of strings in dataset. counts[dataset].resize(set.size()); // Inspect each string in the dataset. for (size_t index = 0; index < set.size(); ++index) { // Convenience references. const std::string& str = set[index]; std::map& mapping = counts[dataset][index]; size_t start = 0; while ((start + p) <= str.length()) { string sub = str.substr(start, p); // Convert all characters to lowercase. bool invalid = false; for (size_t j = 0; j < p; ++j) { if (!isalnum(sub[j])) { invalid = true; break; // Only consider substrings with alphanumerics. } sub[j] = tolower(sub[j]); } // Increment position in string. ++start; if (!invalid) { // Add to the map. ++mapping[sub]; } } } } Log::Info << "Substring extraction complete." << std::endl; } mlpack-2.2.5/src/mlpack/core/kernels/pspectrum_string_kernel.hpp000066400000000000000000000115651315013601400251030ustar00rootroot00000000000000/** * @file pspectrum_string_kernel.hpp * @author Ryan Curtin * * Implementation of the p-spectrum string kernel, created for use with FastMKS. * Instead of passing a data matrix to FastMKS which stores the kernels, pass a * one-dimensional data matrix (data vector) to FastMKS which stores indices of * strings; then, the actual strings are given to the PSpectrumStringKernel at * construction time, and the kernel knows to map the indices to actual strings. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_KERNELS_PSPECTRUM_STRING_KERNEL_HPP #define MLPACK_CORE_KERNELS_PSPECTRUM_STRING_KERNEL_HPP #include #include #include #include #include namespace mlpack { namespace kernel { /** * The p-spectrum string kernel. Given a length p, the p-spectrum kernel finds * the contiguous subsequence match count between two strings. The kernel will * take every possible substring of length p of one string and count how many * times it appears in the other string. * * The string kernel, when created, must be passed a reference to a series of * string datasets (std::vector >&). This is because * mlpack only supports datasets which are Armadillo matrices -- and a dataset * of variable-length strings cannot be easily cast into an Armadillo matrix. * * Therefore, once the PSpectrumStringKernel is created with a reference to the * string datasets, a "fake" Armadillo data matrix must be created, which simply * holds indices to the strings they represent. This "fake" matrix has two rows * and n columns (where n is the number of strings in the dataset). The first * row holds the index of the dataset (remember, the kernel can have multiple * datasets), and the second row holds the index of the string. 
A fake matrix * containing only strings from dataset 0 might look like this: * * [[0 0 0 0 0 0 0 0 0] * [0 1 2 3 4 5 6 7 8]] * * This fake matrix is then given to the machine learning method, which will * eventually call PSpectrumStringKernel::Evaluate(a, b), where a and b are two * columns of the fake matrix. The string kernel will then map these fake * columns back to the strings they represent, and then correctly evaluate the * kernel. * * Unfortunately, not every machine learning method will work with this kernel. * Only machine learning methods which do not ever operate on the explicit * representation of points can use this kernel. So, for instance, one cannot * build a kd-tree on strings, because the BinarySpaceTree<> class will split * the data according to the fake data matrix -- resulting in a meaningless * tree. This kernel was originally written for the FastMKS method; so, at the * very least, it will work with that. */ class PSpectrumStringKernel { public: /** * Initialize the PSpectrumStringKernel with the given string datasets. For * more information on this, see the general class documentation. * * @param datasets Sets of string data. * @param p The length of substrings to search. */ PSpectrumStringKernel(const std::vector >& datasets, const size_t p); /** * Evaluate the kernel for the string indices given. As mentioned in the * class documentation, a and b should be 2-element vectors, where the first * element contains the index of the dataset and the second element contains * the index of the string. Therefore, if [2 3] is passed for a, the string * used will be datasets[2][3] (datasets is of type * std::vector >&). * * @param a Index of string and dataset for first string. * @param b Index of string and dataset for second string. */ template double Evaluate(const VecType& a, const VecType& b) const; //! Access the lists of substrings. const std::vector > >& Counts() const { return counts; } //! Modify the lists of substrings. std::vector > >& Counts() { return counts; } //! Access the value of p. size_t P() const { return p; } //! Modify the value of p. size_t& P() { return p; } private: //! The datasets. const std::vector >& datasets; //! Mappings of the datasets to counts of substrings. Such a huge structure //! is not wonderful... std::vector > > counts; //! The value of p to use in calculation. size_t p; }; } // namespace kernel } // namespace mlpack // Include implementation of templated Evaluate(). #include "pspectrum_string_kernel_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/kernels/pspectrum_string_kernel_impl.hpp000066400000000000000000000053421315013601400261200ustar00rootroot00000000000000/** * @file pspectrum_string_kernel_impl.hpp * @author Ryan Curtin * * Implementation of the p-spectrum string kernel, created for use with FastMKS. * Instead of passing a data matrix to FastMKS which stores the kernels, pass a * one-dimensional data matrix (data vector) to FastMKS which stores indices of * strings; then, the actual strings are given to the PSpectrumStringKernel at * construction time, and the kernel knows to map the indices to actual strings. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
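 *
 * Evaluation takes index vectors into the "fake" data matrix rather than the
 * strings themselves; a sketch (assuming a kernel psk built over dataset 0
 * containing the strings "hello" and "yellow", with p = 3):
 *
 * @code
 * arma::vec a(2), b(2);
 * a[0] = 0; a[1] = 0; // Dataset 0, string 0 ("hello").
 * b[0] = 0; b[1] = 1; // Dataset 0, string 1 ("yellow").
 * const double v = psk.Evaluate(a, b); // 2.0: "ell" and "llo" are shared.
 * @endcode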
*/ #ifndef MLPACK_CORE_KERNELS_PSPECTRUM_STRING_KERNEL_IMPL_HPP #define MLPACK_CORE_KERNELS_PSPECTRUM_STRING_KERNEL_IMPL_HPP // In case it has not been included yet. #include "pspectrum_string_kernel.hpp" namespace mlpack { namespace kernel { /** * Evaluate the kernel for the string indices given. As mentioned in the class * documentation, a and b should be 2-element vectors, where the first element * contains the index of the dataset and the second element contains the index * of the string. Therefore, if [2 3] is passed for a, the string used will be * datasets[2][3] (datasets is of type std::vector >&). * * @param a Index of string and dataset for first string. * @param b Index of string and dataset for second string. */ template double PSpectrumStringKernel::Evaluate(const VecType& a, const VecType& b) const { // Get the map of substrings for the two strings we are interested in. const std::map& aMap = counts[a[0]][a[1]]; const std::map& bMap = counts[b[0]][b[1]]; double eval = 0; // Loop through the two maps (which, when iterated through, are sorted // alphabetically). std::map::const_iterator aIt = aMap.begin(); std::map::const_iterator bIt = bMap.begin(); while ((aIt != aMap.end()) && (bIt != bMap.end())) { // Compare alphabetically (this is how std::map is ordered). int result = (*aIt).first.compare((*bIt).first); if (result == 0) // The same substring. { eval += ((*aIt).second * (*bIt).second); // Now increment both. ++aIt; ++bIt; } else if (result > 0) { // aIt is "ahead" of bIt (alphabetically); so increment bIt to "catch up". ++bIt; } else { // bIt is "ahead" of aIt (alphabetically); so increment aIt to "catch up". ++aIt; } } return eval; } } // namespace kernel } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/kernels/spherical_kernel.hpp000066400000000000000000000074621315013601400234460ustar00rootroot00000000000000/** * @file spherical_kernel.hpp * @author Neil Slagle * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_KERNELS_SPHERICAL_KERNEL_HPP #define MLPACK_CORE_KERNELS_SPHERICAL_KERNEL_HPP #include #include namespace mlpack { namespace kernel { /** * The spherical kernel, which is 1 when the distance between the two argument * points is less than or equal to the bandwidth, or 0 otherwise. */ class SphericalKernel { public: /** * Construct the SphericalKernel with the given bandwidth. */ SphericalKernel(const double bandwidth = 1.0) : bandwidth(bandwidth), bandwidthSquared(std::pow(bandwidth, 2.0)) { /* Nothing to do. */ } /** * Evaluate the spherical kernel with the given two vectors. * * @tparam VecTypeA Type of first vector. * @tparam VecTypeB Type of second vector. * @param a First vector. * @param b Second vector. * @return The kernel evaluation between the two vectors. */ template double Evaluate(const VecTypeA& a, const VecTypeB& b) const { return (metric::SquaredEuclideanDistance::Evaluate(a, b) <= bandwidthSquared) ? 1.0 : 0.0; } /** * Obtains the convolution integral [integral K(||x-a||)K(||b-x||)dx] * for the two vectors. * * @tparam VecTypeA Type of first vector (arma::vec, arma::sp_vec should be * expected). * @tparam VecTypeB Type of second vector. * @param a First vector. * @param b Second vector. * @return the convolution integral value. 
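 *
 * For reference, the kernel itself is a simple indicator (a sketch):
 *
 * @code
 * SphericalKernel k(2.0);
 * k.Evaluate(1.5); // 1.0, since 1.5 <= 2.0.
 * k.Evaluate(2.5); // 0.0, since 2.5 > 2.0.
 * @endcode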
*/ template double ConvolutionIntegral(const VecTypeA& a, const VecTypeB& b) const { double distance = sqrt(metric::SquaredEuclideanDistance::Evaluate(a, b)); if (distance >= 2.0 * bandwidth) { return 0.0; } double volumeSquared = pow(Normalizer(a.n_rows), 2.0); switch(a.n_rows) { case 1: return 1.0 / volumeSquared * (2.0 * bandwidth - distance); break; case 2: return 1.0 / volumeSquared * (2.0 * bandwidth * bandwidth * acos(distance/(2.0 * bandwidth)) - distance / 4.0 * sqrt(4.0*bandwidth*bandwidth-distance*distance)); break; default: Log::Fatal << "The spherical kernel does not support convolution\ integrals above dimension two, yet..." << std::endl; return -1.0; break; } } double Normalizer(size_t dimension) const { return pow(bandwidth, (double) dimension) * pow(M_PI, dimension / 2.0) / boost::math::tgamma(dimension / 2.0 + 1.0); } /** * Evaluate the kernel when only a distance is given, not two points. * * @param t Argument to kernel. */ double Evaluate(const double t) const { return (t <= bandwidth) ? 1.0 : 0.0; } double Gradient(double t) { return t == bandwidth ? arma::datum::nan : 0.0; } //! Serialize the object. template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(bandwidth, "bandwidth"); ar & data::CreateNVP(bandwidthSquared, "bandwidthSquared"); } private: double bandwidth; double bandwidthSquared; }; //! Kernel traits for the spherical kernel. template<> class KernelTraits { public: //! The spherical kernel is normalized: K(x, x) = 1 for all x. static const bool IsNormalized = true; //! The spherical kernel doesn't include a squared distance. static const bool UsesSquaredDistance = false; }; } // namespace kernel } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/kernels/triangular_kernel.hpp000066400000000000000000000057141315013601400236420ustar00rootroot00000000000000/** * @file triangular_kernel.hpp * @author Ryan Curtin * * Definition and implementation of the trivially simple triangular kernel. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_KERNELS_TRIANGULAR_KERNEL_HPP #define MLPACK_CORE_KERNELS_TRIANGULAR_KERNEL_HPP #include #include namespace mlpack { namespace kernel { /** * The trivially simple triangular kernel, defined by * * @f[ * K(x, y) = \max \{ 0, 1 - \frac{|| x - y ||_2}{b} \} * @f] * * where \f$ b \f$ is the bandwidth of the kernel. */ class TriangularKernel { public: /** * Initialize the triangular kernel with the given bandwidth (default 1.0). * * @param bandwidth Bandwidth of the triangular kernel. */ TriangularKernel(const double bandwidth = 1.0) : bandwidth(bandwidth) { } /** * Evaluate the triangular kernel for the two given vectors. * * @tparam VecTypeA Type of first vector. * @tparam VecTypeB Type of second vector. * @param a First vector. * @param b Second vector. */ template double Evaluate(const VecTypeA& a, const VecTypeB& b) const { return std::max(0.0, (1 - metric::EuclideanDistance::Evaluate(a, b) / bandwidth)); } /** * Evaluate the triangular kernel given that the distance between the two * points is known. * * @param distance The distance between the two points. 
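 *
 * For example (a sketch), with bandwidth 2.0 and distance 0.5 this returns
 * 1 - 0.5 / 2 = 0.75, matching the two-vector overload above:
 *
 * @code
 * TriangularKernel k(2.0);
 * const double v = k.Evaluate(0.5); // 0.75.
 * @endcode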
   */
  double Evaluate(const double distance) const
  {
    return std::max(0.0, (1 - distance / bandwidth));
  }

  /**
   * Evaluate the gradient of the triangular kernel, given that the distance
   * between the two points is known.
   *
   * @param distance The distance between the two points.
   */
  double Gradient(const double distance) const
  {
    if (distance < bandwidth)
    {
      return -1.0 / bandwidth;
    }
    else if (distance > bandwidth)
    {
      return 0;
    }
    else
    {
      return arma::datum::nan;
    }
  }

  //! Get the bandwidth of the kernel.
  double Bandwidth() const { return bandwidth; }
  //! Modify the bandwidth of the kernel.
  double& Bandwidth() { return bandwidth; }

  //! Serialize the kernel.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    ar & data::CreateNVP(bandwidth, "bandwidth");
  }

 private:
  //! The bandwidth of the kernel.
  double bandwidth;
};

//! Kernel traits for the triangular kernel.
template<>
class KernelTraits<TriangularKernel>
{
 public:
  //! The triangular kernel is normalized: K(x, x) = 1 for all x.
  static const bool IsNormalized = true;
  //! The triangular kernel doesn't include a squared distance.
  static const bool UsesSquaredDistance = false;
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/math/000077500000000000000000000000001315013601400167005ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/math/CMakeLists.txt000066400000000000000000000011671315013601400214450ustar00rootroot00000000000000# Define the files we need to compile
# Anything not in this list will not be compiled into mlpack.
set(SOURCES
  clamp.hpp
  columns_to_blocks.hpp
  columns_to_blocks.cpp
  lin_alg.hpp
  lin_alg_impl.hpp
  lin_alg.cpp
  random.hpp
  random.cpp
  random_basis.hpp
  random_basis.cpp
  range.hpp
  range_impl.hpp
  round.hpp
)

# add directory name to sources
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()
# Append sources (with directory name) to list of all mlpack sources (used at
# the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)
mlpack-2.2.5/src/mlpack/core/math/clamp.hpp000066400000000000000000000033141315013601400205060ustar00rootroot00000000000000
/**
 * @file clamp.hpp
 *
 * Miscellaneous math clamping routines.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_MATH_CLAMP_HPP
#define MLPACK_CORE_MATH_CLAMP_HPP

#include <stdlib.h>
#include <math.h>
#include <float.h>

namespace mlpack {
namespace math /** Miscellaneous math routines. */ {

/**
 * Forces a number to be non-negative, turning negative numbers into zero.
 * Avoids branching costs (this is a measurable improvement).
 *
 * @param d Double to clamp.
 * @return 0 if d < 0, d otherwise.
 */
inline double ClampNonNegative(const double d)
{
  return (d + fabs(d)) / 2;
}

/**
 * Forces a number to be non-positive, turning positive numbers into zero.
 * Avoids branching costs (this is a measurable improvement).
 *
 * @param d Double to clamp.
 * @return 0 if d > 0, d otherwise.
 */
inline double ClampNonPositive(const double d)
{
  return (d - fabs(d)) / 2;
}

/**
 * Clamp a number between a particular range.
 *
 * @param value The number to clamp.
 * @param rangeMin The first of the range.
 * @param rangeMax The last of the range.
 * @return max(rangeMin, min(rangeMax, d)).
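 *
 * For example (illustrative values):
 *
 * @code
 * ClampRange(11.0, 0.0, 10.0); // Yields 10.0.
 * ClampRange(-0.5, 0.0, 10.0); // Yields 0.0.
 * ClampRange(5.0, 0.0, 10.0);  // Yields 5.0.
 * @endcode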
*/ inline double ClampRange(double value, const double rangeMin, const double rangeMax) { value -= rangeMax; value = ClampNonPositive(value) + rangeMax; value -= rangeMin; value = ClampNonNegative(value) + rangeMin; return value; } } // namespace math } // namespace mlpack #endif // MLPACK_CORE_MATH_CLAMP_HPP mlpack-2.2.5/src/mlpack/core/math/columns_to_blocks.cpp000066400000000000000000000053561315013601400231340ustar00rootroot00000000000000/** * @file columns_to_blocks.cpp * @author Tham Ngap Wei * * Implementation of the ColumnsToBlocks class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "columns_to_blocks.hpp" namespace mlpack { namespace math { ColumnsToBlocks::ColumnsToBlocks(const size_t rows, const size_t cols, const size_t blockHeight, const size_t blockWidth) : blockHeight(blockHeight), blockWidth(blockWidth), bufSize(1), bufValue(-1), minRange(0), maxRange(255), scale(false), rows(rows), cols(cols) { } bool ColumnsToBlocks::IsPerfectSquare(const size_t value) const { const size_t root = (size_t) std::round(std::sqrt(value)); return (value == root * root); } void ColumnsToBlocks::Transform(const arma::mat& maximalInputs, arma::mat& output) { if (!IsPerfectSquare(maximalInputs.n_rows)) { throw std::runtime_error("maximalInputs.n_rows should be perfect square"); } if (blockHeight == 0 || blockWidth == 0) { size_t const squareRows = static_cast(std::sqrt(maximalInputs.n_rows)); blockHeight = squareRows; blockWidth = squareRows; } if (blockHeight * blockWidth != maximalInputs.n_rows) { throw std::runtime_error("blockHeight * blockWidth should " "equal to maximalInputs.n_rows"); } const size_t rowOffset = blockHeight+bufSize; const size_t colOffset = blockWidth+bufSize; output.ones(bufSize + rows * rowOffset, bufSize + cols * colOffset); output *= bufValue; size_t k = 0; const size_t maxSize = std::min(rows * cols, (size_t) maximalInputs.n_cols); for (size_t i = 0; i != rows; ++i) { for (size_t j = 0; j != cols; ++j) { // Now, copy the elements of the row to the output submatrix. const size_t minRow = bufSize + i * rowOffset; const size_t minCol = bufSize + j * colOffset; const size_t maxRow = i * rowOffset + blockHeight; const size_t maxCol = j * colOffset + blockWidth; output.submat(minRow, minCol, maxRow, maxCol) = arma::reshape(maximalInputs.col(k++), blockHeight, blockWidth); if (k >= maxSize) break; } } if (scale) { const double max = output.max(); const double min = output.min(); if ((max - min) != 0) { output = (output - min) / (max - min) * (maxRange - minRange) + minRange; } } } } // namespace math } // namespace mlpack mlpack-2.2.5/src/mlpack/core/math/columns_to_blocks.hpp000066400000000000000000000205571315013601400231410ustar00rootroot00000000000000/** * @file columns_to_blocks.hpp * @author Tham Ngap Wei * * A helper class that could be useful for visualizing the output of * MaximalInputs() and possibly other things. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #ifndef MLPACK_METHODS_NN_COLUMNS_TO_BLOCKS_HPP #define MLPACK_METHODS_NN_COLUMNS_TO_BLOCKS_HPP #include namespace mlpack { namespace math { /** * Transform the columns of the given matrix into a block format. This could be * useful with the mlpack::nn::MaximalInputs() function, if your training * samples are images. Roughly speaking, given a matrix * * [[A] * [B] * [C] * [D]] * * then the ColumnsToBlocks class can transform this to something like * * [[m m m m m] * [m A m B m] * [m m m m m] * [m C m D m] * [m m m m m]] * * where A through D are vectors and may themselves be reshaped by * ColumnsToBlocks. * * An example usage of the ColumnsToBlocks class with the output of * MaximalInputs() is given below; this assumes that the images are square, and * will return a matrix with a one-element margin, with each maximal input (that * is, each column of the maximalInput matrix) as a square block in the output * matrix. 5 rows and columns of blocks will be in the output matrix. * * @code * // We assume we have a sparse autoencoder 'encoder'. * arma::mat maximalInput; // Store the features learned by sparse autoencoder * mlpack::nn::MaximalInputs(encoder.Parameters(), maximalInput); * * arma::mat outputs; * const bool scale = true; * * ColumnsToBlocks ctb(5, 5); * arma::mat output; * ctb.Transform(maximalInput, output); * // You can save the output as a pgm, this may help you visualize the training * // results. * output.save(fileName, arma::pgm_binary); * @endcode * * Another example of usage is given below, on a sample matrix. * * @code * // This matrix has two columns. * arma::mat input; * input << -1.0000 << 0.1429 << arma::endr * << -0.7143 << 0.4286 << arma::endr * << -0.4286 << 0.7143 << arma::endr * << -0.1429 << 1.0000 << arma::endr; * * arma::mat output; * ColumnsToBlocks ctb(1, 2); * ctb.Transform(input, output); * * // The columns of the input will be reshaped as a square which is * // surrounded by padding value -1 (this value could be changed with the * // BufValue() method): * // -1.0000 -1.0000 -1.0000 -1.0000 -1.0000 -1.0000 -1.0000 * // -1.0000 -1.0000 -0.4286 -1.0000 0.1429 0.7143 -1.0000 * // -1.0000 -0.7143 -0.1429 -1.0000 0.4286 1.0000 -1.0000 * // -1.0000 -1.0000 -1.0000 -1.0000 -1.0000 -1.0000 -1.0000 * * // Now, let's change some parameters; let's have each input column output not * // as a square, but as a 4x1 vector. * ctb.BlockWidth(1); * ctb.BlockHeight(4); * ctb.Transform(input, output); * * // The output here will be similar, but each maximal input is 4x1: * // -1.0000 -1.0000 -1.0000 -1.0000 -1.0000 * // -1.0000 -1.0000 -1.0000 0.1429 -1.0000 * // -1.0000 -0.7143 -1.0000 0.4286 -1.0000 * // -1.0000 -0.4286 -1.0000 0.7143 -1.0000 * // -1.0000 -0.1429 -1.0000 1.0000 -1.0000 * // -1.0000 -1.0000 -1.0000 -1.0000 -1.0000 * @endcode * * The ColumnsToBlocks class can also, depending on the parameters, scale the * input to a given range (useful for exporting to PGM, for instance), and also * set the buffer size and value. See the Scale(), MinRange(), MaxRange(), * BufSize(), and BufValue() methods for more details. */ class ColumnsToBlocks { public: /** * Constructor a ColumnsToBlocks object with the given parameters. The rows * and cols parameters control the number of blocks per row and column of the * output matrix, respectively, and the blockHeight and blockWidth parameters * control the size of the individual blocks. 
If blockHeight and blockWidth * are specified, then (blockHeight * blockWidth) must be equal to the number * of rows in the input matrix when Transform() is called. If blockHeight and * blockWidth are not specified, then the square root of the number of rows of * the input matrix will be taken when Transform() is called and that will be * used as the block width and height. * * Note that the ColumnsToBlocks object can also scale the inputs to a given * range; see Scale(), MinRange(), and MaxRange(), and the buffer (margin) * size can also be set with BufSize(), and the value used for the buffer can * be set with BufValue(). * * @param rows Number of blocks in each column of the output matrix. * @param cols Number of blocks in each row of the output matrix. * @param blockHeight Height of each block. * @param blockWidth Width of each block. * * @warning blockHeight * blockWidth must be equal to maximalInputs.n_rows. */ ColumnsToBlocks(size_t rows, size_t cols, size_t blockHeight = 0, size_t blockWidth = 0); /** * Transform the columns of the input matrix into blocks. If blockHeight and * blockWidth were not specified in the constructor (and BlockHeight() and * BlockWidth() were not called), then the number of rows in the input matrix * must be a perfect square. * * @param input Input matrix to transform. * @param output Matrix to store transformed output in. */ void Transform(const arma::mat& maximalInputs, arma::mat& output); //! Set the height of each block; see the constructor for more details. void BlockHeight(const size_t value) { blockHeight = value; } //! Get the block height. size_t BlockHeight() const { return blockHeight; } //! Set the width of each block; see the constructor for more details. void BlockWidth(size_t value) { blockWidth = value; } //! Get the block width. size_t BlockWidth() const { return blockWidth; } //! Modify the buffer size (the size of the margin around each column of the //! input). The default value is 1. void BufSize(const size_t value) { bufSize = value; } //! Get the buffer size. size_t BufSize() const { return bufSize; } //! Modify the value used for buffer cells; the default is -1. void BufValue(const double value) { bufValue = value; } //! Get the value used for buffer cells. double BufValue() const { return bufValue; } //! Set the maximum of the range the input will be scaled to, if scaling is //! enabled (see Scale()). void MaxRange(const double value) { maxRange = value; } //! Get the maximum of the range the input will be scaled to, if scaling is //! enabled (see Scale()). double MaxRange() const { return maxRange; } //! Set the minimum of the range the input will be scaled to, if scaling is //! enabled (see Scale()). void MinRange(const double value) { minRange = value; } //! Get the minimum of the range the input will be scaled to, if scaling is //! enabled (see Scale()). double MinRange() const { return minRange; } //! Set whether or not scaling is enabled (see also MaxRange() and //! MinRange()). void Scale(const bool value) { scale = value; } //! Get whether or not scaling is enabled (see also MaxRange() and //! MinRange()). bool Scale() const { return scale; } //! Set the number of blocks per row. void Rows(const size_t value) { rows = value; } //! Modify the number of blocks per row. size_t Rows() const { return rows; } //! Set the number of blocks per column. void Cols(const size_t value) { cols = value; } //! Return the number of blocks per column. size_t Cols() const { return cols; } private: //! 
Determine whether or not the number is a perfect square. bool IsPerfectSquare(size_t value) const; //! The height of each block. size_t blockHeight; //! The width of each block. size_t blockWidth; //! The size of the buffer around each block. size_t bufSize; //! The value of the buffer around each block. double bufValue; //! The minimum of the range to be scaled to (if scaling is enabled). double minRange; //! The maximum of the range to be scaled to (if scaling is enabled). double maxRange; //! Whether or not scaling is enabled. bool scale; //! The number of blocks in each row. size_t rows; //! The number of blocks in each column. size_t cols; }; } // namespace math } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/math/lin_alg.cpp000066400000000000000000000174421315013601400210210ustar00rootroot00000000000000/** * @file lin_alg.cpp * @author Nishant Mehta * * Linear algebra utilities. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "lin_alg.hpp" #include #include using namespace mlpack; using namespace math; /** * Auxiliary function to raise vector elements to a specific power. The sign * is ignored in the power operation and then re-added. Useful for * eigenvalues. */ void mlpack::math::VectorPower(arma::vec& vec, const double power) { for (size_t i = 0; i < vec.n_elem; i++) { if (std::abs(vec(i)) > 1e-12) vec(i) = (vec(i) > 0) ? std::pow(vec(i), (double) power) : -std::pow(-vec(i), (double) power); else vec(i) = 0; } } /** * Creates a centered matrix, where centering is done by subtracting * the sum over the columns (a column vector) from each column of the matrix. * * @param x Input matrix * @param xCentered Matrix to write centered output into */ void mlpack::math::Center(const arma::mat& x, arma::mat& xCentered) { // Get the mean of the elements in each row. arma::vec rowMean = arma::sum(x, 1) / x.n_cols; xCentered = x - arma::repmat(rowMean, 1, x.n_cols); } /** * Whitens a matrix using the singular value decomposition of the covariance * matrix. Whitening means the covariance matrix of the result is the identity * matrix. */ void mlpack::math::WhitenUsingSVD(const arma::mat& x, arma::mat& xWhitened, arma::mat& whiteningMatrix) { arma::mat covX, u, v, invSMatrix, temp1; arma::vec sVector; covX = ccov(x); svd(u, sVector, v, covX); size_t d = sVector.n_elem; invSMatrix.zeros(d, d); invSMatrix.diag() = 1 / sqrt(sVector); whiteningMatrix = v * invSMatrix * trans(u); xWhitened = whiteningMatrix * x; } /** * Whitens a matrix using the eigendecomposition of the covariance matrix. * Whitening means the covariance matrix of the result is the identity matrix. */ void mlpack::math::WhitenUsingEig(const arma::mat& x, arma::mat& xWhitened, arma::mat& whiteningMatrix) { arma::mat diag, eigenvectors; arma::vec eigenvalues; // Get eigenvectors of covariance of input matrix. eig_sym(eigenvalues, eigenvectors, ccov(x)); // Generate diagonal matrix using 1 / sqrt(eigenvalues) for each value. VectorPower(eigenvalues, -0.5); diag.zeros(eigenvalues.n_elem, eigenvalues.n_elem); diag.diag() = eigenvalues; // Our whitening matrix is diag(1 / sqrt(eigenvectors)) * eigenvalues. whiteningMatrix = diag * trans(eigenvectors); // Now apply the whitening matrix. 
xWhitened = whiteningMatrix * x; } /** * Overwrites a dimension-N vector to a random vector on the unit sphere in R^N. */ void mlpack::math::RandVector(arma::vec& v) { v.zeros(); for (size_t i = 0; i + 1 < v.n_elem; i += 2) { double a = Random(); double b = Random(); double first_term = sqrt(-2 * log(a)); double second_term = 2 * M_PI * b; v[i] = first_term * cos(second_term); v[i + 1] = first_term * sin(second_term); } if ((v.n_elem % 2) == 1) { v[v.n_elem - 1] = sqrt(-2 * log(math::Random())) * cos(2 * M_PI * math::Random()); } v /= sqrt(dot(v, v)); } /** * Orthogonalize x and return the result in W, using eigendecomposition. * We will be using the formula \f$ W = x (x^T x)^{-0.5} \f$. */ void mlpack::math::Orthogonalize(const arma::mat& x, arma::mat& W) { // For a matrix A, A^N = V * D^N * V', where VDV' is the // eigendecomposition of the matrix A. arma::mat eigenvalues, eigenvectors; arma::vec egval; eig_sym(egval, eigenvectors, ccov(x)); VectorPower(egval, -0.5); eigenvalues.zeros(egval.n_elem, egval.n_elem); eigenvalues.diag() = egval; arma::mat at = (eigenvectors * eigenvalues * trans(eigenvectors)); W = at * x; } /** * Orthogonalize x in-place. This could be sped up by a custom * implementation. */ void mlpack::math::Orthogonalize(arma::mat& x) { Orthogonalize(x, x); } /** * Remove a certain set of rows in a matrix while copying to a second matrix. * * @param input Input matrix to copy. * @param rowsToRemove Vector containing indices of rows to be removed. * @param output Matrix to copy non-removed rows into. */ void mlpack::math::RemoveRows(const arma::mat& input, const std::vector& rowsToRemove, arma::mat& output) { const size_t nRemove = rowsToRemove.size(); const size_t nKeep = input.n_rows - nRemove; if (nRemove == 0) { output = input; // Copy everything. } else { output.set_size(nKeep, input.n_cols); size_t curRow = 0; size_t removeInd = 0; // First, check 0 to first row to remove. if (rowsToRemove[0] > 0) { // Note that this implies that n_rows > 1. output.rows(0, rowsToRemove[0] - 1) = input.rows(0, rowsToRemove[0] - 1); curRow += rowsToRemove[0]; } // Now, check i'th row to remove to (i + 1)'th row to remove, until i is the // penultimate row. while (removeInd < nRemove - 1) { const size_t height = rowsToRemove[removeInd + 1] - rowsToRemove[removeInd] - 1; if (height > 0) { output.rows(curRow, curRow + height - 1) = input.rows(rowsToRemove[removeInd] + 1, rowsToRemove[removeInd + 1] - 1); curRow += height; } removeInd++; } // Now that i is the last row to remove, check last row to remove to last // row. 
if (rowsToRemove[removeInd] < input.n_rows - 1) { output.rows(curRow, nKeep - 1) = input.rows(rowsToRemove[removeInd] + 1, input.n_rows - 1); } } } void mlpack::math::Svec(const arma::mat& input, arma::vec& output) { const size_t n = input.n_rows; const size_t n2bar = n * (n + 1) / 2; output.zeros(n2bar); size_t idx = 0; for (size_t i = 0; i < n; i++) { for (size_t j = i; j < n; j++) { if (i == j) output(idx++) = input(i, j); else output(idx++) = M_SQRT2 * input(i, j); } } } void mlpack::math::Svec(const arma::sp_mat& input, arma::sp_vec& output) { const size_t n = input.n_rows; const size_t n2bar = n * (n + 1) / 2; output.zeros(n2bar, 1); for (auto it = input.begin(); it != input.end(); ++it) { const size_t i = it.row(); const size_t j = it.col(); if (i > j) continue; if (i == j) output(SvecIndex(i, j, n)) = *it; else output(SvecIndex(i, j, n)) = M_SQRT2 * (*it); } } void mlpack::math::Smat(const arma::vec& input, arma::mat& output) { const size_t n = static_cast(ceil((-1. + sqrt(1. + 8. * input.n_elem))/2.)); output.zeros(n, n); size_t idx = 0; for (size_t i = 0; i < n; i++) { for (size_t j = i; j < n; j++) { if (i == j) output(i, j) = input(idx++); else output(i, j) = output(j, i) = M_SQRT1_2 * input(idx++); } } } void mlpack::math::SymKronId(const arma::mat& A, arma::mat& op) { // TODO(stephentu): there's probably an easier way to build this operator const size_t n = A.n_rows; const size_t n2bar = n * (n + 1) / 2; op.zeros(n2bar, n2bar); size_t idx = 0; for (size_t i = 0; i < n; i++) { for (size_t j = i; j < n; j++) { for (size_t k = 0; k < n; k++) { op(idx, SvecIndex(k, j, n)) += ((k == j) ? 1. : M_SQRT1_2) * A(i, k); op(idx, SvecIndex(i, k, n)) += ((k == i) ? 1. : M_SQRT1_2) * A(k, j); } op.row(idx) *= 0.5; if (i != j) op.row(idx) *= M_SQRT2; idx++; } } } mlpack-2.2.5/src/mlpack/core/math/lin_alg.hpp000066400000000000000000000073131315013601400210220ustar00rootroot00000000000000/** * @file lin_alg.hpp * @author Nishant Mehta * * Linear algebra utilities. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_MATH_LIN_ALG_HPP #define MLPACK_CORE_MATH_LIN_ALG_HPP #include /** * Linear algebra utility functions, generally performed on matrices or vectors. */ namespace mlpack { namespace math { /** * Auxiliary function to raise vector elements to a specific power. The sign * is ignored in the power operation and then re-added. Useful for * eigenvalues. */ void VectorPower(arma::vec& vec, const double power); /** * Creates a centered matrix, where centering is done by subtracting * the sum over the columns (a column vector) from each column of the matrix. * * @param x Input matrix * @param xCentered Matrix to write centered output into */ void Center(const arma::mat& x, arma::mat& xCentered); /** * Whitens a matrix using the singular value decomposition of the covariance * matrix. Whitening means the covariance matrix of the result is the identity * matrix. */ void WhitenUsingSVD(const arma::mat& x, arma::mat& xWhitened, arma::mat& whiteningMatrix); /** * Whitens a matrix using the eigendecomposition of the covariance matrix. * Whitening means the covariance matrix of the result is the identity matrix. 
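 *
 * A short usage sketch (the random data matrix is a stand-in; any
 * column-major data matrix works):
 *
 * @code
 * arma::mat x = arma::randu<arma::mat>(3, 100); // 3 dimensions, 100 points.
 * arma::mat xCentered, xWhitened, whitening;
 * mlpack::math::Center(x, xCentered);
 * mlpack::math::WhitenUsingEig(xCentered, xWhitened, whitening);
 * // The covariance of xWhitened is now approximately the identity matrix.
 * @endcode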
*/ void WhitenUsingEig(const arma::mat& x, arma::mat& xWhitened, arma::mat& whiteningMatrix); /** * Overwrites a dimension-N vector to a random vector on the unit sphere in R^N. */ void RandVector(arma::vec& v); /** * Orthogonalize x and return the result in W, using eigendecomposition. * We will be using the formula \f$ W = x (x^T x)^{-0.5} \f$. */ void Orthogonalize(const arma::mat& x, arma::mat& W); /** * Orthogonalize x in-place. This could be sped up by a custom * implementation. */ void Orthogonalize(arma::mat& x); /** * Remove a certain set of rows in a matrix while copying to a second matrix. * * @param input Input matrix to copy. * @param rowsToRemove Vector containing indices of rows to be removed. * @param output Matrix to copy non-removed rows into. */ void RemoveRows(const arma::mat& input, const std::vector& rowsToRemove, arma::mat& output); /** * Upper triangular representation of a symmetric matrix, scaled such that, * dot(Svec(A), Svec(B)) == dot(A, B) for symmetric A, B. Specifically, * * Svec(K) = [ K_11, sqrt(2) K_12, ..., sqrt(2) K_1n, K_22, ..., sqrt(2) K_2n, ..., K_nn ]^T * * @param input A symmetric matrix * @param output */ void Svec(const arma::mat& input, arma::vec& output); void Svec(const arma::sp_mat& input, arma::sp_vec& output); /** * The inverse of Svec. That is, Smat(Svec(A)) == A. * * @param input * @param output A symmetric matrix */ void Smat(const arma::vec& input, arma::mat& output); /** * Return the index such that A[i,j] == factr(i, j) * svec(A)[pos(i, j)], * where factr(i, j) = sqrt(2) if i != j and 1 otherwise. * * @param i * @param j * @param n */ inline size_t SvecIndex(size_t i, size_t j, size_t n); /** * If A is a symmetric matrix, then SymKronId returns an operator Op such that * * Op * svec(X) == svec(0.5 * (AX + XA)) * * for every symmetric matrix X * * @param A * @param op */ void SymKronId(const arma::mat& A, arma::mat& op); } // namespace math } // namespace mlpack // Partially include implementation #include "lin_alg_impl.hpp" #endif // MLPACK_CORE_MATH_LIN_ALG_HPP mlpack-2.2.5/src/mlpack/core/math/lin_alg_impl.hpp000066400000000000000000000012501315013601400220350ustar00rootroot00000000000000/** * @file lin_alg_impl.hpp * @author Stephen Tu * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_MATH_LIN_ALG_IMPL_HPP #define MLPACK_CORE_MATH_LIN_ALG_IMPL_HPP #include "lin_alg.hpp" namespace mlpack { namespace math { inline size_t SvecIndex(size_t i, size_t j, size_t n) { if (i > j) std::swap(i, j); return (j-i) + (n*(n+1) - (n-i)*(n-i+1))/2; } } // namespace math } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/math/random.cpp000066400000000000000000000014041315013601400206630ustar00rootroot00000000000000/** * @file random.cpp * * Declarations of global random number generators. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include namespace mlpack { namespace math { // Global random object. MLPACK_EXPORT std::mt19937 randGen; // Global uniform distribution. 
MLPACK_EXPORT std::uniform_real_distribution<> randUniformDist(0.0, 1.0); // Global normal distribution. MLPACK_EXPORT std::normal_distribution<> randNormalDist(0.0, 1.0); } // namespace math } // namespace mlpack mlpack-2.2.5/src/mlpack/core/math/random.hpp000066400000000000000000000076701315013601400207030ustar00rootroot00000000000000/** * @file random.hpp * * Miscellaneous math random-related routines. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_MATH_RANDOM_HPP #define MLPACK_CORE_MATH_RANDOM_HPP #include #include #include namespace mlpack { namespace math /** Miscellaneous math routines. */ { /** * MLPACK_EXPORT is required for global variables; it exports the symbols * correctly on Windows. */ // Global random object. extern MLPACK_EXPORT std::mt19937 randGen; // Global uniform distribution. extern MLPACK_EXPORT std::uniform_real_distribution<> randUniformDist; // Global normal distribution. extern MLPACK_EXPORT std::normal_distribution<> randNormalDist; /** * Set the random seed used by the random functions (Random() and RandInt()). * The seed is casted to a 32-bit integer before being given to the random * number generator, but a size_t is taken as a parameter for API consistency. * * @param seed Seed for the random number generator. */ inline void RandomSeed(const size_t seed) { randGen.seed((uint32_t) seed); srand((unsigned int) seed); #if ARMA_VERSION_MAJOR > 3 || \ (ARMA_VERSION_MAJOR == 3 && ARMA_VERSION_MINOR >= 930) // Armadillo >= 3.930 has its own random number generator internally that we // need to set the seed for also. arma::arma_rng::set_seed(seed); #endif } /** * Generates a uniform random number between 0 and 1. */ inline double Random() { return randUniformDist(randGen); } /** * Generates a uniform random number in the specified range. */ inline double Random(const double lo, const double hi) { return lo + (hi - lo) * randUniformDist(randGen); } /** * Generates a uniform random integer. */ inline int RandInt(const int hiExclusive) { return (int) std::floor((double) hiExclusive * randUniformDist(randGen)); } /** * Generates a uniform random integer. */ inline int RandInt(const int lo, const int hiExclusive) { return lo + (int) std::floor((double) (hiExclusive - lo) * randUniformDist(randGen)); } /** * Generates a normally distributed random number with mean 0 and variance 1. */ inline double RandNormal() { return randNormalDist(randGen); } /** * Generates a normally distributed random number with specified mean and * variance. * * @param mean Mean of distribution. * @param variance Variance of distribution. */ inline double RandNormal(const double mean, const double variance) { return variance * randNormalDist(randGen) + mean; } /** * Obtains no more than maxNumSamples distinct samples. Each sample belongs to * [loInclusive, hiExclusive). * * @param loInclusive The lower bound (inclusive). * @param hiExclusive The high bound (exclusive). * @param maxNumSamples The maximum number of samples to obtain. * @param distinctSamples The samples that will be obtained. 
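 *
 * For example (illustrative values):
 *
 * @code
 * arma::uvec samples;
 * // Draw at most 10 distinct samples from the range [100, 200).
 * math::ObtainDistinctSamples(100, 200, 10, samples);
 * @endcode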
*/ inline void ObtainDistinctSamples(const size_t loInclusive, const size_t hiExclusive, const size_t maxNumSamples, arma::uvec& distinctSamples) { const size_t samplesRangeSize = hiExclusive - loInclusive; if (samplesRangeSize > maxNumSamples) { arma::Col samples; samples.zeros(samplesRangeSize); for (size_t i = 0; i < maxNumSamples; i++) samples [ (size_t) math::RandInt(samplesRangeSize) ]++; distinctSamples = arma::find(samples > 0); if (loInclusive > 0) distinctSamples += loInclusive; } else { distinctSamples.set_size(samplesRangeSize); for (size_t i = 0; i < samplesRangeSize; i++) distinctSamples[i] = loInclusive + i; } } } // namespace math } // namespace mlpack #endif // MLPACK_CORE_MATH_MATH_LIB_HPP mlpack-2.2.5/src/mlpack/core/math/random_basis.cpp000066400000000000000000000020701315013601400220440ustar00rootroot00000000000000/** * @file random_basis.cpp * @author Ryan Curtin * * Generate a random d-dimensional basis. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "random_basis.hpp" using namespace arma; namespace mlpack { namespace math { void RandomBasis(mat& basis, const size_t d) { while(true) { // [Q, R] = qr(randn(d, d)); // Q = Q * diag(sign(diag(R))); mat r; if (qr(basis, r, randn(d, d))) { vec rDiag(r.n_rows); for (size_t i = 0; i < rDiag.n_elem; ++i) { if (r(i, i) < 0) rDiag(i) = -1; else if (r(i, i) > 0) rDiag(i) = 1; else rDiag(i) = 0; } basis *= diagmat(rDiag); // Check if the determinant is positive. if (det(basis) >= 0) break; } } } } // namespace math } // namespace mlpack mlpack-2.2.5/src/mlpack/core/math/random_basis.hpp000066400000000000000000000015111315013601400220500ustar00rootroot00000000000000/** * @file random_basis.hpp * @author Ryan Curtin * * Generate a random d-dimensional basis. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_MATH_RANDOM_BASIS_HPP #define MLPACK_CORE_MATH_RANDOM_BASIS_HPP #include namespace mlpack { namespace math { /** * Create a random d-dimensional orthogonal basis, storing it in the given * matrix. * * @param basis Matrix to store basis in. * @param d Desired number of dimensions in the basis. */ void RandomBasis(arma::mat& basis, const size_t d); } // namespace math } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/math/range.hpp000066400000000000000000000110221315013601400205010ustar00rootroot00000000000000/** * @file range.hpp * * Definition of the Range class, which represents a simple range with a lower * and upper bound. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_MATH_RANGE_HPP #define MLPACK_CORE_MATH_RANGE_HPP namespace mlpack { namespace math { template class RangeType; //! 3.0.0 TODO: break reverse-compatibility by changing RangeType to Range. typedef RangeType Range; /** * Simple real-valued range. It contains an upper and lower bound. 
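 *
 * A small usage sketch (values are illustrative):
 *
 * @code
 * math::Range r(0.0, 2.0);         // The range [0.0, 2.0].
 * r |= math::Range(1.0, 3.0);      // Now r is [0.0, 3.0].
 * const bool in = r.Contains(2.5); // true
 * @endcode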
 *
 * Note that until mlpack 3.0.0, this class is named RangeType<> and for the
 * specification where T is double, you can use math::Range.  As of mlpack
 * 3.0.0, this class will be renamed math::Range<>.
 *
 * @tparam T type of element held by this range.
 */
template<typename T>
class RangeType
{
 private:
  T lo; /// The lower bound.
  T hi; /// The upper bound.

 public:
  /** Initialize to an empty set (where lo > hi). */
  inline RangeType();

  /**
   * Initialize a range to enclose only the given point (lo = point, hi =
   * point).
   *
   * @param point Point that this range will enclose.
   */
  inline RangeType(const T point);

  /**
   * Initializes to specified range.
   *
   * @param lo Lower bound of the range.
   * @param hi Upper bound of the range.
   */
  inline RangeType(const T lo, const T hi);

  //! Get the lower bound.
  inline T Lo() const { return lo; }
  //! Modify the lower bound.
  inline T& Lo() { return lo; }

  //! Get the upper bound.
  inline T Hi() const { return hi; }
  //! Modify the upper bound.
  inline T& Hi() { return hi; }

  /**
   * Gets the span of the range (hi - lo).
   */
  inline T Width() const;

  /**
   * Gets the midpoint of this range.
   */
  inline T Mid() const;

  /**
   * Expands this range to include another range.
   *
   * @param rhs Range to include.
   */
  inline RangeType& operator|=(const RangeType& rhs);

  /**
   * Expands this range to include another range.
   *
   * @param rhs Range to include.
   */
  inline RangeType operator|(const RangeType& rhs) const;

  /**
   * Shrinks this range to be the overlap with another range; this makes an
   * empty set if there is no overlap.
   *
   * @param rhs Other range.
   */
  inline RangeType& operator&=(const RangeType& rhs);

  /**
   * Shrinks this range to be the overlap with another range; this makes an
   * empty set if there is no overlap.
   *
   * @param rhs Other range.
   */
  inline RangeType operator&(const RangeType& rhs) const;

  /**
   * Scale the bounds by the given double.
   *
   * @param d Scaling factor.
   */
  inline RangeType& operator*=(const T d);

  /**
   * Scale the bounds by the given double.
   *
   * @param d Scaling factor.
   */
  inline RangeType operator*(const T d) const;

  /**
   * Scale the bounds by the given double.
   *
   * @param d Scaling factor.
   */
  template<typename TT>
  friend inline RangeType<TT> operator*(const TT d, const RangeType<TT>& r);

  /**
   * Compare with another range for strict equality.
   *
   * @param rhs Other range.
   */
  inline bool operator==(const RangeType& rhs) const;

  /**
   * Compare with another range for strict equality.
   *
   * @param rhs Other range.
   */
  inline bool operator!=(const RangeType& rhs) const;

  /**
   * Compare with another range.  For Range objects x and y, x < y means that x
   * is strictly less than y and does not overlap at all.
   *
   * @param rhs Other range.
   */
  inline bool operator<(const RangeType& rhs) const;

  /**
   * Compare with another range.  For Range objects x and y, x > y means that x
   * is strictly greater than y and does not overlap at all.
   *
   * @param rhs Other range.
   */
  inline bool operator>(const RangeType& rhs) const;

  /**
   * Determines if a point is contained within the range.
   *
   * @param d Point to check.
   */
  inline bool Contains(const T d) const;

  /**
   * Determines if another range overlaps with this one.
   *
   * @param r Other range.
   *
   * @return true if ranges overlap at all.
   */
  inline bool Contains(const RangeType& r) const;

  /**
   * Serialize the range object.
   */
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int version);
};

} // namespace math
} // namespace mlpack

// Include inlined implementation.
#include "range_impl.hpp" #endif // MLPACK_CORE_MATH_RANGE_HPP mlpack-2.2.5/src/mlpack/core/math/range_impl.hpp000066400000000000000000000107071315013601400215330ustar00rootroot00000000000000/** * @file range_impl.hpp * * Implementation of the (inlined) Range class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_MATH_RANGE_IMPL_HPP #define MLPACK_CORE_MATH_RANGE_IMPL_HPP #include "range.hpp" #include #include namespace mlpack { namespace math { /** * Initialize the range to 0. */ template inline RangeType::RangeType() : lo(std::numeric_limits::max()), hi(-std::numeric_limits::max()) { /* nothing else to do */ } /** * Initialize a range to enclose only the given point. */ template inline RangeType::RangeType(const T point) : lo(point), hi(point) { /* nothing else to do */ } /** * Initializes the range to the specified values. */ template inline RangeType::RangeType(const T lo, const T hi) : lo(lo), hi(hi) { /* nothing else to do */ } /** * Gets the span of the range, hi - lo. Returns 0 if the range is negative. */ template inline T RangeType::Width() const { if (lo < hi) return (hi - lo); else return 0.0; } /** * Gets the midpoint of this range. */ template inline T RangeType::Mid() const { return (hi + lo) / 2; } /** * Expands range to include the other range. */ template inline RangeType& RangeType::operator|=(const RangeType& rhs) { if (rhs.lo < lo) lo = rhs.lo; if (rhs.hi > hi) hi = rhs.hi; return *this; } template inline RangeType RangeType::operator|(const RangeType& rhs) const { return RangeType((rhs.lo < lo) ? rhs.lo : lo, (rhs.hi > hi) ? rhs.hi : hi); } /** * Shrinks range to be the overlap with another range, becoming an empty * set if there is no overlap. */ template inline RangeType& RangeType::operator&=(const RangeType& rhs) { if (rhs.lo > lo) lo = rhs.lo; if (rhs.hi < hi) hi = rhs.hi; return *this; } template inline RangeType RangeType::operator&(const RangeType& rhs) const { return RangeType((rhs.lo > lo) ? rhs.lo : lo, (rhs.hi < hi) ? rhs.hi : hi); } /** * Scale the bounds by the given double. */ template inline RangeType& RangeType::operator*=(const T d) { lo *= d; hi *= d; // Now if we've negated, we need to flip things around so the bound is valid. if (lo > hi) { double tmp = hi; hi = lo; lo = tmp; } return *this; } template inline RangeType RangeType::operator*(const T d) const { double nlo = lo * d; double nhi = hi * d; if (nlo <= nhi) return RangeType(nlo, nhi); else return RangeType(nhi, nlo); } // Symmetric case. template inline RangeType operator*(const T d, const RangeType& r) { double nlo = r.lo * d; double nhi = r.hi * d; if (nlo <= nhi) return RangeType(nlo, nhi); else return RangeType(nhi, nlo); } /** * Compare with another range for strict equality. */ template inline bool RangeType::operator==(const RangeType& rhs) const { return (lo == rhs.lo) && (hi == rhs.hi); } template inline bool RangeType::operator!=(const RangeType& rhs) const { return (lo != rhs.lo) || (hi != rhs.hi); } /** * Compare with another range. For Range objects x and y, x < y means that x is * strictly less than y and does not overlap at all. 
*/ template inline bool RangeType::operator<(const RangeType& rhs) const { return hi < rhs.lo; } template inline bool RangeType::operator>(const RangeType& rhs) const { return lo > rhs.hi; } /** * Determines if a point is contained within the range. */ template inline bool RangeType::Contains(const T d) const { return d >= lo && d <= hi; } /** * Determines if this range overlaps with another range. */ template inline bool RangeType::Contains(const RangeType& r) const { return lo <= r.hi && hi >= r.lo; } //! Serialize the range. template template void RangeType::Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(hi, "hi"); ar & data::CreateNVP(lo, "lo"); } } // namespace math } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/math/round.hpp000066400000000000000000000014741315013601400205460ustar00rootroot00000000000000/** * @file round.hpp * @author Ryan Curtin * * Implementation of round() for use on Visual Studio, where C99 isn't * implemented. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_MATH_ROUND_HPP #define MLPACK_CORE_MATH_ROUND_HPP // _MSC_VER should only be defined for Visual Studio, which doesn't implement // C99. #ifdef _MSC_VER // This function ends up going into the global namespace, so it can be used in // place of C99's round(). //! Round a number to the nearest integer. inline double round(double a) { return floor(a + 0.5); } #endif #endif mlpack-2.2.5/src/mlpack/core/metrics/000077500000000000000000000000001315013601400174155ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/metrics/CMakeLists.txt000066400000000000000000000010451315013601400221550ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES ip_metric.hpp ip_metric_impl.hpp lmetric.hpp lmetric_impl.hpp mahalanobis_distance.hpp mahalanobis_distance_impl.hpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/metrics/ip_metric.hpp000066400000000000000000000041421315013601400221020ustar00rootroot00000000000000/** * @file ip_metric.hpp * @author Ryan Curtin * * Inner product induced metric. If given a kernel function, this gives the * complementary metric. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_FASTMKS_IP_METRIC_HPP #define MLPACK_METHODS_FASTMKS_IP_METRIC_HPP namespace mlpack { namespace metric { /** * The inner product metric, IPMetric, takes a given Mercer kernel (KernelType), * and when Evaluate() is called, returns the distance between the two points in * kernel space: * * @f[ * d(x, y) = \sqrt{ K(x, x) + K(y, y) - 2K(x, y) }. * @f] * * @tparam KernelType Type of Kernel to use. This must be a Mercer kernel * (positive definite), otherwise the metric may not be valid. 
 */
template<typename KernelType>
class IPMetric
{
 public:
  //! Create the IPMetric without an instantiated kernel.
  IPMetric();

  //! Create the IPMetric with an instantiated kernel.
  IPMetric(KernelType& kernel);

  //! Destroy the IPMetric object.
  ~IPMetric();

  /**
   * Evaluate the metric.
   *
   * @tparam VecTypeA Type of first vector.
   * @tparam VecTypeB Type of second vector.
   * @param a First vector.
   * @param b Second vector.
   * @return Distance between the two points in kernel space.
   */
  template<typename VecTypeA, typename VecTypeB>
  typename VecTypeA::elem_type Evaluate(const VecTypeA& a, const VecTypeB& b);

  //! Get the kernel.
  const KernelType& Kernel() const { return *kernel; }
  //! Modify the kernel.
  KernelType& Kernel() { return *kernel; }

  //! Serialize the metric.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int version);

 private:
  //! The kernel we are using.
  KernelType* kernel;
  //! If true, we are responsible for deleting the kernel.
  bool kernelOwner;
};

} // namespace metric
} // namespace mlpack

// Include implementation.
#include "ip_metric_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/metrics/ip_metric_impl.hpp000066400000000000000000000045571315013601400231330ustar00rootroot00000000000000
/**
 * @file ip_metric_impl.hpp
 * @author Ryan Curtin
 *
 * Implementation of the IPMetric.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_FASTMKS_IP_METRIC_IMPL_HPP
#define MLPACK_METHODS_FASTMKS_IP_METRIC_IMPL_HPP

// In case it hasn't been included yet.
#include "ip_metric.hpp"

#include <mlpack/core/metrics/lmetric.hpp>
#include <mlpack/core/kernels/linear_kernel.hpp>

namespace mlpack {
namespace metric {

// Constructor with no instantiated kernel.
template<typename KernelType>
IPMetric<KernelType>::IPMetric() :
    kernel(new KernelType()),
    kernelOwner(true)
{
  // Nothing to do.
}

// Constructor with instantiated kernel.
template<typename KernelType>
IPMetric<KernelType>::IPMetric(KernelType& kernel) :
    kernel(&kernel),
    kernelOwner(false)
{
  // Nothing to do.
}

// Destructor for the IPMetric.
template<typename KernelType>
IPMetric<KernelType>::~IPMetric()
{
  if (kernelOwner)
    delete kernel;
}

template<typename KernelType>
template<typename Vec1Type, typename Vec2Type>
inline typename Vec1Type::elem_type IPMetric<KernelType>::Evaluate(
    const Vec1Type& a,
    const Vec2Type& b)
{
  // This is the metric induced by the kernel function.
  // Maybe we can do better by caching some of this?
  return sqrt(kernel->Evaluate(a, a) + kernel->Evaluate(b, b) -
      2 * kernel->Evaluate(a, b));
}

// Serialize the kernel.
template<typename KernelType>
template<typename Archive>
void IPMetric<KernelType>::Serialize(Archive& ar,
                                     const unsigned int /* version */)
{
  // If we're loading, we need to allocate space for the kernel, and we will
  // own the kernel.
  if (Archive::is_loading::value)
    kernelOwner = true;

  ar & data::CreateNVP(kernel, "kernel");
}

// A specialization for the linear kernel, which actually just turns out to be
// the Euclidean distance.
template<>
template<typename Vec1Type, typename Vec2Type>
inline typename Vec1Type::elem_type IPMetric<kernel::LinearKernel>::Evaluate(
    const Vec1Type& a,
    const Vec2Type& b)
{
  return metric::LMetric<2, true>::Evaluate(a, b);
}

} // namespace metric
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/metrics/lmetric.hpp000066400000000000000000000072541315013601400215730ustar00rootroot00000000000000
/**
 * @file lmetric.hpp
 * @author Ryan Curtin
 *
 * Generalized L-metric, allowing both squared distances to be returned as well
 * as non-squared distances.  The squared distances are faster to compute.
 *
 * This also gives several convenience typedefs for commonly used L-metrics.
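 *
 * For example (illustrative values; the typedefs are defined below):
 *
 * @code
 * arma::vec a("0.0 0.0"), b("3.0 4.0");
 * metric::EuclideanDistance::Evaluate(a, b);        // 5.0
 * metric::SquaredEuclideanDistance::Evaluate(a, b); // 25.0
 * metric::ManhattanDistance::Evaluate(a, b);        // 7.0
 * @endcode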
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_METRICS_LMETRIC_HPP #define MLPACK_CORE_METRICS_LMETRIC_HPP #include namespace mlpack { namespace metric { /** * The L_p metric for arbitrary integer p, with an option to take the root. * * This class implements the standard L_p metric for two arbitrary vectors @f$ x * @f$ and @f$ y @f$ of dimensionality @f$ n @f$: * * @f[ * d(x, y) = \left( \sum_{i = 1}^{n} | x_i - y_i |^p \right)^{\frac{1}{p}}. * @f] * * The value of p is given as a template parameter. * * In addition, the function @f$ d(x, y) @f$ can be simplified, neglecting the * p-root calculation. This is done by specifying the TakeRoot template * parameter to be false. Then, * * @f[ * d(x, y) = \sum_{i = 1}^{n} | x_i - y_i |^p * @f] * * It is faster to compute that distance, so TakeRoot is by default off. * However, when TakeRoot is false, the distance given is not actually a true * metric -- it does not satisfy the triangle inequality. Some mlpack methods * do not require the triangle inequality to operate correctly (such as the * BinarySpaceTree), but setting TakeRoot = false in some cases will cause * incorrect results. * * A few convenience typedefs are given: * * - ManhattanDistance * - EuclideanDistance * - SquaredEuclideanDistance * * @tparam Power Power of metric; i.e. Power = 1 gives the L1-norm (Manhattan * distance). * @tparam TakeRoot If true, the Power'th root of the result is taken before it * is returned. Setting this to false causes the metric to not satisfy the * Triangle Inequality (be careful!). */ template class LMetric { public: /*** * Default constructor does nothing, but is required to satisfy the Metric * policy. */ LMetric() { } /** * Computes the distance between two points. * * @tparam VecTypeA Type of first vector (generally arma::vec or * arma::sp_vec). * @tparam VecTypeB Type of second vector. * @param a First vector. * @param b Second vector. * @return Distance between vectors a and b. */ template static typename VecTypeA::elem_type Evaluate(const VecTypeA& a, const VecTypeB& b); //! Serialize the metric (nothing to do). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } //! The power of the metric. static const int Power = TPower; //! Whether or not the root is taken. static const bool TakeRoot = TTakeRoot; }; // Convenience typedefs. /** * The Manhattan (L1) distance. */ typedef LMetric<1, false> ManhattanDistance; /** * The squared Euclidean (L2) distance. Note that this is not technically a * metric! But it can sometimes be used when distances are required. */ typedef LMetric<2, false> SquaredEuclideanDistance; /** * The Euclidean (L2) distance. */ typedef LMetric<2, true> EuclideanDistance; /** * The L-infinity distance. */ typedef LMetric ChebyshevDistance; } // namespace metric } // namespace mlpack // Include implementation. #include "lmetric_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/metrics/lmetric_impl.hpp000066400000000000000000000057231315013601400226150ustar00rootroot00000000000000/** * @file lmetric_impl.hpp * @author Ryan Curtin * * Implementation of template specializations of LMetric class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_METRICS_LMETRIC_IMPL_HPP
#define MLPACK_CORE_METRICS_LMETRIC_IMPL_HPP

// In case it hasn't been included.
#include "lmetric.hpp"

namespace mlpack {
namespace metric {

// Unspecialized implementation.  This should almost never be used...
template<int Power, bool TakeRoot>
template<typename VecTypeA, typename VecTypeB>
typename VecTypeA::elem_type LMetric<Power, TakeRoot>::Evaluate(
    const VecTypeA& a,
    const VecTypeB& b)
{
  typename VecTypeA::elem_type sum = 0;
  for (size_t i = 0; i < a.n_elem; i++)
    sum += std::pow(fabs(a[i] - b[i]), Power);

  if (!TakeRoot) // The compiler should optimize this correctly at compile-time.
    return sum;

  return std::pow(sum, (1.0 / Power));
}

// L1-metric specializations; the root doesn't matter.
template<>
template<typename VecTypeA, typename VecTypeB>
typename VecTypeA::elem_type LMetric<1, true>::Evaluate(
    const VecTypeA& a,
    const VecTypeB& b)
{
  return arma::accu(abs(a - b));
}

template<>
template<typename VecTypeA, typename VecTypeB>
typename VecTypeA::elem_type LMetric<1, false>::Evaluate(
    const VecTypeA& a,
    const VecTypeB& b)
{
  return arma::accu(abs(a - b));
}

// L2-metric specializations.
template<>
template<typename VecTypeA, typename VecTypeB>
typename VecTypeA::elem_type LMetric<2, true>::Evaluate(
    const VecTypeA& a,
    const VecTypeB& b)
{
  return sqrt(arma::accu(square(a - b)));
}

template<>
template<typename VecTypeA, typename VecTypeB>
typename VecTypeA::elem_type LMetric<2, false>::Evaluate(
    const VecTypeA& a,
    const VecTypeB& b)
{
  return accu(arma::square(a - b));
}

// L3-metric specialization (not very likely to be used, but just in case).
template<>
template<typename VecTypeA, typename VecTypeB>
typename VecTypeA::elem_type LMetric<3, true>::Evaluate(
    const VecTypeA& a,
    const VecTypeB& b)
{
  return std::pow(arma::accu(arma::pow(arma::abs(a - b), 3.0)), 1.0 / 3.0);
}

template<>
template<typename VecTypeA, typename VecTypeB>
typename VecTypeA::elem_type LMetric<3, false>::Evaluate(
    const VecTypeA& a,
    const VecTypeB& b)
{
  return arma::accu(arma::pow(arma::abs(a - b), 3.0));
}

// L-infinity (Chebyshev distance) specialization.
template<>
template<typename VecTypeA, typename VecTypeB>
typename VecTypeA::elem_type LMetric<INT_MAX, false>::Evaluate(
    const VecTypeA& a,
    const VecTypeB& b)
{
  return arma::as_scalar(arma::max(arma::abs(a - b)));
}

} // namespace metric
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/metrics/mahalanobis_distance.hpp000066400000000000000000000072501315013601400242620ustar00rootroot00000000000000
/**
 * @file mahalanobis_distance.hpp
 * @author Ryan Curtin
 *
 * The Mahalanobis distance.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_METRICS_MAHALANOBIS_DISTANCE_HPP
#define MLPACK_CORE_METRICS_MAHALANOBIS_DISTANCE_HPP

#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace metric {

/**
 * The Mahalanobis distance, which is essentially a stretched Euclidean
 * distance.  Given a square covariance matrix @f$ Q @f$ of size @f$ d @f$ x
 * @f$ d @f$, where @f$ d @f$ is the dimensionality of the points it will be
 * evaluating, and given two vectors @f$ x @f$ and @f$ y @f$ also of
 * dimensionality @f$ d @f$,
 *
 * @f[
 * d(x, y) = \sqrt{(x - y)^T Q (x - y)}
 * @f]
 *
 * where Q is the covariance matrix.
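 *
 * A short usage sketch (the covariance values are arbitrary examples):
 *
 * @code
 * arma::mat q("2.0 0.0; 0.0 0.5");
 * MahalanobisDistance<true> d(q); // Rooted distance with covariance q.
 * arma::vec a("0.0 0.0"), b("1.0 1.0");
 * const double dist = d.Evaluate(a, b); // sqrt(2.0 + 0.5)
 * @endcode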
 *
 * Because each evaluation multiplies (x_1 - x_2) by the covariance matrix, it
 * may be much quicker to use an LMetric and simply stretch the actual dataset
 * itself before performing any evaluations.  However, this class is provided
 * for convenience.
 *
 * Similar to the LMetric class, this offers a template parameter TakeRoot
 * which, when set to false, will instead evaluate the distance
 *
 * @f[
 * d(x, y) = (x - y)^T Q (x - y)
 * @f]
 *
 * which is faster to evaluate.
 *
 * @tparam TakeRoot If true, takes the root of the output.  It is slightly
 *    faster to leave this at the default of false, but this means the metric
 *    may not satisfy the triangle inequality and may not be usable for methods
 *    that expect a true metric.
 */
template<bool TakeRoot = false>
class MahalanobisDistance
{
 public:
  /**
   * Initialize the Mahalanobis distance with the empty matrix as covariance.
   * Don't call Evaluate() until you set the covariance with Covariance()!
   */
  MahalanobisDistance() { }

  /**
   * Initialize the Mahalanobis distance with the identity matrix of the given
   * dimensionality.
   *
   * @param dimensionality Dimensionality of the covariance matrix.
   */
  MahalanobisDistance(const size_t dimensionality) :
      covariance(arma::eye<arma::mat>(dimensionality, dimensionality)) { }

  /**
   * Initialize the Mahalanobis distance with the given covariance matrix.  The
   * given covariance matrix will be copied (this is not optimal).
   *
   * @param covariance The covariance matrix to use for this distance.
   */
  MahalanobisDistance(const arma::mat& covariance) : covariance(covariance) { }

  /**
   * Evaluate the distance between the two given points using this Mahalanobis
   * distance.  If the covariance matrix has not been set (i.e. if you used the
   * empty constructor and did not later modify the covariance matrix), calling
   * this method will probably result in a crash.
   *
   * @param a First vector.
   * @param b Second vector.
   */
  template<typename VecTypeA, typename VecTypeB>
  double Evaluate(const VecTypeA& a, const VecTypeB& b);

  /**
   * Access the covariance matrix.
   *
   * @return Constant reference to the covariance matrix.
   */
  const arma::mat& Covariance() const { return covariance; }

  /**
   * Modify the covariance matrix.
   *
   * @return Reference to the covariance matrix.
   */
  arma::mat& Covariance() { return covariance; }

  //! Serialize the Mahalanobis distance.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int version);

 private:
  //! The covariance matrix associated with this distance.
  arma::mat covariance;
};

} // namespace metric
} // namespace mlpack

#include "mahalanobis_distance_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/metrics/mahalanobis_distance_impl.hpp000066400000000000000000000033731315013601400253050ustar00rootroot00000000000000
/**
 * @file mahalanobis_distance_impl.hpp
 * @author Ryan Curtin
 *
 * Implementation of the Mahalanobis distance.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_METRICS_MAHALANOBIS_DISTANCE_IMPL_HPP
#define MLPACK_CORE_METRICS_MAHALANOBIS_DISTANCE_IMPL_HPP

#include "mahalanobis_distance.hpp"

namespace mlpack {
namespace metric {

/**
 * Specialization for non-rooted case.
 */
template<>
template<typename VecTypeA, typename VecTypeB>
double MahalanobisDistance<false>::Evaluate(const VecTypeA& a,
                                            const VecTypeB& b)
{
  arma::vec m = (a - b);
  arma::mat out = trans(m) * covariance * m; // 1x1
  return out[0];
}

/**
 * Specialization for rooted case.  This requires one extra evaluation of
 * sqrt().
 */
template<>
template<typename VecTypeA, typename VecTypeB>
double MahalanobisDistance<true>::Evaluate(const VecTypeA& a,
                                           const VecTypeB& b)
{
  // Check if the covariance matrix has been initialized.
  if (covariance.n_rows == 0)
    covariance = arma::eye<arma::mat>(a.n_elem, a.n_elem);

  arma::vec m = (a - b);
  arma::mat out = trans(m) * covariance * m; // 1x1
  return sqrt(out[0]);
}

// Serialize the Mahalanobis distance.
template<bool TakeRoot>
template<typename Archive>
void MahalanobisDistance<TakeRoot>::Serialize(Archive& ar,
                                              const unsigned int /* version */)
{
  ar & data::CreateNVP(covariance, "covariance");
}

} // namespace metric
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/optimizers/000077500000000000000000000000001315013601400201545ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/CMakeLists.txt000066400000000000000000000003401315013601400227110ustar00rootroot00000000000000set(DIRS
  adadelta
  adam
  aug_lagrangian
  gradient_descent
  lbfgs
  minibatch_sgd
  rmsprop
  sa
  sdp
  sgd
)

foreach(dir ${DIRS})
  add_subdirectory(${dir})
endforeach()

set(MLPACK_SRCS ${MLPACK_SRCS} PARENT_SCOPE)
mlpack-2.2.5/src/mlpack/core/optimizers/adadelta/000077500000000000000000000000001315013601400217135ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/adadelta/CMakeLists.txt000066400000000000000000000003421315013601400244520ustar00rootroot00000000000000set(SOURCES
  ada_delta.hpp
  ada_delta_impl.hpp
)

set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)
mlpack-2.2.5/src/mlpack/core/optimizers/adadelta/ada_delta.hpp000066400000000000000000000127421315013601400243300ustar00rootroot00000000000000/**
 * @file ada_delta.hpp
 * @author Ryan Curtin
 * @author Vasanth Kalingeri
 *
 * Implementation of the AdaDelta optimizer.  AdaDelta is an optimizer that
 * dynamically adapts over time using only first order information.
 * Additionally, AdaDelta requires no manual tuning of a learning rate.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef __MLPACK_CORE_OPTIMIZERS_ADADELTA_ADA_DELTA_HPP
#define __MLPACK_CORE_OPTIMIZERS_ADADELTA_ADA_DELTA_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace optimization {

/**
 * AdaDelta is an optimizer that uses two ideas to improve upon the two main
 * drawbacks of the Adagrad method:
 *
 *  - Accumulate Over Window
 *  - Correct Units with Hessian Approximation
 *
 * For more information, see the following.
 *
 * @code
 * @article{Zeiler2012,
 *   author  = {Matthew D. Zeiler},
 *   title   = {{ADADELTA:} An Adaptive Learning Rate Method},
 *   journal = {CoRR},
 *   year    = {2012}
 * }
 * @endcode
 *
 * For AdaDelta to work, a DecomposableFunctionType template parameter is
 * required.  This class must implement the following functions:
 *
 *   size_t NumFunctions();
 *   double Evaluate(const arma::mat& coordinates, const size_t i);
 *   void Gradient(const arma::mat& coordinates,
 *                 const size_t i,
 *                 arma::mat& gradient);
 *
 * NumFunctions() should return the number of functions (\f$n\f$), and in the
 * other two functions, the parameter i refers to which individual function (or
 * gradient) is being evaluated.
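 *
 * For illustration, a minimal (hypothetical) decomposable function, here
 * \f$ f(A) = \sum_{i=0}^{2} (A_{0,0} - i)^2 \f$, could be written as:
 *
 * @code
 * class ExampleFunction
 * {
 *  public:
 *   size_t NumFunctions() { return 3; }
 *   double Evaluate(const arma::mat& x, const size_t i)
 *   { return std::pow(x(0, 0) - i, 2.0); }
 *   void Gradient(const arma::mat& x, const size_t i, arma::mat& g)
 *   { g.set_size(1, 1); g(0, 0) = 2 * (x(0, 0) - i); }
 * };
 * @endcode
 *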
So, for the case of a data-dependent function, * such as NCA (see mlpack::nca::NCA), NumFunctions() should return the number * of points in the dataset, and Evaluate(coordinates, 0) will evaluate the * objective function on the first point in the dataset (presumably, the dataset * is held internally in the DecomposableFunctionType). * * @tparam DecomposableFunctionType Decomposable objective function type to be * minimized. */ template class AdaDelta { public: /** * Construct the AdaDelta optimizer with the given function and parameters. * The defaults here are not necessarily good for the given problem, so it is * suggested that the values used be tailored to the task at hand. The * maximum number of iterations refers to the maximum number of points that * are processed (i.e., one iteration equals one point; one iteration does not * equal one pass over the dataset). * * @param function Function to be optimized (minimized). * @param rho Smoothing constant * @param eps Value used to initialise the mean squared gradient parameter. * @param maxIterations Maximum number of iterations allowed (0 means no * limit). * @param tolerance Maximum absolute tolerance to terminate algorithm. * @param shuffle If true, the function order is shuffled; otherwise, each * function is visited in linear order. */ AdaDelta(DecomposableFunctionType& function, const double rho = 0.95, const double eps = 1e-6, const size_t maxIterations = 100000, const double tolerance = 1e-5, const bool shuffle = true); /** * Optimize the given function using AdaDelta. The given starting point will * be modified to store the finishing point of the algorithm, and the final * objective value is returned. * * @param iterate Starting point (will be modified). * @return Objective value of the final point. */ double Optimize(arma::mat& iterate); //! Get the instantiated function to be optimized. const DecomposableFunctionType& Function() const { return function; } //! Modify the instantiated function. DecomposableFunctionType& Function() { return function; } //! Get the smoothing parameter. double Rho() const { return rho; } //! Modify the smoothing parameter. double& Rho() { return rho; } //! Get the value used to initialise the mean squared gradient parameter. double Epsilon() const { return eps; } //! Modify the value used to initialise the mean squared gradient parameter. double& Epsilon() { return eps; } //! Get the maximum number of iterations (0 indicates no limit). size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations (0 indicates no limit). size_t& MaxIterations() { return maxIterations; } //! Get the tolerance for termination. double Tolerance() const { return tolerance; } //! Modify the tolerance for termination. double& Tolerance() { return tolerance; } //! Get whether or not the individual functions are shuffled. bool Shuffle() const { return shuffle; } //! Modify whether or not the individual functions are shuffled. bool& Shuffle() { return shuffle; } private: //! The instantiated function. DecomposableFunctionType& function; //! The smoothing parameter. double rho; //! The value used to initialise the mean squared gradient parameter. double eps; //! The maximum number of allowed iterations. size_t maxIterations; //! The tolerance for termination. double tolerance; //! Controls whether or not the individual functions are shuffled when //! iterating. bool shuffle; }; } // namespace optimization } // namespace mlpack // Include implementation. 
#include "ada_delta_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/adadelta/ada_delta_impl.hpp000066400000000000000000000111501315013601400253410ustar00rootroot00000000000000/** * @file ada_delta_impl.hpp * @author Ryan Curtin * @author Vasanth Kalingeri * * Implementation of the Adadelta optimizer. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef __MLPACK_CORE_OPTIMIZERS_ADADELTA_ADA_DELTA_IMPL_HPP #define __MLPACK_CORE_OPTIMIZERS_ADADELTA_ADA_DELTA_IMPL_HPP #include "ada_delta.hpp" namespace mlpack { namespace optimization { template AdaDelta::AdaDelta(DecomposableFunctionType& function, const double rho, const double eps, const size_t maxIterations, const double tolerance, const bool shuffle) : function(function), rho(rho), eps(eps), maxIterations(maxIterations), tolerance(tolerance), shuffle(shuffle) { /* Nothing to do. */ } //! Optimize the function (minimize). template double AdaDelta::Optimize(arma::mat& iterate) { // Find the number of functions to use. const size_t numFunctions = function.NumFunctions(); // This is used only if shuffle is true. arma::Col visitationOrder; if (shuffle) visitationOrder = arma::shuffle(arma::linspace>(0, (numFunctions - 1), numFunctions)); // To keep track of where we are and how things are going. size_t currentFunction = 0; double overallObjective = 0; double lastObjective = DBL_MAX; // Calculate the first objective function. for (size_t i = 0; i < numFunctions; ++i) overallObjective += function.Evaluate(iterate, i); // Now iterate! arma::mat gradient(iterate.n_rows, iterate.n_cols); // Leaky sum of squares of parameter gradient. arma::mat meanSquaredGradient = arma::zeros(iterate.n_rows, iterate.n_cols); // Leaky sum of squares of parameter gradient. arma::mat meanSquaredGradientDx = arma::zeros(iterate.n_rows, iterate.n_cols); for (size_t i = 1; i != maxIterations; ++i, ++currentFunction) { // Is this iteration the start of a sequence? if ((currentFunction % numFunctions) == 0) { // Output current objective function. Log::Info << "AdaDelta: iteration " << i << ", objective " << overallObjective << "." << std::endl; if (std::isnan(overallObjective) || std::isinf(overallObjective)) { Log::Warn << "AdaDelta: converged to " << overallObjective << "; terminating with failure. Try a smaller step size?" << std::endl; return overallObjective; } if (std::abs(lastObjective - overallObjective) < tolerance) { Log::Info << "AdaDelta: minimized within tolerance " << tolerance << "; terminating optimization." << std::endl; return overallObjective; } // Reset the counter variables. lastObjective = overallObjective; overallObjective = 0; currentFunction = 0; if (shuffle) // Determine order of visitation. visitationOrder = arma::shuffle(visitationOrder); } // Evaluate the gradient for this iteration. if (shuffle) function.Gradient(iterate, visitationOrder[currentFunction], gradient); else function.Gradient(iterate, currentFunction, gradient); // Accumulate gradient. meanSquaredGradient *= rho; meanSquaredGradient += (1 - rho) * (gradient % gradient); arma::mat dx = arma::sqrt((meanSquaredGradientDx + eps) / (meanSquaredGradient + eps)) % gradient; // Accumulate updates. meanSquaredGradientDx *= rho; meanSquaredGradientDx += (1 - rho) * (dx % dx); // Apply update. 
iterate -= dx; // Now add that to the overall objective function. if (shuffle) overallObjective += function.Evaluate(iterate, visitationOrder[currentFunction]); else overallObjective += function.Evaluate(iterate, currentFunction); } Log::Info << "AdaDelta: maximum iterations (" << maxIterations << ") reached; terminating optimization." << std::endl; // Calculate final objective. overallObjective = 0; for (size_t i = 0; i < numFunctions; ++i) overallObjective += function.Evaluate(iterate, i); return overallObjective; } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/adam/000077500000000000000000000000001315013601400210565ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/adam/CMakeLists.txt000066400000000000000000000003301315013601400236120ustar00rootroot00000000000000set(SOURCES adam.hpp adam_impl.hpp ) set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/optimizers/adam/adam.hpp000066400000000000000000000142071315013601400224750ustar00rootroot00000000000000/** * @file adam.hpp * @author Ryan Curtin * @author Vasanth Kalingeri * @author Marcus Edel * * Adam optimizer. Adam is an an algorithm for first-order gradient-based * optimization of stochastic objective functions, based on adaptive estimates * of lower-order moments. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef __MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_HPP #define __MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_HPP #include namespace mlpack { namespace optimization { /** * Adam is an optimizer that computes individual adaptive learning rates for * different parameters from estimates of first and second moments of the * gradients. * * For more information, see the following. * * @code * @article{Kingma2014, * author = {Diederik P. Kingma and Jimmy Ba}, * title = {Adam: {A} Method for Stochastic Optimization}, * journal = {CoRR}, * year = {2014} * } * @endcode * * * For Adam to work, a DecomposableFunctionType template parameter is required. * This class must implement the following function: * * size_t NumFunctions(); * double Evaluate(const arma::mat& coordinates, const size_t i); * void Gradient(const arma::mat& coordinates, * const size_t i, * arma::mat& gradient); * * NumFunctions() should return the number of functions (\f$n\f$), and in the * other two functions, the parameter i refers to which individual function (or * gradient) is being evaluated. So, for the case of a data-dependent function, * such as NCA (see mlpack::nca::NCA), NumFunctions() should return the number * of points in the dataset, and Evaluate(coordinates, 0) will evaluate the * objective function on the first point in the dataset (presumably, the dataset * is held internally in the DecomposableFunctionType). * * @tparam DecomposableFunctionType Decomposable objective function type to be * minimized. */ template class Adam { public: /** * Construct the Adam optimizer with the given function and parameters. The * defaults here are not necessarily good for the given problem, so it is * suggested that the values used be tailored to the task at hand. 
The * maximum number of iterations refers to the maximum number of points that * are processed (i.e., one iteration equals one point; one iteration does not * equal one pass over the dataset). * * @param function Function to be optimized (minimized). * @param stepSize Step size for each iteration. * @param beta1 Exponential decay rate for the first moment estimates. * @param beta2 Exponential decay rate for the weighted infinity norm estimates. * @param eps Value used to initialise the mean squared gradient parameter. * @param maxIterations Maximum number of iterations allowed (0 means no * limit). * @param tolerance Maximum absolute tolerance to terminate algorithm. * @param shuffle If true, the function order is shuffled; otherwise, each * function is visited in linear order. */ Adam(DecomposableFunctionType& function, const double stepSize = 0.001, const double beta1 = 0.9, const double beta2 = 0.999, const double eps = 1e-8, const size_t maxIterations = 100000, const double tolerance = 1e-5, const bool shuffle = true); /** * Optimize the given function using Adam. The given starting point will be * modified to store the finishing point of the algorithm, and the final * objective value is returned. * * @param iterate Starting point (will be modified). * @return Objective value of the final point. */ double Optimize(arma::mat& iterate); //! Get the instantiated function to be optimized. const DecomposableFunctionType& Function() const { return function; } //! Modify the instantiated function. DecomposableFunctionType& Function() { return function; } //! Get the step size. double StepSize() const { return stepSize; } //! Modify the step size. double& StepSize() { return stepSize; } //! Get the smoothing parameter. double Beta1() const { return beta1; } //! Modify the smoothing parameter. double& Beta1() { return beta1; } //! Get the second moment coefficient. double Beta2() const { return beta2; } //! Modify the second moment coefficient. double& Beta2() { return beta2; } //! Get the value used to initialise the mean squared gradient parameter. double Epsilon() const { return eps; } //! Modify the value used to initialise the mean squared gradient parameter. double& Epsilon() { return eps; } //! Get the maximum number of iterations (0 indicates no limit). size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations (0 indicates no limit). size_t& MaxIterations() { return maxIterations; } //! Get the tolerance for termination. double Tolerance() const { return tolerance; } //! Modify the tolerance for termination. double& Tolerance() { return tolerance; } //! Get whether or not the individual functions are shuffled. bool Shuffle() const { return shuffle; } //! Modify whether or not the individual functions are shuffled. bool& Shuffle() { return shuffle; } private: //! The instantiated function. DecomposableFunctionType& function; //! The step size for each example. double stepSize; //! Exponential decay rate for the first moment estimates. double beta1; //! Exponential decay rate for the weighted infinity norm estimates. double beta2; //! The value used to initialise the mean squared gradient parameter. double eps; //! The maximum number of allowed iterations. size_t maxIterations; //! The tolerance for termination. double tolerance; //! Controls whether or not the individual functions are shuffled when //! iterating. bool shuffle; }; } // namespace optimization } // namespace mlpack // Include implementation. 
#include "adam_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/adam/adam_impl.hpp000066400000000000000000000114111315013601400235100ustar00rootroot00000000000000/** * @file adam_impl.hpp * @author Ryan Curtin * @author Vasanth Kalingeri * @author Marcus Edel * * Implementation of the Adam optimizer. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef __MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_IMPL_HPP #define __MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_IMPL_HPP // In case it hasn't been included yet. #include "adam.hpp" namespace mlpack { namespace optimization { template Adam::Adam(DecomposableFunctionType& function, const double stepSize, const double beta1, const double beta2, const double eps, const size_t maxIterations, const double tolerance, const bool shuffle) : function(function), stepSize(stepSize), beta1(beta1), beta2(beta2), eps(eps), maxIterations(maxIterations), tolerance(tolerance), shuffle(shuffle) { /* Nothing to do. */ } //! Optimize the function (minimize). template double Adam::Optimize(arma::mat& iterate) { // Find the number of functions to use. const size_t numFunctions = function.NumFunctions(); // This is used only if shuffle is true. arma::Col visitationOrder; if (shuffle) visitationOrder = arma::shuffle(arma::linspace>(0, (numFunctions - 1), numFunctions)); // To keep track of where we are and how things are going. size_t currentFunction = 0; double overallObjective = 0; double lastObjective = DBL_MAX; // Calculate the first objective function. for (size_t i = 0; i < numFunctions; ++i) overallObjective += function.Evaluate(iterate, i); // Now iterate! arma::mat gradient(iterate.n_rows, iterate.n_cols); // Exponential moving average of gradient values. arma::mat mean = arma::zeros(iterate.n_rows, iterate.n_cols); // Exponential moving average of squared gradient values. arma::mat variance = arma::zeros(iterate.n_rows, iterate.n_cols); for (size_t i = 1; i != maxIterations; ++i, ++currentFunction) { // Is this iteration the start of a sequence? if ((currentFunction % numFunctions) == 0) { // Output current objective function. Log::Info << "Adam: iteration " << i << ", objective " << overallObjective << "." << std::endl; if (std::isnan(overallObjective) || std::isinf(overallObjective)) { Log::Warn << "Adam: converged to " << overallObjective << "; terminating with failure. Try a smaller step size?" << std::endl; return overallObjective; } if (std::abs(lastObjective - overallObjective) < tolerance) { Log::Info << "Adam: minimized within tolerance " << tolerance << "; " << "terminating optimization." << std::endl; return overallObjective; } // Reset the counter variables. lastObjective = overallObjective; overallObjective = 0; currentFunction = 0; if (shuffle) // Determine order of visitation. visitationOrder = arma::shuffle(visitationOrder); } // Evaluate the gradient for this iteration. if (shuffle) function.Gradient(iterate, visitationOrder[currentFunction], gradient); else function.Gradient(iterate, currentFunction, gradient); // And update the iterate. 
mean *= beta1; mean += (1 - beta1) * gradient; variance *= beta2; variance += (1 - beta2) * (gradient % gradient); const double biasCorrection1 = 1.0 - std::pow(beta1, (double) i); const double biasCorrection2 = 1.0 - std::pow(beta2, (double) i); iterate -= (stepSize * std::sqrt(biasCorrection2) / biasCorrection1) * mean / (arma::sqrt(variance) + eps); // Now add that to the overall objective function. if (shuffle) overallObjective += function.Evaluate(iterate, visitationOrder[currentFunction]); else overallObjective += function.Evaluate(iterate, currentFunction); } Log::Info << "Adam: maximum iterations (" << maxIterations << ") reached; " << "terminating optimization." << std::endl; // Calculate final objective. overallObjective = 0; for (size_t i = 0; i < numFunctions; ++i) overallObjective += function.Evaluate(iterate, i); return overallObjective; } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/aug_lagrangian/000077500000000000000000000000001315013601400231135ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/aug_lagrangian/CMakeLists.txt000066400000000000000000000005651315013601400256610ustar00rootroot00000000000000set(SOURCES aug_lagrangian.hpp aug_lagrangian_impl.hpp aug_lagrangian_function.hpp aug_lagrangian_function_impl.hpp aug_lagrangian_test_functions.hpp aug_lagrangian_test_functions.cpp ) set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/optimizers/aug_lagrangian/aug_lagrangian.hpp000066400000000000000000000130121315013601400265600ustar00rootroot00000000000000/** * @file aug_lagrangian.hpp * @author Ryan Curtin * * Definition of AugLagrangian class, which implements the Augmented Lagrangian * optimization method (also called the 'method of multipliers'. This class * uses the L-BFGS optimizer. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_HPP #define MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_HPP #include #include #include "aug_lagrangian_function.hpp" namespace mlpack { namespace optimization { /** * The AugLagrangian class implements the Augmented Lagrangian method of * optimization. In this scheme, a penalty term is added to the Lagrangian. * This method is also called the "method of multipliers". * * The template class LagrangianFunction must implement the following five * methods: * * - double Evaluate(const arma::mat& coordinates); * - void Gradient(const arma::mat& coordinates, arma::mat& gradient); * - size_t NumConstraints(); * - double EvaluateConstraint(size_t index, const arma::mat& coordinates); * - double GradientConstraint(size_t index, const arma::mat& coordinates, * arma::mat& gradient); * * The number of constraints must be greater than or equal to 0, and * EvaluateConstraint() should evaluate the constraint at the given index for * the given coordinates. Evaluate() should provide the objective function * value for the given coordinates. * * @tparam LagrangianFunction Function which can be optimized by this class. */ template class AugLagrangian { public: //! Shorthand for the type of the L-BFGS optimizer we'll be using. 
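  //! (Note that the internal L-BFGS optimizes the wrapped
  //! AugLagrangianFunction<LagrangianFunction>, not the raw
  //! LagrangianFunction itself.)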
  typedef L_BFGS<AugLagrangianFunction<LagrangianFunction> > L_BFGSType;

  /**
   * Initialize the Augmented Lagrangian with the default L-BFGS optimizer.
   * We limit the number of L-BFGS iterations to 1000, rather than the default
   * of unlimited iterations.
   *
   * @param function The function to be optimized.
   */
  AugLagrangian(LagrangianFunction& function);

  /**
   * Initialize the Augmented Lagrangian with a custom L-BFGS optimizer.
   *
   * @param augfunc The function to be optimized.  This must be a pre-created
   *    utility AugLagrangianFunction.
   * @param lbfgs The custom L-BFGS optimizer to be used.  This should have
   *    already been initialized with the given AugLagrangianFunction.
   */
  AugLagrangian(AugLagrangianFunction<LagrangianFunction>& augfunc,
                L_BFGSType& lbfgs);

  /**
   * Optimize the function.  The Lagrange multipliers are initialized to zero
   * (see the AugLagrangianFunction constructor).  To set the Lagrange
   * multipliers yourself, use the other overload of Optimize().
   *
   * @param coordinates Output matrix to store the optimized coordinates in.
   * @param maxIterations Maximum number of iterations of the Augmented
   *    Lagrangian algorithm.  0 indicates no maximum.
   */
  bool Optimize(arma::mat& coordinates, const size_t maxIterations = 1000);

  /**
   * Optimize the function, giving initial estimates for the Lagrange
   * multipliers.  The vector of Lagrange multipliers will be modified to
   * contain the Lagrange multipliers of the final solution (if one is found).
   *
   * @param coordinates Output matrix to store the optimized coordinates in.
   * @param initLambda Vector of initial Lagrange multipliers.  Should have
   *    length equal to the number of constraints.
   * @param initSigma Initial penalty parameter.
   * @param maxIterations Maximum number of iterations of the Augmented
   *    Lagrangian algorithm.  0 indicates no maximum.
   */
  bool Optimize(arma::mat& coordinates,
                const arma::vec& initLambda,
                const double initSigma,
                const size_t maxIterations = 1000);

  //! Get the LagrangianFunction.
  const LagrangianFunction& Function() const { return function; }
  //! Modify the LagrangianFunction.
  LagrangianFunction& Function() { return function; }

  //! Get the L-BFGS object used for the actual optimization.
  const L_BFGSType& LBFGS() const { return lbfgs; }
  //! Modify the L-BFGS object used for the actual optimization.
  L_BFGSType& LBFGS() { return lbfgs; }

  //! Get the Lagrange multipliers.
  const arma::vec& Lambda() const { return augfunc.Lambda(); }
  //! Modify the Lagrange multipliers (i.e. set them before optimization).
  arma::vec& Lambda() { return augfunc.Lambda(); }

  //! Get the penalty parameter.
  double Sigma() const { return augfunc.Sigma(); }
  //! Modify the penalty parameter.
  double& Sigma() { return augfunc.Sigma(); }

 private:
  //! Function to be optimized.
  LagrangianFunction& function;

  //! Internally used AugLagrangianFunction which holds the function we are
  //! optimizing.  This isn't publicly accessible, but we provide ways to get
  //! to the Lagrange multipliers and the penalty parameter sigma.
  AugLagrangianFunction<LagrangianFunction> augfunc;

  //! If the user did not pass an L_BFGS object, we'll use our own internal
  //! one.
  L_BFGSType lbfgsInternal;

  //! The L-BFGS optimizer that we will use.
L_BFGSType& lbfgs; }; } // namespace optimization } // namespace mlpack #include "aug_lagrangian_impl.hpp" #endif // MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_HPP mlpack-2.2.5/src/mlpack/core/optimizers/aug_lagrangian/aug_lagrangian_function.hpp000066400000000000000000000077751315013601400305100ustar00rootroot00000000000000/** * @file aug_lagrangian_function.hpp * @author Ryan Curtin * * Contains a utility class for AugLagrangian. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_FUNCTION_HPP #define MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_FUNCTION_HPP #include namespace mlpack { namespace optimization { /** * This is a utility class used by AugLagrangian, meant to wrap a * LagrangianFunction into a function usable by a simple optimizer like L-BFGS. * Given a LagrangianFunction which follows the format outlined in the * documentation for AugLagrangian, this class provides Evaluate(), Gradient(), * and GetInitialPoint() functions which allow this class to be used with a * simple optimizer like L-BFGS. * * This class can be specialized for your particular implementation -- commonly, * a faster method for computing the overall objective and gradient of the * augmented Lagrangian function can be implemented than the naive, default * implementation given. Use class template specialization and re-implement all * of the methods (unfortunately, C++ specialization rules mean you have to * re-implement everything). * * @tparam LagrangianFunction Lagrangian function to be used. */ template class AugLagrangianFunction { public: /** * Initialize the AugLagrangianFunction, but don't set the Lagrange * multipliers or penalty parameters yet. Make sure you set the Lagrange * multipliers before you use this... * * @param function Lagrangian function. */ AugLagrangianFunction(LagrangianFunction& function); /** * Initialize the AugLagrangianFunction with the given LagrangianFunction, * Lagrange multipliers, and initial penalty parameter. * * @param function Lagrangian function. * @param lambda Initial Lagrange multipliers. * @param sigma Initial penalty parameter. */ AugLagrangianFunction(LagrangianFunction& function, const arma::vec& lambda, const double sigma); /** * Evaluate the objective function of the Augmented Lagrangian function, which * is the standard Lagrangian function evaluation plus a penalty term, which * penalizes unsatisfied constraints. * * @param coordinates Coordinates to evaluate function at. * @return Objective function. */ double Evaluate(const arma::mat& coordinates) const; /** * Evaluate the gradient of the Augmented Lagrangian function. * * @param coordinates Coordinates to evaluate gradient at. * @param gradient Matrix to store gradient into. */ void Gradient(const arma::mat& coordinates, arma::mat& gradient) const; /** * Get the initial point of the optimization (supplied by the * LagrangianFunction). * * @return Initial point. */ const arma::mat& GetInitialPoint() const; //! Get the Lagrange multipliers. const arma::vec& Lambda() const { return lambda; } //! Modify the Lagrange multipliers. arma::vec& Lambda() { return lambda; } //! Get sigma (the penalty parameter). double Sigma() const { return sigma; } //! Modify sigma (the penalty parameter). 
double& Sigma() { return sigma; } //! Get the Lagrangian function. const LagrangianFunction& Function() const { return function; } //! Modify the Lagrangian function. LagrangianFunction& Function() { return function; } private: //! Instantiation of the function to be optimized. LagrangianFunction& function; //! The Lagrange multipliers. arma::vec lambda; //! The penalty parameter. double sigma; }; } // namespace optimization } // namespace mlpack // Include basic implementation. #include "aug_lagrangian_function_impl.hpp" #endif // MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_FUNCTION_HPP mlpack-2.2.5/src/mlpack/core/optimizers/aug_lagrangian/aug_lagrangian_function_impl.hpp000066400000000000000000000063541315013601400315210ustar00rootroot00000000000000/** * @file aug_lagrangian_function_impl.hpp * @author Ryan Curtin * * Simple, naive implementation of AugLagrangianFunction. Better * specializations can probably be given in many cases, but this is the most * general case. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_FUNCTION_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_FUNCTION_IMPL_HPP // In case it hasn't been included. #include "aug_lagrangian_function.hpp" namespace mlpack { namespace optimization { // Initialize the AugLagrangianFunction. template AugLagrangianFunction::AugLagrangianFunction( LagrangianFunction& function) : function(function), lambda(function.NumConstraints()), sigma(10) { // Initialize lambda vector to all zeroes. lambda.zeros(); } // Initialize the AugLagrangianFunction. template AugLagrangianFunction::AugLagrangianFunction( LagrangianFunction& function, const arma::vec& lambda, const double sigma) : function(function), lambda(lambda), sigma(sigma) { // Nothing else to do. } // Evaluate the AugLagrangianFunction at the given coordinates. template double AugLagrangianFunction::Evaluate( const arma::mat& coordinates) const { // The augmented Lagrangian is evaluated as // f(x) + {-lambda_i * c_i(x) + (sigma / 2) c_i(x)^2} for all constraints // First get the function's objective value. double objective = function.Evaluate(coordinates); // Now loop for each constraint. for (size_t i = 0; i < function.NumConstraints(); ++i) { double constraint = function.EvaluateConstraint(i, coordinates); objective += (-lambda[i] * constraint) + sigma * std::pow(constraint, 2) / 2; } return objective; } // Evaluate the gradient of the AugLagrangianFunction at the given coordinates. template void AugLagrangianFunction::Gradient( const arma::mat& coordinates, arma::mat& gradient) const { // The augmented Lagrangian's gradient is evaluted as // f'(x) + {(-lambda_i + sigma * c_i(x)) * c'_i(x)} for all constraints gradient.zeros(); function.Gradient(coordinates, gradient); arma::mat constraintGradient; // Temporary for constraint gradients. for (size_t i = 0; i < function.NumConstraints(); i++) { function.GradientConstraint(i, coordinates, constraintGradient); // Now calculate scaling factor and add to existing gradient. arma::mat tmpGradient; tmpGradient = (-lambda[i] + sigma * function.EvaluateConstraint(i, coordinates)) * constraintGradient; gradient += tmpGradient; } } // Get the initial point. 
template const arma::mat& AugLagrangianFunction::GetInitialPoint() const { return function.GetInitialPoint(); } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/aug_lagrangian/aug_lagrangian_impl.hpp000066400000000000000000000123671315013601400276150ustar00rootroot00000000000000/** * @file aug_lagrangian_impl.hpp * @author Ryan Curtin * * Implementation of AugLagrangian class (Augmented Lagrangian optimization * method). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_IMPL_HPP #include #include "aug_lagrangian_function.hpp" namespace mlpack { namespace optimization { template AugLagrangian::AugLagrangian(LagrangianFunction& function) : function(function), augfunc(function), lbfgsInternal(augfunc), lbfgs(lbfgsInternal) { lbfgs.MaxIterations() = 1000; } template AugLagrangian::AugLagrangian( AugLagrangianFunction& augfunc, L_BFGSType& lbfgs) : function(augfunc.Function()), augfunc(augfunc), lbfgs(lbfgs) { // Nothing to do. lbfgsInternal isn't used in this case. } // This overload just sets the lambda and sigma and calls the other overload. template bool AugLagrangian::Optimize(arma::mat& coordinates, const arma::vec& initLambda, const double initSigma, const size_t maxIterations) { augfunc.Lambda() = initLambda; augfunc.Sigma() = initSigma; return Optimize(coordinates, maxIterations); } template bool AugLagrangian::Optimize(arma::mat& coordinates, const size_t maxIterations) { // Ensure that we update lambda immediately. double penaltyThreshold = DBL_MAX; // Track the last objective to compare for convergence. double lastObjective = function.Evaluate(coordinates); // Then, calculate the current penalty. double penalty = 0; for (size_t i = 0; i < function.NumConstraints(); i++) penalty += std::pow(function.EvaluateConstraint(i, coordinates), 2); Log::Debug << "Penalty is " << penalty << " (threshold " << penaltyThreshold << ")." << std::endl; // The odd comparison allows user to pass maxIterations = 0 (i.e. no limit on // number of iterations). size_t it; for (it = 0; it != (maxIterations - 1); it++) { Log::Info << "AugLagrangian on iteration " << it << ", starting with objective " << lastObjective << "." << std::endl; if (!lbfgs.Optimize(coordinates)) Log::Info << "L-BFGS reported an error during optimization." << std::endl; // Check if we are done with the entire optimization (the threshold we are // comparing with is arbitrary). if (std::abs(lastObjective - function.Evaluate(coordinates)) < 1e-10 && augfunc.Sigma() > 500000) return true; lastObjective = function.Evaluate(coordinates); // Assuming that the optimization has converged to a new set of coordinates, // we now update either lambda or sigma. We update sigma if the penalty // term is too high, and we update lambda otherwise. // First, calculate the current penalty. double penalty = 0; for (size_t i = 0; i < function.NumConstraints(); i++) { penalty += std::pow(function.EvaluateConstraint(i, coordinates), 2); // Log::Debug << "Constraint " << i << " is " << // function.EvaluateConstraint(i, coordinates) << std::endl; } Log::Info << "Penalty is " << penalty << " (threshold " << penaltyThreshold << ")." 
<< std::endl; for (size_t i = 0; i < function.NumConstraints(); ++i) { // arma::mat tmpgrad; // function.GradientConstraint(i, coordinates, tmpgrad); // Log::Debug << "Gradient of constraint " << i << " is " << std::endl; // Log::Debug << tmpgrad << std::endl; } if (penalty < penaltyThreshold) // We update lambda. { // We use the update: lambda_{k + 1} = lambda_k - sigma * c(coordinates), // but we have to write a loop to do this for each constraint. for (size_t i = 0; i < function.NumConstraints(); i++) augfunc.Lambda()[i] -= augfunc.Sigma() * function.EvaluateConstraint(i, coordinates); // We also update the penalty threshold to be a factor of the current // penalty. TODO: this factor should be a parameter (from CLI). The // value of 0.25 is taken from Burer and Monteiro (2002). penaltyThreshold = 0.25 * penalty; Log::Info << "Lagrange multiplier estimates updated." << std::endl; } else { // We multiply sigma by a constant value. TODO: this factor should be a // parameter (from CLI). The value of 10 is taken from Burer and Monteiro // (2002). augfunc.Sigma() *= 10; Log::Info << "Updated sigma to " << augfunc.Sigma() << "." << std::endl; } } return false; } } // namespace optimization } // namespace mlpack #endif // MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_AUG_LAGRANGIAN_IMPL_HPP mlpack-2.2.5/src/mlpack/core/optimizers/aug_lagrangian/aug_lagrangian_test_functions.cpp000066400000000000000000000223131315013601400317060ustar00rootroot00000000000000/** * @file aug_lagrangian_test_functions.cpp * @author Ryan Curtin * * Implementation of AugLagrangianTestFunction class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "aug_lagrangian_test_functions.hpp" using namespace mlpack; using namespace mlpack::optimization; // // AugLagrangianTestFunction // AugLagrangianTestFunction::AugLagrangianTestFunction() { // Set the initial point to be (0, 0). initialPoint.zeros(2, 1); } AugLagrangianTestFunction::AugLagrangianTestFunction( const arma::mat& initialPoint) : initialPoint(initialPoint) { // Nothing to do. } double AugLagrangianTestFunction::Evaluate(const arma::mat& coordinates) { // f(x) = 6 x_1^2 + 4 x_1 x_2 + 3 x_2^2 return ((6 * std::pow(coordinates[0], 2)) + (4 * (coordinates[0] * coordinates[1])) + (3 * std::pow(coordinates[1], 2))); } void AugLagrangianTestFunction::Gradient(const arma::mat& coordinates, arma::mat& gradient) { // f'_x1(x) = 12 x_1 + 4 x_2 // f'_x2(x) = 4 x_1 + 6 x_2 gradient.set_size(2, 1); gradient[0] = 12 * coordinates[0] + 4 * coordinates[1]; gradient[1] = 4 * coordinates[0] + 6 * coordinates[1]; } double AugLagrangianTestFunction::EvaluateConstraint(const size_t index, const arma::mat& coordinates) { // We return 0 if the index is wrong (not 0). if (index != 0) return 0; // c(x) = x_1 + x_2 - 5 return (coordinates[0] + coordinates[1] - 5); } void AugLagrangianTestFunction::GradientConstraint(const size_t index, const arma::mat& /* coordinates */, arma::mat& gradient) { // If the user passed an invalid index (not 0), we will return a zero // gradient. gradient.zeros(2, 1); if (index == 0) { // c'_x1(x) = 1 // c'_x2(x) = 1 gradient.ones(2, 1); // Use a shortcut instead of assigning individually. } } // // GockenbachFunction // GockenbachFunction::GockenbachFunction() { // Set the initial point to (0, 0, 1). 
initialPoint.zeros(3, 1); initialPoint[2] = 1; } GockenbachFunction::GockenbachFunction(const arma::mat& initialPoint) : initialPoint(initialPoint) { // Nothing to do. } double GockenbachFunction::Evaluate(const arma::mat& coordinates) { // f(x) = (x_1 - 1)^2 + 2 (x_2 + 2)^2 + 3(x_3 + 3)^2 return ((std::pow(coordinates[0] - 1, 2)) + (2 * std::pow(coordinates[1] + 2, 2)) + (3 * std::pow(coordinates[2] + 3, 2))); } void GockenbachFunction::Gradient(const arma::mat& coordinates, arma::mat& gradient) { // f'_x1(x) = 2 (x_1 - 1) // f'_x2(x) = 4 (x_2 + 2) // f'_x3(x) = 6 (x_3 + 3) gradient.set_size(3, 1); gradient[0] = 2 * (coordinates[0] - 1); gradient[1] = 4 * (coordinates[1] + 2); gradient[2] = 6 * (coordinates[2] + 3); } double GockenbachFunction::EvaluateConstraint(const size_t index, const arma::mat& coordinates) { double constraint = 0; switch (index) { case 0: // g(x) = (x_3 - x_2 - x_1 - 1) = 0 constraint = (coordinates[2] - coordinates[1] - coordinates[0] - 1); break; case 1: // h(x) = (x_3 - x_1^2) >= 0 // To deal with the inequality, the constraint will simply evaluate to 0 // when h(x) >= 0. constraint = std::min(0.0, (coordinates[2] - std::pow(coordinates[0], 2))); break; } // 0 will be returned for an invalid index (but this is okay). return constraint; } void GockenbachFunction::GradientConstraint(const size_t index, const arma::mat& coordinates, arma::mat& gradient) { gradient.zeros(3, 1); switch (index) { case 0: // g'_x1(x) = -1 // g'_x2(x) = -1 // g'_x3(x) = 1 gradient[0] = -1; gradient[1] = -1; gradient[2] = 1; break; case 1: // h'_x1(x) = -2 x_1 // h'_x2(x) = 0 // h'_x3(x) = 1 gradient[0] = -2 * coordinates[0]; gradient[2] = 1; break; } } // // LovaszThetaSDP // LovaszThetaSDP::LovaszThetaSDP() : edges(0), vertices(0), initialPoint(0, 0) { } LovaszThetaSDP::LovaszThetaSDP(const arma::mat& edges) : edges(edges), initialPoint(0, 0) { // Calculate V by finding the maximum index in the edges matrix. vertices = max(max(edges)) + 1; } double LovaszThetaSDP::Evaluate(const arma::mat& coordinates) { // The objective is equal to -Tr(ones * X) = -Tr(ones * (R^T * R)). // This can be simplified into the negative sum of (R^T * R). arma::mat x = trans(coordinates) * coordinates; double obj = -accu(x); return obj; } void LovaszThetaSDP::Gradient(const arma::mat& coordinates, arma::mat& gradient) { // The gradient is equal to (2 S' R^T)^T, with R being coordinates. // S' = C - sum_{i = 1}^{m} [ y_i - sigma (Tr(A_i * (R^T R)) - b_i)] * A_i // We will calculate it in a not very smart way, but it should work. // Initialize S' piece by piece. It is of size n x n. const size_t n = coordinates.n_cols; arma::mat s(n, n); s.ones(); s *= -1; // C = -ones(). for (size_t i = 0; i < NumConstraints(); ++i) { // Calculate [ y_i - sigma (Tr(A_i * (R^T R)) - b_i) ] * A_i. // Result will be a matrix; inner result is a scalar. if (i == 0) { // A_0 = I_n. Hooray! That's easy! b_0 = 1. double inner = -1 * double(n) - 0.5 * (accu(trans(coordinates) % coordinates) - 1); arma::mat zz = (inner * arma::eye(n, n)); s -= zz; } else { // Get edge so we can construct constraint A_i matrix. b_i = 0. arma::vec edge = edges.col(i - 1); arma::mat a; a.zeros(n, n); // Only two nonzero entries. a(edge[0], edge[1]) = 1; a(edge[1], edge[0]) = 1; double inner = (-1) - 0.5 * (accu(a % (trans(coordinates) * coordinates))); arma::mat zz = (inner * a); s -= zz; } } // The gradient of -Tr(ones * X) is equal to -2 * ones * R. 
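  // (Here, though, s already includes the constraint terms: it is the matrix
  // S' described at the top of this function, so the full gradient of the
  // augmented Lagrangian is (2 S' R^T)^T, which the next line computes.)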
gradient = trans(2 * s * trans(coordinates)); } size_t LovaszThetaSDP::NumConstraints() const { // Each edge is a constraint, and we have the constraint Tr(X) = 1. return edges.n_cols + 1; } double LovaszThetaSDP::EvaluateConstraint(const size_t index, const arma::mat& coordinates) { if (index == 0) // This is the constraint Tr(X) = 1. { double sum = -1; // Tr(X) - 1 = 0, so we prefix the subtraction. for (size_t i = 0; i < coordinates.n_cols; i++) sum += std::abs(dot(coordinates.col(i), coordinates.col(i))); return sum; } size_t i = edges(0, index - 1); size_t j = edges(1, index - 1); // The constraint itself is X_ij, or (R^T R)_ij. return std::abs(dot(coordinates.col(i), coordinates.col(j))); } void LovaszThetaSDP::GradientConstraint(const size_t index, const arma::mat& coordinates, arma::mat& gradient) { if (index == 0) // This is the constraint Tr(X) = 1. { gradient = 2 * coordinates; // d/dR (Tr(R R^T)) = 2 R. return; } size_t i = edges(0, index - 1); size_t j = edges(1, index - 1); // Since the constraint is (R^T R)_ij, the gradient for (x, y) will be (I // derived this for one of the MVU constraints): // 0 , y != i, y != j // 2 R_xj, y = i, y != j // 2 R_xi, y != i, y = j // 4 R_xy, y = i, y = j // This results in the gradient matrix having two nonzero rows; for row // i, the elements are R_nj, where n is the row; for column j, the elements // are R_ni. gradient.zeros(coordinates.n_rows, coordinates.n_cols); gradient.col(i) = coordinates.col(j); gradient.col(j) += coordinates.col(i); // In case j = i (shouldn't happen). } const arma::mat& LovaszThetaSDP::GetInitialPoint() { if (initialPoint.n_rows != 0 && initialPoint.n_cols != 0) return initialPoint; // It has already been calculated. // First, we must calculate the correct value of r. The matrix we return, R, // will be r x V, because X = R^T R is of dimension V x V. // The rule for calculating r (from Monteiro and Burer, eq. 5) is // r = max(r >= 0 : r (r + 1) / 2 <= m } // where m is equal to the number of constraints plus one. // // Solved, this is // 0.5 r^2 + 0.5 r - m = 0 // which becomes // r = (-0.5 [+/-] sqrt((-0.5)^2 - 4 * -0.5 * m)) / -1 // r = 0.5 [+/-] sqrt(0.25 + 2 m) // and because m is always positive, // r = 0.5 + sqrt(0.25 + 2m) float m = NumConstraints(); float r = 0.5 + sqrt(0.25 + 2 * m); if (ceil(r) > vertices) r = vertices; // An upper bound on the dimension. initialPoint.set_size(ceil(r), vertices); // Now we set the entries of the initial matrix according to the formula given // in Section 4 of Monteiro and Burer. for (size_t i = 0; i < r; i++) { for (size_t j = 0; j < (size_t) vertices; j++) { if (i == j) initialPoint(i, j) = sqrt(1.0 / r) + sqrt(1.0 / (vertices * m)); else initialPoint(i, j) = sqrt(1.0 / (vertices * m)); } } return initialPoint; } mlpack-2.2.5/src/mlpack/core/optimizers/aug_lagrangian/aug_lagrangian_test_functions.hpp000066400000000000000000000106431315013601400317160ustar00rootroot00000000000000/** * @file aug_lagrangian_test_functions.hpp * @author Ryan Curtin * * Define test functions for the augmented Lagrangian method. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #ifndef MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_TEST_FUNCTIONS_HPP #define MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_TEST_FUNCTIONS_HPP #include namespace mlpack { namespace optimization { /** * This function is taken from "Practical Mathematical Optimization" (Snyman), * section 5.3.8 ("Application of the Augmented Lagrangian Method"). It has * only one constraint. * * The minimum that satisfies the constraint is x = [1, 4], with an objective * value of 70. */ class AugLagrangianTestFunction { public: AugLagrangianTestFunction(); AugLagrangianTestFunction(const arma::mat& initial_point); double Evaluate(const arma::mat& coordinates); void Gradient(const arma::mat& coordinates, arma::mat& gradient); size_t NumConstraints() const { return 1; } double EvaluateConstraint(const size_t index, const arma::mat& coordinates); void GradientConstraint(const size_t index, const arma::mat& coordinates, arma::mat& gradient); const arma::mat& GetInitialPoint() const { return initialPoint; } private: arma::mat initialPoint; }; /** * This function is taken from M. Gockenbach's lectures on general nonlinear * programs, found at: * http://www.math.mtu.edu/~msgocken/ma5630spring2003/lectures/nlp/nlp.pdf * * The program we are using is example 2.5 from this document. * I have arbitrarily decided that this will be called the Gockenbach function. * * The minimum that satisfies the two constraints is given as * x = [0.12288, -1.1078, 0.015100], with an objective value of about 29.634. */ class GockenbachFunction { public: GockenbachFunction(); GockenbachFunction(const arma::mat& initial_point); double Evaluate(const arma::mat& coordinates); void Gradient(const arma::mat& coordinates, arma::mat& gradient); size_t NumConstraints() const { return 2; }; double EvaluateConstraint(const size_t index, const arma::mat& coordinates); void GradientConstraint(const size_t index, const arma::mat& coordinates, arma::mat& gradient); const arma::mat& GetInitialPoint() const { return initialPoint; } private: arma::mat initialPoint; }; /** * This function is the Lovasz-Theta semidefinite program, as implemented in the * following paper: * * S. Burer, R. Monteiro * "A nonlinear programming algorithm for solving semidefinite programs via * low-rank factorization." * Journal of Mathematical Programming, 2004 * * Given a simple, undirected graph G = (V, E), the Lovasz-Theta SDP is defined * by: * * min_X{Tr(-(e e^T)^T X) : Tr(X) = 1, X_ij = 0 for all (i, j) in E, X >= 0} * * where e is the vector of all ones and X has dimension |V| x |V|. * * In the Monteiro-Burer formulation, we take X = R * R^T, where R is the * coordinates given to the Evaluate(), Gradient(), EvaluateConstraint(), and * GradientConstraint() functions. */ class LovaszThetaSDP { public: LovaszThetaSDP(); /** * Initialize the Lovasz-Theta SDP with the given set of edges. The edge * matrix should consist of rows of two dimensions, where dimension 0 is the * first vertex of the edge and dimension 1 is the second edge (or vice versa, * as it doesn't make a difference). * * @param edges Matrix of edges. 
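   *
   * As a sketch (hypothetical graph), the edge matrix for a triangle on
   * vertices {0, 1, 2} would store one edge per column:
   *
   * @code
   * arma::mat edges("0 1 2; 1 2 0"); // Columns: (0,1), (1,2), (2,0).
   * LovaszThetaSDP sdp(edges);
   * @endcode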
   */
  LovaszThetaSDP(const arma::mat& edges);

  double Evaluate(const arma::mat& coordinates);
  void Gradient(const arma::mat& coordinates, arma::mat& gradient);

  size_t NumConstraints() const;

  double EvaluateConstraint(const size_t index, const arma::mat& coordinates);
  void GradientConstraint(const size_t index,
                          const arma::mat& coordinates,
                          arma::mat& gradient);

  const arma::mat& GetInitialPoint();

  const arma::mat& Edges() const { return edges; }
  arma::mat& Edges() { return edges; }

 private:
  arma::mat edges;
  size_t vertices;
  arma::mat initialPoint;
};

} // namespace optimization
} // namespace mlpack

#endif // MLPACK_CORE_OPTIMIZERS_AUG_LAGRANGIAN_TEST_FUNCTIONS_HPP
mlpack-2.2.5/src/mlpack/core/optimizers/gradient_descent/000077500000000000000000000000001315013601400234565ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/gradient_descent/CMakeLists.txt000066400000000000000000000004301315013601400262130ustar00rootroot00000000000000set(SOURCES
  gradient_descent.hpp
  gradient_descent_impl.hpp
  test_function.hpp
  test_function.cpp
)

set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)
mlpack-2.2.5/src/mlpack/core/optimizers/gradient_descent/gradient_descent.hpp000066400000000000000000000077171315013601400275030ustar00rootroot00000000000000/**
 * @file gradient_descent.hpp
 * @author Sumedh Ghaisas
 *
 * Simple Gradient Descent.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_OPTIMIZERS_GRADIENT_DESCENT_GRADIENT_DESCENT_HPP
#define MLPACK_CORE_OPTIMIZERS_GRADIENT_DESCENT_GRADIENT_DESCENT_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace optimization {

/**
 * Gradient Descent is a technique to minimize a function.  To find a local
 * minimum of a function using gradient descent, one takes steps proportional
 * to the negative of the gradient of the function at the current point,
 * producing the following update scheme:
 *
 * \f[
 * A_{j + 1} = A_j - \alpha \nabla F(A_j)
 * \f]
 *
 * where \f$ \alpha \f$ is a parameter which specifies the step size.  \f$ F \f$
 * is the function being optimized.  The algorithm continues until \f$ j \f$
 * reaches the maximum number of iterations---or when an update produces an
 * improvement within a certain tolerance \f$ \epsilon \f$.  That is,
 *
 * \f[
 * | F(A_{j + 1}) - F(A_j) | < \epsilon.
 * \f]
 *
 * The parameter \f$\epsilon\f$ is specified by the tolerance parameter to the
 * constructor.
 *
 * For Gradient Descent to work, a FunctionType template parameter is
 * required.  This class must implement the following functions:
 *
 *   double Evaluate(const arma::mat& coordinates);
 *   void Gradient(const arma::mat& coordinates,
 *                 arma::mat& gradient);
 *
 * @tparam FunctionType Objective function type to be minimized.
 */
template<typename FunctionType>
class GradientDescent
{
 public:
  /**
   * Construct the Gradient Descent optimizer with the given function and
   * parameters.  The defaults here are not necessarily good for the given
   * problem, so it is suggested that the values used be tailored to the task
   * at hand.
   *
   * @param function Function to be optimized (minimized).
   * @param stepSize Step size for each iteration.
   * @param maxIterations Maximum number of iterations allowed (0 means no
   *    limit).
* @param tolerance Maximum absolute tolerance to terminate algorithm. */ GradientDescent(FunctionType& function, const double stepSize = 0.01, const size_t maxIterations = 100000, const double tolerance = 1e-5); /** * Optimize the given function using gradient descent. The given starting * point will be modified to store the finishing point of the algorithm, and * the final objective value is returned. * * @param iterate Starting point (will be modified). * @return Objective value of the final point. */ double Optimize(arma::mat& iterate); //! Get the instantiated function to be optimized. const FunctionType& Function() const { return function; } //! Modify the instantiated function. FunctionType& Function() { return function; } //! Get the step size. double StepSize() const { return stepSize; } //! Modify the step size. double& StepSize() { return stepSize; } //! Get the maximum number of iterations (0 indicates no limit). size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations (0 indicates no limit). size_t& MaxIterations() { return maxIterations; } //! Get the tolerance for termination. double Tolerance() const { return tolerance; } //! Modify the tolerance for termination. double& Tolerance() { return tolerance; } private: //! The instantiated function. FunctionType& function; //! The step size for each example. double stepSize; //! The maximum number of allowed iterations. size_t maxIterations; //! The tolerance for termination. double tolerance; }; } // namespace optimization } // namespace mlpack // Include implementation. #include "gradient_descent_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/gradient_descent/gradient_descent_impl.hpp000066400000000000000000000051321315013601400305130ustar00rootroot00000000000000/** * @file gradient_descent_impl.hpp * @author Sumedh Ghaisas * * Simple gradient descent implementation. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_GRADIENT_DESCENT_GRADIENT_DESCENT_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_GRADIENT_DESCENT_GRADIENT_DESCENT_IMPL_HPP // In case it hasn't been included yet. #include "gradient_descent.hpp" namespace mlpack { namespace optimization { template GradientDescent::GradientDescent( FunctionType& function, const double stepSize, const size_t maxIterations, const double tolerance) : function(function), stepSize(stepSize), maxIterations(maxIterations), tolerance(tolerance) { /* Nothing to do. */ } //! Optimize the function (minimize). template double GradientDescent::Optimize( arma::mat& iterate) { // To keep track of where we are and how things are going. double overallObjective = function.Evaluate(iterate); double lastObjective = DBL_MAX; // Now iterate! arma::mat gradient(iterate.n_rows, iterate.n_cols); for (size_t i = 1; i != maxIterations; ++i) { // Output current objective function. Log::Info << "Gradient Descent: iteration " << i << ", objective " << overallObjective << "." << std::endl; if (std::isnan(overallObjective) || std::isinf(overallObjective)) { Log::Warn << "Gradient Descent: converged to " << overallObjective << "; terminating" << " with failure. Try a smaller step size?" 
<< std::endl; return overallObjective; } if (std::abs(lastObjective - overallObjective) < tolerance) { Log::Info << "Gradient Descent: minimized within tolerance " << tolerance << "; " << "terminating optimization." << std::endl; return overallObjective; } // Reset the counter variables. lastObjective = overallObjective; function.Gradient(iterate, gradient); // And update the iterate. iterate -= stepSize * gradient; // Now add that to the overall objective function. overallObjective = function.Evaluate(iterate); } Log::Info << "Gradient Descent: maximum iterations (" << maxIterations << ") reached; " << "terminating optimization." << std::endl; return overallObjective; } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/gradient_descent/test_function.cpp000066400000000000000000000015371315013601400270540ustar00rootroot00000000000000/** * @file test_function.cpp * @author Sumedh Ghaisas * * Implementation of very simple test function for gradient descent. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "test_function.hpp" using namespace mlpack; using namespace mlpack::optimization; using namespace mlpack::optimization::test; double GDTestFunction::Evaluate(const arma::mat& coordinates) const { arma::vec temp = arma::trans(coordinates) * coordinates; return temp(0, 0); } void GDTestFunction::Gradient(const arma::mat& coordinates, arma::mat& gradient) const { gradient = 2 * coordinates; } mlpack-2.2.5/src/mlpack/core/optimizers/gradient_descent/test_function.hpp000066400000000000000000000024431315013601400270560ustar00rootroot00000000000000/** * @file test_function.hpp * @author Sumedh Ghaisas * * Very simple test function for SGD. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_GD_TEST_FUNCTION_HPP #define MLPACK_CORE_OPTIMIZERS_GD_TEST_FUNCTION_HPP #include namespace mlpack { namespace optimization { namespace test { //! Very, very simple test function which is the composite of three other //! functions. The gradient is not very steep far away from the optimum, so a //! larger step size may be required to optimize it in a reasonable number of //! iterations. class GDTestFunction { public: //! Nothing to do for the constructor. GDTestFunction() { } //! Get the starting point. arma::mat GetInitialPoint() const { return arma::mat("1; 3; 2"); } //! Evaluate a function. double Evaluate(const arma::mat& coordinates) const; //! Evaluate the gradient of a function. 
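//
// (Editor's illustration.) A minimal usage sketch for the GradientDescent
// optimizer defined earlier, paired with this GDTestFunction.  This is an
// assumed example rather than part of the mlpack 2.2.5 sources, and it spells
// out the template argument (GradientDescent<GDTestFunction>) explicitly.
// The Gradient() declaration for GDTestFunction continues directly below.
//
//   #include <mlpack/core.hpp>
//
//   using namespace mlpack::optimization;
//   using namespace mlpack::optimization::test;
//
//   int main()
//   {
//     GDTestFunction f;                        // f(x) = x^T x; minimum is 0.
//     GradientDescent<GDTestFunction> gd(f, 0.01, 100000, 1e-12);
//     arma::mat coordinates = f.GetInitialPoint();
//     const double objective = gd.Optimize(coordinates);
//     // coordinates should now be near [0; 0; 0] and objective near 0.
//     return 0;
//   }
//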
void Gradient(const arma::mat& coordinates, arma::mat& gradient) const; }; } // namespace test } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/lbfgs/000077500000000000000000000000001315013601400212515ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/lbfgs/CMakeLists.txt000066400000000000000000000004041315013601400240070ustar00rootroot00000000000000set(SOURCES lbfgs_impl.hpp lbfgs.hpp test_functions.hpp test_functions.cpp ) set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/optimizers/lbfgs/lbfgs.hpp000066400000000000000000000234641315013601400230700ustar00rootroot00000000000000/** * @file lbfgs.hpp * @author Dongryeol Lee * @author Ryan Curtin * * The generic L-BFGS optimizer. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_LBFGS_LBFGS_HPP #define MLPACK_CORE_OPTIMIZERS_LBFGS_LBFGS_HPP #include namespace mlpack { namespace optimization { /** * The generic L-BFGS optimizer, which uses a back-tracking line search * algorithm to minimize a function. The parameters for the algorithm (number * of memory points, maximum step size, and so forth) are all configurable via * either the constructor or standalone modifier functions. A function which * can be optimized by this class must implement the following methods: * * - a default constructor * - double Evaluate(const arma::mat& coordinates); * - void Gradient(const arma::mat& coordinates, arma::mat& gradient); * - arma::mat& GetInitialPoint(); */ template class L_BFGS { public: /** * Initialize the L-BFGS object. Store a reference to the function we will be * optimizing and set the size of the memory for the algorithm. There are * many parameters that can be set for the optimization, but default values * are given for each of them. * * @param function Instance of function to be optimized. * @param numBasis Number of memory points to be stored (default 5). * @param maxIterations Maximum number of iterations for the optimization * (0 means no limit and may run indefinitely). * @param armijoConstant Controls the accuracy of the line search routine for * determining the Armijo condition. * @param wolfe Parameter for detecting the Wolfe condition. * @param minGradientNorm Minimum gradient norm required to continue the * optimization. * @param maxLineSearchTrials The maximum number of trials for the line search * (before giving up). * @param minStep The minimum step of the line search. * @param maxStep The maximum step of the line search. */ L_BFGS(FunctionType& function, const size_t numBasis = 10, /* same default as scipy */ const size_t maxIterations = 10000, /* many but not infinite */ const double armijoConstant = 1e-4, const double wolfe = 0.9, const double minGradientNorm = 1e-6, const double factr = 1e-15, const size_t maxLineSearchTrials = 50, const double minStep = 1e-20, const double maxStep = 1e20); /** * Return the point where the lowest function value has been found. * * @return arma::vec representing the point and a double with the function * value at that point. 
*/ const std::pair& MinPointIterate() const; /** * Use L-BFGS to optimize the given function, starting at the given iterate * point and finding the minimum. The maximum number of iterations is set in * the constructor (or with MaxIterations()). Alternately, another overload * is provided which takes a maximum number of iterations as a parameter. The * given starting point will be modified to store the finishing point of the * algorithm, and the final objective value is returned. * * @param iterate Starting point (will be modified). * @return Objective value of the final point. */ double Optimize(arma::mat& iterate); /** * Use L-BFGS to optimize (minimize) the given function, starting at the given * iterate point, and performing no more than the given maximum number of * iterations (the class variable maxIterations is ignored for this run, but * not modified). The given starting point will be modified to store the * finishing point of the algorithm, and the final objective value is * returned. * * @param iterate Starting point (will be modified). * @param maxIterations Maximum number of iterations (0 specifies no limit). * @return Objective value of the final point. */ double Optimize(arma::mat& iterate, const size_t maxIterations); //! Return the function that is being optimized. const FunctionType& Function() const { return function; } //! Modify the function that is being optimized. FunctionType& Function() { return function; } //! Get the memory size. size_t NumBasis() const { return numBasis; } //! Modify the memory size. size_t& NumBasis() { return numBasis; } //! Get the maximum number of iterations. size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations. size_t& MaxIterations() { return maxIterations; } //! Get the Armijo condition constant. double ArmijoConstant() const { return armijoConstant; } //! Modify the Armijo condition constant. double& ArmijoConstant() { return armijoConstant; } //! Get the Wolfe parameter. double Wolfe() const { return wolfe; } //! Modify the Wolfe parameter. double& Wolfe() { return wolfe; } //! Get the minimum gradient norm. double MinGradientNorm() const { return minGradientNorm; } //! Modify the minimum gradient norm. double& MinGradientNorm() { return minGradientNorm; } //! Get the factr value. double Factr() const { return factr; } //! Modify the factr value. double& Factr() { return factr; } //! Get the maximum number of line search trials. size_t MaxLineSearchTrials() const { return maxLineSearchTrials; } //! Modify the maximum number of line search trials. size_t& MaxLineSearchTrials() { return maxLineSearchTrials; } //! Return the minimum line search step size. double MinStep() const { return minStep; } //! Modify the minimum line search step size. double& MinStep() { return minStep; } //! Return the maximum line search step size. double MaxStep() const { return maxStep; } //! Modify the maximum line search step size. double& MaxStep() { return maxStep; } private: //! Internal reference to the function we are optimizing. FunctionType& function; //! Position of the new iterate. arma::mat newIterateTmp; //! Stores all the s matrices in memory. arma::cube s; //! Stores all the y matrices in memory. arma::cube y; //! Size of memory for this L-BFGS optimizer. size_t numBasis; //! Maximum number of iterations. size_t maxIterations; //! Parameter for determining the Armijo condition. double armijoConstant; //! Parameter for detecting the Wolfe condition. double wolfe; //! 
Minimum gradient norm required to continue the optimization. double minGradientNorm; //! Minimum relative function value decrease to continue the optimization. double factr; //! Maximum number of trials for the line search. size_t maxLineSearchTrials; //! Minimum step of the line search. double minStep; //! Maximum step of the line search. double maxStep; //! Best point found so far. std::pair minPointIterate; /** * Evaluate the function at the given iterate point and store the result if it * is a new minimum. * * @return The value of the function. */ double Evaluate(const arma::mat& iterate); /** * Calculate the scaling factor, gamma, which is used to scale the Hessian * approximation matrix. See method M3 in Section 4 of Liu and Nocedal * (1989). * * @return The calculated scaling factor. */ double ChooseScalingFactor(const size_t iterationNum, const arma::mat& gradient); /** * Check to make sure that the norm of the gradient is not smaller than 1e-5. * Currently that value is not configurable. * * @return (norm < minGradientNorm). */ bool GradientNormTooSmall(const arma::mat& gradient); /** * Perform a back-tracking line search along the search direction to * calculate a step size satisfying the Wolfe conditions. The parameter * iterate will be modified if the method is successful. * * @param functionValue Value of the function at the initial point * @param iterate The initial point to begin the line search from * @param gradient The gradient at the initial point * @param searchDirection A vector specifying the search direction * @param stepSize Variable the calculated step size will be stored in * * @return false if no step size is suitable, true otherwise. */ bool LineSearch(double& functionValue, arma::mat& iterate, arma::mat& gradient, const arma::mat& searchDirection); /** * Find the L-BFGS search direction. * * @param gradient The gradient at the current point * @param iteration_num The iteration number * @param scaling_factor Scaling factor to use (see ChooseScalingFactor_()) * @param search_direction Vector to store search direction in */ void SearchDirection(const arma::mat& gradient, const size_t iterationNum, const double scalingFactor, arma::mat& searchDirection); /** * Update the y and s matrices, which store the differences * between the iterate and old iterate and the differences between the * gradient and the old gradient, respectively. * * @param iterationNum Iteration number * @param iterate Current point * @param oldIterate Point at last iteration * @param gradient Gradient at current point (iterate) * @param oldGradient Gradient at last iteration point (oldIterate) */ void UpdateBasisSet(const size_t iterationNum, const arma::mat& iterate, const arma::mat& oldIterate, const arma::mat& gradient, const arma::mat& oldGradient); }; } // namespace optimization } // namespace mlpack #include "lbfgs_impl.hpp" #endif // MLPACK_CORE_OPTIMIZERS_LBFGS_LBFGS_HPP mlpack-2.2.5/src/mlpack/core/optimizers/lbfgs/lbfgs_impl.hpp000066400000000000000000000361761315013601400241150ustar00rootroot00000000000000/** * @file lbfgs_impl.hpp * @author Dongryeol Lee (dongryel@cc.gatech.edu) * @author Ryan Curtin * * The implementation of the L_BFGS optimizer. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
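 *
 * (Editor's illustration.) A minimal usage sketch, assuming the
 * RosenbrockFunction from the L-BFGS test functions and spelling out the
 * L_BFGS<FunctionType> template argument explicitly:
 *
 *   RosenbrockFunction f;
 *   L_BFGS<RosenbrockFunction> lbfgs(f);
 *   arma::mat coordinates = f.GetInitialPoint();
 *   const double objective = lbfgs.Optimize(coordinates);
 *   // coordinates should now be near [1; 1] and objective near 0.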
*/ #ifndef MLPACK_CORE_OPTIMIZERS_LBFGS_LBFGS_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_LBFGS_LBFGS_IMPL_HPP namespace mlpack { namespace optimization { /** * Initialize the L_BFGS object. Copy the function we will be optimizing and * set the size of the memory for the algorithm. * * @param function Instance of function to be optimized * @param numBasis Number of memory points to be stored * @param armijoConstant Controls the accuracy of the line search routine for * determining the Armijo condition. * @param wolfe Parameter for detecting the Wolfe condition. * @param minGradientNorm Minimum gradient norm required to continue the * optimization. * @param maxLineSearchTrials The maximum number of trials for the line search * (before giving up). * @param minStep The minimum step of the line search. * @param maxStep The maximum step of the line search. */ template L_BFGS::L_BFGS(FunctionType& function, const size_t numBasis, const size_t maxIterations, const double armijoConstant, const double wolfe, const double minGradientNorm, const double factr, const size_t maxLineSearchTrials, const double minStep, const double maxStep) : function(function), numBasis(numBasis), maxIterations(maxIterations), armijoConstant(armijoConstant), wolfe(wolfe), minGradientNorm(minGradientNorm), factr(factr), maxLineSearchTrials(maxLineSearchTrials), minStep(minStep), maxStep(maxStep) { // Get the dimensions of the coordinates of the function; GetInitialPoint() // might return an arma::vec, but that's okay because then n_cols will simply // be 1. const size_t rows = function.GetInitialPoint().n_rows; const size_t cols = function.GetInitialPoint().n_cols; newIterateTmp.set_size(rows, cols); s.set_size(rows, cols, numBasis); y.set_size(rows, cols, numBasis); // Allocate the pair holding the min iterate information. minPointIterate.first.zeros(rows, cols); minPointIterate.second = std::numeric_limits::max(); } /** * Evaluate the function at the given iterate point and store the result if * it is a new minimum. * * @return The value of the function */ template double L_BFGS::Evaluate(const arma::mat& iterate) { // Evaluate the function and keep track of the minimum function // value encountered during the optimization. double functionValue = function.Evaluate(iterate); if (functionValue < minPointIterate.second) { minPointIterate.first = iterate; minPointIterate.second = functionValue; } return functionValue; } /** * Calculate the scaling factor gamma which is used to scale the Hessian * approximation matrix. See method M3 in Section 4 of Liu and Nocedal (1989). * * @return The calculated scaling factor */ template double L_BFGS::ChooseScalingFactor(const size_t iterationNum, const arma::mat& gradient) { double scalingFactor = 1.0; if (iterationNum > 0) { int previousPos = (iterationNum - 1) % numBasis; // Get s and y matrices once instead of multiple times. arma::mat& sMat = s.slice(previousPos); arma::mat& yMat = y.slice(previousPos); scalingFactor = dot(sMat, yMat) / dot(yMat, yMat); } else { scalingFactor = 1.0 / sqrt(dot(gradient, gradient)); } return scalingFactor; } /** * Check to make sure that the norm of the gradient is not smaller than 1e-10. * Currently that value is not configurable. * * @return (norm < minGradientNorm) */ template bool L_BFGS::GradientNormTooSmall(const arma::mat& gradient) { double norm = arma::norm(gradient, 2); return (norm < minGradientNorm); } /** * Perform a back-tracking line search along the search direction to calculate a * step size satisfying the Wolfe conditions. 
* * @param functionValue Value of the function at the initial point * @param iterate The initial point to begin the line search from * @param gradient The gradient at the initial point * @param searchDirection A vector specifying the search direction * @param stepSize Variable the calculated step size will be stored in * * @return false if no step size is suitable, true otherwise. */ template bool L_BFGS::LineSearch(double& functionValue, arma::mat& iterate, arma::mat& gradient, const arma::mat& searchDirection) { // Default first step size of 1.0. double stepSize = 1.0; // The initial linear term approximation in the direction of the // search direction. double initialSearchDirectionDotGradient = arma::dot(gradient, searchDirection); // If it is not a descent direction, just report failure. if (initialSearchDirectionDotGradient > 0.0) { Log::Warn << "L-BFGS line search direction is not a descent direction " << "(terminating)!" << std::endl; return false; } // Save the initial function value. double initialFunctionValue = functionValue; // Unit linear approximation to the decrease in function value. double linearApproxFunctionValueDecrease = armijoConstant * initialSearchDirectionDotGradient; // The number of iteration in the search. size_t numIterations = 0; // Armijo step size scaling factor for increase and decrease. const double inc = 2.1; const double dec = 0.5; double width = 0; while (true) { // Perform a step and evaluate the gradient and the function values at that // point. newIterateTmp = iterate; newIterateTmp += stepSize * searchDirection; functionValue = Evaluate(newIterateTmp); function.Gradient(newIterateTmp, gradient); numIterations++; if (functionValue > initialFunctionValue + stepSize * linearApproxFunctionValueDecrease) { width = dec; } else { // Check Wolfe's condition. double searchDirectionDotGradient = arma::dot(gradient, searchDirection); if (searchDirectionDotGradient < wolfe * initialSearchDirectionDotGradient) { width = inc; } else { if (searchDirectionDotGradient > -wolfe * initialSearchDirectionDotGradient) { width = dec; } else { break; } } } // Terminate when the step size gets too small or too big or it // exceeds the max number of iterations. const bool cond1 = (stepSize < minStep); const bool cond2 = (stepSize > maxStep); const bool cond3 = (numIterations >= maxLineSearchTrials); if (cond1 || cond2 || cond3) break; // Scale the step size. stepSize *= width; } // Move to the new iterate. iterate = newIterateTmp; return true; } /** * Find the L_BFGS search direction. * * @param gradient The gradient at the current point * @param iterationNum The iteration number * @param scalingFactor Scaling factor to use (see ChooseScalingFactor_()) * @param searchDirection Vector to store search direction in */ template void L_BFGS::SearchDirection(const arma::mat& gradient, const size_t iterationNum, const double scalingFactor, arma::mat& searchDirection) { // Start from this point. searchDirection = gradient; // See "A Recursive Formula to Compute H * g" in "Updating quasi-Newton // matrices with limited storage" (Nocedal, 1980). // Temporary variables. arma::vec rho(numBasis); arma::vec alpha(numBasis); size_t limit = (numBasis > iterationNum) ? 
0 : (iterationNum - numBasis); for (size_t i = iterationNum; i != limit; i--) { int translatedPosition = (i + (numBasis - 1)) % numBasis; rho[iterationNum - i] = 1.0 / arma::dot(y.slice(translatedPosition), s.slice(translatedPosition)); alpha[iterationNum - i] = rho[iterationNum - i] * arma::dot(s.slice(translatedPosition), searchDirection); searchDirection -= alpha[iterationNum - i] * y.slice(translatedPosition); } searchDirection *= scalingFactor; for (size_t i = limit; i < iterationNum; i++) { int translatedPosition = i % numBasis; double beta = rho[iterationNum - i - 1] * arma::dot(y.slice(translatedPosition), searchDirection); searchDirection += (alpha[iterationNum - i - 1] - beta) * s.slice(translatedPosition); } // Negate the search direction so that it is a descent direction. searchDirection *= -1; } /** * Update the y and s matrices, which store the differences between * the iterate and old iterate and the differences between the gradient and the * old gradient, respectively. * * @param iterationNum Iteration number * @param iterate Current point * @param oldIterate Point at last iteration * @param gradient Gradient at current point (iterate) * @param oldGradient Gradient at last iteration point (oldIterate) */ template void L_BFGS::UpdateBasisSet(const size_t iterationNum, const arma::mat& iterate, const arma::mat& oldIterate, const arma::mat& gradient, const arma::mat& oldGradient) { // Overwrite a certain position instead of pushing everything in the vector // back one position. int overwritePos = iterationNum % numBasis; s.slice(overwritePos) = iterate - oldIterate; y.slice(overwritePos) = gradient - oldGradient; } /** * Return the point where the lowest function value has been found. * * @return arma::vec representing the point and a double with the function * value at that point. */ template inline const std::pair& L_BFGS::MinPointIterate() const { return minPointIterate; } template inline double L_BFGS::Optimize(arma::mat& iterate) { return Optimize(iterate, maxIterations); } /** * Use L_BFGS to optimize the given function, starting at the given iterate * point and performing no more than the specified number of maximum iterations. * The given starting point will be modified to store the finishing point of the * algorithm. * * @param numIterations Maximum number of iterations to perform * @param iterate Starting point (will be modified) */ template double L_BFGS::Optimize(arma::mat& iterate, const size_t maxIterations) { // Ensure that the cubes holding past iterations' information are the right // size. Also set the current best point value to the maximum. const size_t rows = function.GetInitialPoint().n_rows; const size_t cols = function.GetInitialPoint().n_cols; s.set_size(rows, cols, numBasis); y.set_size(rows, cols, numBasis); minPointIterate.second = std::numeric_limits::max(); // The old iterate to be saved. arma::mat oldIterate; oldIterate.zeros(iterate.n_rows, iterate.n_cols); // Whether to optimize until convergence. bool optimizeUntilConvergence = (maxIterations == 0); // The initial function value. double functionValue = Evaluate(iterate); double prevFunctionValue = functionValue; // The gradient: the current and the old. arma::mat gradient; arma::mat oldGradient; gradient.zeros(iterate.n_rows, iterate.n_cols); oldGradient.zeros(iterate.n_rows, iterate.n_cols); // The search direction. arma::mat searchDirection; searchDirection.zeros(iterate.n_rows, iterate.n_cols); // The initial gradient value. 
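  // (Editor's note.) In the loop below, progress is logged and convergence is
  // checked using the relative decrease (prevFunctionValue - functionValue) /
  // max(|prevFunctionValue|, |functionValue|, 1); this scale-free quantity is
  // what gets compared against factr to decide that the objective value has
  // stabilized.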
function.Gradient(iterate, gradient); // The main optimization loop. for (size_t itNum = 0; optimizeUntilConvergence || (itNum != maxIterations); ++itNum) { Log::Debug << "L-BFGS iteration " << itNum << "; objective " << function.Evaluate(iterate) << ", gradient norm " << arma::norm(gradient, 2) << ", " << ((prevFunctionValue - functionValue) / std::max(std::max(fabs(prevFunctionValue), fabs(functionValue)), 1.0)) << "." << std::endl; prevFunctionValue = functionValue; // Break when the norm of the gradient becomes too small. // // But don't do this on the first iteration to ensure we always take at // least one descent step. if (itNum > 0 && GradientNormTooSmall(gradient)) { Log::Debug << "L-BFGS gradient norm too small (terminating successfully)." << std::endl; break; } // Break if the objective is not a number. if (std::isnan(functionValue)) { Log::Warn << "L-BFGS terminated with objective " << functionValue << "; " << "are the objective and gradient functions implemented correctly?" << std::endl; break; } // Choose the scaling factor. double scalingFactor = ChooseScalingFactor(itNum, gradient); // Build an approximation to the Hessian and choose the search // direction for the current iteration. SearchDirection(gradient, itNum, scalingFactor, searchDirection); // Save the old iterate and the gradient before stepping. oldIterate = iterate; oldGradient = gradient; // Do a line search and take a step. if (!LineSearch(functionValue, iterate, gradient, searchDirection)) { Log::Debug << "Line search failed. Stopping optimization." << std::endl; break; // The line search failed; nothing else to try. } // It is possible that the difference between the two coordinates is zero. // In this case we terminate successfully. if (accu(iterate != oldIterate) == 0) { Log::Debug << "L-BFGS step size of 0 (terminating successfully)." << std::endl; break; } // If we can't make progress on the gradient, then we'll also accept // a stable function value. const double denom = std::max( std::max(fabs(prevFunctionValue), fabs(functionValue)), 1.0); if ((prevFunctionValue - functionValue) / denom <= factr) { Log::Debug << "L-BFGS function value stable (terminating successfully)." << std::endl; break; } // Overwrite an old basis set. UpdateBasisSet(itNum, iterate, oldIterate, gradient, oldGradient); } // End of the optimization loop. return function.Evaluate(iterate); } } // namespace optimization } // namespace mlpack #endif // MLPACK_CORE_OPTIMIZERS_LBFGS_LBFGS_IMPL_HPP mlpack-2.2.5/src/mlpack/core/optimizers/lbfgs/test_functions.cpp000066400000000000000000000146721315013601400250360ustar00rootroot00000000000000/** * @file test_functions.cpp * @author Ryan Curtin * * Implementations of the test functions defined in test_functions.hpp. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "test_functions.hpp" using namespace mlpack::optimization::test; // // RosenbrockFunction implementation // RosenbrockFunction::RosenbrockFunction() { initialPoint.set_size(2, 1); initialPoint[0] = -1.2; initialPoint[1] = 1; } /** * Calculate the objective function. 
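 *
 * (Editor's worked example.) At the initial point x = [-1.2, 1]:
 *   f(x) = 100 (1 - (-1.2)^2)^2 + (1 - (-1.2))^2
 *        = 100 (-0.44)^2 + (2.2)^2 = 19.36 + 4.84 = 24.2.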
 */
double RosenbrockFunction::Evaluate(const arma::mat& coordinates)
{
  double x1 = coordinates[0];
  double x2 = coordinates[1];

  double objective = /* f1(x) */ 100 * std::pow(x2 - std::pow(x1, 2), 2) +
                     /* f2(x) */ std::pow(1 - x1, 2);

  return objective;
}

/**
 * Calculate the gradient.
 */
void RosenbrockFunction::Gradient(const arma::mat& coordinates,
                                  arma::mat& gradient)
{
  // f'_{x1}(x) = -2 (1 - x1) + 400 (x1^3 - (x2 x1))
  // f'_{x2}(x) = 200 (x2 - x1^2)
  double x1 = coordinates[0];
  double x2 = coordinates[1];

  gradient.set_size(2, 1);
  gradient[0] = -2 * (1 - x1) + 400 * (std::pow(x1, 3) - x2 * x1);
  gradient[1] = 200 * (x2 - std::pow(x1, 2));
}

const arma::mat& RosenbrockFunction::GetInitialPoint() const
{
  return initialPoint;
}

//
// WoodFunction implementation
//

WoodFunction::WoodFunction()
{
  initialPoint.set_size(4, 1);
  initialPoint[0] = -3;
  initialPoint[1] = -1;
  initialPoint[2] = -3;
  initialPoint[3] = -1;
}

/**
 * Calculate the objective function.
 */
double WoodFunction::Evaluate(const arma::mat& coordinates)
{
  // For convenience; we assume these temporaries will be optimized out.
  double x1 = coordinates[0];
  double x2 = coordinates[1];
  double x3 = coordinates[2];
  double x4 = coordinates[3];

  // Use floating-point division for the 1/10 coefficient of f6; integer
  // division would truncate it to zero.
  double objective = /* f1(x) */ 100 * std::pow(x2 - std::pow(x1, 2), 2) +
                     /* f2(x) */ std::pow(1 - x1, 2) +
                     /* f3(x) */ 90 * std::pow(x4 - std::pow(x3, 2), 2) +
                     /* f4(x) */ std::pow(1 - x3, 2) +
                     /* f5(x) */ 10 * std::pow(x2 + x4 - 2, 2) +
                     /* f6(x) */ (1.0 / 10.0) * std::pow(x2 - x4, 2);

  return objective;
}

/**
 * Calculate the gradient.
 */
void WoodFunction::Gradient(const arma::mat& coordinates,
                            arma::mat& gradient)
{
  // For convenience; we assume these temporaries will be optimized out.
  double x1 = coordinates[0];
  double x2 = coordinates[1];
  double x3 = coordinates[2];
  double x4 = coordinates[3];

  // f'_{x1}(x) = 400 (x1^3 - x2 x1) - 2 (1 - x1)
  // f'_{x2}(x) = 200 (x2 - x1^2) + 20 (x2 + x4 - 2) + (1 / 5) (x2 - x4)
  // f'_{x3}(x) = 360 (x3^3 - x4 x3) - 2 (1 - x3)
  // f'_{x4}(x) = 180 (x4 - x3^2) + 20 (x2 + x4 - 2) - (1 / 5) (x2 - x4)
  // As above, the 1/5 coefficients must be evaluated in floating point.
  gradient.set_size(4, 1);
  gradient[0] = 400 * (std::pow(x1, 3) - x2 * x1) - 2 * (1 - x1);
  gradient[1] = 200 * (x2 - std::pow(x1, 2)) + 20 * (x2 + x4 - 2) +
      (1.0 / 5.0) * (x2 - x4);
  gradient[2] = 360 * (std::pow(x3, 3) - x4 * x3) - 2 * (1 - x3);
  gradient[3] = 180 * (x4 - std::pow(x3, 2)) + 20 * (x2 + x4 - 2) -
      (1.0 / 5.0) * (x2 - x4);
}

const arma::mat& WoodFunction::GetInitialPoint() const
{
  return initialPoint;
}

//
// GeneralizedRosenbrockFunction implementation
//

GeneralizedRosenbrockFunction::GeneralizedRosenbrockFunction(int n) : n(n)
{
  initialPoint.set_size(n, 1);
  for (int i = 0; i < n; i++) // Set to [-1.2 1 -1.2 1 ...].
  {
    if (i % 2 == 0)
      initialPoint[i] = -1.2;
    else
      initialPoint[i] = 1;
  }
}

/**
 * Calculate the objective function.
 */
double GeneralizedRosenbrockFunction::Evaluate(const arma::mat& coordinates)
    const
{
  double fval = 0;
  for (int i = 0; i < (n - 1); i++)
  {
    fval += 100 * std::pow(std::pow(coordinates[i], 2) -
        coordinates[i + 1], 2) + std::pow(1 - coordinates[i], 2);
  }

  return fval;
}

/**
 * Calculate the gradient.
 */
void GeneralizedRosenbrockFunction::Gradient(const arma::mat& coordinates,
                                             arma::mat& gradient) const
{
  gradient.set_size(n);
  for (int i = 0; i < (n - 1); i++)
  {
    gradient[i] = 400 * (std::pow(coordinates[i], 3) - coordinates[i] *
        coordinates[i + 1]) + 2 * (coordinates[i] - 1);

    if (i > 0)
      gradient[i] += 200 * (coordinates[i] - std::pow(coordinates[i - 1], 2));
  }

  gradient[n - 1] = 200 * (coordinates[n - 1] -
      std::pow(coordinates[n - 2], 2));
}

//!
Calculate the objective function of one of the individual functions. double GeneralizedRosenbrockFunction::Evaluate(const arma::mat& coordinates, const size_t i) const { return 100 * std::pow((std::pow(coordinates[i], 2) - coordinates[i + 1]), 2) + std::pow(1 - coordinates[i], 2); } //! Calculate the gradient of one of the individual functions. void GeneralizedRosenbrockFunction::Gradient(const arma::mat& coordinates, const size_t i, arma::mat& gradient) const { gradient.zeros(n); gradient[i] = 400 * (std::pow(coordinates[i], 3) - coordinates[i] * coordinates[i + 1]) + 2 * (coordinates[i] - 1); gradient[i + 1] = 200 * (coordinates[i + 1] - std::pow(coordinates[i], 2)); } const arma::mat& GeneralizedRosenbrockFunction::GetInitialPoint() const { return initialPoint; } // // RosenbrockWoodFunction implementation // RosenbrockWoodFunction::RosenbrockWoodFunction() : rf(4), wf() { initialPoint.set_size(4, 2); initialPoint.col(0) = rf.GetInitialPoint(); initialPoint.col(1) = wf.GetInitialPoint(); } /** * Calculate the objective function. */ double RosenbrockWoodFunction::Evaluate(const arma::mat& coordinates) { double objective = rf.Evaluate(coordinates.col(0)) + wf.Evaluate(coordinates.col(1)); return objective; } /*** * Calculate the gradient. */ void RosenbrockWoodFunction::Gradient(const arma::mat& coordinates, arma::mat& gradient) { gradient.set_size(4, 2); arma::vec grf(4); arma::vec gwf(4); rf.Gradient(coordinates.col(0), grf); wf.Gradient(coordinates.col(1), gwf); gradient.col(0) = grf; gradient.col(1) = gwf; } const arma::mat& RosenbrockWoodFunction::GetInitialPoint() const { return initialPoint; } mlpack-2.2.5/src/mlpack/core/optimizers/lbfgs/test_functions.hpp000066400000000000000000000116151315013601400250350ustar00rootroot00000000000000/** * @file test_functions.hpp * @author Ryan Curtin * * A collection of functions to test optimizers (in this case, L-BFGS). These * come from the following paper: * * "Testing Unconstrained Optimization Software" * Jorge J. Moré, Burton S. Garbow, and Kenneth E. Hillstrom. 1981. * ACM Trans. Math. Softw. 7, 1 (March 1981), 17-41. * http://portal.acm.org/citation.cfm?id=355934.355936 * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_LBFGS_TEST_FUNCTIONS_HPP #define MLPACK_CORE_OPTIMIZERS_LBFGS_TEST_FUNCTIONS_HPP #include // To fulfill the template policy class 'FunctionType', we must implement // the following: // // FunctionType(); // constructor // void Gradient(const arma::mat& coordinates, arma::mat& gradient); // double Evaluate(const arma::mat& coordinates); // const arma::mat& GetInitialPoint(); // // Note that we are using an arma::mat instead of the more intuitive and // expected arma::vec. This is because L-BFGS will also optimize matrices. // However, remember that an arma::vec is simply an (n x 1) arma::mat. You can // use either internally but the L-BFGS method requires arma::mat& to be passed // (C++ does not allow implicit reference casting to subclasses). namespace mlpack { namespace optimization { namespace test { /** * The Rosenbrock function, defined by * f(x) = f1(x) + f2(x) * f1(x) = 100 (x2 - x1^2)^2 * f2(x) = (1 - x1)^2 * x_0 = [-1.2, 1] * * This should optimize to f(x) = 0, at x = [1, 1]. 
* * "An automatic method for finding the greatest or least value of a function." * H.H. Rosenbrock. 1960. Comput. J. 3., 175-184. */ class RosenbrockFunction { public: RosenbrockFunction(); // initialize initial point double Evaluate(const arma::mat& coordinates); void Gradient(const arma::mat& coordinates, arma::mat& gradient); const arma::mat& GetInitialPoint() const; private: arma::mat initialPoint; }; /** * The Wood function, defined by * f(x) = f1(x) + f2(x) + f3(x) + f4(x) + f5(x) + f6(x) * f1(x) = 100 (x2 - x1^2)^2 * f2(x) = (1 - x1)^2 * f3(x) = 90 (x4 - x3^2)^2 * f4(x) = (1 - x3)^2 * f5(x) = 10 (x2 + x4 - 2)^2 * f6(x) = (1 / 10) (x2 - x4)^2 * x_0 = [-3, -1, -3, -1] * * This should optimize to f(x) = 0, at x = [1, 1, 1, 1]. * * "A comparative study of nonlinear programming codes." * A.R. Colville. 1968. Rep. 320-2949, IBM N.Y. Scientific Center. */ class WoodFunction { public: WoodFunction(); // initialize initial point double Evaluate(const arma::mat& coordinates); void Gradient(const arma::mat& coordinates, arma::mat& gradient); const arma::mat& GetInitialPoint() const; private: arma::mat initialPoint; }; /** * The Generalized Rosenbrock function in n dimensions, defined by * f(x) = sum_i^{n - 1} (f(i)(x)) * f_i(x) = 100 * (x_i^2 - x_{i + 1})^2 + (1 - x_i)^2 * x_0 = [-1.2, 1, -1.2, 1, ...] * * This should optimize to f(x) = 0, at x = [1, 1, 1, 1, ...]. * * This function can also be used for stochastic gradient descent (SGD) as a * decomposable function (DecomposableFunctionType), so there are other * overloads of Evaluate() and Gradient() implemented, as well as * NumFunctions(). * * "An analysis of the behavior of a glass of genetic adaptive systems." * K.A. De Jong. Ph.D. thesis, University of Michigan, 1975. */ class GeneralizedRosenbrockFunction { public: /*** * Set the dimensionality of the extended Rosenbrock function. * * @param n Number of dimensions for the function. */ GeneralizedRosenbrockFunction(int n); double Evaluate(const arma::mat& coordinates) const; void Gradient(const arma::mat& coordinates, arma::mat& gradient) const; size_t NumFunctions() const { return n - 1; } double Evaluate(const arma::mat& coordinates, const size_t i) const; void Gradient(const arma::mat& coordinates, const size_t i, arma::mat& gradient) const; const arma::mat& GetInitialPoint() const; private: arma::mat initialPoint; int n; // Dimensionality }; /** * The Generalized Rosenbrock function in 4 dimensions with the Wood Function in * four dimensions. In this function we are actually optimizing a 2x4 matrix of * coordinates, not a vector. 
 */
class RosenbrockWoodFunction
{
 public:
  RosenbrockWoodFunction(); // initialize initial point

  double Evaluate(const arma::mat& coordinates);
  void Gradient(const arma::mat& coordinates, arma::mat& gradient);

  const arma::mat& GetInitialPoint() const;

 private:
  arma::mat initialPoint;
  GeneralizedRosenbrockFunction rf;
  WoodFunction wf;
};

} // namespace test
} // namespace optimization
} // namespace mlpack

#endif // MLPACK_CORE_OPTIMIZERS_LBFGS_TEST_FUNCTIONS_HPP
mlpack-2.2.5/src/mlpack/core/optimizers/minibatch_sgd/000077500000000000000000000000001315013601400227475ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/minibatch_sgd/CMakeLists.txt000066400000000000000000000003521315013601400255070ustar00rootroot00000000000000set(SOURCES
  minibatch_sgd.hpp
  minibatch_sgd_impl.hpp
)

set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)
mlpack-2.2.5/src/mlpack/core/optimizers/minibatch_sgd/minibatch_sgd.hpp000066400000000000000000000141741315013601400262600ustar00rootroot00000000000000/**
 * @file minibatch_sgd.hpp
 * @author Ryan Curtin
 *
 * Mini-batch Stochastic Gradient Descent (SGD).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_OPTIMIZERS_MINIBATCH_SGD_MINIBATCH_SGD_HPP
#define MLPACK_CORE_OPTIMIZERS_MINIBATCH_SGD_MINIBATCH_SGD_HPP

#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace optimization {

/**
 * Mini-batch Stochastic Gradient Descent is a technique for minimizing a
 * function which can be expressed as a sum of other functions.  That is,
 * suppose we have
 *
 * \f[
 * f(A) = \sum_{i = 0}^{n - 1} f_i(A)
 * \f]
 *
 * and our task is to minimize \f$ f(A) \f$ with respect to \f$ A \f$.
 * Mini-batch SGD iterates over batches of functions \f$ \{ f_{i0}(A),
 * f_{i1}(A), \ldots, f_{i(m - 1)}(A) \} \f$ for some batch size \f$ m \f$,
 * producing the following update scheme:
 *
 * \f[
 * A_{j + 1} = A_j - \alpha \left(\sum_{k = 0}^{m - 1} \nabla f_{ik}(A) \right)
 * \f]
 *
 * where \f$ \alpha \f$ is a parameter which specifies the step size.  The
 * mini-batches are visited either sequentially or in random order.  The
 * algorithm continues until \f$ j \f$ reaches the maximum number of
 * iterations, or until a full sequence of updates through each of the
 * mini-batches improves the objective by less than a certain tolerance
 * \f$ \epsilon \f$.
 *
 * The parameter \f$ \epsilon \f$ is specified by the tolerance parameter to
 * the constructor, as is the maximum number of iterations specified by the
 * maxIterations parameter.
 *
 * This class is useful for data-dependent functions whose objective function
 * can be expressed as a sum of objective functions operating on an individual
 * point.  Then, mini-batch SGD considers the gradient of the objective
 * function operating on an individual mini-batch of points in its update of
 * \f$ A \f$.
 *
 * For mini-batch SGD to work, a DecomposableFunctionType template parameter
 * is required.
* This class must implement the following function: * * size_t NumFunctions(); * double Evaluate(const arma::mat& coordinates, const size_t i); * void Gradient(const arma::mat& coordinates, * const size_t i, * arma::mat& gradient); * * NumFunctions() should return the number of functions, and in the other two * functions, the parameter i refers to which individual function (or gradient) * is being evaluated. So, for the case of a data-dependent function, such as * NCA (see mlpack::nca::NCA), NumFunctions() should return the number of points * in the dataset, and Evaluate(coordinates, 0) will evaluate the objective * function on the first point in the dataset (presumably, the dataset is held * internally in the DecomposableFunctionType). * * @tparam DecomposableFunctionType Decomposable objective function type to be * minimized. */ template class MiniBatchSGD { public: /** * Construct the MiniBatchSGD optimizer with the given function and * parameters. The defaults here are not necessarily good for the given * problem, so it is suggested that the values used be tailored for the task * at hand. The maximum number of iterations refers to the maximum number of * mini-batches that are processed. * * @param function Function to be optimized (minimized). * @param batchSize Size of each mini-batch. * @param stepSize Step size for each iteration. * @param maxIterations Maximum number of iterations allowed (0 means no * limit). * @param tolerance Maximum absolute tolerance to terminate algorithm. * @param shuffle If true, the mini-batch order is shuffled; otherwise, each * mini-batch is visited in linear order. */ MiniBatchSGD(DecomposableFunctionType& function, const size_t batchSize = 1000, const double stepSize = 0.01, const size_t maxIterations = 100000, const double tolerance = 1e-5, const bool shuffle = true); /** * Optimize the given function using mini-batch SGD. The given starting point * will be modified to store the finishing point of the algorithm, and the * final objective value is returned. * * @param iterate Starting point (will be modified). * @return Objective value of the final point. */ double Optimize(arma::mat& iterate); //! Get the instantiated function to be optimized. const DecomposableFunctionType& Function() const { return function; } //! Modify the instantiated function. DecomposableFunctionType& Function() { return function; } //! Get the batch size. size_t BatchSize() const { return batchSize; } //! Modify the batch size. size_t& BatchSize() { return batchSize; } //! Get the step size. double StepSize() const { return stepSize; } //! Modify the step size. double& StepSize() { return stepSize; } //! Get the maximum number of iterations (0 indicates no limit). size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations (0 indicates no limit). size_t& MaxIterations() { return maxIterations; } //! Get the tolerance for termination. double Tolerance() const { return tolerance; } //! Modify the tolerance for termination. double& Tolerance() { return tolerance; } //! Get whether or not the individual functions are shuffled. bool Shuffle() const { return shuffle; } //! Modify whether or not the individual functions are shuffled. bool& Shuffle() { return shuffle; } private: //! The instantiated function. DecomposableFunctionType& function; //! The size of each mini-batch. size_t batchSize; //! The step size for each example. double stepSize; //! The maximum number of allowed iterations. size_t maxIterations; //! 
The tolerance for termination. double tolerance; //! Controls whether or not the individual functions are shuffled when //! iterating. bool shuffle; }; } // namespace optimization } // namespace mlpack // Include implementation. #include "minibatch_sgd_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/minibatch_sgd/minibatch_sgd_impl.hpp000066400000000000000000000116421315013601400273000ustar00rootroot00000000000000/** * @file minibatch_sgd_impl.hpp * @author Ryan Curtin * * Implementation of mini-batch SGD. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_MINIBATCH_SGD_MINIBATCH_SGD_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_MINIBATCH_SGD_MINIBATCH_SGD_IMPL_HPP // In case it hasn't been included yet. #include "minibatch_sgd.hpp" namespace mlpack { namespace optimization { template MiniBatchSGD::MiniBatchSGD( DecomposableFunctionType& function, const size_t batchSize, const double stepSize, const size_t maxIterations, const double tolerance, const bool shuffle) : function(function), batchSize(batchSize), stepSize(stepSize), maxIterations(maxIterations), tolerance(tolerance), shuffle(shuffle) { /* Nothing to do. */ } //! Optimize the function (minimize). template double MiniBatchSGD::Optimize(arma::mat& iterate) { // Find the number of functions. const size_t numFunctions = function.NumFunctions(); size_t numBatches = numFunctions / batchSize; if (numFunctions % batchSize != 0) ++numBatches; // Capture last few. // Batch visitation order. arma::Col visitationOrder = arma::shuffle( arma::linspace>(0, (numBatches - 1), numBatches)); // To keep track of where we are and how things are going. size_t currentBatch = 0; double overallObjective = 0; double lastObjective = DBL_MAX; // Calculate the first objective function. for (size_t i = 0; i < numFunctions; ++i) overallObjective += function.Evaluate(iterate, i); // Now iterate! arma::mat gradient(iterate.n_rows, iterate.n_cols); for (size_t i = 1; i != maxIterations; ++i, ++currentBatch) { // Is this iteration the start of a sequence? if ((currentBatch % numBatches) == 0) { // Output current objective function. Log::Info << "Mini-batch SGD: iteration " << i << ", objective " << overallObjective << "." << std::endl; if (std::isnan(overallObjective) || std::isinf(overallObjective)) { Log::Warn << "Mini-batch SGD: converged to " << overallObjective << "; terminating with failure. Try a smaller step size?" << std::endl; return overallObjective; } if (std::abs(lastObjective - overallObjective) < tolerance) { Log::Info << "Mini-batch SGD: minimized within tolerance " << tolerance << "; terminating optimization." << std::endl; return overallObjective; } // Reset the counter variables. lastObjective = overallObjective; overallObjective = 0; currentBatch = 0; if (shuffle) visitationOrder = arma::shuffle(visitationOrder); } // Evaluate the gradient for this mini-batch. const size_t offset = batchSize * visitationOrder[currentBatch]; function.Gradient(iterate, offset, gradient); if (visitationOrder[currentBatch] != numBatches - 1) { for (size_t j = 1; j < batchSize; ++j) { arma::mat funcGradient; function.Gradient(iterate, offset + j, funcGradient); gradient += funcGradient; } // Now update the iterate. 
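      // (Editor's note.) At this point `gradient` holds the sum of the
      // batch's individual gradients (the j = 0 term from the earlier
      // Gradient() call plus the j = 1, ..., batchSize - 1 terms), so the
      // statement below steps along the batch average: with batchSize = 3,
      // iterate moves by -(stepSize / 3) * (g_0 + g_1 + g_2).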
      iterate -= (stepSize / batchSize) * gradient;

      // Add that to the overall objective function.
      for (size_t j = 0; j < batchSize; ++j)
        overallObjective += function.Evaluate(iterate, offset + j);
    }
    else
    {
      // Handle the last batch differently: it may not be a full-size batch.
      // The batch spans functions offset, ..., numFunctions - 1, so its size
      // is numFunctions - offset (always at least one).
      const size_t lastBatchSize = numFunctions - offset;
      for (size_t j = 1; j < lastBatchSize; ++j)
      {
        arma::mat funcGradient;
        function.Gradient(iterate, offset + j, funcGradient);
        gradient += funcGradient;
      }

      // Now update the iterate, scaling by the actual batch size.
      iterate -= (stepSize / lastBatchSize) * gradient;

      // Add that to the overall objective function.
      for (size_t j = 0; j < lastBatchSize; ++j)
        overallObjective += function.Evaluate(iterate, offset + j);
    }
  }

  Log::Info << "Mini-batch SGD: maximum iterations (" << maxIterations << ") "
      << "reached; terminating optimization." << std::endl;

  // Calculate final objective.
  overallObjective = 0;
  for (size_t i = 0; i < numFunctions; ++i)
    overallObjective += function.Evaluate(iterate, i);
  return overallObjective;
}

} // namespace optimization
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/optimizers/rmsprop/000077500000000000000000000000001315013601400216565ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/rmsprop/CMakeLists.txt000066400000000000000000000003361315013601400244200ustar00rootroot00000000000000set(SOURCES
  rmsprop.hpp
  rmsprop_impl.hpp
)

set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)
mlpack-2.2.5/src/mlpack/core/optimizers/rmsprop/rmsprop.hpp000066400000000000000000000136221315013601400240750ustar00rootroot00000000000000/**
 * @file rmsprop.hpp
 * @author Ryan Curtin
 * @author Marcus Edel
 *
 * RMSprop optimizer.  RMSprop is an optimizer that utilizes the magnitude of
 * recent gradients to normalize the gradients.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_OPTIMIZERS_RMSPROP_RMSPROP_HPP
#define MLPACK_CORE_OPTIMIZERS_RMSPROP_RMSPROP_HPP

#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace optimization {

/**
 * RMSprop is an optimizer that utilizes the magnitude of recent gradients to
 * normalize the gradients.  In its basic form, given a step rate
 * \f$ \gamma \f$ and a decay term \f$ \alpha \f$ we perform the following
 * updates:
 *
 * \f{eqnarray*}{
 * r_t &=& (1 - \alpha) f'(\Delta_t)^2 + \alpha r_{t - 1} \\
 * v_{t + 1} &=& \frac{\gamma}{\sqrt{r_t}}f'(\Delta_t) \\
 * \Delta_{t + 1} &=& \Delta_t - v_{t + 1}
 * \f}
 *
 * For more information, see the following.
 *
 * @code
 * @misc{tieleman2012,
 *   title={Lecture 6.5 - rmsprop, COURSERA: Neural Networks for Machine
 *   Learning},
 *   year={2012}
 * }
 * @endcode
 *
 * For RMSprop to work, a DecomposableFunctionType template parameter is
 * required.
This class must implement the following function: * * size_t NumFunctions(); * double Evaluate(const arma::mat& coordinates, const size_t i); * void Gradient(const arma::mat& coordinates, * const size_t i, * arma::mat& gradient); * * NumFunctions() should return the number of functions (\f$n\f$), and in the * other two functions, the parameter i refers to which individual function (or * gradient) is being evaluated. So, for the case of a data-dependent function, * such as NCA (see mlpack::nca::NCA), NumFunctions() should return the number * of points in the dataset, and Evaluate(coordinates, 0) will evaluate the * objective function on the first point in the dataset (presumably, the dataset * is held internally in the DecomposableFunctionType). * * @tparam DecomposableFunctionType Decomposable objective function type to be * minimized. */ template class RMSprop { public: /** * Construct the RMSprop optimizer with the given function and parameters. The * defaults here are not necessarily good for the given problem, so it is * suggested that the values used be tailored to the task at hand. The * maximum number of iterations refers to the maximum number of points that * are processed (i.e., one iteration equals one point; one iteration does not * equal one pass over the dataset). * * @param function Function to be optimized (minimized). * @param stepSize Step size for each iteration. * @param alpha Smoothing constant, similar to that used in AdaDelta and * momentum methods. * @param eps Value used to initialise the mean squared gradient parameter. * @param maxIterations Maximum number of iterations allowed (0 means no * limit). * @param tolerance Maximum absolute tolerance to terminate algorithm. * @param shuffle If true, the function order is shuffled; otherwise, each * function is visited in linear order. */ RMSprop(DecomposableFunctionType& function, const double stepSize = 0.01, const double alpha = 0.99, const double eps = 1e-8, const size_t maxIterations = 100000, const double tolerance = 1e-5, const bool shuffle = true); /** * Optimize the given function using RMSprop. The given starting point will be * modified to store the finishing point of the algorithm, and the final * objective value is returned. * * @param iterate Starting point (will be modified). * @return Objective value of the final point. */ double Optimize(arma::mat& iterate); //! Get the instantiated function to be optimized. const DecomposableFunctionType& Function() const { return function; } //! Modify the instantiated function. DecomposableFunctionType& Function() { return function; } //! Get the step size. double StepSize() const { return stepSize; } //! Modify the step size. double& StepSize() { return stepSize; } //! Get the smoothing parameter. double Alpha() const { return alpha; } //! Modify the smoothing parameter. double& Alpha() { return alpha; } //! Get the value used to initialise the mean squared gradient parameter. double Epsilon() const { return eps; } //! Modify the value used to initialise the mean squared gradient parameter. double& Epsilon() { return eps; } //! Get the maximum number of iterations (0 indicates no limit). size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations (0 indicates no limit). size_t& MaxIterations() { return maxIterations; } //! Get the tolerance for termination. double Tolerance() const { return tolerance; } //! Modify the tolerance for termination. double& Tolerance() { return tolerance; } //! 
Get whether or not the individual functions are shuffled. bool Shuffle() const { return shuffle; } //! Modify whether or not the individual functions are shuffled. bool& Shuffle() { return shuffle; } private: //! The instantiated function. DecomposableFunctionType& function; //! The step size for each example. double stepSize; //! The smoothing parameter. double alpha; //! The value used to initialise the mean squared gradient parameter. double eps; //! The maximum number of allowed iterations. size_t maxIterations; //! The tolerance for termination. double tolerance; //! Controls whether or not the individual functions are shuffled when //! iterating. bool shuffle; }; } // namespace optimization } // namespace mlpack // Include implementation. #include "rmsprop_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/rmsprop/rmsprop_impl.hpp000066400000000000000000000106271315013601400251200ustar00rootroot00000000000000/** * @file rmsprop_impl.hpp * @author Ryan Curtin * @author Marcus Edel * * Implementation of the RMSprop optimizer. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_RMSPROP_RMSPROP_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_RMSPROP_RMSPROP_IMPL_HPP // In case it hasn't been included yet. #include "rmsprop.hpp" namespace mlpack { namespace optimization { template RMSprop::RMSprop(DecomposableFunctionType& function, const double stepSize, const double alpha, const double eps, const size_t maxIterations, const double tolerance, const bool shuffle) : function(function), stepSize(stepSize), alpha(alpha), eps(eps), maxIterations(maxIterations), tolerance(tolerance), shuffle(shuffle) { /* Nothing to do. */ } //! Optimize the function (minimize). template double RMSprop::Optimize(arma::mat& iterate) { // Find the number of functions to use. const size_t numFunctions = function.NumFunctions(); // This is used only if shuffle is true. arma::Col visitationOrder; if (shuffle) visitationOrder = arma::shuffle(arma::linspace>(0, (numFunctions - 1), numFunctions)); // To keep track of where we are and how things are going. size_t currentFunction = 0; double overallObjective = 0; double lastObjective = DBL_MAX; // Calculate the first objective function. for (size_t i = 0; i < numFunctions; ++i) overallObjective += function.Evaluate(iterate, i); // Now iterate! arma::mat gradient(iterate.n_rows, iterate.n_cols); // Leaky sum of squares of parameter gradient. arma::mat meanSquaredGradient = arma::zeros(iterate.n_rows, iterate.n_cols); for (size_t i = 1; i != maxIterations; ++i, ++currentFunction) { // Is this iteration the start of a sequence? if ((currentFunction % numFunctions) == 0) { // Output current objective function. Log::Info << "RMSprop: iteration " << i << ", objective " << overallObjective << "." << std::endl; if (std::isnan(overallObjective) || std::isinf(overallObjective)) { Log::Warn << "RMSprop: converged to " << overallObjective << "; terminating with failure. Try a smaller step size?" << std::endl; return overallObjective; } if (std::abs(lastObjective - overallObjective) < tolerance) { Log::Info << "RMSprop: minimized within tolerance " << tolerance << "; " << "terminating optimization." << std::endl; return overallObjective; } // Reset the counter variables. 
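      // (Editor's note.) Once these counters are reset, each subsequent
      // iteration below maintains a leaky average of squared gradients,
      // r <- alpha * r + (1 - alpha) * (g % g), and steps by
      // stepSize * g / (sqrt(r) + eps); for a persistently large gradient,
      // sqrt(r) approaches |g|, so the effective per-element step approaches
      // stepSize.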
lastObjective = overallObjective; overallObjective = 0; currentFunction = 0; if (shuffle) // Determine order of visitation. visitationOrder = arma::shuffle(visitationOrder); } // Evaluate the gradient for this iteration. if (shuffle) function.Gradient(iterate, visitationOrder[currentFunction], gradient); else function.Gradient(iterate, currentFunction, gradient); // And update the iterate. meanSquaredGradient *= alpha; meanSquaredGradient += (1 - alpha) * (gradient % gradient); iterate -= stepSize * gradient / (arma::sqrt(meanSquaredGradient) + eps); // Now add that to the overall objective function. if (shuffle) overallObjective += function.Evaluate(iterate, visitationOrder[currentFunction]); else overallObjective += function.Evaluate(iterate, currentFunction); } Log::Info << "RMSprop: maximum iterations (" << maxIterations << ") reached; " << "terminating optimization." << std::endl; // Calculate final objective. overallObjective = 0; for (size_t i = 0; i < numFunctions; ++i) overallObjective += function.Evaluate(iterate, i); return overallObjective; } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/sa/000077500000000000000000000000001315013601400205575ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/sa/CMakeLists.txt000066400000000000000000000003571315013601400233240ustar00rootroot00000000000000set(SOURCES sa.hpp sa_impl.hpp exponential_schedule.hpp ) set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/optimizers/sa/exponential_schedule.hpp000066400000000000000000000037211315013601400254750ustar00rootroot00000000000000/** * @file exponential_schedule.hpp * @author Zhihao Lou * * Exponential (geometric) cooling schedule used in SA. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SA_EXPONENTIAL_SCHEDULE_HPP #define MLPACK_CORE_OPTIMIZERS_SA_EXPONENTIAL_SCHEDULE_HPP namespace mlpack { namespace optimization { /** * The exponential cooling schedule cools the temperature T at every step * according to the equation * * \f[ * T_{n+1} = (1-\lambda) T_{n} * \f] * * where \f$ 0<\lambda<1 \f$ is the cooling speed. The smaller \f$ \lambda \f$ * is, the slower the cooling speed, and the better the final result will be. * Some literature uses \f$ \alpha = (1 - \lambda) \f$ instead. In practice, * \f$ \alpha \f$ is very close to 1 and would be awkward to input (e.g. * alpha = 0.999999 vs. lambda = 1e-6). */ class ExponentialSchedule { public: /** * Construct the ExponentialSchedule with the given parameter. * * @param lambda Cooling speed. */ ExponentialSchedule(const double lambda = 0.001) : lambda(lambda) { } /** * Returns the next temperature given the current status. The current * system's energy is not used in this calculation. * * @param currentTemperature Current temperature of the system. * @param currentEnergy Current energy of the system (not used). */ double NextTemperature( const double currentTemperature, const double /* currentEnergy */) { return (1 - lambda) * currentTemperature; } //! Get the cooling speed, lambda. double Lambda() const { return lambda; } //! Modify the cooling speed, lambda.
double& Lambda() { return lambda; } private: //! The cooling speed. double lambda; }; } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/sa/sa.hpp000066400000000000000000000206461315013601400217030ustar00rootroot00000000000000/** * @file sa.hpp * @author Zhihao Lou * * Simulated Annealing (SA). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SA_SA_HPP #define MLPACK_CORE_OPTIMIZERS_SA_SA_HPP #include #include "exponential_schedule.hpp" namespace mlpack { namespace optimization { /** * Simulated Annealing is a stochastic optimization algorithm which is able to * deliver near-optimal results quickly without knowing the gradient of the * function being optimized. It has a unique hill-climbing capability that * makes it less vulnerable to local minima. This implementation uses an * exponential cooling schedule and feedback move control by default, but the * cooling schedule can be changed via a template parameter. * * The algorithm keeps the temperature at the initial temperature for initMoves * steps to get rid of the dependency on the initial condition. After that, it * cools every step until the system is considered frozen or maxIterations is * reached. * * At each step, SA only perturbs one parameter at a time. When SA has perturbed * all parameters in a problem, a sweep has been completed. Every moveCtrlSweep * sweeps, the algorithm does feedback move control to change the average move * size depending on the responsiveness of each parameter. The parameter gain * controls the proportion of the feedback control. * * The system is considered "frozen" when its score fails to change more than * the tolerance for maxToleranceSweep consecutive sweeps. * * For SA to work, the FunctionType parameter must implement the following * two methods: * * double Evaluate(const arma::mat& coordinates); * arma::mat& GetInitialPoint(); * * and the CoolingScheduleType parameter must implement the following method: * * double NextTemperature(const double currentTemperature, * const double currentValue); * * which returns the next temperature given the current temperature and the * value of the function being optimized. * * @tparam FunctionType Objective function type to be minimized. * @tparam CoolingScheduleType Type of the cooling schedule. */ template< typename FunctionType, typename CoolingScheduleType = ExponentialSchedule > class SA { public: /** * Construct the SA optimizer with the given function and parameters. * * @param function Function to be minimized. * @param coolingSchedule Instantiated cooling schedule. * @param maxIterations Maximum number of iterations allowed (0 indicates no limit). * @param initT Initial temperature. * @param initMoves Number of initial iterations without changing temperature. * @param moveCtrlSweep Sweeps per feedback move control. * @param tolerance Tolerance to consider system frozen. * @param maxToleranceSweep Maximum sweeps below tolerance to consider system * frozen. * @param maxMoveCoef Maximum move size. * @param initMoveCoef Initial move size. * @param gain Proportional control in feedback move control.
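* * A minimal usage sketch follows (MyFunction is illustrative only; any type * implementing Evaluate() and GetInitialPoint() as described above will do): * * @code * MyFunction f; * ExponentialSchedule schedule; // Default cooling speed, lambda = 0.001. * SA<MyFunction> sa(f, schedule); * arma::mat coordinates = f.GetInitialPoint(); * const double result = sa.Optimize(coordinates); * @endcode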
*/ SA(FunctionType& function, CoolingScheduleType& coolingSchedule, const size_t maxIterations = 1000000, const double initT = 10000., const size_t initMoves = 1000, const size_t moveCtrlSweep = 100, const double tolerance = 1e-5, const size_t maxToleranceSweep = 3, const double maxMoveCoef = 20, const double initMoveCoef = 0.3, const double gain = 0.3); /** * Optimize the given function using simulated annealing. The given starting * point will be modified to store the finishing point of the algorithm, and * the final objective value is returned. * * @param iterate Starting point (will be modified). * @return Objective value of the final point. */ double Optimize(arma::mat& iterate); //! Get the instantiated function to be optimized. const FunctionType& Function() const { return function; } //! Modify the instantiated function. FunctionType& Function() { return function; } //! Get the temperature. double Temperature() const { return temperature; } //! Modify the temperature. double& Temperature() { return temperature; } //! Get the initial moves. size_t InitMoves() const { return initMoves; } //! Modify the initial moves. size_t& InitMoves() { return initMoves; } //! Get sweeps per move control. size_t MoveCtrlSweep() const { return moveCtrlSweep; } //! Modify sweeps per move control. size_t& MoveCtrlSweep() { return moveCtrlSweep; } //! Get the tolerance. double Tolerance() const { return tolerance; } //! Modify the tolerance. double& Tolerance() { return tolerance; } //! Get the maxToleranceSweep. size_t MaxToleranceSweep() const { return maxToleranceSweep; } //! Modify the maxToleranceSweep. size_t& MaxToleranceSweep() { return maxToleranceSweep; } //! Get the gain. double Gain() const { return gain; } //! Modify the gain. double& Gain() { return gain; } //! Get the maximum number of iterations. size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations. size_t& MaxIterations() { return maxIterations; } //! Get the maximum move size of each parameter. arma::mat MaxMove() const { return maxMove; } //! Modify the maximum move size of each parameter. arma::mat& MaxMove() { return maxMove; } //! Get move size of each parameter. arma::mat MoveSize() const { return moveSize; } //! Modify move size of each parameter. arma::mat& MoveSize() { return moveSize; } private: //! The function to be optimized. FunctionType& function; //! The cooling schedule being used. CoolingScheduleType& coolingSchedule; //! The maximum number of iterations. size_t maxIterations; //! The current temperature. double temperature; //! The number of initial moves before reducing the temperature. size_t initMoves; //! The number of sweeps before a MoveControl() call. size_t moveCtrlSweep; //! Tolerance for convergence. double tolerance; //! Number of sweeps in tolerance before system is considered frozen. size_t maxToleranceSweep; //! Proportional control in feedback move control. double gain; //! Maximum move size of each parameter. arma::mat maxMove; //! Move size of each parameter. arma::mat moveSize; /** * GenerateMove proposes a move on element iterate(idx), and determines if * that move is acceptable or not according to the Metropolis criterion. * After that it increments idx so the next call will make a move on next * parameters. When all elements of the state have been moved (a sweep), it * resets idx and increments sweepCounter. When sweepCounter reaches * moveCtrlSweep, it performs MoveControl() and resets sweepCounter. * * @param iterate Current optimization position. 
* @param accept Matrix representing which parameters have had accepted moves. * @param energy Current energy of the system. * @param idx Current parameter to modify. * @param sweepCounter Current counter representing how many sweeps have been * completed. */ void GenerateMove(arma::mat& iterate, arma::mat& accept, double& energy, size_t& idx, size_t& sweepCounter); /** * MoveControl() uses a proportional feedback control to determine the size * parameter to pass to the move generation distribution. The target of such * move control is to make the acceptance ratio, accept/nMoves, be as close to * 0.44 as possible. Generally speaking, the larger the move size is, the * larger the function value change of the move will be, and the less likely * such a move will be accepted by the Metropolis criterion. Thus, the move * size is controlled by * * log(moveSize) = log(moveSize) + gain * (accept/nMoves - target) * * For more theory and the mysterious 0.44 value, see Jimmy K.-C. Lam and * Jean-Marc Delosme. `An efficient simulated annealing schedule: derivation'. * Technical Report 8816, Yale University, 1988. * * @param nMoves Number of moves since last call. * @param accept Matrix representing which parameters have had accepted moves. */ void MoveControl(const size_t nMoves, arma::mat& accept); }; } // namespace optimization } // namespace mlpack #include "sa_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/sa/sa_impl.hpp000066400000000000000000000150021315013601400227120ustar00rootroot00000000000000/** * @file sa_impl.hpp * @author Zhihao Lou * * The implementation of the SA optimizer. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SA_SA_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_SA_SA_IMPL_HPP #include namespace mlpack { namespace optimization { template< typename FunctionType, typename CoolingScheduleType > SA<FunctionType, CoolingScheduleType>::SA( FunctionType& function, CoolingScheduleType& coolingSchedule, const size_t maxIterations, const double initT, const size_t initMoves, const size_t moveCtrlSweep, const double tolerance, const size_t maxToleranceSweep, const double maxMoveCoef, const double initMoveCoef, const double gain) : function(function), coolingSchedule(coolingSchedule), maxIterations(maxIterations), temperature(initT), initMoves(initMoves), moveCtrlSweep(moveCtrlSweep), tolerance(tolerance), maxToleranceSweep(maxToleranceSweep), gain(gain) { const size_t rows = function.GetInitialPoint().n_rows; const size_t cols = function.GetInitialPoint().n_cols; maxMove.set_size(rows, cols); maxMove.fill(maxMoveCoef); moveSize.set_size(rows, cols); moveSize.fill(initMoveCoef); } //! Optimize the function (minimize). template< typename FunctionType, typename CoolingScheduleType > double SA<FunctionType, CoolingScheduleType>::Optimize(arma::mat& iterate) { const size_t rows = function.GetInitialPoint().n_rows; const size_t cols = function.GetInitialPoint().n_cols; size_t frozenCount = 0; double energy = function.Evaluate(iterate); double oldEnergy = energy; math::RandomSeed(std::time(NULL)); size_t idx = 0; size_t sweepCounter = 0; arma::mat accept(rows, cols); accept.zeros(); // Initial moves to get rid of the dependency on the initial state. for (size_t i = 0; i < initMoves; ++i) GenerateMove(iterate, accept, energy, idx, sweepCounter); // Iterating and cooling.
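// Each iteration proposes a single-parameter move via GenerateMove() and then // cools the temperature; the loop exits early once the energy has stayed // within the tolerance for long enough that the system counts as frozen.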
for (size_t i = 0; i != maxIterations; ++i) { oldEnergy = energy; GenerateMove(iterate, accept, energy, idx, sweepCounter); temperature = coolingSchedule.NextTemperature(temperature, energy); // Determine if the optimization has entered (or continues to be in) a // frozen state. if (std::abs(energy - oldEnergy) < tolerance) ++frozenCount; else frozenCount = 0; // Terminate, if possible. if (frozenCount >= maxToleranceSweep * moveCtrlSweep * iterate.n_elem) { Log::Debug << "SA: minimized within tolerance " << tolerance << " for " << maxToleranceSweep << " sweeps after " << i << " iterations; " << "terminating optimization." << std::endl; return energy; } } Log::Debug << "SA: maximum iterations (" << maxIterations << ") reached; " << "terminating optimization." << std::endl; return energy; } /** * GenerateMove proposes a move on element iterate(idx), and determines * if that move is acceptable or not according to the Metropolis criterion. * After that it increments idx so the next call will make a move on the next * parameter. When all elements of the state have been moved (a sweep), it * resets idx and increments sweepCounter. When sweepCounter reaches * moveCtrlSweep, it performs MoveControl() and resets sweepCounter. */ template< typename FunctionType, typename CoolingScheduleType > void SA<FunctionType, CoolingScheduleType>::GenerateMove( arma::mat& iterate, arma::mat& accept, double& energy, size_t& idx, size_t& sweepCounter) { const double prevEnergy = energy; const double prevValue = iterate(idx); // It is possible to use a non-Laplace distribution here, but it is difficult // because the acceptance ratio should be as close to 0.44 as possible, and // MoveControl() is derived for the Laplace distribution. // Sample from a Laplace distribution with scale parameter moveSize(idx). const double unif = 2.0 * math::Random() - 1.0; const double move = (unif < 0) ? (moveSize(idx) * std::log(1 + unif)) : (-moveSize(idx) * std::log(1 - unif)); iterate(idx) += move; energy = function.Evaluate(iterate); // According to the Metropolis criterion, accept the move with probability // min{1, exp(-(E_new - E_old) / T)}. const double xi = math::Random(); const double delta = energy - prevEnergy; const double criterion = std::exp(-delta / temperature); if (delta <= 0. || criterion > xi) { accept(idx) += 1.; } else // Reject the move; restore the previous state. { iterate(idx) = prevValue; energy = prevEnergy; } ++idx; if (idx == iterate.n_elem) // Finished with a sweep. { idx = 0; ++sweepCounter; } if (sweepCounter == moveCtrlSweep) // Do MoveControl(). { MoveControl(moveCtrlSweep, accept); sweepCounter = 0; } } /** * MoveControl() uses a proportional feedback control to determine the size * parameter to pass to the move generation distribution. The target of such * move control is to make the acceptance ratio, accept/nMoves, be as close to * 0.44 as possible. Generally speaking, the larger the move size is, the larger * the function value change of the move will be, and the less likely such a * move will be accepted by the Metropolis criterion. Thus, the move size is * controlled by * * log(moveSize) = log(moveSize) + gain * (accept/nMoves - target) * * For more theory and the mysterious 0.44 value, see Jimmy K.-C. Lam and * Jean-Marc Delosme. `An efficient simulated annealing schedule: derivation'. * Technical Report 8816, Yale University, 1988.
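* * For example, with the default gain of 0.3 and a measured acceptance ratio of * 0.2 over the last sweep, log(moveSize) changes by 0.3 * (0.2 - 0.44) = * -0.072, so the move size shrinks by roughly 7%; the resulting smaller moves * are then more likely to be accepted in subsequent sweeps.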
*/ template< typename FunctionType, typename CoolingScheduleType > void SA<FunctionType, CoolingScheduleType>::MoveControl(const size_t nMoves, arma::mat& accept) { arma::mat target; target.copy_size(accept); target.fill(0.44); moveSize = arma::log(moveSize); moveSize += gain * (accept / (double) nMoves - target); moveSize = arma::exp(moveSize); // To avoid the use of element-wise arma::min(), which is only available in // Armadillo after v3.930, we use a for loop here instead. for (size_t i = 0; i < accept.n_elem; ++i) moveSize(i) = (moveSize(i) > maxMove(i)) ? maxMove(i) : moveSize(i); accept.zeros(); } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/sdp/000077500000000000000000000000001315013601400207425ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/sdp/CMakeLists.txt000066400000000000000000000005131315013601400235010ustar00rootroot00000000000000set(SOURCES lrsdp.hpp lrsdp_impl.hpp lrsdp_function.hpp lrsdp_function_impl.hpp primal_dual.hpp primal_dual_impl.hpp sdp.hpp sdp_impl.hpp ) set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/optimizers/sdp/lrsdp.hpp000066400000000000000000000061041315013601400226000ustar00rootroot00000000000000/** * @file lrsdp.hpp * @author Ryan Curtin * * An implementation of Monteiro and Burer's formulation of low-rank * semidefinite programs (LR-SDP). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SDP_LRSDP_HPP #define MLPACK_CORE_OPTIMIZERS_SDP_LRSDP_HPP #include #include #include "lrsdp_function.hpp" namespace mlpack { namespace optimization { /** * LRSDP is the implementation of Monteiro and Burer's formulation of low-rank * semidefinite programs (LR-SDP). This solver uses the augmented Lagrangian * optimizer to solve low-rank semidefinite programs. */ template<typename SDPType> class LRSDP { public: /** * Create an LRSDP to be optimized. The solution will end up being a matrix * of size (rows) x (rank). To construct each constraint and the objective * function, use the function SDP() in order to access the SDPType object * associated with this optimizer. * * @param numSparseConstraints Number of sparse constraints in the problem. * @param numDenseConstraints Number of dense constraints in the problem. * @param initialPoint Initial point of the optimization. */ LRSDP(const size_t numSparseConstraints, const size_t numDenseConstraints, const arma::mat& initialPoint); /** * Create an LRSDP object with the given SDP problem to be solved, and the * given initial point. Note that the SDP may be modified later by calling * SDP() to access the object. * * @param sdp SDP to be solved. * @param initialPoint Initial point of the optimization. */ LRSDP(const SDPType& sdp, const arma::mat& initialPoint); /** * Optimize the LRSDP and return the final objective value. The given * coordinates will be modified to contain the final solution. * * @param coordinates Starting coordinates for the optimization. */ double Optimize(arma::mat& coordinates); //! Return the SDP that will be solved. const SDPType& SDP() const { return function.SDP(); } //! Modify the SDP that will be solved. SDPType& SDP() { return function.SDP(); } //! Return the function to be optimized. const LRSDPFunction<SDPType>& Function() const { return function; } //!
Modify the function to be optimized. LRSDPFunction<SDPType>& Function() { return function; } //! Return the augmented Lagrangian object. const AugLagrangian<LRSDPFunction<SDPType>>& AugLag() const { return augLag; } //! Modify the augmented Lagrangian object. AugLagrangian<LRSDPFunction<SDPType>>& AugLag() { return augLag; } private: //! Function to optimize, which the AugLagrangian object holds. LRSDPFunction<SDPType> function; //! The AugLagrangian object which will be used for optimization. AugLagrangian<LRSDPFunction<SDPType>> augLag; }; } // namespace optimization } // namespace mlpack // Include implementation #include "lrsdp_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/sdp/lrsdp_function.hpp000066400000000000000000000072711315013601400245130ustar00rootroot00000000000000/** * @file lrsdp_function.hpp * @author Ryan Curtin * @author Abhishek Laddha * * A class that represents the objective function which LRSDP optimizes. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SDP_LRSDP_FUNCTION_HPP #define MLPACK_CORE_OPTIMIZERS_SDP_LRSDP_FUNCTION_HPP #include #include #include namespace mlpack { namespace optimization { /** * The objective function that LRSDP is trying to optimize. */ template<typename SDPType> class LRSDPFunction { public: /** * Construct the LRSDPFunction from the given SDP. * * @param sdp SDP problem to be solved. * @param initialPoint Initial point of the optimization. */ LRSDPFunction(const SDPType& sdp, const arma::mat& initialPoint); /** * Construct the LRSDPFunction with the given initial point and number of * constraints. Note n_cols of the initialPoint specifies the rank. * * Set the A_x, B_x, and C_x matrices for each constraint using the A_x(), * B_x(), and C_x() functions, for x in {sparse, dense}. * * @param numSparseConstraints Number of sparse constraints. * @param numDenseConstraints Number of dense constraints. * @param initialPoint Initial point of the optimization. */ LRSDPFunction(const size_t numSparseConstraints, const size_t numDenseConstraints, const arma::mat& initialPoint); /** * Evaluate the objective function of the LRSDP (no constraints) at the given * coordinates. */ double Evaluate(const arma::mat& coordinates) const; /** * Evaluate the gradient of the LRSDP (no constraints) at the given * coordinates. */ void Gradient(const arma::mat& coordinates, arma::mat& gradient) const; /** * Evaluate a particular constraint of the LRSDP at the given coordinates. */ double EvaluateConstraint(const size_t index, const arma::mat& coordinates) const; /** * Evaluate the gradient of a particular constraint of the LRSDP at the given * coordinates. */ void GradientConstraint(const size_t index, const arma::mat& coordinates, arma::mat& gradient) const; //! Get the total number of constraints in the LRSDP. size_t NumConstraints() const { return sdp.NumConstraints(); } //! Get the initial point of the LRSDP. const arma::mat& GetInitialPoint() const { return initialPoint; } //! Return the SDP object representing the problem. const SDPType& SDP() const { return sdp; } //! Modify the SDP object representing the problem. SDPType& SDP() { return sdp; } private: //! SDP object representing the problem. SDPType sdp; //! Initial point. arma::mat initialPoint; }; // Declare specializations in lrsdp_function_impl.hpp.
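// These specializations feed the augmented Lagrangian value and gradient of an // LRSDP directly to the AugLagrangian optimizer; the generic // LRSDPFunction::Gradient() path is intentionally unimplemented and aborts at // runtime.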
template <> inline double AugLagrangianFunction<LRSDPFunction<SDP<arma::sp_mat>>>::Evaluate( const arma::mat& coordinates) const; template <> inline double AugLagrangianFunction<LRSDPFunction<SDP<arma::mat>>>::Evaluate( const arma::mat& coordinates) const; template <> inline void AugLagrangianFunction<LRSDPFunction<SDP<arma::sp_mat>>>::Gradient( const arma::mat& coordinates, arma::mat& gradient) const; template <> inline void AugLagrangianFunction<LRSDPFunction<SDP<arma::mat>>>::Gradient( const arma::mat& coordinates, arma::mat& gradient) const; } // namespace optimization } // namespace mlpack // Include implementation #include "lrsdp_function_impl.hpp" #endif // MLPACK_CORE_OPTIMIZERS_SDP_LRSDP_FUNCTION_HPP mlpack-2.2.5/src/mlpack/core/optimizers/sdp/lrsdp_function_impl.hpp000066400000000000000000000167471315013601400255420ustar00rootroot00000000000000/** * @file lrsdp_function_impl.hpp * @author Ryan Curtin * @author Abhishek Laddha * * Implementation of the LRSDPFunction class, and also template specializations * for faster execution with the AugLagrangian optimizer. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SDP_LRSDP_FUNCTION_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_SDP_LRSDP_FUNCTION_IMPL_HPP #include "lrsdp_function.hpp" namespace mlpack { namespace optimization { template<typename SDPType> LRSDPFunction<SDPType>::LRSDPFunction(const SDPType& sdp, const arma::mat& initialPoint): sdp(sdp), initialPoint(initialPoint) { if (initialPoint.n_rows < initialPoint.n_cols) Log::Warn << "LRSDPFunction::LRSDPFunction(): solution matrix will have " << "more columns than rows. It may be more efficient to find the " << "transposed solution." << std::endl; } template<typename SDPType> LRSDPFunction<SDPType>::LRSDPFunction(const size_t numSparseConstraints, const size_t numDenseConstraints, const arma::mat& initialPoint): sdp(initialPoint.n_rows, numSparseConstraints, numDenseConstraints), initialPoint(initialPoint) { if (initialPoint.n_rows < initialPoint.n_cols) Log::Warn << "LRSDPFunction::LRSDPFunction(): solution matrix will have " << "more columns than rows. It may be more efficient to find the " << "transposed solution." << std::endl; } template<typename SDPType> double LRSDPFunction<SDPType>::Evaluate(const arma::mat& coordinates) const { const arma::mat rrt = coordinates * trans(coordinates); return accu(SDP().C() % rrt); } template<typename SDPType> void LRSDPFunction<SDPType>::Gradient(const arma::mat& /* coordinates */, arma::mat& /* gradient */) const { Log::Fatal << "LRSDPFunction::Gradient() not implemented for arbitrary optimizers!" << std::endl; } template<typename SDPType> double LRSDPFunction<SDPType>::EvaluateConstraint(const size_t index, const arma::mat& coordinates) const { const arma::mat rrt = coordinates * trans(coordinates); if (index < SDP().NumSparseConstraints()) return accu(SDP().SparseA()[index] % rrt) - SDP().SparseB()[index]; const size_t index1 = index - SDP().NumSparseConstraints(); return accu(SDP().DenseA()[index1] % rrt) - SDP().DenseB()[index1]; } template<typename SDPType> void LRSDPFunction<SDPType>::GradientConstraint(const size_t /* index */, const arma::mat& /* coordinates */, arma::mat& /* gradient */) const { Log::Fatal << "LRSDPFunction::GradientConstraint() not implemented for arbitrary " << "optimizers!" << std::endl; } //! Utility function for calculating part of the objective when AugLagrangian is //! used with an LRSDPFunction.
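//! Each constraint contributes -lambda_i * (trace(A_i * (R R^T)) - b_i) + //! (sigma / 2) * (trace(A_i * (R R^T)) - b_i)^2 to the augmented Lagrangian; //! the loop below accumulates exactly these two terms per constraint.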
template<typename MatrixType> static inline void UpdateObjective(double& objective, const arma::mat& rrt, const std::vector<MatrixType>& ais, const arma::vec& bis, const arma::vec& lambda, const size_t lambdaOffset, const double sigma) { for (size_t i = 0; i < ais.size(); ++i) { // Take the trace subtracted by the b_i. const double constraint = accu(ais[i] % rrt) - bis[i]; objective -= (lambda[lambdaOffset + i] * constraint); objective += (sigma / 2.) * constraint * constraint; } } //! Utility function for calculating part of the gradient when AugLagrangian is //! used with an LRSDPFunction. template<typename MatrixType> static inline void UpdateGradient(arma::mat& s, const arma::mat& rrt, const std::vector<MatrixType>& ais, const arma::vec& bis, const arma::vec& lambda, const size_t lambdaOffset, const double sigma) { for (size_t i = 0; i < ais.size(); ++i) { const double constraint = accu(ais[i] % rrt) - bis[i]; const double y = lambda[lambdaOffset + i] - sigma * constraint; s -= y * ais[i]; } } template<typename SDPType> static inline double EvaluateImpl(const LRSDPFunction<SDPType>& function, const arma::mat& coordinates, const arma::vec& lambda, const double sigma) { // We can calculate the entire objective in a smart way. // L(R, y, s) = Tr(C * (R R^T)) - // sum_{i = 1}^{m} (y_i (Tr(A_i * (R R^T)) - b_i)) + // (sigma / 2) * sum_{i = 1}^{m} (Tr(A_i * (R R^T)) - b_i)^2 // Let's start with the objective: Tr(C * (R R^T)). // Simple, possibly slow solution-- see below for optimization opportunity // // TODO: Note that Tr(C^T * (R R^T)) = Tr( (CR)^T * R ), so // multiplying C*R first, and then taking the trace dot should be more memory // efficient // // Similarly for the constraints, taking A*R first should be more efficient const arma::mat rrt = coordinates * trans(coordinates); double objective = accu(function.SDP().C() % rrt); // Now each constraint. UpdateObjective(objective, rrt, function.SDP().SparseA(), function.SDP().SparseB(), lambda, 0, sigma); UpdateObjective(objective, rrt, function.SDP().DenseA(), function.SDP().DenseB(), lambda, function.SDP().NumSparseConstraints(), sigma); return objective; } template<typename SDPType> static inline void GradientImpl(const LRSDPFunction<SDPType>& function, const arma::mat& coordinates, const arma::vec& lambda, const double sigma, arma::mat& gradient) { // We can calculate the gradient in a smart way. // L'(R, y, s) = 2 * S' * R // with // S' = C - sum_{i = 1}^{m} y'_i A_i // y'_i = y_i - sigma * (Trace(A_i * (R R^T)) - b_i) const arma::mat rrt = coordinates * trans(coordinates); arma::mat s(function.SDP().C()); UpdateGradient( s, rrt, function.SDP().SparseA(), function.SDP().SparseB(), lambda, 0, sigma); UpdateGradient( s, rrt, function.SDP().DenseA(), function.SDP().DenseB(), lambda, function.SDP().NumSparseConstraints(), sigma); gradient = 2 * s * coordinates; } // Template specializations for function and gradient evaluation. // Note that C++ does not allow partial specialization of class members, // so we have to go about this in a somewhat round-about way.
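// Concretely, below we fully specialize Evaluate() and Gradient() for the two // SDP objective types used by LRSDP: SDP<arma::sp_mat> and SDP<arma::mat>.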
template <> inline double AugLagrangianFunction<LRSDPFunction<SDP<arma::sp_mat>>>::Evaluate( const arma::mat& coordinates) const { return EvaluateImpl(function, coordinates, lambda, sigma); } template <> inline double AugLagrangianFunction<LRSDPFunction<SDP<arma::mat>>>::Evaluate( const arma::mat& coordinates) const { return EvaluateImpl(function, coordinates, lambda, sigma); } template <> inline void AugLagrangianFunction<LRSDPFunction<SDP<arma::sp_mat>>>::Gradient( const arma::mat& coordinates, arma::mat& gradient) const { GradientImpl(function, coordinates, lambda, sigma, gradient); } template <> inline void AugLagrangianFunction<LRSDPFunction<SDP<arma::mat>>>::Gradient( const arma::mat& coordinates, arma::mat& gradient) const { GradientImpl(function, coordinates, lambda, sigma, gradient); } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/sdp/lrsdp_impl.hpp000066400000000000000000000022161315013601400236210ustar00rootroot00000000000000/** * @file lrsdp_impl.hpp * @author Ryan Curtin * * An implementation of Monteiro and Burer's formulation of low-rank * semidefinite programs (LR-SDP). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SDP_LRSDP_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_SDP_LRSDP_IMPL_HPP #include "lrsdp.hpp" namespace mlpack { namespace optimization { template<typename SDPType> LRSDP<SDPType>::LRSDP(const size_t numSparseConstraints, const size_t numDenseConstraints, const arma::mat& initialPoint) : function(numSparseConstraints, numDenseConstraints, initialPoint), augLag(function) { } template<typename SDPType> double LRSDP<SDPType>::Optimize(arma::mat& coordinates) { augLag.Sigma() = 10; augLag.Optimize(coordinates, 1000); return augLag.Function().Evaluate(coordinates); } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/sdp/primal_dual.hpp000066400000000000000000000072211315013601400237460ustar00rootroot00000000000000/** * @file primal_dual.hpp * @author Stephen Tu * * A primal-dual interior point solver for semidefinite programs. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SDP_PRIMAL_DUAL_HPP #define MLPACK_CORE_OPTIMIZERS_SDP_PRIMAL_DUAL_HPP #include #include namespace mlpack { namespace optimization { /** * Interface to a primal-dual interior point solver. * * @tparam SDPType Type of the SDP problem to be solved. */ template<typename SDPType> class PrimalDualSolver { public: /** * Construct a new solver instance from a given SDP instance. * Uses a random, positive initialization point. * * @param sdp Initialized SDP to be solved. */ PrimalDualSolver(const SDPType& sdp); /** * Construct a new solver instance from a given SDP instance, using the given * initialization points. Both initialX and initialZ need to be positive * definite matrices. * * @param sdp Initialized SDP to be solved. * @param initialX Initial primal point; must be positive definite. * @param initialYSparse Initial dual variables for the sparse constraints. * @param initialYDense Initial dual variables for the dense constraints. * @param initialZ Initial dual slack matrix; must be positive definite. */ PrimalDualSolver(const SDPType& sdp, const arma::mat& initialX, const arma::vec& initialYSparse, const arma::vec& initialYDense, const arma::mat& initialZ); /** * Invoke the optimization procedure, returning the converged values for the * primal and dual variables.
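* The returned value is the primal objective dot(C, X) at the final iterate.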
* * @param X Converged primal matrix. * @param ySparse Converged dual variables for the sparse constraints. * @param yDense Converged dual variables for the dense constraints. * @param Z Converged dual slack matrix. */ double Optimize(arma::mat& X, arma::vec& ySparse, arma::vec& yDense, arma::mat& Z); /** * Invoke the optimization procedure, and only return the primal variable. * * @param X Converged primal matrix. */ double Optimize(arma::mat& X) { arma::vec ysparse, ydense; arma::mat Z; return Optimize(X, ysparse, ydense, Z); } //! Return the underlying SDP instance. const SDPType& SDP() const { return sdp; } //! Modify tau. A typical value is 0.99. double& Tau() { return tau; } //! Modify the XZ tolerance. double& NormXzTol() { return normXzTol; } //! Modify the primal infeasibility tolerance. double& PrimalInfeasTol() { return primalInfeasTol; } //! Modify the dual infeasibility tolerance. double& DualInfeasTol() { return dualInfeasTol; } //! Modify the maximum number of iterations to run before terminating. size_t& MaxIterations() { return maxIterations; } private: //! The SDP problem instance to optimize. SDPType sdp; //! Starting point for X. Needs to be positive definite. arma::mat initialX; //! Starting Lagrange multiplier for the sparse constraints. arma::vec initialYsparse; //! Starting Lagrange multiplier for the dense constraints. arma::vec initialYdense; //! Starting point for Z, the complementary slack variable. Needs to be //! positive definite. arma::mat initialZ; //! The step size modulating factor. Needs to be a scalar in (0, 1). double tau; //! The tolerance on the norm of XZ required before terminating. double normXzTol; //! The tolerance required on the primal constraints before terminating. double primalInfeasTol; //! The tolerance required on the dual constraint before terminating. double dualInfeasTol; //! Maximum number of iterations to run. Set to 0 for no limit. size_t maxIterations; }; } // namespace optimization } // namespace mlpack // Include implementation. #include "primal_dual_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/sdp/primal_dual_impl.hpp000066400000000000000000000362161315013601400247750ustar00rootroot00000000000000/** * @file primal_dual_impl.hpp * @author Stephen Tu * * Contains an implementation of the "XZ+ZX" primal-dual infeasible interior * point method with a Mehrotra predictor-corrector update step presented and * analyzed in: * * Primal-dual interior-point methods for semidefinite programming: * Convergence rates, stability and numerical results. * Farid Alizadeh, Jean-Pierre Haeberly, and Michael Overton. * SIAM J. Optim. 1998. * https://www.cs.nyu.edu/overton/papers/pdffiles/pdsdp.pdf * * We will refer to this paper as [AHO98] in this file. * * Note there are many optimizations that still need to be implemented. See the * code comments for more details. * * Also note the current implementation assumes the SDP problem has a strictly * feasible primal/dual point (and therefore the duality gap is zero), and * that the constraint matrices are linearly independent. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information.
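* * A usage sketch (assuming an SDP<arma::sp_mat> instance named sdp has already * been constructed; the names here are illustrative): * * @code * PrimalDualSolver<SDP<arma::sp_mat>> solver(sdp); * arma::mat X, Z; * arma::vec ysparse, ydense; * const double primalObj = solver.Optimize(X, ysparse, ydense, Z); * @endcode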
*/ #ifndef MLPACK_CORE_OPTIMIZERS_SDP_PRIMAL_DUAL_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_SDP_PRIMAL_DUAL_IMPL_HPP #include "primal_dual.hpp" namespace mlpack { namespace optimization { template PrimalDualSolver::PrimalDualSolver(const SDPType& sdp) : sdp(sdp), initialX(arma::eye(sdp.N(), sdp.N())), initialYsparse(arma::ones(sdp.NumSparseConstraints())), initialYdense(arma::ones(sdp.NumDenseConstraints())), initialZ(arma::eye(sdp.N(), sdp.N())), tau(0.99), normXzTol(1e-7), primalInfeasTol(1e-7), dualInfeasTol(1e-7), maxIterations(1000) { } template PrimalDualSolver::PrimalDualSolver(const SDPType& sdp, const arma::mat& initialX, const arma::vec& initialYsparse, const arma::vec& initialYdense, const arma::mat& initialZ) : sdp(sdp), initialX(initialX), initialYsparse(initialYsparse), initialYdense(initialYdense), initialZ(initialZ), tau(0.99), normXzTol(1e-7), primalInfeasTol(1e-7), dualInfeasTol(1e-7), maxIterations(1000) { arma::mat tmp; // Note that the algorithm we implement requires primal iterate X and // dual multiplier Z to be positive definite (but not feasible). if (initialX.n_rows != sdp.N() || initialX.n_cols != sdp.N()) Log::Fatal << "PrimalDualSolver::PrimalDualSolver(): " << "initialX needs to be square n x n matrix." << std::endl; if (!arma::chol(tmp, initialX)) Log::Fatal << "PrimalDualSolver::PrimalDualSolver(): " << "initialX needs to be symmetric positive definite." << std::endl; if (initialYsparse.n_elem != sdp.NumSparseConstraints()) Log::Fatal << "PrimalDualSolver::PrimalDualSolver(): " << "initialYsparse needs to have the same length as the number of sparse " << "constraints." << std::endl; if (initialYdense.n_elem != sdp.NumDenseConstraints()) Log::Fatal << "PrimalDualSolver::PrimalDualSolver(): " << "initialYdense needs to have the same length as the number of dense " << "constraints." << std::endl; if (initialZ.n_rows != sdp.N() || initialZ.n_cols != sdp.N()) Log::Fatal << "PrimalDualSolver::PrimalDualSolver(): " << "initialZ needs to be square n x n matrix." << std::endl; if (!arma::chol(tmp, initialZ)) Log::Fatal << "PrimalDualSolver::PrimalDualSolver(): " << "initialZ needs to be symmetric positive definite." << std::endl; } /** * Compute * * alpha = min(1, tau * alphahat(A, dA)) * * where * * alphahat = sup{ alphahat : A + dA is psd } * * See (2.18) of [AHO98] for more details. */ static inline double Alpha(const arma::mat& A, const arma::mat& dA, double tau) { // On Armadillo < 4.500, the "lower" option isn't available. #if (ARMA_VERSION_MAJOR < 4) || \ ((ARMA_VERSION_MAJOR == 4) && (ARMA_VERSION_MINOR < 500)) const arma::mat L = arma::chol(A).t(); // This is less efficient. #else const arma::mat L = arma::chol(A, "lower"); #endif const arma::mat Linv = arma::inv(arma::trimatl(L)); // TODO(stephentu): We only want the top eigenvalue, we should // be able to do better than full eigen-decomposition. const arma::vec evals = arma::eig_sym(-Linv * dA * Linv.t()); const double alphahatinv = evals(evals.n_elem - 1); double alphahat = 1. / alphahatinv; if (alphahat < 0.) // dA is PSD already alphahat = 1.; return std::min(1., tau * alphahat); } /** * Solve the following Lyapunov equation (for X) * * AX + XA = H * * where A, H are symmetric matrices. * * TODO(stephentu): Note this method current uses arma's builtin arma::syl * method, which is overkill for this situation. See Lemma 7.2 of [AHO98] for * how to solve this Lyapunov equation using an eigenvalue decomposition of A. 
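* * (For reference, that approach: if A = Q diag(d) Q^T is an eigendecomposition * and H~ = Q^T H Q, then the solution is given elementwise by * X~_{ij} = H~_{ij} / (d_i + d_j), with X = Q X~ Q^T; this costs one symmetric * eigendecomposition instead of a general Sylvester solve.)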
* */ static inline void SolveLyapunov(arma::mat& X, const arma::mat& A, const arma::mat& H) { arma::syl(X, A, A, -H); } /** * Solve the following KKT system (2.10) of [AHO98]: * * [ 0 A^T I ] [ dsx ] = [ rd ] * [ A 0 0 ] [ dy ] = [ rp ] * [ E 0 F ] [ dsz ] = [ rc ] * \---- M ----/ * * where * * A = [ Asparse ] * [ Adense ] * dy = [ dysparse dydense ] * E = Z sym I * F = X sym I * */ static inline void SolveKKTSystem(const arma::sp_mat& Asparse, const arma::mat& Adense, const arma::mat& Z, const arma::mat& M, const arma::mat& F, const arma::vec& rp, const arma::vec& rd, const arma::vec& rc, arma::vec& dsx, arma::vec& dysparse, arma::vec& dydense, arma::vec& dsz) { arma::mat Frd_rc_Mat, Einv_Frd_rc_Mat, Einv_Frd_ATdy_rc_Mat, Frd_ATdy_rc_Mat; arma::vec Einv_Frd_rc, Einv_Frd_ATdy_rc, dy; // Note: Whenever a formula calls for E^(-1) v for some v, we solve Lyapunov // equations instead of forming an explicit inverse. // Compute the RHS of (2.12) math::Smat(F * rd - rc, Frd_rc_Mat); SolveLyapunov(Einv_Frd_rc_Mat, Z, 2. * Frd_rc_Mat); math::Svec(Einv_Frd_rc_Mat, Einv_Frd_rc); arma::vec rhs = rp; const size_t numConstraints = Asparse.n_rows + Adense.n_rows; if (Asparse.n_rows) rhs(arma::span(0, Asparse.n_rows - 1)) += Asparse * Einv_Frd_rc; if (Adense.n_rows) rhs(arma::span(Asparse.n_rows, numConstraints - 1)) += Adense * Einv_Frd_rc; // TODO(stephentu): use a more efficient method (e.g. LU decomposition) if (!arma::solve(dy, M, rhs)) Log::Fatal << "PrimalDualSolver::SolveKKTSystem(): Could not solve KKT " << "system." << std::endl; if (Asparse.n_rows) dysparse = dy(arma::span(0, Asparse.n_rows - 1)); if (Adense.n_rows) dydense = dy(arma::span(Asparse.n_rows, numConstraints - 1)); // Compute dx from (2.13) math::Smat(F * (rd - Asparse.t() * dysparse - Adense.t() * dydense) - rc, Frd_ATdy_rc_Mat); SolveLyapunov(Einv_Frd_ATdy_rc_Mat, Z, 2. * Frd_ATdy_rc_Mat); math::Svec(Einv_Frd_ATdy_rc_Mat, Einv_Frd_ATdy_rc); dsx = -Einv_Frd_ATdy_rc; // Compute dz from (2.14) dsz = rd - Asparse.t() * dysparse - Adense.t() * dydense; } namespace private_ { // TODO(stephentu): should we move this somewhere more general? template struct vectype { }; template struct vectype> { typedef arma::Col type; }; template struct vectype> { typedef arma::SpCol type; }; } // namespace private_ template double PrimalDualSolver::Optimize(arma::mat& X, arma::vec& ysparse, arma::vec& ydense, arma::mat& Z) { // TODO(stephentu): We need a method which deals with the case when the Ais // are not linearly independent. const size_t n = sdp.N(); const size_t n2bar = sdp.N2bar(); // Form the A matrix in (2.7). Note we explicitly handle // sparse and dense constraints separately. 
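// Each row of Asparse / Adense holds svec(A_i)^T, so multiplying the stacked A // matrix by svec(X) yields the vector of constraint values dot(A_i, X).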
arma::sp_mat Asparse(sdp.NumSparseConstraints(), n2bar); arma::sp_vec Aisparse; for (size_t i = 0; i < sdp.NumSparseConstraints(); i++) { math::Svec(sdp.SparseA()[i], Aisparse); Asparse.row(i) = Aisparse.t(); } arma::mat Adense(sdp.NumDenseConstraints(), n2bar); arma::vec Aidense; for (size_t i = 0; i < sdp.NumDenseConstraints(); i++) { math::Svec(sdp.DenseA()[i], Aidense); Adense.row(i) = Aidense.t(); } typename private_::vectype::type sc; math::Svec(sdp.C(), sc); X = initialX; ysparse = initialYsparse; ydense = initialYdense; Z = initialZ; arma::vec sx, sz, dysparse, dydense, dsx, dsz; arma::mat dX, dZ; math::Svec(X, sx); math::Svec(Z, sz); arma::vec rp, rd, rc, gk; arma::mat Rc, F, Einv_F_AsparseT, Einv_F_AdenseT, Gk, M, DualCheck; rp.set_size(sdp.NumConstraints()); Einv_F_AsparseT.set_size(n2bar, sdp.NumSparseConstraints()); Einv_F_AdenseT.set_size(n2bar, sdp.NumDenseConstraints()); M.set_size(sdp.NumConstraints(), sdp.NumConstraints()); double primalObj = 0., alpha, beta; for (size_t iteration = 1; iteration != maxIterations; iteration++) { // Note: The Mehrotra PC algorithm works like this at a high level. // We first solve a KKT system with mu=0. Then, we use the results // of this KKT system to get a better estimate of mu and solve // the KKT system again. Empirically, this PC step has been shown to // significantly reduce the number of required iterations (and is used // by most practical solver implementations). if (sdp.NumSparseConstraints()) rp(arma::span(0, sdp.NumSparseConstraints() - 1)) = sdp.SparseB() - Asparse * sx; if (sdp.NumDenseConstraints()) rp(arma::span(sdp.NumSparseConstraints(), sdp.NumConstraints() - 1)) = sdp.DenseB() - Adense * sx; // Rd = C - Z - smat A^T y rd = sc - sz - Asparse.t() * ysparse - Adense.t() * ydense; math::SymKronId(X, F); // We compute E^(-1) F A^T by solving Lyapunov equations. // See (2.16). for (size_t i = 0; i < sdp.NumSparseConstraints(); i++) { SolveLyapunov(Gk, Z, X * sdp.SparseA()[i] + sdp.SparseA()[i] * X); math::Svec(Gk, gk); Einv_F_AsparseT.col(i) = gk; } for (size_t i = 0; i < sdp.NumDenseConstraints(); i++) { SolveLyapunov(Gk, Z, X * sdp.DenseA()[i] + sdp.DenseA()[i] * X); math::Svec(Gk, gk); Einv_F_AdenseT.col(i) = gk; } // Form the M = A E^(-1) F A^T matrix (2.15) // // Since we split A up into its sparse and dense components, // we have to handle each block separately. if (sdp.NumSparseConstraints()) { M.submat(arma::span(0, sdp.NumSparseConstraints() - 1), arma::span(0, sdp.NumSparseConstraints() - 1)) = Asparse * Einv_F_AsparseT; if (sdp.NumDenseConstraints()) { M.submat(arma::span(0, sdp.NumSparseConstraints() - 1), arma::span(sdp.NumSparseConstraints(), sdp.NumConstraints() - 1)) = Asparse * Einv_F_AdenseT; } } if (sdp.NumDenseConstraints()) { if (sdp.NumSparseConstraints()) { M.submat(arma::span(sdp.NumSparseConstraints(), sdp.NumConstraints() - 1), arma::span(0, sdp.NumSparseConstraints() - 1)) = Adense * Einv_F_AsparseT; } M.submat(arma::span(sdp.NumSparseConstraints(), sdp.NumConstraints() - 1), arma::span(sdp.NumSparseConstraints(), sdp.NumConstraints() - 1)) = Adense * Einv_F_AdenseT; } const double sxdotsz = arma::dot(sx, sz); // TODO(stephentu): computing these alphahats should take advantage of // the cholesky decomposition of X and Z which we should have available // when we use more efficient methods above. // This solves step (1) of Section 7, the "predictor" step. 
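// The predictor takes sigma = 0 (no centering term), so the complementarity // residual is simply Rc = -(XZ + ZX) / 2; the corrector below re-solves the // system with the centering term mu * I (and the second-order dX, dZ terms) // included.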
Rc = -0.5*(X*Z + Z*X); math::Svec(Rc, rc); SolveKKTSystem(Asparse, Adense, Z, M, F, rp, rd, rc, dsx, dysparse, dydense, dsz); math::Smat(dsx, dX); math::Smat(dsz, dZ); // Step (2), determine step size lengths (alpha, beta) alpha = Alpha(X, dX, tau); beta = Alpha(Z, dZ, tau); // See (7.1) const double sigma = std::pow(arma::dot(X + alpha * dX, Z + beta * dZ) / sxdotsz, 3); const double mu = sigma * sxdotsz / n; // Step (3), the "corrector" step. Rc = mu*arma::eye(n, n) - 0.5*(X*Z + Z*X + dX*dZ + dZ*dX); math::Svec(Rc, rc); SolveKKTSystem(Asparse, Adense, Z, M, F, rp, rd, rc, dsx, dysparse, dydense, dsz); math::Smat(dsx, dX); math::Smat(dsz, dZ); alpha = Alpha(X, dX, tau); beta = Alpha(Z, dZ, tau); // Iterate update X += alpha * dX; math::Svec(X, sx); ysparse += beta * dysparse; ydense += beta * dydense; Z += beta * dZ; math::Svec(Z, sz); // Below, we check the KKT conditions. Recall the KKT conditions are // // (1) Primal feasibility // (2) Dual feasibility // (3) XZ = 0 (slackness condition) // // If the KKT conditions are satisfied to a certain degree of precision, // then we consider this a valid certificate of optimality and terminate. // Otherwise, we proceed onwards. const double normXZ = arma::norm(X * Z, "fro"); const double sparsePrimalInfeas = arma::norm(sdp.SparseB() - Asparse * sx, 2); const double densePrimalInfeas = arma::norm(sdp.DenseB() - Adense * sx, 2); const double primalInfeas = sqrt(sparsePrimalInfeas * sparsePrimalInfeas + densePrimalInfeas * densePrimalInfeas); primalObj = arma::dot(sdp.C(), X); const double dualObj = arma::dot(sdp.SparseB(), ysparse) + arma::dot(sdp.DenseB(), ydense); const double dualityGap = primalObj - dualObj; // TODO(stephentu): this dual check is quite expensive, // maybe make it optional? DualCheck = Z - sdp.C(); for (size_t i = 0; i < sdp.NumSparseConstraints(); i++) DualCheck += ysparse(i) * sdp.SparseA()[i]; for (size_t i = 0; i < sdp.NumDenseConstraints(); i++) DualCheck += ydense(i) * sdp.DenseA()[i]; const double dualInfeas = arma::norm(DualCheck, "fro"); Log::Debug << "iter=" << iteration << ", " << "primal=" << primalObj << ", " << "dual=" << dualObj << ", " << "gap=" << dualityGap << ", " << "||XZ||=" << normXZ << ", " << "primalInfeas=" << primalInfeas << ", " << "dualInfeas=" << dualInfeas << ", " << "mu=" << mu << std::endl; if (normXZ <= normXzTol && primalInfeas <= primalInfeasTol && dualInfeas <= dualInfeasTol) return primalObj; } Log::Warn << "PrimalDualSolver::Optimizer(): Did not converge after " << maxIterations << " iterations!" << std::endl; return primalObj; } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/sdp/sdp.hpp000066400000000000000000000117451315013601400222510ustar00rootroot00000000000000/** * @file sdp.hpp * @author Stephen Tu * * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SDP_SDP_HPP #define MLPACK_CORE_OPTIMIZERS_SDP_SDP_HPP #include #include namespace mlpack { namespace optimization { /** * Specify an SDP in primal form * * min dot(C, X) * s.t. dot(Ai, X) = bi, i=1,...,m, X >= 0 * * This representation allows the constraint matrices Ai to be specified as * either dense matrices (arma::mat) or sparse matrices (arma::sp_mat). 
After * initializing the SDP object, you will need to set the constraints yourself, * via the SparseA(), SparseB(), DenseA(), DenseB(), and C() functions. Note * that for each matrix you add to either SparseA() or DenseA(), you must add * the corresponding b value to the corresponding vector SparseB() or DenseB(). * * The objective matrix (C) may be stored as either dense or sparse depending on * the ObjectiveMatrixType parameter. * * @tparam ObjectiveMatrixType Should be either arma::mat or arma::sp_mat. */ template<typename ObjectiveMatrixType> class SDP { public: typedef ObjectiveMatrixType objective_matrix_type; /** * Initialize this SDP to an empty state. To add constraints, you will have * to modify the constraints via the SparseA(), DenseA(), SparseB(), DenseB(), * and C() functions. For the sake of speed, there is no error checking, so * if you specify an invalid SDP, whatever solver you use will gladly try to * solve it! (And it will probably crash horribly.) */ SDP(); /** * Initialize this SDP to one which structurally has size n. To set the * constraints you will still need to access through SparseA(), DenseA(), * SparseB(), DenseB(), and C(). Consider using move semantics to keep things * fast. As with the previous constructor, there is no error checking for the * sake of speed, so if you build an invalid SDP, whatever solver you use will * gladly try to solve it! (And it will probably crash horribly.) * * @param n Number of rows (and columns) in the objective matrix C. * @param numSparseConstraints Number of sparse constraints. * @param numDenseConstraints Number of dense constraints. */ SDP(const size_t n, const size_t numSparseConstraints, const size_t numDenseConstraints); //! Return the number of rows and columns in the objective matrix C. size_t N() const { return c.n_rows; } //! Return the length of svec(X) for an n x n matrix X, i.e. n * (n + 1) / 2. size_t N2bar() const { return N() * (N() + 1) / 2; } //! Return the number of sparse constraints (constraints with sparse Ai) in //! the SDP. size_t NumSparseConstraints() const { return sparseB.n_elem; } //! Return the number of dense constraints (constraints with dense Ai) in the //! SDP. size_t NumDenseConstraints() const { return denseB.n_elem; } //! Return the total number of constraints in the SDP. size_t NumConstraints() const { return sparseB.n_elem + denseB.n_elem; } //! Modify the objective function matrix (c). ObjectiveMatrixType& C() { return c; } //! Return the objective function matrix (c). const ObjectiveMatrixType& C() const { return c; } //! Return the vector of sparse A matrices (which correspond to the sparse //! constraints). const std::vector<arma::sp_mat>& SparseA() const { return sparseA; } //! Modify the vector of sparse A matrices (which correspond to the sparse //! constraints). std::vector<arma::sp_mat>& SparseA() { return sparseA; } //! Return the vector of dense A matrices (which correspond to the dense //! constraints). const std::vector<arma::mat>& DenseA() const { return denseA; } //! Modify the vector of dense A matrices (which correspond to the dense //! constraints). std::vector<arma::mat>& DenseA() { return denseA; } //! Return the vector of sparse B values. const arma::vec& SparseB() const { return sparseB; } //! Modify the vector of sparse B values. arma::vec& SparseB() { return sparseB; } //! Return the vector of dense B values. const arma::vec& DenseB() const { return denseB; } //! Modify the vector of dense B values. arma::vec& DenseB() { return denseB; } /** * Check whether or not the constraint matrices are linearly independent. * * Warning: possibly very expensive check.
*/ bool HasLinearlyIndependentConstraints() const; private: //! Objective function matrix c. ObjectiveMatrixType c; //! A_i for each sparse constraint. std::vector sparseA; //! b_i for each sparse constraint. arma::vec sparseB; //! A_i for each dense constraint. std::vector denseA; //! b_i for each dense constraint. arma::vec denseB; }; } // namespace optimization } // namespace mlpack // Include implementation. #include "sdp_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/sdp/sdp_impl.hpp000066400000000000000000000035731315013601400232720ustar00rootroot00000000000000/** * @file sdp_impl.hpp * @author Stephen Tu * * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SDP_SDP_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_SDP_SDP_IMPL_HPP #include "sdp.hpp" namespace mlpack { namespace optimization { template SDP::SDP() : c(), sparseA(), sparseB(), denseA(), denseB() { } template SDP::SDP(const size_t n, const size_t numSparseConstraints, const size_t numDenseConstraints) : c(n, n), sparseA(numSparseConstraints), sparseB(numSparseConstraints), denseA(numDenseConstraints), denseB(numDenseConstraints) { for (size_t i = 0; i < numSparseConstraints; i++) sparseA[i].zeros(n, n); for (size_t i = 0; i < numDenseConstraints; i++) denseA[i].zeros(n, n); } template bool SDP::HasLinearlyIndependentConstraints() const { // Very inefficient, should only be used for testing/debugging const size_t n2bar = N2bar(); arma::mat A(NumConstraints(), n2bar); if (A.n_rows > n2bar) return false; for (size_t i = 0; i < NumSparseConstraints(); i++) { arma::vec sa; math::Svec(arma::mat(SparseA()[i]), sa); A.row(i) = sa.t(); } for (size_t i = 0; i < NumDenseConstraints(); i++) { arma::vec sa; math::Svec(DenseA()[i], sa); A.row(NumSparseConstraints() + i) = sa.t(); } const arma::vec s = arma::svd(A); return s(s.n_elem - 1) > 1e-5; } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/sgd/000077500000000000000000000000001315013601400207315ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/optimizers/sgd/CMakeLists.txt000066400000000000000000000003761315013601400234770ustar00rootroot00000000000000set(SOURCES sgd.hpp sgd_impl.hpp test_function.hpp test_function.cpp ) set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/optimizers/sgd/sgd.hpp000066400000000000000000000135341315013601400222250ustar00rootroot00000000000000/** * @file sgd.hpp * @author Ryan Curtin * * Stochastic Gradient Descent (SGD). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SGD_SGD_HPP #define MLPACK_CORE_OPTIMIZERS_SGD_SGD_HPP #include namespace mlpack { namespace optimization { /** * Stochastic Gradient Descent is a technique for minimizing a function which * can be expressed as a sum of other functions. That is, suppose we have * * \f[ * f(A) = \sum_{i = 0}^{n} f_i(A) * \f] * * and our task is to minimize \f$ A \f$. 
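* (A regression objective, for instance, decomposes this way, with one * \f$ f_i(A) \f$ measuring the loss on the \f$ i \f$-th data point alone.)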
Stochastic gradient descent iterates
 * over each function \f$ f_i(A) \f$, producing the following update scheme:
 *
 * \f[
 * A_{j + 1} = A_j - \alpha \nabla f_i(A)
 * \f]
 *
 * where \f$ \alpha \f$ is a parameter which specifies the step size.  \f$ i
 * \f$ is chosen according to \f$ j \f$ (the iteration number).  The SGD class
 * supports either scanning through each of the \f$ n \f$ functions
 * \f$ f_i(A) \f$ linearly, or in a random sequence.  The algorithm continues
 * until \f$ j \f$ reaches the maximum number of iterations, or until a full
 * sequence of updates through each of the \f$ n \f$ functions \f$ f_i(A) \f$
 * produces an improvement within a certain tolerance \f$ \epsilon \f$.  That
 * is,
 *
 * \f[
 * | f(A_{j + n}) - f(A_j) | < \epsilon.
 * \f]
 *
 * The parameter \f$ \epsilon \f$ is specified by the tolerance parameter to
 * the constructor, and the maximum value \f$ j \f$ may take is specified by
 * the maxIterations parameter (\f$ n \f$ is simply the number of functions).
 *
 * This class is useful for data-dependent functions whose objective function
 * can be expressed as a sum of objective functions operating on an individual
 * point.  Then, SGD considers the gradient of the objective function
 * operating on an individual point in its update of \f$ A \f$.
 *
 * For SGD to work, a DecomposableFunctionType template parameter is required.
 * That class must implement the following functions:
 *
 *   size_t NumFunctions();
 *   double Evaluate(const arma::mat& coordinates, const size_t i);
 *   void Gradient(const arma::mat& coordinates,
 *                 const size_t i,
 *                 arma::mat& gradient);
 *
 * NumFunctions() should return the number of functions (\f$ n \f$), and in
 * the other two functions, the parameter i refers to which individual
 * function (or gradient) is being evaluated.  So, for the case of a
 * data-dependent function, such as NCA (see mlpack::nca::NCA), NumFunctions()
 * should return the number of points in the dataset, and
 * Evaluate(coordinates, 0) will evaluate the objective function on the first
 * point in the dataset (presumably, the dataset is held internally in the
 * DecomposableFunctionType).
 *
 * @tparam DecomposableFunctionType Decomposable objective function type to be
 *     minimized.
 */
template<typename DecomposableFunctionType>
class SGD
{
 public:
  /**
   * Construct the SGD optimizer with the given function and parameters.  The
   * defaults here are not necessarily good for the given problem, so it is
   * suggested that the values used be tailored to the task at hand.  The
   * maximum number of iterations refers to the maximum number of points that
   * are processed (i.e., one iteration equals one point; one iteration does
   * not equal one pass over the dataset).
   *
   * @param function Function to be optimized (minimized).
   * @param stepSize Step size for each iteration.
   * @param maxIterations Maximum number of iterations allowed (0 means no
   *     limit).
   * @param tolerance Maximum absolute tolerance to terminate algorithm.
   * @param shuffle If true, the function order is shuffled; otherwise, each
   *     function is visited in linear order.
   */
  SGD(DecomposableFunctionType& function,
      const double stepSize = 0.01,
      const size_t maxIterations = 100000,
      const double tolerance = 1e-5,
      const bool shuffle = true);

  /**
   * Optimize the given function using stochastic gradient descent.  The given
   * starting point will be modified to store the finishing point of the
   * algorithm, and the final objective value is returned.
   *
   * @param iterate Starting point (will be modified).
   * @return Objective value of the final point.
   */
  double Optimize(arma::mat& iterate);

  //! Get the instantiated function to be optimized.
const DecomposableFunctionType& Function() const { return function; } //! Modify the instantiated function. DecomposableFunctionType& Function() { return function; } //! Get the step size. double StepSize() const { return stepSize; } //! Modify the step size. double& StepSize() { return stepSize; } //! Get the maximum number of iterations (0 indicates no limit). size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations (0 indicates no limit). size_t& MaxIterations() { return maxIterations; } //! Get the tolerance for termination. double Tolerance() const { return tolerance; } //! Modify the tolerance for termination. double& Tolerance() { return tolerance; } //! Get whether or not the individual functions are shuffled. bool Shuffle() const { return shuffle; } //! Modify whether or not the individual functions are shuffled. bool& Shuffle() { return shuffle; } private: //! The instantiated function. DecomposableFunctionType& function; //! The step size for each example. double stepSize; //! The maximum number of allowed iterations. size_t maxIterations; //! The tolerance for termination. double tolerance; //! Controls whether or not the individual functions are shuffled when //! iterating. bool shuffle; }; } // namespace optimization } // namespace mlpack // Include implementation. #include "sgd_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/optimizers/sgd/sgd_impl.hpp000066400000000000000000000076711315013601400232530ustar00rootroot00000000000000/** * @file sgd_impl.hpp * @author Ryan Curtin * * Implementation of stochastic gradient descent. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SGD_SGD_IMPL_HPP #define MLPACK_CORE_OPTIMIZERS_SGD_SGD_IMPL_HPP #include // In case it hasn't been included yet. #include "sgd.hpp" namespace mlpack { namespace optimization { template SGD::SGD(DecomposableFunctionType& function, const double stepSize, const size_t maxIterations, const double tolerance, const bool shuffle) : function(function), stepSize(stepSize), maxIterations(maxIterations), tolerance(tolerance), shuffle(shuffle) { /* Nothing to do. */ } //! Optimize the function (minimize). template double SGD::Optimize(arma::mat& iterate) { // Find the number of functions to use. const size_t numFunctions = function.NumFunctions(); // This is used only if shuffle is true. arma::Col visitationOrder; if (shuffle) visitationOrder = arma::shuffle(arma::linspace>(0, (numFunctions - 1), numFunctions)); // To keep track of where we are and how things are going. size_t currentFunction = 0; double overallObjective = 0; double lastObjective = DBL_MAX; // Calculate the first objective function. for (size_t i = 0; i < numFunctions; ++i) overallObjective += function.Evaluate(iterate, i); // Now iterate! arma::mat gradient(iterate.n_rows, iterate.n_cols); for (size_t i = 1; i != maxIterations; ++i, ++currentFunction) { // Is this iteration the start of a sequence? if ((currentFunction % numFunctions) == 0) { // Output current objective function. Log::Info << "SGD: iteration " << i << ", objective " << overallObjective << "." << std::endl; if (std::isnan(overallObjective) || std::isinf(overallObjective)) { Log::Warn << "SGD: converged to " << overallObjective << "; terminating" << " with failure. 
Try a smaller step size?" << std::endl; return overallObjective; } if (std::abs(lastObjective - overallObjective) < tolerance) { Log::Info << "SGD: minimized within tolerance " << tolerance << "; " << "terminating optimization." << std::endl; return overallObjective; } // Reset the counter variables. lastObjective = overallObjective; overallObjective = 0; currentFunction = 0; if (shuffle) // Determine order of visitation. visitationOrder = arma::shuffle(visitationOrder); } // Evaluate the gradient for this iteration. if (shuffle) function.Gradient(iterate, visitationOrder[currentFunction], gradient); else function.Gradient(iterate, currentFunction, gradient); // And update the iterate. iterate -= stepSize * gradient; // Now add that to the overall objective function. if (shuffle) overallObjective += function.Evaluate(iterate, visitationOrder[currentFunction]); else overallObjective += function.Evaluate(iterate, currentFunction); } Log::Info << "SGD: maximum iterations (" << maxIterations << ") reached; " << "terminating optimization." << std::endl; // Calculate final objective. overallObjective = 0; for (size_t i = 0; i < numFunctions; ++i) overallObjective += function.Evaluate(iterate, i); return overallObjective; } } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/optimizers/sgd/test_function.cpp000066400000000000000000000026771315013601400243350ustar00rootroot00000000000000/** * @file test_function.cpp * @author Ryan Curtin * * Implementation of very simple test function for stochastic gradient descent * (SGD). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "test_function.hpp" using namespace mlpack; using namespace mlpack::optimization; using namespace mlpack::optimization::test; double SGDTestFunction::Evaluate(const arma::mat& coordinates, const size_t i) const { switch (i) { case 0: return -std::exp(-std::abs(coordinates[0])); case 1: return std::pow(coordinates[1], 2); case 2: return std::pow(coordinates[2], 4) + 3 * std::pow(coordinates[2], 2); default: return 0; } } void SGDTestFunction::Gradient(const arma::mat& coordinates, const size_t i, arma::mat& gradient) const { gradient.zeros(3); switch (i) { case 0: if (coordinates[0] >= 0) gradient[0] = std::exp(-coordinates[0]); else gradient[0] = -std::exp(coordinates[0]); break; case 1: gradient[1] = 2 * coordinates[1]; break; case 2: gradient[2] = 4 * std::pow(coordinates[2], 3) + 6 * coordinates[2]; break; } } mlpack-2.2.5/src/mlpack/core/optimizers/sgd/test_function.hpp000066400000000000000000000027011315013601400243260ustar00rootroot00000000000000/** * @file test_function.hpp * @author Ryan Curtin * * Very simple test function for SGD. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_OPTIMIZERS_SGD_TEST_FUNCTION_HPP #define MLPACK_CORE_OPTIMIZERS_SGD_TEST_FUNCTION_HPP #include namespace mlpack { namespace optimization { namespace test { //! Very, very simple test function which is the composite of three other //! functions. The gradient is not very steep far away from the optimum, so a //! 
larger step size may be required to optimize it in a reasonable number of //! iterations. class SGDTestFunction { public: //! Nothing to do for the constructor. SGDTestFunction() { } //! Return 3 (the number of functions). size_t NumFunctions() const { return 3; } //! Get the starting point. arma::mat GetInitialPoint() const { return arma::mat("6; -45.6; 6.2"); } //! Evaluate a function. double Evaluate(const arma::mat& coordinates, const size_t i) const; //! Evaluate the gradient of a function. void Gradient(const arma::mat& coordinates, const size_t i, arma::mat& gradient) const; }; } // namespace test } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/000077500000000000000000000000001315013601400167065ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/tree/CMakeLists.txt000066400000000000000000000115531315013601400214530ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES address.hpp ballbound.hpp ballbound_impl.hpp binary_space_tree.hpp binary_space_tree/binary_space_tree.hpp binary_space_tree/binary_space_tree_impl.hpp binary_space_tree/breadth_first_dual_tree_traverser.hpp binary_space_tree/breadth_first_dual_tree_traverser_impl.hpp binary_space_tree/dual_tree_traverser.hpp binary_space_tree/dual_tree_traverser_impl.hpp binary_space_tree/mean_split.hpp binary_space_tree/mean_split_impl.hpp binary_space_tree/midpoint_split.hpp binary_space_tree/midpoint_split_impl.hpp binary_space_tree/rp_tree_max_split.hpp binary_space_tree/rp_tree_max_split_impl.hpp binary_space_tree/rp_tree_mean_split.hpp binary_space_tree/rp_tree_mean_split_impl.hpp binary_space_tree/single_tree_traverser.hpp binary_space_tree/single_tree_traverser_impl.hpp binary_space_tree/vantage_point_split.hpp binary_space_tree/vantage_point_split_impl.hpp binary_space_tree/traits.hpp binary_space_tree/typedef.hpp binary_space_tree/ub_tree_split.hpp binary_space_tree/ub_tree_split_impl.hpp bounds.hpp bound_traits.hpp cellbound.hpp cellbound_impl.hpp cosine_tree/cosine_tree.hpp cosine_tree/cosine_tree.cpp cover_tree/cover_tree.hpp cover_tree/cover_tree_impl.hpp cover_tree/first_point_is_root.hpp cover_tree/single_tree_traverser.hpp cover_tree/single_tree_traverser_impl.hpp cover_tree/dual_tree_traverser.hpp cover_tree/dual_tree_traverser_impl.hpp cover_tree/traits.hpp cover_tree/typedef.hpp example_tree.hpp greedy_single_tree_traverser.hpp greedy_single_tree_traverser_impl.hpp hollow_ball_bound.hpp hollow_ball_bound_impl.hpp hrectbound.hpp hrectbound_impl.hpp octree.hpp octree/octree.hpp octree/octree_impl.hpp octree/single_tree_traverser.hpp octree/single_tree_traverser_impl.hpp octree/dual_tree_traverser.hpp octree/dual_tree_traverser_impl.hpp octree/traits.hpp perform_split.hpp rectangle_tree.hpp rectangle_tree/rectangle_tree.hpp rectangle_tree/rectangle_tree_impl.hpp rectangle_tree/single_tree_traverser.hpp rectangle_tree/single_tree_traverser_impl.hpp rectangle_tree/dual_tree_traverser.hpp rectangle_tree/dual_tree_traverser_impl.hpp rectangle_tree/r_tree_split.hpp rectangle_tree/r_tree_split_impl.hpp rectangle_tree/no_auxiliary_information.hpp rectangle_tree/r_tree_descent_heuristic.hpp rectangle_tree/r_tree_descent_heuristic_impl.hpp rectangle_tree/r_star_tree_descent_heuristic.hpp rectangle_tree/r_star_tree_descent_heuristic_impl.hpp rectangle_tree/r_star_tree_split.hpp rectangle_tree/r_star_tree_split_impl.hpp rectangle_tree/x_tree_split.hpp rectangle_tree/x_tree_split_impl.hpp 
rectangle_tree/x_tree_auxiliary_information.hpp rectangle_tree/hilbert_r_tree_descent_heuristic.hpp rectangle_tree/hilbert_r_tree_descent_heuristic_impl.hpp rectangle_tree/hilbert_r_tree_split.hpp rectangle_tree/hilbert_r_tree_split_impl.hpp rectangle_tree/hilbert_r_tree_auxiliary_information.hpp rectangle_tree/hilbert_r_tree_auxiliary_information_impl.hpp rectangle_tree/discrete_hilbert_value.hpp rectangle_tree/discrete_hilbert_value_impl.hpp rectangle_tree/r_plus_tree_descent_heuristic.hpp rectangle_tree/r_plus_tree_descent_heuristic_impl.hpp rectangle_tree/minimal_coverage_sweep.hpp rectangle_tree/minimal_coverage_sweep_impl.hpp rectangle_tree/minimal_splits_number_sweep.hpp rectangle_tree/minimal_splits_number_sweep_impl.hpp rectangle_tree/r_plus_tree_split.hpp rectangle_tree/r_plus_tree_split_impl.hpp rectangle_tree/r_plus_tree_split_policy.hpp rectangle_tree/r_plus_plus_tree_descent_heuristic.hpp rectangle_tree/r_plus_plus_tree_descent_heuristic_impl.hpp rectangle_tree/r_plus_plus_tree_split_policy.hpp rectangle_tree/r_plus_plus_tree_auxiliary_information.hpp rectangle_tree/r_plus_plus_tree_auxiliary_information_impl.hpp space_split/hyperplane.hpp space_split/mean_space_split.hpp space_split/mean_space_split_impl.hpp space_split/midpoint_space_split.hpp space_split/midpoint_space_split_impl.hpp space_split/projection_vector.hpp space_split/space_split.hpp space_split/space_split_impl.hpp spill_tree.hpp spill_tree/is_spill_tree.hpp spill_tree/spill_tree.hpp spill_tree/spill_tree_impl.hpp spill_tree/spill_dual_tree_traverser.hpp spill_tree/spill_dual_tree_traverser_impl.hpp spill_tree/spill_single_tree_traverser.hpp spill_tree/spill_single_tree_traverser_impl.hpp spill_tree/traits.hpp spill_tree/typedef.hpp statistic.hpp traversal_info.hpp tree_traits.hpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/tree/address.hpp000066400000000000000000000214631315013601400210520ustar00rootroot00000000000000/** * @file address.hpp * @author Mikhail Lozhnikov * * This file contains a series of functions for translating points to addresses * and back and functions for comparing addresses. * * The notion of addresses is described in the following paper. * @code * @inproceedings{bayer1997, * author = {Bayer, Rudolf}, * title = {The Universal B-Tree for Multidimensional Indexing: General * Concepts}, * booktitle = {Proceedings of the International Conference on Worldwide * Computing and Its Applications}, * series = {WWCA '97}, * year = {1997}, * isbn = {3-540-63343-X}, * pages = {198--209}, * numpages = {12}, * publisher = {Springer-Verlag}, * address = {London, UK, UK}, * } * @endcode */ #ifndef MLPACK_CORE_TREE_ADDRESS_HPP #define MLPACK_CORE_TREE_ADDRESS_HPP namespace mlpack { namespace bound { namespace addr { /** * Calculate the address of a point. Be careful, the point and the address * variables should be equal-sized and the type of the address should correspond * to the type of the vector. * * The function maps each floating point coordinate to an equal-sized unsigned * integer datatype in such a way that the transform preserves the ordering * (i.e. lower floating point values correspond to lower integers). 
Thus, * the mapping saves the exponent and the mantissa of each floating point value * consequently, furthermore the exponent is stored before the mantissa. In the * case of negative numbers the resulting integer value should be inverted. * In the multi-dimensional case, after we transform the representation, we * have to interleave the bits of the new representation across all the elements * in the address vector. * * @param address The resulting address. * @param point The point that is being translated to the address. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ template void PointToAddress(AddressType& address, const VecType& point) { typedef typename VecType::elem_type VecElemType; // Check that the arguments are compatible. typedef typename std::conditional::type AddressElemType; static_assert(std::is_same::value == true, "The vector element type does not " "correspond to the address element type."); arma::Col result(point.n_elem); constexpr size_t order = sizeof(AddressElemType) * CHAR_BIT; // Calculate the number of bits for the exponent. const int numExpBits = std::ceil(std::log2( std::numeric_limits::max_exponent - std::numeric_limits::min_exponent + 1.0)); // Calculate the number of bits for the mantissa. const int numMantBits = order - numExpBits - 1; assert(point.n_elem == address.n_elem); assert(address.n_elem > 0); for (size_t i = 0; i < point.n_elem; i++) { int e; VecElemType normalizedVal = std::frexp(point(i),&e); bool sgn = std::signbit(normalizedVal); if (point(i) == 0) e = std::numeric_limits::min_exponent; if (sgn) normalizedVal = -normalizedVal; if (e < std::numeric_limits::min_exponent) { AddressElemType tmp = (AddressElemType) 1 << (std::numeric_limits::min_exponent - e); e = std::numeric_limits::min_exponent; normalizedVal /= tmp; } // Extract the mantissa. AddressElemType tmp = (AddressElemType) 1 << numMantBits; result(i) = std::floor(normalizedVal * tmp); // Add the exponent. assert(result(i) < ((AddressElemType) 1 << numMantBits)); result(i) |= ((AddressElemType) (e - std::numeric_limits::min_exponent)) << numMantBits; assert(result(i) < ((AddressElemType) 1 << (order - 1)) - 1); // Negative values should be inverted. if (sgn) { result(i) = ((AddressElemType) 1 << (order - 1)) - 1 - result(i); assert((result(i) >> (order - 1)) == 0); } else { result(i) |= (AddressElemType) 1 << (order - 1); assert((result(i) >> (order - 1)) == 1); } } address.zeros(point.n_elem); // Interleave the bits of the new representation across all the elements // in the address vector. for (size_t i = 0; i < order; i++) for (size_t j = 0; j < point.n_elem; j++) { size_t bit = (i * point.n_elem + j) % order; size_t row = (i * point.n_elem + j) / order; address(row) |= (((result(j) >> (order - 1 - i)) & 1) << (order - 1 - bit)); } } /** * Translate the address to the point. Be careful, the point and the address * variables should be equal-sized and the type of the address should correspond * to the type of the vector. * * The function makes the backward transform to the function above. * * @param address An address to translate. * @param point The point that corresponds to the address. 
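 *
 * A minimal round-trip sketch (assuming double-precision points, whose
 * matching address element type is a 64-bit unsigned integer):
 *
 * @code
 * arma::vec point(3, arma::fill::randu);
 * arma::Col<uint64_t> address(3);
 *
 * mlpack::bound::addr::PointToAddress(address, point);
 *
 * arma::vec recovered(3);
 * mlpack::bound::addr::AddressToPoint(recovered, address);
 * // recovered now approximates point (the mantissa is truncated).
 * @endcode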
 */
template<typename AddressType, typename VecType>
void AddressToPoint(VecType& point, const AddressType& address)
{
  typedef typename VecType::elem_type VecElemType;
  // Check that the arguments are compatible.
  typedef typename std::conditional<sizeof(VecElemType) * CHAR_BIT <= 32,
                                    uint32_t,
                                    uint64_t>::type AddressElemType;

  static_assert(std::is_same<typename AddressType::elem_type,
      AddressElemType>::value == true, "The vector element type does not "
      "correspond to the address element type.");

  constexpr size_t order = sizeof(AddressElemType) * CHAR_BIT;
  // Calculate the number of bits for the exponent.
  const int numExpBits = std::ceil(std::log2(
      std::numeric_limits<VecElemType>::max_exponent -
      std::numeric_limits<VecElemType>::min_exponent + 1.0));

  assert(point.n_elem == address.n_elem);
  assert(address.n_elem > 0);

  arma::Col<AddressElemType> rearrangedAddress(address.n_elem,
      arma::fill::zeros);
  // Calculate the number of bits for the mantissa.
  const int numMantBits = order - numExpBits - 1;

  for (size_t i = 0; i < order; i++)
    for (size_t j = 0; j < address.n_elem; j++)
    {
      size_t bit = (i * address.n_elem + j) % order;
      size_t row = (i * address.n_elem + j) / order;

      rearrangedAddress(j) |= (((address(row) >> (order - 1 - bit)) & 1) <<
          (order - 1 - i));
    }

  for (size_t i = 0; i < rearrangedAddress.n_elem; i++)
  {
    bool sgn = rearrangedAddress(i) & ((AddressElemType) 1 << (order - 1));

    if (!sgn)
    {
      rearrangedAddress(i) = ((AddressElemType) 1 << (order - 1)) - 1 -
          rearrangedAddress(i);
    }

    // Extract the mantissa.
    AddressElemType tmp = (AddressElemType) 1 << numMantBits;
    AddressElemType mantissa = rearrangedAddress(i) & (tmp - 1);

    if (mantissa == 0)
      mantissa = 1;

    VecElemType normalizedVal = (VecElemType) mantissa / tmp;

    if (!sgn)
      normalizedVal = -normalizedVal;

    // Extract the exponent.
    tmp = (AddressElemType) 1 << numExpBits;
    AddressElemType e = (rearrangedAddress(i) >> numMantBits) & (tmp - 1);

    e += std::numeric_limits<VecElemType>::min_exponent;

    point(i) = std::ldexp(normalizedVal, e);

    if (std::isinf(point(i)))
    {
      if (point(i) > 0)
        point(i) = std::numeric_limits<VecElemType>::max();
      else
        point(i) = std::numeric_limits<VecElemType>::lowest();
    }
  }
}

/**
 * Compare two addresses.  The function returns 1 if the first address is
 * greater than the second one, -1 if the first address is less than the
 * second one, otherwise the function returns 0.
 */
template<typename AddressType1, typename AddressType2>
int CompareAddresses(const AddressType1& addr1, const AddressType2& addr2)
{
  static_assert(std::is_same<typename AddressType1::elem_type,
      typename AddressType2::elem_type>::value == true, "Can't compare "
      "addresses of distinct types");

  assert(addr1.n_elem == addr2.n_elem);

  for (size_t i = 0; i < addr1.n_elem; i++)
  {
    if (addr1[i] < addr2[i])
      return -1;
    else if (addr2[i] < addr1[i])
      return 1;
  }

  return 0;
}

/**
 * Returns true if an address is contained between two other addresses.
 */
template<typename AddressType1, typename AddressType2, typename AddressType3>
bool Contains(const AddressType1& address,
              const AddressType2& loBound,
              const AddressType3& hiBound)
{
  return ((CompareAddresses(loBound, address) <= 0) &&
          (CompareAddresses(hiBound, address) >= 0));
}

} // namespace addr
} // namespace bound
} // namespace mlpack

#endif // MLPACK_CORE_TREE_ADDRESS_HPP
mlpack-2.2.5/src/mlpack/core/tree/ballbound.hpp000066400000000000000000000136721315013601400213700ustar00rootroot00000000000000/**
 * @file ballbound.hpp
 *
 * Bounds that are useful for binary space partitioning trees.
 * Interface to a ball bound that works in arbitrary metric spaces.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/ #ifndef MLPACK_CORE_TREE_BALLBOUND_HPP #define MLPACK_CORE_TREE_BALLBOUND_HPP #include #include #include "bound_traits.hpp" namespace mlpack { namespace bound { /** * Ball bound encloses a set of points at a specific distance (radius) from a * specific point (center). MetricType is the custom metric type that defaults * to the Euclidean (L2) distance. * * @tparam MetricType metric type used in the distance measure. * @tparam VecType Type of vector (arma::vec or arma::sp_vec or similar). */ template, typename VecType = arma::vec> class BallBound { public: //! The underlying data type. typedef typename VecType::elem_type ElemType; //! A public version of the vector type. typedef VecType Vec; private: //! The radius of the ball bound. ElemType radius; //! The center of the ball bound. VecType center; //! The metric used in this bound. MetricType* metric; /** * To know whether this object allocated memory to the metric member * variable. This will be true except in the copy constructor and the * overloaded assignment operator. We need this to know whether we should * delete the metric member variable in the destructor. */ bool ownsMetric; public: //! Empty Constructor. BallBound(); /** * Create the ball bound with the specified dimensionality. * * @param dimension Dimensionality of ball bound. */ BallBound(const size_t dimension); /** * Create the ball bound with the specified radius and center. * * @param radius Radius of ball bound. * @param center Center of ball bound. */ BallBound(const ElemType radius, const VecType& center); //! Copy constructor. To prevent memory leaks. BallBound(const BallBound& other); //! For the same reason as the copy constructor: to prevent memory leaks. BallBound& operator=(const BallBound& other); //! Move constructor: take possession of another bound. BallBound(BallBound&& other); //! Destructor to release allocated memory. ~BallBound(); //! Get the radius of the ball. ElemType Radius() const { return radius; } //! Modify the radius of the ball. ElemType& Radius() { return radius; } //! Get the center point of the ball. const VecType& Center() const { return center; } //! Modify the center point of the ball. VecType& Center() { return center; } //! Get the dimensionality of the ball. size_t Dim() const { return center.n_elem; } /** * Get the minimum width of the bound (this is same as the diameter). * For ball bounds, width along all dimensions remain same. */ ElemType MinWidth() const { return radius * 2.0; } //! Get the range in a certain dimension. math::RangeType operator[](const size_t i) const; /** * Determines if a point is within this bound. */ bool Contains(const VecType& point) const; /** * Place the center of BallBound into the given vector. * * @param center Vector which the centroid will be written to. */ void Center(VecType& center) const { center = this->center; } /** * Calculates minimum bound-to-point squared distance. */ template ElemType MinDistance(const OtherVecType& point, typename boost::enable_if>* = 0) const; /** * Calculates minimum bound-to-bound squared distance. */ ElemType MinDistance(const BallBound& other) const; /** * Computes maximum distance. */ template ElemType MaxDistance(const OtherVecType& point, typename boost::enable_if>* = 0) const; /** * Computes maximum distance. */ ElemType MaxDistance(const BallBound& other) const; /** * Calculates minimum and maximum bound-to-point distance. 
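 * The result is the interval \f$ [ \max(0, d - r), d + r ] \f$, where
 * \f$ d \f$ is the distance from the center to the point under the bound's
 * metric and \f$ r \f$ is the radius.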
   */
  template<typename OtherVecType>
  math::RangeType<ElemType> RangeDistance(
      const OtherVecType& other,
      typename boost::enable_if<IsVector<OtherVecType>>* = 0) const;

  /**
   * Calculates minimum and maximum bound-to-bound distance.
   *
   * Example: bound1.RangeDistance(other) returns the minimum and maximum
   * distance.
   */
  math::RangeType<ElemType> RangeDistance(const BallBound& other) const;

  /**
   * Expand the bound to include the given node.
   */
  const BallBound& operator|=(const BallBound& other);

  /**
   * Expand the bound to include the given point.  The centroid is
   * recalculated to be the center of all of the given points.
   *
   * @tparam MatType Type of matrix; could be arma::mat, arma::sp_mat, or a
   *     vector.
   * @param data Data points to add.
   */
  template<typename MatType>
  const BallBound& operator|=(const MatType& data);

  /**
   * Returns the diameter of the ballbound.
   */
  ElemType Diameter() const { return 2 * radius; }

  //! Returns the distance metric used in this bound.
  const MetricType& Metric() const { return *metric; }
  //! Modify the distance metric used in this bound.
  MetricType& Metric() { return *metric; }

  //! Serialize the bound.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int version);
};

//! A specialization of BoundTraits for this bound type.
template<typename MetricType, typename VecType>
struct BoundTraits<BallBound<MetricType, VecType>>
{
  //! These bounds are potentially loose in some dimensions.
  const static bool HasTightBounds = false;
};

} // namespace bound
} // namespace mlpack

#include "ballbound_impl.hpp"

#endif // MLPACK_CORE_TREE_BALLBOUND_HPP
mlpack-2.2.5/src/mlpack/core/tree/ballbound_impl.hpp000066400000000000000000000211341315013601400224030ustar00rootroot00000000000000/**
 * @file ballbound_impl.hpp
 *
 * Bounds that are useful for binary space partitioning trees.
 * Implementation of BallBound ball bound metric policy class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_BALLBOUND_IMPL_HPP
#define MLPACK_CORE_TREE_BALLBOUND_IMPL_HPP

// In case it hasn't been included already.
#include "ballbound.hpp"

#include <string>
#include <limits>

namespace mlpack {
namespace bound {

//! Empty Constructor.
template<typename MetricType, typename VecType>
BallBound<MetricType, VecType>::BallBound() :
    radius(std::numeric_limits<ElemType>::lowest()),
    metric(new MetricType()),
    ownsMetric(true)
{ /* Nothing to do. */ }

/**
 * Create the ball bound with the specified dimensionality.
 *
 * @param dimension Dimensionality of ball bound.
 */
template<typename MetricType, typename VecType>
BallBound<MetricType, VecType>::BallBound(const size_t dimension) :
    radius(std::numeric_limits<ElemType>::lowest()),
    center(dimension),
    metric(new MetricType()),
    ownsMetric(true)
{ /* Nothing to do. */ }

/**
 * Create the ball bound with the specified radius and center.
 *
 * @param radius Radius of ball bound.
 * @param center Center of ball bound.
 */
template<typename MetricType, typename VecType>
BallBound<MetricType, VecType>::BallBound(const ElemType radius,
                                          const VecType& center) :
    radius(radius),
    center(center),
    metric(new MetricType()),
    ownsMetric(true)
{ /* Nothing to do. */ }

//! Copy Constructor. To prevent memory leaks.
template<typename MetricType, typename VecType>
BallBound<MetricType, VecType>::BallBound(const BallBound& other) :
    radius(other.radius),
    center(other.center),
    metric(other.metric),
    ownsMetric(false)
{ /* Nothing to do. */ }

//! For the same reason as the copy constructor: to prevent memory leaks.
template<typename MetricType, typename VecType>
BallBound<MetricType, VecType>& BallBound<MetricType, VecType>::operator=(
    const BallBound& other)
{
  radius = other.radius;
  center = other.center;
  metric = other.metric;
  ownsMetric = false;
  return *this;
}

//! Move constructor.
template BallBound::BallBound(BallBound&& other) : radius(other.radius), center(other.center), metric(other.metric), ownsMetric(other.ownsMetric) { // Fix the other bound. other.radius = 0.0; other.center = VecType(); other.metric = NULL; other.ownsMetric = false; } //! Destructor to release allocated memory. template BallBound::~BallBound() { if (ownsMetric) delete metric; } //! Get the range in a certain dimension. template math::RangeType::ElemType> BallBound::operator[](const size_t i) const { if (radius < 0) return math::Range(); else return math::Range(center[i] - radius, center[i] + radius); } /** * Determines if a point is within the bound. */ template bool BallBound::Contains(const VecType& point) const { if (radius < 0) return false; else return metric->Evaluate(center, point) <= radius; } /** * Calculates minimum bound-to-point squared distance. */ template template typename BallBound::ElemType BallBound::MinDistance( const OtherVecType& point, typename boost::enable_if>* /* junk */) const { if (radius < 0) return std::numeric_limits::max(); else return math::ClampNonNegative(metric->Evaluate(point, center) - radius); } /** * Calculates minimum bound-to-bound squared distance. */ template typename BallBound::ElemType BallBound::MinDistance(const BallBound& other) const { if (radius < 0) return std::numeric_limits::max(); else { const ElemType delta = metric->Evaluate(center, other.center) - radius - other.radius; return math::ClampNonNegative(delta); } } /** * Computes maximum distance. */ template template typename BallBound::ElemType BallBound::MaxDistance( const OtherVecType& point, typename boost::enable_if >* /* junk */) const { if (radius < 0) return std::numeric_limits::max(); else return metric->Evaluate(point, center) + radius; } /** * Computes maximum distance. */ template typename BallBound::ElemType BallBound::MaxDistance(const BallBound& other) const { if (radius < 0) return std::numeric_limits::max(); else return metric->Evaluate(other.center, center) + radius + other.radius; } /** * Calculates minimum and maximum bound-to-bound squared distance. * * Example: bound1.MinDistanceSq(other) for minimum squared distance. */ template template math::RangeType::ElemType> BallBound::RangeDistance( const OtherVecType& point, typename boost::enable_if >* /* junk */) const { if (radius < 0) return math::Range(std::numeric_limits::max(), std::numeric_limits::max()); else { const ElemType dist = metric->Evaluate(center, point); return math::Range(math::ClampNonNegative(dist - radius), dist + radius); } } template math::RangeType::ElemType> BallBound::RangeDistance( const BallBound& other) const { if (radius < 0) return math::Range(std::numeric_limits::max(), std::numeric_limits::max()); else { const ElemType dist = metric->Evaluate(center, other.center); const ElemType sumradius = radius + other.radius; return math::Range(math::ClampNonNegative(dist - sumradius), dist + sumradius); } } /** * Expand the bound to include the given bound. * template const BallBound& BallBound::operator|=( const BallBound& other) { double dist = metric->Evaluate(center, other); // Now expand the radius as necessary. if (dist > radius) radius = dist; return *this; }*/ /** * Expand the bound to include the given point. Algorithm adapted from * Jack Ritter, "An Efficient Bounding Sphere" in Graphics Gems (1990). * The difference lies in the way we initialize the ball bound. The way we * expand the bound is same. 
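 *
 * A short usage sketch (data values are arbitrary):
 *
 * @code
 * arma::mat data(3, 100, arma::fill::randu);
 *
 * BallBound<> b(3); // A 3-dimensional ball bound with the default metric.
 * b |= data;        // Grow the ball until it encloses every column of data.
 * @endcode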
*/ template template const BallBound& BallBound::operator|=(const MatType& data) { if (radius < 0) { center = data.col(0); radius = 0; } // Now iteratively add points. for (size_t i = 0; i < data.n_cols; ++i) { const ElemType dist = metric->Evaluate(center, (VecType) data.col(i)); // See if the new point lies outside the bound. if (dist > radius) { // Move towards the new point and increase the radius just enough to // accommodate the new point. const VecType diff = data.col(i) - center; center += ((dist - radius) / (2 * dist)) * diff; radius = 0.5 * (dist + radius); } } return *this; } //! Serialize the BallBound. template template void BallBound::Serialize( Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(radius, "radius"); ar & data::CreateNVP(center, "center"); if (Archive::is_loading::value) { // If we're loading, delete the local metric since we'll have a new one. if (ownsMetric) delete metric; } ar & data::CreateNVP(metric, "metric"); ar & data::CreateNVP(ownsMetric, "ownsMetric"); } } // namespace bound } // namespace mlpack #endif // MLPACK_CORE_TREE_DBALLBOUND_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree.hpp000066400000000000000000000025321315013601400230770ustar00rootroot00000000000000/** * @file binary_space_tree.hpp * @author Ryan Curtin * * Include all the necessary files to use the BinarySpaceTree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_HPP #include #include "bounds.hpp" #include "binary_space_tree/midpoint_split.hpp" #include "binary_space_tree/mean_split.hpp" #include "binary_space_tree/vantage_point_split.hpp" #include "binary_space_tree/rp_tree_max_split.hpp" #include "binary_space_tree/rp_tree_mean_split.hpp" #include "binary_space_tree/ub_tree_split.hpp" #include "binary_space_tree/binary_space_tree.hpp" #include "binary_space_tree/single_tree_traverser.hpp" #include "binary_space_tree/single_tree_traverser_impl.hpp" #include "binary_space_tree/dual_tree_traverser.hpp" #include "binary_space_tree/dual_tree_traverser_impl.hpp" #include "binary_space_tree/breadth_first_dual_tree_traverser.hpp" #include "binary_space_tree/breadth_first_dual_tree_traverser_impl.hpp" #include "binary_space_tree/traits.hpp" #include "binary_space_tree/typedef.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/000077500000000000000000000000001315013601400223645ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/binary_space_tree.hpp000066400000000000000000000522621315013601400265620ustar00rootroot00000000000000/** * @file binary_space_tree.hpp * * Definition of generalized binary space partitioning tree (BinarySpaceTree). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_BINARY_SPACE_TREE_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_BINARY_SPACE_TREE_HPP #include #include "../statistic.hpp" #include "midpoint_split.hpp" namespace mlpack { namespace tree /** Trees and tree-building procedures. 
*/ { /** * A binary space partitioning tree, such as a KD-tree or a ball tree. Once the * bound and type of dataset is defined, the tree will construct itself. Call * the constructor with the dataset to build the tree on, and the entire tree * will be built. * * This particular tree does not allow growth, so you cannot add or delete nodes * from it. If you need to add or delete a node, the better procedure is to * rebuild the tree entirely. * * This tree does take one runtime parameter in the constructor, which is the * max leaf size to be used. * * @tparam MetricType The metric used for tree-building. The BoundType may * place restrictions on the metrics that can be used. * @tparam StatisticType Extra data contained in the node. See statistic.hpp * for the necessary skeleton interface. * @tparam MatType The dataset class. * @tparam BoundType The bound used for each node. HRectBound, the default, * requires that an LMetric<> is used for MetricType (so, EuclideanDistance, * ManhattanDistance, etc.). * @tparam SplitType The class that partitions the dataset/points at a * particular node into two parts. Its definition decides the way this split * is done. */ template class BoundType = bound::HRectBound, template class SplitType = MidpointSplit> class BinarySpaceTree { public: //! So other classes can use TreeType::Mat. typedef MatType Mat; //! The type of element held in MatType. typedef typename MatType::elem_type ElemType; typedef SplitType, MatType> Split; private: //! The left child node. BinarySpaceTree* left; //! The right child node. BinarySpaceTree* right; //! The parent node (NULL if this is the root of the tree). BinarySpaceTree* parent; //! The index of the first point in the dataset contained in this node (and //! its children). size_t begin; //! The number of points of the dataset contained in this node (and its //! children). size_t count; //! The bound object for this node. BoundType bound; //! Any extra data contained in the node. StatisticType stat; //! The distance from the centroid of this node to the centroid of the parent. ElemType parentDistance; //! The worst possible distance to the furthest descendant, cached to speed //! things up. ElemType furthestDescendantDistance; //! The minimum distance from the center to any edge of the bound. ElemType minimumBoundDistance; //! The dataset. If we are the root of the tree, we own the dataset and must //! delete it. MatType* dataset; public: //! A single-tree traverser for binary space trees; see //! single_tree_traverser.hpp for implementation. template class SingleTreeTraverser; //! A dual-tree traverser for binary space trees; see dual_tree_traverser.hpp. template class DualTreeTraverser; template class BreadthFirstDualTreeTraverser; /** * Construct this as the root node of a binary space tree using the given * dataset. This will copy the input matrix; if you don't want this, consider * using the constructor that takes an rvalue reference and use std::move(). * * @param data Dataset to create tree from. This will be copied! * @param maxLeafSize Size of each leaf in the tree. */ BinarySpaceTree(const MatType& data, const size_t maxLeafSize = 20); /** * Construct this as the root node of a binary space tree using the given * dataset. This will copy the input matrix and modify its ordering; a * mapping of the old point indices to the new point indices is filled. If * you don't want the matrix to be copied, consider using the constructor that * takes an rvalue reference and use std::move(). 
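 *
 * For example (a sketch; default template parameters assumed, so this builds
 * a kd-tree-style tree with EmptyStatistic and midpoint splits):
 *
 * @code
 * arma::mat data(5, 1000, arma::fill::randu);
 * std::vector<size_t> oldFromNew;
 *
 * BinarySpaceTree<metric::EuclideanDistance> tree(data, oldFromNew);
 * // oldFromNew[i] is the original index of the point now at column i.
 * @endcode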
* * @param data Dataset to create tree from. This will be copied! * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param maxLeafSize Size of each leaf in the tree. */ BinarySpaceTree(const MatType& data, std::vector& oldFromNew, const size_t maxLeafSize = 20); /** * Construct this as the root node of a binary space tree using the given * dataset. This will copy the input matrix and modify its ordering; a * mapping of the old point indices to the new point indices is filled, as * well as a mapping of the new point indices to the old point indices. If * you don't want the matrix to be copied, consider using the constructor that * takes an rvalue reference and use std::move(). * * @param data Dataset to create tree from. This will be copied! * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param newFromOld Vector which will be filled with the new positions for * each old point. * @param maxLeafSize Size of each leaf in the tree. */ BinarySpaceTree(const MatType& data, std::vector& oldFromNew, std::vector& newFromOld, const size_t maxLeafSize = 20); /** * Construct this as the root node of a binary space tree using the given * dataset. This will take ownership of the data matrix; if you don't want * this, consider using the constructor that takes a const reference to a * dataset. * * @param data Dataset to create tree from. * @param maxLeafSize Size of each leaf in the tree. */ BinarySpaceTree(MatType&& data, const size_t maxLeafSize = 20); /** * Construct this as the root node of a binary space tree using the given * dataset. This will take ownership of the data matrix; a mapping of the * old point indices to the new point indices is filled. If you don't want * the matrix to have its ownership taken, consider using the constructor that * takes a const reference to a dataset. * * @param data Dataset to create tree from. * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param maxLeafSize Size of each leaf in the tree. */ BinarySpaceTree(MatType&& data, std::vector& oldFromNew, const size_t maxLeafSize = 20); /** * Construct this as the root node of a binary space tree using the given * dataset. This will take ownership of the data matrix; a mapping of the old * point indices to the new point indices is filled, as well as a mapping of * the new point indices to the old point indices. If you don't want the * matrix to have its ownership taken, consider using the constructor that * takes a const reference to a dataset. * * @param data Dataset to create tree from. * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param newFromOld Vector which will be filled with the new positions for * each old point. * @param maxLeafSize Size of each leaf in the tree. */ BinarySpaceTree(MatType&& data, std::vector& oldFromNew, std::vector& newFromOld, const size_t maxLeafSize = 20); /** * Construct this node as a child of the given parent, starting at column * begin and using count points. The ordering of that subset of points in the * parent's data matrix will be modified! This is used for recursive * tree-building by the other constructors which don't specify point indices. * * @param parent Parent of this node. Its dataset will be modified! * @param begin Index of point to start tree construction with. * @param count Number of points to use to construct tree. * @param splitter Instantiated node splitter object. 
* @param maxLeafSize Size of each leaf in the tree. */ BinarySpaceTree(BinarySpaceTree* parent, const size_t begin, const size_t count, SplitType, MatType>& splitter, const size_t maxLeafSize = 20); /** * Construct this node as a child of the given parent, starting at column * begin and using count points. The ordering of that subset of points in the * parent's data matrix will be modified! This is used for recursive * tree-building by the other constructors which don't specify point indices. * * A mapping of the old point indices to the new point indices is filled, but * it is expected that the vector is already allocated with size greater than * or equal to (begin + count), and if that is not true, invalid memory reads * (and writes) will occur. * * @param parent Parent of this node. Its dataset will be modified! * @param begin Index of point to start tree construction with. * @param count Number of points to use to construct tree. * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param splitter Instantiated node splitter object. * @param maxLeafSize Size of each leaf in the tree. */ BinarySpaceTree(BinarySpaceTree* parent, const size_t begin, const size_t count, std::vector& oldFromNew, SplitType, MatType>& splitter, const size_t maxLeafSize = 20); /** * Construct this node as a child of the given parent, starting at column * begin and using count points. The ordering of that subset of points in the * parent's data matrix will be modified! This is used for recursive * tree-building by the other constructors which don't specify point indices. * * A mapping of the old point indices to the new point indices is filled, as * well as a mapping of the new point indices to the old point indices. It is * expected that the vector is already allocated with size greater than or * equal to (begin_in + count_in), and if that is not true, invalid memory * reads (and writes) will occur. * * @param parent Parent of this node. Its dataset will be modified! * @param begin Index of point to start tree construction with. * @param count Number of points to use to construct tree. * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param newFromOld Vector which will be filled with the new positions for * each old point. * @param maxLeafSize Size of each leaf in the tree. */ BinarySpaceTree(BinarySpaceTree* parent, const size_t begin, const size_t count, std::vector& oldFromNew, std::vector& newFromOld, SplitType, MatType>& splitter, const size_t maxLeafSize = 20); /** * Create a binary space tree by copying the other tree. Be careful! This * can take a long time and use a lot of memory. * * @param other Tree to be replicated. */ BinarySpaceTree(const BinarySpaceTree& other); /** * Move constructor for a BinarySpaceTree; possess all the members of the * given tree. */ BinarySpaceTree(BinarySpaceTree&& other); /** * Initialize the tree from a boost::serialization archive. * * @param ar Archive to load tree from. Must be an iarchive, not an oarchive. */ template BinarySpaceTree( Archive& ar, const typename boost::enable_if::type* = 0); /** * Deletes this node, deallocating the memory for the children and calling * their destructors in turn. This will invalidate any pointers or references * to any nodes which are children of this one. */ ~BinarySpaceTree(); //! Return the bound object for this node. const BoundType& Bound() const { return bound; } //! Return the bound object for this node. 
BoundType& Bound() { return bound; } //! Return the statistic object for this node. const StatisticType& Stat() const { return stat; } //! Return the statistic object for this node. StatisticType& Stat() { return stat; } //! Return whether or not this node is a leaf (true if it has no children). bool IsLeaf() const; //! Gets the left child of this node. BinarySpaceTree* Left() const { return left; } //! Modify the left child of this node. BinarySpaceTree*& Left() { return left; } //! Gets the right child of this node. BinarySpaceTree* Right() const { return right; } //! Modify the right child of this node. BinarySpaceTree*& Right() { return right; } //! Gets the parent of this node. BinarySpaceTree* Parent() const { return parent; } //! Modify the parent of this node. BinarySpaceTree*& Parent() { return parent; } //! Get the dataset which the tree is built on. const MatType& Dataset() const { return *dataset; } //! Modify the dataset which the tree is built on. Be careful! MatType& Dataset() { return *dataset; } //! Get the metric that the tree uses. MetricType Metric() const { return MetricType(); } //! Return the number of children in this node. size_t NumChildren() const; /** * Return the index of the nearest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template size_t GetNearestChild( const VecType& point, typename boost::enable_if >::type* = 0); /** * Return the index of the furthest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template size_t GetFurthestChild( const VecType& point, typename boost::enable_if >::type* = 0); /** * Return the index of the nearest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ size_t GetNearestChild(const BinarySpaceTree& queryNode); /** * Return the index of the furthest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ size_t GetFurthestChild(const BinarySpaceTree& queryNode); /** * Return the furthest distance to a point held in this node. If this is not * a leaf node, then the distance is 0 because the node holds no points. */ ElemType FurthestPointDistance() const; /** * Return the furthest possible descendant distance. This returns the maximum * distance from the centroid to the edge of the bound and not the empirical * quantity which is the actual furthest descendant distance. So the actual * furthest descendant distance may be less than what this method returns (but * it will never be greater than this). */ ElemType FurthestDescendantDistance() const; //! Return the minimum distance from the center of the node to any bound edge. ElemType MinimumBoundDistance() const; //! Return the distance from the center of this node to the center of the //! parent node. ElemType ParentDistance() const { return parentDistance; } //! Modify the distance from the center of this node to the center of the //! parent node. ElemType& ParentDistance() { return parentDistance; } /** * Return the specified child (0 will be left, 1 will be right). If the index * is greater than 1, this will return the right child. * * @param child Index of child to return. */ BinarySpaceTree& Child(const size_t child) const; BinarySpaceTree*& ChildPtr(const size_t child) { return (child == 0) ? left : right; } //! Return the number of points in this node (0 if not a leaf). 
size_t NumPoints() const; /** * Return the number of descendants of this node. For a non-leaf in a binary * space tree, this is the number of points at the descendant leaves. For a * leaf, this is the number of points in the leaf. */ size_t NumDescendants() const; /** * Return the index (with reference to the dataset) of a particular descendant * of this node. The index should be greater than zero but less than the * number of descendants. * * @param index Index of the descendant. */ size_t Descendant(const size_t index) const; /** * Return the index (with reference to the dataset) of a particular point in * this node. This will happily return invalid indices if the given index is * greater than the number of points in this node (obtained with NumPoints()) * -- be careful. * * @param index Index of point for which a dataset index is wanted. */ size_t Point(const size_t index) const; //! Return the minimum distance to another node. ElemType MinDistance(const BinarySpaceTree& other) const { return bound.MinDistance(other.Bound()); } //! Return the maximum distance to another node. ElemType MaxDistance(const BinarySpaceTree& other) const { return bound.MaxDistance(other.Bound()); } //! Return the minimum and maximum distance to another node. math::RangeType RangeDistance(const BinarySpaceTree& other) const { return bound.RangeDistance(other.Bound()); } //! Return the minimum distance to another point. template ElemType MinDistance(const VecType& point, typename boost::enable_if >::type* = 0) const { return bound.MinDistance(point); } //! Return the maximum distance to another point. template ElemType MaxDistance(const VecType& point, typename boost::enable_if >::type* = 0) const { return bound.MaxDistance(point); } //! Return the minimum and maximum distance to another point. template math::RangeType RangeDistance(const VecType& point, typename boost::enable_if >::type* = 0) const { return bound.RangeDistance(point); } //! Return the index of the beginning point of this subset. size_t Begin() const { return begin; } //! Modify the index of the beginning point of this subset. size_t& Begin() { return begin; } //! Return the number of points in this subset. size_t Count() const { return count; } //! Modify the number of points in this subset. size_t& Count() { return count; } //! Store the center of the bounding region in the given vector. void Center(arma::vec& center) const { bound.Center(center); } private: /** * Splits the current node, assigning its left and right children recursively. * * @param maxLeafSize Maximum number of points held in a leaf. * @param splitter Instantiated SplitType object. */ void SplitNode(const size_t maxLeafSize, SplitType, MatType>& splitter); /** * Splits the current node, assigning its left and right children recursively. * Also returns a list of the changed indices. * * @param oldFromNew Vector holding permuted indices. * @param maxLeafSize Maximum number of points held in a leaf. * @param splitter Instantiated SplitType object. */ void SplitNode(std::vector& oldFromNew, const size_t maxLeafSize, SplitType, MatType>& splitter); /** * Update the bound of the current node. This method does not take into * account bound-specific properties. * * @param boundToUpdate The bound to update. */ template void UpdateBound(BoundType2& boundToUpdate); /** * Update the bound of the current node. This method is designed for * HollowBallBound only. * * @param boundToUpdate The bound to update. 
*/ void UpdateBound(bound::HollowBallBound& boundToUpdate); protected: /** * A default constructor. This is meant to only be used with * boost::serialization, which is allowed with the friend declaration below. * This does not return a valid tree! The method must be protected, so that * the serialization shim can work with the default constructor. */ BinarySpaceTree(); //! Friend access is given for the default constructor. friend class boost::serialization::access; public: /** * Serialize the tree. */ template void Serialize(Archive& ar, const unsigned int version); }; } // namespace tree } // namespace mlpack // Include implementation. #include "binary_space_tree_impl.hpp" // Include everything else, if necessary. #include "../binary_space_tree.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/binary_space_tree_impl.hpp000066400000000000000000001016331315013601400276000ustar00rootroot00000000000000/** * @file binary_space_tree_impl.hpp * * Implementation of generalized space partitioning tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_BINARY_SPACE_TREE_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_BINARY_SPACE_TREE_IMPL_HPP // In case it wasn't included already for some reason. #include "binary_space_tree.hpp" #include #include #include namespace mlpack { namespace tree { // Each of these overloads is kept as a separate function to keep the overhead // from the two std::vectors out, if possible. template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree( const MatType& data, const size_t maxLeafSize) : left(NULL), right(NULL), parent(NULL), begin(0), /* This root node starts at index 0, */ count(data.n_cols), /* and spans all of the dataset. */ bound(data.n_rows), parentDistance(0), // Parent distance for the root is 0: it has no parent. dataset(new MatType(data)) // Copies the dataset. { // Do the actual splitting of this node. SplitType, MatType> splitter; SplitNode(maxLeafSize, splitter); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); } template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree( const MatType& data, std::vector& oldFromNew, const size_t maxLeafSize) : left(NULL), right(NULL), parent(NULL), begin(0), count(data.n_cols), bound(data.n_rows), parentDistance(0), // Parent distance for the root is 0: it has no parent. dataset(new MatType(data)) // Copies the dataset. { // Initialize oldFromNew correctly. oldFromNew.resize(data.n_cols); for (size_t i = 0; i < data.n_cols; i++) oldFromNew[i] = i; // Fill with unharmed indices. // Now do the actual splitting. SplitType, MatType> splitter; SplitNode(oldFromNew, maxLeafSize, splitter); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); } template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree( const MatType& data, std::vector& oldFromNew, std::vector& newFromOld, const size_t maxLeafSize) : left(NULL), right(NULL), parent(NULL), begin(0), count(data.n_cols), bound(data.n_rows), parentDistance(0), // Parent distance for the root is 0: it has no parent. dataset(new MatType(data)) // Copies the dataset. { // Initialize the oldFromNew vector correctly. 
oldFromNew.resize(data.n_cols); for (size_t i = 0; i < data.n_cols; i++) oldFromNew[i] = i; // Fill with unharmed indices. // Now do the actual splitting. SplitType, MatType> splitter; SplitNode(oldFromNew, maxLeafSize, splitter); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); // Map the newFromOld indices correctly. newFromOld.resize(data.n_cols); for (size_t i = 0; i < data.n_cols; i++) newFromOld[oldFromNew[i]] = i; } template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree(MatType&& data, const size_t maxLeafSize) : left(NULL), right(NULL), parent(NULL), begin(0), count(data.n_cols), bound(data.n_rows), parentDistance(0), // Parent distance for the root is 0: it has no parent. dataset(new MatType(std::move(data))) { // Do the actual splitting of this node. SplitType, MatType> splitter; SplitNode(maxLeafSize, splitter); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); } template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree( MatType&& data, std::vector& oldFromNew, const size_t maxLeafSize) : left(NULL), right(NULL), parent(NULL), begin(0), count(data.n_cols), bound(data.n_rows), parentDistance(0), // Parent distance for the root is 0: it has no parent. dataset(new MatType(std::move(data))) { // Initialize oldFromNew correctly. oldFromNew.resize(dataset->n_cols); for (size_t i = 0; i < dataset->n_cols; i++) oldFromNew[i] = i; // Fill with unharmed indices. // Now do the actual splitting. SplitType, MatType> splitter; SplitNode(oldFromNew, maxLeafSize, splitter); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); } template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree( MatType&& data, std::vector& oldFromNew, std::vector& newFromOld, const size_t maxLeafSize) : left(NULL), right(NULL), parent(NULL), begin(0), count(data.n_cols), bound(data.n_rows), parentDistance(0), // Parent distance for the root is 0: it has no parent. dataset(new MatType(std::move(data))) { // Initialize the oldFromNew vector correctly. oldFromNew.resize(dataset->n_cols); for (size_t i = 0; i < dataset->n_cols; i++) oldFromNew[i] = i; // Fill with unharmed indices. // Now do the actual splitting. SplitType, MatType> splitter; SplitNode(oldFromNew, maxLeafSize, splitter); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); // Map the newFromOld indices correctly. newFromOld.resize(dataset->n_cols); for (size_t i = 0; i < dataset->n_cols; i++) newFromOld[oldFromNew[i]] = i; } template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree( BinarySpaceTree* parent, const size_t begin, const size_t count, SplitType, MatType>& splitter, const size_t maxLeafSize) : left(NULL), right(NULL), parent(parent), begin(begin), count(count), bound(parent->Dataset().n_rows), dataset(&parent->Dataset()) // Point to the parent's dataset. { // Perform the actual splitting. SplitNode(maxLeafSize, splitter); // Create the statistic depending on if we are a leaf or not. 
stat = StatisticType(*this); } template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree( BinarySpaceTree* parent, const size_t begin, const size_t count, std::vector& oldFromNew, SplitType, MatType>& splitter, const size_t maxLeafSize) : left(NULL), right(NULL), parent(parent), begin(begin), count(count), bound(parent->Dataset().n_rows), dataset(&parent->Dataset()) { // Hopefully the vector is initialized correctly! We can't check that // entirely but we can do a minor sanity check. assert(oldFromNew.size() == dataset->n_cols); // Perform the actual splitting. SplitNode(oldFromNew, maxLeafSize, splitter); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); } template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree( BinarySpaceTree* parent, const size_t begin, const size_t count, std::vector& oldFromNew, std::vector& newFromOld, SplitType, MatType>& splitter, const size_t maxLeafSize) : left(NULL), right(NULL), parent(parent), begin(begin), count(count), bound(parent->Dataset()->n_rows), dataset(&parent->Dataset()) { // Hopefully the vector is initialized correctly! We can't check that // entirely but we can do a minor sanity check. Log::Assert(oldFromNew.size() == dataset->n_cols); // Perform the actual splitting. SplitNode(oldFromNew, maxLeafSize, splitter); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); // Map the newFromOld indices correctly. newFromOld.resize(dataset->n_cols); for (size_t i = 0; i < dataset->n_cols; i++) newFromOld[oldFromNew[i]] = i; } /** * Create a binary space tree by copying the other tree. Be careful! This can * take a long time and use a lot of memory. */ template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree( const BinarySpaceTree& other) : left(NULL), right(NULL), parent(other.parent), begin(other.begin), count(other.count), bound(other.bound), stat(other.stat), parentDistance(other.parentDistance), furthestDescendantDistance(other.furthestDescendantDistance), // Copy matrix, but only if we are the root. dataset((other.parent == NULL) ? new MatType(*other.dataset) : NULL) { // Create left and right children (if any). if (other.Left()) { left = new BinarySpaceTree(*other.Left()); left->Parent() = this; // Set parent to this, not other tree. } if (other.Right()) { right = new BinarySpaceTree(*other.Right()); right->Parent() = this; // Set parent to this, not other tree. } // Propagate matrix, but only if we are the root. if (parent == NULL) { std::queue queue; if (left) queue.push(left); if (right) queue.push(right); while (!queue.empty()) { BinarySpaceTree* node = queue.front(); queue.pop(); node->dataset = dataset; if (node->left) queue.push(node->left); if (node->right) queue.push(node->right); } } } /** * Move constructor. */ template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree(BinarySpaceTree&& other) : left(other.left), right(other.right), parent(other.parent), begin(other.begin), count(other.count), bound(std::move(other.bound)), stat(std::move(other.stat)), parentDistance(other.parentDistance), furthestDescendantDistance(other.furthestDescendantDistance), minimumBoundDistance(other.minimumBoundDistance), dataset(other.dataset) { // Now we are a clone of the other tree. But we must also clear the other // tree's contents, so it doesn't delete anything when it is destructed. 
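  // After these resets, 'other' is an empty but valid shell: it may safely be
  // destructed or assigned to, but it no longer represents a tree.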
other.left = NULL; other.right = NULL; other.begin = 0; other.count = 0; other.parentDistance = 0.0; other.furthestDescendantDistance = 0.0; other.minimumBoundDistance = 0.0; other.dataset = NULL; //Set new parent. if (left) left->parent = this; if (right) right->parent = this; } /** * Initialize the tree from an archive. */ template class BoundType, template class SplitType> template BinarySpaceTree:: BinarySpaceTree( Archive& ar, const typename boost::enable_if::type*) : BinarySpaceTree() // Create an empty BinarySpaceTree. { // We've delegated to the constructor which gives us an empty tree, and now we // can serialize from it. ar >> data::CreateNVP(*this, "tree"); } /** * Deletes this node, deallocating the memory for the children and calling their * destructors in turn. This will invalidate any pointers or references to any * nodes which are children of this one. */ template class BoundType, template class SplitType> BinarySpaceTree:: ~BinarySpaceTree() { delete left; delete right; // If we're the root, delete the matrix. if (!parent) delete dataset; } template class BoundType, template class SplitType> inline bool BinarySpaceTree::IsLeaf() const { return !left; } /** * Returns the number of children in this node. */ template class BoundType, template class SplitType> inline size_t BinarySpaceTree::NumChildren() const { if (left && right) return 2; if (left) return 1; return 0; } /** * Return the index of the nearest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template class BoundType, template class SplitType> template size_t BinarySpaceTree::GetNearestChild( const VecType& point, typename boost::enable_if >::type*) { if (IsLeaf() || !left || !right) return 0; if (left->MinDistance(point) <= right->MinDistance(point)) return 0; return 1; } /** * Return the index of the furthest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template class BoundType, template class SplitType> template size_t BinarySpaceTree::GetFurthestChild( const VecType& point, typename boost::enable_if >::type*) { if (IsLeaf() || !left || !right) return 0; if (left->MaxDistance(point) > right->MaxDistance(point)) return 0; return 1; } /** * Return the index of the nearest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ template class BoundType, template class SplitType> size_t BinarySpaceTree::GetNearestChild(const BinarySpaceTree& queryNode) { if (IsLeaf() || !left || !right) return 0; ElemType leftDist = left->MinDistance(queryNode); ElemType rightDist = right->MinDistance(queryNode); if (leftDist < rightDist) return 0; if (rightDist < leftDist) return 1; return NumChildren(); } /** * Return the index of the furthest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ template class BoundType, template class SplitType> size_t BinarySpaceTree::GetFurthestChild(const BinarySpaceTree& queryNode) { if (IsLeaf() || !left || !right) return 0; ElemType leftDist = left->MaxDistance(queryNode); ElemType rightDist = right->MaxDistance(queryNode); if (leftDist > rightDist) return 0; if (rightDist > leftDist) return 1; return NumChildren(); } /** * Return a bound on the furthest point in the node from the center. This * returns 0 unless the node is a leaf. 
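 * For a leaf, half the diameter of the node's bound is returned, which upper
 * bounds the distance from the bound's center to any point held in the leaf.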
*/ template class BoundType, template class SplitType> inline typename BinarySpaceTree::ElemType BinarySpaceTree::FurthestPointDistance() const { if (!IsLeaf()) return 0.0; // Otherwise return the distance from the center to a corner of the bound. return 0.5 * bound.Diameter(); } /** * Return the furthest possible descendant distance. This returns the maximum * distance from the center to the edge of the bound and not the empirical * quantity which is the actual furthest descendant distance. So the actual * furthest descendant distance may be less than what this method returns (but * it will never be greater than this). */ template class BoundType, template class SplitType> inline typename BinarySpaceTree::ElemType BinarySpaceTree::FurthestDescendantDistance() const { return furthestDescendantDistance; } //! Return the minimum distance from the center to any bound edge. template class BoundType, template class SplitType> inline typename BinarySpaceTree::ElemType BinarySpaceTree::MinimumBoundDistance() const { return bound.MinWidth() / 2.0; } /** * Return the specified child. */ template class BoundType, template class SplitType> inline BinarySpaceTree& BinarySpaceTree::Child(const size_t child) const { if (child == 0) return *left; else return *right; } /** * Return the number of points contained in this node. */ template class BoundType, template class SplitType> inline size_t BinarySpaceTree::NumPoints() const { if (left) return 0; return count; } /** * Return the number of descendants contained in the node. */ template class BoundType, template class SplitType> inline size_t BinarySpaceTree::NumDescendants() const { return count; } /** * Return the index of a particular descendant contained in this node. */ template class BoundType, template class SplitType> inline size_t BinarySpaceTree::Descendant(const size_t index) const { return (begin + index); } /** * Return the index of a particular point contained in this node. */ template class BoundType, template class SplitType> inline size_t BinarySpaceTree::Point(const size_t index) const { return (begin + index); } template class BoundType, template class SplitType> void BinarySpaceTree:: SplitNode(const size_t maxLeafSize, SplitType, MatType>& splitter) { // We need to expand the bounds of this node properly. UpdateBound(bound); // Calculate the furthest descendant distance. furthestDescendantDistance = 0.5 * bound.Diameter(); // Now, check if we need to split at all. if (count <= maxLeafSize) return; // We can't split this. // splitCol denotes the two partitions of the dataset after the split. The // points on its left go to the left child and the others go to the right // child. size_t splitCol; // Find the partition of the node. This method does not perform the split. typename Split::SplitInfo splitInfo; const bool split = splitter.SplitNode(bound, *dataset, begin, count, splitInfo); // The node may not be always split. For instance, if all the points are the // same, we can't split them. if (!split) return; // Perform the actual splitting. This will order the dataset such that // points that belong to the left subtree are on the left of splitCol, and // points from the right subtree are on the right side of splitCol. splitCol = splitter.PerformSplit(*dataset, begin, count, splitInfo); assert(splitCol > begin); assert(splitCol < begin + count); // Now that we know the split column, we will recursively split the children // by calling their constructors (which perform this splitting process). 
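  // The left child receives the columns [begin, splitCol) and the right child
  // receives [splitCol, begin + count); together they cover this node exactly.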
left = new BinarySpaceTree(this, begin, splitCol - begin, splitter, maxLeafSize); right = new BinarySpaceTree(this, splitCol, begin + count - splitCol, splitter, maxLeafSize); // Calculate parent distances for those two nodes. arma::vec center, leftCenter, rightCenter; Center(center); left->Center(leftCenter); right->Center(rightCenter); const ElemType leftParentDistance = MetricType::Evaluate(center, leftCenter); const ElemType rightParentDistance = MetricType::Evaluate(center, rightCenter); left->ParentDistance() = leftParentDistance; right->ParentDistance() = rightParentDistance; } template class BoundType, template class SplitType> void BinarySpaceTree:: SplitNode(std::vector& oldFromNew, const size_t maxLeafSize, SplitType, MatType>& splitter) { // We need to expand the bounds of this node properly. UpdateBound(bound); // Calculate the furthest descendant distance. furthestDescendantDistance = 0.5 * bound.Diameter(); // First, check if we need to split at all. if (count <= maxLeafSize) return; // We can't split this. // splitCol denotes the two partitions of the dataset after the split. The // points on its left go to the left child and the others go to the right // child. size_t splitCol; // Find the partition of the node. This method does not perform the split. typename Split::SplitInfo splitInfo; const bool split = splitter.SplitNode(bound, *dataset, begin, count, splitInfo); // The node may not be always split. For instance, if all the points are the // same, we can't split them. if (!split) return; // Perform the actual splitting. This will order the dataset such that // points that belong to the left subtree are on the left of splitCol, and // points from the right subtree are on the right side of splitCol. splitCol = splitter.PerformSplit(*dataset, begin, count, splitInfo, oldFromNew); assert(splitCol > begin); assert(splitCol < begin + count); // Now that we know the split column, we will recursively split the children // by calling their constructors (which perform this splitting process). left = new BinarySpaceTree(this, begin, splitCol - begin, oldFromNew, splitter, maxLeafSize); right = new BinarySpaceTree(this, splitCol, begin + count - splitCol, oldFromNew, splitter, maxLeafSize); // Calculate parent distances for those two nodes. arma::vec center, leftCenter, rightCenter; Center(center); left->Center(leftCenter); right->Center(rightCenter); const ElemType leftParentDistance = MetricType::Evaluate(center, leftCenter); const ElemType rightParentDistance = MetricType::Evaluate(center, rightCenter); left->ParentDistance() = leftParentDistance; right->ParentDistance() = rightParentDistance; } template class BoundType, template class SplitType> template void BinarySpaceTree:: UpdateBound(BoundType2& boundToUpdate) { if (count > 0) boundToUpdate |= dataset->cols(begin, begin + count - 1); } template class BoundType, template class SplitType> void BinarySpaceTree:: UpdateBound(bound::HollowBallBound& boundToUpdate) { if (!parent) { if (count > 0) boundToUpdate |= dataset->cols(begin, begin + count - 1); return; } if (parent->left != NULL && parent->left != this) { boundToUpdate.HollowCenter() = parent->left->bound.Center(); boundToUpdate.InnerRadius() = std::numeric_limits::max(); } if (count > 0) boundToUpdate |= dataset->cols(begin, begin + count - 1); } // Default constructor (private), for boost::serialization. 
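// (Usage sketch, not from the original source: a built tree is typically
// round-tripped through mlpack's boost::serialization helpers, e.g.
//
//   data::Save("tree.xml", "tree", tree);   // serialize via Serialize()
//   TreeType tree2;                         // default-constructed shell
//   data::Load("tree.xml", "tree", tree2);  // restore into the shell
//
// where TreeType is an assumed BinarySpaceTree instantiation; the default
// constructor defined below exists to make the load step possible.)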
template class BoundType, template class SplitType> BinarySpaceTree:: BinarySpaceTree() : left(NULL), right(NULL), parent(NULL), begin(0), count(0), stat(*this), parentDistance(0), furthestDescendantDistance(0), dataset(NULL) { // Nothing to do. } /** * Serialize the tree. */ template class BoundType, template class SplitType> template void BinarySpaceTree:: Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // If we're loading, and we have children, they need to be deleted. if (Archive::is_loading::value) { if (left) delete left; if (right) delete right; if (!parent) delete dataset; } ar & CreateNVP(parent, "parent"); ar & CreateNVP(begin, "begin"); ar & CreateNVP(count, "count"); ar & CreateNVP(bound, "bound"); ar & CreateNVP(stat, "statistic"); ar & CreateNVP(parentDistance, "parentDistance"); ar & CreateNVP(furthestDescendantDistance, "furthestDescendantDistance"); ar & CreateNVP(dataset, "dataset"); // Save children last; otherwise boost::serialization gets confused. ar & CreateNVP(left, "left"); ar & CreateNVP(right, "right"); // Due to quirks of boost::serialization, if a tree is saved as an object and // not a pointer, the first level of the tree will be duplicated on load. // Therefore, if we are the root of the tree, then we need to make sure our // children's parent links are correct, and delete the duplicated node if // necessary. if (Archive::is_loading::value) { // Get parents of left and right children, or, NULL, if they don't exist. BinarySpaceTree* leftParent = left ? left->Parent() : NULL; BinarySpaceTree* rightParent = right ? right->Parent() : NULL; // Reassign parent links if necessary. if (left && left->Parent() != this) left->Parent() = this; if (right && right->Parent() != this) right->Parent() = this; // Do we need to delete the left parent? if (leftParent != NULL && leftParent != this) { // Sever the duplicate parent's children. Ensure we don't delete the // dataset, by faking the duplicated parent's parent (that is, we need to // set the parent to something non-NULL; 'this' works). leftParent->Parent() = this; leftParent->Left() = NULL; leftParent->Right() = NULL; delete leftParent; } // Do we need to delete the right parent? if (rightParent != NULL && rightParent != this && rightParent != leftParent) { // Sever the duplicate parent's children, in the same way as above. rightParent->Parent() = this; rightParent->Left() = NULL; rightParent->Right() = NULL; delete rightParent; } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/breadth_first_dual_tree_traverser.hpp000066400000000000000000000074141315013601400320440ustar00rootroot00000000000000/** * @file breadth_first_dual_tree_traverser.hpp * @author Ryan Curtin * * Defines the BreadthFirstDualTreeTraverser for the BinarySpaceTree tree type. * This is a nested class of BinarySpaceTree which traverses two trees in a * breadth-first manner with a given set of rules which indicate the branches * which can be pruned and the order in which to recurse. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
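 *
 * Queue frames are ordered so that shallower query depths are processed
 * first, with lower (better) scores breaking ties; see the QueueFrame
 * operator< defined in the implementation file.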
*/ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_BREADTH_FIRST_DUAL_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_BREADTH_FIRST_DUAL_TREE_TRAVERSER_HPP #include #include #include "../binary_space_tree.hpp" namespace mlpack { namespace tree { template struct QueueFrame { TreeType* queryNode; TreeType* referenceNode; size_t queryDepth; double score; TraversalInfoType traversalInfo; }; template class BoundType, template class SplitType> template class BinarySpaceTree::BreadthFirstDualTreeTraverser { public: /** * Instantiate the dual-tree traverser with the given rule set. */ BreadthFirstDualTreeTraverser(RuleType& rule); typedef QueueFrame QueueFrameType; /** * Traverse the two trees. This does not reset the number of prunes. * * @param queryNode The query node to be traversed. * @param referenceNode The reference node to be traversed. * @param score The score of the current node combination. */ void Traverse(BinarySpaceTree& queryNode, BinarySpaceTree& referenceNode); void Traverse(BinarySpaceTree& queryNode, std::priority_queue& referenceQueue); //! Get the number of prunes. size_t NumPrunes() const { return numPrunes; } //! Modify the number of prunes. size_t& NumPrunes() { return numPrunes; } //! Get the number of visited combinations. size_t NumVisited() const { return numVisited; } //! Modify the number of visited combinations. size_t& NumVisited() { return numVisited; } //! Get the number of times a node combination was scored. size_t NumScores() const { return numScores; } //! Modify the number of times a node combination was scored. size_t& NumScores() { return numScores; } //! Get the number of times a base case was calculated. size_t NumBaseCases() const { return numBaseCases; } //! Modify the number of times a base case was calculated. size_t& NumBaseCases() { return numBaseCases; } private: //! Reference to the rules with which the trees will be traversed. RuleType& rule; //! The number of prunes. size_t numPrunes; //! The number of node combinations that have been visited during traversal. size_t numVisited; //! The number of times a node combination was scored. size_t numScores; //! The number of times a base case was calculated. size_t numBaseCases; //! Traversal information, held in the class so that it isn't continually //! being reallocated. typename RuleType::TraversalInfoType traversalInfo; }; } // namespace tree } // namespace mlpack // Include implementation. #include "breadth_first_dual_tree_traverser_impl.hpp" #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_BREADTH_FIRST_DUAL_TREE_TRAVERSER_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/breadth_first_dual_tree_traverser_impl.hpp000066400000000000000000000166521315013601400330710ustar00rootroot00000000000000/** * @file breadth_first_dual_tree_traverser_impl.hpp * @author Ryan Curtin * * Implementation of the BreadthFirstDualTreeTraverser for BinarySpaceTree. * This is a way to perform a dual-tree traversal of two trees. The trees must * be the same type. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_BREADTH_FIRST_DUAL_TREE_TRAVERSER_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_BREADTH_FIRST_DUAL_TREE_TRAVERSER_IMPL_HPP // In case it hasn't been included yet. 
#include "breadth_first_dual_tree_traverser.hpp" namespace mlpack { namespace tree { template class BoundType, template class SplitType> template BinarySpaceTree:: BreadthFirstDualTreeTraverser::BreadthFirstDualTreeTraverser( RuleType& rule) : rule(rule), numPrunes(0), numVisited(0), numScores(0), numBaseCases(0) { /* Nothing to do. */ } template bool operator<(const QueueFrame& a, const QueueFrame& b) { if (a.queryDepth > b.queryDepth) return true; else if ((a.queryDepth == b.queryDepth) && (a.score > b.score)) return true; return false; } template class BoundType, template class SplitType> template void BinarySpaceTree:: BreadthFirstDualTreeTraverser::Traverse( BinarySpaceTree& queryRoot, BinarySpaceTree& referenceRoot) { // Increment the visit counter. ++numVisited; // Store the current traversal info. traversalInfo = rule.TraversalInfo(); // Must score the root combination. const double rootScore = rule.Score(queryRoot, referenceRoot); if (rootScore == DBL_MAX) return; // This probably means something is wrong. std::priority_queue queue; QueueFrameType rootFrame; rootFrame.queryNode = &queryRoot; rootFrame.referenceNode = &referenceRoot; rootFrame.queryDepth = 0; rootFrame.score = 0.0; rootFrame.traversalInfo = rule.TraversalInfo(); queue.push(rootFrame); // Start the traversal. Traverse(queryRoot, queue); } template class BoundType, template class SplitType> template void BinarySpaceTree:: BreadthFirstDualTreeTraverser::Traverse( BinarySpaceTree& queryNode, std::priority_queue& referenceQueue) { // Store queues for the children. We will recurse into the children once our // queue is empty. std::priority_queue leftChildQueue; std::priority_queue rightChildQueue; while (!referenceQueue.empty()) { QueueFrameType currentFrame = referenceQueue.top(); referenceQueue.pop(); BinarySpaceTree& queryNode = *currentFrame.queryNode; BinarySpaceTree& referenceNode = *currentFrame.referenceNode; typename RuleType::TraversalInfoType ti = currentFrame.traversalInfo; rule.TraversalInfo() = ti; const size_t queryDepth = currentFrame.queryDepth; double score = rule.Score(queryNode, referenceNode); ++numScores; if (score == DBL_MAX) { ++numPrunes; continue; } // If both are leaves, we must evaluate the base case. if (queryNode.IsLeaf() && referenceNode.IsLeaf()) { // Loop through each of the points in each node. const size_t queryEnd = queryNode.Begin() + queryNode.Count(); const size_t refEnd = referenceNode.Begin() + referenceNode.Count(); for (size_t query = queryNode.Begin(); query < queryEnd; ++query) { // See if we need to investigate this point (this function should be // implemented for the single-tree recursion too). Restore the // traversal information first. // const double childScore = rule.Score(query, referenceNode); // if (childScore == DBL_MAX) // continue; // We can't improve this particular point. for (size_t ref = referenceNode.Begin(); ref < refEnd; ++ref) rule.BaseCase(query, ref); numBaseCases += referenceNode.Count(); } } else if ((!queryNode.IsLeaf()) && referenceNode.IsLeaf()) { // We have to recurse down the query node. QueueFrameType fl = { queryNode.Left(), &referenceNode, queryDepth + 1, score, rule.TraversalInfo() }; leftChildQueue.push(fl); QueueFrameType fr = { queryNode.Right(), &referenceNode, queryDepth + 1, score, ti }; rightChildQueue.push(fr); } else if (queryNode.IsLeaf() && (!referenceNode.IsLeaf())) { // We have to recurse down the reference node. In this case the recursion // order does matter. 
Before recursing, though, we have to set the // traversal information correctly. QueueFrameType fl = { &queryNode, referenceNode.Left(), queryDepth, score, rule.TraversalInfo() }; referenceQueue.push(fl); QueueFrameType fr = { &queryNode, referenceNode.Right(), queryDepth, score, ti }; referenceQueue.push(fr); } else { // We have to recurse down both query and reference nodes. Because the // query descent order does not matter, we will go to the left query child // first. Before recursing, we have to set the traversal information // correctly. QueueFrameType fll = { queryNode.Left(), referenceNode.Left(), queryDepth + 1, score, rule.TraversalInfo() }; leftChildQueue.push(fll); QueueFrameType flr = { queryNode.Left(), referenceNode.Right(), queryDepth + 1, score, rule.TraversalInfo() }; leftChildQueue.push(flr); QueueFrameType frl = { queryNode.Right(), referenceNode.Left(), queryDepth + 1, score, rule.TraversalInfo() }; rightChildQueue.push(frl); QueueFrameType frr = { queryNode.Right(), referenceNode.Right(), queryDepth + 1, score, rule.TraversalInfo() }; rightChildQueue.push(frr); } } // Now, recurse into the left and right children queues. The order doesn't // matter. if (leftChildQueue.size() > 0) Traverse(*queryNode.Left(), leftChildQueue); if (rightChildQueue.size() > 0) Traverse(*queryNode.Right(), rightChildQueue); } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_BREADTH_FIRST_DUAL_TREE_TRAVERSER_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/dual_tree_traverser.hpp000066400000000000000000000063561315013601400271500ustar00rootroot00000000000000/** * @file dual_tree_traverser.hpp * @author Ryan Curtin * * Defines the DualTreeTraverser for the BinarySpaceTree tree type. This is a * nested class of BinarySpaceTree which traverses two trees in a depth-first * manner with a given set of rules which indicate the branches which can be * pruned and the order in which to recurse. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_DUAL_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_DUAL_TREE_TRAVERSER_HPP #include #include "binary_space_tree.hpp" namespace mlpack { namespace tree { template class BoundType, template class SplitType> template class BinarySpaceTree::DualTreeTraverser { public: /** * Instantiate the dual-tree traverser with the given rule set. */ DualTreeTraverser(RuleType& rule); /** * Traverse the two trees. This does not reset the number of prunes. * * @param queryNode The query node to be traversed. * @param referenceNode The reference node to be traversed. * @param score The score of the current node combination. */ void Traverse(BinarySpaceTree& queryNode, BinarySpaceTree& referenceNode); //! Get the number of prunes. size_t NumPrunes() const { return numPrunes; } //! Modify the number of prunes. size_t& NumPrunes() { return numPrunes; } //! Get the number of visited combinations. size_t NumVisited() const { return numVisited; } //! Modify the number of visited combinations. size_t& NumVisited() { return numVisited; } //! Get the number of times a node combination was scored. size_t NumScores() const { return numScores; } //! Modify the number of times a node combination was scored. size_t& NumScores() { return numScores; } //! 
Get the number of times a base case was calculated. size_t NumBaseCases() const { return numBaseCases; } //! Modify the number of times a base case was calculated. size_t& NumBaseCases() { return numBaseCases; } private: //! Reference to the rules with which the trees will be traversed. RuleType& rule; //! The number of prunes. size_t numPrunes; //! The number of node combinations that have been visited during traversal. size_t numVisited; //! The number of times a node combination was scored. size_t numScores; //! The number of times a base case was calculated. size_t numBaseCases; //! Traversal information, held in the class so that it isn't continually //! being reallocated. typename RuleType::TraversalInfoType traversalInfo; }; } // namespace tree } // namespace mlpack // Include implementation. #include "dual_tree_traverser_impl.hpp" #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_DUAL_TREE_TRAVERSER_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/dual_tree_traverser_impl.hpp000066400000000000000000000260301315013601400301600ustar00rootroot00000000000000/** * @file dual_tree_traverser_impl.hpp * @author Ryan Curtin * * Implementation of the DualTreeTraverser for BinarySpaceTree. This is a way * to perform a dual-tree traversal of two trees. The trees must be the same * type. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_DUAL_TREE_TRAVERSER_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_DUAL_TREE_TRAVERSER_IMPL_HPP // In case it hasn't been included yet. #include "dual_tree_traverser.hpp" namespace mlpack { namespace tree { template class BoundType, template class SplitType> template BinarySpaceTree:: DualTreeTraverser::DualTreeTraverser(RuleType& rule) : rule(rule), numPrunes(0), numVisited(0), numScores(0), numBaseCases(0) { /* Nothing to do. */ } template class BoundType, template class SplitType> template void BinarySpaceTree:: DualTreeTraverser::Traverse( BinarySpaceTree& queryNode, BinarySpaceTree& referenceNode) { // Increment the visit counter. ++numVisited; // Store the current traversal info. traversalInfo = rule.TraversalInfo(); // If both are leaves, we must evaluate the base case. if (queryNode.IsLeaf() && referenceNode.IsLeaf()) { // Loop through each of the points in each node. const size_t queryEnd = queryNode.Begin() + queryNode.Count(); const size_t refEnd = referenceNode.Begin() + referenceNode.Count(); for (size_t query = queryNode.Begin(); query < queryEnd; ++query) { // See if we need to investigate this point (this function should be // implemented for the single-tree recursion too). Restore the traversal // information first. rule.TraversalInfo() = traversalInfo; const double childScore = rule.Score(query, referenceNode); if (childScore == DBL_MAX) continue; // We can't improve this particular point. for (size_t ref = referenceNode.Begin(); ref < refEnd; ++ref) rule.BaseCase(query, ref); numBaseCases += referenceNode.Count(); } } else if (((!queryNode.IsLeaf()) && referenceNode.IsLeaf()) || (queryNode.NumDescendants() > 3 * referenceNode.NumDescendants() && !queryNode.IsLeaf() && !referenceNode.IsLeaf())) { // We have to recurse down the query node. In this case the recursion order // does not matter. 
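    // Both query children must eventually be visited, so we score and recurse
    // into the left child and then the right child; only the reference-side
    // recursion further below is ordered by score.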
const double leftScore = rule.Score(*queryNode.Left(), referenceNode); ++numScores; if (leftScore != DBL_MAX) Traverse(*queryNode.Left(), referenceNode); else ++numPrunes; // Before recursing, we have to set the traversal information correctly. rule.TraversalInfo() = traversalInfo; const double rightScore = rule.Score(*queryNode.Right(), referenceNode); ++numScores; if (rightScore != DBL_MAX) Traverse(*queryNode.Right(), referenceNode); else ++numPrunes; } else if (queryNode.IsLeaf() && (!referenceNode.IsLeaf())) { // We have to recurse down the reference node. In this case the recursion // order does matter. Before recursing, though, we have to set the // traversal information correctly. double leftScore = rule.Score(queryNode, *referenceNode.Left()); typename RuleType::TraversalInfoType leftInfo = rule.TraversalInfo(); rule.TraversalInfo() = traversalInfo; double rightScore = rule.Score(queryNode, *referenceNode.Right()); numScores += 2; if (leftScore < rightScore) { // Recurse to the left. Restore the left traversal info. Store the right // traversal info. traversalInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(queryNode, *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(queryNode, *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. rule.TraversalInfo() = traversalInfo; Traverse(queryNode, *referenceNode.Right()); } else ++numPrunes; } else if (rightScore < leftScore) { // Recurse to the right. Traverse(queryNode, *referenceNode.Right()); // Is it still valid to recurse to the left? leftScore = rule.Rescore(queryNode, *referenceNode.Left(), leftScore); if (leftScore != DBL_MAX) { // Restore the left traversal info. rule.TraversalInfo() = leftInfo; Traverse(queryNode, *referenceNode.Left()); } else ++numPrunes; } else // leftScore is equal to rightScore. { if (leftScore == DBL_MAX) { numPrunes += 2; } else { // Choose the left first. Restore the left traversal info. Store the // right traversal info. traversalInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(queryNode, *referenceNode.Left()); rightScore = rule.Rescore(queryNode, *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. rule.TraversalInfo() = traversalInfo; Traverse(queryNode, *referenceNode.Right()); } else ++numPrunes; } } } else { // We have to recurse down both query and reference nodes. Because the // query descent order does not matter, we will go to the left query child // first. Before recursing, we have to set the traversal information // correctly. double leftScore = rule.Score(*queryNode.Left(), *referenceNode.Left()); typename RuleType::TraversalInfoType leftInfo = rule.TraversalInfo(); rule.TraversalInfo() = traversalInfo; double rightScore = rule.Score(*queryNode.Left(), *referenceNode.Right()); typename RuleType::TraversalInfoType rightInfo; numScores += 2; if (leftScore < rightScore) { // Recurse to the left. Restore the left traversal info. Store the right // traversal info. rightInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Left(), *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(*queryNode.Left(), *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. 
rule.TraversalInfo() = rightInfo; Traverse(*queryNode.Left(), *referenceNode.Right()); } else ++numPrunes; } else if (rightScore < leftScore) { // Recurse to the right. Traverse(*queryNode.Left(), *referenceNode.Right()); // Is it still valid to recurse to the left? leftScore = rule.Rescore(*queryNode.Left(), *referenceNode.Left(), leftScore); if (leftScore != DBL_MAX) { // Restore the left traversal info. rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Left(), *referenceNode.Left()); } else ++numPrunes; } else { if (leftScore == DBL_MAX) { numPrunes += 2; } else { // Choose the left first. Restore the left traversal info and store the // right traversal info. rightInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Left(), *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(*queryNode.Left(), *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal information. rule.TraversalInfo() = rightInfo; Traverse(*queryNode.Left(), *referenceNode.Right()); } else ++numPrunes; } } // Restore the main traversal information. rule.TraversalInfo() = traversalInfo; // Now recurse down the right query node. leftScore = rule.Score(*queryNode.Right(), *referenceNode.Left()); leftInfo = rule.TraversalInfo(); rule.TraversalInfo() = traversalInfo; rightScore = rule.Score(*queryNode.Right(), *referenceNode.Right()); numScores += 2; if (leftScore < rightScore) { // Recurse to the left. Restore the left traversal info. Store the right // traversal info. rightInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Right(), *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(*queryNode.Right(), *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. rule.TraversalInfo() = rightInfo; Traverse(*queryNode.Right(), *referenceNode.Right()); } else ++numPrunes; } else if (rightScore < leftScore) { // Recurse to the right. Traverse(*queryNode.Right(), *referenceNode.Right()); // Is it still valid to recurse to the left? leftScore = rule.Rescore(*queryNode.Right(), *referenceNode.Left(), leftScore); if (leftScore != DBL_MAX) { // Restore the left traversal info. rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Right(), *referenceNode.Left()); } else ++numPrunes; } else { if (leftScore == DBL_MAX) { numPrunes += 2; } else { // Choose the left first. Restore the left traversal info. Store the // right traversal info. rightInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Right(), *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(*queryNode.Right(), *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. rule.TraversalInfo() = rightInfo; Traverse(*queryNode.Right(), *referenceNode.Right()); } else ++numPrunes; } } } } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_DUAL_TREE_TRAVERSER_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/mean_split.hpp000066400000000000000000000111251315013601400252300ustar00rootroot00000000000000/** * @file mean_split.hpp * @author Yash Vadalia * @author Ryan Curtin * * Definition of MeanSplit, a class that splits a binary space partitioning tree * node into two parts using the mean of the values in a certain dimension. 
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_MEAN_SPLIT_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_MEAN_SPLIT_HPP #include #include namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { /** * A binary space partitioning tree node is split into its left and right child. * The split is done in the dimension that has the maximum width. The points are * divided into two parts based on the mean in this dimension. */ template class MeanSplit { public: //! An information about the partition. struct SplitInfo { //! The dimension to split the node on. size_t splitDimension; //! The split in dimension splitDimension is based on this value. double splitVal; }; /** * Find the partition of the node. This method fills up the dimension * that will be used to split the node and the value according which the split * will be performed. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo An information about the split. This information contains * the dimension and the value. */ static bool SplitNode(const BoundType& bound, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo); /** * Perform the split process according to the information about the * split. This will order the dataset such that points that belong to the left * subtree are on the left of the split column, and points from the right * subtree are on the right side of the split column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo) { return split::PerformSplit(data, begin, count, splitInfo); } /** * Perform the split process according to the information about the split and * return the list of changed indices. This will order the dataset such that * points that belong to the left subtree are on the left of the split column, * and points from the right subtree are on the right side of the split * column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. * @param oldFromNew Vector which will be filled with the old positions for * each new point. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo, std::vector& oldFromNew) { return split::PerformSplit(data, begin, count, splitInfo, oldFromNew); } /** * Indicates that a point should be assigned to the left subtree. * * @param point The point that is being assigned. * @param splitInfo An information about the split. 
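   *
   * For example (with made-up values): if splitInfo.splitDimension == 2 and
   * splitInfo.splitVal == 0.5, then a point p is assigned to the left subtree
   * exactly when p[2] < 0.5.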
*/ template static bool AssignToLeftNode(const VecType& point, const SplitInfo& splitInfo) { return point[splitInfo.splitDimension] < splitInfo.splitVal; } }; } // namespace tree } // namespace mlpack // Include implementation. #include "mean_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/mean_split_impl.hpp000066400000000000000000000052721315013601400262570ustar00rootroot00000000000000/** * @file mean_split_impl.hpp * @author Yash Vadalia * @author Ryan Curtin * * Implementation of class(MeanSplit) to split a binary space partition tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_MEAN_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_MEAN_SPLIT_IMPL_HPP #include "mean_split.hpp" namespace mlpack { namespace tree { template bool MeanSplit::SplitNode(const BoundType& bound, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo) { double maxWidth = -1; splitInfo.splitDimension = data.n_rows; // Indicate invalid. // Find the split dimension. If the bound is tight, we only need to consult // the bound's width. if (bound::BoundTraits::HasTightBounds) { for (size_t d = 0; d < data.n_rows; d++) { const double width = bound[d].Width(); if (width > maxWidth) { maxWidth = width; splitInfo.splitDimension = d; } } } else { // We must individually calculate bounding boxes. math::Range* ranges = new math::Range[data.n_rows]; for (size_t i = begin; i < begin + count; ++i) { // Expand each dimension as necessary. for (size_t d = 0; d < data.n_rows; ++d) { const double val = data(d, i); if (val < ranges[d].Lo()) ranges[d].Lo() = val; if (val > ranges[d].Hi()) ranges[d].Hi() = val; } } // Now, which is the widest? for (size_t d = 0; d < data.n_rows; d++) { const double width = ranges[d].Width(); if (width > maxWidth) { maxWidth = width; splitInfo.splitDimension = d; } } delete[] ranges; } if (maxWidth == 0) // All these points are the same. We can't split. return false; // Split in the mean of that dimension. splitInfo.splitVal = 0.0; for (size_t i = begin; i < begin + count; ++i) splitInfo.splitVal += data(splitInfo.splitDimension, i); splitInfo.splitVal /= count; Log::Assert(splitInfo.splitVal >= bound[splitInfo.splitDimension].Lo()); Log::Assert(splitInfo.splitVal <= bound[splitInfo.splitDimension].Hi()); return true; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/midpoint_split.hpp000066400000000000000000000113251315013601400261350ustar00rootroot00000000000000/** * @file midpoint_split.hpp * @author Yash Vadalia * @author Ryan Curtin * * Definition of MidpointSplit, a class that splits a binary space partitioning * tree node into two parts using the midpoint of the values in a certain * dimension. The dimension to split on is the dimension with maximum variance. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
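 * (In the implementation below, the split dimension is chosen as the one
 * with maximum width.)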
*/ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_MIDPOINT_SPLIT_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_MIDPOINT_SPLIT_HPP #include #include namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { /** * A binary space partitioning tree node is split into its left and right child. * The split is done in the dimension that has the maximum width. The points are * divided into two parts based on the midpoint in this dimension. */ template class MidpointSplit { public: //! A struct that contains an information about the split. struct SplitInfo { //! The dimension to split the node on. size_t splitDimension; //! The split in dimension splitDimension is based on this value. double splitVal; }; /** * Find the partition of the node. This method fills up the dimension that * will be used to split the node and the value according which the split * will be performed. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo An information about the split. This information contains * the dimension and the value. */ static bool SplitNode(const BoundType& bound, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo); /** * Perform the split process according to the information about the * split. This will order the dataset such that points that belong to the left * subtree are on the left of the split column, and points from the right * subtree are on the right side of the split column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo) { return split::PerformSplit(data, begin, count, splitInfo); } /** * Perform the split process according to the information about the split and * return the list of changed indices. This will order the dataset such that * points that belong to the left subtree are on the left of the split column, * and points from the right subtree are on the right side of the split * column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. * @param oldFromNew Vector which will be filled with the old positions for * each new point. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo, std::vector& oldFromNew) { return split::PerformSplit(data, begin, count, splitInfo, oldFromNew); } /** * Indicates that a point should be assigned to the left subtree. * * @param point The point that is being assigned. * @param splitInfo An information about the split. */ template static bool AssignToLeftNode(const VecType& point, const SplitInfo& splitInfo) { return point[splitInfo.splitDimension] < splitInfo.splitVal; } }; } // namespace tree } // namespace mlpack // Include implementation. 
#include "midpoint_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/midpoint_split_impl.hpp000066400000000000000000000053221315013601400271560ustar00rootroot00000000000000/** * @file midpoint_split_impl.hpp * @author Yash Vadalia * @author Ryan Curtin * * Implementation of class (MidpointSplit) to split a binary space partition * tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_MIDPOINT_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_MIDPOINT_SPLIT_IMPL_HPP #include "midpoint_split.hpp" #include namespace mlpack { namespace tree { template bool MidpointSplit::SplitNode(const BoundType& bound, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo) { double maxWidth = -1; splitInfo.splitDimension = data.n_rows; // Indicate invalid. // Find the split dimension. If the bound is tight, we only need to consult // the bound's width. if (bound::BoundTraits::HasTightBounds) { for (size_t d = 0; d < data.n_rows; d++) { const double width = bound[d].Width(); if (width > maxWidth) { maxWidth = width; splitInfo.splitDimension = d; // Split in the midpoint of that dimension. splitInfo.splitVal = bound[d].Mid(); } } } else { // We must individually calculate bounding boxes. math::Range* ranges = new math::Range[data.n_rows]; for (size_t i = begin; i < begin + count; ++i) { // Expand each dimension as necessary. for (size_t d = 0; d < data.n_rows; ++d) { const double val = data(d, i); if (val < ranges[d].Lo()) ranges[d].Lo() = val; if (val > ranges[d].Hi()) ranges[d].Hi() = val; } } // Now, which is the widest? for (size_t d = 0; d < data.n_rows; d++) { const double width = ranges[d].Width(); if (width > maxWidth) { maxWidth = width; splitInfo.splitDimension = d; // Split in the midpoint of that dimension. splitInfo.splitVal = ranges[d].Mid(); } } delete[] ranges; } if (maxWidth <= 0) // All these points are the same. We can't split. return false; // Split in the midpoint of that dimension. splitInfo.splitVal = bound[splitInfo.splitDimension].Mid(); return true; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/rp_tree_max_split.hpp000066400000000000000000000127311315013601400266210ustar00rootroot00000000000000/** * @file rp_tree_max_split.hpp * @author Mikhail Lozhnikov * * Definition of class (RPTreeMaxSplit) to split a binary space partition * tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MAX_SPLIT_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MAX_SPLIT_HPP #include #include namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { /** * This class splits a node by a random hyperplane. In order to choose the * hyperplane we need to choose the normal to the hyperplane and the position * of the hyperplane i.e. the scalar product of the normal and a point. * * A point will be assigned to the left subtree if the product of * this point and the normal is less or equal to the split value (i.e. 
the * position of the hyperplane). */ template class RPTreeMaxSplit { public: //! The element type held by the matrix type. typedef typename MatType::elem_type ElemType; //! An information about the partition. struct SplitInfo { //! The normal vector to the hyperplane that splits the node. arma::Col direction; //! The value according to which the node is being split. ElemType splitVal; }; /** * Split the node by a random hyperplane. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo An information about the split. This information contains * the direction and the value. */ static bool SplitNode(const BoundType& /*bound*/, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo); /** * Perform the split process according to the information about the * split. This will order the dataset such that points that belong to the left * subtree are on the left of the split column, and points from the right * subtree are on the right side of the split column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo) { return split::PerformSplit(data, begin, count, splitInfo); } /** * Perform the split process according to the information about the split and * return the list of changed indices. This will order the dataset such that * points that belong to the left subtree are on the left of the split column, * and points from the right subtree are on the right side of the split * column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. * @param oldFromNew Vector which will be filled with the old positions for * each new point. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo, std::vector& oldFromNew) { return split::PerformSplit(data, begin, count, splitInfo, oldFromNew); } /** * Indicates that a point should be assigned to the left subtree. * * @param point The point that is being assigned. * @param splitInfo An information about the split. */ template static bool AssignToLeftNode(const VecType& point, const SplitInfo& splitInfo) { return (arma::dot(point, splitInfo.direction) <= splitInfo.splitVal); } private: /** * This method finds the position of the hyperplane that will split the node. * * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param direction A random vector that is the normal to the hyperplane * which will split the node. * @param splitVal The value according which the node will be split. 
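   * @return true if a suitable split value was found, or false if all of the
   *     sampled projections are identical (in which case no split is made).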
*/ static bool GetSplitVal(const MatType& data, const size_t begin, const size_t count, const arma::Col& direction, ElemType& splitVal); }; } // namespace tree } // namespace mlpack // Include implementation. #include "rp_tree_max_split_impl.hpp" #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MAX_SPLIT_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/rp_tree_max_split_impl.hpp000066400000000000000000000057631315013601400276510ustar00rootroot00000000000000/** * @file rp_tree_max_split_impl.hpp * @author Mikhail Lozhnikov * * Implementation of class (RPTreeMaxSplit) to split a binary space partition * tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MAX_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MAX_SPLIT_IMPL_HPP #include "rp_tree_max_split.hpp" #include "rp_tree_mean_split.hpp" namespace mlpack { namespace tree { template bool RPTreeMaxSplit::SplitNode(const BoundType& /* bound */, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo) { splitInfo.direction.zeros(data.n_rows); // Get the normal to the hyperplane. math::RandVector(splitInfo.direction); // Get the value according to which we will perform the split. return GetSplitVal(data, begin, count, splitInfo.direction, splitInfo.splitVal); } template bool RPTreeMaxSplit::GetSplitVal( const MatType& data, const size_t begin, const size_t count, const arma::Col& direction, ElemType& splitVal) { const size_t maxNumSamples = 100; const size_t numSamples = std::min(maxNumSamples, count); arma::uvec samples; // Get no more than numSamples distinct samples. math::ObtainDistinctSamples(begin, begin + count, numSamples, samples); arma::Col values(samples.n_elem); // Find the median of scalar products of the samples and the normal vector. for (size_t k = 0; k < samples.n_elem; k++) values[k] = arma::dot(data.col(samples[k]), direction); const ElemType maximum = arma::max(values); const ElemType minimum = arma::min(values); if (minimum == maximum) return false; splitVal = arma::median(values); // Add a random deviation to the median. // This algorithm differs from the method suggested in the random projection // tree paper, for two reasons: // 1. Evaluating the method proposed in the paper is time-consuming, since // we must solve the furthest-pair problem. // 2. The proposed method does not appear to guarantee that a valid split // value will be generated (i.e. it can produce a split value where there // may be no points on the left or the right). splitVal += math::Random((minimum - splitVal) * 0.75, (maximum - splitVal) * 0.75); if (splitVal == maximum) splitVal = minimum; return true; } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MAX_SPLIT_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/rp_tree_mean_split.hpp000066400000000000000000000152321315013601400267530ustar00rootroot00000000000000/** * @file rp_tree_mean_split.hpp * @author Mikhail Lozhnikov * * Definition of class (RPTreeMaxSplit) to split a binary space partition * tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. 
If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MEAN_SPLIT_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MEAN_SPLIT_HPP #include #include "rp_tree_max_split.hpp" #include #include namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { /** * This class splits a binary space tree. This class provides two different * kinds of split. The mean split (i.e. all points are split by the median * of their distance to the mean point) is performed if the squared diameter * of the node is greater than the average squared distance between points * multiplied by a constant. Otherwise, the median split (i.e. the node is * split by a random hyperplane) is performed. */ template class RPTreeMeanSplit { public: //! The element type held by the matrix type. typedef typename MatType::elem_type ElemType; //! Information about the partition. struct SplitInfo { //! The normal to the hyperplane that will split the node. arma::Col direction; //! The mean of some sampled points. arma::Col mean; //! The value according to which the split will be performed. ElemType splitVal; //! Indicates that we should use the mean split algorithm instead of the //! median split. bool meanSplit; }; /** * Split the node, either by a random hyperplane or by the distance to the * mean point, depending on the relation between the node's diameter and the * average distance between points (see the class description). * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo Information about the split. This information contains * the direction and the value. */ static bool SplitNode(const BoundType& /* bound */, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo); /** * Perform the split process according to the information about the * split. This will order the dataset such that points that belong to the left * subtree are on the left of the split column, and points from the right * subtree are on the right side of the split column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo) { return split::PerformSplit(data, begin, count, splitInfo); } /** * Perform the split process according to the information about the split and * return the list of changed indices. This will order the dataset such that * points that belong to the left subtree are on the left of the split column, * and points from the right subtree are on the right side of the split * column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. * @param oldFromNew Vector which will be filled with the old positions for * each new point.
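 *
 * For example (a hypothetical call; assumes splitInfo was filled in by a
 * successful SplitNode() call):
 *
 * @code
 * std::vector<size_t> oldFromNew(data.n_cols);
 * for (size_t i = 0; i < data.n_cols; ++i)
 *   oldFromNew[i] = i;
 * const size_t splitCol = PerformSplit(data, begin, count, splitInfo,
 *     oldFromNew);
 * // Columns [begin, splitCol) now hold the left subtree's points, and
 * // oldFromNew[i] is the original position of the point now at column i.
 * @endcode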
*/ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo, std::vector& oldFromNew) { return split::PerformSplit(data, begin, count, splitInfo, oldFromNew); } /** * Indicates that a point should be assigned to the left subtree. * * @param point The point that is being assigned. * @param splitInfo An information about the split. */ template static bool AssignToLeftNode(const VecType& point, const SplitInfo& splitInfo) { if (splitInfo.meanSplit) return arma::dot(point - splitInfo.mean, point - splitInfo.mean) <= splitInfo.splitVal; return (arma::dot(point, splitInfo.direction) <= splitInfo.splitVal); } private: /** * Get the average distance between points in the dataset. * * @param data The dataset used by the binary space tree. * @param samples The indices of points that will be used for the calculation. */ static ElemType GetAveragePointDistance(MatType& data, const arma::uvec& samples); /** * Get the median of scalar products of the sampled points and the normal * to the hyperplane (i.e. the position of the hyperplane). * * @param data The dataset used by the binary space tree. * @param samples The indices of points that will be used for the calculation. * @param direction The normal to the hyperplane. * @param splitVal The median value. */ static bool GetDotMedian(const MatType& data, const arma::uvec& samples, const arma::Col& direction, ElemType& splitVal); /** * Get the mean point and the median of distance from the mean to any point of * the dataset. * * @param data The dataset used by the binary space tree. * @param samples The indices of points that will be used for the calculation. * @param mean The mean point. * @param splitVal The median value. */ static bool GetMeanMedian(const MatType& data, const arma::uvec& samples, arma::Col& mean, ElemType& splitVal); }; } // namespace tree } // namespace mlpack // Include implementation. #include "rp_tree_mean_split_impl.hpp" #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MEAN_SPLIT_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/rp_tree_mean_split_impl.hpp000066400000000000000000000102411315013601400277670ustar00rootroot00000000000000/** * @file rp_tree_mean_split_impl.hpp * @author Mikhail Lozhnikov * * Implementation of class (RPTreeMeanSplit) to split a binary space partition * tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MEAN_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MEAN_SPLIT_IMPL_HPP #include "rp_tree_max_split.hpp" namespace mlpack { namespace tree { template bool RPTreeMeanSplit::SplitNode(const BoundType& bound, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo) { const size_t maxNumSamples = 100; const size_t numSamples = std::min(maxNumSamples, count); arma::uvec samples; // Get no more than numSamples distinct samples. math::ObtainDistinctSamples(begin, begin + count, numSamples, samples); // Find the average distance between points. ElemType averageDistanceSq = GetAveragePointDistance(data, samples); const ElemType threshold = 10; if (bound.Diameter() * bound.Diameter() <= threshold * averageDistanceSq) { // We will perform the median split. 
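// (This is the split-choice rule of the random projection tree paper of
// Dasgupta and Freund -- cited as dasgupta2008 in typedef.hpp -- with c = 10:
// if the squared diameter is at most c times the average squared interpoint
// distance, the node is "round" enough for a random-hyperplane split;
// otherwise the distance-to-mean split below is used.)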
splitInfo.meanSplit = false; splitInfo.direction.zeros(data.n_rows); // Get a random normal vector. math::RandVector(splitInfo.direction); // Get the median value of the scalar products of the normal and the // sampled points. The node will be split according to this value. return GetDotMedian(data, samples, splitInfo.direction, splitInfo.splitVal); } else { // We will perform the mean split. splitInfo.meanSplit = true; // Get the median of the distances between the mean point and the sampled // points. The node will be split according to this value. return GetMeanMedian(data, samples, splitInfo.mean, splitInfo.splitVal); } } template typename MatType::elem_type RPTreeMeanSplit:: GetAveragePointDistance( MatType& data, const arma::uvec& samples) { ElemType dist = 0; for (size_t i = 0; i < samples.n_elem; i++) for (size_t j = i + 1; j < samples.n_elem; j++) dist += metric::SquaredEuclideanDistance::Evaluate(data.col(samples[i]), data.col(samples[j])); dist /= (samples.n_elem * (samples.n_elem - 1) / 2); return dist; } template bool RPTreeMeanSplit::GetDotMedian( const MatType& data, const arma::uvec& samples, const arma::Col& direction, ElemType& splitVal) { arma::Col values(samples.n_elem); for (size_t k = 0; k < samples.n_elem; k++) values[k] = arma::dot(data.col(samples[k]), direction); const ElemType maximum = arma::max(values); const ElemType minimum = arma::min(values); if (minimum == maximum) return false; splitVal = arma::median(values); if (splitVal == maximum) splitVal = minimum; return true; } template bool RPTreeMeanSplit::GetMeanMedian( const MatType& data, const arma::uvec& samples, arma::Col& mean, ElemType& splitVal) { arma::Col values(samples.n_elem); mean = arma::mean(data.cols(samples), 1); arma::Col tmp(data.n_rows); for (size_t k = 0; k < samples.n_elem; k++) { tmp = data.col(samples[k]); tmp -= mean; values[k] = arma::dot(tmp, tmp); } const ElemType maximum = arma::max(values); const ElemType minimum = arma::min(values); if (minimum == maximum) return false; splitVal = arma::median(values); if (splitVal == maximum) splitVal = minimum; return true; } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_RP_TREE_MEAN_SPLIT_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/single_tree_traverser.hpp000066400000000000000000000041401315013601400274710ustar00rootroot00000000000000/** * @file single_tree_traverser.hpp * @author Ryan Curtin * * A nested class of BinarySpaceTree which traverses the entire tree with a * given set of rules which indicate the branches which can be pruned and the * order in which to recurse. This traverser is a depth-first traverser. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_SINGLE_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_SINGLE_TREE_TRAVERSER_HPP #include #include "binary_space_tree.hpp" namespace mlpack { namespace tree { template class BoundType, template class SplitType> template class BinarySpaceTree::SingleTreeTraverser { public: /** * Instantiate the single tree traverser with the given rule set. */ SingleTreeTraverser(RuleType& rule); /** * Traverse the tree with the given point. * * @param queryIndex The index of the point in the query set which is being * used as the query point. 
* @param referenceNode The tree node to be traversed. */ void Traverse(const size_t queryIndex, BinarySpaceTree& referenceNode); //! Get the number of prunes. size_t NumPrunes() const { return numPrunes; } //! Modify the number of prunes. size_t& NumPrunes() { return numPrunes; } private: //! Reference to the rules with which the tree will be traversed. RuleType& rule; //! The number of nodes which have been pruned during traversal. size_t numPrunes; }; } // namespace tree } // namespace mlpack // Include implementation. #include "single_tree_traverser_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/single_tree_traverser_impl.hpp000066400000000000000000000073431315013601400305220ustar00rootroot00000000000000/** * @file single_tree_traverser_impl.hpp * @author Ryan Curtin * * A nested class of BinarySpaceTree which traverses the entire tree with a * given set of rules which indicate the branches which can be pruned and the * order in which to recurse. This traverser is a depth-first traverser. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_SINGLE_TREE_TRAVERSER_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_SINGLE_TREE_TRAVERSER_IMPL_HPP // In case it hasn't been included yet. #include "single_tree_traverser.hpp" #include namespace mlpack { namespace tree { template class BoundType, template class SplitType> template BinarySpaceTree:: SingleTreeTraverser::SingleTreeTraverser(RuleType& rule) : rule(rule), numPrunes(0) { /* Nothing to do. */ } template class BoundType, template class SplitType> template void BinarySpaceTree:: SingleTreeTraverser::Traverse( const size_t queryIndex, BinarySpaceTree& referenceNode) { // If we are a leaf, run the base case as necessary. if (referenceNode.IsLeaf()) { const size_t refEnd = referenceNode.Begin() + referenceNode.Count(); for (size_t i = referenceNode.Begin(); i < refEnd; ++i) rule.BaseCase(queryIndex, i); } else { // If either score is DBL_MAX, we do not recurse into that node. double leftScore = rule.Score(queryIndex, *referenceNode.Left()); double rightScore = rule.Score(queryIndex, *referenceNode.Right()); if (leftScore < rightScore) { // Recurse to the left. Traverse(queryIndex, *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(queryIndex, *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) Traverse(queryIndex, *referenceNode.Right()); // Recurse to the right. else ++numPrunes; } else if (rightScore < leftScore) { // Recurse to the right. Traverse(queryIndex, *referenceNode.Right()); // Is it still valid to recurse to the left? leftScore = rule.Rescore(queryIndex, *referenceNode.Left(), leftScore); if (leftScore != DBL_MAX) Traverse(queryIndex, *referenceNode.Left()); // Recurse to the left. else ++numPrunes; } else // leftScore is equal to rightScore. { if (leftScore == DBL_MAX) { numPrunes += 2; // Pruned both left and right. } else { // Choose the left first. Traverse(queryIndex, *referenceNode.Left()); // Is it still valid to recurse to the right? 
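// (Rescore() lets the rule re-examine the cached score for the right node
// after the left recursion, which may have tightened the query's bound; a
// returned DBL_MAX means the right subtree can now be pruned.)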
rightScore = rule.Rescore(queryIndex, *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) Traverse(queryIndex, *referenceNode.Right()); else ++numPrunes; } } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/traits.hpp000066400000000000000000000173761315013601400244210ustar00rootroot00000000000000/** * @file traits.hpp * @author Ryan Curtin * * Specialization of the TreeTraits class for the BinarySpaceTree type of tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_TRAITS_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_TRAITS_HPP #include #include namespace mlpack { namespace tree { /** * This is a specialization of the TreeTraits class to the BinarySpaceTree tree * type. It defines characteristics of the binary space tree, and is used to * help write tree-independent (but still optimized) tree-based algorithms. See * mlpack/core/tree/tree_traits.hpp for more information. */ template class BoundType, template class SplitType> class TreeTraits> { public: /** * Each binary space tree node has two children which represent * non-overlapping subsets of the space which the node represents. Therefore, * children are not overlapping. */ static const bool HasOverlappingChildren = false; /** * Each binary space tree node doesn't share points with any other node. */ static const bool HasDuplicatedPoints = false; /** * There is no guarantee that the first point in a node is its centroid. */ static const bool FirstPointIsCentroid = false; /** * Points are not contained at multiple levels of the binary space tree. */ static const bool HasSelfChildren = false; /** * Points are rearranged during building of the tree. */ static const bool RearrangesDataset = true; /** * This is always a binary tree. */ static const bool BinaryTree = true; /** * Binary space trees don't have duplicated points, so NumDescendants() * represents the number of unique descendant points. */ static const bool UniqueNumDescendants = true; }; /** * This is a specialization of the TreeType class to the max-split random * projection tree. The only difference with general BinarySpaceTree is that the * tree can have overlapping children. */ template class BoundType> class TreeTraits> { public: /** * Children of a random projection tree node may overlap. */ static const bool HasOverlappingChildren = true; /** * The tree has not got duplicated points. */ static const bool HasDuplicatedPoints = false; /** * There is no guarantee that the first point in a node is its centroid. */ static const bool FirstPointIsCentroid = false; /** * Points are not contained at multiple levels of the binary space tree. */ static const bool HasSelfChildren = false; /** * Points are rearranged during building of the tree. */ static const bool RearrangesDataset = true; /** * This is always a binary tree. */ static const bool BinaryTree = true; /** * Binary space trees don't have duplicated points, so NumDescendants() * represents the number of unique descendant points. */ static const bool UniqueNumDescendants = true; }; /** * This is a specialization of the TreeType class to the mean-split random * projection tree. The only difference with general BinarySpaceTree is that the * tree can have overlapping children. 
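 *
 * For instance, tree-independent code can branch on these compile-time flags
 * (a minimal sketch; TreeType stands for any tree satisfying the TreeType
 * policy):
 *
 * @code
 * if (tree::TreeTraits<TreeType>::HasOverlappingChildren)
 * {
 *   // Sibling regions may intersect, so a pruning rule cannot assume that
 *   // a point is contained in only one child.
 * }
 * @endcode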
*/ template class BoundType> class TreeTraits> { public: /** * Children of a random projection tree node may overlap. */ static const bool HasOverlappingChildren = true; /** * The tree has not got duplicated points. */ static const bool HasDuplicatedPoints = false; /** * There is no guarantee that the first point in a node is its centroid. */ static const bool FirstPointIsCentroid = false; /** * Points are not contained at multiple levels of the binary space tree. */ static const bool HasSelfChildren = false; /** * Points are rearranged during building of the tree. */ static const bool RearrangesDataset = true; /** * This is always a binary tree. */ static const bool BinaryTree = true; /** * Binary space trees don't have duplicated points, so NumDescendants() * represents the number of unique descendant points. */ static const bool UniqueNumDescendants = true; }; /** * This is a specialization of the TreeType class to the BallTree tree type. * The only difference with general BinarySpaceTree is that BallTree can have * overlapping children. * See mlpack/core/tree/tree_traits.hpp for more information. */ template class SplitType> class TreeTraits> { public: static const bool HasOverlappingChildren = true; static const bool HasDuplicatedPoints = false; static const bool FirstPointIsCentroid = false; static const bool HasSelfChildren = false; static const bool RearrangesDataset = true; static const bool BinaryTree = true; static const bool UniqueNumDescendants = true; }; /** * This is a specialization of the TreeType class to an arbitrary tree with * HollowBallBound (currently only the vantage point tree is supported). * The only difference with general BinarySpaceTree is that the tree can have * overlapping children. */ template class SplitType> class TreeTraits> { public: static const bool HasOverlappingChildren = true; static const bool HasDuplicatedPoints = false; static const bool FirstPointIsCentroid = false; static const bool HasSelfChildren = false; static const bool RearrangesDataset = true; static const bool BinaryTree = true; static const bool UniqueNumDescendants = true; }; /** * This is a specialization of the TreeType class to the UBTree tree type. * The only difference with general BinarySpaceTree is that UBTree can have * overlapping children. * See mlpack/core/tree/tree_traits.hpp for more information. */ template class SplitType> class TreeTraits> { public: static const bool HasOverlappingChildren = true; static const bool HasDuplicatedPoints = false; static const bool FirstPointIsCentroid = false; static const bool HasSelfChildren = false; static const bool RearrangesDataset = true; static const bool BinaryTree = true; static const bool UniqueNumDescendants = true; }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/typedef.hpp000066400000000000000000000262171315013601400245450ustar00rootroot00000000000000/** * @file typedef.hpp * @author Ryan Curtin * * Template typedefs for the BinarySpaceTree class that satisfy the requirements * of the TreeType policy class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_TYPEDEF_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_TYPEDEF_HPP // In case it hasn't been included yet. 
#include "../binary_space_tree.hpp" namespace mlpack { namespace tree { /** * The standard midpoint-split kd-tree. This is not the original formulation by * Bentley but instead the later formulation by Deng and Moore, which only holds * points in the leaves of the tree. When recursively splitting nodes, the * KDTree class select the dimension with maximum variance to split on, and * picks the midpoint of the range in that dimension as the value on which to * split nodes. * * For more information, see the following papers. * * @code * @article{bentley1975multidimensional, * title={Multidimensional binary search trees used for associative searching}, * author={Bentley, J.L.}, * journal={Communications of the ACM}, * volume={18}, * number={9}, * pages={509--517}, * year={1975}, * publisher={ACM} * } * * @inproceedings{deng1995multiresolution, * title={Multiresolution instance-based learning}, * author={Deng, K. and Moore, A.W.}, * booktitle={Proceedings of the 1995 International Joint Conference on AI * (IJCAI-95)}, * pages={1233--1239}, * year={1995} * } * @endcode * * This template typedef satisfies the TreeType policy API. * * @see @ref trees, BinarySpaceTree, MeanSplitKDTree */ template using KDTree = BinarySpaceTree; /** * A mean-split kd-tree. This is the same as the KDTree, but this particular * implementation will use the mean of the data in the split dimension as the * value on which to split, instead of the midpoint. This can sometimes give * better performance, but it is not always clear which type of tree is best. * * This template typedef satisfies the TreeType policy API. * * @see @ref trees, BinarySpaceTree, KDTree */ template using MeanSplitKDTree = BinarySpaceTree; /** * A midpoint-split ball tree. This tree holds its points only in the leaves, * similar to the KDTree and MeanSplitKDTree. However, the bounding shape of * each node is a ball, not a hyper-rectangle. This can make the ball tree * advantageous in some higher-dimensional situations and for some datasets. * The tree construction algorithm here is the same as Omohundro's 'K-d * construction algorithm', except the splitting value is the midpoint, not the * median. This can result in trees that better reflect the data, although they * may be unbalanced. * * @code * @techreport{omohundro1989five, * author={S.M. Omohundro}, * title={Five balltree construction algorithms}, * year={1989}, * institution={University of California, Berkeley International Computer * Science Institute Technical Reports}, * number={TR-89-063} * } * @endcode * * This template typedef satisfies the TreeType policy API. * * @see @ref trees, BinarySpaceTree, KDTree, MeanSplitBallTree */ template using BallTree = BinarySpaceTree; /** * A mean-split ball tree. This tree, like the BallTree, holds its points only * in the leaves. The tree construction algorithm here is the same as * Omohundro's 'K-dc onstruction algorithm', except the splitting value is the * mean, not the median. This can result in trees that better reflect the data, * although they may be unbalanced. * * @code * @techreport{omohundro1989five, * author={S.M. Omohundro}, * title={Five balltree construction algorithms}, * year={1989}, * institution={University of California, Berkeley International Computer * Science Institute Technical Reports}, * number={TR-89-063} * } * @endcode * * This template typedef satisfies the TreeType policy API. 
* * @see @ref trees, BinarySpaceTree, BallTree, MeanSplitKDTree */ template using MeanSplitBallTree = BinarySpaceTree; /** * The vantage point tree (which is also called the metric tree; vantage point * trees and metric trees were invented independently by Yianilos and Uhlmann) * is a kind of binary space tree. When recursively splitting nodes, the VPTree * class selects the vantage point and splits the node according to the distance * to this point. Thus, points that are closer to the vantage point form the * inner subtree. Other points form the outer subtree. The vantage point is * contained in the first (inner) node. * * This implementation differs from the original algorithms. Namely, vantage * points are not contained in intermediate nodes. The tree has points only in * the leaves of the tree. * * For more information, see the following papers. * * @code * @inproceedings{yianilos1993vptrees, * author = {Yianilos, Peter N.}, * title = {Data Structures and Algorithms for Nearest Neighbor Search in * General Metric Spaces}, * booktitle = {Proceedings of the Fourth Annual ACM-SIAM Symposium on * Discrete Algorithms}, * series = {SODA '93}, * year = {1993}, * isbn = {0-89871-313-7}, * pages = {311--321}, * numpages = {11}, * publisher = {Society for Industrial and Applied Mathematics}, * address = {Philadelphia, PA, USA} * } * * @article{uhlmann1991metrictrees, * author = {Jeffrey K. Uhlmann}, * title = {Satisfying general proximity / similarity queries with metric * trees}, * journal = {Information Processing Letters}, * volume = {40}, * number = {4}, * pages = {175 - 179}, * year = {1991}, * } * @endcode * * This template typedef satisfies the TreeType policy API. * * @see @ref trees, BinarySpaceTree, VantagePointTree, VPTree */ template using VPTreeSplit = VantagePointSplit; template using VPTree = BinarySpaceTree; /** * A max-split random projection tree. When recursively splitting nodes, the * MaxRPTree class selects a random hyperplane and splits a node by the * hyperplane. The tree holds points in leaf nodes. In contrast to the k-d tree, * children of a MaxRPTree node may overlap. * * @code * @inproceedings{dasgupta2008, * author = {Dasgupta, Sanjoy and Freund, Yoav}, * title = {Random Projection Trees and Low Dimensional Manifolds}, * booktitle = {Proceedings of the Fortieth Annual ACM Symposium on Theory of * Computing}, * series = {STOC '08}, * year = {2008}, * pages = {537--546}, * numpages = {10}, * publisher = {ACM}, * address = {New York, NY, USA}, * } * @endcode * * This template typedef satisfies the TreeType policy API. * * @see @ref trees, BinarySpaceTree, BallTree, MeanSplitKDTree */ template using MaxRPTree = BinarySpaceTree; /** * A mean-split random projection tree. When recursively splitting nodes, the * RPTree class may perform one of two different kinds of split. * Depending on the diameter and the average distance between points, the node * may be split by a random hyperplane or according to the distance from the * mean point. The tree holds points in leaf nodes. In contrast to the k-d tree, * children of an RPTree node may overlap.
* * @code * @inproceedings{dasgupta2008, * author = {Dasgupta, Sanjoy and Freund, Yoav}, * title = {Random Projection Trees and Low Dimensional Manifolds}, * booktitle = {Proceedings of the Fortieth Annual ACM Symposium on Theory of * Computing}, * series = {STOC '08}, * year = {2008}, * pages = {537--546}, * numpages = {10}, * publisher = {ACM}, * address = {New York, NY, USA}, * } * @endcode * * This template typedef satisfies the TreeType policy API. * * @see @ref trees, BinarySpaceTree, BallTree, MeanSplitKDTree */ template using RPTree = BinarySpaceTree; /** * The Universal B-tree. When recursively splitting nodes, the class * calculates addresses of all points and splits each node according to the * median address. Children may overlap since the implementation * of a tighter bound requires a lot of arithmetic operations. In order to get * a tighter bound increase the CellBound::maxNumBounds constant. * * @code * @inproceedings{bayer1997, * author = {Bayer, Rudolf}, * title = {The Universal B-Tree for Multidimensional Indexing: General * Concepts}, * booktitle = {Proceedings of the International Conference on Worldwide * Computing and Its Applications}, * series = {WWCA '97}, * year = {1997}, * isbn = {3-540-63343-X}, * pages = {198--209}, * numpages = {12}, * publisher = {Springer-Verlag}, * address = {London, UK, UK}, * } * @endcode * * This template typedef satisfies the TreeType policy API. * * @see @ref trees, BinarySpaceTree, BallTree, MeanSplitKDTree */ template using UBTree = BinarySpaceTree; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/ub_tree_split.hpp000066400000000000000000000101051315013601400257320ustar00rootroot00000000000000/** * @file ub_tree_split.hpp * @author Mikhail Lozhnikov * * Definition of UBTreeSplit, a class that splits the space according * to the median address of points contained in the node. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_UB_TREE_SPLIT_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_UB_TREE_SPLIT_HPP #include #include "../address.hpp" namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { /** * Split a node into two parts according to the median address of points * contained in the node. The class reorders the dataset such that points * with lower addresses belong to the left subtree and points with high * addresses belong to the right subtree. */ template class UBTreeSplit { public: //! The type of an address element. typedef typename std::conditional::type AddressElemType; //! An information about the partition. struct SplitInfo { //! This vector contains addresses of all points in the dataset. std::vector, size_t>>* addresses; }; /** * Split the node according to the median address of points contained in the * node. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo An information about the split (not used here). */ bool SplitNode(BoundType& bound, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo); /** * Rearrange the dataset according to the addresses. 
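 *
 * The address machinery this relies on can be sketched as follows (an
 * illustration only; see address.hpp for the exact encoding):
 *
 * @code
 * arma::Col<AddressElemType> address(data.n_rows, arma::fill::zeros);
 * bound::addr::PointToAddress(address, data.col(0));
 * // Addresses are ordered with bound::addr::CompareAddresses(), and the
 * // whole dataset is sorted by that ordering when the root node is split.
 * @endcode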
* * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo); /** * Rearrange the dataset according to the addresses and return the list * of changed indices. * * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. * @param oldFromNew Vector which will be filled with the old positions for * each new point. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo, std::vector& oldFromNew); private: //! This vector contains addresses of all points in the dataset. std::vector, size_t>> addresses; /** * Calculate addresses for all points in the dataset. * * @param data The dataset used by the binary space tree. */ void InitializeAddresses(const MatType& data); //! A comparator for sorting addresses. static bool ComparePair( const std::pair, size_t>& p1, const std::pair, size_t>& p2) { return bound::addr::CompareAddresses(p1.first, p2.first) < 0; } }; } // namespace tree } // namespace mlpack // Include implementation. #include "ub_tree_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/ub_tree_split_impl.hpp000066400000000000000000000153461315013601400267670ustar00rootroot00000000000000/** * @file ub_tree_split_impl.hpp * @author Mikhail Lozhnikov * * Implementation of UBTreeSplit, a class that splits a node according * to the median address of points contained in the node. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_UB_TREE_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_UB_TREE_SPLIT_IMPL_HPP #include "ub_tree_split.hpp" #include namespace mlpack { namespace tree { template bool UBTreeSplit::SplitNode(BoundType& bound, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo) { constexpr size_t order = sizeof(AddressElemType) * CHAR_BIT; if (begin == 0 && count == data.n_cols) { // Calculate all addresses. InitializeAddresses(data); // Probably this is not a good idea. Maybe it is better to get // a number of distinct samples and find the median. std::sort(addresses.begin(), addresses.end(), ComparePair); // Save the vector in order to rearrange the dataset later. splitInfo.addresses = &addresses; } else { // We have already rearranged the dataset. splitInfo.addresses = NULL; } // The bound shouldn't contain too many subrectangles. // In order to minimize the number of hyperrectangles we set last bits // of the last address in the node to 1 and last bits of the first address // in the next node to zero in such a way that the ordering is not // disturbed. if (begin + count < data.n_cols) { // Omit leading equal bits. 
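// (The two loops below locate the first bit at which the neighboring
// addresses differ; every less significant bit is then forced to 1 in lo --
// and, in the mirrored block further down, to 0 in hi -- which keeps the
// ordering intact while minimizing the number of hyperrectangles each node's
// bound has to represent.)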
size_t row = 0; arma::Col& lo = addresses[begin + count - 1].first; const arma::Col& hi = addresses[begin + count].first; for (; row < data.n_rows; row++) if (lo[row] != hi[row]) break; size_t bit = 0; for (; bit < order; bit++) if ((lo[row] & ((AddressElemType) 1 << (order - 1 - bit))) != (hi[row] & ((AddressElemType) 1 << (order - 1 - bit)))) break; bit++; // Replace insignificant bits. if (bit == order) { bit = 0; row++; } else { for (; bit < order; bit++) lo[row] |= ((AddressElemType) 1 << (order - 1 - bit)); row++; } for (; row < data.n_rows; row++) for (; bit < order; bit++) lo[row] |= ((AddressElemType) 1 << (order - 1 - bit)); } // The bound shouldn't contain too many subrectangles. // In order to minimize the number of hyperrectangles we set last bits // of the first address in the next node to 0 and last bits of the last // address in the previous node to 1 in such a way that the ordering is not // disturbed. if (begin > 0) { // Omit leading equal bits. size_t row = 0; const arma::Col& lo = addresses[begin - 1].first; arma::Col& hi = addresses[begin].first; for (; row < data.n_rows; row++) if (lo[row] != hi[row]) break; size_t bit = 0; for (; bit < order; bit++) if ((lo[row] & ((AddressElemType) 1 << (order - 1 - bit))) != (hi[row] & ((AddressElemType) 1 << (order - 1 - bit)))) break; bit++; // Replace insignificant bits. if (bit == order) { bit = 0; row++; } else { for (; bit < order; bit++) hi[row] &= ~((AddressElemType) 1 << (order - 1 - bit)); row++; } for (; row < data.n_rows; row++) for (; bit < order; bit++) hi[row] &= ~((AddressElemType) 1 << (order - 1 - bit)); } // Set the minimum and the maximum addresses. for (size_t k = 0; k < bound.Dim(); k++) { bound.LoAddress()[k] = addresses[begin].first[k]; bound.HiAddress()[k] = addresses[begin + count - 1].first[k]; } bound.UpdateAddressBounds(data.cols(begin, begin + count - 1)); return true; } template void UBTreeSplit::InitializeAddresses(const MatType& data) { addresses.resize(data.n_cols); // Calculate all addresses. for (size_t i = 0; i < data.n_cols; i++) { addresses[i].first.zeros(data.n_rows); bound::addr::PointToAddress(addresses[i].first, data.col(i)); addresses[i].second = i; } } template size_t UBTreeSplit::PerformSplit( MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo) { // For the first time we have to rearrange the dataset. if (splitInfo.addresses) { std::vector newFromOld(data.n_cols); std::vector oldFromNew(data.n_cols); for (size_t i = 0; i < splitInfo.addresses->size(); i++) { newFromOld[i] = i; oldFromNew[i] = i; } for (size_t i = 0; i < splitInfo.addresses->size(); i++) { size_t index = (*splitInfo.addresses)[i].second; size_t oldI = oldFromNew[i]; size_t newIndex = newFromOld[index]; data.swap_cols(i, newFromOld[index]); size_t tmp = newFromOld[index]; newFromOld[index] = i; newFromOld[oldI] = tmp; tmp = oldFromNew[i]; oldFromNew[i] = oldFromNew[newIndex]; oldFromNew[newIndex] = tmp; } } // Since the dataset is sorted we can easily obtain the split column. return begin + count / 2; } template size_t UBTreeSplit::PerformSplit( MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo, std::vector& oldFromNew) { // For the first time we have to rearrange the dataset. 
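// (The block below applies, in place, the permutation that was recorded in
// splitInfo.addresses when the root was split: each swap_cols() call moves
// one point into its sorted position, and newFromOld/oldFromNew are updated
// on every swap so that each original column is tracked to wherever it
// currently lives.)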
if (splitInfo.addresses) { std::vector newFromOld(data.n_cols); for (size_t i = 0; i < splitInfo.addresses->size(); i++) newFromOld[i] = i; for (size_t i = 0; i < splitInfo.addresses->size(); i++) { size_t index = (*splitInfo.addresses)[i].second; size_t oldI = oldFromNew[i]; size_t newIndex = newFromOld[index]; data.swap_cols(i, newFromOld[index]); size_t tmp = newFromOld[index]; newFromOld[index] = i; newFromOld[oldI] = tmp; tmp = oldFromNew[i]; oldFromNew[i] = oldFromNew[newIndex]; oldFromNew[newIndex] = tmp; } } // Since the dataset is sorted we can easily obtain the split column. return begin + count / 2; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/vantage_point_split.hpp000066400000000000000000000152101315013601400271450ustar00rootroot00000000000000/** * @file vantage_point_split.hpp * @author Mikhail Lozhnikov * * Definition of class VantagePointSplit, a class that splits a vantage point * tree into two parts using the distance to a certain vantage point. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_VANTAGE_POINT_SPLIT_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_VANTAGE_POINT_SPLIT_HPP #include #include #include namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { /** * The class splits a binary space partitioning tree node according to the * median distance to the vantage point. Thus points that are closer to the * vantage point belong to the left subtree and points that are farther from * the vantage point belong to the right subtree. */ template class VantagePointSplit { public: //! The matrix element type. typedef typename MatType::elem_type ElemType; //! The bounding shape type. typedef typename BoundType::MetricType MetricType; //! A struct that contains an information about the split. struct SplitInfo { //! The vantage point. arma::Col vantagePoint; //! The median distance according to which the node will be split. ElemType mu; //! An instance of the MetricType class. const MetricType* metric; SplitInfo() : mu(0), metric(NULL) { } template SplitInfo(const MetricType& metric, const VecType& vantagePoint, ElemType mu) : vantagePoint(vantagePoint), mu(mu), metric(&metric) { } }; /** * Split the node according to the distance to a vantage point. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo An information about the split. This information contains * the vantage point and the median distance to the vantage point. */ static bool SplitNode(const BoundType& bound, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo); /** * Perform the split process according to the information about the * split. This will order the dataset such that points that belong to the left * subtree are on the left of the split column, and points from the right * subtree are on the right side of the split column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. 
* @param count Number of points in this node. * @param splitInfo The information about the split. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo) { return split::PerformSplit(data, begin, count, splitInfo); } /** * Perform the split process according to the information about the split and * return the list of changed indices. This will order the dataset such that * points that belong to the left subtree are on the left of the split column, * and points from the right subtree are on the right side of the split * column. * * @param bound The bound used for this node. * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. * @param oldFromNew Vector which will be filled with the old positions for * each new point. */ static size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const SplitInfo& splitInfo, std::vector& oldFromNew) { return split::PerformSplit(data, begin, count, splitInfo, oldFromNew); } /** * Indicates that a point should be assigned to the left subtree. * This method returns true if a point should be assigned to the left subtree, * i.e., if the distance from the point to the vantage point is less than the * median value. Otherwise it returns false. * * @param point The point that is being assigned. * @param splitInfo Information about the split. */ template static bool AssignToLeftNode(const VecType& point, const SplitInfo& splitInfo) { return (splitInfo.metric->Evaluate(splitInfo.vantagePoint, point) < splitInfo.mu); } private: /** * Select the best vantage point, i.e., the point with the largest second * moment of the distance from a number of random node points to the vantage * point. First, this method selects no more than MaxNumSamples random * points. Then it evaluates each point, i.e., calculates the corresponding * second moment and selects the point with the largest moment. Each random * point belongs to the node. * * @param metric The metric used by the tree. * @param data The dataset used by the tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param vantagePoint The index of the vantage point in the dataset. * @param mu The median value of the distance from the vantage point to * a number of random points. */ static void SelectVantagePoint(const MetricType& metric, const MatType& data, const size_t begin, const size_t count, size_t& vantagePoint, ElemType& mu); }; } // namespace tree } // namespace mlpack // Include implementation. #include "vantage_point_split_impl.hpp" #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_VANTAGE_POINT_SPLIT_HPP mlpack-2.2.5/src/mlpack/core/tree/binary_space_tree/vantage_point_split_impl.hpp000066400000000000000000000056631315013601400302010ustar00rootroot00000000000000/** * @file vantage_point_split_impl.hpp * @author Mikhail Lozhnikov * * Implementation of class (VantagePointSplit) to split a vantage point * tree according to the median value of the distance to a certain vantage point. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/ #ifndef MLPACK_CORE_TREE_BINARY_SPACE_TREE_VANTAGE_POINT_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_BINARY_SPACE_TREE_VANTAGE_POINT_SPLIT_IMPL_HPP #include "vantage_point_split.hpp" #include namespace mlpack { namespace tree { template bool VantagePointSplit:: SplitNode(const BoundType& bound, MatType& data, const size_t begin, const size_t count, SplitInfo& splitInfo) { ElemType mu = 0; size_t vantagePointIndex; // Find the best vantage point. SelectVantagePoint(bound.Metric(), data, begin, count, vantagePointIndex, mu); // If all points are equal, we can't split. if (mu == 0) return false; splitInfo = SplitInfo(bound.Metric(), data.col(vantagePointIndex), mu); return true; } template void VantagePointSplit:: SelectVantagePoint(const MetricType& metric, const MatType& data, const size_t begin, const size_t count, size_t& vantagePoint, ElemType& mu) { arma::uvec vantagePointCandidates; arma::Col distances(MaxNumSamples); // Get no more than min(MaxNumSamples, count) distinct vantage point // candidates. math::ObtainDistinctSamples(begin, begin + count, MaxNumSamples, vantagePointCandidates); ElemType bestSpread = 0; arma::uvec samples; // Evaluate each candidate. for (size_t i = 0; i < vantagePointCandidates.n_elem; i++) { // Get no more than min(MaxNumSamples, count) random samples. math::ObtainDistinctSamples(begin, begin + count, MaxNumSamples, samples); // Calculate the second moment of the distance to the vantage point // candidate using these random samples. distances.set_size(samples.n_elem); for (size_t j = 0; j < samples.n_elem; j++) distances[j] = metric.Evaluate(data.col(vantagePointCandidates[i]), data.col(samples[j])); const ElemType spread = arma::sum(distances % distances) / samples.n_elem; if (spread > bestSpread) { bestSpread = spread; vantagePoint = vantagePointCandidates[i]; // Calculate the median value of the distance from the vantage point // candidate to these samples. mu = arma::median(distances); } } assert(bestSpread > 0); } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_VANTAGE_POINT_SPLIT_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/bound_traits.hpp000066400000000000000000000021501315013601400221120ustar00rootroot00000000000000/** * @file bound_traits.hpp * @author Ryan Curtin * * A class for template metaprogramming traits for bounds. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BOUND_TRAITS_HPP #define MLPACK_CORE_TREE_BOUND_TRAITS_HPP namespace mlpack { namespace bound { /** * A class to obtain compile-time traits about BoundType classes. If you are * writing your own BoundType class, you should make a template specialization * in order to set the values correctly. * * @see TreeTraits, KernelTraits */ template struct BoundTraits { //! If true, then the bounds for each dimension are tight. If false, then the //! bounds for each dimension may be looser than the range of all points held //! in the bound. This defaults to false. static const bool HasTightBounds = false; }; } // namespace bound } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/bounds.hpp000066400000000000000000000013131315013601400207100ustar00rootroot00000000000000/** * @file bounds.hpp * * Bounds that are useful for binary space partitioning trees.
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_BOUNDS_HPP #define MLPACK_CORE_TREE_BOUNDS_HPP #include #include #include "bound_traits.hpp" #include "hrectbound.hpp" #include "ballbound.hpp" #include "hollow_ball_bound.hpp" #include "cellbound.hpp" #endif // MLPACK_CORE_TREE_BOUNDS_HPP mlpack-2.2.5/src/mlpack/core/tree/cellbound.hpp000066400000000000000000000237551315013601400214020ustar00rootroot00000000000000/** * @file cellbound.hpp * @author Mikhail Lozhnikov * * Definition of the CellBound class. The class describes a bound that consists * of a number of hyperrectangles. These hyperrectangles do not overlap each * other. The bound is limited by an outer hyperrectangle and two addresses, * the lower address and the high address. Thus, the bound contains all points * included between the lower and the high addresses. * * The notion of addresses is described in the following paper. * @code * @inproceedings{bayer1997, * author = {Bayer, Rudolf}, * title = {The Universal B-Tree for Multidimensional Indexing: General * Concepts}, * booktitle = {Proceedings of the International Conference on Worldwide * Computing and Its Applications}, * series = {WWCA '97}, * year = {1997}, * isbn = {3-540-63343-X}, * pages = {198--209}, * numpages = {12}, * publisher = {Springer-Verlag}, * address = {London, UK, UK}, * } * @endcode * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_CELLBOUND_HPP #define MLPACK_CORE_TREE_CELLBOUND_HPP #include #include #include #include "bound_traits.hpp" #include "address.hpp" namespace mlpack { namespace bound { /** * The CellBound class describes a bound that consists of a number of * hyperrectangles. These hyperrectangles do not overlap each other. The bound * is limited by an outer hyperrectangle and two addresses, the lower address * and the high address. Thus, the bound contains all points included between * the lower and the high addresses. The class caches the minimum bounding * rectangle, the lower and the high addresses and the hyperrectangles * that are described by the addresses. * * The notion of addresses is described in the following paper. * @code * @inproceedings{bayer1997, * author = {Bayer, Rudolf}, * title = {The Universal B-Tree for Multidimensional Indexing: General * Concepts}, * booktitle = {Proceedings of the International Conference on Worldwide * Computing and Its Applications}, * series = {WWCA '97}, * year = {1997}, * isbn = {3-540-63343-X}, * pages = {198--209}, * numpages = {12}, * publisher = {Springer-Verlag}, * address = {London, UK, UK}, * } * @endcode */ template, typename ElemType = double> class CellBound { public: //! Depending on the precision of the tree element type, we may need to use //! uint32_t or uint64_t. typedef typename std::conditional::type AddressElemType; /** * Empty constructor; creates a bound of dimensionality 0. */ CellBound(); /** * Initializes to specified dimensionality with each dimension the empty * set. */ CellBound(const size_t dimension); //! Copy constructor; necessary to prevent memory leaks. 
CellBound(const CellBound& other); //! Same as copy constructor; necessary to prevent memory leaks. CellBound& operator=(const CellBound& other); //! Move constructor: take possession of another bound's information. CellBound(CellBound&& other); //! Destructor: clean up memory. ~CellBound(); /** * Resets all dimensions to the empty set (so that this bound contains * nothing). */ void Clear(); //! Gets the dimensionality. size_t Dim() const { return dim; } //! Get the range for a particular dimension. No bounds checking. Be //! careful: this may make MinWidth() invalid. math::RangeType& operator[](const size_t i) { return bounds[i]; } //! Modify the range for a particular dimension. No bounds checking. const math::RangeType& operator[](const size_t i) const { return bounds[i]; } //! Get lower address. arma::Col& LoAddress() { return loAddress; } //! Modify lower address. const arma::Col& LoAddress() const {return loAddress; } //! Get high address. arma::Col& HiAddress() { return hiAddress; } //! Modify high address. const arma::Col& HiAddress() const {return hiAddress; } //! Get lower bound of each subrectangle. const arma::Mat& LoBound() const { return loBound; } //! Get high bound of each subrectangle. const arma::Mat& HiBound() const { return hiBound; } //! Get the number of subrectangles. size_t NumBounds() const { return numBounds; } //! Get the minimum width of the bound. ElemType MinWidth() const { return minWidth; } //! Modify the minimum width of the bound. ElemType& MinWidth() { return minWidth; } /** * Calculates the center of the range, placing it into the given vector. * * @param center Vector which the center will be written to. */ void Center(arma::Col& center) const; /** * Calculates minimum bound-to-point distance. * * @param point Point to which the minimum distance is requested. */ template ElemType MinDistance(const VecType& point, typename boost::enable_if>* = 0) const; /** * Calculates minimum bound-to-bound distance. * * @param other Bound to which the minimum distance is requested. */ ElemType MinDistance(const CellBound& other) const; /** * Calculates maximum bound-to-point squared distance. * * @param point Point to which the maximum distance is requested. */ template ElemType MaxDistance(const VecType& point, typename boost::enable_if>* = 0) const; /** * Computes maximum distance. * * @param other Bound to which the maximum distance is requested. */ ElemType MaxDistance(const CellBound& other) const; /** * Calculates minimum and maximum bound-to-bound distance. * * @param other Bound to which the minimum and maximum distances are * requested. */ math::RangeType RangeDistance(const CellBound& other) const; /** * Calculates minimum and maximum bound-to-point distance. * * @param point Point to which the minimum and maximum distances are * requested. */ template math::RangeType RangeDistance( const VecType& point, typename boost::enable_if>* = 0) const; /** * Expands this region to include new points. * * @tparam MatType Type of matrix; could be Mat, SpMat, a subview, or just a * vector. * @param data Data points to expand this region to include. */ template CellBound& operator|=(const MatType& data); /** * Expands this region to encompass another bound. */ CellBound& operator|=(const CellBound& other); /** * Determines if a point is within this bound. */ template bool Contains(const VecType& point) const; /** * Calculate the bounds of all subrectangles. You should set the lower and the * high addresses. * * @param data Points that are contained in the node. 
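 *
 * Typical use, following ub_tree_split_impl.hpp above (a sketch; addresses
 * is the sorted address/index list built by UBTreeSplit):
 *
 * @code
 * bound.LoAddress() = addresses[begin].first;
 * bound.HiAddress() = addresses[begin + count - 1].first;
 * bound.UpdateAddressBounds(data.cols(begin, begin + count - 1));
 * @endcode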
*/ template void UpdateAddressBounds(const MatType& data); /** * Returns the diameter of the hyperrectangle (that is, the longest diagonal). */ ElemType Diameter() const; /** * Serialize the bound object. */ template void Serialize(Archive& ar, const unsigned int version); private: //! The precision of the tree element type. static constexpr size_t order = sizeof(AddressElemType) * CHAR_BIT; //! Maximum number of subrectangles. const size_t maxNumBounds = 10; //! The dimensionality of the bound. size_t dim; //! The bounds for each dimension. math::RangeType* bounds; //! Lower bounds of subrectangles. arma::Mat loBound; //! High bounds of subrectangles. arma::Mat hiBound; //! The number of subrectangles. size_t numBounds; //! The lowest address that the bound may contain. arma::Col loAddress; //! The highest address that the bound may contain. arma::Col hiAddress; //! The minimal width of the outer rectangle. ElemType minWidth; /** * Add a subrectangle to the bound. * * @param loCorner The lower corner of the subrectangle that is being added. * @param hiCorner The high corner of the subrectangle that is being added. * @param data Points that are contained in the node. */ template void AddBound(const arma::Col& loCorner, const arma::Col& hiCorner, const MatType& data); /** * Initialize all subrectangles that touch the high address. This function * should be called before InitLowerBound(). * * @param numEqualBits The number of equal leading bits of the lower address * and the high address. * @param data Points that are contained in the node. */ template void InitHighBound(size_t numEqualBits, const MatType& data); /** * Initialize all subrectangles that touch the lower address. This function * should be called after InitHighBound(). * * @param numEqualBits The number of equal leading bits of the lower address * and the high address. * @param data Points that are contained in the node. */ template void InitLowerBound(size_t numEqualBits, const MatType& data); }; // A specialization of BoundTraits for this class. template struct BoundTraits> { //! These bounds are always tight for each dimension. const static bool HasTightBounds = true; }; } // namespace bound } // namespace mlpack #include "cellbound_impl.hpp" #endif // MLPACK_CORE_TREE_CELLBOUND_HPP mlpack-2.2.5/src/mlpack/core/tree/cellbound_impl.hpp000066400000000000000000000671501315013601400224140ustar00rootroot00000000000000/** * @file cellbound_impl.hpp * @author Mikhail Lozhnikov * * Implementation of the CellBound class. The class describes a bound that * consists of a number of hyperrectangles. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_CELLBOUND_IMPL_HPP #define MLPACK_CORE_TREE_CELLBOUND_IMPL_HPP #include // In case it has not been included yet. #include "cellbound.hpp" namespace mlpack { namespace bound { /** * Empty constructor. */ template inline CellBound::CellBound() : dim(0), bounds(NULL), loBound(arma::Mat()), hiBound(arma::Mat()), numBounds(0), loAddress(arma::Col()), hiAddress(arma::Col()), minWidth(0) { /* Nothing to do. */ } /** * Initializes to specified dimensionality with each dimension the empty * set.
*/ template inline CellBound::CellBound(const size_t dimension) : dim(dimension), bounds(new math::RangeType[dim]), loBound(arma::Mat(dim, maxNumBounds)), hiBound(arma::Mat(dim, maxNumBounds)), numBounds(0), loAddress(dim), hiAddress(dim), minWidth(0) { for (size_t k = 0; k < dim ; k++) { loAddress[k] = std::numeric_limits::max(); hiAddress[k] = 0; } } /** * Copy constructor; necessary to prevent memory leaks. */ template inline CellBound::CellBound( const CellBound& other) : dim(other.Dim()), bounds(new math::RangeType[dim]), loBound(other.loBound), hiBound(other.hiBound), numBounds(other.numBounds), loAddress(other.loAddress), hiAddress(other.hiAddress), minWidth(other.MinWidth()) { // Copy other bounds over. for (size_t i = 0; i < dim; i++) bounds[i] = other.bounds[i]; } /** * Same as the copy constructor. */ template inline CellBound& CellBound::operator=( const CellBound& other) { if (dim != other.Dim()) { // Reallocation is necessary; free the old ranges first to avoid a leak. if (bounds) delete[] bounds; dim = other.Dim(); bounds = new math::RangeType[dim]; } loBound = other.loBound; hiBound = other.hiBound; numBounds = other.numBounds; loAddress = other.loAddress; hiAddress = other.hiAddress; // Now copy each of the bound values. for (size_t i = 0; i < dim; i++) bounds[i] = other.bounds[i]; minWidth = other.MinWidth(); return *this; } /** * Move constructor: take possession of another bound's information. */ template inline CellBound::CellBound( CellBound&& other) : dim(other.dim), bounds(other.bounds), loBound(std::move(other.loBound)), hiBound(std::move(other.hiBound)), numBounds(std::move(other.numBounds)), loAddress(std::move(other.loAddress)), hiAddress(std::move(other.hiAddress)), minWidth(other.minWidth) { // Fix the other bound. other.dim = 0; other.bounds = NULL; other.minWidth = 0.0; } /** * Destructor: clean up memory. */ template inline CellBound::~CellBound() { if (bounds) delete[] bounds; } /** * Resets all dimensions to the empty set. */ template inline void CellBound::Clear() { for (size_t k = 0; k < dim; k++) { bounds[k] = math::RangeType(); loAddress[k] = std::numeric_limits::max(); hiAddress[k] = 0; } minWidth = 0; } /** * Calculates the center of the range, placing it into the given vector. * * @param center Vector which the center will be written to. */ template inline void CellBound::Center( arma::Col& center) const { // Set size correctly if necessary. if (!(center.n_elem == dim)) center.set_size(dim); for (size_t i = 0; i < dim; i++) center(i) = bounds[i].Mid(); } template template void CellBound::AddBound( const arma::Col& loCorner, const arma::Col& hiCorner, const MatType& data) { assert(numBounds < loBound.n_cols); assert(loBound.n_rows == dim); assert(loCorner.n_elem == dim); assert(hiCorner.n_elem == dim); for (size_t k = 0; k < dim; k++) { loBound(k, numBounds) = std::numeric_limits::max(); hiBound(k, numBounds) = std::numeric_limits::lowest(); } for (size_t i = 0; i < data.n_cols; i++) { size_t k = 0; // Check if the point is contained in the hyperrectangle. for (k = 0; k < dim; k++) if (data(k, i) < loCorner[k] || data(k, i) > hiCorner[k]) break; if (k < dim) continue; // The point is not contained in the hyperrectangle. // Tighten the subrectangle to fit the contained points. for (k = 0; k < dim; k++) { loBound(k, numBounds) = std::min(loBound(k, numBounds), data(k, i)); hiBound(k, numBounds) = std::max(hiBound(k, numBounds), data(k, i)); } } for (size_t k = 0; k < dim; k++) if (loBound(k, numBounds) > hiBound(k, numBounds)) return; // The hyperrectangle does not contain points.
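// Reaching this point means at least one point fell inside the candidate subrectangle, so commit it.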
numBounds++; } template template void CellBound::InitHighBound(size_t numEqualBits, const MatType& data) { arma::Col tmpHiAddress(hiAddress); arma::Col tmpLoAddress(hiAddress); arma::Col loCorner(tmpHiAddress.n_elem); arma::Col hiCorner(tmpHiAddress.n_elem); assert(tmpHiAddress.n_elem > 0); // We have to calculate the number of subrectangles since the maximum number // of hyperrectangles is restricted. size_t numCorners = 0; for (size_t pos = numEqualBits + 1; pos < order * tmpHiAddress.n_elem; pos++) { size_t row = pos / order; size_t bit = order - 1 - pos % order; // This hyperrectangle is not contained entirely in the bound. // So, the number of hyperrectangles should be increased. if (tmpHiAddress[row] & ((AddressElemType) 1 << bit)) numCorners++; // We have run out of the allowed number of hyperrectangles. In that case we // enlarge the last hyperrectangle. if (numCorners >= maxNumBounds / 2) tmpHiAddress[row] |= ((AddressElemType) 1 << bit); } size_t pos = order * tmpHiAddress.n_elem - 1; // Find the last hyperrectangle and add it to the bound. for ( ; pos > numEqualBits; pos--) { size_t row = pos / order; size_t bit = order - 1 - pos % order; // All bits of tmpHiAddress after pos are equal to 1 and // all bits of tmpLoAddress after pos are equal to 0. // Thus, tmpHiAddress corresponds to the high corner of the enlarged // rectangle and tmpLoAddress corresponds to the lower corner. if (!(tmpHiAddress[row] & ((AddressElemType) 1 << bit))) { addr::AddressToPoint(loCorner, tmpLoAddress); addr::AddressToPoint(hiCorner, tmpHiAddress); AddBound(loCorner, hiCorner, data); break; } // Nullify the bit that corresponds to this step. tmpLoAddress[row] &= ~((AddressElemType) 1 << bit); } // Add the enlarged rectangle if we have not done that. if (pos == numEqualBits) { addr::AddressToPoint(loCorner, tmpLoAddress); addr::AddressToPoint(hiCorner, tmpHiAddress); AddBound(loCorner, hiCorner, data); } for ( ; pos > numEqualBits; pos--) { size_t row = pos / order; size_t bit = order - 1 - pos % order; // The lower bound should correspond to this step. tmpLoAddress[row] &= ~((AddressElemType) 1 << bit); if (tmpHiAddress[row] & ((AddressElemType) 1 << bit)) { // This hyperrectangle is contained entirely in the bound and does not // overlap with other hyperrectangles since loAddress is less than // tmpLoAddress and tmpHiAddress is less than the lower addresses // of hyperrectangles that we have added previously. tmpHiAddress[row] ^= (AddressElemType) 1 << bit; addr::AddressToPoint(loCorner, tmpLoAddress); addr::AddressToPoint(hiCorner, tmpHiAddress); AddBound(loCorner, hiCorner, data); } // The high bound should correspond to this step. tmpHiAddress[row] |= ((AddressElemType) 1 << bit); } } template template void CellBound::InitLowerBound(size_t numEqualBits, const MatType& data) { arma::Col tmpHiAddress(loAddress); arma::Col tmpLoAddress(loAddress); arma::Col loCorner(tmpHiAddress.n_elem); arma::Col hiCorner(tmpHiAddress.n_elem); // We have to calculate the number of subrectangles since the maximum number // of hyperrectangles is restricted. size_t numCorners = 0; for (size_t pos = numEqualBits + 1; pos < order * tmpHiAddress.n_elem; pos++) { size_t row = pos / order; size_t bit = order - 1 - pos % order; // This hyperrectangle is not contained entirely in the bound. // So, the number of hyperrectangles should be increased. if (!(tmpLoAddress[row] & ((AddressElemType) 1 << bit))) numCorners++; // We have run out of the allowed number of hyperrectangles. In that case we // enlarge the last hyperrectangle.
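// InitHighBound() has already consumed numBounds of the available slots, so only maxNumBounds - numBounds subrectangles may still be added here.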
if (numCorners >= maxNumBounds - numBounds) tmpLoAddress[row] &= ~((AddressElemType) 1 << bit); } size_t pos = order * tmpHiAddress.n_elem - 1; // Find the last hyperrectangle and add it to the bound. for ( ; pos > numEqualBits; pos--) { size_t row = pos / order; size_t bit = order - 1 - pos % order; // All bits of tmpHiAddress after pos are equal to 1 and // all bits of tmpLoAddress after pos are equal to 0. // Thus, tmpHiAddress corresponds to the high corner of the enlarged // rectangle and tmpLoAddress corresponds to the lower corner. if (tmpLoAddress[row] & ((AddressElemType) 1 << bit)) { addr::AddressToPoint(loCorner, tmpLoAddress); addr::AddressToPoint(hiCorner, tmpHiAddress); AddBound(loCorner, hiCorner, data); break; } // Enlarge the hyperrectangle at this step since it is contained // entirely in the bound. tmpHiAddress[row] |= ((AddressElemType) 1 << bit); } // Add the enlarged rectangle if we have not done that. if (pos == numEqualBits) { addr::AddressToPoint(loCorner, tmpLoAddress); addr::AddressToPoint(hiCorner, tmpHiAddress); AddBound(loCorner, hiCorner, data); } for ( ; pos > numEqualBits; pos--) { size_t row = pos / order; size_t bit = order - 1 - pos % order; // The high bound should correspond to this step. tmpHiAddress[row] |= ((AddressElemType) 1 << bit); if (!(tmpLoAddress[row] & ((AddressElemType) 1 << bit))) { // This hyperrectangle is contained entirely in the bound and does not // overlap with other hyperrectangles since hiAddress is greater than // tmpHiAddress and tmpLoAddress is greater than the high addresses // of hyperrectangles that we have added previously. tmpLoAddress[row] ^= (AddressElemType) 1 << bit; addr::AddressToPoint(loCorner, tmpLoAddress); addr::AddressToPoint(hiCorner, tmpHiAddress); AddBound(loCorner, hiCorner, data); } // The lower bound should correspond to this step. tmpLoAddress[row] &= ~((AddressElemType) 1 << bit); } } template template void CellBound::UpdateAddressBounds(const MatType& data) { numBounds = 0; // Calculate the number of equal leading bits of the lower address and // the high address. size_t row = 0; for ( ; row < hiAddress.n_elem; row++) if (loAddress[row] != hiAddress[row]) break; // If the high address is equal to the lower address. if (row == hiAddress.n_elem) { for (size_t i = 0; i < dim; i++) { loBound(i, 0) = bounds[i].Lo(); hiBound(i, 0) = bounds[i].Hi(); } numBounds = 1; return; } size_t bit = 0; for ( ; bit < order; bit++) if ((loAddress[row] & ((AddressElemType) 1 << (order - 1 - bit))) != (hiAddress[row] & ((AddressElemType) 1 << (order - 1 - bit)))) break; if ((row == hiAddress.n_elem - 1) && (bit == order - 1)) { // If the addresses differ in the last bit. for (size_t i = 0; i < dim; i++) { loBound(i, 0) = bounds[i].Lo(); hiBound(i, 0) = bounds[i].Hi(); } numBounds = 1; return; } size_t numEqualBits = row * order + bit; InitHighBound(numEqualBits, data); InitLowerBound(numEqualBits, data); assert(numBounds <= maxNumBounds); if (numBounds == 0) { // This should never happen, but fall back to the outer rectangle just in // case. for (size_t i = 0; i < dim; i++) { loBound(i, 0) = bounds[i].Lo(); hiBound(i, 0) = bounds[i].Hi(); } numBounds = 1; } } /** * Calculates minimum bound-to-point squared distance.
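* * Internally, each per-dimension term uses the identity max(x, 0) = (x + |x|) / 2 to avoid branching on whether the point lies below, inside, or above the range; the factor of two is cancelled after the loop.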
*/ template template inline ElemType CellBound::MinDistance( const VecType& point, typename boost::enable_if>* /* junk */) const { Log::Assert(point.n_elem == dim); ElemType minSum = std::numeric_limits::max(); ElemType lower, higher; for (size_t i = 0; i < numBounds; i++) { ElemType sum = 0; for (size_t d = 0; d < dim; d++) { lower = loBound(d, i) - point[d]; higher = point[d] - hiBound(d, i); // Since only one of 'lower' or 'higher' is negative, if we add each's // absolute value to itself and then sum those two, our result is the // nonnegative half of the equation times two; then we raise to power Power. if (MetricType::Power == 1) sum += lower + std::fabs(lower) + higher + std::fabs(higher); else if (MetricType::Power == 2) { ElemType dist = lower + std::fabs(lower) + higher + std::fabs(higher); sum += dist * dist; } else { sum += pow((lower + fabs(lower)) + (higher + fabs(higher)), (ElemType) MetricType::Power); } if (sum >= minSum) break; } if (sum < minSum) minSum = sum; } // Now take the Power'th root (but make sure our result is squared if it needs // to be); then cancel out the constant of 2 (which may have been squared now) // that was introduced earlier. The compiler should optimize out the if // statement entirely. if (MetricType::Power == 1) return minSum * 0.5; else if (MetricType::Power == 2) { if (MetricType::TakeRoot) return (ElemType) std::sqrt(minSum) * 0.5; else return minSum * 0.25; } else { if (MetricType::TakeRoot) return (ElemType) pow((double) minSum, 1.0 / (double) MetricType::Power) / 2.0; else return minSum / pow(2.0, MetricType::Power); } } /** * Calculates minimum bound-to-bound squared distance. */ template ElemType CellBound::MinDistance(const CellBound& other) const { Log::Assert(dim == other.dim); ElemType minSum = std::numeric_limits::max(); ElemType lower, higher; for (size_t i = 0; i < numBounds; i++) for (size_t j = 0; j < other.numBounds; j++) { ElemType sum = 0; for (size_t d = 0; d < dim; d++) { lower = other.loBound(d, j) - hiBound(d, i); higher = loBound(d, i) - other.hiBound(d, j); // We invoke the following: // x + fabs(x) = max(x * 2, 0) // (x * 2)^2 / 4 = x^2 // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) sum += (lower + std::fabs(lower)) + (higher + std::fabs(higher)); else if (MetricType::Power == 2) { ElemType dist = lower + std::fabs(lower) + higher + std::fabs(higher); sum += dist * dist; } else { sum += pow((lower + fabs(lower)) + (higher + fabs(higher)), (ElemType) MetricType::Power); } if (sum >= minSum) break; } if (sum < minSum) minSum = sum; } // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) return minSum * 0.5; else if (MetricType::Power == 2) { if (MetricType::TakeRoot) return (ElemType) std::sqrt(minSum) * 0.5; else return minSum * 0.25; } else { if (MetricType::TakeRoot) return (ElemType) pow((double) minSum, 1.0 / (double) MetricType::Power) / 2.0; else return minSum / pow(2.0, MetricType::Power); } } /** * Calculates maximum bound-to-point squared distance. */ template template inline ElemType CellBound::MaxDistance( const VecType& point, typename boost::enable_if >* /* junk */) const { ElemType maxSum = std::numeric_limits::lowest(); Log::Assert(point.n_elem == dim); for (size_t i = 0; i < numBounds; i++) { ElemType sum = 0; for (size_t d = 0; d < dim; d++) { ElemType v = std::max(fabs(point[d] - loBound(d, i)), fabs(hiBound(d, i) - point[d])); if (MetricType::Power == 1) sum += v; // v is non-negative. 
else if (MetricType::Power == 2) sum += v * v; else sum += std::pow(v, (ElemType) MetricType::Power); } if (sum > maxSum) maxSum = sum; } // The compiler should optimize out this if statement entirely. if (MetricType::TakeRoot) { if (MetricType::Power == 1) return maxSum; else if (MetricType::Power == 2) return (ElemType) std::sqrt(maxSum); else return (ElemType) pow((double) maxSum, 1.0 / (double) MetricType::Power); } else return maxSum; } /** * Computes maximum distance. */ template inline ElemType CellBound::MaxDistance( const CellBound& other) const { ElemType maxSum = std::numeric_limits::lowest(); Log::Assert(dim == other.dim); ElemType v; for (size_t i = 0; i < numBounds; i++) for (size_t j = 0; j < other.numBounds; j++) { ElemType sum = 0; for (size_t d = 0; d < dim; d++) { v = std::max(fabs(other.hiBound(d, j) - loBound(d, i)), fabs(hiBound(d, i) - other.loBound(d, j))); // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) sum += v; // v is non-negative. else if (MetricType::Power == 2) sum += v * v; else sum += std::pow(v, (ElemType) MetricType::Power); } if (sum > maxSum) maxSum = sum; } // The compiler should optimize out this if statement entirely. if (MetricType::TakeRoot) { if (MetricType::Power == 1) return maxSum; else if (MetricType::Power == 2) return (ElemType) std::sqrt(maxSum); else return (ElemType) pow((double) maxSum, 1.0 / (double) MetricType::Power); } else return maxSum; } /** * Calculates minimum and maximum bound-to-bound squared distance. */ template inline math::RangeType CellBound::RangeDistance( const CellBound& other) const { ElemType minLoSum = std::numeric_limits::max(); ElemType maxHiSum = std::numeric_limits::lowest(); Log::Assert(dim == other.dim); ElemType v1, v2, vLo, vHi; for (size_t i = 0; i < numBounds; i++) for (size_t j = 0; j < other.numBounds; j++) { ElemType loSum = 0; ElemType hiSum = 0; for (size_t d = 0; d < dim; d++) { v1 = other.loBound(d, j) - hiBound(d, i); v2 = loBound(d, i) - other.hiBound(d, j); // One of v1 or v2 is negative. if (v1 >= v2) { vHi = -v2; // Make it nonnegative. vLo = (v1 > 0) ? v1 : 0; // Force to be 0 if negative. } else { vHi = -v1; // Make it nonnegative. vLo = (v2 > 0) ? v2 : 0; // Force to be 0 if negative. } // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) { loSum += vLo; // vLo is non-negative. hiSum += vHi; // vHi is non-negative. } else if (MetricType::Power == 2) { loSum += vLo * vLo; hiSum += vHi * vHi; } else { loSum += std::pow(vLo, (ElemType) MetricType::Power); hiSum += std::pow(vHi, (ElemType) MetricType::Power); } } if (loSum < minLoSum) minLoSum = loSum; if (hiSum > maxHiSum) maxHiSum = hiSum; } if (MetricType::TakeRoot) { if (MetricType::Power == 1) return math::RangeType(minLoSum, maxHiSum); else if (MetricType::Power == 2) return math::RangeType((ElemType) std::sqrt(minLoSum), (ElemType) std::sqrt(maxHiSum)); else { return math::RangeType( (ElemType) pow((double) minLoSum, 1.0 / (double) MetricType::Power), (ElemType) pow((double) maxHiSum, 1.0 / (double) MetricType::Power)); } } else return math::RangeType(minLoSum, maxHiSum); } /** * Calculates minimum and maximum bound-to-point squared distance. 
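* * The returned range stores the minimum distance in its lower bound and the maximum distance in its upper bound. A minimal sketch (with 'bound' a CellBound and 'p' a query point, both hypothetical): * @code * math::RangeType r = bound.RangeDistance(p); * // r.Lo() <= distance from p to any point in the bound <= r.Hi(). * @endcode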
*/ template template inline math::RangeType CellBound::RangeDistance( const VecType& point, typename boost::enable_if>* /* junk */) const { ElemType minLoSum = std::numeric_limits::max(); ElemType maxHiSum = std::numeric_limits::lowest(); Log::Assert(point.n_elem == dim); ElemType v1, v2, vLo, vHi; for (size_t i = 0; i < numBounds; i++) { ElemType loSum = 0; ElemType hiSum = 0; for (size_t d = 0; d < dim; d++) { v1 = loBound(d, i) - point[d]; // Negative if point[d] > lo. v2 = point[d] - hiBound(d, i); // Negative if point[d] < hi. // One of v1 or v2 (or both) is negative. if (v1 >= 0) // point[d] <= bounds[d].Lo(). { vHi = -v2; // v2 will be larger but must be negated. vLo = v1; } else // point[d] is between lo and hi, or greater than hi. { if (v2 >= 0) { vHi = -v1; // v1 will be larger, but must be negated. vLo = v2; } else { vHi = -std::min(v1, v2); // Both are negative, but we need the larger. vLo = 0; } } // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) { loSum += vLo; // vLo is non-negative. hiSum += vHi; // vHi is non-negative. } else if (MetricType::Power == 2) { loSum += vLo * vLo; hiSum += vHi * vHi; } else { loSum += std::pow(vLo, (ElemType) MetricType::Power); hiSum += std::pow(vHi, (ElemType) MetricType::Power); } } if (loSum < minLoSum) minLoSum = loSum; if (hiSum > maxHiSum) maxHiSum = hiSum; } if (MetricType::TakeRoot) { if (MetricType::Power == 1) return math::RangeType(minLoSum, maxHiSum); else if (MetricType::Power == 2) return math::RangeType((ElemType) std::sqrt(minLoSum), (ElemType) std::sqrt(maxHiSum)); else { return math::RangeType( (ElemType) pow((double) minLoSum, 1.0 / (double) MetricType::Power), (ElemType) pow((double) maxHiSum, 1.0 / (double) MetricType::Power)); } } else return math::RangeType(minLoSum, maxHiSum); } /** * Expands this region to include new points. */ template template inline CellBound& CellBound::operator|=( const MatType& data) { Log::Assert(data.n_rows == dim); arma::Col mins(arma::min(data, 1)); arma::Col maxs(arma::max(data, 1)); minWidth = std::numeric_limits::max(); for (size_t i = 0; i < dim; i++) { bounds[i] |= math::RangeType(mins[i], maxs[i]); const ElemType width = bounds[i].Width(); if (width < minWidth) minWidth = width; loBound(i, 0) = bounds[i].Lo(); hiBound(i, 0) = bounds[i].Hi(); } numBounds = 1; return *this; } /** * Expands this region to encompass another bound. */ template inline CellBound& CellBound::operator|=( const CellBound& other) { assert(other.dim == dim); minWidth = std::numeric_limits::max(); for (size_t i = 0; i < dim; i++) { bounds[i] |= other.bounds[i]; const ElemType width = bounds[i].Width(); if (width < minWidth) minWidth = width; } if (addr::CompareAddresses(other.loAddress, loAddress) < 0) loAddress = other.loAddress; if (addr::CompareAddresses(other.hiAddress, hiAddress) > 0) hiAddress = other.hiAddress; if (loAddress[0] > hiAddress[0]) { for (size_t i = 0; i < dim; i++) { loBound(i, 0) = bounds[i].Lo(); hiBound(i, 0) = bounds[i].Hi(); } numBounds = 1; } return *this; } /** * Determines if a point is within this bound. */ template template inline bool CellBound::Contains(const VecType& point) const { for (size_t i = 0; i < point.n_elem; i++) { if (!bounds[i].Contains(point(i))) return false; } if (loAddress[0] > hiAddress[0]) return true; arma::Col address(dim); addr::PointToAddress(address, point); return addr::Contains(address, loAddress, hiAddress); } /** * Returns the diameter of the hyperrectangle (that is, the longest diagonal).
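* * For the default L2 metric this is the Euclidean length of the diagonal of the outer rectangle.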
*/ template inline ElemType CellBound::Diameter() const { ElemType d = 0; for (size_t i = 0; i < dim; ++i) d += std::pow(bounds[i].Hi() - bounds[i].Lo(), (ElemType) MetricType::Power); if (MetricType::TakeRoot) return (ElemType) std::pow((double) d, 1.0 / (double) MetricType::Power); else return d; } //! Serialize the bound object. template template void CellBound::Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(dim, "dim"); // Allocate memory for the bounds, if necessary. if (Archive::is_loading::value) { if (bounds) delete[] bounds; bounds = new math::RangeType[dim]; } ar & data::CreateArrayNVP(bounds, dim, "bounds"); ar & data::CreateNVP(minWidth, "minWidth"); ar & data::CreateNVP(loBound, "loBound"); ar & data::CreateNVP(hiBound, "hiBound"); ar & data::CreateNVP(numBounds, "numBounds"); ar & data::CreateNVP(loAddress, "loAddress"); ar & data::CreateNVP(hiAddress, "hiAddress"); } } // namespace bound } // namespace mlpack #endif // MLPACK_CORE_TREE_CELLBOUND_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/cosine_tree/000077500000000000000000000000001315013601400212055ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/tree/cosine_tree/cosine_tree.cpp000066400000000000000000000333071315013601400242160ustar00rootroot00000000000000/** * @file cosine_tree.cpp * @author Siddharth Agrawal * * Implementation of cosine tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "cosine_tree.hpp" #include #include namespace mlpack { namespace tree { CosineTree::CosineTree(const arma::mat& dataset) : dataset(dataset), parent(NULL), left(NULL), right(NULL), numColumns(dataset.n_cols) { // Initialize sizes of column indices and l2 norms. indices.resize(numColumns); l2NormsSquared.zeros(numColumns); // Set indices and calculate squared norms of the columns. for (size_t i = 0; i < numColumns; i++) { indices[i] = i; double l2Norm = arma::norm(dataset.col(i), 2); l2NormsSquared(i) = l2Norm * l2Norm; } // Frobenius norm of columns in the node. frobNormSquared = arma::accu(l2NormsSquared); // Calculate centroid of columns in the node. CalculateCentroid(); splitPointIndex = ColumnSampleLS(); } CosineTree::CosineTree(CosineTree& parentNode, const std::vector& subIndices) : dataset(parentNode.GetDataset()), parent(&parentNode), left(NULL), right(NULL), numColumns(subIndices.size()) { // Initialize sizes of column indices and l2 norms. indices.resize(numColumns); l2NormsSquared.zeros(numColumns); // Set indices and squared norms of the columns. for (size_t i = 0; i < numColumns; i++) { indices[i] = parentNode.indices[subIndices[i]]; l2NormsSquared(i) = parentNode.l2NormsSquared(subIndices[i]); } // Frobenius norm of columns in the node. frobNormSquared = arma::accu(l2NormsSquared); // Calculate centroid of columns in the node. CalculateCentroid(); splitPointIndex = ColumnSampleLS(); } CosineTree::CosineTree(const arma::mat& dataset, const double epsilon, const double delta) : dataset(dataset), delta(delta), parent(NULL), left(NULL), right(NULL) { // Declare the cosine tree priority queue. CosineNodeQueue treeQueue; // Define root node of the tree and add it to the queue. CosineTree root(dataset); arma::vec tempVector = arma::zeros(dataset.n_rows); root.L2Error(-1.0); // We don't know what the error is.
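// Start the root with a zero basis vector; real basis vectors are assigned to its children when the root is split below.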
root.BasisVector(tempVector); treeQueue.push(&root); // Initialize Monte Carlo error estimate for comparison. double monteCarloError = root.FrobNormSquared(); while (monteCarloError > epsilon * root.FrobNormSquared()) { // Pop node from queue with highest projection error. CosineTree* currentNode; currentNode = treeQueue.top(); treeQueue.pop(); // If the priority is 0, we can't improve anything, and we can assume that // we've done the best we can. if (currentNode->L2Error() == 0.0) { Log::Warn << "CosineTree::CosineTree(): could not build tree to " << "desired relative error " << epsilon << "; failing with estimated " << "relative error " << (monteCarloError / root.FrobNormSquared()) << "." << std::endl; break; } // Split the node into left and right children. We assume that this cannot // fail; it might fail if L2Error() is 0, but we have already avoided that // case. currentNode->CosineNodeSplit(); // Obtain pointers to the left and right children of the current node. CosineTree *currentLeft, *currentRight; currentLeft = currentNode->Left(); currentRight = currentNode->Right(); // Calculate basis vectors of left and right children. arma::vec lBasisVector, rBasisVector; ModifiedGramSchmidt(treeQueue, currentLeft->Centroid(), lBasisVector); ModifiedGramSchmidt(treeQueue, currentRight->Centroid(), rBasisVector, &lBasisVector); // Add basis vectors to their respective nodes. currentLeft->BasisVector(lBasisVector); currentRight->BasisVector(rBasisVector); // Calculate Monte Carlo error estimates for child nodes. MonteCarloError(currentLeft, treeQueue, &lBasisVector, &rBasisVector); MonteCarloError(currentRight, treeQueue, &lBasisVector, &rBasisVector); // Push child nodes into the priority queue. treeQueue.push(currentLeft); treeQueue.push(currentRight); // Calculate Monte Carlo error estimate for the root node. monteCarloError = MonteCarloError(&root, treeQueue); } // Construct the subspace basis from the current priority queue. ConstructBasis(treeQueue); } CosineTree::~CosineTree() { if (left) delete left; if (right) delete right; } void CosineTree::ModifiedGramSchmidt(CosineNodeQueue& treeQueue, arma::vec& centroid, arma::vec& newBasisVector, arma::vec* addBasisVector) { // Set new basis vector to centroid. newBasisVector = centroid; // Variables for iterating through the priority queue. CosineTree *currentNode; CosineNodeQueue::const_iterator i = treeQueue.begin(); // For every vector in the current basis, remove its projection from the // centroid. for ( ; i != treeQueue.end(); i++) { currentNode = *i; double projection = arma::dot(currentNode->BasisVector(), centroid); newBasisVector -= projection * currentNode->BasisVector(); } // If an additional basis vector is passed, take it into account. if (addBasisVector) { double projection = arma::dot(*addBasisVector, centroid); newBasisVector -= *addBasisVector * projection; } // Normalize the modified centroid vector. if (arma::norm(newBasisVector, 2)) newBasisVector /= arma::norm(newBasisVector, 2); } double CosineTree::MonteCarloError(CosineTree* node, CosineNodeQueue& treeQueue, arma::vec* addBasisVector1, arma::vec* addBasisVector2) { std::vector sampledIndices; arma::vec probabilities; // Sample O(log m) points from the input node's distribution. // 'm' is the number of columns present in the node. size_t numSamples = log(node->NumColumns()) + 1; node->ColumnSamplesLS(sampledIndices, probabilities, numSamples); // Get a reference to the original dataset.
const arma::mat& dataset = node->GetDataset(); // Initialize weighted projection magnitudes as zeros. arma::vec weightedMagnitudes; weightedMagnitudes.zeros(numSamples); // Set size of projection vector, depending on whether additional basis // vectors are passed. size_t projectionSize; if (addBasisVector1 && addBasisVector2) projectionSize = treeQueue.size() + 2; else projectionSize = treeQueue.size(); // For each sample, calculate the weighted projection onto the current basis. for (size_t i = 0; i < numSamples; i++) { // Initialize projection as a vector of zeros. arma::vec projection; projection.zeros(projectionSize); CosineTree *currentNode; CosineNodeQueue::const_iterator j = treeQueue.begin(); size_t k = 0; // Compute the projection of the sampled vector onto the existing subspace. for ( ; j != treeQueue.end(); j++, k++) { currentNode = *j; projection(k) = arma::dot(dataset.col(sampledIndices[i]), currentNode->BasisVector()); } // If two additional vectors are passed, take their projections. if (addBasisVector1 && addBasisVector2) { projection(k++) = arma::dot(dataset.col(sampledIndices[i]), *addBasisVector1); projection(k) = arma::dot(dataset.col(sampledIndices[i]), *addBasisVector2); } // Calculate the Frobenius norm squared of the projected vector. double frobProjection = arma::norm(projection, "frob"); double frobProjectionSquared = frobProjection * frobProjection; // Calculate the weighted projection magnitude. weightedMagnitudes(i) = frobProjectionSquared / probabilities(i); } // Compute mean and standard deviation of the weighted samples. double mu = arma::mean(weightedMagnitudes); double sigma = arma::stddev(weightedMagnitudes); if (!sigma) { node->L2Error(node->FrobNormSquared() - mu); return (node->FrobNormSquared() - mu); } // Fit a normal distribution using the calculated statistics, and calculate a // lower bound on the magnitudes for the passed 'delta' parameter. boost::math::normal dist(mu, sigma); double lowerBound = boost::math::quantile(dist, delta); // Upper bound on the subspace reconstruction error. node->L2Error(node->FrobNormSquared() - lowerBound); return (node->FrobNormSquared() - lowerBound); } void CosineTree::ConstructBasis(CosineNodeQueue& treeQueue) { // Initialize basis as matrix of zeros. basis.zeros(dataset.n_rows, treeQueue.size()); // Variables for iterating through the priority queue. CosineTree *currentNode; CosineNodeQueue::const_iterator i = treeQueue.begin(); // Transfer basis vectors from the queue to the basis matrix. size_t j = 0; for ( ; i != treeQueue.end(); i++, j++) { currentNode = *i; basis.col(j) = currentNode->BasisVector(); } } void CosineTree::CosineNodeSplit() { // If less than two points, splitting does not make sense---there is nothing // to split. if (numColumns < 2) return; // Calculate cosines with respect to the splitting point. arma::vec cosines; CalculateCosines(cosines); // Compute maximum and minimum cosine values. double cosineMax, cosineMin; cosineMax = arma::max(cosines % (cosines < 1)); cosineMin = arma::min(cosines); std::vector leftIndices, rightIndices; // Split columns into left and right children. The splitting condition for the // column to be in the left child is as follows: // cos_max - cos(i) < cos(i) - cos_min // We deviate from the paper here and use < instead of <= in order to handle // the edge case where cosineMax == cosineMin, and force there to be at least // one point in the right node.
for (size_t i = 0; i < numColumns; i++) { if (cosineMax - cosines(i) < cosines(i) - cosineMin) leftIndices.push_back(i); else rightIndices.push_back(i); } // Split the node into left and right children. left = new CosineTree(*this, leftIndices); right = new CosineTree(*this, rightIndices); } void CosineTree::ColumnSamplesLS(std::vector& sampledIndices, arma::vec& probabilities, size_t numSamples) { // Initialize the cumulative distribution vector size. arma::vec cDistribution; cDistribution.zeros(numColumns + 1); // Calculate cumulative length-squared distribution for the node. for (size_t i = 0; i < numColumns; i++) { cDistribution(i + 1) = cDistribution(i) + (l2NormsSquared(i) / frobNormSquared); } // Initialize sizes of the 'sampledIndices' and 'probabilities' vectors. sampledIndices.resize(numSamples); probabilities.zeros(numSamples); for (size_t i = 0; i < numSamples; i++) { // Generate a random value for sampling. double randValue = arma::randu(); size_t start = 0, end = numColumns, searchIndex; // Sample from the distribution and store corresponding probability. searchIndex = BinarySearch(cDistribution, randValue, start, end); sampledIndices[i] = indices[searchIndex]; probabilities(i) = l2NormsSquared(searchIndex) / frobNormSquared; } } size_t CosineTree::ColumnSampleLS() { // If only one element is present, there can only be one sample. if (numColumns < 2) { return 0; } // Initialize the cumulative distribution vector size. arma::vec cDistribution; cDistribution.zeros(numColumns + 1); // Calculate cumulative length-squared distribution for the node. for (size_t i = 0; i < numColumns; i++) { cDistribution(i + 1) = cDistribution(i) + (l2NormsSquared(i) / frobNormSquared); } // Generate a random value for sampling. double randValue = arma::randu(); size_t start = 0, end = numColumns; // Sample from the distribution. return BinarySearch(cDistribution, randValue, start, end); } size_t CosineTree::BinarySearch(arma::vec& cDistribution, double value, size_t start, size_t end) { size_t pivot = (start + end) / 2; // If pivot is zero, first point is the sampled point. if (!pivot) { return pivot; } // Binary search recursive algorithm. if (value > cDistribution(pivot - 1) && value <= cDistribution(pivot)) { return (pivot - 1); } else if (value < cDistribution(pivot - 1)) { return BinarySearch(cDistribution, value, start, pivot - 1); } else { return BinarySearch(cDistribution, value, pivot + 1, end); } } void CosineTree::CalculateCosines(arma::vec& cosines) { // Initialize cosine vector as a vector of zeros. cosines.zeros(numColumns); for (size_t i = 0; i < numColumns; i++) { // If norm is zero, store cosine value as zero. Else, calculate cosine value // between two vectors. if (l2NormsSquared(i) == 0) { cosines(i) = 0; } else { cosines(i) = std::abs(arma::norm_dot(dataset.col(indices[splitPointIndex]), dataset.col(indices[i]))); } } } void CosineTree::CalculateCentroid() { // Initialize centroid as vector of zeros. centroid.zeros(dataset.n_rows); // Calculate centroid of columns in the node. for (size_t i = 0; i < numColumns; i++) { centroid += dataset.col(indices[i]); } centroid /= numColumns; } } // namespace tree } // namespace mlpack mlpack-2.2.5/src/mlpack/core/tree/cosine_tree/cosine_tree.hpp000066400000000000000000000231031315013601400242140ustar00rootroot00000000000000/** * @file cosine_tree.hpp * @author Siddharth Agrawal * * Definition of Cosine Tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_COSINE_TREE_COSINE_TREE_HPP #define MLPACK_CORE_TREE_COSINE_TREE_COSINE_TREE_HPP #include #include namespace mlpack { namespace tree { // Predeclare classes for CosineNodeQueue typedef. class CompareCosineNode; class CosineTree; // CosineNodeQueue typedef. typedef boost::heap::priority_queue > CosineNodeQueue; class CosineTree { public: /** * CosineTree constructor for the root node of the tree. It initializes the * necessary variables required for splitting of the node, and building the * tree further. It takes a reference to the input matrix and calculates the * relevant variables using it. * * @param dataset Matrix for which cosine tree is constructed. */ CosineTree(const arma::mat& dataset); /** * CosineTree constructor for nodes other than the root node of the tree. It * takes in a reference to the parent node and a list of column indices which * specifies the columns to be included in the node. The function calculates * the relevant variables just like the constructor above. * * @param parentNode Reference to the parent cosine node. * @param subIndices Vector of column indices to be included. */ CosineTree(CosineTree& parentNode, const std::vector& subIndices); /** * Construct the CosineTree and the basis for the given matrix, and the passed * 'epsilon' and 'delta' parameters. The CosineTree is constructed by * splitting nodes in the direction of maximum error, stored using a priority * queue. Basis vectors are added from the left and right children of the * split node. The basis vector from a node is the orthonormalized centroid of * its columns. The splitting continues until the Monte Carlo estimate of the * input matrix's projection on the obtained subspace is less than a fraction * of the norm of the input matrix. * * @param dataset Matrix for which the CosineTree is constructed. * @param epsilon Error tolerance fraction for calculated subspace. * @param delta Cumulative probability for Monte Carlo error lower bound. */ CosineTree(const arma::mat& dataset, const double epsilon, const double delta); /** * Clean up the CosineTree: release allocated memory (including children). */ ~CosineTree(); /** * Calculates the orthonormalization of the passed centroid, with respect to * the current vector subspace. * * @param treeQueue Priority queue of cosine nodes. * @param centroid Centroid of the node being added to the basis. * @param newBasisVector Orthonormalized centroid of the node. * @param addBasisVector Pointer to an additional basis vector. */ void ModifiedGramSchmidt(CosineNodeQueue& treeQueue, arma::vec& centroid, arma::vec& newBasisVector, arma::vec* addBasisVector = NULL); /** * Estimates the squared error of the projection of the input node's matrix * onto the current vector subspace. A normal distribution is fit using * weighted norms of projections of samples drawn from the input node's matrix * columns. The error is calculated as the difference between the Frobenius * norm of the input node's matrix and the lower bound of the normal * distribution. * * @param node Node for which Monte Carlo estimate is calculated. * @param treeQueue Priority queue of cosine nodes. * @param addBasisVector1 Pointer to the first additional basis vector. * @param addBasisVector2 Pointer to the second additional basis vector.
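* @return Monte Carlo estimate of the squared reconstruction error.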
*/ double MonteCarloError(CosineTree* node, CosineNodeQueue& treeQueue, arma::vec* addBasisVector1 = NULL, arma::vec* addBasisVector2 = NULL); /** * Constructs the final basis matrix, after the cosine tree construction. * * @param treeQueue Priority queue of cosine nodes. */ void ConstructBasis(CosineNodeQueue& treeQueue); /** * This function splits the cosine node into two children based on the cosines * of the columns contained in the node, with respect to the sampled splitting * point. The function also calls the CosineTree constructor for the children. */ void CosineNodeSplit(); /** * Sample 'numSamples' points from the Length-Squared distribution of the * cosine node. The function uses 'l2NormsSquared' to calculate the cumulative * probability distribution of the column vectors. The sampling is based on * randomly generated values in the range [0, 1]. */ void ColumnSamplesLS(std::vector& sampledIndices, arma::vec& probabilities, size_t numSamples); /** * Sample a point from the Length-Squared distribution of the cosine node. The * function uses 'l2NormsSquared' to calculate the cumulative probability * distribution of the column vectors. The sampling is based on a randomly * generated value in the range [0, 1]. */ size_t ColumnSampleLS(); /** * Sample a column based on the cumulative Length-Squared distribution of the * cosine node, and a randomly generated value in the range [0, 1]. Binary * search is more efficient than a linear search here. This leads to * a significant speedup when there is a large number of columns to choose from * and when a number of samples must be drawn from the distribution. * * @param cDistribution Cumulative LS distribution of columns in the node. * @param value Randomly generated value in the range [0, 1]. * @param start Starting index of the distribution interval to search in. * @param end Ending index of the distribution interval to search in. */ size_t BinarySearch(arma::vec& cDistribution, double value, size_t start, size_t end); /** * Calculate cosines of the columns present in the node, with respect to the * sampled splitting point. The calculated cosine values are useful for * splitting the node into its children. * * @param cosines Vector to store the cosine values in. */ void CalculateCosines(arma::vec& cosines); /** * Calculate centroid of the columns present in the node. The calculated * centroid is used as a basis vector for the cosine tree being constructed. */ void CalculateCentroid(); //! Returns the basis of the constructed subspace. void GetFinalBasis(arma::mat& finalBasis) { finalBasis = basis; } //! Get a reference to the dataset matrix. const arma::mat& GetDataset() const { return dataset; } //! Get the indices of columns in the node. std::vector& VectorIndices() { return indices; } //! Set the Monte Carlo error. void L2Error(const double error) { this->l2Error = error; } //! Get the Monte Carlo error. double L2Error() const { return l2Error; } //! Get a reference to the centroid vector. arma::vec& Centroid() { return centroid; } //! Set the basis vector of the node. void BasisVector(arma::vec& bVector) { this->basisVector = bVector; } //! Get the basis vector of the node. arma::vec& BasisVector() { return basisVector; } //! Get pointer to the parent node. CosineTree* Parent() const { return parent; } //! Modify the pointer to the parent node. CosineTree*& Parent() { return parent; } //! Get pointer to the left child of the node. CosineTree* Left() const { return left; } //! Modify the pointer to the left child of the node.
CosineTree*& Left() { return left; } //! Get pointer to the right child of the node. CosineTree* Right() const { return right; } //! Modify the pointer to the left child of the node. CosineTree*& Right() { return right; } //! Get number of columns of input matrix in the node. size_t NumColumns() const { return numColumns; } //! Get the Frobenius norm squared of columns in the node. double FrobNormSquared() const { return frobNormSquared; } //! Get the column index of split point of the node. size_t SplitPointIndex() const { return indices[splitPointIndex]; } private: //! Matrix for which cosine tree is constructed. const arma::mat& dataset; //! Cumulative probability for Monte Carlo error lower bound. double delta; //! Subspace basis of the input dataset. arma::mat basis; //! Parent of the node. CosineTree* parent; //! Left child of the node. CosineTree* left; //! Right child of the node. CosineTree* right; //! Indices of columns of input matrix in the node. std::vector indices; //! L2-norm squared of columns in the node. arma::vec l2NormsSquared; //! Centroid of columns of input matrix in the node. arma::vec centroid; //! Orthonormalized basis vector of the node. arma::vec basisVector; //! Index of split point of cosine node. size_t splitPointIndex; //! Number of columns of input matrix in the node. size_t numColumns; //! Monte Carlo error for this node. double l2Error; //! Frobenius norm squared of columns in the node. double frobNormSquared; }; class CompareCosineNode { public: // Comparison function for construction of priority queue. bool operator() (const CosineTree* a, const CosineTree* b) const { return a->L2Error() < b->L2Error(); } }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/cover_tree.hpp000066400000000000000000000015561315013601400215630ustar00rootroot00000000000000/** * @file cover_tree.hpp * @author Ryan Curtin * * Includes all the necessary files to use the CoverTree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_COVER_TREE_HPP #define MLPACK_CORE_TREE_COVER_TREE_HPP #include #include "cover_tree/first_point_is_root.hpp" #include "cover_tree/cover_tree.hpp" #include "cover_tree/single_tree_traverser.hpp" #include "cover_tree/single_tree_traverser_impl.hpp" #include "cover_tree/dual_tree_traverser.hpp" #include "cover_tree/dual_tree_traverser_impl.hpp" #include "cover_tree/traits.hpp" #include "cover_tree/typedef.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/cover_tree/000077500000000000000000000000001315013601400210435ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/tree/cover_tree/cover_tree.hpp000066400000000000000000000540471315013601400237230ustar00rootroot00000000000000/** * @file cover_tree.hpp * @author Ryan Curtin * * Definition of CoverTree, which can be used in place of the BinarySpaceTree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #ifndef MLPACK_CORE_TREE_COVER_TREE_COVER_TREE_HPP #define MLPACK_CORE_TREE_COVER_TREE_COVER_TREE_HPP #include #include #include "../statistic.hpp" #include "first_point_is_root.hpp" namespace mlpack { namespace tree { /** * A cover tree is a tree specifically designed to speed up nearest-neighbor * computation in high-dimensional spaces. Each non-leaf node references a * point and has a nonzero number of children, including a "self-child" which * references the same point. A leaf node represents only one point. * * The tree can be thought of as a hierarchy with the root node at the top level * and the leaf nodes at the bottom level. Each level in the tree has an * assigned 'scale' i. The tree follows these two invariants: * * - nesting: the level C_i is a subset of the level C_{i - 1}. * - covering: all nodes in level C_{i - 1} have at least one node in the * level C_i with distance less than or equal to b^i (exactly one of these * is a parent of the point in level C_{i - 1}). * * Note that in the cover tree paper, there is a third invariant (the * 'separation invariant'), but that does not apply to our implementation, * because we have relaxed the invariant. * * The value 'b' refers to the base, which is a parameter of the tree. These * properties make the cover tree very good for fast, high-dimensional * nearest-neighbor search. * * The theoretical structure of the tree contains many 'implicit' nodes which * only have a "self-child" (a child referencing the same point, but at a lower * scale level). This practical implementation only constructs explicit nodes * -- non-leaf nodes with more than one child. A leaf node has no children, and * its scale level is INT_MIN. * * For more information on cover trees, see * * @code * @inproceedings{ * author = {Beygelzimer, Alina and Kakade, Sham and Langford, John}, * title = {Cover trees for nearest neighbor}, * booktitle = {Proceedings of the 23rd International Conference on Machine * Learning}, * series = {ICML '06}, * year = {2006}, * pages = {97--104} * } * @endcode * * For information on runtime bounds of the nearest-neighbor computation using * cover trees, see the following paper, presented at NIPS 2009: * * @code * @inproceedings{ * author = {Ram, P., and Lee, D., and March, W.B., and Gray, A.G.}, * title = {Linear-time Algorithms for Pairwise Statistical Problems}, * booktitle = {Advances in Neural Information Processing Systems 22}, * editor = {Y. Bengio and D. Schuurmans and J. Lafferty and C.K.I. Williams * and A. Culotta}, * pages = {1527--1535}, * year = {2009} * } * @endcode * * The CoverTree class offers four template parameters; a custom metric type * can be used with MetricType (this class defaults to the Euclidean L2 * metric). The root node's point can be chosen with the RootPointPolicy; by * default, the FirstPointIsRoot policy is used, meaning the first point in the * dataset is used. The StatisticType policy allows you to define statistics * which can be gathered during the creation of the tree. The MatType parameter * selects the type of matrix used to store the dataset. * * @tparam MetricType Metric type to use during tree construction. * @tparam RootPointPolicy Determines which point to use as the root node. * @tparam StatisticType Statistic to be used during tree creation. * @tparam MatType Type of matrix to build the tree on (generally mat or * sp_mat). */ template, typename StatisticType = EmptyStatistic, typename MatType = arma::mat, typename RootPointPolicy = FirstPointIsRoot> class CoverTree { public: //! So that other classes can access the matrix type. typedef MatType Mat; //!
The type held by the matrix type. typedef typename MatType::elem_type ElemType; /** * Create the cover tree with the given dataset and given base. * The dataset will not be modified during the building procedure (unlike * BinarySpaceTree). * * The last argument will be removed in mlpack 1.1.0 (see #274 and #273). * * @param dataset Reference to the dataset to build a tree on. * @param base Base to use during tree building (default 2.0). */ CoverTree(const MatType& dataset, const ElemType base = 2.0, MetricType* metric = NULL); /** * Create the cover tree with the given dataset and the given instantiated * metric. Optionally, set the base. The dataset will not be modified during * the building procedure (unlike BinarySpaceTree). * * @param dataset Reference to the dataset to build a tree on. * @param metric Instantiated metric to use during tree building. * @param base Base to use during tree building (default 2.0). */ CoverTree(const MatType& dataset, MetricType& metric, const ElemType base = 2.0); /** * Create the cover tree with the given dataset, taking ownership of the * dataset. Optionally, set the base. * * @param dataset Reference to the dataset to build a tree on. * @param base Base to use during tree building (default 2.0). */ CoverTree(MatType&& dataset, const ElemType base = 2.0); /** * Create the cover tree with the given dataset and the given instantiated * metric, taking ownership of the dataset. Optionally, set the base. * * @param dataset Reference to the dataset to build a tree on. * @param metric Instantiated metric to use during tree building. * @param base Base to use during tree building (default 2.0). */ CoverTree(MatType&& dataset, MetricType& metric, const ElemType base = 2.0); /** * Construct a child cover tree node. This constructor is not meant to be * used externally, but it could be used to insert another node into a tree. * This procedure uses only one vector for the near set, the far set, and the * used set (this is to prevent unnecessary memory allocation in recursive * calls to this constructor). Therefore, the size of the near set, far set, * and used set must be passed in. The near set will be entirely used up, and * some of the far set may be used. The value of usedSetSize will be set to * the number of points used in the construction of this node, and the value * of farSetSize will be modified to reflect the number of points in the far * set _after_ the construction of this node. * * If you are calling this manually, be careful that the given scale is * as small as possible, or you may be creating an implicit node in your tree. * * @param dataset Reference to the dataset to build a tree on. * @param base Base to use during tree building. * @param pointIndex Index of the point this node references. * @param scale Scale of this level in the tree. * @param parent Parent of this node (NULL indicates no parent). * @param parentDistance Distance to the parent node. * @param indices Array of indices, ordered [ nearSet | farSet | usedSet ]; * will be modified to [ farSet | usedSet ]. * @param distances Array of distances, ordered the same way as the indices. * These represent the distances between the point specified by pointIndex * and each point in the indices array. * @param nearSetSize Size of the near set; if 0, this will be a leaf. * @param farSetSize Size of the far set; may be modified (if this node uses * any points in the far set). * @param usedSetSize The number of points used will be added to this number. 
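* @param metric Instantiated metric to use for distance computations.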
*/ CoverTree(const MatType& dataset, const ElemType base, const size_t pointIndex, const int scale, CoverTree* parent, const ElemType parentDistance, arma::Col& indices, arma::vec& distances, size_t nearSetSize, size_t& farSetSize, size_t& usedSetSize, MetricType& metric); /** * Manually construct a cover tree node; no tree assembly is done in this * constructor, and children must be added manually (use Children()). This * constructor is useful when the tree is being "imported" into the CoverTree * class after being created in some other manner. * * @param dataset Reference to the dataset this node is a part of. * @param base Base that was used for tree building. * @param pointIndex Index of the point in the dataset which this node refers * to. * @param scale Scale of this node's level in the tree. * @param parent Parent node (NULL indicates no parent). * @param parentDistance Distance to parent node point. * @param furthestDescendantDistance Distance to furthest descendant point. * @param metric Instantiated metric (optional). */ CoverTree(const MatType& dataset, const ElemType base, const size_t pointIndex, const int scale, CoverTree* parent, const ElemType parentDistance, const ElemType furthestDescendantDistance, MetricType* metric = NULL); /** * Create a cover tree from another tree. Be careful! This may use a lot of * memory and take a lot of time. This will also make a copy of the dataset. * * @param other Cover tree to copy from. */ CoverTree(const CoverTree& other); /** * Move constructor for a CoverTree; takes possession of all the members of * the given tree. * * @param other Cover Tree to move. */ CoverTree(CoverTree&& other); /** * Create a cover tree from a boost::serialization archive. */ template CoverTree( Archive& ar, const typename boost::enable_if::type* = 0); /** * Delete this cover tree node and its children. */ ~CoverTree(); //! A single-tree cover tree traverser; see single_tree_traverser.hpp for //! implementation. template class SingleTreeTraverser; //! A dual-tree cover tree traverser; see dual_tree_traverser.hpp. template class DualTreeTraverser; template using BreadthFirstDualTreeTraverser = DualTreeTraverser; //! Get a reference to the dataset. const MatType& Dataset() const { return *dataset; } //! Get the index of the point which this node represents. size_t Point() const { return point; } //! For compatibility with other trees; the argument is ignored. size_t Point(const size_t) const { return point; } //! Returns true if the node is a leaf (has no children). bool IsLeaf() const { return (children.size() == 0); } //! Get the number of points held in this node (always one for a cover tree). size_t NumPoints() const { return 1; } //! Get a particular child node. const CoverTree& Child(const size_t index) const { return *children[index]; } //! Modify a particular child node. CoverTree& Child(const size_t index) { return *children[index]; } //! Modify the pointer to a particular child node. CoverTree*& ChildPtr(const size_t index) { return children[index]; } //! Get the number of children. size_t NumChildren() const { return children.size(); } //! Get the children. const std::vector& Children() const { return children; } //! Modify the children manually (maybe not a great idea). std::vector& Children() { return children; } //! Get the number of descendant points. size_t NumDescendants() const; //! Get the index of a particular descendant point. size_t Descendant(const size_t index) const; //! Get the scale of this node. int Scale() const { return scale; } //! Modify the scale of this node. Be careful... int& Scale() { return scale; } //! Get the base. ElemType Base() const { return base; } //!
Modify the base; don't do this, you'll break everything. ElemType& Base() { return base; } //! Get the statistic for this node. const StatisticType& Stat() const { return stat; } //! Modify the statistic for this node. StatisticType& Stat() { return stat; } /** * Return the index of the nearest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template size_t GetNearestChild( const VecType& point, typename boost::enable_if >::type* = 0); /** * Return the index of the furthest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template size_t GetFurthestChild( const VecType& point, typename boost::enable_if >::type* = 0); /** * Return the index of the nearest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ size_t GetNearestChild(const CoverTree& queryNode); /** * Return the index of the furthest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ size_t GetFurthestChild(const CoverTree& queryNode); //! Return the minimum distance to another node. ElemType MinDistance(const CoverTree& other) const; //! Return the minimum distance to another node given that the point-to-point //! distance has already been calculated. ElemType MinDistance(const CoverTree& other, const ElemType distance) const; //! Return the minimum distance to another point. ElemType MinDistance(const arma::vec& other) const; //! Return the minimum distance to another point given that the distance from //! the center to the point has already been calculated. ElemType MinDistance(const arma::vec& other, const ElemType distance) const; //! Return the maximum distance to another node. ElemType MaxDistance(const CoverTree& other) const; //! Return the maximum distance to another node given that the point-to-point //! distance has already been calculated. ElemType MaxDistance(const CoverTree& other, const ElemType distance) const; //! Return the maximum distance to another point. ElemType MaxDistance(const arma::vec& other) const; //! Return the maximum distance to another point given that the distance from //! the center to the point has already been calculated. ElemType MaxDistance(const arma::vec& other, const ElemType distance) const; //! Return the minimum and maximum distance to another node. math::RangeType RangeDistance(const CoverTree& other) const; //! Return the minimum and maximum distance to another node given that the //! point-to-point distance has already been calculated. math::RangeType RangeDistance(const CoverTree& other, const ElemType distance) const; //! Return the minimum and maximum distance to another point. math::RangeType RangeDistance(const arma::vec& other) const; //! Return the minimum and maximum distance to another point given that the //! point-to-point distance has already been calculated. math::RangeType RangeDistance(const arma::vec& other, const ElemType distance) const; //! Get the parent node. CoverTree* Parent() const { return parent; } //! Modify the parent node. CoverTree*& Parent() { return parent; } //! Get the distance to the parent. ElemType ParentDistance() const { return parentDistance; } //! Modify the distance to the parent. ElemType& ParentDistance() { return parentDistance; } //! Get the distance to the furthest point. This is always 0 for cover trees. ElemType FurthestPointDistance() const { return 0.0; } //! 
Get the distance from the center of the node to the furthest descendant. ElemType FurthestDescendantDistance() const { return furthestDescendantDistance; } //! Modify the distance from the center of the node to the furthest //! descendant. ElemType& FurthestDescendantDistance() { return furthestDescendantDistance; } //! Get the minimum distance from the center to any bound edge (this is the //! same as furthestDescendantDistance). ElemType MinimumBoundDistance() const { return furthestDescendantDistance; } //! Get the center of the node and store it in the given vector. void Center(arma::vec& center) const { center = arma::vec(dataset->col(point)); } //! Get the instantiated metric. MetricType& Metric() const { return *metric; } private: //! Reference to the matrix which this tree is built on. const MatType* dataset; //! Index of the point in the matrix which this node represents. size_t point; //! The list of children; the first is the self-child. std::vector children; //! Scale level of the node. int scale; //! The base used to construct the tree. ElemType base; //! The instantiated statistic. StatisticType stat; //! The number of descendant points. size_t numDescendants; //! The parent node (NULL if this is the root of the tree). CoverTree* parent; //! Distance to the parent. ElemType parentDistance; //! Distance to the furthest descendant. ElemType furthestDescendantDistance; //! Whether or not we need to destroy the metric in the destructor. bool localMetric; //! If true, we own the dataset and need to destroy it in the destructor. bool localDataset; //! The metric used for this tree. MetricType* metric; /** * Create the children for this node. */ void CreateChildren(arma::Col& indices, arma::vec& distances, size_t nearSetSize, size_t& farSetSize, size_t& usedSetSize); /** * Fill the vector of distances with the distances between the point specified * by pointIndex and each point in the indices array. The distances of the * first pointSetSize points in indices are calculated (so, this does not * necessarily need to use all of the points in the arrays). * * @param pointIndex Point to build the distances for. * @param indices List of indices to compute distances for. * @param distances Vector to store calculated distances in. * @param pointSetSize Number of points in arrays to calculate distances for. */ void ComputeDistances(const size_t pointIndex, const arma::Col& indices, arma::vec& distances, const size_t pointSetSize); /** * Split the given indices and distances into a near and a far set, returning * the number of points in the near set. The distances must already be * initialized. This will order the indices and distances such that the * points in the near set make up the first part of the array and the far set * makes up the rest: [ nearSet | farSet ]. * * @param indices List of indices; will be reordered. * @param distances List of distances; will be reordered. * @param bound If the distance is less than or equal to this bound, the point * is placed into the near set. * @param pointSetSize Size of point set (because we may be sorting a smaller * list than the indices vector will hold). */ size_t SplitNearFar(arma::Col& indices, arma::vec& distances, const ElemType bound, const size_t pointSetSize); /** * Assuming that the list of indices and distances is sorted as * [ childFarSet | childUsedSet | farSet | usedSet ], * resort the sets so the organization is * [ childFarSet | farSet | childUsedSet | usedSet ]. * * The size_t parameters specify the sizes of each set in the array. 
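 * For example (purely hypothetical sizes): with childFarSetSize = 2,
 * childUsedSetSize = 3, and farSetSize = 2, the prefix
 * [ c1 c2 | u1 u2 u3 | f1 f2 ] is reordered to
 * [ c1 c2 | f1 f2 | u1 u2 u3 ], and the return value is 4
 * (childFarSetSize + farSetSize, the size of the combined far set).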
Only the * ordering of the indices and distances arrays will be modified (not their * actual contents). * * The size of any of the four sets can be zero and this method will handle * that case accordingly. * * @param indices List of indices to sort. * @param distances List of distances to sort. * @param childFarSetSize Number of points in child far set (childFarSet). * @param childUsedSetSize Number of points in child used set (childUsedSet). * @param farSetSize Number of points in far set (farSet). */ size_t SortPointSet(arma::Col& indices, arma::vec& distances, const size_t childFarSetSize, const size_t childUsedSetSize, const size_t farSetSize); void MoveToUsedSet(arma::Col& indices, arma::vec& distances, size_t& nearSetSize, size_t& farSetSize, size_t& usedSetSize, arma::Col& childIndices, const size_t childFarSetSize, const size_t childUsedSetSize); size_t PruneFarSet(arma::Col& indices, arma::vec& distances, const ElemType bound, const size_t nearSetSize, const size_t pointSetSize); /** * Take a look at the last child (the most recently created one) and remove * any implicit nodes that have been created. */ void RemoveNewImplicitNodes(); protected: /** * A default constructor. This is meant to only be used with * boost::serialization, which is allowed with the friend declaration below. * This does not return a valid tree! This method must be protected, so that * the serialization shim can work with the default constructor. */ CoverTree(); //! Friend access is given for the default constructor. friend class boost::serialization::access; public: /** * Serialize the tree. */ template void Serialize(Archive& ar, const unsigned int /* version */); size_t DistanceComps() const { return distanceComps; } size_t& DistanceComps() { return distanceComps; } private: size_t distanceComps; }; } // namespace tree } // namespace mlpack // Include implementation. #include "cover_tree_impl.hpp" // Include the rest of the pieces, if necessary. #include "../cover_tree.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/cover_tree/cover_tree_impl.hpp000066400000000000000000001441071315013601400247410ustar00rootroot00000000000000/** * @file cover_tree_impl.hpp * @author Ryan Curtin * * Implementation of CoverTree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_COVER_TREE_COVER_TREE_IMPL_HPP #define MLPACK_CORE_TREE_COVER_TREE_COVER_TREE_IMPL_HPP // In case it hasn't already been included. #include "cover_tree.hpp" #include namespace mlpack { namespace tree { // Create the cover tree. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::CoverTree( const MatType& dataset, const ElemType base, MetricType* metric) : dataset(&dataset), point(RootPointPolicy::ChooseRoot(dataset)), scale(INT_MAX), base(base), numDescendants(0), parent(NULL), parentDistance(0), furthestDescendantDistance(0), localMetric(metric == NULL), localDataset(false), metric(metric), distanceComps(0) { // If we need to create a metric, do that. We'll just do it on the heap. if (localMetric) this->metric = new MetricType(); // If there is only one point or zero points in the dataset... uh, we're done. // Technically, if the dataset has zero points, our node is not correct... 
if (dataset.n_cols <= 1) { scale = INT_MIN; return; } // Kick off the building. Create the indices array and the distances array. arma::Col indices = arma::linspace >(1, dataset.n_cols - 1, dataset.n_cols - 1); // This is now [1 2 3 4 ... n]. We must be sure that our point does not // occur. if (point != 0) indices[point - 1] = 0; // Put 0 back into the set; remove what was there. arma::vec distances(dataset.n_cols - 1); // Build the initial distances. ComputeDistances(point, indices, distances, dataset.n_cols - 1); // Create the children. size_t farSetSize = 0; size_t usedSetSize = 0; CreateChildren(indices, distances, dataset.n_cols - 1, farSetSize, usedSetSize); // If we ended up creating only one child, remove the implicit node. while (children.size() == 1) { // Prepare to delete the implicit child node. CoverTree* old = children[0]; // Now take its children and set their parent correctly. children.erase(children.begin()); for (size_t i = 0; i < old->NumChildren(); ++i) { children.push_back(&(old->Child(i))); // Set its parent correctly, and rebuild the statistic. old->Child(i).Parent() = this; old->Child(i).Stat() = StatisticType(old->Child(i)); } // Remove all the children so they don't get erased. old->Children().clear(); // Reduce our own scale. scale = old->Scale(); // Now delete it. delete old; } // Use the furthest descendant distance to determine the scale of the root // node. if (furthestDescendantDistance == 0.0) scale = INT_MIN; else scale = (int) ceil(log(furthestDescendantDistance) / log(base)); // Initialize statistic. stat = StatisticType(*this); Log::Info << distanceComps << " distance computations during tree " << "construction." << std::endl; } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::CoverTree( const MatType& dataset, MetricType& metric, const ElemType base) : dataset(&dataset), point(RootPointPolicy::ChooseRoot(dataset)), scale(INT_MAX), base(base), numDescendants(0), parent(NULL), parentDistance(0), furthestDescendantDistance(0), localMetric(false), localDataset(false), metric(&metric), distanceComps(0) { // If there is only one point or zero points in the dataset... uh, we're done. // Technically, if the dataset has zero points, our node is not correct... if (dataset.n_cols <= 1) { scale = INT_MIN; return; } // Kick off the building. Create the indices array and the distances array. arma::Col indices = arma::linspace >(1, dataset.n_cols - 1, dataset.n_cols - 1); // This is now [1 2 3 4 ... n]. We must be sure that our point does not // occur. if (point != 0) indices[point - 1] = 0; // Put 0 back into the set; remove what was there. arma::vec distances(dataset.n_cols - 1); // Build the initial distances. ComputeDistances(point, indices, distances, dataset.n_cols - 1); // Create the children. size_t farSetSize = 0; size_t usedSetSize = 0; CreateChildren(indices, distances, dataset.n_cols - 1, farSetSize, usedSetSize); // If we ended up creating only one child, remove the implicit node. while (children.size() == 1) { // Prepare to delete the implicit child node. CoverTree* old = children[0]; // Now take its children and set their parent correctly. children.erase(children.begin()); for (size_t i = 0; i < old->NumChildren(); ++i) { children.push_back(&(old->Child(i))); // Set its parent correctly. old->Child(i).Parent() = this; // Rebuild the statistic. old->Child(i).Stat() = StatisticType(old->Child(i)); } // Remove all the children so they don't get erased. 
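// Otherwise, the `delete old` below would recursively destroy the
// grandchildren we just adopted, since the destructor deletes every child
// pointer it holds.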
old->Children().clear(); // Reduce our own scale. scale = old->Scale(); // Now delete it. delete old; } // Use the furthest descendant distance to determine the scale of the root // node. if (furthestDescendantDistance == 0.0) scale = INT_MIN; else scale = (int) ceil(log(furthestDescendantDistance) / log(base)); // Initialize statistic. stat = StatisticType(*this); Log::Info << distanceComps << " distance computations during tree " << "construction." << std::endl; } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::CoverTree( MatType&& data, const ElemType base) : dataset(new MatType(std::move(data))), point(RootPointPolicy::ChooseRoot(dataset)), scale(INT_MAX), base(base), numDescendants(0), parent(NULL), parentDistance(0), furthestDescendantDistance(0), localMetric(true), localDataset(true), distanceComps(0) { // We need to create a metric. We'll just do it on the heap. this->metric = new MetricType(); // If there is only one point or zero points in the dataset... uh, we're done. // Technically, if the dataset has zero points, our node is not correct... if (dataset->n_cols <= 1) { scale = INT_MIN; return; } // Kick off the building. Create the indices array and the distances array. arma::Col indices = arma::linspace >(1, dataset->n_cols - 1, dataset->n_cols - 1); // This is now [1 2 3 4 ... n]. We must be sure that our point does not // occur. if (point != 0) indices[point - 1] = 0; // Put 0 back into the set; remove what was there. arma::vec distances(dataset->n_cols - 1); // Build the initial distances. ComputeDistances(point, indices, distances, dataset->n_cols - 1); // Create the children. size_t farSetSize = 0; size_t usedSetSize = 0; CreateChildren(indices, distances, dataset->n_cols - 1, farSetSize, usedSetSize); // If we ended up creating only one child, remove the implicit node. while (children.size() == 1) { // Prepare to delete the implicit child node. CoverTree* old = children[0]; // Now take its children and set their parent correctly. children.erase(children.begin()); for (size_t i = 0; i < old->NumChildren(); ++i) { children.push_back(&(old->Child(i))); // Set its parent correctly, and rebuild the statistic. old->Child(i).Parent() = this; old->Child(i).Stat() = StatisticType(old->Child(i)); } // Remove all the children so they don't get erased. old->Children().clear(); // Reduce our own scale. scale = old->Scale(); // Now delete it. delete old; } // Use the furthest descendant distance to determine the scale of the root // node. if (furthestDescendantDistance == 0.0) scale = INT_MIN; else scale = (int) ceil(log(furthestDescendantDistance) / log(base)); // Initialize statistic. stat = StatisticType(*this); Log::Info << distanceComps << " distance computations during tree " << "construction." << std::endl; } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::CoverTree( MatType&& data, MetricType& metric, const ElemType base) : dataset(new MatType(std::move(data))), point(RootPointPolicy::ChooseRoot(dataset)), scale(INT_MAX), base(base), numDescendants(0), parent(NULL), parentDistance(0), furthestDescendantDistance(0), localMetric(false), localDataset(true), metric(&metric), distanceComps(0) { // If there is only one point or zero points in the dataset... uh, we're done. // Technically, if the dataset has zero points, our node is not correct... if (dataset->n_cols <= 1) { scale = INT_MIN; return; } // Kick off the building. 
Create the indices array and the distances array. arma::Col indices = arma::linspace >(1, dataset->n_cols - 1, dataset->n_cols - 1); // This is now [1 2 3 4 ... n]. We must be sure that our point does not // occur. if (point != 0) indices[point - 1] = 0; // Put 0 back into the set; remove what was there. arma::vec distances(dataset->n_cols - 1); // Build the initial distances. ComputeDistances(point, indices, distances, dataset->n_cols - 1); // Create the children. size_t farSetSize = 0; size_t usedSetSize = 0; CreateChildren(indices, distances, dataset->n_cols - 1, farSetSize, usedSetSize); // If we ended up creating only one child, remove the implicit node. while (children.size() == 1) { // Prepare to delete the implicit child node. CoverTree* old = children[0]; // Now take its children and set their parent correctly. children.erase(children.begin()); for (size_t i = 0; i < old->NumChildren(); ++i) { children.push_back(&(old->Child(i))); // Set its parent correctly, and rebuild the statistic. old->Child(i).Parent() = this; old->Child(i).Stat() = StatisticType(old->Child(i)); } // Remove all the children so they don't get erased. old->Children().clear(); // Reduce our own scale. scale = old->Scale(); // Now delete it. delete old; } // Use the furthest descendant distance to determine the scale of the root // node. if (furthestDescendantDistance == 0.0) scale = INT_MIN; else scale = (int) ceil(log(furthestDescendantDistance) / log(base)); // Initialize statistic. stat = StatisticType(*this); Log::Info << distanceComps << " distance computations during tree " << "construction." << std::endl; } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::CoverTree( const MatType& dataset, const ElemType base, const size_t pointIndex, const int scale, CoverTree* parent, const ElemType parentDistance, arma::Col& indices, arma::vec& distances, size_t nearSetSize, size_t& farSetSize, size_t& usedSetSize, MetricType& metric) : dataset(&dataset), point(pointIndex), scale(scale), base(base), numDescendants(0), parent(parent), parentDistance(parentDistance), furthestDescendantDistance(0), localMetric(false), localDataset(false), metric(&metric), distanceComps(0) { // If the size of the near set is 0, this is a leaf. if (nearSetSize == 0) { this->scale = INT_MIN; numDescendants = 1; stat = StatisticType(*this); return; } // Otherwise, create the children. CreateChildren(indices, distances, nearSetSize, farSetSize, usedSetSize); // Initialize statistic. stat = StatisticType(*this); } // Manually create a cover tree node. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::CoverTree( const MatType& dataset, const ElemType base, const size_t pointIndex, const int scale, CoverTree* parent, const ElemType parentDistance, const ElemType furthestDescendantDistance, MetricType* metric) : dataset(&dataset), point(pointIndex), scale(scale), base(base), numDescendants(0), parent(parent), parentDistance(parentDistance), furthestDescendantDistance(furthestDescendantDistance), localMetric(metric == NULL), localDataset(false), metric(metric), distanceComps(0) { // If necessary, create a local metric. if (localMetric) this->metric = new MetricType(); // Initialize the statistic. stat = StatisticType(*this); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::CoverTree( const CoverTree& other) : dataset((other.parent == NULL && other.localDataset) ? 
new MatType(*other.dataset) : other.dataset), point(other.point), scale(other.scale), base(other.base), stat(other.stat), numDescendants(other.numDescendants), parent(other.parent), parentDistance(other.parentDistance), furthestDescendantDistance(other.furthestDescendantDistance), localMetric(false), localDataset(other.parent == NULL && other.localDataset), metric(other.metric), distanceComps(0) { // Copy each child by hand. for (size_t i = 0; i < other.NumChildren(); ++i) { children.push_back(new CoverTree(other.Child(i))); children[i]->Parent() = this; } // Propagate matrix, but only if we are the root. if (parent == NULL && localDataset) { std::queue queue; for (size_t i = 0; i < NumChildren(); ++i) queue.push(children[i]); while (!queue.empty()) { CoverTree* node = queue.front(); queue.pop(); node->dataset = dataset; for (size_t i = 0; i < node->NumChildren(); ++i) queue.push(node->children[i]); } } } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::CoverTree( CoverTree&& other) : dataset(other.dataset), point(other.point), children(std::move(other.children)), scale(other.scale), base(other.base), stat(std::move(other.stat)), numDescendants(other.numDescendants), parent(other.parent), parentDistance(other.parentDistance), furthestDescendantDistance(other.furthestDescendantDistance), localMetric(other.localMetric), localDataset(other.localDataset), metric(other.metric), distanceComps(other.distanceComps) { // Set proper parent pointer. for (size_t i = 0; i < children.size(); ++i) children[i]->Parent() = this; other.dataset = NULL; other.point = 0; other.scale = INT_MIN; other.base = 0; other.numDescendants = 0; other.parent = NULL; other.parentDistance = 0; other.furthestDescendantDistance = 0; other.localMetric = false; other.localDataset = false; other.metric = NULL; } // Construct from a boost::serialization archive. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template CoverTree::CoverTree( Archive& ar, const typename boost::enable_if::type*) : CoverTree() // Create an empty CoverTree. { // Now, serialize to our empty tree. ar >> data::CreateNVP(*this, "tree"); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::~CoverTree() { // Delete each child. for (size_t i = 0; i < children.size(); ++i) delete children[i]; // Delete the local metric, if necessary. if (localMetric) delete metric; // Delete the local dataset, if necessary. if (localDataset) delete dataset; } //! Return the number of descendant points. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > inline size_t CoverTree:: NumDescendants() const { return numDescendants; } //! Return the index of a particular descendant point. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > inline size_t CoverTree::Descendant( const size_t index) const { // The first descendant is the point contained within this node. if (index == 0) return point; // Is it in the self-child? if (index < children[0]->NumDescendants()) return children[0]->Descendant(index); // Now check the other children. size_t sum = children[0]->NumDescendants(); for (size_t i = 1; i < children.size(); ++i) { if (index - sum < children[i]->NumDescendants()) return children[i]->Descendant(index - sum); sum += children[i]->NumDescendants(); } // This should never happen. 
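// Returning size_t() - 1 yields the maximum representable size_t value
// (the subtraction wraps around), which serves as an obviously invalid
// index.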
return (size_t() - 1); } /** * Return the index of the nearest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template template size_t CoverTree:: GetNearestChild(const VecType& point, typename boost::enable_if >::type*) { if (IsLeaf()) return 0; ElemType bestDistance = std::numeric_limits::max(); size_t bestIndex = 0; for (size_t i = 0; i < children.size(); ++i) { ElemType distance = children[i]->MinDistance(point); if (distance <= bestDistance) { bestDistance = distance; bestIndex = i; } } return bestIndex; } /** * Return the index of the furthest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template template size_t CoverTree:: GetFurthestChild(const VecType& point, typename boost::enable_if >::type*) { if (IsLeaf()) return 0; ElemType bestDistance = 0; size_t bestIndex = 0; for (size_t i = 0; i < children.size(); ++i) { ElemType distance = children[i]->MaxDistance(point); if (distance >= bestDistance) { bestDistance = distance; bestIndex = i; } } return bestIndex; } /** * Return the index of the nearest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ template size_t CoverTree:: GetNearestChild(const CoverTree& queryNode) { if (IsLeaf()) return 0; ElemType bestDistance = std::numeric_limits::max(); size_t bestIndex = 0; for (size_t i = 0; i < children.size(); ++i) { ElemType distance = children[i]->MinDistance(queryNode); if (distance <= bestDistance) { bestDistance = distance; bestIndex = i; } } return bestIndex; } /** * Return the index of the furthest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ template size_t CoverTree:: GetFurthestChild(const CoverTree& queryNode) { if (IsLeaf()) return 0; ElemType bestDistance = 0; size_t bestIndex = 0; for (size_t i = 0; i < children.size(); ++i) { ElemType distance = children[i]->MaxDistance(queryNode); if (distance >= bestDistance) { bestDistance = distance; bestIndex = i; } } return bestIndex; } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > typename CoverTree::ElemType CoverTree:: MinDistance(const CoverTree& other) const { // Every cover tree node will contain points up to base^(scale + 1) away. return std::max(metric->Evaluate(dataset->col(point), other.Dataset().col(other.Point())) - furthestDescendantDistance - other.FurthestDescendantDistance(), 0.0); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > typename CoverTree::ElemType CoverTree:: MinDistance(const CoverTree& other, const ElemType distance) const { // We already have the distance as evaluated by the metric. 
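// Subtract both nodes' furthest-descendant radii (a triangle-inequality
// bound) and clamp at zero, since distances cannot be negative.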
return std::max(distance - furthestDescendantDistance - other.FurthestDescendantDistance(), 0.0); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > typename CoverTree::ElemType CoverTree:: MinDistance(const arma::vec& other) const { return std::max(metric->Evaluate(dataset->col(point), other) - furthestDescendantDistance, 0.0); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > typename CoverTree::ElemType CoverTree:: MinDistance(const arma::vec& /* other */, const ElemType distance) const { return std::max(distance - furthestDescendantDistance, 0.0); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > typename CoverTree::ElemType CoverTree:: MaxDistance(const CoverTree& other) const { return metric->Evaluate(dataset->col(point), other.Dataset().col(other.Point())) + furthestDescendantDistance + other.FurthestDescendantDistance(); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > typename CoverTree::ElemType CoverTree:: MaxDistance(const CoverTree& other, const ElemType distance) const { // We already have the distance as evaluated by the metric. return distance + furthestDescendantDistance + other.FurthestDescendantDistance(); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > typename CoverTree::ElemType CoverTree:: MaxDistance(const arma::vec& other) const { return metric->Evaluate(dataset->col(point), other) + furthestDescendantDistance; } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > typename CoverTree::ElemType CoverTree:: MaxDistance(const arma::vec& /* other */, const ElemType distance) const { return distance + furthestDescendantDistance; } //! Return the minimum and maximum distance to another node. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > math::RangeType::ElemType> CoverTree:: RangeDistance(const CoverTree& other) const { const ElemType distance = metric->Evaluate(dataset->col(point), other.Dataset().col(other.Point())); math::RangeType result; result.Lo() = distance - furthestDescendantDistance - other.FurthestDescendantDistance(); result.Hi() = distance + furthestDescendantDistance + other.FurthestDescendantDistance(); return result; } //! Return the minimum and maximum distance to another node given that the //! point-to-point distance has already been calculated. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > math::RangeType::ElemType> CoverTree:: RangeDistance(const CoverTree& other, const ElemType distance) const { math::RangeType result; result.Lo() = distance - furthestDescendantDistance - other.FurthestDescendantDistance(); result.Hi() = distance + furthestDescendantDistance + other.FurthestDescendantDistance(); return result; } //! Return the minimum and maximum distance to another point. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > math::RangeType::ElemType> CoverTree:: RangeDistance(const arma::vec& other) const { const ElemType distance = metric->Evaluate(dataset->col(point), other); return math::RangeType(distance - furthestDescendantDistance, distance + furthestDescendantDistance); } //! Return the minimum and maximum distance to another point given that the //! 
point-to-point distance has already been calculated. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > math::RangeType::ElemType> CoverTree:: RangeDistance(const arma::vec& /* other */, const ElemType distance) const { return math::RangeType(distance - furthestDescendantDistance, distance + furthestDescendantDistance); } //! For a newly initialized node, create children using the near and far set. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > inline void CoverTree::CreateChildren( arma::Col& indices, arma::vec& distances, size_t nearSetSize, size_t& farSetSize, size_t& usedSetSize) { // Determine the next scale level. This should be the first level where there // are any points in the far set. So, if we know the maximum distance in the // distances array, this will be the largest i such that // maxDistance > pow(base, i) // and using this for the scale factor should guarantee we are not creating an // implicit node. If the maximum distance is 0, every point in the near set // will be created as a leaf, and a child to this node. We also do not need // to change the furthestChildDistance or furthestDescendantDistance. const ElemType maxDistance = max(distances.rows(0, nearSetSize + farSetSize - 1)); if (maxDistance == 0) { // Make the self child at the lowest possible level. // This should not modify farSetSize or usedSetSize. size_t tempSize = 0; children.push_back(new CoverTree(*dataset, base, point, INT_MIN, this, 0, indices, distances, 0, tempSize, usedSetSize, *metric)); distanceComps += children.back()->DistanceComps(); // Every point in the near set should be a leaf. for (size_t i = 0; i < nearSetSize; ++i) { // farSetSize and usedSetSize will not be modified. children.push_back(new CoverTree(*dataset, base, indices[i], INT_MIN, this, distances[i], indices, distances, 0, tempSize, usedSetSize, *metric)); distanceComps += children.back()->DistanceComps(); usedSetSize++; } // The number of descendants is just the number of children, because each of // them are leaves and contain one point. numDescendants = children.size(); // Re-sort the dataset. We have // [ used | far | other used ] // and we want // [ far | all used ]. SortPointSet(indices, distances, 0, usedSetSize, farSetSize); return; } const int nextScale = std::min(scale, (int) ceil(log(maxDistance) / log(base))) - 1; const ElemType bound = pow(base, nextScale); // First, make the self child. We must split the given near set into the near // set and far set for the self child. size_t childNearSetSize = SplitNearFar(indices, distances, bound, nearSetSize); // Build the self child (recursively). size_t childFarSetSize = nearSetSize - childNearSetSize; size_t childUsedSetSize = 0; children.push_back(new CoverTree(*dataset, base, point, nextScale, this, 0, indices, distances, childNearSetSize, childFarSetSize, childUsedSetSize, *metric)); // Don't double-count the self-child (so, subtract one). numDescendants += children[0]->NumDescendants(); // The self-child can't modify the furthestChildDistance away from 0, but it // can modify the furthestDescendantDistance. furthestDescendantDistance = children[0]->FurthestDescendantDistance(); // Remove any implicit nodes we may have created. 
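// (An implicit node is one whose only child is its self-child; see
// RemoveNewImplicitNodes() below, which collapses such chains.)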
RemoveNewImplicitNodes(); distanceComps += children[0]->DistanceComps(); // Now the arrays, in memory, look like this: // [ childFar | childUsed | far | used ] // but we need to move the used points past our far set: // [ childFar | far | childUsed + used ] // and keeping in mind that childFar = our near set, // [ near | far | childUsed + used ] // is what we are trying to make. SortPointSet(indices, distances, childFarSetSize, childUsedSetSize, farSetSize); // Update size of near set and used set. nearSetSize -= childUsedSetSize; usedSetSize += childUsedSetSize; // Now for each point in the near set, we need to make children. To save // computation later, we'll create an array holding the points in the near // set, and then after each run we'll check which of those (if any) were used // and we will remove them. ...if that's faster. I think it is. while (nearSetSize > 0) { size_t newPointIndex = nearSetSize - 1; // Swap to front if necessary. if (newPointIndex != 0) { const size_t tempIndex = indices[newPointIndex]; const ElemType tempDist = distances[newPointIndex]; indices[newPointIndex] = indices[0]; distances[newPointIndex] = distances[0]; indices[0] = tempIndex; distances[0] = tempDist; } // Will this be a new furthest child? if (distances[0] > furthestDescendantDistance) furthestDescendantDistance = distances[0]; // If there's only one point left, we don't need this crap. if ((nearSetSize == 1) && (farSetSize == 0)) { size_t childNearSetSize = 0; children.push_back(new CoverTree(*dataset, base, indices[0], nextScale, this, distances[0], indices, distances, childNearSetSize, farSetSize, usedSetSize, *metric)); distanceComps += children.back()->DistanceComps(); numDescendants += children.back()->NumDescendants(); // Because the far set size is 0, we don't have to do any swapping to // move the point into the used set. ++usedSetSize; --nearSetSize; // And we're done. break; } // Create the near and far set indices and distance vectors. We don't fill // in the self-point, yet. arma::Col childIndices(nearSetSize + farSetSize); childIndices.rows(0, (nearSetSize + farSetSize - 2)) = indices.rows(1, nearSetSize + farSetSize - 1); arma::vec childDistances(nearSetSize + farSetSize); // Build distances for the child. ComputeDistances(indices[0], childIndices, childDistances, nearSetSize + farSetSize - 1); // Split into near and far sets for this point. childNearSetSize = SplitNearFar(childIndices, childDistances, bound, nearSetSize + farSetSize - 1); childFarSetSize = PruneFarSet(childIndices, childDistances, base * bound, childNearSetSize, (nearSetSize + farSetSize - 1)); // Now that we know the near and far set sizes, we can put the used point // (the self point) in the correct place; now, when we call // MoveToUsedSet(), it will move the self-point correctly. The distance // does not matter. childIndices(childNearSetSize + childFarSetSize) = indices[0]; childDistances(childNearSetSize + childFarSetSize) = 0; // Build this child (recursively). childUsedSetSize = 1; // Mark self point as used. children.push_back(new CoverTree(*dataset, base, indices[0], nextScale, this, distances[0], childIndices, childDistances, childNearSetSize, childFarSetSize, childUsedSetSize, *metric)); numDescendants += children.back()->NumDescendants(); // Remove any implicit nodes. 
RemoveNewImplicitNodes(); distanceComps += children.back()->DistanceComps(); // Now with the child created, it returns the childIndices and // childDistances vectors in this form: // [ childFar | childUsed ] // For each point in the childUsed set, we must move that point to the used // set in our own vector. MoveToUsedSet(indices, distances, nearSetSize, farSetSize, usedSetSize, childIndices, childFarSetSize, childUsedSetSize); } // Calculate furthest descendant. for (size_t i = (nearSetSize + farSetSize); i < (nearSetSize + farSetSize + usedSetSize); ++i) if (distances[i] > furthestDescendantDistance) furthestDescendantDistance = distances[i]; } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > size_t CoverTree:: SplitNearFar(arma::Col& indices, arma::vec& distances, const ElemType bound, const size_t pointSetSize) { // Sanity check; there is no guarantee that this condition will not be true. // ...or is there? if (pointSetSize <= 1) return 0; // We'll traverse from both left and right. size_t left = 0; size_t right = pointSetSize - 1; // A modification of quicksort, with the pivot value set to the bound. // Everything on the left of the pivot will be less than or equal to the // bound; everything on the right will be greater than the bound. while ((distances[left] <= bound) && (left != right)) ++left; while ((distances[right] > bound) && (left != right)) --right; while (left != right) { // Now swap the values and indices. const size_t tempPoint = indices[left]; const ElemType tempDist = distances[left]; indices[left] = indices[right]; distances[left] = distances[right]; indices[right] = tempPoint; distances[right] = tempDist; // Traverse the left, seeing how many points are correctly on that side. // When we encounter an incorrect point, stop. We will switch it later. while ((distances[left] <= bound) && (left != right)) ++left; // Traverse the right, seeing how many points are correctly on that side. // When we encounter an incorrect point, stop. We will switch it with the // wrong point from the left side. while ((distances[right] > bound) && (left != right)) --right; } // The final left value is the index of the first far value. return left; } // Returns the maximum distance between points. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > void CoverTree:: ComputeDistances(const size_t pointIndex, const arma::Col& indices, arma::vec& distances, const size_t pointSetSize) { // For each point, rebuild the distances. The indices do not need to be // modified. distanceComps += pointSetSize; for (size_t i = 0; i < pointSetSize; ++i) { distances[i] = metric->Evaluate(dataset->col(pointIndex), dataset->col(indices[i])); } } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > size_t CoverTree:: SortPointSet(arma::Col& indices, arma::vec& distances, const size_t childFarSetSize, const size_t childUsedSetSize, const size_t farSetSize) { // We'll use low-level memcpy calls ourselves, just to ensure it's done // quickly and the way we want it to be. Unfortunately this takes up more // memory than one-element swaps, but there's not a great way around that. const size_t bufferSize = std::min(farSetSize, childUsedSetSize); const size_t bigCopySize = std::max(farSetSize, childUsedSetSize); // Sanity check: there is no need to sort if the buffer size is going to be // zero. 
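// That happens when either the far set or the child used set is empty, in
// which case the array is already in the desired order.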
if (bufferSize == 0) return (childFarSetSize + farSetSize); size_t* indicesBuffer = new size_t[bufferSize]; ElemType* distancesBuffer = new ElemType[bufferSize]; // The start of the memory region to copy to the buffer. const size_t bufferFromLocation = ((bufferSize == farSetSize) ? (childFarSetSize + childUsedSetSize) : childFarSetSize); // The start of the memory region to move directly to the new place. const size_t directFromLocation = ((bufferSize == farSetSize) ? childFarSetSize : (childFarSetSize + childUsedSetSize)); // The destination to copy the buffer back to. const size_t bufferToLocation = ((bufferSize == farSetSize) ? childFarSetSize : (childFarSetSize + farSetSize)); // The destination of the directly moved memory region. const size_t directToLocation = ((bufferSize == farSetSize) ? (childFarSetSize + farSetSize) : childFarSetSize); // Copy the smaller piece to the buffer. memcpy(indicesBuffer, indices.memptr() + bufferFromLocation, sizeof(size_t) * bufferSize); memcpy(distancesBuffer, distances.memptr() + bufferFromLocation, sizeof(ElemType) * bufferSize); // Now move the other memory. memmove(indices.memptr() + directToLocation, indices.memptr() + directFromLocation, sizeof(size_t) * bigCopySize); memmove(distances.memptr() + directToLocation, distances.memptr() + directFromLocation, sizeof(ElemType) * bigCopySize); // Now copy the temporary memory to the right place. memcpy(indices.memptr() + bufferToLocation, indicesBuffer, sizeof(size_t) * bufferSize); memcpy(distances.memptr() + bufferToLocation, distancesBuffer, sizeof(ElemType) * bufferSize); delete[] indicesBuffer; delete[] distancesBuffer; // This returns the complete size of the far set. return (childFarSetSize + farSetSize); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > void CoverTree:: MoveToUsedSet(arma::Col& indices, arma::vec& distances, size_t& nearSetSize, size_t& farSetSize, size_t& usedSetSize, arma::Col& childIndices, const size_t childFarSetSize, // childNearSetSize is 0 here. const size_t childUsedSetSize) { const size_t originalSum = nearSetSize + farSetSize + usedSetSize; // Loop across the set. We will swap points as we need. It should be noted // that farSetSize and nearSetSize may change with each iteration of this loop // (depending on if we make a swap or not). size_t startChildUsedSet = 0; // Where to start in the child set. for (size_t i = 0; i < nearSetSize; ++i) { // Discover if this point was in the child's used set. for (size_t j = startChildUsedSet; j < childUsedSetSize; ++j) { if (childIndices[childFarSetSize + j] == indices[i]) { // We have found a point; a swap is necessary. // Since this point is from the near set, to preserve the near set, we // must do a swap. if (farSetSize > 0) { if ((nearSetSize - 1) != i) { // In this case it must be a three-way swap. size_t tempIndex = indices[nearSetSize + farSetSize - 1]; ElemType tempDist = distances[nearSetSize + farSetSize - 1]; size_t tempNearIndex = indices[nearSetSize - 1]; ElemType tempNearDist = distances[nearSetSize - 1]; indices[nearSetSize + farSetSize - 1] = indices[i]; distances[nearSetSize + farSetSize - 1] = distances[i]; indices[nearSetSize - 1] = tempIndex; distances[nearSetSize - 1] = tempDist; indices[i] = tempNearIndex; distances[i] = tempNearDist; } else { // We can do a two-way swap. 
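// (The point is already the last element of the near set, so it only needs
// to trade places with the last element of the far set.)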
size_t tempIndex = indices[nearSetSize + farSetSize - 1]; ElemType tempDist = distances[nearSetSize + farSetSize - 1]; indices[nearSetSize + farSetSize - 1] = indices[i]; distances[nearSetSize + farSetSize - 1] = distances[i]; indices[i] = tempIndex; distances[i] = tempDist; } } else if ((nearSetSize - 1) != i) { // A two-way swap is possible. size_t tempIndex = indices[nearSetSize + farSetSize - 1]; ElemType tempDist = distances[nearSetSize + farSetSize - 1]; indices[nearSetSize + farSetSize - 1] = indices[i]; distances[nearSetSize + farSetSize - 1] = distances[i]; indices[i] = tempIndex; distances[i] = tempDist; } else { // No swap is necessary. } // We don't need to do a complete preservation of the child index set, // but we want to make sure we only loop over points we haven't seen. // So increment the child counter by 1 and move a point if we need. if (j != startChildUsedSet) { childIndices[childFarSetSize + j] = childIndices[childFarSetSize + startChildUsedSet]; } // Update all counters from the swaps we have done. ++startChildUsedSet; --nearSetSize; --i; // Since we moved a point out of the near set we must step back. break; // Break out of this for loop; back to the first one. } } } // Now loop over the far set. This loop is different because we only require // a normal two-way swap instead of the three-way swap to preserve the near // set / far set ordering. for (size_t i = 0; i < farSetSize; ++i) { // Discover if this point was in the child's used set. for (size_t j = startChildUsedSet; j < childUsedSetSize; ++j) { if (childIndices[childFarSetSize + j] == indices[i + nearSetSize]) { // We have found a point to swap. // Perform the swap. size_t tempIndex = indices[nearSetSize + farSetSize - 1]; ElemType tempDist = distances[nearSetSize + farSetSize - 1]; indices[nearSetSize + farSetSize - 1] = indices[nearSetSize + i]; distances[nearSetSize + farSetSize - 1] = distances[nearSetSize + i]; indices[nearSetSize + i] = tempIndex; distances[nearSetSize + i] = tempDist; if (j != startChildUsedSet) { childIndices[childFarSetSize + j] = childIndices[childFarSetSize + startChildUsedSet]; } // Update all counters from the swaps we have done. ++startChildUsedSet; --farSetSize; --i; break; // Break out of this for loop; back to the first one. } } } // Update used set size. usedSetSize += childUsedSetSize; Log::Assert(originalSum == (nearSetSize + farSetSize + usedSetSize)); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > size_t CoverTree:: PruneFarSet(arma::Col& indices, arma::vec& distances, const ElemType bound, const size_t nearSetSize, const size_t pointSetSize) { // What we are trying to do is remove any points greater than the bound from // the far set. We don't care what happens to those indices and distances... // so, we don't need to properly swap points -- just drop new ones in place. size_t left = nearSetSize; size_t right = pointSetSize - 1; while ((distances[left] <= bound) && (left != right)) ++left; while ((distances[right] > bound) && (left != right)) --right; while (left != right) { // We don't care what happens to the point which should be on the right. indices[left] = indices[right]; distances[left] = distances[right]; --right; // Since we aren't changing the right. // Advance to next location which needs to switch. 
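// (This is the same two-pointer scan as in SplitNearFar(), except that
// pruned points are simply overwritten instead of swapped.)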
while ((distances[left] <= bound) && (left != right)) ++left; while ((distances[right] > bound) && (left != right)) --right; } // The far set size is the left pointer, with the near set size subtracted // from it. return (left - nearSetSize); } /** * Take a look at the last child (the most recently created one) and remove any * implicit nodes that have been created. */ template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > inline void CoverTree:: RemoveNewImplicitNodes() { // If we created an implicit node, take its self-child instead (this could // happen multiple times). while (children[children.size() - 1]->NumChildren() == 1) { CoverTree* old = children[children.size() - 1]; children.erase(children.begin() + children.size() - 1); // Now take its child. children.push_back(&(old->Child(0))); // Set its parent and parameters correctly, and rebuild the statistic. old->Child(0).Parent() = this; old->Child(0).ParentDistance() = old->ParentDistance(); old->Child(0).DistanceComps() = old->DistanceComps(); old->Child(0).Stat() = StatisticType(old->Child(0)); // Remove its child (so it doesn't delete it). old->Children().erase(old->Children().begin() + old->Children().size() - 1); // Now delete it. delete old; } } /** * Default constructor, only for use with boost::serialization. */ template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > CoverTree::CoverTree() : dataset(NULL), point(0), scale(INT_MIN), base(0.0), numDescendants(0), parent(NULL), parentDistance(0.0), furthestDescendantDistance(0.0), localMetric(false), localDataset(false), metric(NULL) { // Nothing to do. } /** * Serialize to/from a boost::serialization archive. */ template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template void CoverTree::Serialize( Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // If we're loading, and we have children, they need to be deleted. We may // also need to delete the local metric and dataset. if (Archive::is_loading::value) { for (size_t i = 0; i < children.size(); ++i) delete children[i]; if (localMetric && metric) delete metric; if (localDataset && dataset) delete dataset; } ar & CreateNVP(dataset, "dataset"); ar & CreateNVP(point, "point"); ar & CreateNVP(scale, "scale"); ar & CreateNVP(base, "base"); ar & CreateNVP(stat, "stat"); ar & CreateNVP(numDescendants, "numDescendants"); // Due to quirks of boost::serialization, depending on how the user // serializes the tree, it's possible that the root of the tree will // accidentally be serialized twice. So if we are a first-level child, we // avoid serializing the parent. The true (non-duplicated) parent will fix // the parent link. if (Archive::is_saving::value && parent != NULL && parent->Parent() == NULL) { CoverTree* fakeParent = NULL; ar & CreateNVP(fakeParent, "parent"); } else { ar & CreateNVP(parent, "parent"); } ar & CreateNVP(parentDistance, "parentDistance"); ar & CreateNVP(furthestDescendantDistance, "furthestDescendantDistance"); ar & CreateNVP(metric, "metric"); if (Archive::is_loading::value && parent == NULL) { localMetric = true; localDataset = true; } // Lastly, serialize the children. 
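// Write the child count first so that, on load, the children vector can be
// resized before each child pointer is deserialized under its own name.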
size_t numChildren = children.size(); ar & CreateNVP(numChildren, "numChildren"); if (Archive::is_loading::value) children.resize(numChildren); for (size_t i = 0; i < numChildren; ++i) { std::ostringstream oss; oss << "child" << i; ar & CreateNVP(children[i], oss.str()); } if (Archive::is_loading::value && parent == NULL) { // Look through each child individually. for (size_t i = 0; i < children.size(); ++i) { children[i]->localMetric = false; children[i]->localDataset = false; children[i]->Parent() = this; } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/cover_tree/dual_tree_traverser.hpp000066400000000000000000000061721315013601400256230ustar00rootroot00000000000000/** * @file dual_tree_traverser.hpp * @author Ryan Curtin * * A dual-tree traverser for the cover tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_COVER_TREE_DUAL_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_COVER_TREE_DUAL_TREE_TRAVERSER_HPP #include #include namespace mlpack { namespace tree { template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template class CoverTree:: DualTreeTraverser { public: /** * Initialize the dual tree traverser with the given rule type. */ DualTreeTraverser(RuleType& rule); /** * Traverse the two specified trees. * * @param queryNode Root of query tree. * @param referenceNode Root of reference tree. */ void Traverse(CoverTree& queryNode, CoverTree& referenceNode); //! Get the number of pruned nodes. size_t NumPrunes() const { return numPrunes; } //! Modify the number of pruned nodes. size_t& NumPrunes() { return numPrunes; } ///// These are all fake because this is a patch for kd-trees only and I still ///// want it to compile! size_t NumVisited() const { return 0; } size_t NumScores() const { return 0; } size_t NumBaseCases() const { return 0; } private: //! The instantiated rule set for pruning branches. RuleType& rule; //! The number of pruned nodes. size_t numPrunes; //! Struct used for traversal. struct DualCoverTreeMapEntry { //! The node this entry refers to. CoverTree* referenceNode; //! The score of the node. double score; //! The base case. double baseCase; //! The traversal info associated with the call to Score() for this entry. typename RuleType::TraversalInfoType traversalInfo; //! Comparison operator, for sorting within the map. bool operator<(const DualCoverTreeMapEntry& other) const { if (score == other.score) return (baseCase < other.baseCase); else return (score < other.score); } }; /** * Helper function for traversal of the two trees. */ void Traverse(CoverTree& queryNode, std::map >& referenceMap); //! Prepare map for recursion. void PruneMap(CoverTree& queryNode, std::map >& referenceMap, std::map >& childMap); void ReferenceRecursion(CoverTree& queryNode, std::map >& referenceMap); }; } // namespace tree } // namespace mlpack // Include implementation. #include "dual_tree_traverser_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/cover_tree/dual_tree_traverser_impl.hpp000066400000000000000000000266461315013601400266540ustar00rootroot00000000000000/** * @file dual_tree_traverser_impl.hpp * @author Ryan Curtin * * A dual-tree traverser for the cover tree. 
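 * The traversal keeps the candidate reference nodes in a std::map keyed by
 * scale, so that recursion always descends the largest-scale (widest)
 * reference nodes first while the query tree is descended in parallel.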
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_COVER_TREE_DUAL_TREE_TRAVERSER_IMPL_HPP #define MLPACK_CORE_TREE_COVER_TREE_DUAL_TREE_TRAVERSER_IMPL_HPP #include #include namespace mlpack { namespace tree { template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template CoverTree:: DualTreeTraverser::DualTreeTraverser(RuleType& rule) : rule(rule), numPrunes(0) { /* Nothing to do. */ } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template void CoverTree:: DualTreeTraverser::Traverse(CoverTree& queryNode, CoverTree& referenceNode) { // Start by creating a map and adding the reference root node to it. std::map > refMap; DualCoverTreeMapEntry rootRefEntry; rootRefEntry.referenceNode = &referenceNode; // Perform the evaluation between the roots of either tree. rootRefEntry.score = rule.Score(queryNode, referenceNode); rootRefEntry.baseCase = rule.BaseCase(queryNode.Point(), referenceNode.Point()); rootRefEntry.traversalInfo = rule.TraversalInfo(); refMap[referenceNode.Scale()].push_back(rootRefEntry); Traverse(queryNode, refMap); } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template void CoverTree:: DualTreeTraverser::Traverse( CoverTree& queryNode, std::map >& referenceMap) { if (referenceMap.size() == 0) return; // Nothing to do! // First recurse down the reference nodes as necessary. ReferenceRecursion(queryNode, referenceMap); // Did the map get emptied? if (referenceMap.size() == 0) return; // Nothing to do! // Now, reduce the scale of the query node by recursing. But we can't recurse // if the query node is a leaf node. if ((queryNode.Scale() != INT_MIN) && (queryNode.Scale() >= (*referenceMap.rbegin()).first)) { // Recurse into the non-self-children first. The recursion order cannot // affect the runtime of the algorithm, because each query child recursion's // results are separate and independent. I don't think this is true in // every case, and we may have to modify this section to consider scores in // the future. for (size_t i = 1; i < queryNode.NumChildren(); ++i) { // We need a copy of the map for this child. std::map > childMap; PruneMap(queryNode.Child(i), referenceMap, childMap); Traverse(queryNode.Child(i), childMap); } std::map > selfChildMap; PruneMap(queryNode.Child(0), referenceMap, selfChildMap); Traverse(queryNode.Child(0), selfChildMap); } if (queryNode.Scale() != INT_MIN) return; // No need to evaluate base cases at this level. It's all done. // If we have made it this far, all we have is a bunch of base case // evaluations to do. Log::Assert((*referenceMap.begin()).first == INT_MIN); Log::Assert(queryNode.Scale() == INT_MIN); std::vector& pointVector = (*referenceMap.begin()).second; for (size_t i = 0; i < pointVector.size(); ++i) { // Get a reference to the frame. const DualCoverTreeMapEntry& frame = pointVector[i]; CoverTree* refNode = frame.referenceNode; // If the point is the same as both parents, then we have already done this // base case. 
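// (Each cover tree node shares its point with its self-child, so this
// (query, reference) pair was already evaluated one scale higher up.)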
if ((refNode->Point() == refNode->Parent()->Point()) && (queryNode.Point() == queryNode.Parent()->Point())) { ++numPrunes; continue; } // Score the node, to see if we can prune it, after restoring the traversal // info. rule.TraversalInfo() = frame.traversalInfo; double score = rule.Score(queryNode, *refNode); if (score == DBL_MAX) { ++numPrunes; continue; } // If not, compute the base case. rule.BaseCase(queryNode.Point(), pointVector[i].referenceNode->Point()); } } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template void CoverTree:: DualTreeTraverser::PruneMap( CoverTree& queryNode, std::map >& referenceMap, std::map >& childMap) { if (referenceMap.empty()) return; // Nothing to do. // Copy the zero set first. if ((*referenceMap.begin()).first == INT_MIN) { // Get a reference to the vector representing the entries at this scale. std::vector& scaleVector = (*referenceMap.begin()).second; // Before traversing all the points in this scale, sort by score. std::sort(scaleVector.begin(), scaleVector.end()); const int thisScale = (*referenceMap.begin()).first; childMap[thisScale].reserve(scaleVector.size()); std::vector& newScaleVector = childMap[thisScale]; // Loop over each entry in the vector. for (size_t j = 0; j < scaleVector.size(); ++j) { const DualCoverTreeMapEntry& frame = scaleVector[j]; // First evaluate if we can prune without performing the base case. CoverTree* refNode = frame.referenceNode; // Perform the actual scoring, after restoring the traversal info. rule.TraversalInfo() = frame.traversalInfo; double score = rule.Score(queryNode, *refNode); if (score == DBL_MAX) { // Pruned. Move on. ++numPrunes; continue; } // If it isn't pruned, we must evaluate the base case. const double baseCase = rule.BaseCase(queryNode.Point(), refNode->Point()); // Add to child map. newScaleVector.push_back(frame); newScaleVector.back().score = score; newScaleVector.back().baseCase = baseCase; newScaleVector.back().traversalInfo = rule.TraversalInfo(); } // If we didn't add anything, then strike this vector from the map. if (newScaleVector.size() == 0) childMap.erase((*referenceMap.begin()).first); } typename std::map >::reverse_iterator it = referenceMap.rbegin(); while ((it != referenceMap.rend())) { const int thisScale = (*it).first; if (thisScale == INT_MIN) // We already did it. break; // Get a reference to the vector representing the entries at this scale. std::vector& scaleVector = (*it).second; // Before traversing all the points in this scale, sort by score. std::sort(scaleVector.begin(), scaleVector.end()); childMap[thisScale].reserve(scaleVector.size()); std::vector& newScaleVector = childMap[thisScale]; // Loop over each entry in the vector. for (size_t j = 0; j < scaleVector.size(); ++j) { const DualCoverTreeMapEntry& frame = scaleVector[j]; // First evaluate if we can prune without performing the base case. CoverTree* refNode = frame.referenceNode; // Perform the actual scoring, after restoring the traversal info. rule.TraversalInfo() = frame.traversalInfo; double score = rule.Score(queryNode, *refNode); if (score == DBL_MAX) { // Pruned. Move on. ++numPrunes; continue; } // If it isn't pruned, we must evaluate the base case. const double baseCase = rule.BaseCase(queryNode.Point(), refNode->Point()); // Add to child map. 
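      // (The frame is copied and then its cached score, base case, and
      // traversal info are overwritten with the freshly computed values, so
      // the child's recursion can reuse them without re-evaluation.)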
      newScaleVector.push_back(frame);
      newScaleVector.back().score = score;
      newScaleVector.back().baseCase = baseCase;
      newScaleVector.back().traversalInfo = rule.TraversalInfo();
    }

    // If we didn't add anything, then strike this vector from the map.
    if (newScaleVector.size() == 0)
      childMap.erase((*it).first);

    ++it; // Advance to next scale.
  }
}

template<
    typename MetricType,
    typename StatisticType,
    typename MatType,
    typename RootPointPolicy
>
template<typename RuleType>
void CoverTree<MetricType, StatisticType, MatType, RootPointPolicy>::
DualTreeTraverser<RuleType>::ReferenceRecursion(
    CoverTree& queryNode,
    std::map<int, std::vector<DualCoverTreeMapEntry>>& referenceMap)
{
  // First, reduce the maximum scale in the reference map down to the scale of
  // the query node.
  while (!referenceMap.empty())
  {
    // These stopping conditions mirror the reference (jl) cover tree
    // implementation: the root query node keeps recursing references while
    // their maximum scale is at least its own scale; every other query node
    // recurses only while the maximum reference scale is strictly greater.
    if (queryNode.Parent() == NULL &&
        (*referenceMap.rbegin()).first < queryNode.Scale())
      break;
    if (queryNode.Parent() != NULL &&
        (*referenceMap.rbegin()).first <= queryNode.Scale())
      break;

    // If the query node's scale is INT_MIN and the reference map's maximum
    // scale is INT_MIN, don't try to recurse...
    if ((queryNode.Scale() == INT_MIN) &&
        ((*referenceMap.rbegin()).first == INT_MIN))
      break;

    // Get a reference to the current largest scale.
    std::vector<DualCoverTreeMapEntry>& scaleVector =
        (*referenceMap.rbegin()).second;

    // Before traversing all the points in this scale, sort by score.
    std::sort(scaleVector.begin(), scaleVector.end());

    // Now loop over each element.
    for (size_t i = 0; i < scaleVector.size(); ++i)
    {
      // Get a reference to the current element.
      const DualCoverTreeMapEntry& frame = scaleVector.at(i);
      CoverTree* refNode = frame.referenceNode;

      // Rescore the node to create the score for its children.  If this score
      // is DBL_MAX, we can prune all children: in this recursion setup,
      // pruning is all or nothing for children.
      double score = rule.Rescore(queryNode, *refNode, frame.score);

      if (score == DBL_MAX)
      {
        ++numPrunes;
        continue;
      }

      // If it is not pruned, add the children, evaluating the base case for
      // each child that survives scoring.
      for (size_t j = 0; j < refNode->NumChildren(); ++j)
      {
        rule.TraversalInfo() = frame.traversalInfo;
        double childScore = rule.Score(queryNode, refNode->Child(j));

        if (childScore == DBL_MAX)
        {
          ++numPrunes;
          continue;
        }

        // It wasn't pruned; evaluate the base case.
        const double baseCase = rule.BaseCase(queryNode.Point(),
            refNode->Child(j).Point());

        DualCoverTreeMapEntry newFrame;
        newFrame.referenceNode = &refNode->Child(j);
        newFrame.score = childScore; // Use the freshly computed child score.
        newFrame.baseCase = baseCase;
        newFrame.traversalInfo = rule.TraversalInfo();

        referenceMap[newFrame.referenceNode->Scale()].push_back(newFrame);
      }
    }

    // Now clear the memory for this scale; it isn't needed anymore.
    referenceMap.erase((*referenceMap.rbegin()).first);
  }
}

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/cover_tree/first_point_is_root.hpp000066400000000000000000000026251315013601400256570ustar00rootroot00000000000000
/**
 * @file first_point_is_root.hpp
 * @author Ryan Curtin
 *
 * A very simple policy for the cover tree; the first point in the dataset is
 * chosen as the root of the cover tree.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
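 *
 * For example (a sketch; any Armadillo-compatible matrix type will do):
 *
 * @code
 * const size_t rootIndex = FirstPointIsRoot::ChooseRoot(dataset); // Always 0.
 * @endcode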
*/ #ifndef MLPACK_CORE_TREE_FIRST_POINT_IS_ROOT_HPP #define MLPACK_CORE_TREE_FIRST_POINT_IS_ROOT_HPP #include namespace mlpack { namespace tree { /** * This class is meant to be used as a choice for the policy class * RootPointPolicy of the CoverTree class. This policy determines which point * is used for the root node of the cover tree. This particular implementation * simply chooses the first point in the dataset as the root. A more complex * implementation might choose, for instance, the point with least maximum * distance to other points (the closest to the "middle"). */ class FirstPointIsRoot { public: /** * Return the point to be used as the root point of the cover tree. This just * returns 0. */ template static size_t ChooseRoot(const MatType& /* dataset */) { return 0; } }; } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_FIRST_POINT_IS_ROOT_HPP mlpack-2.2.5/src/mlpack/core/tree/cover_tree/single_tree_traverser.hpp000066400000000000000000000035611315013601400261560ustar00rootroot00000000000000/** * @file single_tree_traverser.hpp * @author Ryan Curtin * * Defines the SingleTreeTraverser for the cover tree. This implements a * single-tree breadth-first recursion with a pruning rule and a base case (two * point) rule. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_COVER_TREE_SINGLE_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_COVER_TREE_SINGLE_TREE_TRAVERSER_HPP #include #include "cover_tree.hpp" namespace mlpack { namespace tree { template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template class CoverTree:: SingleTreeTraverser { public: /** * Initialize the single tree traverser with the given rule. */ SingleTreeTraverser(RuleType& rule); /** * Traverse the tree with the given point. * * @param queryIndex The index of the point in the query set which is used as * the query point. * @param referenceNode The tree node to be traversed. */ void Traverse(const size_t queryIndex, CoverTree& referenceNode); //! Get the number of prunes so far. size_t NumPrunes() const { return numPrunes; } //! Set the number of prunes (good for a reset to 0). size_t& NumPrunes() { return numPrunes; } private: //! Reference to the rules with which the tree will be traversed. RuleType& rule; //! The number of nodes which have been pruned during traversal. size_t numPrunes; }; } // namespace tree } // namespace mlpack // Include implementation. #include "single_tree_traverser_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/cover_tree/single_tree_traverser_impl.hpp000066400000000000000000000163351315013601400272020ustar00rootroot00000000000000/** * @file single_tree_traverser_impl.hpp * @author Ryan Curtin * * Implementation of the single tree traverser for cover trees, which implements * a breadth-first traversal. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_COVER_TREE_SINGLE_TREE_TRAVERSER_IMPL_HPP #define MLPACK_CORE_TREE_COVER_TREE_SINGLE_TREE_TRAVERSER_IMPL_HPP // In case it hasn't been included yet. 
#include "single_tree_traverser.hpp" #include namespace mlpack { namespace tree { //! This is the structure the cover tree map will use for traversal. template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > struct CoverTreeMapEntry { //! The node this entry refers to. CoverTree* node; //! The score of the node. double score; //! The index of the parent node. size_t parent; //! The base case evaluation. double baseCase; //! Comparison operator. bool operator<(const CoverTreeMapEntry& other) const { return (score < other.score); } }; template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template CoverTree:: SingleTreeTraverser::SingleTreeTraverser(RuleType& rule) : rule(rule), numPrunes(0) { /* Nothing to do. */ } template< typename MetricType, typename StatisticType, typename MatType, typename RootPointPolicy > template void CoverTree:: SingleTreeTraverser::Traverse( const size_t queryIndex, CoverTree& referenceNode) { // This is a non-recursive implementation (which should be faster than a // recursive implementation). typedef CoverTreeMapEntry MapEntryType; // We will use this map as a priority queue. Each key represents the scale, // and then the vector is all the nodes in that scale which need to be // investigated. Because no point in a scale can add a point in its own // scale, we know that the vector for each scale is final when we get to it. // In addition, map is organized in such a way that rbegin() will return the // largest scale. std::map > mapQueue; // Create the score for the children. double rootChildScore = rule.Score(queryIndex, referenceNode); if (rootChildScore == DBL_MAX) { numPrunes += referenceNode.NumChildren(); } else { // Manually add the children of the first node. // Often, a ruleset will return without doing any computation on cover trees // using TreeTraits::FirstPointIsCentroid; this is an optimization that // (theoretically) the compiler should get right. double rootBaseCase = rule.BaseCase(queryIndex, referenceNode.Point()); // Don't add the self-leaf. size_t i = 0; if (referenceNode.Child(0).NumChildren() == 0) { ++numPrunes; i = 1; } for (/* i was set above. */; i < referenceNode.NumChildren(); ++i) { MapEntryType newFrame; newFrame.node = &referenceNode.Child(i); newFrame.score = rootChildScore; newFrame.baseCase = rootBaseCase; newFrame.parent = referenceNode.Point(); // Put it into the map. mapQueue[newFrame.node->Scale()].push_back(newFrame); } } // Now begin the iteration through the map, but only if it has anything in it. if (mapQueue.empty()) return; typename std::map >::reverse_iterator rit = mapQueue.rbegin(); // We will treat the leaves differently (below). while ((*rit).first != INT_MIN) { // Get a reference to the current scale. std::vector& scaleVector = (*rit).second; // Before traversing all the points in this scale, sort by score. std::sort(scaleVector.begin(), scaleVector.end()); // Now loop over each element. for (size_t i = 0; i < scaleVector.size(); ++i) { // Get a reference to the current element. const MapEntryType& frame = scaleVector.at(i); CoverTree* node = frame.node; const double score = frame.score; const size_t parent = frame.parent; const size_t point = node->Point(); double baseCase = frame.baseCase; // First we recalculate the score of this node to find if we can prune it. if (rule.Rescore(queryIndex, *node, score) == DBL_MAX) { ++numPrunes; continue; } // Create the score for the children. 
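  // (Each child frame pushed below stores this childScore; it is later passed
  // to Rescore() before the child is processed, so Score() is evaluated only
  // once per parent rather than once per child.)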
const double childScore = rule.Score(queryIndex, *node); // Now if this childScore is DBL_MAX we can prune all children. In this // recursion setup pruning is all or nothing for children. if (childScore == DBL_MAX) { numPrunes += node->NumChildren(); continue; } // If we are a self-child, the base case has already been evaluated. // Often, a ruleset will return without doing any computation on cover // trees using TreeTraits::FirstPointIsCentroid; this is an optimization // that (theoretically) the compiler should get right. if (point != parent) baseCase = rule.BaseCase(queryIndex, point); // Don't add the self-leaf. size_t j = 0; if (node->Child(0).NumChildren() == 0) { ++numPrunes; j = 1; } for (/* j is already set. */; j < node->NumChildren(); ++j) { MapEntryType newFrame; newFrame.node = &node->Child(j); newFrame.score = childScore; newFrame.baseCase = baseCase; newFrame.parent = point; mapQueue[newFrame.node->Scale()].push_back(newFrame); } } // Now clear the memory for this scale; it isn't needed anymore. mapQueue.erase((*rit).first); } // Now deal with the leaves. for (size_t i = 0; i < mapQueue[INT_MIN].size(); ++i) { const MapEntryType& frame = mapQueue[INT_MIN].at(i); CoverTree* node = frame.node; const double score = frame.score; const size_t point = node->Point(); // First, recalculate the score of this node to find if we can prune it. double rescore = rule.Rescore(queryIndex, *node, score); if (rescore == DBL_MAX) { ++numPrunes; continue; } // For this to be a valid dual-tree algorithm, we *must* evaluate the // combination, even if pruning it will make no difference. It's the // definition. const double actualScore = rule.Score(queryIndex, *node); if (actualScore == DBL_MAX) { ++numPrunes; continue; } else { // Evaluate the base case, since the combination was not pruned. // Often, a ruleset will return without doing any computation on cover // trees using TreeTraits::FirstPointIsCentroid; this is an optimization // that (theoretically) the compiler should get right. rule.BaseCase(queryIndex, point); } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/cover_tree/traits.hpp000066400000000000000000000041361315013601400230660ustar00rootroot00000000000000/** * @file traits.hpp * @author Ryan Curtin * * This file contains the specialization of the TreeTraits class for the * CoverTree type of tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_COVER_TREE_TRAITS_HPP #define MLPACK_CORE_TREE_COVER_TREE_TRAITS_HPP #include namespace mlpack { namespace tree { /** * The specialization of the TreeTraits class for the CoverTree tree type. It * defines characteristics of the cover tree, and is used to help write * tree-independent (but still optimized) tree-based algorithms. See * mlpack/core/tree/tree_traits.hpp for more information. */ template class TreeTraits> { public: /** * The cover tree (or, this implementation of it) does not require that * children represent non-overlapping subsets of the parent node. */ static const bool HasOverlappingChildren = true; /** * Cover trees do have self-children, so points can be included in more than * one node. */ static const bool HasDuplicatedPoints = true; /** * Each cover tree node contains only one point, and that point is its * centroid. 
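 *
 * Tree-independent code can branch on this trait; a minimal sketch (assuming
 * a generic TreeType and a rule object with a BaseCase() method):
 *
 * @code
 * if (tree::TreeTraits<TreeType>::FirstPointIsCentroid)
 *   baseCase = rule.BaseCase(queryIndex, referenceNode.Point(0));
 * @endcode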
*/ static const bool FirstPointIsCentroid = true; /** * Cover trees do have self-children. */ static const bool HasSelfChildren = true; /** * Points are not rearranged when the tree is built. */ static const bool RearrangesDataset = false; /** * The cover tree is not necessarily a binary tree. */ static const bool BinaryTree = false; /** * NumDescendants() represents the number of unique descendant points. */ static const bool UniqueNumDescendants = true; }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/cover_tree/typedef.hpp000066400000000000000000000025331315013601400232170ustar00rootroot00000000000000/** * @file typedef.hpp * @author Ryan Curtin * * Typedef of cover tree to match TreeType API. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_COVER_TREE_TYPEDEF_HPP #define MLPACK_CORE_TREE_COVER_TREE_TYPEDEF_HPP #include "cover_tree.hpp" namespace mlpack { namespace tree { /** * The standard cover tree, as detailed in the original cover tree paper: * * @code * @inproceedings{ * author={Beygelzimer, A. and Kakade, S. and Langford, J.}, * title={Cover trees for nearest neighbor}, * booktitle={Proceedings of the 23rd International Conference on Machine * Learning (ICML 2006)}, * pages={97--104}, * year={2006} * } * @endcode * * This template typedef satisfies the requirements of the TreeType API. * * @see @ref trees, CoverTree */ template using StandardCoverTree = CoverTree; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/example_tree.hpp000066400000000000000000000241741315013601400221010ustar00rootroot00000000000000/** * @file example_tree.hpp * @author Ryan Curtin * * An example tree. This contains all the functions that mlpack trees must * implement (although the actual implementations here don't make any sense * because this is just an example). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_EXAMPLE_TREE_HPP #define MLPACK_CORE_TREE_EXAMPLE_TREE_HPP namespace mlpack { namespace tree { /** * This is not an actual space tree but instead an example tree that exists to * show and document all the functions that mlpack trees must implement. For a * better overview of trees, see @ref trees. Also be aware that the * implementations of each of the methods in this example tree are entirely fake * and do not work; this example tree exists for its API, not its * implementation. * * Note that trees often have different properties. These properties are known * at compile-time through the mlpack::tree::TreeTraits class, and some * properties may imply the existence (or non-existence) of certain functions. * Refer to the TreeTraits for more documentation on that. * * The three template parameters below must be template parameters to the tree, * in the order given below. More template parameters are fine, but they must * come after the first three. * * @tparam MetricType This defines the space in which the tree will be built. 
* For some trees, arbitrary metrics cannot be used, and a template * metaprogramming approach should be used to issue a compile-time error if * a metric cannot be used with a specific tree type. One example is the * tree::BinarySpaceTree tree type, which cannot work with the * metric::IPMetric class. * @tparam StatisticType A tree node can hold a statistic, which is sometimes * useful for various dual-tree algorithms. The tree itself does not need * to know anything about how the statistic works, but it needs to hold a * StatisticType in each node. It can be assumed that the StatisticType * class has a constructor StatisticType(const ExampleTree&). * @tparam MatType A tree could be built on a dense matrix or a sparse matrix. * All mlpack trees should be able to support any Armadillo-compatible * matrix type. When the tree is written it should be assumed that MatType * has the same functionality as arma::mat. */ template, typename StatisticType = EmptyStatistic, typename MatType = arma::mat> class ExampleTree { public: /** * This constructor will build the tree given a dataset and an instantiated * metric. Note that the parameter is a MatType& and not an arma::mat&. The * dataset is not modified by the tree-building process (if it is, see the * documentation for mlpack::tree::TreeTraits::RearrangesDataset for how to * deal with that situation). The MetricType parameter is necessary even * though some metrics do not hold any state. This is so that the tree does * not have to worry about instantiating the metric (if the tree had to worry * about this, this would almost certainly incur additional runtime complexity * and a larger runtime size of the tree node objects, which is to be * avoided). The metric can't be const, in case MetricType::Evaluate() is * non-const. * * When this constructor is finished, the entire tree will be built and ready * to use. The constructor should call the constructor of the statistic for * each node that is built (see tree::EmptyStatistic for more information). * * @param dataset The dataset that the tree will be built on. * @param metric The instantiated metric to use to build the dataset. */ ExampleTree(const MatType& dataset, MetricType& metric); //! Return the number of children of this node. size_t NumChildren() const; //! Return a particular child of this node. const ExampleTree& Child(const size_t i) const; //! Modify a particular child of this node. ExampleTree& Child(const size_t i); //! Return the parent node (NULL if this is the root of the tree). ExampleTree* Parent() const; //! Return the number of points held in this node. size_t NumPoints() const; /** * Return the index of a particular point of this node. mlpack trees do not, * in general, hold the actual dataset, and instead just hold the indices of * the points they contain. Thus, you might use this function in code like * this: * * @code * arma::vec thirdPoint = dataset.col(treeNode.Point(2)); * @endcode */ size_t Point(const size_t i) const; /** * Get the number of descendant points. This is the number of unique points * held in this node plus the number of points held in all descendant nodes. * This could be calculated at build-time and cached, or could be calculated * at run-time. This may be harder to calculate for trees that may hold * points in multiple nodes (like cover trees and spill trees, for instance). */ size_t NumDescendants() const; /** * Get the index of a particular descendant point. 
The ordering of the * descendants does not matter, as long as calling Descendant(0) through * Descendant(NumDescendants() - 1) will return the indices of every * unique descendant point of the node. */ size_t Descendant(const size_t i) const; //! Get the statistic for this node. const StatisticType& Stat() const; //! Modify the statistic for this node. StatisticType& Stat(); //! Get the instantiated metric for this node. const MetricType& Metric() const; //! Modify the instantiated metric for this node. MetricType& Metric(); /** * Return the minimum distance between this node and a point. It is not * required that the exact minimum distance between the node and the point is * returned but instead a lower bound on the minimum distance will suffice. * See the definitions in @ref trees for more information. * * @param point Point to return [lower bound on] minimum distance to. */ double MinDistance(const MatType& point) const; /** * Return the minimum distance between this node and another node. It is not * required that the exact minimum distance between the two nodes be returned * but instead a lower bound on the minimum distance will suffice. See the * definitions in @ref trees for more information. * * @param node Node to return [lower bound on] minimum distance to. */ double MinDistance(const ExampleTree& other) const; /** * Return the maximum distance between this node and a point. It is not * required that the exact maximum distance between the node and the point is * returned but instead an upper bound on the maximum distance will suffice. * See the definitions in @ref trees for more information. * * @param point Point to return [upper bound on] maximum distance to. */ double MaxDistance(const MatType& point) const; /** * Return the maximum distance between this node and another node. It is not * required that the exact maximum distance between the two nodes be returned * but instead an upper bound on the maximum distance will suffice. See the * definitions in @ref trees for more information. * * @param node Node to return [upper bound on] maximum distance to. */ double MaxDistance(const ExampleTree& other) const; /** * Return both the minimum and maximum distances between this node and a point * as a math::Range object. This overload is given because it is possible * that, for some tree types, calculation of both at once is faster than a * call to MinDistance() then MaxDistance(). It is not necessary that the * minimum and maximum distances be exact; it is sufficient to return a lower * bound on the minimum distance and an upper bound on the maximum distance. * See the definitions in @ref trees for more information. * * @param point Point to return [bounds on] minimum and maximum distances to. */ math::Range RangeDistance(const MatType& point) const; /** * Return both the minimum and maximum distances between this node and another * node as a math::Range object. This overload is given because it is * possible that, for some tree types, calculation of both at once is faster * than a call to MinDistance() then MaxDistance(). It is not necessary that * the minimum and maximum distances be exact; it is sufficient to return a * lower bound on the minimum distance and an upper bound on the maximum * distance. See the definitions in @ref trees for more information. * * @param node Node to return [bounds on] minimum and maximum distances to. */ math::Range RangeDistance(const ExampleTree& other) const; /** * Fill the given vector with the center of the node. 
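 *
 * For example (a sketch):
 *
 * @code
 * arma::vec center;
 * node.Centroid(center);
 * @endcode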
   *
   * @param centroid Vector to be filled with the center of the node.
   */
  void Centroid(arma::vec& centroid) const;

  /**
   * Get the distance from the center of the node to the furthest descendant
   * point of this node.  This does not necessarily need to be the exact
   * furthest descendant distance but instead can be an upper bound.  See the
   * definitions in @ref trees for more information.
   */
  double FurthestDescendantDistance() const;

  /**
   * Get the distance from the center of this node to the center of the parent
   * node.
   */
  double ParentDistance() const;

 private:
  //! This member is just here so the ExampleTree compiles without warnings.
  //! It is not required to be a member in every type of tree.
  StatisticType stat;

  /**
   * This member is just here so the ExampleTree compiles without warnings.  It
   * is not required to be a member in every type of tree.  Be aware that
   * storing the metric as a member rather than a reference may mean that, for
   * some metrics (such as metric::MahalanobisDistance in high dimensionality),
   * lots of unnecessary matrix copying is incurred.
   */
  MetricType& metric;
};

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/greedy_single_tree_traverser.hpp000066400000000000000000000031711315013601400253550ustar00rootroot00000000000000
/**
 * @file greedy_single_tree_traverser.hpp
 * @author Marcos Pividori
 *
 * A simple greedy traverser which always chooses the child with the best score
 * and doesn't do backtracking.  The RuleType class must implement the method
 * 'GetBestChild()'.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_GREEDY_SINGLE_TREE_TRAVERSER_HPP
#define MLPACK_CORE_TREE_GREEDY_SINGLE_TREE_TRAVERSER_HPP

#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace tree {

template<typename TreeType, typename RuleType>
class GreedySingleTreeTraverser
{
 public:
  /**
   * Instantiate the greedy single tree traverser with the given rule set.
   */
  GreedySingleTreeTraverser(RuleType& rule);

  /**
   * Traverse the tree with the given point.
   *
   * @param queryIndex The index of the point in the query set which is being
   *     used as the query point.
   * @param referenceNode The tree node to be traversed.
   */
  void Traverse(const size_t queryIndex, TreeType& referenceNode);

  //! Get the number of prunes.
  size_t NumPrunes() const { return numPrunes; }

 private:
  //! Reference to the rules with which the tree will be traversed.
  RuleType& rule;

  //! The number of nodes which have been pruned during traversal.
  size_t numPrunes;
};

} // namespace tree
} // namespace mlpack

// Include implementation.
#include "greedy_single_tree_traverser_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/greedy_single_tree_traverser_impl.hpp000066400000000000000000000032521315013601400263760ustar00rootroot00000000000000
/**
 * @file greedy_single_tree_traverser_impl.hpp
 * @author Marcos Pividori
 *
 * A simple greedy traverser which always chooses the child with the best score
 * and doesn't do backtracking.  The RuleType class must implement the method
 * 'GetBestChild()'.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
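 *
 * A sketch of the GetBestChild() method a RuleType is expected to provide
 * (the exact signature here is illustrative, not prescriptive):
 *
 * @code
 * // Return the index of the child of referenceNode with the best score.
 * template<typename TreeType>
 * size_t GetBestChild(const size_t queryIndex, TreeType& referenceNode);
 * @endcode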
 */
#ifndef MLPACK_CORE_TREE_GREEDY_SINGLE_TREE_TRAVERSER_IMPL_HPP
#define MLPACK_CORE_TREE_GREEDY_SINGLE_TREE_TRAVERSER_IMPL_HPP

// In case it hasn't been included yet.
#include "greedy_single_tree_traverser.hpp"

namespace mlpack {
namespace tree {

template<typename TreeType, typename RuleType>
GreedySingleTreeTraverser<TreeType, RuleType>::GreedySingleTreeTraverser(
    RuleType& rule) :
    rule(rule),
    numPrunes(0)
{ /* Nothing to do. */ }

template<typename TreeType, typename RuleType>
void GreedySingleTreeTraverser<TreeType, RuleType>::Traverse(
    const size_t queryIndex,
    TreeType& referenceNode)
{
  // Run the base case as necessary for all the points in the reference node.
  for (size_t i = 0; i < referenceNode.NumPoints(); ++i)
    rule.BaseCase(queryIndex, referenceNode.Point(i));

  if (!referenceNode.IsLeaf())
  {
    // We are pruning all but one child.
    numPrunes += referenceNode.NumChildren() - 1;
    // Recurse into the best child only.
    size_t bestChild = rule.GetBestChild(queryIndex, referenceNode);
    Traverse(queryIndex, referenceNode.Child(bestChild));
  }
}

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/hollow_ball_bound.hpp000066400000000000000000000161221315013601400231060ustar00rootroot00000000000000
/**
 * @file hollow_ball_bound.hpp
 *
 * Bounds that are useful for binary space partitioning trees.
 * Interface to a ball bound that works in arbitrary metric spaces.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_HOLLOW_BALL_BOUND_HPP
#define MLPACK_CORE_TREE_HOLLOW_BALL_BOUND_HPP

#include <mlpack/prereqs.hpp>
#include <mlpack/core/metrics/lmetric.hpp>
#include "bound_traits.hpp"

namespace mlpack {
namespace bound {

/**
 * A hollow ball bound encloses a set of points within a specific distance
 * (radius) of a specific point (center), except for points within a specific
 * distance of another point (the center of the hole).  MetricType is the
 * custom metric type, which defaults to the Euclidean (L2) distance.
 *
 * @tparam TMetricType Metric type used in the distance measure.
 * @tparam ElemType Type of element (float or double or similar).
 */
template<typename TMetricType = metric::LMetric<2, true>,
         typename ElemType = double>
class HollowBallBound
{
 public:
  //! A public version of the metric type.
  typedef TMetricType MetricType;

 private:
  //! The inner and the outer radii of the bound.
  math::RangeType<ElemType> radii;
  //! The center of the ball bound.
  arma::Col<ElemType> center;
  //! The center of the hollow.
  arma::Col<ElemType> hollowCenter;
  //! The metric used in this bound.
  MetricType* metric;

  /**
   * To know whether this object allocated memory to the metric member
   * variable.  This will be true except in the copy constructor and the
   * overloaded assignment operator.  We need this to know whether we should
   * delete the metric member variable in the destructor.
   */
  bool ownsMetric;

 public:
  //! Empty constructor.
  HollowBallBound();

  /**
   * Create the ball bound with the specified dimensionality.
   *
   * @param dimension Dimensionality of ball bound.
   */
  HollowBallBound(const size_t dimension);

  /**
   * Create the ball bound with the specified radii and center.
   *
   * @param innerRadius Inner radius of ball bound.
   * @param outerRadius Outer radius of ball bound.
   * @param center Center of ball bound.
   */
  template<typename VecType>
  HollowBallBound(const ElemType innerRadius,
                  const ElemType outerRadius,
                  const VecType& center);

  //! Copy constructor.  To prevent memory leaks.
  HollowBallBound(const HollowBallBound& other);

  //! For the same reason as the copy constructor: to prevent memory leaks.
HollowBallBound& operator=(const HollowBallBound& other); //! Move constructor: take possession of another bound. HollowBallBound(HollowBallBound&& other); //! Destructor to release allocated memory. ~HollowBallBound(); //! Get the outer radius of the ball. ElemType OuterRadius() const { return radii.Hi(); } //! Modify the outer radius of the ball. ElemType& OuterRadius() { return radii.Hi(); } //! Get the innner radius of the ball. ElemType InnerRadius() const { return radii.Lo(); } //! Modify the inner radius of the ball. ElemType& InnerRadius() { return radii.Lo(); } //! Get the center point of the ball. const arma::Col& Center() const { return center; } //! Modify the center point of the ball. arma::Col& Center() { return center; } //! Get the center point of the hollow. const arma::Col& HollowCenter() const { return hollowCenter; } //! Modify the center point of the hollow. arma::Col& HollowCenter() { return hollowCenter; } //! Get the dimensionality of the ball. size_t Dim() const { return center.n_elem; } /** * Get the minimum width of the bound (this is same as the diameter). * For ball bounds, width along all dimensions remain same. */ ElemType MinWidth() const { return radii.Hi() * 2.0; } //! Get the range in a certain dimension. math::RangeType operator[](const size_t i) const; /** * Determines if a point is within this bound. */ template bool Contains(const VecType& point) const; /** * Determines if another bound is within this bound. */ bool Contains(const HollowBallBound& other) const; /** * Place the center of BallBound into the given vector. * * @param center Vector which the centroid will be written to. */ template void Center(VecType& center) const { center = this->center; } /** * Calculates minimum bound-to-point squared distance. */ template ElemType MinDistance(const VecType& point, typename boost::enable_if>* = 0) const; /** * Calculates minimum bound-to-bound squared distance. */ ElemType MinDistance(const HollowBallBound& other) const; /** * Computes maximum distance. */ template ElemType MaxDistance(const VecType& point, typename boost::enable_if>* = 0) const; /** * Computes maximum distance. */ ElemType MaxDistance(const HollowBallBound& other) const; /** * Calculates minimum and maximum bound-to-point distance. */ template math::RangeType RangeDistance( const VecType& other, typename boost::enable_if>* = 0) const; /** * Calculates minimum and maximum bound-to-bound distance. * * Example: bound1.MinDistanceSq(other) for minimum distance. */ math::RangeType RangeDistance(const HollowBallBound& other) const; /** * Expand the bound to include the given point. The centroid will not be * moved. * * @tparam MatType Type of matrix; could be arma::mat, arma::spmat, or a * vector. * @tparam data Data points to add. */ template const HollowBallBound& operator|=(const MatType& data); /** * Expand the bound to include the given bound. The centroid will not be * moved. * * @tparam MatType Type of matrix; could be arma::mat, arma::spmat, or a * vector. * @tparam data Data points to add. */ const HollowBallBound& operator|=(const HollowBallBound& other); /** * Returns the diameter of the ballbound. */ ElemType Diameter() const { return 2 * radii.Hi(); } //! Returns the distance metric used in this bound. const MetricType& Metric() const { return *metric; } //! Modify the distance metric used in this bound. MetricType& Metric() { return *metric; } //! Serialize the bound. template void Serialize(Archive& ar, const unsigned int version); }; //! 
A specialization of BoundTraits for this bound type. template struct BoundTraits> { //! These bounds are potentially loose in some dimensions. const static bool HasTightBounds = false; }; } // namespace bound } // namespace mlpack #include "hollow_ball_bound_impl.hpp" #endif // MLPACK_CORE_TREE_HOLLOW_BALL_BOUND_HPP mlpack-2.2.5/src/mlpack/core/tree/hollow_ball_bound_impl.hpp000066400000000000000000000323601315013601400241310ustar00rootroot00000000000000/** * @file hollow_ball_bound_impl.hpp * * Bounds that are useful for binary space partitioning trees. * Implementation of HollowBallBound ball bound metric policy class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_HOLLOW_BALL_BOUND_IMPL_HPP #define MLPACK_CORE_TREE_HOLLOW_BALL_BOUND_IMPL_HPP // In case it hasn't been included already. #include "hollow_ball_bound.hpp" namespace mlpack { namespace bound { //! Empty Constructor. template HollowBallBound::HollowBallBound() : radii(std::numeric_limits::lowest(), std::numeric_limits::lowest()), metric(new MetricType()), ownsMetric(true) { /* Nothing to do. */ } /** * Create the hollow ball bound with the specified dimensionality. * * @param dimension Dimensionality of ball bound. */ template HollowBallBound::HollowBallBound(const size_t dimension) : radii(std::numeric_limits::lowest(), std::numeric_limits::lowest()), center(dimension), hollowCenter(dimension), metric(new MetricType()), ownsMetric(true) { /* Nothing to do. */ } /** * Create the hollow ball bound with the specified radii and center. * * @param innerRadius Inner radius of hollow ball bound. * @param outerRadius Outer radius of hollow ball bound. * @param center Center of hollow ball bound. */ template template HollowBallBound:: HollowBallBound(const ElemType innerRadius, const ElemType outerRadius, const VecType& center) : radii(innerRadius, outerRadius), center(center), hollowCenter(center), metric(new MetricType()), ownsMetric(true) { /* Nothing to do. */ } //! Copy Constructor. To prevent memory leaks. template HollowBallBound::HollowBallBound( const HollowBallBound& other) : radii(other.radii), center(other.center), hollowCenter(other.hollowCenter), metric(other.metric), ownsMetric(false) { /* Nothing to do. */ } //! For the same reason as the copy constructor: to prevent memory leaks. template HollowBallBound& HollowBallBound:: operator=(const HollowBallBound& other) { radii = other.radii; center = other.center; hollowCenter = other.hollowCenter; metric = other.metric; ownsMetric = false; return *this; } //! Move constructor. template HollowBallBound::HollowBallBound( HollowBallBound&& other) : radii(other.radii), center(std::move(other.center)), hollowCenter(std::move(other.hollowCenter)), metric(other.metric), ownsMetric(other.ownsMetric) { // Fix the other bound. other.radii.Hi() = 0.0; other.radii.Lo() = 0.0; other.center = arma::Col(); other.hollowCenter = arma::Col(); other.metric = NULL; other.ownsMetric = false; } //! Destructor to release allocated memory. template HollowBallBound::~HollowBallBound() { if (ownsMetric) delete metric; } //! Get the range in a certain dimension. 
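//! (The returned range is the projection of the outer ball onto dimension i;
//! the hole is ignored, so for a hollow bound this range may be loose.)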
template math::RangeType HollowBallBound::operator[]( const size_t i) const { if (radii.Hi() < 0) return math::Range(); else return math::Range(center[i] - radii.Hi(), center[i] + radii.Hi()); } /** * Determines if a point is within the bound. */ template template bool HollowBallBound::Contains( const VecType& point) const { if (radii.Hi() < 0) return false; else { ElemType dist = metric->Evaluate(center, point); if (dist > radii.Hi()) return false; // The point is situated outside the outer ball. // Check if the point is situated outside the hole. dist = metric->Evaluate(hollowCenter, point); return (dist >= radii.Lo()); } } /** * Determines if another bound is within this bound. */ template bool HollowBallBound::Contains( const HollowBallBound& other) const { if (radii.Hi() < 0) return false; else { const ElemType dist = metric->Evaluate(center, other.center); const ElemType hollowCenterDist = metric->Evaluate(hollowCenter, other.center); const ElemType hollowHollowDist = metric->Evaluate(hollowCenter, other.hollowCenter); // The outer ball of the second bound does not contain the hole of the first // bound. bool containOnOneSide = (hollowCenterDist - other.radii.Hi() >= radii.Lo()) && (dist + other.radii.Hi() <= radii.Hi()); // The hole of the second bound contains the hole of the first bound. bool containOnEverySide = (hollowHollowDist + radii.Lo() <= other.radii.Lo()) && (dist + other.radii.Hi() <= radii.Hi()); // The first bound has not got a hole. bool containAsBall = (radii.Lo() == 0) && (dist + other.radii.Hi() <= radii.Hi()); return (containOnOneSide || containOnEverySide || containAsBall); } } /** * Calculates minimum bound-to-point squared distance. */ template template ElemType HollowBallBound::MinDistance( const VecType& point, typename boost::enable_if>* /* junk */) const { if (radii.Hi() < 0) return std::numeric_limits::max(); else { const ElemType outerDistance = metric->Evaluate(point, center) - radii.Hi(); if (outerDistance >= 0) return outerDistance; // The outer ball does not contain the point. // Check if the point is situated in the hole. const ElemType innerDistance = math::ClampNonNegative(radii.Lo() - metric->Evaluate(point, hollowCenter)); return innerDistance; } } /** * Calculates minimum bound-to-bound squared distance. */ template ElemType HollowBallBound::MinDistance( const HollowBallBound& other) const { if (radii.Hi() < 0 || other.radii.Hi() < 0) return std::numeric_limits::max(); else { const ElemType outerDistance = metric->Evaluate(center, other.center) - radii.Hi() - other.radii.Hi(); if (outerDistance >= 0) return outerDistance; // The outer hollows do not overlap. // Check if the hole of the second bound contains the outer ball of the // first bound. const ElemType innerDistance1 = other.radii.Lo() - metric->Evaluate(center, other.hollowCenter) - radii.Hi(); if (innerDistance1 >= 0) return innerDistance1; // Check if the hole of the first bound contains the outer ball of the // second bound. const ElemType innerDistance2 = math::ClampNonNegative(radii.Lo() - metric->Evaluate(hollowCenter, other.center) - other.radii.Hi()); return innerDistance2; } } /** * Computes maximum distance. */ template template ElemType HollowBallBound::MaxDistance( const VecType& point, typename boost::enable_if >* /* junk */) const { if (radii.Hi() < 0) return std::numeric_limits::max(); else return metric->Evaluate(point, center) + radii.Hi(); } /** * Computes maximum distance. 
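 * By the triangle inequality, no point of this bound can be farther from any
 * point of the other bound than
 * d(center, other.center) + radii.Hi() + other.radii.Hi().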
*/ template ElemType HollowBallBound::MaxDistance( const HollowBallBound& other) const { if (radii.Hi() < 0) return std::numeric_limits::max(); else return metric->Evaluate(other.center, center) + radii.Hi() + other.radii.Hi(); } /** * Calculates minimum and maximum bound-to-bound squared distance. * * Example: bound1.MinDistanceSq(other) for minimum squared distance. */ template template math::RangeType HollowBallBound::RangeDistance( const VecType& point, typename boost::enable_if >* /* junk */) const { if (radii.Hi() < 0) return math::Range(std::numeric_limits::max(), std::numeric_limits::max()); else { math::RangeType range; const ElemType dist = metric->Evaluate(point, center); if (dist >= radii.Hi()) // The outer ball does not contain the point. range.Lo() = dist - radii.Hi(); else { // Check if the point is situated in the hole. range.Lo() = math::ClampNonNegative(radii.Lo() - metric->Evaluate(point, hollowCenter)); } range.Hi() = dist + radii.Hi(); return range; } } template math::RangeType HollowBallBound::RangeDistance( const HollowBallBound& other) const { if (radii.Hi() < 0) return math::Range(std::numeric_limits::max(), std::numeric_limits::max()); else { math::RangeType range; const ElemType dist = metric->Evaluate(center, other.center); const ElemType outerDistance = dist - radii.Hi() - other.radii.Hi(); if (outerDistance >= 0) range.Lo() = outerDistance; // The outer balls do not overlap. else { const ElemType innerDistance1 = other.radii.Lo() - metric->Evaluate(center, other.hollowCenter) - radii.Hi(); // Check if the outer ball of the first bound is contained in the // hole of the second bound. if (innerDistance1 >= 0) range.Lo() = innerDistance1; else { // Check if the outer ball of the second bound is contained in the // hole of the first bound. range.Lo() = math::ClampNonNegative(radii.Lo() - metric->Evaluate(hollowCenter, other.center) - other.radii.Hi()); } } range.Hi() = dist + radii.Hi() + other.radii.Hi(); return range; } } /** * Expand the bound to include the given point. Algorithm adapted from * Jack Ritter, "An Efficient Bounding Sphere" in Graphics Gems (1990). * The difference lies in the way we initialize the ball bound. The way we * expand the bound is same. */ template template const HollowBallBound& HollowBallBound::operator|=(const MatType& data) { if (radii.Hi() < 0) { center = data.col(0); radii.Hi() = 0; } if (radii.Lo() < 0) { hollowCenter = data.col(0); radii.Lo() = 0; } // Now iteratively add points. for (size_t i = 0; i < data.n_cols; ++i) { const ElemType dist = metric->Evaluate(center, data.col(i)); const ElemType hollowDist = metric->Evaluate(hollowCenter, data.col(i)); // See if the new point lies outside the bound. if (dist > radii.Hi()) { // Move towards the new point and increase the radius just enough to // accommodate the new point. const arma::Col diff = data.col(i) - center; center += ((dist - radii.Hi()) / (2 * dist)) * diff; radii.Hi() = 0.5 * (dist + radii.Hi()); } if (hollowDist < radii.Lo()) radii.Lo() = hollowDist; } return *this; } /** * Expand the bound to include the given bound. */ template const HollowBallBound& HollowBallBound::operator|=(const HollowBallBound& other) { if (radii.Hi() < 0) { center = other.center; hollowCenter = other.hollowCenter; radii.Hi() = other.radii.Hi(); radii.Lo() = other.radii.Lo(); return *this; } const ElemType dist = metric->Evaluate(center, other.center); // Check if the outer balls overlap. 
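  // (More precisely: grow this bound's outer radius, keeping the center
  // fixed, until the other bound's outer ball is fully enclosed.)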
if (radii.Hi() < dist + other.radii.Hi()) radii.Hi() = dist + other.radii.Hi(); const ElemType innerDist = math::ClampNonNegative(other.radii.Lo() - metric->Evaluate(hollowCenter, other.hollowCenter)); // Check if the hole of the first bound is not contained in the hole of the // second bound. if (radii.Lo() > innerDist) radii.Lo() = innerDist; return *this; } //! Serialize the BallBound. template template void HollowBallBound::Serialize( Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(radii, "radii"); ar & data::CreateNVP(center, "center"); ar & data::CreateNVP(hollowCenter, "hollowCenter"); if (Archive::is_loading::value) { // If we're loading, delete the local metric since we'll have a new one. if (ownsMetric) delete metric; } ar & data::CreateNVP(metric, "metric"); ar & data::CreateNVP(ownsMetric, "ownsMetric"); } } // namespace bound } // namespace mlpack #endif // MLPACK_CORE_TREE_HOLLOW_BALL_BOUND_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/hrectbound.hpp000066400000000000000000000154651315013601400215670ustar00rootroot00000000000000/** * @file hrectbound.hpp * * Bounds that are useful for binary space partitioning trees. * * This file describes the interface for the HRectBound class, which implements * a hyperrectangle bound. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_HRECTBOUND_HPP #define MLPACK_CORE_TREE_HRECTBOUND_HPP #include #include #include #include "bound_traits.hpp" namespace mlpack { namespace bound { namespace meta /** Metaprogramming utilities. */ { //! Utility struct where Value is true if and only if the argument is of type //! LMetric. template struct IsLMetric { static const bool Value = false; }; //! Specialization for IsLMetric when the argument is of type LMetric. template struct IsLMetric> { static const bool Value = true; }; } // namespace util /** * Hyper-rectangle bound for an L-metric. This should be used in conjunction * with the LMetric class. Be sure to use the same template parameters for * LMetric as you do for HRectBound -- otherwise odd results may occur. * * @tparam MetricType Type of metric to use; must be of type LMetric. * @tparam ElemType Element type (double/float/int/etc.). */ template, typename ElemType = double> class HRectBound { // It is required that HRectBound have an LMetric as the given MetricType. static_assert(meta::IsLMetric::Value == true, "HRectBound can only be used with the LMetric<> metric type."); public: /** * Empty constructor; creates a bound of dimensionality 0. */ HRectBound(); /** * Initializes to specified dimensionality with each dimension the empty * set. */ HRectBound(const size_t dimension); //! Copy constructor; necessary to prevent memory leaks. HRectBound(const HRectBound& other); //! Same as copy constructor; necessary to prevent memory leaks. HRectBound& operator=(const HRectBound& other); //! Move constructor: take possession of another bound's information. HRectBound(HRectBound&& other); //! Destructor: clean up memory. ~HRectBound(); /** * Resets all dimensions to the empty set (so that this bound contains * nothing). */ void Clear(); //! Gets the dimensionality. size_t Dim() const { return dim; } //! Get the range for a particular dimension. No bounds checking. Be //! careful: this may make MinWidth() invalid. 
math::RangeType& operator[](const size_t i) { return bounds[i]; } //! Modify the range for a particular dimension. No bounds checking. const math::RangeType& operator[](const size_t i) const { return bounds[i]; } //! Get the minimum width of the bound. ElemType MinWidth() const { return minWidth; } //! Modify the minimum width of the bound. ElemType& MinWidth() { return minWidth; } /** * Calculates the center of the range, placing it into the given vector. * * @param center Vector which the center will be written to. */ void Center(arma::Col& center) const; /** * Calculate the volume of the hyperrectangle. * * @return Volume of the hyperrectangle. */ ElemType Volume() const; /** * Calculates minimum bound-to-point distance. * * @param point Point to which the minimum distance is requested. */ template ElemType MinDistance(const VecType& point, typename boost::enable_if>* = 0) const; /** * Calculates minimum bound-to-bound distance. * * @param other Bound to which the minimum distance is requested. */ ElemType MinDistance(const HRectBound& other) const; /** * Calculates maximum bound-to-point squared distance. * * @param point Point to which the maximum distance is requested. */ template ElemType MaxDistance(const VecType& point, typename boost::enable_if>* = 0) const; /** * Computes maximum distance. * * @param other Bound to which the maximum distance is requested. */ ElemType MaxDistance(const HRectBound& other) const; /** * Calculates minimum and maximum bound-to-bound distance. * * @param other Bound to which the minimum and maximum distances are * requested. */ math::RangeType RangeDistance(const HRectBound& other) const; /** * Calculates minimum and maximum bound-to-point distance. * * @param point Point to which the minimum and maximum distances are * requested. */ template math::RangeType RangeDistance( const VecType& point, typename boost::enable_if>* = 0) const; /** * Expands this region to include new points. * * @tparam MatType Type of matrix; could be Mat, SpMat, a subview, or just a * vector. * @param data Data points to expand this region to include. */ template HRectBound& operator|=(const MatType& data); /** * Expands this region to encompass another bound. */ HRectBound& operator|=(const HRectBound& other); /** * Determines if a point is within this bound. */ template bool Contains(const VecType& point) const; /** * Determines if this bound partially contains a bound. */ bool Contains(const HRectBound& bound) const; /** * Returns the intersection of this bound and another. */ HRectBound operator&(const HRectBound& bound) const; /** * Intersects this bound with another. */ HRectBound& operator&=(const HRectBound& bound); /** * Returns the volume of overlap of this bound and another. */ ElemType Overlap(const HRectBound& bound) const; /** * Returns the diameter of the hyperrectangle (that is, the longest diagonal). */ ElemType Diameter() const; /** * Serialize the bound object. */ template void Serialize(Archive& ar, const unsigned int version); private: //! The dimensionality of the bound. size_t dim; //! The bounds for each dimension. math::RangeType* bounds; //! Cached minimum width of bound. ElemType minWidth; }; // A specialization of BoundTraits for this class. template struct BoundTraits> { //! These bounds are always tight for each dimension. 
const static bool HasTightBounds = true; }; } // namespace bound } // namespace mlpack #include "hrectbound_impl.hpp" #endif // MLPACK_CORE_TREE_HRECTBOUND_HPP mlpack-2.2.5/src/mlpack/core/tree/hrectbound_impl.hpp000066400000000000000000000426311315013601400226030ustar00rootroot00000000000000/** * @file hrectbound_impl.hpp * * Implementation of hyper-rectangle bound policy class. * Template parameter Power is the metric to use; use 2 for Euclidean (L2). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_HRECTBOUND_IMPL_HPP #define MLPACK_CORE_TREE_HRECTBOUND_IMPL_HPP #include // In case it has not been included yet. #include "hrectbound.hpp" namespace mlpack { namespace bound { /** * Empty constructor. */ template inline HRectBound::HRectBound() : dim(0), bounds(NULL), minWidth(0) { /* Nothing to do. */ } /** * Initializes to specified dimensionality with each dimension the empty * set. */ template inline HRectBound::HRectBound(const size_t dimension) : dim(dimension), bounds(new math::RangeType[dim]), minWidth(0) { /* Nothing to do. */ } /** * Copy constructor necessary to prevent memory leaks. */ template inline HRectBound::HRectBound( const HRectBound& other) : dim(other.Dim()), bounds(new math::RangeType[dim]), minWidth(other.MinWidth()) { // Copy other bounds over. for (size_t i = 0; i < dim; i++) bounds[i] = other[i]; } /** * Same as the copy constructor. */ template inline HRectBound& HRectBound::operator=( const HRectBound& other) { if (dim != other.Dim()) { // Reallocation is necessary. if (bounds) delete[] bounds; dim = other.Dim(); bounds = new math::RangeType[dim]; } // Now copy each of the bound values. for (size_t i = 0; i < dim; i++) bounds[i] = other[i]; minWidth = other.MinWidth(); return *this; } /** * Move constructor: take possession of another bound's information. */ template inline HRectBound::HRectBound( HRectBound&& other) : dim(other.dim), bounds(other.bounds), minWidth(other.minWidth) { // Fix the other bound. other.dim = 0; other.bounds = NULL; other.minWidth = 0.0; } /** * Destructor: clean up memory. */ template inline HRectBound::~HRectBound() { if (bounds) delete[] bounds; } /** * Resets all dimensions to the empty set. */ template inline void HRectBound::Clear() { for (size_t i = 0; i < dim; i++) bounds[i] = math::RangeType(); minWidth = 0; } /*** * Calculates the centroid of the range, placing it into the given vector. * * @param centroid Vector which the centroid will be written to. */ template inline void HRectBound::Center( arma::Col& center) const { // Set size correctly if necessary. if (!(center.n_elem == dim)) center.set_size(dim); for (size_t i = 0; i < dim; i++) center(i) = bounds[i].Mid(); } /** * Calculate the volume of the hyperrectangle. * * @return Volume of the hyperrectangle. */ template inline ElemType HRectBound::Volume() const { ElemType volume = 1.0; for (size_t i = 0; i < dim; ++i) { if (bounds[i].Lo() >= bounds[i].Hi()) return 0; volume *= (bounds[i].Hi() - bounds[i].Lo()); } return volume; } /** * Calculates minimum bound-to-point squared distance. 
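 * Per dimension d, the contribution to the sum is
 * max(lo_d - p_d, 0) + max(p_d - hi_d, 0), of which at most one term is
 * nonzero; the implementation below computes it via the identity
 * x + |x| = 2 * max(x, 0) and divides the resulting factor of 2 back out at
 * the end.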
*/ template template inline ElemType HRectBound::MinDistance( const VecType& point, typename boost::enable_if>* /* junk */) const { Log::Assert(point.n_elem == dim); ElemType sum = 0; ElemType lower, higher; for (size_t d = 0; d < dim; d++) { lower = bounds[d].Lo() - point[d]; higher = point[d] - bounds[d].Hi(); // Since only one of 'lower' or 'higher' is negative, if we add each's // absolute value to itself and then sum those two, our result is the // nonnegative half of the equation times two; then we raise to power Power. if (MetricType::Power == 1) sum += (lower + std::fabs(lower)) + (higher + std::fabs(higher)); else if (MetricType::Power == 2) { ElemType dist = (lower + std::fabs(lower)) + (higher + std::fabs(higher)); sum += dist * dist; } else { sum += pow((lower + fabs(lower)) + (higher + fabs(higher)), (ElemType) MetricType::Power); } } // Now take the Power'th root (but make sure our result is squared if it needs // to be); then cancel out the constant of 2 (which may have been squared now) // that was introduced earlier. The compiler should optimize out the if // statement entirely. if (MetricType::Power == 1) return sum * 0.5; else if (MetricType::Power == 2) { if (MetricType::TakeRoot) return (ElemType) std::sqrt(sum) * 0.5; else return sum * 0.25; } else { if (MetricType::TakeRoot) return (ElemType) pow((double) sum, 1.0 / (double) MetricType::Power) / 2.0; else return sum / pow(2.0, MetricType::Power); } } /** * Calculates minimum bound-to-bound squared distance. */ template ElemType HRectBound::MinDistance(const HRectBound& other) const { Log::Assert(dim == other.dim); ElemType sum = 0; const math::RangeType* mbound = bounds; const math::RangeType* obound = other.bounds; ElemType lower, higher; for (size_t d = 0; d < dim; d++) { lower = obound->Lo() - mbound->Hi(); higher = mbound->Lo() - obound->Hi(); // We invoke the following: // x + fabs(x) = max(x * 2, 0) // (x * 2)^2 / 4 = x^2 // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) sum += (lower + std::fabs(lower)) + (higher + std::fabs(higher)); else if (MetricType::Power == 2) { ElemType dist = (lower + std::fabs(lower)) + (higher + std::fabs(higher)); sum += dist * dist; } else { sum += pow((lower + fabs(lower)) + (higher + fabs(higher)), (ElemType) MetricType::Power); } // Move bound pointers. mbound++; obound++; } // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) return sum * 0.5; else if (MetricType::Power == 2) { if (MetricType::TakeRoot) return (ElemType) std::sqrt(sum) * 0.5; else return sum * 0.25; } else { if (MetricType::TakeRoot) return (ElemType) pow((double) sum, 1.0 / (double) MetricType::Power) / 2.0; else return sum / pow(2.0, MetricType::Power); } } /** * Calculates maximum bound-to-point squared distance. */ template template inline ElemType HRectBound::MaxDistance( const VecType& point, typename boost::enable_if >* /* junk */) const { ElemType sum = 0; Log::Assert(point.n_elem == dim); for (size_t d = 0; d < dim; d++) { ElemType v = std::max(fabs(point[d] - bounds[d].Lo()), fabs(bounds[d].Hi() - point[d])); // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) sum += v; // v is non-negative. else if (MetricType::Power == 2) sum += v * v; else sum += std::pow(v, (ElemType) MetricType::Power); } // The compiler should optimize out this if statement entirely. 
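  // A normalization sketch, assuming Power = 2: at this point sum holds
  // sum_d max(|p_d - lo_d|, |hi_d - p_d|)^2, so with TakeRoot = true the
  // branch below returns the true L2 maximum distance sqrt(sum), and with
  // TakeRoot = false the squared distance is returned as-is.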
if (MetricType::TakeRoot) { if (MetricType::Power == 1) return sum; else if (MetricType::Power == 2) return (ElemType) std::sqrt(sum); else return (ElemType) pow((double) sum, 1.0 / (double) MetricType::Power); } else return sum; } /** * Computes maximum distance. */ template inline ElemType HRectBound::MaxDistance( const HRectBound& other) const { ElemType sum = 0; Log::Assert(dim == other.dim); ElemType v; for (size_t d = 0; d < dim; d++) { v = std::max(fabs(other.bounds[d].Hi() - bounds[d].Lo()), fabs(bounds[d].Hi() - other.bounds[d].Lo())); // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) sum += v; // v is non-negative. else if (MetricType::Power == 2) sum += v * v; else sum += std::pow(v, (ElemType) MetricType::Power); } // The compiler should optimize out this if statement entirely. if (MetricType::TakeRoot) { if (MetricType::Power == 1) return sum; else if (MetricType::Power == 2) return (ElemType) std::sqrt(sum); else return (ElemType) pow((double) sum, 1.0 / (double) MetricType::Power); } else return sum; } /** * Calculates minimum and maximum bound-to-bound squared distance. */ template inline math::RangeType HRectBound::RangeDistance( const HRectBound& other) const { ElemType loSum = 0; ElemType hiSum = 0; Log::Assert(dim == other.dim); ElemType v1, v2, vLo, vHi; for (size_t d = 0; d < dim; d++) { v1 = other.bounds[d].Lo() - bounds[d].Hi(); v2 = bounds[d].Lo() - other.bounds[d].Hi(); // One of v1 or v2 is negative. if (v1 >= v2) { vHi = -v2; // Make it nonnegative. vLo = (v1 > 0) ? v1 : 0; // Force to be 0 if negative. } else { vHi = -v1; // Make it nonnegative. vLo = (v2 > 0) ? v2 : 0; // Force to be 0 if negative. } // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) { loSum += vLo; // vLo is non-negative. hiSum += vHi; // vHi is non-negative. } else if (MetricType::Power == 2) { loSum += vLo * vLo; hiSum += vHi * vHi; } else { loSum += std::pow(vLo, (ElemType) MetricType::Power); hiSum += std::pow(vHi, (ElemType) MetricType::Power); } } if (MetricType::TakeRoot) { if (MetricType::Power == 1) return math::RangeType(loSum, hiSum); else if (MetricType::Power == 2) return math::RangeType((ElemType) std::sqrt(loSum), (ElemType) std::sqrt(hiSum)); else { return math::RangeType( (ElemType) pow((double) loSum, 1.0 / (double) MetricType::Power), (ElemType) pow((double) hiSum, 1.0 / (double) MetricType::Power)); } } else return math::RangeType(loSum, hiSum); } /** * Calculates minimum and maximum bound-to-point squared distance. */ template template inline math::RangeType HRectBound::RangeDistance( const VecType& point, typename boost::enable_if>* /* junk */) const { ElemType loSum = 0; ElemType hiSum = 0; Log::Assert(point.n_elem == dim); ElemType v1, v2, vLo, vHi; for (size_t d = 0; d < dim; d++) { v1 = bounds[d].Lo() - point[d]; // Negative if point[d] > lo. v2 = point[d] - bounds[d].Hi(); // Negative if point[d] < hi. // One of v1 or v2 (or both) is negative. if (v1 >= 0) // point[d] <= bounds_[d].Lo(). { vHi = -v2; // v2 will be larger but must be negated. vLo = v1; } else // point[d] is between lo and hi, or greater than hi. { if (v2 >= 0) { vHi = -v1; // v1 will be larger, but must be negated. vLo = v2; } else { vHi = -std::min(v1, v2); // Both are negative, but we need the larger. vLo = 0; } } // The compiler should optimize out this if statement entirely. if (MetricType::Power == 1) { loSum += vLo; // vLo is non-negative. hiSum += vHi; // vHi is non-negative. 
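      // (With Power = 1 no exponentiation is needed: vLo and vHi are the
      // per-dimension lower and upper gaps, so loSum and hiSum accumulate
      // the L1 range directly.)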
} else if (MetricType::Power == 2) { loSum += vLo * vLo; hiSum += vHi * vHi; } else { loSum += std::pow(vLo, (ElemType) MetricType::Power); hiSum += std::pow(vHi, (ElemType) MetricType::Power); } } if (MetricType::TakeRoot) { if (MetricType::Power == 1) return math::RangeType(loSum, hiSum); else if (MetricType::Power == 2) return math::RangeType((ElemType) std::sqrt(loSum), (ElemType) std::sqrt(hiSum)); else { return math::RangeType( (ElemType) pow((double) loSum, 1.0 / (double) MetricType::Power), (ElemType) pow((double) hiSum, 1.0 / (double) MetricType::Power)); } } else return math::RangeType(loSum, hiSum); } /** * Expands this region to include a new point. */ template template inline HRectBound& HRectBound::operator|=( const MatType& data) { Log::Assert(data.n_rows == dim); arma::Col mins(min(data, 1)); arma::Col maxs(max(data, 1)); minWidth = std::numeric_limits::max(); for (size_t i = 0; i < dim; i++) { bounds[i] |= math::RangeType(mins[i], maxs[i]); const ElemType width = bounds[i].Width(); if (width < minWidth) minWidth = width; } return *this; } /** * Expands this region to encompass another bound. */ template inline HRectBound& HRectBound::operator|=( const HRectBound& other) { assert(other.dim == dim); minWidth = std::numeric_limits::max(); for (size_t i = 0; i < dim; i++) { bounds[i] |= other.bounds[i]; const ElemType width = bounds[i].Width(); if (width < minWidth) minWidth = width; } return *this; } /** * Determines if a point is within this bound. */ template template inline bool HRectBound::Contains(const VecType& point) const { for (size_t i = 0; i < point.n_elem; i++) { if (!bounds[i].Contains(point(i))) return false; } return true; } /** * Determines if this bound partially contains a bound. */ template inline bool HRectBound::Contains( const HRectBound& bound) const { for (size_t i = 0; i < dim; i++) { const math::RangeType& r_a = bounds[i]; const math::RangeType& r_b = bound.bounds[i]; if (r_a.Hi() <= r_b.Lo() || r_a.Lo() >= r_b.Hi()) // If a does not overlap b at all. return false; } return true; } /** * Returns the intersection of this bound and another. */ template inline HRectBound HRectBound:: operator&(const HRectBound& bound) const { HRectBound result(dim); for (size_t k = 0; k < dim; k++) { result[k].Lo() = std::max(bounds[k].Lo(), bound.bounds[k].Lo()); result[k].Hi() = std::min(bounds[k].Hi(), bound.bounds[k].Hi()); } return result; } /** * Intersects this bound with another. */ template inline HRectBound& HRectBound:: operator&=(const HRectBound& bound) { for (size_t k = 0; k < dim; k++) { bounds[k].Lo() = std::max(bounds[k].Lo(), bound.bounds[k].Lo()); bounds[k].Hi() = std::min(bounds[k].Hi(), bound.bounds[k].Hi()); } return *this; } /** * Returns the volume of overlap of this bound and another. */ template inline ElemType HRectBound::Overlap( const HRectBound& bound) const { ElemType volume = 1.0; for (size_t k = 0; k < dim; k++) { ElemType lo = std::max(bounds[k].Lo(), bound.bounds[k].Lo()); ElemType hi = std::min(bounds[k].Hi(), bound.bounds[k].Hi()); if ( hi <= lo) return 0; volume *= hi - lo; } return volume; } /** * Returns the diameter of the hyperrectangle (that is, the longest diagonal). */ template inline ElemType HRectBound::Diameter() const { ElemType d = 0; for (size_t i = 0; i < dim; ++i) d += std::pow(bounds[i].Hi() - bounds[i].Lo(), (ElemType) MetricType::Power); if (MetricType::TakeRoot) return (ElemType) std::pow((double) d, 1.0 / (double) MetricType::Power); else return d; } //! Serialize the bound object. 
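//! A round-trip sketch (assuming a boost::archive::xml_oarchive named ar and
//! mlpack's serialization shim; the names here are illustrative only):
//!
//!   HRectBound<metric::EuclideanDistance> b(3);
//!   ar << data::CreateNVP(b, "bound"); // Writes dim, bounds, and minWidth.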
template<typename MetricType, typename ElemType>
template<typename Archive>
void HRectBound<MetricType, ElemType>::Serialize(
    Archive& ar,
    const unsigned int /* version */)
{
  ar & data::CreateNVP(dim, "dim");

  // Allocate memory for the bounds, if necessary.
  if (Archive::is_loading::value)
  {
    if (bounds)
      delete[] bounds;
    bounds = new math::RangeType<ElemType>[dim];
  }

  ar & data::CreateArrayNVP(bounds, dim, "bounds");
  ar & data::CreateNVP(minWidth, "minWidth");
}

} // namespace bound
} // namespace mlpack

#endif // MLPACK_CORE_TREE_HRECTBOUND_IMPL_HPP
mlpack-2.2.5/src/mlpack/core/tree/octree.hpp000066400000000000000000000012501315013601400206760ustar00rootroot00000000000000/**
 * @file octree.hpp
 * @author Ryan Curtin
 *
 * Include all the necessary files to use the Octree class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_OCTREE_HPP
#define MLPACK_CORE_TREE_OCTREE_HPP

#include <mlpack/core.hpp>
#include "bounds.hpp"
#include "octree/octree.hpp"
#include "octree/traits.hpp"
#include "octree/single_tree_traverser.hpp"
#include "octree/dual_tree_traverser.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/octree/000077500000000000000000000000001315013601400201675ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/tree/octree/dual_tree_traverser.hpp000066400000000000000000000047241315013601400247500ustar00rootroot00000000000000/**
 * @file dual_tree_traverser.hpp
 * @author Ryan Curtin
 *
 * Define the dual-tree traverser for the Octree.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_OCTREE_DUAL_TREE_TRAVERSER_HPP
#define MLPACK_CORE_TREE_OCTREE_DUAL_TREE_TRAVERSER_HPP

#include <mlpack/core.hpp>
#include "octree.hpp"

namespace mlpack {
namespace tree {

template<typename MetricType, typename StatisticType, typename MatType>
template<typename RuleType>
class Octree<MetricType, StatisticType, MatType>::DualTreeTraverser
{
 public:
  /**
   * Instantiate the given dual-tree traverser with the given rule set.
   */
  DualTreeTraverser(RuleType& rule);

  /**
   * Traverse the two trees. This does not reset the statistics of the
   * traversals (it just adds to them).
   */
  void Traverse(Octree& queryNode, Octree& referenceNode);

  //! Get the number of pruned nodes.
  size_t NumPrunes() const { return numPrunes; }
  //! Modify the number of pruned nodes (i.e. to reset it).
  size_t& NumPrunes() { return numPrunes; }

  //! Get the number of visited node combinations.
  size_t NumVisited() const { return numVisited; }
  //! Modify the number of visited node combinations.
  size_t& NumVisited() { return numVisited; }

  //! Get the number of times a node was scored.
  size_t NumScores() const { return numScores; }
  //! Modify the number of times a node was scored.
  size_t& NumScores() { return numScores; }

  //! Get the number of times a base case was computed.
  size_t NumBaseCases() const { return numBaseCases; }
  //! Modify the number of times a base case was computed.
  size_t& NumBaseCases() { return numBaseCases; }

 private:
  //! The rule type to use.
  RuleType& rule;

  //! The number of prunes.
  size_t numPrunes;
  //! The number of visited node combinations.
  size_t numVisited;
  //! The number of times a node was scored.
  size_t numScores;
  //! The number of times a base case was calculated.
  size_t numBaseCases;

  //!
Traversal information, held in the class so that it isn't continually //! being reallocated. typename RuleType::TraversalInfoType traversalInfo; }; } // namespace tree } // namespace mlpack // Include implementation. #include "dual_tree_traverser_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/octree/dual_tree_traverser_impl.hpp000066400000000000000000000113521315013601400257640ustar00rootroot00000000000000/** * @file dual_tree_traverser_impl.hpp * @author Ryan Curtin * * Implementation of the dual-tree traverser for the octree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_OCTREE_DUAL_TREE_TRAVERSER_IMPL_HPP #define MLPACK_CORE_TREE_OCTREE_DUAL_TREE_TRAVERSER_IMPL_HPP // In case it hasn't been included yet. #include "dual_tree_traverser.hpp" namespace mlpack { namespace tree { template template Octree::DualTreeTraverser:: DualTreeTraverser(RuleType& rule) : rule(rule), numPrunes(0), numVisited(0), numScores(0), numBaseCases(0) { // Nothing to do. } template template void Octree::DualTreeTraverser:: Traverse(Octree& queryNode, Octree& referenceNode) { // Increment the visit counter. ++numVisited; // Store the current traversal info. traversalInfo = rule.TraversalInfo(); if (queryNode.IsLeaf() && referenceNode.IsLeaf()) { const size_t begin = queryNode.Point(0); const size_t end = begin + queryNode.NumPoints(); for (size_t q = begin; q < end; ++q) { // First, see if we can prune the reference node for this query point. rule.TraversalInfo() = traversalInfo; const double score = rule.Score(q, referenceNode); if (score == DBL_MAX) { ++numPrunes; continue; } const size_t rBegin = referenceNode.Point(0); const size_t rEnd = rBegin + referenceNode.NumPoints(); for (size_t r = rBegin; r < rEnd; ++r) rule.BaseCase(q, r); numBaseCases += referenceNode.NumPoints(); } } else if (!queryNode.IsLeaf() && referenceNode.IsLeaf()) { // We have to recurse down the query node. Order does not matter. for (size_t i = 0; i < queryNode.NumChildren(); ++i) { rule.TraversalInfo() = traversalInfo; const double score = rule.Score(queryNode.Child(i), referenceNode); if (score == DBL_MAX) { ++numPrunes; continue; } Traverse(queryNode.Child(i), referenceNode); } } else if (queryNode.IsLeaf() && !referenceNode.IsLeaf()) { // We have to recurse down the reference node, so we need to do it in an // ordered manner. arma::vec scores(referenceNode.NumChildren()); std::vector tis(referenceNode.NumChildren()); for (size_t i = 0; i < referenceNode.NumChildren(); ++i) { rule.TraversalInfo() = traversalInfo; scores[i] = rule.Score(queryNode, referenceNode.Child(i)); tis[i] = rule.TraversalInfo(); } // Sort the scores. arma::uvec scoreOrder = arma::sort_index(scores); for (size_t i = 0; i < scoreOrder.n_elem; ++i) { if (scores[scoreOrder[i]] == DBL_MAX) { // We don't need to check any more---all children past here are pruned. numPrunes += scoreOrder.n_elem - i; break; } rule.TraversalInfo() = tis[scoreOrder[i]]; Traverse(queryNode, referenceNode.Child(scoreOrder[i])); } } else { // We have to recurse down both the query and reference nodes. Query order // does not matter, so we will do that in sequence. However we will // allocate the arrays for recursion at this level. 
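  // (A sketch of the schedule below: for each query child, every reference
  // child is scored, then visited in ascending score order, so the most
  // promising reference children are recursed into first and any
  // DBL_MAX-scored tail is pruned in a single batch.)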
arma::vec scores(referenceNode.NumChildren()); std::vector tis(referenceNode.NumChildren()); for (size_t j = 0; j < queryNode.NumChildren(); ++j) { // Now we have to recurse down the reference node, which we will do in a // prioritized manner. for (size_t i = 0; i < referenceNode.NumChildren(); ++i) { rule.TraversalInfo() = traversalInfo; scores[i] = rule.Score(queryNode.Child(j), referenceNode.Child(i)); tis[i] = rule.TraversalInfo(); } // Sort the scores. arma::uvec scoreOrder = arma::sort_index(scores); for (size_t i = 0; i < scoreOrder.n_elem; ++i) { if (scores[scoreOrder[i]] == DBL_MAX) { // We don't need to check any more---all children past here are pruned. numPrunes += scoreOrder.n_elem - i; break; } rule.TraversalInfo() = tis[scoreOrder[i]]; Traverse(queryNode.Child(j), referenceNode.Child(scoreOrder[i])); } } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/octree/octree.hpp000066400000000000000000000402161315013601400221640ustar00rootroot00000000000000/** * @file octree.hpp * @author Ryan Curtin * * Definition of generalized octree (Octree). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_OCTREE_OCTREE_HPP #define MLPACK_CORE_TREE_OCTREE_OCTREE_HPP #include #include "../hrectbound.hpp" #include "../statistic.hpp" namespace mlpack { namespace tree { template class Octree { public: //! So other classes can use TreeType::Mat. typedef MatType Mat; //! The type of element held in MatType. typedef typename MatType::elem_type ElemType; //! A single-tree traverser; see single_tree_traverser.hpp. template class SingleTreeTraverser; //! A dual-tree traverser; see dual_tree_traverser.hpp. template class DualTreeTraverser; private: //! The children held by this node. std::vector children; //! The index of the first point in the dataset contained in this node (and //! its children). size_t begin; //! The number of points of the dataset contained in this node (and its //! children). size_t count; //! The minimum bounding rectangle of the points held in the node (and its //! children). bound::HRectBound bound; //! The dataset. MatType* dataset; //! The parent (NULL if this node is the root). Octree* parent; //! The statistic. StatisticType stat; //! The distance from the center of this node to the center of the parent. ElemType parentDistance; //! The distance to the furthest descendant, cached to speed things up. ElemType furthestDescendantDistance; //! An instantiated metric. MetricType metric; public: /** * Construct this as the root node of an octree on the given dataset. This * copies the dataset. If you don't want to copy the input dataset, consider * using the constructor that takes an rvalue reference and use std::move(). * * @param data Dataset to create tree from. This will be copied! * @param maxLeafSize Maximum number of points in a leaf node. */ Octree(const MatType& data, const size_t maxLeafSize = 20); /** * Construct this as the root node of an octree on the given dataset. This * copies the dataset and modifies its ordering; a mapping of the old point * indices to the new point indices is filled. If you don't want the matrix * to be copied, consider using the constructor that takes an rvalue reference * and use std::move(). * * @param data Dataset to create tree from. 
This will be copied! * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param maxLeafSize Maximum number of points in a leaf node. */ Octree(const MatType& data, std::vector& oldFromNew, const size_t maxLeafSize = 20); /** * Construct this as the root node of an octree on the given dataset. This * copies the dataset and modifies its ordering; a mapping of the old point * indices to the new point indices is filled, and a mapping of the new point * indices to the old point indices is filled. If you don't want the matrix * to be copied, consider using the constructor that takes an rvalue reference * and use std::move(). * * @param data Dataset to create tree from. This will be copied! * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param newFromOld Vector which will be filled with the new positions for * each old point. * @param maxLeafSize Maximum number of points in a leaf node. */ Octree(const MatType& data, std::vector& oldFromNew, std::vector& newFromOld, const size_t maxLeafSize = 20); /** * Construct this as the root node of an octree on the given dataset. This * will take ownership of the dataset; if you don't want this, consider using * the constructor that takes a const reference to the dataset. * * @param data Dataset to create tree from. This will be copied! * @param maxLeafSize Maximum number of points in a leaf node. */ Octree(MatType&& data, const size_t maxLeafSize = 20); /** * Construct this as the root node of an octree on the given dataset. This * will take ownership of the dataset; if you don't want this, consider using * the constructor that takes a const reference to the dataset. This modifies * the ordering of the dataset; a mapping of the old point indices to the new * point indices is filled. * * @param data Dataset to create tree from. This will be copied! * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param maxLeafSize Maximum number of points in a leaf node. */ Octree(MatType&& data, std::vector& oldFromNew, const size_t maxLeafSize = 20); /** * Construct this as the root node of an octree on the given dataset. This * will take ownership of the dataset; if you don't want this, consider using * the constructor that takes a const reference to the dataset. This modifies * the ordering of the dataset; a mapping of the old point indices to the new * point indices is filled, and a mapping of the new point indices to the old * point indices is filled. * * @param data Dataset to create tree from. This will be copied! * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param newFromOld Vector which will be filled with the new positions for * each old point. * @param maxLeafSize Maximum number of points in a leaf node. */ Octree(MatType&& data, std::vector& oldFromNew, std::vector& newFromOld, const size_t maxLeafSize = 20); /** * Construct this node as a child of the given parent, starting at column * begin and using count points. The ordering of that subset of points in the * parent's data matrix will be modified! This is used for recursive * tree-building by the other constructors that don't specify point indices. * * @param parent Parent of this node. Its dataset will be modified! * @param begin Index of point to start tree construction with. * @param count Number of points to use to construct tree. * @param center Center of the node (for splitting). 
* @param width Width of the node in each dimension. * @param maxLeafSize Maximum number of points in a leaf node. */ Octree(Octree* parent, const size_t begin, const size_t count, const arma::vec& center, const double width, const size_t maxLeafSize = 20); /** * Construct this node as a child of the given parent, starting at column * begin and using count points. The ordering of that subset of points in the * parent's data matrix will be modified! This is used for recursive * tree-building by the other constructors that don't specify point indices. * * A mapping of the old point indices to the new point indices is filled, but * it is expected that the vector is already allocated with size greater than * or equal to (begin + count), and if that is not true, invalid memory reads * (and writes) will occur. * * @param parent Parent of this node. Its dataset will be modified! * @param begin Index of point to start tree construction with. * @param count Number of points to use to construct tree. * @param oldFromNew Vector which will be filled with the old positions for * each new point. * @param center Center of the node (for splitting). * @param width Width of the node in each dimension. * @param maxLeafSize Maximum number of points in a leaf node. */ Octree(Octree* parent, const size_t begin, const size_t count, std::vector& oldFromNew, const arma::vec& center, const double width, const size_t maxLeafSize = 20); /** * Copy the given tree. Be careful! This may use a lot of memory. * * @param other Tree to copy from. */ Octree(const Octree& other); /** * Move the given tree. The tree passed as a parameter will be emptied and * will not be usable after this call. * * @param other Tree to move. */ Octree(Octree&& other); /** * Initialize the tree from a boost::serialization archive. * * @param ar Archive to load tree from. Must be an iarchive, not an oarchive. */ template Octree( Archive& ar, const typename boost::enable_if::type* = 0); /** * Destroy the tree. */ ~Octree(); //! Return the dataset used by this node. const MatType& Dataset() const { return *dataset; } //! Get the pointer to the parent. Octree* Parent() const { return parent; } //! Modify the pointer to the parent (be careful!). Octree*& Parent() { return parent; } //! Return the bound object for this node. const bound::HRectBound& Bound() const { return bound; } //! Modify the bound object for this node. bound::HRectBound& Bound() { return bound; } //! Return the statistic object for this node. const StatisticType& Stat() const { return stat; } //! Modify the statistic object for this node. StatisticType& Stat() { return stat; } //! Return the number of children in this node. size_t NumChildren() const; //! Return the metric that this tree uses. MetricType Metric() const { return MetricType(); } /** * Return the index of the nearest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template size_t GetNearestChild( const VecType& point, typename boost::enable_if>::type* = 0) const; /** * Return the index of the furthest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template size_t GetFurthestChild( const VecType& point, typename boost::enable_if >::type* = 0) const; /** * Return whether or not the node is a leaf. */ bool IsLeaf() const { return NumChildren() == 0; } /** * Return the index of the nearest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). 
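 * (A sketch of the tie behavior in the implementation: the first
 * best-scoring child index found is kept, and NumChildren() is returned
 * only when no child improves on the initial worst-case distance, e.g. for
 * a leaf.)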
*/ size_t GetNearestChild(const Octree& queryNode) const; /** * Return the index of the furthest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ size_t GetFurthestChild(const Octree& queryNode) const; /** * Return the furthest distance to a point held in this node. If this is not * a leaf node, then the distance is 0 because the node holds no points. */ ElemType FurthestPointDistance() const; /** * Return the furthest possible descendant distance. This returns the maximum * distance from the centroid to the edge of the bound and not the empirical * quantity which is the actual furthest descendant distance. So the actual * furthest descendant distance may be less than what this method returns (but * it will never be greater than this). */ ElemType FurthestDescendantDistance() const; //! Return the minimum distance from the center of the node to any bound edge. ElemType MinimumBoundDistance() const; //! Return the distance from the center of this node to the center of the //! parent node. ElemType ParentDistance() const { return parentDistance; } //! Modify the distance from the center of this node to the center of the //! parent node. ElemType& ParentDistance() { return parentDistance; } /** * Return the specified child. If the index is out of bounds, unspecified * behavior will occur. */ const Octree& Child(const size_t child) const { return *children[child]; } /** * Return the specified child. If the index is out of bounds, unspecified * behavior will occur. */ Octree& Child(const size_t child) { return *children[child]; } /** * Return the pointer to the given child. This allows the child itself to be * modified. */ Octree*& ChildPtr(const size_t child) { return children[child]; } //! Return the number of points in this node (0 if not a leaf). size_t NumPoints() const; //! Return the number of descendants of this node. size_t NumDescendants() const; /** * Return the index (with reference to the dataset) of a particular * descendant. */ size_t Descendant(const size_t index) const; /** * Return the index (with reference to the dataset) of a particular point in * this node. If the given index is invalid (i.e. if it is greater than * NumPoints()), the indices returned will be invalid. */ size_t Point(const size_t index) const; //! Return the minimum distance to another node. ElemType MinDistance(const Octree& other) const; //! Return the maximum distance to another node. ElemType MaxDistance(const Octree& other) const; //! Return the minimum and maximum distance to another node. math::RangeType RangeDistance(const Octree& other) const; //! Return the minimum distance to the given point. template ElemType MinDistance( const VecType& point, typename boost::enable_if>::type* = 0) const; //! Return the maximum distance to the given point. template ElemType MaxDistance( const VecType& point, typename boost::enable_if>::type* = 0) const; //! Return the minimum and maximum distance to another node. template math::RangeType RangeDistance( const VecType& point, typename boost::enable_if>::type* = 0) const; //! Store the center of the bounding region in the given vector. void Center(arma::vec& center) const { bound.Center(center); } //! Serialize the tree. template void Serialize(Archive& ar, const unsigned int /* version */); protected: /** * A default constructor. This is meant to only be used with * boost::serialization, which is allowed with the friend declaration below. * This does not return a valid treee! 
The method must be protected, so that * the serialization shim can work with the default constructor. */ Octree(); //! Friend access is given for the default constructor. friend class boost::serialization::access; private: /** * Split the node, using the given center and the given maximum width of this * node. * * @param center Center of the node. * @param width Width of the current node. * @param maxLeafSize Maximum number of points allowed in a leaf. */ void SplitNode(const arma::vec& center, const double width, const size_t maxLeafSize); /** * Split the node, using the given center and the given maximum width of this * node, and fill the mappings vector. * * @param center Center of the node. * @param width Width of the current node. * @param oldFromNew Mappings from old to new. * @param maxLeafSize Maximum number of points allowed in a leaf. */ void SplitNode(const arma::vec& center, const double width, std::vector& oldFromNew, const size_t maxLeafSize); /** * This is used for sorting points while splitting. */ struct SplitInfo { //! Create the SplitInfo object. SplitInfo(const size_t d, const arma::vec& c) : d(d), center(c) {} //! The dimension we are splitting on. size_t d; //! The center of the node. const arma::vec& center; template static bool AssignToLeftNode(const VecType& point, const SplitInfo& s) { return point[s.d] < s.center[s.d]; } }; }; } // namespace tree } // namespace mlpack // Include implementation. #include "octree_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/octree/octree_impl.hpp000066400000000000000000000666031315013601400232150ustar00rootroot00000000000000/** * @file octree_impl.hpp * @author Ryan Curtin * * Implementation of generalized octree (Octree). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_OCTREE_OCTREE_IMPL_HPP #define MLPACK_CORE_TREE_OCTREE_OCTREE_IMPL_HPP #include "octree.hpp" #include #include namespace mlpack { namespace tree { //! Construct the tree. template Octree::Octree(const MatType& dataset, const size_t maxLeafSize) : begin(0), count(dataset.n_cols), bound(dataset.n_rows), dataset(new MatType(dataset)), parent(NULL), parentDistance(0.0) { if (count > 0) { // Calculate empirical center of data. bound |= *this->dataset; arma::vec center; bound.Center(center); double maxWidth = 0.0; for (size_t i = 0; i < bound.Dim(); ++i) if (bound[i].Hi() - bound[i].Lo() > maxWidth) maxWidth = bound[i].Hi() - bound[i].Lo(); SplitNode(center, maxWidth, maxLeafSize); furthestDescendantDistance = 0.5 * bound.Diameter(); } else { furthestDescendantDistance = 0.0; } // Initialize the statistic. stat = StatisticType(*this); } //! Construct the tree. template Octree::Octree( const MatType& dataset, std::vector& oldFromNew, const size_t maxLeafSize) : begin(0), count(dataset.n_cols), bound(dataset.n_rows), dataset(new MatType(dataset)), parent(NULL), parentDistance(0.0) { oldFromNew.resize(this->dataset->n_cols); for (size_t i = 0; i < this->dataset->n_cols; ++i) oldFromNew[i] = i; if (count > 0) { // Calculate empirical center of data. 
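    // (The bound is grown below to cover every column of the dataset; its
    // center and widest side then seed the recursive SplitNode() call.)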
bound |= *this->dataset; arma::vec center; bound.Center(center); double maxWidth = 0.0; for (size_t i = 0; i < bound.Dim(); ++i) if (bound[i].Hi() - bound[i].Lo() > maxWidth) maxWidth = bound[i].Hi() - bound[i].Lo(); SplitNode(center, maxWidth, oldFromNew, maxLeafSize); furthestDescendantDistance = 0.5 * bound.Diameter(); } else { furthestDescendantDistance = 0.0; } // Initialize the statistic. stat = StatisticType(*this); } //! Construct the tree. template Octree::Octree( const MatType& dataset, std::vector& oldFromNew, std::vector& newFromOld, const size_t maxLeafSize) : begin(0), count(dataset.n_cols), bound(dataset.n_rows), dataset(new MatType(dataset)), parent(NULL), parentDistance(0.0) { oldFromNew.resize(this->dataset->n_cols); for (size_t i = 0; i < this->dataset->n_cols; ++i) oldFromNew[i] = i; if (count > 0) { // Calculate empirical center of data. bound |= *this->dataset; arma::vec center; bound.Center(center); double maxWidth = 0.0; for (size_t i = 0; i < bound.Dim(); ++i) if (bound[i].Hi() - bound[i].Lo() > maxWidth) maxWidth = bound[i].Hi() - bound[i].Lo(); SplitNode(center, maxWidth, oldFromNew, maxLeafSize); furthestDescendantDistance = 0.5 * bound.Diameter(); } else { furthestDescendantDistance = 0.0; } // Initialize the statistic. stat = StatisticType(*this); // Map the newFromOld indices correctly. newFromOld.resize(this->dataset->n_cols); for (size_t i = 0; i < this->dataset->n_cols; i++) newFromOld[oldFromNew[i]] = i; } //! Construct the tree. template Octree::Octree(MatType&& dataset, const size_t maxLeafSize) : begin(0), count(dataset.n_cols), bound(dataset.n_rows), dataset(new MatType(std::move(dataset))), parent(NULL), parentDistance(0.0) { if (count > 0) { // Calculate empirical center of data. bound |= *this->dataset; arma::vec center; bound.Center(center); double maxWidth = 0.0; for (size_t i = 0; i < bound.Dim(); ++i) if (bound[i].Hi() - bound[i].Lo() > maxWidth) maxWidth = bound[i].Hi() - bound[i].Lo(); SplitNode(center, maxWidth, maxLeafSize); furthestDescendantDistance = 0.5 * bound.Diameter(); } else { furthestDescendantDistance = 0.0; } // Initialize the statistic. stat = StatisticType(*this); } //! Construct the tree. template Octree::Octree( MatType&& dataset, std::vector& oldFromNew, const size_t maxLeafSize) : begin(0), count(dataset.n_cols), bound(dataset.n_rows), dataset(new MatType(std::move(dataset))), parent(NULL), parentDistance(0.0) { oldFromNew.resize(this->dataset->n_cols); for (size_t i = 0; i < this->dataset->n_cols; ++i) oldFromNew[i] = i; if (count > 0) { // Calculate empirical center of data. bound |= *this->dataset; arma::vec center; bound.Center(center); double maxWidth = 0.0; for (size_t i = 0; i < bound.Dim(); ++i) if (bound[i].Hi() - bound[i].Lo() > maxWidth) maxWidth = bound[i].Hi() - bound[i].Lo(); SplitNode(center, maxWidth, oldFromNew, maxLeafSize); furthestDescendantDistance = 0.5 * bound.Diameter(); } else { furthestDescendantDistance = 0.0; } // Initialize the statistic. stat = StatisticType(*this); } //! Construct the tree. template Octree::Octree( MatType&& dataset, std::vector& oldFromNew, std::vector& newFromOld, const size_t maxLeafSize) : begin(0), count(dataset.n_cols), bound(dataset.n_rows), dataset(new MatType(std::move(dataset))), parent(NULL), parentDistance(0.0) { oldFromNew.resize(this->dataset->n_cols); for (size_t i = 0; i < this->dataset->n_cols; ++i) oldFromNew[i] = i; if (count > 0) { // Calculate empirical center of data. 
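    // (Same bound-and-split logic as the copying constructors above; only
    // dataset ownership differs, via the std::move() in the initializer
    // list.)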
bound |= *this->dataset; arma::vec center; bound.Center(center); double maxWidth = 0.0; for (size_t i = 0; i < bound.Dim(); ++i) if (bound[i].Hi() - bound[i].Lo() > maxWidth) maxWidth = bound[i].Hi() - bound[i].Lo(); SplitNode(center, maxWidth, oldFromNew, maxLeafSize); furthestDescendantDistance = 0.5 * bound.Diameter(); } else { furthestDescendantDistance = 0.0; } // Initialize the statistic. stat = StatisticType(*this); // Map the newFromOld indices correctly. newFromOld.resize(this->dataset->n_cols); for (size_t i = 0; i < this->dataset->n_cols; i++) newFromOld[oldFromNew[i]] = i; } //! Construct a child node. template Octree::Octree( Octree* parent, const size_t begin, const size_t count, const arma::vec& center, const double width, const size_t maxLeafSize) : begin(begin), count(count), bound(parent->dataset->n_rows), dataset(parent->dataset), parent(parent) { // Calculate empirical center of data. bound |= dataset->cols(begin, begin + count - 1); // Now split the node. SplitNode(center, width, maxLeafSize); // Calculate the distance from the empirical center of this node to the // empirical center of the parent. arma::vec trueCenter, parentCenter; bound.Center(trueCenter); parent->Bound().Center(parentCenter); parentDistance = metric.Evaluate(trueCenter, parentCenter); furthestDescendantDistance = 0.5 * bound.Diameter(); // Initialize the statistic. stat = StatisticType(*this); } //! Construct a child node. template Octree::Octree( Octree* parent, const size_t begin, const size_t count, std::vector& oldFromNew, const arma::vec& center, const double width, const size_t maxLeafSize) : begin(begin), count(count), bound(parent->dataset->n_rows), dataset(parent->dataset), parent(parent) { // Calculate empirical center of data. bound |= dataset->cols(begin, begin + count - 1); // Now split the node. SplitNode(center, width, oldFromNew, maxLeafSize); // Calculate the distance from the empirical center of this node to the // empirical center of the parent. arma::vec trueCenter, parentCenter; bound.Center(trueCenter); parent->Bound().Center(parentCenter); parentDistance = metric.Evaluate(trueCenter, parentCenter); furthestDescendantDistance = 0.5 * bound.Diameter(); // Initialize the statistic. stat = StatisticType(*this); } //! Copy the given tree. template Octree::Octree(const Octree& other) : begin(other.begin), count(other.count), bound(other.bound), dataset((other.parent == NULL) ? new MatType(*other.dataset) : NULL), parent(NULL), stat(other.stat), parentDistance(other.parentDistance), furthestDescendantDistance(other.furthestDescendantDistance), metric(other.metric) { // If we have any children, we need to create them, and then ensure that their // parent links are set right. for (size_t i = 0; i < other.NumChildren(); ++i) { children.push_back(new Octree(other.Child(i))); children[i]->parent = this; children[i]->dataset = this->dataset; } } //! Move the given tree. template Octree::Octree(Octree&& other) : children(std::move(other.children)), begin(other.begin), count(other.count), bound(std::move(other.bound)), dataset(other.dataset), parent(other.parent), stat(std::move(other.stat)), parentDistance(other.parentDistance), furthestDescendantDistance(other.furthestDescendantDistance), metric(std::move(other.metric)) { // Update the parent pointers of the direct children. 
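  // (Post-move sketch: the children vector moved from 'other' still holds
  // pointers whose parent links refer to 'other', so they are rewired here;
  // 'other' is then left empty but valid by the member resets below.)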
for (size_t i = 0; i < children.size(); ++i) children[i]->parent = this; other.begin = 0; other.count = 0; other.dataset = new MatType(); other.parentDistance = 0.0; other.furthestDescendantDistance = 0.0; other.parent = NULL; } template Octree::Octree() : begin(0), count(0), bound(0), dataset(new MatType()), parent(NULL), parentDistance(0.0), furthestDescendantDistance(0.0) { // Nothing to do. } template template Octree::Octree( Archive& ar, const typename boost::enable_if::type*) : Octree() // Create an empty tree. { // De-serialize the tree into this object. ar >> data::CreateNVP(*this, "tree"); } template Octree::~Octree() { // Delete the dataset if we aren't the parent. if (!parent) delete dataset; // Now delete each of the children. for (size_t i = 0; i < children.size(); ++i) delete children[i]; children.clear(); } template size_t Octree::NumChildren() const { return children.size(); } template template size_t Octree::GetNearestChild( const VecType& point, typename boost::enable_if>::type*) const { // It's possible that this could be improved by caching which children we have // and which we don't, but for now this is just a brute force search. ElemType bestDistance = DBL_MAX; size_t bestIndex = NumChildren(); for (size_t i = 0; i < NumChildren(); ++i) { const double dist = children[i]->MinDistance(point); if (dist < bestDistance) { bestDistance = dist; bestIndex = i; } } return bestIndex; } template template size_t Octree::GetFurthestChild( const VecType& point, typename boost::enable_if>::type*) const { // It's possible that this could be improved by caching which children we have // and which we don't, but for now this is just a brute force search. ElemType bestDistance = -1.0; // Initialize to invalid distance. size_t bestIndex = NumChildren(); for (size_t i = 0; i < NumChildren(); ++i) { const double dist = children[i]->MaxDistance(point); if (dist > bestDistance) { bestDistance = dist; bestIndex = i; } } return bestIndex; } template size_t Octree::GetNearestChild( const Octree& queryNode) const { // It's possible that this could be improved by caching which children we have // and which we don't, but for now this is just a brute force search. ElemType bestDistance = DBL_MAX; size_t bestIndex = NumChildren(); for (size_t i = 0; i < NumChildren(); ++i) { const double dist = children[i]->MinDistance(queryNode); if (dist < bestDistance) { bestDistance = dist; bestIndex = i; } } return bestIndex; } template size_t Octree::GetFurthestChild( const Octree& queryNode) const { // It's possible that this could be improved by caching which children we have // and which we don't, but for now this is just a brute force search. ElemType bestDistance = -1.0; // Initialize to invalid distance. size_t bestIndex = NumChildren(); for (size_t i = 0; i < NumChildren(); ++i) { const double dist = children[i]->MaxDistance(queryNode); if (dist > bestDistance) { bestDistance = dist; bestIndex = i; } } return bestIndex; } template typename Octree::ElemType Octree::FurthestPointDistance() const { // If we are not a leaf, then this distance is 0. Otherwise, return the // furthest descendant distance. return (children.size() > 0) ? 0.0 : furthestDescendantDistance; } template typename Octree::ElemType Octree::FurthestDescendantDistance() const { return furthestDescendantDistance; } template typename Octree::ElemType Octree::MinimumBoundDistance() const { return bound.MinWidth() / 2.0; } template size_t Octree::NumPoints() const { // We have no points unless we are a leaf; return (children.size() > 0) ? 
0 : count; } template size_t Octree::NumDescendants() const { return count; } template size_t Octree::Descendant( const size_t index) const { return begin + index; } template size_t Octree::Point(const size_t index) const { return begin + index; } template typename Octree::ElemType Octree::MinDistance(const Octree& other) const { return bound.MinDistance(other.Bound()); } template typename Octree::ElemType Octree::MaxDistance(const Octree& other) const { return bound.MaxDistance(other.Bound()); } template math::RangeType::ElemType> Octree::RangeDistance(const Octree& other) const { return bound.RangeDistance(other.Bound()); } template template typename Octree::ElemType Octree::MinDistance( const VecType& point, typename boost::enable_if>::type*) const { return bound.MinDistance(point); } template template typename Octree::ElemType Octree::MaxDistance( const VecType& point, typename boost::enable_if>::type*) const { return bound.MaxDistance(point); } template template math::RangeType::ElemType> Octree::RangeDistance( const VecType& point, typename boost::enable_if>::type*) const { return bound.RangeDistance(point); } //! Serialize the tree. template template void Octree::Serialize( Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // If we're loading and we have children, they need to be deleted. if (Archive::is_loading::value) { for (size_t i = 0; i < children.size(); ++i) delete children[i]; children.clear(); if (!parent) delete dataset; } ar & CreateNVP(begin, "begin"); ar & CreateNVP(count, "count"); ar & CreateNVP(bound, "bound"); ar & CreateNVP(stat, "stat"); ar & CreateNVP(parentDistance, "parentDistance"); ar & CreateNVP(furthestDescendantDistance, "furthestDescendantDistance"); ar & CreateNVP(metric, "metric"); // Due to quirks of boost::serialization, depending on how the user // serializes the tree, it's possible that the root of the tree will // accidentally be serialized twice. So if we are a first-level child, we // avoid serializing the parent. The true (non-duplicated) parent will fix // the parent link. bool hasFakeParent = false; if (Archive::is_saving::value && parent != NULL && parent->parent == NULL) { Octree* fakeParent = NULL; hasFakeParent = true; ar & CreateNVP(fakeParent, "parent"); ar & CreateNVP(hasFakeParent, "hasFakeParent"); } else { ar & CreateNVP(parent, "parent"); ar & CreateNVP(hasFakeParent, "hasFakeParent"); } // Only serialize the dataset if we don't have a fake parent. Otherwise, the // real parent will come and set it later. if (!hasFakeParent) ar & CreateNVP(dataset, "dataset"); size_t numChildren = 0; if (Archive::is_saving::value) numChildren = children.size(); ar & CreateNVP(numChildren, "numChildren"); if (Archive::is_loading::value) children.resize(numChildren); for (size_t i = 0; i < numChildren; ++i) { std::ostringstream oss; oss << "child" << i; ar & CreateNVP(children[i], oss.str()); } // Fix the child pointers, if they were set to a fake parent. if (Archive::is_loading::value && parent == NULL) { for (size_t i = 0; i < children.size(); ++i) { children[i]->dataset = this->dataset; children[i]->parent = this; } } } //! Split the node. template void Octree::SplitNode( const arma::vec& center, const double width, const size_t maxLeafSize) { // No need to split if we have fewer than the maximum number of points in this // node. if (count <= maxLeafSize) return; // This will hold the index of the first point in each child. 
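  // (A node over d-dimensional data has up to 2^d children, so 2^d + 1
  // offsets are stored; child i will own the half-open column range
  // [childBegins[i], childBegins[i + 1]).)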
arma::Col childBegins(((size_t) 1 << dataset->n_rows) + 1); childBegins[0] = begin; childBegins[childBegins.n_elem - 1] = begin + count; // We will make log2(dim) passes, splitting along the last down to the first // dimension. The tuple holds { dim, begin, count, leftChildIndex }. std::stack> stack; stack.push(std::tuple(dataset->n_rows - 1, begin, count, 0)); while (!stack.empty()) { std::tuple t = stack.top(); stack.pop(); const size_t d = std::get<0>(t); const size_t childBegin = std::get<1>(t); const size_t childCount = std::get<2>(t); const size_t leftChildIndex = std::get<3>(t); // Perform a "half-split": after this split, all points belonging to // children of index 2^(d - 1) - 1 and less will be on the left side, and // all points belonging to children of index 2^(d - 1) and above will be on // the right side. SplitInfo s(d, center); const size_t firstRight = split::PerformSplit(*dataset, childBegin, childCount, s); // We can set the first index of the right child. The first index of the // left child is already set. const size_t rightChildIndex = leftChildIndex + ((size_t) 1 << d); childBegins[rightChildIndex] = firstRight; // Now we have to recurse, if this was not the last dimension. if (d != 0) { if (firstRight > childBegin) { stack.push(std::tuple(d - 1, childBegin, firstRight - childBegin, leftChildIndex)); } else { // Set beginning indices correctly for all children below this level. for (size_t c = leftChildIndex + 1; c < rightChildIndex; ++c) childBegins[c] = childBegins[leftChildIndex]; } if (firstRight < childBegin + childCount) { stack.push(std::tuple(d - 1, firstRight, childCount - (firstRight - childBegin), rightChildIndex)); } else { // Set beginning indices correctly for all children below this level. for (size_t c = rightChildIndex + 1; c < rightChildIndex + (rightChildIndex - leftChildIndex); ++c) childBegins[c] = childBegins[rightChildIndex]; } } } // Now that the dataset is reordered, we can create the children. arma::vec childCenter(center.n_elem); const double childWidth = width / 2.0; for (size_t i = 0; i < childBegins.n_elem - 1; ++i) { // If the child has no points, don't create it. if (childBegins[i + 1] - childBegins[i] == 0) continue; // Create the correct center. for (size_t d = 0; d < center.n_elem; ++d) { // Is the dimension "right" (1) or "left" (0)? if (((i >> d) & 1) == 0) childCenter[d] = center[d] - childWidth; else childCenter[d] = center[d] + childWidth; } children.push_back(new Octree(this, childBegins[i], childBegins[i + 1] - childBegins[i], childCenter, childWidth, maxLeafSize)); } } //! Split the node, and store mappings. template void Octree::SplitNode( const arma::vec& center, const double width, std::vector& oldFromNew, const size_t maxLeafSize) { // No need to split if we have fewer than the maximum number of points in this // node. if (count <= maxLeafSize) return; // This will hold the index of the first point in each child. arma::Col childBegins(((size_t) 1 << dataset->n_rows) + 1); childBegins[0] = begin; childBegins[childBegins.n_elem - 1] = begin + count; // We will make log2(dim) passes, splitting along the last down to the first // dimension. The tuple holds { dim, begin, count, leftChildIndex }. 
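  // (Cost sketch: each stack level rearranges at most all 'count' points of
  // this node once, and there is one level per dimension, so a full split
  // costs roughly O(count * d) column swaps for d-dimensional data.)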
std::stack> stack; stack.push(std::tuple(dataset->n_rows - 1, begin, count, 0)); while (!stack.empty()) { std::tuple t = stack.top(); stack.pop(); const size_t d = std::get<0>(t); const size_t childBegin = std::get<1>(t); const size_t childCount = std::get<2>(t); const size_t leftChildIndex = std::get<3>(t); // Perform a "half-split": after this split, all points belonging to // children of index 2^(d - 1) - 1 and less will be on the left side, and // all points belonging to children of index 2^(d - 1) and above will be on // the right side. SplitInfo s(d, center); const size_t firstRight = split::PerformSplit(*dataset, childBegin, childCount, s, oldFromNew); // We can set the first index of the right child. The first index of the // left child is already set. const size_t rightChildIndex = leftChildIndex + ((size_t) 1 << d); childBegins[rightChildIndex] = firstRight; // Now we have to recurse, if this was not the last dimension. if (d != 0) { if (firstRight > childBegin) { stack.push(std::tuple(d - 1, childBegin, firstRight - childBegin, leftChildIndex)); } else { // Set beginning indices correctly for all children below this level. for (size_t c = leftChildIndex + 1; c < rightChildIndex; ++c) childBegins[c] = childBegins[leftChildIndex]; } if (firstRight < childBegin + childCount) { stack.push(std::tuple(d - 1, firstRight, childCount - (firstRight - childBegin), rightChildIndex)); } else { // Set beginning indices correctly for all children below this level. for (size_t c = rightChildIndex + 1; c < rightChildIndex + (rightChildIndex - leftChildIndex); ++c) childBegins[c] = childBegins[rightChildIndex]; } } } // Now that the dataset is reordered, we can create the children. arma::vec childCenter(center.n_elem); const double childWidth = width / 2.0; for (size_t i = 0; i < childBegins.n_elem - 1; ++i) { // If the child has no points, don't create it. if (childBegins[i + 1] - childBegins[i] == 0) continue; // Create the correct center. for (size_t d = 0; d < center.n_elem; ++d) { // Is the dimension "right" (1) or "left" (0)? if (((i >> d) & 1) == 0) childCenter[d] = center[d] - childWidth; else childCenter[d] = center[d] + childWidth; } children.push_back(new Octree(this, childBegins[i], childBegins[i + 1] - childBegins[i], oldFromNew, childCenter, childWidth, maxLeafSize)); } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/octree/single_tree_traverser.hpp000066400000000000000000000031471315013601400253020ustar00rootroot00000000000000/** * @file single_tree_traverser.hpp * @author Ryan Curtin * * Definition of the single tree traverser for the octree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_OCTREE_SINGLE_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_OCTREE_SINGLE_TREE_TRAVERSER_HPP #include #include "octree.hpp" namespace mlpack { namespace tree { template template class Octree::SingleTreeTraverser { public: /** * Instantiate the traverser with the given rule set. */ SingleTreeTraverser(RuleType& rule); /** * Traverse the reference tree with the given query point. This does not * reset the number of pruned nodes. * * @param queryIndex Index of query point. * @param referenceNode Node in reference tree. */ void Traverse(const size_t queryIndex, Octree& referenceNode); //! 
Get the number of pruned nodes. size_t NumPrunes() const { return numPrunes; } //! Modify the number of pruned nodes. size_t& NumPrunes() { return numPrunes; } private: //! The instantiated rule. RuleType& rule; //! The number of reference nodes that have been pruned. size_t numPrunes; }; } // namespace tree } // namespace mlpack // Include implementation. #include "single_tree_traverser_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/octree/single_tree_traverser_impl.hpp000066400000000000000000000042571315013601400263260ustar00rootroot00000000000000/** * @file single_tree_traverser_impl.hpp * @author Ryan Curtin * * Implementation of the single tree traverser for octrees. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_OCTREE_SINGLE_TREE_TRAVERSER_IMPL_HPP #define MLPACK_CORE_TREE_OCTREE_SINGLE_TREE_TRAVERSER_IMPL_HPP // In case it hasn't been included yet. #include "single_tree_traverser.hpp" namespace mlpack { namespace tree { template template Octree::SingleTreeTraverser:: SingleTreeTraverser(RuleType& rule) : rule(rule) { // Nothing to do. } template template void Octree::SingleTreeTraverser:: Traverse(const size_t queryIndex, Octree& referenceNode) { // If we are a leaf, run the base cases. if (referenceNode.NumChildren() == 0) { const size_t refBegin = referenceNode.Point(0); const size_t refEnd = refBegin + referenceNode.NumPoints(); for (size_t r = refBegin; r < refEnd; ++r) rule.BaseCase(queryIndex, r); } else { // Do a prioritized recursion, by scoring all candidates and then sorting // them. arma::vec scores(referenceNode.NumChildren()); for (size_t i = 0; i < scores.n_elem; ++i) scores[i] = rule.Score(queryIndex, referenceNode.Child(i)); // Sort the scores. arma::uvec sortedIndices = arma::sort_index(scores); for (size_t i = 0; i < sortedIndices.n_elem; ++i) { // If the node is pruned, all subsequent nodes in sorted order will also // be pruned. if (scores[sortedIndices[i]] == DBL_MAX) { numPrunes += (sortedIndices.n_elem - i); break; } Traverse(queryIndex, referenceNode.Child(sortedIndices[i])); } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/octree/traits.hpp000066400000000000000000000035541315013601400222150ustar00rootroot00000000000000/** * @file traits.hpp * @author Ryan Curtin * * Specialization of the TreeTraits class for the Octree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_OCTREE_TRAITS_HPP #define MLPACK_CORE_TREE_OCTREE_TRAITS_HPP #include namespace mlpack { namespace tree { /** * This is a specialization of the TreeTraits class to the Octree tree type. It * defines characteristics of the octree, and is used to help write * tree-independent (but still optimized) tree-based algorithms. See * mlpack/core/tree/tree_traits.hpp for more information. */ template class TreeTraits> { public: /** * No octree nodes will overlap. */ static const bool HasOverlappingChildren = false; /** * Points are not shared across nodes in the octree. 
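 * (Each point lives in exactly one leaf: SplitNode() partitions a node's
 * column range [begin, begin + count) disjointly among its children.)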
  /**
   * There is no guarantee that the first point in a node is its centroid.
   */
  static const bool FirstPointIsCentroid = false;

  /**
   * Points are not contained at multiple levels of the octree.
   */
  static const bool HasSelfChildren = false;

  /**
   * Points are rearranged during building of the tree.
   */
  static const bool RearrangesDataset = true;

  /**
   * This is not necessarily a binary tree.
   */
  static const bool BinaryTree = false;

  /**
   * NumDescendants() represents the number of unique descendant points.
   */
  static const bool UniqueNumDescendants = true;
};

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/perform_split.hpp000066400000000000000000000132231315013601400223050ustar00rootroot00000000000000/**
 * @file perform_split.hpp
 * @author Mikhail Lozhnikov
 *
 * This file contains functions that implement the default binary split
 * behavior.  The functions perform the actual splitting.  This will order
 * the dataset such that points that belong to the left subtree are on the
 * left of the split column, and points from the right subtree are on the
 * right side of the split column.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_PERFORM_SPLIT_HPP
#define MLPACK_CORE_TREE_PERFORM_SPLIT_HPP

namespace mlpack {
namespace tree /** Trees and tree-building procedures. */ {
namespace split {

/**
 * This function implements the default split behavior, i.e., it rearranges
 * points according to the split information.  The
 * SplitType::AssignToLeftNode() function is used in order to determine the
 * child that contains any particular point.
 *
 * @param data The dataset used by the binary space tree.
 * @param begin Index of the starting point in the dataset that belongs to
 *    this node.
 * @param count Number of points in this node.
 * @param splitInfo The information about the split.
 */
template<typename MatType, typename SplitType>
size_t PerformSplit(MatType& data,
                    const size_t begin,
                    const size_t count,
                    const typename SplitType::SplitInfo& splitInfo)
{
  // This method modifies the input dataset.  We loop both from the left and
  // right sides of the points contained in this node.
  size_t left = begin;
  size_t right = begin + count - 1;

  // First half-iteration of the loop is out here because the termination
  // condition is in the middle.
  while ((left <= right) &&
         (SplitType::AssignToLeftNode(data.col(left), splitInfo)))
    left++;
  while ((!SplitType::AssignToLeftNode(data.col(right), splitInfo)) &&
         (left <= right) && (right > 0))
    right--;

  // Shortcut for when all points are on the right.
  if (left == right && right == 0)
    return left;

  while (left <= right)
  {
    // Swap columns.
    data.swap_cols(left, right);

    // See how many points on the left are correct.  When they are correct,
    // increase the left counter accordingly.  When we encounter one that
    // isn't correct, stop.  We will switch it later.
    while (SplitType::AssignToLeftNode(data.col(left), splitInfo) &&
           (left <= right))
      left++;

    // Now see how many points on the right are correct.  When they are
    // correct, decrease the right counter accordingly.  When we encounter one
    // that isn't correct, stop.  We will switch it with the wrong point we
    // found in the previous loop.
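    // (Loop invariant, for clarity: every column strictly to the left of
    // `left` belongs to the left child, and every column strictly to the
    // right of `right` belongs to the right child; the two indices march
    // toward each other and misplaced pairs are swapped when both scans
    // stop.)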
while ((!SplitType::AssignToLeftNode(data.col(right), splitInfo)) && (left <= right)) right--; } Log::Assert(left == right + 1); return left; } /** * This function implements the default split behavior i.e. it rearranges * points according to the split information. The SplitType::AssignToLeftNode() * function is used in order to determine the child that contains any particular * point. The function takes care of indices and returns the list of changed * indices. * * @param data The dataset used by the binary space tree. * @param begin Index of the starting point in the dataset that belongs to * this node. * @param count Number of points in this node. * @param splitInfo The information about the split. * @param oldFromNew Vector which will be filled with the old positions for * each new point. */ template size_t PerformSplit(MatType& data, const size_t begin, const size_t count, const typename SplitType::SplitInfo& splitInfo, std::vector& oldFromNew) { // This method modifies the input dataset. We loop both from the left and // right sides of the points contained in this node. size_t left = begin; size_t right = begin + count - 1; // First half-iteration of the loop is out here because the termination // condition is in the middle. while ((left <= right) && (SplitType::AssignToLeftNode(data.col(left), splitInfo))) left++; while ((!SplitType::AssignToLeftNode(data.col(right), splitInfo)) && (left <= right) && (right > 0)) right--; // Shortcut for when all points are on the right. if (left == right && right == 0) return left; while (left <= right) { // Swap columns. data.swap_cols(left, right); // Update the indices for what we changed. size_t t = oldFromNew[left]; oldFromNew[left] = oldFromNew[right]; oldFromNew[right] = t; // See how many points on the left are correct. When they are correct, // increase the left counter accordingly. When we encounter one that isn't // correct, stop. We will switch it later. while (SplitType::AssignToLeftNode(data.col(left), splitInfo) && (left <= right)) left++; // Now see how many points on the right are correct. When they are correct, // decrease the right counter accordingly. When we encounter one that isn't // correct, stop. We will switch it with the wrong point we found in the // previous loop. while ((!SplitType::AssignToLeftNode(data.col(right), splitInfo)) && (left <= right)) right--; } Log::Assert(left == right + 1); return left; } } // namespace split } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_BINARY_SPACE_TREE_PERFORM_SPLIT_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree.hpp000066400000000000000000000040721315013601400224050ustar00rootroot00000000000000/** * @file rectangle_tree.hpp * @author Andrew Wells * * Include all the necessary files to use the Rectangle Type Trees (RTree, * RStarTree, XTree, and HilbertRTree). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_HPP /* We include bounds.hpp since it gives us the necessary files. * However, we will not use the "ballbounds" option. 
*/ #include "bounds.hpp" #include "rectangle_tree/rectangle_tree.hpp" #include "rectangle_tree/single_tree_traverser.hpp" #include "rectangle_tree/single_tree_traverser_impl.hpp" #include "rectangle_tree/dual_tree_traverser.hpp" #include "rectangle_tree/dual_tree_traverser_impl.hpp" #include "rectangle_tree/r_tree_split.hpp" #include "rectangle_tree/r_star_tree_split.hpp" #include "rectangle_tree/no_auxiliary_information.hpp" #include "rectangle_tree/r_tree_descent_heuristic.hpp" #include "rectangle_tree/r_star_tree_descent_heuristic.hpp" #include "rectangle_tree/x_tree_split.hpp" #include "rectangle_tree/x_tree_auxiliary_information.hpp" #include "rectangle_tree/hilbert_r_tree_descent_heuristic.hpp" #include "rectangle_tree/hilbert_r_tree_split.hpp" #include "rectangle_tree/hilbert_r_tree_auxiliary_information.hpp" #include "rectangle_tree/discrete_hilbert_value.hpp" #include "rectangle_tree/r_plus_tree_descent_heuristic.hpp" #include "rectangle_tree/r_plus_tree_split_policy.hpp" #include "rectangle_tree/minimal_coverage_sweep.hpp" #include "rectangle_tree/minimal_splits_number_sweep.hpp" #include "rectangle_tree/r_plus_tree_split.hpp" #include "rectangle_tree/r_plus_plus_tree_auxiliary_information.hpp" #include "rectangle_tree/r_plus_plus_tree_descent_heuristic.hpp" #include "rectangle_tree/r_plus_plus_tree_split_policy.hpp" #include "rectangle_tree/traits.hpp" #include "rectangle_tree/typedef.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/000077500000000000000000000000001315013601400216715ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/discrete_hilbert_value.hpp000066400000000000000000000245531315013601400271220ustar00rootroot00000000000000/** * @file discrete_hilbert_value.hpp * @author Mikhail Lozhnikov * * Definition of the DiscreteHilbertValue class, a class that calculates * the ordering of points using the Hilbert curve. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_DISCRETE_HILBERT_VALUE_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_DISCRETE_HILBERT_VALUE_HPP #include namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { /** * The DiscreteHilbertValue class stores Hilbert values for all of the points in * a RectangleTree node, and calculates Hilbert values for new points. This * implementation calculates the full discrete Hilbert value; for a * d-dimensional vector filled with elements of size E, each Hilbert value will * take dE space. */ template class DiscreteHilbertValue { public: //! Depending on the precision of the tree element type, we may need to use //! uint32_t or uint64_t. typedef typename std::conditional::type HilbertElemType; //! Default constructor. DiscreteHilbertValue(); /** * Construct this for the node tree. If the node is the root this method * computes the Hilbert value for each point in the tree's dataset. * * @param node The node that stores this Hilbert value. */ template DiscreteHilbertValue(const TreeType* tree); /** * Create a Hilbert value object by copying from another one. * * @param other The object from which the value will be copied. * @param tree The node that holds the Hilbert value. * @param deepCopy If false, the dataset will not be copied. 
*/ template DiscreteHilbertValue(const DiscreteHilbertValue& other, TreeType* tree, bool deepCopy); /** * Create a Hilbert value object by moving another one. * * @param other The Hilbert value object from which the value will be moved. */ DiscreteHilbertValue(DiscreteHilbertValue&& other); //! Free memory ~DiscreteHilbertValue(); /** * Compare two points. It returns 1 if the first point is greater than the * second one, -1 if the first point is less than the second one and 0 if the * Hilbert values of the points are equal. In order to do it this method * computes the Hilbert values of the points. * * @param pt1 The first point. * @param pt2 The second point. */ template static int ComparePoints(const VecType1& pt1, const VecType2& pt2, typename boost::enable_if>* = 0, typename boost::enable_if>* = 0); /** * Compare two Hilbert values. It returns 1 if the first value is greater than * the second one, -1 if the first value is less than the second one and 0 if * the values are equal. This method does not compute the Hilbert values. * * @param val1 The first point. * @param val2 The second point. */ static int CompareValues(const DiscreteHilbertValue& val1, const DiscreteHilbertValue& val2); /** * Compare the largest Hilbert value of the node with the val value. It * returns 1 if the value of the node is greater than val, -1 if the value of * the node is less than val and 0 if the values are equal. This method does * not compute the Hilbert values. * * @param val The Hilbert value to compare with. */ int CompareWith(const DiscreteHilbertValue& val) const; /** * Compare the largest Hilbert value of the node with the Hilbert value of the * point. It returns 1 if the value of the node is greater than the value of * the point, -1 if the value of the node is less than the value of the point * and 0 if the values are equal. This method computes the Hilbert value of * the point. * * @param pt The point to compare with. */ template int CompareWith(const VecType& pt, typename boost::enable_if>* = 0) const; /** * Compare the Hilbert value of the cached point with the Hilbert value of the * given point. It returns 1 if the value of the node is greater than the * value of the point, -1 if the value of the node is less than the value of * the point and 0 if the values are equal. This method computes the Hilbert * value of the point. * * @param pt The point to compare with. */ template int CompareWithCachedPoint( const VecType& pt, typename boost::enable_if>* = 0) const; /** * Update the largest Hilbert value of the node and insert the point in the * local dataset if the node is a leaf. * * @param node The node in which the point is being inserted. * @param point The number of the point being inserted. */ template size_t InsertPoint(TreeType *node, const VecType& pt, typename boost::enable_if>* = 0); /** * Update the largest Hilbert value of the node. * * @param node The node being inserted. */ template void InsertNode(TreeType* node); /** * Update the largest Hilbert value of the node and delete the point from the * local dataset. * * @param node The node from which the point is being deleted. * @param localIndex The index of the point in the local dataset. */ template void DeletePoint(TreeType* node, const size_t localIndex); /** * Update the largest Hilbert value of the node. * * @param node The node from which another node is being deleted. * @param nodeIndex The index of the node being deleted. 
*/ template void RemoveNode(TreeType* node, const size_t nodeIndex); /** * Copy the local Hilbert value's pointer. * * @param val The DiscreteHilbertValue object from which the dataset * will be copied. */ DiscreteHilbertValue& operator=(const DiscreteHilbertValue& val); /** * Nullify the localHilbertValues pointer in order to prevent an invalid free. */ void NullifyData(); /** * Update the largest Hilbert value and the local Hilbert values of an * intermediate node. The children of the node (or the points that the node * contains) should be arranged according to their Hilbert values. * * @param node The node in which the information should be updated. */ template void UpdateLargestValue(TreeType* node); /** * This method updates the largest Hilbert value of a leaf node and * redistributes the Hilbert values of points according to their new position * after the split algorithm. * * @param parent The parent of the node that was split. * @param firstSibling The first cooperating sibling. * @param lastSibling The last cooperating sibling. */ template void RedistributeHilbertValues(TreeType* parent, const size_t firstSibling, const size_t lastSibling); /** * Calculate the Hilbert value of the point pt. * * @param pt The point for which the Hilbert value should be calculated. */ template static arma::Col CalculateValue( const VecType& pt, typename boost::enable_if>* = 0); /** * Compare two Hilbert values. It returns 1 if the first value is greater than * the second one, -1 if the first value is less than the second one and 0 if * the values are equal. This method does not compute the Hilbert values. * * @param value1 The first value. * @param value2 The second value. */ static int CompareValues(const arma::Col& value1, const arma::Col& value2); //! Return the number of values. size_t NumValues() const { return numValues; } //! Modify the number of values. size_t& NumValues() { return numValues; } //! Return the Hilbert values. const arma::Mat* LocalHilbertValues() const { return localHilbertValues; } //! Modify the Hilbert values. arma::Mat*& LocalHilbertValues() { return localHilbertValues; } //! Return the ownsLocalHilbertValues variable. bool OwnsLocalHilbertValues() const { return ownsLocalHilbertValues; } //! Modify the ownsLocalHilbertValues variable. bool& OwnsLocalHilbertValues() { return ownsLocalHilbertValues; } //! Return the cached point (valueToInsert). const arma::Col* ValueToInsert() const { return valueToInsert; } //! Modify the cached point (valueToInsert). arma::Col* ValueToInsert() { return valueToInsert; } //! Return the ownsValueToInsert variable. bool OwnsValueToInsert() const { return ownsValueToInsert; } //! Modify the ownsValueToInsert variable. bool& OwnsValueToInsert() { return ownsValueToInsert; } private: //! The number of bits that we can store. static constexpr size_t order = sizeof(HilbertElemType) * CHAR_BIT; //! The local Hilbert values. arma::Mat* localHilbertValues; //! Indicates that the node owns the localHilbertValues variable. bool ownsLocalHilbertValues; //! The number of values in the localHilbertValues dataset. size_t numValues; /** The Hilbert value of the point that is being inserted. * The pointer is the same in all nodes. The value is updated in InsertPoint() * if it is invoked at the root level. This variable helps to avoid * multiple computation of the Hilbert value of a point in the insertion * process. */ arma::Col* valueToInsert; //! Indicates that the node owns the valueToInsert. 
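//! (Only the root allocates valueToInsert; all other nodes share the root's
//! pointer, so at most one node in the tree has this flag set.)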
bool ownsValueToInsert; public: template void Serialize(Archive& ar, const unsigned int /* version */); }; } // namespace tree } // namespace mlpack // Include implementation. #include "discrete_hilbert_value_impl.hpp" #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_DISCRETE_HILBERT_VALUE_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/discrete_hilbert_value_impl.hpp000066400000000000000000000361641315013601400301440ustar00rootroot00000000000000/** * @file discrete_hilbert_value.hpp * @author Mikhail Lozhnikov * * Definition of the DiscreteHilbertValue class, a class that calculates * the ordering of points using the Hilbert curve. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_DISCRETE_HILBERT_VALUE_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_DISCRETE_HILBERT_VALUE_IMPL_HPP #include "discrete_hilbert_value.hpp" namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { template DiscreteHilbertValue::DiscreteHilbertValue() : localHilbertValues(NULL), ownsLocalHilbertValues(false), numValues(0), valueToInsert(NULL), ownsValueToInsert(false) { } template DiscreteHilbertValue::~DiscreteHilbertValue() { if (ownsLocalHilbertValues) delete localHilbertValues; if (ownsValueToInsert) delete valueToInsert; } template template DiscreteHilbertValue::DiscreteHilbertValue(const TreeType* tree) : localHilbertValues(NULL), ownsLocalHilbertValues(false), numValues(0), valueToInsert(tree->Parent() ? tree->Parent()->AuxiliaryInfo().HilbertValue().ValueToInsert() : new arma::Col(tree->Dataset().n_rows)), ownsValueToInsert(tree->Parent() ? false : true) { // Calculate the Hilbert value for all points. if (!tree->Parent()) // This is the root node. ownsLocalHilbertValues = true; else if (tree->Parent()->Child(0).IsLeaf()) { // This is a leaf node. assert(tree->Parent()->NumChildren() > 0); ownsLocalHilbertValues = true; } if (ownsLocalHilbertValues) { localHilbertValues = new arma::Mat(tree->Dataset().n_rows, tree->MaxLeafSize() + 1); } } template template DiscreteHilbertValue:: DiscreteHilbertValue(const DiscreteHilbertValue& other, TreeType* tree, bool deepCopy) : localHilbertValues(NULL), ownsLocalHilbertValues(other.ownsLocalHilbertValues), numValues(other.NumValues()), valueToInsert(NULL), ownsValueToInsert(other.ownsValueToInsert) { if (deepCopy) { // Only leaf nodes own the localHilbertValues dataset. // Intermediate nodes store the pointer to the corresponding dataset. if (ownsLocalHilbertValues) localHilbertValues = new arma::Mat( *other.LocalHilbertValues()); else localHilbertValues = NULL; // Only the root owns ownsValueToInsert. Other nodes the pointer. if (ownsValueToInsert) valueToInsert = new arma::Col( *other.ValueToInsert()); else { assert(tree->Parent() != NULL); // Copy the pointer from the parent node. valueToInsert = const_cast*> (tree->Parent()->AuxiliaryInfo().HilbertValue().ValueToInsert()); } if (tree->NumChildren() == 0) { // We have to update pointers to the localHilbertValues dataset in // intermediate nodes. TreeType* node = tree; while (node->Parent() != NULL) { if (node->Parent()->NumChildren() > 1) { const std::vector parentChildren = node->AuxiliaryInfo().Children(node->Parent()); // If node is not the last child of its parent, we shouldn't copy // the localHilbertValues pointer. 
if (parentChildren[node->Parent()->NumChildren() - 2] == NULL) break; } node->Parent()->AuxiliaryInfo().HilbertValue().LocalHilbertValues() = localHilbertValues; node = node->Parent(); } } } else { localHilbertValues = const_cast*> (other.LocalHilbertValues()); valueToInsert = const_cast*> (other.ValueToInsert()); } } template DiscreteHilbertValue:: DiscreteHilbertValue(DiscreteHilbertValue&& other) : localHilbertValues(other.localHilbertValues), ownsLocalHilbertValues(other.ownsLocalHilbertValues), numValues(other.numValues), valueToInsert(other.valueToInsert), ownsValueToInsert(other.ownsValueToInsert) { other.localHilbertValues = NULL; other.ownsLocalHilbertValues = false; other.numValues = 0; other.valueToInsert = NULL; other.ownsValueToInsert = false; } template template arma::Col::HilbertElemType> DiscreteHilbertValue:: CalculateValue(const VecType& pt,typename boost::enable_if>*) { typedef typename VecType::elem_type VecElemType; arma::Col res(pt.n_rows); // Calculate the number of bits for the exponent. const int numExpBits = std::ceil(std::log2( std::numeric_limits::max_exponent - std::numeric_limits::min_exponent + 1.0)); // Calculate the number of bits for the mantissa. const int numMantBits = order - numExpBits - 1; for (size_t i = 0; i < pt.n_rows; i++) { int e; VecElemType normalizedVal = std::frexp(pt(i),&e); bool sgn = std::signbit(normalizedVal); if (pt(i) == 0) e = std::numeric_limits::min_exponent; if (sgn) normalizedVal = -normalizedVal; if (e < std::numeric_limits::min_exponent) { HilbertElemType tmp = (HilbertElemType) 1 << (std::numeric_limits::min_exponent - e); e = std::numeric_limits::min_exponent; normalizedVal /= tmp; } // Extract the mantissa. HilbertElemType tmp = (HilbertElemType) 1 << numMantBits; res(i) = std::floor(normalizedVal * tmp); // Add the exponent. assert(res(i) < ((HilbertElemType) 1 << numMantBits)); res(i) |= ((HilbertElemType) (e - std::numeric_limits::min_exponent)) << numMantBits; assert(res(i) < ((HilbertElemType) 1 << (order - 1)) - 1); // Negative values should be inverted. if (sgn) { res(i) = ((HilbertElemType) 1 << (order - 1)) - 1 - res(i); assert((res(i) >> (order - 1)) == 0); } else { res(i) |= (HilbertElemType) 1 << (order - 1); assert((res(i) >> (order - 1)) == 1); } } HilbertElemType M = (HilbertElemType) 1 << (order - 1); // Since the Hilbert curve is continuous we should permutate and intend // coordinate axes depending on the position of the point. for (HilbertElemType Q = M; Q > 1; Q >>= 1) { HilbertElemType P = Q - 1; for (size_t i = 0; i < pt.n_rows; i++) { if (res(i) & Q) // Invert. res(0) ^= P; else // Permutate. { HilbertElemType t = (res(0) ^ res(i)) & P; res(0) ^= t; res(i) ^= t; } } } // Gray encode. for (size_t i = 1; i < pt.n_rows; i++) res(i) ^= res(i - 1); HilbertElemType t = 0; // Some coordinate axes should be inverted. for (HilbertElemType Q = M; Q > 1; Q >>= 1) if (res(pt.n_rows - 1) & Q) t ^= Q - 1; for (size_t i = 0; i < pt.n_rows; i++) res(i) ^= t; // We should rearrange bits in order to compare two Hilbert values faster. 
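// (Bit i of coordinate j is moved to bit position (i * pt.n_rows + j) of the
// concatenated result below, so comparing the rearranged columns word by word
// is a lexicographic comparison of the interleaved bits; see
// CompareValues().)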
arma::Col rearrangedResult(pt.n_rows, arma::fill::zeros); for (size_t i = 0; i < order; i++) for (size_t j = 0; j < pt.n_rows; j++) { size_t bit = (i * pt.n_rows + j) % order; size_t row = (i * pt.n_rows + j) / order; rearrangedResult(row) |= (((res(j) >> (order - 1 - i)) & 1) << (order - 1 - bit)); } return rearrangedResult; } template int DiscreteHilbertValue:: CompareValues(const arma::Col& value1, const arma::Col& value2) { for (size_t i = 0; i < value1.n_rows; i++) { if (value1(i) > value2(i)) return 1; else if (value1(i) < value2(i)) return -1; } return 0; } template template int DiscreteHilbertValue:: ComparePoints(const VecType1& pt1, const VecType2& pt2, typename boost::enable_if>*, typename boost::enable_if>*) { arma::Col val1 = CalculateValue(pt1); arma::Col val2 = CalculateValue(pt2); return CompareValues(val1, val2); } template int DiscreteHilbertValue:: CompareValues(const DiscreteHilbertValue& val1, const DiscreteHilbertValue& val2) { if (val1.NumValues() > 0 && val2.NumValues() == 0) return 1; else if (val1.NumValues() == 0 && val2.NumValues() > 0) return -1; else if (val1.NumValues() == 0 && val2.NumValues() == 0) return 0; return CompareValues(val1.LocalHilbertValues()->col(val1.NumValues() - 1), val2.LocalHilbertValues()->col(val2.NumValues() - 1)); } template int DiscreteHilbertValue:: CompareWith(const DiscreteHilbertValue& val) const { return CompareValues(*this, val); } template template int DiscreteHilbertValue:: CompareWith(const VecType& pt, typename boost::enable_if>*) const { arma::Col val = CalculateValue(pt); if (numValues == 0) return -1; return CompareValues(localHilbertValues->col(numValues - 1),val); } template template int DiscreteHilbertValue:: CompareWithCachedPoint(const VecType& , typename boost::enable_if>*) const { if (numValues == 0) return -1; return CompareValues(localHilbertValues->col(numValues - 1), *valueToInsert); } template template size_t DiscreteHilbertValue:: InsertPoint(TreeType *node, const VecType& pt, typename boost::enable_if>*) { size_t i = 0; // All points are inserted to the root node. if (!node->Parent()) *valueToInsert = CalculateValue(pt); if (node->IsLeaf()) { // Find an appropriate place. for (i = 0; i < numValues; i++) if (CompareValues(localHilbertValues->col(i), *valueToInsert) > 0) break; for (size_t j = numValues; j > i; j--) localHilbertValues->col(j) = localHilbertValues->col(j-1); localHilbertValues->col(i) = *valueToInsert; numValues++; // Propagate changes of the largest Hilbert value downward. 
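// (Concretely: each ancestor, from the parent up to the root, refreshes its
// cached largest Hilbert value via UpdateLargestValue().)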
TreeType* root = node->Parent(); while (root != NULL) { root->AuxiliaryInfo().HilbertValue().UpdateLargestValue(root); root = root->Parent(); } } return i; } template template void DiscreteHilbertValue::InsertNode(TreeType* node) { DiscreteHilbertValue &val = node->AuxiliaryInfo().HilbertValue(); if (CompareWith(node,val) < 0) { localHilbertValues = val.LocalHilbertValues(); numValues = val.NumValues(); } } template template void DiscreteHilbertValue:: DeletePoint(TreeType* /* node */, const size_t localIndex) { // Delete the Hilbert value from the local dataset for (size_t i = numValues - 1; i > localIndex; i--) localHilbertValues->col(i - 1) = localHilbertValues->col(i); numValues--; } template template void DiscreteHilbertValue:: RemoveNode(TreeType* node, const size_t nodeIndex) { if (node->NumChildren() <= 1) { localHilbertValues = NULL; numValues = 0; return; } if (nodeIndex + 1 == node->NumChildren()) { // Update the largest Hilbert value if the value exists TreeType& child = node->Child(nodeIndex - 1); if (child.AuxiliaryInfo.HilbertValue().NumValues() != 0) { numValues = child.AuxiliaryInfo.HilbertValue().NumValues(); localHilbertValues = child.AuxiliaryInfo.HilbertValue().LocalHilbertValues(); } else { localHilbertValues = NULL; numValues = 0; } } } template DiscreteHilbertValue& DiscreteHilbertValue:: operator=(const DiscreteHilbertValue& val) { localHilbertValues = const_cast* > (val.LocalHilbertValues()); ownsLocalHilbertValues = false; numValues = val.NumValues(); return *this; } template void DiscreteHilbertValue::NullifyData() { ownsLocalHilbertValues = false; } template template void DiscreteHilbertValue::UpdateLargestValue(TreeType* node) { if (!node->IsLeaf()) { // Update the largest Hilbert value localHilbertValues = node->Child(node->NumChildren() - 1).AuxiliaryInfo().HilbertValue().LocalHilbertValues(); numValues = node->Child(node->NumChildren() - 1).AuxiliaryInfo().HilbertValue().NumValues(); } } template template void DiscreteHilbertValue::RedistributeHilbertValues( TreeType* parent, const size_t firstSibling, const size_t lastSibling) { // We need to update the local dataset if points were redistributed. size_t numPoints = 0; for (size_t i = firstSibling; i <= lastSibling; i++) numPoints += parent->Child(i).NumPoints(); // Copy the local Hilbert values. arma::Mat tmp(localHilbertValues->n_rows, numPoints); size_t iPoint = 0; for (size_t i = firstSibling; i<= lastSibling; i++) { DiscreteHilbertValue &value = parent->Child(i).AuxiliaryInfo().HilbertValue(); for (size_t j = 0; j < value.NumValues(); j++) { tmp.col(iPoint) = value.LocalHilbertValues()->col(j); iPoint++; } } assert(iPoint == numPoints); iPoint = 0; // Redistribute the Hilbert values. 
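// (The values were copied out in sibling order above, so sibling i simply
// takes the next parent->Child(i).NumPoints() columns of tmp.)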
for (size_t i = firstSibling; i <= lastSibling; i++) { DiscreteHilbertValue &value = parent->Child(i).AuxiliaryInfo().HilbertValue(); for (size_t j = 0; j < parent->Child(i).NumPoints(); j++) { value.LocalHilbertValues()->col(j) = tmp.col(iPoint); iPoint++; } value.NumValues() = parent->Child(i).NumPoints(); } assert(iPoint == numPoints); } template template void DiscreteHilbertValue:: Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(localHilbertValues, "localHilbertValues"); ar & CreateNVP(ownsLocalHilbertValues, "ownsLocalHilbertValues"); ar & CreateNVP(numValues, "numValues"); ar & CreateNVP(valueToInsert, "valueToInsert"); ar & CreateNVP(ownsValueToInsert, "ownsValueToInsert"); } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_DISCRETE_HILBERT_VALUE_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/dual_tree_traverser.hpp000066400000000000000000000066171315013601400264550ustar00rootroot00000000000000/** * @file dual_tree_traverser.hpp * @author Andrew Wells * * A nested class of Rectangle Tree for traversing rectangle type trees * with a given set of rules which indicate the branches to prune and the * order in which to recurse. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_DUAL_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_DUAL_TREE_TRAVERSER_HPP #include #include "rectangle_tree.hpp" namespace mlpack { namespace tree { template class AuxiliaryInformationType> template class RectangleTree::DualTreeTraverser { public: /** * Instantiate the dual-tree traverser with the given rule set. */ DualTreeTraverser(RuleType& rule); /** * Traverse the two trees. This does not reset the number of prunes. * * @param queryNode The query node to be traversed. * @param referenceNode The reference node to be traversed. * @param score The score of the current node combination. */ void Traverse(RectangleTree& queryNode, RectangleTree& referenceNode); //! Get the number of prunes. size_t NumPrunes() const { return numPrunes; } //! Modify the number of prunes. size_t& NumPrunes() { return numPrunes; } //! Get the number of visited combinations. size_t NumVisited() const { return numVisited; } //! Modify the number of visited combinations. size_t& NumVisited() { return numVisited; } //! Get the number of times a node combination was scored. size_t NumScores() const { return numScores; } //! Modify the number of times a node combination was scored. size_t& NumScores() { return numScores; } //! Get the number of times a base case was calculated. size_t NumBaseCases() const { return numBaseCases; } //! Modify the number of times a base case was calculated. size_t& NumBaseCases() { return numBaseCases; } private: // We use this struct and this function to make the sorting and scoring easy // and efficient: struct NodeAndScore { RectangleTree* node; double score; typename RuleType::TraversalInfoType travInfo; }; static bool nodeComparator(const NodeAndScore& obj1, const NodeAndScore& obj2) { return obj1.score < obj2.score; } //! Reference to the rules with which the trees will be traversed. RuleType& rule; //! The number of prunes. size_t numPrunes; //! The number of node combinations that have been visited during traversal. 
  size_t numVisited;

  //! The number of times a node combination was scored.
  size_t numScores;

  //! The number of times a base case was calculated.
  size_t numBaseCases;

  //! Traversal information, held in the class so that it isn't continually
  //! being reallocated.
  typename RuleType::TraversalInfoType traversalInfo;
};

} // namespace tree
} // namespace mlpack

// Include implementation.
#include "dual_tree_traverser_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/dual_tree_traverser_impl.hpp000066400000000000000000000135671315013601400275000ustar00rootroot00000000000000/**
 * @file dual_tree_traverser_impl.hpp
 * @author Andrew Wells
 *
 * A class for traversing rectangle type trees with a given set of rules
 * which indicate the branches to prune and the order in which to recurse.
 * This is a depth-first traverser.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_DUAL_TREE_TRAVERSER_IMPL_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_DUAL_TREE_TRAVERSER_IMPL_HPP

#include "dual_tree_traverser.hpp"

#include <algorithm>
#include <vector>

namespace mlpack {
namespace tree {

template<typename MetricType,
         typename StatisticType,
         typename MatType,
         typename SplitType,
         typename DescentType,
         template<typename> class AuxiliaryInformationType>
template<typename RuleType>
RectangleTree<MetricType, StatisticType, MatType, SplitType, DescentType,
              AuxiliaryInformationType>::
DualTreeTraverser<RuleType>::DualTreeTraverser(RuleType& rule) :
    rule(rule),
    numPrunes(0),
    numVisited(0),
    numScores(0),
    numBaseCases(0)
{ /* Nothing to do */ }

template<typename MetricType,
         typename StatisticType,
         typename MatType,
         typename SplitType,
         typename DescentType,
         template<typename> class AuxiliaryInformationType>
template<typename RuleType>
void RectangleTree<MetricType, StatisticType, MatType, SplitType, DescentType,
                   AuxiliaryInformationType>::
DualTreeTraverser<RuleType>::Traverse(
    RectangleTree<MetricType, StatisticType, MatType, SplitType, DescentType,
                  AuxiliaryInformationType>& queryNode,
    RectangleTree<MetricType, StatisticType, MatType, SplitType, DescentType,
                  AuxiliaryInformationType>& referenceNode)
{
  // Increment the visit counter.
  ++numVisited;

  // Store the current traversal info.
  traversalInfo = rule.TraversalInfo();

  // We now have four options.
  // 1) Both nodes are leaf nodes.
  // 2) Only the reference node is a leaf node.
  // 3) Only the query node is a leaf node.
  // 4) Neither node is a leaf node.
  // We go through those options in that order.
  if (queryNode.IsLeaf() && referenceNode.IsLeaf())
  {
    // Evaluate the base case.  Do the query points on the outside so we can
    // possibly prune the reference node for that particular point.
    for (size_t query = 0; query < queryNode.Count(); ++query)
    {
      // Restore the traversal information.
      rule.TraversalInfo() = traversalInfo;
      const double childScore = rule.Score(queryNode.Point(query),
          referenceNode);
      if (childScore == DBL_MAX)
        continue; // We don't require a search in this reference node.

      for (size_t ref = 0; ref < referenceNode.Count(); ++ref)
        rule.BaseCase(queryNode.Point(query), referenceNode.Point(ref));
      numBaseCases += referenceNode.Count();
    }
  }
  else if (!queryNode.IsLeaf() && referenceNode.IsLeaf())
  {
    // We only need to traverse down the query node.  Order doesn't matter
    // here.
    for (size_t i = 0; i < queryNode.NumChildren(); ++i)
    {
      // Before recursing, we have to set the traversal information correctly.
      rule.TraversalInfo() = traversalInfo;
      ++numScores;
      if (rule.Score(queryNode.Child(i), referenceNode) < DBL_MAX)
        Traverse(queryNode.Child(i), referenceNode);
      else
        numPrunes++;
    }
  }
  else if (queryNode.IsLeaf() && !referenceNode.IsLeaf())
  {
    // We only need to traverse down the reference node.  Order does matter
    // here.

    // We sort the children of the reference node by their scores.
    std::vector<NodeAndScore> nodesAndScores(referenceNode.NumChildren());
    for (size_t i = 0; i < referenceNode.NumChildren(); i++)
    {
      rule.TraversalInfo() = traversalInfo;
      nodesAndScores[i].node = &(referenceNode.Child(i));
      nodesAndScores[i].score = rule.Score(queryNode,
          *(nodesAndScores[i].node));
      nodesAndScores[i].travInfo = rule.TraversalInfo();
    }
    std::sort(nodesAndScores.begin(), nodesAndScores.end(), nodeComparator);
    numScores += nodesAndScores.size();

    for (size_t i = 0; i < nodesAndScores.size(); i++)
    {
      rule.TraversalInfo() = nodesAndScores[i].travInfo;
      if (rule.Rescore(queryNode, *(nodesAndScores[i].node),
          nodesAndScores[i].score) < DBL_MAX)
      {
        Traverse(queryNode, *(nodesAndScores[i].node));
      }
      else
      {
        numPrunes += nodesAndScores.size() - i;
        break;
      }
    }
  }
  else
  {
    // We need to traverse down both the reference and the query trees.  We
    // loop through all of the query nodes, and for each of them, we loop
    // through the reference nodes to see where we need to descend.
    for (size_t j = 0; j < queryNode.NumChildren(); j++)
    {
      // We sort the children of the reference node by their scores.
      std::vector<NodeAndScore> nodesAndScores(referenceNode.NumChildren());
      for (size_t i = 0; i < referenceNode.NumChildren(); i++)
      {
        rule.TraversalInfo() = traversalInfo;
        nodesAndScores[i].node = &(referenceNode.Child(i));
        nodesAndScores[i].score = rule.Score(queryNode.Child(j),
            *nodesAndScores[i].node);
        nodesAndScores[i].travInfo = rule.TraversalInfo();
      }
      std::sort(nodesAndScores.begin(), nodesAndScores.end(), nodeComparator);
      numScores += nodesAndScores.size();

      for (size_t i = 0; i < nodesAndScores.size(); i++)
      {
        rule.TraversalInfo() = nodesAndScores[i].travInfo;
        if (rule.Rescore(queryNode.Child(j), *(nodesAndScores[i].node),
            nodesAndScores[i].score) < DBL_MAX)
        {
          Traverse(queryNode.Child(j), *(nodesAndScores[i].node));
        }
        else
        {
          numPrunes += nodesAndScores.size() - i;
          break;
        }
      }
    }
  }
}

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/hilbert_r_tree_auxiliary_information.hpp000066400000000000000000000123731315013601400320750ustar00rootroot00000000000000/**
 * @file hilbert_r_tree_auxiliary_information.hpp
 * @author Mikhail Lozhnikov
 *
 * Definition of the HilbertRTreeAuxiliaryInformation class, a class that
 * provides some Hilbert r-tree specific information about the nodes.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_AUXILIARY_INFORMATION_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_AUXILIARY_INFORMATION_HPP

namespace mlpack {
namespace tree {

template<typename TreeType,
         template<typename> class HilbertValueType>
class HilbertRTreeAuxiliaryInformation
{
 public:
  //! The element type held by the tree.
  typedef typename TreeType::ElemType ElemType;

  //! Default constructor.
  HilbertRTreeAuxiliaryInformation();

  /**
   * Construct this as an auxiliary information for the given node.
   *
   * @param node The node that stores this auxiliary information.
   */
  HilbertRTreeAuxiliaryInformation(const TreeType* node);

  /**
   * Create an auxiliary information object by copying from another object.
   *
   * @param other Another auxiliary information object from which the
   *    information will be copied.
   * @param tree The node that holds the auxiliary information.
   * @param deepCopy If false, the new object uses the same memory
   *    (not used here).
*/ HilbertRTreeAuxiliaryInformation( const HilbertRTreeAuxiliaryInformation& other, TreeType* tree = NULL, bool deepCopy = true); /** * Create an auxiliary information object by moving from the other node. * * @param other The object from which the information will be moved. */ HilbertRTreeAuxiliaryInformation(HilbertRTreeAuxiliaryInformation&& other); /** * Copy the auxiliary information. * * @param other The object from which the information will be moved. */ HilbertRTreeAuxiliaryInformation& operator=( const HilbertRTreeAuxiliaryInformation& other); /** * The Hilbert R tree requires to insert points according to their Hilbert * value. This method should take care of it. It returns false if it does * nothing and true if it handles the insertion process. * * @param node The node in which the point is being inserted. * @param point The number of the point being inserted. */ bool HandlePointInsertion(TreeType* node, const size_t point); /** * The Hilbert R tree requires to insert nodes according to their Hilbert * value. This method should take care of it. It returns false if it does * nothing and true if it handles the insertion process. * * @param node The node in which the nodeToInsert is being inserted. * @param nodeToInsert The node being inserted. * @param insertionLevel The level of the tree at which the nodeToInsert * should be inserted. */ bool HandleNodeInsertion(TreeType* node, TreeType* nodeToInsert, bool insertionLevel); /** * The Hilbert R tree requires all points to be arranged according to their * Hilbert value. This method should take care of saving this property after * the deletion process. It returns false if it does nothing and true if it * handles the deletion process. * * @param node The node from which the point is being deleted. * @param localIndex The index of the point being deleted. */ bool HandlePointDeletion(TreeType* node, const size_t localIndex); /** * The Hilbert R tree requires all nodes to be arranged according to their * Hilbert value. This method should take care of saving this property after * the deletion process. It returns false if it does nothing and true if it * handles the deletion process. * * @param node The node from which the node is being deleted. * @param nodeIndex The index of the node being deleted. */ bool HandleNodeRemoval(TreeType* node, const size_t nodeIndex); /** * Update the auxiliary information in the node. The method returns true if * the update should be propagated downward. * * @param node The node in which the auxiliary information being update. */ bool UpdateAuxiliaryInfo(TreeType* node); //! Clear memory. void NullifyData(); //! Return the children vector of the tree. static const std::vector Children(const TreeType* tree) { return tree->children; } private: //! The largest Hilbert value of a point enclosed by the node. HilbertValueType hilbertValue; public: //! Return the largest Hilbert value of a point covered by the node. const HilbertValueType& HilbertValue() const { return hilbertValue; } //! Modify the largest Hilbert value of a point covered by the node. HilbertValueType& HilbertValue() { return hilbertValue; } /** * Serialize the information. 
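   * (Serialization is done through mlpack's data::CreateNVP name-value-pair
   * wrappers around boost::serialization; see the implementation file for
   * the exact fields that are saved.)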
*/ template void Serialize(Archive& ar, const unsigned int /* version */); }; } // namespace tree } // namespace mlpack #include "hilbert_r_tree_auxiliary_information_impl.hpp" #endif//MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_AUXILIARY_INFORMATION_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/hilbert_r_tree_auxiliary_information_impl.hpp000066400000000000000000000135601315013601400331150ustar00rootroot00000000000000/** * @file hilbert_r_tree_auxiliary_information.hpp * @author Mikhail Lozhnikov * * Implementation of the HilbertRTreeAuxiliaryInformation class, a class that * provides some Hilbert r-tree specific information about the nodes. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_AUXILIARY_INFORMATION_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_AUXILIARY_INFORMATION_IMPL_HPP #include "hilbert_r_tree_auxiliary_information.hpp" namespace mlpack { namespace tree { template class HilbertValueType> HilbertRTreeAuxiliaryInformation:: HilbertRTreeAuxiliaryInformation() { } template class HilbertValueType> HilbertRTreeAuxiliaryInformation:: HilbertRTreeAuxiliaryInformation(const TreeType* node) : hilbertValue(node) { } template class HilbertValueType> HilbertRTreeAuxiliaryInformation:: HilbertRTreeAuxiliaryInformation( const HilbertRTreeAuxiliaryInformation& other, TreeType* tree, bool deepCopy) : hilbertValue(other.HilbertValue(), tree, deepCopy) { } template class HilbertValueType> HilbertRTreeAuxiliaryInformation:: HilbertRTreeAuxiliaryInformation(HilbertRTreeAuxiliaryInformation&& other) : hilbertValue(std::move(other.hilbertValue)) { } template class HilbertValueType> HilbertRTreeAuxiliaryInformation& HilbertRTreeAuxiliaryInformation::operator=( const HilbertRTreeAuxiliaryInformation& other) { hilbertValue = other.hilbertValue; return *this; } template class HilbertValueType> bool HilbertRTreeAuxiliaryInformation:: HandlePointInsertion(TreeType* node, const size_t point) { if (node->IsLeaf()) { // Get the position at which the point should be inserted, and then update // the largest Hilbert value of the node. size_t pos = hilbertValue.InsertPoint(node, node->Dataset().col(point)); // Move points. for (size_t i = node->NumPoints(); i > pos; i--) node->Point(i) = node->Point(i - 1); // Insert the point. node->Point(pos) = point; node->Count()++; } else { // Calculate the Hilbert value. hilbertValue.InsertPoint(node, node->Dataset().col(point)); } return true; } template class HilbertValueType> bool HilbertRTreeAuxiliaryInformation:: HandleNodeInsertion(TreeType* node, TreeType* nodeToInsert, bool insertionLevel) { if (insertionLevel) { size_t pos; // Find the best position for the node being inserted. // The node should be inserted according to its Hilbert value. for (pos = 0; pos < node->NumChildren(); pos++) if (HilbertValueType::CompareValues( node->Child(pos).AuxiliaryInfo().HilbertValue(), nodeToInsert->AuxiliaryInfo().HilbertValue()) < 0) break; // Move nodes. for (size_t i = node->NumChildren(); i > pos; i--) node->children[i] = node->children[i - 1]; // Insert the node. node->children[pos] = nodeToInsert; nodeToInsert->Parent() = node; // Update the largest Hilbert value. 
hilbertValue.InsertNode(nodeToInsert); } else hilbertValue.InsertNode(nodeToInsert); // Update the largest Hilbert value. return true; } template class HilbertValueType> bool HilbertRTreeAuxiliaryInformation:: HandlePointDeletion(TreeType* node, const size_t localIndex) { // Update the largest Hilbert value. hilbertValue.DeletePoint(node,localIndex); for (size_t i = localIndex + 1; localIndex < node->NumPoints(); i++) node->Point(i - 1) = node->Point(i); node->NumPoints()--; return true; } template class HilbertValueType> bool HilbertRTreeAuxiliaryInformation:: HandleNodeRemoval(TreeType* node, const size_t nodeIndex) { // Update the largest Hilbert value. hilbertValue.RemoveNode(node,nodeIndex); for (size_t i = nodeIndex + 1; nodeIndex < node->NumChildren(); i++) node->children[i - 1] = node->children[i]; node->NumChildren()--; return true; } template class HilbertValueType> bool HilbertRTreeAuxiliaryInformation:: UpdateAuxiliaryInfo(TreeType* node) { if (node->IsLeaf()) // Should already be updated return true; TreeType& child = node->Child(node->NumChildren() - 1); if (hilbertValue.CompareWith(child.AuxiliaryInfo().HilbertValue()) < 0) { hilbertValue = child.AuxiliaryInfo().HilbertValue(); return true; } return false; } template class HilbertValueType> void HilbertRTreeAuxiliaryInformation:: NullifyData() { hilbertValue.NullifyData(); } template class HilbertValueType> template void HilbertRTreeAuxiliaryInformation:: Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(hilbertValue, "hilbertValue"); } } // namespace tree } // namespace mlpack #endif//MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_AUXILIARY_INFORMATION_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/hilbert_r_tree_descent_heuristic.hpp000066400000000000000000000040111315013601400311530ustar00rootroot00000000000000/** * @file hilbert_r_tree_descent_heuristic.hpp * @author Mikhail Lozhnikov * * Definition of HilbertRTreeDescentHeuristic, a class that chooses the best * child of a node in an R tree when inserting a new point. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_DESCENT_HEURISTIC_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_DESCENT_HEURISTIC_HPP #include namespace mlpack { namespace tree { /** * This class chooses the best child of a node in a Hilbert R tree when * inserting a new point. This is done, in this class, by using the Hilbert * value of the point to be inserted. */ class HilbertRTreeDescentHeuristic { public: /** * Evaluate the node using a heuristic. Returns the number of the node with * minimum largest Hilbert value that is greater than the Hilbert value of the * point being inserted. * * @param node The node that is being evaluated. * @param point The number of the point that is being inserted. */ template static size_t ChooseDescentNode(const TreeType* node, const size_t point); /** * Evaluate the node using a heuristic. Returns the number of the node with * minimum largest Hilbert value that is greater than the largest Hilbert * value of the point being inserted. * * @param node The node that is being evaluated. * @param insertedNode The node that is being inserted. 
*/ template static size_t ChooseDescentNode(const TreeType* node, const TreeType* insertedNode); }; } // namespace tree } // namespace mlpack #include "hilbert_r_tree_descent_heuristic_impl.hpp" #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_DESCENT_HEURISTIC_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/hilbert_r_tree_descent_heuristic_impl.hpp000066400000000000000000000032641315013601400322050ustar00rootroot00000000000000/** * @file hilbert_r_tree_descent_heuristic_impl.hpp * @author Mikhail Lozhnikov * * Implementation of HilbertRTreeDescentHeuristic, a class that chooses the best * child of a node in an R tree when inserting a new point. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_DESCENT_HEURISTIC_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_DESCENT_HEURISTIC_IMPL_HPP #include "hilbert_r_tree_descent_heuristic.hpp" namespace mlpack { namespace tree { template size_t HilbertRTreeDescentHeuristic::ChooseDescentNode( const TreeType* node, const size_t point) { size_t bestIndex = 0; for (bestIndex = 0; bestIndex < node->NumChildren() - 1; bestIndex++) if (node->Child(bestIndex).AuxiliaryInfo().HilbertValue(). CompareWithCachedPoint(node->Dataset().col(point)) > 0) break; return bestIndex; } template size_t HilbertRTreeDescentHeuristic::ChooseDescentNode( const TreeType* node, const TreeType* /* insertedNode */) { size_t bestIndex = 0; for (bestIndex = 0; bestIndex < node->NumChildren() - 1; bestIndex++) if (node->Child(bestIndex).AuxiliaryInfo().HilbertValue(). CompareWith(node, node->AuxiliaryInfo().HilbertValue()) > 0) break; return bestIndex; } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_DESCENT_HEURISTIC_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/hilbert_r_tree_split.hpp000066400000000000000000000066401315013601400266140ustar00rootroot00000000000000/** * @file hilbert_r_tree_split.hpp * @author Mikhail Lozhnikov * * Definition of the HilbertRTreeSplit class, a class that splits the nodes of an R * tree, starting at a leaf node and moving upwards if necessary. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_SPLIT_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_SPLIT_HPP #include namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { /** * The splitting procedure for the Hilbert R tree. The template parameter * splitOrder is the order of the splitting policy. The Hilbert R tree splits a * node on overflow, turning splitOrder nodes into (splitOrder + 1) nodes. * * @tparam splitOrder Number of nodes to split. */ template class HilbertRTreeSplit { public: /** * Split a leaf node using the "default" algorithm. If necessary, this split * will propagate upwards through the tree. * * @param node The node that is being split. * @param relevels Not used. 
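   *
   * A hypothetical calling sketch (the tree invokes this internally on
   * overflow; the names below are illustrative and assume a splitOrder of 2
   * and a bool relevels vector, as used by RectangleTree):
   *
   * @code
   * std::vector<bool> relevels; // not used by the Hilbert R tree split
   * HilbertRTreeSplit<2>::SplitLeafNode(tree, relevels);
   * @endcode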
*/ template static void SplitLeafNode(TreeType* tree, std::vector& relevels); /** * Split a non-leaf node using the "default" algorithm. If this is a root * node, the tree increases in depth. * * @param node The node that is being split. * @param relevels Not used. */ template static bool SplitNonLeafNode(TreeType* tree, std::vector& relevels); private: /** * Try to find splitOrder cooperating siblings in order to redistribute their * children evenly. Returns true on success. * * @param parent The parent of of the overflowing node. * @param iTree The number of the overflowing node. * @param firstSibling The first cooperating sibling. * @param lastSibling The last cooperating sibling. */ template static bool FindCooperatingSiblings(TreeType* parent, const size_t iTree, size_t& firstSibling, size_t& lastSibling); /** * Redistribute the children of the cooperating siblings evenly among them. * * @param parent The parent of of the overflowing node. * @param firstSibling The first cooperating sibling. * @param lastSibling The last cooperating sibling. */ template static void RedistributeNodesEvenly(const TreeType* parent, const size_t firstSibling, const size_t lastSibling); /** * Redistribute the points of the cooperating siblings evenly among them. * * @param parent The parent of of the overflowing node. * @param firstSibling The first cooperating sibling. * @param lastSibling The last cooperating sibling. */ template static void RedistributePointsEvenly(TreeType* parent, const size_t firstSibling, const size_t lastSibling); }; } // namespace tree } // namespace mlpack // Include implementation. #include "hilbert_r_tree_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/hilbert_r_tree_split_impl.hpp000066400000000000000000000267241315013601400276420ustar00rootroot00000000000000/** * @file hilbert_r_tree_split_impl.hpp * @author Mikhail Lozhnikov * * Implementation of class (HilbertRTreeSplit) to split a RectangleTree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_SPLIT_IMPL_HPP #include "hilbert_r_tree_split.hpp" #include "rectangle_tree.hpp" #include namespace mlpack { namespace tree { template template void HilbertRTreeSplit::SplitLeafNode(TreeType* tree, std::vector& relevels) { if (tree->Count() <= tree->MaxLeafSize()) return; // If we are splitting the root node, we need will do things differently so // that the constructor and other methods don't confuse the end user by giving // an address of another node. if (tree->Parent() == NULL) { // We actually want to copy this way. Pointers and everything. TreeType* copy = new TreeType(*tree, false); // Only the root node owns this variable. copy->AuxiliaryInfo().HilbertValue().OwnsValueToInsert() = false; // Only leaf nodes own this variable. tree->AuxiliaryInfo().HilbertValue().OwnsLocalHilbertValues() = false; copy->Parent() = tree; tree->Count() = 0; tree->NullifyData(); // Because this was a leaf node, numChildren must be 0. 
tree->children[(tree->NumChildren())++] = copy; SplitLeafNode(copy, relevels); return; } TreeType* parent = tree->Parent(); size_t iTree = 0; for (iTree = 0; parent->children[iTree] != tree; iTree++); // Try to find splitOrder cooperating siblings in order to redistribute points // among them and avoid split. size_t firstSibling, lastSibling; if (FindCooperatingSiblings(parent, iTree, firstSibling, lastSibling)) { RedistributePointsEvenly(parent, firstSibling, lastSibling); return; } // We can not find splitOrder cooperating siblings since they are all full. // We introduce new one instead. size_t iNewSibling = (iTree + splitOrder < parent->NumChildren() ? iTree + splitOrder : parent->NumChildren()); for (size_t i = parent->NumChildren(); i > iNewSibling ; i--) parent->children[i] = parent->children[i - 1]; parent->NumChildren()++; parent->children[iNewSibling] = new TreeType(parent); lastSibling = (iTree + splitOrder < parent->NumChildren() ? iTree + splitOrder : parent->NumChildren() - 1); firstSibling = (lastSibling > splitOrder ? lastSibling - splitOrder : 0); assert(lastSibling - firstSibling <= splitOrder); assert(firstSibling >= 0); assert(lastSibling < parent->NumChildren()); // Redistribute the points among (splitOrder + 1) cooperating siblings evenly. RedistributePointsEvenly(parent, firstSibling, lastSibling); if (parent->NumChildren() == parent->MaxNumChildren() + 1) SplitNonLeafNode(parent, relevels); } template template bool HilbertRTreeSplit:: SplitNonLeafNode(TreeType* tree, std::vector& relevels) { // If we are splitting the root node, we need will do things differently so // that the constructor and other methods don't confuse the end user by giving // an address of another node. if (tree->Parent() == NULL) { // We actually want to copy this way. Pointers and everything. TreeType* copy = new TreeType(*tree, false); // Only the root node owns this variable. copy->AuxiliaryInfo().HilbertValue().OwnsValueToInsert() = false; copy->Parent() = tree; tree->NumChildren() = 0; tree->NullifyData(); tree->children[(tree->NumChildren())++] = copy; SplitNonLeafNode(copy, relevels); return true; } TreeType* parent = tree->Parent(); size_t iTree = 0; for (iTree = 0; parent->children[iTree] != tree; iTree++); // Try to find splitOrder cooperating siblings in order to redistribute // children among them and avoid split. size_t firstSibling, lastSibling; if (FindCooperatingSiblings(parent, iTree, firstSibling, lastSibling)) { RedistributeNodesEvenly(parent, firstSibling, lastSibling); return false; } // We can not find splitOrder cooperating siblings since they are all full. // We introduce new one instead. size_t iNewSibling = (iTree + splitOrder < parent->NumChildren() ? iTree + splitOrder : parent->NumChildren()); for (size_t i = parent->NumChildren(); i > iNewSibling ; i--) parent->children[i] = parent->children[i - 1]; parent->NumChildren()++; parent->children[iNewSibling] = new TreeType(parent); lastSibling = (iTree + splitOrder < parent->NumChildren() ? iTree + splitOrder : parent->NumChildren() - 1); firstSibling = (lastSibling > splitOrder ? lastSibling - splitOrder : 0); assert(lastSibling - firstSibling <= splitOrder); assert(firstSibling >= 0); assert(lastSibling < parent->NumChildren()); // Redistribute children among (splitOrder + 1) cooperating siblings evenly. 
RedistributeNodesEvenly(parent, firstSibling, lastSibling); if (parent->NumChildren() == parent->MaxNumChildren() + 1) SplitNonLeafNode(parent, relevels); return false; } template template bool HilbertRTreeSplit::FindCooperatingSiblings( TreeType* parent, const size_t iTree, size_t& firstSibling, size_t& lastSibling) { size_t start = (iTree > splitOrder - 1 ? iTree - splitOrder + 1 : 0); size_t end = (iTree + splitOrder <= parent->NumChildren() ? iTree + splitOrder : parent->NumChildren()); size_t iUnderfullSibling; // Try to find empty space among cooperating siblings. if (parent->Child(iTree).NumChildren() != 0) { for (iUnderfullSibling = start; iUnderfullSibling < end; iUnderfullSibling++) if (parent->Child(iUnderfullSibling).NumChildren() < parent->Child(iUnderfullSibling).MaxNumChildren() - 1) break; } else { for (iUnderfullSibling = start; iUnderfullSibling < end; iUnderfullSibling++) if (parent->Child(iUnderfullSibling).NumPoints() < parent->Child(iUnderfullSibling).MaxLeafSize() - 1) break; } if (iUnderfullSibling == end) // All nodes are full. return false; if (iUnderfullSibling > iTree) { lastSibling = (iTree + splitOrder - 1 < parent->NumChildren() ? iTree + splitOrder - 1 : parent->NumChildren() - 1); firstSibling = (lastSibling > splitOrder - 1 ? lastSibling - splitOrder + 1 : 0); } else { lastSibling = (iUnderfullSibling + splitOrder - 1 < parent->NumChildren() ? iUnderfullSibling + splitOrder - 1 : parent->NumChildren() - 1); firstSibling = (lastSibling > splitOrder - 1 ? lastSibling - splitOrder + 1 : 0); } assert(lastSibling - firstSibling <= splitOrder - 1); assert(firstSibling >= 0); assert(lastSibling < parent->NumChildren()); return true; } template template void HilbertRTreeSplit:: RedistributeNodesEvenly(const TreeType *parent, size_t firstSibling, size_t lastSibling) { size_t numChildren = 0; size_t numChildrenPerNode, numRestChildren; for (size_t i = firstSibling; i <= lastSibling; i++) numChildren += parent->Child(i).NumChildren(); numChildrenPerNode = numChildren / (lastSibling - firstSibling + 1); numRestChildren = numChildren % (lastSibling - firstSibling + 1); std::vector children(numChildren); // Copy children's children in order to redistribute them. size_t iChild = 0; for (size_t i = firstSibling; i <= lastSibling; i++) { for (size_t j = 0; j < parent->Child(i).NumChildren(); j++) { children[iChild] = parent->Child(i).children[j]; iChild++; } } iChild = 0; for (size_t i = firstSibling; i <= lastSibling; i++) { // Since we redistribute children of a sibling we should recalculate the // bound. parent->Child(i).Bound().Clear(); parent->Child(i).numDescendants = 0; for (size_t j = 0; j < numChildrenPerNode; j++) { parent->Child(i).Bound() |= children[iChild]->Bound(); parent->Child(i).numDescendants += children[iChild]->numDescendants; parent->Child(i).children[j] = children[iChild]; children[iChild]->Parent() = parent->children[i]; iChild++; } if (numRestChildren > 0) { parent->Child(i).Bound() |= children[iChild]->Bound(); parent->Child(i).numDescendants += children[iChild]->numDescendants; parent->Child(i).children[numChildrenPerNode] = children[iChild]; children[iChild]->Parent() = parent->children[i]; parent->Child(i).NumChildren() = numChildrenPerNode + 1; numRestChildren--; iChild++; } else { parent->Child(i).NumChildren() = numChildrenPerNode; } assert(parent->Child(i).NumChildren() <= parent->Child(i).MaxNumChildren()); // Fix the largest Hilbert value of the sibling. 
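    // (Each sibling caches the largest Hilbert value found in its subtree;
    // after children have been moved around, that cached value must be
    // recomputed, or descent by Hilbert value would choose the wrong branch.)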
parent->Child(i).AuxiliaryInfo().HilbertValue().UpdateLargestValue( parent->children[i]); } } template template void HilbertRTreeSplit:: RedistributePointsEvenly(TreeType* parent, const size_t firstSibling, const size_t lastSibling) { size_t numPoints = 0; size_t numPointsPerNode, numRestPoints; for (size_t i = firstSibling; i <= lastSibling; i++) numPoints += parent->Child(i).NumPoints(); numPointsPerNode = numPoints / (lastSibling - firstSibling + 1); numRestPoints = numPoints % (lastSibling - firstSibling + 1); std::vector points(numPoints); // Copy children's points in order to redistribute them. size_t iPoint = 0; for (size_t i = firstSibling; i <= lastSibling; i++) { for (size_t j = 0; j < parent->Child(i).NumPoints(); j++) points[iPoint++] = parent->Child(i).Point(j); } iPoint = 0; for (size_t i = firstSibling; i <= lastSibling; i++) { // Since we redistribute points of a sibling we should recalculate the // bound. parent->Child(i).Bound().Clear(); size_t j; for (j = 0; j < numPointsPerNode; j++) { parent->Child(i).Bound() |= parent->Dataset().col(points[iPoint]); parent->Child(i).Point(j) = points[iPoint]; iPoint++; } if (numRestPoints > 0) { parent->Child(i).Bound() |= parent->Dataset().col(points[iPoint]); parent->Child(i).Point(j) = points[iPoint]; parent->Child(i).Count() = numPointsPerNode + 1; numRestPoints--; iPoint++; } else { parent->Child(i).Count() = numPointsPerNode; } parent->Child(i).numDescendants = parent->Child(i).Count(); assert(parent->Child(i).NumPoints() <= parent->Child(i).MaxLeafSize()); } // Fix the largest Hilbert values of the siblings. parent->AuxiliaryInfo().HilbertValue().RedistributeHilbertValues(parent, firstSibling, lastSibling); TreeType* root = parent; while (root != NULL) { root->AuxiliaryInfo().HilbertValue().UpdateLargestValue(root); root = root->Parent(); } } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_HILBERT_R_TREE_SPLIT_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/minimal_coverage_sweep.hpp000066400000000000000000000067101315013601400271120ustar00rootroot00000000000000/** * @file minimal_coverage_sweep.hpp * @author Mikhail Lozhnikov * * Definition of the MinimalCoverageSweep class, a class that finds a partition * of a node along an axis. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_COVERAGE_SWEEP_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_COVERAGE_SWEEP_HPP namespace mlpack { namespace tree { /** * The MinimalCoverageSweep class finds a partition along which we * can split a node according to the coverage of two resulting nodes. The class * finds a partition along a given axis. Moreover, the class evaluates the cost * of each split. The cost is proportional to the total coverage of resulting * nodes. If the resulting nodes are overflowed the maximum cost is returned. * * @tparam SplitPolicy The class that provides rules for inserting children of * a node that is being split into two new subtrees. */ template class MinimalCoverageSweep { public: //! A struct that provides the type of the sweep cost. template struct SweepCost { typedef typename TreeType::ElemType type; }; /** * Find a suitable partition of a non-leaf node along the provided axis. * The method returns the cost of the split. 
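   * The cost is the sum of the volumes of the two resulting bounds, so a
   * smaller value means the split leaves less dead space.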
   *
   * @param axis The axis along which we are finding a partition.
   * @param node The node that is being split.
   * @param axisCut The coordinate at which the node may be split.
   */
  template<typename TreeType>
  static typename TreeType::ElemType SweepNonLeafNode(
      const size_t axis,
      const TreeType* node,
      typename TreeType::ElemType& axisCut);

  /**
   * Find a suitable partition of a leaf node along the provided axis.
   * The method returns the cost of the split.
   *
   * @param axis The axis along which we are finding a partition.
   * @param node The node that is being split.
   * @param axisCut The coordinate at which the node may be split.
   */
  template<typename TreeType>
  static typename TreeType::ElemType SweepLeafNode(
      const size_t axis,
      const TreeType* node,
      typename TreeType::ElemType& axisCut);

  /**
   * Check if an intermediate node can be split along the axis at the provided
   * coordinate.
   *
   * @param node The node that is being split.
   * @param cutAxis The axis that we want to check.
   * @param cut The coordinate that we want to check.
   */
  template<typename TreeType, typename ElemType>
  static bool CheckNonLeafSweep(const TreeType* node,
                                const size_t cutAxis,
                                const ElemType cut);

  /**
   * Check if a leaf node can be split along the axis at the provided
   * coordinate.
   *
   * @param node The node that is being split.
   * @param cutAxis The axis that we want to check.
   * @param cut The coordinate that we want to check.
   */
  template<typename TreeType, typename ElemType>
  static bool CheckLeafSweep(const TreeType* node,
                             const size_t cutAxis,
                             const ElemType cut);
};

} // namespace tree
} // namespace mlpack

// Include implementation
#include "minimal_coverage_sweep_impl.hpp"

#endif // MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_COVERAGE_SWEEP_HPP
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/minimal_coverage_sweep_impl.hpp000066400000000000000000000136701315013601400301360ustar00rootroot00000000000000/**
 * @file minimal_coverage_sweep_impl.hpp
 * @author Mikhail Lozhnikov
 *
 * Implementation of the MinimalCoverageSweep class, a class that finds a
 * partition of a node along an axis.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_COVERAGE_SWEEP_IMPL_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_COVERAGE_SWEEP_IMPL_HPP

#include "minimal_coverage_sweep.hpp"

namespace mlpack {
namespace tree {

template<typename SplitPolicy>
template<typename TreeType>
typename TreeType::ElemType MinimalCoverageSweep<SplitPolicy>::
SweepNonLeafNode(const size_t axis,
                 const TreeType* node,
                 typename TreeType::ElemType& axisCut)
{
  typedef typename TreeType::ElemType ElemType;
  typedef bound::HRectBound<metric::EuclideanDistance, ElemType> BoundType;

  std::vector<std::pair<ElemType, size_t>> sorted(node->NumChildren());

  for (size_t i = 0; i < node->NumChildren(); i++)
  {
    sorted[i].first = SplitPolicy::Bound(node->Child(i))[axis].Hi();
    sorted[i].second = i;
  }

  // Sort high bounds of children.
  std::sort(sorted.begin(), sorted.end(),
      [] (const std::pair<ElemType, size_t>& s1,
          const std::pair<ElemType, size_t>& s2)
      { return s1.first < s2.first; });

  size_t splitPointer = node->NumChildren() / 2;

  axisCut = sorted[splitPointer - 1].first;

  // Check if the midpoint split is suitable.
  if (!CheckNonLeafSweep(node, axis, axisCut))
  {
    // Find any suitable partition if the default partition is not acceptable.
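    // (We walk the sorted high bounds from the smallest cut upwards and take
    // the first cut that CheckNonLeafSweep() accepts; if none is accepted,
    // the sweep reports an infinite cost below so another axis can be tried.)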
    for (splitPointer = 1; splitPointer < sorted.size(); splitPointer++)
    {
      axisCut = sorted[splitPointer - 1].first;

      if (CheckNonLeafSweep(node, axis, axisCut))
        break;
    }

    if (splitPointer == node->NumChildren())
      return std::numeric_limits<ElemType>::max();
  }

  BoundType bound1(node->Bound().Dim());
  BoundType bound2(node->Bound().Dim());

  // Find bounds of two resulting nodes.
  for (size_t i = 0; i < splitPointer; i++)
    bound1 |= node->Child(sorted[i].second).Bound();

  for (size_t i = splitPointer; i < node->NumChildren(); i++)
    bound2 |= node->Child(sorted[i].second).Bound();

  // Evaluate the cost of the split, i.e., calculate the total coverage
  // of the two resulting nodes.
  ElemType area1 = bound1.Volume();
  ElemType area2 = bound2.Volume();

  return area1 + area2;
}

template<typename SplitPolicy>
template<typename TreeType>
typename TreeType::ElemType MinimalCoverageSweep<SplitPolicy>::
SweepLeafNode(const size_t axis,
              const TreeType* node,
              typename TreeType::ElemType& axisCut)
{
  typedef typename TreeType::ElemType ElemType;
  typedef bound::HRectBound<metric::EuclideanDistance, ElemType> BoundType;

  std::vector<std::pair<ElemType, size_t>> sorted(node->Count());

  for (size_t i = 0; i < node->NumPoints(); i++)
  {
    sorted[i].first = node->Dataset().col(node->Point(i))[axis];
    sorted[i].second = i;
  }

  // Sort the points along the given axis.
  std::sort(sorted.begin(), sorted.end(),
      [] (const std::pair<ElemType, size_t>& s1,
          const std::pair<ElemType, size_t>& s2)
      { return s1.first < s2.first; });

  size_t splitPointer = node->Count() / 2;

  axisCut = sorted[splitPointer - 1].first;

  // Check if the partition is suitable.
  if (!CheckLeafSweep(node, axis, axisCut))
    return std::numeric_limits<ElemType>::max();

  BoundType bound1(node->Bound().Dim());
  BoundType bound2(node->Bound().Dim());

  // Find bounds of two resulting nodes.
  for (size_t i = 0; i < splitPointer; i++)
    bound1 |= node->Dataset().col(node->Point(sorted[i].second));

  for (size_t i = splitPointer; i < node->Count(); i++)
    bound2 |= node->Dataset().col(node->Point(sorted[i].second));

  // Evaluate the cost of the split, i.e., calculate the total coverage
  // of the two resulting nodes.
  return bound1.Volume() + bound2.Volume();
}

template<typename SplitPolicy>
template<typename TreeType, typename ElemType>
bool MinimalCoverageSweep<SplitPolicy>::
CheckNonLeafSweep(const TreeType* node,
                  const size_t cutAxis,
                  const ElemType cut)
{
  size_t numTreeOneChildren = 0;
  size_t numTreeTwoChildren = 0;

  // Calculate the number of children in the resulting nodes.
  for (size_t i = 0; i < node->NumChildren(); i++)
  {
    const TreeType& child = node->Child(i);
    int policy = SplitPolicy::GetSplitPolicy(child, cutAxis, cut);

    if (policy == SplitPolicy::AssignToFirstTree)
      numTreeOneChildren++;
    else if (policy == SplitPolicy::AssignToSecondTree)
      numTreeTwoChildren++;
    else
    {
      // The split is required.
      numTreeOneChildren++;
      numTreeTwoChildren++;
    }
  }

  if (numTreeOneChildren <= node->MaxNumChildren() && numTreeOneChildren > 0 &&
      numTreeTwoChildren <= node->MaxNumChildren() && numTreeTwoChildren > 0)
    return true;
  return false;
}

template<typename SplitPolicy>
template<typename TreeType, typename ElemType>
bool MinimalCoverageSweep<SplitPolicy>::
CheckLeafSweep(const TreeType* node,
               const size_t cutAxis,
               const ElemType cut)
{
  size_t numTreeOnePoints = 0;
  size_t numTreeTwoPoints = 0;

  // Calculate the number of points in the resulting nodes.
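  // (A cut is acceptable only if both halves are non-empty and neither half
  // exceeds MaxLeafSize(); the two counters below feed exactly that test.)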
for (size_t i = 0; i < node->NumPoints(); i++) { if (node->Dataset().col(node->Point(i))[cutAxis] <= cut) numTreeOnePoints++; else numTreeTwoPoints++; } if (numTreeOnePoints <= node->MaxLeafSize() && numTreeOnePoints > 0 && numTreeTwoPoints <= node->MaxLeafSize() && numTreeTwoPoints > 0) return true; return false; } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_COVERAGE_SWEEP_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/minimal_splits_number_sweep.hpp000066400000000000000000000050561315013601400302070ustar00rootroot00000000000000/** * @file minimal_splits_number_sweep.hpp * @author Mikhail Lozhnikov * * Definition of the MinimalSplitsNumberSweep class, a class that finds a * partition of a node along an axis. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_SPLITS_NUMBER_SWEEP_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_SPLITS_NUMBER_SWEEP_HPP namespace mlpack { namespace tree { /** * The MinimalSplitsNumberSweep class finds a partition along which we * can split a node according to the number of required splits of the node. * The class finds a partition along a given axis. Moreover, the class evaluates * the cost of each split. The cost is proportional to the number of required * splits and the difference of sizes of resulting nodes. If the resulting nodes * are overflowed the maximum cost is returned. * * @tparam SplitPolicy The class that provides rules for inserting children of * a node that is being split into two new subtrees. */ template class MinimalSplitsNumberSweep { public: //! A struct that provides the type of the sweep cost. template struct SweepCost { typedef size_t type; }; /** * Find a suitable partition of a non-leaf node along the provided axis. * The method returns the cost of the split. * * @param axis The axis along which we are finding a partition. * @param node The node that is being split. * @param axisCut The coordinate at which the node may be split. */ template static size_t SweepNonLeafNode( const size_t axis, const TreeType* node, typename TreeType::ElemType& axisCut); /** * Find a suitable partition of a leaf node along the provided axis. * The method returns the cost of the split. * * @param axis The axis along which we are finding a partition. * @param node The node that is being split. * @param axisCut The coordinate at which the node may be split. */ template static size_t SweepLeafNode( const size_t axis, const TreeType* node, typename TreeType::ElemType& axisCut); }; } // namespace tree } // namespace mlpack // Include implementation #include "minimal_splits_number_sweep_impl.hpp" #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_SPLITS_NUMBER_SWEEP_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/minimal_splits_number_sweep_impl.hpp000066400000000000000000000063601315013601400312270ustar00rootroot00000000000000/** * @file minimal_splits_number_sweep_impl.hpp * @author Mikhail Lozhnikov * * Implementation of the MinimalSplitsNumberSweep class, a class that finds a * partition of a node along an axis. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. 
If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_SPLITS_NUMBER_SWEEP_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_SPLITS_NUMBER_SWEEP_IMPL_HPP #include "minimal_splits_number_sweep.hpp" namespace mlpack { namespace tree { template template size_t MinimalSplitsNumberSweep::SweepNonLeafNode( const size_t axis, const TreeType* node, typename TreeType::ElemType& axisCut) { typedef typename TreeType::ElemType ElemType; std::vector> sorted(node->NumChildren()); for (size_t i = 0; i < node->NumChildren(); i++) { sorted[i].first = SplitPolicy::Bound(node->Child(i))[axis].Hi(); sorted[i].second = i; } // Sort candidates in order to check balancing. std::sort(sorted.begin(), sorted.end(), [] (const std::pair& s1, const std::pair& s2) { return s1.first < s2.first; }); size_t minCost = SIZE_MAX; // Find a split with the minimal cost. for (size_t i = 0; i < sorted.size(); i++) { size_t numTreeOneChildren = 0; size_t numTreeTwoChildren = 0; size_t numSplits = 0; // Calculate the number of splits. for (size_t j = 0; j < node->NumChildren(); j++) { const TreeType& child = node->Child(j); int policy = SplitPolicy::GetSplitPolicy(child, axis, sorted[i].first); if (policy == SplitPolicy::AssignToFirstTree) numTreeOneChildren++; else if (policy == SplitPolicy::AssignToSecondTree) numTreeTwoChildren++; else { numTreeOneChildren++; numTreeTwoChildren++; numSplits++; } } // Check if the split is possible. if (numTreeOneChildren <= node->MaxNumChildren() && numTreeOneChildren > 0 && numTreeTwoChildren <= node->MaxNumChildren() && numTreeTwoChildren > 0) { // Evaluate the cost using the number of splits and balancing. size_t balance; if (sorted.size() / 2 > i ) balance = sorted.size() / 2 - i; else balance = i - sorted.size() / 2; size_t cost = numSplits * balance; if (cost < minCost) { minCost = cost; axisCut = sorted[i].first; } } } return minCost; } template template size_t MinimalSplitsNumberSweep::SweepLeafNode( const size_t axis, const TreeType* node, typename TreeType::ElemType& axisCut) { // Split along the median. axisCut = (node->Bound()[axis].Lo() + node->Bound()[axis].Hi()) * 0.5; if (node->Bound()[axis].Lo() == axisCut) return SIZE_MAX; return 0; } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_MINIMAL_SPLITS_NUMBER_SWEEP_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/no_auxiliary_information.hpp000066400000000000000000000126411315013601400275160ustar00rootroot00000000000000/** * @file no_auxiliary_information.hpp * @author Mikhail Lozhnikov * * Definition of the NoAuxiliaryInformation class, a class that provides * no additional information about the nodes. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_NO_AUXILIARY_INFORMATION_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_NO_AUXILIARY_INFORMATION_HPP namespace mlpack { namespace tree { template class NoAuxiliaryInformation { public: //! Construct the auxiliary information object. NoAuxiliaryInformation() { }; //! Construct the auxiliary information object. NoAuxiliaryInformation(const TreeType* /* node */) { }; //! Construct the auxiliary information object. 
NoAuxiliaryInformation(const NoAuxiliaryInformation& /* other */, TreeType* /* tree */, bool /* deepCopy */ = true) { }; //! Construct the auxiliary information object. NoAuxiliaryInformation(NoAuxiliaryInformation&& /* other */) { }; //! Copy the auxiliary information object. NoAuxiliaryInformation& operator=(const NoAuxiliaryInformation& /* other */) { return *this; } /** * Some tree types require to save some properties at the insertion process. * This method allows the auxiliary information the option of manipulating * the tree in order to perform the insertion process. If the auxiliary * information does that, then the method should return true; if the method * returns false the RectangleTree performs its default behavior. * * @param node The node in which the point is being inserted. * @param point The global number of the point being inserted. */ bool HandlePointInsertion(TreeType* /* node */, const size_t /* point */) { return false; } /** * Some tree types require to save some properties at the insertion process. * This method allows the auxiliary information the option of manipulating * the tree in order to perform the insertion process. If the auxiliary * information does that, then the method should return true; if the method * returns false the RectangleTree performs its default behavior. * * @param node The node in which the nodeToInsert is being inserted. * @param nodeToInsert The node being inserted. * @param insertionLevel The level of the tree at which the nodeToInsert * should be inserted. */ bool HandleNodeInsertion(TreeType* /* node */, TreeType* /* nodeToInsert */, bool /* insertionLevel */) { return false; } /** * Some tree types require to save some properties at the deletion process. * This method allows the auxiliary information the option of manipulating * the tree in order to perform the deletion process. If the auxiliary * information does that, then the method should return true; if the method * returns false the RectangleTree performs its default behavior. * * @param node The node from which the point is being deleted. * @param localIndex The local index of the point being deleted. */ bool HandlePointDeletion(TreeType* /* node */, const size_t /* localIndex */) { return false; } /** * Some tree types require to save some properties at the deletion process. * This method allows the auxiliary information the option of manipulating * the tree in order to perform the deletion process. If the auxiliary * information does that, then the method should return true; if the method * returns false the RectangleTree performs its default behavior. * * @param node The node from which the node is being deleted. * @param nodeIndex The local index of the node being deleted. */ bool HandleNodeRemoval(TreeType* /* node */, const size_t /* nodeIndex */) { return false; } /** * Some tree types require to propagate the information upward. * This method should return false if this is not the case. If true is * returned, the update will be propagated upward. * * @param node The node in which the auxiliary information being update. */ bool UpdateAuxiliaryInfo(TreeType* /* node */) { return false; } /** * The R++ tree requires to split the maximum bounding rectangle of a node * that is being split. This method is intended for that. This method is only * necessary for an AuxiliaryInformationType that is being used in conjunction * with RPlusTreeSplit. * * @param treeOne The first subtree. * @param treeTwo The second subtree. * @param axis The axis along which the split is performed. 
* @param cut The coordinate at which the node is split. */ void SplitAuxiliaryInfo(TreeType* /* treeOne */, TreeType* /* treeTwo */, size_t /* axis */, typename TreeType::ElemType /* cut */) { } /** * Nullify the auxiliary information in order to prevent an invalid free. */ void NullifyData() { } /** * Serialize the information. */ template void Serialize(Archive &, const unsigned int /* version */) { }; }; } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_NO_AUXILIARY_INFORMATION_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_plus_tree_auxiliary_information.hpp000066400000000000000000000143131315013601400324660ustar00rootroot00000000000000/** * @file r_plus_plus_tree_auxiliary_information.hpp * @author Mikhail Lozhnikov * * Definition of the RPlusPlusTreeAuxiliaryInformation class, * a class that provides some r++-tree specific information * about the nodes. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_AUXILIARY_INFORMATION_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_AUXILIARY_INFORMATION_HPP #include #include "../hrectbound.hpp" namespace mlpack { namespace tree { template class RPlusPlusTreeAuxiliaryInformation { public: //! The element type held by the tree. typedef typename TreeType::ElemType ElemType; //! The bound type held by the auxiliary information. typedef bound::HRectBound BoundType; //! Construct the auxiliary information object. RPlusPlusTreeAuxiliaryInformation(); /** * Construct this as an auxiliary information for the given node. * * @param node The node that stores this auxiliary information. */ RPlusPlusTreeAuxiliaryInformation(const TreeType* /* node */); /** * Create an auxiliary information object by copying from another object. * * @param other Another auxiliary information object from which the * information will be copied. * @param tree The node that holds the auxiliary information. * @param deepCopy If false, the new object uses the same memory * (not used here). */ RPlusPlusTreeAuxiliaryInformation( const RPlusPlusTreeAuxiliaryInformation& other, TreeType* tree, bool /* deepCopy */ = true); /** * Create an auxiliary information object by moving from another node. * * @param other The auxiliary information object from which the information * will be moved. */ RPlusPlusTreeAuxiliaryInformation(RPlusPlusTreeAuxiliaryInformation&& other); /** * Some tree types require to save some properties at the insertion process. * This method allows the auxiliary information the option of manipulating * the tree in order to perform the insertion process. If the auxiliary * information does that, then the method should return true; if the method * returns false the RectangleTree performs its default behavior. * * @param node The node in which the point is being inserted. * @param point The global number of the point being inserted. */ bool HandlePointInsertion(TreeType* /* node */, const size_t /* point */); /** * Some tree types require to save some properties at the insertion process. * This method allows the auxiliary information the option of manipulating * the tree in order to perform the insertion process. 
If the auxiliary * information does that, then the method should return true; if the method * returns false the RectangleTree performs its default behavior. * * @param node The node in which the nodeToInsert is being inserted. * @param nodeToInsert The node being inserted. * @param insertionLevel The level of the tree at which the nodeToInsert * should be inserted. */ bool HandleNodeInsertion(TreeType* /* node */, TreeType* /* nodeToInsert */, bool /* insertionLevel */); /** * Some tree types require to save some properties at the deletion process. * This method allows the auxiliary information the option of manipulating * the tree in order to perform the deletion process. If the auxiliary * information does that, then the method should return true; if the method * returns false the RectangleTree performs its default behavior. * * @param node The node from which the point is being deleted. * @param localIndex The local index of the point being deleted. */ bool HandlePointDeletion(TreeType* /* node */, const size_t /* localIndex */); /** * Some tree types require to save some properties at the deletion process. * This method allows the auxiliary information the option of manipulating * the tree in order to perform the deletion process. If the auxiliary * information does that, then the method should return true; if the method * returns false the RectangleTree performs its default behavior. * * @param node The node from which the node is being deleted. * @param nodeIndex The local index of the node being deleted. */ bool HandleNodeRemoval(TreeType* /* node */, const size_t /* nodeIndex */); /** * Some tree types require to propagate the information upward. * This method should return false if this is not the case. If true is * returned, the update will be propagated upward. * * @param node The node in which the auxiliary information being update. */ bool UpdateAuxiliaryInfo(TreeType* /* node */); /** * The R++ tree requires to split the maximum bounding rectangle of a node * that is being split. This method is intended for that. * * @param treeOne The first subtree. * @param treeTwo The second subtree. * @param axis The axis along which the split is performed. * @param cut The coordinate at which the node is split. */ void SplitAuxiliaryInfo(TreeType* treeOne, TreeType* treeTwo, const size_t axis, const ElemType cut); /** * Nullify the auxiliary information in order to prevent an invalid free. */ void NullifyData(); //! Return the maximum bounding rectangle. BoundType& OuterBound() { return outerBound; } //! Modify the maximum bounding rectangle. const BoundType& OuterBound() const { return outerBound; } private: //! The maximum bounding rectangle. BoundType outerBound; public: /** * Serialize the information. */ template void Serialize(Archive &, const unsigned int /* version */); }; } // namespace tree } // namespace mlpack #include "r_plus_plus_tree_auxiliary_information_impl.hpp" #endif//MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_AUXILIARY_INFORMATION_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_plus_tree_auxiliary_information_impl.hpp000066400000000000000000000075641315013601400335210ustar00rootroot00000000000000/** * @file r_plus_plus_tree_auxiliary_information.hpp * @author Mikhail Lozhnikov * * Implementation of the RPlusPlusTreeAuxiliaryInformation class, * a class that provides some r++-tree specific information * about the nodes. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_AUXILIARY_INFORMATION_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_AUXILIARY_INFORMATION_IMPL_HPP #include "r_plus_plus_tree_auxiliary_information.hpp" namespace mlpack { namespace tree { template RPlusPlusTreeAuxiliaryInformation:: RPlusPlusTreeAuxiliaryInformation() : outerBound(0) { } template RPlusPlusTreeAuxiliaryInformation:: RPlusPlusTreeAuxiliaryInformation(const TreeType* tree) : outerBound(tree->Parent() ? tree->Parent()->AuxiliaryInfo().OuterBound() : tree->Bound().Dim()) { // Initialize the maximum bounding rectangle if the node is the root if (!tree->Parent()) for (size_t k = 0; k < outerBound.Dim(); k++) { outerBound[k].Lo() = std::numeric_limits::lowest(); outerBound[k].Hi() = std::numeric_limits::max(); } } template RPlusPlusTreeAuxiliaryInformation:: RPlusPlusTreeAuxiliaryInformation( const RPlusPlusTreeAuxiliaryInformation& other, TreeType* /* tree */, bool /* deepCopy */) : outerBound(other.OuterBound()) { } template RPlusPlusTreeAuxiliaryInformation:: RPlusPlusTreeAuxiliaryInformation(RPlusPlusTreeAuxiliaryInformation&& other) : outerBound(std::move(other.outerBound)) { } template bool RPlusPlusTreeAuxiliaryInformation::HandlePointInsertion( TreeType* /* node */, const size_t /* point */) { return false; } template bool RPlusPlusTreeAuxiliaryInformation::HandleNodeInsertion( TreeType* /* node */, TreeType* /* nodeToInsert */, bool /* insertionLevel */) { assert(false); return false; } template bool RPlusPlusTreeAuxiliaryInformation::HandlePointDeletion( TreeType* /* node */, const size_t /* localIndex */) { return false; } template bool RPlusPlusTreeAuxiliaryInformation::HandleNodeRemoval( TreeType* /* node */, const size_t /* nodeIndex */) { return false; } template bool RPlusPlusTreeAuxiliaryInformation::UpdateAuxiliaryInfo( TreeType* /* node */) { return false; } template void RPlusPlusTreeAuxiliaryInformation::SplitAuxiliaryInfo( TreeType* treeOne, TreeType* treeTwo, const size_t axis, const typename TreeType::ElemType cut) { typedef bound::HRectBound Bound; Bound& treeOneBound = treeOne->AuxiliaryInfo().OuterBound(); Bound& treeTwoBound = treeTwo->AuxiliaryInfo().OuterBound(); // Copy the maximum bounding rectangle. treeOneBound = outerBound; treeTwoBound = outerBound; // Set proper limits. treeOneBound[axis].Hi() = cut; treeTwoBound[axis].Lo() = cut; } template void RPlusPlusTreeAuxiliaryInformation::NullifyData() { } /** * Serialize the information. */ template template void RPlusPlusTreeAuxiliaryInformation:: Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(outerBound, "outerBound"); } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_AUXILIARY_INFORMATION_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_plus_tree_descent_heuristic.hpp000066400000000000000000000035021315013601400315540ustar00rootroot00000000000000/** * @file r_plus_plus_tree_descent_heuristic.hpp * @author Mikhail Lozhnikov * * Definition of RPlusPlusTreeDescentHeuristic, a class that chooses the best * child of a node in an R++ tree when inserting a new point. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_DESCENT_HEURISTIC_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_DESCENT_HEURISTIC_HPP #include namespace mlpack { namespace tree { class RPlusPlusTreeDescentHeuristic { public: /** * Evaluate the node using a heuristic. Returns the number of the node * with minimum largest Hilbert value is greater than the Hilbert value of * the point being inserted. * * @param node The node that is being evaluated. * @param point The number of the point that is being inserted. */ template static size_t ChooseDescentNode(TreeType* node, const size_t point); /** * Evaluate the node using a heuristic. Returns the number of the node * with minimum largest Hilbert value is greater than the largest * Hilbert value of the point being inserted. * * @param node The node that is being evaluated. * @param insertedNode The node that is being inserted. */ template static size_t ChooseDescentNode(const TreeType* node, const TreeType* insertedNode); }; } // namespace tree } // namespace mlpack #include "r_plus_plus_tree_descent_heuristic_impl.hpp" #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_DESCENT_HEURISTIC_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_plus_tree_descent_heuristic_impl.hpp000066400000000000000000000031521315013601400325760ustar00rootroot00000000000000/** * @file r_plus_plus_tree_descent_heuristic_impl.hpp * @author Mikhail Lozhnikov * * Implementation of RPlusPlusTreeDescentHeuristic, a class that chooses the * best child of a node in an R++ tree when inserting a new point. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_DESCENT_HEURISTIC_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_DESCENT_HEURISTIC_IMPL_HPP #include "r_plus_plus_tree_descent_heuristic.hpp" #include "../hrectbound.hpp" namespace mlpack { namespace tree { template size_t RPlusPlusTreeDescentHeuristic::ChooseDescentNode( TreeType* node, const size_t point) { // Find the node whose maximum bounding rectangle contains the point. for (size_t bestIndex = 0; bestIndex < node->NumChildren(); bestIndex++) { if (node->Child(bestIndex).AuxiliaryInfo().OuterBound().Contains( node->Dataset().col(point))) return bestIndex; } // We should never reach this point. assert(false); return 0; } template size_t RPlusPlusTreeDescentHeuristic::ChooseDescentNode( const TreeType* /* node */, const TreeType* /* insertedNode */) { // Should never be used. assert(false); return 0; } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_DESCENT_HEURISTIC_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_plus_tree_split_policy.hpp000066400000000000000000000056071315013601400305720ustar00rootroot00000000000000/** * @file r_plus_plus_tree_split_policy.hpp * @author Mikhail Lozhnikov * * Definition and implementation of the RPlusPlusTreeSplitPolicy class, a class * that helps to determine the subtree into which we should insert an * intermediate node. 
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_SPLIT_POLICY_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_SPLIT_POLICY_HPP

namespace mlpack {
namespace tree {

/**
 * The RPlusPlusTreeSplitPolicy helps to determine the subtree into which
 * we should insert a child of an intermediate node that is being split.
 * This class is designed for the R++ tree.
 */
class RPlusPlusTreeSplitPolicy
{
 public:
  //! Indicate that the child should be split.
  static const int SplitRequired = 0;
  //! Indicate that the child should be inserted into the first subtree.
  static const int AssignToFirstTree = 1;
  //! Indicate that the child should be inserted into the second subtree.
  static const int AssignToSecondTree = 2;

  /**
   * This method returns SplitRequired if a child of an intermediate node
   * should be split, AssignToFirstTree if the child should be inserted into
   * the first subtree, or AssignToSecondTree if the child should be inserted
   * into the second subtree. The method makes the decision according to the
   * maximum bounding rectangle of the child, the axis along which the
   * intermediate node is being split, and the coordinate at which the node
   * is being split.
   *
   * @param child A child of the node that is being split.
   * @param axis The axis along which the node is being split.
   * @param cut The coordinate at which the node is being split.
   */
  template<typename TreeType>
  static int GetSplitPolicy(const TreeType& child,
                            const size_t axis,
                            const typename TreeType::ElemType cut)
  {
    if (child.AuxiliaryInfo().OuterBound()[axis].Hi() <= cut)
      return AssignToFirstTree;
    else if (child.AuxiliaryInfo().OuterBound()[axis].Lo() >= cut)
      return AssignToSecondTree;

    return SplitRequired;
  }

  /**
   * Return the maximum bounding rectangle of the node.
   * This method should always return the bound that is used for the
   * decision-making in GetSplitPolicy().
   *
   * @param node The node whose bound is requested.
   */
  template<typename TreeType>
  static const bound::HRectBound<metric::EuclideanDistance,
      typename TreeType::ElemType>& Bound(const TreeType& node)
  {
    return node.AuxiliaryInfo().OuterBound();
  }
};

} // namespace tree
} // namespace mlpack

#endif // MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_PLUS_TREE_SPLIT_POLICY_HPP
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_tree_descent_heuristic.hpp000066400000000000000000000034531315013601400305160ustar00rootroot00000000000000/**
 * @file r_plus_tree_descent_heuristic.hpp
 * @author Mikhail Lozhnikov
 *
 * Definition of RPlusTreeDescentHeuristic, a class that chooses the best child
 * of a node in an R+ tree when inserting a new point.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_DESCENT_HEURISTIC_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_DESCENT_HEURISTIC_HPP

#include

namespace mlpack {
namespace tree {

class RPlusTreeDescentHeuristic
{
 public:
  /**
   * Evaluate the node using a heuristic. Returns the index of the child whose
   * bound contains the point. If no child contains the point, the method
   * tries to enlarge the bound of a child in such a way that the result does
   * not overlap the bounds of the other children.
   *
   * @param node The node that is being evaluated.
   * @param point The number of the point that is being inserted.
   */
  template<typename TreeType>
  static size_t ChooseDescentNode(TreeType* node, const size_t point);

  /**
   * Evaluate the node using a heuristic. This overload is not used by the R+
   * tree, since nodes are never reinserted; it should never be called.
   *
   * @param node The node that is being evaluated.
   * @param insertedNode The node that is being inserted.
   */
  template<typename TreeType>
  static size_t ChooseDescentNode(const TreeType* /* node */,
                                  const TreeType* /* insertedNode */);
};

} // namespace tree
} // namespace mlpack

#include "r_plus_tree_descent_heuristic_impl.hpp"

#endif // MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_DESCENT_HEURISTIC_HPP
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_tree_descent_heuristic_impl.hpp000066400000000000000000000062621315013601400315400ustar00rootroot00000000000000/**
 * @file r_plus_tree_descent_heuristic_impl.hpp
 * @author Mikhail Lozhnikov
 *
 * Implementation of RPlusTreeDescentHeuristic, a class that chooses the best
 * child of a node in an R+ tree when inserting a new point.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_DESCENT_HEURISTIC_IMPL_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_DESCENT_HEURISTIC_IMPL_HPP

#include "r_plus_tree_descent_heuristic.hpp"
#include "../hrectbound.hpp"

namespace mlpack {
namespace tree {

template<typename TreeType>
size_t RPlusTreeDescentHeuristic::ChooseDescentNode(TreeType* node,
                                                    const size_t point)
{
  typedef typename TreeType::ElemType ElemType;
  size_t bestIndex = 0;
  bool success = true;

  // Try to find a child whose bound contains the point.
  for (bestIndex = 0; bestIndex < node->NumChildren(); bestIndex++)
  {
    if (node->Child(bestIndex).Bound().Contains(node->Dataset().col(point)))
      return bestIndex;
  }

  // No child contains the point. Try to enlarge the bound of a child in such
  // a way that the resulting bound does not overlap the other children.
  for (bestIndex = 0; bestIndex < node->NumChildren(); bestIndex++)
  {
    bound::HRectBound<metric::EuclideanDistance, ElemType> bound =
        node->Child(bestIndex).Bound();
    bound |= node->Dataset().col(point);

    success = true;

    for (size_t j = 0; j < node->NumChildren(); j++)
    {
      if (j == bestIndex)
        continue;

      success = false;
      // Two bounds overlap if and only if there is no dimension in which
      // they do not overlap each other.
      for (size_t k = 0; k < node->Bound().Dim(); k++)
      {
        if (bound[k].Lo() >= node->Child(j).Bound()[k].Hi() ||
            node->Child(j).Bound()[k].Lo() >= bound[k].Hi())
        {
          // We found a dimension in which these bounds do not overlap.
          success = true;
          break;
        }
      }
      if (!success) // These two bounds overlap each other.
        break;
    }
    if (success) // The enlarged bound does not overlap the other children.
      break;
  }

  if (!success) // No child can be enlarged without overlapping the others.
  {
    size_t depth = node->TreeDepth();

    // Create a new node into which we will insert the point.
    TreeType* tree = node;
    while (depth > 1)
    {
      TreeType* child = new TreeType(tree);

      tree->children[tree->NumChildren()++] = child;
      tree = child;
      depth--;
    }
    return node->NumChildren() - 1;
  }

  assert(bestIndex < node->NumChildren());

  return bestIndex;
}

template<typename TreeType>
size_t RPlusTreeDescentHeuristic::ChooseDescentNode(
    const TreeType* /* node */,
    const TreeType* /* insertedNode */)
{
  // Should never be used.
  assert(false);

  return 0;
}

} // namespace tree
} // namespace mlpack

#endif // MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_DESCENT_HEURISTIC_IMPL_HPP
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_tree_split.hpp000066400000000000000000000115001315013601400261350ustar00rootroot00000000000000/**
 * @file r_plus_tree_split.hpp
 * @author Mikhail Lozhnikov
 *
 * Definition of the RPlusTreeSplit class, a class that splits the nodes of an
 * R+ (or R++) tree, starting at a leaf node and moving upwards if necessary.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_SPLIT_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_SPLIT_HPP

#include

namespace mlpack {
namespace tree /** Trees and tree-building procedures. */ {

/**
 * The RPlusTreeSplit class performs the split process of a node on overflow.
 *
 * @tparam SplitPolicyType The class that helps to determine the subtree into
 *    which we should insert a child node.
 * @tparam SweepType The class that finds the partition of a node along a
 *    given axis. The partition algorithm tries to find a partition along each
 *    axis, evaluates each partition, and chooses the best one.
 */
template<typename SplitPolicyType,
         template<typename> class SweepType>
class RPlusTreeSplit
{
 public:
  typedef SplitPolicyType SplitPolicy;

  /**
   * Split a leaf node using the "default" algorithm. If necessary, this split
   * will propagate upwards through the tree.
   *
   * @param tree The node that is being split.
   * @param relevels Not used.
   */
  template<typename TreeType>
  static void SplitLeafNode(TreeType* tree, std::vector<bool>& relevels);

  /**
   * Split a non-leaf node using the "default" algorithm. If this is a root
   * node, the tree increases in depth.
   *
   * @param tree The node that is being split.
   * @param relevels Not used.
   */
  template<typename TreeType>
  static bool SplitNonLeafNode(TreeType* tree, std::vector<bool>& relevels);

 private:
  /**
   * Split a leaf node along an axis.
   *
   * @param tree The node that is being split into two new nodes.
   * @param treeOne The first subtree of two resulting subtrees.
   * @param treeTwo The second subtree of two resulting subtrees.
   * @param cutAxis The axis along which the node is being split.
   * @param cut The coordinate at which the node is being split.
   */
  template<typename TreeType>
  static void SplitLeafNodeAlongPartition(
      TreeType* tree,
      TreeType* treeOne,
      TreeType* treeTwo,
      const size_t cutAxis,
      const typename TreeType::ElemType cut);

  /**
   * Split a non-leaf node along an axis. This method propagates the split
   * downward to the leaf level if necessary.
   *
   * @param tree The node that is being split into two new nodes.
   * @param treeOne The first subtree of two resulting subtrees.
   * @param treeTwo The second subtree of two resulting subtrees.
   * @param cutAxis The axis along which the node is being split.
   * @param cut The coordinate at which the node is being split.
   */
  template<typename TreeType>
  static void SplitNonLeafNodeAlongPartition(
      TreeType* tree,
      TreeType* treeOne,
      TreeType* treeTwo,
      const size_t cutAxis,
      const typename TreeType::ElemType cut);

  /**
   * This method is used to make sure that the tree has the same maximum depth
   * in every branch. The method should be invoked if one of the two resulting
   * subtrees is empty after the split process (i.e., the subtree contains no
   * children). The method converts the empty node into an empty subtree
   * (increasing the node in depth).
   *
   * @param tree One of two subtrees that is not empty.
   * @param emptyTree The empty subtree.
   */
  template<typename TreeType>
  static void AddFakeNodes(const TreeType* tree, TreeType* emptyTree);

  /**
   * Partition a node using SweepType. This method invokes
   * SweepType::Sweep(Non)LeafNode() for each dimension and chooses the
   * best one. The method returns false if the node does not need
   * partitioning. Otherwise, the method returns true. If the method fails
   * to find an acceptable partition, minCutAxis will be equal to the number
   * of dimensions.
   *
   * @param node The node that is being split.
   * @param minCutAxis The axis along which the node will be split.
   * @param minCut The coordinate at which the node will be split.
   */
  template<typename TreeType>
  static bool PartitionNode(const TreeType* node,
                            size_t& minCutAxis,
                            typename TreeType::ElemType& minCut);

  /**
   * Insert a node into another node.
   */
  template<typename TreeType>
  static void InsertNodeIntoTree(TreeType* destTree, TreeType* srcNode);
};

} // namespace tree
} // namespace mlpack

// Include implementation
#include "r_plus_tree_split_impl.hpp"

#endif // MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_SPLIT_HPP
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_tree_split_impl.hpp000066400000000000000000000270261315013601400271700ustar00rootroot00000000000000/**
 * @file r_plus_tree_split_impl.hpp
 * @author Mikhail Lozhnikov
 *
 * Implementation of the RPlusTreeSplit class, used to split a RectangleTree.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_SPLIT_IMPL_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_SPLIT_IMPL_HPP

#include "r_plus_tree_split.hpp"
#include "rectangle_tree.hpp"
#include "r_plus_plus_tree_auxiliary_information.hpp"
#include "r_plus_tree_split_policy.hpp"
#include "r_plus_plus_tree_split_policy.hpp"

namespace mlpack {
namespace tree {

template<typename SplitPolicyType,
         template<typename> class SweepType>
template<typename TreeType>
void RPlusTreeSplit<SplitPolicyType, SweepType>::
SplitLeafNode(TreeType* tree, std::vector<bool>& relevels)
{
  typedef typename TreeType::ElemType ElemType;

  if (tree->Count() == 1)
  {
    // Check if an intermediate node was added during the insertion process
    // (i.e., we couldn't enlarge a node of the R+ tree), so one of the
    // intermediate nodes may be overflowed.
    TreeType* node = tree->Parent();

    while (node != NULL)
    {
      if (node->NumChildren() == node->MaxNumChildren() + 1)
      {
        // Split the overflowed node.
        RPlusTreeSplit::SplitNonLeafNode(node, relevels);
        return;
      }
      node = node->Parent();
    }
    return;
  }
  else if (tree->Count() <= tree->MaxLeafSize())
  {
    return;
  }

  // If we are splitting the root node, we will need to do things differently
  // so that the constructor and other methods don't confuse the end user by
  // giving an address of another node.
  if (tree->Parent() == NULL)
  {
    // We actually want to copy this way. Pointers and everything.
    TreeType* copy = new TreeType(*tree, false);
    copy->Parent() = tree;
    tree->Count() = 0;
    tree->NullifyData();
    // Because this was a leaf node, numChildren must be 0.
    tree->children[(tree->NumChildren())++] = copy;
    assert(tree->NumChildren() == 1);

    RPlusTreeSplit::SplitLeafNode(copy, relevels);
    return;
  }

  size_t cutAxis = tree->Bound().Dim();
  ElemType cut = std::numeric_limits<ElemType>::lowest();

  // Try to find a partition of the node.
  if (!PartitionNode(tree, cutAxis, cut))
    return;

  // If we could not find a suitable partition.
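  // (PartitionNode() signals failure by leaving cutAxis equal to the number
  // of dimensions; in that case the node's capacity is simply grown below,
  // because an R+ tree cannot split a node without a non-overlapping cut.)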
  if (cutAxis == tree->Bound().Dim())
  {
    tree->MaxLeafSize()++;
    tree->points.resize(tree->MaxLeafSize() + 1);
    Log::Warn << "Could not find an acceptable partition. "
        "The size of the node will be increased.";
    return;
  }

  TreeType* treeOne = new TreeType(tree->Parent(), tree->MaxNumChildren());
  TreeType* treeTwo = new TreeType(tree->Parent(), tree->MaxNumChildren());
  treeOne->MinLeafSize() = 0;
  treeOne->MinNumChildren() = 0;
  treeTwo->MinLeafSize() = 0;
  treeTwo->MinNumChildren() = 0;

  // Split the node into two new nodes.
  SplitLeafNodeAlongPartition(tree, treeOne, treeTwo, cutAxis, cut);

  TreeType* parent = tree->Parent();
  size_t i = 0;
  while (parent->children[i] != tree)
    i++;

  assert(i < parent->NumChildren());

  // Insert two new nodes to the tree.
  parent->children[i] = treeOne;
  parent->children[parent->NumChildren()++] = treeTwo;

  assert(parent->NumChildren() <= parent->MaxNumChildren() + 1);

  // Propagate the split upward if necessary.
  if (parent->NumChildren() == parent->MaxNumChildren() + 1)
    RPlusTreeSplit::SplitNonLeafNode(parent, relevels);

  tree->SoftDelete();
}

template<typename SplitPolicyType,
         template<typename> class SweepType>
template<typename TreeType>
bool RPlusTreeSplit<SplitPolicyType, SweepType>::
SplitNonLeafNode(TreeType* tree, std::vector<bool>& relevels)
{
  typedef typename TreeType::ElemType ElemType;

  // If we are splitting the root node, we will need to do things differently
  // so that the constructor and other methods don't confuse the end user by
  // giving an address of another node.
  if (tree->Parent() == NULL)
  {
    // We actually want to copy this way. Pointers and everything.
    TreeType* copy = new TreeType(*tree, false);

    copy->Parent() = tree;
    tree->NumChildren() = 0;
    tree->NullifyData();
    tree->children[(tree->NumChildren())++] = copy;

    RPlusTreeSplit::SplitNonLeafNode(copy, relevels);
    return true;
  }

  size_t cutAxis = tree->Bound().Dim();
  ElemType cut = std::numeric_limits<ElemType>::lowest();

  // Try to find a partition of the node.
  if (!PartitionNode(tree, cutAxis, cut))
    return false;

  // If we could not find a suitable partition.
  if (cutAxis == tree->Bound().Dim())
  {
    tree->MaxNumChildren()++;
    tree->children.resize(tree->MaxNumChildren() + 1);
    Log::Warn << "Could not find an acceptable partition. "
        "The size of the node will be increased.";
    return false;
  }

  TreeType* treeOne = new TreeType(tree->Parent(), tree->MaxNumChildren());
  TreeType* treeTwo = new TreeType(tree->Parent(), tree->MaxNumChildren());
  treeOne->MinLeafSize() = 0;
  treeOne->MinNumChildren() = 0;
  treeTwo->MinLeafSize() = 0;
  treeTwo->MinNumChildren() = 0;

  // Split the node into two new nodes.
  SplitNonLeafNodeAlongPartition(tree, treeOne, treeTwo, cutAxis, cut);

  TreeType* parent = tree->Parent();
  size_t i = 0;
  while (parent->children[i] != tree)
    i++;

  assert(i < parent->NumChildren());

  // Insert two new nodes to the tree.
  parent->children[i] = treeOne;
  parent->children[parent->NumChildren()++] = treeTwo;

  tree->SoftDelete();

  assert(parent->NumChildren() <= parent->MaxNumChildren() + 1);

  // Propagate the split upward if necessary.
  if (parent->NumChildren() == parent->MaxNumChildren() + 1)
    RPlusTreeSplit::SplitNonLeafNode(parent, relevels);

  return false;
}

template<typename SplitPolicyType,
         template<typename> class SweepType>
template<typename TreeType>
void RPlusTreeSplit<SplitPolicyType, SweepType>::SplitLeafNodeAlongPartition(
    TreeType* tree,
    TreeType* treeOne,
    TreeType* treeTwo,
    const size_t cutAxis,
    const typename TreeType::ElemType cut)
{
  // Split the auxiliary information.
  tree->AuxiliaryInfo().SplitAuxiliaryInfo(treeOne, treeTwo, cutAxis, cut);

  // Ensure that the capacity of the nodes is sufficient.
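  // (In the worst case every point of the node falls on one side of the cut,
  // so each half is sized to hold all of the points before redistribution.)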
  if (treeOne->MaxLeafSize() < tree->NumPoints())
  {
    treeOne->MaxLeafSize() = tree->NumPoints();
    treeOne->points.resize(treeOne->MaxLeafSize() + 1);
  }

  if (treeTwo->MaxLeafSize() < tree->NumPoints())
  {
    treeTwo->MaxLeafSize() = tree->NumPoints();
    treeTwo->points.resize(treeTwo->MaxLeafSize() + 1);
  }

  // Insert points into the corresponding subtree.
  for (size_t i = 0; i < tree->NumPoints(); i++)
  {
    if (tree->Dataset().col(tree->Point(i))[cutAxis] <= cut)
    {
      treeOne->Point(treeOne->Count()++) = tree->Point(i);
      treeOne->Bound() |= tree->Dataset().col(tree->Point(i));
    }
    else
    {
      treeTwo->Point(treeTwo->Count()++) = tree->Point(i);
      treeTwo->Bound() |= tree->Dataset().col(tree->Point(i));
    }
  }

  // Update the number of descendants.
  treeOne->numDescendants = treeOne->Count();
  treeTwo->numDescendants = treeTwo->Count();

  assert(treeOne->Count() <= treeOne->MaxLeafSize());
  assert(treeTwo->Count() <= treeTwo->MaxLeafSize());
  assert(tree->Count() == treeOne->Count() + treeTwo->Count());
  assert(treeOne->Bound()[cutAxis].Hi() < treeTwo->Bound()[cutAxis].Lo());
}

template<typename SplitPolicyType,
         template<typename> class SweepType>
template<typename TreeType>
void RPlusTreeSplit<SplitPolicyType, SweepType>::
SplitNonLeafNodeAlongPartition(
    TreeType* tree,
    TreeType* treeOne,
    TreeType* treeTwo,
    const size_t cutAxis,
    const typename TreeType::ElemType cut)
{
  // Split the auxiliary information.
  tree->AuxiliaryInfo().SplitAuxiliaryInfo(treeOne, treeTwo, cutAxis, cut);

  // Insert children into the corresponding subtree.
  for (size_t i = 0; i < tree->NumChildren(); i++)
  {
    TreeType* child = tree->children[i];
    int policy = SplitPolicyType::GetSplitPolicy(*child, cutAxis, cut);

    if (policy == SplitPolicyType::AssignToFirstTree)
    {
      InsertNodeIntoTree(treeOne, child);
      child->Parent() = treeOne;
    }
    else if (policy == SplitPolicyType::AssignToSecondTree)
    {
      InsertNodeIntoTree(treeTwo, child);
      child->Parent() = treeTwo;
    }
    else
    {
      // The child should be split (i.e., the partition divides its bound).
      TreeType* childOne = new TreeType(treeOne);
      TreeType* childTwo = new TreeType(treeTwo);
      treeOne->MinLeafSize() = 0;
      treeOne->MinNumChildren() = 0;
      treeTwo->MinLeafSize() = 0;
      treeTwo->MinNumChildren() = 0;

      // Propagate the split downward.
      if (child->IsLeaf())
        SplitLeafNodeAlongPartition(child, childOne, childTwo, cutAxis, cut);
      else
        SplitNonLeafNodeAlongPartition(child, childOne, childTwo, cutAxis,
            cut);

      InsertNodeIntoTree(treeOne, childOne);
      InsertNodeIntoTree(treeTwo, childTwo);

      child->SoftDelete();
    }
  }

  assert(treeOne->NumChildren() + treeTwo->NumChildren() != 0);

  // Add a fake subtree if one of the subtrees is empty.
  if (treeOne->NumChildren() == 0)
    AddFakeNodes(treeTwo, treeOne);
  else if (treeTwo->NumChildren() == 0)
    AddFakeNodes(treeOne, treeTwo);

  assert(treeOne->NumChildren() <= treeOne->MaxNumChildren());
  assert(treeTwo->NumChildren() <= treeTwo->MaxNumChildren());
}

template<typename SplitPolicyType,
         template<typename> class SweepType>
template<typename TreeType>
void RPlusTreeSplit<SplitPolicyType, SweepType>::
AddFakeNodes(const TreeType* tree, TreeType* emptyTree)
{
  size_t numDescendantNodes = tree->TreeDepth() - 1;

  TreeType* node = emptyTree;
  for (size_t i = 0; i < numDescendantNodes; i++)
  {
    TreeType* child = new TreeType(node);
    node->children[node->NumChildren()++] = child;

    node = child;
  }
}

template<typename SplitPolicyType,
         template<typename> class SweepType>
template<typename TreeType>
bool RPlusTreeSplit<SplitPolicyType, SweepType>::
PartitionNode(const TreeType* node,
              size_t& minCutAxis,
              typename TreeType::ElemType& minCut)
{
  if ((node->NumChildren() <= node->MaxNumChildren() && !node->IsLeaf()) ||
      (node->Count() <= node->MaxLeafSize() && node->IsLeaf()))
    return false; // No partition required.

  // Define the type of the sweep cost.
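  // (The cost type comes from the sweep itself because different sweeps
  // measure different things: MinimalCoverageSweep returns a volume of type
  // ElemType, while MinimalSplitsNumberSweep returns a size_t counter.)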
  typedef typename SweepType<SplitPolicyType>::template
      SweepCost<TreeType>::type SweepCostType;

  SweepCostType minCost = std::numeric_limits<SweepCostType>::max();
  minCutAxis = node->Bound().Dim();

  // Find the sweep with a minimal cost.
  for (size_t k = 0; k < node->Bound().Dim(); k++)
  {
    typename TreeType::ElemType cut;
    SweepCostType cost;

    if (node->IsLeaf())
      cost = SweepType<SplitPolicyType>::SweepLeafNode(k, node, cut);
    else
      cost = SweepType<SplitPolicyType>::SweepNonLeafNode(k, node, cut);

    if (cost < minCost)
    {
      minCost = cost;
      minCutAxis = k;
      minCut = cut;
    }
  }
  return true;
}

template<typename SplitPolicyType,
         template<typename> class SweepType>
template<typename TreeType>
void RPlusTreeSplit<SplitPolicyType, SweepType>::
InsertNodeIntoTree(TreeType* destTree, TreeType* srcNode)
{
  destTree->Bound() |= srcNode->Bound();
  destTree->numDescendants += srcNode->numDescendants;
  destTree->children[destTree->NumChildren()++] = srcNode;
}

} // namespace tree
} // namespace mlpack

#endif // MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_SPLIT_IMPL_HPP
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_plus_tree_split_policy.hpp000066400000000000000000000054521315013601400275250ustar00rootroot00000000000000/**
 * @file r_plus_tree_split_policy.hpp
 * @author Mikhail Lozhnikov
 *
 * Definition and implementation of the RPlusTreeSplitPolicy class, a class
 * that helps to determine the subtree into which we should insert an
 * intermediate node.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_SPLIT_POLICY_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_SPLIT_POLICY_HPP

namespace mlpack {
namespace tree {

/**
 * The RPlusTreeSplitPolicy helps to determine the subtree into which
 * we should insert a child of an intermediate node that is being split.
 * This class is designed for the R+ tree.
 */
class RPlusTreeSplitPolicy
{
 public:
  //! Indicate that the child should be split.
  static const int SplitRequired = 0;
  //! Indicate that the child should be inserted to the first subtree.
  static const int AssignToFirstTree = 1;
  //! Indicate that the child should be inserted to the second subtree.
  static const int AssignToSecondTree = 2;

  /**
   * This method returns SplitRequired if a child of an intermediate node
   * should be split, AssignToFirstTree if the child should be inserted to
   * the first subtree, or AssignToSecondTree if the child should be inserted
   * to the second subtree.  The method makes the decision according to the
   * minimum bounding rectangle of the child, the axis along which the
   * intermediate node is being split, and the coordinate at which the node
   * is being split.
   *
   * @param child A child of the node that is being split.
   * @param axis The axis along which the node is being split.
   * @param cut The coordinate at which the node is being split.
   */
  template<typename TreeType>
  static int GetSplitPolicy(const TreeType& child,
                            const size_t axis,
                            const typename TreeType::ElemType cut)
  {
    if (child.Bound()[axis].Hi() <= cut)
      return AssignToFirstTree;
    else if (child.Bound()[axis].Lo() >= cut)
      return AssignToSecondTree;

    return SplitRequired;
  }

  /**
   * Return the minimum bounding rectangle of the node.
   * This method should always return the bound that is used for the
   * decision-making in GetSplitPolicy().
   *
   * @param node The node whose bound is requested.
*/ template static const bound::HRectBound& Bound(const TreeType& node) { return node.Bound(); } }; } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_RECTANGLE_TREE_R_PLUS_TREE_SPLIT_POLICY_HPP mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_star_tree_descent_heuristic.hpp000066400000000000000000000034041315013601400305000ustar00rootroot00000000000000/** * @file r_star_tree_descent_heuristic.hpp * @author Andrew Wells * * Definition of RStarTreeDescentHeuristic, a class that chooses the best child * of a node in an R tree when inserting a new point. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_STAR_TREE_DESCENT_HEURISTIC_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_R_STAR_TREE_DESCENT_HEURISTIC_HPP #include namespace mlpack { namespace tree { /** * When descending a RectangleTree to insert a point, we need to have a way to * choose a child node when the point isn't enclosed by any of them. This * heuristic is used to do so using the rules for the R* tree. */ class RStarTreeDescentHeuristic { public: /** * Evaluate the node using a heuristic. The heuristic guarantees two things: * * 1. If point is contained in (or on) bound, the value returned is zero. * 2. If the point is not contained in (or on) bound, the value returned is * greater than zero. * * @param bound The bound used for the node that is being evaluated. * @param point The index of the point that is being inserted. */ template static size_t ChooseDescentNode(const TreeType* node, const size_t point); template static size_t ChooseDescentNode(const TreeType* node, const TreeType* insertedNode); }; } // namespace tree } // namespace mlpack // Include implementation. #include "r_star_tree_descent_heuristic_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_star_tree_descent_heuristic_impl.hpp000066400000000000000000000152471315013601400315310ustar00rootroot00000000000000/** * @file r_star_tree_descent_heuristic_impl.hpp * @author Andrew Wells * * Implementation of RStarTreeDescentHeuristic, a class that chooses the best child of a node in * an R tree when inserting a new point. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_STAR_TREE_DESCENT_HEURISTIC_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_R_STAR_TREE_DESCENT_HEURISTIC_IMPL_HPP #include "r_star_tree_descent_heuristic.hpp" namespace mlpack { namespace tree { template inline size_t RStarTreeDescentHeuristic::ChooseDescentNode( const TreeType* node, const size_t point) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; bool tiedOne = false; std::vector originalScores(node->NumChildren()); ElemType origMinScore = std::numeric_limits::max(); if (node->Child(0).IsLeaf()) { // If its children are leaf nodes, use minimum overlap to choose. 
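    // (A 1-D sketch with illustrative numbers: suppose child i has bound
    // [0, 2] and child j has bound [1, 3], so their current overlap is 1.
    // Inserting a point at 2.5 into child i would stretch its bound to
    // [0, 2.5] and raise the overlap to 1.5; the score sc accumulates such
    // increases, newOverlap - overlap, over all other children j, and the
    // child with the smallest total increase wins.)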
size_t bestIndex = 0; for (size_t i = 0; i < node->NumChildren(); i++) { ElemType sc = 0; for (size_t j = 0; j < node->NumChildren(); j++) { if (j != i) { ElemType overlap = 1.0; ElemType newOverlap = 1.0; for (size_t k = 0; k < node->Bound().Dim(); k++) { ElemType newHigh = std::max(node->Dataset().col(point)[k], node->Child(i).Bound()[k].Hi()); ElemType newLow = std::min(node->Dataset().col(point)[k], node->Child(i).Bound()[k].Lo()); overlap *= node->Child(i).Bound()[k].Hi() < node->Child(j).Bound()[k].Lo() || node->Child(i).Bound()[k].Lo() > node->Child(j).Bound()[k].Hi() ? 0 : std::min(node->Child(i).Bound()[k].Hi(), node->Child(j).Bound()[k].Hi()) - std::max(node->Child(i).Bound()[k].Lo(), node->Child(j).Bound()[k].Lo()); newOverlap *= newHigh < node->Child(j).Bound()[k].Lo() || newLow > node->Child(j).Bound()[k].Hi() ? 0 : std::min(newHigh, node->Child(j).Bound()[k].Hi()) - std::max(newLow, node->Child(j).Bound()[k].Lo()); } sc += newOverlap - overlap; } } originalScores[i] = sc; if (sc < origMinScore) { origMinScore = sc; bestIndex = i; } else if (sc == origMinScore) { tiedOne = true; } } if (!tiedOne) return bestIndex; } // We do this if it is not on the second level or if there was a tie. std::vector scores(node->NumChildren()); if (tiedOne) { // If the first heuristic was tied, we need to eliminate garbage values. for (size_t i = 0; i < scores.size(); i++) scores[i] = std::numeric_limits::max(); } std::vector vols(node->NumChildren()); ElemType minScore = std::numeric_limits::max(); size_t bestIndex = 0; bool tied = false; for (size_t i = 0; i < node->NumChildren(); i++) { if (!tiedOne || originalScores[i] == origMinScore) { ElemType v1 = 1.0; ElemType v2 = 1.0; for (size_t j = 0; j < node->Bound().Dim(); j++) { v1 *= node->Child(i).Bound()[j].Width(); v2 *= node->Child(i).Bound()[j].Contains( node->Dataset().col(point)[j]) ? node->Child(i).Bound()[j].Width() : (node->Child(i).Bound()[j].Hi() < node->Dataset().col(point)[j] ? (node->Dataset().col(point)[j] - node->Child(i).Bound()[j].Lo()) : (node->Child(i).Bound()[j].Hi() - node->Dataset().col(point)[j])); } assert(v2 - v1 >= 0); vols[i] = v1; scores[i] = v2 - v1; if (v2 - v1 < minScore) { minScore = v2 - v1; bestIndex = i; } else if (v2 - v1 == minScore) { tied = true; } } } if (tied) { // We break ties by choosing the smallest bound. ElemType minVol = std::numeric_limits::max(); bestIndex = 0; for (size_t i = 0; i < scores.size(); i++) { if (scores[i] == minScore) { if (vols[i] < minVol) { minVol = vols[i]; bestIndex = i; } } } } return bestIndex; } /** * We simplify this to the same code as is used in the regular R tree, since the * inserted node should always be above the leaf level. If the tree is * eventually changed to support rectangles, this could be changed to match the * above code; however, the paper's explanation for their algorithm seems to * indicate the above is more for points than for rectangles. */ template inline size_t RStarTreeDescentHeuristic::ChooseDescentNode( const TreeType* node, const TreeType* insertedNode) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; std::vector scores(node->NumChildren()); std::vector vols(node->NumChildren()); ElemType minScore = std::numeric_limits::max(); size_t bestIndex = 0; bool tied = false; for (size_t i = 0; i < node->NumChildren(); i++) { ElemType v1 = 1.0; ElemType v2 = 1.0; for (size_t j = 0; j < node->Child(i).Bound().Dim(); j++) { v1 *= node->Child(i).Bound()[j].Width(); v2 *= node->Child(i).Bound()[j].Contains(insertedNode->Bound()[j]) ? 
          node->Child(i).Bound()[j].Width() :
          (insertedNode->Bound()[j].Contains(node->Child(i).Bound()[j]) ?
          insertedNode->Bound()[j].Width() :
          (insertedNode->Bound()[j].Lo() < node->Child(i).Bound()[j].Lo() ?
          (node->Child(i).Bound()[j].Hi() - insertedNode->Bound()[j].Lo()) :
          (insertedNode->Bound()[j].Hi() - node->Child(i).Bound()[j].Lo())));
    }

    assert(v2 - v1 >= 0);
    vols[i] = v1;
    scores[i] = v2 - v1;

    if (v2 - v1 < minScore)
    {
      minScore = v2 - v1;
      bestIndex = i;
    }
    else if (v2 - v1 == minScore)
    {
      tied = true;
    }
  }

  if (tied)
  {
    // We break ties by choosing the smallest bound.
    ElemType minVol = std::numeric_limits<ElemType>::max();
    bestIndex = 0;
    for (size_t i = 0; i < scores.size(); i++)
    {
      if (scores[i] == minScore)
      {
        if (vols[i] < minVol)
        {
          minVol = vols[i];
          bestIndex = i;
        }
      }
    }
  }

  return bestIndex;
}

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_star_tree_split.hpp000066400000000000000000000050041315013601400261270ustar00rootroot00000000000000/**
 * @file r_star_tree_split.hpp
 * @author Andrew Wells
 *
 * Definition of the RStarTreeSplit class, a class that splits the nodes of an
 * R tree, starting at a leaf node and moving upwards if necessary.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_STAR_TREE_SPLIT_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_R_STAR_TREE_SPLIT_HPP

#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace tree /** Trees and tree-building procedures. */ {

/**
 * A Rectangle Tree has new points inserted at the bottom.  When these
 * nodes overflow, we split them, moving up the tree and splitting nodes
 * as necessary.
 */
class RStarTreeSplit
{
 public:
  /**
   * Split a leaf node using the algorithm described in "The R*-tree: An
   * Efficient and Robust Access Method for Points and Rectangles."  If
   * necessary, this split will propagate upwards through the tree.
   */
  template<typename TreeType>
  static void SplitLeafNode(TreeType* tree, std::vector<bool>& relevels);

  /**
   * Split a non-leaf node using the "default" algorithm.  If this is a root
   * node, the tree increases in depth.
   */
  template<typename TreeType>
  static bool SplitNonLeafNode(TreeType* tree, std::vector<bool>& relevels);

  /**
   * Reinsert any points into the tree, if needed.  This returns the number
   * of points reinserted.
   */
  template<typename TreeType>
  static size_t ReinsertPoints(TreeType* tree, std::vector<bool>& relevels);

  /**
   * Given a node, return the best dimension and the best index to split on.
   */
  template<typename TreeType>
  static void PickLeafSplit(TreeType* tree,
                            size_t& bestAxis,
                            size_t& bestIndex);

 private:
  /**
   * Insert a node into another node.
   */
  template<typename TreeType>
  static void InsertNodeIntoTree(TreeType* destTree, TreeType* srcNode);

  /**
   * Comparator for sorting with std::pair.  This comparator works a little
   * bit faster than the default comparator.
   */
  template<typename ElemType, typename ValueType>
  static bool PairComp(const std::pair<ElemType, ValueType>& p1,
                       const std::pair<ElemType, ValueType>& p2)
  {
    return p1.first < p2.first;
  }
};

} // namespace tree
} // namespace mlpack

// Include implementation
#include "r_star_tree_split_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_star_tree_split_impl.hpp000066400000000000000000000412241315013601400271520ustar00rootroot00000000000000/**
 * @file r_star_tree_split_impl.hpp
 * @author Andrew Wells
 *
 * Implementation of class (RStarTreeSplit) to split a RectangleTree.
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_STAR_TREE_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_R_STAR_TREE_SPLIT_IMPL_HPP #include "r_star_tree_split.hpp" #include "rectangle_tree.hpp" #include namespace mlpack { namespace tree { /** * Reinsert any points into the tree, if needed. This returns the number of * points reinserted. */ template size_t RStarTreeSplit::ReinsertPoints(TreeType* tree, std::vector& relevels) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; // Check if we need to reinsert. if (relevels[tree->TreeDepth() - 1]) { relevels[tree->TreeDepth() - 1] = false; // We sort the points by decreasing distance to the centroid of the bound. // We then remove the first p entries and reinsert them at the root. TreeType* root = tree; while (root->Parent() != NULL) root = root->Parent(); size_t p = tree->MaxLeafSize() * 0.3; // The paper says this works the best. if (p > 0) { // We'll only do reinsertions if p > 0. p is the number of points we will // reinsert. If p == 0, then no reinsertion is necessary and we continue // with the splitting procedure. std::vector> sorted(tree->Count()); arma::Col center; tree->Bound().Center(center); for (size_t i = 0; i < sorted.size(); i++) { sorted[i].first = tree->Metric().Evaluate(center, tree->Dataset().col(tree->Point(i))); sorted[i].second = tree->Point(i); } std::sort(sorted.begin(), sorted.end(), PairComp); // Remove the points furthest from the center of the node. for (size_t i = 0; i < p; i++) root->DeletePoint(sorted[sorted.size() - 1 - i].second, relevels); // Now reinsert the points, but reverse the order---insert the closest to // the center first. for (size_t i = p; i > 0; --i) root->InsertPoint(sorted[sorted.size() - i].second, relevels); } return p; } return 0; } /** * Given a node, return the best dimension and the best index to split on. */ template void RStarTreeSplit::PickLeafSplit(TreeType* tree, size_t& bestAxis, size_t& bestIndex) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; typedef bound::HRectBound BoundType; bestAxis = 0; bestIndex = 0; ElemType bestScore = std::numeric_limits::max(); /** * Check each dimension, to find which dimension is best to split on. */ for (size_t j = 0; j < tree->Bound().Dim(); j++) { ElemType axisScore = 0.0; // Sort in increasing values of the selected dimension j. arma::Col dimValues(tree->Count()); for (size_t i = 0; i < tree->Count(); ++i) dimValues[i] = tree->Dataset().col(tree->Point(i))[j]; arma::uvec sortedIndices = arma::sort_index(dimValues); // We'll store each of the three scores for each distribution. const size_t numPossibleSplits = tree->MaxLeafSize() - 2 * tree->MinLeafSize() + 2; arma::Col areas(numPossibleSplits, arma::fill::zeros); arma::Col margins(numPossibleSplits, arma::fill::zeros); arma::Col overlaps(numPossibleSplits, arma::fill::zeros); for (size_t i = 0; i < numPossibleSplits; i++) { // The ith arrangement is obtained by placing the first // tree->MinLeafSize() + i points in one rectangle and the rest in // another. Then we calculate the three scores for that distribution. 
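      // (Concretely, with illustrative numbers: if MinLeafSize() = 3 and
      // MaxLeafSize() = 8, an overfull leaf holds 9 points and
      // numPossibleSplits = 8 - 2 * 3 + 2 = 4, so the candidate first
      // rectangles hold 3, 4, 5, or 6 points and the second rectangle always
      // keeps at least MinLeafSize() points.)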
size_t splitIndex = tree->MinLeafSize() + i; BoundType bound1(tree->Bound().Dim()); BoundType bound2(tree->Bound().Dim()); for (size_t l = 0; l < splitIndex; l++) bound1 |= tree->Dataset().col(tree->Point(sortedIndices[l])); for (size_t l = splitIndex; l < tree->Count(); l++) bound2 |= tree->Dataset().col(tree->Point(sortedIndices[l])); areas[i] = bound1.Volume() + bound2.Volume(); overlaps[i] = bound1.Overlap(bound2); for (size_t k = 0; k < bound1.Dim(); k++) margins[i] += bound1[k].Width() + bound2[k].Width(); axisScore += margins[i]; } // Is this dimension a new best score? We want the lowest possible score. if (axisScore < bestScore) { bestScore = axisScore; bestAxis = j; size_t overlapIndex = 0; size_t areaIndex = 0; bool tiedOnOverlap = false; for (size_t i = 1; i < areas.n_elem; i++) { if (overlaps[i] < overlaps[overlapIndex]) { tiedOnOverlap = false; overlapIndex = i; areaIndex = i; } else if (overlaps[i] == overlaps[overlapIndex]) { tiedOnOverlap = true; if (areas[i] < areas[areaIndex]) areaIndex = i; } } // Select the best index for splitting. bestIndex = (tiedOnOverlap ? areaIndex : overlapIndex); } } } /** * We call GetPointSeeds to get the two points which will be the initial points * in the new nodes We then call AssignPointDestNode to assign the remaining * points to the two new nodes. Finally, we delete the old node and insert the * new nodes into the tree, spliting the parent if necessary. */ template void RStarTreeSplit::SplitLeafNode(TreeType *tree,std::vector& relevels) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; // If there's no need to split, don't. if (tree->Count() <= tree->MaxLeafSize()) return; // If we haven't yet checked if we need to reinsert on this level, we try // doing so now. if (ReinsertPoints(tree, relevels) > 0) return; // We don't need to reinsert. Instead, we need to split the node. size_t bestAxis; size_t bestIndex; PickLeafSplit(tree, bestAxis, bestIndex); /** * Now that we have found the best dimension to split on, re-sort in that * dimension to prepare for reinsertion of points into the new nodes. */ std::vector> sorted(tree->Count()); for (size_t i = 0; i < sorted.size(); i++) { sorted[i].first = tree->Dataset().col(tree->Point(i))[bestAxis]; sorted[i].second = tree->Point(i); } std::sort(sorted.begin(), sorted.end(), PairComp); /** * If 'tree' is the root of the tree (i.e. if it has no parent), then we must * create two new child nodes, distribute the points from the original node * among them, and insert those. If 'tree' is not the root of the tree, then * we may create only one new child node, redistribute the points from the * original node between 'tree' and the new node, then insert those nodes into * the parent. * * Here we simply set treeOne and treeTwo to the right values to avoid code * duplication. */ TreeType* par = tree->Parent(); TreeType* treeOne = (par) ? tree : new TreeType(tree); TreeType* treeTwo = (par) ? new TreeType(par) : new TreeType(tree); // Now clean the node, and we will re-use this. const size_t numPoints = tree->Count(); // Reset the original node's values, regardless of whether it will become // the new parent or not. tree->numChildren = 0; tree->numDescendants = 0; tree->count = 0; tree->bound.Clear(); // Insert the points into the appropriate tree. for (size_t i = 0; i < numPoints; i++) { if (i < bestIndex + tree->MinLeafSize()) treeOne->InsertPoint(sorted[i].second); else treeTwo->InsertPoint(sorted[i].second); } // Insert the new tree node(s). 
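  // (Two cases, sketched: if 'tree' had a parent, treeOne is 'tree' itself
  // and only treeTwo must be attached to the parent; if 'tree' was the root,
  // both new leaves become children of 'tree', which stays the root so that
  // outside pointers to the root remain valid and the tree grows one level
  // deeper.)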
if (par) { // Just insert the new node into the parent. par->children[par->NumChildren()++] = treeTwo; // If we have overflowed the parent's children, then we need to split that // node also. if (par->NumChildren() == par->MaxNumChildren() + 1) RStarTreeSplit::SplitNonLeafNode(par, relevels); } else { // Now insert the two nodes into 'tree', which is now a higher-level root // node in the tree. InsertNodeIntoTree(tree, treeOne); InsertNodeIntoTree(tree, treeTwo); } } /** * We call GetBoundSeeds to get the two new nodes that this one will be broken * into. Then we call AssignNodeDestNode to move the children of this node into * either of those two nodes. Finally, we delete the now unused information and * recurse up the tree if necessary. We don't need to worry about the bounds * higher up the tree because they were already updated if necessary. */ template bool RStarTreeSplit::SplitNonLeafNode(TreeType *tree,std::vector& relevels) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; typedef bound::HRectBound BoundType; // Reinsertion isn't done for non-leaf nodes; the paper doesn't seem to make // it clear how to reinsert an entire node without reinserting each of the // points, so we will avoid that here. This is a possible area for // improvement of this code. size_t bestAxis = 0; size_t bestIndex = 0; ElemType bestScore = std::numeric_limits::max(); bool lowIsBetter = true; /** * Check over each dimension to see which is best to use for splitting. */ for (size_t j = 0; j < tree->Bound().Dim(); j++) { ElemType axisLoScore = 0.0; ElemType axisHiScore = 0.0; // We have to calculate values for both the lower and higher parts of the // bound. arma::Col loDimValues(tree->NumChildren()); arma::Col hiDimValues(tree->NumChildren()); for (size_t i = 0; i < tree->NumChildren(); i++) { loDimValues[i] = tree->Child(i).Bound()[j].Lo(); hiDimValues[i] = tree->Child(i).Bound()[j].Hi(); } arma::uvec sortedLoDimIndices = arma::sort_index(loDimValues); arma::uvec sortedHiDimIndices = arma::sort_index(hiDimValues); // We'll store each of the three scores for each distribution. Remember // that these are the sums calculated over both the low and high bounds of // each rectangle. const size_t numPossibleSplits = tree->MaxNumChildren() - 2 * tree->MinNumChildren() + 2; arma::Col areas(2 * numPossibleSplits, arma::fill::zeros); arma::Col margins(2 * numPossibleSplits, arma::fill::zeros); arma::Col overlaps(2 * numPossibleSplits, arma::fill::zeros); for (size_t i = 0; i < numPossibleSplits; ++i) { // The ith arrangement is obtained by placing the first // tree->MinNumChildren() + i points in one rectangle and the rest in // another. Then we calculate the three scores for that distribution. const size_t splitIndex = tree->MinNumChildren() + i; BoundType lb1(tree->Bound().Dim()); BoundType lb2(tree->Bound().Dim()); BoundType hb1(tree->Bound().Dim()); BoundType hb2(tree->Bound().Dim()); for (size_t l = 0; l < splitIndex; ++l) { lb1 |= tree->Child(sortedLoDimIndices[l]).Bound(); hb1 |= tree->Child(sortedHiDimIndices[l]).Bound(); } for (size_t l = splitIndex; l < tree->NumChildren(); ++l) { lb2 |= tree->Child(sortedLoDimIndices[l]).Bound(); hb2 |= tree->Child(sortedHiDimIndices[l]).Bound(); } // Calculate low bound distributions. areas[2 * i] = lb1.Volume() + lb2.Volume(); overlaps[2 * i] = lb1.Overlap(lb2); // Calculate high bound distributions. areas[2 * i + 1] = hb1.Volume() + hb2.Volume(); overlaps[2 * i + 1] = hb1.Overlap(hb2); // Now calculate margins for each. 
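      // (Margin here is the perimeter-like sum of side widths: a bound of
      // [0, 2] x [0, 3] contributes 2 + 3 = 5, numbers illustrative.  The R*
      // heuristic first picks the split axis by the smallest summed margin,
      // then picks the index on that axis by overlap and area below.)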
for (size_t k = 0; k < lb1.Dim(); k++) { margins[2 * i] += lb1[k].Width() + lb2[k].Width(); margins[2 * i + 1] += hb1[k].Width() + hb2[k].Width(); } // The score we use is the sum of all scores. axisLoScore += margins[2 * i]; axisHiScore += margins[2 * i + 1]; } // If this dimension's score (for lower or higher bound scores) is a new // best, then extract the necessary split information. if (std::min(axisLoScore, axisHiScore) < bestScore) { bestScore = std::min(axisLoScore, axisHiScore); if (axisLoScore < axisHiScore) lowIsBetter = true; else lowIsBetter = false; bestAxis = j; // This will get us either the lower or higher bound depending on which we // want. Remember that we selected *either* the lower or higher bounds to // split on, so we want to only check those. const size_t indexOffset = lowIsBetter ? 0 : 1; size_t overlapIndex = indexOffset; size_t areaIndex = indexOffset; bool tiedOnOverlap = false; // Find the best possible split (and whether it is on the low values or // high values of the bounds). for (size_t i = 1; i < numPossibleSplits; i++) { // Check bounds. if (overlaps[2 * i + indexOffset] < overlaps[overlapIndex]) { tiedOnOverlap = false; areaIndex = 2 * i + indexOffset; overlapIndex = 2 * i + indexOffset; } else if (overlaps[i] == overlaps[overlapIndex]) { tiedOnOverlap = true; if (areas[2 * i + indexOffset] < areas[areaIndex]) areaIndex = 2 * i + indexOffset; } } bestIndex = ((tiedOnOverlap ? areaIndex : overlapIndex) - indexOffset) / 2 + tree->MinNumChildren(); } } // Get a list of the old children. std::vector oldChildren(tree->NumChildren()); for (size_t i = 0; i < oldChildren.size(); ++i) oldChildren[i] = &tree->Child(i); /** * If 'tree' is the root of the tree (i.e. if it has no parent), then we must * create two new child nodes, distribute the children from the original node * among them, and insert those. If 'tree' is not the root of the tree, then * we may create only one new child node, redistribute the children from the * original node between 'tree' and the new node, then insert those nodes into * the parent. * * Here, we simply set treeOne and treeTwo to the right values to avoid code * duplication. */ TreeType* par = tree->Parent(); TreeType* treeOne = par ? tree : new TreeType(tree); TreeType* treeTwo = par ? new TreeType(par) : new TreeType(tree); // Now clean the node. tree->numChildren = 0; tree->numDescendants = 0; tree->count = 0; tree->bound.Clear(); // Assemble vector of values. arma::Col values(oldChildren.size()); for (size_t i = 0; i < oldChildren.size(); ++i) { values[i] = (lowIsBetter ? oldChildren[i]->Bound()[bestAxis].Lo() : oldChildren[i]->Bound()[bestAxis].Hi()); } arma::uvec indices = arma::sort_index(values); for (size_t i = 0; i < bestIndex; ++i) InsertNodeIntoTree(treeOne, oldChildren[indices[i]]); for (size_t i = bestIndex; i < oldChildren.size(); ++i) InsertNodeIntoTree(treeTwo, oldChildren[indices[i]]); // Insert the new tree node(s). if (par) { // Insert the new node into the parent. The number of descendants does not // need to be updated. par->children[par->NumChildren()++] = treeTwo; } else { // Insert both nodes into 'tree', which is now a higher-level root node. InsertNodeIntoTree(tree, treeOne); InsertNodeIntoTree(tree, treeTwo); // We have to update the children of treeOne so that they record the correct // parent. for (size_t i = 0; i < treeOne->NumChildren(); i++) treeOne->children[i]->Parent() = treeOne; } // Update the children of treeTwo to have the correct parent. 
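  // (treeOne only needs this fix-up in the root branch above, because in the
  // non-root case treeOne is the original 'tree' and its remaining children
  // already record it as their parent.)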
  for (size_t i = 0; i < treeTwo->NumChildren(); i++)
    treeTwo->children[i]->Parent() = treeTwo;

  // If we have overflowed the parent's children, then we need to split that
  // node also.
  if (par && par->NumChildren() >= par->MaxNumChildren() + 1)
    RStarTreeSplit::SplitNonLeafNode(par, relevels);

  return false;
}

/**
 * Insert a node into another node, expanding the bound and updating the
 * number of children and descendants.
 */
template<typename TreeType>
void RStarTreeSplit::InsertNodeIntoTree(TreeType* destTree, TreeType* srcNode)
{
  destTree->Bound() |= srcNode->Bound();
  destTree->numDescendants += srcNode->numDescendants;
  destTree->children[destTree->NumChildren()++] = srcNode;
}

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_tree_descent_heuristic.hpp000066400000000000000000000041141315013601400274460ustar00rootroot00000000000000/**
 * @file r_tree_descent_heuristic.hpp
 * @author Andrew Wells
 *
 * Definition of RTreeDescentHeuristic, a class that chooses the best child
 * of a node in an R tree when inserting a new point.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_TREE_DESCENT_HEURISTIC_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_R_TREE_DESCENT_HEURISTIC_HPP

#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace tree {

/**
 * When descending a RectangleTree to insert a point, we need to have a way to
 * choose a child node when the point isn't enclosed by any of them.  This
 * heuristic is used to do so.
 */
class RTreeDescentHeuristic
{
 public:
  /**
   * Evaluate the node using a heuristic.  The heuristic guarantees two
   * things:
   *
   * 1. If point is contained in (or on) the bound, the value returned is
   *    zero.
   * 2. If the point is not contained in (or on) the bound, the value
   *    returned is greater than zero.
   *
   * @param node The node that is being evaluated.
   * @param point The index of the point that is being inserted.
   */
  template<typename TreeType>
  static size_t ChooseDescentNode(const TreeType* node, const size_t point);

  /**
   * Evaluate the node using a heuristic.  The heuristic guarantees two
   * things:
   *
   * 1. If point is contained in (or on) the bound, the value returned is
   *    zero.
   * 2. If the point is not contained in (or on) the bound, the value
   *    returned is greater than zero.
   *
   * @param node The node that is being evaluated.
   * @param insertedNode The node that is being inserted.
   */
  template<typename TreeType>
  static size_t ChooseDescentNode(const TreeType* node,
                                  const TreeType* insertedNode);
};

} // namespace tree
} // namespace mlpack

// Include implementation.
#include "r_tree_descent_heuristic_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_tree_descent_heuristic_impl.hpp000066400000000000000000000063451315013601400304770ustar00rootroot00000000000000/**
 * @file r_tree_descent_heuristic_impl.hpp
 * @author Andrew Wells
 *
 * Implementation of RTreeDescentHeuristic, a class that chooses the best
 * child of a node in an R tree when inserting a new point.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_TREE_DESCENT_HEURISTIC_IMPL_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_R_TREE_DESCENT_HEURISTIC_IMPL_HPP #include "r_tree_descent_heuristic.hpp" namespace mlpack { namespace tree { template inline size_t RTreeDescentHeuristic::ChooseDescentNode(const TreeType* node, const size_t point) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; ElemType minScore = std::numeric_limits::max(); int bestIndex = 0; ElemType bestVol = 0.0; for (size_t i = 0; i < node->NumChildren(); i++) { ElemType v1 = 1.0; ElemType v2 = 1.0; for (size_t j = 0; j < node->Child(i).Bound().Dim(); j++) { v1 *= node->Child(i).Bound()[j].Width(); v2 *= node->Child(i).Bound()[j].Contains(node->Dataset().col(point)[j]) ? node->Child(i).Bound()[j].Width() : (node->Child(i).Bound()[j].Hi() < node->Dataset().col(point)[j] ? (node->Dataset().col(point)[j] - node->Child(i).Bound()[j].Lo()) : (node->Child(i).Bound()[j].Hi() - node->Dataset().col(point)[j])); } assert(v2 - v1 >= 0); if ((v2 - v1) < minScore) { minScore = v2 - v1; bestVol = v1; bestIndex = i; } else if ((v2 - v1) == minScore && v1 < bestVol) { bestVol = v1; bestIndex = i; } } return bestIndex; } template inline size_t RTreeDescentHeuristic::ChooseDescentNode( const TreeType* node, const TreeType* insertedNode) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; ElemType minScore = std::numeric_limits::max(); int bestIndex = 0; ElemType bestVol = 0.0; for (size_t i = 0; i < node->NumChildren(); i++) { ElemType v1 = 1.0; ElemType v2 = 1.0; for (size_t j = 0; j < node->Child(i).Bound().Dim(); j++) { v1 *= node->Child(i).Bound()[j].Width(); v2 *= node->Child(i).Bound()[j].Contains(insertedNode->Bound()[j]) ? node->Child(i).Bound()[j].Width() : (insertedNode->Bound()[j].Contains(node->Child(i).Bound()[j]) ? insertedNode->Bound()[j].Width() : (insertedNode->Bound()[j].Lo() < node->Child(i).Bound()[j].Lo() ? (node->Child(i).Bound()[j].Hi() - insertedNode->Bound()[j].Lo()) : (insertedNode->Bound()[j].Hi() - node->Child(i).Bound()[j].Lo()))); } assert(v2 - v1 >= 0); if ((v2 - v1) < minScore) { minScore = v2 - v1; bestVol = v1; bestIndex = i; } else if ((v2 - v1) == minScore && v1 < bestVol) { bestVol = v1; bestIndex = i; } } return bestIndex; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_tree_split.hpp000066400000000000000000000051531315013601400251010ustar00rootroot00000000000000/** * @file r_tree_split.hpp * @author Andrew Wells * * Definition of the RTreeSplit class, a class that splits the nodes of an R * tree, starting at a leaf node and moving upwards if necessary. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_TREE_SPLIT_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_R_TREE_SPLIT_HPP #include namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { /** * A Rectangle Tree has new points inserted at the bottom. When these * nodes overflow, we split them, moving up the tree and splitting nodes * as necessary. */ class RTreeSplit { public: /** * Split a leaf node using the "default" algorithm. If necessary, this split * will propagate upwards through the tree. 
   */
  template<typename TreeType>
  static void SplitLeafNode(TreeType* tree, std::vector<bool>& relevels);

  /**
   * Split a non-leaf node using the "default" algorithm.  If this is a root
   * node, the tree increases in depth.
   */
  template<typename TreeType>
  static bool SplitNonLeafNode(TreeType* tree, std::vector<bool>& relevels);

 private:
  /**
   * Get the seeds for splitting a leaf node.
   */
  template<typename TreeType>
  static void GetPointSeeds(const TreeType* tree, int& i, int& j);

  /**
   * Get the seeds for splitting a non-leaf node.
   */
  template<typename TreeType>
  static void GetBoundSeeds(const TreeType* tree, int& i, int& j);

  /**
   * Assign points to the two new nodes.
   */
  template<typename TreeType>
  static void AssignPointDestNode(TreeType* oldTree,
                                  TreeType* treeOne,
                                  TreeType* treeTwo,
                                  const int intI,
                                  const int intJ);

  /**
   * Assign nodes to the two new nodes.
   */
  template<typename TreeType>
  static void AssignNodeDestNode(TreeType* oldTree,
                                 TreeType* treeOne,
                                 TreeType* treeTwo,
                                 const int intI,
                                 const int intJ);

  /**
   * Insert a node into another node.
   */
  template<typename TreeType>
  static void InsertNodeIntoTree(TreeType* destTree, TreeType* srcNode);
};

} // namespace tree
} // namespace mlpack

// Include implementation
#include "r_tree_split_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/r_tree_split_impl.hpp000066400000000000000000000426651315013601400261300ustar00rootroot00000000000000/**
 * @file r_tree_split_impl.hpp
 * @author Andrew Wells
 *
 * Implementation of class (RTreeSplit) to split a RectangleTree.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_R_TREE_SPLIT_IMPL_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_R_TREE_SPLIT_IMPL_HPP

#include "r_tree_split.hpp"
#include "rectangle_tree.hpp"
#include <mlpack/core/math/range.hpp>

namespace mlpack {
namespace tree {

/**
 * We call GetPointSeeds to get the two points which will be the initial
 * points in the new nodes.  We then call AssignPointDestNode to assign the
 * remaining points to the two new nodes.  Finally, we delete the old node
 * and insert the new nodes into the tree, splitting the parent if necessary.
 */
template<typename TreeType>
void RTreeSplit::SplitLeafNode(TreeType* tree, std::vector<bool>& relevels)
{
  if (tree->Count() <= tree->MaxLeafSize())
    return;

  // If we are splitting the root node, we will need to do things differently
  // so that the constructor and other methods don't confuse the end user by
  // giving an address of another node.
  if (tree->Parent() == NULL)
  {
    // We actually want to copy this way.  Pointers and everything.
    TreeType* copy = new TreeType(*tree, false);
    copy->Parent() = tree;
    tree->Count() = 0;
    tree->NullifyData();
    // Because this was a leaf node, numChildren must be 0.
    tree->children[(tree->NumChildren())++] = copy;
    RTreeSplit::SplitLeafNode(copy, relevels);
    return;
  }

  assert(tree->Parent()->NumChildren() <= tree->Parent()->MaxNumChildren());

  // Use the quadratic split method from: Guttman, "R-Trees: A Dynamic Index
  // Structure for Spatial Searching."  It is simplified since we don't
  // handle rectangles, only points.  We assume that the tree uses Euclidean
  // distance.
  int i = 0;
  int j = 0;
  RTreeSplit::GetPointSeeds(tree, i, j);

  TreeType* treeOne = new TreeType(tree->Parent());
  TreeType* treeTwo = new TreeType(tree->Parent());

  // This will assign the ith and jth point appropriately.
  AssignPointDestNode(tree, treeOne, treeTwo, i, j);

  // Remove this node and insert treeOne and treeTwo.
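  // (How the swap below works: the old leaf's slot in its parent's child
  // array is reused for treeOne, and treeTwo is appended at the end.  The
  // parent may then temporarily hold MaxNumChildren() + 1 children, which is
  // what triggers the recursive SplitNonLeafNode() call further down.)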
  TreeType* par = tree->Parent();
  size_t index = 0;
  while (par->children[index] != tree) { ++index; }

  par->children[index] = treeOne;
  par->children[par->NumChildren()++] = treeTwo;

  // We only add one node at a time, so we should only need to test for
  // equality; just in case, we use an assert.
  assert(par->NumChildren() <= par->MaxNumChildren() + 1);
  if (par->NumChildren() == par->MaxNumChildren() + 1)
    RTreeSplit::SplitNonLeafNode(par, relevels);

  assert(treeOne->Parent()->NumChildren() <= treeOne->MaxNumChildren());
  assert(treeOne->Parent()->NumChildren() >= treeOne->MinNumChildren());
  assert(treeTwo->Parent()->NumChildren() <= treeTwo->MaxNumChildren());
  assert(treeTwo->Parent()->NumChildren() >= treeTwo->MinNumChildren());

  // We need to delete this carefully since references to points are used.
  tree->SoftDelete();
}

/**
 * We call GetBoundSeeds to get the two new nodes that this one will be
 * broken into.  Then we call AssignNodeDestNode to move the children of this
 * node into either of those two nodes.  Finally, we delete the now unused
 * information and recurse up the tree if necessary.  We don't need to worry
 * about the bounds higher up the tree because they were already updated if
 * necessary.
 */
template<typename TreeType>
bool RTreeSplit::SplitNonLeafNode(TreeType* tree, std::vector<bool>& relevels)
{
  // If we are splitting the root node, we will need to do things differently
  // so that the constructor and other methods don't confuse the end user by
  // giving an address of another node.
  if (tree->Parent() == NULL)
  {
    // We actually want to copy this way.  Pointers and everything.
    TreeType* copy = new TreeType(*tree, false);

    copy->Parent() = tree;
    tree->NumChildren() = 0;
    tree->NullifyData();
    tree->children[(tree->NumChildren())++] = copy;

    RTreeSplit::SplitNonLeafNode(copy, relevels);
    return true;
  }

  int i = 0;
  int j = 0;
  RTreeSplit::GetBoundSeeds(tree, i, j);

  assert(i != j);

  TreeType* treeOne = new TreeType(tree->Parent());
  TreeType* treeTwo = new TreeType(tree->Parent());

  // This will assign the ith and jth rectangles appropriately.
  AssignNodeDestNode(tree, treeOne, treeTwo, i, j);

  // Remove this node and insert treeOne and treeTwo.
  TreeType* par = tree->Parent();
  size_t index = 0;
  while (par->children[index] != tree) { ++index; }

  assert(index != par->NumChildren());
  par->children[index] = treeOne;
  par->children[par->NumChildren()++] = treeTwo;

  for (size_t i = 0; i < par->NumChildren(); i++)
    assert(par->children[i] != tree);

  // We only add one node at a time, so we should only need to test for
  // equality; just in case, we use an assert.
  assert(par->NumChildren() <= par->MaxNumChildren() + 1);

  if (par->NumChildren() == par->MaxNumChildren() + 1)
    RTreeSplit::SplitNonLeafNode(par, relevels);

  // We have to update the children of each of these new nodes so that they
  // record the correct parent.
  for (size_t i = 0; i < treeOne->NumChildren(); i++)
    treeOne->children[i]->Parent() = treeOne;
  for (size_t i = 0; i < treeTwo->NumChildren(); i++)
    treeTwo->children[i]->Parent() = treeTwo;

  assert(treeOne->NumChildren() <= treeOne->MaxNumChildren());
  assert(treeTwo->NumChildren() <= treeTwo->MaxNumChildren());
  assert(treeOne->Parent()->NumChildren() <= treeOne->MaxNumChildren());

  // Because we now have pointers to the information stored under this tree,
  // we need to delete this node carefully.
  tree->SoftDelete(); // Currently does nothing but leak memory.

  return false;
}

/**
 * Get the two points that will be used as seeds for the split of a leaf
 * node.  The indices of these points will be stored in iRet and jRet.
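 *
 * (A worked example with illustrative numbers: among the points (0, 0),
 * (1, 1), and (3, 4), the pair (0, 0) and (3, 4) spans the most voluminous
 * rectangle, |3 - 0| * |4 - 0| = 12, so those two points would be chosen as
 * the seeds.)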
*/ template void RTreeSplit::GetPointSeeds(const TreeType *tree,int& iRet, int& jRet) { // Here we want to find the pair of points that it is worst to place in the // same node. Because we are just using points, we will simply choose the two // that would create the most voluminous hyperrectangle. typename TreeType::ElemType worstPairScore = -1.0; for (size_t i = 0; i < tree->Count(); i++) { for (size_t j = i + 1; j < tree->Count(); j++) { const typename TreeType::ElemType score = arma::prod(arma::abs( tree->Dataset().col(tree->Point(i)) - tree->Dataset().col(tree->Point(j)))); if (score > worstPairScore) { worstPairScore = score; iRet = i; jRet = j; } } } } /** * Get the two bounds that will be used as seeds for the split of the node. The * indices of the bounds will be stored in iRet and jRet. */ template void RTreeSplit::GetBoundSeeds(const TreeType *tree,int& iRet, int& jRet) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; ElemType worstPairScore = -1.0; for (size_t i = 0; i < tree->NumChildren(); i++) { for (size_t j = i + 1; j < tree->NumChildren(); j++) { ElemType score = 1.0; for (size_t k = 0; k < tree->Bound().Dim(); k++) { const ElemType hiMax = std::max(tree->Child(i).Bound()[k].Hi(), tree->Child(j).Bound()[k].Hi()); const ElemType loMin = std::min(tree->Child(i).Bound()[k].Lo(), tree->Child(j).Bound()[k].Lo()); score *= (hiMax - loMin); } if (score > worstPairScore) { worstPairScore = score; iRet = i; jRet = j; } } } } template void RTreeSplit::AssignPointDestNode(TreeType* oldTree, TreeType* treeOne, TreeType* treeTwo, const int intI, const int intJ) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; size_t end = oldTree->Count(); assert(end > 1); // If this isn't true, the tree is really weird. // Restart the point counts since we are going to move them. oldTree->Count() = 0; treeOne->Count() = 0; treeTwo->Count() = 0; treeOne->InsertPoint(oldTree->Point(intI)); treeTwo->InsertPoint(oldTree->Point(intJ)); // If intJ is the last point in the tree, we need to switch the order so that // we remove the correct points. if (intI > intJ) { oldTree->Point(intI) = oldTree->Point(--end); // Decrement end. oldTree->Point(intJ) = oldTree->Point(--end); // Decrement end. } else { oldTree->Point(intJ) = oldTree->Point(--end); // Decrement end. oldTree->Point(intI) = oldTree->Point(--end); // Decrement end. } size_t numAssignedOne = 1; size_t numAssignedTwo = 1; // In each iteration, we go through all points and find the one that causes // the least increase of volume when added to one of the rectangles. We then // add it to that rectangle. We stop when we run out of points or when all of // the remaining points need to be assigned to the same rectangle to satisfy // the minimum fill requirement. // The below is safe because if end decreases and the right hand side of the // second part of the conjunction changes on the same iteration, we added the // point to the node with fewer points anyways. while ((end > 0) && (end > oldTree->MinLeafSize() - std::min(numAssignedOne, numAssignedTwo))) { int bestIndex = 0; ElemType bestScore = std::numeric_limits::max(); int bestRect = 1; // Calculate the increase in volume for assigning this point to each // rectangle. // First, calculate the starting volume. 
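    // (For intuition, with illustrative numbers: if treeOne's bound is
    // [0, 2] x [0, 2], its volume is 4.  A candidate point at (3, 1) would
    // stretch the bound to [0, 3] x [0, 2], volume 6, so its cost for
    // treeOne is the increase 6 - 4 = 2.  The loop below computes exactly
    // this difference, newVolOne - volOne, for every unassigned point.)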
ElemType volOne = 1.0; ElemType volTwo = 1.0; for (size_t i = 0; i < oldTree->Bound().Dim(); i++) { volOne *= treeOne->Bound()[i].Width(); volTwo *= treeTwo->Bound()[i].Width(); } // Find the point that, when assigned to one of the two new rectangles, // minimizes the increase in volume. for (size_t index = 0; index < end; index++) { ElemType newVolOne = 1.0; ElemType newVolTwo = 1.0; for (size_t i = 0; i < oldTree->Bound().Dim(); i++) { ElemType c = oldTree->Dataset().col(oldTree->Point(index))[i]; newVolOne *= treeOne->Bound()[i].Contains(c) ? treeOne->Bound()[i].Width() : (c < treeOne->Bound()[i].Lo() ? (treeOne->Bound()[i].Hi() - c) : (c - treeOne->Bound()[i].Lo())); newVolTwo *= treeTwo->Bound()[i].Contains(c) ? treeTwo->Bound()[i].Width() : (c < treeTwo->Bound()[i].Lo() ? (treeTwo->Bound()[i].Hi() - c) : (c - treeTwo->Bound()[i].Lo())); } // Choose the rectangle that requires the lesser increase in volume. if ((newVolOne - volOne) < (newVolTwo - volTwo)) { if (newVolOne - volOne < bestScore) { bestScore = newVolOne - volOne; bestIndex = index; bestRect = 1; } } else { if (newVolTwo - volTwo < bestScore) { bestScore = newVolTwo - volTwo; bestIndex = index; bestRect = 2; } } } // Assign the point that causes the least increase in volume // to the appropriate rectangle. if (bestRect == 1) { treeOne->InsertPoint(oldTree->Point(bestIndex)); numAssignedOne++; } else { treeTwo->InsertPoint(oldTree->Point(bestIndex)); numAssignedTwo++; } oldTree->Point(bestIndex) = oldTree->Point(--end); // Decrement end. } // See if we need to satisfy the minimum fill. if (end > 0) { if (numAssignedOne < numAssignedTwo) { for (size_t i = 0; i < end; i++) treeOne->InsertPoint(oldTree->Point(i)); } else { for (size_t i = 0; i < end; i++) treeTwo->InsertPoint(oldTree->Point(i)); } } } template void RTreeSplit::AssignNodeDestNode(TreeType* oldTree, TreeType* treeOne, TreeType* treeTwo, const int intI, const int intJ) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; size_t end = oldTree->NumChildren(); assert(end > 1); // If this isn't true, the tree is really weird. assert(intI != intJ); for (size_t i = 0; i < oldTree->NumChildren(); i++) for (size_t j = i + 1; j < oldTree->NumChildren(); j++) assert(oldTree->children[i] != oldTree->children[j]); InsertNodeIntoTree(treeOne, oldTree->children[intI]); InsertNodeIntoTree(treeTwo, oldTree->children[intJ]); // If intJ is the last node in the tree, we need to switch the order so that // we remove the correct nodes. if (intI > intJ) { oldTree->children[intI] = oldTree->children[--end]; oldTree->children[intJ] = oldTree->children[--end]; } else { oldTree->children[intJ] = oldTree->children[--end]; oldTree->children[intI] = oldTree->children[--end]; } assert(treeOne->NumChildren() == 1); assert(treeTwo->NumChildren() == 1); for (size_t i = 0; i < end; i++) for (size_t j = i + 1; j < end; j++) assert(oldTree->children[i] != oldTree->children[j]); for (size_t i = 0; i < end; i++) assert(oldTree->children[i] != treeOne->children[0]); for (size_t i = 0; i < end; i++) assert(oldTree->children[i] != treeTwo->children[0]); size_t numAssignTreeOne = 1; size_t numAssignTreeTwo = 1; // In each iteration, we go through all of the nodes and find the one that // causes the least increase of volume when added to one of the two new // rectangles. We then add it to that rectangle. 
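  // (The loop guard below also enforces the minimum fill: with, say,
  // MinNumChildren() = 2 and only one node assigned to the emptier rectangle
  // so far, the loop stops while one node remains unassigned, and the
  // cleanup code after the loop hands all remaining nodes to the rectangle
  // with fewer children so that both satisfy the minimum.  Numbers are
  // illustrative.)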
while ((end > 0) && (end > oldTree->MinNumChildren() - std::min(numAssignTreeOne, numAssignTreeTwo))) { int bestIndex = 0; ElemType bestScore = std::numeric_limits::max(); int bestRect = 0; // Calculate the increase in volume for assigning this node to each of the // new rectangles. ElemType volOne = 1.0; ElemType volTwo = 1.0; for (size_t i = 0; i < oldTree->Bound().Dim(); i++) { volOne *= treeOne->Bound()[i].Width(); volTwo *= treeTwo->Bound()[i].Width(); } for (size_t index = 0; index < end; index++) { ElemType newVolOne = 1.0; ElemType newVolTwo = 1.0; for (size_t i = 0; i < oldTree->Bound().Dim(); i++) { // For each of the new rectangles, find the width in this dimension if // we add the rectangle at index to the new rectangle. const math::RangeType& range = oldTree->Child(index).Bound()[i]; newVolOne *= treeOne->Bound()[i].Contains(range) ? treeOne->Bound()[i].Width() : (range.Contains(treeOne->Bound()[i]) ? range.Width() : (range.Lo() < treeOne->Bound()[i].Lo() ? (treeOne->Bound()[i].Hi() - range.Lo()) : (range.Hi() - treeOne->Bound()[i].Lo()))); newVolTwo *= treeTwo->Bound()[i].Contains(range) ? treeTwo->Bound()[i].Width() : (range.Contains(treeTwo->Bound()[i]) ? range.Width() : (range.Lo() < treeTwo->Bound()[i].Lo() ? (treeTwo->Bound()[i].Hi() - range.Lo()) : (range.Hi() - treeTwo->Bound()[i].Lo()))); } // Choose the rectangle that requires the lesser increase in volume. if ((newVolOne - volOne) < (newVolTwo - volTwo)) { if (newVolOne - volOne < bestScore) { bestScore = newVolOne - volOne; bestIndex = index; bestRect = 1; } } else { if (newVolTwo - volTwo < bestScore) { bestScore = newVolTwo - volTwo; bestIndex = index; bestRect = 2; } } } // Assign the rectangle that causes the least increase in volume // to the appropriate rectangle. if (bestRect == 1) { InsertNodeIntoTree(treeOne, oldTree->children[bestIndex]); numAssignTreeOne++; } else { InsertNodeIntoTree(treeTwo, oldTree->children[bestIndex]); numAssignTreeTwo++; } oldTree->children[bestIndex] = oldTree->children[--end]; } // See if we need to satisfy the minimum fill. if (end > 0) { if (numAssignTreeOne < numAssignTreeTwo) { for (size_t i = 0; i < end; i++) { InsertNodeIntoTree(treeOne, oldTree->children[i]); numAssignTreeOne++; } } else { for (size_t i = 0; i < end; i++) { InsertNodeIntoTree(treeTwo, oldTree->children[i]); numAssignTreeTwo++; } } } for (size_t i = 0; i < treeOne->NumChildren(); i++) for (size_t j = i + 1; j < treeOne->NumChildren(); j++) assert(treeOne->children[i] != treeOne->children[j]); for (size_t i = 0; i < treeTwo->NumChildren(); i++) for (size_t j = i + 1; j < treeTwo->NumChildren(); j++) assert(treeTwo->children[i] != treeTwo->children[j]); } /** * Insert a node into another node. Expanding the bounds and updating the * numberOfChildren. */ template void RTreeSplit::InsertNodeIntoTree(TreeType* destTree, TreeType* srcNode) { destTree->Bound() |= srcNode->Bound(); destTree->numDescendants += srcNode->numDescendants; destTree->children[destTree->NumChildren()++] = srcNode; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/rectangle_tree.hpp000066400000000000000000000535541315013601400254010ustar00rootroot00000000000000/** * @file rectangle_tree.hpp * @author Andrew Wells * * Definition of generalized rectangle type trees (r_tree, r_star_tree, x_tree, * and hilbert_r_tree). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_RECTANGLE_TREE_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_RECTANGLE_TREE_HPP

#include <mlpack/prereqs.hpp>

#include "../hrectbound.hpp"
#include "../statistic.hpp"
#include "r_tree_split.hpp"
#include "r_tree_descent_heuristic.hpp"
#include "no_auxiliary_information.hpp"

namespace mlpack {
namespace tree /** Trees and tree-building procedures. */ {

/**
 * A rectangle type tree, such as an R-tree or X-tree.  Once the bound and
 * type of dataset is defined, the tree will construct itself.  Call the
 * constructor with the dataset to build the tree on, and the entire tree
 * will be built.
 *
 * This tree does allow growth, so you can add and delete nodes from it.
 *
 * @tparam MetricType This *must* be EuclideanDistance, but the template
 *     parameter is required to satisfy the TreeType API.
 * @tparam StatisticType Extra data contained in the node.  See statistic.hpp
 *     for the necessary skeleton interface.
 * @tparam MatType The dataset class.
 * @tparam SplitType The type of split to use when inserting points.
 * @tparam DescentType The heuristic to use when descending the tree to
 *     insert points.
 * @tparam AuxiliaryInformationType The auxiliary information contained in
 *     the node.  This information depends on the type of the RectangleTree.
 */
template<typename MetricType,
         typename StatisticType,
         typename MatType,
         typename SplitType,
         typename DescentType,
         template<typename> class AuxiliaryInformationType =
             NoAuxiliaryInformation>
class RectangleTree
{
  // The metric *must* be the Euclidean distance.
  static_assert(boost::is_same<MetricType, metric::EuclideanDistance>::value,
      "RectangleTree: MetricType must be metric::EuclideanDistance.");

 public:
  //! So other classes can use TreeType::Mat.
  typedef MatType Mat;
  //! The element type held by the matrix type.
  typedef typename MatType::elem_type ElemType;
  //! The auxiliary information type held by the tree.
  typedef AuxiliaryInformationType<RectangleTree> AuxiliaryInformation;

 private:
  //! The maximum number of child nodes a non-leaf node can have.
  size_t maxNumChildren;
  //! The minimum number of child nodes a non-leaf node can have.
  size_t minNumChildren;
  //! The number of child nodes actually in use (0 if this is a leaf node).
  size_t numChildren;
  //! The child nodes (starting at 0 and ending at (numChildren - 1)).
  std::vector<RectangleTree*> children;
  //! The parent node (NULL if this is the root of the tree).
  RectangleTree* parent;
  //! The index of the first point in the dataset contained in this node (and
  //! its children).  THIS IS ALWAYS 0 AT THE MOMENT.  IT EXISTS MERELY IN
  //! CASE I THINK OF A WAY TO CHANGE THAT.  IN OTHER WORDS, IT WILL PROBABLY
  //! BE REMOVED.
  size_t begin;
  //! The number of points in the dataset contained in this node (and its
  //! children).
  size_t count;
  //! The number of descendants of this node.
  size_t numDescendants;
  //! The max leaf size.
  size_t maxLeafSize;
  //! The minimum leaf size.
  size_t minLeafSize;
  //! The bound object for this node.
  bound::HRectBound<MetricType, ElemType> bound;
  //! Any extra data contained in the node.
  StatisticType stat;
  //! The distance from the centroid of this node to the centroid of the
  //! parent.
  ElemType parentDistance;
  //! The dataset.
  const MatType* dataset;
  //! Whether or not we are responsible for deleting the dataset.  This is
  //! probably not aligned well...
  bool ownsDataset;
  //! The mapping to the dataset.
  std::vector<size_t> points;
  //! The tree-specific auxiliary information.
  AuxiliaryInformationType<RectangleTree> auxiliaryInfo;

 public:
  //! A single traverser for rectangle type trees.  See
  //! single_tree_traverser.hpp for implementation.
  template<typename RuleType>
  class SingleTreeTraverser;
  //! A dual tree traverser for rectangle type trees.
  template<typename RuleType>
  class DualTreeTraverser;

  /**
   * Construct this as the root node of a rectangle type tree using the given
   * dataset.  This will modify the ordering of the points in the dataset!
   *
   * @param data Dataset from which to create the tree.  This will be
   *     modified!
   * @param maxLeafSize Maximum size of each leaf in the tree.
   * @param minLeafSize Minimum size of each leaf in the tree.
   * @param maxNumChildren The maximum number of child nodes a non-leaf node
   *     may have.
   * @param minNumChildren The minimum number of child nodes a non-leaf node
   *     may have.
   * @param firstDataIndex The index of the first data point.  UNUSED UNLESS
   *     WE ADD SUPPORT FOR HAVING A "CENTRAL" DATA MATRIX.
   */
  RectangleTree(const MatType& data,
                const size_t maxLeafSize = 20,
                const size_t minLeafSize = 8,
                const size_t maxNumChildren = 5,
                const size_t minNumChildren = 2,
                const size_t firstDataIndex = 0);

  /**
   * Construct this as the root node of a rectangle type tree using the given
   * dataset, and taking ownership of the given dataset.
   *
   * @param data Dataset from which to create the tree.
   * @param maxLeafSize Maximum size of each leaf in the tree.
   * @param minLeafSize Minimum size of each leaf in the tree.
   * @param maxNumChildren The maximum number of child nodes a non-leaf node
   *     may have.
   * @param minNumChildren The minimum number of child nodes a non-leaf node
   *     may have.
   * @param firstDataIndex The index of the first data point.  UNUSED UNLESS
   *     WE ADD SUPPORT FOR HAVING A "CENTRAL" DATA MATRIX.
   */
  RectangleTree(MatType&& data,
                const size_t maxLeafSize = 20,
                const size_t minLeafSize = 8,
                const size_t maxNumChildren = 5,
                const size_t minNumChildren = 2,
                const size_t firstDataIndex = 0);

  /**
   * Construct this as an empty node with the specified parent, copying the
   * parameters (maxLeafSize, minLeafSize, maxNumChildren, minNumChildren,
   * firstDataIndex) from the parent.
   *
   * @param parentNode The parent of the node that is being constructed.
   * @param numMaxChildren The max number of child nodes (used in x-trees).
   */
  explicit RectangleTree(RectangleTree* parentNode,
                         const size_t numMaxChildren = 0);

  /**
   * Create a rectangle tree by copying the other tree.  Be careful!  This
   * can take a long time and use a lot of memory.
   *
   * @param other The tree to be copied.
   * @param deepCopy If false, the children are not recursively copied.
   */
  RectangleTree(const RectangleTree& other,
                const bool deepCopy = true,
                RectangleTree* newParent = NULL);

  /**
   * Create a rectangle tree by moving the other tree.
   *
   * @param other The tree to be copied.
   */
  RectangleTree(RectangleTree&& other);

  /**
   * Construct the tree from a boost::serialization archive.
   */
  template<typename Archive>
  RectangleTree(
      Archive& ar,
      const typename boost::enable_if<typename Archive::is_loading>::type*
          = 0);

  /**
   * Deletes this node, deallocating the memory for the children and calling
   * their destructors in turn.  This will invalidate any pointers or
   * references to any nodes which are children of this one.
   */
  ~RectangleTree();

  /**
   * Delete this node of the tree, but leave the stuff contained in it
   * intact.  This is used when splitting a node, where the data in this tree
   * is moved to two other trees.
   */
  void SoftDelete();

  /**
   * Nullify the auxiliary information.  Used for memory management.
   * Be careful.
   */
  void NullifyData();

  /**
   * Inserts a point into the tree.
   *
   * @param point The index of a point in the dataset.
   */
  void InsertPoint(const size_t point);

  /**
   * Inserts a point into the tree, tracking which levels have been inserted
   * into.
   *
   * @param point The index of a point in the dataset.
   * @param relevels The levels that have been reinserted to on this top level
   *     insertion.
   */
  void InsertPoint(const size_t point, std::vector<bool>& relevels);

  /**
   * Inserts a node into the tree, tracking which levels have been inserted
   * into.  The node will be inserted so that the tree remains valid.
   *
   * @param node The node to be inserted.
   * @param level The depth that should match the node where this node is
   *     finally inserted.  This should be the number returned by calling
   *     TreeDepth() from the node that originally contained "node".
   * @param relevels The levels that have been reinserted to on this top level
   *     insertion.
   */
  void InsertNode(RectangleTree* node,
                  const size_t level,
                  std::vector<bool>& relevels);

  /**
   * Deletes a point from the tree and updates the bounding rectangle.
   * However, the point will be kept in the central dataset.  (The user may
   * remove it from there if desired, but the indices of the other points must
   * not be changed.)  Returns true if the point is successfully removed and
   * false if it is not (i.e. the point is not in the tree).
   */
  bool DeletePoint(const size_t point);

  /**
   * Deletes a point from the tree, updates the bounding rectangle, and tracks
   * levels.  However, the point will be kept in the central dataset.  (The
   * user may remove it from there if desired, but the indices of the other
   * points must not be changed.)  Returns true if the point is successfully
   * removed and false if it is not (i.e. the point is not in the tree).
   */
  bool DeletePoint(const size_t point, std::vector<bool>& relevels);

  /**
   * Removes a node from the tree.  You are responsible for deleting it if you
   * wish to do so.
   */
  bool RemoveNode(const RectangleTree* node, std::vector<bool>& relevels);

  /**
   * Find a node in this tree by its begin and count (const).
   *
   * Every node is uniquely identified by these two numbers.
   * This is useful for communicating position over the network,
   * when pointers would be invalid.
   *
   * @param begin The begin() of the node to find.
   * @param count The count() of the node to find.
   * @return The found node, or NULL if not found.
   */
  const RectangleTree* FindByBeginCount(size_t begin, size_t count) const;

  /**
   * Find a node in this tree by its begin and count.
   *
   * Every node is uniquely identified by these two numbers.
   * This is useful for communicating position over the network,
   * when pointers would be invalid.
   *
   * @param begin The begin() of the node to find.
   * @param count The count() of the node to find.
   * @return The found node, or NULL if not found.
   */
  RectangleTree* FindByBeginCount(size_t begin, size_t count);

  //! Return the bound object for this node.
  const bound::HRectBound<metric::EuclideanDistance, ElemType>& Bound() const
  { return bound; }
  //! Modify the bound object for this node.
  bound::HRectBound<metric::EuclideanDistance, ElemType>& Bound()
  { return bound; }

  //! Return the statistic object for this node.
  const StatisticType& Stat() const { return stat; }
  //! Modify the statistic object for this node.
  StatisticType& Stat() { return stat; }

  //! Return the auxiliary information object of this node.
  const AuxiliaryInformationType<RectangleTree>& AuxiliaryInfo() const
  { return auxiliaryInfo; }
  //! Modify the auxiliary information object of this node.
  AuxiliaryInformationType<RectangleTree>& AuxiliaryInfo()
  { return auxiliaryInfo; }

  //! Return whether or not this node is a leaf (true if it has no children).
  bool IsLeaf() const;

  //! Return the maximum leaf size.
  size_t MaxLeafSize() const { return maxLeafSize; }
  //! Modify the maximum leaf size.
  size_t& MaxLeafSize() { return maxLeafSize; }

  //! Return the minimum leaf size.
size_t MinLeafSize() const { return minLeafSize; } //! Modify the minimum leaf size. size_t& MinLeafSize() { return minLeafSize; } //! Return the maximum number of children (in a non-leaf node). size_t MaxNumChildren() const { return maxNumChildren; } //! Modify the maximum number of children (in a non-leaf node). size_t& MaxNumChildren() { return maxNumChildren; } //! Return the minimum number of children (in a non-leaf node). size_t MinNumChildren() const { return minNumChildren; } //! Modify the minimum number of children (in a non-leaf node). size_t& MinNumChildren() { return minNumChildren; } //! Gets the parent of this node. RectangleTree* Parent() const { return parent; } //! Modify the parent of this node. RectangleTree*& Parent() { return parent; } //! Get the dataset which the tree is built on. const MatType& Dataset() const { return *dataset; } //! Modify the dataset which the tree is built on. Be careful! MatType& Dataset() { return const_cast(*dataset); } //! Get the metric which the tree uses. MetricType Metric() const { return MetricType(); } //! Get the centroid of the node and store it in the given vector. void Center(arma::vec& center) { bound.Center(center); } //! Return the number of child nodes. (One level beneath this one only.) size_t NumChildren() const { return numChildren; } //! Modify the number of child nodes. Be careful. size_t& NumChildren() { return numChildren; } /** * Return the index of the nearest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template size_t GetNearestChild( const VecType& point, typename boost::enable_if >::type* = 0); /** * Return the index of the furthest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template size_t GetFurthestChild( const VecType& point, typename boost::enable_if >::type* = 0); /** * Return the index of the nearest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ size_t GetNearestChild(const RectangleTree& queryNode); /** * Return the index of the furthest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ size_t GetFurthestChild(const RectangleTree& queryNode); /** * Return the furthest distance to a point held in this node. If this is not * a leaf node, then the distance is 0 because the node holds no points. */ ElemType FurthestPointDistance() const; /** * Return the furthest possible descendant distance. This returns the maximum * distance from the centroid to the edge of the bound and not the empirical * quantity which is the actual furthest descendant distance. So the actual * furthest descendant distance may be less than what this method returns (but * it will never be greater than this). */ ElemType FurthestDescendantDistance() const; //! Return the minimum distance from the center to any edge of the bound. //! Currently, this returns 0, which doesn't break algorithms, but it isn't //! necessarily correct, either. ElemType MinimumBoundDistance() const { return bound.MinWidth() / 2.0; } //! Return the distance from the center of this node to the center of the //! parent node. ElemType ParentDistance() const { return parentDistance; } //! Modify the distance from the center of this node to the center of the //! parent node. ElemType& ParentDistance() { return parentDistance; } /** * Get the specified child. * * @param child Index of child to return. 
*/ inline RectangleTree& Child(const size_t child) const { return *children[child]; } /** * Modify the specified child. * * @param child Index of child to return. */ inline RectangleTree& Child(const size_t child) { return *children[child]; } //! Return the number of points in this node (returns 0 if this node is not a //! leaf). size_t NumPoints() const; /** * Return the number of descendants of this node. For a non-leaf in a binary * space tree, this is the number of points at the descendant leaves. For a * leaf, this is the number of points in the leaf. */ size_t NumDescendants() const; /** * Return the index (with reference to the dataset) of a particular descendant * of this node. The index should be greater than zero but less than the * number of descendants. * * @param index Index of the descendant. */ size_t Descendant(const size_t index) const; /** * Return the index (with reference to the dataset) of a particular point in * this node. This will happily return invalid indices if the given index is * greater than the number of points in this node (obtained with NumPoints()) * -- be careful. * * @param index Index of point for which a dataset index is wanted. */ size_t Point(const size_t index) const { return points[index]; } //! Modify the index of a particular point in this node. Be very careful when //! you do this! You may make the tree invalid. size_t& Point(const size_t index) { return points[index]; } //! Return the minimum distance to another node. ElemType MinDistance(const RectangleTree& other) const { return bound.MinDistance(other.Bound()); } //! Return the maximum distance to another node. ElemType MaxDistance(const RectangleTree& other) const { return bound.MaxDistance(other.Bound()); } //! Return the minimum and maximum distance to another node. math::RangeType RangeDistance(const RectangleTree& other) const { return bound.RangeDistance(other.Bound()); } //! Return the minimum distance to another point. template ElemType MinDistance(const VecType& point, typename boost::enable_if >::type* = 0) const { return bound.MinDistance(point); } //! Return the maximum distance to another point. template ElemType MaxDistance(const VecType& point, typename boost::enable_if >::type* = 0) const { return bound.MaxDistance(point); } //! Return the minimum and maximum distance to another point. template math::RangeType RangeDistance( const VecType& point, typename boost::enable_if >::type* = 0) const { return bound.RangeDistance(point); } /** * Obtains the number of nodes in the tree, starting with this. */ size_t TreeSize() const; /** * Obtains the number of levels below this node in the tree, starting with * this. */ size_t TreeDepth() const; //! Return the index of the beginning point of this subset. size_t Begin() const { return begin; } //! Modify the index of the beginning point of this subset. size_t& Begin() { return begin; } //! Return the number of points in this subset. size_t Count() const { return count; } //! Modify the number of points in this subset. size_t& Count() { return count; } private: /** * Splits the current node, recursing up the tree. * * @param relevels Vector to track which levels have been inserted to. */ void SplitNode(std::vector& relevels); protected: /** * A default constructor. This is meant to only be used with * boost::serialization, which is allowed with the friend declaration below. * This does not return a valid tree! This method must be protected, so that * the serialization shim can work with the default constructor. */ RectangleTree(); //! 
Friend access is given for the default constructor.
  friend class boost::serialization::access;

  //! Give friend access for DescentType.
  friend DescentType;

  //! Give friend access for SplitType.
  friend SplitType;

  //! Give friend access for AuxiliaryInformationType.
  friend AuxiliaryInformation;

 public:
  /**
   * Condense the bounding rectangles for this node based on the removal of the
   * point specified by the arma::vec&.  This recurses up the tree.  If a node
   * goes below the minimum fill, this function will fix the tree.
   *
   * @param point The arma::vec& of the point that was removed to require this
   *     condensation of the tree.
   * @param relevels The levels that have been reinserted to on this top level
   *     insertion.
   * @param usePoint True if we use the optimized version of the algorithm that
   *     is possible when we know what point was deleted.  False otherwise
   *     (e.g. if we deleted a node instead of a point).
   */
  void CondenseTree(const arma::vec& point,
                    std::vector<bool>& relevels,
                    const bool usePoint);

  /**
   * Shrink the bound object of this node for the removal of a point.
   *
   * @param point The arma::vec& of the point that was removed to require this
   *     shrinking.
   * @return true if the bound needed to be changed, false if it did not.
   */
  bool ShrinkBoundForPoint(const arma::vec& point);

  /**
   * Shrink the bound object of this node for the removal of a child node.
   *
   * @param changedBound The bound that was removed, requiring this shrinking.
   * @return true if the bound needed to be changed, false if it did not.
   */
  bool ShrinkBoundForBound(
      const bound::HRectBound<metric::EuclideanDistance, ElemType>&
          changedBound);

  /**
   * Make an exact copy of this node, pointers and everything.
   */
  RectangleTree* ExactClone();

  /**
   * Serialize the tree.
   */
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */);
};

} // namespace tree
} // namespace mlpack

// Include implementation.
#include "rectangle_tree_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/rectangle_tree_impl.hpp000066400000000000000000001155051315013601400264150ustar00rootroot00000000000000/**
 * @file rectangle_tree_impl.hpp
 * @author Andrew Wells
 *
 * Implementation of generalized rectangle tree.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_RECTANGLE_TREE_IMPL_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_RECTANGLE_TREE_IMPL_HPP

// In case it wasn't included already for some reason.
#include "rectangle_tree.hpp"

namespace mlpack {
namespace tree {

template<typename MetricType,
         typename StatisticType,
         typename MatType,
         typename SplitType,
         typename DescentType,
         template<typename> class AuxiliaryInformationType>
RectangleTree<MetricType, StatisticType, MatType, SplitType, DescentType,
              AuxiliaryInformationType>::
RectangleTree(const MatType& data,
              const size_t maxLeafSize,
              const size_t minLeafSize,
              const size_t maxNumChildren,
              const size_t minNumChildren,
              const size_t firstDataIndex) :
    maxNumChildren(maxNumChildren),
    minNumChildren(minNumChildren),
    numChildren(0),
    children(maxNumChildren + 1), // Add one to make splitting the node simpler.
    parent(NULL),
    begin(0),
    count(0),
    numDescendants(0),
    maxLeafSize(maxLeafSize),
    minLeafSize(minLeafSize),
    bound(data.n_rows),
    parentDistance(0),
    dataset(new MatType(data)),
    ownsDataset(true),
    points(maxLeafSize + 1), // Add one to make splitting the node simpler.
    auxiliaryInfo(this)
{
  stat = StatisticType(*this);

  // For now, just insert the points in order.
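  // -------------------------------------------------------------------------
  // Example (editor's sketch, not part of the original source): typical user
  // code builds the whole tree through this constructor; the loop below then
  // inserts every column of the dataset.  Using the RTree typedef from
  // typedef.hpp:
  //
  //   arma::mat data = arma::randu<arma::mat>(3, 1000); // 3-D, 1000 points.
  //   mlpack::tree::RTree<mlpack::metric::EuclideanDistance,
  //       mlpack::tree::EmptyStatistic, arma::mat> tree(data, 20, 8, 5, 2);
  //
  // Because the tree is dynamic, tree.InsertPoint(i) and tree.DeletePoint(i)
  // remain valid after construction for any column index i of the dataset.
  // -------------------------------------------------------------------------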
RectangleTree* root = this; for (size_t i = firstDataIndex; i < data.n_cols; i++) root->InsertPoint(i); } template class AuxiliaryInformationType> RectangleTree:: RectangleTree(MatType&& data, const size_t maxLeafSize, const size_t minLeafSize, const size_t maxNumChildren, const size_t minNumChildren, const size_t firstDataIndex) : maxNumChildren(maxNumChildren), minNumChildren(minNumChildren), numChildren(0), children(maxNumChildren + 1), // Add one to make splitting the node simpler. parent(NULL), begin(0), count(0), numDescendants(0), maxLeafSize(maxLeafSize), minLeafSize(minLeafSize), bound(data.n_rows), parentDistance(0), dataset(new MatType(std::move(data))), ownsDataset(true), points(maxLeafSize + 1), // Add one to make splitting the node simpler. auxiliaryInfo(this) { stat = StatisticType(*this); // For now, just insert the points in order. RectangleTree* root = this; for (size_t i = firstDataIndex; i < dataset->n_cols; i++) root->InsertPoint(i); } template class AuxiliaryInformationType> RectangleTree:: RectangleTree( RectangleTree* parentNode,const size_t numMaxChildren) : maxNumChildren(numMaxChildren > 0 ? numMaxChildren : parentNode->MaxNumChildren()), minNumChildren(parentNode->MinNumChildren()), numChildren(0), children(maxNumChildren + 1), parent(parentNode), begin(0), count(0), numDescendants(0), maxLeafSize(parentNode->MaxLeafSize()), minLeafSize(parentNode->MinLeafSize()), bound(parentNode->Bound().Dim()), parentDistance(0), dataset(&parentNode->Dataset()), ownsDataset(false), points(maxLeafSize + 1), // Add one to make splitting the node simpler. auxiliaryInfo(this) { stat = StatisticType(*this); } /** * Create a rectangle tree by copying the other tree. Be careful! This can * take a long time and use a lot of memory. */ template class AuxiliaryInformationType> RectangleTree:: RectangleTree( const RectangleTree& other, const bool deepCopy, RectangleTree* newParent) : maxNumChildren(other.MaxNumChildren()), minNumChildren(other.MinNumChildren()), numChildren(other.NumChildren()), children(maxNumChildren + 1, NULL), parent(deepCopy ? newParent : other.Parent()), begin(other.Begin()), count(other.Count()), numDescendants(other.numDescendants), maxLeafSize(other.MaxLeafSize()), minLeafSize(other.MinLeafSize()), bound(other.bound), parentDistance(other.ParentDistance()), dataset(deepCopy ? (parent ? 
parent->dataset : new MatType(*other.dataset)) : &other.Dataset()), ownsDataset(deepCopy && (!parent)), points(other.points), auxiliaryInfo(other.auxiliaryInfo, this, deepCopy) { if (deepCopy) { if (numChildren > 0) { for (size_t i = 0; i < numChildren; i++) children[i] = new RectangleTree(other.Child(i), true, this); } } else children = other.children; } template class AuxiliaryInformationType> RectangleTree:: RectangleTree(RectangleTree&& other) : maxNumChildren(other.MaxNumChildren()), minNumChildren(other.MinNumChildren()), numChildren(other.NumChildren()), children(std::move(other.children)), parent(other.Parent()), begin(other.Begin()), count(other.Count()), numDescendants(other.numDescendants), maxLeafSize(other.MaxLeafSize()), minLeafSize(other.MinLeafSize()), bound(std::move(other.bound)), parentDistance(other.ParentDistance()), dataset(other.dataset), ownsDataset(other.ownsDataset), points(std::move(other.points)), auxiliaryInfo(std::move(other.auxiliaryInfo)) { if (parent) { size_t iChild = 0; while (parent->children[iChild] != (&other)) iChild++; assert(iChild < numChildren); parent->children[iChild] = this; } if (!IsLeaf()) { for (size_t i = 0; i < numChildren; i++) children[i]->parent = this; } other.maxNumChildren = 0; other.minNumChildren = 0; other.numChildren = 0; other.parent = NULL; other.begin = 0; other.count = 0; other.numDescendants = 0; other.maxLeafSize = 0; other.minLeafSize = 0; other.parentDistance = 0; other.dataset = NULL; other.ownsDataset = false; } /** * Construct the tree from a boost::serialization archive. */ template class AuxiliaryInformationType> template RectangleTree:: RectangleTree( Archive& ar, const typename boost::enable_if::type*) : RectangleTree() // Use default constructor. { // Now serialize. ar >> data::CreateNVP(*this, "tree"); } /** * Deletes this node, deallocating the memory for the children and calling * their destructors in turn. This will invalidate any pointers or references * to any nodes which are children of this one. */ template class AuxiliaryInformationType> RectangleTree:: ~RectangleTree() { for (size_t i = 0; i < numChildren; i++) delete children[i]; if (ownsDataset) delete dataset; } /** * Deletes this node but leaves the children untouched. Needed for when we * split nodes and remove nodes (inserting and deleting points). */ template class AuxiliaryInformationType> void RectangleTree:: SoftDelete() { parent = NULL; for (size_t i = 0; i < children.size(); i++) children[i] = NULL; numChildren = 0; delete this; } /** * Nullify the auxiliary information. */ template class AuxiliaryInformationType> void RectangleTree:: NullifyData() { auxiliaryInfo.NullifyData(); } /** * Recurse through the tree and insert the point at the leaf node chosen * by the heuristic. */ template class AuxiliaryInformationType> void RectangleTree:: InsertPoint(const size_t point) { // Expand the bound regardless of whether it is a leaf node. bound |= dataset->col(point); numDescendants++; std::vector lvls(TreeDepth(), true); // If this is a leaf node, we stop here and add the point. if (numChildren == 0) { if (!auxiliaryInfo.HandlePointInsertion(this, point)) points[count++] = point; SplitNode(lvls); return; } // If it is not a leaf node, we use the DescentHeuristic to choose a child // to which we recurse. 
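// ---------------------------------------------------------------------------
// Editor's note: ChooseDescentNode() (used just below) is supplied by the
// DescentType policy.  For the classic R tree (RTreeDescentHeuristic) the
// child is chosen to minimize the volume increase needed to absorb the point.
// A minimal sketch of that rule (the real code lives in
// r_tree_descent_heuristic_impl.hpp; the Volume() helper on the bound type is
// assumed here):
//
//   size_t bestIndex = 0;
//   double bestScore = std::numeric_limits<double>::max();
//   for (size_t i = 0; i < node->NumChildren(); ++i)
//   {
//     auto enlarged = node->Child(i).Bound();
//     enlarged |= node->Dataset().col(point);  // Grow to cover the point.
//     const double score = enlarged.Volume() - node->Child(i).Bound().Volume();
//     if (score < bestScore) { bestScore = score; bestIndex = i; }
//   }
// ---------------------------------------------------------------------------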
auxiliaryInfo.HandlePointInsertion(this, point); const size_t descentNode = DescentType::ChooseDescentNode(this, point); children[descentNode]->InsertPoint(point, lvls); } /** * Inserts a point into the tree, tracking which levels have been inserted into. */ template class AuxiliaryInformationType> void RectangleTree:: InsertPoint(const size_t point, std::vector& relevels) { // Expand the bound regardless of whether it is a leaf node. bound |= dataset->col(point); numDescendants++; // If this is a leaf node, we stop here and add the point. if (numChildren == 0) { if (!auxiliaryInfo.HandlePointInsertion(this, point)) points[count++] = point; SplitNode(relevels); return; } // If it is not a leaf node, we use the DescentHeuristic to choose a child // to which we recurse. auxiliaryInfo.HandlePointInsertion(this, point); const size_t descentNode = DescentType::ChooseDescentNode(this,point); children[descentNode]->InsertPoint(point, relevels); } /** * Inserts a node into the tree, tracking which levels have been inserted into. * * @param node The node to be inserted. * @param level The level on which this node should be inserted. * @param relevels The levels that have been reinserted to on this top level * insertion. */ template class AuxiliaryInformationType> void RectangleTree:: InsertNode(RectangleTree* node, const size_t level, std::vector& relevels) { // Expand the bound regardless of the level. bound |= node->Bound(); numDescendants += node->numDescendants; if (level == TreeDepth()) { if (!auxiliaryInfo.HandleNodeInsertion(this, node, true)) { children[numChildren++] = node; node->Parent() = this; } SplitNode(relevels); } else { auxiliaryInfo.HandleNodeInsertion(this, node, false); const size_t descentNode = DescentType::ChooseDescentNode(this, node); children[descentNode]->InsertNode(node, level, relevels); } } /** * Recurse through the tree to remove the point. Once we find the point, we * shrink the rectangles if necessary. */ template class AuxiliaryInformationType> bool RectangleTree:: DeletePoint(const size_t point) { // It is possible that this will cause a reinsertion, so we need to handle the // levels properly. RectangleTree* root = this; while (root->Parent() != NULL) root = root->Parent(); std::vector lvls(root->TreeDepth(), true); if (numChildren == 0) { for (size_t i = 0; i < count; i++) { if (points[i] == point) { if (!auxiliaryInfo.HandlePointDeletion(this, i)) points[i] = points[--count]; RectangleTree* tree = this; while (tree != NULL) { tree->numDescendants--; tree = tree->Parent(); } // This function wil ensure that minFill is satisfied. CondenseTree(dataset->col(point), lvls, true); return true; } } } for (size_t i = 0; i < numChildren; i++) if (children[i]->Bound().Contains(dataset->col(point))) if (children[i]->DeletePoint(point, lvls)) return true; return false; } /** * Recurse through the tree to remove the point. Once we find the point, we * shrink the rectangles if necessary. */ template class AuxiliaryInformationType> bool RectangleTree:: DeletePoint(const size_t point, std::vector& relevels) { if (numChildren == 0) { for (size_t i = 0; i < count; i++) { if (points[i] == point) { if (!auxiliaryInfo.HandlePointDeletion(this, i)) points[i] = points[--count]; RectangleTree* tree = this; while (tree != NULL) { tree->numDescendants--; tree = tree->Parent(); } // This function will ensure that minFill is satisfied. 
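// ---------------------------------------------------------------------------
// Editor's note: CondenseTree() (called below) recurses upward from this
// leaf.  It shrinks the ancestors' bounds now that the point is gone and, if
// this node has fallen below minLeafSize, removes the node and reinserts its
// remaining points at the root -- the CondenseTree step of Guttman's deletion
// algorithm.
// ---------------------------------------------------------------------------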
CondenseTree(dataset->col(point), relevels, true); return true; } } } for (size_t i = 0; i < numChildren; i++) if (children[i]->Bound().Contains(dataset->col(point))) if (children[i]->DeletePoint(point, relevels)) return true; return false; } /** * Recurse through the tree to remove the node. Once we find the node, we * shrink the rectangles if necessary. */ template class AuxiliaryInformationType> bool RectangleTree:: RemoveNode(const RectangleTree* node, std::vector& relevels) { for (size_t i = 0; i < numChildren; i++) { if (children[i] == node) { if (!auxiliaryInfo.HandleNodeRemoval(this, i)) { children[i] = children[--numChildren]; // Decrement numChildren. } RectangleTree* tree = this; while (tree != NULL) { tree->numDescendants -= node->numDescendants; tree = tree->Parent(); } CondenseTree(arma::vec(), relevels, false); return true; } bool contains = true; for (size_t j = 0; j < node->Bound().Dim(); j++) contains &= Child(i).Bound()[j].Contains(node->Bound()[j]); if (contains) if (children[i]->RemoveNode(node, relevels)) return true; } return false; } template class AuxiliaryInformationType> size_t RectangleTree::TreeSize() const { int n = 0; for (int i = 0; i < numChildren; i++) n += children[i]->TreeSize(); return n + 1; // Add one for this node. } template class AuxiliaryInformationType> size_t RectangleTree::TreeDepth() const { int n = 1; RectangleTree* currentNode = const_cast (this); while (!currentNode->IsLeaf()) { currentNode = currentNode->children[0]; n++; } return n; } template class AuxiliaryInformationType> inline bool RectangleTree::IsLeaf() const { return (numChildren == 0); } /** * Return the index of the nearest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template class AuxiliaryInformationType> template size_t RectangleTree::GetNearestChild( const VecType& point, typename boost::enable_if >::type*) { if (IsLeaf()) return 0; ElemType bestDistance = std::numeric_limits::max(); size_t bestIndex = 0; for (size_t i = 0; i < NumChildren(); ++i) { ElemType distance = Child(i).MinDistance(point); if (distance <= bestDistance) { bestDistance = distance; bestIndex = i; } } return bestIndex; } /** * Return the index of the furthest child node to the given query point. If * this is a leaf node, it will return NumChildren() (invalid index). */ template class AuxiliaryInformationType> template size_t RectangleTree::GetFurthestChild( const VecType& point, typename boost::enable_if >::type*) { if (IsLeaf()) return 0; ElemType bestDistance = 0; size_t bestIndex = 0; for (size_t i = 0; i < NumChildren(); ++i) { ElemType distance = Child(i).MaxDistance(point); if (distance >= bestDistance) { bestDistance = distance; bestIndex = i; } } return bestIndex; } /** * Return the index of the nearest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). */ template class AuxiliaryInformationType> size_t RectangleTree::GetNearestChild(const RectangleTree& queryNode) { if (IsLeaf()) return 0; ElemType bestDistance = std::numeric_limits::max(); size_t bestIndex = 0; for (size_t i = 0; i < NumChildren(); ++i) { ElemType distance = Child(i).MinDistance(queryNode); if (distance <= bestDistance) { bestDistance = distance; bestIndex = i; } } return bestIndex; } /** * Return the index of the furthest child node to the given query node. If it * can't decide, it will return NumChildren() (invalid index). 
*/ template class AuxiliaryInformationType> size_t RectangleTree::GetFurthestChild(const RectangleTree& queryNode) { if (IsLeaf()) return 0; ElemType bestDistance = 0; size_t bestIndex = 0; for (size_t i = 0; i < NumChildren(); ++i) { ElemType distance = Child(i).MaxDistance(queryNode); if (distance >= bestDistance) { bestDistance = distance; bestIndex = i; } } return bestIndex; } /** * Return a bound on the furthest point in the node form the centroid. * This returns 0 unless the node is a leaf. */ template class AuxiliaryInformationType> inline typename RectangleTree::ElemType RectangleTree::FurthestPointDistance() const { if (!IsLeaf()) return 0.0; // Otherwise return the distance from the centroid to a corner of the bound. return 0.5 * bound.Diameter(); } /** * Return the furthest possible descendant distance. This returns the maximum * distance from the centroid to the edge of the bound and not the empirical * quantity which is the actual furthest descendant distance. So the actual * furthest descendant distance may be less than what this method returns (but * it will never be greater than this). */ template class AuxiliaryInformationType> inline typename RectangleTree::ElemType RectangleTree::FurthestDescendantDistance() const { // Return the distance from the centroid to a corner of the bound. return 0.5 * bound.Diameter(); } /** * Return the number of points contained in this node. Zero if it is a non-leaf * node. */ template class AuxiliaryInformationType> inline size_t RectangleTree::NumPoints() const { if (numChildren != 0) // This is not a leaf node. return 0; return count; } /** * Return the number of descendants under or in this node. */ template class AuxiliaryInformationType> inline size_t RectangleTree::NumDescendants() const { return numDescendants; } /** * Return the index of a particular descendant contained in this node. */ template class AuxiliaryInformationType> inline size_t RectangleTree::Descendant(const size_t index) const { // I think this may be inefficient... if (numChildren == 0) { return (points[index]); } else { size_t n = 0; for (size_t i = 0; i < numChildren; ++i) { const size_t nd = children[i]->NumDescendants(); if (index - n < nd) return children[i]->Descendant(index - n); n += nd; } // I don't think this is valid. return children[numChildren - 1]->Descendant(index - n); } } /** * Split the tree. This calls the SplitType code to split a node. This method * should only be called on a leaf node. */ template class AuxiliaryInformationType> void RectangleTree:: SplitNode(std::vector& relevels) { if (numChildren == 0) { // We let the SplitType check if the node if overflowed // since an intermediate node of the R+ tree may be overflowed if the leaf // node contains only one point. // The SplitType takes care of this and of moving up the tree if necessary. SplitType::SplitLeafNode(this,relevels); } else { // Check to see if we are full. if (numChildren <= maxNumChildren) return; // We don't need to split. // If we are full, then we need to split (or at least try). The SplitType // takes care of this and of moving up the tree if necessary. SplitType::SplitNonLeafNode(this,relevels); } } //! Default constructor for boost::serialization. template class AuxiliaryInformationType> RectangleTree:: RectangleTree() : maxNumChildren(0), // Try to give sensible defaults, but it shouldn't matter minNumChildren(0), // because this tree isn't valid anyway and is only used numChildren(0), // by boost::serialization. 
parent(NULL), begin(0), count(0), maxLeafSize(0), minLeafSize(0), parentDistance(0.0), dataset(NULL), ownsDataset(false) { // Nothing to do. } /** * Condense the tree. This shrinks the bounds and moves up the tree if * applicable. If a node goes below minimum fill, this code will deal with it. */ template class AuxiliaryInformationType> void RectangleTree:: CondenseTree(const arma::vec& point, std::vector& relevels, const bool usePoint) { // First delete the node if we need to. There's no point in shrinking the // bound first. if (IsLeaf() && count < minLeafSize && parent != NULL) { // We can't delete the root node. for (size_t i = 0; i < parent->NumChildren(); i++) { if (parent->children[i] == this) { // Decrement numChildren. if (!auxiliaryInfo.HandleNodeRemoval(parent, i)) { parent->children[i] = parent->children[--parent->NumChildren()]; } // We find the root and shrink bounds at the same time. bool stillShrinking = true; RectangleTree* root = parent; while (root->Parent() != NULL) { if (stillShrinking) stillShrinking = root->ShrinkBoundForBound(bound); root = root->Parent(); } if (stillShrinking) stillShrinking = root->ShrinkBoundForBound(bound); root = parent; while (root != NULL) { root->numDescendants -= numDescendants; root = root->Parent(); } stillShrinking = true; root = parent; while (root->Parent() != NULL) { if (stillShrinking) stillShrinking = root->AuxiliaryInfo().UpdateAuxiliaryInfo(root); root = root->Parent(); } if (stillShrinking) stillShrinking = root->AuxiliaryInfo().UpdateAuxiliaryInfo(root); // Reinsert the points at the root node. for (size_t j = 0; j < count; j++) root->InsertPoint(points[j], relevels); // This will check the minFill of the parent. parent->CondenseTree(point, relevels, usePoint); // Now it should be safe to delete this node. SoftDelete(); return; } } // Control should never reach here. assert(false); } else if (!IsLeaf() && numChildren < minNumChildren) { if (parent != NULL) { // The normal case. We need to be careful with the root. for (size_t j = 0; j < parent->NumChildren(); j++) { if (parent->children[j] == this) { // Decrement numChildren. if (!auxiliaryInfo.HandleNodeRemoval(parent,j)) { parent->children[j] = parent->children[--parent->NumChildren()]; } size_t level = TreeDepth(); // We find the root and shrink bounds at the same time. bool stillShrinking = true; RectangleTree* root = parent; while (root->Parent() != NULL) { if (stillShrinking) stillShrinking = root->ShrinkBoundForBound(bound); root = root->Parent(); } if (stillShrinking) stillShrinking = root->ShrinkBoundForBound(bound); root = parent; while (root != NULL) { root->numDescendants -= numDescendants; root = root->Parent(); } stillShrinking = true; root = parent; while (root->Parent() != NULL) { if (stillShrinking) stillShrinking = root->AuxiliaryInfo().UpdateAuxiliaryInfo(root); root = root->Parent(); } if (stillShrinking) stillShrinking = root->AuxiliaryInfo().UpdateAuxiliaryInfo(root); // Reinsert the nodes at the root node. for (size_t i = 0; i < numChildren; i++) root->InsertNode(children[i], level, relevels); // This will check the minFill of the point. parent->CondenseTree(point, relevels, usePoint); // Now it should be safe to delete this node. SoftDelete(); return; } } } else if (numChildren == 1) { // If there are multiple children, we can't do anything to the root. RectangleTree* child = children[0]; // Required for the X tree. 
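// ---------------------------------------------------------------------------
// Editor's note: at this point the root has exactly one child, so the tree's
// height shrinks by one.  The block below promotes the grandchildren (or, for
// a tree of height two, the child's points) into this node and then deletes
// the now-empty child.  The capacity check is needed because an X-tree
// supernode child may hold more children than this node currently allows.
// ---------------------------------------------------------------------------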
if (child->NumChildren() > maxNumChildren) { maxNumChildren = child->MaxNumChildren(); children.resize(maxNumChildren + 1); } for (size_t i = 0; i < child->NumChildren(); i++) { children[i] = child->children[i]; children[i]->Parent() = this; child->children[i] = NULL; } numChildren = child->NumChildren(); child->NumChildren() = 0; for (size_t i = 0; i < child->Count(); i++) { // In case the tree has a height of two. points[i] = child->Point(i); } auxiliaryInfo = child->AuxiliaryInfo(); count = child->Count(); child->Count() = 0; delete child; return; } } // If we didn't delete it, shrink the bound if we need to. if (usePoint && (ShrinkBoundForPoint(point) || auxiliaryInfo.UpdateAuxiliaryInfo(this)) && parent != NULL) parent->CondenseTree(point, relevels, usePoint); else if (!usePoint && (ShrinkBoundForBound(bound) || auxiliaryInfo.UpdateAuxiliaryInfo(this)) && parent != NULL) parent->CondenseTree(point, relevels, usePoint); } /** * Shrink the bound so it fits tightly after the removal of this point. */ template class AuxiliaryInformationType> bool RectangleTree:: ShrinkBoundForPoint(const arma::vec& point) { bool shrunk = false; if (IsLeaf()) { for (size_t i = 0; i < bound.Dim(); i++) { if (bound[i].Lo() == point[i]) { ElemType min = std::numeric_limits::max(); for (size_t j = 0; j < count; j++) { if (dataset->col(points[j])[i] < min) min = dataset->col(points[j])[i]; } if (bound[i].Lo() < min) { shrunk = true; bound[i].Lo() = min; } else if (min < bound[i].Lo()) { assert(false); // We have a problem. } } else if (bound[i].Hi() == point[i]) { ElemType max = std::numeric_limits::lowest(); for (size_t j = 0; j < count; j++) { if (dataset->col(points[j])[i] > max) max = dataset->col(points[j])[i]; } if (bound[i].Hi() > max) { shrunk = true; bound[i].Hi() = max; } else if (max > bound[i].Hi()) { assert(false); // We have a problem. } } } } else { for (size_t i = 0; i < bound.Dim(); i++) { if (bound[i].Lo() == point[i]) { ElemType min = std::numeric_limits::max(); for (size_t j = 0; j < numChildren; j++) { if (children[j]->Bound()[i].Lo() < min) min = children[j]->Bound()[i].Lo(); } if (bound[i].Lo() < min) { shrunk = true; bound[i].Lo() = min; } } else if (bound[i].Hi() == point[i]) { ElemType max = std::numeric_limits::lowest(); for (size_t j = 0; j < numChildren; j++) { if (children[j]->Bound()[i].Hi() > max) max = children[j]->Bound()[i].Hi(); } if (bound[i].Hi() > max) { shrunk = true; bound[i].Hi() = max; } } } } return shrunk; } /** * Shrink the bound so it fits tightly after the removal of another bound. */ template class AuxiliaryInformationType> bool RectangleTree:: ShrinkBoundForBound(const bound::HRectBound& /* b */) { // Using the sum is safe since none of the dimensions can increase. ElemType sum = 0; // I think it may be faster to just recalculate the whole thing. for (size_t i = 0; i < bound.Dim(); i++) { sum += bound[i].Width(); bound[i].Lo() = std::numeric_limits::max(); bound[i].Hi() = std::numeric_limits::lowest(); } for (size_t i = 0; i < numChildren; i++) { bound |= children[i]->Bound(); } ElemType sum2 = 0; for (size_t i = 0; i < bound.Dim(); i++) sum2 += bound[i].Width(); return sum != sum2; } /** * Serialize the tree. */ template class AuxiliaryInformationType> template void RectangleTree:: Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // Clean up memory, if necessary. 
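// ---------------------------------------------------------------------------
// Example (editor's sketch, not part of the original source): from user code,
// trees are usually (de)serialized through the data::Save()/data::Load()
// shims rather than by touching boost archives directly.  Assuming a tree
// type 'TreeType' and a tree built beforehand:
//
//   mlpack::data::Save("tree.xml", "tree", tree);  // Uses Serialize() below.
//   TreeType tree2(otherData);
//   mlpack::data::Load("tree.xml", "tree", tree2); // Old memory is cleaned.
// ---------------------------------------------------------------------------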
if (Archive::is_loading::value) { for (size_t i = 0; i < numChildren; i++) delete children[i]; children.clear(); if (ownsDataset && dataset) delete dataset; } ar & CreateNVP(maxNumChildren, "maxNumChildren"); ar & CreateNVP(minNumChildren, "minNumChildren"); ar & CreateNVP(numChildren, "numChildren"); // Due to quirks of boost::serialization, depending on how the user serializes // the tree, the root node may be duplicated. Therefore we don't allow // children of the root to serialize the parent, and we fix the parent link // after serializing the children when loading below. if (Archive::is_saving::value && parent != NULL && parent->Parent() == NULL) { RectangleTree* fakeParent = NULL; ar & CreateNVP(fakeParent, "parent"); } else { ar & CreateNVP(parent, "parent"); } ar & CreateNVP(begin, "begin"); ar & CreateNVP(count, "count"); ar & CreateNVP(numDescendants, "numDescendants"); ar & CreateNVP(maxLeafSize, "maxLeafSize"); ar & CreateNVP(minLeafSize, "minLeafSize"); ar & CreateNVP(bound, "bound"); ar & CreateNVP(stat, "stat"); ar & CreateNVP(parentDistance, "parentDistance"); ar & CreateNVP(dataset, "dataset"); // If we are loading and we are the root, we own the dataset. if (Archive::is_loading::value && parent == NULL) ownsDataset = true; ar & CreateNVP(points, "points"); ar & CreateNVP(auxiliaryInfo, "auxiliaryInfo"); // Because 'children' holds mlpack types (that have Serialize()), we can't use // the std::vector serialization. if (Archive::is_loading::value) children.resize(numChildren); for (size_t i = 0; i < numChildren; ++i) { std::ostringstream oss; oss << "child" << i; ar & CreateNVP(children[i], oss.str()); } // Fix the parent links for the children, if necessary. if (Archive::is_loading::value && parent == NULL) { // Look through each child individually. for (size_t i = 0; i < children.size(); ++i) { children[i]->ownsDataset = false; children[i]->Parent() = this; } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/single_tree_traverser.hpp000066400000000000000000000045201315013601400270000ustar00rootroot00000000000000/** * @file single_tree_traverser.hpp * @author Andrew Wells * * A nested class of Rectangle Tree for traversing rectangle type trees * with a given set of rules which indicate the branches to prune and the * order in which to recurse. This is a depth-first traverser. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_SINGLE_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_SINGLE_TREE_TRAVERSER_HPP #include #include "rectangle_tree.hpp" namespace mlpack { namespace tree { template class AuxiliaryInformationType> template class RectangleTree::SingleTreeTraverser { public: /** * Instantiate the traverser with the given rule set. */ SingleTreeTraverser(RuleType& rule); /** * Traverse the tree with the given point. * * @param queryIndex The index of the point in the query set which is being * used as the query point. * @param referenceNode The tree node to be traversed. */ void Traverse(const size_t queryIndex, const RectangleTree& referenceNode); //! Get the number of prunes. size_t NumPrunes() const { return numPrunes; } //! Modify the number of prunes. 
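// ---------------------------------------------------------------------------
// Example (editor's sketch, not part of the original source): the traverser
// is driven by a RuleType that provides BaseCase(), Score(), and Rescore().
// Given some rule set 'rules' (for instance, the neighbor search rules) and a
// built tree of type TreeType:
//
//   typename TreeType::template SingleTreeTraverser<RuleType> traverser(rules);
//   traverser.Traverse(queryIndex, tree);  // Depth-first, best child first.
// ---------------------------------------------------------------------------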
  size_t& NumPrunes() { return numPrunes; }

 private:
  // We use this class and this function to make the sorting and scoring easy
  // and efficient:
  struct NodeAndScore
  {
    RectangleTree* node;
    double score;
  };

  static bool NodeComparator(const NodeAndScore& obj1,
                             const NodeAndScore& obj2)
  {
    return obj1.score < obj2.score;
  }

  //! Reference to the rules with which the tree will be traversed.
  RuleType& rule;

  //! The number of nodes which have been pruned during traversal.
  size_t numPrunes;
};

} // namespace tree
} // namespace mlpack

// Include implementation.
#include "single_tree_traverser_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/single_tree_traverser_impl.hpp000066400000000000000000000055431315013601400300270ustar00rootroot00000000000000/**
 * @file single_tree_traverser_impl.hpp
 * @author Andrew Wells
 *
 * A class for traversing rectangle type trees with a given set of rules
 * which indicate the branches to prune and the order in which to recurse.
 * This is a depth-first traverser.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_SINGLE_TREE_TRAVERSER_IMPL_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_SINGLE_TREE_TRAVERSER_IMPL_HPP

#include "single_tree_traverser.hpp"

#include <algorithm> // For std::sort().

namespace mlpack {
namespace tree {

template<typename MetricType,
         typename StatisticType,
         typename MatType,
         typename SplitType,
         typename DescentType,
         template<typename> class AuxiliaryInformationType>
template<typename RuleType>
RectangleTree<MetricType, StatisticType, MatType, SplitType, DescentType,
              AuxiliaryInformationType>::
SingleTreeTraverser<RuleType>::SingleTreeTraverser(RuleType& rule) :
    rule(rule),
    numPrunes(0)
{ /* Nothing to do. */ }

template<typename MetricType,
         typename StatisticType,
         typename MatType,
         typename SplitType,
         typename DescentType,
         template<typename> class AuxiliaryInformationType>
template<typename RuleType>
void RectangleTree<MetricType, StatisticType, MatType, SplitType, DescentType,
                   AuxiliaryInformationType>::
SingleTreeTraverser<RuleType>::Traverse(
    const size_t queryIndex,
    const RectangleTree& referenceNode)
{
  // If we reach a leaf node, we need to run the base case.
  if (referenceNode.IsLeaf())
  {
    for (size_t i = 0; i < referenceNode.Count(); i++)
      rule.BaseCase(queryIndex, referenceNode.Point(i));
    return;
  }

  // This is not a leaf node so we sort the children of this node by their
  // scores.
  std::vector<NodeAndScore> nodesAndScores(referenceNode.NumChildren());
  for (size_t i = 0; i < referenceNode.NumChildren(); i++)
  {
    nodesAndScores[i].node = &(referenceNode.Child(i));
    nodesAndScores[i].score = rule.Score(queryIndex, *nodesAndScores[i].node);
  }

  std::sort(nodesAndScores.begin(), nodesAndScores.end(), NodeComparator);

  // Now iterate through them starting with the best and stopping when we
  // reach one that isn't good enough.
  for (size_t i = 0; i < referenceNode.NumChildren(); i++)
  {
    if (rule.Rescore(queryIndex, *nodesAndScores[i].node,
        nodesAndScores[i].score) != DBL_MAX)
    {
      Traverse(queryIndex, *nodesAndScores[i].node);
    }
    else
    {
      numPrunes += referenceNode.NumChildren() - i;
      return;
    }
  }
}

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/traits.hpp000066400000000000000000000075141315013601400237160ustar00rootroot00000000000000/**
 * @file traits.hpp
 * @author Andrew Wells
 *
 * Specialization of the TreeTraits class for the RectangleTree type of tree.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_TRAITS_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_TRAITS_HPP #include namespace mlpack { namespace tree { /** * This is a specialization of the TreeType class to the RectangleTree tree * type. It defines characteristics of the rectangle type trees, and is used to * help write tree-independent (but still optimized) tree-based algorithms. See * mlpack/core/tree/tree_traits.hpp for more information. */ template class AuxiliaryInformationType> class TreeTraits> { public: /** * An R-tree can have overlapping children. */ static const bool HasOverlappingChildren = true; /** * An R-tree node doesn't share points with another node. */ static const bool HasDuplicatedPoints = false; /** * There is no guarantee that the first point in a node is its centroid. */ static const bool FirstPointIsCentroid = false; /** * Points are not contained at multiple levels of the R-tree. */ static const bool HasSelfChildren = false; /** * Points are rearranged during building of the tree. * THIS MAY NOT BE TRUE. IT'S HARD TO DYNAMICALLY INSERT POINTS * AND REARRANGE THE MATRIX */ static const bool RearrangesDataset = false; /** * This tree is not necessarily a binary tree. */ static const bool BinaryTree = false; /** * Rectangle trees don't have duplicated points, so NumDescendants() * represents the number of unique descendant points. */ static const bool UniqueNumDescendants = true; }; /** * Since the R+/R++ tree can not have overlapping children, we should define * traits for the R+/R++ tree. */ template class SweepType, typename DescentType, template class AuxiliaryInformationType> class TreeTraits, DescentType, AuxiliaryInformationType>> { public: /** * The R+/R++ tree can't have overlapping children. */ static const bool HasOverlappingChildren = false; /** * An R-tree node doesn't share points with another node. */ static const bool HasDuplicatedPoints = false; /** * There is no guarantee that the first point in a node is its centroid. */ static const bool FirstPointIsCentroid = false; /** * Points are not contained at multiple levels of the R-tree. */ static const bool HasSelfChildren = false; /** * Points are rearranged during building of the tree. * THIS MAY NOT BE TRUE. IT'S HARD TO DYNAMICALLY INSERT POINTS * AND REARRANGE THE MATRIX */ static const bool RearrangesDataset = false; /** * This tree is not necessarily a binary tree. */ static const bool BinaryTree = false; /** * Rectangle trees don't have duplicated points, so NumDescendants() * represents the number of unique descendant points. */ static const bool UniqueNumDescendants = true; }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/typedef.hpp000066400000000000000000000160221315013601400240430ustar00rootroot00000000000000/** * @file typedef.hpp * @author Ryan Curtin * * Typedefs of RectangleTrees, for use by classes that require trees matching * the TreeType API. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_TYPEDEF_HPP #define MLPACK_CORE_TREE_RECTANGLE_TREE_TYPEDEF_HPP #include "rectangle_tree.hpp" namespace mlpack { namespace tree { /** * An implementation of the R tree that satisfies the TreeType policy API. 
* * This is the same R-tree structure as proposed by Guttman: * * @code * @inproceedings{guttman1984r, * title={R-trees: a dynamic index structure for spatial searching}, * author={Guttman, A.}, * booktitle={Proceedings of the 1984 ACM SIGMOD International Conference on * Management of Data (SIGMOD '84)}, * volume={14}, * number={2}, * year={1984}, * publisher={ACM} * } * @endcode * * @see @ref trees, RStarTree */ template using RTree = RectangleTree; /** * The R*-tree, a more recent variant of the R tree. This template typedef * satisfies the TreeType policy API. * * @code * @inproceedings{beckmann1990r, * title={The R*-tree: an efficient and robust access method for points and * rectangles}, * author={Beckmann, N. and Kriegel, H.-P. and Schneider, R. and Seeger, B.}, * booktitle={Proceedings of the 1990 ACM SIGMOD International Conference on * Management of Data (SIGMOD '90)}, * volume={19}, * number={2}, * year={1990}, * publisher={ACM} * } * @endcode * * @see @ref trees, RTree */ template using RStarTree = RectangleTree; /** * The X-tree, a variant of the R tree with supernodes. This template typedef * satisfies the TreeType policy API. * * @code * @inproceedings{berchtold1996r, * title = {The X-Tree: An Index Structure for High--Dimensional Data}, * author = {Berchtold, Stefan and Keim, Daniel A. and Kriegel, Hans-Peter}, * booktitle = {Proc. 22th Int. Conf. on Very Large Databases (VLDB'96), Bombay, India}, * editor = {Vijayaraman, T. and Buchmann, Alex and Mohan, C. and Sarda, N.}, * pages = {28--39}, * year = {1996}, * publisher = {Morgan Kaufmann} * } * @endcode * * @see @ref trees, RTree, RStarTree */ template using XTree = RectangleTree; /** * The Hilbert R-tree, a variant of the R tree with an ordering along * the Hilbert curve. This template typedef satisfies the TreeType policy API. * * @code * @inproceedings{kamel1994r, * author = {Kamel, Ibrahim and Faloutsos, Christos}, * title = {Hilbert R-tree: An Improved R-tree Using Fractals}, * booktitle = {Proceedings of the 20th International Conference on Very Large Data Bases}, * series = {VLDB '94}, * year = {1994}, * isbn = {1-55860-153-8}, * pages = {500--509}, * numpages = {10}, * url = {http://dl.acm.org/citation.cfm?id=645920.673001}, * acmid = {673001}, * publisher = {Morgan Kaufmann Publishers Inc.}, * address = {San Francisco, CA, USA} * } * @endcode * * @see @ref trees, RTree, DiscreteHilbertRTree */ template using DiscreteHilbertRTreeAuxiliaryInformation = HilbertRTreeAuxiliaryInformation; template using HilbertRTree = RectangleTree, HilbertRTreeDescentHeuristic, DiscreteHilbertRTreeAuxiliaryInformation>; /** * The R+ tree, a variant of the R tree that avoids overlapping rectangles. * The implementation is modified from the original paper implementation. * This template typedef satisfies the TreeType policy API. * * @code * @inproceedings{sellis1987r, * author = {Sellis, Timos K. and Roussopoulos, Nick and Faloutsos, Christos}, * title = {The R+-Tree: A Dynamic Index for Multi-Dimensional Objects}, * booktitle = {Proceedings of the 13th International Conference on Very * Large Data Bases}, * series = {VLDB '87}, * year = {1987}, * isbn = {0-934613-46-X}, * pages = {507--518}, * numpages = {12}, * publisher = {Morgan Kaufmann Publishers Inc.}, * address = {San Francisco, CA, USA}, * } * @endcode * * @see @ref trees, RTree, RTree, RPlusTree */ template using RPlusTree = RectangleTree, RPlusTreeDescentHeuristic, NoAuxiliaryInformation>; /** * The R++ tree, a variant of the R+ tree with maximum buonding rectangles. 
 * This template typedef satisfies the TreeType policy API.
 *
 * @code
 * @inproceedings{sumak2014r,
 *   author = {{\v{S}}um{\'a}k, Martin and Gursk{\'y}, Peter},
 *   title = {R++-Tree: An Efficient Spatial Access Method for Highly
 *       Redundant Point Data},
 *   booktitle = {New Trends in Databases and Information Systems: 17th East
 *       European Conference on Advances in Databases and Information Systems},
 *   year = {2014},
 *   isbn = {978-3-319-01863-8},
 *   pages = {37--44},
 *   publisher = {Springer International Publishing},
 * }
 * @endcode
 *
 * @see @ref trees, RTree, RPlusTree, RPlusPlusTree
 */
template<typename MetricType, typename StatisticType, typename MatType>
using RPlusPlusTree = RectangleTree<MetricType,
                                    StatisticType,
                                    MatType,
                                    RPlusTreeSplit<RPlusPlusTreeSplitPolicy,
                                                   MinimalSplitsNumberSweep>,
                                    RPlusPlusTreeDescentHeuristic,
                                    RPlusPlusTreeAuxiliaryInformation>;

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/x_tree_auxiliary_information.hpp000066400000000000000000000177451315013601400304020ustar00rootroot00000000000000/**
 * @file x_tree_auxiliary_information.hpp
 * @author Mikhail Lozhnikov
 *
 * Definition of the XTreeAuxiliaryInformation class, a class that provides
 * some X-tree-specific information about the nodes.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_X_TREE_AUXILIARY_INFORMATION_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_X_TREE_AUXILIARY_INFORMATION_HPP

namespace mlpack {
namespace tree {

/**
 * The XTreeAuxiliaryInformation class provides information specific to X trees
 * for each node in a RectangleTree.
 */
template<typename TreeType>
class XTreeAuxiliaryInformation
{
 public:
  //! Default constructor.
  XTreeAuxiliaryInformation() :
      normalNodeMaxNumChildren(0),
      splitHistory(0)
  { };

  /**
   * Construct this with the specified node.
   *
   * @param node The node that stores this auxiliary information.
   */
  XTreeAuxiliaryInformation(const TreeType* node) :
      normalNodeMaxNumChildren(node->Parent() ?
          node->Parent()->AuxiliaryInfo().NormalNodeMaxNumChildren() :
          node->MaxNumChildren()),
      splitHistory(node->Bound().Dim())
  { };

  /**
   * Create an auxiliary information object by copying from another object.
   *
   * @param other Another auxiliary information object from which the
   *     information will be copied.
   * @param tree The node that holds the auxiliary information.
   * @param deepCopy If false, the new object uses the same memory (not used
   *     here).
   */
  XTreeAuxiliaryInformation(const XTreeAuxiliaryInformation& other,
                            TreeType* /* tree */ = NULL,
                            bool /* deepCopy */ = true) :
      normalNodeMaxNumChildren(other.NormalNodeMaxNumChildren()),
      splitHistory(other.SplitHistory())
  { };

  /**
   * Copy the auxiliary information object.
   *
   * @param other The object from which the information will be copied.
   */
  XTreeAuxiliaryInformation& operator=(const XTreeAuxiliaryInformation& other)
  {
    normalNodeMaxNumChildren = other.NormalNodeMaxNumChildren();
    splitHistory = other.SplitHistory();

    return *this;
  }

  /**
   * Create an auxiliary information object by moving from another object.
   *
   * @param other The object from which the information will be moved.
   */
  XTreeAuxiliaryInformation(XTreeAuxiliaryInformation&& other) :
      normalNodeMaxNumChildren(other.NormalNodeMaxNumChildren()),
      splitHistory(std::move(other.splitHistory))
  {
    other.normalNodeMaxNumChildren = 0;
  };

  /**
   * Some tree types need to save certain properties during the insertion
   * process. This method allows the auxiliary information the option of
   * manipulating the tree in order to perform the insertion process. If the
   * auxiliary information does that, then the method should return true; if
   * the method returns false the RectangleTree performs its default behavior.
   *
   * @param node The node in which the point is being inserted.
   * @param point The global number of the point being inserted.
   */
  bool HandlePointInsertion(TreeType* /* node */, const size_t /* point */)
  {
    return false;
  }

  /**
   * Some tree types need to save certain properties during the insertion
   * process. This method allows the auxiliary information the option of
   * manipulating the tree in order to perform the insertion process. If the
   * auxiliary information does that, then the method should return true; if
   * the method returns false the RectangleTree performs its default behavior.
   *
   * @param node The node in which the nodeToInsert is being inserted.
   * @param nodeToInsert The node being inserted.
   * @param insertionLevel The level of the tree at which the nodeToInsert
   *     should be inserted.
   */
  bool HandleNodeInsertion(TreeType* /* node */,
                           TreeType* /* nodeToInsert */,
                           bool /* insertionLevel */)
  {
    return false;
  }

  /**
   * Some tree types need to save certain properties during the deletion
   * process. This method allows the auxiliary information the option of
   * manipulating the tree in order to perform the deletion process. If the
   * auxiliary information does that, then the method should return true; if
   * the method returns false the RectangleTree performs its default behavior.
   *
   * @param node The node from which the point is being deleted.
   * @param localIndex The local index of the point being deleted.
   */
  bool HandlePointDeletion(TreeType* /* node */, const size_t /* localIndex */)
  {
    return false;
  }

  /**
   * Some tree types need to save certain properties during the deletion
   * process. This method allows the auxiliary information the option of
   * manipulating the tree in order to perform the deletion process. If the
   * auxiliary information does that, then the method should return true; if
   * the method returns false the RectangleTree performs its default behavior.
   *
   * @param node The node from which the node is being deleted.
   * @param nodeIndex The local index of the node being deleted.
   */
  bool HandleNodeRemoval(TreeType* /* node */, const size_t /* nodeIndex */)
  {
    return false;
  }

  /**
   * Some tree types need to propagate information upward. This method should
   * return false if this is not the case. If true is returned, the update
   * will be propagated upward.
   *
   * @param node The node in which the auxiliary information is being updated.
   */
  bool UpdateAuxiliaryInfo(TreeType* /* node */)
  {
    return false;
  }

  /**
   * Nullify the auxiliary information in order to prevent an invalid free.
   */
  void NullifyData()
  { }

  /**
   * The X tree requires that the tree records its "split history". To make
   * this easy, we use the following structure.
   */
  typedef struct SplitHistoryStruct
  {
    int lastDimension;
    std::vector<bool> history;

    SplitHistoryStruct(int dim) : lastDimension(0), history(dim)
    {
      for (int i = 0; i < dim; i++)
        history[i] = false;
    }

    SplitHistoryStruct(const SplitHistoryStruct& other) :
        lastDimension(other.lastDimension),
        history(other.history)
    { }

    SplitHistoryStruct& operator=(const SplitHistoryStruct& other)
    {
      lastDimension = other.lastDimension;
      history = other.history;
      return *this;
    }

    SplitHistoryStruct(SplitHistoryStruct&& other) :
        lastDimension(other.lastDimension),
        history(std::move(other.history))
    {
      other.lastDimension = 0;
    }

    template<typename Archive>
    void Serialize(Archive& ar, const unsigned int /* version */)
    {
      ar & data::CreateNVP(lastDimension, "lastDimension");
      ar & data::CreateNVP(history, "history");
    }
  } SplitHistoryStruct;

 private:
  //! The max number of child nodes a non-leaf normal node can have.
  size_t normalNodeMaxNumChildren;
  //! A struct to store the "split history" for X trees.
  SplitHistoryStruct splitHistory;

 public:
  //! Return the maximum number of a normal node's children.
  size_t NormalNodeMaxNumChildren() const { return normalNodeMaxNumChildren; }
  //! Modify the maximum number of a normal node's children.
  size_t& NormalNodeMaxNumChildren() { return normalNodeMaxNumChildren; }
  //! Return the split history of the node associated with this object.
  const SplitHistoryStruct& SplitHistory() const { return splitHistory; }
  //! Modify the split history of the node associated with this object.
  SplitHistoryStruct& SplitHistory() { return splitHistory; }

  /**
   * Serialize the information.
   */
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    using data::CreateNVP;

    ar & CreateNVP(normalNodeMaxNumChildren, "normalNodeMaxNumChildren");
    ar & CreateNVP(splitHistory, "splitHistory");
  }
};

} // namespace tree
} // namespace mlpack

#endif // MLPACK_CORE_TREE_RECTANGLE_TREE_X_TREE_AUXILIARY_INFORMATION_HPP
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/x_tree_split.hpp000066400000000000000000000045271315013601400251130ustar00rootroot00000000000000/**
 * @file x_tree_split.hpp
 * @author Andrew Wells
 *
 * Definition of the XTreeSplit class, a class that splits the nodes of an X
 * tree, starting at a leaf node and moving upwards if necessary.
 *
 * This is known to have a bug: see #368.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_X_TREE_SPLIT_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_X_TREE_SPLIT_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace tree /** Trees and tree-building procedures. */ {

/**
 * The X-tree paper says that a maximum allowable overlap of 20% works well.
 *
 * This code should eventually be refactored so as to avoid polluting
 * mlpack::tree with this random double.
 */
const double MAX_OVERLAP = 0.2;

/**
 * A Rectangle Tree has new points inserted at the bottom. When these
 * nodes overflow, we split them, moving up the tree and splitting nodes
 * as necessary.
 */
class XTreeSplit
{
 public:
  /**
   * Split a leaf node using the algorithm described in "The R*-tree: An
   * Efficient and Robust Access Method for Points and Rectangles". If
   * necessary, this split will propagate upwards through the tree.
   */
  template<typename TreeType>
  static void SplitLeafNode(TreeType* tree, std::vector<bool>& relevels);

  /**
   * Split a non-leaf node using the "default" algorithm.
   * If this is a root node, the tree increases in depth.
   */
  template<typename TreeType>
  static bool SplitNonLeafNode(TreeType* tree, std::vector<bool>& relevels);

 private:
  /**
   * Insert a node into another node.
   */
  template<typename TreeType>
  static void InsertNodeIntoTree(TreeType* destTree, TreeType* srcNode);

  /**
   * Comparator for sorting with std::pair. This comparator works a little
   * bit faster than the default comparator.
   */
  template<typename ElemType, typename ValueType>
  static bool PairComp(const std::pair<ElemType, ValueType>& p1,
                       const std::pair<ElemType, ValueType>& p2)
  {
    return p1.first < p2.first;
  }
};

} // namespace tree
} // namespace mlpack

// Include implementation.
#include "x_tree_split_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/rectangle_tree/x_tree_split_impl.hpp000066400000000000000000000576541315013601400261410ustar00rootroot00000000000000/**
 * @file x_tree_split_impl.hpp
 * @author Andrew Wells
 *
 * Implementation of the XTreeSplit class to split a RectangleTree.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_RECTANGLE_TREE_X_TREE_SPLIT_IMPL_HPP
#define MLPACK_CORE_TREE_RECTANGLE_TREE_X_TREE_SPLIT_IMPL_HPP

#include "x_tree_split.hpp"
#include "rectangle_tree.hpp"
#include <mlpack/core/math/range.hpp>

namespace mlpack {
namespace tree {

/**
 * We call GetPointSeeds to get the two points which will be the initial points
 * in the new nodes. We then call AssignPointDestNode to assign the remaining
 * points to the two new nodes. Finally, we delete the old node and insert the
 * new nodes into the tree, splitting the parent if necessary.
 */
template<typename TreeType>
void XTreeSplit::SplitLeafNode(TreeType* tree, std::vector<bool>& relevels)
{
  // Convenience typedef.
  typedef typename TreeType::ElemType ElemType;

  if (tree->Count() <= tree->MaxLeafSize())
    return;

  // If we haven't yet reinserted on this level, we try doing so now.
  if (RStarTreeSplit::ReinsertPoints(tree, relevels) > 0)
    return;

  // The procedure for splitting a leaf node is virtually identical to the R*
  // tree procedure, so we can reuse code.
  size_t bestAxis;
  size_t bestIndex;
  RStarTreeSplit::PickLeafSplit(tree, bestAxis, bestIndex);

  /**
   * Now that we have found the best dimension to split on, re-sort in that
   * dimension to prepare for reinsertion of points into the new nodes.
   */
  std::vector<std::pair<ElemType, size_t>> sorted(tree->Count());
  for (size_t i = 0; i < sorted.size(); i++)
  {
    sorted[i].first = tree->Dataset().col(tree->Point(i))[bestAxis];
    sorted[i].second = tree->Point(i);
  }

  std::sort(sorted.begin(), sorted.end(), PairComp<ElemType, size_t>);

  /**
   * If 'tree' is the root of the tree (i.e. if it has no parent), then we must
   * create two new child nodes, distribute the points from the original node
   * among them, and insert those. If 'tree' is not the root of the tree, then
   * we may create only one new child node, redistribute the points from the
   * original node between 'tree' and the new node, then insert those nodes
   * into the parent.
   *
   * Here we simply set treeOne and treeTwo to the right values to avoid code
   * duplication.
   */
  TreeType* par = tree->Parent();
  TreeType* treeOne = (par) ? tree : new TreeType(tree);
  TreeType* treeTwo = (par) ? new TreeType(par) : new TreeType(tree);

  // Now clean the node, and we will re-use this.
  const size_t numPoints = tree->Count();

  // Reset the original node's values, regardless of whether it will become
  // the new parent or not.
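  // (Note: when 'tree' has a parent, 'tree' itself is reused as treeOne and
  // only treeTwo is freshly allocated; otherwise both children are new nodes
  // and 'tree' becomes their parent.)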
tree->numChildren = 0; tree->numDescendants = 0; tree->count = 0; tree->bound.Clear(); // Insert the points into the appropriate tree. for (size_t i = 0; i < numPoints; i++) { if (i < bestIndex + tree->MinLeafSize()) treeOne->InsertPoint(sorted[i].second); else treeTwo->InsertPoint(sorted[i].second); } // Insert the new tree node(s). if (par) { par->children[par->NumChildren()++] = treeTwo; } else { InsertNodeIntoTree(tree, treeOne); InsertNodeIntoTree(tree, treeTwo); } // We now update the split history of each new node. treeOne->AuxiliaryInfo().SplitHistory().history[bestAxis] = true; treeOne->AuxiliaryInfo().SplitHistory().lastDimension = bestAxis; treeTwo->AuxiliaryInfo().SplitHistory().history[bestAxis] = true; treeTwo->AuxiliaryInfo().SplitHistory().lastDimension = bestAxis; // If we overflowed the parent, split it. if (par && par->NumChildren() == par->MaxNumChildren() + 1) XTreeSplit::SplitNonLeafNode(par,relevels); } /** * We call GetBoundSeeds to get the two new nodes that this one will be broken * into. Then we call AssignNodeDestNode to move the children of this node into * either of those two nodes. Finally, we delete the now unused information and * recurse up the tree if necessary. We don't need to worry about the bounds * higher up the tree because they were already updated if necessary. */ template bool XTreeSplit::SplitNonLeafNode(TreeType *tree,std::vector& relevels) { // Convenience typedef. typedef typename TreeType::ElemType ElemType; typedef bound::HRectBound BoundType; // The X tree paper doesn't explain how to handle the split history when // reinserting nodes and reinserting nodes seems to hurt the performance, so // we don't do it. // We find the split axis that will be used if the topological split fails now // to save CPU time. // Find the next split axis. std::vector axes(tree->Bound().Dim(), true); std::vector dimensionsLastUsed(tree->NumChildren()); for (size_t i = 0; i < tree->NumChildren(); i++) dimensionsLastUsed[i] = tree->Child(i).AuxiliaryInfo().SplitHistory().lastDimension; std::sort(dimensionsLastUsed.begin(), dimensionsLastUsed.end()); size_t lastDim = dimensionsLastUsed[dimensionsLastUsed.size() / 2]; size_t minOverlapSplitDimension = tree->Bound().Dim(); // See if we can use a new dimension. for (size_t i = lastDim + 1; i < axes.size(); i++) { for (size_t j = 0; j < tree->NumChildren(); j++) axes[i] = axes[i] & tree->Child(j).AuxiliaryInfo().SplitHistory().history[i]; if (axes[i] == true) { minOverlapSplitDimension = i; break; } } if (minOverlapSplitDimension == tree->Bound().Dim()) { for (size_t i = 0; i < lastDim + 1; i++) { axes[i] = true; for (size_t j = 0; j < tree->NumChildren(); j++) axes[i] = axes[i] & tree->Child(j).AuxiliaryInfo().SplitHistory().history[i]; if (axes[i] == true) { minOverlapSplitDimension = i; break; } } } bool minOverlapSplitUsesHi = false; ElemType bestScoreMinOverlapSplit = std::numeric_limits::max(); ElemType areaOfBestMinOverlapSplit = 0; int bestIndexMinOverlapSplit = 0; int bestOverlapIndexOnBestAxis = 0; int bestAreaIndexOnBestAxis = 0; bool tiedOnOverlap = false; bool lowIsBest = true; int bestAxis = 0; ElemType bestAxisScore = std::numeric_limits::max(); ElemType overlapBestOverlapAxis = 0; ElemType areaBestOverlapAxis = 0; ElemType overlapBestAreaAxis = 0; ElemType areaBestAreaAxis = 0; for (size_t j = 0; j < tree->Bound().Dim(); j++) { ElemType axisScore = 0.0; // We'll do Bound().Lo() now and use Bound().Hi() later. 
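    // For every candidate split position along this axis we will compute
    // three scores, as in the R*-tree split: the summed margin (perimeter),
    // the summed volume, and the overlap volume of the two resulting bounds.
    // The margin sum selects the split axis; the smallest overlap (with
    // volume as the tie-breaker) selects the split index on that axis.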
std::vector> sorted(tree->NumChildren()); for (size_t i = 0; i < sorted.size(); i++) { sorted[i].first = tree->Child(i).Bound()[j].Lo(); sorted[i].second = &tree->Child(i); } std::sort(sorted.begin(), sorted.end(), PairComp); // We'll store each of the three scores for each distribution. std::vector areas(tree->MaxNumChildren() - 2 * tree->MinNumChildren() + 2); std::vector margins(tree->MaxNumChildren() - 2 * tree->MinNumChildren() + 2); std::vector overlapedAreas(tree->MaxNumChildren() - 2 * tree->MinNumChildren() + 2); for (size_t i = 0; i < areas.size(); i++) { areas[i] = 0.0; margins[i] = 0.0; overlapedAreas[i] = 0.0; } for (size_t i = 0; i < areas.size(); i++) { // The ith arrangement is obtained by placing the first // tree->MinNumChildren() + i points in one rectangle and the rest in // another. Then we calculate the three scores for that distribution. size_t cutOff = tree->MinNumChildren() + i; BoundType bound1(tree->Bound().Dim()); BoundType bound2(tree->Bound().Dim()); for (size_t l = 0; l < cutOff; l++) bound1 |= sorted[l].second->Bound(); for (size_t l = cutOff; l < tree->NumChildren(); l++) bound2 |= sorted[l].second->Bound(); ElemType area1 = bound1.Volume(); ElemType area2 = bound2.Volume(); ElemType oArea = bound1.Overlap(bound2); for (size_t k = 0; k < bound1.Dim(); k++) margins[i] += bound1[k].Width() + bound2[k].Width(); areas[i] += area1 + area2; overlapedAreas[i] += oArea; axisScore += margins[i]; } if (axisScore < bestAxisScore) { bestAxisScore = axisScore; bestAxis = j; bestOverlapIndexOnBestAxis = 0; bestAreaIndexOnBestAxis = 0; overlapBestOverlapAxis = overlapedAreas[bestOverlapIndexOnBestAxis]; areaBestOverlapAxis = areas[bestAreaIndexOnBestAxis]; for (size_t i = 1; i < areas.size(); i++) { if (overlapedAreas[i] < overlapedAreas[bestOverlapIndexOnBestAxis]) { tiedOnOverlap = false; bestAreaIndexOnBestAxis = i; bestOverlapIndexOnBestAxis = i; overlapBestOverlapAxis = overlapedAreas[i]; areaBestOverlapAxis = areas[i]; } else if (overlapedAreas[i] == overlapedAreas[bestOverlapIndexOnBestAxis]) { tiedOnOverlap = true; if (areas[i] < areas[bestAreaIndexOnBestAxis]) { bestAreaIndexOnBestAxis = i; overlapBestAreaAxis = overlapedAreas[i]; areaBestAreaAxis = areas[i]; } } } } // Track the minOverlapSplit data if (minOverlapSplitDimension != tree->Bound().Dim() && j == minOverlapSplitDimension) { for (size_t i = 0; i < overlapedAreas.size(); i++) { if (overlapedAreas[i] < bestScoreMinOverlapSplit) { bestScoreMinOverlapSplit = overlapedAreas[i]; bestIndexMinOverlapSplit = i; areaOfBestMinOverlapSplit = areas[i]; } } } } // Now we do the same thing using Bound().Hi() and choose the best of the two. for (size_t j = 0; j < tree->Bound().Dim(); j++) { ElemType axisScore = 0.0; std::vector> sorted(tree->NumChildren()); for (size_t i = 0; i < sorted.size(); i++) { sorted[i].first = tree->Child(i).Bound()[j].Hi(); sorted[i].second = &tree->Child(i); } std::sort(sorted.begin(), sorted.end(), PairComp); // We'll store each of the three scores for each distribution. std::vector areas(tree->MaxNumChildren() - 2 * tree->MinNumChildren() + 2); std::vector margins(tree->MaxNumChildren() - 2 * tree->MinNumChildren() + 2); std::vector overlapedAreas(tree->MaxNumChildren() - 2 * tree->MinNumChildren() + 2); for (size_t i = 0; i < areas.size(); i++) { areas[i] = 0.0; margins[i] = 0.0; overlapedAreas[i] = 0.0; } for (size_t i = 0; i < areas.size(); i++) { // The ith arrangement is obtained by placing the first // tree->MinNumChildren() + i points in one rectangle and the rest in // another. 
Then we calculate the three scores for that distribution. size_t cutOff = tree->MinNumChildren() + i; BoundType bound1(tree->Bound().Dim()); BoundType bound2(tree->Bound().Dim()); for (size_t l = 0; l < cutOff; l++) bound1 |= sorted[l].second->Bound(); for (size_t l = cutOff; l < tree->NumChildren(); l++) bound2 |= sorted[l].second->Bound(); ElemType area1 = bound1.Volume(); ElemType area2 = bound2.Volume(); ElemType oArea = bound1.Overlap(bound2); for (size_t k = 0; k < bound1.Dim(); k++) margins[i] += bound1[k].Width() + bound2[k].Width(); areas[i] += area1 + area2; overlapedAreas[i] += oArea; axisScore += margins[i]; } if (axisScore < bestAxisScore) { bestAxisScore = axisScore; bestAxis = j; lowIsBest = false; bestOverlapIndexOnBestAxis = 0; bestAreaIndexOnBestAxis = 0; overlapBestOverlapAxis = overlapedAreas[bestOverlapIndexOnBestAxis]; areaBestOverlapAxis = areas[bestAreaIndexOnBestAxis]; for (size_t i = 1; i < areas.size(); i++) { if (overlapedAreas[i] < overlapedAreas[bestOverlapIndexOnBestAxis]) { tiedOnOverlap = false; bestAreaIndexOnBestAxis = i; bestOverlapIndexOnBestAxis = i; overlapBestOverlapAxis = overlapedAreas[i]; areaBestOverlapAxis = areas[i]; } else if (overlapedAreas[i] == overlapedAreas[bestOverlapIndexOnBestAxis]) { tiedOnOverlap = true; if (areas[i] < areas[bestAreaIndexOnBestAxis]) { bestAreaIndexOnBestAxis = i; overlapBestAreaAxis = overlapedAreas[i]; areaBestAreaAxis = areas[i]; } } } } // Track the minOverlapSplit data if (minOverlapSplitDimension != tree->Bound().Dim() && j == minOverlapSplitDimension) { for (size_t i = 0; i < overlapedAreas.size(); i++) { if (overlapedAreas[i] < bestScoreMinOverlapSplit) { minOverlapSplitUsesHi = true; bestScoreMinOverlapSplit = overlapedAreas[i]; bestIndexMinOverlapSplit = i; areaOfBestMinOverlapSplit = areas[i]; } } } } std::vector> sorted(tree->NumChildren()); if (lowIsBest) { for (size_t i = 0; i < sorted.size(); i++) { sorted[i].first = tree->Child(i).Bound()[bestAxis].Lo(); sorted[i].second = &tree->Child(i); } } else { for (size_t i = 0; i < sorted.size(); i++) { sorted[i].first = tree->Child(i).Bound()[bestAxis].Hi(); sorted[i].second = &tree->Child(i); } } std::sort(sorted.begin(), sorted.end(), PairComp); if (tree->Parent() != NULL) { // Reuse tree as the new child. TreeType* treeTwo = new TreeType(tree->Parent(), tree->MaxNumChildren()); const size_t numChildren = tree->NumChildren(); tree->numChildren = 0; tree->count = 0; // Now as per the X-tree paper, we ensure that this split was good enough. bool useMinOverlapSplit = false; if (tiedOnOverlap) { if (overlapBestAreaAxis/areaBestAreaAxis < MAX_OVERLAP) { for (size_t i = 0; i < numChildren; i++) { if (i < bestAreaIndexOnBestAxis + tree->MinNumChildren()) InsertNodeIntoTree(tree, sorted[i].second); else InsertNodeIntoTree(treeTwo, sorted[i].second); } } else useMinOverlapSplit = true; } else { if (overlapBestOverlapAxis/areaBestOverlapAxis < MAX_OVERLAP) { tree->numDescendants = 0; tree->bound.Clear(); for (size_t i = 0; i < numChildren; i++) { if (i < bestOverlapIndexOnBestAxis + tree->MinNumChildren()) InsertNodeIntoTree(tree, sorted[i].second); else InsertNodeIntoTree(treeTwo, sorted[i].second); } } else useMinOverlapSplit = true; } // If the split was not good enough, then we try the minimal overlap split. // If that fails, we create a "super node" (more accurately we resize this // one to make it a super node). if (useMinOverlapSplit) { // If there is a dimension that might work, try that. 
if ((minOverlapSplitDimension != tree->Bound().Dim()) && (bestScoreMinOverlapSplit / areaOfBestMinOverlapSplit < MAX_OVERLAP)) { std::vector> sorted2(numChildren); if (minOverlapSplitUsesHi) { for (size_t i = 0; i < sorted2.size(); i++) { sorted2[i].first = sorted[i].second->Bound()[bestAxis].Hi(); sorted2[i].second = sorted[i].second; } } else { for (size_t i = 0; i < sorted2.size(); i++) { sorted2[i].first = sorted[i].second->Bound()[bestAxis].Lo(); sorted2[i].second = sorted[i].second; } } std::sort(sorted2.begin(), sorted2.end(), PairComp); tree->numDescendants = 0; tree->bound.Clear(); for (size_t i = 0; i < numChildren; i++) { if (i < bestIndexMinOverlapSplit + tree->MinNumChildren()) InsertNodeIntoTree(tree, sorted2[i].second); else InsertNodeIntoTree(treeTwo, sorted2[i].second); } } else { // We don't create a supernode that would be the only child of the root. // (Note that if you did try to do so you would need to update the // parent field on each child of this new node as creating a supernode // causes the function to return before that is done. // I thought commenting out the bellow would make the tree less // efficient but would still work. It doesn't. I should look into that // to see if there is another bug. if ((tree->Parent()->Parent() == NULL) && (tree->Parent()->NumChildren() == 1)) { // We make the root a supernode instead. tree->Parent()->MaxNumChildren() = tree->MaxNumChildren() + tree->AuxiliaryInfo().NormalNodeMaxNumChildren(); tree->Parent()->children.resize(tree->Parent()->MaxNumChildren() + 1); tree->Parent()->NumChildren() = tree->NumChildren(); for (size_t i = 0; i < numChildren; ++i) { tree->Parent()->children[i] = sorted[i].second; tree->Parent()->children[i]->Parent() = tree->Parent(); tree->children[i] = NULL; } delete tree; delete treeTwo; return false; } // If we don't have to worry about the root, we just enlarge this node. tree->MaxNumChildren() += tree->AuxiliaryInfo().NormalNodeMaxNumChildren(); tree->children.resize(tree->MaxNumChildren() + 1); tree->numChildren = numChildren; for (size_t i = 0; i < numChildren; i++) tree->Child(i).Parent() = tree; delete treeTwo; return false; } } // Update the split history of each child. tree->AuxiliaryInfo().SplitHistory().history[bestAxis] = true; tree->AuxiliaryInfo().SplitHistory().lastDimension = bestAxis; treeTwo->AuxiliaryInfo().SplitHistory().history[bestAxis] = true; treeTwo->AuxiliaryInfo().SplitHistory().lastDimension = bestAxis; // Remove this node and insert treeOne and treeTwo TreeType* par = tree->Parent(); par->children[par->NumChildren()++] = treeTwo; // We only add one at a time, so we should only need to test for equality // just in case, we use an assert. if (!(par->NumChildren() <= par->MaxNumChildren() + 1)) Log::Debug << "error " << par->NumChildren() << ", " << par->MaxNumChildren() + 1 << std::endl; assert(par->NumChildren() <= par->MaxNumChildren() + 1); if (par->NumChildren() == par->MaxNumChildren() + 1) XTreeSplit::SplitNonLeafNode(par,relevels); // We have to update the children of each of these new nodes so that they // record the correct parent. 
for (size_t i = 0; i < treeTwo->NumChildren(); i++) treeTwo->Child(i).Parent() = treeTwo; assert(tree->Parent()->NumChildren() <= tree->Parent()->MaxNumChildren()); assert(tree->Parent()->NumChildren() >= tree->Parent()->MinNumChildren()); assert(treeTwo->Parent()->NumChildren() <= treeTwo->Parent()->MaxNumChildren()); assert(treeTwo->Parent()->NumChildren() >= treeTwo->Parent()->MinNumChildren()); return false; } else { // We are the root of the tree, so we need to create two children to add. TreeType* treeOne = new TreeType(tree, tree->MaxNumChildren()); TreeType* treeTwo = new TreeType(tree, tree->MaxNumChildren()); const size_t numChildren = tree->NumChildren(); tree->numChildren = 0; // Now as per the X-tree paper, we ensure that this split was good enough. bool useMinOverlapSplit = false; if (tiedOnOverlap) { if (overlapBestAreaAxis/areaBestAreaAxis < MAX_OVERLAP) { for (size_t i = 0; i < numChildren; i++) { if (i < bestAreaIndexOnBestAxis + tree->MinNumChildren()) InsertNodeIntoTree(treeOne, sorted[i].second); else InsertNodeIntoTree(treeTwo, sorted[i].second); } } else useMinOverlapSplit = true; } else { if (overlapBestOverlapAxis/areaBestOverlapAxis < MAX_OVERLAP) { for (size_t i = 0; i < numChildren; i++) { if (i < bestOverlapIndexOnBestAxis + tree->MinNumChildren()) InsertNodeIntoTree(treeOne, sorted[i].second); else InsertNodeIntoTree(treeTwo, sorted[i].second); } } else useMinOverlapSplit = true; } // If the split was not good enough, then we try the minimal overlap split. // If that fails, we create a "super node" (more accurately we resize this one // to make it a super node). if (useMinOverlapSplit) { // If there is a dimension that might work, try that. if ((minOverlapSplitDimension != tree->Bound().Dim()) && (bestScoreMinOverlapSplit / areaOfBestMinOverlapSplit < MAX_OVERLAP)) { std::vector> sorted2(numChildren); if (minOverlapSplitUsesHi) { for (size_t i = 0; i < sorted2.size(); i++) { sorted2[i].first = sorted[i].second->Bound()[bestAxis].Hi(); sorted2[i].second = sorted[i].second; } } else { for (size_t i = 0; i < sorted2.size(); i++) { sorted2[i].first = sorted[i].second->Bound()[bestAxis].Lo(); sorted2[i].second = sorted[i].second; } } std::sort(sorted2.begin(), sorted2.end(), PairComp); for (size_t i = 0; i < numChildren; i++) { if (i < bestIndexMinOverlapSplit + tree->MinNumChildren()) InsertNodeIntoTree(treeOne, sorted2[i].second); else InsertNodeIntoTree(treeTwo, sorted2[i].second); } } else { // Make this node a supernode. tree->MaxNumChildren() += tree->AuxiliaryInfo().NormalNodeMaxNumChildren(); tree->children.resize(tree->MaxNumChildren() + 1); tree->numChildren = numChildren; for (size_t i = 0; i < numChildren; i++) tree->Child(i).Parent() = tree; delete treeOne; delete treeTwo; return false; } } // Update the split history of each child. treeOne->AuxiliaryInfo().SplitHistory().history[bestAxis] = true; treeOne->AuxiliaryInfo().SplitHistory().lastDimension = bestAxis; treeTwo->AuxiliaryInfo().SplitHistory().history[bestAxis] = true; treeTwo->AuxiliaryInfo().SplitHistory().lastDimension = bestAxis; // Remove this node and insert treeOne and treeTwo tree->children[0] = treeOne; tree->children[1] = treeTwo; tree->numChildren = 2; tree->numDescendants = treeOne->numDescendants + treeTwo->numDescendants; // We have to update the children of each of these new nodes so that they // record the correct parent. 
for (size_t i = 0; i < treeOne->NumChildren(); ++i) treeOne->Child(i).Parent() = treeOne; for (size_t i = 0; i < treeTwo->NumChildren(); i++) treeTwo->Child(i).Parent() = treeTwo; return false; } } /** * Insert a node into another node. Expanding the bounds and updating the * numberOfChildren. */ template void XTreeSplit::InsertNodeIntoTree(TreeType* destTree, TreeType* srcNode) { destTree->Bound() |= srcNode->Bound(); destTree->numDescendants += srcNode->numDescendants; destTree->children[destTree->NumChildren()++] = srcNode; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/space_split/000077500000000000000000000000001315013601400212145ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/tree/space_split/hyperplane.hpp000066400000000000000000000100341315013601400240720ustar00rootroot00000000000000/** * @file hyperplane.hpp * @author Marcos Pividori * * Definition of Hyperplane and AxisOrthogonalHyperplane. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_HYPERPLANE_HPP #define MLPACK_CORE_TREE_SPILL_TREE_HYPERPLANE_HPP #include #include "projection_vector.hpp" namespace mlpack { namespace tree { /** * HyperplaneBase defines a splitting hyperplane based on a projection vector * and projection value. * * @tparam BoundT The bound type considered. * @tparam ProjVectorT Type of projection vector (AxisParallelProjVector, * ProjVector). */ template class HyperplaneBase { public: //! Useful typedef for the bound type. typedef BoundT BoundType; //! Useful typedef for the projection vector type. typedef ProjVectorT ProjVectorType; private: //! Projection vector. ProjVectorType projVect; //! Projection value that determines the decision boundary. double splitVal; public: /** * Empty Constructor. By default will consider all points to the left. */ HyperplaneBase() : splitVal(DBL_MAX) {}; /** * Create the hyperplane with the specified projection vector and split value. * * @param projVect Projection vector. * @param splitVal Split value. */ HyperplaneBase(const ProjVectorType& projVect, double splitVal) : projVect(projVect), splitVal(splitVal) {}; /** * Project the given point on the projection vector and subtract the * split value. * * @param point Point to be projected. */ template double Project(const VecType& point, typename boost::enable_if >::type* = 0) const { if (splitVal == DBL_MAX) return 0; return projVect.Project(point) - splitVal; }; /** * Determine if the given point is to the left of the hyperplane, this means * if the projection over the projection vector is negative or zero. * * @param point Point to be analyzed. */ template bool Left(const VecType& point, typename boost::enable_if >::type* = 0) const { return Project(point) <= 0; }; /** * Determine if the given point is to the right of the hyperplane, this means * if the projection over the projection vector is positive. * * @param point Point to be analyzed. */ template bool Right(const VecType& point, typename boost::enable_if >::type* = 0) const { return Project(point) > 0; }; /** * Determine if the given bound is to the left of the hyperplane. * * @param point Bound to be analyzed. 
*/ bool Left(const BoundType& bound) const { if (splitVal == DBL_MAX) return true; return projVect.Project(bound).Hi() <= splitVal; }; /** * Determine if the given bound is to the right of the hyperplane. * * @param point Bound to be analyzed. */ bool Right(const BoundType& bound) const { if (splitVal == DBL_MAX) return false; return projVect.Project(bound).Lo() > splitVal; }; /** * Serialization. */ template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(projVect, "projVect"); ar & data::CreateNVP(splitVal, "splitVal"); }; }; /** * AxisOrthogonalHyperplane represents a hyperplane orthogonal to an axis. */ template using AxisOrthogonalHyperplane = HyperplaneBase, AxisParallelProjVector>; /** * Hyperplane represents a general hyperplane (not necessarily axis-orthogonal). */ template using Hyperplane = HyperplaneBase, ProjVector>; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/space_split/mean_space_split.hpp000066400000000000000000000027131315013601400252360ustar00rootroot00000000000000/** * @file mean_space_split.hpp * @author Marcos Pividori * * Definition of MeanSpaceSplit, to create a splitting hyperplane considering * the mean of the values in a certain projection. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_MEAN_SPACE_SPLIT_HPP #define MLPACK_CORE_TREE_SPILL_TREE_MEAN_SPACE_SPLIT_HPP #include #include "hyperplane.hpp" namespace mlpack { namespace tree { template class MeanSpaceSplit { public: /** * Create a splitting hyperplane considering the mean of the values in a * certain projection. * * @param bound The bound used for this node. * @param data The dataset used by the tree. * @param points Vector of indexes of points to be considered. * @param hyp Resulting splitting hyperplane. * @return Flag to determine if split is possible. */ template static bool SplitSpace( const typename HyperplaneType::BoundType& bound, const MatType& data, const arma::Col& points, HyperplaneType& hyp); }; } // namespace tree } // namespace mlpack // Include implementation. #include "mean_space_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/space_split/mean_space_split_impl.hpp000066400000000000000000000027011315013601400262540ustar00rootroot00000000000000/** * @file mean_space_split_impl.hpp * @author Marcos Pividori * * Implementation of MeanSpaceSplit, to create a splitting hyperplane * considering the midpoint/mean of the values in a certain projection. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
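 *
 * A minimal usage sketch (the hyperplane and metric types here are only
 * illustrative; any HyperplaneType whose BoundType matches the given bound
 * works):
 *
 * @code
 * AxisOrthogonalHyperplane<metric::EuclideanDistance> hyp;
 * const bool success = MeanSpaceSplit<metric::EuclideanDistance, arma::mat>::
 *     SplitSpace(bound, data, points, hyp);
 * @endcode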
 */
#ifndef MLPACK_CORE_TREE_SPILL_TREE_MEAN_SPACE_SPLIT_IMPL_HPP
#define MLPACK_CORE_TREE_SPILL_TREE_MEAN_SPACE_SPLIT_IMPL_HPP

#include "mean_space_split.hpp"
#include "space_split.hpp"

namespace mlpack {
namespace tree {

template<typename MetricType, typename MatType>
template<typename HyperplaneType>
bool MeanSpaceSplit<MetricType, MatType>::SplitSpace(
    const typename HyperplaneType::BoundType& bound,
    const MatType& data,
    const arma::Col<size_t>& points,
    HyperplaneType& hyp)
{
  typename HyperplaneType::ProjVectorType projVector;
  double midValue;

  if (!SpaceSplit<MetricType, MatType>::GetProjVector(bound, data, points,
      projVector, midValue))
    return false;

  double splitVal = 0.0;
  for (size_t i = 0; i < points.n_elem; i++)
    splitVal += projVector.Project(data.col(points[i]));
  splitVal /= points.n_elem;

  hyp = HyperplaneType(projVector, splitVal);

  return true;
}

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/space_split/midpoint_space_split.hpp000066400000000000000000000027531315013601400261450ustar00rootroot00000000000000/**
 * @file midpoint_space_split.hpp
 * @author Marcos Pividori
 *
 * Definition of MidpointSpaceSplit, to create a splitting hyperplane
 * considering the midpoint of the values in a certain projection.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_SPILL_TREE_MIDPOINT_SPACE_SPLIT_HPP
#define MLPACK_CORE_TREE_SPILL_TREE_MIDPOINT_SPACE_SPLIT_HPP

#include <mlpack/core.hpp>

#include "hyperplane.hpp"

namespace mlpack {
namespace tree {

template<typename MetricType, typename MatType>
class MidpointSpaceSplit
{
 public:
  /**
   * Create a splitting hyperplane considering the midpoint of the values in a
   * certain projection.
   *
   * @param bound The bound used for this node.
   * @param data The dataset used by the tree.
   * @param points Vector of indexes of points to be considered.
   * @param hyp Resulting splitting hyperplane.
   * @return Flag to determine if split is possible.
   */
  template<typename HyperplaneType>
  static bool SplitSpace(
      const typename HyperplaneType::BoundType& bound,
      const MatType& data,
      const arma::Col<size_t>& points,
      HyperplaneType& hyp);
};

} // namespace tree
} // namespace mlpack

// Include implementation.
#include "midpoint_space_split_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/core/tree/space_split/midpoint_space_split_impl.hpp000066400000000000000000000024671315013601400271660ustar00rootroot00000000000000/**
 * @file midpoint_space_split_impl.hpp
 * @author Marcos Pividori
 *
 * Implementation of MidpointSpaceSplit, to create a splitting hyperplane
 * considering the midpoint of the values in a certain projection.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
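 *
 * Unlike MeanSpaceSplit, no extra pass over the points is needed: the split
 * value is simply the midpoint of the projected range that
 * SpaceSplit::GetProjVector() already computes (a sketch):
 *
 * @code
 * hyp = HyperplaneType(projVector, midValue);
 * @endcode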
*/ #ifndef MLPACK_CORE_TREE_SPILL_TREE_MIDPOINT_SPACE_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_SPILL_TREE_MIDPOINT_SPACE_SPLIT_IMPL_HPP #include "midpoint_space_split.hpp" #include "space_split.hpp" namespace mlpack { namespace tree { template template bool MidpointSpaceSplit::SplitSpace( const typename HyperplaneType::BoundType& bound, const MatType& data, const arma::Col& points, HyperplaneType& hyp) { typename HyperplaneType::ProjVectorType projVector; double midValue; if (!SpaceSplit::GetProjVector(bound, data, points, projVector, midValue)) return false; hyp = HyperplaneType(projVector, midValue); return true; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/space_split/projection_vector.hpp000066400000000000000000000073151315013601400254710ustar00rootroot00000000000000/** * @file projection_vector.hpp * @author Marcos Pividori * * Definition of ProjVector and AxisParallelProjVector. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_PROJECTION_VECTOR_HPP #define MLPACK_CORE_TREE_SPILL_TREE_PROJECTION_VECTOR_HPP #include #include "../bounds.hpp" namespace mlpack { namespace tree { /** * AxisParallelProjVector defines an axis-parallel projection vector. * We can efficiently project points, simply analyzing a specific dimension. */ class AxisParallelProjVector { //! Dimension considered. size_t dim; public: /** * Create the projection vector based on the specified dimension. * * @param dim Dimension to be considered. */ AxisParallelProjVector(size_t dim = 0) : dim(dim) {}; /** * Project the given point on the projection vector. * * @param point Point to be projected. */ template double Project(const VecType& point, typename boost::enable_if >::type* = 0) const { return point[dim]; }; /** * Project the given hrect bound on the projection vector. * * @param bound Bound to be projected. * @return Range of projected values. */ template math::RangeType Project( const bound::HRectBound& bound) const { return bound[dim]; }; /** * Project the given ball bound on the projection vector. * * @param bound Bound to be projected. * @return Range of projected values. */ template math::RangeType Project( const bound::BallBound& bound) const { return bound[dim]; }; /** * Serialization. */ template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(dim, "dim"); }; }; /** * ProjVector defines a general projection vector (not necessarily * axis-parallel). */ class ProjVector { //! Projection vector. arma::vec projVect; public: /** * Empty Constructor. */ ProjVector() : projVect() {}; /** * Create the projection vector based on the specified vector. * * @param vect Vector to be considered. */ ProjVector(const arma::vec& vect) : projVect(arma::normalise(vect)) {}; /** * Project the given point on the projection vector. * * @param point Point to be projected. */ template double Project(const VecType& point, typename boost::enable_if >::type* = 0) const { return arma::dot(point, projVect); }; /** * Project the given ball bound on the projection vector. * * @param bound Bound to be projected. * @return Range of projected values. 
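 *
 * Because projVect is kept normalized by the constructor, the projected
 * range is centered at the projection of the ball's center with half-width
 * equal to the radius; a sketch (the concrete types are illustrative):
 *
 * @code
 * ProjVector proj(arma::vec("1 0 0"));
 * bound::BallBound<> ball(3); // A 3-dimensional ball bound.
 * math::RangeType<double> r = proj.Project(ball); // [c - radius, c + radius]
 * @endcode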
*/ template math::RangeType Project( const bound::BallBound& bound) const { typedef typename VecType::elem_type ElemType; const double center = Project(bound.Center()); const ElemType radius = bound.Radius(); return math::RangeType(center - radius, center + radius); }; /** * Serialization. */ template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(projVect, "projVect"); }; }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/space_split/space_split.hpp000066400000000000000000000044671315013601400242460ustar00rootroot00000000000000/** * @file space_split.hpp * @author Marcos Pividori * * Definition of SpaceSplit, implementing some methods to create a projection * vector based on a given set of points. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_SPACE_SPLIT_HPP #define MLPACK_CORE_TREE_SPILL_TREE_SPACE_SPLIT_HPP #include #include "hyperplane.hpp" namespace mlpack { namespace tree { template class SpaceSplit { public: /** * Create a projection vector based on the given set of point. This special * case will create an axis-parallel projection vector in the dimension that * has the maximum width. * * @param bound The bound used for this node. * @param data The dataset used by the tree. * @param points Vector of indexes of points to be considered. * @param projVector Resulting axis-parallel projection vector. * @param midValue Mid value in the chosen projection. * @return Flag to determine if it is possible. */ static bool GetProjVector( const bound::HRectBound& bound, const MatType& data, const arma::Col& points, AxisParallelProjVector& projVector, double& midValue); /** * Create a projection vector based on the given set of point. We efficiently * estimate the farthest pair of points in the given set: p and q, and then * consider the projection vector (q - p). * * @param bound The bound used for this node. * @param data The dataset used by the tree. * @param points Vector of indexes of points to be considered. * @param projVector Resulting projection vector. * @param midValue Mid value in the chosen projection. * @return Flag to determine if it is possible. */ template static bool GetProjVector( const BoundType& bound, const MatType& data, const arma::Col& points, ProjVector& projVector, double& midValue); }; } // namespace tree } // namespace mlpack // Include implementation. #include "space_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/space_split/space_split_impl.hpp000066400000000000000000000051711315013601400252600ustar00rootroot00000000000000/** * @file space_split_impl.hpp * @author Marcos Pividori * * Implementation of SpaceSplit, to create a projection vector based on a given * set of points. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
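 *
 * A usage sketch of the general overload (the metric and matrix types are
 * illustrative):
 *
 * @code
 * ProjVector projVector;
 * double midValue;
 * const bool ok = SpaceSplit<metric::EuclideanDistance, arma::mat>::
 *     GetProjVector(bound, data, points, projVector, midValue);
 * if (ok)
 *   Hyperplane<metric::EuclideanDistance> hyp(projVector, midValue);
 * @endcode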
*/ #ifndef MLPACK_CORE_TREE_SPILL_TREE_SPACE_SPLIT_IMPL_HPP #define MLPACK_CORE_TREE_SPILL_TREE_SPACE_SPLIT_IMPL_HPP #include "space_split.hpp" namespace mlpack { namespace tree { template bool SpaceSplit::GetProjVector( const bound::HRectBound& bound, const MatType& data, const arma::Col& /* points */, AxisParallelProjVector& projVector, double& midValue) { // Get the dimension that has the maximum width. size_t splitDim = data.n_rows; // Indicate invalid. double maxWidth = -1; for (size_t d = 0; d < data.n_rows; d++) { const double width = bound[d].Width(); if (width > maxWidth) { maxWidth = width; splitDim = d; } } if (maxWidth <= 0) // All these points are the same. return false; projVector = AxisParallelProjVector(splitDim); midValue = bound[splitDim].Mid(); return true; } template template bool SpaceSplit::GetProjVector( const BoundType& /* bound */, const MatType& data, const arma::Col& points, ProjVector& projVector, double& midValue) { MetricType metric; // Efficiently estimate the farthest pair of points in the given set. size_t fst = points[rand() % points.n_elem]; size_t snd = points[0]; double max = metric.Evaluate(data.col(fst), data.col(snd)); for (size_t i = 1; i < points.n_elem; i++) { double dist = metric.Evaluate(data.col(fst), data.col(points[i])); if (dist > max) { max = dist; snd = points[i]; } } std::swap(fst, snd); for (size_t i = 0; i < points.n_elem; i++) { double dist = metric.Evaluate(data.col(fst), data.col(points[i])); if (dist > max) { max = dist; snd = points[i]; } } if (max == 0) // All these points are the same. return false; // Calculate the normalized projection vector. projVector = ProjVector(data.col(snd) - data.col(fst)); arma::vec midPoint = (data.col(snd) + data.col(fst)) / 2; midValue = projVector.Project(midPoint); return true; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/spill_tree.hpp000066400000000000000000000016311315013601400215620ustar00rootroot00000000000000/** * @file spill_tree.hpp * @author Marcos Pividori * * Include all the necessary files to use the SpillTree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_HPP #define MLPACK_CORE_TREE_SPILL_TREE_HPP #include #include "bounds.hpp" #include "spill_tree/is_spill_tree.hpp" #include "spill_tree/spill_tree.hpp" #include "spill_tree/spill_single_tree_traverser.hpp" #include "spill_tree/spill_single_tree_traverser_impl.hpp" #include "spill_tree/spill_dual_tree_traverser.hpp" #include "spill_tree/spill_dual_tree_traverser_impl.hpp" #include "spill_tree/traits.hpp" #include "spill_tree/typedef.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/spill_tree/000077500000000000000000000000001315013601400210505ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/tree/spill_tree/is_spill_tree.hpp000066400000000000000000000023111315013601400244130ustar00rootroot00000000000000/** * @file is_spill_tree.hpp * * Definition of IsSpillTree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
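 *
 * A typical compile-time usage sketch:
 *
 * @code
 * template<typename TreeType>
 * void SomeTraversal(TreeType& node)
 * {
 *   if (IsSpillTree<TreeType>::value)
 *   {
 *     // Spill-tree-specific handling, e.g. defeatist traversal of
 *     // overlapping nodes.
 *   }
 * }
 * @endcode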
*/ #ifndef MLPACK_CORE_TREE_SPILL_TREE_IS_SPILL_TREE_HPP #define MLPACK_CORE_TREE_SPILL_TREE_IS_SPILL_TREE_HPP #include "spill_tree.hpp" namespace mlpack { namespace tree /** Trees and tree-building procedures. */ { // Useful struct when specific behaviour for SpillTrees is required. template struct IsSpillTree { static const bool value = false; }; // Specialization for SpillTree. template class HyperplaneType, template class SplitType> struct IsSpillTree> { static const bool value = true; }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/spill_tree/spill_dual_tree_traverser.hpp000066400000000000000000000065611315013601400270350ustar00rootroot00000000000000/** * @file spill_dual_tree_traverser.hpp * @author Ryan Curtin * @author Marcos Pividori * * Defines the SpillDualTreeTraverser for the SpillTree tree type. This is a * nested class of SpillTree which traverses two trees in a depth-first * manner with a given set of rules which indicate the branches which can be * pruned and the order in which to recurse. * The Defeatist template parameter determines if the traversers must do * defeatist search on overlapping nodes. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_SPILL_DUAL_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_SPILL_TREE_SPILL_DUAL_TREE_TRAVERSER_HPP #include #include "spill_tree.hpp" namespace mlpack { namespace tree { template class HyperplaneType, template class SplitType> template class SpillTree:: SpillDualTreeTraverser { public: /** * Instantiate the dual-tree traverser with the given rule set. */ SpillDualTreeTraverser(RuleType& rule); /** * Traverse the two trees. This does not reset the number of prunes. * * @param queryNode The query node to be traversed. * @param referenceNode The reference node to be traversed. * @param score The score of the current node combination. */ void Traverse(SpillTree& queryNode, SpillTree& referenceNode); //! Get the number of prunes. size_t NumPrunes() const { return numPrunes; } //! Modify the number of prunes. size_t& NumPrunes() { return numPrunes; } //! Get the number of visited combinations. size_t NumVisited() const { return numVisited; } //! Modify the number of visited combinations. size_t& NumVisited() { return numVisited; } //! Get the number of times a node combination was scored. size_t NumScores() const { return numScores; } //! Modify the number of times a node combination was scored. size_t& NumScores() { return numScores; } //! Get the number of times a base case was calculated. size_t NumBaseCases() const { return numBaseCases; } //! Modify the number of times a base case was calculated. size_t& NumBaseCases() { return numBaseCases; } private: //! Reference to the rules with which the trees will be traversed. RuleType& rule; //! The number of prunes. size_t numPrunes; //! The number of node combinations that have been visited during traversal. size_t numVisited; //! The number of times a node combination was scored. size_t numScores; //! The number of times a base case was calculated. size_t numBaseCases; //! Traversal information, held in the class so that it isn't continually //! being reallocated. 
typename RuleType::TraversalInfoType traversalInfo; }; } // namespace tree } // namespace mlpack // Include implementation. #include "spill_dual_tree_traverser_impl.hpp" #endif // MLPACK_CORE_TREE_SPILL_TREE_SPILL_DUAL_TREE_TRAVERSER_HPP mlpack-2.2.5/src/mlpack/core/tree/spill_tree/spill_dual_tree_traverser_impl.hpp000066400000000000000000000335451315013601400300600ustar00rootroot00000000000000/** * @file spill_dual_tree_traverser_impl.hpp * @author Ryan Curtin * @author Marcos Pividori * * Implementation of the SpillDualTreeTraverser for SpillTree. This is a way * to perform a dual-tree traversal of two trees. The trees must be the same * type. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_SPILL_DUAL_TREE_TRAVERSER_IMPL_HPP #define MLPACK_CORE_TREE_SPILL_TREE_SPILL_DUAL_TREE_TRAVERSER_IMPL_HPP // In case it hasn't been included yet. #include "spill_dual_tree_traverser.hpp" namespace mlpack { namespace tree { template class HyperplaneType, template class SplitType> template SpillTree:: SpillDualTreeTraverser::SpillDualTreeTraverser( RuleType& rule) : rule(rule), numPrunes(0), numVisited(0), numScores(0), numBaseCases(0) { /* Nothing to do. */ } template class HyperplaneType, template class SplitType> template void SpillTree:: SpillDualTreeTraverser::Traverse( SpillTree& queryNode, SpillTree& referenceNode) { // Increment the visit counter. ++numVisited; // Store the current traversal info. traversalInfo = rule.TraversalInfo(); // If both are leaves, we must evaluate the base case. if (queryNode.IsLeaf() && referenceNode.IsLeaf()) { // Loop through each of the points in each node. const size_t queryEnd = queryNode.NumPoints(); const size_t refEnd = referenceNode.NumPoints(); for (size_t query = 0; query < queryEnd; ++query) { const size_t queryIndex = queryNode.Point(query); // See if we need to investigate this point. Restore the traversal // information first. rule.TraversalInfo() = traversalInfo; const double childScore = rule.Score(queryIndex, referenceNode); if (childScore == DBL_MAX) continue; // We can't improve this particular point. for (size_t ref = 0; ref < refEnd; ++ref) rule.BaseCase(queryIndex, referenceNode.Point(ref)); numBaseCases += refEnd; } } else if (((!queryNode.IsLeaf()) && referenceNode.IsLeaf()) || (queryNode.NumDescendants() > 3 * referenceNode.NumDescendants() && !queryNode.IsLeaf() && !referenceNode.IsLeaf())) { // We have to recurse down the query node. In this case the recursion order // does not matter. const double leftScore = rule.Score(*queryNode.Left(), referenceNode); ++numScores; if (leftScore != DBL_MAX) Traverse(*queryNode.Left(), referenceNode); else ++numPrunes; // Before recursing, we have to set the traversal information correctly. rule.TraversalInfo() = traversalInfo; const double rightScore = rule.Score(*queryNode.Right(), referenceNode); ++numScores; if (rightScore != DBL_MAX) Traverse(*queryNode.Right(), referenceNode); else ++numPrunes; } else if (queryNode.IsLeaf() && (!referenceNode.IsLeaf())) { if (Defeatist && referenceNode.Overlap()) { // If referenceNode is a overlapping node let's do defeatist search. 
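      // Note: rule.GetBestChild() is expected to return
      // referenceNode.NumChildren() when the query node straddles the
      // splitting hyperplane, signalling that no single child is safe to
      // prefer (that case is handled in the else branch below).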
size_t bestChild = rule.GetBestChild(queryNode, referenceNode); if (bestChild < referenceNode.NumChildren()) { Traverse(queryNode, referenceNode.Child(bestChild)); ++numPrunes; } else { // If we can't decide which child node to traverse, this means that // queryNode is at both sides of the splitting hyperplane. So, as // queryNode is a leafNode, all we can do is single tree search for each // point in the query node. const size_t queryEnd = queryNode.NumPoints(); DefeatistSingleTreeTraverser st(rule); // Loop through each of the points in query node. for (size_t query = 0; query < queryEnd; ++query) { const size_t queryIndex = queryNode.Point(query); // See if we need to investigate this point. const double childScore = rule.Score(queryIndex, referenceNode); if (childScore == DBL_MAX) continue; // We can't improve this particular point. st.Traverse(queryIndex, referenceNode); } } } else { // We have to recurse down the reference node. In this case the recursion // order does matter. Before recursing, though, we have to set the // traversal information correctly. double leftScore = rule.Score(queryNode, *referenceNode.Left()); typename RuleType::TraversalInfoType leftInfo = rule.TraversalInfo(); rule.TraversalInfo() = traversalInfo; double rightScore = rule.Score(queryNode, *referenceNode.Right()); numScores += 2; if (leftScore < rightScore) { // Recurse to the left. Restore the left traversal info. Store the // right traversal info. traversalInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(queryNode, *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(queryNode, *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. rule.TraversalInfo() = traversalInfo; Traverse(queryNode, *referenceNode.Right()); } else ++numPrunes; } else if (rightScore < leftScore) { // Recurse to the right. Traverse(queryNode, *referenceNode.Right()); // Is it still valid to recurse to the left? leftScore = rule.Rescore(queryNode, *referenceNode.Left(), leftScore); if (leftScore != DBL_MAX) { // Restore the left traversal info. rule.TraversalInfo() = leftInfo; Traverse(queryNode, *referenceNode.Left()); } else ++numPrunes; } else // leftScore is equal to rightScore. { if (leftScore == DBL_MAX) { numPrunes += 2; } else { // Choose the left first. Restore the left traversal info. Store the // right traversal info. traversalInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(queryNode, *referenceNode.Left()); rightScore = rule.Rescore(queryNode, *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. rule.TraversalInfo() = traversalInfo; Traverse(queryNode, *referenceNode.Right()); } else ++numPrunes; } } } } else { if (Defeatist && referenceNode.Overlap()) { // If referenceNode is a overlapping node let's do defeatist search. size_t bestChild = rule.GetBestChild(*queryNode.Left(), referenceNode); if (bestChild < referenceNode.NumChildren()) { Traverse(*queryNode.Left(), referenceNode.Child(bestChild)); ++numPrunes; } else { // If we can't decide which child node to traverse, this means that // queryNode.Left() is at both sides of the splitting hyperplane. So, // let's recurse down only the query node. 
Traverse(*queryNode.Left(), referenceNode); } bestChild = rule.GetBestChild(*queryNode.Right(), referenceNode); if (bestChild < referenceNode.NumChildren()) { Traverse(*queryNode.Right(), referenceNode.Child(bestChild)); ++numPrunes; } else { // If we can't decide which child node to traverse, this means that // queryNode.Right() is at both sides of the splitting hyperplane. So, // let's recurse down only the query node. Traverse(*queryNode.Right(), referenceNode); } } else { // We have to recurse down both query and reference nodes. Because the // query descent order does not matter, we will go to the left query child // first. Before recursing, we have to set the traversal information // correctly. double leftScore = rule.Score(*queryNode.Left(), *referenceNode.Left()); typename RuleType::TraversalInfoType leftInfo = rule.TraversalInfo(); rule.TraversalInfo() = traversalInfo; double rightScore = rule.Score(*queryNode.Left(), *referenceNode.Right()); typename RuleType::TraversalInfoType rightInfo; numScores += 2; if (leftScore < rightScore) { // Recurse to the left. Restore the left traversal info. Store the // right traversal info. rightInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Left(), *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(*queryNode.Left(), *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. rule.TraversalInfo() = rightInfo; Traverse(*queryNode.Left(), *referenceNode.Right()); } else ++numPrunes; } else if (rightScore < leftScore) { // Recurse to the right. Traverse(*queryNode.Left(), *referenceNode.Right()); // Is it still valid to recurse to the left? leftScore = rule.Rescore(*queryNode.Left(), *referenceNode.Left(), leftScore); if (leftScore != DBL_MAX) { // Restore the left traversal info. rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Left(), *referenceNode.Left()); } else ++numPrunes; } else { if (leftScore == DBL_MAX) { numPrunes += 2; } else { // Choose the left first. Restore the left traversal info and store // the right traversal info. rightInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Left(), *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(*queryNode.Left(), *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal information. rule.TraversalInfo() = rightInfo; Traverse(*queryNode.Left(), *referenceNode.Right()); } else ++numPrunes; } } // Restore the main traversal information. rule.TraversalInfo() = traversalInfo; // Now recurse down the right query node. leftScore = rule.Score(*queryNode.Right(), *referenceNode.Left()); leftInfo = rule.TraversalInfo(); rule.TraversalInfo() = traversalInfo; rightScore = rule.Score(*queryNode.Right(), *referenceNode.Right()); numScores += 2; if (leftScore < rightScore) { // Recurse to the left. Restore the left traversal info. Store the // right traversal info. rightInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Right(), *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(*queryNode.Right(), *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. rule.TraversalInfo() = rightInfo; Traverse(*queryNode.Right(), *referenceNode.Right()); } else ++numPrunes; } else if (rightScore < leftScore) { // Recurse to the right. 
Traverse(*queryNode.Right(), *referenceNode.Right()); // Is it still valid to recurse to the left? leftScore = rule.Rescore(*queryNode.Right(), *referenceNode.Left(), leftScore); if (leftScore != DBL_MAX) { // Restore the left traversal info. rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Right(), *referenceNode.Left()); } else ++numPrunes; } else { if (leftScore == DBL_MAX) { numPrunes += 2; } else { // Choose the left first. Restore the left traversal info. Store the // right traversal info. rightInfo = rule.TraversalInfo(); rule.TraversalInfo() = leftInfo; Traverse(*queryNode.Right(), *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(*queryNode.Right(), *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) { // Restore the right traversal info. rule.TraversalInfo() = rightInfo; Traverse(*queryNode.Right(), *referenceNode.Right()); } else ++numPrunes; } } } } } } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_SPILL_TREE_SPILL_DUAL_TREE_TRAVERSER_IMPL_HPP mlpack-2.2.5/src/mlpack/core/tree/spill_tree/spill_single_tree_traverser.hpp000066400000000000000000000043531315013601400273660ustar00rootroot00000000000000/** * @file spill_single_tree_traverser.hpp * @author Ryan Curtin * @author Marcos Pividori * * A nested class of SpillTree which traverses the entire tree with a * given set of rules which indicate the branches which can be pruned and the * order in which to recurse. This traverser is a depth-first traverser. * The Defeatist template parameter determines if the traversers must do * defeatist search on overlapping nodes. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_SPILL_SINGLE_TREE_TRAVERSER_HPP #define MLPACK_CORE_TREE_SPILL_TREE_SPILL_SINGLE_TREE_TRAVERSER_HPP #include #include "spill_tree.hpp" namespace mlpack { namespace tree { template class HyperplaneType, template class SplitType> template class SpillTree:: SpillSingleTreeTraverser { public: /** * Instantiate the single tree traverser with the given rule set. */ SpillSingleTreeTraverser(RuleType& rule); /** * Traverse the tree with the given point. * * @param queryIndex The index of the point in the query set which is being * used as the query point. * @param referenceNode The tree node to be traversed. */ void Traverse(const size_t queryIndex, SpillTree& referenceNode); //! Get the number of prunes. size_t NumPrunes() const { return numPrunes; } //! Modify the number of prunes. size_t& NumPrunes() { return numPrunes; } private: //! Reference to the rules with which the tree will be traversed. RuleType& rule; //! The number of nodes which have been pruned during traversal. size_t numPrunes; }; } // namespace tree } // namespace mlpack // Include implementation. #include "spill_single_tree_traverser_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/spill_tree/spill_single_tree_traverser_impl.hpp000066400000000000000000000102171315013601400304030ustar00rootroot00000000000000/** * @file spill_single_tree_traverser_impl.hpp * @author Ryan Curtin * @author Marcos Pividori * * A nested class of SpillTree which traverses the entire tree with a * given set of rules which indicate the branches which can be pruned and the * order in which to recurse. 
This traverser is a depth-first traverser. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_SPILL_SINGLE_TREE_TRAVERSER_IMPL_HPP #define MLPACK_CORE_TREE_SPILL_TREE_SPILL_SINGLE_TREE_TRAVERSER_IMPL_HPP // In case it hasn't been included yet. #include "spill_single_tree_traverser.hpp" namespace mlpack { namespace tree { template class HyperplaneType, template class SplitType> template SpillTree:: SpillSingleTreeTraverser::SpillSingleTreeTraverser( RuleType& rule) : rule(rule), numPrunes(0) { /* Nothing to do. */ } template class HyperplaneType, template class SplitType> template void SpillTree:: SpillSingleTreeTraverser::Traverse( const size_t queryIndex, SpillTree& referenceNode) { // If we are a leaf, run the base case as necessary. if (referenceNode.IsLeaf()) { for (size_t i = 0; i < referenceNode.NumPoints(); ++i) rule.BaseCase(queryIndex, referenceNode.Point(i)); } else { if (Defeatist && referenceNode.Overlap()) { // If referenceNode is a overlapping node we do defeatist search. size_t bestChild = rule.GetBestChild(queryIndex, referenceNode); Traverse(queryIndex, referenceNode.Child(bestChild)); ++numPrunes; } else { // If either score is DBL_MAX, we do not recurse into that node. double leftScore = rule.Score(queryIndex, *referenceNode.Left()); double rightScore = rule.Score(queryIndex, *referenceNode.Right()); if (leftScore < rightScore) { // Recurse to the left. Traverse(queryIndex, *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(queryIndex, *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) Traverse(queryIndex, *referenceNode.Right()); // Recurse to the right. else ++numPrunes; } else if (rightScore < leftScore) { // Recurse to the right. Traverse(queryIndex, *referenceNode.Right()); // Is it still valid to recurse to the left? leftScore = rule.Rescore(queryIndex, *referenceNode.Left(), leftScore); if (leftScore != DBL_MAX) Traverse(queryIndex, *referenceNode.Left()); // Recurse to the left. else ++numPrunes; } else // leftScore is equal to rightScore. { if (leftScore == DBL_MAX) { numPrunes += 2; // Pruned both left and right. } else { // Choose the left first. Traverse(queryIndex, *referenceNode.Left()); // Is it still valid to recurse to the right? rightScore = rule.Rescore(queryIndex, *referenceNode.Right(), rightScore); if (rightScore != DBL_MAX) Traverse(queryIndex, *referenceNode.Right()); else ++numPrunes; } } } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/spill_tree/spill_tree.hpp000066400000000000000000000420161315013601400237260ustar00rootroot00000000000000/** * @file spill_tree.hpp * * Definition of generalized hybrid spill tree (SpillTree). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_SPILL_TREE_HPP #define MLPACK_CORE_TREE_SPILL_TREE_SPILL_TREE_HPP #include #include "../space_split/midpoint_space_split.hpp" #include "../statistic.hpp" namespace mlpack { namespace tree /** Trees and tree-building procedures. 
*/ { /** * A hybrid spill tree is a variant of binary space trees in which the children * of a node can "spill over" each other, and contain shared datapoints. * * Two new separating planes lplane and rplane are defined, both of which are * parallel to the original decision boundary and at a distance tau from it. * The region between lplane and rplane is called "overlapping buffer". * * For each node, we first split the points considering the overlapping buffer. * If either of its children contains more than rho fraction of the total points * we undo the overlapping splitting. Instead a conventional partition is used. * In this way, we can ensure that each split reduces the number of points of a * node by at least a constant factor. * * This particular tree does not allow growth, so you cannot add or delete nodes * from it. If you need to add or delete a node, the better procedure is to * rebuild the tree entirely. * * Three runtime parameters are required in the constructor: * - maxLeafSize: Max leaf size to be used. * - tau: Overlapping size. * - rho: Balance threshold. * * For more information on spill trees, see * * @code * @inproceedings{ * author = {Ting Liu, Andrew W. Moore, Alexander Gray and Ke Yang}, * title = {An Investigation of Practical Approximate Nearest Neighbor * Algorithms}, * booktitle = {Advances in Neural Information Processing Systems 17}, * year = {2005}, * pages = {825--832} * } * @endcode * * @tparam MetricType The metric used for tree-building. * @tparam StatisticType Extra data contained in the node. See statistic.hpp * for the necessary skeleton interface. * @tparam MatType The dataset class. * @tparam HyperplaneType The splitting hyperplane class. * @tparam SplitType The class that partitions the dataset/points at a * particular node into two parts. Its definition decides the way this split * is done. */ template class HyperplaneType = AxisOrthogonalHyperplane, template class SplitType = MidpointSpaceSplit> class SpillTree { public: //! So other classes can use TreeType::Mat. typedef MatType Mat; //! The type of element held in MatType. typedef typename MatType::elem_type ElemType; //! The bound type. typedef typename HyperplaneType::BoundType BoundType; private: //! The left child node. SpillTree* left; //! The right child node. SpillTree* right; //! The parent node (NULL if this is the root of the tree). SpillTree* parent; //! The number of points of the dataset contained in this node (and its //! children). size_t count; //! The list of indexes of points contained in this node (non-null for //! leaf nodes). arma::Col* pointsIndex; //! Flag to distinguish overlapping nodes from non-overlapping nodes. bool overlappingNode; //! Splitting hyperplane represented by this node. HyperplaneType hyperplane; //! The bound object for this node. BoundType bound; //! Any extra data contained in the node. StatisticType stat; //! The distance from the centroid of this node to the centroid of the parent. ElemType parentDistance; //! The worst possible distance to the furthest descendant, cached to speed //! things up. ElemType furthestDescendantDistance; //! The minimum distance from the center to any edge of the bound. ElemType minimumBoundDistance; //! The dataset. If we are the root of the tree, we own the dataset and must //! delete it. const MatType* dataset; //! If true, we own the dataset and need to destroy it in the destructor. bool localDataset; //! A generic single-tree traverser for hybrid spill trees; see //! spill_single_tree_traverser.hpp for implementation. 
The Defeatist //! template parameter determines if the traverser must do defeatist search on //! overlapping nodes. template class SpillSingleTreeTraverser; //! A generic dual-tree traverser for hybrid spill trees; see //! spill_dual_tree_traverser.hpp for implementation. The Defeatist //! template parameter determines if the traverser must do defeatist search on //! overlapping nodes. template class SpillDualTreeTraverser; public: //! A single-tree traverser for hybrid spill trees. template using SingleTreeTraverser = SpillSingleTreeTraverser; //! A defeatist single-tree traverser for hybrid spill trees. template using DefeatistSingleTreeTraverser = SpillSingleTreeTraverser; //! A dual-tree traverser for hybrid spill trees. template using DualTreeTraverser = SpillDualTreeTraverser; //! A defeatist dual-tree traverser for hybrid spill trees. template using DefeatistDualTreeTraverser = SpillDualTreeTraverser; /** * Construct this as the root node of a hybrid spill tree using the given * dataset. The dataset will not be modified during the building procedure * (unlike BinarySpaceTree). * * @param data Dataset to create tree from. * @param tau Overlapping size. * @param maxLeafSize Size of each leaf in the tree. * @param rho Balance threshold. */ SpillTree(const MatType& data, const double tau = 0, const size_t maxLeafSize = 20, const double rho = 0.7); /** * Construct this as the root node of a hybrid spill tree using the given * dataset. This will take ownership of the data matrix; if you don't want * this, consider using the constructor that takes a const reference to a * dataset. * * @param data Dataset to create tree from. * @param tau Overlapping size. * @param maxLeafSize Size of each leaf in the tree. * @param rho Balance threshold. */ SpillTree(MatType&& data, const double tau = 0, const size_t maxLeafSize = 20, const double rho = 0.7); /** * Construct this node as a child of the given parent, including the given * list of points. This is used for recursive tree-building by the other * constructors which don't specify point indices. * * @param parent Parent of this node. * @param points Vector of indexes of points to be included in this node. * @param tau Overlapping size. * @param maxLeafSize Size of each leaf in the tree. * @param rho Balance threshold. */ SpillTree(SpillTree* parent, arma::Col& points, const double tau = 0, const size_t maxLeafSize = 20, const double rho = 0.7); /** * Create a hybrid spill tree by copying the other tree. Be careful! This * can take a long time and use a lot of memory. * * @param other tree to be replicated. */ SpillTree(const SpillTree& other); /** * Move constructor for a SpillTree; possess all the members of the given * tree. * * @param other tree to be moved. */ SpillTree(SpillTree&& other); /** * Initialize the tree from a boost::serialization archive. * * @param ar Archive to load tree from. Must be an iarchive, not an oarchive. */ template SpillTree( Archive& ar, const typename boost::enable_if::type* = 0); /** * Deletes this node, deallocating the memory for the children and calling * their destructors in turn. This will invalidate any pointers or references * to any nodes which are children of this one. */ ~SpillTree(); //! Return the bound object for this node. const BoundType& Bound() const { return bound; } //! Return the bound object for this node. BoundType& Bound() { return bound; } //! Return the statistic object for this node. const StatisticType& Stat() const { return stat; } //! Return the statistic object for this node. 
StatisticType& Stat() { return stat; } //! Return whether or not this node is a leaf (true if it has no children). bool IsLeaf() const; //! Gets the left child of this node. SpillTree* Left() const { return left; } //! Modify the left child of this node. SpillTree*& Left() { return left; } //! Gets the right child of this node. SpillTree* Right() const { return right; } //! Modify the right child of this node. SpillTree*& Right() { return right; } //! Gets the parent of this node. SpillTree* Parent() const { return parent; } //! Modify the parent of this node. SpillTree*& Parent() { return parent; } //! Get the dataset which the tree is built on. const MatType& Dataset() const { return *dataset; } //! Distinguish overlapping nodes from non-overlapping nodes. bool Overlap() const { return overlappingNode; } //! Get the Hyperplane instance. const HyperplaneType& Hyperplane() const { return hyperplane; } //! Get the metric that the tree uses. MetricType Metric() const { return MetricType(); } //! Return the number of children in this node. size_t NumChildren() const; /** * Return the index of the nearest child node to the given query point (this * is an efficient estimation based on the splitting hyperplane, the node * returned is not necessarily the nearest). If this is a leaf node, it will * return NumChildren() (invalid index). */ template size_t GetNearestChild( const VecType& point, typename boost::enable_if >::type* = 0); /** * Return the index of the furthest child node to the given query point (this * is an efficient estimation based on the splitting hyperplane, the node * returned is not necessarily the furthest). If this is a leaf node, it will * return NumChildren() (invalid index). */ template size_t GetFurthestChild( const VecType& point, typename boost::enable_if >::type* = 0); /** * Return the index of the nearest child node to the given query node (this * is an efficient estimation based on the splitting hyperplane, the node * returned is not necessarily the nearest). If it can't decide it will * return NumChildren() (invalid index). */ size_t GetNearestChild(const SpillTree& queryNode); /** * Return the index of the furthest child node to the given query node (this * is an efficient estimation based on the splitting hyperplane, the node * returned is not necessarily the furthest). If it can't decide it will * return NumChildren() (invalid index). */ size_t GetFurthestChild(const SpillTree& queryNode); /** * Return the furthest distance to a point held in this node. If this is not * a leaf node, then the distance is 0 because the node holds no points. */ ElemType FurthestPointDistance() const; /** * Return the furthest possible descendant distance. This returns the maximum * distance from the centroid to the edge of the bound and not the empirical * quantity which is the actual furthest descendant distance. So the actual * furthest descendant distance may be less than what this method returns (but * it will never be greater than this). */ ElemType FurthestDescendantDistance() const; //! Return the minimum distance from the center of the node to any bound edge. ElemType MinimumBoundDistance() const; //! Return the distance from the center of this node to the center of the //! parent node. ElemType ParentDistance() const { return parentDistance; } //! Modify the distance from the center of this node to the center of the //! parent node. ElemType& ParentDistance() { return parentDistance; } /** * Return the specified child (0 will be left, 1 will be right). 
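   *
   * For illustration, a minimal sketch (the node name `node` is assumed here
   * and is not part of this API):
   *
   * @code
   * if (node.NumChildren() == 2)
   * {
   *   auto& leftChild = node.Child(0);
   *   auto& rightChild = node.Child(1);
   * }
   * @endcode
   *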
If the index * is greater than 1, this will return the right child. * * @param child Index of child to return. */ SpillTree& Child(const size_t child) const; SpillTree*& ChildPtr(const size_t child) { return (child == 0) ? left : right; } //! Return the number of points in this node (0 if not a leaf). size_t NumPoints() const; /** * Return the number of descendants of this node. For a non-leaf spill tree, * this is the number of points at the descendant leaves. For a leaf, this is * the number of points in the leaf. */ size_t NumDescendants() const; /** * Return the index (with reference to the dataset) of a particular descendant * of this node. The index should be greater than zero but less than the * number of descendants. * * @param index Index of the descendant. */ size_t Descendant(const size_t index) const; /** * Return the index (with reference to the dataset) of a particular point in * this node. This will happily return invalid indices if the given index is * greater than the number of points in this node (obtained with NumPoints()) * -- be careful. * * @param index Index of point for which a dataset index is wanted. */ size_t Point(const size_t index) const; //! Return the minimum distance to another node. ElemType MinDistance(const SpillTree& other) const { return bound.MinDistance(other.Bound()); } //! Return the maximum distance to another node. ElemType MaxDistance(const SpillTree& other) const { return bound.MaxDistance(other.Bound()); } //! Return the minimum and maximum distance to another node. math::RangeType RangeDistance(const SpillTree& other) const { return bound.RangeDistance(other.Bound()); } //! Return the minimum distance to another point. template ElemType MinDistance(const VecType& point, typename boost::enable_if >::type* = 0) const { return bound.MinDistance(point); } //! Return the maximum distance to another point. template ElemType MaxDistance(const VecType& point, typename boost::enable_if >::type* = 0) const { return bound.MaxDistance(point); } //! Return the minimum and maximum distance to another point. template math::RangeType RangeDistance(const VecType& point, typename boost::enable_if >::type* = 0) const { return bound.RangeDistance(point); } //! Returns false: this tree type does not have self children. static bool HasSelfChildren() { return false; } //! Store the center of the bounding region in the given vector. void Center(arma::vec& center) { bound.Center(center); } private: /** * Splits the current node, assigning its left and right children recursively. * * @param points Vector of indexes of points to be included in this node. * @param maxLeafSize Maximum number of points held in a leaf. * @param tau Overlapping size. * @param rho Balance threshold. */ void SplitNode(arma::Col& points, const size_t maxLeafSize, const double tau, const double rho); /** * Split the list of points. * * @param tau Overlapping size. * @param rho Balance threshold. * @param points Vector of indexes of points to be included. * @param leftPoints Indexes of points to be included in left child. * @param rightPoints Indexes of points to be included in right child. * @return Flag to know if the overlapping buffer was included. */ bool SplitPoints(const double tau, const double rho, const arma::Col& points, arma::Col& leftPoints, arma::Col& rightPoints); protected: /** * A default constructor. This is meant to only be used with * boost::serialization, which is allowed with the friend declaration below. * This does not return a valid tree! 
The method must be protected, so that * the serialization shim can work with the default constructor. */ SpillTree(); //! Friend access is given for the default constructor. friend class boost::serialization::access; public: /** * Serialize the tree. */ template void Serialize(Archive& ar, const unsigned int version); }; } // namespace tree } // namespace mlpack // Include implementation. #include "spill_tree_impl.hpp" // Include everything else, if necessary. #include "../spill_tree.hpp" #endif mlpack-2.2.5/src/mlpack/core/tree/spill_tree/spill_tree_impl.hpp000066400000000000000000000637461315013601400247640ustar00rootroot00000000000000/** * @file spill_tree_impl.hpp * * Implementation of generalized hybrid spill tree (SpillTree). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_SPILL_TREE_IMPL_HPP #define MLPACK_CORE_TREE_SPILL_TREE_SPILL_TREE_IMPL_HPP // In case it wasn't included already for some reason. #include "spill_tree.hpp" namespace mlpack { namespace tree { template class HyperplaneType, template class SplitType> SpillTree:: SpillTree( const MatType& data, const double tau, const size_t maxLeafSize, const double rho) : left(NULL), right(NULL), parent(NULL), count(0), pointsIndex(NULL), overlappingNode(false), hyperplane(), bound(data.n_rows), parentDistance(0), // Parent distance for the root is 0: it has no parent. dataset(&data), localDataset(false) { arma::Col points; if (dataset->n_cols > 0) // Fill points with all possible indexes: 0 .. (dataset->n_cols - 1). points = arma::linspace>(0, dataset->n_cols - 1, dataset->n_cols); // Do the actual splitting of this node. SplitNode(points, maxLeafSize, tau, rho); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); } template class HyperplaneType, template class SplitType> SpillTree:: SpillTree( MatType&& data, const double tau, const size_t maxLeafSize, const double rho) : left(NULL), right(NULL), parent(NULL), count(0), pointsIndex(NULL), overlappingNode(false), hyperplane(), bound(data.n_rows), parentDistance(0), // Parent distance for the root is 0: it has no parent. dataset(new MatType(std::move(data))), localDataset(true) { arma::Col points; if (dataset->n_cols > 0) // Fill points with all possible indexes: 0 .. (dataset->n_cols - 1). points = arma::linspace>(0, dataset->n_cols - 1, dataset->n_cols); // Do the actual splitting of this node. SplitNode(points, maxLeafSize, tau, rho); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); } template class HyperplaneType, template class SplitType> SpillTree:: SpillTree( SpillTree* parent, arma::Col& points, const double tau, const size_t maxLeafSize, const double rho) : left(NULL), right(NULL), parent(parent), count(0), pointsIndex(NULL), overlappingNode(false), hyperplane(), bound(parent->Dataset().n_rows), dataset(&parent->Dataset()), // Point to the parent's dataset. localDataset(false) { // Perform the actual splitting. SplitNode(points, maxLeafSize, tau, rho); // Create the statistic depending on if we are a leaf or not. stat = StatisticType(*this); } /** * Create a hybrid spill tree by copying the other tree. Be careful! This can * take a long time and use a lot of memory. 
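 *
 * A minimal usage sketch (the template arguments and the names `dataset` and
 * `tree` are illustrative assumptions, not requirements of this constructor):
 *
 * @code
 * arma::mat dataset(3, 1000, arma::fill::randu);
 * SpillTree<metric::EuclideanDistance, EmptyStatistic, arma::mat>
 *     tree(dataset, 0.1);
 * SpillTree<metric::EuclideanDistance, EmptyStatistic, arma::mat>
 *     deepCopy(tree); // Invokes this copy constructor.
 * @endcode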
*/ template class HyperplaneType, template class SplitType> SpillTree:: SpillTree(const SpillTree& other) : left(NULL), right(NULL), parent(other.parent), count(other.count), pointsIndex(NULL), overlappingNode(other.overlappingNode), hyperplane(other.hyperplane), bound(other.bound), stat(other.stat), parentDistance(other.parentDistance), furthestDescendantDistance(other.furthestDescendantDistance), // Copy matrix, but only if we are the root and the other tree has its own // copy of the dataset. dataset((other.parent == NULL && other.localDataset) ? new MatType(*other.dataset) : other.dataset), localDataset(other.parent == NULL && other.localDataset) { // Create left and right children (if any). if (other.Left()) { left = new SpillTree(*other.Left()); left->Parent() = this; // Set parent to this, not other tree. } if (other.Right()) { right = new SpillTree(*other.Right()); right->Parent() = this; // Set parent to this, not other tree. } // If vector of indexes, copy it. if (other.pointsIndex) pointsIndex = new arma::Col(*other.pointsIndex); // Propagate matrix, but only if we are the root. if (parent == NULL && localDataset) { std::queue queue; if (left) queue.push(left); if (right) queue.push(right); while (!queue.empty()) { SpillTree* node = queue.front(); queue.pop(); node->dataset = dataset; if (node->left) queue.push(node->left); if (node->right) queue.push(node->right); } } } /** * Move constructor. */ template class HyperplaneType, template class SplitType> SpillTree:: SpillTree(SpillTree&& other) : left(other.left), right(other.right), parent(other.parent), count(other.count), pointsIndex(other.pointsIndex), overlappingNode(other.overlappingNode), hyperplane(other.hyperplane), bound(std::move(other.bound)), stat(std::move(other.stat)), parentDistance(other.parentDistance), furthestDescendantDistance(other.furthestDescendantDistance), minimumBoundDistance(other.minimumBoundDistance), dataset(other.dataset), localDataset(other.localDataset) { // Now we are a clone of the other tree. But we must also clear the other // tree's contents, so it doesn't delete anything when it is destructed. other.left = NULL; other.right = NULL; other.count = 0; other.pointsIndex = NULL; other.parentDistance = 0.0; other.furthestDescendantDistance = 0.0; other.minimumBoundDistance = 0.0; other.dataset = NULL; other.localDataset = false; //Set new parent. if (left) left->parent = this; if (right) right->parent = this; } /** * Initialize the tree from an archive. */ template class HyperplaneType, template class SplitType> template SpillTree:: SpillTree( Archive& ar, const typename boost::enable_if::type*) : SpillTree() // Create an empty SpillTree. { // We've delegated to the constructor which gives us an empty tree, and now we // can serialize from it. ar >> data::CreateNVP(*this, "tree"); } /** * Deletes this node, deallocating the memory for the children and calling their * destructors in turn. This will invalidate any pointers or references to any * nodes which are children of this one. */ template class HyperplaneType, template class SplitType> SpillTree:: ~SpillTree() { delete left; delete right; delete pointsIndex; // If we're the root and we own the dataset, delete it. if (!parent && localDataset) delete dataset; } template class HyperplaneType, template class SplitType> inline bool SpillTree::IsLeaf() const { return !left; } /** * Returns the number of children in this node. 
*/ template class HyperplaneType, template class SplitType> inline size_t SpillTree::NumChildren() const { if (left && right) return 2; if (left) return 1; return 0; } /** * Return the index of the nearest child node to the given query point (this * is an efficient estimation based on the splitting hyperplane, the node * returned is not necessarily the nearest). If this is a leaf node, it will * return NumChildren() (invalid index). */ template class HyperplaneType, template class SplitType> template size_t SpillTree::GetNearestChild( const VecType& point, typename boost::enable_if >::type*) { if (IsLeaf() || !left || !right) return 0; if (hyperplane.Left(point)) return 0; return 1; } /** * Return the index of the furthest child node to the given query point (this * is an efficient estimation based on the splitting hyperplane, the node * returned is not necessarily the furthest). If this is a leaf node, it will * return NumChildren() (invalid index). */ template class HyperplaneType, template class SplitType> template size_t SpillTree::GetFurthestChild( const VecType& point, typename boost::enable_if >::type*) { if (IsLeaf() || !left || !right) return 0; if (hyperplane.Left(point)) return 1; return 0; } /** * Return the index of the nearest child node to the given query node (this * is an efficient estimation based on the splitting hyperplane, the node * returned is not necessarily the nearest). If it can't decide it will * return NumChildren() (invalid index). */ template class HyperplaneType, template class SplitType> size_t SpillTree::GetNearestChild(const SpillTree& queryNode) { if (IsLeaf() || !left || !right) return 0; if (hyperplane.Left(queryNode.Bound())) return 0; if (hyperplane.Right(queryNode.Bound())) return 1; // Can't decide. return 2; } /** * Return the index of the furthest child node to the given query point (this * is an efficient estimation based on the splitting hyperplane, the node * returned is not necessarily the furthest). If this is a leaf node, it will * return NumChildren() (invalid index). */ template class HyperplaneType, template class SplitType> size_t SpillTree::GetFurthestChild(const SpillTree& queryNode) { if (IsLeaf() || !left || !right) return 0; if (hyperplane.Left(queryNode.Bound())) return 1; if (hyperplane.Right(queryNode.Bound())) return 0; // Can't decide. return 2; } /** * Return a bound on the furthest point in the node from the center. This * returns 0 unless the node is a leaf. */ template class HyperplaneType, template class SplitType> inline typename SpillTree::ElemType SpillTree:: FurthestPointDistance() const { if (!IsLeaf()) return 0.0; // Otherwise return the distance from the center to a corner of the bound. return 0.5 * bound.Diameter(); } /** * Return the furthest possible descendant distance. This returns the maximum * distance from the center to the edge of the bound and not the empirical * quantity which is the actual furthest descendant distance. So the actual * furthest descendant distance may be less than what this method returns (but * it will never be greater than this). */ template class HyperplaneType, template class SplitType> inline typename SpillTree::ElemType SpillTree:: FurthestDescendantDistance() const { return furthestDescendantDistance; } //! Return the minimum distance from the center to any bound edge. template class HyperplaneType, template class SplitType> inline typename SpillTree::ElemType SpillTree:: MinimumBoundDistance() const { return bound.MinWidth() / 2.0; } /** * Return the specified child. 
*/ template class HyperplaneType, template class SplitType> inline SpillTree& SpillTree:: Child(const size_t child) const { if (child == 0) return *left; else return *right; } /** * Return the number of points contained in this node. */ template class HyperplaneType, template class SplitType> inline size_t SpillTree::NumPoints() const { if (IsLeaf()) return count; return 0; } /** * Return the number of descendants contained in the node. */ template class HyperplaneType, template class SplitType> inline size_t SpillTree::NumDescendants() const { return count; } /** * Return the index of a particular descendant contained in this node. */ template class HyperplaneType, template class SplitType> inline size_t SpillTree::Descendant(const size_t index) const { if (IsLeaf()) return (*pointsIndex)[index]; size_t num = left->NumDescendants(); if (index < num) return left->Descendant(index); if (right) return right->Descendant(index - num); // This should never happen. return (size_t() - 1); } /** * Return the index of a particular point contained in this node. */ template class HyperplaneType, template class SplitType> inline size_t SpillTree::Point(const size_t index) const { if (IsLeaf()) return (*pointsIndex)[index]; // This should never happen. return (size_t() - 1); } template class HyperplaneType, template class SplitType> void SpillTree:: SplitNode(arma::Col& points, const size_t maxLeafSize, const double tau, const double rho) { // We need to expand the bounds of this node properly. for (size_t i = 0; i < points.n_elem; i++) bound |= dataset->col(points[i]); // Calculate the furthest descendant distance. furthestDescendantDistance = 0.5 * bound.Diameter(); // Now, check if we need to split at all. if (points.n_elem <= maxLeafSize) { pointsIndex = new arma::Col(); pointsIndex->swap(points); count = pointsIndex->n_elem; return; // We can't split this. } const bool split = SplitType::SplitSpace(bound, *dataset, points, hyperplane); // The node may not be always split. For instance, if all the points are the // same, we can't split them. if (!split) { pointsIndex = new arma::Col(); pointsIndex->swap(points); count = pointsIndex->n_elem; return; // We can't split this. } arma::Col leftPoints, rightPoints; // Split the node. overlappingNode = SplitPoints(tau, rho, points, leftPoints, rightPoints); // We don't need the information in points, so lets clean it. arma::Col().swap(points); // Now we will recursively split the children by calling their constructors // (which perform this splitting process). left = new SpillTree(this, leftPoints, tau, maxLeafSize, rho); right = new SpillTree(this, rightPoints, tau, maxLeafSize, rho); // Update count number, to represent the number of descendant points. count = left->NumDescendants() + right->NumDescendants(); // Calculate parent distances for those two nodes. 
  arma::vec center, leftCenter, rightCenter;
  Center(center);
  left->Center(leftCenter);
  right->Center(rightCenter);

  const ElemType leftParentDistance = MetricType::Evaluate(center,
      leftCenter);
  const ElemType rightParentDistance = MetricType::Evaluate(center,
      rightCenter);
  left->ParentDistance() = leftParentDistance;
  right->ParentDistance() = rightParentDistance;
}

template<typename MetricType,
         typename StatisticType,
         typename MatType,
         template<typename HyperplaneMetricType> class HyperplaneType,
         template<typename SplitMetricType, typename SplitMatType>
             class SplitType>
bool SpillTree<MetricType, StatisticType, MatType, HyperplaneType,
    SplitType>::
SplitPoints(const double tau,
            const double rho,
            const arma::Col<size_t>& points,
            arma::Col<size_t>& leftPoints,
            arma::Col<size_t>& rightPoints)
{
  arma::vec projections(points.n_elem);
  size_t left = 0, right = 0, leftFrontier = 0, rightFrontier = 0;

  // Count the number of points to the left/right of the splitting hyperplane.
  for (size_t i = 0; i < points.n_elem; i++)
  {
    // Store projection value for future use.
    projections[i] = hyperplane.Project(dataset->col(points[i]));
    if (projections[i] <= 0)
    {
      left++;
      if (projections[i] > -tau)
        leftFrontier++;
    }
    else
    {
      right++;
      if (projections[i] < tau)
        rightFrontier++;
    }
  }

  const double p1 = double(left + rightFrontier) / points.n_elem;
  const double p2 = double(right + leftFrontier) / points.n_elem;

  if ((p1 <= rho || rightFrontier == 0) && (p2 <= rho || leftFrontier == 0))
  {
    // Perform the actual splitting considering the overlapping buffer. Points
    // with projection value in the range (-tau, tau) are included in both
    // leftPoints and rightPoints.
    leftPoints.resize(left + rightFrontier);
    rightPoints.resize(right + leftFrontier);
    for (size_t i = 0, rc = 0, lc = 0; i < points.n_elem; i++)
    {
      if (projections[i] < tau || projections[i] <= 0)
        leftPoints[lc++] = points[i];
      if (projections[i] > -tau)
        rightPoints[rc++] = points[i];
    }
    // Return true, because it is an overlapping node.
    return true;
  }

  // Perform the actual splitting ignoring the overlapping buffer. Points
  // with projection value less than or equal to zero are included in
  // leftPoints, and points with projection value greater than zero are
  // included in rightPoints.
  leftPoints.resize(left);
  rightPoints.resize(right);
  for (size_t i = 0, rc = 0, lc = 0; i < points.n_elem; i++)
  {
    if (projections[i] <= 0)
      leftPoints[lc++] = points[i];
    else
      rightPoints[rc++] = points[i];
  }
  // Return false, because it isn't an overlapping node.
  return false;
}

// Default constructor (private), for boost::serialization.
template<typename MetricType,
         typename StatisticType,
         typename MatType,
         template<typename HyperplaneMetricType> class HyperplaneType,
         template<typename SplitMetricType, typename SplitMatType>
             class SplitType>
SpillTree<MetricType, StatisticType, MatType, HyperplaneType, SplitType>::
SpillTree() :
    left(NULL),
    right(NULL),
    parent(NULL),
    count(0),
    pointsIndex(NULL),
    overlappingNode(false),
    stat(*this),
    parentDistance(0),
    furthestDescendantDistance(0),
    dataset(NULL),
    localDataset(false)
{
  // Nothing to do.
}

/**
 * Serialize the tree.
 */
template<typename MetricType,
         typename StatisticType,
         typename MatType,
         template<typename HyperplaneMetricType> class HyperplaneType,
         template<typename SplitMetricType, typename SplitMatType>
             class SplitType>
template<typename Archive>
void SpillTree<MetricType, StatisticType, MatType, HyperplaneType,
    SplitType>::
Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  // If we're loading, and we have children, they need to be deleted.
if (Archive::is_loading::value) { if (left) delete left; if (right) delete right; if (!parent && localDataset) delete dataset; } ar & CreateNVP(parent, "parent"); ar & CreateNVP(count, "count"); ar & CreateNVP(pointsIndex, "pointsIndex"); ar & CreateNVP(overlappingNode, "overlappingNode"); ar & CreateNVP(hyperplane, "hyperplane"); ar & CreateNVP(bound, "bound"); ar & CreateNVP(stat, "statistic"); ar & CreateNVP(parentDistance, "parentDistance"); ar & CreateNVP(furthestDescendantDistance, "furthestDescendantDistance"); ar & CreateNVP(dataset, "dataset"); if (Archive::is_loading::value && parent == NULL) localDataset = true; // Save children last; otherwise boost::serialization gets confused. ar & CreateNVP(left, "left"); ar & CreateNVP(right, "right"); // Due to quirks of boost::serialization, if a tree is saved as an object and // not a pointer, the first level of the tree will be duplicated on load. // Therefore, if we are the root of the tree, then we need to make sure our // children's parent links are correct, and delete the duplicated node if // necessary. if (Archive::is_loading::value) { // Get parents of left and right children, or, NULL, if they don't exist. SpillTree* leftParent = left ? left->Parent() : NULL; SpillTree* rightParent = right ? right->Parent() : NULL; // Reassign parent links if necessary. if (left && left->Parent() != this) left->Parent() = this; if (right && right->Parent() != this) right->Parent() = this; // Do we need to delete the left parent? if (leftParent != NULL && leftParent != this) { // Sever the duplicate parent's children. Ensure we don't delete the // dataset, by faking the duplicated parent's parent (that is, we need to // set the parent to something non-NULL; 'this' works). leftParent->Parent() = this; leftParent->Left() = NULL; leftParent->Right() = NULL; delete leftParent; } // Do we need to delete the right parent? if (rightParent != NULL && rightParent != this && rightParent != leftParent) { // Sever the duplicate parent's children, in the same way as above. rightParent->Parent() = this; rightParent->Left() = NULL; rightParent->Right() = NULL; delete rightParent; } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/spill_tree/traits.hpp000066400000000000000000000041301315013601400230650ustar00rootroot00000000000000/** * @file traits.hpp * @author Ryan Curtin * @author Marcos Pividori * * Specialization of the TreeTraits class for the SpillTree type of tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_SPILL_TREE_TRAITS_HPP #define MLPACK_CORE_TREE_SPILL_TREE_TRAITS_HPP #include namespace mlpack { namespace tree { /** * This is a specialization of the TreeType class to the SpillTree tree type. * It defines characteristics of the spill tree, and is used to help write * tree-independent (but still optimized) tree-based algorithms. See * mlpack/core/tree/tree_traits.hpp for more information. */ template class HyperplaneType, template class SplitType> class TreeTraits> { public: /** * Each spill tree node has two children which can share points. * Therefore, children can be overlapping. */ static const bool HasOverlappingChildren = true; /** * There is no guarantee that the first point in a node is its centroid. 
   */
  static const bool FirstPointIsCentroid = false;

  /**
   * Points are not contained at multiple levels of the spill tree.
   */
  static const bool HasSelfChildren = false;

  /**
   * Points are not rearranged during building of the tree.
   */
  static const bool RearrangesDataset = false;

  /**
   * This is always a binary tree.
   */
  static const bool BinaryTree = true;

  /**
   * Spill trees have duplicated points, so NumDescendants() could count a
   * given point twice.
   */
  static const bool UniqueNumDescendants = false;
};

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/core/tree/spill_tree/typedef.hpp000066400000000000000000000115211315013601400232210ustar00rootroot00000000000000/**
 * @file typedef.hpp
 * @author Ryan Curtin
 * @author Marcos Pividori
 *
 * Template typedefs for the SpillTree class that satisfy the requirements
 * of the TreeType policy class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_TREE_SPILL_TREE_TYPEDEF_HPP
#define MLPACK_CORE_TREE_SPILL_TREE_TYPEDEF_HPP

#include "../space_split/mean_space_split.hpp"
#include "../space_split/midpoint_space_split.hpp"

namespace mlpack {
namespace tree {

/**
 * The hybrid spill tree. It is a variant of metric trees in which the children
 * of a node can "spill over" onto each other, and contain shared datapoints.
 *
 * When recursively splitting nodes, the SPTree class selects the dimension
 * with maximum width to split on, and picks the midpoint of the range in that
 * dimension as the value on which to split nodes.
 *
 * In each case an "overlapping buffer" is defined, including points at a
 * distance less than tau from the decision boundary defined by the midpoint.
 *
 * For each node, we first split the points considering the overlapping buffer.
 * If either of its children contains more than rho fraction of the total
 * points, we undo the overlapping splitting. Instead, a conventional partition
 * is used. In this way, we can ensure that each split reduces the number of
 * points of a node by at least a constant factor.
 *
 * For more information, see the following paper.
 *
 * @code
 * @inproceedings{
 *   author = {Ting Liu, Andrew W. Moore, Alexander Gray and Ke Yang},
 *   title = {An Investigation of Practical Approximate Nearest Neighbor
 *       Algorithms},
 *   booktitle = {Advances in Neural Information Processing Systems 17},
 *   year = {2005},
 *   pages = {825--832}
 * }
 * @endcode
 *
 * This template typedef satisfies the TreeType policy API.
 *
 * @see @ref trees, SpillTree, MeanSPTree
 */
template<typename MetricType, typename StatisticType, typename MatType>
using SPTree = SpillTree<MetricType,
                         StatisticType,
                         MatType,
                         AxisOrthogonalHyperplane,
                         MidpointSpaceSplit>;

/**
 * A mean-split hybrid spill tree. This is the same as the SPTree, but this
 * particular implementation will use the mean of the data in the split
 * dimension as the value on which to split, instead of the midpoint.
 * This can sometimes give better performance, but it is not always clear which
 * type of tree is best.
 *
 * This template typedef satisfies the TreeType policy API.
 *
 * @see @ref trees, SpillTree, SPTree
 */
template<typename MetricType, typename StatisticType, typename MatType>
using MeanSPTree = SpillTree<MetricType,
                             StatisticType,
                             MatType,
                             AxisOrthogonalHyperplane,
                             MeanSpaceSplit>;

/**
 * A hybrid spill tree considering general splitting hyperplanes (not
 * necessarily axis-orthogonal). This particular implementation will consider
 * the midpoint of the projection of the data in the vector determined by the
 * farthest pair of points.
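 *
 * A minimal construction sketch (the matrix `dataset` and the parameter
 * values tau = 0.1, maxLeafSize = 20, and rho = 0.7 are illustrative
 * assumptions):
 *
 * @code
 * arma::mat dataset(3, 1000, arma::fill::randu);
 * NonOrtSPTree<metric::EuclideanDistance, EmptyStatistic, arma::mat>
 *     tree(dataset, 0.1, 20, 0.7);
 * @endcode
 *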
This can sometimes give better performance, but * generally it doesn't because it takes O(d) to calculate the projection of the * query point when deciding which node to traverse, while when using a * axis-orthogonal hyperplane, as SPTree does, we can do it in O(1). * * This template typedef satisfies the TreeType policy API. * * @see @ref trees, SpillTree, SPTree */ template using NonOrtSPTree = SpillTree; /** * A mean-split hybrid spill tree considering general splitting hyperplanes (not * necessarily axis-orthogonal). This is the same as the NonOrtSPTree, but this * particular implementation will use the mean of the data in the split * projection as the value on which to split, instead of the midpoint. * This can sometimes give better performance, but it is not always clear which * type of tree is best. * * This template typedef satisfies the TreeType policy API. * * @see @ref trees, SpillTree, MeanSPTree, NonOrtSPTree */ template using NonOrtMeanSPTree = SpillTree; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/statistic.hpp000066400000000000000000000026451315013601400214350ustar00rootroot00000000000000/** * @file statistic.hpp * * Definition of the policy type for the statistic class. * * You should define your own statistic that looks like EmptyStatistic. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_STATISTIC_HPP #define MLPACK_CORE_TREE_STATISTIC_HPP namespace mlpack { namespace tree { /** * Empty statistic if you are not interested in storing statistics in your * tree. Use this as a template for your own. */ class EmptyStatistic { public: EmptyStatistic() { } ~EmptyStatistic() { } /** * This constructor is called when a node is finished being created. The * node is finished, and its children are finished, but it is not * necessarily true that the statistics of other nodes are initialized yet. * * @param node Node which this corresponds to. */ template EmptyStatistic(TreeType& /* node */) { } /** * Serialize the statistic (there's nothing to be saved). */ template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace tree } // namespace mlpack #endif // MLPACK_CORE_TREE_STATISTIC_HPP mlpack-2.2.5/src/mlpack/core/tree/traversal_info.hpp000066400000000000000000000071221315013601400224370ustar00rootroot00000000000000/** * @file traversal_info.hpp * @author Ryan Curtin * * This class will hold the traversal information for dual-tree traversals. A * dual-tree traversal should be updating the members of this class before * Score() is called. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_TREE_TRAVERSAL_INFO_HPP #define MLPACK_CORE_TREE_TRAVERSAL_INFO_HPP namespace mlpack { namespace tree { /** * The TraversalInfo class holds traversal information which is used in * dual-tree (and single-tree) traversals. A traversal should be updating the * members of this class before Score() is called. 
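 *
 * A minimal sketch of that protocol from the traversal's side (the names
 * `rule`, `queryNode`, `refNode`, and `score` are assumptions for
 * illustration):
 *
 * @code
 * rule.TraversalInfo().LastQueryNode() = &queryNode;
 * rule.TraversalInfo().LastReferenceNode() = &refNode;
 * rule.TraversalInfo().LastScore() = score;
 * // A later Score() call on a child combination can now read these members.
 * const double childScore = rule.Score(queryNode, *refNode.Left());
 * @endcode
 *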
This class should be held as * a member of the RuleType class and the interface to it should be through a * TraversalInfo() method. * * The information held by this class is the last node combination visited * before the current node combination was recursed into, and the score * resulting from when Score() was called on that combination. However, this * information is identical for a query node and a reference node in a * particular node combination, so traversals only need to update the * TraversalInfo object in a query node (and the algorithms should only use the * TraversalInfo object from a query node). * * In general, this auxiliary traversal information is used to try and make a * prune without needing to call BaseCase() or calculate the distance between * nodes. Using this information you can place bounds on the distance between * the two nodes quickly. * * If the traversal is not updating the members of this class correctly, a * likely result is a null pointer dereference. Dual-tree algorithms should * assume that the members are set properly and should not need to check for * null pointers. * * There is one exception, which is the root node combination; the score can be * set to 0 and the query and reference nodes can just be set to the root nodes; * no algorithm should be able to prune the root combination anyway. */ template class TraversalInfo { public: /** * Create the TraversalInfo object and initialize the pointers to NULL. */ TraversalInfo() : lastQueryNode(NULL), lastReferenceNode(NULL), lastScore(0.0), lastBaseCase(0.0) { /* Nothing to do. */ } //! Get the last query node. TreeType* LastQueryNode() const { return lastQueryNode; } //! Modify the last query node. TreeType*& LastQueryNode() { return lastQueryNode; } //! Get the last reference node. TreeType* LastReferenceNode() const { return lastReferenceNode; } //! Modify the last reference node. TreeType*& LastReferenceNode() { return lastReferenceNode; } //! Get the score associated with the last query and reference nodes. double LastScore() const { return lastScore; } //! Modify the score associated with the last query and reference nodes. double& LastScore() { return lastScore; } //! Get the base case associated with the last node combination. double LastBaseCase() const { return lastBaseCase; } //! Modify the base case associated with the last node combination. double& LastBaseCase() { return lastBaseCase; } private: //! The last query node. TreeType* lastQueryNode; //! The last reference node. TreeType* lastReferenceNode; //! The last score. double lastScore; //! The last base case. double lastBaseCase; }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/tree/tree_traits.hpp000066400000000000000000000074371315013601400217570ustar00rootroot00000000000000/** * @file tree_traits.hpp * @author Ryan Curtin * * This file implements the basic, unspecialized TreeTraits class, which * provides information about tree types. If you create a tree class, you * should specialize this class with the characteristics of your tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #ifndef MLPACK_CORE_TREE_TREE_TRAITS_HPP #define MLPACK_CORE_TREE_TREE_TRAITS_HPP namespace mlpack { namespace tree { /** * The TreeTraits class provides compile-time information on the characteristics * of a given tree type. These include traits such as whether or not a node * knows the distance to its parent node, or whether or not the subspaces * represented by children can overlap. * * These traits can be used for static compile-time optimization: * * @code * // This if statement will be optimized out at compile time! * if (TreeTraits::HasOverlappingChildren == false) * { * // Do a simpler computation because no children overlap. * } * else * { * // Do the full, complex calculation. * } * @endcode * * The traits can also be used in conjunction with SFINAE to write specialized * versions of functions: * * @code * template * void Compute(TreeType& node, * boost::enable_if< * TreeTraits::RearrangesDataset>::type*) * { * // Computation where special dataset-rearranging tree constructor is * // called. * } * * template * void Compute(TreeType& node, * boost::enable_if< * !TreeTraits::RearrangesDataset>::type*) * { * // Computation where normal tree constructor is called. * } * @endcode * * In those two examples, the boost::enable_if<> class takes a boolean template * parameter which allows that function to be called when the boolean is true. * * Each trait must be a static const value and not a function; only const values * can be used as template parameters (or constexprs can be used too). By * default (the unspecialized implementation of TreeTraits), each parameter is * set to make as few assumptions about the tree as possible; so, even if * TreeTraits is not specialized for a particular tree type, tree-based * algorithms should still work. * * When you write your own tree, you must specialize the TreeTraits class to * your tree type and set the corresponding values appropriately. See * mlpack/core/tree/binary_space_tree/traits.hpp for an example. */ template class TreeTraits { public: /** * This is true if the subspaces represented by the children of a node can * overlap. */ static const bool HasOverlappingChildren = true; /** * This is true if a point can be included in more than one node. */ static const bool HasDuplicatedPoints = false; /** * This is true if the first point of each node is the centroid of its bound. */ static const bool FirstPointIsCentroid = false; /** * This is true if the points contained in the first child of a node * (Child(0)) are also contained in that node. */ static const bool HasSelfChildren = false; /** * This is true if the tree rearranges points in the dataset when it is built. */ static const bool RearrangesDataset = false; /** * This is true if the tree always has only two children. */ static const bool BinaryTree = false; /** * This is true if the NumDescendants() method doesn't include duplicated * points. */ static const bool UniqueNumDescendants = true; }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/util/000077500000000000000000000000001315013601400167245ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/core/util/CMakeLists.txt000066400000000000000000000015241315013601400214660ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. 
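# For example, a hypothetical new utility would be listed with both its header
# and its source file (the names below are illustrative only):
#   my_util.hpp
#   my_util.cpp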
set(SOURCES arma_config.hpp arma_config_check.hpp backtrace.hpp backtrace.cpp cli.hpp cli.cpp cli_deleter.hpp cli_deleter.cpp cli_impl.hpp deprecated.hpp log.hpp log.cpp nulloutstream.hpp option.hpp option.cpp option_impl.hpp prefixedoutstream.hpp prefixedoutstream.cpp prefixedoutstream_impl.hpp sfinae_utility.hpp singletons.hpp singletons.cpp timers.hpp timers.cpp version.hpp version.cpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/core/util/arma_config_check.hpp000066400000000000000000000031061315013601400230370ustar00rootroot00000000000000/** * @file arma_config_check.hpp * @author Ryan Curtin * * Using the contents of arma_config.hpp, try to catch the condition where the * user has included mlpack with ARMA_64BIT_WORD enabled but mlpack was compiled * without ARMA_64BIT_WORD enabled. This should help prevent a long, drawn-out * debugging process where nobody can figure out why the stack is getting * mangled. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_ARMA_CONFIG_CHECK_HPP #define MLPACK_CORE_UTIL_ARMA_CONFIG_CHECK_HPP #include "arma_config.hpp" #ifdef ARMA_64BIT_WORD #ifdef MLPACK_ARMA_NO_64BIT_WORD #pragma message "mlpack was compiled without ARMA_64BIT_WORD, but you are \ compiling with ARMA_64BIT_WORD. This will almost certainly cause irreparable \ disaster. Either disable ARMA_64BIT_WORD in your application which is using \ mlpack, or, recompile mlpack against a version of Armadillo which has \ ARMA_64BIT_WORD enabled." #endif #else #ifdef MLPACK_ARMA_64BIT_WORD #pragma message "mlpack was compiled with ARMA_64BIT_WORD, but you are \ compiling without ARMA_64BIT_WORD. This will almost certainly cause \ irreparable disaster. Either enable ARMA_64BIT_WORD in your application which \ is using mlpack, or, recompile mlpack against a version of Armadillo which has \ ARMA_64BIT_WORD disabled." #endif #endif #endif mlpack-2.2.5/src/mlpack/core/util/arma_traits.hpp000066400000000000000000000044361315013601400217520ustar00rootroot00000000000000/** * @file arma_traits.hpp * @author Ryan Curtin * * Some traits used for template metaprogramming (SFINAE) with Armadillo types. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_ARMA_TRAITS_HPP #define MLPACK_CORE_UTIL_ARMA_TRAITS_HPP // Structs have public members by default (that's why they are chosen over // classes). /** * If value == true, then VecType is some sort of Armadillo vector or subview. * You might use this struct like this: * * @code * // Only accepts VecTypes that are actually Armadillo vector types. * template * void Function(const VecType& argumentA, * typename boost::enable_if >* = 0); * @endcode * * The use of the enable_if object allows the compiler to instantiate Function() * only if VecType is one of the Armadillo vector types. 
 * It has a default argument because it isn't meant to be used in either the
 * function call or the function body.
 */
template<typename VecType>
struct IsVector
{
  const static bool value = false;
};

// Commenting out the first template per case, because Visual Studio doesn't
// like this instantiation pattern (error C2910).

//template<>
template<typename eT>
struct IsVector<arma::Col<eT> >
{
  const static bool value = true;
};

//template<>
template<typename eT>
struct IsVector<arma::Row<eT> >
{
  const static bool value = true;
};

//template<>
template<typename eT>
struct IsVector<arma::subview_col<eT> >
{
  const static bool value = true;
};

//template<>
template<typename eT>
struct IsVector<arma::subview_row<eT> >
{
  const static bool value = true;
};

//template<>
template<typename eT>
struct IsVector<arma::SpCol<eT> >
{
  const static bool value = true;
};

//template<>
template<typename eT>
struct IsVector<arma::SpRow<eT> >
{
  const static bool value = true;
};

// I'm not so sure about this one.  An SpSubview object can be a row or
// column, but it can also be a matrix subview.
//template<>
template<typename eT>
struct IsVector<arma::SpSubview<eT> >
{
  const static bool value = true;
};

#endif

mlpack-2.2.5/src/mlpack/core/util/backtrace.cpp

/**
 * @file backtrace.cpp
 * @author Grzegorz Krajewski
 *
 * Implementation of the Backtrace class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <sstream>

#ifdef HAS_BFD_DL
  #include <execinfo.h>
  #include <signal.h>
  #include <unistd.h>
  #include <cxxabi.h>

  // Some versions of libbfd require PACKAGE and PACKAGE_VERSION to be set in
  // order for the include to not fail.  For more information:
  // https://github.com/mlpack/mlpack/issues/574
  #ifndef PACKAGE
    #define PACKAGE
    #ifndef PACKAGE_VERSION
      #define PACKAGE_VERSION
      #include <bfd.h>
      #undef PACKAGE_VERSION
    #else
      #include <bfd.h>
    #endif
    #undef PACKAGE
  #else
    #ifndef PACKAGE_VERSION
      #define PACKAGE_VERSION
      #include <bfd.h>
      #undef PACKAGE_VERSION
    #else
      #include <bfd.h>
    #endif
  #endif

  #include <dlfcn.h>
#endif

#include "backtrace.hpp"
#include "log.hpp"

// Easier to read Backtrace::DecodeAddress().
#ifdef HAS_BFD_DL
  #define TRACE_CONDITION_1 (!dladdr(trace[i], &addressHandler))
  #define FIND_LINE (bfd_find_nearest_line(abfd, text, syms, offset, \
      &frame.file, &frame.function, &frame.line) && frame.file)
#endif

using namespace mlpack;

// Initialize Backtrace static instances.
Backtrace::Frames Backtrace::frame;
std::vector<Backtrace::Frames> Backtrace::stack;

#ifdef HAS_BFD_DL
// Binary File Descriptor objects.
bfd* abfd = 0;       // Descriptor datastructure.
asymbol **syms = 0;  // Symbols datastructure.
asection *text = 0;  // Strings datastructure.
#endif

#ifdef HAS_BFD_DL
Backtrace::Backtrace(int maxDepth)
{
  frame.address = NULL;
  frame.function = "0";
  frame.file = "0";
  frame.line = 0;

  stack.clear();

  GetAddress(maxDepth);
}
#else
Backtrace::Backtrace()
{
  // Dummy constructor.
}
#endif

#ifdef HAS_BFD_DL
void Backtrace::GetAddress(int maxDepth)
{
  void* trace[maxDepth];
  int stackDepth = backtrace(trace, maxDepth);

  // Skip the first stack frame (it points to Backtrace::Backtrace).
  for (int i = 1; i < stackDepth; i++)
  {
    Dl_info addressHandler;

    // No backtrace can be printed if the program was not compiled with the
    // flags '-g -rdynamic'.
    if (TRACE_CONDITION_1)
    {
      return;
    }

    frame.address = addressHandler.dli_saddr;

    DecodeAddress((long) frame.address);
  }
}

void Backtrace::DecodeAddress(long addr)
{
  // Check to see if we already have a BFD descriptor; if we don't, open the
  // running program's executable.
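  // (A hedged note on the design, inferred from the code below: abfd, syms,
  // and text are file-level statics, so this initialization runs only once
  // per process; every later DecodeAddress() call reuses the already-opened
  // descriptor and symbol table, and only the first decoded frame pays the
  // cost of bfd_openr() and symbol-table loading.)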
if (!abfd) { char ename[1024]; int l = readlink("/proc/self/exe", ename, sizeof(ename)); if (l == -1) { perror("Failed to open executable!\n"); return; } ename[l] = 0; bfd_init(); abfd = bfd_openr(ename, 0); if (!abfd) { perror("bfd_openr failed: "); return; } bfd_check_format(abfd,bfd_object); unsigned storage_needed = bfd_get_symtab_upper_bound(abfd); syms = (asymbol **) malloc(storage_needed); text = bfd_get_section_by_name(abfd, ".text"); } long offset = addr - text->vma; if (offset > 0) { if (FIND_LINE) { DemangleFunction(); // Save retrieved information. stack.push_back(frame); } } } void Backtrace::DemangleFunction() { int status; char* tmp = abi::__cxa_demangle(frame.function, 0, 0, &status); // If demangling is successful, reallocate 'frame.function' pointer to // demangled name. Else if 'status != 0', leave 'frame.function as it is. if (status == 0) { frame.function = tmp; } } #else void Backtrace::GetAddress(int /* maxDepth */) { } void Backtrace::DecodeAddress(long /* address */) { } void Backtrace::DemangleFunction() { } #endif std::string Backtrace::ToString() { std::string stackStr; #ifdef HAS_BFD_DL std::ostringstream lineOss; std::ostringstream it; if (stack.size() <= 0) { stackStr = "Cannot give backtrace because program was compiled"; stackStr += " without: -g -rdynamic\nFor a backtrace,"; stackStr += " recompile with: -g -rdynamic.\n"; return stackStr; } for (size_t i = 0; i < stack.size(); i++) { frame = stack[i]; lineOss << frame.line; it << i + 1; stackStr += "[bt]: (" + it.str() + ") " + frame.file + ":" + lineOss.str() + " " + frame.function + ":\n"; lineOss.str(""); it.str(""); } #else stackStr = "[bt]: No backtrace for this OS. Work in progress."; #endif return stackStr; } mlpack-2.2.5/src/mlpack/core/util/backtrace.hpp000066400000000000000000000043301315013601400213540ustar00rootroot00000000000000/** * @file backtrace.hpp * @author Grzegorz Krajewski * * Definition of the Backtrace class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef __MLPACK_CORE_UTIL_BACKTRACE_HPP #define __MLPACK_CORE_UTIL_BACKTRACE_HPP #include #include namespace mlpack { /** * Provides a backtrace. * * The Backtrace class retrieve addresses of each called function from the * stack and decode file name, function & line number. Retrieved information * can be printed in form: * * @code * [b]: (count) /directory/to/file.cpp:function(args):line_number * @endcode * * Backtrace is printed always when Log::Assert failed. * An example is given below. * * @code * if (!someImportantCondition()) * { * Log::Fatal << "someImportantCondition() is not satisfied! Terminating."; * Log::Fatal << std::endl; * } * @endcode * * @note Log::Assert will not be shown when compiling in non-debug mode. * * @see PrefixedOutStream, Log */ class Backtrace { public: /** * Constructor initialize fields and call GetAddress to retrieve addresses * for each frame of backtrace. * * @param maxDepth Maximum depth of backtrace. Default 32 steps. */ #ifdef HAS_BFD_DL Backtrace(int maxDepth = 32); #else Backtrace(); #endif //! Returns string of backtrace. std::string ToString(); private: /** * Gets addresses of each called function from the stack. * * @param maxDepth Maximum depth of backtrace. Default 32 steps. 
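   *
   * A hedged sketch of the call chain (GetAddress() is private; it is driven
   * by the constructor, so user code only sees the two public calls below):
   *
   * @code
   * Backtrace bt(32);                // Constructor calls GetAddress(32).
   * std::string s = bt.ToString();   // Decoded frames, one "[bt]" line each.
   * @endcode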
*/ static void GetAddress(int maxDepth); /** * Decodes file name, function & line number. * * @param address Address of traced frame. */ static void DecodeAddress(long address); //! Demangles function name. static void DemangleFunction(); //! Backtrace datastructure. struct Frames { void *address; const char* function; const char* file; unsigned line; } static frame; //! A vector for all the backtrace information. static std::vector stack; }; }; //namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/util/cli.cpp000066400000000000000000000571651315013601400202150ustar00rootroot00000000000000/** * @file cli.cpp * @author Matthew Amidon * * Implementation of the CLI module for parsing parameters. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "cli.hpp" #include "log.hpp" using namespace mlpack; using namespace mlpack::util; /* For clarity, we will alias boost's namespace. */ namespace po = boost::program_options; // Fake ProgramDoc in case none is supplied. static ProgramDoc emptyProgramDoc = ProgramDoc("", ""); /* Constructors, Destructors, Copy */ /* Make the constructor private, to preclude unauthorized instances */ CLI::CLI() : desc("Allowed Options") , didParse(false), doc(&emptyProgramDoc) { return; } /** * Initialize desc with a particular name. * * @param optionsName Name of the module, as far as boost is concerned. */ CLI::CLI(const std::string& optionsName) : desc(optionsName), didParse(false), doc(&emptyProgramDoc) { return; } // Private copy constructor; don't want copies floating around. CLI::CLI(const CLI& other) : desc(other.desc), didParse(false), doc(&emptyProgramDoc) { return; } CLI::~CLI() { // We need to print any output options. PrintOutput(); // Terminate the program timers. std::map::iterator it; for (it = timer.GetAllTimers().begin(); it != timer.GetAllTimers().end(); ++it) { std::string i = (*it).first; if (timer.GetState(i) == 1) Timer::Stop(i); } // Did the user ask for verbose output? If so we need to print everything. // But only if the user did not ask for help or info. if (HasParam("verbose") && !HasParam("help") && !HasParam("info")) { Log::Info << std::endl << "Execution parameters:" << std::endl; Print(); Log::Info << "Program timers:" << std::endl; std::map::iterator it; for (it = timer.GetAllTimers().begin(); it != timer.GetAllTimers().end(); ++it) { std::string i = (*it).first; Log::Info << " " << i << ": "; timer.PrintTimer((*it).first); } } // Notify the user if we are debugging, but only if we actually parsed the // options. This way this output doesn't show up inexplicably for someone who // may not have wanted it there (i.e. in Boost unit tests). if (didParse) Log::Debug << "Compiled with debugging symbols." << std::endl; return; } /** * Adds a parameter to the hierarchy. Use char* and not std::string since the * vast majority of use cases will be literal strings. * * @param identifier The name of the parameter. * @param description Short string description of the parameter. * @param alias An alias for the parameter. * @param required Indicates if parameter must be set on command line. * @param input If true, the parameter is an input parameter (not an output * parameter). 
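 *
 * A hedged example of a direct call (in practice the PARAM_*() macros
 * generate calls like this for you; the parameter names are illustrative):
 *
 * @code
 * CLI::Add("leaf_size", "Leaf size for tree building.", "l",
 *     false, true);  // Not required; an input parameter.
 * @endcode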
*/ void CLI::Add(const std::string& identifier, const std::string& description, const std::string& alias, const bool required, const bool input) { po::options_description& desc = CLI::GetSingleton().desc; // Must make use of boost option name syntax. std::string progOptId = alias.length() ? identifier + "," + alias : identifier; // Deal with a required alias. AddAlias(alias, identifier); // Add the option to boost::program_options. desc.add_options()(progOptId.c_str(), description.c_str()); // Make sure the description, etc. ends up in gmap. gmap_t& gmap = GetSingleton().globalValues; ParamData data; data.desc = description; data.tname = ""; data.name = identifier; data.isFlag = false; data.wasPassed = false; gmap[identifier] = data; // If the option is required, add it to the required options list. if (required) GetSingleton().requiredOptions.push_front(identifier); // Depending on whether the option is input or output, add it to the list of // input or output options. if (input) GetSingleton().inputOptions.push_front(identifier); else GetSingleton().outputOptions.push_front(identifier); return; } /* * Adds an alias mapping for a given parameter. * * @param alias The alias we will use for the parameter. * @param original The name of the actual parameter we will be mapping to. */ void CLI::AddAlias(const std::string& alias, const std::string& original) { // Conduct the mapping. if (alias.length()) { amap_t& amap = GetSingleton().aliasValues; amap[alias] = original; } } /* * @brief Adds a flag parameter to CLI. */ void CLI::AddFlag(const std::string& identifier, const std::string& description, const std::string& alias) { // Reuse functionality from Add(). Add(identifier, description, alias, false); // Insert the proper metadata into gmap. gmap_t& gmap = GetSingleton().globalValues; ParamData data; data.desc = description; data.tname = TYPENAME(bool); data.name = std::string(identifier); data.isFlag = true; data.wasPassed = false; gmap[data.name] = data; } std::string CLI::AliasReverseLookup(const std::string& value) { amap_t& amap = GetSingleton().aliasValues; amap_t::iterator iter; for (iter = amap.begin(); iter != amap.end(); ++iter) if (iter->second == value) // Found our match. return iter->first; return ""; // Nothing found. } /** * Parses the parameters for 'help' and 'info' If found, will print out the * appropriate information and kill the program. */ void CLI::DefaultMessages() { // --version is prioritized over --help. if (HasParam("version")) { std::cout << GetSingleton().programName << ": part of " << util::GetVersion() << std::endl; exit(0); } // Default help message. if (HasParam("help")) { Log::Info.ignoreInput = false; PrintHelp(); exit(0); // The user doesn't want to run the program, he wants help. } if (HasParam("info")) { Log::Info.ignoreInput = false; std::string str = GetParam("info"); // The info node should always be there, but the user may not have specified // anything. if (str != "") { PrintHelp(str); exit(0); } // Otherwise just print the generalized help. PrintHelp(); exit(0); } if (HasParam("verbose")) { // Give [INFO ] output. Log::Info.ignoreInput = false; } // Notify the user if we are debugging. This is not done in the constructor // because the output streams may not be set up yet. We also don't want this // message twice if the user just asked for help or information. Log::Debug << "Compiled with debugging symbols." << std::endl; } /** * Destroy the CLI object. 
This resets the pointer to the singleton, so in case * someone tries to access it after destruction, a new one will be made (the * program will not fail). */ void CLI::Destroy() { if (singleton != NULL) { delete singleton; singleton = NULL; // Reset pointer. } } /** * See if the specified flag was found while parsing. * * @param identifier The name of the parameter in question. */ bool CLI::HasParam(const std::string& key) { return GetParam(key); } /** * GetParam() is equivalent to HasParam(). */ template<> bool& CLI::GetParam(const std::string& key) { std::string used_key = key; po::variables_map vmap = GetSingleton().vmap; gmap_t& gmap = GetSingleton().globalValues; // Take any possible alias into account. amap_t& amap = GetSingleton().aliasValues; if (amap.count(key)) used_key = amap[key]; // Does the parameter exist at all? int isInGmap = gmap.count(used_key); // Check if the parameter is boolean; if it is, we just want to see if it was // passed. if (isInGmap) return gmap[used_key].wasPassed; // The parameter was not passed in; terminate the program. Log::Fatal << "Parameter '--" << key << "' does not exist in this program." << std::endl; // These lines will never be reached, but must be here to make the compiler // happy. bool* trash = new bool; *trash = false; return *trash; } /**  * Hyphenate a string or split it onto multiple 80-character lines, with some  * amount of padding on each line.  This is used for option output.  *  * @param str String to hyphenate (splits are on ' ').  * @param padding Amount of padding on the left for each new line.  */ std::string CLI::HyphenateString(const std::string& str, int padding) { size_t margin = 80 - padding; if (str.length() < margin) return str; std::string out(""); unsigned int pos = 0; // First try to look as far as possible. while (pos < str.length()) { size_t splitpos; // Check that we don't have a newline first. splitpos = str.find('\n', pos); if (splitpos == std::string::npos || splitpos > (pos + margin)) { // We did not find a newline. if (str.length() - pos < margin) { splitpos = str.length(); // The rest fits on one line. } else { splitpos = str.rfind(' ', margin + pos); // Find nearest space. if (splitpos <= pos || splitpos == std::string::npos) // Not found. splitpos = pos + margin; } } out += str.substr(pos, (splitpos - pos)); if (splitpos < str.length()) { out += '\n'; out += std::string(padding, ' '); } pos = splitpos; if (str[pos] == ' ' || str[pos] == '\n') pos++; } return out; } /** * Grab the description of the specified node. * * @param identifier Name of the node in question. * @return Description of the node in question. */ std::string CLI::GetDescription(const std::string& identifier) { gmap_t& gmap = GetSingleton().globalValues; std::string name = std::string(identifier); //Take any possible alias into account amap_t& amap = GetSingleton().aliasValues; if (amap.count(name)) name = amap[name]; if (gmap.count(name)) return gmap[name].desc; else return ""; } // Returns the sole instance of this class. CLI& CLI::GetSingleton() { if (singleton == NULL) singleton = new CLI(); return *singleton; } /** * Parses the commandline for arguments. * * @param argc The number of arguments on the commandline. 
* @param argv The array of arguments as strings */ void CLI::ParseCommandLine(int argc, char** line) { Timer::Start("total_time"); GetSingleton().programName = std::string(line[0]); po::variables_map& vmap = GetSingleton().vmap; po::options_description& desc = GetSingleton().desc; // Parse the command line, place the options & values into vmap try { // Get the basic_parsed_options po::basic_parsed_options bpo( po::parse_command_line(argc, line, desc)); // Look for any duplicate parameters, removing duplicate flags RemoveDuplicateFlags(bpo); // Record the basic_parsed_options po::store(bpo, vmap); } catch (std::exception& ex) { Log::Fatal << "Caught exception from parsing command line:\t"; Log::Fatal << ex.what() << std::endl; } // Flush the buffer, make sure changes are propagated to vmap po::notify(vmap); UpdateGmap(); DefaultMessages(); RequiredOptions(); } /* * Removes duplicate flags. * * @param bpo The basic_program_options to remove duplicate flags from. */ void CLI::RemoveDuplicateFlags(po::basic_parsed_options& bpo) { // Iterate over all the program_options, looking for duplicate parameters for (unsigned int i = 0; i < bpo.options.size(); i++) { for (unsigned int j = i + 1; j < bpo.options.size(); j++) { if (bpo.options[i].string_key == bpo.options[j].string_key) { // If a duplicate is found, check to see if either one has a value if (bpo.options[i].value.size() == 0 && bpo.options[j].value.size() == 0) { // If neither has a value, consider it a duplicate flag and remove the // duplicate. It's important to not break out of this loop because // there might be another duplicate later on in the vector. bpo.options.erase(bpo.options.begin()+j); } else { // If one or both has a value, produce an error and politely // terminate. We pull the name from the original_tokens, rather than // from the string_key, because the string_key is the parameter after // aliases have been expanded. Log::Fatal << "\"" << bpo.options[j].original_tokens[0] << "\"" << " is defined multiple times." << std::endl; } } } } } // Prints any output options. void CLI::PrintOutput() { gmap_t& gmap = GetSingleton().globalValues; gmap_t::iterator iter; for (iter = gmap.begin(); iter != gmap.end(); ++iter) { std::string key = iter->first; ParamData data = iter->second; const std::list& inputOptions = GetSingleton().inputOptions; const bool input = (std::find(std::begin(inputOptions), std::end(inputOptions), key) != std::end(inputOptions)); // Ignore input options. if (input) continue; // Ignore string output options that end in _file. if ((data.tname == TYPENAME(std::string)) && (data.name.substr(data.name.size() - 5, 5) == "_file")) continue; // Reverse compatibility; should be removed for mlpack 3.0.0. Don't print // some options that have only been kept for reverse compatibility. if (data.name == "output_predictions" || data.name == "output_ic" || data.name == "output_unmixing") continue; // Now, we must print it, so figure out what the type is. if (data.tname == TYPENAME(std::string)) { std::string value = GetParam(key); std::cout << key << ": " << value << std::endl; } else if (data.tname == TYPENAME(int)) { int value = GetParam(key); std::cout << key << ": " << value << std::endl; } else if (data.tname == TYPENAME(double)) { double value = GetParam(key); std::cout << key << ": " << value << std::endl; } else { std::cout << key << ": unknown data type" << std::endl; } } } /* Prints out the current hierarchy. 
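 * Each parameter is written through Log::Info as '  name: value', so the
 * listing is only visible when --verbose is given.  The shape of the output
 * (with illustrative values) is:
 *
 *   leaf_size: 20
 *   verbose: true
 *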
*/ void CLI::Print() { gmap_t& gmap = GetSingleton().globalValues; gmap_t::iterator iter; // Print out all the values. for (iter = gmap.begin(); iter != gmap.end(); ++iter) { std::string key = iter->first; Log::Info << " " << key << ": "; // Now, figure out what type it is, and print it. // We can handle strings, ints, bools, floats, doubles. ParamData data = iter->second; if (data.tname == TYPENAME(std::string)) { std::string value = GetParam(key); if (value == "") Log::Info << "\"\""; Log::Info << value; } else if (data.tname == TYPENAME(int)) { int value = GetParam(key); Log::Info << value; } else if (data.tname == TYPENAME(bool)) { bool value = HasParam(key); Log::Info << (value ? "true" : "false"); } else if (data.tname == TYPENAME(float)) { float value = GetParam(key); Log::Info << value; } else if (data.tname == TYPENAME(double)) { double value = GetParam(key); Log::Info << value; } else { // We don't know how to print this, or it's a timeval which is printed // later. Log::Info << "(Unknown data type - " << data.tname << ")"; } Log::Info << std::endl; } Log::Info << std::endl; } /* Prints the descriptions of the current hierarchy. */ void CLI::PrintHelp(const std::string& param) { std::string used_param = param; gmap_t& gmap = GetSingleton().globalValues; amap_t& amap = GetSingleton().aliasValues; gmap_t::iterator iter; ProgramDoc docs = *GetSingleton().doc; // If we pass a single param, alias it if necessary. if (used_param != "" && amap.count(used_param)) used_param = amap[used_param]; // Do we only want to print out one value? if (used_param != "" && gmap.count(used_param)) { ParamData data = gmap[used_param]; std::string alias = AliasReverseLookup(used_param); alias = alias.length() ? " (-" + alias + ")" : alias; // Figure out the name of the type. std::string type = ""; if (data.tname == TYPENAME(std::string)) type = " [string]"; else if (data.tname == TYPENAME(int)) type = " [int]"; else if (data.tname == TYPENAME(bool)) type = ""; // Nothing to pass for a flag. else if (data.tname == TYPENAME(float)) type = " [float]"; else if (data.tname == TYPENAME(double)) type = " [double]"; // Now, print the descriptions. std::string fullDesc = " --" + used_param + alias + type + " "; if (fullDesc.length() <= 32) // It all fits on one line. std::cout << fullDesc << std::string(32 - fullDesc.length(), ' '); else // We need multiple lines. std::cout << fullDesc << std::endl << std::string(32, ' '); std::cout << HyphenateString(data.desc, 32) << std::endl; return; } else if (used_param != "") { // User passed a single variable, but it doesn't exist. std::cerr << "Parameter --" << used_param << " does not exist." << std::endl; exit(1); // Nothing left to do. } // Print out the descriptions. if (docs.programName != "") { std::cout << docs.programName << std::endl << std::endl; std::cout << " " << HyphenateString(docs.documentation, 2) << std::endl << std::endl; } else std::cout << "[undocumented program]" << std::endl << std::endl; for (size_t pass = 0; pass < 3; ++pass) { bool printedHeader = false; // Print out the descriptions of everything else. for (iter = gmap.begin(); iter != gmap.end(); ++iter) { std::string key = iter->first; ParamData data = iter->second; std::string desc = data.desc; std::string alias = AliasReverseLookup(key); alias = alias.length() ? " (-" + alias + ")" : alias; // Is the option required or not? And is it an input option or not? 
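      // (Note, grounded in the pass logic below: the enclosing loop makes
      // three passes -- pass 0 prints required input options, pass 1 prints
      // optional input options, and pass 2 prints output options -- and these
      // two lookups drive that filtering.)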
const std::list& requiredOptions = GetSingleton().requiredOptions; const std::list& inputOptions = GetSingleton().inputOptions; const bool required = (std::find(std::begin(requiredOptions), std::end(requiredOptions), key) != std::end(requiredOptions)); const bool input = (std::find(std::begin(inputOptions), std::end(inputOptions), key) != std::end(inputOptions)); // Filter un-printed options. if ((pass == 0) && !(required && input)) // Required input options only. continue; if ((pass == 1) && !(!required && input)) // Optional input options only. continue; if ((pass == 2) && input) // Output options only (always optional). continue; // Only print string output options that end in "_file". if ((pass == 2) && ((data.tname != TYPENAME(std::string)) || (data.name.substr(data.name.size() - 5, 5) != "_file"))) continue; // For reverse compatibility: this can be removed when these options are // gone in mlpack 3.0.0. We don't want to print the deprecated options. if (data.name == "inputFile") continue; if (!printedHeader) { printedHeader = true; if (pass == 0) std::cout << "Required input options:" << std::endl << std::endl; else if (pass == 1) std::cout << "Optional input options: " << std::endl << std::endl; else if (pass == 2) std::cout << "Optional output options: " << std::endl << std::endl; } if (pass >= 1) // Append default value to description. { desc += " Default value "; std::stringstream tmp; if (data.tname == TYPENAME(std::string)) tmp << "'" << boost::any_cast(data.value) << "'."; else if (data.tname == TYPENAME(int)) tmp << boost::any_cast(data.value) << '.'; else if (data.tname == TYPENAME(bool)) desc = data.desc; // No extra output for that. else if (data.tname == TYPENAME(float)) tmp << boost::any_cast(data.value) << '.'; else if (data.tname == TYPENAME(double)) tmp << boost::any_cast(data.value) << '.'; desc += tmp.str(); } // Figure out the name of the type. std::string type = ""; if (data.tname == TYPENAME(std::string)) type = " [string]"; else if (data.tname == TYPENAME(int)) type = " [int]"; else if (data.tname == TYPENAME(bool)) type = ""; // Nothing to pass for a flag. else if (data.tname == TYPENAME(float)) type = " [float]"; else if (data.tname == TYPENAME(double)) type = " [double]"; // Now, print the descriptions. std::string fullDesc = " --" + key + alias + type + " "; if (fullDesc.length() <= 32) // It all fits on one line. std::cout << fullDesc << std::string(32 - fullDesc.length(), ' '); else // We need multiple lines. std::cout << fullDesc << std::endl << std::string(32, ' '); std::cout << HyphenateString(desc, 32) << std::endl; } if (printedHeader) std::cout << std::endl; } // Helpful information at the bottom of the help output, to point the user to // citations and better documentation (if necessary). See ticket #201. std::cout << HyphenateString("For further information, including relevant " "papers, citations, and theory, consult the documentation found at " "http://www.mlpack.org or included with your distribution of mlpack.", 0) << std::endl; } /** * Registers a ProgramDoc object, which contains documentation about the * program. * * @param doc Pointer to the ProgramDoc object. */ void CLI::RegisterProgramDoc(ProgramDoc* doc) { // Only register the doc if it is not the dummy object we created at the // beginning of the file (as a default value in case this is never called). if (doc != &emptyProgramDoc) GetSingleton().doc = doc; } /** * Checks that all parameters specified as required have been specified on the * command line. 
If they havent, prints an error message and kills the program. */ void CLI::RequiredOptions() { po::variables_map& vmap = GetSingleton().vmap; std::list rOpt = GetSingleton().requiredOptions; // Now, warn the user if they missed any required options. std::list::iterator iter; for (iter = rOpt.begin(); iter != rOpt.end(); ++iter) { std::string str = *iter; if (!vmap.count(str)) { // If a required option isn't there... Log::Fatal << "Required option --" << str << " is undefined." << std::endl; } } } /** * Parses the values given on the command line, overriding any default values. */ void CLI::UpdateGmap() { gmap_t& gmap = GetSingleton().globalValues; po::variables_map& vmap = GetSingleton().vmap; // Iterate through vmap, and overwrite default values with anything found on // command line. po::variables_map::iterator i; for (i = vmap.begin(); i != vmap.end(); ++i) { ParamData param; if (gmap.count(i->first)) // We need to preserve certain data param = gmap[i->first]; param.value = vmap[i->first].value(); param.wasPassed = true; gmap[i->first] = param; } } // Add help parameter. PARAM_FLAG("help", "Default help info.", "h"); PARAM_STRING_IN("info", "Get help on a specific module or option.", "", ""); PARAM_FLAG("verbose", "Display informational messages and the full list of " "parameters and timers at the end of execution.", "v"); PARAM_FLAG("version", "Display the version of mlpack.", "V"); mlpack-2.2.5/src/mlpack/core/util/cli.hpp000066400000000000000000000360531315013601400202130ustar00rootroot00000000000000/** * @file cli.hpp * @author Matthew Amidon * * This file implements the CLI subsystem which is intended to replace FX. * This can be used more or less regardless of context. In the future, * it might be expanded to include file I/O. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_CLI_HPP #define MLPACK_CORE_UTIL_CLI_HPP #include #include #include #include #include #include #include #include "timers.hpp" #include "cli_deleter.hpp" // To make sure we can delete the singleton. #include "version.hpp" #include "param.hpp" /** * The TYPENAME macro is used internally to convert a type into a string. */ #define TYPENAME(x) (std::string(typeid(x).name())) namespace po = boost::program_options; namespace mlpack { namespace util { // Externally defined in option.hpp, this class holds information about the // program being run. class ProgramDoc; } // namespace util /** * Aids in the extensibility of CLI by focusing potential * changes into one structure. */ struct ParamData { //! Name of this parameter. std::string name; //! Description of this parameter, if any. std::string desc; //! Type information of this parameter. std::string tname; //! The actual value of this parameter. boost::any value; //! True if this parameter was passed in via command line or file. bool wasPassed; //! True if the wasPassed value should not be ignored. bool isFlag; }; /** * @brief Parses the command line for parameters and holds user-specified * parameters. * * The CLI class is a subsystem by which parameters for machine learning methods * can be specified and accessed. In conjunction with the macros PARAM_DOUBLE, * PARAM_INT, PARAM_STRING, PARAM_FLAG, and others, this class aims to make user * configurability of mlpack methods very easy. 
There are only three methods in * CLI that a user should need: CLI::ParseCommandLine(), CLI::GetParam(), and * CLI::HasParam() (in addition to the PARAM_*() macros). * * @section addparam Adding parameters to a program * * @code * $ ./executable --bar=5 * @endcode * * @note The = is optional; a space can also be used. * * A parameter is specified by using one of the following macros (this is not a * complete list; see core/io/cli.hpp): * * - PARAM_FLAG(ID, DESC, ALIAS) * - PARAM_DOUBLE(ID, DESC, ALIAS, DEF) * - PARAM_INT(ID, DESC, ALIAS, DEF) * - PARAM_STRING(ID, DESC, ALIAS, DEF) * * @param ID Name of the parameter. * @param DESC Short description of the parameter (one/two sentences). * @param ALIAS An alias for the parameter. * @param DEF Default value of the parameter. * * The flag (boolean) type automatically defaults to false; it is specified * merely as a flag on the command line (no '=true' is required). * * Here is an example of a few parameters being defined; this is for the KNN * executable (methods/neighbor_search/knn_main.cpp): * * @code * PARAM_STRING_REQ("reference_file", "File containing the reference dataset.", * "r"); * PARAM_STRING_REQ("distances_file", "File to output distances into.", "d"); * PARAM_STRING_REQ("neighbors_file", "File to output neighbors into.", "n"); * PARAM_INT_REQ("k", "Number of furthest neighbors to find.", "k"); * PARAM_STRING("query_file", "File containing query points (optional).", "q", * ""); * PARAM_INT("leaf_size", "Leaf size for tree building.", "l", 20); * PARAM_FLAG("naive", "If true, O(n^2) naive mode is used for computation.", * "N"); * PARAM_FLAG("single_mode", "If true, single-tree search is used (as opposed " * "to dual-tree search.", "s"); * @endcode * * More documentation is available on the PARAM_*() macros in the documentation * for core/io/cli.hpp. * * @section programinfo Documenting the program itself * * In addition to allowing documentation for each individual parameter and * module, the PROGRAM_INFO() macro provides support for documenting the program * itself. There should only be one instance of the PROGRAM_INFO() macro. * Below is an example: * * @code * PROGRAM_INFO("Maximum Variance Unfolding", "This program performs maximum " * "variance unfolding on the given dataset, writing a lower-dimensional " * "unfolded dataset to the given output file."); * @endcode * * This description should be verbose, and explain to a non-expert user what the * program does and how to use it. If relevant, paper citations should be * included. * * @section parsecli Parsing the command line with CLI * * To have CLI parse the command line at the beginning of code execution, only a * call to ParseCommandLine() is necessary: * * @code * int main(int argc, char** argv) * { * CLI::ParseCommandLine(argc, argv); * * ... * } * @endcode * * CLI provides --help and --info options which give nicely formatted * documentation of each option; the documentation is generated from the DESC * arguments in the PARAM_*() macros. * * @section getparam Getting parameters with CLI * * When the parameters have been defined, the next important thing is how to * access them. For this, the HasParam() and GetParam() methods are * used. For instance, to see if the user passed the flag (boolean) "naive": * * @code * if (CLI::HasParam("naive")) * { * Log::Info << "Naive has been passed!" 
<< std::endl; * } * @endcode * * To get the value of a parameter, such as a string, use GetParam: * * @code * const std::string filename = CLI::GetParam("filename"); * @endcode * * @note * Options should only be defined in files which define `main()` (that is, main * executables). If options are defined elsewhere, they may be spuriously * included into other executables and confuse users. Similarly, if your * executable has options which you did not define, it is probably because the * option is defined somewhere else and included in your executable. * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ class CLI { public: /** * Adds a parameter to the hierarchy; use the PARAM_*() macros instead of this * (i.e. PARAM_INT()). Uses char* and not std::string since the vast majority * of use cases will be literal strings. * * @param identifier The name of the parameter. * @param description Short string description of the parameter. * @param alias An alias for the parameter, defaults to "" which is no alias. * (""). * @param required Indicates if parameter must be set on command line. * @param input If true, the parameter is an input (not output) parameter. */ static void Add(const std::string& path, const std::string& description, const std::string& alias = "", const bool required = false, const bool input = true); /** * Adds a parameter to the hierarchy; use the PARAM_*() macros instead of this * (i.e. PARAM_INT()). Uses char* and not std::string since the vast majority * of use cases will be literal strings. If the argument requires a * parameter, you must specify a type. * * @param identifier The name of the parameter. * @param description Short string description of the parameter. * @param alias An alias for the parameter, defaults to "" which is no alias. * @param required Indicates if parameter must be set on command line. * @param input If true, the parameter is an input (not output) parameter. */ template static void Add(const std::string& identifier, const std::string& description, const std::string& alias = "", const bool required = false, const bool input = true); /** * Adds a flag parameter to the hierarchy; use PARAM_FLAG() instead of this. * * @param identifier The name of the paramater. * @param description Short string description of the parameter. * @param alias An alias for the parameter, defaults to "" which is no alias. */ static void AddFlag(const std::string& identifier, const std::string& description, const std::string& alias = ""); /** * Parses the parameters for 'help' and 'info'. * If found, will print out the appropriate information and kill the program. */ static void DefaultMessages(); /** * Destroy the CLI object. This resets the pointer to the singleton, so in * case someone tries to access it after destruction, a new one will be made * (the program will not fail). */ static void Destroy(); /** * Grab the value of type T found while parsing. You can set the value using * this reference safely. * * @param identifier The name of the parameter in question. 
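   *
   * For example (a hedged sketch; the parameter names are illustrative, and
   * the reference that is returned is writable):
   *
   * @code
   * const std::string file = CLI::GetParam<std::string>("input_file");
   * CLI::GetParam<int>("leaf_size") = 20;  // Overrides the stored value.
   * @endcode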
*/ template static T& GetParam(const std::string& identifier); /** * Get the description of the specified node. * * @param identifier Name of the node in question. * @return Description of the node in question. */ static std::string GetDescription(const std::string& identifier); /** * Retrieve the singleton. * * Not exposed to the outside, so as to spare users some ungainly * x.GetSingleton().foo() syntax. * * In this case, the singleton is used to store data for the static methods, * as there is no point in defining static methods only to have users call * private instance methods. * * @return The singleton instance for use in the static methods. */ static CLI& GetSingleton(); /** * See if the specified flag was found while parsing. * * @param identifier The name of the parameter in question. */ static bool HasParam(const std::string& identifier); /** * Hyphenate a string or split it onto multiple 80-character lines, with some * amount of padding on each line. This is ued for option output. * * @param str String to hyphenate (splits are on ' '). * @param padding Amount of padding on the left for each new line. */ static std::string HyphenateString(const std::string& str, int padding); /** * Parses the commandline for arguments. * * @param argc The number of arguments on the commandline. * @param argv The array of arguments as strings. */ static void ParseCommandLine(int argc, char** argv); /** * Removes duplicate flags. * * @param bpo The basic_program_options to remove duplicate flags from. */ static void RemoveDuplicateFlags(po::basic_parsed_options& bpo); /** * Print the value of any output options on stdout. */ static void PrintOutput(); /** * Print out the current hierarchy. */ static void Print(); /** * Print out the help info of the hierarchy. */ static void PrintHelp(const std::string& param = ""); /** * Registers a ProgramDoc object, which contains documentation about the * program. If this method has been called before (that is, if two * ProgramDocs are instantiated in the program), a fatal error will occur. * * @param doc Pointer to the ProgramDoc object. */ static void RegisterProgramDoc(util::ProgramDoc* doc); /** * Destructor. */ ~CLI(); private: //! The documentation and names of options. po::options_description desc; //! Values of the options given by user. po::variables_map vmap; //! Identifier names of required options. std::list requiredOptions; //! Pathnames of input options. std::list inputOptions; //! Pathnames of output options. std::list outputOptions; //! Map of global values. typedef std::map gmap_t; gmap_t globalValues; //! Map for aliases, from alias to actual name. typedef std::map amap_t; amap_t aliasValues; //! The singleton itself. static CLI* singleton; //! True, if CLI was used to parse command line options. bool didParse; //! Hold the name of the program for --version. std::string programName; //! Holds the timer objects. Timers timer; //! So that Timer::Start() and Timer::Stop() can access the timer variable. friend class Timer; public: //! Pointer to the ProgramDoc object. util::ProgramDoc *doc; private: /** * Maps a given alias to a given parameter. * * @param alias The name of the alias to be mapped. * @param original The name of the parameter to be mapped. */ static void AddAlias(const std::string& alias, const std::string& original); /** * Returns an alias, if given the name of the original. * * @param value The value in a key:value pair where the key * is an alias. * @return The alias associated with value. 
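   *
   * For example, the flag registered by PARAM_FLAG("verbose", ..., "v") maps
   * the alias "v" to "verbose", so AliasReverseLookup("verbose") returns "v";
   * an empty string is returned when no alias exists.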
*/ static std::string AliasReverseLookup(const std::string& value); /** * Checks that all required parameters have been specified on the command * line. If any have not been specified, an error message is printed and the * program is terminated. */ static void RequiredOptions(); /** * Parses the values given on the command line, overriding any default values. */ static void UpdateGmap(); /** * Make the constructor private, to preclude unauthorized instances. */ CLI(); /** * Initialize desc with a particular name. * * @param optionsName Name of the module, as far as boost is concerned. */ CLI(const std::string& optionsName); //! Private copy constructor; we don't want copies floating around. CLI(const CLI& other); //! Metaprogramming structure for vector detection. template struct IsStdVector { const static bool value = false; }; //! Metaprogramming structure for vector detection. template struct IsStdVector> { const static bool value = true; }; /** * Add an option if it is not a vector type. This is a utility function used * by CLI::Add. * * @tparam Type of parameter. * @param optId Name of parameter. * @param descr Description. */ template void AddOption( const char* optId, const char* descr, const typename boost::disable_if>::type* /* junk */ = 0); /** * Add an option if it is a vector type. This is a utility function used by * CLI::Add. * * @tparam Type of parameter. * @param optId Name of parameter. * @param descr Description. */ template void AddOption( const char* optId, const char* descr, const typename boost::enable_if>::type* /* junk */ = 0); }; } // namespace mlpack // Include the actual definitions of templated methods #include "cli_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/util/cli_deleter.cpp000066400000000000000000000020541315013601400217040ustar00rootroot00000000000000/** * @file cli_deleter.cpp * @author Ryan Curtin * * Extremely simple class whose only job is to delete the existing CLI object at * the end of execution. This is meant to allow the user to avoid typing * 'CLI::Destroy()' at the end of their program. The file also defines a static * CLIDeleter class, which will be initialized at the beginning of the program * and deleted at the end. The destructor destroys the CLI singleton. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "cli_deleter.hpp" #include "cli.hpp" using namespace mlpack; using namespace mlpack::util; /*** * Empty constructor that does nothing. */ CLIDeleter::CLIDeleter() { /* Nothing to do. */ } /*** * This destructor deletes the CLI singleton. */ CLIDeleter::~CLIDeleter() { // Delete the singleton! CLI::Destroy(); } mlpack-2.2.5/src/mlpack/core/util/cli_deleter.hpp000066400000000000000000000017721315013601400217170ustar00rootroot00000000000000/** * @file cli_deleter.hpp * @author Ryan Curtin * * Definition of the CLIDeleter() class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #ifndef MLPACK_CORE_UTIL_CLI_DELETER_HPP #define MLPACK_CORE_UTIL_CLI_DELETER_HPP namespace mlpack { namespace util { /** * Extremely simple class whose only job is to delete the existing CLI object at * the end of execution. This is meant to allow the user to avoid typing * 'CLI::Destroy()' at the end of their program. The file also defines a static * CLIDeleter class, which will be initialized at the beginning of the program * and deleted at the end. The destructor destroys the CLI singleton. */ class CLIDeleter { public: CLIDeleter(); ~CLIDeleter(); }; } // namespace util } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/util/cli_impl.hpp000066400000000000000000000117311315013601400212300ustar00rootroot00000000000000/** * @file cli_impl.hpp * @author Matthew Amidon * * Implementation of templated functions of the CLI class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_CLI_IMPL_HPP #define MLPACK_CORE_UTIL_CLI_IMPL_HPP // In case it has not already been included. #include "cli.hpp" #include "prefixedoutstream.hpp" // Include option.hpp here because it requires CLI but is also templated. #include "option.hpp" namespace mlpack { /** * @brief Adds a parameter to CLI, making it accessibile via GetParam & * CheckValue. * * @tparam T The type of the parameter. * @param identifier The name of the parameter, eg foo. * @param description A string description of the parameter. * @param alias Short name of the parameter. * @param required If required, the program will refuse to run unless the * parameter is specified. * @param input If true, the parameter is an input parameter (not an output * parameter). */ template void CLI::Add(const std::string& identifier, const std::string& description, const std::string& alias, const bool required, const bool input) { // Temporarily define color code escape sequences. #ifndef _WIN32 #define BASH_RED "\033[0;31m" #define BASH_CLEAR "\033[0m" #else #define BASH_RED "" #define BASH_CLEAR "" #endif // Temporary outstream object for detecting duplicate identifiers. util::PrefixedOutStream outstr(std::cerr, BASH_RED "[FATAL] " BASH_CLEAR, false, true /* fatal */); #undef BASH_RED #undef BASH_CLEAR // Define identifier and alias maps. gmap_t& gmap = GetSingleton().globalValues; amap_t& amap = GetSingleton().aliasValues; // If found in current map, print fatal error and terminate the program. if (gmap.count(identifier)) outstr << "Parameter --" << identifier << "(-" << alias << ") " << "is defined multiple times with same identifiers." << std::endl; if (amap.count(alias)) outstr << "Parameter --" << identifier << "(-" << alias << ") " << "is defined multiple times with same alias." << std::endl; // Must make use of boost syntax here. std::string progOptId = alias.length() ? identifier + "," + alias : identifier; // Add the alias, if necessary AddAlias(alias, identifier); // Add the option to boost program_options. GetSingleton().AddOption(progOptId.c_str(), description.c_str()); // Make sure the appropriate metadata is inserted into gmap. ParamData data; T tmp = T(); data.desc = description; data.name = identifier; data.tname = TYPENAME(T); data.value = boost::any(tmp); data.wasPassed = false; gmap[identifier] = data; // If the option is required, add it to the required options list. 
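  // (A hedged sketch of a typical expansion: a macro such as
  // PARAM_INT_IN("leaf_size", "Leaf size for tree building.", "l", 20)
  // constructs a static util::Option<int> whose constructor calls
  // CLI::Add<int>("leaf_size", ..., "l", false, true), which lands here and
  // files the name under the required/input/output lists below.)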
  if (required)
    GetSingleton().requiredOptions.push_front(identifier);

  // Depending on whether or not the option is input or output, add it to the
  // appropriate list.
  if (input)
    GetSingleton().inputOptions.push_front(identifier);
  else
    GetSingleton().outputOptions.push_front(identifier);
}

// We specialize this in cli.cpp.
template<>
bool& CLI::GetParam<bool>(const std::string& identifier);

/**
 * @brief Returns the value of the specified parameter.  If the parameter is
 * unspecified, an undefined but more or less valid value is returned.
 *
 * @tparam T The type of the parameter.
 * @param identifier The full name of the parameter.
 *
 * @return The value of the parameter.  Use CLI::CheckValue to determine if
 *     it's valid.
 */
template<typename T>
T& CLI::GetParam(const std::string& identifier)
{
  // Used to ensure we have a valid value.
  T tmp = T();

  // Used to index into the globalValues map.
  std::string key = std::string(identifier);
  gmap_t& gmap = GetSingleton().globalValues;

  // Now check if we have an alias.
  amap_t& amap = GetSingleton().aliasValues;
  if (amap.count(key))
    key = amap[key];

  // What if we don't actually have any value?
  if (!gmap.count(key))
  {
    gmap[key] = ParamData();
    gmap[key].value = boost::any(tmp);
    *boost::any_cast<T>(&gmap[key].value) = tmp;
  }

  // What if we have meta-data, but no data?
  boost::any val = gmap[key].value;
  if (val.empty())
    gmap[key].value = boost::any(tmp);

  return *boost::any_cast<T>(&gmap[key].value);
}

template<typename T>
void CLI::AddOption(
    const char* optId,
    const char* descr,
    const typename boost::disable_if<IsStdVector<T>>::type* /* junk */)
{
  desc.add_options()(optId, po::value<T>(), descr);
}

template<typename T>
void CLI::AddOption(
    const char* optId,
    const char* descr,
    const typename boost::enable_if<IsStdVector<T>>::type* /* junk */)
{
  desc.add_options()(optId, po::value<T>()->multitoken(), descr);
}

} // namespace mlpack

#endif

mlpack-2.2.5/src/mlpack/core/util/deprecated.hpp

/**
 * @file deprecated.hpp
 * @author Marcos Pividori.
 *
 * Definition of the mlpack_deprecated macro.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_UTIL_DEPRECATED_HPP
#define MLPACK_CORE_UTIL_DEPRECATED_HPP

#ifdef __GNUG__
#define mlpack_deprecated __attribute__((deprecated))
#elif defined(_MSC_VER)
#define mlpack_deprecated __declspec(deprecated)
#else
#pragma message("WARNING: You need to implement mlpack_deprecated for this " \
    "compiler")
#define mlpack_deprecated
#endif

#endif

mlpack-2.2.5/src/mlpack/core/util/log.cpp

/**
 * @file log.cpp
 * @author Matthew Amidon
 *
 * Implementation of the Log class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "log.hpp"

#ifdef HAS_BFD_DL
  #include "backtrace.hpp"
#endif

using namespace mlpack;
using namespace mlpack::util;

// Only do anything for Assert() if in debugging mode.
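// (A hedged usage sketch: in a debug build, the call below throws
// std::runtime_error and -- when HAS_BFD_DL is defined -- first prints a
// backtrace; in a release build, Assert() compiles to a no-op.  The condition
// and message are illustrative.)
//
//   Log::Assert(matrix.n_rows == 3, "Matrix must have 3 rows!");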
#ifdef DEBUG void Log::Assert(bool condition, const std::string& message) { if (!condition) { #ifdef HAS_BFD_DL Backtrace bt; Log::Debug << bt.ToString(); #endif Log::Debug << message << std::endl; throw std::runtime_error("Log::Assert() failed: " + message); } } #else void Log::Assert(bool /* condition */, const std::string& /* message */) { } #endif mlpack-2.2.5/src/mlpack/core/util/log.hpp000066400000000000000000000055211315013601400202210ustar00rootroot00000000000000/** * @file log.hpp * @author Matthew Amidon * * Definition of the Log class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_LOG_HPP #define MLPACK_CORE_UTIL_LOG_HPP #include #include #include "prefixedoutstream.hpp" #include "nulloutstream.hpp" namespace mlpack { /** * Provides a convenient way to give formatted output. * * The Log class has four members which can be used in the same way ostreams can * be used: * * - Log::Debug * - Log::Info * - Log::Warn * - Log::Fatal * * Each of these will prefix a tag to the output (for easy filtering), and the * fatal output will terminate the program when a newline is encountered. An * example is given below. * * @code * Log::Info << "Checking a condition." << std::endl; * if (!someCondition()) * Log::Warn << "someCondition() is not satisfied!" << std::endl; * Log::Info << "Checking an important condition." << std::endl; * if (!someImportantCondition()) * { * Log::Fatal << "someImportantCondition() is not satisfied! Terminating."; * Log::Fatal << std::endl; * } * @endcode * * Any messages sent to Log::Debug will not be shown when compiling in non-debug * mode. Messages to Log::Info will only be shown when the --verbose flag is * given to the program (or rather, the CLI class). * * @see PrefixedOutStream, NullOutStream, CLI */ class Log { public: /** * Checks if the specified condition is true. * If not, halts program execution and prints a custom error message. * Does nothing in non-debug mode. */ static void Assert(bool condition, const std::string& message = "Assert Failed."); /** * MLPACK_EXPORT is required for global variables, so that they are properly * exported by the Windows compiler. */ // We only use PrefixedOutStream if the program is compiled with debug // symbols. #ifdef DEBUG //! Prints debug output with the appropriate tag: [DEBUG]. static MLPACK_EXPORT util::PrefixedOutStream Debug; #else //! Dumps debug output into the bit nether regions. static MLPACK_EXPORT util::NullOutStream Debug; #endif //! Prints informational messages if --verbose is specified, prefixed with //! [INFO ]. static MLPACK_EXPORT util::PrefixedOutStream Info; //! Prints warning messages prefixed with [WARN ]. static MLPACK_EXPORT util::PrefixedOutStream Warn; //! Prints fatal messages prefixed with [FATAL], then terminates the program. static MLPACK_EXPORT util::PrefixedOutStream Fatal; //! Reference to cout, if necessary. static std::ostream& cout; }; }; //namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/util/nulloutstream.hpp000066400000000000000000000046251315013601400223620ustar00rootroot00000000000000/** * @file nulloutstream.hpp * @author Ryan Curtin * @author Matthew Amidon * * Definition of the NullOutStream class. 
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_NULLOUTSTREAM_HPP #define MLPACK_CORE_UTIL_NULLOUTSTREAM_HPP #include #include #include namespace mlpack { namespace util { /** * Used for Log::Debug when not compiled with debugging symbols. This class * does nothing and should be optimized out entirely by the compiler. */ class NullOutStream { public: /** * Does nothing. */ NullOutStream() { } /** * Does nothing. */ NullOutStream(const NullOutStream& /* other */) { } //! Does nothing. NullOutStream& operator<<(bool) { return *this; } //! Does nothing. NullOutStream& operator<<(short) { return *this; } //! Does nothing. NullOutStream& operator<<(unsigned short) { return *this; } //! Does nothing. NullOutStream& operator<<(int) { return *this; } //! Does nothing. NullOutStream& operator<<(unsigned int) { return *this; } //! Does nothing. NullOutStream& operator<<(long) { return *this; } //! Does nothing. NullOutStream& operator<<(unsigned long) { return *this; } //! Does nothing. NullOutStream& operator<<(float) { return *this; } //! Does nothing. NullOutStream& operator<<(double) { return *this; } //! Does nothing. NullOutStream& operator<<(long double) { return *this; } //! Does nothing. NullOutStream& operator<<(void*) { return *this; } //! Does nothing. NullOutStream& operator<<(const char*) { return *this; } //! Does nothing. NullOutStream& operator<<(std::string&) { return *this; } //! Does nothing. NullOutStream& operator<<(std::streambuf*) { return *this; } //! Does nothing. NullOutStream& operator<<(std::ostream& (*) (std::ostream&)) { return *this; } //! Does nothing. NullOutStream& operator<<(std::ios& (*) (std::ios&)) { return *this; } //! Does nothing. NullOutStream& operator<<(std::ios_base& (*) (std::ios_base&)) { return *this; } //! Does nothing. template NullOutStream& operator<<(const T&) { return *this; } }; } // namespace util } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/util/option.cpp000066400000000000000000000024261315013601400207440ustar00rootroot00000000000000/** * @file option.cpp * @author Ryan Curtin * * Implementation of the ProgramDoc class. The class registers itself with CLI * when constructed. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "cli.hpp" #include "option.hpp" #include using namespace mlpack; using namespace mlpack::util; using namespace std; /** * Construct a ProgramDoc object. When constructed, it will register itself * with CLI. A fatal error will be thrown if more than one is constructed. * * @param programName Short string representing the name of the program. * @param documentation Long string containing documentation on how to use the * program and what it is. No newline characters are necessary; this is * taken care of by CLI later. * @param defaultModule Name of the default module. */ ProgramDoc::ProgramDoc(const std::string& programName, const std::string& documentation) : programName(programName), documentation(documentation) { // Register this with CLI. 
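  // (Hedged context: the PROGRAM_INFO() macro in param.hpp constructs exactly
  // one static ProgramDoc per program, e.g. -- with illustrative text --
  //   PROGRAM_INFO("K-Nearest-Neighbors", "This program computes ...");
  // and the registration below is what makes that text available to
  // CLI::PrintHelp().)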
CLI::RegisterProgramDoc(this); } mlpack-2.2.5/src/mlpack/core/util/option.hpp000066400000000000000000000067421315013601400207560ustar00rootroot00000000000000/** * @file option.hpp * @author Matthew Amidon * * Definition of the Option class, which is used to define parameters which are * used by CLI. The ProgramDoc class also resides here. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_OPTION_HPP #define MLPACK_CORE_UTIL_OPTION_HPP #include #include "cli.hpp" namespace mlpack { namespace util { /** * A static object whose constructor registers a parameter with the CLI class. * This should not be used outside of CLI itself, and you should use the * PARAM_FLAG(), PARAM_DOUBLE(), PARAM_INT(), PARAM_STRING(), or other similar * macros to declare these objects instead of declaring them directly. * * @see core/io/cli.hpp, mlpack::CLI */ template class Option { public: /** * Construct an Option object. When constructed, it will register * itself with CLI. * * @param ignoreTemplate Whether or not the template type matters for this * option. Essentially differs options with no value (flags) from those * that do, and thus require a type. * @param defaultValue Default value this parameter will be initialized to. * @param identifier The name of the option (no dashes in front; for --help, * we would pass "help"). * @param description A short string describing the option. * @param alias Short name of the parameter. * @param required Whether or not the option is required at runtime. * @param input Whether or not the option is an input option. */ Option(const bool ignoreTemplate, const N defaultValue, const std::string& identifier, const std::string& description, const std::string& alias, const bool required = false, const bool input = true); /** * Constructs an Option object. When constructed, it will register a flag * with CLI. * * @param identifier The name of the option (no dashes in front); for --help * we would pass "help". * @param description A short string describing the option. * @param alias Short name of the parameter. */ Option(const std::string& identifier, const std::string& description, const std::string& alias); }; /** * A static object whose constructor registers program documentation with the * CLI class. This should not be used outside of CLI itself, and you should use * the PROGRAM_INFO() macro to declare these objects. Only one ProgramDoc * object should ever exist. * * @see core/util/cli.hpp, mlpack::CLI */ class ProgramDoc { public: /** * Construct a ProgramDoc object. When constructed, it will register itself * with CLI. * * @param programName Short string representing the name of the program. * @param documentation Long string containing documentation on how to use the * program and what it is. No newline characters are necessary; this is * taken care of by CLI later. */ ProgramDoc(const std::string& programName, const std::string& documentation); //! The name of the program. std::string programName; //! Documentation for what the program does. 
std::string documentation; }; } // namespace util } // namespace mlpack // For implementations of templated functions #include "option_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/util/option_impl.hpp000066400000000000000000000027451315013601400217760ustar00rootroot00000000000000/** * @file option_impl.hpp * @author Matthew Amidon * * Implementation of template functions for the Option class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_OPTION_IMPL_HPP #define MLPACK_CORE_UTIL_OPTION_IMPL_HPP // Just in case it has not been included. #include "option.hpp" namespace mlpack { namespace util { /** * Registers a parameter with CLI. */ template Option::Option(const bool ignoreTemplate, const N defaultValue, const std::string& identifier, const std::string& description, const std::string& alias, const bool required, const bool input) { if (ignoreTemplate) { CLI::Add(identifier, description, alias, required, input); } else { CLI::Add(identifier, description, alias, required, input); CLI::GetParam(identifier) = defaultValue; } } /** * Registers a flag parameter with CLI. */ template Option::Option(const std::string& identifier, const std::string& description, const std::string& alias) { CLI::AddFlag(identifier, description, alias); } } // namespace util } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/util/param.hpp000066400000000000000000000420051315013601400205360ustar00rootroot00000000000000/** * @file param.hpp * @author Matthew Amidon * @author Ryan Curtin * * Definition of PARAM_*_IN() and PARAM_*_OUT() macros, as well as the * PROGRAM_INFO() macro, which are used to define input and output parameters of * command-line programs and bindings to other languages. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_PARAM_HPP #define MLPACK_CORE_UTIL_PARAM_HPP #include /** * Document an executable. Only one instance of this macro should be * present in your program! Therefore, use it in the main.cpp * (or corresponding executable) in your program. * * @see mlpack::CLI, PARAM_FLAG(), PARAM_INT_IN(), PARAM_DOUBLE_IN(), * PARAM_STRING_IN(), PARAM_VECTOR_IN(), PARAM_INT_OUT(), PARAM_DOUBLE_OUT(), * PARAM_VECTOR_OUT(), PARAM_INT_IN_REQ(), PARAM_DOUBLE_IN_REQ(), * PARAM_STRING_IN_REQ(), PARAM_VECTOR_IN_REQ(), PARAM_INT_OUT_REQ(), * PARAM_DOUBLE_OUT_REQ(), PARAM_VECTOR_OUT_REQ(), PARAM_STRING_OUT_REQ(). * * @param NAME Short string representing the name of the program. * @param DESC Long string describing what the program does and possibly a * simple usage example. Newlines should not be used here; this is taken * care of by CLI (however, you can explicitly specify newlines to denote * new paragraphs). */ #define PROGRAM_INFO(NAME, DESC) static mlpack::util::ProgramDoc \ cli_programdoc_dummy_object = mlpack::util::ProgramDoc(NAME, DESC); /** * Define a flag parameter. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). 
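 *
 * For instance, a hypothetical flag parameter (the name and alias here are
 * illustrative only):
 * @code
 * PARAM_FLAG("print_extra_info", "Print extra diagnostic information.", "e");
 * @endcode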
* * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_FLAG(ID, DESC, ALIAS) \ PARAM_FLAG_INTERNAL(ID, DESC, ALIAS); /** * Define an integer input parameter. * * The parameter can then be specified on the command line with * --ID=value. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). * @param DEF Default value of the parameter. * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_INT_IN(ID, DESC, ALIAS, DEF) \ PARAM_IN(int, ID, DESC, ALIAS, DEF, false) /** * Define an integer output parameter. This parameter will be printed on stdout * at the end of the program; for instance, if the parameter name is "number" * and the value is 5, the output on stdout would be of the following form: * * @code * number: 5 * @endcode * * If the parameter is not set by the end of the program, a fatal runtime error * will be issued. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_INT_OUT(ID, DESC) \ PARAM_OUT(int, ID, DESC, "", 0, false) /** * Define a double input parameter. * * The parameter can then be specified on the command line with * --ID=value. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). * @param DEF Default value of the parameter. * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_DOUBLE_IN(ID, DESC, ALIAS, DEF) \ PARAM_IN(double, ID, DESC, ALIAS, DEF, false) /** * Define a double output parameter. 
This parameter will be printed on stdout * at the end of the program; for instance, if the parameter name is "number" * and the value is 5.012, the output on stdout would be of the following form: * * @code * number: 5.012 * @endcode * * If the parameter is not set by the end of the program, a fatal runtime error * will be issued. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_DOUBLE_OUT(ID, DESC) \ PARAM_OUT(double, ID, DESC, "", 0.0, false) /** * Define a string input parameter. * * The parameter can then be specified on the command line with * --ID=value. If ALIAS is equal to DEF_MOD (which is set using the * PROGRAM_INFO() macro), the parameter can be specified with just --ID=value. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). * @param DEF Default value of the parameter. * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_STRING_IN(ID, DESC, ALIAS, DEF) \ PARAM_IN(std::string, ID, DESC, ALIAS, DEF, false) /** * Define a string output parameter. * * If the parameter name does not end in "_file" (i.e. "output_file", * "predictions_file", etc.), then the string will be printed to stdout at the * end of the program. For instance, if there was a string output parameter * called "something" with value "hello", at the end of the program the output * would be of the following form: * * @code * something: "hello" * @endcode * * If the parameter is not set by the end of the program, a fatal runtime error * will be issued. * * An alias is still allowed for string output parameters, because if the * parameter name ends in "_file", then the user must be able to specify it as * input. The default value will always be the empty string. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. 
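 *
 * For instance, a hypothetical output-file parameter (the name and alias here
 * are illustrative only):
 * @code
 * PARAM_STRING_OUT("output_file", "File to save the model to.", "o");
 * @endcode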
*/ #define PARAM_STRING_OUT(ID, DESC, ALIAS) \ PARAM_OUT(std::string, ID, DESC, ALIAS, "", false) /** * Define a vector input parameter. * * The parameter can then be specified on the command line with * --ID=value1,value2,value3. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). * @param DEF Default value of the parameter. * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_VECTOR_IN(T, ID, DESC, ALIAS) \ PARAM_IN(std::vector, ID, DESC, ALIAS, std::vector(), false) /** * Define a vector output parameter. This vector will be printed on stdout at * the end of the program; for instance, if the parameter name is "vector" and * the vector holds the array { 1, 2, 3, 4 }, the output on stdout would be of * the following form: * * @code * vector: 1, 2, 3, 4 * @endcode * * If the parameter is not set by the end of the program, a fatal runtime error * will be issued. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_VECTOR_OUT(T, ID) \ PARAM_OUT(std::vector, ID, DESC, "", std::vector(), false) /** * Define a required integer input parameter. * * The parameter must then be specified on the command line with --ID=value. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_INT_IN_REQ(ID, DESC, ALIAS) \ PARAM_IN(int, ID, DESC, ALIAS, 0, true) /** * Define a required double parameter. * * The parameter must then be specified on the command line with --ID=value. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. 
However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_DOUBLE_IN_REQ(ID, DESC, ALIAS) \ PARAM_IN(double, ID, DESC, ALIAS, 0.0d, true) /** * Define a required string parameter. * * The parameter must then be specified on the command line with --ID=value. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_STRING_IN_REQ(ID, DESC, ALIAS) \ PARAM_IN(std::string, ID, DESC, ALIAS, "", true) /** * Define a required vector parameter. * * The parameter must then be specified on the command line with * --ID=value1,value2,value3. * * @param ID Name of the parameter. * @param DESC Quick description of the parameter (1-2 sentences). * @param ALIAS An alias for the parameter (one letter). * * @see mlpack::CLI, PROGRAM_INFO() * * @bug * The __COUNTER__ variable is used in most cases to guarantee a unique global * identifier for options declared using the PARAM_*() macros. However, not all * compilers have this support--most notably, gcc < 4.3. In that case, the * __LINE__ macro is used as an attempt to get a unique global identifier, but * collisions are still possible, and they produce bizarre error messages. See * https://github.com/mlpack/mlpack/issues/100 for more information. */ #define PARAM_VECTOR_IN_REQ(T, ID, DESC, ALIAS) \ PARAM_IN(std::vector, ID, DESC, ALIAS, std::vector(), true); /** * @cond * Don't document internal macros. */ // These are ugly, but necessary utility functions we must use to generate a // unique identifier inside of the PARAM() module. #define JOIN(x, y) JOIN_AGAIN(x, y) #define JOIN_AGAIN(x, y) x ## y /** @endcond */ /** * Define an input parameter. Don't use this function; use the other ones above * that call it. Note that we are using the __LINE__ macro for naming these * actual parameters when __COUNTER__ does not exist, which is a bit of an ugly * hack... but this is the preprocessor, after all. We don't have much choice * other than ugliness. * * @param T Type of the parameter. * @param ID Name of the parameter. * @param DESC Description of the parameter (1-2 sentences). * @param ALIAS Alias for this parameter (one letter). * @param DEF Default value of the parameter. * @param REQ Whether or not parameter is required (boolean value). */ #ifdef __COUNTER__ #define PARAM_IN(T, ID, DESC, ALIAS, DEF, REQ) \ static mlpack::util::Option \ JOIN(cli_option_dummy_object_in_, __COUNTER__) \ (false, DEF, ID, DESC, ALIAS, REQ, true); #define PARAM_OUT(T, ID, DESC, ALIAS, DEF, REQ) \ static mlpack::util::Option \ JOIN(cli_option_dummy_object_out_, __COUNTER__) \ (false, DEF, ID, DESC, ALIAS, REQ, false); /** @cond Don't document internal macros. 
*/
#define PARAM_FLAG_INTERNAL(ID, DESC, ALIAS) static \
    mlpack::util::Option<bool> JOIN(__io_option_flag_object_, __COUNTER__) \
    (ID, DESC, ALIAS);
/** @endcond */

#else

// We have to do some really bizarre stuff since __COUNTER__ isn't defined. I
// don't think we can absolutely guarantee success, but it should be "good
// enough". We use the __LINE__ macro and the type of the parameter to try
// and get a good guess at something unique.
#define PARAM_IN(T, ID, DESC, ALIAS, DEF, REQ) \
    static mlpack::util::Option<T> \
    JOIN(JOIN(io_option_dummy_object_in_, __LINE__), opt) \
    (false, DEF, ID, DESC, ALIAS, REQ, true);

#define PARAM_OUT(T, ID, DESC, ALIAS, DEF, REQ) \
    static mlpack::util::Option<T> \
    JOIN(JOIN(io_option_dummy_object_out_, __LINE__), opt) \
    (false, DEF, ID, DESC, ALIAS, REQ, false);

/** @cond Don't document internal macros. */
#define PARAM_FLAG_INTERNAL(ID, DESC, ALIAS) static \
    mlpack::util::Option<bool> JOIN(__io_option_flag_object_, __LINE__) \
    (ID, DESC, ALIAS);
/** @endcond */

#endif

#endif
mlpack-2.2.5/src/mlpack/core/util/prefixedoutstream.cpp000066400000000000000000000053671315013601400232140ustar00rootroot00000000000000/**
 * @file prefixedoutstream.cpp
 * @author Ryan Curtin
 * @author Matthew Amidon
 *
 * Implementation of PrefixedOutStream methods.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <iostream>
#include "prefixedoutstream.hpp"

using namespace mlpack::util;

/**
 * These are all necessary because gcc's template mechanism does not seem smart
 * enough to figure out what I want to pass into operator<< without these.  That
 * may not be the actual case, but it works when they are here.
*/ PrefixedOutStream& PrefixedOutStream::operator<<(bool val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(short val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(unsigned short val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(int val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(unsigned int val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(long val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(unsigned long val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(float val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(double val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(long double val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(void* val) { BaseLogic(val); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(const char* str) { BaseLogic(str); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(std::string& str) { BaseLogic(str); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(std::streambuf* sb) { BaseLogic(sb); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<( std::ostream& (*pf)(std::ostream&)) { BaseLogic(pf); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<(std::ios& (*pf)(std::ios&)) { BaseLogic(pf); return *this; } PrefixedOutStream& PrefixedOutStream::operator<<( std::ios_base& (*pf) (std::ios_base&)) { BaseLogic(pf); return *this; } mlpack-2.2.5/src/mlpack/core/util/prefixedoutstream.hpp000066400000000000000000000126141315013601400232130ustar00rootroot00000000000000/** * @file prefixedoutstream.hpp * @author Ryan Curtin * @author Matthew Amidon * * Declaration of the PrefixedOutStream class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_PREFIXEDOUTSTREAM_HPP #define MLPACK_CORE_UTIL_PREFIXEDOUTSTREAM_HPP #include namespace mlpack { namespace util { /** * Allows us to output to an ostream with a prefix at the beginning of each * line, in the same way we would output to cout or cerr. The prefix is * specified in the constructor (as well as the destination ostream). A newline * must be passed to the stream, and then the prefix will be prepended to the * next line. For example, * * @code * PrefixedOutStream outstr(std::cout, "[TEST] "); * outstr << "Hello world I like " << 7.5; * outstr << "...Continue" << std::endl; * outstr << "After the CR\n" << std::endl; * @endcode * * would give, on std::cout, * * @code * [TEST] Hello world I like 7.5...Continue * [TEST] After the CR * [TEST] * @endcode * * These objects are used for the mlpack::Log levels (DEBUG, INFO, WARN, and * FATAL). */ class PrefixedOutStream { public: /** * Set up the PrefixedOutStream. * * @param destination ostream which receives output from this object. * @param prefix The prefix to prepend to each line. * @param ignoreInput If true, the stream will not be printed. * @param fatal If true, a std::runtime_error exception is thrown after * printing a newline. 
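 *
 * For example, a stream that behaves like Log::Fatal (prints to std::cerr and
 * throws after a newline) can be constructed as:
 * @code
 * PrefixedOutStream fatal(std::cerr, "[FATAL] ", false, true);
 * @endcode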
*/
  PrefixedOutStream(std::ostream& destination,
                    const char* prefix,
                    bool ignoreInput = false,
                    bool fatal = false) :
      destination(destination),
      ignoreInput(ignoreInput),
      prefix(prefix),
      // We want the first call to operator<< to print the prefix, so we set
      // carriageReturned to true.
      carriageReturned(true),
      fatal(fatal)
  { /* nothing to do */ }

  //! Write a bool to the stream.
  PrefixedOutStream& operator<<(bool val);
  //! Write a short to the stream.
  PrefixedOutStream& operator<<(short val);
  //! Write an unsigned short to the stream.
  PrefixedOutStream& operator<<(unsigned short val);
  //! Write an int to the stream.
  PrefixedOutStream& operator<<(int val);
  //! Write an unsigned int to the stream.
  PrefixedOutStream& operator<<(unsigned int val);
  //! Write a long to the stream.
  PrefixedOutStream& operator<<(long val);
  //! Write an unsigned long to the stream.
  PrefixedOutStream& operator<<(unsigned long val);
  //! Write a float to the stream.
  PrefixedOutStream& operator<<(float val);
  //! Write a double to the stream.
  PrefixedOutStream& operator<<(double val);
  //! Write a long double to the stream.
  PrefixedOutStream& operator<<(long double val);
  //! Write a void pointer to the stream.
  PrefixedOutStream& operator<<(void* val);
  //! Write a character array to the stream.
  PrefixedOutStream& operator<<(const char* str);
  //! Write a string to the stream.
  PrefixedOutStream& operator<<(std::string& str);
  //! Write a streambuf to the stream.
  PrefixedOutStream& operator<<(std::streambuf* sb);
  //! Write an ostream manipulator function to the stream.
  PrefixedOutStream& operator<<(std::ostream& (*pf)(std::ostream&));
  //! Write an ios manipulator function to the stream.
  PrefixedOutStream& operator<<(std::ios& (*pf)(std::ios&));
  //! Write an ios_base manipulator function to the stream.
  PrefixedOutStream& operator<<(std::ios_base& (*pf)(std::ios_base&));
  //! Write anything else to the stream.
  template<typename T>
  PrefixedOutStream& operator<<(const T& s);

  //! The output stream that all data is to be sent to; example: std::cout.
  std::ostream& destination;

  //! Discards input, prints nothing if true.
  bool ignoreInput;

 private:
  /**
   * Conducts the base logic required in all the operator << overloads.  Mostly
   * just a good idea to reduce copy-pasta.
   *
   * This overload is for non-Armadillo objects, which do not need special
   * handling during printing.
   *
   * @tparam T The type of the data to output.
   * @param val The data to be output.
   */
  template<typename T>
  typename std::enable_if<!arma::is_arma_type<T>::value>::type
  BaseLogic(const T& val);

  /**
   * Conducts the base logic required in all the operator << overloads.  Mostly
   * just a good idea to reduce copy-pasta.
   *
   * This overload is for Armadillo objects, which need special handling during
   * printing.
   *
   * @tparam T The type of the data to output.
   * @param val The data to be output.
   */
  template<typename T>
  typename std::enable_if<arma::is_arma_type<T>::value>::type
  BaseLogic(const T& val);

  /**
   * Output the prefix, but only if we need to and if we are allowed to.
   */
  inline void PrefixIfNeeded();

  //! Contains the prefix we must prepend to each line.
  std::string prefix;

  //! If true, the previous call to operator<< encountered a CR, and a prefix
  //! will be necessary.
  bool carriageReturned;

  //! If true, a std::runtime_error exception will be thrown when a CR is
  //! encountered.
  bool fatal;
};

} // namespace util
} // namespace mlpack

// Template definitions.
#include "prefixedoutstream_impl.hpp" #endif mlpack-2.2.5/src/mlpack/core/util/prefixedoutstream_impl.hpp000066400000000000000000000161741315013601400242410ustar00rootroot00000000000000/** * @file prefixedoutstream.hpp * @author Ryan Curtin * @author Matthew Amidon * * Implementation of templated PrefixedOutStream member functions. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_UTIL_PREFIXEDOUTSTREAM_IMPL_HPP #define MLPACK_CORE_UTIL_PREFIXEDOUTSTREAM_IMPL_HPP // Just in case it hasn't been included. #include "prefixedoutstream.hpp" #ifdef HAS_BFD_DL #include "backtrace.hpp" #endif #include namespace mlpack { namespace util { template PrefixedOutStream& PrefixedOutStream::operator<<(const T& s) { BaseLogic(s); return *this; } // For non-Armadillo types. template typename std::enable_if::value>::type PrefixedOutStream::BaseLogic(const T& val) { // We will use this to track whether or not we need to terminate at the end of // this call (only for streams which terminate after a newline). bool newlined = false; std::string line; // If we need to, output the prefix. PrefixIfNeeded(); std::ostringstream convert; // Sync flags and precision with destination stream convert.setf(destination.flags()); convert.precision(destination.precision()); convert << val; if (convert.fail()) { PrefixIfNeeded(); if (!ignoreInput) { destination << "Failed lexical_cast(T) for output; output" " not shown." << std::endl; newlined = true; } } else { line = convert.str(); // If the length of the casted thing was 0, it may have been a stream // manipulator, so send it directly to the stream and don't ask questions. if (line.length() == 0) { // The prefix cannot be necessary at this point. if (!ignoreInput) // Only if the user wants it. destination << val; return; } // Now, we need to check for newlines in the output and print it. size_t nl; size_t pos = 0; while ((nl = line.find('\n', pos)) != std::string::npos) { PrefixIfNeeded(); // Only output if the user wants it. if (!ignoreInput) { destination << line.substr(pos, nl - pos); destination << std::endl; newlined = true; } carriageReturned = true; // Regardless of whether or not we display it. pos = nl + 1; } if (pos != line.length()) // We need to display the rest. { PrefixIfNeeded(); if (!ignoreInput) destination << line.substr(pos); } } // If we displayed a newline and we need to throw afterwards, do that. if (fatal && newlined) { if (!ignoreInput) destination << std::endl; // Print a backtrace, if we can. #ifdef HAS_BFD_DL if (fatal) { size_t nl; size_t pos = 0; Backtrace bt; std::string btLine = bt.ToString(); while ((nl = btLine.find('\n', pos)) != std::string::npos) { PrefixIfNeeded(); destination << btLine.substr(pos, nl - pos); destination << std::endl; carriageReturned = true; // Regardless of whether or not we display it. pos = nl + 1; } } #endif throw std::runtime_error("fatal error; see Log::Fatal output"); } } // For Armadillo types. template typename std::enable_if::value>::type PrefixedOutStream::BaseLogic(const T& val) { // Extract printable object from the input. const arma::Mat& printVal(val); // We will use this to track whether or not we need to terminate at the end of // this call (only for streams which terminate after a newline). 
bool newlined = false; std::string line; // If we need to, output the prefix. PrefixIfNeeded(); std::ostringstream convert; // Check if the stream is in the default state. if (destination.flags() == convert.flags() && destination.precision() == convert.precision()) { printVal.print(convert); } else { // Sync flags and precision with destination stream convert.setf(destination.flags()); convert.precision(destination.precision()); // Set width of the convert stream. const arma::Mat& absVal(arma::abs(printVal)); double maxVal = absVal.max(); if (maxVal == 0.0) maxVal = 1; int maxLog = log10(maxVal); maxLog = (maxLog > 0) ? floor(maxLog) + 1 : 1; const int padding = 4; convert.width(convert.precision() + maxLog + padding); printVal.raw_print(convert); } if (convert.fail()) { PrefixIfNeeded(); if (!ignoreInput) { destination << "Failed type conversion to string for output; output not " "shown." << std::endl; newlined = true; } } else { line = convert.str(); // If the length of the casted thing was 0, it may have been a stream // manipulator, so send it directly to the stream and don't ask questions. if (line.length() == 0) { // The prefix cannot be necessary at this point. if (!ignoreInput) // Only if the user wants it. destination << val; return; } // Now, we need to check for newlines in the output and print it. size_t nl; size_t pos = 0; while ((nl = line.find('\n', pos)) != std::string::npos) { PrefixIfNeeded(); // Only output if the user wants it. if (!ignoreInput) { destination << line.substr(pos, nl - pos); destination << std::endl; } newlined = true; // Ensure this is set for the fatal exception if needed. carriageReturned = true; // Regardless of whether or not we display it. pos = nl + 1; } if (pos != line.length()) // We need to display the rest. { PrefixIfNeeded(); if (!ignoreInput) destination << line.substr(pos); } } // If we displayed a newline and we need to throw afterwards, do that. if (fatal && newlined) { if (!ignoreInput) destination << std::endl; // Print a backtrace, if we can. #ifdef HAS_BFD_DL if (fatal && !ignoreInput) { size_t nl; size_t pos = 0; Backtrace bt; std::string btLine = bt.ToString(); while ((nl = btLine.find('\n', pos)) != std::string::npos) { PrefixIfNeeded(); if (!ignoreInput) { destination << btLine.substr(pos, nl - pos); destination << std::endl; } carriageReturned = true; // Regardless of whether or not we display it. pos = nl + 1; } } #endif throw std::runtime_error("fatal error; see Log::Fatal output"); } } // This is an inline function (that is why it is here and not in .cc). void PrefixedOutStream::PrefixIfNeeded() { // If we need to, output a prefix. if (carriageReturned) { if (!ignoreInput) // But only if we are allowed to. destination << prefix; carriageReturned = false; // Denote that the prefix has been displayed. } } } // namespace util } // namespace mlpack #endif // MLPACK_CORE_UTIL_PREFIXEDOUTSTREAM_IMPL_HPP mlpack-2.2.5/src/mlpack/core/util/sfinae_utility.hpp000066400000000000000000000043751315013601400224760ustar00rootroot00000000000000/** * @file sfinae_utility.hpp * @author Trironk Kiatkungwanglai * * This file contains macro utilities for the SFINAE Paradigm. These utilities * determine if classes passed in as template parameters contain members at * compile time, which is useful for changing functionality depending on what * operations an object is capable of performing. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_CORE_SFINAE_UTILITY #define MLPACK_CORE_SFINAE_UTILITY #include #include /* * Constructs a template supporting the SFINAE pattern. * * This macro generates a template struct that is useful for enabling/disabling * a method if the template class passed in contains a member function matching * a given signature with a specified name. * * The generated struct should be used in conjunction with boost::disable_if and * boost::enable_if. Here is an example usage: * * For general references, see: * http://stackoverflow.com/a/264088/391618 * * For an mlpack specific use case, see /mlpack/core/util/prefixedoutstream.hpp * and /mlpack/core/util/prefixedoutstream_impl.hpp * * @param NAME the name of the struct to construct. For example: HasToString * @param FUNC the name of the function to check for. For example: ToString */ #define HAS_MEM_FUNC(FUNC, NAME) \ template \ struct NAME { \ typedef char yes[1]; \ typedef char no [2]; \ template struct type_check; \ template static yes &chk(type_check *); \ template static no &chk(...); \ static bool const value = sizeof(chk(0)) == sizeof(yes); \ }; #endif mlpack-2.2.5/src/mlpack/core/util/singletons.cpp000066400000000000000000000031441315013601400216170ustar00rootroot00000000000000/** * @file singletons.cpp * @author Ryan Curtin * * Declaration of singletons in libmlpack.so. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "cli.hpp" #include "log.hpp" #include "singletons.hpp" using namespace mlpack; using namespace mlpack::util; // Color code escape sequences -- but not on Windows. #ifndef _WIN32 #define BASH_RED "\033[0;31m" #define BASH_GREEN "\033[0;32m" #define BASH_YELLOW "\033[0;33m" #define BASH_CYAN "\033[0;36m" #define BASH_CLEAR "\033[0m" #else #define BASH_RED "" #define BASH_GREEN "" #define BASH_YELLOW "" #define BASH_CYAN "" #define BASH_CLEAR "" #endif CLI* CLI::singleton = NULL; // Only output debugging output if in debug mode. #ifdef DEBUG PrefixedOutStream Log::Debug = PrefixedOutStream(std::cout, BASH_CYAN "[DEBUG] " BASH_CLEAR); #else NullOutStream Log::Debug = NullOutStream(); #endif PrefixedOutStream Log::Info = PrefixedOutStream(std::cout, BASH_GREEN "[INFO ] " BASH_CLEAR, true /* unless --verbose */, false); PrefixedOutStream Log::Warn = PrefixedOutStream(std::cout, BASH_YELLOW "[WARN ] " BASH_CLEAR, false, false); PrefixedOutStream Log::Fatal = PrefixedOutStream(std::cerr, BASH_RED "[FATAL] " BASH_CLEAR, false, true /* fatal */); /** * This has to be last, so that the CLI object is destroyed before the Log * output objects are destroyed. */ CLIDeleter cliDeleter; mlpack-2.2.5/src/mlpack/core/util/singletons.hpp000066400000000000000000000012431315013601400216220ustar00rootroot00000000000000/** * @file singletons.hpp * @author Ryan Curtin * * Definitions of singletons used by libmlpack.so. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #ifndef MLPACK_CORE_UTIL_SINGLETONS_HPP #define MLPACK_CORE_UTIL_SINGLETONS_HPP #include #include "cli_deleter.hpp" namespace mlpack { namespace util { extern MLPACK_EXPORT CLIDeleter cliDeleter; } // namespace util } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/core/util/timers.cpp000066400000000000000000000100621315013601400207320ustar00rootroot00000000000000/** * @file timers.cpp * @author Matthew Amidon * @author Marcus Edel * * Implementation of timers. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "timers.hpp" #include "cli.hpp" #include "log.hpp" #include #include using namespace mlpack; using namespace std::chrono; /** * Start the given timer. */ void Timer::Start(const std::string& name) { CLI::GetSingleton().timer.StartTimer(name); } /** * Stop the given timer. */ void Timer::Stop(const std::string& name) { CLI::GetSingleton().timer.StopTimer(name); } /** * Get the given timer. */ microseconds Timer::Get(const std::string& name) { return CLI::GetSingleton().timer.GetTimer(name); } std::map& Timers::GetAllTimers() { return timers; } microseconds Timers::GetTimer(const std::string& timerName) { return timers[timerName]; } bool Timers::GetState(std::string timerName) { return timerState[timerName]; } void Timers::PrintTimer(const std::string& timerName) { microseconds totalDuration = timers[timerName]; // Convert microseconds to seconds. seconds totalDurationSec = duration_cast(totalDuration); microseconds totalDurationMicroSec = duration_cast(totalDuration % seconds(1)); Log::Info << totalDurationSec.count() << "." << std::setw(6) << std::setfill('0') << totalDurationMicroSec.count() << "s"; // Also output convenient day/hr/min/sec. // The following line is a custom duration for a day. typedef duration> days; days d = duration_cast(totalDuration); hours h = duration_cast(totalDuration % days(1)); minutes m = duration_cast(totalDuration % hours(1)); seconds s = duration_cast(totalDuration % minutes(1)); // No output if it didn't even take a minute. if (!(d.count() == 0 && h.count() == 0 && m.count() == 0)) { bool output = false; // Denotes if we have output anything yet. Log::Info << " ("; // Only output units if they have nonzero values (yes, a bit tedious). if (d.count() > 0) { Log::Info << d.count() << " days"; output = true; } if (h.count() > 0) { if (output) Log::Info << ", "; Log::Info << h.count() << " hrs"; output = true; } if (m.count() > 0) { if (output) Log::Info << ", "; Log::Info << m.count() << " mins"; output = true; } if (s.count() > 0) { if (output) Log::Info << ", "; Log::Info << s.count() << "." 
<< std::setw(1) << (totalDurationMicroSec.count() / 100000)
          << " secs";
      output = true;
    }

    Log::Info << ")";
  }

  Log::Info << std::endl;
}

high_resolution_clock::time_point Timers::GetTime()
{
  return high_resolution_clock::now();
}

void Timers::StartTimer(const std::string& timerName)
{
  if ((timerState[timerName] == 1) && (timerName != "total_time"))
  {
    std::ostringstream error;
    error << "Timer::Start(): timer '" << timerName
        << "' has already been started";
    throw std::runtime_error(error.str());
  }

  timerState[timerName] = true;
  high_resolution_clock::time_point currTime = GetTime();

  // If this timer has not been seen before, initialize its total to zero.
  if (timers.count(timerName) == 0)
  {
    timers[timerName] = (microseconds) 0;
  }

  timerStartTime[timerName] = currTime;
}

void Timers::StopTimer(const std::string& timerName)
{
  if ((timerState[timerName] == 0) && (timerName != "total_time"))
  {
    std::ostringstream error;
    error << "Timer::Stop(): timer '" << timerName
        << "' has already been stopped";
    throw std::runtime_error(error.str());
  }

  timerState[timerName] = false;
  high_resolution_clock::time_point currTime = GetTime();

  // Calculate the delta time and accumulate it into the timer's total.
  timers[timerName] += duration_cast<microseconds>(currTime -
      timerStartTime[timerName]);
}
mlpack-2.2.5/src/mlpack/core/util/timers.hpp000066400000000000000000000075601315013601400207500ustar00rootroot00000000000000/**
 * @file timers.hpp
 * @author Matthew Amidon
 * @author Marcus Edel
 *
 * Timers for MLPACK.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_UTILITIES_TIMERS_HPP
#define MLPACK_CORE_UTILITIES_TIMERS_HPP

#include <map>
#include <string>
#include <chrono> // chrono library for cross platform timer calculation

#if defined(_WIN32)
  // uint64_t isn't defined on every Windows system.
  #if !defined(HAVE_UINT64_T)
    #if SIZEOF_UNSIGNED_LONG == 8
      typedef unsigned long uint64_t;
    #else
      typedef unsigned long long uint64_t;
    #endif // SIZEOF_UNSIGNED_LONG
  #endif // HAVE_UINT64_T
#endif

namespace mlpack {

/**
 * The timer class provides a way for mlpack methods to be timed.  The three
 * methods contained in this class allow a named timer to be started and
 * stopped, and its value to be obtained.
 */
class Timer
{
 public:
  /**
   * Start the given timer.  If a timer is started, then stopped, then
   * re-started, then re-stopped, the final value of the timer is the length of
   * both runs -- that is, mlpack timers are additive for each time they are
   * run, and do not reset.
   *
   * @note A std::runtime_error exception will be thrown if a timer is started
   * twice.
   *
   * @param name Name of timer to be started.
   */
  static void Start(const std::string& name);

  /**
   * Stop the given timer.
   *
   * @note A std::runtime_error exception will be thrown if a timer is stopped
   * twice.
   *
   * @param name Name of timer to be stopped.
   */
  static void Stop(const std::string& name);

  /**
   * Get the value of the given timer.
   *
   * @param name Name of timer to return value of.
   */
  static std::chrono::microseconds Get(const std::string& name);
};

class Timers
{
 public:
  //! Nothing to do for the constructor.
  Timers() { }

  /**
   * Returns a reference to the map of all the timers used via this interface.
   */
  std::map<std::string, std::chrono::microseconds>& GetAllTimers();

  /**
   * Returns a copy of the timer specified.
   *
   * @param timerName The name of the timer in question.
   */
  std::chrono::microseconds GetTimer(const std::string& timerName);

  /**
   * Prints the specified timer.
 If it took longer than a minute to complete,
   * the timer will also be displayed in days, hours, and minutes.
   *
   * @param timerName The name of the timer in question.
   */
  void PrintTimer(const std::string& timerName);

  /**
   * Initializes a timer, available like a normal value specified on the
   * command line.  Timer values are stored as std::chrono::microseconds.  If a
   * timer is started, then stopped, then re-started, then stopped, the final
   * timer value will be the length of both runs of the timer.
   *
   * @param timerName The name of the timer in question.
   */
  void StartTimer(const std::string& timerName);

  /**
   * Halts the timer, and adds the delta time from its start to its accumulated
   * value.
   *
   * @param timerName The name of the timer in question.
   */
  void StopTimer(const std::string& timerName);

  /**
   * Returns whether or not the given timer is currently running.
   *
   * @param timerName The name of the timer in question.
   */
  bool GetState(std::string timerName);

 private:
  //! A map of all the timers that are being tracked.
  std::map<std::string, std::chrono::microseconds> timers;
  //! A map that contains whether or not each timer is currently running.
  std::map<std::string, bool> timerState;
  //! A map for the starting values of the timers.
  std::map<std::string, std::chrono::high_resolution_clock::time_point>
      timerStartTime;

  std::chrono::high_resolution_clock::time_point GetTime();
};

} // namespace mlpack

#endif // MLPACK_CORE_UTILITIES_TIMERS_HPP
mlpack-2.2.5/src/mlpack/core/util/version.cpp000066400000000000000000000016001315013601400211120ustar00rootroot00000000000000/**
 * @file version.cpp
 * @author Ryan Curtin
 *
 * The implementation of GetVersion().
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "version.hpp"
#include <sstream>

// If we are not a git revision, just use the macros to assemble the version
// name.
std::string mlpack::util::GetVersion()
{
#ifndef MLPACK_GIT_VERSION
  std::stringstream o;
  o << "mlpack " << MLPACK_VERSION_MAJOR << "." << MLPACK_VERSION_MINOR << "."
      << MLPACK_VERSION_PATCH;
  return o.str();
#else
  // This file is generated by CMake as necessary and contains just a return
  // statement with the git revision in it.
  #include "gitversion.hpp"
#endif
}
mlpack-2.2.5/src/mlpack/core/util/version.hpp000066400000000000000000000023651315013601400211300ustar00rootroot00000000000000/**
 * @file version.hpp
 * @author Ryan Curtin
 *
 * The current version of mlpack, available as macros and as a string.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_CORE_UTIL_VERSION_HPP
#define MLPACK_CORE_UTIL_VERSION_HPP

#include <string>

// The version of mlpack.  If this is a git repository, this will be a version
// with a higher number than the most recent release.
#define MLPACK_VERSION_MAJOR 2
#define MLPACK_VERSION_MINOR 2
#define MLPACK_VERSION_PATCH 5

// Reverse compatibility; these macros will be removed in future versions of
// mlpack (3.0.0 and newer)!
#define __MLPACK_VERSION_MAJOR 2
#define __MLPACK_VERSION_MINOR 2
#define __MLPACK_VERSION_PATCH 5

// The name of the version (for use by --version).
namespace mlpack {
namespace util {

/**
 * This will return either "mlpack x.y.z" or "mlpack master-XXXXXXX" depending
 * on whether or not this is a stable version of mlpack or a git repository.
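 *
 * Illustrative use (sketch):
 * @code
 * std::cout << mlpack::util::GetVersion() << std::endl;
 * @endcode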
*/ std::string GetVersion(); } // namespace util } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/000077500000000000000000000000001315013601400164625ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/CMakeLists.txt000066400000000000000000000022021315013601400212160ustar00rootroot00000000000000# This macro adds a command-line executable with the given name. It assumes # that the file with main() is in _main.cpp, and produces an output # program with the name mlpack_. macro (add_cli_executable name) if (BUILD_CLI_EXECUTABLES) add_executable(mlpack_${name} ${name}_main.cpp ) target_link_libraries(mlpack_${name} mlpack ${COMPILER_SUPPORT_LIBRARIES} ) install(TARGETS mlpack_${name} RUNTIME DESTINATION bin) endif () endmacro () # Recurse into each method mlpack provides. set(DIRS preprocess adaboost approx_kfn amf cf dbscan decision_stump decision_tree det emst fastmks gmm hmm hoeffding_trees kernel_pca kmeans mean_shift lars linear_regression local_coordinate_coding logistic_regression lsh matrix_completion naive_bayes nca neighbor_search nmf # lmf pca perceptron quic_svd radical randomized_svd range_search rann regularized_svd softmax_regression sparse_autoencoder sparse_coding nystroem_method ) foreach(dir ${DIRS}) add_subdirectory(${dir}) endforeach() set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/adaboost/000077500000000000000000000000001315013601400202565ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/adaboost/CMakeLists.txt000066400000000000000000000010141315013601400230120ustar00rootroot00000000000000cmake_minimum_required(VERSION 2.8) # Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES adaboost.hpp adaboost_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable (adaboost) mlpack-2.2.5/src/mlpack/methods/adaboost/adaboost.hpp000066400000000000000000000142001315013601400225600ustar00rootroot00000000000000/** * @file adaboost.hpp * @author Udit Saxena * * The AdaBoost class. AdaBoost is a boosting algorithm, meaning that it * combines an ensemble of weak learners to produce a strong learner. For more * information on AdaBoost, see the following paper: * * @code * @article{schapire1999improved, * author = {Schapire, Robert E. and Singer, Yoram}, * title = {Improved Boosting Algorithms Using Confidence-rated Predictions}, * journal = {Machine Learning}, * volume = {37}, * number = {3}, * month = dec, * year = {1999}, * issn = {0885-6125}, * pages = {297--336}, * } * @endcode * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_ADABOOST_ADABOOST_HPP #define MLPACK_METHODS_ADABOOST_ADABOOST_HPP #include #include #include namespace mlpack { namespace adaboost { /** * The AdaBoost class. AdaBoost is a boosting algorithm, meaning that it * combines an ensemble of weak learners to produce a strong learner. For more * information on AdaBoost, see the following paper: * * @code * @article{schapire1999improved, * author = {Schapire, Robert E. 
and Singer, Yoram}, * title = {Improved Boosting Algorithms Using Confidence-rated Predictions}, * journal = {Machine Learning}, * volume = {37}, * number = {3}, * month = dec, * year = {1999}, * issn = {0885-6125}, * pages = {297--336}, * } * @endcode * * This class is general, and can be used with any type of weak learner, so long * as the learner implements the following functions: * * @code * // A boosting constructor, which learns using the training parameters of the * // given other WeakLearner, but uses the given instance weights for training. * WeakLearner(WeakLearner& other, * const MatType& data, * const arma::Row& labels, * const arma::rowvec& weights); * * // Given the test points, classify them and output predictions into * // predictedLabels. * void Classify(const MatType& data, arma::Row& predictedLabels); * @endcode * * For more information on and examples of weak learners, see * perceptron::Perceptron<> and decision_stump::DecisionStump<>. * * @tparam MatType Data matrix type (i.e. arma::mat or arma::sp_mat). * @tparam WeakLearnerType Type of weak learner to use. */ template, typename MatType = arma::mat> class AdaBoost { public: /** * Constructor. This runs the AdaBoost.MH algorithm to provide a trained * boosting model. This constructor takes an already-initialized weak * learner; all other weak learners will learn with the same parameters as the * given weak learner. * * @param data Input data. * @param labels Corresponding labels. * @param iterations Number of boosting rounds. * @param tol The tolerance for change in values of rt. * @param other Weak learner that has already been initialized. */ AdaBoost(const MatType& data, const arma::Row& labels, const WeakLearnerType& other, const size_t iterations = 100, const double tolerance = 1e-6); /** * Create the AdaBoost object without training. Be sure to call Train() * before calling Classify()! */ AdaBoost(const double tolerance = 1e-6); // Return the value of ztProduct. double ZtProduct() { return ztProduct; } //! Get the tolerance for stopping the optimization during training. double Tolerance() const { return tolerance; } //! Modify the tolerance for stopping the optimization during training. double& Tolerance() { return tolerance; } //! Get the number of classes this model is trained on. size_t Classes() const { return classes; } //! Get the number of weak learners in the model. size_t WeakLearners() const { return alpha.size(); } //! Get the weights for the given weak learner. double Alpha(const size_t i) const { return alpha[i]; } //! Modify the weight for the given weak learner (be careful!). double& Alpha(const size_t i) { return alpha[i]; } //! Get the given weak learner. const WeakLearnerType& WeakLearner(const size_t i) const { return wl[i]; } //! Modify the given weak learner (be careful!). WeakLearnerType& WeakLearner(const size_t i) { return wl[i]; } /** * Train AdaBoost on the given dataset. This method takes an initialized * WeakLearnerType; the parameters for this weak learner will be used to train * each of the weak learners during AdaBoost training. Note that this will * completely overwrite any model that has already been trained with this * object. * * @param data Dataset to train on. * @param labels Labels for each point in the dataset. * @param learner Learner to use for training. */ void Train(const MatType& data, const arma::Row& labels, const WeakLearnerType& learner, const size_t iterations = 100, const double tolerance = 1e-6); /** * Classify the given test points. 
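 * A short usage sketch (the model object here is hypothetical):
 * @code
 * arma::Row<size_t> predictions;
 * model.Classify(testData, predictions);
 * @endcode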
 *
 * @param test Testing data.
 * @param predictedLabels Vector in which the predicted labels of the test
 *     set will be stored.
 */
void Classify(const MatType& test, arma::Row& predictedLabels);

/**
 * Serialize the AdaBoost model.
 */
template
void Serialize(Archive& ar, const unsigned int /* version */);

private:
  //! The number of classes in the model.
  size_t classes;
  //! The tolerance for change in rt and when to stop.
  double tolerance;
  //! The vector of weak learners.
  std::vector wl;
  //! The weights corresponding to each weak learner.
  std::vector alpha;
  //! To check for the bound for the Hamming loss.
  double ztProduct;
}; // class AdaBoost

} // namespace adaboost
} // namespace mlpack

#include "adaboost_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/methods/adaboost/adaboost_impl.hpp000066400000000000000000000156031315013601400236110ustar00rootroot00000000000000/**
 * @file adaboost_impl.hpp
 * @author Udit Saxena
 *
 * Implementation of the AdaBoost class.
 *
 * @code
 * @article{schapire1999improved,
 *   author = {Schapire, Robert E. and Singer, Yoram},
 *   title = {Improved Boosting Algorithms Using Confidence-rated Predictions},
 *   journal = {Machine Learning},
 *   volume = {37},
 *   number = {3},
 *   month = dec,
 *   year = {1999},
 *   issn = {0885-6125},
 *   pages = {297--336},
 * }
 * @endcode
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_ADABOOST_ADABOOST_IMPL_HPP
#define MLPACK_METHODS_ADABOOST_ADABOOST_IMPL_HPP

#include "adaboost.hpp"

namespace mlpack {
namespace adaboost {

/**
 * Constructor.  Currently runs the AdaBoost.MH algorithm.
 *
 * @param data Input data.
 * @param labels Corresponding labels.
 * @param other Weak learner, which has been initialized already.
 * @param iterations Number of boosting rounds.
 * @param tol Tolerance for termination of AdaBoost.MH.
 */
template
AdaBoost::AdaBoost(
    const MatType& data,
    const arma::Row& labels,
    const WeakLearnerType& other,
    const size_t iterations,
    const double tol)
{
  Train(data, labels, other, iterations, tol);
}

// Empty constructor.
template
AdaBoost::AdaBoost(const double tolerance) :
    tolerance(tolerance)
{
  // Nothing to do.
}

// Train AdaBoost.
template
void AdaBoost::Train(
    const MatType& data,
    const arma::Row& labels,
    const WeakLearnerType& other,
    const size_t iterations,
    const double tolerance)
{
  // Clear information from previous runs.
  wl.clear();
  alpha.clear();

  // Count the number of classes.
  classes = (arma::max(labels) - arma::min(labels)) + 1;
  this->tolerance = tolerance;

  // crt is the cumulative rt value for terminating the optimization when rt is
  // changing by less than the tolerance.
  double rt, crt = 0.0, alphat = 0.0, zt;

  ztProduct = 1.0;

  // To be used for prediction by the weak learner.
  arma::Row predictedLabels(labels.n_cols);

  // Use tempData to modify input data for incorporating weights.
  MatType tempData(data);

  // This matrix is a helper matrix used to calculate the final hypothesis.
  arma::mat sumFinalH = arma::zeros(classes, predictedLabels.n_cols);

  // Load the initial weights into a 2-D matrix.
  const double initWeight = 1.0 / double(data.n_cols * classes);
  arma::mat D(classes, data.n_cols);
  D.fill(initWeight);

  // Weights are stored in this row vector.
  arma::rowvec weights(predictedLabels.n_cols);
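  // A sketch of the math in each boosting round below, following the Schapire
  // and Singer paper cited above: after training a weak learner on the
  // weighted data, we compute the weighted correlation
  //   rt = sum_j (+/- sum_k D(k, j)),
  // taking + where the weak learner classified point j correctly and - where
  // it erred; then we set the weak learner's weight
  //   alphat = 0.5 * log((1 + rt) / (1 - rt)),
  // and rescale D by exp(-alphat) on correctly classified points and by
  // exp(+alphat) on mistakes, renormalizing by zt so D remains a distribution.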
  // This is the final hypothesis.
  arma::Row finalH(predictedLabels.n_cols);

  // Now, start the boosting rounds.
  for (size_t i = 0; i < iterations; i++)
  {
    // Initialized to zero in every round.  rt is used for calculation of
    // alphat; it is the weighted error.
    // rt = (sum) D(i) y(i) ht(xi)
    rt = 0.0;

    // zt is used for weight normalization.
    zt = 0.0;

    // Build the weight vectors.
    weights = arma::sum(D);

    // Use the existing weak learner to train a new one with new weights.
    WeakLearnerType w(other, tempData, labels, weights);
    w.Classify(tempData, predictedLabels);

    // Now from predictedLabels, build ht, the weak hypothesis
    // buildClassificationMatrix(ht, predictedLabels);

    // Now, calculate alpha(t) using ht.
    for (size_t j = 0; j < D.n_cols; j++) // instead of D, ht
    {
      if (predictedLabels(j) == labels(j))
        rt += arma::accu(D.col(j));
      else
        rt -= arma::accu(D.col(j));
    }

    if ((i > 0) && (std::abs(rt - crt) < tolerance))
      break;
    crt = rt;

    // Our goal is to find alphat which minimizes or approximately minimizes
    // the value of Z as a function of alpha.
    alphat = 0.5 * log((1 + rt) / (1 - rt));

    alpha.push_back(alphat);
    wl.push_back(w);

    // Now start modifying the weights.
    for (size_t j = 0; j < D.n_cols; j++)
    {
      const double expo = exp(alphat);
      if (predictedLabels(j) == labels(j))
      {
        for (size_t k = 0; k < D.n_rows; k++)
        {
          // We calculate zt, the normalization constant.
          D(k, j) /= expo;
          zt += D(k, j); // * exp(-1 * alphat * yt(j,k) * ht(j,k));

          // Add to the final hypothesis matrix.
          // sumFinalH(k, j) += (alphat * ht(k, j));
          if (k == labels(j))
            sumFinalH(k, j) += (alphat); // * ht(k, j));
          else
            sumFinalH(k, j) -= (alphat);
        }
      }
      else
      {
        for (size_t k = 0; k < D.n_rows; k++)
        {
          // We calculate zt, the normalization constant.
          D(k, j) *= expo;
          zt += D(k, j);

          // Add to the final hypothesis matrix.
          if (k == labels(j))
            sumFinalH(k, j) += alphat; // * ht(k, j));
          else
            sumFinalH(k, j) -= alphat;
        }
      }
    }

    // Normalize D.
    D /= zt;

    // Accumulate the value of zt for the Hamming loss bound.
    ztProduct *= zt;
  }
}

/**
 * Classify the given test points.
 */
template
void AdaBoost::Classify(
    const MatType& test,
    arma::Row& predictedLabels)
{
  arma::Row tempPredictedLabels(test.n_cols);
  arma::mat cMatrix(classes, test.n_cols);
  cMatrix.zeros();
  predictedLabels.set_size(test.n_cols);

  for (size_t i = 0; i < wl.size(); i++)
  {
    wl[i].Classify(test, tempPredictedLabels);

    for (size_t j = 0; j < tempPredictedLabels.n_cols; j++)
      cMatrix(tempPredictedLabels(j), j) += alpha[i];
  }

  arma::colvec cMRow;
  arma::uword maxIndex = 0;

  for (size_t i = 0; i < predictedLabels.n_cols; i++)
  {
    cMRow = cMatrix.unsafe_col(i);
    cMRow.max(maxIndex);
    predictedLabels(i) = maxIndex;
  }
}

/**
 * Serialize the AdaBoost model.
 */
template
template
void AdaBoost::Serialize(Archive& ar, const unsigned int /* version */)
{
  ar & data::CreateNVP(classes, "classes");
  ar & data::CreateNVP(tolerance, "tolerance");
  ar & data::CreateNVP(ztProduct, "ztProduct");
  ar & data::CreateNVP(alpha, "alpha");

  // Now serialize each weak learner.
  if (Archive::is_loading::value)
  {
    wl.clear();
    wl.resize(alpha.size());
  }

  for (size_t i = 0; i < wl.size(); ++i)
  {
    std::ostringstream oss;
    oss << "weakLearner" << i;
    ar & data::CreateNVP(wl[i], oss.str());
  }
}

} // namespace adaboost
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/adaboost/adaboost_main.cpp000066400000000000000000000314051315013601400235650ustar00rootroot00000000000000/**
 * @file adaboost_main.cpp
 * @author Udit Saxena
 *
 * Implementation of the AdaBoost main program.
 *
 * @code
 * @article{Schapire:1999:IBA:337859.337870,
 *   author = {Schapire, Robert E.
and Singer, Yoram}, * title = {Improved Boosting Algorithms Using Confidence-rated Predictions}, * journal = {Machine Learning}, * issue_date = {Dec. 1999}, * volume = {37}, * number = {3}, * month = dec, * year = {1999}, * issn = {0885-6125}, * pages = {297--336}, * numpages = {40}, * url = {http://dx.doi.org/10.1023/A:1007614523901}, * doi = {10.1023/A:1007614523901}, * acmid = {337870}, * publisher = {Kluwer Academic Publishers}, * address = {Hingham, MA, USA}, * keywords = {boosting algorithms, decision trees, multiclass classification, * output coding} * @endcode * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "adaboost.hpp" #include #include using namespace mlpack; using namespace std; using namespace arma; using namespace mlpack::adaboost; using namespace mlpack::decision_stump; using namespace mlpack::perceptron; PROGRAM_INFO("AdaBoost", "This program implements the AdaBoost (or Adaptive " "Boosting) algorithm. The variant of AdaBoost implemented here is " "AdaBoost.MH. It uses a weak learner, either decision stumps or " "perceptrons, and over many iterations, creates a strong learner that is a " "weighted ensemble of weak learners. It runs these iterations until a " "tolerance value is crossed for change in the value of the weighted " "training error." "\n\n" "For more information about the algorithm, see the paper \"Improved " "Boosting Algorithms Using Confidence-Rated Predictions\", by R.E. Schapire" " and Y. Singer." "\n\n" "This program allows training of an AdaBoost model, and then application of" " that model to a test dataset. To train a model, a dataset must be passed" " with the --training_file (-t) option. Labels can be given with the " "--labels_file (-l) option; if no labels file is specified, the labels will" " be assumed to be the last column of the input dataset. Alternately, an " "AdaBoost model may be loaded with the --input_model_file (-m) option." "\n\n" "Once a model is trained or loaded, it may be used to provide class " "predictions for a given test dataset. A test dataset may be specified " "with the --test_file (-T) parameter. The predicted classes for each point" " in the test dataset will be saved into the file specified by the " "--output_file (-o) parameter. The AdaBoost model itself may be saved to " "a file specified by the --output_model_file (-M) parameter."); // Input for training. PARAM_STRING_IN("training_file", "A file containing the training set.", "t", ""); PARAM_STRING_IN("labels_file", "A file containing labels for the training set.", "l", ""); // Loading/saving of a model. PARAM_STRING_IN("input_model_file", "File containing input AdaBoost model.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained AdaBoost model to.", "M"); // Classification options. PARAM_STRING_IN("test_file", "A file containing the test set.", "T", ""); PARAM_STRING_OUT("output_file", "The file in which the predicted labels for the" " test set will be written.", "o"); // Training options. 
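//
// For reference, two hypothetical example invocations using the options
// defined in this file (all file names are illustrative only): a model can be
// trained and saved with
//   mlpack_adaboost -t train.csv -l labels.csv -M model.xml
// and then used for prediction with
//   mlpack_adaboost -m model.xml -T test.csv -o predictions.csv
//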
PARAM_INT_IN("iterations", "The maximum number of boosting iterations to be run" " (0 will run until convergence.)", "i", 1000); PARAM_DOUBLE_IN("tolerance", "The tolerance for change in values of the " "weighted error during training.", "e", 1e-10); PARAM_STRING_IN("weak_learner", "The type of weak learner to use: " "'decision_stump', or 'perceptron'.", "w", "decision_stump"); /** * The model to save to disk. */ class AdaBoostModel { public: enum WeakLearnerTypes { DECISION_STUMP, PERCEPTRON }; private: //! The mappings for the labels. Col mappings; //! The type of weak learner. size_t weakLearnerType; //! Non-NULL if using decision stumps. AdaBoost>* dsBoost; //! Non-NULL if using perceptrons. AdaBoost>* pBoost; //! Number of dimensions in training data. size_t dimensionality; public: //! Create an empty AdaBoost model. AdaBoostModel() : dsBoost(NULL), pBoost(NULL), dimensionality(0) { } //! Create the AdaBoost model with the given mappings and type. AdaBoostModel(const Col& mappings, const size_t weakLearnerType) : mappings(mappings), weakLearnerType(weakLearnerType), dsBoost(NULL), pBoost(NULL), dimensionality(0) { // Nothing to do. } ~AdaBoostModel() { if (dsBoost) delete dsBoost; if (pBoost) delete pBoost; } //! Get the mappings. const Col& Mappings() const { return mappings; } //! Modify the mappings. Col& Mappings() { return mappings; } //! Get the weak learner type. size_t WeakLearnerType() const { return weakLearnerType; } //! Modify the weak learner type. size_t& WeakLearnerType() { return weakLearnerType; } //! Get the dimensionality of the model. size_t Dimensionality() const { return dimensionality; } //! Modify the dimensionality of the model. size_t& Dimensionality() { return dimensionality; } //! Train the model. void Train(const mat& data, const Row& labels, const size_t iterations, const double tolerance) { dimensionality = data.n_rows; if (weakLearnerType == WeakLearnerTypes::DECISION_STUMP) { if (dsBoost) delete dsBoost; DecisionStump<> ds(data, labels, max(labels) + 1); dsBoost = new AdaBoost>(data, labels, ds, iterations, tolerance); } else if (weakLearnerType == WeakLearnerTypes::PERCEPTRON) { Perceptron<> p(data, labels, max(labels) + 1); pBoost = new AdaBoost>(data, labels, p, iterations, tolerance); } } //! Classify test points. void Classify(const mat& testData, Row& predictions) { if (weakLearnerType == WeakLearnerTypes::DECISION_STUMP) dsBoost->Classify(testData, predictions); else if (weakLearnerType == WeakLearnerTypes::PERCEPTRON) pBoost->Classify(testData, predictions); } //! Serialize the model. template void Serialize(Archive& ar, const unsigned int /* version */) { if (Archive::is_loading::value) { if (dsBoost) delete dsBoost; if (pBoost) delete pBoost; dsBoost = NULL; pBoost = NULL; } ar & data::CreateNVP(mappings, "mappings"); ar & data::CreateNVP(weakLearnerType, "weakLearnerType"); if (weakLearnerType == WeakLearnerTypes::DECISION_STUMP) ar & data::CreateNVP(dsBoost, "adaboost_ds"); else if (weakLearnerType == WeakLearnerTypes::PERCEPTRON) ar & data::CreateNVP(pBoost, "adaboost_p"); ar & data::CreateNVP(dimensionality, "dimensionality"); } }; int main(int argc, char *argv[]) { CLI::ParseCommandLine(argc, argv); // Check input parameters and issue warnings/errors as necessary. // The user cannot specify both a training file and an input model file. if (CLI::HasParam("training_file") && CLI::HasParam("input_model_file")) { Log::Fatal << "Only one of --training_file or --input_model_file may be " << "specified!" 
<< endl; } // The user must specify either a training file or an input model file. if (!CLI::HasParam("training_file") && !CLI::HasParam("input_model_file")) { Log::Fatal << "Either --training_file or --input_model_file must be " << "specified!" << endl; } // The weak learner must make sense. if (CLI::GetParam("weak_learner") != "decision_stump" && CLI::GetParam("weak_learner") != "perceptron") { Log::Fatal << "Unknown weak learner type '" << CLI::GetParam("weak_learner") << "'; must be 'decision_stump' or 'perceptron'." << endl; } // --labels_file can't be specified without --training_file. if (CLI::HasParam("labels_file") && !CLI::HasParam("training_file")) Log::Warn << "--labels_file ignored, because --training_file was not " << "passed." << endl; // Sanity check on iterations. int iterInt = CLI::GetParam("iterations"); if (iterInt < 0) { Log::Fatal << "Invalid number of iterations (" << iterInt << ") specified! " << "Must be greater than 0." << endl; } // If a weak learner is specified with a model, it will be ignored. if (CLI::HasParam("input_model_file") && CLI::HasParam("weak_learner")) { Log::Warn << "--weak_learner ignored because --input_model_file is " << "specified." << endl; } // Training parameters are ignored if no training file is given. if (CLI::HasParam("tolerance") && !CLI::HasParam("training_file")) { Log::Warn << "--tolerance ignored, because --training_file was not " << "passed." << endl; } if (CLI::HasParam("iterations") && !CLI::HasParam("training_file")) { Log::Warn << "--iterations ignored, because --training_file was not " << "passed." << endl; } if (!CLI::HasParam("output_model_file") && !CLI::HasParam("output_file")) { Log::Warn << "Neither --output_model_file nor --output_file are specified; " << "no results will be saved." << endl; } if (CLI::HasParam("output_file") && !CLI::HasParam("test_file")) { Log::Warn << "--output_file ignored because --test_file is not specified." << endl; } AdaBoostModel m; if (CLI::HasParam("training_file")) { const string trainingDataFilename = CLI::GetParam("training_file"); mat trainingData; data::Load(trainingDataFilename, trainingData, true); const string labelsFilename = CLI::GetParam("labels_file"); // Load labels. Mat labelsIn; if (CLI::HasParam("labels_file")) { const string labelsFilename = CLI::GetParam("labels_file"); // Load labels. data::Load(labelsFilename, labelsIn, true); // Do the labels need to be transposed? if (labelsIn.n_cols == 1) labelsIn = labelsIn.t(); } else { // Extract the labels as the last dimension of the training data. Log::Info << "Using the last dimension of training set as labels." << endl; labelsIn = conv_to>::from( trainingData.row(trainingData.n_rows - 1)).t(); trainingData.shed_row(trainingData.n_rows - 1); } // Helpers for normalizing the labels. Row labels; // Normalize the labels. data::NormalizeLabels(labelsIn.row(0), labels, m.Mappings()); // Get other training parameters. const double tolerance = CLI::GetParam("tolerance"); const size_t iterations = (size_t) CLI::GetParam("iterations"); const string weakLearner = CLI::GetParam("weak_learner"); if (weakLearner == "decision_stump") m.WeakLearnerType() = AdaBoostModel::WeakLearnerTypes::DECISION_STUMP; else if (weakLearner == "perceptron") m.WeakLearnerType() = AdaBoostModel::WeakLearnerTypes::PERCEPTRON; Timer::Start("adaboost_training"); m.Train(trainingData, labels, iterations, tolerance); Timer::Stop("adaboost_training"); } else { // We have a specified input model file. 
const string inputModelFile = CLI::GetParam("input_model_file"); data::Load(inputModelFile, "adaboost_model", m, true); // Fatal on failure. } // Perform classification, if desired. if (CLI::HasParam("test_file")) { const string testingDataFilename = CLI::GetParam("test_file"); mat testingData; data::Load(testingDataFilename, testingData, true); if (testingData.n_rows != m.Dimensionality()) Log::Fatal << "Test data dimensionality (" << testingData.n_rows << ") " << "must be the same as the model dimensionality (" << m.Dimensionality() << ")!" << endl; Row predictedLabels(testingData.n_cols); Timer::Start("adaboost_classification"); m.Classify(testingData, predictedLabels); Timer::Stop("adaboost_classification"); Row results; data::RevertLabels(predictedLabels, m.Mappings(), results); if (CLI::HasParam("output_file")) data::Save(CLI::GetParam("output_file"), results, true); } // Should we save the model, too? if (CLI::HasParam("output_model_file")) data::Save(CLI::GetParam("output_model_file"), "adaboost_model", m); } mlpack-2.2.5/src/mlpack/methods/amf/000077500000000000000000000000001315013601400172255ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/amf/CMakeLists.txt000066400000000000000000000010411315013601400217610ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES amf.hpp amf_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_subdirectory(update_rules) add_subdirectory(init_rules) add_subdirectory(termination_policies) mlpack-2.2.5/src/mlpack/methods/amf/amf.hpp000066400000000000000000000244571315013601400205150ustar00rootroot00000000000000/** * @file amf.hpp * @author Sumedh Ghaisas * @author Mohan Rajendran * @author Ryan Curtin * * Alternating Matrix Factorization * * The AMF (alternating matrix factorization) class, from which more commonly * known techniques such as incremental SVD, NMF, and batch-learning SVD can be * derived. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_AMF_AMF_HPP #define MLPACK_METHODS_AMF_AMF_HPP #include #include #include #include #include #include #include #include #include #include namespace mlpack { namespace amf /** Alternating Matrix Factorization **/ { /** * This class implements AMF (alternating matrix factorization) on the given * matrix V. Alternating matrix factorization decomposes V in the form * \f$ V \approx WH \f$ where W is called the basis matrix and H is called the * encoding matrix. V is taken to be of size n x m and the obtained W is n x r * and H is r x m. The size r is called the rank of the factorization. * * The implementation requires three template types; the first contains the * policy used to determine when the algorithm has converged; the second * contains the initialization rule for the W and H matrix; the last contains * the update rule to be used during each iteration. 
This templatization allows * the user to try various update rules, initialization rules, and termination * policies (including ones not supplied with mlpack) for factorization. By * default, the template parameters to AMF implement non-negative matrix * factorization with the multiplicative distance update. * * A simple example of how to run AMF (or NMF) is shown below. * * @code * extern arma::mat V; // Matrix that we want to perform LMF on. * size_t r = 10; // Rank of decomposition * arma::mat W; // Basis matrix * arma::mat H; // Encoding matrix * * AMF<> amf; // Default options: NMF with multiplicative distance update rules. * amf.Apply(V, r, W, H); * @endcode * * @tparam TerminationPolicy The policy to use for determining when the * factorization has converged. * @tparam InitializationRule The initialization rule for initializing W and H * matrix. * @tparam UpdateRule The update rule for calculating W and H matrix at each * iteration. * * @see NMFMultiplicativeDistanceUpdate, SimpleResidueTermination */ template, typename UpdateRuleType = NMFMultiplicativeDistanceUpdate> class AMF { public: /** * Create the AMF object and (optionally) set the parameters which AMF will * run with. The minimum residue refers to the root mean square of the * difference between two subsequent iterations of the product W * H. A low * residue indicates that subsequent iterations are not producing much change * in W and H. Once the residue goes below the specified minimum residue, the * algorithm terminates. * * @param initializationRule Optional instantiated InitializationRule object * for initializing the W and H matrices. * @param updateRule Optional instantiated UpdateRule object; this parameter * is useful when the update rule for the W and H vector has state that * it needs to store (i.e. HUpdate() and WUpdate() are not static * functions). * @param terminationPolicy Optional instantiated TerminationPolicy object. */ AMF(const TerminationPolicyType& terminationPolicy = TerminationPolicyType(), const InitializationRuleType& initializeRule = InitializationRuleType(), const UpdateRuleType& update = UpdateRuleType()); /** * Apply Alternating Matrix Factorization to the provided matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix to be output. * @param H Encoding matrix to output. * @param r Rank r of the factorization. */ template double Apply(const MatType& V, const size_t r, arma::mat& W, arma::mat& H); //! Access the termination policy. const TerminationPolicyType& TerminationPolicy() const { return terminationPolicy; } //! Modify the termination policy. TerminationPolicyType& TerminationPolicy() { return terminationPolicy; } //! Access the initialization rule. const InitializationRuleType& InitializeRule() const { return initializationRule; } //! Modify the initialization rule. InitializationRuleType& InitializeRule() { return initializationRule; } //! Access the update rule. const UpdateRuleType& Update() const { return update; } //! Modify the update rule. UpdateRuleType& Update() { return update; } private: //! Termination policy. TerminationPolicyType terminationPolicy; //! Instantiated initialization Rule. InitializationRuleType initializationRule; //! Instantiated update rule. UpdateRuleType update; }; // class AMF typedef amf::AMF, amf::NMFALSUpdate> NMFALSFactorizer; //! Add simple typedefs #ifdef MLPACK_USE_CXX11 /** * SVDBatchFactorizer factorizes given matrix V into two matrices W and H by * gradient descent. 
SVD batch learning is described in paper 'A Guide to * singular Value Decomposition' by Chih-Chao Ma. * * @see SVDBatchLearning */ template using SVDBatchFactorizer = amf::AMF, amf::SVDBatchLearning>; /** * SVDIncompleteIncrementalFactorizer factorizes given matrix V into two * matrices W and H by incomplete incremental gradient descent. SVD incomplete * incremental learning is described in paper 'A Guide to singular Value * Decomposition' * by Chih-Chao Ma. * * @see SVDIncompleteIncrementalLearning */ template using SVDIncompleteIncrementalFactorizer = amf::AMF< amf::SimpleResidueTermination, amf::RandomAcolInitialization<>, amf::SVDIncompleteIncrementalLearning>; /** * SVDCompleteIncrementalFactorizer factorizes given matrix V into two matrices * W and H by complete incremental gradient descent. SVD complete incremental * learning is described in paper 'A Guide to singular Value Decomposition' * by Chih-Chao Ma. * * @see SVDCompleteIncrementalLearning */ template using SVDCompleteIncrementalFactorizer = amf::AMF< amf::SimpleResidueTermination, amf::RandomAcolInitialization<>, amf::SVDCompleteIncrementalLearning>; #else // #ifdef MLPACK_USE_CXX11 /** * SparseSVDBatchFactorizer factorizes given sparse matrix V into two matrices W * and H by gradient descent. SVD batch learning is described in paper 'A Guide * to singular Value Decomposition' by Chih-Chao Ma. * * @see SVDBatchLearning */ typedef amf::AMF, amf::SVDBatchLearning> SparseSVDBatchFactorizer; /** * SparseSVDBatchFactorizer factorizes given matrix V into two matrices W and H * by gradient descent. SVD batch learning is described in paper 'A Guide to * singular Value Decomposition' by Chih-Chao Ma. * * @see SVDBatchLearning */ typedef amf::AMF, amf::SVDBatchLearning> SVDBatchFactorizer; /** * SparseSVDIncompleteIncrementalFactorizer factorizes given sparse matrix V * into two matrices W and H by incomplete incremental gradient descent. SVD * incomplete incremental learning is described in paper 'A Guide to singular * Value Decomposition' by Chih-Chao Ma. * * @see SVDIncompleteIncrementalLearning */ typedef amf::AMF, amf::SVDIncompleteIncrementalLearning> SparseSVDIncompleteIncrementalFactorizer; /** * SVDIncompleteIncrementalFactorizer factorizes given matrix V into two * matrices W and H by incomplete incremental gradient descent. SVD incomplete * incremental learning is described in paper 'A Guide to singular Value * Decomposition' by Chih-Chao Ma. * * @see SVDIncompleteIncrementalLearning */ typedef amf::AMF, amf::SVDIncompleteIncrementalLearning> SVDIncompleteIncrementalFactorizer; /** * SparseSVDCompleteIncrementalFactorizer factorizes given sparse matrix V * into two matrices W and H by complete incremental gradient descent. SVD * complete incremental learning is described in paper 'A Guide to singular * Value Decomposition' by Chih-Chao Ma. * * @see SVDCompleteIncrementalLearning */ typedef amf::AMF, amf::SVDCompleteIncrementalLearning > SparseSVDCompleteIncrementalFactorizer; /** * SVDCompleteIncrementalFactorizer factorizes given matrix V into two matrices * W and H by complete incremental gradient descent. SVD complete incremental * learning is described in paper 'A Guide to singular Value Decomposition' * by Chih-Chao Ma. * * @see SVDCompleteIncrementalLearning */ typedef amf::AMF, amf::SVDCompleteIncrementalLearning > SVDCompleteIncrementalFactorizer; #endif // #ifdef MLPACK_USE_CXX11 } // namespace amf } // namespace mlpack // Include implementation. 
#include "amf_impl.hpp" #endif // MLPACK_METHODS_AMF_AMF_HPP mlpack-2.2.5/src/mlpack/methods/amf/amf_impl.hpp000066400000000000000000000044401315013601400215240ustar00rootroot00000000000000/** * @file amf_impl.hpp * @author Sumedh Ghaisas * @author Mohan Rajendran * @author Ryan Curtin * * Implementation of AMF class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ namespace mlpack { namespace amf { /** * Construct the AMF object. */ template AMF::AMF( const TerminationPolicyType& terminationPolicy, const InitializationRuleType& initializationRule, const UpdateRuleType& update) : terminationPolicy(terminationPolicy), initializationRule(initializationRule), update(update) { } /** * Apply Alternating Matrix Factorization to the provided matrix. * * @param V Input matrix to be factorized * @param W Basis matrix to be output * @param H Encoding matrix to output * @param r Rank r of the factorization */ template template double AMF:: Apply(const MatType& V, const size_t r, arma::mat& W, arma::mat& H) { // Initialize W and H. initializationRule.Initialize(V, r, W, H); Log::Info << "Initialized W and H." << std::endl; // initialize the update rule update.Initialize(V, r); // initialize the termination policy terminationPolicy.Initialize(V); // check if termination conditions are met while (!terminationPolicy.IsConverged(W, H)) { // Update the values of W and H based on the update rules provided. update.WUpdate(V, W, H); update.HUpdate(V, W, H); } // get final residue and iteration count from termination policy const double residue = terminationPolicy.Index(); const size_t iteration = terminationPolicy.Iteration(); Log::Info << "AMF converged to residue of " << residue << " in " << iteration << " iterations." << std::endl; return residue; } } // namespace amf } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/amf/init_rules/000077500000000000000000000000001315013601400214025ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/amf/init_rules/CMakeLists.txt000066400000000000000000000007611315013601400241460ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES random_init.hpp random_acol_init.hpp average_init.hpp given_init.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/amf/init_rules/average_init.hpp000066400000000000000000000045571315013601400245630ustar00rootroot00000000000000/** * @file averge_init.hpp * @author Sumedh Ghaisas * * Initialization rule for Alternating Matrix Factorization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
 */
#ifndef MLPACK_METHODS_AMF_AVERAGE_INIT_HPP
#define MLPACK_METHODS_AMF_AVERAGE_INIT_HPP

#include

namespace mlpack {
namespace amf {

/**
 * This initialization rule initializes the matrices W and H to the square root
 * of the average of V, perturbed with uniform noise.  Uniform noise is
 * generated by Armadillo's 'randu' function.  For better performance, the
 * lowest element of the matrix is subtracted from the average before dividing
 * it by the factorization rank.  This computed value is then added to the
 * uniform random noise.
 */
class AverageInitialization
{
 public:
  // Empty constructor required for the InitializeRule template.
  AverageInitialization() { }

  /**
   * Initialize the matrices W and H to the average value of V with uniform
   * random noise added.
   *
   * @param V Input matrix.
   * @param r Rank of matrix.
   * @param W W matrix, to be initialized.
   * @param H H matrix, to be initialized.
   */
  template
  inline static void Initialize(const MatType& V,
                                const size_t r,
                                arma::mat& W,
                                arma::mat& H)
  {
    const size_t n = V.n_rows;
    const size_t m = V.n_cols;

    double avgV = 0;
    size_t count = 0;
    double min = DBL_MAX;

    // Iterate over all elements in the matrix (for sparse matrices, this only
    // iterates over nonzeros).
    for (typename MatType::const_row_col_iterator it = V.begin();
        it != V.end(); ++it)
    {
      ++count;
      avgV += *it;
      // Track the minimum value.
      if (*it < min)
        min = *it;
    }

    avgV = sqrt(((avgV / (n * m)) - min) / r);

    // Initialize to random values.
    W.randu(n, r);
    H.randu(r, m);

    W = W + avgV;
    H = H + avgV;
  }

  //! Serialize the object (in this case, there is nothing to do).
  template
  void Serialize(Archive& /* ar */, const unsigned int /* version */) { }
};

} // namespace amf
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/amf/init_rules/given_init.hpp000066400000000000000000000045611315013601400242540ustar00rootroot00000000000000/**
 * @file given_init.hpp
 * @author Ryan Curtin
 *
 * Initialization rule for alternating matrix factorization (AMF).  This simple
 * initialization is performed by assigning a given matrix to W and H.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_AMF_INIT_RULES_GIVEN_INIT_HPP
#define MLPACK_METHODS_AMF_INIT_RULES_GIVEN_INIT_HPP

#include

namespace mlpack {
namespace amf {

/**
 * This initialization rule for AMF simply fills the W and H matrices with the
 * matrices given to the constructor of this object.  Note that this object
 * does not use std::move() during the Initialize() method, so it can be reused
 * for multiple AMF objects, but will incur copies of the W and H matrices.
 */
class GivenInitialization
{
 public:
  // Empty constructor required for the InitializeRule template.
  GivenInitialization() { }

  // Initialize the GivenInitialization object with the given matrices.
  GivenInitialization(const arma::mat& w, const arma::mat& h) : w(w), h(h) { }

  // Initialize the GivenInitialization object, taking control of the given
  // matrices.  (The parameters are non-const rvalue references so that
  // std::move() actually moves instead of silently copying.)
  GivenInitialization(arma::mat&& w, arma::mat&& h) :
      w(std::move(w)), h(std::move(h)) { }

  /**
   * Initialize W and H to the matrices given at construction time.
   *
   * @param V Input matrix (unused by this rule).
   * @param r Rank of decomposition (unused by this rule).
   * @param W W matrix, to be set to the given W matrix.
   * @param H H matrix, to be set to the given H matrix.
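   *
   * A short usage sketch (all names are illustrative):
   *
   * @code
   * extern arma::mat V; // Matrix to be factorized.
   * arma::mat initW, initH; // Preset factors, e.g. from an earlier run.
   * GivenInitialization g(initW, initH);
   * arma::mat W, H;
   * g.Initialize(V, 10, W, H); // Now W and H are copies of initW and initH.
   * @endcode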
*/ template inline void Initialize(const MatType& /* V */, const size_t /* r */, arma::mat& W, arma::mat& H) { // Initialize to the given matrices. W = w; H = h; } //! Serialize the object (in this case, there is nothing to serialize). template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(w, "w"); ar & data::CreateNVP(h, "h"); } private: //! The W matrix for initialization. arma::mat w; //! The H matrix for initialization. arma::mat h; }; } // namespace amf } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/amf/init_rules/random_acol_init.hpp000066400000000000000000000053511315013601400254200ustar00rootroot00000000000000/** * @file random_acol_init.hpp * @author Mohan Rajendran * * Initialization rule for Alternating Matrix Factorization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LMF_RANDOM_ACOL_INIT_HPP #define MLPACK_METHODS_LMF_RANDOM_ACOL_INIT_HPP #include #include namespace mlpack { namespace amf { /** * This class initializes the W matrix of the AMF algorithm by averaging p * randomly chosen columns of V. In this case, p is a template parameter. H is * then filled using a uniform distribution in the range [0, 1]. * * This simple initialization is the "random Acol initialization" found in the * following paper: * * @code * @techreport{langville2014algorithms, * title = {Algorithms, Initializations, and Convergence for the Nonnegative * Matrix Factorization}, * author = {Langville, A.N. and Meyer, C.D. and Albright, R. and Cox, J. and * Duling, D.}, * year = {2014}, * institution = {NCSU Technical Report Math 81706} * } * @endcode * * @tparam columnsToAverage The number of random columns to average for each * column of W. */ template class RandomAcolInitialization { public: // Empty constructor required for the InitializeRule template RandomAcolInitialization() { } template inline static void Initialize(const MatType& V, const size_t r, arma::mat& W, arma::mat& H) { const size_t n = V.n_rows; const size_t m = V.n_cols; if (columnsToAverage > m) { Log::Warn << "Number of random columns (columnsToAverage) is more than " << "the number of columns available in the V matrix; weird results " << "may ensue!" << std::endl; } W.zeros(n, r); // Initialize W matrix with random columns. for (size_t col = 0; col < r; col++) { for (size_t randCol = 0; randCol < columnsToAverage; randCol++) { // .col() does not work in this case, as of Armadillo 3.920. W.unsafe_col(col) += V.col(math::RandInt(0, m)); } } // Now divide by p. W /= columnsToAverage; // Initialize H to random values. H.randu(r, m); } //! Serialize the object (in this case, there is nothing to serialize). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace amf } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/amf/init_rules/random_init.hpp000066400000000000000000000034331315013601400244210ustar00rootroot00000000000000/** * @file random_init.hpp * @author Mohan Rajendran * * Initialization rule for alternating matrix factorization (AMF). This simple * initialization is performed by assigning a random matrix to W and H. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LMF_RANDOM_INIT_HPP #define MLPACK_METHODS_LMF_RANDOM_INIT_HPP #include namespace mlpack { namespace amf { /** * This initialization rule for AMF simply fills the W and H matrices with * uniform random noise in [0, 1]. */ class RandomInitialization { public: // Empty constructor required for the InitializeRule template RandomInitialization() { } /** * Fill W and H with random uniform noise. * * @param V Input matrix. * @param r Rank of decomposition. * @param W W matrix, to be filled with random noise. * @param H H matrix, to be filled with random noise. */ template inline static void Initialize(const MatType& V, const size_t r, arma::mat& W, arma::mat& H) { // Simple implementation (left in the header file due to its simplicity). const size_t n = V.n_rows; const size_t m = V.n_cols; // Initialize to random values. W.randu(n, r); H.randu(r, m); } //! Serialize the object (in this case, there is nothing to serialize). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace amf } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/amf/termination_policies/000077500000000000000000000000001315013601400234455ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/amf/termination_policies/CMakeLists.txt000066400000000000000000000011721315013601400262060ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES simple_residue_termination.hpp simple_tolerance_termination.hpp validation_rmse_termination.hpp incomplete_incremental_termination.hpp complete_incremental_termination.hpp max_iteration_termination.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/amf/termination_policies/complete_incremental_termination.hpp000066400000000000000000000070271315013601400327660ustar00rootroot00000000000000/** * @file complete_incremental_termination.hpp * @author Sumedh Ghaisas * * Termination policy used in AMF (Alternating Matrix Factorization). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_AMF_COMPLETE_INCREMENTAL_TERMINATION_HPP #define MLPACK_METHODS_AMF_COMPLETE_INCREMENTAL_TERMINATION_HPP namespace mlpack { namespace amf { /** * This class acts as a wrapper for basic termination policies to be used by * SVDCompleteIncrementalLearning. This class calls the wrapped class functions * after every n calls to main class functions where n is the number of non-zero * entries in the matrix being factorized. This is necessary for * SVDCompleteIncrementalLearning, because otherwise IsConverged() is called * after every point, which is very slow. * * @see AMF, SVDCompleteIncrementalLearning */ template class CompleteIncrementalTermination { public: /** * Empty constructor. * * @param tPolicy object of wrapped class. 
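   *
   * For example, one might wrap SimpleResidueTermination (a sketch; the
   * arguments are that policy's minimum-residue and iteration-limit
   * parameters):
   *
   * @code
   * CompleteIncrementalTermination<SimpleResidueTermination>
   *     t(SimpleResidueTermination(1e-5, 10000));
   * @endcode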
   */
  CompleteIncrementalTermination(
      TerminationPolicy tPolicy = TerminationPolicy()) : tPolicy(tPolicy) { }

  /**
   * Initializes the termination policy before starting the factorization.
   *
   * @param V Input matrix to be factorized.
   */
  template
  void Initialize(const MatType& V)
  {
    tPolicy.Initialize(V);

    // Get the number of non-zero entries.
    incrementalIndex = arma::accu(V != 0);
    iteration = 0;
  }

  /**
   * Initializes the termination policy before starting the factorization.
   * This is a specialization for sparse matrices.
   *
   * @param V Input matrix to be factorized.
   */
  void Initialize(const arma::sp_mat& V)
  {
    tPolicy.Initialize(V);

    // Get the number of non-zero entries.
    incrementalIndex = V.n_nonzero;
    iteration = 0;
  }

  /**
   * Check if the termination criterion is met; the wrapped policy is only
   * consulted on iterations where every nonzero entry has been visited.
   *
   * @param W Basis matrix of output.
   * @param H Encoding matrix of output.
   */
  bool IsConverged(arma::mat& W, arma::mat& H)
  {
    // Increment iteration count.
    iteration++;

    // If the iteration count is a multiple of the incremental index, return
    // the result of the wrapped termination policy.
    if (iteration % incrementalIndex == 0)
      return tPolicy.IsConverged(W, H);
    else
      return false;
  }

  //! Get current value of residue.
  const double& Index() const { return tPolicy.Index(); }

  //! Get current iteration count.
  const size_t& Iteration() const { return iteration; }

  //! Access upper limit of iteration count.
  const size_t& MaxIterations() const { return tPolicy.MaxIterations(); }
  //! Modify maximum number of iterations.
  size_t& MaxIterations() { return tPolicy.MaxIterations(); }

  //! Access the wrapped termination policy.
  const TerminationPolicy& TPolicy() const { return tPolicy; }
  //! Modify the wrapped termination policy.
  TerminationPolicy& TPolicy() { return tPolicy; }

 private:
  //! Wrapped termination policy.
  TerminationPolicy tPolicy;

  //! Number of iterations after which the wrapped termination policy will be
  //! called.
  size_t incrementalIndex;
  //! Current iteration number.
  size_t iteration;
}; // class CompleteIncrementalTermination

} // namespace amf
} // namespace mlpack

#endif // MLPACK_METHODS_AMF_COMPLETE_INCREMENTAL_TERMINATION_HPP
mlpack-2.2.5/src/mlpack/methods/amf/termination_policies/incomplete_incremental_termination.hpp000066400000000000000000000056451315013601400333170ustar00rootroot00000000000000/**
 * @file incomplete_incremental_termination.hpp
 * @author Sumedh Ghaisas
 *
 * Termination policy used in AMF (Alternating Matrix Factorization).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef _MLPACK_METHODS_AMF_INCOMPLETE_INCREMENTAL_TERMINATION_HPP
#define _MLPACK_METHODS_AMF_INCOMPLETE_INCREMENTAL_TERMINATION_HPP

#include

namespace mlpack {
namespace amf {

/**
 * This class acts as a wrapper for basic termination policies to be used by
 * SVDIncompleteIncrementalLearning.  This class calls the wrapped policy's
 * functions after every n calls to the main class functions, where n is the
 * number of rows of V.
 *
 * @see AMF, SVDIncompleteIncrementalLearning
 */
template
class IncompleteIncrementalTermination
{
 public:
  /**
   * Empty constructor.
   *
   * @param tPolicy Object of the wrapped termination policy.
   */
  IncompleteIncrementalTermination(
      TerminationPolicy tPolicy = TerminationPolicy()) : tPolicy(tPolicy) { }

  /**
   * Initializes the termination policy before starting the factorization.
   *
   * @param V Input matrix to be factorized.
   */
  template
  void Initialize(const MatType& V)
  {
    tPolicy.Initialize(V);

    // Initialize incremental index to number of rows.
    incrementalIndex = V.n_rows;
    iteration = 0;
  }

  /**
   * Check if the termination criterion is met.
   *
   * @param W Basis matrix of output.
   * @param H Encoding matrix of output.
   */
  bool IsConverged(arma::mat& W, arma::mat& H)
  {
    // Increment iteration count.
    iteration++;

    // If the iteration count is a multiple of the incremental index, return
    // the wrapped termination policy's result.
    if (iteration % incrementalIndex == 0)
      return tPolicy.IsConverged(W, H);
    else
      return false;
  }

  //! Get current value of residue.
  const double& Index() const { return tPolicy.Index(); }

  //! Get current iteration count.
  const size_t& Iteration() const { return iteration; }

  //! Access maximum number of iterations.
  size_t MaxIterations() const { return tPolicy.MaxIterations(); }
  //! Modify maximum number of iterations.
  size_t& MaxIterations() { return tPolicy.MaxIterations(); }

  //! Access the wrapped termination policy.
  const TerminationPolicy& TPolicy() const { return tPolicy; }
  //! Modify the wrapped termination policy.
  TerminationPolicy& TPolicy() { return tPolicy; }

 private:
  //! Wrapped termination policy.
  TerminationPolicy tPolicy;
  //! Number of iterations after which the wrapped policy object will be
  //! called.
  size_t incrementalIndex;
  //! Current iteration count.
  size_t iteration;
}; // class IncompleteIncrementalTermination

} // namespace amf
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/amf/termination_policies/max_iteration_termination.hpp000066400000000000000000000050501315013601400314320ustar00rootroot00000000000000/**
 * @file max_iteration_termination.hpp
 * @author Ryan Curtin
 *
 * A termination policy which only terminates when the maximum number of
 * iterations is reached.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_AMF_TERMINATION_POLICIES_MAX_ITERATION_TERMINATION_HPP
#define MLPACK_METHODS_AMF_TERMINATION_POLICIES_MAX_ITERATION_TERMINATION_HPP

namespace mlpack {
namespace amf {

/**
 * This termination policy only terminates when the maximum number of
 * iterations has been reached.
 */
class MaxIterationTermination
{
 public:
  /**
   * Construct the termination policy with the given number of allowed
   * iterations.  If maxIterations is 0, then termination will never occur.
   *
   * @param maxIterations Maximum number of allowed iterations.
   */
  MaxIterationTermination(const size_t maxIterations) :
      maxIterations(maxIterations),
      iteration(0)
  {
    if (maxIterations == 0)
      Log::Warn << "MaxIterationTermination::MaxIterationTermination(): given "
          << "number of iterations is 0, so algorithm will never terminate!"
          << std::endl;
  }

  /**
   * Initialize for the given matrix V (there is nothing to do).
   */
  template
  void Initialize(const MatType& /* V */) { }

  /**
   * Check if convergence has occurred.
   */
  bool IsConverged(const arma::mat& /* H */, const arma::mat& /* W */)
  {
    // Return true if we have performed the correct number of iterations.
    return (++iteration >= maxIterations);
  }

  //! Return something similar to the residue, which in this case is just the
  //! number of iterations left, since we don't have access to anything else.
  size_t Index()
  {
    return (iteration > maxIterations) ?
0 : maxIterations - iteration; } //! Get the current iteration. size_t Iteration() const { return iteration; } //! Modify the current iteration. size_t& Iteration() { return iteration; } //! Get the maximum number of iterations. size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations. size_t& MaxIterations() { return maxIterations; } private: //! The maximum number of allowed iterations. size_t maxIterations; //! The number of the current iteration. size_t iteration; }; } // namespace amf } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/amf/termination_policies/simple_residue_termination.hpp000066400000000000000000000072561315013601400316120ustar00rootroot00000000000000/** * @file simple_residue_termination.hpp * @author Sumedh Ghaisas * * Termination policy used in AMF (Alternating Matrix Factorization). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef _MLPACK_METHODS_AMF_SIMPLERESIDUETERMINATION_HPP_INCLUDED #define _MLPACK_METHODS_AMF_SIMPLERESIDUETERMINATION_HPP_INCLUDED #include namespace mlpack { namespace amf { /** * This class implements a simple residue-based termination policy. The * termination decision depends on two factors: the value of the residue (the * difference between the norm of WH this iteration and the previous iteration), * and the number of iterations. If the current value of residue drops below * the threshold or the number of iterations goes above the iteration limit, * IsConverged() will return true. This class is meant for use with the AMF * (alternating matrix factorization) class. * * @see AMF */ class SimpleResidueTermination { public: /** * Construct the SimpleResidueTermination object with the given minimum * residue (or the default) and the given maximum number of iterations (or the * default). 0 indicates no iteration limit. * * @param minResidue Minimum residue for termination. * @param maxIterations Maximum number of iterations. */ SimpleResidueTermination(const double minResidue = 1e-5, const size_t maxIterations = 10000) : minResidue(minResidue), maxIterations(maxIterations) { } /** * Initializes the termination policy before stating the factorization. * * @param V Input matrix being factorized. */ template void Initialize(const MatType& V) { // Initialize the things we keep track of. residue = DBL_MAX; iteration = 1; nm = V.n_rows * V.n_cols; // Remove history. normOld = 0; } /** * Check if termination criterion is met. * * @param W Basis matrix of output. * @param H Encoding matrix of output. */ bool IsConverged(arma::mat& W, arma::mat& H) { // Calculate the norm and compute the residue, but do it by hand, so as to // avoid calculating (W*H), which may be very large. double norm = 0.0; for (size_t j = 0; j < H.n_cols; ++j) norm += arma::norm(W * H.col(j), "fro"); residue = fabs(normOld - norm) / normOld; // Store the norm. normOld = norm; // Increment iteration count iteration++; Log::Info << "Iteration " << iteration << "; residue " << residue << ".\n"; // Check if termination criterion is met. return (residue < minResidue || iteration > maxIterations); } //! Get current value of residue const double& Index() const { return residue; } //! Get current iteration count const size_t& Iteration() const { return iteration; } //! 
Access max iteration count.
  const size_t& MaxIterations() const { return maxIterations; }
  //! Modify max iteration count.
  size_t& MaxIterations() { return maxIterations; }

  //! Access minimum residue value.
  const double& MinResidue() const { return minResidue; }
  //! Modify minimum residue value.
  double& MinResidue() { return minResidue; }

 public:
  //! The minimum residue threshold.
  double minResidue;
  //! The iteration threshold.
  size_t maxIterations;

  //! The current value of the residue.
  double residue;
  //! The current iteration count.
  size_t iteration;
  //! The norm of the previous iteration.
  double normOld;

  size_t nm;
}; // class SimpleResidueTermination

} // namespace amf
} // namespace mlpack

#endif // _MLPACK_METHODS_AMF_SIMPLERESIDUETERMINATION_HPP_INCLUDED
mlpack-2.2.5/src/mlpack/methods/amf/termination_policies/simple_tolerance_termination.hpp000066400000000000000000000123231315013601400321150ustar00rootroot00000000000000/**
 * @file simple_tolerance_termination.hpp
 * @author Sumedh Ghaisas
 *
 * Termination policy used in AMF (Alternating Matrix Factorization).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef _MLPACK_METHODS_AMF_SIMPLE_TOLERANCE_TERMINATION_HPP_INCLUDED
#define _MLPACK_METHODS_AMF_SIMPLE_TOLERANCE_TERMINATION_HPP_INCLUDED

#include

namespace mlpack {
namespace amf {

/**
 * This class implements a residue tolerance termination policy.  The
 * termination criterion is met when the relative decrease in the residue value
 * drops below the given tolerance.  To accommodate spikes, a certain number of
 * successive iterations that fail the tolerance check are allowed before
 * terminating; this upper limit can be adjusted with reverseStepTolerance.  A
 * secondary termination criterion stops the algorithm when the iteration count
 * exceeds maxIterations.
 *
 * @see AMF
 */
template
class SimpleToleranceTermination
{
 public:
  //! Empty constructor.
  SimpleToleranceTermination(const double tolerance = 1e-5,
                             const size_t maxIterations = 10000,
                             const size_t reverseStepTolerance = 3) :
      tolerance(tolerance),
      maxIterations(maxIterations),
      reverseStepTolerance(reverseStepTolerance) { }

  /**
   * Initializes the termination policy before starting the factorization.
   *
   * @param V Input matrix to be factorized.
   */
  void Initialize(const MatType& V)
  {
    residueOld = DBL_MAX;
    iteration = 1;
    residue = DBL_MIN;
    reverseStepCount = 0;
    isCopy = false;

    this->V = &V;

    c_index = 0;
    c_indexOld = 0;
  }

  /**
   * Check if the termination criterion is met.
   *
   * @param W Basis matrix of output.
   * @param H Encoding matrix of output.
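   *
   * Concretely, this computes the relative residue improvement
   * (residueOld - residue) / residueOld and reports convergence once that
   * quantity has stayed below the tolerance for reverseStepTolerance
   * successive iterations, or once maxIterations is exceeded.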
   */
  bool IsConverged(arma::mat& W, arma::mat& H)
  {
    arma::mat WH;
    WH = W * H;

    // Compute the residue over the nonzero entries of V.
    residueOld = residue;
    size_t n = V->n_rows;
    size_t m = V->n_cols;
    double sum = 0;
    size_t count = 0;
    for (size_t i = 0; i < n; i++)
    {
      for (size_t j = 0; j < m; j++)
      {
        double temp = 0;
        if ((temp = (*V)(i, j)) != 0)
        {
          temp = (temp - WH(i, j));
          temp = temp * temp;
          sum += temp;
          count++;
        }
      }
    }

    residue = sum / count;
    residue = sqrt(residue);

    // Increment iteration count.
    iteration++;
    Log::Info << "Iteration " << iteration << "; relative residue drop "
        << ((residueOld - residue) / residueOld) << ".\n";

    // If the residue tolerance is not satisfied...
    if ((residueOld - residue) / residueOld < tolerance && iteration > 4)
    {
      // Check if this is the first of the successive drops.
      if (reverseStepCount == 0 && isCopy == false)
      {
        // Store a copy of the W and H matrices.
        isCopy = true;
        this->W = W;
        this->H = H;
        // Store residue values.
        c_index = residue;
        c_indexOld = residueOld;
      }
      // Increase successive drop count.
      reverseStepCount++;
    }
    // If the tolerance is satisfied...
    else
    {
      // Reset successive drop count.
      reverseStepCount = 0;
      // If the residue has dropped below the stored minimum, scrap the stored
      // values.
      if (residue <= c_indexOld && isCopy == true)
      {
        isCopy = false;
      }
    }

    // Check if the termination criterion is met.
    if (reverseStepCount == reverseStepTolerance || iteration > maxIterations)
    {
      // If stored values are present, replace the current values with them, as
      // they represent the minimum residue point.
      if (isCopy)
      {
        W = this->W;
        H = this->H;
        residue = c_index;
      }
      return true;
    }
    else
      return false;
  }

  //! Get current value of residue.
  const double& Index() const { return residue; }

  //! Get current iteration count.
  const size_t& Iteration() const { return iteration; }

  //! Access upper limit of iteration count.
  const size_t& MaxIterations() const { return maxIterations; }
  //! Modify upper limit of iteration count.
  size_t& MaxIterations() { return maxIterations; }

  //! Access tolerance value.
  const double& Tolerance() const { return tolerance; }
  //! Modify tolerance value.
  double& Tolerance() { return tolerance; }

 private:
  //! Tolerance on the relative residue drop.
  double tolerance;
  //! Iteration threshold.
  size_t maxIterations;

  //! Pointer to the matrix being factorized.
  const MatType* V;

  //! Current iteration count.
  size_t iteration;

  //! Residue values.
  double residueOld;
  double residue;
  double normOld;

  //! Tolerance on successive residue drops.
  size_t reverseStepTolerance;
  //! Successive residue drops.
  size_t reverseStepCount;

  //! Indicates whether a copy of information is available which corresponds to
  //! the minimum residue point.
  bool isCopy;

  //! Variables to store information of the minimum residue point.
  arma::mat W;
  arma::mat H;
  double c_indexOld;
  double c_index;
}; // class SimpleToleranceTermination

} // namespace amf
} // namespace mlpack

#endif // _MLPACK_METHODS_AMF_SIMPLE_TOLERANCE_TERMINATION_HPP_INCLUDED
mlpack-2.2.5/src/mlpack/methods/amf/termination_policies/validation_RMSE_termination.hpp000066400000000000000000000147131315013601400315550ustar00rootroot00000000000000/**
 * @file validation_RMSE_termination.hpp
 * @author Sumedh Ghaisas
 *
 * Termination policy used in AMF (Alternating Matrix Factorization).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/ #ifndef _MLPACK_METHODS_AMF_VALIDATIONRMSETERMINATION_HPP_INCLUDED #define _MLPACK_METHODS_AMF_VALIDATIONRMSETERMINATION_HPP_INCLUDED #include namespace mlpack { namespace amf { /** * This class implements a validation termination policy based on the RMSE index. * The input data matrix is divided into 2 sets, a training set and a validation set. * Entries of the validation set are nullified in the input matrix. The termination * criterion is met when the improvement in the validation set RMSE value drops below the * given tolerance. To accommodate spikes, a certain number of successive validation * RMSE drops are accepted. This upper limit on successive drops can be adjusted * with reverseStepTolerance. A secondary termination criterion terminates the algorithm * when the iteration count goes above the threshold. * * @note The input matrix is modified by this termination policy. * * @see AMF */ template class ValidationRMSETermination { public: /** * Create a validation set according to the given parameters and nullify this * set in the data matrix (training set). * * @param V Input matrix to be factorized. * @param num_test_points number of validation test points * @param tolerance tolerance on the improvement in validation RMSE * @param maxIterations max iteration count before termination * @param reverseStepTolerance max successive RMSE drops allowed */ ValidationRMSETermination(MatType& V, size_t num_test_points, double tolerance = 1e-5, size_t maxIterations = 10000, size_t reverseStepTolerance = 3) : tolerance(tolerance), maxIterations(maxIterations), num_test_points(num_test_points), reverseStepTolerance(reverseStepTolerance) { size_t n = V.n_rows; size_t m = V.n_cols; // initialize validation set matrix test_points.zeros(num_test_points, 3); // fill validation set matrix with randomly chosen entries for(size_t i = 0; i < num_test_points; i++) { double t_val; size_t t_row; size_t t_col; // pick a random non-zero entry do { t_row = rand() % n; t_col = rand() % m; } while((t_val = V(t_row, t_col)) == 0); // add the entry to the validation set test_points(i, 0) = t_row; test_points(i, 1) = t_col; test_points(i, 2) = t_val; // nullify the added entry from the data matrix (training set) V(t_row, t_col) = 0; } } /** * Initializes the termination policy before starting the factorization. * * @param V Input matrix to be factorized. */ void Initialize(const MatType& /* V */) { iteration = 1; rmse = DBL_MAX; rmseOld = DBL_MAX; c_index = 0; c_indexOld = 0; reverseStepCount = 0; isCopy = false; } /** * Check if the termination criterion is met. * * @param W Basis matrix of output. * @param H Encoding matrix of output.
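 *
 * A minimal usage sketch (hypothetical data; the AMF template arguments are
 * assumptions drawn from the rest of the library). Note that the constructor
 * of this policy, not Initialize(), selects the validation entries and
 * zeroes them out of V:
 *
 * @code
 * arma::mat V = arma::randu<arma::mat>(100, 80);
 * ValidationRMSETermination<arma::mat> term(V, 400); // Hold out 400 entries.
 * AMF<ValidationRMSETermination<arma::mat>> amf(term);
 * arma::mat W, H;
 * amf.Apply(V, 5, W, H); // Stops when the validation RMSE stops improving.
 * @endcode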
*/ bool IsConverged(arma::mat& W, arma::mat& H) { arma::mat WH; WH = W * H; // compute validation RMSE if (iteration != 0) { rmseOld = rmse; rmse = 0; for(size_t i = 0; i < num_test_points; i++) { size_t t_row = test_points(i, 0); size_t t_col = test_points(i, 1); double t_val = test_points(i, 2); double temp = (t_val - WH(t_row, t_col)); temp *= temp; rmse += temp; } rmse /= num_test_points; rmse = sqrt(rmse); } // increment iteration count iteration++; // if RMSE tolerance is not satisfied if ((rmseOld - rmse) / rmseOld < tolerance && iteration > 4) { // check if this is the first of successive drops if (reverseStepCount == 0 && isCopy == false) { // store a copy of W and H matrix isCopy = true; this->W = W; this->H = H; // store residue values c_indexOld = rmseOld; c_index = rmse; } // increase successive drop count reverseStepCount++; } // if tolerance is satisfied else { // initialize successive drop count reverseStepCount = 0; // if the RMSE dropped below the stored minimum, scrap the stored values if (rmse <= c_indexOld && isCopy == true) { isCopy = false; } } // check if termination criterion is met if (reverseStepCount == reverseStepTolerance || iteration > maxIterations) { // if stored values are present replace them with current value as they // represent the minimum residue point if (isCopy) { W = this->W; H = this->H; rmse = c_index; } return true; } else return false; } //! Get current value of validation RMSE const double& Index() const { return rmse; } //! Get current iteration count const size_t& Iteration() const { return iteration; } //! Get number of validation points const size_t& NumTestPoints() const { return num_test_points; } //! Access upper limit of iteration count const size_t& MaxIterations() const { return maxIterations; } size_t& MaxIterations() { return maxIterations; } //! Access tolerance value const double& Tolerance() const { return tolerance; } double& Tolerance() { return tolerance; } private: //! tolerance double tolerance; //! max iteration limit size_t maxIterations; //! number of validation test points size_t num_test_points; //! current iteration count size_t iteration; //! validation point matrix arma::mat test_points; //! rmse values double rmseOld; double rmse; //! tolerance on successive residue drops size_t reverseStepTolerance; //! successive residue drops size_t reverseStepCount; //! indicates whether a copy of information is available which corresponds to //! minimum residue point bool isCopy; //! variables to store information of minimum residue point arma::mat W; arma::mat H; double c_indexOld; double c_index; }; // class ValidationRMSETermination } // namespace amf } // namespace mlpack #endif // _MLPACK_METHODS_AMF_VALIDATIONRMSETERMINATION_HPP_INCLUDED mlpack-2.2.5/src/mlpack/methods/amf/update_rules/000077500000000000000000000000001315013601400217215ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/amf/update_rules/CMakeLists.txt000066400000000000000000000011041315013601400244570ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES nmf_als.hpp nmf_mult_dist.hpp nmf_mult_div.hpp svd_batch_learning.hpp svd_incomplete_incremental_learning.hpp svd_complete_incremental_learning.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/amf/update_rules/nmf_als.hpp000066400000000000000000000065441315013601400240600ustar00rootroot00000000000000/** * @file nmf_als.hpp * @author Mohan Rajendran * * Update rules for the Non-negative Matrix Factorization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LMF_UPDATE_RULES_NMF_ALS_HPP #define MLPACK_METHODS_LMF_UPDATE_RULES_NMF_ALS_HPP #include namespace mlpack { namespace amf { /** * This class implements a method titled 'Alternating Least Squares' described * in the following paper: * * @code * @article{paatero1994positive, * title={Positive matrix factorization: A non-negative factor model with * optimal utilization of error estimates of data values}, * author={Paatero, P. and Tapper, U.}, * journal={Environmetrics}, * volume={5}, * number={2}, * pages={111--126}, * year={1994} * } * @endcode * * It uses the least squares projection formula to reduce the error value of * \f$ \sqrt{\sum_i \sum_j(V-WH)^2} \f$ by alternately calculating W and H * respectively while holding the other matrix constant. */ class NMFALSUpdate { public: //! Empty constructor required for the UpdateRule template. NMFALSUpdate() { } /** * Set initial values for the factorization. In this case, we don't need to * set anything. */ template void Initialize(const MatType& /* dataset */, const size_t /* rank */) { // Nothing to do. } /** * The update rule for the basis matrix W. The formula used is * * \f[ * W^T = \frac{H V^T}{H H^T} * \f] * * The function takes in all the matrices and only changes the value of the W * matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix to be updated. * @param H Encoding matrix. */ template inline static void WUpdate(const MatType& V, arma::mat& W, const arma::mat& H) { // The call to inv() sometimes fails, so we are using the pseudoinverse. // W = (inv(H * H.t()) * H * V.t()).t(); W = V * H.t() * pinv(H * H.t()); // Set all negative numbers to 0. for (size_t i = 0; i < W.n_elem; i++) { if (W(i) < 0.0) { W(i) = 0.0; } } } /** * The update rule for the encoding matrix H. The formula used is * * \f[ * H = \frac{W^T V}{W^T W} * \f] * * The function takes in all the matrices and only changes the value of the H * matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix. * @param H Encoding matrix to be updated. */ template inline static void HUpdate(const MatType& V, const arma::mat& W, arma::mat& H) { H = pinv(W.t() * W) * W.t() * V; // Set all negative numbers to 0. for (size_t i = 0; i < H.n_elem; i++) { if (H(i) < 0.0) { H(i) = 0.0; } } } //! Serialize the object (in this case, there is nothing to serialize). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; // class NMFALSUpdate } // namespace amf } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/amf/update_rules/nmf_mult_dist.hpp000066400000000000000000000061241315013601400253010ustar00rootroot00000000000000/** * @file nmf_mult_dist.hpp * @author Mohan Rajendran * * Update rules for the Non-negative Matrix Factorization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license.
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LMF_UPDATE_RULES_NMF_MULT_DIST_UPDATE_RULES_HPP #define MLPACK_METHODS_LMF_UPDATE_RULES_NMF_MULT_DIST_UPDATE_RULES_HPP #include namespace mlpack { namespace amf { /** * The multiplicative distance update rules for matrices W and H. This follows * a method described in the following paper: * * @code * @inproceedings{lee2001algorithms, * title={Algorithms for non-negative matrix factorization}, * author={Lee, D.D. and Seung, H.S.}, * booktitle={Advances in Neural Information Processing Systems 13 * (NIPS 2000)}, * pages={556--562}, * year={2001} * } * @endcode * * This is a multiplicative rule that ensures that the Frobenius norm * \f$ \sqrt{\sum_i \sum_j(V-WH)^2} \f$ is non-increasing between subsequent * iterations. Both of the update rules for W and H are defined in this file. */ class NMFMultiplicativeDistanceUpdate { public: // Empty constructor required for the UpdateRule template. NMFMultiplicativeDistanceUpdate() { } /** * Initialize the factorization. These update rules hold no information, so * the input parameters are ignored. */ template void Initialize(const MatType& /* dataset */, const size_t /* rank */) { // Nothing to do. } /** * The update rule for the basis matrix W. The formula used is * * \f[ * W_{ia} \leftarrow W_{ia} \frac{(VH^T)_{ia}}{(WHH^T)_{ia}} * \f] * * The function takes in all the matrices and only changes the value of the W * matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix to be updated. * @param H Encoding matrix. */ template inline static void WUpdate(const MatType& V, arma::mat& W, const arma::mat& H) { W = (W % (V * H.t())) / (W * H * H.t()); } /** * The update rule for the encoding matrix H. The formula used is * * \f[ * H_{a\mu} \leftarrow H_{a\mu} \frac{(W^T V)_{a\mu}}{(W^T WH)_{a\mu}} * \f] * * The function takes in all the matrices and only changes the value of the H * matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix. * @param H Encoding matrix to be updated. */ template inline static void HUpdate(const MatType& V, const arma::mat& W, arma::mat& H) { H = (H % (W.t() * V)) / (W.t() * W * H); } //! Serialize the object (in this case, there is nothing to serialize). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace amf } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/amf/update_rules/nmf_mult_div.hpp000066400000000000000000000110561315013601400251200ustar00rootroot00000000000000/** * @file nmf_mult_div.hpp * @author Mohan Rajendran * * Update rules for the Non-negative Matrix Factorization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LMF_UPDATE_RULES_NMF_MULT_DIV_HPP #define MLPACK_METHODS_LMF_UPDATE_RULES_NMF_MULT_DIV_HPP #include namespace mlpack { namespace amf { /** * This follows the method described in the following paper: * * @code * @inproceedings{lee2001algorithms, * title={Algorithms for non-negative matrix factorization}, * author={Lee, D.D.
and Seung, H.S.}, * booktitle={Advances in Neural Information Processing Systems 13 * (NIPS 2000)}, * pages={556--562}, * year={2001} * } * @endcode * * This is a multiplicative rule that ensures that the Kullback–Leibler * divergence * * \f[ * \sum_i \sum_j (V_{ij} \log\frac{V_{ij}}{(W H)_{ij}} - V_{ij} + (W H)_{ij}) * \f] * * is non-increasing between subsequent iterations. Both of the update rules * for W and H are defined in this file. * * This set of update rules is not meant to work with sparse matrices. Using * sparse matrices often causes NaNs in the output, so other choices of update * rules are better in that situation. */ class NMFMultiplicativeDivergenceUpdate { public: // Empty constructor required for the WUpdateRule template. NMFMultiplicativeDivergenceUpdate() { } /** * Initialize the factorization. These rules don't store any state, so the * input values are ignored. */ template void Initialize(const MatType& /* dataset */, const size_t /* rank */) { // Nothing to do. } /** * The update rule for the basis matrix W. The formula used is * * \f[ * W_{ia} \leftarrow W_{ia} \frac{\sum_{\mu} H_{a\mu} V_{i\mu} / (W H)_{i\mu}} * {\sum_{\nu} H_{a\nu}} * \f] * * The function takes in all the matrices and only changes the value of the W * matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix to be updated. * @param H Encoding matrix. */ template inline static void WUpdate(const MatType& V, arma::mat& W, const arma::mat& H) { // Simple implementation left in the header file. arma::mat t1; arma::rowvec t2; t1 = W * H; for (size_t i = 0; i < W.n_rows; ++i) { for (size_t j = 0; j < W.n_cols; ++j) { // Writing this as a single expression does not work as of Armadillo // 3.920. This should be fixed in a future release, and then the code // below can be fixed. //t2 = H.row(j) % V.row(i) / t1.row(i); t2.set_size(H.n_cols); for (size_t k = 0; k < t2.n_elem; ++k) { t2(k) = H(j, k) * V(i, k) / t1(i, k); } W(i, j) = W(i, j) * sum(t2) / sum(H.row(j)); } } } /** * The update rule for the encoding matrix H. The formula used is * * \f[ * H_{a\mu} \leftarrow H_{a\mu} \frac{\sum_{i} W_{ia} V_{i\mu}/(WH)_{i\mu}} * {\sum_{k} W_{ka}} * \f] * * The function takes in all the matrices and only changes the value of the H * matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix. * @param H Encoding matrix to be updated. */ template inline static void HUpdate(const MatType& V, const arma::mat& W, arma::mat& H) { // Simple implementation left in the header file. arma::mat t1; arma::colvec t2; t1 = W * H; for (size_t i = 0; i < H.n_rows; i++) { for (size_t j = 0; j < H.n_cols; j++) { // Writing this as a single expression does not work as of Armadillo // 3.920. This should be fixed in a future release, and then the code // below can be fixed. //t2 = W.col(i) % V.col(j) / t1.col(j); t2.set_size(W.n_rows); for (size_t k = 0; k < t2.n_elem; ++k) { t2(k) = W(k, i) * V(k, j) / t1(k, j); } H(i, j) = H(i, j) * sum(t2) / sum(W.col(i)); } } } //! Serialize the object (in this case, there is nothing to serialize). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace amf } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/amf/update_rules/svd_batch_learning.hpp000066400000000000000000000154601315013601400262540ustar00rootroot00000000000000/** * @file svd_batch_learning.hpp * @author Sumedh Ghaisas * * SVD factorizer used in AMF (Alternating Matrix Factorization).
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_AMF_UPDATE_RULES_SVD_BATCH_LEARNING_HPP #define MLPACK_METHODS_AMF_UPDATE_RULES_SVD_BATCH_LEARNING_HPP #include namespace mlpack { namespace amf { /** * This class implements SVD batch learning with momentum. This procedure is * described in the following paper: * * @code * @techreport{ma2008guide, * title={A Guide to Singular Value Decomposition for Collaborative * Filtering}, * author={Ma, Chih-Chao}, * year={2008}, * institution={Department of Computer Science, National Taiwan University} * } * @endcode * * This class implements 'Algorithm 4' as given in the paper. * * The factorizer decomposes the matrix V into two matrices W and H such that * the sum of squared errors between V and W * H is minimized. This optimization * is performed with gradient descent. To make gradient descent faster, momentum * is added. */ class SVDBatchLearning { public: /** * SVD Batch learning constructor. * * @param u step value used in batch learning * @param kw regularization constant for W matrix * @param kh regularization constant for H matrix * @param momentum momentum applied to batch learning process */ SVDBatchLearning(double u = 0.0002, double kw = 0, double kh = 0, double momentum = 0.9) : u(u), kw(kw), kh(kh), momentum(momentum) { // empty constructor } /** * Initialize parameters before factorization. This function must be called * before a new factorization. This resets the internally-held momentum. * * @param dataset Input matrix to be factorized. * @param rank rank of factorization */ template void Initialize(const MatType& dataset, const size_t rank) { const size_t n = dataset.n_rows; const size_t m = dataset.n_cols; mW.zeros(n, rank); mH.zeros(rank, m); } /** * The update rule for the basis matrix W. * The function takes in all the matrices and only changes the * value of the W matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix to be updated. * @param H Encoding matrix. */ template inline void WUpdate(const MatType& V, arma::mat& W, const arma::mat& H) { size_t n = V.n_rows; size_t m = V.n_cols; size_t r = W.n_cols; // Initialize the momentum of this iteration. mW = momentum * mW; // Compute the step. arma::mat deltaW; deltaW.zeros(n, r); for (size_t i = 0; i < n; i++) { for (size_t j = 0; j < m; j++) { const double val = V(i, j); if (val != 0) deltaW.row(i) += (val - arma::dot(W.row(i), H.col(j))) * arma::trans(H.col(j)); } // Add regularization. if (kw != 0) deltaW.row(i) -= kw * W.row(i); } // Add the step to the momentum. mW += u * deltaW; // Add the momentum to the W matrix. W += mW; } /** * The update rule for the encoding matrix H. * The function takes in all the matrices and only changes the * value of the H matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix. * @param H Encoding matrix to be updated. */ template inline void HUpdate(const MatType& V, const arma::mat& W, arma::mat& H) { size_t n = V.n_rows; size_t m = V.n_cols; size_t r = W.n_cols; // Initialize the momentum of this iteration. mH = momentum * mH; // Compute the step.
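    // (A sketch of what the loop below accumulates for one column j:
    //    deltaH.col(j) = sum over the observed entries i of column j of
    //        (V(i, j) - dot(W.row(i), H.col(j))) * W.row(i)^T - kh * H.col(j);
    //  afterwards the momentum-smoothed update is applied as
    //    mH = momentum * mH + u * deltaH;  H += mH,
    //  mirroring the W update above, as in 'Algorithm 4' of the cited paper.)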
arma::mat deltaH; deltaH.zeros(r, m); for (size_t j = 0; j < m; j++) { for (size_t i = 0; i < n; i++) { const double val = V(i, j); if (val != 0) deltaH.col(j) += (val - arma::dot(W.row(i), H.col(j))) * W.row(i).t(); } // Add regularization. if (kh != 0) deltaH.col(j) -= kh * H.col(j); } // Add this step to the momentum. mH += u * deltaH; // Add the momentum to H. H += mH; } //! Serialize the SVDBatch object. template void Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(u, "u"); ar & CreateNVP(kw, "kw"); ar & CreateNVP(kh, "kh"); ar & CreateNVP(momentum, "momentum"); ar & CreateNVP(mW, "mW"); ar & CreateNVP(mH, "mH"); } private: //! Step size of the algorithm. double u; //! Regularization parameter for matrix W. double kw; //! Regularization parameter for matrix H. double kh; //! Momentum value (between 0 and 1). double momentum; //! Momentum matrix for matrix W arma::mat mW; //! Momentum matrix for matrix H arma::mat mH; }; // class SVDBatchLearning //! TODO : Merge this template specialized function for sparse matrix using //! common row_col_iterator /** * WUpdate function specialization for sparse matrix */ template<> inline void SVDBatchLearning::WUpdate(const arma::sp_mat& V, arma::mat& W, const arma::mat& H) { const size_t n = V.n_rows; const size_t r = W.n_cols; mW = momentum * mW; arma::mat deltaW; deltaW.zeros(n, r); for (arma::sp_mat::const_iterator it = V.begin(); it != V.end(); ++it) { const size_t row = it.row(); const size_t col = it.col(); deltaW.row(it.row()) += (*it - arma::dot(W.row(row), H.col(col))) * arma::trans(H.col(col)); } if (kw != 0) deltaW -= kw * W; mW += u * deltaW; W += mW; } template<> inline void SVDBatchLearning::HUpdate(const arma::sp_mat& V, const arma::mat& W, arma::mat& H) { const size_t m = V.n_cols; const size_t r = W.n_cols; mH = momentum * mH; arma::mat deltaH; deltaH.zeros(r, m); for (arma::sp_mat::const_iterator it = V.begin(); it != V.end(); ++it) { const size_t row = it.row(); const size_t col = it.col(); deltaH.col(col) += (*it - arma::dot(W.row(row), H.col(col))) * W.row(row).t(); } if (kh != 0) deltaH -= kh * H; mH += u * deltaH; H += mH; } } // namespace amf } // namespace mlpack #endif // MLPACK_METHODS_AMF_UPDATE_RULES_SVD_BATCH_LEARNING_HPP mlpack-2.2.5/src/mlpack/methods/amf/update_rules/svd_complete_incremental_learning.hpp000066400000000000000000000167621315013601400313720ustar00rootroot00000000000000/** * @file svd_complete_incremental_learning.hpp * @author Sumedh Ghaisas * * SVD factorizer used in AMF (Alternating Matrix Factorization). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_AMF_SVD_COMPLETE_INCREMENTAL_LEARNING_HPP #define MLPACK_METHODS_AMF_SVD_COMPLETE_INCREMENTAL_LEARNING_HPP #include namespace mlpack { namespace amf { /** * This class computes SVD using complete incremental batch learning, as * described in the following paper: * * @code * @techreport{ma2008guide, * title={A Guide to Singular Value Decomposition for Collaborative * Filtering}, * author={Ma, Chih-Chao}, * year={2008}, * institution={Department of Computer Science, National Taiwan University} * } * @endcode * * This class implements 'Algorithm 3' given in the paper. 
Complete incremental * learning is an extreme case of incremental learning, where feature vectors * are updated after looking at each single element in the input matrix (V). * This approach differs from incomplete incremental learning where feature * vectors are updated after seeing columns of elements in the input matrix. * * @see SVDIncompleteIncrementalLearning */ template class SVDCompleteIncrementalLearning { public: /** * Initialize the SVDCompleteIncrementalLearning class with the given * parameters. * * @param u Step value used in incremental learning. * @param kw Regularization constant for W matrix. * @param kh Regularization constant for H matrix. */ SVDCompleteIncrementalLearning(double u = 0.0001, double kw = 0, double kh = 0) : u(u), kw(kw), kh(kh) { // Nothing to do. } /** * Initialize parameters before factorization. This function must be called * before a new factorization. For this initialization, the input parameters * are unnecessary; we are only setting the current element index to 0. * * @param dataset Input matrix to be factorized. * @param rank rank of factorization */ void Initialize(const MatType& /* dataset */, const size_t /* rank */) { // Initialize the current entry index counters. currentUserIndex = 0; currentItemIndex = 0; } /** * The update rule for the basis matrix W. The function takes in all the * matrices and only changes the value of the W matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix to be updated. * @param H Encoding matrix. */ inline void WUpdate(const MatType& V, arma::mat& W, const arma::mat& H) { arma::mat deltaW; deltaW.zeros(1, W.n_cols); // Loop until a non-zero entry is found. while(true) { const double val = V(currentItemIndex, currentUserIndex); // Update feature vector if current entry is non-zero and break the loop. if (val != 0) { deltaW += (val - arma::dot(W.row(currentItemIndex), H.col(currentUserIndex))) * H.col(currentUserIndex).t(); // Add regularization. if (kw != 0) deltaW -= kw * W.row(currentItemIndex); break; } // Skip this zero entry and move to the next one (users index columns, // items index rows). currentUserIndex = (currentUserIndex + 1) % V.n_cols; if (currentUserIndex == 0) currentItemIndex = (currentItemIndex + 1) % V.n_rows; } W.row(currentItemIndex) += u * deltaW; } /** * The update rule for the encoding matrix H. * The function takes in all the matrices and only changes the * value of the H matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix. * @param H Encoding matrix to be updated. */ inline void HUpdate(const MatType& V, const arma::mat& W, arma::mat& H) { arma::mat deltaH; deltaH.zeros(H.n_rows, 1); const double val = V(currentItemIndex, currentUserIndex); // Update H matrix based on the non-zero entry found in WUpdate function. deltaH += (val - arma::dot(W.row(currentItemIndex), H.col(currentUserIndex))) * W.row(currentItemIndex).t(); // Add regularization. if (kh != 0) deltaH -= kh * H.col(currentUserIndex); H.col(currentUserIndex) += u * deltaH; // Move on to the next entry. currentUserIndex = currentUserIndex + 1; if (currentUserIndex == V.n_cols) { currentUserIndex = 0; currentItemIndex = (currentItemIndex + 1) % V.n_rows; } } private: //! Step size of the algorithm. double u; //! Regularization parameter for matrix W. double kw; //! Regularization parameter for matrix H. double kh; //! User index of current entry. size_t currentUserIndex; //! Item index of current entry. size_t currentItemIndex; }; //! TODO : Merge this template specialized function for sparse matrix using //! common row_col_iterator //!
template specialized functions for sparse matrices template<> class SVDCompleteIncrementalLearning { public: SVDCompleteIncrementalLearning(double u = 0.01, double kw = 0, double kh = 0) : u(u), kw(kw), kh(kh), it(NULL) {} ~SVDCompleteIncrementalLearning() { delete it; } void Initialize(const arma::sp_mat& dataset, const size_t rank) { (void)rank; n = dataset.n_rows; m = dataset.n_cols; it = new arma::sp_mat::const_iterator(dataset.begin()); isStart = true; } /** * The update rule for the basis matrix W. * The function takes in all the matrices and only changes the * value of the W matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix to be updated. * @param H Encoding matrix. */ inline void WUpdate(const arma::sp_mat& V, arma::mat& W, const arma::mat& H) { if (!isStart) (*it)++; else isStart = false; if (*it == V.end()) { delete it; it = new arma::sp_mat::const_iterator(V.begin()); } size_t currentUserIndex = it->col(); size_t currentItemIndex = it->row(); arma::mat deltaW(1, W.n_cols); deltaW.zeros(); deltaW += (**it - arma::dot(W.row(currentItemIndex), H.col(currentUserIndex))) * arma::trans(H.col(currentUserIndex)); if (kw != 0) deltaW -= kw * W.row(currentItemIndex); W.row(currentItemIndex) += u * deltaW; } /** * The update rule for the encoding matrix H. * The function takes in all the matrices and only changes the * value of the H matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix. * @param H Encoding matrix to be updated. */ inline void HUpdate(const arma::sp_mat& V, const arma::mat& W, arma::mat& H) { (void)V; arma::mat deltaH(H.n_rows, 1); deltaH.zeros(); size_t currentUserIndex = it->col(); size_t currentItemIndex = it->row(); deltaH += (**it - arma::dot(W.row(currentItemIndex), H.col(currentUserIndex))) * arma::trans(W.row(currentItemIndex)); if (kh != 0) deltaH -= kh * H.col(currentUserIndex); H.col(currentUserIndex) += u * deltaH; } private: double u; double kw; double kh; size_t n; size_t m; arma::sp_mat dummy; arma::sp_mat::const_iterator* it; bool isStart; }; // class SVDCompleteIncrementalLearning } // namespace amf } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/amf/update_rules/svd_incomplete_incremental_learning.hpp000066400000000000000000000150601315013601400317070ustar00rootroot00000000000000/** * @file svd_incomplete_incremental_learning.hpp * @author Sumedh Ghaisas * * SVD factorizer used in AMF (Alternating Matrix Factorization). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_AMF_SVD_INCOMPLETE_INCREMENTAL_LEARNING_HPP #define MLPACK_METHODS_AMF_SVD_INCOMPLETE_INCREMENTAL_LEARNING_HPP namespace mlpack { namespace amf { /** * This class computes SVD using incomplete incremental batch learning, as * described in the following paper: * * @code * @techreport{ma2008guide, * title={A Guide to Singular Value Decomposition for Collaborative * Filtering}, * author={Ma, Chih-Chao}, * year={2008}, * institution={Department of Computer Science, National Taiwan University} * } * @endcode * * This class implements 'Algorithm 2' as given in the paper. Incremental * learning modifies only some feature values in W and H after scanning part of * the input matrix (V). This differs from batch learning, which considers * every element in V for each update of W and H.
The regularization technique * is also different: in incomplete incremental learning, regularization takes * into account the number of elements in a given column of V. * * @see SVDBatchLearning */ class SVDIncompleteIncrementalLearning { public: /** * Initialize the parameters of SVDIncompleteIncrementalLearning. * * @param u Step value used in incremental learning. * @param kw Regularization constant for W matrix. * @param kh Regularization constant for H matrix. */ SVDIncompleteIncrementalLearning(double u = 0.001, double kw = 0, double kh = 0) : u(u), kw(kw), kh(kh) { // Nothing to do. } /** * Initialize parameters before factorization. This function must be called * before a new factorization. This simply sets the column being considered * to 0, so the input matrix and rank are not used. * * @param dataset Input matrix to be factorized. * @param rank rank of factorization */ template void Initialize(const MatType& /* dataset */, const size_t /* rank */) { // Set the current user to 0. currentUserIndex = 0; } /** * The update rule for the basis matrix W. * The function takes in all the matrices and only changes the * value of the W matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix to be updated. * @param H Encoding matrix. */ template inline void WUpdate(const MatType& V, arma::mat& W, const arma::mat& H) { arma::mat deltaW; deltaW.zeros(V.n_rows, W.n_cols); // Iterate through all the ratings by this user to update the corresponding // item feature vectors. for (size_t i = 0; i < V.n_rows; ++i) { const double val = V(i, currentUserIndex); // Update only if the rating is non-zero. if (val != 0) deltaW.row(i) += (val - arma::dot(W.row(i), H.col(currentUserIndex))) * H.col(currentUserIndex).t(); // Add regularization. if (kw != 0) deltaW.row(i) -= kw * W.row(i); } W += u * deltaW; } /** * The update rule for the encoding matrix H. The function takes in all the * matrices and only changes the value of the H matrix. * * @param V Input matrix to be factorized. * @param W Basis matrix. * @param H Encoding matrix to be updated. */ template inline void HUpdate(const MatType& V, const arma::mat& W, arma::mat& H) { arma::vec deltaH; deltaH.zeros(H.n_rows); // Iterate through all the ratings by this user to update the corresponding // item feature vectors. for (size_t i = 0; i < V.n_rows; ++i) { const double val = V(i, currentUserIndex); // Update only if the rating is non-zero. if (val != 0) deltaH += (val - arma::dot(W.row(i), H.col(currentUserIndex))) * W.row(i).t(); } // Add regularization. if (kh != 0) deltaH -= kh * H.col(currentUserIndex); // Update H matrix and move on to the next user. H.col(currentUserIndex++) += u * deltaH; currentUserIndex = currentUserIndex % V.n_cols; } private: //! Step size of the algorithm. double u; //! Regularization parameter for W matrix. double kw; //! Regularization parameter for H matrix. double kh; //! Current user under consideration. size_t currentUserIndex; }; //! TODO : Merge this template specialized function for sparse matrix using //! common row_col_iterator //!
template specialized functions for sparse matrices template<> inline void SVDIncompleteIncrementalLearning:: WUpdate(const arma::sp_mat& V, arma::mat& W, const arma::mat& H) { arma::mat deltaW(V.n_rows, W.n_cols); deltaW.zeros(); for(arma::sp_mat::const_iterator it = V.begin_col(currentUserIndex); it != V.end_col(currentUserIndex);it++) { double val = *it; size_t i = it.row(); deltaW.row(i) += (val - arma::dot(W.row(i), H.col(currentUserIndex))) * arma::trans(H.col(currentUserIndex)); if (kw != 0) deltaW.row(i) -= kw * W.row(i); } W += u * deltaW; } template<> inline void SVDIncompleteIncrementalLearning:: HUpdate(const arma::sp_mat& V, const arma::mat& W, arma::mat& H) { arma::mat deltaH(H.n_rows, 1); deltaH.zeros(); for(arma::sp_mat::const_iterator it = V.begin_col(currentUserIndex); it != V.end_col(currentUserIndex);it++) { // The iterator only visits non-zero entries, so no zero check is needed. const double val = *it; const size_t i = it.row(); deltaH += (val - arma::dot(W.row(i), H.col(currentUserIndex))) * arma::trans(W.row(i)); } if (kh != 0) deltaH -= kh * H.col(currentUserIndex); H.col(currentUserIndex++) += u * deltaH; currentUserIndex = currentUserIndex % V.n_cols; } } // namespace amf } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/ann/000077500000000000000000000000001315013601400172365ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/ann/CMakeLists.txt000066400000000000000000000007011315013601400217740ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES init_rules/random_init.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/ann/init_rules/000077500000000000000000000000001315013601400214135ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/ann/init_rules/random_init.hpp000066400000000000000000000051751315013601400244370ustar00rootroot00000000000000/** * @file random_init.hpp * @author Marcus Edel * * Initialization rule for neural networks. This simple initialization is * performed by assigning a random matrix to the weight matrix. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_ANN_INIT_RULES_RANDOM_INIT_HPP #define MLPACK_METHODS_ANN_INIT_RULES_RANDOM_INIT_HPP #include namespace mlpack { namespace ann /** Artificial Neural Network. */ { /** * This class is used to randomly initialize the weight matrix. */ class RandomInitialization { public: /** * Initialize the random initialization rule with the given lower bound and * upper bound. * * @param lowerBound The number used as lower bound. * @param upperBound The number used as upper bound. */ RandomInitialization(const double lowerBound = -1, const double upperBound = 1) : lowerBound(lowerBound), upperBound(upperBound) { } /** * Initialize the random initialization rule with the given bound, using the * negative of the bound as the lower bound and the positive bound as the * upper bound.
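 *
 * For example, a hypothetical 10 x 5 weight matrix initialized in
 * [-0.5, 0.5]:
 *
 * @code
 * RandomInitialization init(0.5);
 * arma::mat W;
 * init.Initialize(W, 10, 5); // W is now 10 x 5, uniform in [-0.5, 0.5].
 * @endcode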
* * @param bound The number used to define both bounds. */ RandomInitialization(const double bound) : lowerBound(-std::abs(bound)), upperBound(std::abs(bound)) { } /** * Randomly initialize the elements of the specified weight matrix. * * @param W Weight matrix to initialize. * @param rows Number of rows. * @param cols Number of columns. */ template void Initialize(arma::Mat& W, const size_t rows, const size_t cols) { W = lowerBound + arma::randu>(rows, cols) * (upperBound - lowerBound); } /** * Randomly initialize the elements of the specified weight 3rd order tensor. * * @param W Weight matrix to initialize. * @param rows Number of rows. * @param cols Number of columns. * @param slices Number of slices. */ template void Initialize(arma::Cube& W, const size_t rows, const size_t cols, const size_t slices) { W = arma::Cube(rows, cols, slices); for (size_t i = 0; i < slices; i++) Initialize(W.slice(i), rows, cols); } private: //! The number used as lower bound. const double lowerBound; //! The number used as upper bound. const double upperBound; }; // class RandomInitialization } // namespace ann } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/approx_kfn/000077500000000000000000000000001315013601400206315ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/approx_kfn/CMakeLists.txt000066400000000000000000000011721315013601400233720ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES # DrusillaSelect sources. drusilla_select.hpp drusilla_select_impl.hpp # QDAFN sources. qdafn.hpp qdafn_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) # This program computes approximate furthest neighbors. add_cli_executable(approx_kfn) mlpack-2.2.5/src/mlpack/methods/approx_kfn/approx_kfn_main.cpp000066400000000000000000000252111315013601400245110ustar00rootroot00000000000000/** * @file approx_kfn_main.cpp * @author Ryan Curtin * * Command-line program for approximate furthest neighbor search, using the * DrusillaSelect and QDAFN algorithms. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include "drusilla_select.hpp" #include "qdafn.hpp" #include #include using namespace mlpack; using namespace mlpack::neighbor; using namespace std; PROGRAM_INFO("Approximate furthest neighbor search", "This program implements two strategies for furthest neighbor search. " "These strategies are:" "\n\n" " - The 'qdafn' algorithm from 'Approximate Furthest Neighbor in High " "Dimensions' by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in " "Similarity Search and Applications 2015 (SISAP)." "\n" " - The 'DrusillaSelect' algorithm from 'Fast approximate furthest " "neighbors with data-dependent candidate selection', by R.R. Curtin and A.B." " Gardner, in Similarity Search and Applications 2016 (SISAP)." "\n\n" "These two strategies give approximate results for the furthest neighbor " "search problem and can be used as fast replacements for other furthest " "neighbor techniques such as those found in the mlpack_kfn program.
Note " "that typically, the 'ds' algorithm requires far fewer tables and " "projections than the 'qdafn' algorithm." "\n\n" "Specify a reference set (set to search in) with --reference_file, " "specify a query set with --query_file, and specify algorithm parameters " "with --num_tables (-t) and --num_projections (-p) (or don't and defaults " "will be used). The algorithm to be used (either 'ds'---the default---or " "'qdafn') may be specified with --algorithm. Also specify the number of " "neighbors to search for with --k. Each of those options also has short " "names; see the detailed parameter documentation below." "\n\n" "If no query file is specified, the reference set will be used as the " "query set. A model may be saved with --output_model_file (-M), and an " "input model may be loaded instead of specifying a reference set with " "--input_model_file (-m)." "\n\n" "Results for each query point are stored in the files specified by " "--neighbors_file and --distances_file. This is in the same format as the " "mlpack_kfn and mlpack_knn programs: each row holds the k distances or " "neighbor indices for each query point."); PARAM_STRING_IN("reference_file", "File containing reference points.", "r", ""); PARAM_STRING_IN("query_file", "File containing query points.", "q", ""); // Model loading and saving. PARAM_STRING_IN("input_model_file", "File containing input model.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save output model to.", "M"); PARAM_INT_IN("k", "Number of furthest neighbors to search for.", "k", 0); PARAM_INT_IN("num_tables", "Number of hash tables to use.", "t", 5); PARAM_INT_IN("num_projections", "Number of projections to use in each hash " "table.", "p", 5); PARAM_STRING_IN("algorithm", "Algorithm to use: 'ds' or 'qdafn'.", "a", "ds"); PARAM_STRING_IN("neighbors_file", "File to save furthest neighbor indices to.", "n", ""); PARAM_STRING_IN("distances_file", "File to save furthest neighbor distances to.", "d", ""); PARAM_FLAG("calculate_error", "If set, calculate the average distance error for" " the first furthest neighbor only.", "e"); PARAM_STRING_IN("exact_distances_file", "File containing exact distances to " "furthest neighbors; this can be used to avoid explicit calculation when " "--calculate_error is set.", "x", ""); // If we save a model we must also save what type it is. class ApproxKFNModel { public: int type; DrusillaSelect<> ds; QDAFN<> qdafn; //! Constructor, which does nothing. ApproxKFNModel() : type(0), ds(1, 1), qdafn(1, 1) { } //! Serialize the model. template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(type, "type"); if (type == 0) { ar & data::CreateNVP(ds, "model"); } else { ar & data::CreateNVP(qdafn, "model"); } } }; int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); if (!CLI::HasParam("reference_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "Either --reference_file (-r) or --input_model_file (-m) must" << " be specified!" << endl; if (CLI::HasParam("reference_file") && CLI::HasParam("input_model_file")) Log::Fatal << "Only one of --reference_file (-r) or --input_model_file (-m)" << " can be specified!" << endl; if (!CLI::HasParam("output_model_file") && !CLI::HasParam("k")) Log::Warn << "Neither --output_model_file (-M) nor --k (-k) are specified;" << " no task will be performed." 
<< endl; if (!CLI::HasParam("neighbors_file") && !CLI::HasParam("distances_file") && !CLI::HasParam("output_model_file")) Log::Warn << "None of --output_model_file (-M), --neighbors_file (-n), or " << "--distances_file (-d) are specified; no output will be saved!" << endl; if (CLI::GetParam("algorithm") != "ds" && CLI::GetParam("algorithm") != "qdafn") Log::Fatal << "Unknown algorithm '" << CLI::GetParam("algorithm") << "'; must be 'ds' or 'qdafn'!" << endl; if (CLI::HasParam("k") && !(CLI::HasParam("reference_file") || CLI::HasParam("query_file"))) Log::Fatal << "If search is being performed, then either --query_file " << "or --reference_file must be specified!" << endl; if (CLI::GetParam("num_tables") <= 0) Log::Fatal << "Invalid --num_tables value (" << CLI::GetParam("num_tables") << "); must be greater than 0!" << endl; if (CLI::GetParam("num_projections") <= 0) Log::Fatal << "Invalid --num_projections value (" << CLI::GetParam("num_projections") << "); must be greater than 0!" << endl; if (CLI::HasParam("calculate_error") && !CLI::HasParam("k")) Log::Warn << "--calculate_error ignored because --k is not specified." << endl; if (CLI::HasParam("exact_distances_file") && !CLI::HasParam("calculate_error")) Log::Warn << "--exact_distances_file ignored because --calculate_error is " << "not specified." << endl; if (CLI::HasParam("calculate_error") && !CLI::HasParam("exact_distances_file") && !CLI::HasParam("reference_file")) Log::Fatal << "Cannot calculate error without either --exact_distances_file" << " or --reference_file specified!" << endl; // Do the building of a model, if necessary. ApproxKFNModel m; arma::mat referenceSet; // This may be used at query time. if (CLI::HasParam("reference_file")) { const string referenceFile = CLI::GetParam("reference_file"); data::Load(referenceFile, referenceSet); const size_t numTables = (size_t) CLI::GetParam("num_tables"); const size_t numProjections = (size_t) CLI::GetParam("num_projections"); const string algorithm = CLI::GetParam("algorithm"); if (algorithm == "ds") { Timer::Start("drusilla_select_construct"); Log::Info << "Building DrusillaSelect model..." << endl; m.type = 0; m.ds = DrusillaSelect<>(referenceSet, numTables, numProjections); Timer::Stop("drusilla_select_construct"); } else { Timer::Start("qdafn_construct"); Log::Info << "Building QDAFN model..." << endl; m.type = 1; m.qdafn = QDAFN<>(referenceSet, numTables, numProjections); Timer::Stop("qdafn_construct"); } Log::Info << "Model built." << endl; } else { // We must load the model from file. const string inputModelFile = CLI::GetParam("input_model_file"); data::Load(inputModelFile, "approx_kfn", m); } // Now, do we need to do any queries? if (CLI::HasParam("k")) { arma::mat querySet; // This may or may not be used. const size_t k = (size_t) CLI::GetParam("k"); arma::Mat neighbors; arma::mat distances; arma::mat& set = CLI::HasParam("query_file") ? querySet : referenceSet; if (CLI::HasParam("query_file")) { const string queryFile = CLI::GetParam("query_file"); data::Load(queryFile, querySet); } if (m.type == 0) { Timer::Start("drusilla_select_search"); Log::Info << "Searching for " << k << " furthest neighbors with " << "DrusillaSelect..." << endl; m.ds.Search(set, k, neighbors, distances); Timer::Stop("drusilla_select_search"); } else { Timer::Start("qdafn_search"); Log::Info << "Searching for " << k << " furthest neighbors with " << "QDAFN..." << endl; m.qdafn.Search(set, k, neighbors, distances); Timer::Stop("qdafn_search"); } Log::Info << "Search complete."
<< endl; // Should we calculate error? if (CLI::HasParam("calculate_error")) { arma::mat exactDistances; if (CLI::HasParam("exact_distances_file")) { data::Load(CLI::GetParam("exact_distances_file"), exactDistances); } else { // Calculate exact distances. We are guaranteed the reference set is // available. Log::Info << "Calculating exact distances..." << endl; AllkFN kfn(referenceSet); arma::Mat exactNeighbors; kfn.Search(set, 1, exactNeighbors, exactDistances); Log::Info << "Calculation complete." << endl; } const double averageError = arma::sum(exactDistances.row(0) / distances.row(0)) / distances.n_cols; const double minError = arma::min(exactDistances.row(0) / distances.row(0)); const double maxError = arma::max(exactDistances.row(0) / distances.row(0)); Log::Info << "Average error: " << averageError << "." << endl; Log::Info << "Maximum error: " << maxError << "." << endl; Log::Info << "Minimum error: " << minError << "." << endl; } // Save results, if desired. if (CLI::HasParam("neighbors_file")) data::Save(CLI::GetParam("neighbors_file"), neighbors, false); if (CLI::HasParam("distances_file")) data::Save(CLI::GetParam("distances_file"), distances, false); } // Should we save the model? if (CLI::HasParam("output_model_file")) data::Save(CLI::GetParam("output_model_file"), "approx_kfn", m); } mlpack-2.2.5/src/mlpack/methods/approx_kfn/drusilla_select.hpp000066400000000000000000000105661315013601400245300ustar00rootroot00000000000000/** * @file drusilla_select.hpp * @author Ryan Curtin * * An implementation of the approximate furthest neighbor algorithm specified in * the following paper: * * @code * @incollection{curtin2016fast, * title={Fast approximate furthest neighbors with data-dependent candidate * selection}, * author={Curtin, R.R., and Gardner, A.B.}, * booktitle={Similarity Search and Applications}, * pages={221--235}, * year={2016}, * publisher={Springer} * } * @endcode * * This algorithm, called DrusillaSelect, constructs a candidate set of points * to query to find an approximate furthest neighbor. The strange name is a * result of the algorithm being named after a cat. The cat in question may be * viewed at http://www.ratml.org/misc_img/drusilla_fence.png. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_APPROX_KFN_DRUSILLA_SELECT_HPP #define MLPACK_METHODS_APPROX_KFN_DRUSILLA_SELECT_HPP #include namespace mlpack { namespace neighbor { template class DrusillaSelect { public: /** * Construct the DrusillaSelect object with the given reference set (this is * the set that will be searched). The resulting set of candidate points that * will be searched at query time will have size l*m. * * @param referenceSet Set of reference data. * @param l Number of projections. * @param m Number of elements to store for each projection. */ DrusillaSelect(const MatType& referenceSet, const size_t l, const size_t m); /** * Construct the DrusillaSelect object with no given reference set. Be sure * to call Train() before calling Search()! * * @param l Number of projections. * @param m Number of elements to store for each projection. */ DrusillaSelect(const size_t l, const size_t m); /** * Build the set of candidate points on the given reference set. 
If l and m * are left unspecified, then the values set in the constructor will be used * instead. * * @param referenceSet Set to extract candidate points from. * @param l Number of projections. * @param m Number of elements to store for each projection. */ void Train(const MatType& referenceSet, const size_t l = 0, const size_t m = 0); /** * Search for the k furthest neighbors of the given query set. (The query set * can contain just one point: that is okay.) The results will be stored in * the given neighbors and distances matrices, in the same format as the * NeighborSearch and LSHSearch classes. That is, each column in the * neighbors and distances matrices will refer to a single query point, and * the k'th row in that column will refer to the k'th candidate neighbor or * distance for that query point. * * @param querySet Set of query points to search. * @param k Number of furthest neighbors to search for. * @param neighbors Matrix to store resulting neighbors in. * @param distances Matrix to store resulting distances in. */ void Search(const MatType& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances); /** * Serialize the model. */ template void Serialize(Archive& ar, const unsigned int /* version */); //! Access the candidate set. const MatType& CandidateSet() const { return candidateSet; } //! Modify the candidate set. Be careful! MatType& CandidateSet() { return candidateSet; } //! Access the indices of points in the candidate set. const arma::Col& CandidateIndices() const { return candidateIndices; } //! Modify the indices of points in the candidate set. Be careful! arma::Col& CandidateIndices() { return candidateIndices; } private: //! The reference set. MatType candidateSet; //! Indices of each point in the reference set. arma::Col candidateIndices; //! The number of projections. size_t l; //! The number of points in each projection. size_t m; }; } // namespace neighbor } // namespace mlpack // Include implementation. #include "drusilla_select_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/approx_kfn/drusilla_select_impl.hpp000066400000000000000000000152731315013601400255510ustar00rootroot00000000000000/** * @file drusilla_select_impl.hpp * @author Ryan Curtin * * Implementation of DrusillaSelect class methods. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_APPROX_KFN_DRUSILLA_SELECT_IMPL_HPP #define MLPACK_METHODS_APPROX_KFN_DRUSILLA_SELECT_IMPL_HPP // In case it hasn't been included yet. #include "drusilla_select.hpp" #include #include #include #include #include namespace mlpack { namespace neighbor { // Constructor. template DrusillaSelect::DrusillaSelect(const MatType& referenceSet, const size_t l, const size_t m) : candidateSet(referenceSet.n_cols, l * m), candidateIndices(l * m), l(l), m(m) { if (l == 0) throw std::invalid_argument("DrusillaSelect::DrusillaSelect(): invalid " "value of l; must be greater than 0!"); else if (m == 0) throw std::invalid_argument("DrusillaSelect::DrusillaSelect(): invalid " "value of m; must be greater than 0!"); Train(referenceSet, l, m); } // Constructor with no training. 
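// A hypothetical two-phase usage of this constructor: build the object with
// the table sizes only, then call Train() with the reference data before any
// call to Search(), e.g.:
//
//   DrusillaSelect<> ds(5, 5);   // l = 5 projections, m = 5 points each.
//   ds.Train(referenceSet);      // referenceSet is an arma::mat of data.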
template DrusillaSelect::DrusillaSelect(const size_t l, const size_t m) : candidateSet(0, l * m), candidateIndices(l * m), l(l), m(m) { if (l == 0) throw std::invalid_argument("DrusillaSelect::DrusillaSelect(): invalid " "value of l; must be greater than 0!"); else if (m == 0) throw std::invalid_argument("DrusillaSelect::DrusillaSelect(): invalid " "value of m; must be greater than 0!"); } // Train the model. template void DrusillaSelect::Train( const MatType& referenceSet, const size_t lIn, const size_t mIn) { // Did the user specify a new size? If so, use it. if (lIn > 0) l = lIn; if (mIn > 0) m = mIn; if ((l * m) > referenceSet.n_cols) throw std::invalid_argument("DrusillaSelect::Train(): l and m are too " "large! Choose smaller values. l*m must be smaller than the number " "of points in the dataset."); candidateSet.set_size(referenceSet.n_rows, l * m); candidateIndices.set_size(l * m); arma::vec dataMean(arma::mean(referenceSet, 1)); arma::vec norms(referenceSet.n_cols); MatType refCopy(referenceSet.n_rows, referenceSet.n_cols); for (size_t i = 0; i < refCopy.n_cols; ++i) { refCopy.col(i) = referenceSet.col(i) - dataMean; norms[i] = arma::norm(refCopy.col(i)); } // Find the top m points for each of the l projections... for (size_t i = 0; i < l; ++i) { // Pick best index. arma::uword maxIndex; norms.max(maxIndex); arma::vec line(refCopy.col(maxIndex) / arma::norm(refCopy.col(maxIndex))); // Calculate distortion and offset and make scores. std::vector closeAngle(referenceSet.n_cols, false); arma::vec sums(referenceSet.n_cols); for (size_t j = 0; j < referenceSet.n_cols; ++j) { if (norms[j] > 0.0) { const double offset = arma::dot(refCopy.col(j), line); const double distortion = arma::norm(refCopy.col(j) - offset * line); sums[j] = std::abs(offset) - std::abs(distortion); closeAngle[j] = (std::atan(distortion / std::abs(offset)) < (M_PI / 8.0)); } else { sums[j] = norms[j]; } } // Find the top m elements using a priority queue. typedef std::pair Candidate; struct CandidateCmp { bool operator()(const Candidate& c1, const Candidate& c2) { return c2.first < c1.first; } }; std::vector clist(m, std::make_pair(double(-DBL_MAX), size_t(-1))); std::priority_queue, CandidateCmp> pq(CandidateCmp(), std::move(clist)); for (size_t j = 0; j < sums.n_elem; ++j) { Candidate c = std::make_pair(sums[j], j); if (CandidateCmp()(c, pq.top())) { pq.pop(); pq.push(c); } } // Take the top m elements for this table. for (size_t j = 0; j < m; ++j) { const size_t index = pq.top().second; pq.pop(); candidateSet.col(i * m + j) = referenceSet.col(index); candidateIndices[i * m + j] = index; // Mark the norm as -1 so we don't see this point again. norms[index] = -1.0; } // Calculate angles from the current projection. Anything close enough, // mark the norm as 0. for (size_t j = 0; j < norms.n_elem; ++j) if (norms[j] > 0.0 && closeAngle[j]) norms[j] = 0.0; } } // Search. template void DrusillaSelect::Search(const MatType& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances) { if (candidateSet.n_cols == 0) throw std::runtime_error("DrusillaSelect::Search(): candidate set not " "initialized! Call Train() first."); if (k > (l * m)) throw std::invalid_argument("DrusillaSelect::Search(): requested k is " "greater than number of points in candidate set! Increase l or m."); // We'll use the NeighborSearchRules class to perform our brute-force search. // Note that we aren't using trees for our search, so we can use 'int' as a // TreeType. 
// Search.
template<typename MatType>
void DrusillaSelect<MatType>::Search(const MatType& querySet,
                                     const size_t k,
                                     arma::Mat<size_t>& neighbors,
                                     arma::mat& distances)
{
  if (candidateSet.n_cols == 0)
    throw std::runtime_error("DrusillaSelect::Search(): candidate set not "
        "initialized!  Call Train() first.");

  if (k > (l * m))
    throw std::invalid_argument("DrusillaSelect::Search(): requested k is "
        "greater than number of points in candidate set!  Increase l or m.");

  // We'll use the NeighborSearchRules class to perform our brute-force
  // search.  Note that we aren't actually building or traversing any trees;
  // NeighborSearchRules just needs a valid TreeType, so we give it a KDTree.
  metric::EuclideanDistance metric;
  NeighborSearchRules<FurthestNeighborSort, metric::EuclideanDistance,
      tree::KDTree<metric::EuclideanDistance, tree::EmptyStatistic,
      arma::mat>> rules(candidateSet, querySet, k, metric, 0, false);

  for (size_t q = 0; q < querySet.n_cols; ++q)
    for (size_t r = 0; r < candidateSet.n_cols; ++r)
      rules.BaseCase(q, r);

  rules.GetResults(neighbors, distances);

  // Map the neighbors back to their original indices in the reference set.
  for (size_t i = 0; i < neighbors.n_elem; ++i)
    neighbors[i] = candidateIndices[neighbors[i]];
}

//! Serialize the model.
template<typename MatType>
template<typename Archive>
void DrusillaSelect<MatType>::Serialize(Archive& ar,
                                        const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(candidateSet, "candidateSet");
  ar & CreateNVP(candidateIndices, "candidateIndices");
  ar & CreateNVP(l, "l");
  ar & CreateNVP(m, "m");
}

} // namespace neighbor
} // namespace mlpack

#endif

mlpack-2.2.5/src/mlpack/methods/approx_kfn/qdafn.hpp

/**
 * @file qdafn.hpp
 * @author Ryan Curtin
 *
 * An implementation of the query-dependent approximate furthest neighbor
 * algorithm specified in the following paper:
 *
 * @code
 * @incollection{pagh2015approximate,
 *   title={Approximate furthest neighbor in high dimensions},
 *   author={Pagh, R. and Silvestri, F. and Sivertsen, J. and Skala, M.},
 *   booktitle={Similarity Search and Applications},
 *   pages={3--14},
 *   year={2015},
 *   publisher={Springer}
 * }
 * @endcode
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_APPROX_KFN_QDAFN_HPP
#define MLPACK_METHODS_APPROX_KFN_QDAFN_HPP

#include <vector>
#include <mlpack/core.hpp>

namespace mlpack {
namespace neighbor {

template<typename MatType = arma::mat>
class QDAFN
{
 public:
  /**
   * Construct the QDAFN object but do not train it.  Be sure to call Train()
   * before calling Search().
   *
   * @param l Number of projections.
   * @param m Number of elements to store for each projection.
   */
  QDAFN(const size_t l, const size_t m);

  /**
   * Construct the QDAFN object with the given reference set (this is the set
   * that will be searched).
   *
   * @param referenceSet Set of reference data.
   * @param l Number of projections.
   * @param m Number of elements to store for each projection.
   */
  QDAFN(const MatType& referenceSet,
        const size_t l,
        const size_t m);

  /**
   * Train the QDAFN model on the given reference set, optionally setting new
   * parameters for the number of projections/tables (l) and the number of
   * elements stored for each projection/table (m).
   *
   * @param referenceSet Reference set to train on.
   * @param l Number of projections.
   * @param m Number of elements to store for each projection.
   */
  void Train(const MatType& referenceSet,
             const size_t l = 0,
             const size_t m = 0);

  /**
   * Search for the k furthest neighbors of the given query set.  (The query
   * set can contain just one point; that is okay.)  The results will be
   * stored in the given neighbors and distances matrices, in the same format
   * as the mlpack NeighborSearch and LSHSearch classes.
   */
  void Search(const MatType& querySet,
              const size_t k,
              arma::Mat<size_t>& neighbors,
              arma::mat& distances);

  //! Serialize the model.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */);

  //! Get the number of projections.
  size_t NumProjections() const { return candidateSet.size(); }

  //! Get the candidate set for the given projection table.
  const MatType& CandidateSet(const size_t t) const { return candidateSet[t]; }
  //! Modify the candidate set for the given projection table.  Careful!
  MatType& CandidateSet(const size_t t) { return candidateSet[t]; }

 private:
  //! The number of projections.
  size_t l;
  //! The number of elements to store for each projection.
  size_t m;
  //! The random lines we are projecting onto.  Has l columns.
  arma::mat lines;
  //! Projections of each point onto each random line.
  arma::mat projections;

  //! Indices of the points for each S.
  arma::Mat<size_t> sIndices;
  //! Values of a_i * x for each point in S.
  arma::mat sValues;

  // Candidate sets; one element in the vector for each table.
  std::vector<MatType> candidateSet;
};

} // namespace neighbor
} // namespace mlpack

// Include implementation.
#include "qdafn_impl.hpp"

#endif

mlpack-2.2.5/src/mlpack/methods/approx_kfn/qdafn_impl.hpp

/**
 * @file qdafn_impl.hpp
 * @author Ryan Curtin
 *
 * Implementation of QDAFN class methods.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_APPROX_KFN_QDAFN_IMPL_HPP
#define MLPACK_METHODS_APPROX_KFN_QDAFN_IMPL_HPP

// In case it hasn't been included yet.
#include "qdafn.hpp"

#include <queue>
#include <functional>
#include <mlpack/core/dists/gaussian_distribution.hpp>

namespace mlpack {
namespace neighbor {

// Non-training constructor.
template<typename MatType>
QDAFN<MatType>::QDAFN(const size_t l, const size_t m) : l(l), m(m)
{
  if (l == 0)
    throw std::invalid_argument("QDAFN::QDAFN(): l must be greater than 0!");
  if (m == 0)
    throw std::invalid_argument("QDAFN::QDAFN(): m must be greater than 0!");
}

// Constructor.
template<typename MatType>
QDAFN<MatType>::QDAFN(const MatType& referenceSet,
                      const size_t l,
                      const size_t m) : l(l), m(m)
{
  if (l == 0)
    throw std::invalid_argument("QDAFN::QDAFN(): l must be greater than 0!");
  if (m == 0)
    throw std::invalid_argument("QDAFN::QDAFN(): m must be greater than 0!");

  Train(referenceSet);
}

// Train the object.
template<typename MatType>
void QDAFN<MatType>::Train(const MatType& referenceSet,
                           const size_t lIn,
                           const size_t mIn)
{
  if (lIn != 0)
    l = lIn;
  if (mIn != 0)
    m = mIn;

  // Build tables.  This is done by drawing random points from a Gaussian
  // distribution as the vectors we project onto.  The Gaussian should have
  // zero mean and unit variance.
  mlpack::distribution::GaussianDistribution gd(referenceSet.n_rows);
  lines.set_size(referenceSet.n_rows, l);
  for (size_t i = 0; i < l; ++i)
    lines.col(i) = gd.Random();

  // Now, project each of the reference points onto each line, and collect
  // the top m elements.
  projections = referenceSet.t() * lines;

  // Loop over each projection and find the top m elements.
  sIndices.set_size(m, l);
  sValues.set_size(m, l);
  candidateSet.resize(l);
  for (size_t i = 0; i < l; ++i)
  {
    candidateSet[i].set_size(referenceSet.n_rows, m);
    arma::uvec sortedIndices = arma::sort_index(projections.col(i),
        "descend");

    // Grab the top m elements.
    for (size_t j = 0; j < m; ++j)
    {
      sIndices(j, i) = sortedIndices[j];
      sValues(j, i) = projections(sortedIndices[j], i);
      candidateSet[i].col(j) = referenceSet.col(sortedIndices[j]);
    }
  }
}
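// A minimal usage sketch of QDAFN; the dataset and parameter values are
// illustrative only:
//
// @code
// arma::mat dataset = arma::randu<arma::mat>(10, 1000);
// QDAFN<arma::mat> q(dataset, 10, 30); // l = 10 tables, m = 30 points each.
//
// arma::Mat<size_t> neighbors;
// arma::mat distances;
// q.Search(dataset, 1, neighbors, distances); // Approximate 1-furthest
//                                             // neighbor per query point.
// @endcode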
// Search.
template<typename MatType>
void QDAFN<MatType>::Search(const MatType& querySet,
                            const size_t k,
                            arma::Mat<size_t>& neighbors,
                            arma::mat& distances)
{
  if (k > m)
    throw std::invalid_argument("QDAFN::Search(): requested k is greater "
        "than value of m!");

  neighbors.set_size(k, querySet.n_cols);
  neighbors.fill(size_t() - 1);
  distances.zeros(k, querySet.n_cols);

  // Search for each point.
  for (size_t q = 0; q < querySet.n_cols; ++q)
  {
    // Initialize a priority queue.
    // The size_t represents the index of the table, and the double
    // represents the value of l_i * S_i - l_i * query (see line 6 of
    // Algorithm 1).
    std::priority_queue<std::pair<double, size_t>> queue;
    for (size_t i = 0; i < l; ++i)
    {
      const double val = sValues(0, i) - arma::dot(querySet.col(q),
          lines.col(i));
      queue.push(std::make_pair(val, i));
    }

    // To track where we are in each S table, we keep the next index to look
    // at in each table (they start at 0).
    arma::Col<size_t> tableLocations = arma::zeros<arma::Col<size_t>>(l);

    // Now that the queue is initialized, iterate over m elements.  We keep a
    // min-heap of the k best results seen so far, so that the top of the
    // heap is the worst of those k candidates.
    std::vector<std::pair<double, size_t>> v(k,
        std::make_pair(-1.0, size_t(-1)));
    std::priority_queue<std::pair<double, size_t>,
        std::vector<std::pair<double, size_t>>,
        std::greater<std::pair<double, size_t>>>
        resultsQueue(std::greater<std::pair<double, size_t>>(), std::move(v));

    for (size_t i = 0; i < m; ++i)
    {
      std::pair<double, size_t> p = queue.top();
      queue.pop();

      // Get index of reference point to look at.
      const size_t tableIndex = tableLocations[p.second];

      // Calculate distance from query point.
      const double dist = mlpack::metric::EuclideanDistance::Evaluate(
          querySet.col(q), candidateSet[p.second].col(tableIndex));

      // Is this neighbor good enough to insert into the results?
      if (dist > resultsQueue.top().first)
      {
        resultsQueue.pop();
        resultsQueue.push(std::make_pair(dist,
            sIndices(tableIndex, p.second)));
      }

      // Now (line 14) get the next element and insert into the queue.  Do
      // this by adjusting the previous value.  Don't insert anything if we
      // are at the end of the search, though.
      if (i < m - 1)
      {
        tableLocations[p.second]++;
        const double val = p.first - sValues(tableIndex, p.second) +
            sValues(tableIndex + 1, p.second);

        queue.push(std::make_pair(val, p.second));
      }
    }

    // Extract the results.
    for (size_t j = 1; j <= k; ++j)
    {
      neighbors(k - j, q) = resultsQueue.top().second;
      distances(k - j, q) = resultsQueue.top().first;
      resultsQueue.pop();
    }
  }
}

template<typename MatType>
template<typename Archive>
void QDAFN<MatType>::Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(l, "l");
  ar & CreateNVP(m, "m");
  ar & CreateNVP(lines, "lines");
  ar & CreateNVP(projections, "projections");
  ar & CreateNVP(sIndices, "sIndices");
  ar & CreateNVP(sValues, "sValues");
  if (Archive::is_loading::value)
    candidateSet.clear();
  ar & CreateNVP(candidateSet, "candidateSet");
}

} // namespace neighbor
} // namespace mlpack

#endif

mlpack-2.2.5/src/mlpack/methods/approx_kfn/qdafn_main.cpp

/**
 * @file qdafn_main.cpp
 * @author Ryan Curtin
 *
 * Command-line program for the QDAFN algorithm.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include "qdafn.hpp"
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

using namespace mlpack::neighbor;
using namespace mlpack;
using namespace std;

PROGRAM_INFO("Query-dependent approximate furthest neighbor search",
    "This program implements the algorithm from the SISAP 2015 paper titled "
    "'Approximate Furthest Neighbor in High Dimensions' by R. Pagh, F. "
    "Silvestri, J. Sivertsen, and M. Skala.  Specify a reference set (set to "
    "search in) with --reference_file, specify a query set (set to search "
    "for) with --query_file, and specify algorithm parameters with "
    "--num_tables and --num_projections (or don't, and defaults will be "
    "used).  Also specify the number of points to search for with --k.  Each "
    "of those options has short names too; see the detailed parameter "
    "documentation below."
    "\n\n"
    "Results for each query point are stored in the files specified by "
    "--neighbors_file and --distances_file.  This is in the same format as "
    "the mlpack KFN and KNN programs: each row holds the k distances or "
    "neighbor indices for each query point.");
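// A hypothetical invocation, assuming the built binary is named
// 'mlpack_qdafn' (the actual name depends on the build configuration):
//
//   $ mlpack_qdafn -r reference.csv -q queries.csv -k 3 -t 10 -p 30 \
//         -n neighbors.csv -d distances.csv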
"\n\n" "Results for each query point are stored in the files specified by " "--neighbors_file and --distances_file. This is in the same format as the " "mlpack KFN and KNN programs: each row holds the k distances or neighbor " "indices for each query point."); PARAM_STRING_REQ("reference_file", "File containing reference points.", "r"); PARAM_STRING_REQ("query_file", "File containing query points.", "q"); PARAM_INT_REQ("k", "Number of furthest neighbors to search for.", "k"); PARAM_INT("num_tables", "Number of hash tables to use.", "t", 10); PARAM_INT("num_projections", "Number of projections to use in each hash table.", "p", 30); PARAM_STRING("neighbors_file", "File to save furthest neighbor indices to.", "n", ""); PARAM_STRING("distances_file", "File to save furthest neighbor distances to.", "d", ""); PARAM_FLAG("calculate_error", "If set, calculate the average distance error.", "e"); int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); const string referenceFile = CLI::GetParam("reference_file"); const string queryFile = CLI::GetParam("query_file"); const size_t k = (size_t) CLI::GetParam("k"); const size_t numTables = (size_t) CLI::GetParam("num_tables"); const size_t numProjections = (size_t) CLI::GetParam("num_projections"); // Load the data. arma::mat referenceData, queryData; data::Load(referenceFile, referenceData, true); data::Load(queryFile, queryData, true); // Construct the object. Timer::Start("qdafn_construct"); QDAFN<> q(referenceData, numTables, numProjections); Timer::Stop("qdafn_construct"); // Do the search. arma::Mat neighbors; arma::mat distances; Timer::Start("qdafn_search"); q.Search(queryData, k, neighbors, distances); Timer::Stop("qdafn_search"); // Print the number of base cases. Log::Info << "Total distance evaluations: " << (queryData.n_cols * numProjections) << "." << endl; if (CLI::HasParam("calculate_error")) { neighbor::AllkFN kfn(referenceData); arma::Mat trueNeighbors; arma::mat trueDistances; kfn.Search(queryData, 1, trueNeighbors, trueDistances); const double averageError = arma::sum(trueDistances / distances.row(0)) / distances.n_cols; Log::Info << "Average error: " << averageError << "." << endl; } // Save the results. if (CLI::HasParam("neighbors_file")) data::Save(CLI::GetParam("neighbors_file"), neighbors); if (CLI::HasParam("distances_file")) data::Save(CLI::GetParam("distances_file"), distances); } mlpack-2.2.5/src/mlpack/methods/cf/000077500000000000000000000000001315013601400170525ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/cf/CMakeLists.txt000066400000000000000000000010061315013601400216070ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES cf.hpp cf_impl.hpp cf.cpp svd_wrapper.hpp svd_wrapper_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable (cf) mlpack-2.2.5/src/mlpack/methods/cf/cf.cpp000066400000000000000000000233171315013601400201540ustar00rootroot00000000000000/** * @file cf.cpp * @author Mudit Raj Gupta * @author Sumedh Ghaisas * * Collaborative Filtering. * * Implementation of CF class to perform Collaborative Filtering on the * specified data set. 
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "cf.hpp"

namespace mlpack {
namespace cf {

// Default CF constructor.
CF::CF(const size_t numUsersForSimilarity,
       const size_t rank) :
    numUsersForSimilarity(numUsersForSimilarity),
    rank(rank)
{
  // Validate neighbourhood size.
  if (numUsersForSimilarity < 1)
  {
    Log::Warn << "CF::CF(): neighbourhood size should be > 0 ("
        << numUsersForSimilarity << " given). Setting value to 5.\n";
    // Set default value of 5.
    this->numUsersForSimilarity = 5;
  }
}

void CF::GetRecommendations(const size_t numRecs,
                            arma::Mat<size_t>& recommendations)
{
  // Generate list of users.  Maybe it would be more efficient to pass an
  // empty users list, and then have the other overload of
  // GetRecommendations() assume that if users is empty, then recommendations
  // should be generated for all users?
  arma::Col<size_t> users = arma::linspace<arma::Col<size_t>>(0,
      cleanedData.n_cols - 1, cleanedData.n_cols);

  // Call the main overload for recommendations.
  GetRecommendations(numRecs, recommendations, users);
}

void CF::GetRecommendations(const size_t numRecs,
                            arma::Mat<size_t>& recommendations,
                            arma::Col<size_t>& users)
{
  // We want to avoid calculating the full rating matrix, so we will do
  // nearest neighbor search only on the H matrix, using the observation that
  // if the rating matrix X = W*H, then d(X.col(i), X.col(j)) = d(W H.col(i),
  // W H.col(j)).  This can be seen as nearest neighbor search on the H
  // matrix with the Mahalanobis distance where M^{-1} = W^T W.  So, we'll
  // decompose M^{-1} = L L^T (the Cholesky decomposition), and then multiply
  // H by L^T.  Then we can perform nearest neighbor search.
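  // To spell the claim out (a short derivation, not part of the original
  // comment): for any two columns h_i, h_j of H,
  //   d(W h_i, W h_j)^2 = (h_i - h_j)^T W^T W (h_i - h_j)
  //                     = (h_i - h_j)^T L L^T (h_i - h_j)
  //                     = || L^T (h_i - h_j) ||^2,
  // so Euclidean search on L^T H is exactly Euclidean search on W H.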
  arma::mat l = arma::chol(w.t() * w);
  arma::mat stretchedH = l * h; // Due to the Armadillo API, l is L^T.

  // Now, we will use the decomposed w and h matrices to estimate what the
  // user would have rated items as, and then pick the best items.

  // Temporarily store feature vector of queried users.
  arma::mat query(stretchedH.n_rows, users.n_elem);

  // Select feature vectors of queried users.
  for (size_t i = 0; i < users.n_elem; i++)
    query.col(i) = stretchedH.col(users(i));

  // Temporary storage for neighborhood of the queried users.
  arma::Mat<size_t> neighborhood;

  // Calculate the neighborhood of the queried users.
  // This should be a templatized option.
  neighbor::KNN a(stretchedH);
  arma::mat resultingDistances; // Temporary storage.

  a.Search(query, numUsersForSimilarity, neighborhood, resultingDistances);

  // Generate recommendations for each query user by finding the maximum
  // numRecs elements in the averages matrix.
  recommendations.set_size(numRecs, users.n_elem);
  arma::mat values(numRecs, users.n_elem);

  for (size_t i = 0; i < users.n_elem; i++)
  {
    // First, calculate average of neighborhood values.
    arma::vec averages;
    averages.zeros(cleanedData.n_rows);

    for (size_t j = 0; j < neighborhood.n_rows; ++j)
      averages += w * h.col(neighborhood(j, i));
    averages /= neighborhood.n_rows;

    // Let's build the list of candidate recommendations for the given user.
    // Default candidate: the smallest possible value and invalid item number.
    const Candidate def = std::make_pair(-DBL_MAX, cleanedData.n_rows);
    std::vector<Candidate> vect(numRecs, def);
    typedef std::priority_queue<Candidate, std::vector<Candidate>,
        CandidateCmp> CandidateList;
    CandidateList pqueue(CandidateCmp(), std::move(vect));

    // Look through the averages column corresponding to the current user.
    for (size_t j = 0; j < averages.n_rows; ++j)
    {
      // Ensure that the user hasn't already rated the item.
      if (cleanedData(j, users(i)) != 0.0)
        continue; // The user already rated the item.

      // Is the estimated value better than the worst candidate?
      if (averages[j] > pqueue.top().first)
      {
        Candidate c = std::make_pair(averages[j], j);
        pqueue.pop();
        pqueue.push(c);
      }
    }

    for (size_t p = 1; p <= numRecs; p++)
    {
      recommendations(numRecs - p, i) = pqueue.top().second;
      values(numRecs - p, i) = pqueue.top().first;
      pqueue.pop();
    }

    // If we were not able to come up with enough recommendations, issue a
    // warning.
    if (recommendations(numRecs - 1, i) == def.second)
      Log::Warn << "Could not provide " << numRecs << " recommendations "
          << "for user " << users(i) << " (not enough un-rated items)!"
          << std::endl;
  }
}

// Predict the rating for a single user/item combination.
double CF::Predict(const size_t user, const size_t item) const
{
  // First, we need to find the nearest neighbors of the given user.
  // We'll use the same technique as for GetRecommendations().

  // We want to avoid calculating the full rating matrix, so we will do
  // nearest neighbor search only on the H matrix, using the observation that
  // if the rating matrix X = W*H, then d(X.col(i), X.col(j)) = d(W H.col(i),
  // W H.col(j)).  This can be seen as nearest neighbor search on the H
  // matrix with the Mahalanobis distance where M^{-1} = W^T W.  So, we'll
  // decompose M^{-1} = L L^T (the Cholesky decomposition), and then multiply
  // H by L^T.  Then we can perform nearest neighbor search.
  arma::mat l = arma::chol(w.t() * w);
  arma::mat stretchedH = l * h; // Due to the Armadillo API, l is L^T.

  // Now, we will use the decomposed w and h matrices to estimate what the
  // user would have rated items as, and then pick the best items.

  // Temporarily store feature vector of queried users.
  arma::mat query = stretchedH.col(user);

  // Temporary storage for neighborhood of the queried users.
  arma::Mat<size_t> neighborhood;

  // Calculate the neighborhood of the queried users.
  // This should be a templatized option.
  neighbor::KNN a(stretchedH, neighbor::SINGLE_TREE_MODE);
  arma::mat resultingDistances; // Temporary storage.
  a.Search(query, numUsersForSimilarity, neighborhood, resultingDistances);

  double rating = 0; // We'll take the average of neighborhood values.

  for (size_t j = 0; j < neighborhood.n_rows; ++j)
    rating += arma::as_scalar(w.row(item) * h.col(neighborhood(j, 0)));
  rating /= neighborhood.n_rows;

  return rating;
}

// Predict the rating for a group of user/item combinations.
void CF::Predict(const arma::Mat<size_t>& combinations,
                 arma::vec& predictions) const
{
  // First, for nearest neighbor search, stretch the H matrix.
  arma::mat l = arma::chol(w.t() * w);
  arma::mat stretchedH = l * h; // Due to the Armadillo API, l is L^T.

  // Now, we must determine those query indices we need to find the nearest
  // neighbors for.  This is easiest if we just sort the combinations matrix.
  arma::Mat<size_t> sortedCombinations(combinations.n_rows,
      combinations.n_cols);
  arma::uvec ordering = arma::sort_index(combinations.row(0).t());
  for (size_t i = 0; i < ordering.n_elem; ++i)
    sortedCombinations.col(i) = combinations.col(ordering[i]);

  // Now, we have to get the list of unique users we will be searching for.
  arma::Col<size_t> users = arma::unique(combinations.row(0).t());

  // Assemble our query matrix from the stretchedH matrix.
  arma::mat queries(stretchedH.n_rows, users.n_elem);
  for (size_t i = 0; i < queries.n_cols; ++i)
    queries.col(i) = stretchedH.col(users[i]);

  // Now calculate the neighborhood of these users.
  neighbor::KNN a(stretchedH);
  arma::mat distances;
  arma::Mat<size_t> neighborhood;

  a.Search(queries, numUsersForSimilarity, neighborhood, distances);

  // Now that we have the neighborhoods we need, calculate the predictions.
  predictions.set_size(combinations.n_cols);

  size_t user = 0; // Cumulative user count, because we are doing it in order.
  for (size_t i = 0; i < sortedCombinations.n_cols; ++i)
  {
    // Could this be made faster by calculating dot products for multiple
    // items at once?
    double rating = 0.0;

    // Map the combination's user to the user ID used for kNN.
    while (users[user] < sortedCombinations(0, i))
      ++user;

    for (size_t j = 0; j < neighborhood.n_rows; ++j)
      rating += arma::as_scalar(w.row(sortedCombinations(1, i)) *
          h.col(neighborhood(j, user)));
    rating /= neighborhood.n_rows;

    predictions(ordering[i]) = rating;
  }
}

void CF::CleanData(const arma::mat& data, arma::sp_mat& cleanedData)
{
  // Generate list of locations for batch insert constructor for sparse
  // matrices.
  arma::umat locations(2, data.n_cols);
  arma::vec values(data.n_cols);
  for (size_t i = 0; i < data.n_cols; ++i)
  {
    // We have to transpose it because items are rows, and users are columns.
    locations(1, i) = ((arma::uword) data(0, i));
    locations(0, i) = ((arma::uword) data(1, i));
    values(i) = data(2, i);
    if (values(i) == 0)
      Log::Warn << "User rating of 0 ignored for user " << locations(1, i)
          << ", item " << locations(0, i) << "." << std::endl;
  }

  // Find maximum user and item IDs.
  const size_t maxItemID = (size_t) max(locations.row(0)) + 1;
  const size_t maxUserID = (size_t) max(locations.row(1)) + 1;

  // Fill sparse matrix.
  cleanedData = arma::sp_mat(locations, values, maxItemID, maxUserID);
}
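// For illustration, a hypothetical coordinate list with three ratings
//   users:   0 0 1
//   items:   1 2 2
//   ratings: 5 3 1
// becomes a sparse 3 x 2 (items x users) matrix with entries
//   (1, 0) = 5, (2, 0) = 3, and (2, 1) = 1.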
} // namespace cf
} // namespace mlpack

mlpack-2.2.5/src/mlpack/methods/cf/cf.hpp

/**
 * @file cf.hpp
 * @author Mudit Raj Gupta
 * @author Sumedh Ghaisas
 *
 * Collaborative filtering.
 *
 * Defines the CF class to perform collaborative filtering on the specified
 * data set using alternating least squares (ALS).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_CF_CF_HPP
#define MLPACK_METHODS_CF_CF_HPP

#include <mlpack/core.hpp>
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>
#include <mlpack/methods/amf/amf.hpp>
#include <mlpack/methods/amf/update_rules/nmf_als.hpp>
#include <mlpack/methods/amf/termination_policies/simple_residue_termination.hpp>
#include <boost/utility.hpp>
#include <utility>
#include <vector>

namespace mlpack {
namespace cf /** Collaborative filtering. */ {

/**
 * Template class for factorizer traits.  This stores the default values for
 * the variables to be assumed for a given factorizer.  If any of the
 * factorizers needs to have a different value for the traits, a template
 * specialization has to be written for that factorizer.  An example can be
 * found in the module for Regularized SVD.
 */
template<typename FactorizerType>
struct FactorizerTraits
{
  /**
   * If true, then the passed data matrix is used for factorizer.Apply().
   * Otherwise, it is modified into a form suitable for factorization.
   */
  static const bool UsesCoordinateList = false;
};

/**
 * This class implements Collaborative Filtering (CF).  This implementation
 * presently supports Alternating Least Squares (ALS) for collaborative
 * filtering.
 *
 * A simple example of how to run Collaborative Filtering is shown below.
 *
 * @code
 * extern arma::mat data; // (user, item, rating) table
 * extern arma::Col<size_t> users; // users seeking recommendations
 * arma::Mat<size_t> recommendations; // Recommendations
 *
 * CF cf(data); // Default options.
 *
 * // Generate 10 recommendations for all users.
 * cf.GetRecommendations(10, recommendations);
 *
 * // Generate 10 recommendations for specified users.
 * cf.GetRecommendations(10, recommendations, users);
 *
 * @endcode
 *
 * The data matrix is a (user, item, rating) table.  Each column in the
 * matrix corresponds to a single rating and has three rows: the first
 * represents the user, the second represents the item, and the third
 * represents the rating.  The user and item, while they are in a matrix that
 * holds doubles, should hold integer (or size_t) values.  The user and item
 * indices are assumed to start at 0.
 *
 * @tparam FactorizerType The type of matrix factorization to use to
 *     decompose the rating matrix (a W and H matrix).  This must implement
 *     the method Apply(arma::sp_mat& data, size_t rank, arma::mat& W,
 *     arma::mat& H).
 */
class CF
{
 public:
  /**
   * Initialize the CF object without performing any factorization.  Be sure
   * to call Train() before calling GetRecommendations() or any other
   * functions!
   */
  CF(const size_t numUsersForSimilarity = 5,
     const size_t rank = 0);

  /**
   * Initialize the CF object using an instantiated factorizer, immediately
   * factorizing the given data to create a model.  There are parameters that
   * can be set; default values are provided for each of them.  If the rank
   * is left unset (or is set to 0), a simple density-based heuristic will be
   * used to choose a rank.
   *
   * The provided dataset should be a coordinate list; that is, a 3-row
   * matrix where each column corresponds to a (user, item, rating) entry in
   * the matrix.
   *
   * @param data Data matrix: coordinate list or dense matrix.
   * @param factorizer Instantiated factorizer object.
   * @param numUsersForSimilarity Size of the neighborhood.
   * @param rank Rank parameter for matrix factorization.
   */
  template<typename FactorizerType = amf::NMFALSFactorizer>
  CF(const arma::mat& data,
     FactorizerType factorizer = FactorizerType(),
     const size_t numUsersForSimilarity = 5,
     const size_t rank = 0);

  /**
   * Initialize the CF object using an instantiated factorizer, immediately
   * factorizing the given data to create a model.  There are parameters that
   * can be set; default values are provided for each of them.  If the rank
   * is left unset (or is set to 0), a simple density-based heuristic will be
   * used to choose a rank.  Data will be considered in the format of items
   * vs. users and will be passed directly to the factorizer without
   * cleaning.  This overload of the constructor will only be available if
   * the factorizer does not use a coordinate list (i.e. if
   * UsesCoordinateList is false); the SFINAE parameter enforces that.
   *
   * @param data Sparse matrix data.
   * @param factorizer Instantiated factorizer object.
   * @param numUsersForSimilarity Size of the neighborhood.
   * @param rank Rank parameter for matrix factorization.
   */
  template<typename FactorizerType = amf::NMFALSFactorizer>
  CF(const arma::sp_mat& data,
     FactorizerType factorizer = FactorizerType(),
     const size_t numUsersForSimilarity = 5,
     const size_t rank = 0,
     const typename boost::disable_if_c<FactorizerTraits<
         FactorizerType>::UsesCoordinateList>::type* = 0);
  /**
   * Train the CF model (i.e. factorize the input matrix) using the
   * parameters that have already been set for the model (specifically, the
   * rank parameter), and optionally, using the given FactorizerType.
   *
   * @param data Input dataset; coordinate list or dense matrix.
   * @param factorizer Instantiated factorizer.
   */
  template<typename FactorizerType = amf::NMFALSFactorizer>
  void Train(const arma::mat& data,
             FactorizerType factorizer = FactorizerType());

  /**
   * Train the CF model (i.e. factorize the input matrix) using the
   * parameters that have already been set for the model (specifically, the
   * rank parameter), and optionally, using the given FactorizerType.
   *
   * @param data Sparse matrix data.
   * @param factorizer Instantiated factorizer.
   */
  template<typename FactorizerType = amf::NMFALSFactorizer>
  void Train(const arma::sp_mat& data,
             FactorizerType factorizer = FactorizerType(),
             const typename boost::disable_if_c<FactorizerTraits<
                 FactorizerType>::UsesCoordinateList>::type* = 0);

  //! Sets number of users for calculating similarity.
  void NumUsersForSimilarity(const size_t num)
  {
    if (num < 1)
    {
      Log::Warn << "CF::NumUsersForSimilarity(): invalid value (< 1) "
          "ignored." << std::endl;
      return;
    }
    this->numUsersForSimilarity = num;
  }

  //! Gets number of users for calculating similarity.
  size_t NumUsersForSimilarity() const { return numUsersForSimilarity; }

  //! Sets rank parameter for matrix factorization.
  void Rank(const size_t rankValue) { this->rank = rankValue; }

  //! Gets rank parameter for matrix factorization.
  size_t Rank() const { return rank; }

  //! Get the User Matrix.
  const arma::mat& W() const { return w; }
  //! Get the Item Matrix.
  const arma::mat& H() const { return h; }
  //! Get the cleaned data matrix.
  const arma::sp_mat& CleanedData() const { return cleanedData; }

  /**
   * Generates the given number of recommendations for all users.
   *
   * @param numRecs Number of Recommendations
   * @param recommendations Matrix to save recommendations into.
   */
  void GetRecommendations(const size_t numRecs,
                          arma::Mat<size_t>& recommendations);

  /**
   * Generates the given number of recommendations for the specified users.
   *
   * @param numRecs Number of Recommendations
   * @param recommendations Matrix to save recommendations
   * @param users Users for which recommendations are to be generated
   */
  void GetRecommendations(const size_t numRecs,
                          arma::Mat<size_t>& recommendations,
                          arma::Col<size_t>& users);

  //! Converts the User, Item, Value Matrix to User-Item Table
  static void CleanData(const arma::mat& data, arma::sp_mat& cleanedData);

  /**
   * Predict the rating of an item by a particular user.
   *
   * @param user User to predict for.
   * @param item Item to predict for.
   */
  double Predict(const size_t user, const size_t item) const;

  /**
   * Predict ratings for each user-item combination in the given coordinate
   * list matrix.  The matrix 'combinations' should have two rows and number
   * of columns equal to the number of desired predictions.  The first
   * element of each column corresponds to the user index, and the second
   * element of each column corresponds to the item index.  The output vector
   * 'predictions' will have length equal to combinations.n_cols, and
   * predictions[i] will be equal to the prediction for the user/item
   * combination in combinations.col(i).
   *
   * @param combinations User/item combinations to predict.
   * @param predictions Predicted ratings for each user/item combination.
   */
  void Predict(const arma::Mat<size_t>& combinations,
               arma::vec& predictions) const;
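  // A minimal usage sketch for batch prediction (the index values below are
  // illustrative only):
  //
  // @code
  // arma::Mat<size_t> combinations(2, 2);
  // combinations(0, 0) = 0; combinations(1, 0) = 7; // user 0, item 7
  // combinations(0, 1) = 3; combinations(1, 1) = 2; // user 3, item 2
  // arma::vec predictions;
  // cf.Predict(combinations, predictions); // predictions.n_elem == 2
  // @endcode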
  /**
   * Serialize the CF model to the given archive.
   */
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */);

 private:
  //! Number of users for similarity.
  size_t numUsersForSimilarity;
  //! Rank used for matrix factorization.
  size_t rank;
  //! User matrix.
  arma::mat w;
  //! Item matrix.
  arma::mat h;
  //! Cleaned data matrix.
  arma::sp_mat cleanedData;

  //! Candidate represents a possible recommendation (value, item).
  typedef std::pair<double, size_t> Candidate;

  //! Compare two candidates based on the value.
  struct CandidateCmp
  {
    bool operator()(const Candidate& c1, const Candidate& c2)
    {
      return c1.first > c2.first;
    };
  };
}; // class CF

} // namespace cf
} // namespace mlpack

// Include implementation of templated functions.
#include "cf_impl.hpp"

#endif

mlpack-2.2.5/src/mlpack/methods/cf/cf_impl.hpp

/**
 * @file cf_impl.hpp
 * @author Mudit Raj Gupta
 * @author Sumedh Ghaisas
 *
 * Collaborative Filtering.
 *
 * Implementation of CF class to perform Collaborative Filtering on the
 * specified data set.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_CF_CF_IMPL_HPP
#define MLPACK_METHODS_CF_CF_IMPL_HPP

// In case it hasn't been included yet.
#include "cf.hpp"

namespace mlpack {
namespace cf {

// Apply the factorizer when a coordinate list is used.
template<typename FactorizerType>
void ApplyFactorizer(FactorizerType& factorizer,
                     const arma::mat& data,
                     const arma::sp_mat& /* cleanedData */,
                     const size_t rank,
                     arma::mat& w,
                     arma::mat& h,
                     const typename boost::enable_if_c<FactorizerTraits<
                         FactorizerType>::UsesCoordinateList>::type* = 0)
{
  factorizer.Apply(data, rank, w, h);
}

// Apply the factorizer when coordinate lists are not used.
template<typename FactorizerType>
void ApplyFactorizer(FactorizerType& factorizer,
                     const arma::mat& /* data */,
                     const arma::sp_mat& cleanedData,
                     const size_t rank,
                     arma::mat& w,
                     arma::mat& h,
                     const typename boost::disable_if_c<FactorizerTraits<
                         FactorizerType>::UsesCoordinateList>::type* = 0)
{
  factorizer.Apply(cleanedData, rank, w, h);
}

/**
 * Construct the CF object using an instantiated factorizer.
 */
template<typename FactorizerType>
CF::CF(const arma::mat& data,
       FactorizerType factorizer,
       const size_t numUsersForSimilarity,
       const size_t rank) :
    numUsersForSimilarity(numUsersForSimilarity),
    rank(rank)
{
  // Validate neighbourhood size.
  if (numUsersForSimilarity < 1)
  {
    Log::Warn << "CF::CF(): neighbourhood size should be > 0 ("
        << numUsersForSimilarity << " given). Setting value to 5.\n";
    // Set default value of 5.
    this->numUsersForSimilarity = 5;
  }

  Train(data, factorizer);
}

/**
 * Construct the CF object using an instantiated factorizer.
 */
template<typename FactorizerType>
CF::CF(const arma::sp_mat& data,
       FactorizerType factorizer,
       const size_t numUsersForSimilarity,
       const size_t rank,
       const typename boost::disable_if_c<FactorizerTraits<
           FactorizerType>::UsesCoordinateList>::type*) :
    numUsersForSimilarity(numUsersForSimilarity),
    rank(rank)
{
  // Validate neighbourhood size.
  if (numUsersForSimilarity < 1)
  {
    Log::Warn << "CF::CF(): neighbourhood size should be > 0 ("
        << numUsersForSimilarity << " given). Setting value to 5.\n";
    // Set default value of 5.
    this->numUsersForSimilarity = 5;
  }

  Train(data, factorizer);
}

template<typename FactorizerType>
void CF::Train(const arma::mat& data, FactorizerType factorizer)
{
  CleanData(data, cleanedData);

  // Check if the user wanted us to choose a rank for them.
  if (rank == 0)
  {
    // This is a simple heuristic that picks a rank based on the density of
    // the dataset between 5 and 105.
    const double density = (cleanedData.n_nonzero * 100.0) /
        cleanedData.n_elem;
    const size_t rankEstimate = size_t(density) + 5;

    // Set to heuristic value.
    Log::Info << "No rank given for decomposition; using rank of "
        << rankEstimate << " calculated by density-based heuristic."
        << std::endl;
    this->rank = rankEstimate;
  }

  // Decompose the data matrix (which is in coordinate list form) to user and
  // data matrices.
  Timer::Start("cf_factorization");
  ApplyFactorizer(factorizer, data, cleanedData, this->rank, w, h);
  Timer::Stop("cf_factorization");
}
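// As a concrete example of the heuristic above: a 1000 x 1000 ratings matrix
// with 20000 nonzero entries has density (20000 * 100.0) / 1000000 = 2, so
// the chosen rank would be size_t(2) + 5 = 7.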
"\n\n" "A set of query users for which recommendations can be generated may be " "specified with the --query_file (-q) option; alternately, recommendations " "may be generated for every user in the dataset by specifying the " "--all_user_recommendations (-A) option. In addition, the number of " "recommendations per user to generate can be specified with the " "--recommendations (-r) parameter, and the number of similar users (the " "size of the neighborhood) to be considered when generating recommendations" " can be specified with the --neighborhood (-n) option." "\n\n" "For performing the matrix decomposition, the following optimization " "algorithms can be specified via the --algorithm (-a) parameter: " "\n" "'RegSVD' -- Regularized SVD using a SGD optimizer\n" "'NMF' -- Non-negative matrix factorization with alternating least squares " "update rules\n" "'BatchSVD' -- SVD batch learning\n" "'SVDIncompleteIncremental' -- SVD incomplete incremental learning\n" "'SVDCompleteIncremental' -- SVD complete incremental learning\n" "\n" "A trained model may be saved to a file with the --output_model_file (-M) " "parameter."); // Parameters for training a model. PARAM_STRING_IN("training_file", "Input dataset to perform CF on.", "t", ""); PARAM_STRING_IN("algorithm", "Algorithm used for matrix factorization.", "a", "NMF"); PARAM_INT_IN("neighborhood", "Size of the neighborhood of similar users to " "consider for each query user.", "n", 5); PARAM_INT_IN("rank", "Rank of decomposed matrices (if 0, a heuristic is used to" " estimate the rank).", "R", 0); PARAM_STRING_IN("test_file", "Test set to calculate RMSE on.", "T", ""); // Offer the user the option to set the maximum number of iterations, and // terminate only based on the number of iterations. PARAM_INT_IN("max_iterations", "Maximum number of iterations.", "N", 1000); PARAM_FLAG("iteration_only_termination", "Terminate only when the maximum " "number of iterations is reached.", "I"); PARAM_DOUBLE_IN("min_residue", "Residue required to terminate the factorization" " (lower values generally mean better fits).", "r", 1e-5); // Load/save a model. PARAM_STRING_IN("input_model_file", "File to load trained CF model from.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained CF model to.", "M"); // Query settings. PARAM_STRING_IN("query_file", "List of users for which recommendations are to " "be generated.", "q", ""); PARAM_FLAG("all_user_recommendations", "Generate recommendations for all " "users.", "A"); PARAM_STRING_OUT("output_file","File to save output recommendations to.", "o"); PARAM_INT_IN("recommendations", "Number of recommendations to generate for each" " query user.", "c", 5); PARAM_INT_IN("seed", "Set the random seed (0 uses std::time(NULL)).", "s", 0); void ComputeRecommendations(CF& cf, const size_t numRecs, arma::Mat& recommendations) { // Reading users. const string queryFile = CLI::GetParam("query_file"); if (queryFile != "") { // User matrix. arma::Mat userTmp; arma::Col users; data::Load(queryFile, userTmp, true, false /* Don't transpose. */); users = userTmp.col(0); Log::Info << "Generating recommendations for " << users.n_elem << " users " << "in '" << queryFile << "'." << endl; cf.GetRecommendations(numRecs, recommendations, users); } else { Log::Info << "Generating recommendations for all users." << endl; cf.GetRecommendations(numRecs, recommendations); } } void ComputeRMSE(CF& cf) { // Now, compute each test point. 
void ComputeRecommendations(CF& cf,
                            const size_t numRecs,
                            arma::Mat<size_t>& recommendations)
{
  // Reading users.
  const string queryFile = CLI::GetParam<string>("query_file");
  if (queryFile != "")
  {
    // User matrix.
    arma::Mat<size_t> userTmp;
    arma::Col<size_t> users;
    data::Load(queryFile, userTmp, true, false /* Don't transpose. */);
    users = userTmp.col(0);

    Log::Info << "Generating recommendations for " << users.n_elem << " users "
        << "in '" << queryFile << "'." << endl;
    cf.GetRecommendations(numRecs, recommendations, users);
  }
  else
  {
    Log::Info << "Generating recommendations for all users." << endl;
    cf.GetRecommendations(numRecs, recommendations);
  }
}

void ComputeRMSE(CF& cf)
{
  // Now, compute each test point.
  const string testFile = CLI::GetParam<string>("test_file");
  arma::mat testData;
  data::Load(testFile, testData, true);

  // Assemble the combination matrix to get RMSE value.
  arma::Mat<size_t> combinations(2, testData.n_cols);
  for (size_t i = 0; i < testData.n_cols; ++i)
  {
    combinations(0, i) = size_t(testData(0, i));
    combinations(1, i) = size_t(testData(1, i));
  }

  // Now compute the RMSE.
  arma::vec predictions;
  cf.Predict(combinations, predictions);

  // Compute the root of the sum of the squared errors, divide by the number
  // of points to get the RMSE.  It turns out this is just the L2-norm
  // divided by the square root of the number of points, if we interpret the
  // predictions and the true values as vectors.
  const double rmse = arma::norm(predictions - testData.row(2).t(), 2) /
      std::sqrt((double) testData.n_cols);

  Log::Info << "RMSE is " << rmse << "." << endl;
}

void PerformAction(CF& c)
{
  if (CLI::HasParam("query_file") ||
      CLI::HasParam("all_user_recommendations"))
  {
    // Get parameters for generating recommendations.
    const size_t numRecs = (size_t) CLI::GetParam<int>("recommendations");

    // Get the recommendations.
    arma::Mat<size_t> recommendations;
    ComputeRecommendations(c, numRecs, recommendations);

    // Save the output.
    const string outputFile = CLI::GetParam<string>("output_file");
    if (outputFile != "")
      data::Save(outputFile, recommendations);
  }

  if (CLI::HasParam("test_file"))
    ComputeRMSE(c);

  if (CLI::HasParam("output_model_file"))
    data::Save(CLI::GetParam<string>("output_model_file"), "cf_model", c);
}

template<typename Factorizer>
void PerformAction(Factorizer&& factorizer,
                   arma::mat& dataset,
                   const size_t rank)
{
  // Parameters for generating the CF object.
  const size_t neighborhood = (size_t) CLI::GetParam<int>("neighborhood");
  CF c(dataset, factorizer, neighborhood, rank);

  PerformAction(c);
}

void AssembleFactorizerType(const std::string& algorithm,
                            arma::mat& dataset,
                            const bool maxIterationTermination,
                            const size_t rank)
{
  const size_t maxIterations = (size_t) CLI::GetParam<int>("max_iterations");
  if (maxIterationTermination)
  {
    // Force termination when maximum number of iterations reached.
    MaxIterationTermination mit(maxIterations);
    if (algorithm == "NMF")
    {
      typedef AMF<MaxIterationTermination, RandomInitialization,
          NMFALSUpdate> FactorizerType;
      PerformAction(FactorizerType(mit), dataset, rank);
    }
    else if (algorithm == "BatchSVD")
    {
      typedef AMF<MaxIterationTermination, RandomInitialization,
          SVDBatchLearning> FactorizerType;
      PerformAction(FactorizerType(mit), dataset, rank);
    }
    else if (algorithm == "SVDIncompleteIncremental")
    {
      typedef AMF<MaxIterationTermination, RandomInitialization,
          SVDIncompleteIncrementalLearning> FactorizerType;
      PerformAction(FactorizerType(mit), dataset, rank);
    }
    else if (algorithm == "SVDCompleteIncremental")
    {
      typedef AMF<MaxIterationTermination, RandomInitialization,
          SVDCompleteIncrementalLearning<arma::sp_mat>> FactorizerType;
      PerformAction(FactorizerType(mit), dataset, rank);
    }
    else if (algorithm == "RegSVD")
    {
      Log::Fatal << "--iteration_only_termination not supported with 'RegSVD' "
          << "algorithm!" << endl;
    }
  }
  else
  {
    // Use default termination (SimpleResidueTermination), but set the
    // maximum number of iterations.
    const double minResidue = CLI::GetParam<double>("min_residue");
    SimpleResidueTermination srt(minResidue, maxIterations);
    if (algorithm == "NMF")
      PerformAction(NMFALSFactorizer(srt), dataset, rank);
    else if (algorithm == "BatchSVD")
      PerformAction(SVDBatchFactorizer(srt), dataset, rank);
    else if (algorithm == "SVDIncompleteIncremental")
      PerformAction(SparseSVDIncompleteIncrementalFactorizer(srt), dataset,
          rank);
    else if (algorithm == "SVDCompleteIncremental")
      PerformAction(SparseSVDCompleteIncrementalFactorizer(srt), dataset,
          rank);
    else if (algorithm == "RegSVD")
      PerformAction(RegularizedSVD<>(maxIterations), dataset, rank);
  }
}

int main(int argc, char** argv)
{
  // Parse command line options.
  CLI::ParseCommandLine(argc, argv);

  if (CLI::GetParam<int>("seed") == 0)
    math::RandomSeed(std::time(NULL));
  else
    math::RandomSeed(CLI::GetParam<int>("seed"));

  // Validate parameters.
  if (CLI::HasParam("training_file") && CLI::HasParam("input_model_file"))
    Log::Fatal << "Only one of --training_file (-t) or --input_model_file "
        << "(-m) may be specified!" << endl;

  if (!CLI::HasParam("training_file") && !CLI::HasParam("input_model_file"))
    Log::Fatal << "Neither --training_file (-t) nor --input_model_file (-m) "
        << "are specified!" << endl;

  // Check that nothing stupid is happening.
  if (CLI::HasParam("query_file") &&
      CLI::HasParam("all_user_recommendations"))
    Log::Fatal << "Both --query_file and --all_user_recommendations are "
        << "given, but only one is allowed!" << endl;

  if (!CLI::HasParam("output_file") && !CLI::HasParam("output_model_file"))
    Log::Warn << "Neither --output_file nor --output_model_file are "
        << "specified; no output will be saved." << endl;

  if (CLI::HasParam("output_file") && (!CLI::HasParam("query_file") &&
      !CLI::HasParam("all_user_recommendations")))
    Log::Warn << "--output_file is ignored because neither --query_file nor "
        << "--all_user_recommendations are specified." << endl;

  // Either load from a model, or train a model.
  if (CLI::HasParam("training_file"))
  {
    // Read from the input file.
    const string trainingFile = CLI::GetParam<string>("training_file");
    arma::mat dataset;
    data::Load(trainingFile, dataset, true);

    // Recommendation matrix.
    arma::Mat<size_t> recommendations;

    // Get parameters.
    const size_t rank = (size_t) CLI::GetParam<int>("rank");

    // Perform decomposition to prepare for recommendations.
    Log::Info << "Performing CF matrix decomposition on dataset..." << endl;

    const string algo = CLI::GetParam<string>("algorithm");

    // Issue an error if an invalid factorizer is used.
    if (algo != "NMF" && algo != "BatchSVD" &&
        algo != "SVDIncompleteIncremental" &&
        algo != "SVDCompleteIncremental" && algo != "RegSVD")
      Log::Fatal << "Invalid decomposition algorithm.  Choices are 'NMF', "
          << "'BatchSVD', 'SVDIncompleteIncremental', "
          << "'SVDCompleteIncremental', and 'RegSVD'." << endl;

    // Issue a warning if the user provided a minimum residue but it will be
    // ignored.
    if (CLI::HasParam("min_residue") &&
        CLI::HasParam("iteration_only_termination"))
      Log::Warn << "--min_residue ignored, because "
          << "--iteration_only_termination is specified." << endl;

    // Perform the factorization and do whatever the user wanted.
    AssembleFactorizerType(algo, dataset,
        CLI::HasParam("iteration_only_termination"), rank);
  }
  else
  {
    // Load an input model.
    CF c;
    data::Load(CLI::GetParam<string>("input_model_file"), "cf_model", c,
        true);

    PerformAction(c);
  }
}

mlpack-2.2.5/src/mlpack/methods/cf/svd_wrapper.hpp

/**
 * @file svd_wrapper.hpp
 * @author Sumedh Ghaisas
 *
 * Wrapper class for SVD factorizers used for Collaborative Filtering.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_SVDWRAPPER_HPP
#define MLPACK_METHODS_SVDWRAPPER_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace cf {

/**
 * This class acts as a dummy class for passing as template parameter.
 * Passing this class as a template parameter to class SVDWrapper will force
 * SVDWrapper to use Armadillo's SVD implementation.
 */
class DummyClass {};

/**
 * This class acts as the wrapper for all SVD factorizers which are
 * incompatible with the CF module.  Normally, SVD factorizers implement an
 * Apply method which takes a matrix V and factorizes it into P, sigma and Q
 * where V = P * sigma * trans(Q).  But the CF module requires the
 * factorization to be V = W * H.  This class multiplies P and sigma and
 * takes the first 'r' eigenvectors out, where 'r' is the rank of the
 * factorization.  The Q matrix is transposed and trimmed to support the rank
 * of the factorization.  The Factorizer class should implement an Apply
 * method which takes matrices P, sigma, Q and V as its parameters,
 * respectively.
 */
template<class Factorizer = DummyClass>
class SVDWrapper
{
 public:
  // Empty constructor.
  SVDWrapper(const Factorizer& factorizer = Factorizer()) :
      factorizer(factorizer) {};

  /**
   * Factorizer function which takes the SVD of the given matrix and returns
   * the Frobenius norm of the error.
   *
   * @param V input matrix
   * @param W first unitary matrix
   * @param sigma eigenvalue matrix
   * @param H second unitary matrix
   *
   * @note V = W * sigma * arma::trans(H)
   */
  double Apply(const arma::mat& V,
               arma::mat& W,
               arma::mat& sigma,
               arma::mat& H) const;

  /**
   * Factorizer function which computes the SVD and returns matrices as
   * required by the CF module.
   *
   * @param V input matrix
   * @param W first unitary matrix
   * @param H second unitary matrix
   *
   * @note V = W * H
   */
  double Apply(const arma::mat& V,
               size_t r,
               arma::mat& W,
               arma::mat& H) const;

 private:
  //! svd factorizer
  Factorizer factorizer;
}; // class SVDWrapper

//! add simple typedefs
typedef SVDWrapper<DummyClass> ArmaSVDFactorizer;

//! include the implementation
#include "svd_wrapper_impl.hpp"

} // namespace cf
} // namespace mlpack

#endif

mlpack-2.2.5/src/mlpack/methods/cf/svd_wrapper_impl.hpp

/**
 * @file svd_wrapper_impl.hpp
 * @author Sumedh Ghaisas
 *
 * Implementation of the SVD wrapper class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
template<class Factorizer>
double mlpack::cf::SVDWrapper<Factorizer>::Apply(const arma::mat& V,
                                                 arma::mat& W,
                                                 arma::mat& sigma,
                                                 arma::mat& H) const
{
  // Get the SVD factorization.
  arma::vec E;
  factorizer.Apply(W, E, H, V);

  // Construct the sigma matrix.
  sigma.zeros(V.n_rows, V.n_cols);
  for (size_t i = 0; i < sigma.n_rows && i < sigma.n_cols; i++)
    sigma(i, i) = E(i, 0);

  arma::mat V_rec = W * sigma * arma::trans(H);

  // Return normalized Frobenius error.
  return arma::norm(V - V_rec, "fro") / arma::norm(V, "fro");
}

template<>
double mlpack::cf::SVDWrapper<mlpack::cf::DummyClass>::Apply(
    const arma::mat& V,
    arma::mat& W,
    arma::mat& sigma,
    arma::mat& H) const
{
  // Get the SVD factorization.
  arma::vec E;
  arma::svd(W, E, H, V);

  // Construct the sigma matrix.
  sigma.zeros(V.n_rows, V.n_cols);
  for (size_t i = 0; i < sigma.n_rows && i < sigma.n_cols; i++)
    sigma(i, i) = E(i, 0);

  arma::mat V_rec = W * sigma * arma::trans(H);

  // Return normalized Frobenius error.
  return arma::norm(V - V_rec, "fro") / arma::norm(V, "fro");
}
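// A minimal usage sketch of the rank-r overload defined below (the matrix
// sizes and rank are illustrative only):
//
// @code
// arma::mat V = arma::randu<arma::mat>(50, 60);
// mlpack::cf::ArmaSVDFactorizer svd;
// arma::mat W, H;
// const double error = svd.Apply(V, 10, W, H); // V is approximately W * H.
// @endcode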
template<class Factorizer>
double mlpack::cf::SVDWrapper<Factorizer>::Apply(const arma::mat& V,
                                                 size_t r,
                                                 arma::mat& W,
                                                 arma::mat& H) const
{
  // Check if the given rank is valid.
  if (r > V.n_rows || r > V.n_cols)
  {
    Log::Info << "Rank " << r << " given for decomposition is invalid."
        << std::endl;

    r = (V.n_rows > V.n_cols) ? V.n_cols : V.n_rows;
    Log::Info << "Setting decomposition rank to " << r << std::endl;
  }

  // Get the SVD factorization.
  arma::vec sigma;
  factorizer.Apply(W, sigma, H, V);

  // Remove the part of W and H depending upon the value of rank.
  W = W.submat(0, 0, W.n_rows - 1, r - 1);
  H = H.submat(0, 0, H.n_cols - 1, r - 1);

  // Take only the required eigenvalues.
  sigma = sigma.subvec(0, r - 1);

  // The eigenvalue matrix is multiplied into W; it could equally well be
  // multiplied into H.
  W = W * arma::diagmat(sigma);

  // Take the transpose of the matrix H as required by the CF module.
  H = arma::trans(H);

  // Reconstruct the matrix.
  arma::mat V_rec = W * H;

  // Return the normalized Frobenius norm.
  return arma::norm(V - V_rec, "fro") / arma::norm(V, "fro");
}

template<>
double mlpack::cf::SVDWrapper<mlpack::cf::DummyClass>::Apply(
    const arma::mat& V,
    size_t r,
    arma::mat& W,
    arma::mat& H) const
{
  // Check if the given rank is valid.
  if (r > V.n_rows || r > V.n_cols)
  {
    Log::Info << "Rank " << r << " given for decomposition is invalid."
        << std::endl;

    r = (V.n_rows > V.n_cols) ? V.n_cols : V.n_rows;
    Log::Info << "Setting decomposition rank to " << r << std::endl;
  }

  // Get the SVD factorization.
  arma::vec sigma;
  arma::svd(W, sigma, H, V);

  // Remove the part of W and H depending upon the value of rank.
  W = W.submat(0, 0, W.n_rows - 1, r - 1);
  H = H.submat(0, 0, H.n_cols - 1, r - 1);

  // Take only the required eigenvalues.
  sigma = sigma.subvec(0, r - 1);

  // The eigenvalue matrix is multiplied into W; it could equally well be
  // multiplied into H.
  W = W * arma::diagmat(sigma);

  // Take the transpose of the matrix H as required by the CF module.
  H = arma::trans(H);

  // Reconstruct the matrix.
  arma::mat V_rec = W * H;

  // Return the normalized Frobenius norm.
  return arma::norm(V - V_rec, "fro") / arma::norm(V, "fro");
}

mlpack-2.2.5/src/mlpack/methods/dbscan/

mlpack-2.2.5/src/mlpack/methods/dbscan/CMakeLists.txt

# Define the files we need to compile
# Anything not in this list will not be compiled into mlpack.
set(SOURCES
  dbscan.hpp
  dbscan_impl.hpp
  random_point_selection.hpp
)

# Add directory name to sources.
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

# Append sources (with directory name) to list of all mlpack sources (used at
# the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)

add_cli_executable(dbscan)

mlpack-2.2.5/src/mlpack/methods/dbscan/dbscan.hpp

/**
 * @file dbscan.hpp
 * @author Ryan Curtin
 *
 * An implementation of the DBSCAN clustering method, which is flexible enough
 * to support other algorithms for finding nearest neighbors.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_DBSCAN_DBSCAN_HPP
#define MLPACK_METHODS_DBSCAN_DBSCAN_HPP

#include <mlpack/core.hpp>
#include <mlpack/methods/range_search/range_search.hpp>
#include <mlpack/methods/emst/union_find.hpp>
#include <vector>
#include "random_point_selection.hpp"
#include <boost/dynamic_bitset.hpp>

namespace mlpack {
namespace dbscan {

/**
 * DBSCAN (Density-Based Spatial Clustering of Applications with Noise) is a
 * clustering technique described in the following paper:
 *
 * @code
 * @inproceedings{ester1996density,
 *   title={A density-based algorithm for discovering clusters in large
 *       spatial databases with noise.},
 *   author={Ester, M. and Kriegel, H.-P. and Sander, J. and Xu, X.},
 *   booktitle={Proceedings of the Second International Conference on
 *       Knowledge Discovery and Data Mining (KDD '96)},
 *   pages={226--231},
 *   year={1996}
 * }
 * @endcode
 *
 * The DBSCAN algorithm iteratively clusters points using range searches with
 * a specified radius parameter.  This implementation allows configuration of
 * the range search technique used and the point selection strategy by means
 * of template parameters.
 *
 * @tparam RangeSearchType Class to use for range searching.
 * @tparam PointSelectionPolicy Strategy for selecting next point to cluster
 *     with.
 */
template<typename RangeSearchType = range::RangeSearch<>,
         typename PointSelectionPolicy = RandomPointSelection>
class DBSCAN
{
 public:
  /**
   * Construct the DBSCAN object with the given parameters.  The batchMode
   * parameter should be set to false in the case where RAM issues will be
   * encountered (i.e. if the dataset is very large or if epsilon is large).
   * When batchMode is false, each point will be searched iteratively, which
   * could be slower but will use less memory.
   *
   * @param epsilon Size of range query.
   * @param minPoints Minimum number of points for each cluster.
   * @param batchMode If true, all points are searched in batch.
   * @param rangeSearch Optional instantiated RangeSearch object.
   * @param pointSelector Optional instantiated PointSelectionPolicy object.
   */
  DBSCAN(const double epsilon,
         const size_t minPoints,
         const bool batchMode = true,
         RangeSearchType rangeSearch = RangeSearchType(),
         PointSelectionPolicy pointSelector = PointSelectionPolicy());

  /**
   * Performs DBSCAN clustering on the data, returning number of clusters
   * and also the centroid of each cluster.
   *
   * @tparam MatType Type of matrix (arma::mat or arma::sp_mat).
   * @param data Dataset to cluster.
   * @param centroids Matrix in which centroids are stored.
   */
  template<typename MatType>
  size_t Cluster(const MatType& data,
                 arma::mat& centroids);

  /**
   * Performs DBSCAN clustering on the data, returning number of clusters
   * and also the list of cluster assignments.
   *
   * @tparam MatType Type of matrix (arma::mat or arma::sp_mat).
   * @param data Dataset to cluster.
   * @param assignments Vector to store cluster assignments.
   */
  template<typename MatType>
  size_t Cluster(const MatType& data,
                 arma::Row<size_t>& assignments);

  /**
   * Performs DBSCAN clustering on the data, returning number of clusters,
   * the centroid of each cluster and also the list of cluster assignments.
   * If assignments[i] == assignments.n_elem - 1, then the point is
   * considered "noise".
   *
   * @tparam MatType Type of matrix (arma::mat or arma::sp_mat).
   * @param data Dataset to cluster.
   * @param assignments Vector to store cluster assignments.
   * @param centroids Matrix in which centroids are stored.
   */
  template<typename MatType>
  size_t Cluster(const MatType& data,
                 arma::Row<size_t>& assignments,
                 arma::mat& centroids);
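  // A minimal usage sketch (the epsilon and minPoints values below are
  // illustrative, not defaults):
  //
  // @code
  // arma::mat dataset = arma::randu<arma::mat>(3, 500);
  // DBSCAN<> d(0.1 /* epsilon */, 5 /* minPoints */);
  //
  // arma::Row<size_t> assignments;
  // const size_t clusters = d.Cluster(dataset, assignments);
  // // assignments[i] == SIZE_MAX marks point i as noise.
  // @endcode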
Whether or not to perform the search in batch mode. If false, single bool batchMode; //! Instantiated range search policy. RangeSearchType rangeSearch; //! Instantiated point selection policy. PointSelectionPolicy pointSelector; /** * Performs DBSCAN clustering on the data, returning the number of clusters and * also the list of cluster assignments. This searches each point iteratively, * and can save on RAM usage. It may be slower than the batch search with a * dual-tree algorithm. * * @param data Dataset to cluster. * @param assignments Assignments for each point. * @param uf UnionFind structure that will be modified. */ template void PointwiseCluster(const MatType& data, emst::UnionFind& uf); /** * Performs DBSCAN clustering on the data, returning number of clusters * and also the list of cluster assignments. This can perform search in batch, * so it is well suited for dual-tree or naive search. * * @param data Dataset to cluster. * @param assignments Assignments for each point. * @param uf UnionFind structure that will be modified. */ template void BatchCluster(const MatType& data, emst::UnionFind& uf); }; } // namespace dbscan } // namespace mlpack // Include implementation. #include "dbscan_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/dbscan/dbscan_impl.hpp000066400000000000000000000136711315013601400227100ustar00rootroot00000000000000/** * @file dbscan_impl.hpp * @author Ryan Curtin * * Implementation of DBSCAN. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DBSCAN_DBSCAN_IMPL_HPP #define MLPACK_METHODS_DBSCAN_DBSCAN_IMPL_HPP #include "dbscan.hpp" namespace mlpack { namespace dbscan { /** * Construct the DBSCAN object with the given parameters. */ template DBSCAN::DBSCAN( const double epsilon, const size_t minPoints, const bool batchMode, RangeSearchType rangeSearch, PointSelectionPolicy pointSelector) : epsilon(epsilon), minPoints(minPoints), batchMode(batchMode), rangeSearch(rangeSearch), pointSelector(pointSelector) { // Nothing to do. } /** * Performs DBSCAN clustering on the data, returning number of clusters * and also the centroid of each cluster. */ template template size_t DBSCAN::Cluster( const MatType& data, arma::mat& centroids) { // These assignments will be thrown away, but there is no way to avoid // calculating them. arma::Row assignments(data.n_cols); assignments.fill(SIZE_MAX); return Cluster(data, assignments, centroids); } /** * Performs DBSCAN clustering on the data, returning number of clusters, * the centroid of each cluster and also the list of cluster assignments. */ template template size_t DBSCAN::Cluster( const MatType& data, arma::Row& assignments, arma::mat& centroids) { const size_t numClusters = Cluster(data, assignments); // Now calculate the centroids. centroids.zeros(data.n_rows, numClusters); // Calculate number of points in each cluster. arma::Row counts; counts.zeros(numClusters); for (size_t i = 0; i < data.n_cols; ++i) { if (assignments[i] != SIZE_MAX) { centroids.col(assignments[i]) += data.col(i); ++counts[assignments[i]]; } } // We should be guaranteed that the number of clusters is always greater than // zero. 
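  // Dividing each centroid column by the number of points assigned to that
  // cluster turns the accumulated per-cluster sums into per-cluster means.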
for (size_t i = 0; i < numClusters; ++i) centroids.col(i) /= counts[i]; return numClusters; } /** * Performs DBSCAN clustering on the data, returning the number of clusters and * also the list of cluster assignments. */ template template size_t DBSCAN::Cluster( const MatType& data, arma::Row& assignments) { // Initialize the UnionFind object. emst::UnionFind uf(data.n_cols); rangeSearch.Train(data); if (batchMode) BatchCluster(data, uf); else PointwiseCluster(data, uf); // Now set assignments. assignments.set_size(data.n_cols); for (size_t i = 0; i < data.n_cols; ++i) assignments[i] = uf.Find(i); // Get a count of all clusters. const size_t numClusters = arma::max(assignments) + 1; arma::Col counts(numClusters, arma::fill::zeros); for (size_t i = 0; i < assignments.n_elem; ++i) counts[assignments[i]]++; // Now assign clusters to new indices. size_t currentCluster = 0; arma::Col newAssignments(numClusters); for (size_t i = 0; i < counts.n_elem; ++i) { if (counts[i] >= minPoints) newAssignments[i] = currentCluster++; else newAssignments[i] = SIZE_MAX; } // Now reassign. for (size_t i = 0; i < assignments.n_elem; ++i) assignments[i] = newAssignments[assignments[i]]; Log::Info << currentCluster << " clusters found." << std::endl; return currentCluster; } /** * Performs DBSCAN clustering on the data, returning the number of clusters and * also the list of cluster assignments. This searches each point iteratively, * and can save on RAM usage. It may be slower than the batch search with a * dual-tree algorithm. */ template template void DBSCAN::PointwiseCluster( const MatType& data, emst::UnionFind& uf) { std::vector> neighbors; std::vector> distances; for (size_t i = 0; i < data.n_cols; ++i) { if (i % 10000 == 0 && i > 0) Log::Info << "DBSCAN clustering on point " << i << "..." << std::endl; // Do the range search for only this point. rangeSearch.Search(data.col(i), math::Range(0.0, epsilon), neighbors, distances); // Union to all neighbors. for (size_t j = 0; j < neighbors[0].size(); ++j) uf.Union(i, neighbors[0][j]); } } /** * Performs DBSCAN clustering on the data, returning number of clusters * and also the list of cluster assignments. This can perform search in batch, * naive search). */ template template void DBSCAN::BatchCluster( const MatType& data, emst::UnionFind& uf) { // For each point, find the points in epsilon-nighborhood and their distances. std::vector> neighbors; std::vector> distances; Log::Info << "Performing range search." << std::endl; rangeSearch.Train(data); rangeSearch.Search(data, math::Range(0.0, epsilon), neighbors, distances); Log::Info << "Range search complete." << std::endl; // Now loop over all points. for (size_t i = 0; i < data.n_cols; ++i) { // Union to all neighbors. for (size_t j = 0; j < neighbors[i].size(); ++j) uf.Union(i, neighbors[i][j]); } } } // namespace dbscan } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/dbscan/dbscan_main.cpp000066400000000000000000000134631315013601400226650ustar00rootroot00000000000000/** * @file dbscan_main.cpp * @author Ryan Curtin * * Implementation of program to run DBSCAN. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include "dbscan.hpp" #include #include #include #include #include using namespace mlpack; using namespace mlpack::range; using namespace mlpack::dbscan; using namespace mlpack::metric; using namespace mlpack::tree; using namespace std; PROGRAM_INFO("DBSCAN clustering", "This program implements the DBSCAN algorithm for clustering using " "accelerated tree-based range search. The type of tree that is used " "may be parameterized, or brute-force range search may also be used." "\n\n" "The input dataset to be clustered may be specified with the --input_file " "option, the radius of each range search may be specified with the " "--epsilon option, and the minimum number of points in a cluster may be " "specified with the --min_size option." "\n\n" "The output of the clustering may be saved as --assignments_file or " "--centroids_file; --assignments_file will save the cluster assignments of " "each point, and --centroids_file will save the centroids of each cluster." "\n\n" "The range search may be controlled with the --tree_type, --single_mode, " "and --naive parameters. The --tree_type parameter can control the type of" " tree used for range search; this can take a variety of values: 'kd', 'r'," " 'r-star', 'x', 'hilbert-r', 'r-plus', 'r-plus-plus', 'cover', 'ball'. " "The --single_mode option will force single-tree search (as opposed to the " "default dual-tree search). --single_mode can be useful when the RAM usage" " of batch search is too high. The --naive option will force brute-force " "range search." "\n\n" "An example usage to run DBSCAN on the dataset in input.csv with a radius " "of 0.5 and a minimum cluster size of 5 is given below:" "\n\n" " $ mlpack_dbscan -i input.csv -e 0.5 -m 5"); PARAM_STRING_IN_REQ("input_file", "Input dataset to cluster.", "i"); PARAM_STRING_OUT("assignments_file", "Output file for assignments of each " "point.", "a"); PARAM_STRING_OUT("centroids_file", "File to save output centroids to.", "C"); PARAM_DOUBLE_IN("epsilon", "Radius of each range search.", "e", 1.0); PARAM_INT_IN("min_size", "Minimum number of points for a cluster.", "m", 5); PARAM_STRING_IN("tree_type", "If using single-tree or dual-tree search, the " "type of tree to use ('kd', 'r', 'r-star', 'x', 'hilbert-r', 'r-plus', " "'r-plus-plus', 'cover', 'ball').", "t", "kd"); PARAM_FLAG("single_mode", "If set, single-tree range search (not dual-tree) " "will be used.", "S"); PARAM_FLAG("naive", "If set, brute-force range search (not tree-based) " "will be used.", "N"); // Actually run the clustering, and process the output. template void RunDBSCAN(RangeSearchType rs = RangeSearchType()) { if (CLI::HasParam("single_mode")) rs.SingleMode() = true; // Load dataset. arma::mat dataset; data::Load(CLI::GetParam("input_file"), dataset); const double epsilon = CLI::GetParam("epsilon"); const size_t minSize = (size_t) CLI::GetParam("min_size"); DBSCAN d(epsilon, minSize, !CLI::HasParam("single_mode"), rs); // If possible, avoid the overhead of calculating centroids. 
arma::Row assignments; if (CLI::HasParam("centroids_file")) { arma::mat centroids; d.Cluster(dataset, assignments, centroids); data::Save(CLI::GetParam("centroids_file"), centroids, false); } else { d.Cluster(dataset, assignments); } if (CLI::HasParam("assignments_file")) data::Save(CLI::GetParam("assignments_file"), assignments, false, false /* no transpose */); } int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); if (!CLI::HasParam("assignments_file") && !CLI::HasParam("centroids_file")) Log::Warn << "Neither --assignments_file nor --centroids_file are " << "specified; no output will be saved!" << endl; if (CLI::HasParam("single_mode") && CLI::HasParam("naive")) Log::Warn << "--single_mode ignored because --naive is specified." << endl; // Fire off naive search if needed. if (CLI::HasParam("naive")) { RangeSearch<> rs(true); RunDBSCAN(rs); } const string treeType = CLI::GetParam("tree_type"); if (treeType == "kd") RunDBSCAN>(); else if (treeType == "cover") RunDBSCAN>(); else if (treeType == "r") RunDBSCAN>(); else if (treeType == "r-star") RunDBSCAN>(); else if (treeType == "x") RunDBSCAN>(); else if (treeType == "hilbert-r") RunDBSCAN>(); else if (treeType == "r-plus") RunDBSCAN>(); else if (treeType == "r-plus-plus") RunDBSCAN>(); else if (treeType == "ball") RunDBSCAN>(); else { Log::Fatal << "Unknown tree type specified! Valid choices are 'kd', " << "'cover', 'r', 'r-star', 'x', 'hilbert-r', 'r-plus', 'r-plus-plus'," << " and 'ball'." << endl; } } mlpack-2.2.5/src/mlpack/methods/dbscan/random_point_selection.hpp000066400000000000000000000027061315013601400251700ustar00rootroot00000000000000/** * @file random_point_selection.hpp * @author Ryan Curtin * * Randomly select the next point for DBSCAN. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DBSCAN_RANDOM_POINT_SELECTION_HPP #define MLPACK_METHODS_DBSCAN_RANDOM_POINT_SELECTION_HPP #include #include namespace mlpack { namespace dbscan { /** * This class can be used to randomly select the next point to use for DBSCAN. */ class RandomPointSelection { public: /** * Select the next point to use, randomly. * * @param unvisited Bitset indicating which points are unvisited. * @param data Unused data. */ template static size_t Select(const boost::dynamic_bitset<>& unvisited, const MatType& /* data */) { const size_t max = unvisited.count(); const size_t index = math::RandInt(max); // Select the index'th unvisited point. size_t found = 0; for (size_t i = 0; i < unvisited.size(); ++i) { if (unvisited[i]) ++found; if (found > index) return i; } return 0; // Not sure if it is possible to get here. } }; } // namespace dbscan } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/decision_stump/000077500000000000000000000000001315013601400215075ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/decision_stump/CMakeLists.txt000066400000000000000000000010351315013601400242460ustar00rootroot00000000000000cmake_minimum_required(VERSION 2.8) # Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES decision_stump.hpp decision_stump_impl.hpp ) # Add directory name to sources. 
set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(decision_stump) mlpack-2.2.5/src/mlpack/methods/decision_stump/decision_stump.hpp000066400000000000000000000175161315013601400252570ustar00rootroot00000000000000/** * @file decision_stump.hpp * @author Udit Saxena * * Definition of decision stumps. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DECISION_STUMP_DECISION_STUMP_HPP #define MLPACK_METHODS_DECISION_STUMP_DECISION_STUMP_HPP #include namespace mlpack { namespace decision_stump { /** * This class implements a decision stump. It constructs a single level * decision tree, i.e., a decision stump. It uses entropy to decide splitting * ranges. * * The stump is parameterized by a splitting dimension (the dimension on which * points are split), a vector of bin split values, and a vector of labels for * each bin. Bin i is specified by the range [split[i], split[i + 1]). The * last bin has range up to \infty (split[i + 1] does not exist in that case). * Points that are below the first bin will take the label of the first bin. * * @tparam MatType Type of matrix that is being used (sparse or dense). */ template class DecisionStump { public: /** * Constructor. Train on the provided data. Generate a decision stump from * data. * * @param data Input, training data. * @param labels Labels of training data. * @param classes Number of distinct classes in labels. * @param bucketSize Minimum size of bucket when splitting. */ DecisionStump(const MatType& data, const arma::Row& labels, const size_t classes, const size_t bucketSize = 10); /** * Alternate constructor which copies the parameters bucketSize and classes * from an already initiated decision stump, other. It appropriately sets the * weight vector. * * @param other The other initiated Decision Stump object from * which we copy the values. * @param data The data on which to train this object on. * @param labels The labels of data. * @param weights Weight vector to use while training. For boosting purposes. */ DecisionStump(const DecisionStump<>& other, const MatType& data, const arma::Row& labels, const arma::rowvec& weights); /** * Create a decision stump without training. This stump will not be useful * and will always return a class of 0 for anything that is to be classified, * so it would be a prudent idea to call Train() after using this constructor. */ DecisionStump(); /** * Train the decision stump on the given data. This completely overwrites any * previous training data, so after training the stump may be completely * different. * * @param data Dataset to train on. * @param labels Labels for each point in the dataset. * @param classes Number of classes in the dataset. * @param bucketSize Minimum size of bucket when splitting. */ void Train(const MatType& data, const arma::Row& labels, const size_t classes, const size_t bucketSize); /** * Train the decision stump on the given data, with the given weights. This * completely overwrites any previous training data, so after training the * stump may be completely different. 
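   *
   * A minimal usage sketch (the dataset and label objects here are
   * hypothetical):
   * @code
   * arma::mat data; // One training point per column.
   * arma::Row<size_t> labels; // One label in [0, 3) per point.
   * DecisionStump<> stump;
   * stump.Train(data, labels, 3, 10); // 3 classes, bucketSize of 10.
   * @endcode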
* * @param data Dataset to train on. * @param labels Labels for each point in the dataset. * @param weights Weights for each point in the dataset. * @param classes Number of classes in the dataset. * @param bucketSize Minimum size of bucket when splitting. */ void Train(const MatType& data, const arma::Row& labels, const arma::rowvec& weights, const size_t classes, const size_t bucketSize); /** * Classification function. After training, classify test, and put the * predicted classes in predictedLabels. * * @param test Testing data or data to classify. * @param predictedLabels Vector to store the predicted classes after * classifying test data. */ void Classify(const MatType& test, arma::Row& predictedLabels); //! Access the splitting dimension. size_t SplitDimension() const { return splitDimension; } //! Modify the splitting dimension (be careful!). size_t& SplitDimension() { return splitDimension; } //! Access the splitting values. const arma::vec& Split() const { return split; } //! Modify the splitting values (be careful!). arma::vec& Split() { return split; } //! Access the labels for each split bin. const arma::Col BinLabels() const { return binLabels; } //! Modify the labels for each split bin (be careful!). arma::Col& BinLabels() { return binLabels; } //! Serialize the decision stump. template void Serialize(Archive& ar, const unsigned int /* version */); private: //! The number of classes (we must store this for boosting). size_t classes; //! The minimum number of points in a bucket. size_t bucketSize; //! Stores the value of the dimension on which to split. size_t splitDimension; //! Stores the splitting values after training. arma::vec split; //! Stores the labels for each splitting bin. arma::Col binLabels; /** * Sets up dimension as if it were splitting on it and finds entropy when * splitting on dimension. * * @param dimension A row from the training data, which might be a * candidate for the splitting dimension. * @tparam UseWeights Whether we need to run a weighted Decision Stump. */ template double SetupSplitDimension(const VecType& dimension, const arma::Row& labels, const arma::rowvec& weightD); /** * After having decided the dimension on which to split, train on that * dimension. * * @tparam dimension dimension is the dimension decided by the constructor * on which we now train the decision stump. */ template void TrainOnDim(const VecType& dimension, const arma::Row& labels); /** * After the "split" matrix has been set up, merge ranges with identical class * labels. */ void MergeRanges(); /** * Count the most frequently occurring element in subCols. * * @param subCols The vector in which to find the most frequently occurring * element. */ template double CountMostFreq(const VecType& subCols); /** * Returns 1 if all the values of featureRow are not same. * * @param featureRow The dimension which is checked for identical values. */ template int IsDistinct(const VecType& featureRow); /** * Calculate the entropy of the given dimension. * * @param labels Corresponding labels of the dimension. * @param classes Number of classes. * @param weights Weights for this set of labels. * @tparam UseWeights If true, the weights in the weight vector will be used * (otherwise they are ignored). */ template double CalculateEntropy(const VecType& labels, const WeightVecType& weights); /** * Train the decision stump on the given data and labels. * * @param data Dataset to train on. * @param labels Labels for dataset. * @param weights Weights for this set of labels. 
* @tparam UseWeights If true, the weights in the weight vector will be used * (otherwise they are ignored). */ template void Train(const MatType& data, const arma::Row& labels, const arma::rowvec& weights); }; } // namespace decision_stump } // namespace mlpack #include "decision_stump_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/decision_stump/decision_stump_impl.hpp000066400000000000000000000351731315013601400262770ustar00rootroot00000000000000/** * @file decision_stump_impl.hpp * @author Udit Saxena * * Implementation of DecisionStump class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DECISION_STUMP_DECISION_STUMP_IMPL_HPP #define MLPACK_METHODS_DECISION_STUMP_DECISION_STUMP_IMPL_HPP // In case it hasn't been included yet. #include "decision_stump.hpp" namespace mlpack { namespace decision_stump { /** * Constructor. Train on the provided data. Generate a decision stump from data. * * @param data Input, training data. * @param labels Labels of data. * @param classes Number of distinct classes in labels. * @param bucketSize Minimum size of bucket when splitting. */ template DecisionStump::DecisionStump(const MatType& data, const arma::Row& labels, const size_t classes, const size_t bucketSize) : classes(classes), bucketSize(bucketSize) { arma::rowvec weights; Train(data, labels, weights); } /** * Empty constructor. */ template DecisionStump::DecisionStump() : classes(1), bucketSize(0), splitDimension(0), split(1), binLabels(1) { split[0] = DBL_MAX; binLabels[0] = 0; } /** * Train on the given data and labels. */ template void DecisionStump::Train(const MatType& data, const arma::Row& labels, const size_t classes, const size_t bucketSize) { this->classes = classes; this->bucketSize = bucketSize; // Pass to unweighted training function. arma::rowvec weights; Train(data, labels, weights); } /** * Train the decision stump on the given data, with the given weights. This * completely overwrites any previous training data, so after training the * stump may be completely different. */ template void DecisionStump::Train(const MatType& data, const arma::Row& labels, const arma::rowvec& weights, const size_t classes, const size_t bucketSize) { this->classes = classes; this->bucketSize = bucketSize; // Pass to weighted training function. Train(data, labels, weights); } /** * Train the decision stump on the given data and labels. * * @param data Dataset to train on. * @param labels Labels for dataset. * @param UseWeights Whether we need to run a weighted Decision Stump. */ template template void DecisionStump::Train(const MatType& data, const arma::Row& labels, const arma::rowvec& weights) { // If classLabels are not all identical, proceed with training. size_t bestDim = 0; double entropy; const double rootEntropy = CalculateEntropy(labels, weights); double gain, bestGain = 0.0; for (size_t i = 0; i < data.n_rows; i++) { // Go through each dimension of the data. if (IsDistinct(data.row(i))) { // For each dimension with non-identical values, treat it as a potential // splitting dimension and calculate entropy if split on it. entropy = SetupSplitDimension(data.row(i), labels, weights); gain = rootEntropy - entropy; // Find the dimension with the best entropy so that the gain is // maximized. 
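      // Note: CalculateEntropy() returns sum_j p_j * log2(p_j), i.e. the
      // *negative* of the entropy, so the 'gain' computed here is the negative
      // of the usual information gain and is never positive.  Maximizing the
      // information gain therefore means choosing the smallest (most negative)
      // 'gain', which is why the comparison below uses '<'.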
// We are maximizing gain, which is what is returned from // SetupSplitDimension(). if (gain < bestGain) { bestDim = i; bestGain = gain; } } } splitDimension = bestDim; // Once the splitting column/dimension has been decided, train on it. TrainOnDim(data.row(splitDimension), labels); } /** * Classification function. After training, classify test, and put the predicted * classes in predictedLabels. * * @param test Testing data or data to classify. * @param predictedLabels Vector to store the predicted classes after * classifying test */ template void DecisionStump::Classify(const MatType& test, arma::Row& predictedLabels) { predictedLabels.set_size(test.n_cols); for (size_t i = 0; i < test.n_cols; i++) { // Determine which bin the test point falls into. // Assume first that it falls into the first bin, then proceed through the // bins until it is known which bin it falls into. size_t bin = 0; const double val = test(splitDimension, i); while (bin < split.n_elem - 1) { if (val < split(bin + 1)) break; ++bin; } predictedLabels(i) = binLabels(bin); } } /** * Alternate constructor which copies parameters bucketSize and numClasses * from an already initiated decision stump, other. It appropriately * sets the Weight vector. * * @param other The other initiated Decision Stump object from * which we copy the values from. * @param data The data on which to train this object on. * @param D Weight vector to use while training. For boosting purposes. * @param labels The labels of data. * @param UseWeights Whether we need to run a weighted Decision Stump. */ template DecisionStump::DecisionStump(const DecisionStump<>& other, const MatType& data, const arma::Row& labels, const arma::rowvec& weights) : classes(other.classes), bucketSize(other.bucketSize) { Train(data, labels, weights); } /** * Serialize the decision stump. */ template template void DecisionStump::Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // This is straightforward; just serialize all of the members of the class. // None need special handling. ar & CreateNVP(classes, "classes"); ar & CreateNVP(bucketSize, "bucketSize"); ar & CreateNVP(splitDimension, "splitDimension"); ar & CreateNVP(split, "split"); ar & CreateNVP(binLabels, "binLabels"); } /** * Sets up dimension as if it were splitting on it and finds entropy when * splitting on dimension. * * @param dimension A row from the training data, which might be a candidate for * the splitting dimension. * @param UseWeights Whether we need to run a weighted Decision Stump. */ template template double DecisionStump::SetupSplitDimension( const VecType& dimension, const arma::Row& labels, const arma::rowvec& weights) { size_t i, count, begin, end; double entropy = 0.0; // Store the indices of the sorted dimension to build a vector of sorted // labels. This sort is stable. arma::uvec sortedIndexDim = arma::stable_sort_index(dimension.t()); arma::Row sortedLabels(dimension.n_elem); arma::rowvec sortedWeights(dimension.n_elem); for (i = 0; i < dimension.n_elem; i++) { sortedLabels(i) = labels(sortedIndexDim(i)); // Apply weights if necessary. if (UseWeights) sortedWeights(i) = weights(sortedIndexDim(i)); } i = 0; count = 0; // This splits the sorted data into buckets of size greater than or equal to // bucketSize. while (i < sortedLabels.n_elem) { count++; if (i == sortedLabels.n_elem - 1) { // If we're at the end, then don't worry about the bucket size; just take // this as the last bin. 
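      // (For example, with sortedLabels = [0 0 1 1 1] and bucketSize = 3: the
      // run of 0s ends at i = 1 with count = 2 < bucketSize, so that bin is
      // padded to cover indices [0, 2]; the final run then ends here at
      // i = 4, producing the bin [3, 4].)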
begin = i - count + 1; end = i; // Use ratioEl to calculate the ratio of elements in this split. const double ratioEl = ((double) (end - begin + 1) / sortedLabels.n_elem); entropy += ratioEl * CalculateEntropy( sortedLabels.subvec(begin, end), sortedWeights.subvec(begin, end)); i++; } else if (sortedLabels(i) != sortedLabels(i + 1)) { // If we're not at the last element of sortedLabels, then check whether // count is less than the current bucket size. if (count < bucketSize) { // If it is, then take the minimum bucket size anyways. // This is where the inpBucketSize comes into use. // This makes sure there isn't a bucket for every change in labels. begin = i - count + 1; end = begin + bucketSize - 1; if (end > sortedLabels.n_elem - 1) end = sortedLabels.n_elem - 1; } else { // If it is not, then take the bucket size as the value of count. begin = i - count + 1; end = i; } const double ratioEl = ((double) (end - begin + 1) / sortedLabels.n_elem); entropy += ratioEl * CalculateEntropy( sortedLabels.subvec(begin, end), sortedWeights.subvec(begin, end)); i = end + 1; count = 0; } else i++; } return entropy; } /** * After having decided the dimension on which to split, train on that * dimension. * * @param dimension Dimension is the dimension decided by the constructor on * which we now train the decision stump. */ template template void DecisionStump::TrainOnDim(const VecType& dimension, const arma::Row& labels) { size_t i, count, begin, end; typename MatType::row_type sortedSplitDim = arma::sort(dimension); arma::uvec sortedSplitIndexDim = arma::stable_sort_index(dimension.t()); arma::Row sortedLabels(dimension.n_elem); sortedLabels.fill(0); for (i = 0; i < dimension.n_elem; i++) sortedLabels(i) = labels(sortedSplitIndexDim(i)); arma::rowvec subCols; double mostFreq; i = 0; count = 0; while (i < sortedLabels.n_elem) { count++; if (i == sortedLabels.n_elem - 1) { begin = i - count + 1; end = i; mostFreq = CountMostFreq(sortedLabels.cols(begin, end)); split.resize(split.n_elem + 1); split(split.n_elem - 1) = sortedSplitDim(begin); binLabels.resize(binLabels.n_elem + 1); binLabels(binLabels.n_elem - 1) = mostFreq; i++; } else if (sortedLabels(i) != sortedLabels(i + 1)) { if (count < bucketSize) { // Test for different values of bucketSize, especially extreme cases. begin = i - count + 1; end = begin + bucketSize - 1; if (end > sortedLabels.n_elem - 1) end = sortedLabels.n_elem - 1; } else { begin = i - count + 1; end = i; } // Find the most frequent element in subCols so as to assign a label to // the bucket of subCols. mostFreq = CountMostFreq(sortedLabels.cols(begin, end)); split.resize(split.n_elem + 1); split(split.n_elem - 1) = sortedSplitDim(begin); binLabels.resize(binLabels.n_elem + 1); binLabels(binLabels.n_elem - 1) = mostFreq; i = end + 1; count = 0; } else i++; } // Now trim the split matrix so that buckets one after the after which point // to the same classLabel are merged as one big bucket. MergeRanges(); } /** * After the "split" matrix has been set up, merge ranges with identical class * labels. */ template void DecisionStump::MergeRanges() { for (size_t i = 1; i < split.n_rows; i++) { if (binLabels(i) == binLabels(i - 1)) { // Remove this row, as it has the same label as the previous bucket. binLabels.shed_row(i); split.shed_row(i); // Go back to previous row. i--; } } } template template double DecisionStump::CountMostFreq(const VecType& subCols) { // We'll create a map of elements and the number of times that each element is // seen. 
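  // Iteration over the map proceeds in ascending key order, and the '>='
  // comparison below means ties are resolved in favor of the largest label
  // value.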
std::map countMap; for (size_t i = 0; i < subCols.n_elem; ++i) { if (countMap.count(subCols[i]) == 0) countMap[subCols[i]] = 1; else ++countMap[subCols[i]]; } // Now find the maximum value. typename std::map::iterator it = countMap.begin(); double mostFreq = it->first; size_t mostFreqCount = it->second; while (it != countMap.end()) { if (it->second >= mostFreqCount) { mostFreq = it->first; mostFreqCount = it->second; } ++it; } return mostFreq; } /** * Returns 1 if all the values of featureRow are not the same. * * @param featureRow The dimension which is checked for identical values. */ template template int DecisionStump::IsDistinct(const VecType& featureRow) { typename VecType::elem_type val = featureRow(0); for (size_t i = 1; i < featureRow.n_elem; ++i) if (val != featureRow(i)) return 1; return 0; } /** * Calculate entropy of dimension. * * @param labels Corresponding labels of the dimension. * @param UseWeights Whether we need to run a weighted Decision Stump. */ template template double DecisionStump::CalculateEntropy( const VecType& labels, const WeightVecType& weights) { double entropy = 0.0; size_t j; arma::rowvec numElem(classes); numElem.fill(0); // Variable to accumulate the weight in this subview_row. double accWeight = 0.0; // Populate numElem; they are used as helpers to calculate entropy. if (UseWeights) { for (j = 0; j < labels.n_elem; j++) { numElem(labels(j)) += weights(j); accWeight += weights(j); } for (j = 0; j < classes; j++) { const double p1 = ((double) numElem(j) / accWeight); // Instead of using log2(), which is C99 and may not exist on some // compilers, use std::log(), then use the change-of-base formula to make // the result correct. entropy += (p1 == 0) ? 0 : p1 * std::log(p1); } } else { for (j = 0; j < labels.n_elem; j++) numElem(labels(j))++; for (j = 0; j < classes; j++) { const double p1 = ((double) numElem(j) / labels.n_elem); // Instead of using log2(), which is C99 and may not exist on some // compilers, use std::log(), then use the change-of-base formula to make // the result correct. entropy += (p1 == 0) ? 0 : p1 * std::log(p1); } } return entropy / std::log(2.0); } } // namespace decision_stump } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/decision_stump/decision_stump_main.cpp000066400000000000000000000174221315013601400262520ustar00rootroot00000000000000/** * @file decision_stump_main.cpp * @author Udit Saxena * * Main executable for the decision stump. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "decision_stump.hpp" #include #include using namespace mlpack; using namespace mlpack::decision_stump; using namespace std; using namespace arma; PROGRAM_INFO("Decision Stump", "This program implements a decision stump, which is a single-level decision" " tree. The decision stump will split on one dimension of the input data, " "and will split into multiple buckets. The dimension and bins are selected" " by maximizing the information gain of the split. 
Optionally, the minimum" " number of training points in each bin can be specified with the " "--bucket_size (-b) parameter.\n" "\n" "The decision stump is parameterized by a splitting dimension and a vector " "of values that denote the splitting values of each bin.\n" "\n" "This program enables several applications: a decision tree may be trained " "or loaded, and then that decision tree may be used to classify a given set" " of test points. The decision tree may also be saved to a file for later " "usage.\n" "\n" "To train a decision stump, training data should be passed with the " "--training_file (-t) option, and their corresponding labels should be " "passed with the --labels_file (-l) option. Optionally, if --labels_file " "is not specified, the labels are assumed to be the last dimension of the " "training dataset. The --bucket_size (-b) parameter controls the minimum " "number of training points in each decision stump bucket.\n" "\n" "For classifying a test set, a decision stump may be loaded with the " "--input_model_file (-m) parameter (useful for the situation where a " "stump has not just been trained), and a test set may be specified with the" " --test_file (-T) parameter. The predicted labels will be saved to the " "file specified with the --predictions_file (-p) parameter.\n" "\n" "Because decision stumps are trained in batch, retraining does not make " "sense and thus it is not possible to pass both --training_file and " "--input_model_file; instead, simply build a new decision stump with the " "training data.\n" "\n" "A trained decision stump can be saved with the --output_model_file (-M) " "option. That stump may later be re-used in subsequent calls to this " "program (or others)."); // Datasets we might load. PARAM_STRING_IN("training_file", "A file containing the training set.", "t", ""); PARAM_STRING_IN("labels_file", "A file containing labels for the training set." "If not specified, the labels are assumed to be the last row of the " "training data.", "l", ""); PARAM_STRING_IN("test_file", "A file containing the test set.", "T", ""); // Output. PARAM_STRING_OUT("predictions_file", "The file in which the predicted labels " "for the test set will be written.", "p"); // We may load or save a model. PARAM_STRING_IN("input_model_file", "File containing decision stump model to " "load.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained decision stump " "model to.", "M"); PARAM_INT_IN("bucket_size", "The minimum number of training points in each " "decision stump bucket.", "b", 6); /** * This is the structure that actually saves to disk. We have to save the * label mappings, too, otherwise everything we load at test time in a future * run will end up being borked. */ struct DSModel { //! The mappings. arma::Col mappings; //! The stump. DecisionStump<> stump; //! Serialize the model. template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(mappings, "mappings"); ar & data::CreateNVP(stump, "stump"); } }; int main(int argc, char *argv[]) { CLI::ParseCommandLine(argc, argv); // Check that the parameters are reasonable. if (CLI::HasParam("training_file") && CLI::HasParam("input_model_file")) { Log::Fatal << "Both --training_file and --input_model_file are specified, " << "but a trained model cannot be retrained. Only one of these options" << " may be specified." 
<< endl; } if (!CLI::HasParam("training_file") && !CLI::HasParam("input_model_file")) { Log::Fatal << "Neither --training_file nor --input_model_file are given; " << "one must be specified." << endl; } if (!CLI::HasParam("output_model_file") && !CLI::HasParam("predictions_file")) { Log::Warn << "Neither --output_model_file nor --predictions_file are " << "specified; no results will be saved!" << endl; } // We must either load a model, or train a new stump. DSModel model; if (CLI::HasParam("training_file")) { const string trainingDataFilename = CLI::GetParam("training_file"); mat trainingData; data::Load(trainingDataFilename, trainingData, true); // Load labels, if necessary. Mat labelsIn; if (CLI::HasParam("labels_file")) { const string labelsFilename = CLI::GetParam("labels_file"); // Load labels. data::Load(labelsFilename, labelsIn, true); // Do the labels need to be transposed? if (labelsIn.n_cols == 1) labelsIn = labelsIn.t(); } else { // Extract the labels as the last Log::Info << "Using the last dimension of training set as labels." << endl; labelsIn = arma::conv_to>::from( trainingData.row(trainingData.n_rows - 1).t()); trainingData.shed_row(trainingData.n_rows - 1); } // Normalize the labels. Row labels; data::NormalizeLabels(labelsIn.row(0), labels, model.mappings); const size_t bucketSize = CLI::GetParam("bucket_size"); const size_t classes = labels.max() + 1; Timer::Start("training"); model.stump.Train(trainingData, labels, classes, bucketSize); Timer::Stop("training"); } else { const string inputModelFile = CLI::GetParam("input_model_file"); data::Load(inputModelFile, "decision_stump_model", model, true); } // Now, do we need to do any testing? if (CLI::HasParam("test_file")) { // Load the test file. const string testingDataFilename = CLI::GetParam("test_file"); mat testingData; data::Load(testingDataFilename, testingData, true); if (testingData.n_rows <= model.stump.SplitDimension()) Log::Fatal << "Test data dimensionality (" << testingData.n_rows << ") " << "is too low; the trained stump requires at least " << model.stump.SplitDimension() << " dimensions!" << endl; Row predictedLabels(testingData.n_cols); Timer::Start("testing"); model.stump.Classify(testingData, predictedLabels); Timer::Stop("testing"); // Denormalize predicted labels, if we want to save them. if (CLI::HasParam("predictions_file")) { Row actualLabels; data::RevertLabels(predictedLabels, model.mappings, actualLabels); // Save the predicted labels in a transposed form as output. const string predictionsFile = CLI::GetParam("predictions_file"); data::Save(predictionsFile, actualLabels, true); } } // Save the model, if desired. if (CLI::HasParam("output_model_file")) data::Save(CLI::GetParam("output_model_file"), "decision_stump_model", model); } mlpack-2.2.5/src/mlpack/methods/decision_tree/000077500000000000000000000000001315013601400212765ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/decision_tree/CMakeLists.txt000066400000000000000000000013031315013601400240330ustar00rootroot00000000000000cmake_minimum_required(VERSION 2.8) # Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES decision_tree.hpp decision_tree_impl.hpp all_categorical_split.hpp all_categorical_split_impl.hpp best_binary_numeric_split.hpp best_binary_numeric_split_impl.hpp gini_gain.hpp information_gain.hpp ) # Add directory name to sources. 
set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(decision_tree) mlpack-2.2.5/src/mlpack/methods/decision_tree/all_categorical_split.hpp000066400000000000000000000065371315013601400263420ustar00rootroot00000000000000/** * @file all_categorical_split.hpp * @author Ryan Curtin * * This file defines a tree splitter that split a categorical feature into all * of the possible categories. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DECISION_TREE_ALL_CATEGORICAL_SPLIT_HPP #define MLPACK_METHODS_DECISION_TREE_ALL_CATEGORICAL_SPLIT_HPP #include namespace mlpack { namespace tree { /** * The AllCategoricalSplit is a splitting function that will split categorical * features into many children: one child for each category. * * @tparam FitnessFunction Fitness function to evaluate gain with. */ template class AllCategoricalSplit { public: // No extra info needed for split. template class AuxiliarySplitInfo { }; /** * Check if we can split a node. If we can split a node in a way that * improves on 'bestGain', then we return the improved gain. Otherwise we * return the value 'bestGain'. If a split is made, then classProbabilities * and aux may be modified. For this particular split type, aux will be empty * and classProbabilities will hold one element---the number of children. * * @param bestGain Best gain seen so far (we'll only split if we find gain * better than this). * @param data The dimension of data points to check for a split in. * @param numCategories Number of categories in the categorical data. * @param labels Labels for each point. * @param numClasses Number of classes in the dataset. * @param minimumLeafSize Minimum number of points in a leaf node for * splitting. * @param classProbabilities Class probabilities vector, which may be filled * with split information a successful split. * @param aux Auxiliary split information, which may be modified on a * successful split. */ template static double SplitIfBetter( const double bestGain, const VecType& data, const size_t numCategories, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize, arma::Col& classProbabilities, AuxiliarySplitInfo& aux); /** * Return the number of children in the split. * * @param classProbabilities Auxiliary information for the split. * @param aux (Unused) auxiliary information for the split. */ template static size_t NumChildren(const arma::Col& classProbabilities, const AuxiliarySplitInfo& /* aux */); /** * Calculate the direction a point should percolate to. * * @param classProbabilities Auxiliary information for the split. * @param aux (Unused) auxiliary information for the split. */ template static size_t CalculateDirection( const ElemType& point, const arma::Col& classProbabilities, const AuxiliarySplitInfo& /* aux */); }; } // namespace tree } // namespace mlpack // Include implementation. 
#include "all_categorical_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/decision_tree/all_categorical_split_impl.hpp000066400000000000000000000062331315013601400273540ustar00rootroot00000000000000/** * @file all_categorical_split_impl.hpp * @author Ryan Curtin * * Implementation of the AllCategoricalSplit categorical split class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DECISION_TREE_ALL_CATEGORICAL_SPLIT_IMPL_HPP #define MLPACK_METHODS_DECISION_TREE_ALL_CATEGORICAL_SPLIT_IMPL_HPP namespace mlpack { namespace tree { template template double AllCategoricalSplit::SplitIfBetter( const double bestGain, const VecType& data, const size_t numCategories, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize, arma::Col& classProbabilities, AuxiliarySplitInfo& /* aux */) { // Count the number of elements in each potential child. const double epsilon = 1e-7; // Tolerance for floating-point errors. arma::Col counts(numCategories); counts.zeros(); for (size_t i = 0; i < data.n_elem; ++i) counts[(size_t) data[i]]++; // If each child will have the minimum number of points in it, we can split. // Otherwise we can't. if (arma::min(counts) < minimumLeafSize) return bestGain; // Calculate the gain of the split. First we have to calculate the labels // that would be assigned to each child. arma::uvec childPositions(numCategories, arma::fill::zeros); std::vector> childLabels(numCategories); for (size_t i = 0; i < numCategories; ++i) childLabels[i].zeros(counts[i]); // Extract labels for each child. for (size_t i = 0; i < data.n_elem; ++i) { const size_t category = (size_t) data[i]; childLabels[category][childPositions[category]++] = labels[i]; } double overallGain = 0.0; for (size_t i = 0; i < counts.n_elem; ++i) { // Calculate the gain of this child. const double childPct = double(counts[i]) / double(data.n_elem); const double childGain = FitnessFunction::Evaluate(childLabels[i], numClasses); overallGain += childPct * childGain; } if (overallGain > bestGain + epsilon) { // This is better, so set up the class probabilities vector and return. classProbabilities.set_size(1); classProbabilities[0] = numCategories; return overallGain; } // Otherwise there was no improvement. return bestGain; } template template size_t AllCategoricalSplit::NumChildren( const arma::Col& classProbabilities, const AuxiliarySplitInfo& /* aux */) { return classProbabilities[0]; } template template size_t AllCategoricalSplit::CalculateDirection( const ElemType& point, const arma::Col& /* classProbabilities */, const AuxiliarySplitInfo& /* aux */) { return (size_t) point; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/decision_tree/best_binary_numeric_split.hpp000066400000000000000000000062261315013601400272530ustar00rootroot00000000000000/** * @file best_binary_numeric_split.hpp * @author Ryan Curtin * * A tree splitter that finds the best binary numeric split. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #ifndef MLPACK_METHODS_DECISION_TREE_BEST_BINARY_NUMERIC_SPLIT_HPP #define MLPACK_METHODS_DECISION_TREE_BEST_BINARY_NUMERIC_SPLIT_HPP #include namespace mlpack { namespace tree { /** * The BestBinaryNumericSplit is a splitting function for decision trees that * will exhaustively search a numeric dimension for the best binary split. * * @tparam FitnessFunction Fitness function to use to calculate gain. */ template class BestBinaryNumericSplit { public: // No extra info needed for split. template class AuxiliarySplitInfo { }; /** * Check if we can split a node. If we can split a node in a way that * improves on 'bestGain', then we return the improved gain. Otherwise we * return the value 'bestGain'. If a split is made, then classProbabilities * and aux may be modified. * * @param bestGain Best gain seen so far (we'll only split if we find gain * better than this). * @param data The dimension of data points to check for a split in. * @param numCategories Number of categories in the categorical data. * @param labels Labels for each point. * @param numClasses Number of classes in the dataset. * @param minimumLeafSize Minimum number of points in a leaf node for * splitting. * @param classProbabilities Class probabilities vector, which may be filled * with split information a successful split. * @param aux Auxiliary split information, which may be modified on a * successful split. */ template static double SplitIfBetter( const double bestGain, const VecType& data, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize, arma::Col& classProbabilities, AuxiliarySplitInfo& aux); /** * Returns 2, since the binary split always has two children. */ template static size_t NumChildren(const arma::Col& /* classProbabilities */, const AuxiliarySplitInfo& /* aux */) { return 2; } /** * Given a point, calculate which child it should go to (left or right). * * @param point Point to calculate direction of. * @param classProbabilities Auxiliary information for the split. * @param aux (Unused) auxiliary information for the split. */ template static size_t CalculateDirection( const ElemType& point, const arma::Col& classProbabilities, const AuxiliarySplitInfo& /* aux */); }; } // namespace tree } // namespace mlpack // Include implementation. #include "best_binary_numeric_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/decision_tree/best_binary_numeric_split_impl.hpp000066400000000000000000000070431315013601400302720ustar00rootroot00000000000000/** * @file best_binary_numeric_split_impl.hpl * @author Ryan Curtin * * Implementation of strategy that finds the best binary numeric split. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DECISION_TREE_BEST_BINARY_NUMERIC_SPLIT_IMPL_HPP #define MLPACK_METHODS_DECISION_TREE_BEST_BINARY_NUMERIC_SPLIT_IMPL_HPP namespace mlpack { namespace tree { template template double BestBinaryNumericSplit::SplitIfBetter( const double bestGain, const VecType& data, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize, arma::Col& classProbabilities, AuxiliarySplitInfo& /* aux */) { // First sanity check: if we don't have enough points, we can't split. if (data.n_elem < (minimumLeafSize * 2)) return bestGain; // Next, sort the data. 
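  // arma::sort_index(data) gives the sorting permutation: sortedIndices[i] is
  // the position in 'data' of the i-th smallest value.  Labels must be
  // gathered through this permutation (not scattered by it) so that
  // sortedLabels ends up in sorted-data order.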
  arma::uvec sortedIndices = arma::sort_index(data);
  arma::Row<size_t> sortedLabels(labels.n_elem);
  for (size_t i = 0; i < sortedLabels.n_elem; ++i)
    sortedLabels[i] = labels[sortedIndices[i]];

  // Loop through all possible split points, choosing the best one.  Also,
  // force a minimum leaf size of 1 (empty children don't make sense).
  double bestFoundGain = bestGain;
  const size_t minimum = std::max(minimumLeafSize, (size_t) 1);
  for (size_t index = minimum; index < data.n_elem - (minimum - 1); ++index)
  {
    // Make sure that the value has changed.
    if (data[sortedIndices[index]] == data[sortedIndices[index - 1]])
      continue;

    // Calculate the gain for the left and right child.
    const double leftGain = FitnessFunction::Evaluate(
        sortedLabels.subvec(0, index - 1), numClasses);
    const double rightGain = FitnessFunction::Evaluate(
        sortedLabels.subvec(index, sortedLabels.n_elem - 1), numClasses);

    // Calculate the fraction of points in the left and right children.
    const double leftRatio = double(index) / double(sortedLabels.n_elem);
    const double rightRatio = 1.0 - leftRatio;

    // Calculate the gain at this split point.
    const double gain = leftRatio * leftGain + rightRatio * rightGain;

    // Corner case: is this the best possible split?
    if (gain == 0.0)
    {
      // We can take a shortcut: no split will be better than this, so just
      // take this one.
      classProbabilities.set_size(1);
      // The actual split value will be halfway between the value at index - 1
      // and index.
      classProbabilities[0] = (data[sortedIndices[index - 1]] +
          data[sortedIndices[index]]) / 2.0;
      return gain;
    }
    else if (gain > bestFoundGain)
    {
      // We still have a better split.
      bestFoundGain = gain;
      classProbabilities.set_size(1);
      classProbabilities[0] = (data[sortedIndices[index - 1]] +
          data[sortedIndices[index]]) / 2.0;
    }
  }

  return bestFoundGain;
}

template<typename FitnessFunction>
template<typename ElemType>
size_t BestBinaryNumericSplit<FitnessFunction>::CalculateDirection(
    const ElemType& point,
    const arma::Col<ElemType>& classProbabilities,
    const AuxiliarySplitInfo<ElemType>& /* aux */)
{
  if (point <= classProbabilities[0])
    return 0; // Go left.
  else
    return 1; // Go right.
}

} // namespace tree
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/decision_tree/decision_tree.hpp000066400000000000000000000241201315013601400246220ustar00rootroot00000000000000/**
 * @file decision_tree.hpp
 * @author Ryan Curtin
 *
 * A generic decision tree learner.  Its behavior can be controlled via
 * template arguments.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_DECISION_TREE_DECISION_TREE_HPP
#define MLPACK_METHODS_DECISION_TREE_DECISION_TREE_HPP

#include <mlpack/core.hpp>
#include "gini_gain.hpp"
#include "best_binary_numeric_split.hpp"
#include "all_categorical_split.hpp"

namespace mlpack {
namespace tree {

/**
 * This class implements a generic decision tree learner.  Its behavior can be
 * controlled via its template arguments.
 *
 * The class inherits from the auxiliary split information in order to prevent
 * an empty auxiliary split information struct from taking any extra size.
 */
template<typename FitnessFunction = GiniGain,
         template<typename> class NumericSplitType = BestBinaryNumericSplit,
         template<typename> class CategoricalSplitType = AllCategoricalSplit,
         typename ElemType = double,
         bool NoRecursion = false>
class DecisionTree :
    public NumericSplitType<FitnessFunction>::template
        AuxiliarySplitInfo<ElemType>,
    public CategoricalSplitType<FitnessFunction>::template
        AuxiliarySplitInfo<ElemType>
{
 public:
  //!
Allow access to the numeric split type. typedef NumericSplitType NumericSplit; //! Allow access to the categorical split type. typedef CategoricalSplitType CategoricalSplit; /** * Construct the decision tree on the given data and labels, where the data * can be both numeric and categorical. Setting minimumLeafSize too small may * cause the tree to overfit, but setting it too large may cause it to * underfit. * * @param data Dataset to train on. * @param datasetInfo Type information for each dimension of the dataset. * @param labels Labels for each training point. * @param numClasses Number of classes in the dataset. * @param minimumLeafSize Minimum number of points in each leaf node. */ template DecisionTree(const MatType& data, const data::DatasetInfo& datasetInfo, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize = 10); /** * Construct the decision tree on the given data and labels, assuming that the * data is all of the numeric type. Setting minimumLeafSize too small may * cause the tree to overfit, but setting it too large may cause it to * underfit. * * @param data Dataset to train on. * @param labels Labels for each training point. * @param numClasses Number of classes in the dataset. * @param minimumLeafSize Minimum number of points in each leaf node. */ template DecisionTree(const MatType& data, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize = 10); /** * Construct a decision tree without training it. It will be a leaf node with * equal probabilities for each class. * * @param numClasses Number of classes in the dataset. */ DecisionTree(const size_t numClasses = 1); /** * Copy another tree. This may use a lot of memory---be sure that it's what * you want to do. * * @param other Tree to copy. */ DecisionTree(const DecisionTree& other); /** * Take ownership of another tree. * * @param other Tree to take ownership of. */ DecisionTree(DecisionTree&& other); /** * Copy another tree. This may use a lot of memory---be sure that it's what * you want to do. * * @param other Tree to copy. */ DecisionTree& operator=(const DecisionTree& other); /** * Take ownership of another tree. * * @param other Tree to take ownership of. */ DecisionTree& operator=(DecisionTree&& other); /** * Clean up memory. */ ~DecisionTree(); /** * Train the decision tree on the given data. This will overwrite the * existing model. The data may have numeric and categorical types, specified * by the datasetInfo parameter. Setting minimumLeafSize too small may cause * the tree to overfit, but setting it too large may cause it to underfit. * * @param data Dataset to train on. * @param datasetInfo Type information for each dimension. * @param labels Labels for each training point. * @param numClasses Number of classes in the dataset. * @param minimumLeafSize Minimum number of points in each leaf node. */ template void Train(const MatType& data, const data::DatasetInfo& datasetInfo, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize = 10); /** * Train the decision tree on the given data, assuming that all dimensions are * numeric. This will overwrite the given model. Setting minimumLeafSize too * small may cause the tree to overfit, but setting it too large may cause it * to underfit. * * @param data Dataset to train on. * @param labels Labels for each training point. * @param numClasses Number of classes in the dataset. * @param minimumLeafSize Minimum number of points in each leaf node. 
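   *
   * A minimal usage sketch (the dataset and label objects here are
   * hypothetical):
   * @code
   * arma::mat dataset; // One point per column; all dimensions numeric.
   * arma::Row<size_t> labels; // One label in [0, 5) per point.
   * DecisionTree<> tree;
   * tree.Train(dataset, labels, 5, 10); // 5 classes, minimum leaf size 10.
   * @endcode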
*/ template void Train(const MatType& data, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize = 10); /** * Classify the given point, using the entire tree. The predicted label is * returned. * * @param point Point to classify. */ template size_t Classify(const VecType& point) const; /** * Classify the given point and also return estimates of the probability for * each class in the given vector. * * @param point Point to classify. * @param prediction This will be set to the predicted class of the point. * @param probabilities This will be filled with class probabilities for the * point. */ template void Classify(const VecType& point, size_t& prediction, arma::vec& probabilities) const; /** * Classify the given points, using the entire tree. The predicted labels for * each point are stored in the given vector. * * @param data Set of points to classify. * @param predictions This will be filled with predictions for each point. */ template void Classify(const MatType& data, arma::Row& predictions) const; /** * Classify the given points and also return estimates of the probabilities * for each class in the given matrix. The predicted labels for each point * are stored in the given vector. * * @param data Set of points to classify. * @param predictions This will be filled with predictions for each point. * @param probabilities This will be filled with class probabilities for each * point. */ template void Classify(const MatType& data, arma::Row& predictions, arma::mat& probabilities) const; /** * Serialize the tree. */ template void Serialize(Archive& ar, const unsigned int /* version */); //! Get the number of children. size_t NumChildren() const { return children.size(); } //! Get the child of the given index. const DecisionTree& Child(const size_t i) const { return *children[i]; } //! Modify the child of the given index (be careful!). DecisionTree& Child(const size_t i) { return *children[i]; } /** * Given a point and that this node is not a leaf, calculate the index of the * child node this point would go towards. This method is primarily used by * the Classify() function, but it can be used in a standalone sense too. * * @param point Point to classify. */ template size_t CalculateDirection(const VecType& point) const; private: //! The vector of children. std::vector children; //! The dimension this node splits on. size_t splitDimension; //! The type of the dimension that we have split on (if we are not a leaf). //! If we are a leaf, then this is the index of the majority class. size_t dimensionTypeOrMajorityClass; /** * This vector may hold different things. If the node has no children, then * it is guaranteed to hold the probabilities of each class. If the node has * children, then it may be used arbitrarily by the split type's * CalculateDirection() function and may not necessarily hold class * probabilities. */ arma::vec classProbabilities; //! Note that this class will also hold the members of the NumericSplit and //! CategoricalSplit AuxiliarySplitInfo classes, since it inherits from them. //! We'll define some convenience typedefs here. typedef typename NumericSplit::template AuxiliarySplitInfo NumericAuxiliarySplitInfo; typedef typename CategoricalSplit::template AuxiliarySplitInfo CategoricalAuxiliarySplitInfo; /** * Calculate the class probabilities of the given labels. */ template void CalculateClassProbabilities(const RowType& labels, const size_t numClasses); }; /** * Convenience typedef for decision stumps (single level decision trees). 
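 *
 * A usage sketch (hypothetical data; the constructor takes the same arguments
 * as DecisionTree above):
 *
 * @code
 * DecisionStump<> stump(data, labels, numClasses);
 * @endcode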
*/ template class NumericSplitType = BestBinaryNumericSplit, template class CategoricalSplitType = AllCategoricalSplit, typename ElemType = double> using DecisionStump = DecisionTree; } // namespace tree } // namespace mlpack // Include implementation. #include "decision_tree_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/decision_tree/decision_tree_impl.hpp000066400000000000000000000532141315013601400256510ustar00rootroot00000000000000/** * @file decision_tree_impl.hpp * @author Ryan Curtin * * Implementation of generic decision tree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DECISION_TREE_DECISION_TREE_IMPL_HPP #define MLPACK_METHODS_DECISION_TREE_DECISION_TREE_IMPL_HPP namespace mlpack { namespace tree { //! Construct and train. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> template DecisionTree::DecisionTree(const MatType& data, const data::DatasetInfo& datasetInfo, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize) { // Pass off work to the Train() method. Train(data, datasetInfo, labels, numClasses, minimumLeafSize); } //! Construct and train. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> template DecisionTree::DecisionTree(const MatType& data, const arma::Row& labels, const size_t numClasses, const size_t minimumLeafSize) { // Pass off work to the Train() method. Train(data, labels, numClasses, minimumLeafSize); } //! Construct, don't train. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> DecisionTree::DecisionTree(const size_t numClasses) : dimensionTypeOrMajorityClass(0), classProbabilities(numClasses) { // Initialize utility vector. classProbabilities.fill(1.0 / (double) numClasses); } //! Copy another tree. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> DecisionTree::DecisionTree(const DecisionTree& other) : NumericAuxiliarySplitInfo(other), CategoricalAuxiliarySplitInfo(other), splitDimension(other.splitDimension), dimensionTypeOrMajorityClass(other.dimensionTypeOrMajorityClass), classProbabilities(other.classProbabilities) { // Copy each child. for (size_t i = 0; i < other.children.size(); ++i) children.push_back(new DecisionTree(*other.children[i])); } //! Take ownership of another tree. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> DecisionTree::DecisionTree(DecisionTree&& other) : NumericAuxiliarySplitInfo(std::move(other)), CategoricalAuxiliarySplitInfo(std::move(other)), children(std::move(other.children)), splitDimension(other.splitDimension), dimensionTypeOrMajorityClass(other.dimensionTypeOrMajorityClass), classProbabilities(std::move(other.classProbabilities)) { // Reset the other object. other.classProbabilities.ones(1); // One class, P(1) = 1. } //! Copy another tree. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> DecisionTree& DecisionTree::operator=(const DecisionTree& other) { // Clean memory if needed. 
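  // (Note: the tree owns its children through raw pointers, so assignment has
  // to free the existing subtree before copying the other tree's children.)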
  for (size_t i = 0; i < children.size(); ++i)
    delete children[i];
  children.clear();

  // Copy everything from the other tree.
  splitDimension = other.splitDimension;
  dimensionTypeOrMajorityClass = other.dimensionTypeOrMajorityClass;
  classProbabilities = other.classProbabilities;

  // Copy the children.
  for (size_t i = 0; i < other.children.size(); ++i)
    children.push_back(new DecisionTree(*other.children[i]));

  // Copy the auxiliary info.
  NumericAuxiliarySplitInfo::operator=(other);
  CategoricalAuxiliarySplitInfo::operator=(other);

  return *this;
}

//! Take ownership of another tree.
template<typename FitnessFunction,
         template<typename> class NumericSplitType,
         template<typename> class CategoricalSplitType,
         typename ElemType,
         bool NoRecursion>
DecisionTree<FitnessFunction, NumericSplitType, CategoricalSplitType, ElemType,
    NoRecursion>&
DecisionTree<FitnessFunction, NumericSplitType, CategoricalSplitType, ElemType,
    NoRecursion>::operator=(DecisionTree&& other)
{
  // Clean memory if needed.
  for (size_t i = 0; i < children.size(); ++i)
    delete children[i];
  children.clear();

  // Take ownership of the other tree's components.
  children = std::move(other.children);
  splitDimension = other.splitDimension;
  dimensionTypeOrMajorityClass = other.dimensionTypeOrMajorityClass;
  classProbabilities = std::move(other.classProbabilities);

  // Reset the class probabilities of the other object.
  other.classProbabilities.ones(1); // One class, P(1) = 1.

  // Take ownership of the auxiliary info.
  NumericAuxiliarySplitInfo::operator=(std::move(other));
  CategoricalAuxiliarySplitInfo::operator=(std::move(other));

  return *this;
}

//! Clean up memory.
template<typename FitnessFunction,
         template<typename> class NumericSplitType,
         template<typename> class CategoricalSplitType,
         typename ElemType,
         bool NoRecursion>
DecisionTree<FitnessFunction, NumericSplitType, CategoricalSplitType, ElemType,
    NoRecursion>::~DecisionTree()
{
  for (size_t i = 0; i < children.size(); ++i)
    delete children[i];
}

//! Train on the given data.
template<typename FitnessFunction,
         template<typename> class NumericSplitType,
         template<typename> class CategoricalSplitType,
         typename ElemType,
         bool NoRecursion>
template<typename MatType>
void DecisionTree<FitnessFunction, NumericSplitType, CategoricalSplitType,
    ElemType, NoRecursion>::Train(const MatType& data,
                                  const data::DatasetInfo& datasetInfo,
                                  const arma::Row<size_t>& labels,
                                  const size_t numClasses,
                                  const size_t minimumLeafSize)
{
  // Clear children if needed.
  for (size_t i = 0; i < children.size(); ++i)
    delete children[i];
  children.clear();

  // Look through the list of dimensions and obtain the gain of the best split.
  // We'll cache the best numeric and categorical split auxiliary information
  // in numericAux and categoricalAux (and clear them later if we do not make a
  // split), and use classProbabilities as auxiliary information. Later we'll
  // overwrite classProbabilities to the empirical class probabilities if we do
  // not split.
  double bestGain = FitnessFunction::Evaluate(labels, numClasses);
  size_t bestDim = datasetInfo.Dimensionality(); // This means "no split".
  for (size_t i = 0; i < datasetInfo.Dimensionality(); ++i)
  {
    double dimGain = -DBL_MAX;
    if (datasetInfo.Type(i) == data::Datatype::categorical)
      dimGain = CategoricalSplit::SplitIfBetter(bestGain, data.row(i),
          datasetInfo.NumMappings(i), labels, numClasses, minimumLeafSize,
          classProbabilities, *this);
    else if (datasetInfo.Type(i) == data::Datatype::numeric)
      dimGain = NumericSplit::SplitIfBetter(bestGain, data.row(i), labels,
          numClasses, minimumLeafSize, classProbabilities, *this);

    // Was there an improvement? If so mark that it's the new best dimension.
    if (dimGain > bestGain)
    {
      bestDim = i;
      bestGain = dimGain;
    }

    // If the gain is the best possible, no need to keep looking.
    if (bestGain == 0.0)
      break;
  }

  // Did we split or not? If so, then split the data and create the children.
  if (bestDim != datasetInfo.Dimensionality())
  {
    dimensionTypeOrMajorityClass = (size_t) datasetInfo.Type(bestDim);
    splitDimension = bestDim;

    // Get the number of children we will have.
    size_t numChildren = 0;
    if (datasetInfo.Type(bestDim) == data::Datatype::categorical)
      numChildren = CategoricalSplit::NumChildren(classProbabilities, *this);
    else
      numChildren = NumericSplit::NumChildren(classProbabilities, *this);

    // Calculate all child assignments.
    arma::Col<size_t> childAssignments(data.n_cols);
    if (datasetInfo.Type(bestDim) == data::Datatype::categorical)
    {
      for (size_t j = 0; j < data.n_cols; ++j)
        childAssignments[j] = CategoricalSplit::CalculateDirection(
            data(bestDim, j), classProbabilities, *this);
    }
    else
    {
      for (size_t j = 0; j < data.n_cols; ++j)
        childAssignments[j] = NumericSplit::CalculateDirection(
            data(bestDim, j), classProbabilities, *this);
    }

    // Figure out counts of children (one count per child).
    arma::Row<size_t> childCounts(numChildren, arma::fill::zeros);
    for (size_t i = 0; i < childAssignments.n_elem; ++i)
      childCounts[childAssignments[i]]++;

    // Split into children.
    for (size_t i = 0; i < numChildren; ++i)
    {
      // Now that we have the size of the matrix we need to extract, extract
      // it.
      MatType childPoints(data.n_rows, childCounts[i]);
      arma::Row<size_t> childLabels(childCounts[i]);
      size_t currentCol = 0;
      for (size_t j = 0; j < data.n_cols; ++j)
      {
        if (childAssignments[j] == i)
        {
          childPoints.col(currentCol) = data.col(j);
          childLabels[currentCol++] = labels[j];
        }
      }

      // Now build the child recursively.
      if (NoRecursion)
        children.push_back(new DecisionTree(childPoints, datasetInfo,
            childLabels, numClasses, childPoints.n_cols));
      else
        children.push_back(new DecisionTree(childPoints, datasetInfo,
            childLabels, numClasses, minimumLeafSize));
    }
  }
  else
  {
    // Clear auxiliary info objects.
    NumericAuxiliarySplitInfo::operator=(NumericAuxiliarySplitInfo());
    CategoricalAuxiliarySplitInfo::operator=(CategoricalAuxiliarySplitInfo());

    // Calculate class probabilities because we are a leaf.
    CalculateClassProbabilities(labels, numClasses);
  }
}

//! Train on the given data, assuming all dimensions are numeric.
template<typename FitnessFunction,
         template<typename> class NumericSplitType,
         template<typename> class CategoricalSplitType,
         typename ElemType,
         bool NoRecursion>
template<typename MatType>
void DecisionTree<FitnessFunction, NumericSplitType, CategoricalSplitType,
    ElemType, NoRecursion>::Train(const MatType& data,
                                  const arma::Row<size_t>& labels,
                                  const size_t numClasses,
                                  const size_t minimumLeafSize)
{
  // Clear children if needed.
  for (size_t i = 0; i < children.size(); ++i)
    delete children[i];
  children.clear();

  // We won't be using these members, so reset them.
  CategoricalAuxiliarySplitInfo::operator=(CategoricalAuxiliarySplitInfo());

  // Look through the list of dimensions and obtain the best split. We'll cache
  // the best numeric split auxiliary information in numericAux (and clear it
  // later if we don't make a split), and use classProbabilities as auxiliary
  // information. Later we'll overwrite classProbabilities to the empirical
  // class probabilities if we do not split.
  double bestGain = FitnessFunction::Evaluate(labels, numClasses);
  size_t bestDim = data.n_rows; // This means "no split".
  for (size_t i = 0; i < data.n_rows; ++i)
  {
    double dimGain = NumericSplitType<FitnessFunction>::SplitIfBetter(bestGain,
        data.row(i), labels, numClasses, minimumLeafSize, classProbabilities,
        *this);

    if (dimGain > bestGain)
    {
      bestDim = i;
      bestGain = dimGain;
    }

    // If the gain is the best possible, no need to keep looking.
    if (bestGain == 0.0)
      break;
  }

  // Did we split or not? If so, then split the data and create the children.
  if (bestDim != data.n_rows)
  {
    // We know that the split is numeric.
    size_t numChildren = NumericSplit::NumChildren(classProbabilities, *this);
    splitDimension = bestDim;
    dimensionTypeOrMajorityClass = (size_t) data::Datatype::numeric;

    // Calculate all child assignments.
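    // (Each column j of the data is routed to the child whose index is
    // returned by NumericSplit::CalculateDirection(data(bestDim, j), ...); for
    // the default binary numeric split that index is 0 for "left" and 1 for
    // "right".)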
arma::Col childAssignments(data.n_cols); for (size_t j = 0; j < data.n_cols; ++j) childAssignments[j] = NumericSplit::CalculateDirection(data(bestDim, j), classProbabilities, *this); // Calculate counts of children in each node. arma::Col childCounts(numChildren); childCounts.zeros(); for (size_t j = 0; j < childAssignments.n_elem; ++j) childCounts[childAssignments[j]]++; for (size_t i = 0; i < numChildren; ++i) { // Now that we have the size of the matrix we need to extract, extract it. MatType childPoints(data.n_rows, childCounts[i]); arma::Row childLabels(childCounts[i]); size_t currentCol = 0; for (size_t j = 0; j < data.n_cols; ++j) { if (childAssignments[j] == i) { childPoints.col(currentCol) = data.col(j); childLabels[currentCol++] = labels[j]; } } // Now build the child recursively. if (NoRecursion) children.push_back(new DecisionTree(childPoints, childLabels, numClasses, childPoints.n_cols)); else children.push_back(new DecisionTree(childPoints, childLabels, numClasses, minimumLeafSize)); } } else { // We won't be needing these members, so reset them. NumericAuxiliarySplitInfo::operator=(NumericAuxiliarySplitInfo()); // Calculate class probabilities because we are a leaf. CalculateClassProbabilities(labels, numClasses); } } //! Return the class. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> template size_t DecisionTree::Classify(const VecType& point) const { if (children.size() == 0) { // Return cached max of probabilities. return dimensionTypeOrMajorityClass; } return children[CalculateDirection(point)]->Classify(point); } //! Return class probabilities for a given point. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> template void DecisionTree::Classify(const VecType& point, size_t& prediction, arma::vec& probabilities) const { if (children.size() == 0) { prediction = dimensionTypeOrMajorityClass; probabilities = classProbabilities; return; } children[CalculateDirection(point)]->Classify(point, prediction, probabilities); } //! Return the class for a set of points. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> template void DecisionTree::Classify(const MatType& data, arma::Row& predictions) const { predictions.set_size(data.n_cols); if (children.size() == 0) { predictions.fill(dimensionTypeOrMajorityClass); return; } // Loop over each point. for (size_t i = 0; i < data.n_cols; ++i) predictions[i] = Classify(data.col(i)); } //! Return the class probabilities for a set of points. template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> template void DecisionTree::Classify(const MatType& data, arma::Row& predictions, arma::mat& probabilities) const { predictions.set_size(data.n_cols); if (children.size() == 0) { predictions.fill(dimensionTypeOrMajorityClass); probabilities = arma::repmat(classProbabilities, 1, data.n_cols); return; } // Otherwise we have to find the right size to set the predictions matrix to // be. DecisionTree* node = children[0]; while (node->NumChildren() != 0) node = &node->Child(0); probabilities.set_size(node->classProbabilities.n_elem, data.n_cols); for (size_t i = 0; i < data.n_cols; ++i) { arma::vec v = probabilities.unsafe_col(i); // Alias of column. Classify(data.col(i), predictions[i], v); } } //! Serialize the tree. 
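// (A usage sketch with hypothetical file and object names: a trained tree can
// be written and read back through mlpack's serialization helpers, e.g.
//
//     data::Save("tree.xml", "tree", tree);
//     data::Load("tree.xml", "tree", tree2);
//
// which end up calling the Serialize() method below.)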
template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> template void DecisionTree::Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // Clean memory if needed. if (Archive::is_loading::value) { for (size_t i = 0; i < children.size(); ++i) delete children[i]; children.clear(); } // Serialize the children first. size_t numChildren = children.size(); ar & CreateNVP(numChildren, "numChildren"); if (Archive::is_loading::value) { children.resize(numChildren, NULL); for (size_t i = 0; i < numChildren; ++i) children[i] = new DecisionTree(); } for (size_t i = 0; i < numChildren; ++i) { std::ostringstream name; name << "child" << i; ar & CreateNVP(*children[i], name.str()); } // Now serialize the rest of the object. ar & CreateNVP(splitDimension, "splitDimension"); ar & CreateNVP(dimensionTypeOrMajorityClass, "dimensionTypeOrMajorityClass"); ar & CreateNVP(classProbabilities, "classProbabilities"); } template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> template size_t DecisionTree::CalculateDirection(const VecType& point) const { if ((data::Datatype) dimensionTypeOrMajorityClass == data::Datatype::categorical) return CategoricalSplit::CalculateDirection(point[splitDimension], classProbabilities, *this); else return NumericSplit::CalculateDirection(point[splitDimension], classProbabilities, *this); } template class NumericSplitType, template class CategoricalSplitType, typename ElemType, bool NoRecursion> template void DecisionTree::CalculateClassProbabilities( const RowType& labels, const size_t numClasses) { classProbabilities.zeros(numClasses); for (size_t i = 0; i < labels.n_elem; ++i) classProbabilities[labels[i]]++; // Now normalize into probabilities. classProbabilities /= labels.n_elem; arma::uword maxIndex; classProbabilities.max(maxIndex); dimensionTypeOrMajorityClass = (size_t) maxIndex; } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/decision_tree/decision_tree_main.cpp000066400000000000000000000174331315013601400256320ustar00rootroot00000000000000/** * @file decision_tree_main.cpp * @author Ryan Curtin * * A command-line program to build a decision tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include "decision_tree.hpp" #include #include using namespace std; using namespace mlpack; using namespace mlpack::tree; PROGRAM_INFO("Decision tree", "Train and evaluate using a decision tree. Given a dataset containing " "numeric features and associated labels for each point in the dataset, this" " program can train a decision tree on that data." "\n\n" "The training file and associated labels are specified with the " "--training_file and --labels_file options, respectively. The labels " "should be in the range [0, num_classes - 1]." "\n\n" "When a model is trained, it may be saved to file with the " "--output_model_file (-M) option. A model may be loaded from file for " "predictions with the --input_model_file (-m) option. The " "--input_model_file option may not be specified when the --training_file " "option is specified. The --minimum_leaf_size (-n) parameter specifies " "the minimum number of training points that must fall into each leaf for " "it to be split. 
If --print_training_error (-e) is specified, the training"
    " error will be printed."
    "\n\n"
    "A file containing test data may be specified with the --test_file (-T) "
    "option, and if performance numbers are desired for that test set, labels "
    "may be specified with the --test_labels_file (-L) option. Predictions "
    "for each test point may be stored into the file specified by the "
    "--predictions_file (-p) option. Class probabilities for each prediction "
    "will be stored in the file specified by the --probabilities_file (-P) "
    "option.");

// Datasets.
PARAM_STRING_IN("training_file", "File containing training points.", "t", "");
PARAM_STRING_IN("labels_file", "File containing training labels.", "l", "");
PARAM_STRING_IN("test_file", "File containing test points.", "T", "");
PARAM_STRING_IN("test_labels_file", "File containing test labels, if accuracy "
    "calculation is desired.", "L", "");

// Training parameters.
PARAM_INT_IN("minimum_leaf_size", "Minimum number of points in a leaf.", "n",
    20);
PARAM_FLAG("print_training_error", "Print the training error.", "e");

// Output parameters.
PARAM_STRING_IN("probabilities_file", "File to save class probabilities to for"
    " each test point.", "P", "");
PARAM_STRING_IN("predictions_file", "File to save class predictions to for "
    "each test point.", "p", "");

/**
 * This is the class that we will serialize. It is a pretty simple wrapper
 * around DecisionTree<>. In order to support categoricals, it will need to
 * also hold and serialize a DatasetInfo.
 */
class DecisionTreeModel
{
 public:
  // The tree itself, left public for direct access by this program.
  DecisionTree<> tree;

  // Create the model.
  DecisionTreeModel() { /* Nothing to do. */ }

  // Serialize the model.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    ar & data::CreateNVP(tree, "tree");
  }
};

// Models.
PARAM_STRING_IN("input_model_file", "File to load pre-trained decision tree "
    "from, to be used with test points.", "m", "");
PARAM_STRING_IN("output_model_file", "File to save trained decision tree to.",
    "M", "");

int main(int argc, char** argv)
{
  CLI::ParseCommandLine(argc, argv);

  // Check parameters.
  if (CLI::HasParam("training_file") && CLI::HasParam("input_model_file"))
    Log::Fatal << "Cannot specify both --training_file and --input_model_file!"
        << endl;

  if (CLI::HasParam("training_file") && !CLI::HasParam("labels_file"))
    Log::Fatal << "Must specify --labels_file when --training_file is "
        << "specified!" << endl;

  if (CLI::HasParam("test_labels_file") && !CLI::HasParam("test_file"))
    Log::Warn << "--test_labels_file ignored because --test_file is not "
        << "passed." << endl;

  if (!CLI::HasParam("output_model_file") &&
      !CLI::HasParam("probabilities_file") &&
      !CLI::HasParam("predictions_file") &&
      !CLI::HasParam("test_labels_file"))
    Log::Warn << "None of --output_model_file, --probabilities_file, or "
        << "--predictions_file are given, and accuracy is not being calculated;"
        << " no output will be saved!" << endl;

  if (CLI::HasParam("print_training_error") && !CLI::HasParam("training_file"))
    Log::Warn << "--print_training_error ignored because --training_file is not"
        << " specified." << endl;

  if (!CLI::HasParam("test_file"))
  {
    if (CLI::HasParam("probabilities_file"))
      Log::Warn << "--probabilities_file ignored because --test_file is not "
          << "specified." << endl;
    if (CLI::HasParam("predictions_file"))
      Log::Warn << "--predictions_file ignored because --test_file is not "
          << "specified." << endl;
  }

  // Load the model or build the tree.
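  // (For reference, a typical pair of invocations of this program, with
  // hypothetical file names; the binary name assumes mlpack's usual
  // mlpack_<name> CLI convention:
  //
  //     mlpack_decision_tree -t train.csv -l labels.csv -M tree.xml -e
  //     mlpack_decision_tree -m tree.xml -T test.csv -p predictions.csv
  //
  // The code below first builds or loads such a model.)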
  DecisionTreeModel model;
  if (CLI::HasParam("training_file"))
  {
    arma::mat dataset;
    data::Load(CLI::GetParam<string>("training_file"), dataset, true);

    arma::Mat<size_t> labels;
    data::Load(CLI::GetParam<string>("labels_file"), labels, true);

    // Calculate number of classes.
    const size_t numClasses = arma::max(arma::max(labels)) + 1;

    // Now build the tree.
    const size_t minLeafSize = (size_t) CLI::GetParam<int>("minimum_leaf_size");
    model.tree = DecisionTree<>(dataset, labels.row(0), numClasses,
        minLeafSize);

    // Do we need to print training error?
    if (CLI::HasParam("print_training_error"))
    {
      arma::Row<size_t> predictions;
      arma::mat probabilities;

      model.tree.Classify(dataset, predictions, probabilities);

      size_t correct = 0;
      for (size_t i = 0; i < dataset.n_cols; ++i)
        if (predictions[i] == labels[i])
          ++correct;

      // Print number of correct points.
      Log::Info << double(correct) / double(dataset.n_cols) * 100 << "% "
          << "correct on training set (" << correct << " / " << dataset.n_cols
          << ")." << endl;
    }
  }
  else
  {
    data::Load(CLI::GetParam<string>("input_model_file"), "model", model, true);
  }

  // Do we need to get predictions?
  if (CLI::HasParam("test_file"))
  {
    arma::mat testPoints;
    data::Load(CLI::GetParam<string>("test_file"), testPoints, true);

    arma::Row<size_t> predictions;
    arma::mat probabilities;

    model.tree.Classify(testPoints, predictions, probabilities);

    // Do we need to calculate accuracy?
    if (CLI::HasParam("test_labels_file"))
    {
      arma::Mat<size_t> testLabels;
      data::Load(CLI::GetParam<string>("test_labels_file"), testLabels, true);

      size_t correct = 0;
      for (size_t i = 0; i < testPoints.n_cols; ++i)
        if (predictions[i] == testLabels[i])
          ++correct;

      // Print number of correct points.
      Log::Info << double(correct) / double(testPoints.n_cols) * 100 << "% "
          << "correct on test set (" << correct << " / " << testPoints.n_cols
          << ")." << endl;
    }

    // Do we need to save outputs?
    if (CLI::HasParam("predictions_file"))
      data::Save(CLI::GetParam<string>("predictions_file"), predictions);
    if (CLI::HasParam("probabilities_file"))
      data::Save(CLI::GetParam<string>("probabilities_file"), probabilities);
  }

  // Do we need to save the model?
  if (CLI::HasParam("output_model_file"))
    data::Save(CLI::GetParam<string>("output_model_file"), "model", model);
}
mlpack-2.2.5/src/mlpack/methods/decision_tree/gini_gain.hpp000066400000000000000000000046771315013601400237470ustar00rootroot00000000000000/**
 * @file gini_gain.hpp
 * @author Ryan Curtin
 *
 * The GiniGain class, which is a fitness function (FitnessFunction) for
 * decision trees.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_DECISION_TREE_GINI_GAIN_HPP
#define MLPACK_METHODS_DECISION_TREE_GINI_GAIN_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace tree {

/**
 * The Gini gain, a measure of set purity usable as a fitness function
 * (FitnessFunction) for decision trees. This is the exact same thing as the
 * well-known Gini impurity, but negated---since the decision tree will be
 * trying to maximize gain (and the Gini impurity would need to be minimized).
 */
class GiniGain
{
 public:
  /**
   * Evaluate the Gini impurity on the given set of labels. RowType should be
   * an Armadillo vector that holds size_t objects.
   *
   * @param labels Set of labels to evaluate Gini impurity on.
   * @param numClasses Number of classes in the dataset.
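   *
   * A worked example, not from the original source: for labels {0, 0, 1, 1}
   * and numClasses = 2, the class fractions are f = {0.5, 0.5}, so the
   * impurity is 0.5 * 0.5 + 0.5 * 0.5 = 0.5 and Evaluate() returns -0.5.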
*/ template static double Evaluate(const RowType& labels, const size_t numClasses) { // Corner case: if there are no elements, the impurity is zero. if (labels.n_elem == 0) return 0.0; arma::Col counts(numClasses); counts.zeros(); for (size_t i = 0; i < labels.n_elem; ++i) counts[labels[i]]++; // Calculate the Gini impurity of the un-split node. double impurity = 0.0; for (size_t i = 0; i < numClasses; ++i) { const double f = ((double) counts[i] / (double) labels.n_elem); impurity += f * (1.0 - f); } return -impurity; } /** * Return the range of the Gini impurity for the given number of classes. * (That is, the difference between the maximum possible value and the minimum * possible value.) * * @param numClasses Number of classes in the dataset. */ static double Range(const size_t numClasses) { // The best possible case is that only one class exists, which gives a Gini // impurity of 0. The worst possible case is that the classes are evenly // distributed, which gives n * (1/n * (1 - 1/n)) = 1 - 1/n. return 1.0 - (1.0 / double(numClasses)); } }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/decision_tree/information_gain.hpp000066400000000000000000000042651315013601400253410ustar00rootroot00000000000000/** * @file information_gain.hpp * @author Ryan Curtin * * An implementation of information gain, which can be used in place of Gini * gain. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DECISION_TREE_INFORMATION_GAIN_HPP #define MLPACK_METHODS_DECISION_TREE_INFORMATION_GAIN_HPP #include namespace mlpack { namespace tree { /** * The standard information gain criterion, used for calculating gain in * decision trees. */ class InformationGain { public: /** * Given a set of labels, calculate the information gain of those labels. * * @param labels Labels of the dataset. * @param numClasses Number of classes in the dataset. */ static double Evaluate(const arma::Row& labels, const size_t numClasses) { // Edge case: if there are no elements, the gain is zero. if (labels.n_elem == 0) return 0.0; // Count the number of elements in each class. arma::Col counts(numClasses); counts.zeros(); for (size_t i = 0; i < labels.n_elem; ++i) counts[labels[i]]++; // Calculate the information gain. double gain = 0.0; for (size_t i = 0; i < numClasses; ++i) { const double f = ((double) counts[i] / (double) labels.n_elem); if (f > 0.0) gain += f * std::log2(f); } return gain; } /** * Return the range of the information gain for the given number of classes. * (That is, the difference between the maximum possible value and the minimum * possible value.) * * @param numClasses Number of classes in the dataset. */ static double Range(const size_t numClasses) { // The best possible case gives an information gain of 0. The worst // possible case is even distribution, which gives n * (1/n * log2(1/n)) = // log2(1/n) = -log2(n). So, the range is log2(n). 
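    // For instance, numClasses = 4 gives a range of log2(4) = 2 (a worked
    // number, not from the original source).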
return std::log2(numClasses); } }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/det/000077500000000000000000000000001315013601400172365ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/det/CMakeLists.txt000066400000000000000000000011031315013601400217710ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into the output library # Do not include test programs here set(SOURCES # the DET class dtree.hpp dtree.cpp # the util file dt_utils.hpp dt_utils.cpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # append sources (with directory name) to list of all mlpack sources (used at the parent scope) set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(det) mlpack-2.2.5/src/mlpack/methods/det/det_main.cpp000066400000000000000000000170361315013601400215310ustar00rootroot00000000000000/** * @file det_main.cpp * @author Parikshit Ram (pram@cc.gatech.edu) * * This file runs density estimation trees. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include "dt_utils.hpp" #include #include using namespace mlpack; using namespace mlpack::det; using namespace std; PROGRAM_INFO("Density Estimation With Density Estimation Trees", "This program performs a number of functions related to Density Estimation " "Trees. The optimal Density Estimation Tree (DET) can be trained on a set " "of data (specified by --training_file or -t) using cross-validation (with " "number of folds specified by --folds). This trained density estimation " "tree may then be saved to a model file with the --output_model_file (-M) " "option." "\n\n" "The variable importances of each dimension may be saved with the " "--vi_file (-i) option, and the density estimates on each training point " "may be saved to the file specified with the --training_set_estimates_file " "(-e) option." "\n\n" "This program also can provide density estimates for a set of test points, " "specified in the --test_file (-T) file. The density estimation tree used " "for this task will be the tree that was trained on the given training " "points, or a tree stored in the file given with the --input_model_file " "(-m) parameter. The density estimates for the test points may be saved " "into the file specified with the --test_set_estimates_file (-E) option."); // Input data files. PARAM_STRING_IN("training_file", "The data set on which to build a density " "estimation tree.", "t", ""); // Input or output model. PARAM_STRING_IN("input_model_file", "File containing already trained density " "estimation tree.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained density estimation " "tree to.", "M"); // Output data files. 
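// (For reference, a typical training run, with hypothetical file names; the
// binary name follows the add_cli_executable(det) convention above:
//
//     mlpack_det -t data.csv -M det.xml -f 10 -l 5 -L 10
//
// which trains on data.csv with 10-fold cross-validation and saves the
// optimally pruned tree to det.xml.)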
PARAM_STRING_IN("test_file", "A set of test points to estimate the density of.", "T", ""); PARAM_STRING_OUT("training_set_estimates_file", "The file in which to output " "the density estimates on the training set from the final optimally pruned " "tree.", "e"); PARAM_STRING_OUT("test_set_estimates_file", "The file in which to output the " "estimates on the test set from the final optimally pruned tree.", "E"); PARAM_STRING_OUT("vi_file", "The file to output the variable importance values " "for each feature.", "i"); // Parameters for the training algorithm. PARAM_INT_IN("folds", "The number of folds of cross-validation to perform for the " "estimation (0 is LOOCV)", "f", 10); PARAM_INT_IN("min_leaf_size", "The minimum size of a leaf in the unpruned, fully " "grown DET.", "l", 5); PARAM_INT_IN("max_leaf_size", "The maximum size of a leaf in the unpruned, fully " "grown DET.", "L", 10); /* PARAM_FLAG("volume_regularization", "This flag gives the used the option to use" "a form of regularization similar to the usual alpha-pruning in decision " "tree. But instead of regularizing on the number of leaves, you regularize " "on the sum of the inverse of the volume of the leaves (meaning you " "penalize low volume leaves.", "R"); */ int main(int argc, char *argv[]) { CLI::ParseCommandLine(argc, argv); // Validate input parameters. if (CLI::HasParam("training_file") && CLI::HasParam("input_model_file")) Log::Fatal << "Only one of --training_file (-t) or --input_model_file (-m) " << "may be specified!" << endl; if (!CLI::HasParam("training_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "Neither --training_file (-t) nor --input_model_file (-m) " << "are specified!" << endl; if (!CLI::HasParam("training_file")) { if (CLI::HasParam("training_set_estimates_file")) Log::Warn << "--training_set_estimates_file (-e) ignored because " << "--training_file (-t) is not specified." << endl; if (CLI::HasParam("folds")) Log::Warn << "--folds (-f) ignored because --training_file (-t) is not " << "specified." << endl; if (CLI::HasParam("min_leaf_size")) Log::Warn << "--min_leaf_size (-l) ignored because --training_file (-t) " << "is not specified." << endl; if (CLI::HasParam("max_leaf_size")) Log::Warn << "--max_leaf_size (-L) ignored because --training_file (-t) " << "is not specified." << endl; } if (!CLI::HasParam("test_file") && CLI::HasParam("test_set_estimates_file")) Log::Warn << "--test_set_estimates_file (-E) ignored because --test_file " << "(-T) is not specified." << endl; // Are we training a DET or loading from file? DTree* tree; if (CLI::HasParam("training_file")) { const string trainSetFile = CLI::GetParam("training_file"); arma::mat trainingData; data::Load(trainSetFile, trainingData, true); // Cross-validation here. size_t folds = CLI::GetParam("folds"); if (folds == 0) { folds = trainingData.n_cols; Log::Info << "Performing leave-one-out cross validation." << endl; } else { Log::Info << "Performing " << folds << "-fold cross validation." << endl; } const bool regularization = false; // const bool regularization = CLI::HasParam("volume_regularization"); const int maxLeafSize = CLI::GetParam("max_leaf_size"); const int minLeafSize = CLI::GetParam("min_leaf_size"); // Obtain the optimal tree. Timer::Start("det_training"); tree = Trainer(trainingData, folds, regularization, maxLeafSize, minLeafSize, ""); Timer::Stop("det_training"); // Compute training set estimates, if desired. 
if (CLI::HasParam("training_set_estimates_file")) { // Compute density estimates for each point in the training set. arma::rowvec trainingDensities(trainingData.n_cols); Timer::Start("det_estimation_time"); for (size_t i = 0; i < trainingData.n_cols; i++) trainingDensities[i] = tree->ComputeValue(trainingData.unsafe_col(i)); Timer::Stop("det_estimation_time"); data::Save(CLI::GetParam("training_set_estimates_file"), trainingDensities); } } else { data::Load(CLI::GetParam("input_model_file"), "det_model", tree, true); } // Compute the density at the provided test points and output the density in // the given file. const string testFile = CLI::GetParam("test_file"); if (testFile != "") { arma::mat testData; data::Load(testFile, testData, true); // Compute test set densities. Timer::Start("det_test_set_estimation"); arma::rowvec testDensities(testData.n_cols); for (size_t i = 0; i < testData.n_cols; i++) testDensities[i] = tree->ComputeValue(testData.unsafe_col(i)); Timer::Stop("det_test_set_estimation"); if (CLI::GetParam("test_set_estimates_file") != "") data::Save(CLI::GetParam("test_set_estimates_file"), testDensities); } // Print variable importance. if (CLI::HasParam("vi_file")) PrintVariableImportance(tree, CLI::GetParam("vi_file")); // Save the model, if desired. if (CLI::HasParam("output_model_file")) data::Save(CLI::GetParam("output_model_file"), "det_model", tree, false); delete tree; } mlpack-2.2.5/src/mlpack/methods/det/dt_utils.cpp000066400000000000000000000241541315013601400215770ustar00rootroot00000000000000/** * @file dt_utils.cpp * @author Parikshit Ram (pram@cc.gatech.edu) * * This file implements functions to perform different tasks with the Density * Tree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "dt_utils.hpp" using namespace mlpack; using namespace det; void mlpack::det::PrintLeafMembership(DTree* dtree, const arma::mat& data, const arma::Mat& labels, const size_t numClasses, const std::string leafClassMembershipFile) { // Tag the leaves with numbers. int numLeaves = dtree->TagTree(); arma::Mat table(numLeaves, (numClasses + 1)); table.zeros(); for (size_t i = 0; i < data.n_cols; i++) { const arma::vec testPoint = data.unsafe_col(i); const int leafTag = dtree->FindBucket(testPoint); const size_t label = labels[i]; table(leafTag, label) += 1; } if (leafClassMembershipFile == "") { Log::Info << "Leaf membership; row represents leaf id, column represents " << "class id; value represents number of points in leaf in class." << std::endl << table; } else { // Create a stream for the file. std::ofstream outfile(leafClassMembershipFile.c_str()); if (outfile.good()) { outfile << table; Log::Info << "Leaf membership printed to '" << leafClassMembershipFile << "'." << std::endl; } else { Log::Warn << "Can't open '" << leafClassMembershipFile << "' to write " << "leaf membership to." << std::endl; } outfile.close(); } return; } void mlpack::det::PrintVariableImportance(const DTree* dtree, const std::string viFile) { arma::vec imps; dtree->ComputeVariableImportance(imps); double max = 0.0; for (size_t i = 0; i < imps.n_elem; ++i) if (imps[i] > max) max = imps[i]; Log::Info << "Maximum variable importance: " << max << "." 
<< std::endl; if (viFile == "") { Log::Info << "Variable importance: " << std::endl << imps.t() << std::endl; } else { std::ofstream outfile(viFile.c_str()); if (outfile.good()) { outfile << imps; Log::Info << "Variable importance printed to '" << viFile << "'." << std::endl; } else { Log::Warn << "Can't open '" << viFile << "' to write variable importance " << "to." << std::endl; } outfile.close(); } } // This function trains the optimal decision tree using the given number of // folds. DTree* mlpack::det::Trainer(arma::mat& dataset, const size_t folds, const bool useVolumeReg, const size_t maxLeafSize, const size_t minLeafSize, const std::string unprunedTreeOutput) { // Initialize the tree. DTree dtree(dataset); // Prepare to grow the tree... arma::Col oldFromNew(dataset.n_cols); for (size_t i = 0; i < oldFromNew.n_elem; i++) oldFromNew[i] = i; // Save the dataset since it would be modified while growing the tree. arma::mat newDataset(dataset); // Growing the tree double oldAlpha = 0.0; double alpha = dtree.Grow(newDataset, oldFromNew, useVolumeReg, maxLeafSize, minLeafSize); Log::Info << dtree.SubtreeLeaves() << " leaf nodes in the tree using full " << "dataset; minimum alpha: " << alpha << "." << std::endl; // Compute densities for the training points in the full tree, if we were // asked for this. if (unprunedTreeOutput != "") { std::ofstream outfile(unprunedTreeOutput.c_str()); if (outfile.good()) { for (size_t i = 0; i < dataset.n_cols; ++i) { arma::vec testPoint = dataset.unsafe_col(i); outfile << dtree.ComputeValue(testPoint) << std::endl; } } else { Log::Warn << "Can't open '" << unprunedTreeOutput << "' to write computed" << " densities to." << std::endl; } outfile.close(); } // Sequentially prune and save the alpha values and the values of c_t^2 * r_t. std::vector > prunedSequence; while (dtree.SubtreeLeaves() > 1) { std::pair treeSeq(oldAlpha, dtree.SubtreeLeavesLogNegError()); prunedSequence.push_back(treeSeq); oldAlpha = alpha; alpha = dtree.PruneAndUpdate(oldAlpha, dataset.n_cols, useVolumeReg); // Some sanity checks. It seems that on some datasets, the error does not // increase as the tree is pruned but instead stays the same---hence the // "<=" in the final assert. Log::Assert((alpha < std::numeric_limits::max()) || (dtree.SubtreeLeaves() == 1)); Log::Assert(alpha > oldAlpha); Log::Assert(dtree.SubtreeLeavesLogNegError() <= treeSeq.second); } std::pair treeSeq(oldAlpha, dtree.SubtreeLeavesLogNegError()); prunedSequence.push_back(treeSeq); Log::Info << prunedSequence.size() << " trees in the sequence; maximum alpha:" << " " << oldAlpha << "." << std::endl; arma::mat cvData(dataset); size_t testSize = dataset.n_cols / folds; arma::vec regularizationConstants(prunedSequence.size()); regularizationConstants.fill(0.0); Timer::Start("cross_validation"); // Go through each fold. On the Visual Studio compiler, we have to use // intmax_t because size_t is not yet supported by their OpenMP // implementation. #ifdef _WIN32 #pragma omp parallel for default(none) \ shared(testSize, cvData, prunedSequence, regularizationConstants, dataset) for (intmax_t fold = 0; fold < (intmax_t) folds; fold++) #else #pragma omp parallel for default(none) \ shared(testSize, cvData, prunedSequence, regularizationConstants, dataset) for (size_t fold = 0; fold < folds; fold++) #endif { // Break up data into train and test sets. 
size_t start = fold * testSize; size_t end = std::min((size_t) (fold + 1) * testSize, (size_t) cvData.n_cols); arma::mat test = cvData.cols(start, end - 1); arma::mat train(cvData.n_rows, cvData.n_cols - test.n_cols); if (start == 0 && end < cvData.n_cols) { train.cols(0, train.n_cols - 1) = cvData.cols(end, cvData.n_cols - 1); } else if (start > 0 && end == cvData.n_cols) { train.cols(0, train.n_cols - 1) = cvData.cols(0, start - 1); } else { train.cols(0, start - 1) = cvData.cols(0, start - 1); train.cols(start, train.n_cols - 1) = cvData.cols(end, cvData.n_cols - 1); } // Initialize the tree. DTree cvDTree(train); // Getting ready to grow the tree... arma::Col cvOldFromNew(train.n_cols); for (size_t i = 0; i < cvOldFromNew.n_elem; i++) cvOldFromNew[i] = i; // Grow the tree. cvDTree.Grow(train, cvOldFromNew, useVolumeReg, maxLeafSize, minLeafSize); // Sequentially prune with all the values of available alphas and adding // values for test values. Don't enter this loop if there are less than two // trees in the pruned sequence. arma::vec cvRegularizationConstants(prunedSequence.size()); cvRegularizationConstants.fill(0.0); for (size_t i = 0; i < ((prunedSequence.size() < 2) ? 0 : prunedSequence.size() - 2); ++i) { // Compute test values for this state of the tree. double cvVal = 0.0; for (size_t j = 0; j < test.n_cols; j++) { arma::vec testPoint = test.unsafe_col(j); cvVal += cvDTree.ComputeValue(testPoint); } // Update the cv regularization constant. cvRegularizationConstants[i] += 2.0 * cvVal / (double) dataset.n_cols; // Determine the new alpha value and prune accordingly. double cvOldAlpha = 0.5 * (prunedSequence[i + 1].first + prunedSequence[i + 2].first); cvDTree.PruneAndUpdate(cvOldAlpha, train.n_cols, useVolumeReg); } // Compute test values for this state of the tree. double cvVal = 0.0; for (size_t i = 0; i < test.n_cols; ++i) { arma::vec testPoint = test.unsafe_col(i); cvVal += cvDTree.ComputeValue(testPoint); } if (prunedSequence.size() > 2) cvRegularizationConstants[prunedSequence.size() - 2] += 2.0 * cvVal / (double) dataset.n_cols; #pragma omp critical regularizationConstants += cvRegularizationConstants; } Timer::Stop("cross_validation"); double optimalAlpha = -1.0; long double cvBestError = -std::numeric_limits::max(); for (size_t i = 0; i < prunedSequence.size() - 1; ++i) { // We can no longer work in the log-space for this because we have no // guarantee the quantity will be positive. long double thisError = -std::exp((long double) prunedSequence[i].second) + (long double) regularizationConstants[i]; if (thisError > cvBestError) { cvBestError = thisError; optimalAlpha = prunedSequence[i].first; } } Log::Info << "Optimal alpha: " << optimalAlpha << "." << std::endl; // Initialize the tree. DTree* dtreeOpt = new DTree(dataset); // Getting ready to grow the tree... for (size_t i = 0; i < oldFromNew.n_elem; i++) oldFromNew[i] = i; // Save the dataset since it would be modified while growing the tree. newDataset = dataset; // Grow the tree. oldAlpha = -DBL_MAX; alpha = dtreeOpt->Grow(newDataset, oldFromNew, useVolumeReg, maxLeafSize, minLeafSize); // Prune with optimal alpha. while ((oldAlpha < optimalAlpha) && (dtreeOpt->SubtreeLeaves() > 1)) { oldAlpha = alpha; alpha = dtreeOpt->PruneAndUpdate(oldAlpha, newDataset.n_cols, useVolumeReg); // Some sanity checks. 
Log::Assert((alpha < std::numeric_limits::max()) || (dtreeOpt->SubtreeLeaves() == 1)); Log::Assert(alpha > oldAlpha); } Log::Info << dtreeOpt->SubtreeLeaves() << " leaf nodes in the optimally " << "pruned tree; optimal alpha: " << oldAlpha << "." << std::endl; return dtreeOpt; } mlpack-2.2.5/src/mlpack/methods/det/dt_utils.hpp000066400000000000000000000052541315013601400216040ustar00rootroot00000000000000/** * @file dt_utils.hpp * @author Parikshit Ram (pram@cc.gatech.edu) * * This file implements functions to perform different tasks with the Density * Tree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DET_DT_UTILS_HPP #define MLPACK_METHODS_DET_DT_UTILS_HPP #include #include "dtree.hpp" namespace mlpack { namespace det { /** * Print the membership of leaves of a density estimation tree given the labels * and number of classes. Optionally, pass the name of a file to print this * information to (otherwise stdout is used). * * @param dtree Tree to print membership of. * @param data Dataset tree is built upon. * @param labels Class labels of dataset. * @param numClasses Number of classes in dataset. * @param leafClassMembershipFile Name of file to print to (optional). */ void PrintLeafMembership(DTree* dtree, const arma::mat& data, const arma::Mat& labels, const size_t numClasses, const std::string leafClassMembershipFile = ""); /** * Print the variable importance of each dimension of a density estimation tree. * Optionally, pass the name of a file to print this information to (otherwise * stdout is used). * * @param dtree Density tree to use. * @param viFile Name of file to print to (optional). */ void PrintVariableImportance(const DTree* dtree, const std::string viFile = ""); /** * Train the optimal decision tree using cross-validation with the given number * of folds. Optionally, give a filename to print the unpruned tree to. This * initializes a tree on the heap, so you are responsible for deleting it. * * @param dataset Dataset for the tree to use. * @param folds Number of folds to use for cross-validation. * @param useVolumeReg If true, use volume regularization. * @param maxLeafSize Maximum number of points allowed in a leaf. * @param minLeafSize Minimum number of points allowed in a leaf. * @param unprunedTreeOutput Filename to print unpruned tree to (optional). */ DTree* Trainer(arma::mat& dataset, const size_t folds, const bool useVolumeReg = false, const size_t maxLeafSize = 10, const size_t minLeafSize = 5, const std::string unprunedTreeOutput = ""); } // namespace det } // namespace mlpack #endif // MLPACK_METHODS_DET_DT_UTILS_HPP mlpack-2.2.5/src/mlpack/methods/det/dtree.cpp000066400000000000000000000510761315013601400210560ustar00rootroot00000000000000 /** * @file dtree.cpp * @author Parikshit Ram (pram@cc.gatech.edu) * * Implementations of some declared functions in * the Density Estimation Tree class. * * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include "dtree.hpp" #include using namespace mlpack; using namespace det; DTree::DTree() : start(0), end(0), splitDim(size_t(-1)), splitValue(DBL_MAX), logNegError(-DBL_MAX), subtreeLeavesLogNegError(-DBL_MAX), subtreeLeaves(0), root(true), ratio(1.0), logVolume(-DBL_MAX), bucketTag(-1), alphaUpper(0.0), left(NULL), right(NULL) { /* Nothing to do. */ } // Root node initializers DTree::DTree(const arma::vec& maxVals, const arma::vec& minVals, const size_t totalPoints) : start(0), end(totalPoints), maxVals(maxVals), minVals(minVals), splitDim(size_t(-1)), splitValue(DBL_MAX), logNegError(LogNegativeError(totalPoints)), subtreeLeavesLogNegError(-DBL_MAX), subtreeLeaves(0), root(true), ratio(1.0), logVolume(-DBL_MAX), bucketTag(-1), alphaUpper(0.0), left(NULL), right(NULL) { /* Nothing to do. */ } DTree::DTree(arma::mat& data) : start(0), end(data.n_cols), splitDim(size_t(-1)), splitValue(DBL_MAX), subtreeLeavesLogNegError(-DBL_MAX), subtreeLeaves(0), root(true), ratio(1.0), logVolume(-DBL_MAX), bucketTag(-1), alphaUpper(0.0), left(NULL), right(NULL) { // Initialize to first column; values will be overwritten if necessary. maxVals = data.col(0); minVals = data.col(0); // Loop over data to extract maximum and minimum values in each dimension. for (size_t i = 1; i < data.n_cols; ++i) { for (size_t j = 0; j < data.n_rows; ++j) { if (data(j, i) > maxVals[j]) maxVals[j] = data(j, i); if (data(j, i) < minVals[j]) minVals[j] = data(j, i); } } logNegError = LogNegativeError(data.n_cols); } // Non-root node initializers DTree::DTree(const arma::vec& maxVals, const arma::vec& minVals, const size_t start, const size_t end, const double logNegError) : start(start), end(end), maxVals(maxVals), minVals(minVals), splitDim(size_t(-1)), splitValue(DBL_MAX), logNegError(logNegError), subtreeLeavesLogNegError(-DBL_MAX), subtreeLeaves(0), root(false), ratio(1.0), logVolume(-DBL_MAX), bucketTag(-1), alphaUpper(0.0), left(NULL), right(NULL) { /* Nothing to do. */ } DTree::DTree(const arma::vec& maxVals, const arma::vec& minVals, const size_t totalPoints, const size_t start, const size_t end) : start(start), end(end), maxVals(maxVals), minVals(minVals), splitDim(size_t(-1)), splitValue(DBL_MAX), logNegError(LogNegativeError(totalPoints)), subtreeLeavesLogNegError(-DBL_MAX), subtreeLeaves(0), root(false), ratio(1.0), logVolume(-DBL_MAX), bucketTag(-1), alphaUpper(0.0), left(NULL), right(NULL) { /* Nothing to do. */ } DTree::~DTree() { delete left; delete right; } // This function computes the log-l2-negative-error of a given node from the // formula R(t) = log(|t|^2 / (N^2 V_t)). double DTree::LogNegativeError(const size_t totalPoints) const { // log(-|t|^2 / (N^2 V_t)) = log(-1) + 2 log(|t|) - 2 log(N) - log(V_t). double err = 2 * std::log((double) (end - start)) - 2 * std::log((double) totalPoints); arma::vec valDiffs = maxVals - minVals; for (size_t i = 0; i < maxVals.n_elem; ++i) { // Ignore very small dimensions to prevent overflow. if (valDiffs[i] > 1e-50) err -= std::log(valDiffs[i]); } return err; } // This function finds the best split with respect to the L2-error, by trying // all possible splits. The dataset is the full data set but the start and // end are used to obtain the point in this node. bool DTree::FindSplit(const arma::mat& data, size_t& splitDim, double& splitValue, double& leftError, double& rightError, const size_t minLeafSize) const { // Ensure the dimensionality of the data is the same as the dimensionality of // the bounding rectangle. 
assert(data.n_rows == maxVals.n_elem); assert(data.n_rows == minVals.n_elem); const size_t points = end - start; double minError = logNegError; bool splitFound = false; // Loop through each dimension. for (size_t dim = 0; dim < maxVals.n_elem; dim++) { // Have to deal with REAL, INTEGER, NOMINAL data differently, so we have to // think of how to do that... const double min = minVals[dim]; const double max = maxVals[dim]; // If there is nothing to split in this dimension, move on. if (max - min == 0.0) continue; // Skip to next dimension. // Initializing all the stuff for this dimension. bool dimSplitFound = false; // Take an error estimate for this dimension. double minDimError = std::pow(points, 2.0) / (max - min); double dimLeftError = 0.0; // For -Wuninitialized. These variables will double dimRightError = 0.0; // always be set to something else before use. double dimSplitValue = 0.0; // Find the log volume of all the other dimensions. double volumeWithoutDim = logVolume - std::log(max - min); // Get the values for the dimension. arma::rowvec dimVec = data.row(dim).subvec(start, end - 1); // Sort the values in ascending order. dimVec = arma::sort(dimVec); // Find the best split for this dimension. We need to figure out why // there are spikes if this minLeafSize is enforced here... for (size_t i = minLeafSize - 1; i < dimVec.n_elem - minLeafSize; ++i) { // This makes sense for real continuous data. This kinda corrupts the // data and estimation if the data is ordinal. const double split = (dimVec[i] + dimVec[i + 1]) / 2.0; if (split == dimVec[i]) continue; // We can't split here (two points are the same). // Another way of picking split is using this: // split = leftsplit; if ((split - min > 0.0) && (max - split > 0.0)) { // Ensure that the right node will have at least the minimum number of // points. Log::Assert((points - i - 1) >= minLeafSize); // Now we have to see if the error will be reduced. Simple manipulation // of the error function gives us the condition we must satisfy: // |t_l|^2 / V_l + |t_r|^2 / V_r >= |t|^2 / (V_l + V_r) // and because the volume is only dependent on the dimension we are // splitting, we can assume V_l is just the range of the left and V_r is // just the range of the right. double negLeftError = std::pow(i + 1, 2.0) / (split - min); double negRightError = std::pow(points - i - 1, 2.0) / (max - split); // If this is better, take it. if ((negLeftError + negRightError) >= minDimError) { minDimError = negLeftError + negRightError; dimLeftError = negLeftError; dimRightError = negRightError; dimSplitValue = split; dimSplitFound = true; } } } double actualMinDimError = std::log(minDimError) - 2 * std::log((double) data.n_cols) - volumeWithoutDim; if ((actualMinDimError > minError) && dimSplitFound) { // Calculate actual error (in logspace) by adding terms back to our // estimate. minError = actualMinDimError; splitDim = dim; splitValue = dimSplitValue; leftError = std::log(dimLeftError) - 2 * std::log((double) data.n_cols) - volumeWithoutDim; rightError = std::log(dimRightError) - 2 * std::log((double) data.n_cols) - volumeWithoutDim; splitFound = true; } // end if better split found in this dimension. } return splitFound; } size_t DTree::SplitData(arma::mat& data, const size_t splitDim, const double splitValue, arma::Col& oldFromNew) const { // Swap all columns such that any columns with value in dimension splitDim // less than or equal to splitValue are on the left side, and all others are // on the right side. 
A similar sort to this is also performed in // BinarySpaceTree construction (its comments are more detailed). size_t left = start; size_t right = end - 1; for (;;) { while (data(splitDim, left) <= splitValue) ++left; while (data(splitDim, right) > splitValue) --right; if (left > right) break; data.swap_cols(left, right); // Store the mapping from old to new. const size_t tmp = oldFromNew[left]; oldFromNew[left] = oldFromNew[right]; oldFromNew[right] = tmp; } // This now refers to the first index of the "right" side. return left; } // Greedily expand the tree double DTree::Grow(arma::mat& data, arma::Col& oldFromNew, const bool useVolReg, const size_t maxLeafSize, const size_t minLeafSize) { Log::Assert(data.n_rows == maxVals.n_elem); Log::Assert(data.n_rows == minVals.n_elem); double leftG, rightG; // Compute points ratio. ratio = (double) (end - start) / (double) oldFromNew.n_elem; // Compute the log of the volume of the node. logVolume = 0; for (size_t i = 0; i < maxVals.n_elem; ++i) if (maxVals[i] - minVals[i] > 0.0) logVolume += std::log(maxVals[i] - minVals[i]); // Check if node is large enough to split. if ((size_t) (end - start) > maxLeafSize) { // Find the split. size_t dim; double splitValueTmp; double leftError, rightError; if (FindSplit(data, dim, splitValueTmp, leftError, rightError, minLeafSize)) { // Move the data around for the children to have points in a node lie // contiguously (to increase efficiency during the training). const size_t splitIndex = SplitData(data, dim, splitValueTmp, oldFromNew); // Make max and min vals for the children. arma::vec maxValsL(maxVals); arma::vec maxValsR(maxVals); arma::vec minValsL(minVals); arma::vec minValsR(minVals); maxValsL[dim] = splitValueTmp; minValsR[dim] = splitValueTmp; // Store split dim and split val in the node. splitValue = splitValueTmp; splitDim = dim; // Recursively grow the children. left = new DTree(maxValsL, minValsL, start, splitIndex, leftError); right = new DTree(maxValsR, minValsR, splitIndex, end, rightError); leftG = left->Grow(data, oldFromNew, useVolReg, maxLeafSize, minLeafSize); rightG = right->Grow(data, oldFromNew, useVolReg, maxLeafSize, minLeafSize); // Store values of R(T~) and |T~|. subtreeLeaves = left->SubtreeLeaves() + right->SubtreeLeaves(); // Find the log negative error of the subtree leaves. This is kind of an // odd one because we don't want to represent the error in non-log-space, // but we have to calculate log(E_l + E_r). So we multiply E_l and E_r by // V_t (remember E_l has an inverse relationship to the volume of the // nodes) and then subtract log(V_t) at the end of the whole expression. // As a result we do leave log-space, but the largest quantity we // represent is on the order of (V_t / V_i) where V_i is the smallest leaf // node below this node, which depends heavily on the depth of the tree. subtreeLeavesLogNegError = std::log( std::exp(logVolume + left->SubtreeLeavesLogNegError()) + std::exp(logVolume + right->SubtreeLeavesLogNegError())) - logVolume; } else { // No split found so make a leaf out of it. subtreeLeaves = 1; subtreeLeavesLogNegError = logNegError; } } else { // We can make this a leaf node. assert((size_t) (end - start) >= minLeafSize); subtreeLeaves = 1; subtreeLeavesLogNegError = logNegError; } // If this is a leaf, do not compute g_k(t); otherwise compute, store, and // propagate min(g_k(t_L), g_k(t_R), g_k(t)), unless t_L and/or t_R are // leaves. 
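// In CART-style pruning terms, g(t) = (R(t) - R(T_t)) / (|T_t| - 1), where
// R(T_t) is the total error of the subtree's leaves and |T_t| its leaf count;
// alphaUpper stores (the log of) the numerator of this ratio under this
// file's sign conventions, so the division by (|T_t| - 1) below is performed
// as a log-space subtraction.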
if (subtreeLeaves == 1) { return std::numeric_limits::max(); } else { const double range = maxVals[splitDim] - minVals[splitDim]; const double leftRatio = (splitValue - minVals[splitDim]) / range; const double rightRatio = (maxVals[splitDim] - splitValue) / range; const size_t leftPow = std::pow((double) (left->End() - left->Start()), 2); const size_t rightPow = std::pow((double) (right->End() - right->Start()), 2); const size_t thisPow = std::pow((double) (end - start), 2); double tmpAlphaSum = leftPow / leftRatio + rightPow / rightRatio - thisPow; if (left->SubtreeLeaves() > 1) { const double exponent = 2 * std::log((double) data.n_cols) + logVolume + left->AlphaUpper(); // Whether or not this will overflow is highly dependent on the depth of // the tree. tmpAlphaSum += std::exp(exponent); } if (right->SubtreeLeaves() > 1) { const double exponent = 2 * std::log((double) data.n_cols) + logVolume + right->AlphaUpper(); tmpAlphaSum += std::exp(exponent); } alphaUpper = std::log(tmpAlphaSum) - 2 * std::log((double) data.n_cols) - logVolume; double gT; if (useVolReg) { // This is wrong for now! gT = alphaUpper;// / (subtreeLeavesVTInv - vTInv); } else { gT = alphaUpper - std::log((double) (subtreeLeaves - 1)); } return std::min(gT, std::min(leftG, rightG)); } // We need to compute (c_t^2) * r_t for all subtree leaves; this is equal to // n_t ^ 2 / r_t * n ^ 2 = -error. Therefore the value we need is actually // -1.0 * subtreeLeavesError. } double DTree::PruneAndUpdate(const double oldAlpha, const size_t points, const bool useVolReg) { // Compute gT. if (subtreeLeaves == 1) // If we are a leaf... { return std::numeric_limits::max(); } else { // Compute gT value for node t. volatile double gT; if (useVolReg) gT = alphaUpper;// - std::log(subtreeLeavesVTInv - vTInv); else gT = alphaUpper - std::log((double) (subtreeLeaves - 1)); if (gT > oldAlpha) { // Go down the tree and update accordingly. Traverse the children. double leftG = left->PruneAndUpdate(oldAlpha, points, useVolReg); double rightG = right->PruneAndUpdate(oldAlpha, points, useVolReg); // Update values. subtreeLeaves = left->SubtreeLeaves() + right->SubtreeLeaves(); // Find the log negative error of the subtree leaves. This is kind of an // odd one because we don't want to represent the error in non-log-space, // but we have to calculate log(E_l + E_r). So we multiply E_l and E_r by // V_t (remember E_l has an inverse relationship to the volume of the // nodes) and then subtract log(V_t) at the end of the whole expression. // As a result we do leave log-space, but the largest quantity we // represent is on the order of (V_t / V_i) where V_i is the smallest leaf // node below this node, which depends heavily on the depth of the tree. subtreeLeavesLogNegError = std::log( std::exp(logVolume + left->SubtreeLeavesLogNegError()) + std::exp(logVolume + right->SubtreeLeavesLogNegError())) - logVolume; // Recalculate upper alpha. 
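// Both the subtree-error update above and the alpha recalculation below
// evaluate expressions of the form log(exp(a) + exp(b)), which can overflow
// for deep trees.  A numerically safer formulation -- a sketch of an
// alternative, not what this implementation does -- is the usual log-sum-exp
// trick:
//
// @code
// #include <algorithm>
// #include <cmath>
//
// // Computes log(exp(a) + exp(b)) without exponentiating large values.
// inline double LogSumExp(const double a, const double b)
// {
//   const double m = std::max(a, b);
//   return m + std::log(std::exp(a - m) + std::exp(b - m));
// }
// @endcode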
const double range = maxVals[splitDim] - minVals[splitDim]; const double leftRatio = (splitValue - minVals[splitDim]) / range; const double rightRatio = (maxVals[splitDim] - splitValue) / range; const size_t leftPow = std::pow((double) (left->End() - left->Start()), 2); const size_t rightPow = std::pow((double) (right->End() - right->Start()), 2); const size_t thisPow = std::pow((double) (end - start), 2); double tmpAlphaSum = leftPow / leftRatio + rightPow / rightRatio - thisPow; if (left->SubtreeLeaves() > 1) { const double exponent = 2 * std::log((double) points) + logVolume + left->AlphaUpper(); // Whether or not this will overflow is highly dependent on the depth of // the tree. tmpAlphaSum += std::exp(exponent); } if (right->SubtreeLeaves() > 1) { const double exponent = 2 * std::log((double) points) + logVolume + right->AlphaUpper(); tmpAlphaSum += std::exp(exponent); } alphaUpper = std::log(tmpAlphaSum) - 2 * std::log((double) points) - logVolume; // Update gT value. if (useVolReg) { // This is incorrect. gT = alphaUpper; // / (subtreeLeavesVTInv - vTInv); } else { gT = alphaUpper - std::log((double) (subtreeLeaves - 1)); } Log::Assert(gT < std::numeric_limits::max()); return std::min((double) gT, std::min(leftG, rightG)); } else { // Prune this subtree. // First, make this node a leaf node. subtreeLeaves = 1; subtreeLeavesLogNegError = logNegError; delete left; delete right; left = NULL; right = NULL; // Pass information upward. return std::numeric_limits::max(); } } } // Check whether a given point is within the bounding box of this node (check // generally done at the root, so its the bounding box of the data). // // Future improvement: Open up the range with epsilons on both sides where // epsilon depends on the density near the boundary. bool DTree::WithinRange(const arma::vec& query) const { for (size_t i = 0; i < query.n_elem; ++i) if ((query[i] < minVals[i]) || (query[i] > maxVals[i])) return false; return true; } double DTree::ComputeValue(const arma::vec& query) const { Log::Assert(query.n_elem == maxVals.n_elem); if (root == 1) // If we are the root... { // Check if the query is within range. if (!WithinRange(query)) return 0.0; } if (subtreeLeaves == 1) // If we are a leaf... { return std::exp(std::log(ratio) - logVolume); } else { if (query[splitDim] <= splitValue) { // If left subtree, go to left child. return left->ComputeValue(query); } else // If right subtree, go to right child { return right->ComputeValue(query); } } return 0.0; } void DTree::WriteTree(FILE *fp, const size_t level) const { if (subtreeLeaves > 1) { fprintf(fp, "\n"); for (size_t i = 0; i < level; ++i) fprintf(fp, "|\t"); fprintf(fp, "Var. %zu > %lg", splitDim, splitValue); right->WriteTree(fp, level + 1); fprintf(fp, "\n"); for (size_t i = 0; i < level; ++i) fprintf(fp, "|\t"); fprintf(fp, "Var. %zu <= %lg ", splitDim, splitValue); left->WriteTree(fp, level); } else // If we are a leaf... { fprintf(fp, ": f(x)=%lg", std::exp(std::log(ratio) - logVolume)); if (bucketTag != -1) fprintf(fp, " BT:%d", bucketTag); } } // Index the buckets for possible usage later. int DTree::TagTree(const int tag) { if (subtreeLeaves == 1) { // Only label leaves. bucketTag = tag; return (tag + 1); } else { return right->TagTree(left->TagTree(tag)); } } int DTree::FindBucket(const arma::vec& query) const { Log::Assert(query.n_elem == maxVals.n_elem); if (subtreeLeaves == 1) // If we are a leaf... { return bucketTag; } else if (query[splitDim] <= splitValue) { // If left subtree, go to left child. 
return left->FindBucket(query); } else { // If right subtree, go to right child. return right->FindBucket(query); } } void DTree::ComputeVariableImportance(arma::vec& importances) const { // Clear and set to right size. importances.zeros(maxVals.n_elem); std::stack nodes; nodes.push(this); while(!nodes.empty()) { const DTree& curNode = *nodes.top(); nodes.pop(); if (curNode.subtreeLeaves == 1) continue; // Do nothing for leaves. // The way to do this entirely in log-space is (at this time) somewhat // unclear. So this risks overflow. importances[curNode.SplitDim()] += (-std::exp(curNode.LogNegError()) - (-std::exp(curNode.Left()->LogNegError()) + -std::exp(curNode.Right()->LogNegError()))); nodes.push(curNode.Left()); nodes.push(curNode.Right()); } } mlpack-2.2.5/src/mlpack/methods/det/dtree.hpp000066400000000000000000000257721315013601400210670ustar00rootroot00000000000000/** * @file dtree.hpp * @author Parikshit Ram (pram@cc.gatech.edu) * * Density Estimation Tree class * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_DET_DTREE_HPP #define MLPACK_METHODS_DET_DTREE_HPP #include namespace mlpack { namespace det /** Density Estimation Trees */ { /** * A density estimation tree is similar to both a decision tree and a space * partitioning tree (like a kd-tree). Each leaf represents a constant-density * hyper-rectangle. The tree is constructed in such a way as to minimize the * integrated square error between the probability distribution of the tree and * the observed probability distribution of the data. Because the tree is * similar to a decision tree, the density estimation tree can provide very fast * density estimates for a given point. * * For more information, see the following paper: * * @code * @incollection{ram2011, * author = {Ram, Parikshit and Gray, Alexander G.}, * title = {Density estimation trees}, * booktitle = {{Proceedings of the 17th ACM SIGKDD International Conference * on Knowledge Discovery and Data Mining}}, * series = {KDD '11}, * year = {2011}, * pages = {627--635} * } * @endcode */ class DTree { public: /** * Create an empty density estimation tree. */ DTree(); /** * Create a density estimation tree with the given bounds and the given number * of total points. Children will not be created. * * @param maxVals Maximum values of the bounding box. * @param minVals Minimum values of the bounding box. * @param totalPoints Total number of points in the dataset. */ DTree(const arma::vec& maxVals, const arma::vec& minVals, const size_t totalPoints); /** * Create a density estimation tree on the given data. Children will be * created following the procedure outlined in the paper. The data will be * modified; it will be reordered similar to the way BinarySpaceTree modifies * datasets. * * @param data Dataset to build tree on. */ DTree(arma::mat& data); /** * Create a child node of a density estimation tree given the bounding box * specified by maxVals and minVals, using the size given in start and end and * the specified error. Children of this node will not be created * recursively. * * @param maxVals Upper bound of bounding box. * @param minVals Lower bound of bounding box. * @param start Start of points represented by this node in the data matrix. * @param end End of points represented by this node in the data matrix. 
* @param error log-negative error of this node. */ DTree(const arma::vec& maxVals, const arma::vec& minVals, const size_t start, const size_t end, const double logNegError); /** * Create a child node of a density estimation tree given the bounding box * specified by maxVals and minVals, using the size given in start and end, * and calculating the error with the total number of points given. Children * of this node will not be created recursively. * * @param maxVals Upper bound of bounding box. * @param minVals Lower bound of bounding box. * @param start Start of points represented by this node in the data matrix. * @param end End of points represented by this node in the data matrix. */ DTree(const arma::vec& maxVals, const arma::vec& minVals, const size_t totalPoints, const size_t start, const size_t end); //! Clean up memory allocated by the tree. ~DTree(); /** * Greedily expand the tree. The points in the dataset will be reordered * during tree growth. * * @param data Dataset to build tree on. * @param oldFromNew Mappings from old points to new points. * @param useVolReg If true, volume regularization is used. * @param maxLeafSize Maximum size of a leaf. * @param minLeafSize Minimum size of a leaf. */ double Grow(arma::mat& data, arma::Col& oldFromNew, const bool useVolReg = false, const size_t maxLeafSize = 10, const size_t minLeafSize = 5); /** * Perform alpha pruning on a tree. Returns the new value of alpha. * * @param oldAlpha Old value of alpha. * @param points Total number of points in dataset. * @param useVolReg If true, volume regularization is used. * @return New value of alpha. */ double PruneAndUpdate(const double oldAlpha, const size_t points, const bool useVolReg = false); /** * Compute the logarithm of the density estimate of a given query point. * * @param query Point to estimate density of. */ double ComputeValue(const arma::vec& query) const; /** * Print the tree in a depth-first manner (this function is called * recursively). * * @param fp File to write the tree to. * @param level Level of the tree (should start at 0). */ void WriteTree(FILE *fp, const size_t level = 0) const; /** * Index the buckets for possible usage later; this results in every leaf in * the tree having a specific tag (accessible with BucketTag()). This * function calls itself recursively. * * @param tag Tag for the next leaf; leave at 0 for the initial call. */ int TagTree(const int tag = 0); /** * Return the tag of the leaf containing the query. This is useful for * generating class memberships. * * @param query Query to search for. */ int FindBucket(const arma::vec& query) const; /** * Compute the variable importance of each dimension in the learned tree. * * @param importances Vector to store the calculated importances in. */ void ComputeVariableImportance(arma::vec& importances) const; /** * Compute the log-negative-error for this point, given the total number of * points in the dataset. * * @param totalPoints Total number of points in the dataset. */ double LogNegativeError(const size_t totalPoints) const; /** * Return whether a query point is within the range of this node. */ bool WithinRange(const arma::vec& query) const; private: // The indices in the complete set of points // (after all forms of swapping in the original data // matrix to align all the points in a node // consecutively in the matrix. The 'old_from_new' array // maps the points back to their original indices. //! The index of the first point in the dataset contained in this node (and //! its children). size_t start; //! 
The index of the last point in the dataset contained in this node (and its //! children). size_t end; //! Upper half of bounding box for this node. arma::vec maxVals; //! Lower half of bounding box for this node. arma::vec minVals; //! The splitting dimension for this node. size_t splitDim; //! The split value on the splitting dimension for this node. double splitValue; //! log-negative-L2-error of the node. double logNegError; //! Sum of the error of the leaves of the subtree. double subtreeLeavesLogNegError; //! Number of leaves of the subtree. size_t subtreeLeaves; //! If true, this node is the root of the tree. bool root; //! Ratio of the number of points in the node to the total number of points. double ratio; //! The logarithm of the volume of the node. double logVolume; //! The tag for the leaf, used for hashing points. int bucketTag; //! Upper part of alpha sum; used for pruning. double alphaUpper; //! The left child. DTree* left; //! The right child. DTree* right; public: //! Return the starting index of points contained in this node. size_t Start() const { return start; } //! Return the first index of a point not contained in this node. size_t End() const { return end; } //! Return the split dimension of this node. size_t SplitDim() const { return splitDim; } //! Return the split value of this node. double SplitValue() const { return splitValue; } //! Return the log negative error of this node. double LogNegError() const { return logNegError; } //! Return the log negative error of all descendants of this node. double SubtreeLeavesLogNegError() const { return subtreeLeavesLogNegError; } //! Return the number of leaves which are descendants of this node. size_t SubtreeLeaves() const { return subtreeLeaves; } //! Return the ratio of points in this node to the points in the whole //! dataset. double Ratio() const { return ratio; } //! Return the inverse of the volume of this node. double LogVolume() const { return logVolume; } //! Return the left child. DTree* Left() const { return left; } //! Return the right child. DTree* Right() const { return right; } //! Return whether or not this is the root of the tree. bool Root() const { return root; } //! Return the upper part of the alpha sum. double AlphaUpper() const { return alphaUpper; } //! Return the maximum values. const arma::vec& MaxVals() const { return maxVals; } //! Modify the maximum values. arma::vec& MaxVals() { return maxVals; } //! Return the minimum values. const arma::vec& MinVals() const { return minVals; } //! Modify the minimum values. arma::vec& MinVals() { return minVals; } /** * Serialize the density estimation tree. */ template void Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(start, "start"); ar & CreateNVP(end, "end"); ar & CreateNVP(maxVals, "maxVals"); ar & CreateNVP(minVals, "minVals"); ar & CreateNVP(splitDim, "splitDim"); ar & CreateNVP(splitValue, "splitValue"); ar & CreateNVP(logNegError, "logNegError"); ar & CreateNVP(subtreeLeavesLogNegError, "subtreeLeavesLogNegError"); ar & CreateNVP(subtreeLeaves, "subtreeLeaves"); ar & CreateNVP(root, "root"); ar & CreateNVP(ratio, "ratio"); ar & CreateNVP(logVolume, "logVolume"); ar & CreateNVP(bucketTag, "bucketTag"); ar & CreateNVP(alphaUpper, "alphaUpper"); if (Archive::is_loading::value) { if (left) delete left; if (right) delete right; } ar & CreateNVP(left, "left"); ar & CreateNVP(right, "right"); } private: // Utility methods. /** * Find the dimension to split on. 
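 *
 * On success, the chosen dimension and value are written to splitDim and
 * splitValue, and the log-negative errors of the prospective children are
 * written to leftError and rightError.  A sketch of how Grow() consumes this
 * method (mirroring dtree.cpp):
 *
 * @code
 * size_t dim;
 * double splitValue, leftError, rightError;
 * if (FindSplit(data, dim, splitValue, leftError, rightError, minLeafSize))
 * {
 *   // A split that reduces this node's error exists; partition and recurse.
 * }
 * @endcode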
*/ bool FindSplit(const arma::mat& data, size_t& splitDim, double& splitValue, double& leftError, double& rightError, const size_t minLeafSize = 5) const; /** * Split the data, returning the number of points left of the split. */ size_t SplitData(arma::mat& data, const size_t splitDim, const double splitValue, arma::Col& oldFromNew) const; }; } // namespace det } // namespace mlpack #endif // MLPACK_METHODS_DET_DTREE_HPP mlpack-2.2.5/src/mlpack/methods/emst/000077500000000000000000000000001315013601400174325ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/emst/CMakeLists.txt000066400000000000000000000011031315013601400221650ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES # union_find union_find.hpp # dtb dtb.hpp dtb_impl.hpp dtb_rules.hpp dtb_rules_impl.hpp dtb_stat.hpp edge_pair.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(emst) mlpack-2.2.5/src/mlpack/methods/emst/dtb.hpp000066400000000000000000000146311315013601400207210ustar00rootroot00000000000000/** * @file dtb.hpp * @author Bill March (march@gatech.edu) * * Contains an implementation of the DualTreeBoruvka algorithm for finding a * Euclidean Minimum Spanning Tree using the kd-tree data structure. * * @code * @inproceedings{ * author = {March, W.B., Ram, P., and Gray, A.G.}, * title = {{Fast Euclidean Minimum Spanning Tree: Algorithm, Analysis, * Applications.}}, * booktitle = {Proceedings of the 16th ACM SIGKDD International Conference * on Knowledge Discovery and Data Mining} * series = {KDD 2010}, * year = {2010} * } * @endcode * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_EMST_DTB_HPP #define MLPACK_METHODS_EMST_DTB_HPP #include "dtb_stat.hpp" #include "edge_pair.hpp" #include #include #include namespace mlpack { namespace emst /** Euclidean Minimum Spanning Trees. */ { /** * Performs the MST calculation using the Dual-Tree Boruvka algorithm, using any * type of tree. * * For more information on the algorithm, see the following citation: * * @code * @inproceedings{ * author = {March, W.B., Ram, P., and Gray, A.G.}, * title = {{Fast Euclidean Minimum Spanning Tree: Algorithm, Analysis, * Applications.}}, * booktitle = {Proceedings of the 16th ACM SIGKDD International Conference * on Knowledge Discovery and Data Mining} * series = {KDD 2010}, * year = {2010} * } * @endcode * * General usage of this class might be like this: * * @code * extern arma::mat data; // We want to find the MST of this dataset. * DualTreeBoruvka<> dtb(data); // Create the tree with default options. * * // Find the MST. * arma::mat mstResults; * dtb.ComputeMST(mstResults); * @endcode * * More advanced usage of the class can use different types of trees, pass in an * already-built tree, or compute the MST using the O(n^2) naive algorithm. * * @tparam MetricType The metric to use. * @tparam MatType The type of data matrix to use. * @tparam TreeType Type of tree to use. This should follow the TreeType policy * API. 
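 *
 * As a sketch of more advanced usage, a different tree type can be
 * substituted for the default kd-tree (here the cover tree, which mlpack
 * provides with a compatible template signature):
 *
 * @code
 * extern arma::mat data;
 * DualTreeBoruvka<metric::EuclideanDistance, arma::mat,
 *     tree::StandardCoverTree> dtb(data);
 * arma::mat results;
 * dtb.ComputeMST(results);
 * @endcode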
*/ template< typename MetricType = metric::EuclideanDistance, typename MatType = arma::mat, template class TreeType = tree::KDTree > class DualTreeBoruvka { public: //! Convenience typedef. typedef TreeType Tree; private: //! Permutations of points during tree building. std::vector oldFromNew; //! Pointer to the root of the tree. Tree* tree; //! Reference to the data (this is what should be used for accessing data). const MatType& data; //! Indicates whether or not we "own" the tree. bool ownTree; //! Indicates whether or not O(n^2) naive mode will be used. bool naive; //! Edges. std::vector edges; // We must use vector with non-numerical types. //! Connections. UnionFind connections; //! List of edge nodes. arma::Col neighborsInComponent; //! List of edge nodes. arma::Col neighborsOutComponent; //! List of edge distances. arma::vec neighborsDistances; //! Total distance of the tree. double totalDist; //! The instantiated metric. MetricType metric; //! For sorting the edge list after the computation. struct SortEdgesHelper { bool operator()(const EdgePair& pairA, const EdgePair& pairB) { return (pairA.Distance() < pairB.Distance()); } } SortFun; public: /** * Create the tree from the given dataset. This copies the dataset to an * internal copy, because tree-building modifies the dataset. * * @param data Dataset to build a tree for. * @param naive Whether the computation should be done in O(n^2) naive mode. * @param metric An optional instantiated metric to use. */ DualTreeBoruvka(const MatType& dataset, const bool naive = false, const MetricType metric = MetricType()); /** * Create the DualTreeBoruvka object with an already initialized tree. This * will not copy the dataset, and can save a little processing power. Naive * mode is not available as an option for this constructor; instead, to run * naive computation, construct a tree with all the points in one leaf (i.e. * leafSize = number of points). * * @note * Because tree-building (at least with BinarySpaceTree) modifies the ordering * of a matrix, be sure you pass the modified matrix to this object! In * addition, mapping the points of the matrix back to their original indices * is not done when this constructor is used. * @endnote * * @param tree Pre-built tree. * @param metric An optional instantiated metric to use. */ DualTreeBoruvka(Tree* tree, const MetricType metric = MetricType()); /** * Delete the tree, if it was created inside the object. */ ~DualTreeBoruvka(); /** * Iteratively find the nearest neighbor of each component until the MST is * complete. The results will be a 3xN matrix (with N equal to the number of * edges in the minimum spanning tree). The first row will contain the lesser * index of the edge; the second row will contain the greater index of the * edge; and the third row will contain the distance between the two edges. * * @param results Matrix which results will be stored in. */ void ComputeMST(arma::mat& results); private: /** * Adds a single edge to the edge list */ void AddEdge(const size_t e1, const size_t e2, const double distance); /** * Adds all the edges found in one iteration to the list of neighbors. */ void AddAllEdges(); /** * Unpermute the edge list and output it to results. */ void EmitResults(arma::mat& results); /** * This function resets the values in the nodes of the tree nearest neighbor * distance, and checks for fully connected nodes. */ void CleanupHelper(Tree* tree); /** * The values stored in the tree must be reset on each iteration. 
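 *
 * Each Boruvka iteration in ComputeMST() follows the same pattern, so stale
 * candidate distances never survive into the next pass.  A sketch of that
 * loop (see dtb_impl.hpp):
 *
 * @code
 * while (edges.size() < data.n_cols - 1)
 * {
 *   // A dual-tree (or naive) traversal finds each component's best edge...
 *   AddAllEdges();
 *   Cleanup();  // ...then candidate distances and node statistics reset.
 * }
 * @endcode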
*/ void Cleanup(); }; // class DualTreeBoruvka } // namespace emst } // namespace mlpack #include "dtb_impl.hpp" #endif // MLPACK_METHODS_EMST_DTB_HPP mlpack-2.2.5/src/mlpack/methods/emst/dtb_impl.hpp000066400000000000000000000223041315013601400217360ustar00rootroot00000000000000/** * @file dtb_impl.hpp * @author Bill March (march@gatech.edu) * * Implementation of DTB. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_EMST_DTB_IMPL_HPP #define MLPACK_METHODS_EMST_DTB_IMPL_HPP #include "dtb_rules.hpp" namespace mlpack { namespace emst { //! Call the tree constructor that does mapping. template TreeType* BuildTree( MatType& dataset, std::vector& oldFromNew, typename boost::enable_if_c< tree::TreeTraits::RearrangesDataset == true, TreeType* >::type = 0) { return new TreeType(dataset, oldFromNew); } //! Call the tree constructor that does not do mapping. template TreeType* BuildTree( const MatType& dataset, const std::vector& /* oldFromNew */, const typename boost::enable_if_c< tree::TreeTraits::RearrangesDataset == false, TreeType* >::type = 0) { return new TreeType(dataset); } /** * Takes in a reference to the data set. Copies the data, builds the tree, * and initializes all of the member variables. */ template< typename MetricType, typename MatType, template class TreeType> DualTreeBoruvka::DualTreeBoruvka( const MatType& dataset, const bool naive, const MetricType metric) : tree(naive ? NULL : BuildTree(const_cast(dataset), oldFromNew)), data(naive ? dataset : tree->Dataset()), ownTree(!naive), naive(naive), connections(dataset.n_cols), totalDist(0.0), metric(metric) { edges.reserve(data.n_cols - 1); // Set size. neighborsInComponent.set_size(data.n_cols); neighborsOutComponent.set_size(data.n_cols); neighborsDistances.set_size(data.n_cols); neighborsDistances.fill(DBL_MAX); } template< typename MetricType, typename MatType, template class TreeType> DualTreeBoruvka::DualTreeBoruvka( Tree* tree, const MetricType metric) : tree(tree), data(tree->Dataset()), ownTree(false), naive(false), connections(data.n_cols), totalDist(0.0), metric(metric) { edges.reserve(data.n_cols - 1); // Fill with EdgePairs. neighborsInComponent.set_size(data.n_cols); neighborsOutComponent.set_size(data.n_cols); neighborsDistances.set_size(data.n_cols); neighborsDistances.fill(DBL_MAX); } template< typename MetricType, typename MatType, template class TreeType> DualTreeBoruvka::~DualTreeBoruvka() { if (ownTree) delete tree; } /** * Iteratively find the nearest neighbor of each component until the MST is * complete. */ template< typename MetricType, typename MatType, template class TreeType> void DualTreeBoruvka::ComputeMST( arma::mat& results) { Timer::Start("emst/mst_computation"); totalDist = 0; // Reset distance. typedef DTBRules RuleType; RuleType rules(data, connections, neighborsDistances, neighborsInComponent, neighborsOutComponent, metric); while (edges.size() < (data.n_cols - 1)) { if (naive) { // Full O(N^2) traversal. for (size_t i = 0; i < data.n_cols; ++i) for (size_t j = 0; j < data.n_cols; ++j) rules.BaseCase(i, j); } else { typename Tree::template DualTreeTraverser traverser(rules); traverser.Traverse(*tree, *tree); } AddAllEdges(); Cleanup(); Log::Info << edges.size() << " edges found so far." 
<< std::endl; if (!naive) { Log::Info << rules.BaseCases() << " cumulative base cases." << std::endl; Log::Info << rules.Scores() << " cumulative node combinations scored." << std::endl; } } Timer::Stop("emst/mst_computation"); EmitResults(results); Log::Info << "Total spanning tree length: " << totalDist << std::endl; } /** * Adds a single edge to the edge list */ template< typename MetricType, typename MatType, template class TreeType> void DualTreeBoruvka::AddEdge( const size_t e1, const size_t e2, const double distance) { Log::Assert((distance >= 0.0), "DualTreeBoruvka::AddEdge(): distance cannot be negative."); if (e1 < e2) edges.push_back(EdgePair(e1, e2, distance)); else edges.push_back(EdgePair(e2, e1, distance)); } /** * Adds all the edges found in one iteration to the list of neighbors. */ template< typename MetricType, typename MatType, template class TreeType> void DualTreeBoruvka::AddAllEdges() { for (size_t i = 0; i < data.n_cols; i++) { size_t component = connections.Find(i); size_t inEdge = neighborsInComponent[component]; size_t outEdge = neighborsOutComponent[component]; if (connections.Find(inEdge) != connections.Find(outEdge)) { //totalDist = totalDist + dist; // changed to make this agree with the cover tree code totalDist += neighborsDistances[component]; AddEdge(inEdge, outEdge, neighborsDistances[component]); connections.Union(inEdge, outEdge); } } } /** * Unpermute the edge list (if necessary) and output it to results. */ template< typename MetricType, typename MatType, template class TreeType> void DualTreeBoruvka::EmitResults( arma::mat& results) { // Sort the edges. std::sort(edges.begin(), edges.end(), SortFun); Log::Assert(edges.size() == data.n_cols - 1); results.set_size(3, edges.size()); // Need to unpermute the point labels. if (!naive && ownTree && tree::TreeTraits::RearrangesDataset) { for (size_t i = 0; i < (data.n_cols - 1); i++) { // Make sure the edge list stores the smaller index first to // make checking correctness easier size_t ind1 = oldFromNew[edges[i].Lesser()]; size_t ind2 = oldFromNew[edges[i].Greater()]; if (ind1 < ind2) { edges[i].Lesser() = ind1; edges[i].Greater() = ind2; } else { edges[i].Lesser() = ind2; edges[i].Greater() = ind1; } results(0, i) = edges[i].Lesser(); results(1, i) = edges[i].Greater(); results(2, i) = edges[i].Distance(); } } else { for (size_t i = 0; i < edges.size(); i++) { results(0, i) = edges[i].Lesser(); results(1, i) = edges[i].Greater(); results(2, i) = edges[i].Distance(); } } } /** * This function resets the values in the nodes of the tree nearest neighbor * distance and checks for fully connected nodes. */ template< typename MetricType, typename MatType, template class TreeType> void DualTreeBoruvka::CleanupHelper(Tree* tree) { // Reset the statistic information. tree->Stat().MaxNeighborDistance() = DBL_MAX; tree->Stat().MinNeighborDistance() = DBL_MAX; tree->Stat().Bound() = DBL_MAX; // Recurse into all children. for (size_t i = 0; i < tree->NumChildren(); ++i) CleanupHelper(&tree->Child(i)); // Get the component of the first child or point. Then we will check to see // if all other components of children and points are the same. const int component = (tree->NumChildren() != 0) ? tree->Child(0).Stat().ComponentMembership() : connections.Find(tree->Point(0)); // Check components of children. for (size_t i = 0; i < tree->NumChildren(); ++i) if (tree->Child(i).Stat().ComponentMembership() != component) return; // Check components of points. 
for (size_t i = 0; i < tree->NumPoints(); ++i) if (connections.Find(tree->Point(i)) != size_t(component)) return; // If we made it this far, all components are the same. tree->Stat().ComponentMembership() = component; } /** * The values stored in the tree must be reset on each iteration. */ template< typename MetricType, typename MatType, template class TreeType> void DualTreeBoruvka::Cleanup() { for (size_t i = 0; i < data.n_cols; i++) neighborsDistances[i] = DBL_MAX; if (!naive) CleanupHelper(tree); } } // namespace emst } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/emst/dtb_rules.hpp000066400000000000000000000113441315013601400221310ustar00rootroot00000000000000/** * @file dtb.hpp * @author Bill March (march@gatech.edu) * * Tree traverser rules for the DualTreeBoruvka algorithm. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_EMST_DTB_RULES_HPP #define MLPACK_METHODS_EMST_DTB_RULES_HPP #include #include namespace mlpack { namespace emst { template class DTBRules { public: DTBRules(const arma::mat& dataSet, UnionFind& connections, arma::vec& neighborsDistances, arma::Col& neighborsInComponent, arma::Col& neighborsOutComponent, MetricType& metric); double BaseCase(const size_t queryIndex, const size_t referenceIndex); /** * Get the score for recursion order. A low score indicates priority for * recursion, while DBL_MAX indicates that the node should not be recursed * into at all (it should be pruned). * * @param queryIndex Index of query point. * @param referenceNode Candidate node to be recursed into. */ double Score(const size_t queryIndex, TreeType& referenceNode); /** * Re-evaluate the score for recursion order. A low score indicates priority * for recursion, while DBL_MAX indicates that the node should not be recursed * into at all (it should be pruned). This is used when the score has already * been calculated, but another recursion may have modified the bounds for * pruning. So the old score is checked against the new pruning bound. * * @param queryIndex Index of query point. * @param referenceNode Candidate node to be recursed into. * @param oldScore Old score produced by Score() (or Rescore()). */ double Rescore(const size_t queryIndex, TreeType& referenceNode, const double oldScore); /** * Get the score for recursion order. A low score indicates priority for * recursionm while DBL_MAX indicates that the node should not be recursed * into at all (it should be pruned). * * @param queryNode Candidate query node to recurse into. * @param referenceNode Candidate reference node to recurse into. */ double Score(TreeType& queryNode, TreeType& referenceNode); /** * Re-evaluate the score for recursion order. A low score indicates priority * for recursion, while DBL_MAX indicates that the node should not be recursed * into at all (it should be pruned). This is used when the score has already * been calculated, but another recursion may have modified the bounds for * pruning. So the old score is checked against the new pruning bound. * * @param queryNode Candidate query node to recurse into. * @param referenceNode Candidate reference node to recurse into. * @param oldScore Old score produced by Socre() (or Rescore()). 
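 * @return oldScore if this node combination may still contain an improving
 *     candidate edge, or DBL_MAX if it can now be pruned.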
*/ double Rescore(TreeType& queryNode, TreeType& referenceNode, const double oldScore) const; typedef typename tree::TraversalInfo TraversalInfoType; const TraversalInfoType& TraversalInfo() const { return traversalInfo; } TraversalInfoType& TraversalInfo() { return traversalInfo; } //! Get the number of base cases performed. size_t BaseCases() const { return baseCases; } //! Modify the number of base cases performed. size_t& BaseCases() { return baseCases; } //! Get the number of node combinations that have been scored. size_t Scores() const { return scores; } //! Modify the number of node combinations that have been scored. size_t& Scores() { return scores; } private: //! The data points. const arma::mat& dataSet; //! Stores the tree structure so far UnionFind& connections; //! The distance to the candidate nearest neighbor for each component. arma::vec& neighborsDistances; //! The index of the point in the component that is an endpoint of the //! candidate edge. arma::Col& neighborsInComponent; //! The index of the point outside of the component that is an endpoint //! of the candidate edge. arma::Col& neighborsOutComponent; //! The instantiated metric. MetricType& metric; /** * Update the bound for the given query node. */ inline double CalculateBound(TreeType& queryNode) const; TraversalInfoType traversalInfo; //! The number of base cases calculated. size_t baseCases; //! The number of node combinations that have been scored. size_t scores; }; // class DTBRules } // emst namespace } // mlpack namespace #include "dtb_rules_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/emst/dtb_rules_impl.hpp000066400000000000000000000153101315013601400231470ustar00rootroot00000000000000/** * @file dtb_impl.hpp * @author Bill March (march@gatech.edu) * * Tree traverser rules for the DualTreeBoruvka algorithm. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_EMST_DTB_RULES_IMPL_HPP #define MLPACK_METHODS_EMST_DTB_RULES_IMPL_HPP namespace mlpack { namespace emst { template DTBRules:: DTBRules(const arma::mat& dataSet, UnionFind& connections, arma::vec& neighborsDistances, arma::Col& neighborsInComponent, arma::Col& neighborsOutComponent, MetricType& metric) : dataSet(dataSet), connections(connections), neighborsDistances(neighborsDistances), neighborsInComponent(neighborsInComponent), neighborsOutComponent(neighborsOutComponent), metric(metric), baseCases(0), scores(0) { // Nothing else to do. } template inline force_inline double DTBRules::BaseCase(const size_t queryIndex, const size_t referenceIndex) { // Check if the points are in the same component at this iteration. // If not, return the distance between them. Also, store a better result as // the current neighbor, if necessary. double newUpperBound = -1.0; // Find the index of the component the query is in. 
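// Candidate edges are tracked per *component* rather than per point: every
// point that UnionFind places in a given component shares one best-edge slot
// in neighborsDistances / neighborsInComponent / neighborsOutComponent.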
size_t queryComponentIndex = connections.Find(queryIndex); size_t referenceComponentIndex = connections.Find(referenceIndex); if (queryComponentIndex != referenceComponentIndex) { ++baseCases; double distance = metric.Evaluate(dataSet.col(queryIndex), dataSet.col(referenceIndex)); if (distance < neighborsDistances[queryComponentIndex]) { Log::Assert(queryIndex != referenceIndex); neighborsDistances[queryComponentIndex] = distance; neighborsInComponent[queryComponentIndex] = queryIndex; neighborsOutComponent[queryComponentIndex] = referenceIndex; } } if (newUpperBound < neighborsDistances[queryComponentIndex]) newUpperBound = neighborsDistances[queryComponentIndex]; Log::Assert(newUpperBound >= 0.0); return newUpperBound; } template double DTBRules::Score(const size_t queryIndex, TreeType& referenceNode) { size_t queryComponentIndex = connections.Find(queryIndex); // If the query belongs to the same component as all of the references, // then prune. The cast is to stop a warning about comparing unsigned to // signed values. if (queryComponentIndex == (size_t) referenceNode.Stat().ComponentMembership()) return DBL_MAX; const arma::vec queryPoint = dataSet.unsafe_col(queryIndex); const double distance = referenceNode.MinDistance(queryPoint); // If all the points in the reference node are farther than the candidate // nearest neighbor for the query's component, we prune. return neighborsDistances[queryComponentIndex] < distance ? DBL_MAX : distance; } template double DTBRules::Rescore(const size_t queryIndex, TreeType& /* referenceNode */, const double oldScore) { // We don't need to check component membership again, because it can't // change inside a single iteration. return (oldScore > neighborsDistances[connections.Find(queryIndex)]) ? DBL_MAX : oldScore; } template double DTBRules::Score(TreeType& queryNode, TreeType& referenceNode) { // If all the queries belong to the same component as all the references // then we prune. if ((queryNode.Stat().ComponentMembership() >= 0) && (queryNode.Stat().ComponentMembership() == referenceNode.Stat().ComponentMembership())) return DBL_MAX; ++scores; const double distance = queryNode.MinDistance(referenceNode); const double bound = CalculateBound(queryNode); // If all the points in the reference node are farther than the candidate // nearest neighbor for all queries in the node, we prune. return (bound < distance) ? DBL_MAX : distance; } template double DTBRules::Rescore(TreeType& queryNode, TreeType& /* referenceNode */, const double oldScore) const { const double bound = CalculateBound(queryNode); return (oldScore > bound) ? DBL_MAX : oldScore; } // Calculate the bound for a given query node in its current state and update // it. template inline double DTBRules::CalculateBound( TreeType& queryNode) const { double worstPointBound = -DBL_MAX; double bestPointBound = DBL_MAX; double worstChildBound = -DBL_MAX; double bestChildBound = DBL_MAX; // Now, find the best and worst point bounds. for (size_t i = 0; i < queryNode.NumPoints(); ++i) { const size_t pointComponent = connections.Find(queryNode.Point(i)); const double bound = neighborsDistances[pointComponent]; if (bound > worstPointBound) worstPointBound = bound; if (bound < bestPointBound) bestPointBound = bound; } // Find the best and worst child bounds. 
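// (Once the point and child bounds are merged below, the node's pruning bound
// becomes B(N_q) = min(worstBound, bestBound + 2 * lambda(N_q)), where
// lambda(N_q) is the furthest descendant distance; reference nodes farther
// away than B(N_q) cannot improve any descendant's candidate edge.)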
for (size_t i = 0; i < queryNode.NumChildren(); ++i) { const double maxBound = queryNode.Child(i).Stat().MaxNeighborDistance(); if (maxBound > worstChildBound) worstChildBound = maxBound; const double minBound = queryNode.Child(i).Stat().MinNeighborDistance(); if (minBound < bestChildBound) bestChildBound = minBound; } // Now calculate the actual bounds. const double worstBound = std::max(worstPointBound, worstChildBound); const double bestBound = std::min(bestPointBound, bestChildBound); // We must check that bestBound != DBL_MAX; otherwise, we risk overflow. const double bestAdjustedBound = (bestBound == DBL_MAX) ? DBL_MAX : bestBound + 2 * queryNode.FurthestDescendantDistance(); // Update the relevant quantities in the node. queryNode.Stat().MaxNeighborDistance() = worstBound; queryNode.Stat().MinNeighborDistance() = bestBound; queryNode.Stat().Bound() = std::min(worstBound, bestAdjustedBound); return queryNode.Stat().Bound(); } } // namespace emst } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/emst/dtb_stat.hpp000066400000000000000000000061101315013601400217450ustar00rootroot00000000000000/** * @file dtb.hpp * @author Bill March (march@gatech.edu) * * DTBStat is the StatisticType used by trees when performing EMST. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_EMST_DTB_STAT_HPP #define MLPACK_METHODS_EMST_DTB_STAT_HPP #include namespace mlpack { namespace emst { /** * A statistic for use with mlpack trees, which stores the upper bound on * distance to nearest neighbors and the component which this node belongs to. */ class DTBStat { private: //! Upper bound on the distance to the nearest neighbor of any point in this //! node. double maxNeighborDistance; //! Lower bound on the distance to the nearest neighbor of any point in this //! node. double minNeighborDistance; //! Total bound for pruning. double bound; //! The index of the component that all points in this node belong to. This //! is the same index returned by UnionFind for all points in this node. If //! points in this node are in different components, this value will be //! negative. int componentMembership; public: /** * A generic initializer. Sets the maximum neighbor distance to its default, * and the component membership to -1 (no component). */ DTBStat() : maxNeighborDistance(DBL_MAX), minNeighborDistance(DBL_MAX), bound(DBL_MAX), componentMembership(-1) { } /** * This is called when a node is finished initializing. We set the maximum * neighbor distance to its default, and if possible, we set the component * membership of the node (if it has only one point and no children). * * @param node Node that has been finished. */ template DTBStat(const TreeType& node) : maxNeighborDistance(DBL_MAX), minNeighborDistance(DBL_MAX), bound(DBL_MAX), componentMembership( ((node.NumPoints() == 1) && (node.NumChildren() == 0)) ? node.Point(0) : -1) { } //! Get the maximum neighbor distance. double MaxNeighborDistance() const { return maxNeighborDistance; } //! Modify the maximum neighbor distance. double& MaxNeighborDistance() { return maxNeighborDistance; } //! Get the minimum neighbor distance. double MinNeighborDistance() const { return minNeighborDistance; } //! Modify the minimum neighbor distance. 
double& MinNeighborDistance() { return minNeighborDistance; } //! Get the total bound for pruning. double Bound() const { return bound; } //! Modify the total bound for pruning. double& Bound() { return bound; } //! Get the component membership of this node. int ComponentMembership() const { return componentMembership; } //! Modify the component membership of this node. int& ComponentMembership() { return componentMembership; } }; // class DTBStat } // namespace emst } // namespace mlpack #endif // MLPACK_METHODS_EMST_DTB_STAT_HPP mlpack-2.2.5/src/mlpack/methods/emst/edge_pair.hpp000066400000000000000000000037571315013601400220760ustar00rootroot00000000000000/** * @file edge_pair.hpp * * @author Bill March (march@gatech.edu) * * This file contains utilities necessary for all of the minimum spanning tree * algorithms. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_EMST_EDGE_PAIR_HPP #define MLPACK_METHODS_EMST_EDGE_PAIR_HPP #include #include "union_find.hpp" namespace mlpack { namespace emst { /** * An edge pair is simply two indices and a distance. It is used as the * basic element of an edge list when computing a minimum spanning tree. */ class EdgePair { private: //! Lesser index. size_t lesser; //! Greater index. size_t greater; //! Distance between two indices. double distance; public: /** * Initialize an EdgePair with two indices and a distance. The indices are * called lesser and greater, implying that they be sorted before calling * Init. However, this is not necessary for functionality; it is just a way * to keep the edge list organized in other code. */ EdgePair(const size_t lesser, const size_t greater, const double dist) : lesser(lesser), greater(greater), distance(dist) { Log::Assert(lesser != greater, "EdgePair::EdgePair(): indices cannot be equal."); } //! Get the lesser index. size_t Lesser() const { return lesser; } //! Modify the lesser index. size_t& Lesser() { return lesser; } //! Get the greater index. size_t Greater() const { return greater; } //! Modify the greater index. size_t& Greater() { return greater; } //! Get the distance. double Distance() const { return distance; } //! Modify the distance. double& Distance() { return distance; } }; // class EdgePair } // namespace emst } // namespace mlpack #endif // MLPACK_METHODS_EMST_EDGE_PAIR_HPP mlpack-2.2.5/src/mlpack/methods/emst/emst_main.cpp000066400000000000000000000103751315013601400221200ustar00rootroot00000000000000/** * @file emst_main.cpp * @author Bill March (march@gatech.edu) * * Calls the DualTreeBoruvka algorithm from dtb.hpp. * Can optionally call naive Boruvka's method. * * For algorithm details, see: * * @code * @inproceedings{ * author = {March, W.B., Ram, P., and Gray, A.G.}, * title = {{Fast Euclidean Minimum Spanning Tree: Algorithm, Analysis, * Applications.}}, * booktitle = {Proceedings of the 16th ACM SIGKDD International Conference * on Knowledge Discovery and Data Mining} * series = {KDD 2010}, * year = {2010} * } * @endcode * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include "dtb.hpp" #include #include #include PROGRAM_INFO("Fast Euclidean Minimum Spanning Tree", "This program can compute " "the Euclidean minimum spanning tree of a set of input points using the " "dual-tree Boruvka algorithm." "\n\n" "The output is saved in a three-column matrix, where each row indicates an " "edge. The first column corresponds to the lesser index of the edge; the " "second column corresponds to the greater index of the edge; and the third " "column corresponds to the distance between the two points."); PARAM_STRING_IN_REQ("input_file", "Data input file.", "i"); PARAM_STRING_OUT("output_file", "Data output file. Stored as an edge list.", "o"); PARAM_FLAG("naive", "Compute the MST using O(n^2) naive algorithm.", "n"); PARAM_INT_IN("leaf_size", "Leaf size in the kd-tree. One-element leaves give " "the empirically best performance, but at the cost of greater memory " "requirements.", "l", 1); using namespace mlpack; using namespace mlpack::emst; using namespace mlpack::tree; using namespace mlpack::metric; using namespace std; int main(int argc, char* argv[]) { CLI::ParseCommandLine(argc, argv); const string inputFile = CLI::GetParam("input_file"); const string outputFile = CLI::GetParam("output_file"); if (!CLI::HasParam("output_file")) Log::Warn << "--output_file is not specified, so no output will be saved!" << endl; arma::mat dataPoints; data::Load(inputFile, dataPoints, true); // Do naive computation if necessary. if (CLI::GetParam("naive")) { Log::Info << "Running naive algorithm." << endl; DualTreeBoruvka<> naive(dataPoints, true); arma::mat naiveResults; naive.ComputeMST(naiveResults); if (CLI::HasParam("output_file")) data::Save(outputFile, naiveResults, true); } else { Log::Info << "Building tree.\n"; // Check that the leaf size is reasonable. if (CLI::GetParam("leaf_size") <= 0) { Log::Fatal << "Invalid leaf size (" << CLI::GetParam("leaf_size") << ")! Must be greater than or equal to 1." << std::endl; } // Initialize the tree and get ready to compute the MST. Compute the tree // by hand. const size_t leafSize = (size_t) CLI::GetParam("leaf_size"); Timer::Start("tree_building"); std::vector oldFromNew; KDTree tree(dataPoints, oldFromNew, leafSize); metric::LMetric<2, true> metric; Timer::Stop("tree_building"); DualTreeBoruvka<> dtb(&tree, metric); // Run the DTB algorithm. Log::Info << "Calculating minimum spanning tree." << endl; arma::mat results; dtb.ComputeMST(results); // Unmap the results. arma::mat unmappedResults(results.n_rows, results.n_cols); for (size_t i = 0; i < results.n_cols; ++i) { const size_t indexA = oldFromNew[size_t(results(0, i))]; const size_t indexB = oldFromNew[size_t(results(1, i))]; if (indexA < indexB) { unmappedResults(0, i) = indexA; unmappedResults(1, i) = indexB; } else { unmappedResults(0, i) = indexB; unmappedResults(1, i) = indexA; } unmappedResults(2, i) = results(2, i); } if (CLI::HasParam("output_file")) data::Save(outputFile, unmappedResults, true); } } mlpack-2.2.5/src/mlpack/methods/emst/union_find.hpp000066400000000000000000000050001315013601400222660ustar00rootroot00000000000000/** * @file union_find.hpp * @author Bill March (march@gatech.edu) * * Implements a union-find data structure. This structure tracks the components * of a graph. Each point in the graph is initially in its own component. * Calling unionfind.Union(x, y) unites the components indexed by x and y. * unionfind.Find(x) returns the index of the component containing point x. 
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_EMST_UNION_FIND_HPP #define MLPACK_METHODS_EMST_UNION_FIND_HPP #include namespace mlpack { namespace emst { /** * A Union-Find data structure. See Cormen, Rivest, & Stein for details. The * structure tracks the components of a graph. Each point in the graph is * initially in its own component. Calling Union(x, y) unites the components * indexed by x and y. Find(x) returns the index of the component containing * point x. */ class UnionFind { private: arma::Col parent; arma::ivec rank; public: //! Construct the object with the given size. UnionFind(const size_t size) : parent(size), rank(size) { for (size_t i = 0; i < size; ++i) { parent[i] = i; rank[i] = 0; } } //! Destroy the object (nothing to do). ~UnionFind() { } /** * Returns the component containing an element. * * @param x the component to be found * @return The index of the component containing x */ size_t Find(const size_t x) { if (parent[x] == x) { return x; } else { // This ensures that the tree has a small depth parent[x] = Find(parent[x]); return parent[x]; } } /** * Union the components containing x and y. * * @param x one component * @param y the other component */ void Union(const size_t x, const size_t y) { const size_t xRoot = Find(x); const size_t yRoot = Find(y); if (xRoot == yRoot) { return; } else if (rank[xRoot] == rank[yRoot]) { parent[yRoot] = parent[xRoot]; rank[xRoot] = rank[xRoot] + 1; } else if (rank[xRoot] > rank[yRoot]) { parent[yRoot] = xRoot; } else { parent[xRoot] = yRoot; } } }; // class UnionFind } // namespace emst } // namespace mlpack #endif // MLPACK_METHODS_EMST_UNION_FIND_HPP mlpack-2.2.5/src/mlpack/methods/fastmks/000077500000000000000000000000001315013601400201325ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/fastmks/CMakeLists.txt000066400000000000000000000011211315013601400226650ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES fastmks.hpp fastmks_impl.hpp fastmks_model.hpp fastmks_model_impl.hpp fastmks_model.cpp fastmks_rules.hpp fastmks_rules_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(fastmks) mlpack-2.2.5/src/mlpack/methods/fastmks/fastmks.hpp000066400000000000000000000255351315013601400223250ustar00rootroot00000000000000/** * @file fastmks.hpp * @author Ryan Curtin * * Definition of the FastMKS class, which implements fast exact max-kernel * search. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_FASTMKS_FASTMKS_HPP #define MLPACK_METHODS_FASTMKS_FASTMKS_HPP #include #include #include "fastmks_stat.hpp" #include namespace mlpack { namespace fastmks /** Fast max-kernel search. */ { /** * An implementation of fast exact max-kernel search. 
Given a query dataset and * a reference dataset (or optionally just a reference dataset which is also * used as the query dataset), fast exact max-kernel search finds, for each * point in the query dataset, the k points in the reference set with maximum * kernel value K(p_q, p_r), where k is a specified parameter and K() is a * Mercer kernel. * * For more information, see the following paper. * * @code * @inproceedings{curtin2013fast, * title={Fast Exact Max-Kernel Search}, * author={Curtin, Ryan R. and Ram, Parikshit and Gray, Alexander G.}, * booktitle={Proceedings of the 2013 SIAM International Conference on Data * Mining (SDM 13)}, * year={2013} * } * @endcode * * This class allows specification of the type of kernel and also of the type of * tree. FastMKS can be run on kernels that work on arbitrary objects -- * however, this only works with cover trees and other trees that are built only * on points in the dataset (and not centroids of regions or anything like * that). * * @tparam KernelType Type of kernel to run FastMKS with. * @tparam MatType Type of data matrix (usually arma::mat). * @tparam TreeType Type of tree to run FastMKS with; it must satisfy the * TreeType policy API. */ template< typename KernelType, typename MatType = arma::mat, template class TreeType = tree::StandardCoverTree > class FastMKS { public: //! Convenience typedef. typedef TreeType, FastMKSStat, MatType> Tree; /** * Create the FastMKS object with an empty reference set and default kernel. * Make sure to call Train() before Search() is called! * * @param singleMode Whether or not to run single-tree search. * @param naive Whether or not to run brute-force (naive) search. */ FastMKS(const bool singleMode = false, const bool naive = false); /** * Create the FastMKS object with the given reference set (this is the set * that is searched). Optionally, specify whether or not single-tree search * or naive (brute-force) search should be used. * * @param referenceSet Set of reference data. * @param singleMode Whether or not to run single-tree search. * @param naive Whether or not to run brute-force (naive) search. */ FastMKS(const MatType& referenceSet, const bool singleMode = false, const bool naive = false); /** * Create the FastMKS object using the reference set (this is the set that is * searched) with an initialized kernel. This is useful for when the kernel * stores state. Optionally, specify whether or not single-tree search or * naive (brute-force) search should be used. * * @param referenceSet Reference set of data for FastMKS. * @param kernel Initialized kernel. * @param single Whether or not to run single-tree search. * @param naive Whether or not to run brute-force (naive) search. */ FastMKS(const MatType& referenceSet, KernelType& kernel, const bool singleMode = false, const bool naive = false); /** * Create the FastMKS object with an already-initialized tree built on the * reference points. Be sure that the tree is built with the metric type * IPMetric. Optionally, whether or not to run single-tree search * can be specified. Brute-force search is not available with this * constructor since a tree is given (use one of the other constructors). * * @param referenceTree Tree built on reference data. * @param single Whether or not to run single-tree search. * @param naive Whether or not to run brute-force (naive) search. */ FastMKS(Tree* referenceTree, const bool singleMode = false); //! Destructor for the FastMKS object. 
~FastMKS(); /** * "Train" the FastMKS model on the given reference set (this will just build * a tree, if the current search mode is not naive mode). * * @param referenceSet Set of reference points. */ void Train(const MatType& referenceSet); /** * "Train" the FastMKS model on the given reference set and use the given * kernel. This will just build a tree and replace the metric, if the current * search mode is not naive mode. * * @param referenceSet Set of reference points. * @param kernel Kernel to use for search. */ void Train(const MatType& referenceSet, KernelType& kernel); /** * Train the FastMKS model on the given reference tree. This takes ownership * of the tree, so you do not need to delete it! This will throw an exception * if the model is searching in naive mode (i.e. if Naive() == true). * * @param tree Tree to use as reference data. */ void Train(Tree* referenceTree); /** * Search for the points in the reference set with maximum kernel evaluation * to each point in the given query set. The resulting kernel evaluations are * stored in the kernels matrix, and the corresponding point indices are * stored in the indices matrix. The results for each point in the query set * are stored in the corresponding column of the kernels and products * matrices; for instance, the index of the point with maximum kernel * evaluation to point 4 in the query set will be stored in row 0 and column 4 * of the indices matrix. * * If querySet only contains a few points, the extra overhead of building a * tree to perform dual-tree search may not be warranted, and it may be faster * to use single-tree search, either by setting singleMode to false in the * constructor or with SingleMode(). * * @param querySet Set of query points (can be a single point). * @param k The number of maximum kernels to find. * @param indices Matrix to store resulting indices of max-kernel search in. * @param kernels Matrix to store resulting max-kernel values in. */ void Search(const MatType& querySet, const size_t k, arma::Mat& indices, arma::mat& kernels); /** * Search for the points in the reference set with maximum kernel evaluation * to each point in the query set corresponding to the given pre-built query * tree. The resulting kernel evaluations are stored in the kernels matrix, * and the corresponding point indices are stored in the indices matrix. The * results for each point in the query set are stored in the corresponding * column of the kernels and products matrices; for instance, the index of the * point with maximum kernel evaluation to point 4 in the query set will be * stored in row 0 and column 4 of the indices matrix. * * This will throw an exception if called while the FastMKS object has * 'single' set to true. * * Be aware that if your tree modifies the original input matrix, the results * here are with respect to the modified input matrix (that is, * queryTree->Dataset()). * * @param queryTree Tree built on query points. * @param k The number of maximum kernels to find. * @param indices Matrix to store resulting indices of max-kernel search in. * @param kernels Matrix to store resulting max-kernel values in. */ void Search(Tree* querySet, const size_t k, arma::Mat& indices, arma::mat& kernels); /** * Search for the maximum inner products of the query set (or if no query set * was passed, the reference set is used). The resulting maximum inner * products are stored in the products matrix and the corresponding point * indices are stores in the indices matrix. 
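 *
 * (A minimal usage sketch, not part of the original documentation; the matrix
 * 'data' and the choice of k below are hypothetical:)
 *
 * @code
 * arma::mat data = arma::randu<arma::mat>(10, 1000);  // 1000 points in 10-D.
 * FastMKS<kernel::LinearKernel> f(data);              // Builds a cover tree.
 * arma::Mat<size_t> indices;
 * arma::mat products;
 * f.Search(5, indices, products);  // 5 max-kernel candidates per point.
 * @endcode
 *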
The results for each point in * the query set are stored in the corresponding column of the indices and * products matrices; for instance, the index of the point with maximum inner * product to point 4 in the query set will be stored in row 0 and column 4 of * the indices matrix. * * @param k The number of maximum kernels to find. * @param indices Matrix to store resulting indices of max-kernel search in. * @param products Matrix to store resulting max-kernel values in. */ void Search(const size_t k, arma::Mat& indices, arma::mat& products); //! Get the inner-product metric induced by the given kernel. const metric::IPMetric& Metric() const { return metric; } //! Modify the inner-product metric induced by the given kernel. metric::IPMetric& Metric() { return metric; } //! Get whether or not single-tree search is used. bool SingleMode() const { return singleMode; } //! Modify whether or not single-tree search is used. bool& SingleMode() { return singleMode; } //! Get whether or not brute-force (naive) search is used. bool Naive() const { return naive; } //! Modify whether or not brute-force (naive) search is used. bool& Naive() { return naive; } //! Serialize the model. template void Serialize(Archive& ar, const unsigned int /* version */); private: //! The reference dataset. We never own this; only the tree or a higher level //! does. const MatType* referenceSet; //! The tree built on the reference dataset. Tree* referenceTree; //! If true, this object created the tree and is responsible for it. bool treeOwner; //! If true, we own the dataset. This happens in only a few situations. bool setOwner; //! If true, single-tree search is used. bool singleMode; //! If true, naive (brute-force) search is used. bool naive; //! The instantiated inner-product metric induced by the given kernel. metric::IPMetric metric; //! Candidate represents a possible candidate point (value, index). typedef std::pair Candidate; //! Compare two candidates based on the value. struct CandidateCmp { bool operator()(const Candidate& c1, const Candidate& c2) { return c1.first > c2.first; }; }; //! Use a priority queue to represent the list of candidate points. typedef std::priority_queue, CandidateCmp> CandidateList; }; } // namespace fastmks } // namespace mlpack // Include implementation. #include "fastmks_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/fastmks/fastmks_impl.hpp000066400000000000000000000334051315013601400233410ustar00rootroot00000000000000/** * @file fastmks_impl.hpp * @author Ryan Curtin * * Implementation of the FastMKS class (fast max-kernel search). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_FASTMKS_FASTMKS_IMPL_HPP #define MLPACK_METHODS_FASTMKS_FASTMKS_IMPL_HPP // In case it hasn't yet been included. #include "fastmks.hpp" #include "fastmks_rules.hpp" #include namespace mlpack { namespace fastmks { // No data; create a model on an empty dataset. template class TreeType> FastMKS::FastMKS(const bool singleMode, const bool naive) : referenceSet(new MatType()), referenceTree(NULL), treeOwner(true), setOwner(true), singleMode(singleMode), naive(naive) { Timer::Start("tree_building"); if (!naive) referenceTree = new Tree(*referenceSet); Timer::Stop("tree_building"); } // No instantiated kernel. 
template class TreeType> FastMKS::FastMKS( const MatType& referenceSet, const bool singleMode, const bool naive) : referenceSet(&referenceSet), referenceTree(NULL), treeOwner(true), setOwner(false), singleMode(singleMode), naive(naive) { Timer::Start("tree_building"); if (!naive) referenceTree = new Tree(referenceSet); Timer::Stop("tree_building"); } // Instantiated kernel. template class TreeType> FastMKS::FastMKS(const MatType& referenceSet, KernelType& kernel, const bool singleMode, const bool naive) : referenceSet(&referenceSet), referenceTree(NULL), treeOwner(true), setOwner(false), singleMode(singleMode), naive(naive), metric(kernel) { Timer::Start("tree_building"); // If necessary, the reference tree should be built. There is no query tree. if (!naive) referenceTree = new Tree(referenceSet, metric); Timer::Stop("tree_building"); } // One dataset, pre-built tree. template class TreeType> FastMKS::FastMKS(Tree* referenceTree, const bool singleMode) : referenceSet(&referenceTree->Dataset()), referenceTree(referenceTree), treeOwner(false), setOwner(false), singleMode(singleMode), naive(false), metric(referenceTree->Metric()) { // Nothing to do. } template class TreeType> FastMKS::~FastMKS() { // If we created the trees, we must delete them. if (treeOwner && referenceTree) delete referenceTree; if (setOwner) delete referenceSet; } template class TreeType> void FastMKS::Train(const MatType& referenceSet) { if (setOwner) delete this->referenceSet; this->referenceSet = &referenceSet; this->setOwner = false; if (!naive) { if (treeOwner && referenceTree) delete referenceTree; referenceTree = new Tree(referenceSet, metric); treeOwner = true; } } template class TreeType> void FastMKS::Train(const MatType& referenceSet, KernelType& kernel) { if (setOwner) delete this->referenceSet; this->referenceSet = &referenceSet; this->metric = metric::IPMetric(kernel); this->setOwner = false; if (!naive) { if (treeOwner && referenceTree) delete referenceTree; referenceTree = new Tree(referenceSet, metric); treeOwner = true; } } template class TreeType> void FastMKS::Train(Tree* tree) { if (naive) throw std::invalid_argument("cannot call FastMKS::Train() with a tree when " "in naive search mode"); if (setOwner) delete this->referenceSet; this->referenceSet = &tree->Dataset(); this->metric = metric::IPMetric(tree->Metric().Kernel()); this->setOwner = false; if (treeOwner && referenceTree) delete referenceTree; this->referenceTree = tree; this->treeOwner = true; } template class TreeType> void FastMKS::Search( const MatType& querySet, const size_t k, arma::Mat& indices, arma::mat& kernels) { if (k > referenceSet->n_cols) { std::stringstream ss; ss << "requested value of k (" << k << ") is greater than the number of " << "points in the reference set (" << referenceSet->n_cols << ")"; throw std::invalid_argument(ss.str()); } Timer::Start("computing_products"); // No remapping will be necessary because we are using the cover tree. indices.set_size(k, querySet.n_cols); kernels.set_size(k, querySet.n_cols); // Naive implementation. if (naive) { // Simple double loop. Stupid, slow, but a good benchmark. 
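    // For each query point, the loop below maintains a size-k min-heap of
    // (kernel value, index) pairs, seeded with k sentinel pairs of
    // (-DBL_MAX, invalid index); a reference point replaces the heap top only
    // if its kernel evaluation beats the current k'th-best value.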
for (size_t q = 0; q < querySet.n_cols; ++q) { const Candidate def = std::make_pair(-DBL_MAX, size_t() - 1); std::vector cList(k, def); CandidateList pqueue(CandidateCmp(), std::move(cList)); for (size_t r = 0; r < referenceSet->n_cols; ++r) { const double eval = metric.Kernel().Evaluate(querySet.col(q), referenceSet->col(r)); if (eval > pqueue.top().first) { Candidate c = std::make_pair(eval, r); pqueue.pop(); pqueue.push(c); } } for (size_t j = 1; j <= k; j++) { indices(k - j, q) = pqueue.top().second; kernels(k - j, q) = pqueue.top().first; pqueue.pop(); } } Timer::Stop("computing_products"); return; } // Single-tree implementation. if (singleMode) { // Create rules object (this will store the results). This constructor // precalculates each self-kernel value. typedef FastMKSRules RuleType; RuleType rules(*referenceSet, querySet, k, metric.Kernel()); typename Tree::template SingleTreeTraverser traverser(rules); for (size_t i = 0; i < querySet.n_cols; ++i) traverser.Traverse(i, *referenceTree); Log::Info << rules.BaseCases() << " base cases." << std::endl; Log::Info << rules.Scores() << " scores." << std::endl; rules.GetResults(indices, kernels); Timer::Stop("computing_products"); return; } // Dual-tree implementation. First, we need to build the query tree. We are // assuming it doesn't map anything... Timer::Stop("computing_products"); Timer::Start("tree_building"); Tree queryTree(querySet); Timer::Stop("tree_building"); Search(&queryTree, k, indices, kernels); } template class TreeType> void FastMKS::Search( Tree* queryTree, const size_t k, arma::Mat& indices, arma::mat& kernels) { if (k > referenceSet->n_cols) { std::stringstream ss; ss << "requested value of k (" << k << ") is greater than the number of " << "points in the reference set (" << referenceSet->n_cols << ")"; throw std::invalid_argument(ss.str()); } // If either naive mode or single mode is specified, this must fail. if (naive || singleMode) { throw std::invalid_argument("can't call Search() with a query tree when " "single mode or naive search is enabled"); } // No remapping will be necessary because we are using the cover tree. indices.set_size(k, queryTree->Dataset().n_cols); kernels.set_size(k, queryTree->Dataset().n_cols); Timer::Start("computing_products"); typedef FastMKSRules RuleType; RuleType rules(*referenceSet, queryTree->Dataset(), k, metric.Kernel()); typename Tree::template DualTreeTraverser traverser(rules); traverser.Traverse(*queryTree, *referenceTree); Log::Info << rules.BaseCases() << " base cases." << std::endl; Log::Info << rules.Scores() << " scores." << std::endl; rules.GetResults(indices, kernels); Timer::Stop("computing_products"); } template class TreeType> void FastMKS::Search( const size_t k, arma::Mat& indices, arma::mat& kernels) { // No remapping will be necessary because we are using the cover tree. Timer::Start("computing_products"); indices.set_size(k, referenceSet->n_cols); kernels.set_size(k, referenceSet->n_cols); // Naive implementation. if (naive) { // Simple double loop. Stupid, slow, but a good benchmark. for (size_t q = 0; q < referenceSet->n_cols; ++q) { const Candidate def = std::make_pair(-DBL_MAX, size_t() - 1); std::vector cList(k, def); CandidateList pqueue(CandidateCmp(), std::move(cList)); for (size_t r = 0; r < referenceSet->n_cols; ++r) { if (q == r) continue; // Don't return the point as its own candidate. 
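        // Evaluate K(q, r) and keep the pair only if it improves on the
        // current k'th-best kernel value for query point q.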
const double eval = metric.Kernel().Evaluate(referenceSet->col(q), referenceSet->col(r)); if (eval > pqueue.top().first) { Candidate c = std::make_pair(eval, r); pqueue.pop(); pqueue.push(c); } } for (size_t j = 1; j <= k; j++) { indices(k - j, q) = pqueue.top().second; kernels(k - j, q) = pqueue.top().first; pqueue.pop(); } } Timer::Stop("computing_products"); return; } // Single-tree implementation. if (singleMode) { // Create rules object (this will store the results). This constructor // precalculates each self-kernel value. typedef FastMKSRules RuleType; RuleType rules(*referenceSet, *referenceSet, k, metric.Kernel()); typename Tree::template SingleTreeTraverser traverser(rules); for (size_t i = 0; i < referenceSet->n_cols; ++i) traverser.Traverse(i, *referenceTree); // Save the number of pruned nodes. const size_t numPrunes = traverser.NumPrunes(); Log::Info << "Pruned " << numPrunes << " nodes." << std::endl; Log::Info << rules.BaseCases() << " base cases." << std::endl; Log::Info << rules.Scores() << " scores." << std::endl; rules.GetResults(indices, kernels); Timer::Stop("computing_products"); return; } // Dual-tree implementation. Timer::Stop("computing_products"); Search(referenceTree, k, indices, kernels); } //! Serialize the model. template class TreeType> template void FastMKS::Serialize( Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // Serialize preferences for search. ar & CreateNVP(naive, "naive"); ar & CreateNVP(singleMode, "singleMode"); // If we are doing naive search, serialize the dataset. Otherwise we // serialize the tree. if (naive) { if (Archive::is_loading::value) { if (setOwner && referenceSet) delete referenceSet; setOwner = true; } ar & CreateNVP(referenceSet, "referenceSet"); ar & CreateNVP(metric, "metric"); } else { // Delete the current reference tree, if necessary. if (Archive::is_loading::value) { if (treeOwner && referenceTree) delete referenceTree; treeOwner = true; } ar & CreateNVP(referenceTree, "referenceTree"); if (Archive::is_loading::value) { if (setOwner && referenceSet) delete referenceSet; referenceSet = &referenceTree->Dataset(); metric = metric::IPMetric(referenceTree->Metric().Kernel()); setOwner = false; } } } } // namespace fastmks } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/fastmks/fastmks_main.cpp000066400000000000000000000233071315013601400233170ustar00rootroot00000000000000/** * @file fastmks_main.cpp * @author Ryan Curtin * * Main executable for maximum inner product search. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "fastmks.hpp" #include "fastmks_model.hpp" using namespace std; using namespace mlpack; using namespace mlpack::fastmks; using namespace mlpack::kernel; using namespace mlpack::tree; using namespace mlpack::metric; PROGRAM_INFO("FastMKS (Fast Max-Kernel Search)", "This program will find the k maximum kernel of a set of points, " "using a query set and a reference set (which can optionally be the same " "set). More specifically, for each point in the query set, the k points in" " the reference set with maximum kernel evaluations are found. The kernel " "function used is specified by --kernel." 
"\n\n" "For example, the following command will calculate, for each point in " "'query.csv', the five points in 'reference.csv' with maximum kernel " "evaluation using the linear kernel. The kernel evaluations are stored in " "'kernels.csv' and the indices are stored in 'indices.csv'." "\n\n" "$ fastmks --k 5 --reference_file reference.csv --query_file query.csv\n" " --indices_file indices.csv --kernels_file kernels.csv --kernel linear" "\n\n" "The output files are organized such that row i and column j in the indices" " output file corresponds to the index of the point in the reference set " "that has i'th largest kernel evaluation with the point in the query set " "with index j. Row i and column j in the kernels output file corresponds " "to the kernel evaluation between those two points." "\n\n" "This executable performs FastMKS using a cover tree. The base used to " "build the cover tree can be specified with the --base option."); // Model-building parameters. PARAM_STRING_IN("reference_file", "File containing the reference dataset.", "r", ""); PARAM_STRING_IN("kernel", "Kernel type to use: 'linear', 'polynomial', " "'cosine', 'gaussian', 'epanechnikov', 'triangular', 'hyptan'.", "K", "linear"); PARAM_DOUBLE_IN("base", "Base to use during cover tree construction.", "b", 2.0); // Kernel parameters. PARAM_DOUBLE_IN("degree", "Degree of polynomial kernel.", "d", 2.0); PARAM_DOUBLE_IN("offset", "Offset of kernel (for polynomial and hyptan " "kernels).", "o", 0.0); PARAM_DOUBLE_IN("bandwidth", "Bandwidth (for Gaussian, Epanechnikov, and " "triangular kernels).", "w", 1.0); PARAM_DOUBLE_IN("scale", "Scale of kernel (for hyptan kernel).", "s", 1.0); // Load/save models. PARAM_STRING_IN("input_model_file", "File containing FastMKS model.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save FastMKS model to.", "M"); // Search preferences. PARAM_STRING_IN("query_file", "File containing the query dataset.", "q", ""); PARAM_INT_IN("k", "Number of maximum kernels to find.", "k", 0); PARAM_FLAG("naive", "If true, O(n^2) naive mode is used for computation.", "N"); PARAM_FLAG("single", "If true, single-tree search is used (as opposed to " "dual-tree search.", "S"); PARAM_STRING_OUT("kernels_file", "File to save kernels into.", "p"); PARAM_STRING_OUT("indices_file", "File to save indices of kernels into.", "i"); int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); // Validate command-line parameters. if (CLI::HasParam("reference_file") && CLI::HasParam("input_model_file")) Log::Fatal << "Cannot specify both --reference_file (-r) and " << "--input_model_file (-m)!" << endl; if (!CLI::HasParam("reference_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "Must specify either --reference_file (-r) or " << "--input_model_file (-m)!" << endl; if (CLI::HasParam("input_model_file")) { if (CLI::HasParam("kernel")) Log::Warn << "--kernel (-k) ignored because --input_model_file (-m) is " << "specified." << endl; if (CLI::HasParam("bandwidth")) Log::Warn << "--bandwidth (-w) ignored because --input_model_file (-m) is" << " specified." << endl; if (CLI::HasParam("degree")) Log::Warn << "--degree (-d) ignored because --input_model_file (-m) is " << " specified." << endl; if (CLI::HasParam("offset")) Log::Warn << "--offset (-o) ignored because --input_model_file (-m) is " << " specified." 
<< endl; } if (!CLI::HasParam("k") && (CLI::HasParam("indices_file") || CLI::HasParam("kernels_file"))) Log::Warn << "--indices_file and --kernels_file ignored, because no search " << "task is specified (i.e., --k is not specified)!" << endl; if (CLI::HasParam("k") && !(CLI::HasParam("indices_file") || CLI::HasParam("kernels_file"))) Log::Warn << "Search specified with --k, but no output will be saved " << "because neither --indices_file nor --kernels_file are specified!" << endl; // Check on kernel type. const string kernelType = CLI::GetParam("kernel"); if ((kernelType != "linear") && (kernelType != "polynomial") && (kernelType != "cosine") && (kernelType != "gaussian") && (kernelType != "triangular") && (kernelType != "hyptan") && (kernelType != "epanechnikov")) { Log::Fatal << "Invalid kernel type: '" << kernelType << "'; must be " << "'linear', 'polynomial', 'cosine', 'gaussian', 'triangular', or " << "'epanechnikov'." << endl; } // Naive mode overrides single mode. if (CLI::HasParam("naive") && CLI::HasParam("single")) Log::Warn << "--single ignored because --naive is present." << endl; FastMKSModel model; arma::mat referenceData; if (CLI::HasParam("reference_file")) { data::Load(CLI::GetParam("reference_file"), referenceData, true); Log::Info << "Loaded reference data from '" << CLI::GetParam("reference_file") << "' (" << referenceData.n_rows << " x " << referenceData.n_cols << ")." << endl; // For cover tree construction. const double base = CLI::GetParam("base"); // Kernel parameters. const string kernelType = CLI::GetParam("kernel"); const double degree = CLI::GetParam("degree"); const double offset = CLI::GetParam("offset"); const double bandwidth = CLI::GetParam("bandwidth"); const double scale = CLI::GetParam("scale"); // Search preferences. const bool naive = CLI::HasParam("naive"); const bool single = CLI::HasParam("single"); if (kernelType == "linear") { LinearKernel lk; model.KernelType() = FastMKSModel::LINEAR_KERNEL; model.BuildModel(referenceData, lk, single, naive, base); } else if (kernelType == "polynomial") { PolynomialKernel pk(degree, offset); model.KernelType() = FastMKSModel::POLYNOMIAL_KERNEL; model.BuildModel(referenceData, pk, single, naive, base); } else if (kernelType == "cosine") { CosineDistance cd; model.KernelType() = FastMKSModel::COSINE_DISTANCE; model.BuildModel(referenceData, cd, single, naive, base); } else if (kernelType == "gaussian") { GaussianKernel gk(bandwidth); model.KernelType() = FastMKSModel::GAUSSIAN_KERNEL; model.BuildModel(referenceData, gk, single, naive, base); } else if (kernelType == "epanechnikov") { EpanechnikovKernel ek(bandwidth); model.KernelType() = FastMKSModel::EPANECHNIKOV_KERNEL; model.BuildModel(referenceData, ek, single, naive, base); } else if (kernelType == "triangular") { TriangularKernel tk(bandwidth); model.KernelType() = FastMKSModel::TRIANGULAR_KERNEL; model.BuildModel(referenceData, tk, single, naive, base); } else if (kernelType == "hyptan") { HyperbolicTangentKernel htk(scale, offset); model.KernelType() = FastMKSModel::HYPTAN_KERNEL; model.BuildModel(referenceData, htk, single, naive, base); } } else { // Load model from file, then do whatever is necessary. data::Load(CLI::GetParam("input_model_file"), "fastmks_model", model, true); } // Set search preferences. model.Naive() = CLI::HasParam("naive"); model.SingleMode() = CLI::HasParam("single"); // Should we do search? 
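  // (Note that a model may be built and saved without ever searching; the
  // search block below runs only when --k is given.)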
  if (CLI::HasParam("k"))
  {
    arma::mat kernels;
    arma::Mat<size_t> indices;
    if (CLI::HasParam("query_file"))
    {
      const string queryFile = CLI::GetParam<string>("query_file");
      const double base = CLI::GetParam<double>("base");

      arma::mat queryData;
      data::Load(queryFile, queryData, true);
      Log::Info << "Loaded query data from '" << queryFile << "' ("
          << queryData.n_rows << " x " << queryData.n_cols << ")." << endl;

      model.Search(queryData, (size_t) CLI::GetParam<int>("k"), indices,
          kernels, base);
    }
    else
    {
      model.Search((size_t) CLI::GetParam<int>("k"), indices, kernels);
    }

    // Save output, if we were asked to.
    if (CLI::HasParam("kernels_file"))
    {
      const string kernelsFile = CLI::GetParam<string>("kernels_file");
      data::Save(kernelsFile, kernels, false);
    }

    if (CLI::HasParam("indices_file"))
    {
      const string indicesFile = CLI::GetParam<string>("indices_file");
      data::Save(indicesFile, indices, false);
    }
  }

  // Save the model, if requested.
  if (CLI::HasParam("output_model_file"))
    data::Save(CLI::GetParam<string>("output_model_file"), "fastmks_model",
        model);
}

mlpack-2.2.5/src/mlpack/methods/fastmks/fastmks_model.cpp

/**
 * @file fastmks_model.cpp
 * @author Ryan Curtin
 *
 * Implementation of non-templatized functions of FastMKSModel.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "fastmks_model.hpp"

namespace mlpack {
namespace fastmks {

FastMKSModel::FastMKSModel(const int kernelType) :
    kernelType(kernelType),
    linear(NULL),
    polynomial(NULL),
    cosine(NULL),
    gaussian(NULL),
    epan(NULL),
    triangular(NULL),
    hyptan(NULL)
{
  // Nothing to do.
}

FastMKSModel::~FastMKSModel()
{
  // Clean memory.
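  // At most one of the seven kernel-specific FastMKS pointers is ever
  // non-NULL, but every pointer is guarded, so the destructor is safe no
  // matter which model (if any) was built.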
if (linear) delete linear; if (polynomial) delete polynomial; if (cosine) delete cosine; if (gaussian) delete gaussian; if (epan) delete epan; if (triangular) delete triangular; if (hyptan) delete hyptan; } bool FastMKSModel::Naive() const { switch (kernelType) { case LINEAR_KERNEL: return linear->Naive(); case POLYNOMIAL_KERNEL: return polynomial->Naive(); case COSINE_DISTANCE: return cosine->Naive(); case GAUSSIAN_KERNEL: return gaussian->Naive(); case EPANECHNIKOV_KERNEL: return epan->Naive(); case TRIANGULAR_KERNEL: return triangular->Naive(); case HYPTAN_KERNEL: return hyptan->Naive(); } throw std::runtime_error("invalid model type"); } bool& FastMKSModel::Naive() { switch (kernelType) { case LINEAR_KERNEL: return linear->Naive(); case POLYNOMIAL_KERNEL: return polynomial->Naive(); case COSINE_DISTANCE: return cosine->Naive(); case GAUSSIAN_KERNEL: return gaussian->Naive(); case EPANECHNIKOV_KERNEL: return epan->Naive(); case TRIANGULAR_KERNEL: return triangular->Naive(); case HYPTAN_KERNEL: return hyptan->Naive(); } throw std::runtime_error("invalid model type"); } bool FastMKSModel::SingleMode() const { switch (kernelType) { case LINEAR_KERNEL: return linear->SingleMode(); case POLYNOMIAL_KERNEL: return polynomial->SingleMode(); case COSINE_DISTANCE: return cosine->SingleMode(); case GAUSSIAN_KERNEL: return gaussian->SingleMode(); case EPANECHNIKOV_KERNEL: return epan->SingleMode(); case TRIANGULAR_KERNEL: return triangular->SingleMode(); case HYPTAN_KERNEL: return hyptan->SingleMode(); } throw std::runtime_error("invalid model type"); } bool& FastMKSModel::SingleMode() { switch (kernelType) { case LINEAR_KERNEL: return linear->SingleMode(); case POLYNOMIAL_KERNEL: return polynomial->SingleMode(); case COSINE_DISTANCE: return cosine->SingleMode(); case GAUSSIAN_KERNEL: return gaussian->SingleMode(); case EPANECHNIKOV_KERNEL: return epan->SingleMode(); case TRIANGULAR_KERNEL: return triangular->SingleMode(); case HYPTAN_KERNEL: return hyptan->SingleMode(); } throw std::runtime_error("invalid model type"); } void FastMKSModel::Search(const arma::mat& querySet, const size_t k, arma::Mat& indices, arma::mat& kernels, const double base) { switch (kernelType) { case LINEAR_KERNEL: Search(*linear, querySet, k, indices, kernels, base); break; case POLYNOMIAL_KERNEL: Search(*polynomial, querySet, k, indices, kernels, base); break; case COSINE_DISTANCE: Search(*cosine, querySet, k, indices, kernels, base); break; case GAUSSIAN_KERNEL: Search(*gaussian, querySet, k, indices, kernels, base); break; case EPANECHNIKOV_KERNEL: Search(*epan, querySet, k, indices, kernels, base); break; case TRIANGULAR_KERNEL: Search(*triangular, querySet, k, indices, kernels, base); break; case HYPTAN_KERNEL: Search(*hyptan, querySet, k, indices, kernels, base); break; default: throw std::runtime_error("invalid model type"); } } void FastMKSModel::Search(const size_t k, arma::Mat& indices, arma::mat& kernels) { switch (kernelType) { case LINEAR_KERNEL: linear->Search(k, indices, kernels); break; case POLYNOMIAL_KERNEL: polynomial->Search(k, indices, kernels); break; case COSINE_DISTANCE: cosine->Search(k, indices, kernels); break; case GAUSSIAN_KERNEL: gaussian->Search(k, indices, kernels); break; case EPANECHNIKOV_KERNEL: epan->Search(k, indices, kernels); break; case TRIANGULAR_KERNEL: triangular->Search(k, indices, kernels); break; case HYPTAN_KERNEL: hyptan->Search(k, indices, kernels); break; default: throw std::invalid_argument("invalid model type"); } } } // namespace fastmks } // namespace mlpack 
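To tie the pieces of FastMKSModel together before its declaration below, here
is a minimal driver sketch in the spirit of fastmks_main.cpp (the data matrix,
bandwidth, and value of k are hypothetical, and error handling is omitted):

#include <mlpack/core.hpp>
#include <mlpack/methods/fastmks/fastmks_model.hpp>

using namespace mlpack;
using namespace mlpack::fastmks;

int main()
{
  // Hypothetical reference set: 500 random points in 10 dimensions.
  arma::mat data = arma::randu<arma::mat>(10, 500);

  // Build a dual-tree model (singleMode = false, naive = false) over the
  // reference set with a Gaussian kernel and cover tree base 2.0.
  kernel::GaussianKernel gk(0.5);
  FastMKSModel model(FastMKSModel::GAUSSIAN_KERNEL);
  model.BuildModel(data, gk, false, false, 2.0);

  // Find the 3 largest kernel values for each reference point.
  arma::Mat<size_t> indices;
  arma::mat kernels;
  model.Search(3, indices, kernels);
}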
mlpack-2.2.5/src/mlpack/methods/fastmks/fastmks_model.hpp000066400000000000000000000117261315013601400235020ustar00rootroot00000000000000/** * @file fastmks_model.hpp * @author Ryan Curtin * * A utility struct to contain all the possible FastMKS models. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_FASTMKS_FASTMKS_MODEL_HPP #define MLPACK_METHODS_FASTMKS_FASTMKS_MODEL_HPP #include #include "fastmks.hpp" #include #include #include #include #include #include #include #include #include #include #include namespace mlpack { namespace fastmks { //! A utility struct to contain all the possible FastMKS models, for use by the //! mlpack_fastmks program. class FastMKSModel { public: //! A list of all the kernels we support. enum KernelTypes { LINEAR_KERNEL, POLYNOMIAL_KERNEL, COSINE_DISTANCE, GAUSSIAN_KERNEL, EPANECHNIKOV_KERNEL, TRIANGULAR_KERNEL, HYPTAN_KERNEL }; /** * Create the FastMKSModel with the given kernel type. */ FastMKSModel(const int kernelType = LINEAR_KERNEL); /** * Clean memory. */ ~FastMKSModel(); /** * Build the model on the given reference set. Make sure kernelType is equal * to the correct entry in KernelTypes for the given KernelType class! */ template void BuildModel(const arma::mat& referenceData, TKernelType& kernel, const bool singleMode, const bool naive, const double base); //! Get whether or not naive search is used. bool Naive() const; //! Set whether or not naive search is used. bool& Naive(); //! Get whether or not single-tree search is used. bool SingleMode() const; //! Set whether or not single-tree search is used. bool& SingleMode(); //! Get the kernel type. int KernelType() const { return kernelType; } //! Modify the kernel type. int& KernelType() { return kernelType; } /** * Search with a different query set. * * @param querySet Set to search with. * @param k Number of max-kernel candidates to search for. * @param indices A matrix in which to store the indices of max-kernel * candidates. * @param kernels A matrix in which to store the max-kernel candidate kernel * values. * @param base Base to use for cover tree building (if in dual-tree search * mode). */ void Search(const arma::mat& querySet, const size_t k, arma::Mat& indices, arma::mat& kernels, const double base); /** * Search with the reference set as the query set. * * @param k Number of max-kernel candidates to search for. * @param indices A matrix in which to store the indices of max-kernel * candidates. * @param kernels A matrix in which to store the max-kernel candidate kernel * values. */ void Search(const size_t k, arma::Mat& indices, arma::mat& kernels); /** * Serialize the model. */ template void Serialize(Archive& ar, const unsigned int /* version */); private: //! The type of kernel we are using. int kernelType; //! This will only be non-NULL if this is the type of kernel we are using. FastMKS* linear; //! This will only be non-NULL if this is the type of kernel we are using. FastMKS* polynomial; //! This will only be non-NULL if this is the type of kernel we are using. FastMKS* cosine; //! This will only be non-NULL if this is the type of kernel we are using. FastMKS* gaussian; //! This will only be non-NULL if this is the type of kernel we are using. FastMKS* epan; //! 
This will only be non-NULL if this is the type of kernel we are using.
  FastMKS<kernel::TriangularKernel>* triangular;
  //! This will only be non-NULL if this is the type of kernel we are using.
  FastMKS<kernel::HyperbolicTangentKernel>* hyptan;

  //! Build a query tree and execute the search.
  template<typename FastMKSType>
  void Search(FastMKSType& f,
              const arma::mat& querySet,
              const size_t k,
              arma::Mat<size_t>& indices,
              arma::mat& kernels,
              const double base);
};

} // namespace fastmks
} // namespace mlpack

#include "fastmks_model_impl.hpp"

#endif

mlpack-2.2.5/src/mlpack/methods/fastmks/fastmks_model_impl.hpp

/**
 * @file fastmks_model_impl.hpp
 * @author Ryan Curtin
 *
 * Implementation of templated functions of FastMKSModel.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_FASTMKS_FASTMKS_MODEL_IMPL_HPP
#define MLPACK_METHODS_FASTMKS_FASTMKS_MODEL_IMPL_HPP

#include "fastmks_model.hpp"

namespace mlpack {
namespace fastmks {

//! This is called when the KernelType is the same as the model.
template<typename KernelType>
void BuildFastMKSModel(FastMKS<KernelType>& f,
                       KernelType& k,
                       const arma::mat& referenceData,
                       const double base)
{
  // Do we need to build the tree?
  if (f.Naive())
  {
    f.Train(referenceData, k);
  }
  else
  {
    // Create the tree with the specified base.
    Timer::Start("tree_building");
    metric::IPMetric<KernelType> metric(k);
    typename FastMKS<KernelType>::Tree* tree =
        new typename FastMKS<KernelType>::Tree(referenceData, metric, base);
    Timer::Stop("tree_building");

    f.Train(tree);
  }
}

//! This is only called when something goes wrong.
template<typename FastMKSType, typename KernelType>
void BuildFastMKSModel(FastMKSType& /* f */,
                       KernelType& /* k */,
                       const arma::mat& /* referenceData */,
                       const double /* base */)
{
  throw std::invalid_argument("FastMKSModel::BuildModel(): given kernel type is"
      " not equal to kernel type of the model!");
}

template<typename TKernelType>
void FastMKSModel::BuildModel(const arma::mat& referenceData,
                              TKernelType& kernel,
                              const bool singleMode,
                              const bool naive,
                              const double base)
{
  // Clean memory if necessary.
  if (linear)
    delete linear;
  if (polynomial)
    delete polynomial;
  if (cosine)
    delete cosine;
  if (gaussian)
    delete gaussian;
  if (epan)
    delete epan;
  if (triangular)
    delete triangular;
  if (hyptan)
    delete hyptan;

  linear = NULL;
  polynomial = NULL;
  cosine = NULL;
  gaussian = NULL;
  epan = NULL;
  triangular = NULL;
  hyptan = NULL;

  // Instantiate the right model.
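  // Exactly one branch below allocates the FastMKS instance that matches
  // kernelType; BuildFastMKSModel() then either trains it directly (naive
  // mode) or builds a cover tree with the requested base and hands the tree
  // to Train().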
switch (kernelType) { case LINEAR_KERNEL: linear = new FastMKS(singleMode, naive); BuildFastMKSModel(*linear, kernel, referenceData, base); break; case POLYNOMIAL_KERNEL: polynomial = new FastMKS(singleMode, naive); BuildFastMKSModel(*polynomial, kernel, referenceData, base); break; case COSINE_DISTANCE: cosine = new FastMKS(singleMode, naive); BuildFastMKSModel(*cosine, kernel, referenceData, base); break; case GAUSSIAN_KERNEL: gaussian = new FastMKS(singleMode, naive); BuildFastMKSModel(*gaussian, kernel, referenceData, base); break; case EPANECHNIKOV_KERNEL: epan = new FastMKS(singleMode, naive); BuildFastMKSModel(*epan, kernel, referenceData, base); break; case TRIANGULAR_KERNEL: triangular = new FastMKS(singleMode, naive); BuildFastMKSModel(*triangular, kernel, referenceData, base); break; case HYPTAN_KERNEL: hyptan = new FastMKS(singleMode, naive); BuildFastMKSModel(*hyptan, kernel, referenceData, base); break; } } template void FastMKSModel::Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(kernelType, "kernelType"); if (Archive::is_loading::value) { // Clean memory. if (linear) delete linear; if (polynomial) delete polynomial; if (cosine) delete cosine; if (gaussian) delete gaussian; if (epan) delete epan; if (triangular) delete triangular; if (hyptan) delete hyptan; linear = NULL; polynomial = NULL; cosine = NULL; gaussian = NULL; epan = NULL; triangular = NULL; hyptan = NULL; } // Serialize the correct model. switch (kernelType) { case LINEAR_KERNEL: ar & CreateNVP(linear, "linear_fastmks"); break; case POLYNOMIAL_KERNEL: ar & CreateNVP(polynomial, "polynomial_fastmks"); break; case COSINE_DISTANCE: ar & CreateNVP(cosine, "cosine_fastmks"); break; case GAUSSIAN_KERNEL: ar & CreateNVP(gaussian, "gaussian_fastmks"); break; case EPANECHNIKOV_KERNEL: ar & CreateNVP(epan, "epan_fastmks"); break; case TRIANGULAR_KERNEL: ar & CreateNVP(triangular, "triangular_fastmks"); break; case HYPTAN_KERNEL: ar & CreateNVP(hyptan, "hyptan_fastmks"); break; } } template void FastMKSModel::Search(FastMKSType& f, const arma::mat& querySet, const size_t k, arma::Mat& indices, arma::mat& kernels, const double base) { if (f.Naive() || f.SingleMode()) { f.Search(querySet, k, indices, kernels); } else { Timer::Start("tree_building"); typename FastMKSType::Tree queryTree(querySet, base); Timer::Stop("tree_building"); f.Search(&queryTree, k, indices, kernels); } } } // namespace fastmks } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/fastmks/fastmks_rules.hpp000066400000000000000000000161411315013601400235300ustar00rootroot00000000000000/** * @file fastmks_rules.hpp * @author Ryan Curtin * * Rules for the single or dual tree traversal for fast max-kernel search. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_FASTMKS_FASTMKS_RULES_HPP #define MLPACK_METHODS_FASTMKS_FASTMKS_RULES_HPP #include #include #include #include #include namespace mlpack { namespace fastmks { /** * The FastMKSRules class is a template helper class used by FastMKS class when * performing exact max-kernel search. For each point in the query dataset, it * keeps track of the k best candidates in the reference dataset. * * @tparam KernelType Type of kernel to run FastMKS with. 
* @tparam TreeType Type of tree to run FastMKS with; it must satisfy the * TreeType policy API. */ template class FastMKSRules { public: /** * Construct the FastMKSRules object. This is usually done from within the * FastMKS class at search time. * * @param referenceSet Set of reference data. * @param querySet Set of query data. * @param k Number of candidates to search for. * @param kernel Kernel to run FastMKS with. */ FastMKSRules(const typename TreeType::Mat& referenceSet, const typename TreeType::Mat& querySet, const size_t k, KernelType& kernel); /** * Store the list of candidates for each query point in the given matrices. * * @param indices Matrix storing lists of candidate for each query point. * @param products Matrix storing kernel value for each candidate. */ void GetResults(arma::Mat& indices, arma::mat& products); //! Compute the base case (kernel value) between two points. double BaseCase(const size_t queryIndex, const size_t referenceIndex); /** * Get the score for recursion order. A low score indicates priority for * recursion, while DBL_MAX indicates that the node should not be recursed * into at all (it should be pruned). * * @param queryIndex Index of query point. * @param referenceNode Candidate to be recursed into. */ double Score(const size_t queryIndex, TreeType& referenceNode); /** * Get the score for recursion order. A low score indicates priority for * recursion, while DBL_MAX indicates that the node should not be recursed * into at all (it should be pruned). * * @param queryNode Candidate query node to be recursed into. * @param referenceNode Candidate reference node to be recursed into. */ double Score(TreeType& queryNode, TreeType& referenceNode); /** * Re-evaluate the score for recursion order. A low score indicates priority * for recursion, while DBL_MAX indicates that a node should not be recursed * into at all (it should be pruned). This is used when the score has already * been calculated, but another recursion may have modified the bounds for * pruning. So the old score is checked against the new pruning bound. * * @param queryIndex Index of query point. * @param referenceNode Candidate node to be recursed into. * @param oldScore Old score produced by Score() (or Rescore()). */ double Rescore(const size_t queryIndex, TreeType& referenceNode, const double oldScore) const; /** * Re-evaluate the score for recursion order. A low score indicates priority * for recursion, while DBL_MAX indicates that a node should not be recursed * into at all (it should be pruned). This is used when the score has already * been calculated, but another recursion may have modified the bounds for * pruning. So the old score is checked against the new pruning bound. * * @param queryNode Candidate query node to be recursed into. * @param referenceNode Candidate reference node to be recursed into. * @param oldScore Old score produced by Score() (or Rescore()). */ double Rescore(TreeType& queryNode, TreeType& referenceNode, const double oldScore) const; //! Get the number of times BaseCase() was called. size_t BaseCases() const { return baseCases; } //! Modify the number of times BaseCase() was called. size_t& BaseCases() { return baseCases; } //! Get the number of times Score() was called. size_t Scores() const { return scores; } //! Modify the number of times Score() was called. 
size_t& Scores() { return scores; } typedef typename tree::TraversalInfo TraversalInfoType; const TraversalInfoType& TraversalInfo() const { return traversalInfo; } TraversalInfoType& TraversalInfo() { return traversalInfo; } private: //! The reference dataset. const typename TreeType::Mat& referenceSet; //! The query dataset. const typename TreeType::Mat& querySet; //! Candidate represents a possible candidate point (value, index). typedef std::pair Candidate; //! Compare two candidates based on the value. struct CandidateCmp { bool operator()(const Candidate& c1, const Candidate& c2) const { return c1.first > c2.first; }; }; //! Use a min heap to represent the list of candidate points. //! We will use a boost::heap::priority_queue instead of a std::priority_queue //! because we need to iterate over all the candidates and std::priority_queue //! doesn't provide that interface. typedef boost::heap::priority_queue> CandidateList; //! Set of candidates for each point. std::vector candidates; //! Number of points to search for. const size_t k; //! Cached query set self-kernels (|| q || for each q). arma::vec queryKernels; //! Cached reference set self-kernels (|| r || for each r). arma::vec referenceKernels; //! The instantiated kernel. KernelType& kernel; //! The last query index BaseCase() was called with. size_t lastQueryIndex; //! The last reference index BaseCase() was called with. size_t lastReferenceIndex; //! The last kernel evaluation resulting from BaseCase(). double lastKernel; //! Calculate the bound for a given query node. double CalculateBound(TreeType& queryNode) const; /** * Helper function to insert a point into the list of candidate points. * * @param queryIndex Index of point whose neighbors we are inserting into. * @param index Index of reference point which is being inserted. * @param product Kernel value for given candidate. */ void InsertNeighbor(const size_t queryIndex, const size_t index, const double product); //! For benchmarking. size_t baseCases; //! For benchmarking. size_t scores; TraversalInfoType traversalInfo; }; } // namespace fastmks } // namespace mlpack // Include implementation. #include "fastmks_rules_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/fastmks/fastmks_rules_impl.hpp000066400000000000000000000455301315013601400245550ustar00rootroot00000000000000/** * @file fastmks_rules_impl.hpp * @author Ryan Curtin * * Implementation of FastMKSRules for cover tree search. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_FASTMKS_FASTMKS_RULES_IMPL_HPP #define MLPACK_METHODS_FASTMKS_FASTMKS_RULES_IMPL_HPP // In case it hasn't already been included. #include "fastmks_rules.hpp" namespace mlpack { namespace fastmks { template FastMKSRules::FastMKSRules( const typename TreeType::Mat& referenceSet, const typename TreeType::Mat& querySet, const size_t k, KernelType& kernel) : referenceSet(referenceSet), querySet(querySet), k(k), kernel(kernel), lastQueryIndex(-1), lastReferenceIndex(-1), lastKernel(0.0), baseCases(0), scores(0) { // Precompute each self-kernel. 
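  // Each cached value is sqrt(K(x, x)) = ||phi(x)||, the norm of the point in
  // the kernel-induced feature space; the pruning bounds below multiply these
  // norms by node distance bounds.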
queryKernels.set_size(querySet.n_cols); for (size_t i = 0; i < querySet.n_cols; ++i) queryKernels[i] = sqrt(kernel.Evaluate(querySet.col(i), querySet.col(i))); referenceKernels.set_size(referenceSet.n_cols); for (size_t i = 0; i < referenceSet.n_cols; ++i) referenceKernels[i] = sqrt(kernel.Evaluate(referenceSet.col(i), referenceSet.col(i))); // Set to invalid memory, so that the first node combination does not try to // dereference null pointers. traversalInfo.LastQueryNode() = (TreeType*) this; traversalInfo.LastReferenceNode() = (TreeType*) this; // Let's build the list of candidate points for each query point. // It will be initialized with k candidates: (-DBL_MAX, size_t() - 1) // The list of candidates will be updated when visiting new points with the // BaseCase() method. const Candidate def = std::make_pair(-DBL_MAX, size_t() - 1); CandidateList pqueue; pqueue.reserve(k); for (size_t i = 0; i < k; i++) pqueue.push(def); std::vector tmp(querySet.n_cols, pqueue); candidates.swap(tmp); } template void FastMKSRules::GetResults( arma::Mat& indices, arma::mat& products) { indices.set_size(k, querySet.n_cols); products.set_size(k, querySet.n_cols); for (size_t i = 0; i < querySet.n_cols; i++) { CandidateList& pqueue = candidates[i]; for (size_t j = 1; j <= k; j++) { indices(k - j, i) = pqueue.top().second; products(k - j, i) = pqueue.top().first; pqueue.pop(); } } } template inline force_inline double FastMKSRules::BaseCase( const size_t queryIndex, const size_t referenceIndex) { // Score() always happens before BaseCase() for a given node combination. For // cover trees, the kernel evaluation between the two centroid points already // happened. So we don't need to do it. Note that this optimizes out if the // first conditional is false (its result is known at compile time). if (tree::TreeTraits::FirstPointIsCentroid) { if ((queryIndex == lastQueryIndex) && (referenceIndex == lastReferenceIndex)) return lastKernel; // Store new values. lastQueryIndex = queryIndex; lastReferenceIndex = referenceIndex; } ++baseCases; double kernelEval = kernel.Evaluate(querySet.col(queryIndex), referenceSet.col(referenceIndex)); // Update the last kernel value, if we need to. if (tree::TreeTraits::FirstPointIsCentroid) lastKernel = kernelEval; // If the reference and query sets are identical, we still need to compute the // base case (so that things can be bounded properly), but we won't add it to // the results. if ((&querySet == &referenceSet) && (queryIndex == referenceIndex)) return kernelEval; InsertNeighbor(queryIndex, referenceIndex, kernelEval); return kernelEval; } template double FastMKSRules::Score(const size_t queryIndex, TreeType& referenceNode) { // Compare with the current best. const double bestKernel = candidates[queryIndex].top().first; // See if we can perform a parent-child prune. 
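  // The prune works as follows: if the kernel value cached at the parent,
  // inflated by the loosest possible change over a ball of radius
  // (parentDist + furthestDist), still cannot beat the current k'th-best
  // kernel value, this node and all of its descendants can be discarded
  // without a single kernel evaluation.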
const double furthestDist = referenceNode.FurthestDescendantDistance(); if (referenceNode.Parent() != NULL) { double maxKernelBound; const double parentDist = referenceNode.ParentDistance(); const double combinedDistBound = parentDist + furthestDist; const double lastKernel = referenceNode.Parent()->Stat().LastKernel(); if (kernel::KernelTraits::IsNormalized) { const double squaredDist = std::pow(combinedDistBound, 2.0); const double delta = (1 - 0.5 * squaredDist); if (lastKernel <= delta) { const double gamma = combinedDistBound * sqrt(1 - 0.25 * squaredDist); maxKernelBound = lastKernel * delta + gamma * sqrt(1 - std::pow(lastKernel, 2.0)); } else { maxKernelBound = 1.0; } } else { maxKernelBound = lastKernel + combinedDistBound * queryKernels[queryIndex]; } if (maxKernelBound < bestKernel) return DBL_MAX; } // Calculate the maximum possible kernel value, either by calculating the // centroid or, if the centroid is a point, use that. ++scores; double kernelEval; if (tree::TreeTraits::FirstPointIsCentroid) { // Could it be that this kernel evaluation has already been calculated? if (tree::TreeTraits::HasSelfChildren && referenceNode.Parent() != NULL && referenceNode.Point(0) == referenceNode.Parent()->Point(0)) { kernelEval = referenceNode.Parent()->Stat().LastKernel(); } else { kernelEval = BaseCase(queryIndex, referenceNode.Point(0)); } } else { arma::vec refCenter; referenceNode.Center(refCenter); kernelEval = kernel.Evaluate(querySet.col(queryIndex), refCenter); } referenceNode.Stat().LastKernel() = kernelEval; double maxKernel; if (kernel::KernelTraits::IsNormalized) { const double squaredDist = std::pow(furthestDist, 2.0); const double delta = (1 - 0.5 * squaredDist); if (kernelEval <= delta) { const double gamma = furthestDist * sqrt(1 - 0.25 * squaredDist); maxKernel = kernelEval * delta + gamma * sqrt(1 - std::pow(kernelEval, 2.0)); } else { maxKernel = 1.0; } } else { maxKernel = kernelEval + furthestDist * queryKernels[queryIndex]; } // We return the inverse of the maximum kernel so that larger kernels are // recursed into first. return (maxKernel >= bestKernel) ? (1.0 / maxKernel) : DBL_MAX; } template double FastMKSRules::Score(TreeType& queryNode, TreeType& referenceNode) { // Update and get the query node's bound. queryNode.Stat().Bound() = CalculateBound(queryNode); const double bestKernel = queryNode.Stat().Bound(); // First, see if we can make a parent-child or parent-parent prune. These // four bounds on the maximum kernel value are looser than the bound normally // used, but they can prevent a base case from needing to be calculated. // Convenience caching so lines are shorter. 
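  // (For an unnormalized kernel, the bound assembled from these cached
  // quantities has the form
  //   K(q, r) <= K(c_q, c_r) + d_q ||phi(c_r)|| + d_r ||phi(c_q)|| + d_q d_r,
  // where c_q and c_r are the node centers and d_q and d_r are the furthest
  // descendant distances.)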
const double queryParentDist = queryNode.ParentDistance(); const double queryDescDist = queryNode.FurthestDescendantDistance(); const double refParentDist = referenceNode.ParentDistance(); const double refDescDist = referenceNode.FurthestDescendantDistance(); double adjustedScore = traversalInfo.LastBaseCase(); const double queryDistBound = (queryParentDist + queryDescDist); const double refDistBound = (refParentDist + refDescDist); double dualQueryTerm; double dualRefTerm; // The parent-child and parent-parent prunes work by applying the same pruning // condition as when the parent node was used, except they are tighter because // queryDistBound < queryNode.Parent()->FurthestDescendantDistance() // and // refDistBound < referenceNode.Parent()->FurthestDescendantDistance() // so we construct the same bounds that were used when Score() was called with // the parents, except with the tighter distance bounds. Sometimes this // allows us to prune nodes without evaluating the base cases between them. if (traversalInfo.LastQueryNode() == queryNode.Parent()) { // We can assume that queryNode.Parent() != NULL, because at the root node // combination, the traversalInfo.LastQueryNode() pointer will _not_ be // NULL. We also should be guaranteed that // traversalInfo.LastReferenceNode() is either the reference node or the // parent of the reference node. adjustedScore += queryDistBound * traversalInfo.LastReferenceNode()->Stat().SelfKernel(); dualQueryTerm = queryDistBound; } else { // The query parent could be NULL, which does weird things and we have to // consider. if (traversalInfo.LastReferenceNode() != NULL) { adjustedScore += queryDescDist * traversalInfo.LastReferenceNode()->Stat().SelfKernel(); dualQueryTerm = queryDescDist; } else { // This makes it so a child-parent (or parent-parent) prune is not // possible. dualQueryTerm = 0.0; adjustedScore = bestKernel; } } if (traversalInfo.LastReferenceNode() == referenceNode.Parent()) { // We can assume that referenceNode.Parent() != NULL, because at the root // node combination, the traversalInfo.LastReferenceNode() pointer will // _not_ be NULL. adjustedScore += refDistBound * traversalInfo.LastQueryNode()->Stat().SelfKernel(); dualRefTerm = refDistBound; } else { // The reference parent could be NULL, which does weird things and we have // to consider. if (traversalInfo.LastQueryNode() != NULL) { adjustedScore += refDescDist * traversalInfo.LastQueryNode()->Stat().SelfKernel(); dualRefTerm = refDescDist; } else { // This makes it so a child-parent (or parent-parent) prune is not // possible. dualRefTerm = 0.0; adjustedScore = bestKernel; } } // Now add the dual term. adjustedScore += (dualQueryTerm * dualRefTerm); if (adjustedScore < bestKernel) { // It is not possible that this node combination can contain a point // combination with kernel value better than the minimum kernel value to // improve any of the results, so we can prune it. return DBL_MAX; } // We were unable to perform a parent-child or parent-parent prune, so now we // must calculate kernel evaluation, if necessary. double kernelEval = 0.0; if (tree::TreeTraits::FirstPointIsCentroid) { // For this type of tree, we may have already calculated the base case in // the parents. if ((traversalInfo.LastQueryNode() != NULL) && (traversalInfo.LastReferenceNode() != NULL) && (traversalInfo.LastQueryNode()->Point(0) == queryNode.Point(0)) && (traversalInfo.LastReferenceNode()->Point(0) == referenceNode.Point(0))) { // Base case already done. 
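      // (Cover trees satisfy FirstPointIsCentroid, so the base case computed
      // for the parent combination between these same two centroid points can
      // be reused directly.)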
kernelEval = traversalInfo.LastBaseCase(); // When BaseCase() is called after Score(), these must be correct so that // another kernel evaluation is not performed. lastQueryIndex = queryNode.Point(0); lastReferenceIndex = referenceNode.Point(0); } else { // The kernel must be evaluated, but it is between points in the dataset, // so we can call BaseCase(). BaseCase() will set lastQueryIndex and // lastReferenceIndex correctly. kernelEval = BaseCase(queryNode.Point(0), referenceNode.Point(0)); } traversalInfo.LastBaseCase() = kernelEval; } else { // Calculate the maximum possible kernel value. arma::vec queryCenter; arma::vec refCenter; queryNode.Center(queryCenter); referenceNode.Center(refCenter); kernelEval = kernel.Evaluate(queryCenter, refCenter); traversalInfo.LastBaseCase() = kernelEval; } ++scores; double maxKernel; if (kernel::KernelTraits::IsNormalized) { // We have a tighter bound for normalized kernels. const double querySqDist = std::pow(queryDescDist, 2.0); const double refSqDist = std::pow(refDescDist, 2.0); const double bothSqDist = std::pow((queryDescDist + refDescDist), 2.0); if (kernelEval <= (1 - 0.5 * bothSqDist)) { const double queryDelta = (1 - 0.5 * querySqDist); const double queryGamma = queryDescDist * sqrt(1 - 0.25 * querySqDist); const double refDelta = (1 - 0.5 * refSqDist); const double refGamma = refDescDist * sqrt(1 - 0.25 * refSqDist); maxKernel = kernelEval * (queryDelta * refDelta - queryGamma * refGamma) + sqrt(1 - std::pow(kernelEval, 2.0)) * (queryGamma * refDelta + queryDelta * refGamma); } else { maxKernel = 1.0; } } else { // Use standard bound; kernel is not normalized. const double refKernelTerm = queryDescDist * referenceNode.Stat().SelfKernel(); const double queryKernelTerm = refDescDist * queryNode.Stat().SelfKernel(); maxKernel = kernelEval + refKernelTerm + queryKernelTerm + (queryDescDist * refDescDist); } // Store relevant information for parent-child pruning. traversalInfo.LastQueryNode() = &queryNode; traversalInfo.LastReferenceNode() = &referenceNode; // We return the inverse of the maximum kernel so that larger kernels are // recursed into first. return (maxKernel >= bestKernel) ? (1.0 / maxKernel) : DBL_MAX; } template double FastMKSRules::Rescore(const size_t queryIndex, TreeType& /*referenceNode*/, const double oldScore) const { const double bestKernel = candidates[queryIndex].top().first; return ((1.0 / oldScore) >= bestKernel) ? oldScore : DBL_MAX; } template double FastMKSRules::Rescore(TreeType& queryNode, TreeType& /*referenceNode*/, const double oldScore) const { queryNode.Stat().Bound() = CalculateBound(queryNode); const double bestKernel = queryNode.Stat().Bound(); return ((1.0 / oldScore) >= bestKernel) ? oldScore : DBL_MAX; } /** * Calculate the bound for the given query node. This bound represents the * minimum value which a node combination must achieve to guarantee an * improvement in the results. * * @param queryNode Query node to calculate bound for. */ template double FastMKSRules::CalculateBound(TreeType& queryNode) const { // We have four possible bounds -- just like NeighborSearchRules, but they are // slightly different in this context. // // (1) min ( min_{all points p in queryNode} P_p[k], // min_{all children c in queryNode} B(c) ); // (2) max_{all points p in queryNode} P_p[k] + (worst child distance + worst // descendant distance) sqrt(K(I_p[k], I_p[k])); // (3) max_{all children c in queryNode} B(c) + <-- not done yet. ignored. 
// (4) B(parent of queryNode); double worstPointKernel = DBL_MAX; double bestAdjustedPointKernel = -DBL_MAX; const double queryDescendantDistance = queryNode.FurthestDescendantDistance(); // Loop over all points in this node to find the worst max-kernel value and // the best possible adjusted max-kernel value that could be held by any // descendant. for (size_t i = 0; i < queryNode.NumPoints(); ++i) { const size_t point = queryNode.Point(i); const CandidateList& candidatesPoints = candidates[point]; if (candidatesPoints.top().first < worstPointKernel) worstPointKernel = candidatesPoints.top().first; if (candidatesPoints.top().first == -DBL_MAX) continue; // Avoid underflow. // This should be (queryDescendantDistance + centroidDistance) for any tree // but it works for cover trees since centroidDistance = 0 for cover trees. // The formulation here is slightly different than in Equation 43 of // "Dual-tree fast exact max-kernel search". Because we could be searching // for k max kernels and not just one, the bound for this point must // actually be the minimum adjusted kernel of all k candidate kernels. // So, // B(N_q) = min_{1 \le j \le k} k_j^*(p_q) - // \lambda_q \sqrt(K(p_j^*(p_q), p_j^*(p_q))) // where p_j^*(p_q) is the j'th kernel candidate for query point p_q and // k_j^*(p_q) is K(p_q, p_j^*(p_q)). double worstPointCandidateKernel = DBL_MAX; typedef typename CandidateList::const_iterator iter; for (iter it = candidatesPoints.begin(); it != candidatesPoints.end(); ++it) { const double candidateKernel = it->first - queryDescendantDistance * referenceKernels[it->second]; if (candidateKernel < worstPointCandidateKernel) worstPointCandidateKernel = candidateKernel; } if (worstPointCandidateKernel > bestAdjustedPointKernel) bestAdjustedPointKernel = worstPointCandidateKernel; } // Loop over all the children in the node. double worstChildKernel = DBL_MAX; for (size_t i = 0; i < queryNode.NumChildren(); ++i) { if (queryNode.Child(i).Stat().Bound() < worstChildKernel) worstChildKernel = queryNode.Child(i).Stat().Bound(); } // Now assemble bound (1). const double firstBound = (worstPointKernel < worstChildKernel) ? worstPointKernel : worstChildKernel; // Bound (2) is bestAdjustedPointKernel. const double fourthBound = (queryNode.Parent() == NULL) ? -DBL_MAX : queryNode.Parent()->Stat().Bound(); // Pick the best of these bounds. const double interA = (firstBound > bestAdjustedPointKernel) ? firstBound : bestAdjustedPointKernel; const double interB = fourthBound; return (interA > interB) ? interA : interB; } /** * Helper function to insert a point into the list of candidate points. * * @param queryIndex Index of point whose neighbors we are inserting into. * @param index Index of reference point which is being inserted. * @param product Kernel value for given candidate. */ template inline void FastMKSRules::InsertNeighbor( const size_t queryIndex, const size_t index, const double product) { CandidateList& pqueue = candidates[queryIndex]; if (product > pqueue.top().first) { Candidate c = std::make_pair(product, index); pqueue.pop(); pqueue.push(c); } } } // namespace fastmks } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/fastmks/fastmks_stat.hpp000066400000000000000000000072351315013601400233550ustar00rootroot00000000000000/** * @file fastmks_stat.hpp * @author Ryan Curtin * * The statistic used in trees with FastMKS. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_FASTMKS_FASTMKS_STAT_HPP #define MLPACK_METHODS_FASTMKS_FASTMKS_STAT_HPP #include #include namespace mlpack { namespace fastmks { /** * The statistic used in trees with FastMKS. This stores both the bound and the * self-kernels for each node in the tree. */ class FastMKSStat { public: /** * Default initialization. */ FastMKSStat() : bound(-DBL_MAX), selfKernel(0.0), lastKernel(0.0), lastKernelNode(NULL) { } /** * Initialize this statistic for the given tree node. The TreeType's metric * better be IPMetric with some kernel type (that is, Metric().Kernel() must * exist). * * @param node Node that this statistic is built for. */ template FastMKSStat(const TreeType& node) : bound(-DBL_MAX), lastKernel(0.0), lastKernelNode(NULL) { // Do we have to calculate the centroid? if (tree::TreeTraits::FirstPointIsCentroid) { // If this type of tree has self-children, then maybe the evaluation is // already done. These statistics are built bottom-up, so the child stat // should already be done. if ((tree::TreeTraits::HasSelfChildren) && (node.NumChildren() > 0) && (node.Point(0) == node.Child(0).Point(0))) { selfKernel = node.Child(0).Stat().SelfKernel(); } else { selfKernel = sqrt(node.Metric().Kernel().Evaluate( node.Dataset().col(node.Point(0)), node.Dataset().col(node.Point(0)))); } } else { // Calculate the centroid. arma::vec center; node.Center(center); selfKernel = sqrt(node.Metric().Kernel().Evaluate(center, center)); } } //! Get the self-kernel. double SelfKernel() const { return selfKernel; } //! Modify the self-kernel. double& SelfKernel() { return selfKernel; } //! Get the bound. double Bound() const { return bound; } //! Modify the bound. double& Bound() { return bound; } //! Get the last kernel evaluation. double LastKernel() const { return lastKernel; } //! Modify the last kernel evaluation. double& LastKernel() { return lastKernel; } //! Get the address of the node corresponding to the last distance evaluation. void* LastKernelNode() const { return lastKernelNode; } //! Modify the address of the node corresponding to the last distance //! evaluation. void*& LastKernelNode() { return lastKernelNode; } //! Serialize the statistic. template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(bound, "bound"); ar & data::CreateNVP(selfKernel, "selfKernel"); // Void out last kernel information on load. if (Archive::is_loading::value) { lastKernel = 0.0; lastKernelNode = NULL; } } private: //! The bound for pruning. double bound; //! The self-kernel evaluation: sqrt(K(centroid, centroid)). double selfKernel; //! The last kernel evaluation. double lastKernel; //! The node corresponding to the last kernel evaluation. This has to be void //! otherwise we get recursive template arguments. void* lastKernelNode; }; } // namespace fastmks } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/gmm/000077500000000000000000000000001315013601400172425ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/gmm/CMakeLists.txt000066400000000000000000000012761315013601400220100ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. 
set(SOURCES
  gmm.hpp
  gmm.cpp
  gmm_impl.hpp
  em_fit.hpp
  em_fit_impl.hpp
  no_constraint.hpp
  positive_definite_constraint.hpp
  diagonal_constraint.hpp
  eigenvalue_ratio_constraint.hpp
)

# Add directory name to sources.
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()
# Append sources (with directory name) to list of all mlpack sources (used at
# the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)

add_cli_executable(gmm_train)
add_cli_executable(gmm_generate)
add_cli_executable(gmm_probability)

mlpack-2.2.5/src/mlpack/methods/gmm/diagonal_constraint.hpp

/**
 * @file diagonal_constraint.hpp
 * @author Ryan Curtin
 *
 * Constrain a covariance matrix to be diagonal.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_GMM_DIAGONAL_CONSTRAINT_HPP
#define MLPACK_METHODS_GMM_DIAGONAL_CONSTRAINT_HPP

#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace gmm {

/**
 * Force a covariance matrix to be diagonal.
 */
class DiagonalConstraint
{
 public:
  //! Force a covariance matrix to be diagonal.
  static void ApplyConstraint(arma::mat& covariance)
  {
    // Save the diagonal only.
    arma::vec diagonal = covariance.diag();
    covariance = arma::diagmat(diagonal);
  }

  //! Serialize the constraint (which holds nothing, so, nothing to do).
  template<typename Archive>
  static void Serialize(Archive& /* ar */, const unsigned int /* version */)
  { }
};

} // namespace gmm
} // namespace mlpack

#endif

mlpack-2.2.5/src/mlpack/methods/gmm/eigenvalue_ratio_constraint.hpp

/**
 * @file eigenvalue_ratio_constraint.hpp
 * @author Ryan Curtin
 *
 * Constrain a covariance matrix to have a certain ratio of eigenvalues.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_GMM_EIGENVALUE_RATIO_CONSTRAINT_HPP
#define MLPACK_METHODS_GMM_EIGENVALUE_RATIO_CONSTRAINT_HPP

#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace gmm {

/**
 * Given a vector of eigenvalue ratios, ensure that the covariance matrix
 * always has those eigenvalue ratios.  When you create this object, make sure
 * that the vector of ratios that you pass does not go out of scope, because
 * this object holds a reference to that vector instead of copying it.  (This
 * doesn't apply if you are deserializing the object from a file.)
 */
class EigenvalueRatioConstraint
{
 public:
  /**
   * Create the EigenvalueRatioConstraint object with the given vector of
   * eigenvalue ratios.  These ratios are with respect to the first
   * eigenvalue, which is the largest eigenvalue, so the first element of the
   * vector should be 1.  In addition, all other elements should be less than
   * or equal to 1.
   */
  EigenvalueRatioConstraint(const arma::vec& ratios) :
      // Make an alias of the ratios vector.  It will never be modified here.
      ratios(const_cast<double*>(ratios.memptr()), ratios.n_elem, false)
  {
    // Check validity of ratios.
if (std::abs(ratios[0] - 1.0) > 1e-20) Log::Fatal << "EigenvalueRatioConstraint::EigenvalueRatioConstraint(): " << "first element of ratio vector is not 1.0!" << std::endl; for (size_t i = 1; i < ratios.n_elem; ++i) { if (ratios[i] > 1.0) Log::Fatal << "EigenvalueRatioConstraint::EigenvalueRatioConstraint(): " << "element " << i << " of ratio vector is greater than 1.0!" << std::endl; if (ratios[i] < 0.0) Log::Warn << "EigenvalueRatioConstraint::EigenvalueRatioConstraint(): " << "element " << i << " of ratio vectors is negative and will " << "probably cause the covariance to be non-invertible..." << std::endl; } } /** * Apply the eigenvalue ratio constraint to the given covariance matrix. */ void ApplyConstraint(arma::mat& covariance) const { // Eigendecompose the matrix. arma::vec eigenvalues; arma::mat eigenvectors; arma::eig_sym(eigenvalues, eigenvectors, covariance); // Change the eigenvalues to what we are forcing them to be. There // shouldn't be any negative eigenvalues anyway, so it doesn't matter if we // are suddenly forcing them to be positive. If the first eigenvalue is // negative, well, there are going to be some problems later... eigenvalues = (eigenvalues[0] * ratios); // Reassemble the matrix. covariance = eigenvectors * arma::diagmat(eigenvalues) * eigenvectors.t(); } //! Serialize the constraint. template void Serialize(Archive& ar, const unsigned int /* version */) { // Strip the const for the sake of loading/saving. This is the only time it // is modified (other than the constructor). ar & data::CreateNVP(const_cast(ratios), "ratios"); } private: //! Ratios for eigenvalues. const arma::vec ratios; }; } // namespace gmm } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/gmm/em_fit.hpp000066400000000000000000000163661315013601400212320ustar00rootroot00000000000000/** * @file em_fit.hpp * @author Ryan Curtin * @author Michael Fox * * Utility class to fit a GMM using the EM algorithm. Used by * GMM::Estimate<>(). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_GMM_EM_FIT_HPP #define MLPACK_METHODS_GMM_EM_FIT_HPP #include #include // Default clustering mechanism. #include // Default covariance matrix constraint. #include "positive_definite_constraint.hpp" namespace mlpack { namespace gmm { /** * This class contains methods which can fit a GMM to observations using the EM * algorithm. It requires an initial clustering mechanism, which is by default * the KMeans algorithm. The clustering mechanism must implement the following * method: * * - void Cluster(const arma::mat& observations, * const size_t clusters, * arma::Row& assignments); * * This method should create 'clusters' clusters, and return the assignment of * each point to a cluster. */ template, typename CovarianceConstraintPolicy = PositiveDefiniteConstraint> class EMFit { public: /** * Construct the EMFit object, optionally passing an InitialClusteringType * object (just in case it needs to store state). Setting the maximum number * of iterations to 0 means that the EM algorithm will iterate until * convergence (with the given tolerance). * * The parameter forcePositive controls whether or not the covariance matrices * are checked for positive definiteness at each iteration. 
This could be a * time-consuming task, so, if you know your data is well-behaved, you can set * it to false and save some runtime. * * @param maxIterations Maximum number of iterations for EM. * @param tolerance Log-likelihood tolerance required for convergence. * @param forcePositive Check for positive-definiteness of each covariance * matrix at each iteration. * @param clusterer Object which will perform the initial clustering. */ EMFit(const size_t maxIterations = 300, const double tolerance = 1e-10, InitialClusteringType clusterer = InitialClusteringType(), CovarianceConstraintPolicy constraint = CovarianceConstraintPolicy()); /** * Fit the observations to a Gaussian mixture model (GMM) using the EM * algorithm. The size of the vectors (indicating the number of components) * must already be set. Optionally, if useInitialModel is set to true, then * the model given in the means, covariances, and weights parameters is used * as the initial model, instead of using the InitialClusteringType::Cluster() * option. * * @param observations List of observations to train on. * @param means Vector to store trained means in. * @param covariances Vector to store trained covariances in. * @param weights Vector to store a priori weights in. * @param useInitialModel If true, the given model is used for the initial * clustering. */ void Estimate(const arma::mat& observations, std::vector& dists, arma::vec& weights, const bool useInitialModel = false); /** * Fit the observations to a Gaussian mixture model (GMM) using the EM * algorithm, taking into account the probabilities of each point being from * this mixture. The size of the vectors (indicating the number of * components) must already be set. Optionally, if useInitialModel is set to * true, then the model given in the means, covariances, and weights * parameters is used as the initial model, instead of using the * InitialClusteringType::Cluster() option. * * @param observations List of observations to train on. * @param probabilities Probability of each point being from this model. * @param means Vector to store trained means in. * @param covariances Vector to store trained covariances in. * @param weights Vector to store a priori weights in. * @param useInitialModel If true, the given model is used for the initial * clustering. */ void Estimate(const arma::mat& observations, const arma::vec& probabilities, std::vector& dists, arma::vec& weights, const bool useInitialModel = false); //! Get the clusterer. const InitialClusteringType& Clusterer() const { return clusterer; } //! Modify the clusterer. InitialClusteringType& Clusterer() { return clusterer; } //! Get the covariance constraint policy class. const CovarianceConstraintPolicy& Constraint() const { return constraint; } //! Modify the covariance constraint policy class. CovarianceConstraintPolicy& Constraint() { return constraint; } //! Get the maximum number of iterations of the EM algorithm. size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations of the EM algorithm. size_t& MaxIterations() { return maxIterations; } //! Get the tolerance for the convergence of the EM algorithm. double Tolerance() const { return tolerance; } //! Modify the tolerance for the convergence of the EM algorithm. double& Tolerance() { return tolerance; } //! Serialize the fitter. template void Serialize(Archive& ar, const unsigned int version); private: /** * Run the clusterer, and then turn the cluster assignments into Gaussians. 
* This is a helper function for both overloads of Estimate(). The vectors * must be already set to the number of clusters. * * @param observations List of observations. * @param means Vector to store means in. * @param covariances Vector to store covariances in. * @param weights Vector to store a priori weights in. */ void InitialClustering(const arma::mat& observations, std::vector& dists, arma::vec& weights); /** * Calculate the log-likelihood of a model. Yes, this is reimplemented in the * GMM code. Intuition suggests that the log-likelihood is not the best way * to determine if the EM algorithm has converged. * * @param data Data matrix. * @param means Vector of means. * @param covariances Vector of covariance matrices. * @param weights Vector of a priori weights. */ double LogLikelihood(const arma::mat& data, const std::vector& dists, const arma::vec& weights) const; //! Maximum iterations of EM algorithm. size_t maxIterations; //! Tolerance for convergence of EM. double tolerance; //! Object which will perform the clustering. InitialClusteringType clusterer; //! Object which applies constraints to the covariance matrix. CovarianceConstraintPolicy constraint; }; } // namespace gmm } // namespace mlpack // Include implementation. #include "em_fit_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/gmm/em_fit_impl.hpp000066400000000000000000000251201315013601400222370ustar00rootroot00000000000000/** * @file em_fit_impl.hpp * @author Ryan Curtin * @author Michael Fox * * Implementation of EM algorithm for fitting GMMs. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_GMM_EM_FIT_IMPL_HPP #define MLPACK_METHODS_GMM_EM_FIT_IMPL_HPP // In case it hasn't been included yet. #include "em_fit.hpp" namespace mlpack { namespace gmm { //! Constructor. template EMFit::EMFit( const size_t maxIterations, const double tolerance, InitialClusteringType clusterer, CovarianceConstraintPolicy constraint) : maxIterations(maxIterations), tolerance(tolerance), clusterer(clusterer), constraint(constraint) { /* Nothing to do. */ } template void EMFit::Estimate( const arma::mat& observations, std::vector& dists, arma::vec& weights, const bool useInitialModel) { // Only perform initial clustering if the user wanted it. if (!useInitialModel) InitialClustering(observations, dists, weights); double l = LogLikelihood(observations, dists, weights); Log::Debug << "EMFit::Estimate(): initial clustering log-likelihood: " << l << std::endl; double lOld = -DBL_MAX; arma::mat condProb(observations.n_cols, dists.size()); // Iterate to update the model until no more improvement is found. size_t iteration = 1; while (std::abs(l - lOld) > tolerance && iteration != maxIterations) { Log::Info << "EMFit::Estimate(): iteration " << iteration << ", " << "log-likelihood " << l << "." << std::endl; // Calculate the conditional probabilities of choosing a particular // Gaussian given the observations and the present theta value. for (size_t i = 0; i < dists.size(); i++) { // Store conditional probabilities into condProb vector for each // Gaussian. First we make an alias of the condProb vector. arma::vec condProbAlias = condProb.unsafe_col(i); dists[i].Probability(observations, condProbAlias); condProbAlias *= weights[i]; } // Normalize row-wise. 
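    // After this normalization, row i of condProb holds the posterior
    // membership probabilities of point i (the E-step):
    //   condProb(i, j) = w_j p_j(x_i) / sum_k w_k p_k(x_i).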
for (size_t i = 0; i < condProb.n_rows; i++) { // Avoid dividing by zero; if the probability for everything is 0, we // don't want to make it NaN. const double probSum = accu(condProb.row(i)); if (probSum != 0.0) condProb.row(i) /= probSum; } // Store the sum of the probability of each state over all the observations. arma::vec probRowSums = trans(arma::sum(condProb, 0 /* columnwise */)); // Calculate the new value of the means using the updated conditional // probabilities. for (size_t i = 0; i < dists.size(); i++) { // Don't update if there's no probability of the Gaussian having points. if (probRowSums[i] != 0) dists[i].Mean() = (observations * condProb.col(i)) / probRowSums[i]; // Calculate the new value of the covariances using the updated // conditional probabilities and the updated means. arma::mat tmp = observations - (dists[i].Mean() * arma::ones(observations.n_cols)); arma::mat tmpB = tmp % (arma::ones(observations.n_rows) * trans(condProb.col(i))); // Don't update if there's no probability of the Gaussian having points. if (probRowSums[i] != 0.0) { arma::mat covariance = (tmp * trans(tmpB)) / probRowSums[i]; // Apply covariance constraint. constraint.ApplyConstraint(covariance); dists[i].Covariance(std::move(covariance)); } } // Calculate the new values for omega using the updated conditional // probabilities. weights = probRowSums / observations.n_cols; // Update values of l; calculate new log-likelihood. lOld = l; l = LogLikelihood(observations, dists, weights); iteration++; } } template void EMFit::Estimate( const arma::mat& observations, const arma::vec& probabilities, std::vector& dists, arma::vec& weights, const bool useInitialModel) { if (!useInitialModel) InitialClustering(observations, dists, weights); double l = LogLikelihood(observations, dists, weights); Log::Debug << "EMFit::Estimate(): initial clustering log-likelihood: " << l << std::endl; double lOld = -DBL_MAX; arma::mat condProb(observations.n_cols, dists.size()); // Iterate to update the model until no more improvement is found. size_t iteration = 1; while (std::abs(l - lOld) > tolerance && iteration != maxIterations) { // Calculate the conditional probabilities of choosing a particular // Gaussian given the observations and the present theta value. for (size_t i = 0; i < dists.size(); i++) { // Store conditional probabilities into condProb vector for each // Gaussian. First we make an alias of the condProb vector. arma::vec condProbAlias = condProb.unsafe_col(i); dists[i].Probability(observations, condProbAlias); condProbAlias *= weights[i]; } // Normalize row-wise. for (size_t i = 0; i < condProb.n_rows; i++) { // Avoid dividing by zero; if the probability for everything is 0, we // don't want to make it NaN. const double probSum = accu(condProb.row(i)); if (probSum != 0.0) condProb.row(i) /= probSum; } // This will store the sum of probabilities of each state over all the // observations. arma::vec probRowSums(dists.size()); // Calculate the new value of the means using the updated conditional // probabilities. for (size_t i = 0; i < dists.size(); i++) { // Calculate the sum of probabilities of points, which is the // conditional probability of each point being from Gaussian i // multiplied by the probability of the point being from this mixture // model. 
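      // In other words, with per-point weights q_n this is
      //   probRowSums[i] = sum_n condProb(n, i) * q_n,
      // and the mean update below is the correspondingly weighted average
      //   mean_i = sum_n condProb(n, i) * q_n * x_n / probRowSums[i].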
probRowSums[i] = accu(condProb.col(i) % probabilities); dists[i].Mean() = (observations * (condProb.col(i) % probabilities)) / probRowSums[i]; // Calculate the new value of the covariances using the updated // conditional probabilities and the updated means. arma::mat tmp = observations - (dists[i].Mean() * arma::ones(observations.n_cols)); arma::mat tmpB = tmp % (arma::ones(observations.n_rows) * trans(condProb.col(i) % probabilities)); arma::mat cov = (tmp * trans(tmpB)) / probRowSums[i]; // Apply covariance constraint. constraint.ApplyConstraint(cov); dists[i].Covariance(std::move(cov)); } // Calculate the new values for omega using the updated conditional // probabilities. weights = probRowSums / accu(probabilities); // Update values of l; calculate new log-likelihood. lOld = l; l = LogLikelihood(observations, dists, weights); iteration++; } } template void EMFit:: InitialClustering(const arma::mat& observations, std::vector& dists, arma::vec& weights) { // Assignments from clustering. arma::Row assignments; // Run clustering algorithm. clusterer.Cluster(observations, dists.size(), assignments); std::vector means(dists.size()); std::vector covs(dists.size()); // Now calculate the means, covariances, and weights. weights.zeros(); for (size_t i = 0; i < dists.size(); ++i) { means[i].zeros(dists[i].Mean().n_elem); covs[i].zeros(dists[i].Covariance().n_rows, dists[i].Covariance().n_cols); } // From the assignments, generate our means, covariances, and weights. for (size_t i = 0; i < observations.n_cols; ++i) { const size_t cluster = assignments[i]; // Add this to the relevant mean. means[cluster] += observations.col(i); // Add this to the relevant covariance. covs[cluster] += observations.col(i) * trans(observations.col(i)); // Now add one to the weights (we will normalize). weights[cluster]++; } // Now normalize the mean and covariance. for (size_t i = 0; i < dists.size(); ++i) { means[i] /= (weights[i] > 1) ? weights[i] : 1; } for (size_t i = 0; i < observations.n_cols; ++i) { const size_t cluster = assignments[i]; const arma::vec normObs = observations.col(i) - means[cluster]; covs[cluster] += normObs * normObs.t(); } for (size_t i = 0; i < dists.size(); ++i) { covs[i] /= (weights[i] > 1) ? weights[i] : 1; // Apply constraints to covariance matrix. constraint.ApplyConstraint(covs[i]); std::swap(dists[i].Mean(), means[i]); dists[i].Covariance(std::move(covs[i])); } // Finally, normalize weights. weights /= accu(weights); } template double EMFit::LogLikelihood( const arma::mat& observations, const std::vector& dists, const arma::vec& weights) const { double logLikelihood = 0; arma::vec phis; arma::mat likelihoods(dists.size(), observations.n_cols); for (size_t i = 0; i < dists.size(); ++i) { dists[i].Probability(observations, phis); likelihoods.row(i) = weights(i) * trans(phis); } // Now sum over every point. for (size_t j = 0; j < observations.n_cols; ++j) { if (accu(likelihoods.col(j)) == 0) Log::Info << "Likelihood of point " << j << " is 0! It is probably an " << "outlier." 
<< std::endl; logLikelihood += log(accu(likelihoods.col(j))); } return logLikelihood; } template template void EMFit::Serialize( Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(maxIterations, "maxIterations"); ar & CreateNVP(tolerance, "tolerance"); ar & CreateNVP(clusterer, "clusterer"); ar & CreateNVP(constraint, "constraint"); } } // namespace gmm } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/gmm/gmm.cpp000066400000000000000000000104171315013601400205310ustar00rootroot00000000000000/** * @file gmm.cpp * @author Parikshit Ram (pram@cc.gatech.edu) * @author Ryan Curtin * @author Michael Fox * * Implementation of template-based GMM methods. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "gmm.hpp" namespace mlpack { namespace gmm { /** * Create a GMM with the given number of Gaussians, each of which have the * specified dimensionality. The means and covariances will be set to 0. * * @param gaussians Number of Gaussians in this GMM. * @param dimensionality Dimensionality of each Gaussian. */ GMM::GMM(const size_t gaussians, const size_t dimensionality) : gaussians(gaussians), dimensionality(dimensionality), dists(gaussians, distribution::GaussianDistribution(dimensionality)), weights(gaussians) { // Set equal weights. Technically this model is still valid, but only barely. weights.fill(1.0 / gaussians); } // Copy constructor for when the other GMM uses the same fitting type. GMM::GMM(const GMM& other) : gaussians(other.Gaussians()), dimensionality(other.dimensionality), dists(other.dists), weights(other.weights) { /* Nothing to do. */ } GMM& GMM::operator=(const GMM& other) { gaussians = other.gaussians; dimensionality = other.dimensionality; dists = other.dists; weights = other.weights; return *this; } /** * Return the probability of the given observation being from this GMM. */ double GMM::Probability(const arma::vec& observation) const { // Sum the probability for each Gaussian in our mixture (and we have to // multiply by the prior for each Gaussian too). double sum = 0; for (size_t i = 0; i < gaussians; i++) sum += weights[i] * dists[i].Probability(observation); return sum; } /** * Return the probability of the given observation being from the given * component in the mixture. */ double GMM::Probability(const arma::vec& observation, const size_t component) const { // We are only considering one Gaussian component -- so we only need to call // Probability() once. We do consider the prior probability! return weights[component] * dists[component].Probability(observation); } /** * Return a randomly generated observation according to the probability * distribution defined by this object. */ arma::vec GMM::Random() const { // Determine which Gaussian it will be coming from. double gaussRand = math::Random(); size_t gaussian = 0; double sumProb = 0; for (size_t g = 0; g < gaussians; g++) { sumProb += weights(g); if (gaussRand <= sumProb) { gaussian = g; break; } } return trans(chol(dists[gaussian].Covariance())) * arma::randn(dimensionality) + dists[gaussian].Mean(); } /** * Classify the given observations as being from an individual component in this * GMM. */ void GMM::Classify(const arma::mat& observations, arma::Row& labels) const { // This is not the best way to do this! 
// We should not have to fill this with values, because each one should be // overwritten. labels.set_size(observations.n_cols); for (size_t i = 0; i < observations.n_cols; ++i) { // Find maximum probability component. double probability = 0; for (size_t j = 0; j < gaussians; ++j) { double newProb = Probability(observations.unsafe_col(i), j); if (newProb >= probability) { probability = newProb; labels[i] = j; } } } } /** * Get the log-likelihood of this data's fit to the model. */ double GMM::LogLikelihood( const arma::mat& data, const std::vector& distsL, const arma::vec& weightsL) const { double loglikelihood = 0; arma::vec phis; arma::mat likelihoods(gaussians, data.n_cols); for (size_t i = 0; i < gaussians; i++) { distsL[i].Probability(data, phis); likelihoods.row(i) = weightsL(i) * trans(phis); } // Now sum over every point. for (size_t j = 0; j < data.n_cols; j++) loglikelihood += log(accu(likelihoods.col(j))); return loglikelihood; } } // namespace gmm } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/gmm/gmm.hpp000066400000000000000000000247151315013601400205440ustar00rootroot00000000000000/** * @author Parikshit Ram (pram@cc.gatech.edu) * @author Michael Fox * @file gmm.hpp * * Defines a Gaussian Mixture model and * estimates the parameters of the model * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_MOG_MOG_EM_HPP #define MLPACK_METHODS_MOG_MOG_EM_HPP #include // This is the default fitting method class. #include "em_fit.hpp" namespace mlpack { namespace gmm /** Gaussian Mixture Models. */ { /** * A Gaussian Mixture Model (GMM). This class uses maximum likelihood loss * functions to estimate the parameters of the GMM on a given dataset via the * given fitting mechanism, defined by the FittingType template parameter. The * GMM can be trained using normal data, or data with probabilities of being * from this GMM (see GMM::Train() for more information). * * The Train() method uses a template type 'FittingType'. The FittingType * template class must provide a way for the GMM to train on data. It must * provide the following two functions: * * @code * void Estimate(const arma::mat& observations, * std::vector& dists, * arma::vec& weights); * * void Estimate(const arma::mat& observations, * const arma::vec& probabilities, * std::vector& dists, * arma::vec& weights); * @endcode * * These functions should produce a trained GMM from the given observations and * probabilities. These may modify the size of the model (by increasing the * size of the mean and covariance vectors as well as the weight vectors), but * the method should expect that these vectors are already set to the size of * the GMM as specified in the constructor. * * For a sample implementation, see the EMFit class; this class uses the EM * algorithm to train a GMM, and is the default fitting type for the Train() * method. * * The GMM, once trained, can be used to generate random points from the * distribution and estimate the probability of points being from the * distribution. The parameters of the GMM can be obtained through the * accessors and mutators. * * Example use: * * @code * // Set up a mixture of 5 gaussians in a 4-dimensional space. * GMM g(5, 4); * * // Train the GMM given the data observations, using the default EM fitting * // mechanism. 
* g.Train(data); * * // Get the probability of 'observation' being observed from this GMM. * double probability = g.Probability(observation); * * // Get a random observation from the GMM. * arma::vec observation = g.Random(); * @endcode */ class GMM { private: //! The number of Gaussians in the model. size_t gaussians; //! The dimensionality of the model. size_t dimensionality; //! Vector of Gaussians std::vector dists; //! Vector of a priori weights for each Gaussian. arma::vec weights; public: /** * Create an empty Gaussian Mixture Model, with zero gaussians. */ GMM() : gaussians(0), dimensionality(0) { // Warn the user. They probably don't want to do this. If this constructor // is being used (because it is required by some template classes), the user // should know that it is potentially dangerous. Log::Debug << "GMM::GMM(): no parameters given; Estimate() may fail " << "unless parameters are set." << std::endl; } /** * Create a GMM with the given number of Gaussians, each of which have the * specified dimensionality. The means and covariances will be set to 0. * * @param gaussians Number of Gaussians in this GMM. * @param dimensionality Dimensionality of each Gaussian. */ GMM(const size_t gaussians, const size_t dimensionality); /** * Create a GMM with the given dists and weights. * * @param dists Distributions of the model. * @param weights Weights of the model. */ GMM(const std::vector & dists, const arma::vec& weights) : gaussians(dists.size()), dimensionality((!dists.empty()) ? dists[0].Mean().n_elem : 0), dists(dists), weights(weights) { /* Nothing to do. */ } //! Copy constructor for GMMs. GMM(const GMM& other); //! Copy operator for GMMs. GMM& operator=(const GMM& other); //! Return the number of gaussians in the model. size_t Gaussians() const { return gaussians; } //! Return the dimensionality of the model. size_t Dimensionality() const { return dimensionality; } /** * Return a const reference to a component distribution. * * @param i index of component. */ const distribution::GaussianDistribution& Component(size_t i) const { return dists[i]; } /** * Return a reference to a component distribution. * * @param i index of component. */ distribution::GaussianDistribution& Component(size_t i) { return dists[i]; } //! Return a const reference to the a priori weights of each Gaussian. const arma::vec& Weights() const { return weights; } //! Return a reference to the a priori weights of each Gaussian. arma::vec& Weights() { return weights; } /** * Return the probability that the given observation came from this * distribution. * * @param observation Observation to evaluate the probability of. */ double Probability(const arma::vec& observation) const; /** * Return the probability that the given observation came from the given * Gaussian component in this distribution. * * @param observation Observation to evaluate the probability of. * @param component Index of the component of the GMM to be considered. */ double Probability(const arma::vec& observation, const size_t component) const; /** * Return a randomly generated observation according to the probability * distribution defined by this object. * * @return Random observation from this GMM. */ arma::vec Random() const; /** * Estimate the probability distribution directly from the given observations, * using the given algorithm in the FittingType class to fit the data. * * The fitting will be performed 'trials' times; from these trials, the model * with the greatest log-likelihood will be selected. 
By default, only one * trial is performed. The log-likelihood of the best fitting is returned. * * Optionally, the existing model can be used as an initial model for the * estimation by setting 'useExistingModel' to true. If the fitting procedure * is deterministic after the initial position is given, then 'trials' should * be set to 1. * * @tparam FittingType The type of fitting method which should be used * (EMFit<> is suggested). * @param observations Observations of the model. * @param trials Number of trials to perform; the model in these trials with * the greatest log-likelihood will be selected. * @param useExistingModel If true, the existing model is used as an initial * model for the estimation. * @return The log-likelihood of the best fit. */ template> double Train(const arma::mat& observations, const size_t trials = 1, const bool useExistingModel = false, FittingType fitter = FittingType()); /** * Estimate the probability distribution directly from the given observations, * taking into account the probability of each observation actually being from * this distribution, and using the given algorithm in the FittingType class * to fit the data. * * The fitting will be performed 'trials' times; from these trials, the model * with the greatest log-likelihood will be selected. By default, only one * trial is performed. The log-likelihood of the best fitting is returned. * * Optionally, the existing model can be used as an initial model for the * estimation by setting 'useExistingModel' to true. If the fitting procedure * is deterministic after the initial position is given, then 'trials' should * be set to 1. * * @param observations Observations of the model. * @param probabilities Probability of each observation being from this * distribution. * @param trials Number of trials to perform; the model in these trials with * the greatest log-likelihood will be selected. * @param useExistingModel If true, the existing model is used as an initial * model for the estimation. * @return The log-likelihood of the best fit. */ template> double Train(const arma::mat& observations, const arma::vec& probabilities, const size_t trials = 1, const bool useExistingModel = false, FittingType fitter = FittingType()); /** * Classify the given observations as being from an individual component in * this GMM. The resultant classifications are stored in the 'labels' object, * and each label will be between 0 and (Gaussians() - 1). Supposing that a * point was classified with label 2, and that our GMM object was called * 'gmm', one could access the relevant Gaussian distribution as follows: * * @code * arma::vec mean = gmm.Means()[2]; * arma::mat covariance = gmm.Covariances()[2]; * double priorWeight = gmm.Weights()[2]; * @endcode * * @param observations List of observations to classify. * @param labels Object which will be filled with labels. */ void Classify(const arma::mat& observations, arma::Row& labels) const; /** * Serialize the GMM. */ template void Serialize(Archive& ar, const unsigned int /* version */); private: /** * This function computes the loglikelihood of the given model. This function * is used by GMM::Train(). * * @param dataPoints Observations to calculate the likelihood for. * @param means Means of the given mixture model. * @param covars Covariances of the given mixture model. * @param weights Weights of the given mixture model. 
*/ double LogLikelihood( const arma::mat& dataPoints, const std::vector& distsL, const arma::vec& weights) const; }; } // namespace gmm } // namespace mlpack // Include implementation. #include "gmm_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/gmm/gmm_generate_main.cpp000066400000000000000000000045211315013601400234060ustar00rootroot00000000000000/** * @file gmm_generate_main.cpp * @author Ryan Curtin * * Load a GMM from file, then generate samples from it. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include "gmm.hpp" #include #include using namespace std; using namespace mlpack; using namespace mlpack::gmm; PROGRAM_INFO("GMM Sample Generator", "This program is able to generate samples from a pre-trained GMM (use " "gmm_train to train a GMM). It loads a GMM from the file specified with " "--input_model_file (-m), and generates a number of samples from that " "model; the number of samples is specified by the --samples (-n) parameter." "The output samples are saved in the file specified by --output_file " "(-o)."); PARAM_STRING_IN_REQ("input_model_file", "File containing input GMM model.", "m"); PARAM_INT_IN_REQ("samples", "Number of samples to generate.", "n"); PARAM_STRING_OUT("output_file", "File to save output samples in.", "o"); PARAM_INT_IN("seed", "Random seed. If 0, 'std::time(NULL)' is used.", "s", 0); int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); if (!CLI::HasParam("output_file")) Log::Warn << "--output_file (-o) is not specified;" << "no results will be saved!" << endl; if (CLI::GetParam("seed") == 0) mlpack::math::RandomSeed(time(NULL)); else mlpack::math::RandomSeed((size_t) CLI::GetParam("seed")); if (CLI::GetParam("samples") < 0) Log::Fatal << "Parameter to --samples must be greater than 0!" << endl; GMM gmm; data::Load(CLI::GetParam("input_model_file"), "gmm", gmm, true); size_t length = (size_t) CLI::GetParam("samples"); Log::Info << "Generating " << length << " samples..." << endl; arma::mat samples(gmm.Dimensionality(), length); for (size_t i = 0; i < length; ++i) samples.col(i) = gmm.Random(); if (CLI::HasParam("output_file")) data::Save(CLI::GetParam("output_file"), samples); else Log::Warn << "--output_file is not specified, so no output will be saved!" << endl; } mlpack-2.2.5/src/mlpack/methods/gmm/gmm_impl.hpp000066400000000000000000000152311315013601400215560ustar00rootroot00000000000000/** * @file gmm_impl.hpp * @author Parikshit Ram (pram@cc.gatech.edu) * @author Ryan Curtin * @author Michael Fox * * Implementation of template-based GMM methods. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_GMM_GMM_IMPL_HPP #define MLPACK_METHODS_GMM_GMM_IMPL_HPP // In case it hasn't already been included. #include "gmm.hpp" namespace mlpack { namespace gmm { /** * Fit the GMM to the given observations. */ template double GMM::Train(const arma::mat& observations, const size_t trials, const bool useExistingModel, FittingType fitter) { double bestLikelihood; // This will be reported later. 
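  // Unless an existing model is used as the starting point, each trial starts
  // EM from a new random initial clustering, and EM only converges to a local
  // maximum of the log-likelihood; keeping the model with the largest final
  // log-likelihood across trials is what makes multiple trials useful.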
// We don't need to store temporary models if we are only doing one trial. if (trials == 1) { // Train the model. The user will have been warned earlier if the GMM was // initialized with no parameters (0 gaussians, dimensionality of 0). fitter.Estimate(observations, dists, weights, useExistingModel); bestLikelihood = LogLikelihood(observations, dists, weights); } else { if (trials == 0) return -DBL_MAX; // It's what they asked for... // If each trial must start from the same initial location, we must save it. std::vector distsOrig; arma::vec weightsOrig; if (useExistingModel) { distsOrig = dists; weightsOrig = weights; } // We need to keep temporary copies. We'll do the first training into the // actual model position, so that if it's the best we don't need to copy it. fitter.Estimate(observations, dists, weights, useExistingModel); bestLikelihood = LogLikelihood(observations, dists, weights); Log::Info << "GMM::Train(): Log-likelihood of trial 0 is " << bestLikelihood << "." << std::endl; // Now the temporary model. std::vector distsTrial(gaussians, distribution::GaussianDistribution(dimensionality)); arma::vec weightsTrial(gaussians); for (size_t trial = 1; trial < trials; ++trial) { if (useExistingModel) { distsTrial = distsOrig; weightsTrial = weightsOrig; } fitter.Estimate(observations, distsTrial, weightsTrial, useExistingModel); // Check to see if the log-likelihood of this one is better. double newLikelihood = LogLikelihood(observations, distsTrial, weightsTrial); Log::Info << "GMM::Train(): Log-likelihood of trial " << trial << " is " << newLikelihood << "." << std::endl; if (newLikelihood > bestLikelihood) { // Save new likelihood and copy new model. bestLikelihood = newLikelihood; dists = distsTrial; weights = weightsTrial; } } } // Report final log-likelihood and return it. Log::Info << "GMM::Train(): log-likelihood of trained GMM is " << bestLikelihood << "." << std::endl; return bestLikelihood; } /** * Fit the GMM to the given observations, each of which has a certain * probability of being from this distribution. */ template double GMM::Train(const arma::mat& observations, const arma::vec& probabilities, const size_t trials, const bool useExistingModel, FittingType fitter) { double bestLikelihood; // This will be reported later. // We don't need to store temporary models if we are only doing one trial. if (trials == 1) { // Train the model. The user will have been warned earlier if the GMM was // initialized with no parameters (0 gaussians, dimensionality of 0). fitter.Estimate(observations, probabilities, dists, weights, useExistingModel); bestLikelihood = LogLikelihood(observations, dists, weights); } else { if (trials == 0) return -DBL_MAX; // It's what they asked for... // If each trial must start from the same initial location, we must save it. std::vector distsOrig; arma::vec weightsOrig; if (useExistingModel) { distsOrig = dists; weightsOrig = weights; } // We need to keep temporary copies. We'll do the first training into the // actual model position, so that if it's the best we don't need to copy it. fitter.Estimate(observations, probabilities, dists, weights, useExistingModel); bestLikelihood = LogLikelihood(observations, dists, weights); Log::Debug << "GMM::Train(): Log-likelihood of trial 0 is " << bestLikelihood << "." << std::endl; // Now the temporary model. 
std::vector distsTrial(gaussians, distribution::GaussianDistribution(dimensionality)); arma::vec weightsTrial(gaussians); for (size_t trial = 1; trial < trials; ++trial) { if (useExistingModel) { distsTrial = distsOrig; weightsTrial = weightsOrig; } fitter.Estimate(observations, probabilities, distsTrial, weightsTrial, useExistingModel); // Check to see if the log-likelihood of this one is better. double newLikelihood = LogLikelihood(observations, distsTrial, weightsTrial); Log::Debug << "GMM::Train(): Log-likelihood of trial " << trial << " is " << newLikelihood << "." << std::endl; if (newLikelihood > bestLikelihood) { // Save new likelihood and copy new model. bestLikelihood = newLikelihood; dists = distsTrial; weights = weightsTrial; } } } // Report final log-likelihood and return it. Log::Info << "GMM::Train(): log-likelihood of trained GMM is " << bestLikelihood << "." << std::endl; return bestLikelihood; } /** * Serialize the object. */ template void GMM::Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(gaussians, "gaussians"); ar & CreateNVP(dimensionality, "dimensionality"); // Load (or save) the gaussians. Not going to use the default std::vector // serialize here because it won't call out correctly to Serialize() for each // Gaussian distribution. if (Archive::is_loading::value) dists.resize(gaussians); for (size_t i = 0; i < gaussians; ++i) { std::ostringstream oss; oss << "dist" << i; ar & CreateNVP(dists[i], oss.str()); } ar & CreateNVP(weights, "weights"); } } // namespace gmm } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/gmm/gmm_probability_main.cpp000066400000000000000000000042761315013601400241430ustar00rootroot00000000000000/** * @file gmm_probability_main.cpp * @author Ryan Curtin * * Given a GMM, calculate the probability of points coming from it. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include "gmm.hpp" #include #include using namespace std; using namespace mlpack; using namespace mlpack::gmm; PROGRAM_INFO("GMM Probability Calculator", "This program calculates the probability that given points came from a " "given GMM (that is, P(X | gmm)). The GMM is specified with the " "--input_model_file option, and the points are specified with the " "--input_file option. The output probabilities are stored in the file " "specified by the --output_file option."); PARAM_STRING_IN_REQ("input_model_file", "File containing input GMM.", "m"); PARAM_STRING_IN_REQ("input_file", "File containing points.", "i"); PARAM_STRING_OUT("output_file", "File to save calculated probabilities to.", "o"); int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); const string inputFile = CLI::GetParam("input_file"); const string inputModelFile = CLI::GetParam("input_model_file"); const string outputFile = CLI::GetParam("output_file"); if (!CLI::HasParam("output_file")) Log::Warn << "--output_file (-o) is not specified;" << "no results will be saved!" << endl; // Get the GMM and the points. GMM gmm; data::Load(inputModelFile, "gmm", gmm); arma::mat dataset; data::Load(inputFile, dataset); // Now calculate the probabilities. 
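  // Each output value is the mixture density
  //   p(x) = sum_k w_k N(x; mu_k, Sigma_k),
  // evaluated at one column of the dataset at a time.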
arma::rowvec probabilities(dataset.n_cols); for (size_t i = 0; i < dataset.n_cols; ++i) probabilities[i] = gmm.Probability(dataset.unsafe_col(i)); // And save the result. if (CLI::HasParam("output_file")) data::Save(CLI::GetParam("output_file"), probabilities); else Log::Warn << "--output_file was not specified, so no output will be saved!" << endl; } mlpack-2.2.5/src/mlpack/methods/gmm/gmm_train_main.cpp000066400000000000000000000174501315013601400227360ustar00rootroot00000000000000/** * @author Parikshit Ram * @file gmm_train_main.cpp * * This program trains a mixture of Gaussians on a given data matrix. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "gmm.hpp" #include "no_constraint.hpp" #include using namespace mlpack; using namespace mlpack::gmm; using namespace mlpack::util; using namespace mlpack::kmeans; using namespace std; PROGRAM_INFO("Gaussian Mixture Model (GMM) Training", "This program takes a parametric estimate of a Gaussian mixture model (GMM)" " using the EM algorithm to find the maximum likelihood estimate. The " "model may be saved to file, which will contain information about each " "Gaussian." "\n\n" "If GMM training fails with an error indicating that a covariance matrix " "could not be inverted, make sure that the --no_force_positive flag is not " "specified. Alternately, adding a small amount of Gaussian noise (using " "the --noise parameter) to the entire dataset may help prevent Gaussians " "with zero variance in a particular dimension, which is usually the cause " "of non-invertible covariance matrices." "\n\n" "The 'no_force_positive' flag, if set, will avoid the checks after each " "iteration of the EM algorithm which ensure that the covariance matrices " "are positive definite. Specifying the flag can cause faster runtime, " "but may also cause non-positive definite covariance matrices, which will " "cause the program to crash." "\n\n" "Optionally, multiple trials may be performed, by specifying the --trials " "option. The model with greatest log-likelihood will be taken."); // Parameters for training. PARAM_STRING_IN_REQ("input_file", "File containing the data on which the model " "will be fit.", "i"); PARAM_INT_IN_REQ("gaussians", "Number of Gaussians in the GMM.", "g"); PARAM_INT_IN("seed", "Random seed. If 0, 'std::time(NULL)' is used.", "s", 0); PARAM_INT_IN("trials", "Number of trials to perform in training GMM.", "t", 1); // Parameters for EM algorithm. PARAM_DOUBLE_IN("tolerance", "Tolerance for convergence of EM.", "T", 1e-10); PARAM_FLAG("no_force_positive", "Do not force the covariance matrices to be " "positive definite.", "P"); PARAM_INT_IN("max_iterations", "Maximum number of iterations of EM algorithm " "(passing 0 will run until convergence).", "n", 250); // Parameters for dataset modification. PARAM_DOUBLE_IN("noise", "Variance of zero-mean Gaussian noise to add to data.", "N", 0); // Parameters for k-means initialization. 
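// (Roughly, the refined start heuristic of Bradley and Fayyad runs the
// clustering on several small random samples of the data and then refines the
// collected sample centers into a single good set of initial centroids,
// instead of starting k-means from purely random points.)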
PARAM_FLAG("refined_start", "During the initialization, use refined initial " "positions for k-means clustering (Bradley and Fayyad, 1998).", "r"); PARAM_INT_IN("samplings", "If using --refined_start, specify the number of " "samplings used for initial points.", "S", 100); PARAM_DOUBLE_IN("percentage", "If using --refined_start, specify the percentage" " of the dataset used for each sampling (should be between 0.0 and 1.0).", "p", 0.02); // Parameters for model saving/loading. PARAM_STRING_IN("input_model_file", "File containing initial input GMM model.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained GMM model to.", "M"); int main(int argc, char* argv[]) { CLI::ParseCommandLine(argc, argv); // Check parameters and load data. if (CLI::GetParam("seed") != 0) math::RandomSeed((size_t) CLI::GetParam("seed")); else math::RandomSeed((size_t) std::time(NULL)); const int gaussians = CLI::GetParam("gaussians"); if (gaussians <= 0) { Log::Fatal << "Invalid number of Gaussians (" << gaussians << "); must " "be greater than or equal to 1." << std::endl; } if (!CLI::HasParam("output_model_file")) Log::Warn << "--output_model_file is not specified, so no model will be " << "saved!" << endl; arma::mat dataPoints; data::Load(CLI::GetParam("input_file"), dataPoints, true); // Do we need to add noise to the dataset? if (CLI::HasParam("noise")) { Timer::Start("noise_addition"); const double noise = CLI::GetParam("noise"); dataPoints += noise * arma::randn(dataPoints.n_rows, dataPoints.n_cols); Log::Info << "Added zero-mean Gaussian noise with variance " << noise << " to dataset." << std::endl; Timer::Stop("noise_addition"); } // Initialize GMM. GMM gmm(size_t(gaussians), dataPoints.n_rows); if (CLI::HasParam("input_model_file")) { data::Load(CLI::GetParam("input_model_file"), "gmm", gmm, true); if (gmm.Dimensionality() != dataPoints.n_rows) Log::Fatal << "Given input data (with --input_file) has dimensionality " << dataPoints.n_rows << ", but the initial model (given with " << "--input_model_file) has dimensionality " << gmm.Dimensionality() << "!" << endl; } // Gather parameters for EMFit object. const size_t maxIterations = (size_t) CLI::GetParam("max_iterations"); const double tolerance = CLI::GetParam("tolerance"); const bool forcePositive = !CLI::HasParam("no_force_positive"); // This gets a bit weird because we need different types depending on whether // --refined_start is specified. double likelihood; if (CLI::HasParam("refined_start")) { const int samplings = CLI::GetParam("samplings"); const double percentage = CLI::GetParam("percentage"); if (samplings <= 0) Log::Fatal << "Number of samplings (" << samplings << ") must be greater" << " than 0!" << std::endl; if (percentage <= 0.0 || percentage > 1.0) Log::Fatal << "Percentage for sampling (" << percentage << ") must be " << "greater than 0.0 and less than or equal to 1.0!" << std::endl; typedef KMeans KMeansType; // These are default parameters. KMeansType k(1000, metric::SquaredEuclideanDistance(), RefinedStart(samplings, percentage)); // Depending on the value of 'forcePositive', we have to use different // types. if (forcePositive) { // Compute the parameters of the model using the EM algorithm. Timer::Start("em"); EMFit em(maxIterations, tolerance, k); likelihood = gmm.Train(dataPoints, CLI::GetParam("trials"), false, em); Timer::Stop("em"); } else { // Compute the parameters of the model using the EM algorithm. 
Timer::Start("em"); EMFit em(maxIterations, tolerance, k); likelihood = gmm.Train(dataPoints, CLI::GetParam("trials"), false, em); Timer::Stop("em"); } } else { // Depending on the value of forcePositive, we have to use different types. if (forcePositive) { // Compute the parameters of the model using the EM algorithm. Timer::Start("em"); EMFit<> em(maxIterations, tolerance); likelihood = gmm.Train(dataPoints, CLI::GetParam("trials"), false, em); Timer::Stop("em"); } else { // Compute the parameters of the model using the EM algorithm. Timer::Start("em"); EMFit, NoConstraint> em(maxIterations, tolerance); likelihood = gmm.Train(dataPoints, CLI::GetParam("trials"), false, em); Timer::Stop("em"); } } Log::Info << "Log-likelihood of estimate: " << likelihood << "." << endl; if (CLI::HasParam("output_model_file")) data::Save(CLI::GetParam("output_model_file"), "gmm", gmm); } mlpack-2.2.5/src/mlpack/methods/gmm/no_constraint.hpp000066400000000000000000000021261315013601400226340ustar00rootroot00000000000000/** * @file no_constraint.hpp * @author Ryan Curtin * * No constraint on the covariance matrix. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_GMM_NO_CONSTRAINT_HPP #define MLPACK_METHODS_GMM_NO_CONSTRAINT_HPP #include namespace mlpack { namespace gmm { /** * This class enforces no constraint on the covariance matrix. It's faster this * way, although depending on your situation you may end up with a * non-invertible covariance matrix. */ class NoConstraint { public: //! Do nothing, and do not modify the covariance matrix. static void ApplyConstraint(const arma::mat& /* covariance */) { } //! Serialize the object (nothing to do). template static void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace gmm } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/gmm/positive_definite_constraint.hpp000066400000000000000000000054161315013601400257360ustar00rootroot00000000000000/** * @file positive_definite_constraint.hpp * @author Ryan Curtin * * Restricts a covariance matrix to being positive definite. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_GMM_POSITIVE_DEFINITE_CONSTRAINT_HPP #define MLPACK_METHODS_GMM_POSITIVE_DEFINITE_CONSTRAINT_HPP #include namespace mlpack { namespace gmm { /** * Given a covariance matrix, force the matrix to be positive definite. Also * force a minimum value on the diagonal, so that even if the matrix is * invertible, it doesn't cause problems with Cholesky decompositions. The * forcing here is also done in order to bring the condition number of the * matrix under 1e5 (10k), which should help with numerical stability. */ class PositiveDefiniteConstraint { public: /** * Apply the positive definiteness constraint to the given covariance matrix, * and ensure each value on the diagonal is at least 1e-50. * * @param covariance Covariance matrix. */ static void ApplyConstraint(arma::mat& covariance) { // What we want to do is make sure that the matrix is positive definite and // that the condition number isn't too large. 
We also need to ensure that // the covariance matrix is not too close to zero (hence, we ensure that all // eigenvalues are at least 1e-50). arma::vec eigval; arma::mat eigvec; arma::eig_sym(eigval, eigvec, covariance); // If the matrix is not positive definite or if the condition number is // large, we must project it back onto the cone of positive definite // matrices with reasonable condition number (I'm picking 1e5 here, not for // any particular reason). if ((eigval[0] < 0.0) || ((eigval[eigval.n_elem - 1] / eigval[0]) > 1e5) || (eigval[eigval.n_elem - 1] < 1e-50)) { // Project any negative eigenvalues back to non-negative, and project any // too-small eigenvalues to a large enough value. Make them as small as // possible to satisfy our constraint on the condition number. const double minEigval = std::max(eigval[eigval.n_elem - 1] / 1e5, 1e-50); for (size_t i = 0; i < eigval.n_elem; ++i) eigval[i] = std::max(eigval[i], minEigval); // Now reassemble the covariance matrix. covariance = eigvec * arma::diagmat(eigval) * eigvec.t(); } } //! Serialize the constraint (which stores nothing, so, nothing to do). template static void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace gmm } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hmm/000077500000000000000000000000001315013601400172435ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/hmm/CMakeLists.txt000066400000000000000000000012201315013601400217760ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES hmm.hpp hmm_impl.hpp hmm_regression.hpp hmm_regression_impl.hpp hmm_util.hpp hmm_util_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(hmm_train) add_cli_executable(hmm_loglik) add_cli_executable(hmm_viterbi) add_cli_executable(hmm_generate) mlpack-2.2.5/src/mlpack/methods/hmm/hmm.hpp000066400000000000000000000371161315013601400205450ustar00rootroot00000000000000/** * @file hmm.hpp * @author Ryan Curtin * @author Tran Quoc Long * @author Michael Fox * * Definition of HMM class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HMM_HMM_HPP #define MLPACK_METHODS_HMM_HMM_HPP #include #include namespace mlpack { namespace hmm /** Hidden Markov Models. */ { /** * A class that represents a Hidden Markov Model with an arbitrary type of * emission distribution. This HMM class supports training (supervised and * unsupervised), prediction of state sequences via the Viterbi algorithm, * estimation of state probabilities, generation of random sequences, and * calculation of the log-likelihood of a given sequence. * * The template parameter, Distribution, specifies the distribution which the * emissions follow. The class should implement the following functions: * * @code * class Distribution * { * public: * // The type of observation used by this distribution. * typedef something DataType; * * // Return the probability of the given observation. 
* double Probability(const DataType& observation) const; * * // Estimate the distribution based on the given observations. * void Train(const std::vector& observations); * * // Estimate the distribution based on the given observations, given also * // the probability of each observation coming from this distribution. * void Train(const std::vector& observations, * const std::vector& probabilities); * }; * @endcode * * See the mlpack::distribution::DiscreteDistribution class for an example. One * would use the DiscreteDistribution class when the observations are * non-negative integers. Other distributions could be Gaussians, a mixture of * Gaussians (GMM), or any other probability distribution implementing the * four Distribution functions. * * Usage of the HMM class generally involves either training an HMM or loading * an already-known HMM and taking probability measurements of sequences. * Example code for supervised training of a Gaussian HMM (that is, where the * emission output distribution is a single Gaussian for each hidden state) is * given below. * * @code * extern arma::mat observations; // Each column is an observation. * extern arma::Row states; // Hidden states for each observation. * // Create an untrained HMM with 5 hidden states and default (N(0, 1)) * // Gaussian distributions with the dimensionality of the dataset. * HMM hmm(5, GaussianDistribution(observations.n_rows)); * * // Train the HMM (the labels could be omitted to perform unsupervised * // training). * hmm.Train(observations, states); * @endcode * * Once initialized, the HMM can evaluate the probability of a certain sequence * (with LogLikelihood()), predict the most likely sequence of hidden states * (with Predict()), generate a sequence (with Generate()), or estimate the * probabilities of each state for a sequence of observations (with Train()). * * @tparam Distribution Type of emission distribution for this HMM. */ template class HMM { public: /** * Create the Hidden Markov Model with the given number of hidden states and * the given default distribution for emissions. The dimensionality of the * observations is taken from the emissions variable, so it is important that * the given default emission distribution is set with the correct * dimensionality. Alternately, set the dimensionality with Dimensionality(). * Optionally, the tolerance for convergence of the Baum-Welch algorithm can * be set. * * By default, the transition matrix and initial probability vector are set to * contain equal probability for each state. * * @param states Number of states. * @param emissions Default distribution for emissions. * @param tolerance Tolerance for convergence of training algorithm * (Baum-Welch). */ HMM(const size_t states = 0, const Distribution emissions = Distribution(), const double tolerance = 1e-5); /** * Create the Hidden Markov Model with the given initial probability vector, * the given transition matrix, and the given emission distributions. The * dimensionality of the observations of the HMM are taken from the given * emission distributions. Alternately, the dimensionality can be set with * Dimensionality(). * * The initial state probability vector should have length equal to the number * of states, and each entry represents the probability of being in the given * state at time T = 0 (the beginning of a sequence). * * The transition matrix should be such that T(i, j) is the probability of * transition to state i from state j. The columns of the matrix should sum * to 1. 
* * The emission matrix should be such that E(i, j) is the probability of * emission i while in state j. The columns of the matrix should sum to 1. * * Optionally, the tolerance for convergence of the Baum-Welch algorithm can * be set. * * @param initial Initial state probabilities. * @param transition Transition matrix. * @param emission Emission distributions. * @param tolerance Tolerance for convergence of training algorithm * (Baum-Welch). */ HMM(const arma::vec& initial, const arma::mat& transition, const std::vector& emission, const double tolerance = 1e-5); /** * Train the model using the Baum-Welch algorithm, with only the given * unlabeled observations. Instead of giving a guess transition and emission * matrix here, do that in the constructor. Each matrix in the vector of data * sequences holds an individual data sequence; each point in each individual * data sequence should be a column in the matrix. The number of rows in each * matrix should be equal to the dimensionality of the HMM (which is set in * the constructor). * * It is preferable to use the other overload of Train(), with labeled data. * That will produce much better results. However, if labeled data is * unavailable, this will work. In addition, it is possible to use Train() * with labeled data first, and then continue to train the model using this * overload of Train() with unlabeled data. * * The tolerance of the Baum-Welch algorithm can be set either in the * constructor or with the Tolerance() method. When the change in * log-likelihood of the model between iterations is less than the tolerance, * the Baum-Welch algorithm terminates. * * @note * Train() can be called multiple times with different sequences; each time it * is called, it uses the current parameters of the HMM as a starting point * for training. * @endnote * * @param dataSeq Vector of observation sequences. */ void Train(const std::vector& dataSeq); /** * Train the model using the given labeled observations; the transition and * emission matrices are directly estimated. Each matrix in the vector of * data sequences corresponds to a vector in the vector of state sequences. * Each point in each individual data sequence should be a column in the * matrix, and its state should be the corresponding element in the state * sequence vector. For instance, dataSeq[0].col(3) corresponds to the fourth * observation in the first data sequence, and its state is stateSeq[0][3]. * The number of rows in each matrix should be equal to the dimensionality of * the HMM (which is set in the constructor). * * @note * Train() can be called multiple times with different sequences; each time it * is called, it uses the current parameters of the HMM as a starting point * for training. * @endnote * * @param dataSeq Vector of observation sequences. * @param stateSeq Vector of state sequences, corresponding to each * observation. */ void Train(const std::vector& dataSeq, const std::vector >& stateSeq); /** * Estimate the probabilities of each hidden state at each time step for each * given data observation, using the Forward-Backward algorithm. Each matrix * which is returned has columns equal to the number of data observations, and * rows equal to the number of hidden states in the model. The log-likelihood * of the most probable sequence is returned. * * @param dataSeq Sequence of observations. * @param stateProb Matrix in which the probabilities of each state at each * time interval will be stored. 
* @param forwardProb Matrix in which the forward probabilities of each state * at each time interval will be stored. * @param backwardProb Matrix in which the backward probabilities of each * state at each time interval will be stored. * @param scales Vector in which the scaling factors at each time interval * will be stored. * @return Log-likelihood of most likely state sequence. */ double Estimate(const arma::mat& dataSeq, arma::mat& stateProb, arma::mat& forwardProb, arma::mat& backwardProb, arma::vec& scales) const; /** * Estimate the probabilities of each hidden state at each time step of each * given data observation, using the Forward-Backward algorithm. The returned * matrix of state probabilities has columns equal to the number of data * observations, and rows equal to the number of hidden states in the model. * The log-likelihood of the most probable sequence is returned. * * @param dataSeq Sequence of observations. * @param stateProb Probabilities of each state at each time interval. * @return Log-likelihood of most likely state sequence. */ double Estimate(const arma::mat& dataSeq, arma::mat& stateProb) const; /** * Generate a random data sequence of the given length. The data sequence is * stored in the dataSequence parameter, and the state sequence is stored in * the stateSequence parameter. Each column of dataSequence represents a * random observation. * * @param length Length of random sequence to generate. * @param dataSequence Vector to store data in. * @param stateSequence Vector to store states in. * @param startState Hidden state to start sequence in (default 0). */ void Generate(const size_t length, arma::mat& dataSequence, arma::Row& stateSequence, const size_t startState = 0) const; /** * Compute the most probable hidden state sequence for the given data * sequence, using the Viterbi algorithm, returning the log-likelihood of the * most likely state sequence. * * @param dataSeq Sequence of observations. * @param stateSeq Vector in which the most probable state sequence will be * stored. * @return Log-likelihood of most probable state sequence. */ double Predict(const arma::mat& dataSeq, arma::Row& stateSeq) const; /** * Compute the log-likelihood of the given data sequence. * * @param dataSeq Data sequence to evaluate the likelihood of. * @return Log-likelihood of the given sequence. */ double LogLikelihood(const arma::mat& dataSeq) const; /** * HMM filtering. Computes the k-step-ahead expected emission at each time * conditioned only on prior observations. That is * E{ Y[t+k] | Y[0], ..., Y[t] }. * The returned matrix has columns equal to the number of observations. Note * that the expectation may not be meaningful for discrete emissions. * * @param dataSeq Sequence of observations. * @param filterSeq Vector in which the expected emission sequence will be * stored. * @param ahead Number of steps ahead (k) for expectations. */ void Filter(const arma::mat& dataSeq, arma::mat& filterSeq, size_t ahead = 0) const; /** * HMM smoothing. Computes expected emission at each time conditioned on all * observations. That is * E{ Y[t] | Y[0], ..., Y[T] }. * The returned matrix has columns equal to the number of observations. Note * that the expectation may not be meaningful for discrete emissions. * * @param dataSeq Sequence of observations. * @param smoothSeq Vector in which the expected emission sequence will be * stored. */ void Smooth(const arma::mat& dataSeq, arma::mat& smoothSeq) const; //! Return the vector of initial state probabilities. 
const arma::vec& Initial() const { return initial; } //! Modify the vector of initial state probabilities. arma::vec& Initial() { return initial; } //! Return the transition matrix. const arma::mat& Transition() const { return transition; } //! Return a modifiable transition matrix reference. arma::mat& Transition() { return transition; } //! Return the emission distributions. const std::vector& Emission() const { return emission; } //! Return a modifiable emission probability matrix reference. std::vector& Emission() { return emission; } //! Get the dimensionality of observations. size_t Dimensionality() const { return dimensionality; } //! Set the dimensionality of observations. size_t& Dimensionality() { return dimensionality; } //! Get the tolerance of the Baum-Welch algorithm. double Tolerance() const { return tolerance; } //! Modify the tolerance of the Baum-Welch algorithm. double& Tolerance() { return tolerance; } /** * Serialize the object. */ template void Serialize(Archive& ar, const unsigned int version); protected: // Helper functions. /** * The Forward algorithm (part of the Forward-Backward algorithm). Computes * forward probabilities for each state for each observation in the given data * sequence. The returned matrix has rows equal to the number of hidden * states and columns equal to the number of observations. * * @param dataSeq Data sequence to compute probabilities for. * @param scales Vector in which scaling factors will be saved. * @param forwardProb Matrix in which forward probabilities will be saved. */ void Forward(const arma::mat& dataSeq, arma::vec& scales, arma::mat& forwardProb) const; /** * The Backward algorithm (part of the Forward-Backward algorithm). Computes * backward probabilities for each state for each observation in the given * data sequence, using the scaling factors found (presumably) by Forward(). * The returned matrix has rows equal to the number of hidden states and * columns equal to the number of observations. * * @param dataSeq Data sequence to compute probabilities for. * @param scales Vector of scaling factors. * @param backwardProb Matrix in which backward probabilities will be saved. */ void Backward(const arma::mat& dataSeq, const arma::vec& scales, arma::mat& backwardProb) const; //! Set of emission probability distributions; one for each state. std::vector emission; //! Transition probability matrix. arma::mat transition; private: //! Initial state probability vector. arma::vec initial; //! Dimensionality of observations. size_t dimensionality; //! Tolerance of Baum-Welch algorithm. double tolerance; }; } // namespace hmm } // namespace mlpack // Include implementation. #include "hmm_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/hmm/hmm_generate_main.cpp000066400000000000000000000070701315013601400234120ustar00rootroot00000000000000/** * @file hmm_generate_main.cpp * @author Ryan Curtin * @author Michael Fox * * Compute the most probably hidden state sequence of a given observation * sequence for a given HMM. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include #include #include #include "hmm.hpp" #include "hmm_util.hpp" #include PROGRAM_INFO("Hidden Markov Model (HMM) Sequence Generator", "This " "utility takes an already-trained HMM (--model_file) and generates a " "random observation sequence and hidden state sequence based on its " "parameters, saving them to the specified files (--output_file and " "--state_file)"); PARAM_STRING_IN_REQ("model_file", "File containing HMM.", "m"); PARAM_INT_IN_REQ("length", "Length of sequence to generate.", "l"); PARAM_INT_IN("start_state", "Starting state of sequence.", "t", 0); PARAM_STRING_OUT("output_file", "File to save observation sequence to.", "o"); PARAM_STRING_OUT("state_file", "File to save hidden state sequence to.", "S"); PARAM_INT_IN("seed", "Random seed. If 0, 'std::time(NULL)' is used.", "s", 0); using namespace mlpack; using namespace mlpack::hmm; using namespace mlpack::distribution; using namespace mlpack::util; using namespace mlpack::gmm; using namespace mlpack::math; using namespace arma; using namespace std; // Because we don't know what the type of our HMM is, we need to write a // function which can take arbitrary HMM types. struct Generate { template static void Apply(HMMType& hmm, void* /* extraInfo */) { mat observations; Row sequence; // Load the parameters. const size_t startState = (size_t) CLI::GetParam("start_state"); const size_t length = (size_t) CLI::GetParam("length"); const string outputFile = CLI::GetParam("output_file"); const string sequenceFile = CLI::GetParam("state_file"); Log::Info << "Generating sequence of length " << length << "..." << endl; if (startState >= hmm.Transition().n_rows) Log::Fatal << "Invalid start state (" << startState << "); must be " << "between 0 and number of states (" << hmm.Transition().n_rows << ")!" << endl; hmm.Generate(length, observations, sequence, startState); // Now save the output. if (CLI::HasParam("output_file")) data::Save(outputFile, observations, true); // Do we want to save the hidden sequence? if (CLI::HasParam("state_file")) data::Save(sequenceFile, sequence, true); if (outputFile == "" && sequenceFile == "") Log::Warn << "Neither --output_file nor --state_file are specified; no " << "output will be saved." << endl; } }; int main(int argc, char** argv) { // Parse command line options. CLI::ParseCommandLine(argc, argv); if (!CLI::HasParam("output_file") && !CLI::HasParam("state_file")) Log::Warn << "Neither --output_file nor --state_file are specified; no " << "output will be saved!" << endl; // Set random seed. if (CLI::GetParam("seed") != 0) RandomSeed((size_t) CLI::GetParam("seed")); else RandomSeed((size_t) time(NULL)); // Load model, and perform the generation. const string modelFile = CLI::GetParam("model_file"); LoadHMMAndPerformAction(modelFile); } mlpack-2.2.5/src/mlpack/methods/hmm/hmm_impl.hpp000066400000000000000000000517001315013601400215610ustar00rootroot00000000000000/** * @file hmm_impl.hpp * @author Ryan Curtin * @author Tran Quoc Long * @author Michael Fox * * Implementation of HMM class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HMM_HMM_IMPL_HPP #define MLPACK_METHODS_HMM_HMM_IMPL_HPP // Just in case... 
#include "hmm.hpp" namespace mlpack { namespace hmm { /** * Create the Hidden Markov Model with the given number of hidden states and the * given number of emission states. */ template HMM::HMM(const size_t states, const Distribution emissions, const double tolerance) : emission(states, /* default distribution */ emissions), transition(arma::randu(states, states)), initial(arma::randu(states) / (double) states), dimensionality(emissions.Dimensionality()), tolerance(tolerance) { // Normalize the transition probabilities and initial state probabilities. initial /= arma::accu(initial); for (size_t i = 0; i < transition.n_cols; ++i) transition.col(i) /= arma::accu(transition.col(i)); } /** * Create the Hidden Markov Model with the given transition matrix and the given * emission probability matrix. */ template HMM::HMM(const arma::vec& initial, const arma::mat& transition, const std::vector& emission, const double tolerance) : emission(emission), transition(transition), initial(initial), tolerance(tolerance) { // Set the dimensionality, if we can. if (emission.size() > 0) dimensionality = emission[0].Dimensionality(); else { Log::Warn << "HMM::HMM(): no emission distributions given; assuming a " << "dimensionality of 0 and hoping it gets set right later." << std::endl; dimensionality = 0; } } /** * Train the model using the Baum-Welch algorithm, with only the given unlabeled * observations. Each matrix in the vector of data sequences holds an * individual data sequence; each point in each individual data sequence should * be a column in the matrix. The number of rows in each matrix should be equal * to the dimensionality of the HMM. * * It is preferable to use the other overload of Train(), with labeled data. * That will produce much better results. However, if labeled data is * unavailable, this will work. In addition, it is possible to use Train() with * labeled data first, and then continue to train the model using this overload * of Train() with unlabeled data. * * @param dataSeq Set of data sequences to train on. */ template void HMM::Train(const std::vector& dataSeq) { // We should allow a guess at the transition and emission matrices. double loglik = 0; double oldLoglik = 0; // Maximum iterations? size_t iterations = 1000; // Find length of all sequences and ensure they are the correct size. size_t totalLength = 0; for (size_t seq = 0; seq < dataSeq.size(); seq++) { totalLength += dataSeq[seq].n_cols; if (dataSeq[seq].n_rows != dimensionality) Log::Fatal << "HMM::Train(): data sequence " << seq << " has " << "dimensionality " << dataSeq[seq].n_rows << " (expected " << dimensionality << " dimensions)." << std::endl; } // These are used later for training of each distribution. We initialize it // all now so we don't have to do any allocation later on. std::vector emissionProb(transition.n_cols, arma::vec(totalLength)); arma::mat emissionList(dimensionality, totalLength); // This should be the Baum-Welch algorithm (EM for HMM estimation). This // follows the procedure outlined in Elliot, Aggoun, and Moore's book "Hidden // Markov Models: Estimation and Control", pp. 36-40. for (size_t iter = 0; iter < iterations; iter++) { // Clear new transition matrix and emission probabilities. arma::vec newInitial(transition.n_rows); newInitial.zeros(); arma::mat newTransition(transition.n_rows, transition.n_cols); newTransition.zeros(); // Reset log likelihood. loglik = 0; // Sum over time. size_t sumTime = 0; // Loop over each sequence. 
for (size_t seq = 0; seq < dataSeq.size(); seq++) { arma::mat stateProb; arma::mat forward; arma::mat backward; arma::vec scales; // Add the log-likelihood of this sequence. This is the E-step. loglik += Estimate(dataSeq[seq], stateProb, forward, backward, scales); // Add to estimate of initial probability for state j. for (size_t j = 0; j < transition.n_cols; ++j) newInitial[j] += stateProb(j, 0); // Now re-estimate the parameters. This is the M-step. // pi_i = sum_d ((1 / P(seq[d])) sum_t (f(i, 0) b(i, 0)) // T_ij = sum_d ((1 / P(seq[d])) sum_t (f(i, t) T_ij E_i(seq[d][t]) b(i, // t + 1))) // E_ij = sum_d ((1 / P(seq[d])) sum_{t | seq[d][t] = j} f(i, t) b(i, t) // We store the new estimates in a different matrix. for (size_t t = 0; t < dataSeq[seq].n_cols; ++t) { for (size_t j = 0; j < transition.n_cols; ++j) { if (t < dataSeq[seq].n_cols - 1) { // Estimate of T_ij (probability of transition from state j to state // i). We postpone multiplication of the old T_ij until later. for (size_t i = 0; i < transition.n_rows; i++) newTransition(i, j) += forward(j, t) * backward(i, t + 1) * emission[i].Probability(dataSeq[seq].unsafe_col(t + 1)) / scales[t + 1]; } // Add to list of emission observations, for Distribution::Train(). emissionList.col(sumTime) = dataSeq[seq].col(t); emissionProb[j][sumTime] = stateProb(j, t); } sumTime++; } } // Normalize the new initial probabilities. if (dataSeq.size() > 1) initial = newInitial / dataSeq.size(); else initial = newInitial; // Assign the new transition matrix. We use %= (element-wise // multiplication) because every element of the new transition matrix must // still be multiplied by the old elements (this is the multiplication we // earlier postponed). transition %= newTransition; // Now we normalize the transition matrix. for (size_t i = 0; i < transition.n_cols; i++) { const double sum = accu(transition.col(i)); if (sum > 0.0) transition.col(i) /= sum; else transition.col(i).fill(1.0 / (double) transition.n_rows); } // Now estimate emission probabilities. for (size_t state = 0; state < transition.n_cols; state++) emission[state].Train(emissionList, emissionProb[state]); Log::Debug << "Iteration " << iter << ": log-likelihood " << loglik << "." << std::endl; if (std::abs(oldLoglik - loglik) < tolerance) { Log::Debug << "Converged after " << iter << " iterations." << std::endl; break; } oldLoglik = loglik; } } /** * Train the model using the given labeled observations; the transition and * emission matrices are directly estimated. */ template void HMM::Train(const std::vector& dataSeq, const std::vector >& stateSeq) { // Simple error checking. if (dataSeq.size() != stateSeq.size()) { Log::Fatal << "HMM::Train(): number of data sequences (" << dataSeq.size() << ") not equal to number of state sequences (" << stateSeq.size() << ")." << std::endl; } initial.zeros(); transition.zeros(); // Estimate the transition and emission matrices directly from the // observations. The emission list holds the time indices for observations // from each state. std::vector > > emissionList(transition.n_cols); for (size_t seq = 0; seq < dataSeq.size(); seq++) { // Simple error checking. if (dataSeq[seq].n_cols != stateSeq[seq].n_elem) { Log::Fatal << "HMM::Train(): number of observations (" << dataSeq[seq].n_cols << ") in sequence " << seq << " not equal to number of states (" << stateSeq[seq].n_cols << ") in sequence " << seq << "." 
<< std::endl; } if (dataSeq[seq].n_rows != dimensionality) { Log::Fatal << "HMM::Train(): data sequence " << seq << " has " << "dimensionality " << dataSeq[seq].n_rows << " (expected " << dimensionality << " dimensions)." << std::endl; } // Loop over each observation in the sequence. For estimation of the // transition matrix, we must ignore the last observation. initial[stateSeq[seq][0]]++; for (size_t t = 0; t < dataSeq[seq].n_cols - 1; t++) { transition(stateSeq[seq][t + 1], stateSeq[seq][t])++; emissionList[stateSeq[seq][t]].push_back(std::make_pair(seq, t)); } // Last observation. emissionList[stateSeq[seq][stateSeq[seq].n_elem - 1]].push_back( std::make_pair(seq, stateSeq[seq].n_elem - 1)); } // Normalize initial weights. initial /= accu(initial); // Normalize transition matrix. for (size_t col = 0; col < transition.n_cols; col++) { // If the transition probability sum is greater than 0 in this column, the // emission probability sum will also be greater than 0. We want to avoid // division by 0. double sum = accu(transition.col(col)); if (sum > 0) transition.col(col) /= sum; } // Estimate emission matrix. for (size_t state = 0; state < transition.n_cols; state++) { // Generate full sequence of observations for this state from the list of // emissions that are from this state. if (emissionList[state].size() > 0) { arma::mat emissions(dimensionality, emissionList[state].size()); for (size_t i = 0; i < emissions.n_cols; i++) { emissions.col(i) = dataSeq[emissionList[state][i].first].col( emissionList[state][i].second); } emission[state].Train(emissions); } else { Log::Warn << "There are no observations in training data with hidden " << "state " << state << "! The corresponding emission distribution " << "is likely to be meaningless." << std::endl; } } } /** * Estimate the probabilities of each hidden state at each time step for each * given data observation. */ template double HMM::Estimate(const arma::mat& dataSeq, arma::mat& stateProb, arma::mat& forwardProb, arma::mat& backwardProb, arma::vec& scales) const { // First run the forward-backward algorithm. Forward(dataSeq, scales, forwardProb); Backward(dataSeq, scales, backwardProb); // Now assemble the state probability matrix based on the forward and backward // probabilities. stateProb = forwardProb % backwardProb; // Finally assemble the log-likelihood and return it. return accu(log(scales)); } /** * Estimate the probabilities of each hidden state at each time step for each * given data observation. */ template double HMM::Estimate(const arma::mat& dataSeq, arma::mat& stateProb) const { // We don't need to save these. arma::mat forwardProb, backwardProb; arma::vec scales; return Estimate(dataSeq, stateProb, forwardProb, backwardProb, scales); } /** * Generate a random data sequence of a given length. The data sequence is * stored in the dataSequence parameter, and the state sequence is stored in * the stateSequence parameter. */ template void HMM::Generate(const size_t length, arma::mat& dataSequence, arma::Row& stateSequence, const size_t startState) const { // Set vectors to the right size. stateSequence.set_size(length); dataSequence.set_size(dimensionality, length); // Set start state (default is 0). stateSequence[0] = startState; // Choose first emission state. double randValue = math::Random(); // We just have to find where our random value sits in the probability // distribution of emissions for our starting state. 
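  // (Note, not part of the original source: the emission itself is drawn
  // directly by the distribution's Random() method; the inverse-CDF
  // ("roulette wheel") walk over cumulative probabilities is used further
  // below when sampling the hidden-state transitions.)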
dataSequence.col(0) = emission[startState].Random(); // Now choose the states and emissions for the rest of the sequence. for (size_t t = 1; t < length; t++) { // First choose the hidden state. randValue = math::Random(); // Now find where our random value sits in the probability distribution of // state changes. double probSum = 0; for (size_t st = 0; st < transition.n_rows; st++) { probSum += transition(st, stateSequence[t - 1]); if (randValue <= probSum) { stateSequence[t] = st; break; } } // Now choose the emission. dataSequence.col(t) = emission[stateSequence[t]].Random(); } } /** * Compute the most probable hidden state sequence for the given observation * using the Viterbi algorithm. Returns the log-likelihood of the most likely * sequence. */ template double HMM::Predict(const arma::mat& dataSeq, arma::Row& stateSeq) const { // This is an implementation of the Viterbi algorithm for finding the most // probable sequence of states to produce the observed data sequence. We // don't use log-likelihoods to save that little bit of time, but we'll // calculate the log-likelihood at the end of it all. stateSeq.set_size(dataSeq.n_cols); arma::mat logStateProb(transition.n_rows, dataSeq.n_cols); arma::mat stateSeqBack(transition.n_rows, dataSeq.n_cols); // Store the logs of the transposed transition matrix. This is because we // will be using the rows of the transition matrix. arma::mat logTrans(log(trans(transition))); // The calculation of the first state is slightly different; the probability // of the first state being state j is the maximum probability that the state // came to be j from another state. logStateProb.col(0).zeros(); for (size_t state = 0; state < transition.n_rows; state++) { logStateProb(state, 0) = log(initial[state] * emission[state].Probability(dataSeq.unsafe_col(0))); stateSeqBack(state, 0) = state; } // Store the best first state. arma::uword index; for (size_t t = 1; t < dataSeq.n_cols; t++) { // Assemble the state probability for this element. // Given that we are in state j, we use state with the highest probability // of being the previous state. for (size_t j = 0; j < transition.n_rows; j++) { arma::vec prob = logStateProb.col(t - 1) + logTrans.col(j); logStateProb(j, t) = prob.max(index) + log(emission[j].Probability(dataSeq.unsafe_col(t))); stateSeqBack(j, t) = index; } } // Backtrack to find the most probable state sequence. logStateProb.unsafe_col(dataSeq.n_cols - 1).max(index); stateSeq[dataSeq.n_cols - 1] = index; for (size_t t = 2; t <= dataSeq.n_cols; t++) stateSeq[dataSeq.n_cols - t] = stateSeqBack(stateSeq[dataSeq.n_cols - t + 1], dataSeq.n_cols - t + 1); return logStateProb(stateSeq(dataSeq.n_cols - 1), dataSeq.n_cols - 1); } /** * Compute the log-likelihood of the given data sequence. */ template double HMM::LogLikelihood(const arma::mat& dataSeq) const { arma::mat forward; arma::vec scales; Forward(dataSeq, scales, forward); // The log-likelihood is the log of the scales for each time step. return accu(log(scales)); } /** * HMM filtering. */ template void HMM::Filter(const arma::mat& dataSeq, arma::mat& filterSeq, size_t ahead) const { // First run the forward algorithm. arma::mat forwardProb; arma::vec scales; Forward(dataSeq, scales, forwardProb); // Propagate state ahead. if (ahead != 0) forwardProb = pow(transition, ahead) * forwardProb; // Compute expected emissions. // Will not work for distributions without a Mean() function. 
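  // (For reference, not part of the original source: the filtered estimate
  // accumulated below is
  //   E{ Y[t+k] | Y[0], ..., Y[t] } = sum_i Mean_i * P(Q_{t+k} = i | Y[0..t]),
  // i.e. each state's mean emission weighted by the propagated forward
  // probabilities.)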
filterSeq.zeros(dimensionality, dataSeq.n_cols); for (size_t i = 0; i < emission.size(); i++) filterSeq += emission[i].Mean() * forwardProb.row(i); } /** * HMM smoothing. */ template void HMM::Smooth(const arma::mat& dataSeq, arma::mat& smoothSeq) const { // First run the forward algorithm. arma::mat stateProb; Estimate(dataSeq, stateProb); // Compute expected emissions. // Will not work for distributions without a Mean() function. smoothSeq.zeros(dimensionality, dataSeq.n_cols); for (size_t i = 0; i < emission.size(); i++) smoothSeq += emission[i].Mean() * stateProb.row(i); } /** * The Forward procedure (part of the Forward-Backward algorithm). */ template void HMM::Forward(const arma::mat& dataSeq, arma::vec& scales, arma::mat& forwardProb) const { // Our goal is to calculate the forward probabilities: // P(X_k | o_{1:k}) for all possible states X_k, for each time point k. forwardProb.zeros(transition.n_rows, dataSeq.n_cols); scales.zeros(dataSeq.n_cols); // The first entry in the forward algorithm uses the initial state // probabilities. Note that MATLAB assumes that the starting state (at // t = -1) is state 0; this is not our assumption here. To force that // behavior, you could append a single starting state to every single data // sequence and that should produce results in line with MATLAB. for (size_t state = 0; state < transition.n_rows; state++) forwardProb(state, 0) = initial(state) * emission[state].Probability(dataSeq.unsafe_col(0)); // Then normalize the column. scales[0] = accu(forwardProb.col(0)); if (scales[0] > 0.0) forwardProb.col(0) /= scales[0]; // Now compute the probabilities for each successive observation. for (size_t t = 1; t < dataSeq.n_cols; t++) { for (size_t j = 0; j < transition.n_rows; j++) { // The forward probability of state j at time t is the sum over all states // of the probability of the previous state transitioning to the current // state and emitting the given observation. forwardProb(j, t) = accu(forwardProb.col(t - 1) % trans(transition.row(j))) * emission[j].Probability(dataSeq.unsafe_col(t)); } // Normalize probability. scales[t] = accu(forwardProb.col(t)); if (scales[t] > 0.0) forwardProb.col(t) /= scales[t]; } } template void HMM::Backward(const arma::mat& dataSeq, const arma::vec& scales, arma::mat& backwardProb) const { // Our goal is to calculate the backward probabilities: // P(X_k | o_{k + 1:T}) for all possible states X_k, for each time point k. backwardProb.zeros(transition.n_rows, dataSeq.n_cols); // The last element probability is 1. backwardProb.col(dataSeq.n_cols - 1).fill(1); // Now step backwards through all other observations. for (size_t t = dataSeq.n_cols - 2; t + 1 > 0; t--) { for (size_t j = 0; j < transition.n_rows; j++) { // The backward probability of state j at time t is the sum over all state // of the probability of the next state having been a transition from the // current state multiplied by the probability of each of those states // emitting the given observation. for (size_t state = 0; state < transition.n_rows; state++) backwardProb(j, t) += transition(state, j) * backwardProb(state, t + 1) * emission[state].Probability(dataSeq.unsafe_col(t + 1)); // Normalize by the weights from the forward algorithm. if (scales[t + 1] > 0.0) backwardProb(j, t) /= scales[t + 1]; } } } //! Serialize the HMM. 
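// (Note, not part of the original source: each emission distribution below is
// saved under its own name -- "emission0", "emission1", ... -- rather than
// through std::vector's default serializer, so that Serialize() is invoked on
// every distribution object.)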
template<typename Distribution>
template<typename Archive>
void HMM<Distribution>::Serialize(Archive& ar,
                                  const unsigned int /* version */)
{
  ar & data::CreateNVP(dimensionality, "dimensionality");
  ar & data::CreateNVP(tolerance, "tolerance");
  ar & data::CreateNVP(transition, "transition");
  ar & data::CreateNVP(initial, "initial");

  // Now serialize each emission.  If we are loading, we must resize the
  // vector of emissions correctly.
  if (Archive::is_loading::value)
    emission.resize(transition.n_rows);

  // Load the emissions; generate the correct name for each one.
  for (size_t i = 0; i < emission.size(); ++i)
  {
    std::ostringstream oss;
    oss << "emission" << i;
    ar & data::CreateNVP(emission[i], oss.str());
  }
}

} // namespace hmm
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/hmm/hmm_loglik_main.cpp
/**
 * @file hmm_loglik_main.cpp
 * @author Ryan Curtin
 *
 * Compute the log-likelihood of a given sequence for a given HMM.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include
#include

#include "hmm.hpp"
#include "hmm_util.hpp"

#include <mlpack/methods/gmm/gmm.hpp>

PROGRAM_INFO("Hidden Markov Model (HMM) Sequence Log-Likelihood", "This "
    "utility takes an already-trained HMM (--model_file) and evaluates the "
    "log-likelihood of a given sequence of observations (--input_file).  The "
    "computed log-likelihood is given directly to stdout.");

PARAM_STRING_IN_REQ("input_file", "File containing observations.", "i");
PARAM_STRING_IN_REQ("model_file", "File containing HMM.", "m");

PARAM_DOUBLE_OUT("log_likelihood", "Log-likelihood of the sequence.");

using namespace mlpack;
using namespace mlpack::hmm;
using namespace mlpack::distribution;
using namespace mlpack::util;
using namespace mlpack::gmm;
using namespace arma;
using namespace std;

// Because we don't know what the type of our HMM is, we need to write a
// function that can take arbitrary HMM types.
struct Loglik
{
  template<typename HMMType>
  static void Apply(HMMType& hmm, void* /* extraInfo */)
  {
    // Load the data sequence.
    const string inputFile = CLI::GetParam<string>("input_file");
    mat dataSeq;
    data::Load(inputFile, dataSeq, true);

    // Detect if we need to transpose the data, in the case where the input
    // data has one dimension.
    if ((dataSeq.n_cols == 1) && (hmm.Emission()[0].Dimensionality() == 1))
    {
      Log::Info << "Data sequence appears to be transposed; correcting."
          << endl;
      dataSeq = dataSeq.t();
    }

    if (dataSeq.n_rows != hmm.Emission()[0].Dimensionality())
      Log::Fatal << "Dimensionality of sequence (" << dataSeq.n_rows << ") is "
          << "not equal to the dimensionality of the HMM ("
          << hmm.Emission()[0].Dimensionality() << ")!" << endl;

    const double loglik = hmm.LogLikelihood(dataSeq);

    CLI::GetParam<double>("log_likelihood") = loglik;
  }
};

int main(int argc, char** argv)
{
  // Parse command line options.
  CLI::ParseCommandLine(argc, argv);

  // Load model, and calculate the log-likelihood of the sequence.
  const string modelFile = CLI::GetParam<string>("model_file");
  LoadHMMAndPerformAction<Loglik>(modelFile);
}
mlpack-2.2.5/src/mlpack/methods/hmm/hmm_regression.hpp
/**
 * @file hmm_regression.hpp
 * @author Michael Fox
 *
 * Definition of HMMRegression class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HMM_HMM_REGRESSION_HPP #define MLPACK_METHODS_HMM_HMM_REGRESSION_HPP #include #include #include "hmm.hpp" namespace mlpack { namespace hmm /** Hidden Markov Models. */ { /** * A class that represents a Hidden Markov Model Regression (HMMR). HMMR is an * extension of Hidden Markov Models to regression analysis. The method is * described in (Fridman, 1993) * https://www.ima.umn.edu/preprints/January1994/1195.pdf * An HMMR is a linear regression model whose coefficients are determined by a * finite-state Markov chain. The error terms are conditionally independently * normally distributed with zero mean and state-dependent variance. Let Q_t be * a finite-state Markov chain, X_t a vector of predictors and Y_t a response. * The HMMR is * Y_t = X_t \beta_{Q_t} + \sigma_{Q_t} \epsilon_t * * This HMMR class supports training (supervised and unsupervised), prediction * of state sequences via the Viterbi algorithm, estimation of state * probabilities, filtering and smoothing of responses, and calculation of the * log-likelihood of a given sequence. * * Usage of the HMMR class generally involves either training an HMMR or loading * an already-known HMMR and using to filter a sequence. * Example code for supervised training of an HMMR is given below. * * @code * // Each column is a vector of predictors for a single observation. * arma::mat predictors(5, 100, arma::fill::randn); * // Responses for each observation * arma::vec responses(100, arma::fill::randn); * * // Create an untrained HMMR with 3 hidden states * RegressionDistribution rd(predictors, responses); * arma::mat transition("0.5 0.5;" "0.5 0.5;"); * std::vector emissions(2,rd); * HMMRegression hmmr("0.9 0.1", transition, emissions); * * // Train the HMM (supply a state sequence to perform supervised training) * std::vector predictorsSeq(1, predictors); * std::vector< arma::vec> responsesSeq(1, responses); * hmmr.Train(predictorsSeq, responsesSeq); * hmm.Train(observations, states); * @endcode * * Once initialized, the HMMR can evaluate the probability of a certain sequence * (with LogLikelihood()), predict the most likely sequence of hidden states * (with Predict()), estimate the probabilities of each state for a sequence of * observations (with Estimate()), or perform filtering or smoothing of * observations. * */ class HMMRegression : public HMM { public: /** * Create the Hidden Markov Model Regression with the given number of hidden * states and the given default regression emission. The dimensionality of the * observations is taken from the emissions variable, so it is important that * the given default emission distribution is set with the correct * dimensionality. Alternately, set the dimensionality with Dimensionality(). * Optionally, the tolerance for convergence of the Baum-Welch algorithm can * be set. * * By default, the transition matrix and initial probability vector are set to * contain equal probability for each state. * * @param states Number of states. * @param emissions Default distribution for emissions. * @param tolerance Tolerance for convergence of training algorithm * (Baum-Welch). 
*/ HMMRegression(const size_t states, const distribution::RegressionDistribution emissions, const double tolerance = 1e-5) : HMM(states, emissions, tolerance) { /* nothing to do */ } /** * Create the Hidden Markov Model Regression with the given initial * probability vector, the given transition matrix, and the given regression * emission distributions. The dimensionality of the observations of the HMMR * are taken from the given emission distributions. Alternately, the * dimensionality can be set with Dimensionality(). * * The initial state probability vector should have length equal to the number * of states, and each entry represents the probability of being in the given * state at time T = 0 (the beginning of a sequence). * * The transition matrix should be such that T(i, j) is the probability of * transition to state i from state j. The columns of the matrix should sum * to 1. * * Optionally, the tolerance for convergence of the Baum-Welch algorithm can * be set. * * @param initial Initial state probabilities. * @param transition Transition matrix. * @param emission Emission distributions. * @param tolerance Tolerance for convergence of training algorithm * (Baum-Welch). */ HMMRegression(const arma::vec& initial, const arma::mat& transition, const std::vector& emission, const double tolerance = 1e-5) : HMM(initial, transition, emission, tolerance) { /* nothing to do */ } /** * Train the model using the Baum-Welch algorithm, with only the given * predictors and responses. Instead of giving a guess transition and emission * here, do that in the constructor. Each matrix in the vector of predictors * corresponds to an individual data sequence, and likewise for each vec in * the vector of responses. The number of rows in each matrix of predictors * plus one should be equal to the dimensionality of the HMM (which is set in * the constructor). * * It is preferable to use the other overload of Train(), with labeled data. * That will produce much better results. However, if labeled data is * unavailable, this will work. In addition, it is possible to use Train() * with labeled data first, and then continue to train the model using this * overload of Train() with unlabeled data. * * The tolerance of the Baum-Welch algorithm can be set either in the * constructor or with the Tolerance() method. When the change in * log-likelihood of the model between iterations is less than the tolerance, * the Baum-Welch algorithm terminates. * * @note * Train() can be called multiple times with different sequences; each time it * is called, it uses the current parameters of the HMM as a starting point * for training. * @endnote * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences. */ void Train(const std::vector& predictors, const std::vector& responses); /** * Train the model using the given labeled observations; the transition and * regression emissions are directly estimated. Each matrix in the vector of * predictors corresponds to an individual data sequence, and likewise for * each vec in the vector of responses. The number of rows in each matrix of * predictors plus one should be equal to the dimensionality of the HMM * (which is set in the constructor). * * @note * Train() can be called multiple times with different sequences; each time it * is called, it uses the current parameters of the HMMR as a starting point * for training. * @endnote * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences. 
* @param stateSeq Vector of state sequences, corresponding to each * observation. */ void Train(const std::vector& predictors, const std::vector& responses, const std::vector >& stateSeq); /** * Estimate the probabilities of each hidden state at each time step for each * given data observation, using the Forward-Backward algorithm. Each matrix * which is returned has columns equal to the number of data observations, and * rows equal to the number of hidden states in the model. The log-likelihood * of the most probable sequence is returned. * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences. * @param stateProb Matrix in which the probabilities of each state at each * time interval will be stored. * @param forwardProb Matrix in which the forward probabilities of each state * at each time interval will be stored. * @param backwardProb Matrix in which the backward probabilities of each * state at each time interval will be stored. * @param scales Vector in which the scaling factors at each time interval * will be stored. * @return Log-likelihood of most likely state sequence. */ double Estimate(const arma::mat& predictors, const arma::vec& responses, arma::mat& stateProb, arma::mat& forwardProb, arma::mat& backwardProb, arma::vec& scales) const; /** * Estimate the probabilities of each hidden state at each time step of each * given data observation, using the Forward-Backward algorithm. The returned * matrix of state probabilities has columns equal to the number of data * observations, and rows equal to the number of hidden states in the model. * The log-likelihood of the most probable sequence is returned. * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences. * @param stateProb Probabilities of each state at each time interval. * @return Log-likelihood of most likely state sequence. */ double Estimate(const arma::mat& predictors, const arma::vec& responses, arma::mat& stateProb) const; /** * Compute the most probable hidden state sequence for the given predictors * and responses, using the Viterbi algorithm, returning the log-likelihood of * the most likely state sequence. * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences. * @param stateSeq Vector in which the most probable state sequence will be * stored. * @return Log-likelihood of most probable state sequence. */ double Predict(const arma::mat& predictors, const arma::vec& responses, arma::Row& stateSeq) const; /** * Compute the log-likelihood of the given predictors and responses. * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences. * @return Log-likelihood of the given sequence. */ double LogLikelihood(const arma::mat& predictors, const arma::vec& responses) const; /** * HMMR filtering. Computes the k-step-ahead expected response at each time * conditioned only on prior observations. That is * E{ Y[t+k] | Y[0], ..., Y[t] }. * The returned matrix has columns equal to the number of observations. Note * that the expectation may not be meaningful for discrete emissions. * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences. * @param initial Distribution of initial state. * @param ahead Number of steps ahead (k) for expectations. * @param filterSeq Vector in which the expected emission sequence will be * stored. 
*/ void Filter(const arma::mat& predictors, const arma::vec& responses, arma::vec& filterSeq, size_t ahead = 0) const; /** * HMM smoothing. Computes expected emission at each time conditioned on all * observations. That is * E{ Y[t] | Y[0], ..., Y[T] }. * The returned matrix has columns equal to the number of observations. Note * that the expectation may not be meaningful for discrete emissions. * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences.. * @param initial Distribution of initial state. * @param smoothSeq Vector in which the expected emission sequence will be * stored. */ void Smooth(const arma::mat& predictors, const arma::vec& responses, arma::vec& smoothSeq) const; private: /** * Utility functions to facilitate the use of the HMM class for HMMR. */ void StackData(const std::vector& predictors, const std::vector& responses, std::vector& dataSeq) const; void StackData(const arma::mat& predictors, const arma::vec& responses, arma::mat& dataSeq) const; /** * The Forward algorithm (part of the Forward-Backward algorithm). Computes * forward probabilities for each state for each observation in the given data * sequence. The returned matrix has rows equal to the number of hidden * states and columns equal to the number of observations. * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences. * @param scales Vector in which scaling factors will be saved. * @param forwardProb Matrix in which forward probabilities will be saved. */ void Forward(const arma::mat& predictors, const arma::vec& responses, arma::vec& scales, arma::mat& forwardProb) const; /** * The Backward algorithm (part of the Forward-Backward algorithm). Computes * backward probabilities for each state for each observation in the given * data sequence, using the scaling factors found (presumably) by Forward(). * The returned matrix has rows equal to the number of hidden states and * columns equal to the number of observations. * * @param predictors Vector of predictor sequences. * @param responses Vector of response sequences. * @param scales Vector of scaling factors. * @param backwardProb Matrix in which backward probabilities will be saved. */ void Backward(const arma::mat& predictors, const arma::vec& responses, const arma::vec& scales, arma::mat& backwardProb) const; }; } // namespace hmm } // namespace mlpack // Include implementation. #include "hmm_regression_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/hmm/hmm_regression_impl.hpp000066400000000000000000000135151315013601400240230ustar00rootroot00000000000000/** * @file hmm_regression_impl.hpp * @author Ryan Curtin * @author Tran Quoc Long * @author Michael Fox * * Implementation of HMMRegression class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HMM_HMM_REGRESSION_IMPL_HPP #define MLPACK_METHODS_HMM_HMM_REGRESSION_IMPL_HPP // Just in case... 
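
// Example usage of HMMRegression (an editor's sketch, not part of the mlpack
// sources; the emission distribution and the data are assumed):
//
//   // An HMMR with 2 hidden states; emissionDist is assumed to be a
//   // distribution::RegressionDistribution over 3-dimensional predictors,
//   // so the stacked observations are (3 + 1)-dimensional.
//   HMMRegression hmmr(2, emissionDist);
//
//   std::vector<arma::mat> predictors; // Each matrix is 3 x T_i.
//   std::vector<arma::vec> responses;  // Each vector has length T_i.
//   // ... fill predictors and responses ...
//   hmmr.Train(predictors, responses);
//
//   arma::Row<size_t> stateSeq;
//   const double logLik = hmmr.Predict(predictors[0], responses[0], stateSeq);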
#include "hmm_regression.hpp" namespace mlpack { namespace hmm { void HMMRegression::Train(const std::vector& predictors, const std::vector& responses) { std::vector dataSeq; StackData(predictors, responses, dataSeq); this->HMM::Train(dataSeq); } void HMMRegression::Train(const std::vector& predictors, const std::vector& responses, const std::vector >& stateSeq) { std::vector dataSeq; StackData(predictors, responses, dataSeq); this->HMM::Train(dataSeq, stateSeq); } /** * Estimate the probabilities of each hidden state at each time step for each * given data observation. */ double HMMRegression::Estimate(const arma::mat& predictors, const arma::vec& responses, arma::mat& stateProb, arma::mat& forwardProb, arma::mat& backwardProb, arma::vec& scales) const { arma::mat dataSeq; StackData(predictors, responses, dataSeq); return this->HMM::Estimate(dataSeq, stateProb, forwardProb, backwardProb, scales); } /** * Estimate the probabilities of each hidden state at each time step for each * given data observation. */ double HMMRegression::Estimate(const arma::mat& predictors, const arma::vec& responses, arma::mat& stateProb) const { arma::mat dataSeq; StackData(predictors, responses, dataSeq); return this->HMM::Estimate(dataSeq, stateProb); } /** * Compute the most probable hidden state sequence for the given observation * using the Viterbi algorithm. Returns the log-likelihood of the most likely * sequence. */ double HMMRegression::Predict(const arma::mat& predictors, const arma::vec& responses, arma::Row& stateSeq) const { arma::mat dataSeq; StackData(predictors, responses, dataSeq); return this->HMM::Predict(dataSeq, stateSeq); } /** * Compute the log-likelihood of the given data sequence. */ double HMMRegression::LogLikelihood(const arma::mat& predictors, const arma::vec& responses) const { arma::mat dataSeq; StackData(predictors, responses, dataSeq); return this->HMM::LogLikelihood(dataSeq); } /** * HMMRegression filtering. */ void HMMRegression::Filter(const arma::mat& predictors, const arma::vec& responses, arma::vec& filterSeq, size_t ahead) const { // First run the forward algorithm arma::mat forwardProb; arma::vec scales; Forward(predictors, responses, scales, forwardProb); // Propagate state, predictors ahead if (ahead != 0) { forwardProb = pow(transition, ahead)*forwardProb; forwardProb = forwardProb.cols(0, forwardProb.n_cols-ahead-1); } // Compute expected emissions. filterSeq.resize(responses.n_elem - ahead); filterSeq.zeros(); arma::vec nextSeq; for(size_t i = 0; i < emission.size(); i++) { emission[i].Predict(predictors.cols(ahead, predictors.n_cols-1), nextSeq); filterSeq = filterSeq + nextSeq%(forwardProb.row(i).t()); } } /** * HMM smoothing. */ void HMMRegression::Smooth(const arma::mat& predictors, const arma::vec& responses, arma::vec& smoothSeq) const { // First run the forward algorithm arma::mat stateProb; Estimate(predictors, responses, stateProb); // Compute expected emissions. smoothSeq.resize(responses.n_elem); smoothSeq.zeros(); arma::vec nextSeq; for(size_t i = 0; i < emission.size(); i++) { emission[i].Predict(predictors, nextSeq); smoothSeq = smoothSeq + nextSeq%(stateProb.row(i).t()); } } /** * The Forward procedure (part of the Forward-Backward algorithm). 
*/ void HMMRegression::Forward(const arma::mat& predictors, const arma::vec& responses, arma::vec& scales, arma::mat& forwardProb) const { arma::mat dataSeq; StackData(predictors, responses, dataSeq); this->HMM::Forward(dataSeq, scales, forwardProb); } void HMMRegression::Backward(const arma::mat& predictors, const arma::vec& responses, const arma::vec& scales, arma::mat& backwardProb) const { arma::mat dataSeq; StackData(predictors, responses, dataSeq); this->HMM::Backward(dataSeq, scales, backwardProb); } void HMMRegression::StackData(const std::vector& predictors, const std::vector& responses, std::vector& dataSeq) const { arma::mat nextSeq; for(size_t i = 0; i < predictors.size(); i++) { nextSeq = predictors[i]; nextSeq.insert_rows(0, responses[i].t()); dataSeq.push_back(nextSeq); } } void HMMRegression::StackData(const arma::mat& predictors, const arma::vec& responses, arma::mat& dataSeq) const { dataSeq = predictors; dataSeq.insert_rows(0, responses.t()); } } // namespace hmm } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hmm/hmm_train_main.cpp000066400000000000000000000472721315013601400227450ustar00rootroot00000000000000/** * @file hmm_train_main.cpp * @author Ryan Curtin * * Executable which trains an HMM and saves the trained HMM to file. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "hmm.hpp" #include "hmm_util.hpp" #include PROGRAM_INFO("Hidden Markov Model (HMM) Training", "This program allows a " "Hidden Markov Model to be trained on labeled or unlabeled data. It " "support three types of HMMs: discrete HMMs, Gaussian HMMs, or GMM HMMs." "\n\n" "Either one input sequence can be specified (with --input_file), or, a " "file containing files in which input sequences can be found (when " "--input_file and --batch are used together). In addition, labels can be " "provided in the file specified by --labels_file, and if --batch is used, " "the file given to --labels_file should contain a list of files of labels " "corresponding to the sequences in the file given to --input_file." "\n\n" "The HMM is trained with the Baum-Welch algorithm if no labels are " "provided. The tolerance of the Baum-Welch algorithm can be set with the " "--tolerance option. By default, the transition matrix is randomly " "initialized and the emission distributions are initialized to fit the " "extent of the data." 
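"\n\n"
"For example, supposing that obs.csv holds a single observation sequence, a "
"3-state Gaussian HMM could be trained and saved to hmm.xml with a command "
"like:"
"\n\n"
"  $ mlpack_hmm_train -i obs.csv -t gaussian -n 3 -o hmm.xml"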
"\n\n" "Optionally, a pre-created HMM model can be used as a guess for the " "transition matrix and emission probabilities; this is specifiable with " "--model_file."); PARAM_STRING_IN_REQ("input_file", "File containing input observations.", "i"); PARAM_STRING_IN_REQ("type", "Type of HMM: discrete | gaussian | gmm.", "t"); PARAM_FLAG("batch", "If true, input_file (and if passed, labels_file) are " "expected to contain a list of files to use as input observation sequences " "(and label sequences).", "b"); PARAM_INT_IN("states", "Number of hidden states in HMM (necessary, unless " "model_file is specified).", "n", 0); PARAM_INT_IN("gaussians", "Number of gaussians in each GMM (necessary when type" " is 'gmm').", "g", 0); PARAM_STRING_IN("model_file", "Pre-existing HMM model file.", "m", ""); PARAM_STRING_IN("labels_file", "Optional file of hidden states, used for " "labeled training.", "l", ""); PARAM_STRING_OUT("output_model_file", "File to save trained HMM to.", "o"); PARAM_INT_IN("seed", "Random seed. If 0, 'std::time(NULL)' is used.", "s", 0); PARAM_DOUBLE_IN("tolerance", "Tolerance of the Baum-Welch algorithm.", "T", 1e-5); using namespace mlpack; using namespace mlpack::hmm; using namespace mlpack::distribution; using namespace mlpack::util; using namespace mlpack::gmm; using namespace mlpack::math; using namespace arma; using namespace std; // Because we don't know what the type of our HMM is, we need to write a // function that can take arbitrary HMM types. struct Init { template static void Apply(HMMType& hmm, vector* trainSeq) { const size_t states = CLI::GetParam("states"); const double tolerance = CLI::GetParam("tolerance"); // Create the initialized-to-zero model. Create(hmm, *trainSeq, states, tolerance); // Initializing the emission distribution depends on the distribution. // Therefore we have to use the helper functions. RandomInitialize(hmm.Emission()); } //! Helper function to create discrete HMM. static void Create(HMM& hmm, vector& trainSeq, size_t states, double tolerance) { // Maximum observation is necessary so we know how to train the discrete // distribution. arma::Col maxEmissions(trainSeq[0].n_rows); maxEmissions.zeros(); for (vector::iterator it = trainSeq.begin(); it != trainSeq.end(); ++it) { arma::Col maxSeqs = arma::conv_to>::from(arma::max(*it, 1)) + 1; maxEmissions = arma::max(maxEmissions, maxSeqs); } hmm = HMM(size_t(states), DiscreteDistribution(maxEmissions), tolerance); } //! Helper function to create Gaussian HMM. static void Create(HMM& hmm, vector& trainSeq, size_t states, double tolerance) { // Find dimension of the data. const size_t dimensionality = trainSeq[0].n_rows; // Verify dimensionality of data. for (size_t i = 0; i < trainSeq.size(); ++i) if (trainSeq[i].n_rows != dimensionality) Log::Fatal << "Observation sequence " << i << " dimensionality (" << trainSeq[i].n_rows << " is incorrect (should be " << dimensionality << ")!" << endl; // Get the model and initialize it. hmm = HMM(size_t(states), GaussianDistribution(dimensionality), tolerance); } //! Helper function to create GMM HMM. static void Create(HMM& hmm, vector& trainSeq, size_t states, double tolerance) { // Find dimension of the data. const size_t dimensionality = trainSeq[0].n_rows; const int gaussians = CLI::GetParam("gaussians"); if (gaussians == 0) Log::Fatal << "Number of gaussians for each GMM must be specified (-g) " << "when type = 'gmm'!" << endl; if (gaussians < 0) Log::Fatal << "Invalid number of gaussians (" << gaussians << "); must " << "be greater than or equal to 1." 
<< endl; // Create HMM object. hmm = HMM(size_t(states), GMM(size_t(gaussians), dimensionality), tolerance); // Issue a warning if the user didn't give labels. if (!CLI::HasParam("labels_file")) Log::Warn << "Unlabeled training of GMM HMMs is almost certainly not " << "going to produce good results!" << endl; } //! Helper function for discrete emission distributions. static void RandomInitialize(vector& e) { for (size_t i = 0; i < e.size(); ++i) { e[i].Probabilities().randu(); e[i].Probabilities() /= arma::accu(e[i].Probabilities()); } } //! Helper function for Gaussian emission distributions. static void RandomInitialize(vector& e) { for (size_t i = 0; i < e.size(); ++i) { const size_t dimensionality = e[i].Mean().n_rows; e[i].Mean().randu(); // Generate random covariance. arma::mat r = arma::randu(dimensionality, dimensionality); e[i].Covariance(r * r.t()); } } //! Helper function for GMM emission distributions. static void RandomInitialize(vector& e) { for (size_t i = 0; i < e.size(); ++i) { // Random weights. e[i].Weights().randu(); e[i].Weights() /= arma::accu(e[i].Weights()); // Random means and covariances. for (int g = 0; g < CLI::GetParam("gaussians"); ++g) { const size_t dimensionality = e[i].Component(g).Mean().n_rows; e[i].Component(g).Mean().randu(); // Generate random covariance. arma::mat r = arma::randu(dimensionality, dimensionality); e[i].Component(g).Covariance(r * r.t()); } } } }; // Because we don't know what the type of our HMM is, we need to write a // function that can take arbitrary HMM types. struct Train { template static void Apply(HMMType& hmm, vector* trainSeqPtr) { const bool batch = CLI::HasParam("batch"); const double tolerance = CLI::GetParam("tolerance"); // Do we need to replace the tolerance? if (CLI::HasParam("tolerance")) hmm.Tolerance() = tolerance; const string labelsFile = CLI::GetParam("labels_file"); // Verify that the dimensionality of our observations is the same as the // dimensionality of our HMM's emissions. vector& trainSeq = *trainSeqPtr; for (size_t i = 0; i < trainSeq.size(); ++i) if (trainSeq[i].n_rows != hmm.Emission()[0].Dimensionality()) Log::Fatal << "Dimensionality of training sequence " << i << " (" << trainSeq[i].n_rows << ") is not equal to the dimensionality of " << "the HMM (" << hmm.Emission()[0].Dimensionality() << ")!" << endl; vector> labelSeq; // May be empty. if (CLI::HasParam("labels_file")) { // Do we have multiple label files to load? char lineBuf[1024]; if (batch) { fstream f(labelsFile); if (!f.is_open()) Log::Fatal << "Could not open '" << labelsFile << "' for reading." << endl; // Now read each line in. f.getline(lineBuf, 1024, '\n'); while (!f.eof()) { Log::Info << "Adding training sequence labels from '" << lineBuf << "'." << endl; // Now read the matrix. Mat label; data::Load(lineBuf, label, true); // Fatal on failure. // Ensure that matrix only has one row. if (label.n_cols == 1) label = trans(label); if (label.n_rows > 1) Log::Fatal << "Invalid labels; must be one-dimensional." << endl; // Check all of the labels. for (size_t i = 0; i < label.n_cols; ++i) { if (label[i] >= hmm.Transition().n_cols) { Log::Fatal << "HMM has " << hmm.Transition().n_cols << " hidden " << "states, but label on line " << i << " of '" << lineBuf << "' is " << label[i] << " (should be between 0 and " << (hmm.Transition().n_cols - 1) << ")!" << endl; } } labelSeq.push_back(label.row(0)); f.getline(lineBuf, 1024, '\n'); } f.close(); } else { Mat label; data::Load(labelsFile, label, true); // Ensure that matrix only has one row. 
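      // (A label file may be saved as either a single row or a single column;
      // a column is transposed into the expected single-row format below.)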
if (label.n_cols == 1) label = trans(label); if (label.n_rows > 1) Log::Fatal << "Invalid labels; must be one-dimensional." << endl; // Verify the same number of observations as the data. if (label.n_elem != trainSeq[labelSeq.size()].n_cols) Log::Fatal << "Label sequence " << labelSeq.size() << " does not have" << " the same number of points as observation sequence " << labelSeq.size() << "!" << endl; // Check all of the labels. for (size_t i = 0; i < label.n_cols; ++i) { if (label[i] >= hmm.Transition().n_cols) { Log::Fatal << "HMM has " << hmm.Transition().n_cols << " hidden " << "states, but label on line " << i << " of '" << labelsFile << "' is " << label[i] << " (should be between 0 and " << (hmm.Transition().n_cols - 1) << ")!" << endl; } } labelSeq.push_back(label.row(0)); } // Now perform the training with labels. hmm.Train(trainSeq, labelSeq); } else { // Perform unsupervised training. hmm.Train(trainSeq); } // Save the model. if (CLI::HasParam("output_model_file")) { const string modelFile = CLI::GetParam("output_model_file"); SaveHMM(hmm, modelFile); } } }; int main(int argc, char** argv) { // Parse command line options. CLI::ParseCommandLine(argc, argv); // Set random seed. if (CLI::GetParam("seed") != 0) RandomSeed((size_t) CLI::GetParam("seed")); else RandomSeed((size_t) time(NULL)); // Validate parameters. const string modelFile = CLI::GetParam("model_file"); const string inputFile = CLI::GetParam("input_file"); const string type = CLI::GetParam("type"); const size_t states = CLI::GetParam("states"); const double tolerance = CLI::GetParam("tolerance"); const bool batch = CLI::HasParam("batch"); // Verify that either a model or a type was given. if (modelFile == "" && type == "") Log::Fatal << "No model file specified and no HMM type given! At least " << "one is required." << endl; // If no model is specified, make sure we are training with valid parameters. if (modelFile == "") { // Validate number of states. if (states == 0) Log::Fatal << "Must specify number of states if model file is not " << "specified!" << endl; } if (modelFile != "" && CLI::HasParam("tolerance")) Log::Info << "Tolerance of existing model in '" << modelFile << "' will be " << "replaced with specified tolerance of " << tolerance << "." << endl; // Load the input data. vector trainSeq; if (batch) { // The input file contains a list of files to read. Log::Info << "Reading list of training sequences from '" << inputFile << "'." << endl; fstream f(inputFile.c_str(), ios_base::in); if (!f.is_open()) Log::Fatal << "Could not open '" << inputFile << "' for reading." << endl; // Now read each line in. char lineBuf[1024]; // Max 1024 characters... hopefully long enough. f.getline(lineBuf, 1024, '\n'); while (!f.eof()) { Log::Info << "Adding training sequence from '" << lineBuf << "'." << endl; // Now read the matrix. trainSeq.push_back(mat()); data::Load(lineBuf, trainSeq.back(), true); // Fatal on failure. // See if we need to transpose the data. if (type == "discrete") { if (trainSeq.back().n_cols == 1) trainSeq.back() = trans(trainSeq.back()); } f.getline(lineBuf, 1024, '\n'); } f.close(); } else { // Only one input file. trainSeq.resize(1); data::Load(inputFile, trainSeq[0], true); } // If we have a model file, we can autodetect the type. if (CLI::HasParam("model_file")) { LoadHMMAndPerformAction(modelFile, &trainSeq); } else { // We need to read in the type and build the HMM by hand. 
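    // (Each branch below creates an HMM whose emission distribution matches
    // --type, initializes the emissions from the full dataset, and then calls
    // Train::Apply() to run labeled training or Baum-Welch.)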
const string type = CLI::GetParam("type"); if (type == "discrete") { // Maximum observation is necessary so we know how to train the discrete // distribution. size_t maxEmission = 0; for (vector::iterator it = trainSeq.begin(); it != trainSeq.end(); ++it) { size_t maxSeq = size_t(as_scalar(max(trainSeq[0], 1))) + 1; if (maxSeq > maxEmission) maxEmission = maxSeq; } Log::Info << maxEmission << " discrete observations in the input data." << endl; // Create HMM object. HMM hmm(size_t(states), DiscreteDistribution(maxEmission), tolerance); // Initialize emissions using the distribution of the full data. DiscreteDistribution sampleEmission; if (trainSeq.size() > 1) { // Flatten data matrix for training of an emission distribution. This // is not efficient! size_t totalCols = 0; for (size_t i = 0; i < trainSeq.size(); ++i) totalCols += trainSeq[i].n_cols; arma::mat flatData(trainSeq[0].n_rows, totalCols); size_t currentCol = 0; for (size_t i = 0; i < trainSeq.size(); ++i) { flatData.cols(currentCol, currentCol + trainSeq[i].n_cols - 1) = trainSeq[i]; currentCol += trainSeq[i].n_cols; } sampleEmission.Train(flatData); } else { sampleEmission.Train(trainSeq[0]); } // Apply initialized emissions. for (size_t e = 0; e < hmm.Transition().n_cols; ++e) hmm.Emission()[e] = sampleEmission; // Now train it. Pass the already-loaded training data. Train::Apply(hmm, &trainSeq); } else if (type == "gaussian") { // Find dimension of the data. const size_t dimensionality = trainSeq[0].n_rows; // Verify dimensionality of data. for (size_t i = 0; i < trainSeq.size(); ++i) if (trainSeq[i].n_rows != dimensionality) Log::Fatal << "Observation sequence " << i << " dimensionality (" << trainSeq[i].n_rows << " is incorrect (should be " << dimensionality << ")!" << endl; HMM hmm(size_t(states), GaussianDistribution(dimensionality), tolerance); // Initialize emissions using the distribution of the full data. GaussianDistribution sampleEmission; if (trainSeq.size() > 1) { // Flatten data matrix for training of an emission distribution. This // is not efficient! size_t totalCols = 0; for (size_t i = 0; i < trainSeq.size(); ++i) totalCols += trainSeq[i].n_cols; arma::mat flatData(trainSeq[0].n_rows, totalCols); size_t currentCol = 0; for (size_t i = 0; i < trainSeq.size(); ++i) { flatData.cols(currentCol, currentCol + trainSeq[i].n_cols - 1) = trainSeq[i]; currentCol += trainSeq[i].n_cols; } sampleEmission.Train(flatData); } else { sampleEmission.Train(trainSeq[0]); } // Set all emissions to the initialized emission. for (size_t e = 0; e < hmm.Transition().n_cols; ++e) hmm.Emission()[e] = sampleEmission; // Now train it. Train::Apply(hmm, &trainSeq); } else if (type == "gmm") { // Find dimension of the data. const size_t dimensionality = trainSeq[0].n_rows; const int gaussians = CLI::GetParam("gaussians"); if (gaussians == 0) Log::Fatal << "Number of gaussians for each GMM must be specified (-g) " << "when type = 'gmm'!" << endl; if (gaussians < 0) Log::Fatal << "Invalid number of gaussians (" << gaussians << "); must " << "be greater than or equal to 1." << endl; // Create HMM object. HMM hmm(size_t(states), GMM(size_t(gaussians), dimensionality), tolerance); // Initialize emissions using the distribution of the full data. // Super-simple emission training: we don't want it to take long at all. GMM sampleEmission; EMFit<> fitter(1, 0.01); // Only one iteration of EM GMM training. if (trainSeq.size() > 1) { // Flatten data matrix for training of an emission distribution. This // is not efficient! 
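      // (For example, sequences of sizes d x 100 and d x 50 are copied into a
      // single d x 150 matrix before the emission distribution is trained.)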
          size_t totalCols = 0;
          for (size_t i = 0; i < trainSeq.size(); ++i)
            totalCols += trainSeq[i].n_cols;
          arma::mat flatData(trainSeq[0].n_rows, totalCols);
          size_t currentCol = 0;
          for (size_t i = 0; i < trainSeq.size(); ++i)
          {
            flatData.cols(currentCol, currentCol + trainSeq[i].n_cols - 1) =
                trainSeq[i];
            currentCol += trainSeq[i].n_cols;
          }
          sampleEmission.Train(flatData, 1, false, fitter);
        }
        else
        {
          sampleEmission.Train(trainSeq[0], 1, false, fitter);
        }

        // Set all emissions to the initialized emission.
        for (size_t e = 0; e < hmm.Transition().n_cols; ++e)
          hmm.Emission()[e] = sampleEmission;

        // Issue a warning if the user didn't give labels.
        if (!CLI::HasParam("labels_file"))
          Log::Warn << "Unlabeled training of GMM HMMs is almost certainly not "
              << "going to produce good results!" << endl;

        // Now train it.
        Train::Apply(hmm, &trainSeq);
      }
      else
      {
        Log::Fatal << "Unknown HMM type: " << type << "; must be 'discrete', "
            << "'gaussian', or 'gmm'." << endl;
      }
    }
  }

mlpack-2.2.5/src/mlpack/methods/hmm/hmm_util.hpp000066400000000000000000000023661315013601400216010ustar00rootroot00000000000000/**
 * @file hmm_util.hpp
 * @author Ryan Curtin
 *
 * Utility to read HMM type from file.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_HMM_HMM_UTIL_HPP
#define MLPACK_METHODS_HMM_HMM_UTIL_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace hmm {

//! HMMType, to be stored on disk. This is of type char, which is one byte.
//! (I'm not sure what will happen on systems where one byte is not eight
//! bits.)
enum HMMType : char
{
  DiscreteHMM = 0,
  GaussianHMM,
  GaussianMixtureModelHMM
};

//! ActionType should implement static void Apply(HMMType&).
template<typename ActionType, typename ExtraInfoType = void>
void LoadHMMAndPerformAction(const std::string& modelFile,
                             ExtraInfoType* x = NULL);

//! Save an HMM to a file. The file must also encode what type of HMM is being
//! stored.
template<typename HMMType>
void SaveHMM(HMMType& hmm, const std::string& modelFile);

} // namespace hmm
} // namespace mlpack

#include "hmm_util_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/methods/hmm/hmm_util_impl.hpp000066400000000000000000000120431315013601400226130ustar00rootroot00000000000000/**
 * @file hmm_util_impl.hpp
 * @author Ryan Curtin
 *
 * Implementation of HMM utilities to load arbitrary HMM types.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_HMM_HMM_UTIL_IMPL_HPP
#define MLPACK_METHODS_HMM_HMM_UTIL_IMPL_HPP

#include <mlpack/core.hpp>

#include <mlpack/methods/hmm/hmm.hpp>
#include <mlpack/methods/gmm/gmm.hpp>

namespace mlpack {
namespace hmm {

// Forward declarations of utility functions.

// Set up the archive for deserialization.
template<typename ActionType, typename ArchiveType, typename ExtraInfoType>
void LoadHMMAndPerformActionHelper(const std::string& modelFile,
                                   ExtraInfoType* x = NULL);

// Actually deserialize into the correct type.
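
// Example of using these utilities (an editor's sketch, not part of the
// mlpack sources).  An ActionType is any class exposing a static Apply()
// template, so loading a saved model and printing its number of states could
// look like:
//
//   struct PrintStates
//   {
//     template<typename HMMType>
//     static void Apply(HMMType& hmm, void* /* extraInfo */)
//     {
//       std::cout << hmm.Transition().n_rows << " states." << std::endl;
//     }
//   };
//
//   LoadHMMAndPerformAction<PrintStates>("hmm.xml");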
template<typename ActionType,
         typename ArchiveType,
         typename HMMType,
         typename ExtraInfoType>
void DeserializeHMMAndPerformAction(ArchiveType& ar, ExtraInfoType* x = NULL);

template<typename ActionType, typename ExtraInfoType>
void LoadHMMAndPerformAction(const std::string& modelFile, ExtraInfoType* x)
{
  using namespace boost::archive;

  const std::string extension = data::Extension(modelFile);
  if (extension == "xml")
    LoadHMMAndPerformActionHelper<ActionType, xml_iarchive>(modelFile, x);
  else if (extension == "bin")
    LoadHMMAndPerformActionHelper<ActionType, binary_iarchive>(modelFile, x);
  else if (extension == "txt")
    LoadHMMAndPerformActionHelper<ActionType, text_iarchive>(modelFile, x);
  else
    Log::Fatal << "Unknown extension '" << extension << "' for HMM model file "
        << "(known: 'xml', 'txt', 'bin')." << std::endl;
}

template<typename ActionType, typename ArchiveType, typename ExtraInfoType>
void LoadHMMAndPerformActionHelper(const std::string& modelFile,
                                   ExtraInfoType* x)
{
  std::ifstream ifs(modelFile);
  if (ifs.fail())
    Log::Fatal << "Cannot open model file '" << modelFile << "' for loading!"
        << std::endl;

  ArchiveType ar(ifs);

  // Read in the unsigned integer that denotes the type of the model.
  char type;
  ar >> data::CreateNVP(type, "hmm_type");

  using namespace mlpack::distribution;

  switch (type)
  {
    case HMMType::DiscreteHMM:
      DeserializeHMMAndPerformAction<ActionType, ArchiveType,
          HMM<DiscreteDistribution>>(ar, x);
      break;

    case HMMType::GaussianHMM:
      DeserializeHMMAndPerformAction<ActionType, ArchiveType,
          HMM<GaussianDistribution>>(ar, x);
      break;

    case HMMType::GaussianMixtureModelHMM:
      DeserializeHMMAndPerformAction<ActionType, ArchiveType,
          HMM<gmm::GMM>>(ar, x);
      break;

    default:
      Log::Fatal << "Unknown HMM type '" << (unsigned int) type << "'!"
          << std::endl;
  }
}

template<typename ActionType,
         typename ArchiveType,
         typename HMMType,
         typename ExtraInfoType>
void DeserializeHMMAndPerformAction(ArchiveType& ar, ExtraInfoType* x)
{
  // Extract the HMM and perform the action.
  HMMType hmm;
  ar >> data::CreateNVP(hmm, "hmm");
  ActionType::Apply(hmm, x);
}

// Helper function.
template<typename HMMType, typename ArchiveType>
void SaveHMMHelper(HMMType& hmm, const std::string& modelFile);

template<typename HMMType>
char GetHMMType();

template<typename HMMType>
void SaveHMM(HMMType& hmm, const std::string& modelFile)
{
  using namespace boost::archive;

  const std::string extension = data::Extension(modelFile);
  if (extension == "xml")
    SaveHMMHelper<HMMType, xml_oarchive>(hmm, modelFile);
  else if (extension == "bin")
    SaveHMMHelper<HMMType, binary_oarchive>(hmm, modelFile);
  else if (extension == "txt")
    SaveHMMHelper<HMMType, text_oarchive>(hmm, modelFile);
  else
    Log::Fatal << "Unknown extension '" << extension << "' for HMM model file."
        << std::endl;
}

template<typename HMMType, typename ArchiveType>
void SaveHMMHelper(HMMType& hmm, const std::string& modelFile)
{
  std::ofstream ofs(modelFile);
  if (ofs.fail())
    Log::Fatal << "Cannot open model file '" << modelFile << "' for saving!"
        << std::endl;

  ArchiveType ar(ofs);

  // Write out the unsigned integer that denotes the type of the model.
  char type = GetHMMType<HMMType>();
  if (type == char(-1))
    Log::Fatal << "Unknown HMM type given to SaveHMM()!" << std::endl;

  ar << data::CreateNVP(type, "hmm_type");
  ar << data::CreateNVP(hmm, "hmm");
}

// Utility functions to turn a type into something we can store.
template<typename HMMType>
char GetHMMType() { return char(-1); }

template<>
inline char GetHMMType<HMM<distribution::DiscreteDistribution>>()
{
  return HMMType::DiscreteHMM;
}

template<>
inline char GetHMMType<HMM<distribution::GaussianDistribution>>()
{
  return HMMType::GaussianHMM;
}

template<>
inline char GetHMMType<HMM<gmm::GMM>>()
{
  return HMMType::GaussianMixtureModelHMM;
}

} // namespace hmm
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/hmm/hmm_viterbi_main.cpp000066400000000000000000000057111315013601400232640ustar00rootroot00000000000000/**
 * @file hmm_viterbi_main.cpp
 * @author Ryan Curtin
 *
 * Compute the most probable hidden state sequence of a given observation
 * sequence for a given HMM.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>

#include "hmm.hpp"
#include "hmm_util.hpp"

#include <mlpack/methods/gmm/gmm.hpp>

PROGRAM_INFO("Hidden Markov Model (HMM) Viterbi State Prediction", "This "
    "utility takes an already-trained HMM (--model_file) and evaluates the "
    "most probable hidden state sequence of a given sequence of observations "
    "(--input_file), using the Viterbi algorithm. The computed state sequence "
    "is saved to the specified output file (--output_file).");

PARAM_STRING_IN_REQ("input_file", "File containing observations.", "i");
PARAM_STRING_IN_REQ("model_file", "File containing HMM.", "m");
PARAM_STRING_OUT("output_file", "File to save predicted state sequence to.",
    "o");

using namespace mlpack;
using namespace mlpack::hmm;
using namespace mlpack::distribution;
using namespace mlpack::util;
using namespace mlpack::gmm;
using namespace arma;
using namespace std;

// Because we don't know what the type of our HMM is, we need to write a
// function that can take arbitrary HMM types.
struct Viterbi
{
  template<typename HMMType>
  static void Apply(HMMType& hmm, void* /* extraInfo */)
  {
    // Load observations.
    const string inputFile = CLI::GetParam<string>("input_file");
    const string outputFile = CLI::GetParam<string>("output_file");

    mat dataSeq;
    data::Load(inputFile, dataSeq, true);

    // See if transposing the data could make it the right dimensionality.
    if ((dataSeq.n_cols == 1) && (hmm.Emission()[0].Dimensionality() == 1))
    {
      Log::Info << "Data sequence appears to be transposed; correcting."
          << endl;
      dataSeq = dataSeq.t();
    }

    // Verify correct dimensionality.
    if (dataSeq.n_rows != hmm.Emission()[0].Dimensionality())
      Log::Fatal << "Observation dimensionality (" << dataSeq.n_rows << ") "
          << "does not match HMM Gaussian dimensionality ("
          << hmm.Emission()[0].Dimensionality() << ")!" << endl;

    arma::Row<size_t> sequence;
    hmm.Predict(dataSeq, sequence);

    // Save output.
    if (CLI::HasParam("output_file"))
      data::Save(outputFile, sequence, true);
  }
};

int main(int argc, char** argv)
{
  // Parse command line options.
  CLI::ParseCommandLine(argc, argv);

  if (!CLI::HasParam("output_file"))
    Log::Warn << "--output_file (-o) is not specified; no results will be "
        << "saved!" << endl;

  const string modelFile = CLI::GetParam<string>("model_file");
  LoadHMMAndPerformAction<Viterbi>(modelFile);
}
mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/000077500000000000000000000000001315013601400216155ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/CMakeLists.txt000066400000000000000000000015131315013601400243550ustar00rootroot00000000000000# Define the files we need to compile
# Anything not in this list will not be compiled into mlpack.
set(SOURCES
  binary_numeric_split.hpp
  binary_numeric_split_impl.hpp
  binary_numeric_split_info.hpp
  categorical_split_info.hpp
  gini_impurity.hpp
  hoeffding_categorical_split.hpp
  hoeffding_categorical_split_impl.hpp
  hoeffding_numeric_split.hpp
  hoeffding_numeric_split_impl.hpp
  hoeffding_tree.hpp
  hoeffding_tree_impl.hpp
  information_gain.hpp
  numeric_split_info.hpp
  typedef.hpp
)

# Add directory name to sources.
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

# Append sources (with directory name) to list of all mlpack sources (used at
# the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(hoeffding_tree) mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/binary_numeric_split.hpp000066400000000000000000000115051315013601400265510ustar00rootroot00000000000000/** * @file binary_numeric_split.hpp * @author Ryan Curtin * * An implementation of the binary-tree-based numeric splitting procedure * described by Gama, Rocha, and Medas in their KDD 2003 paper. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_SPLIT_BINARY_NUMERIC_SPLIT_HPP #define MLPACK_METHODS_HOEFFDING_SPLIT_BINARY_NUMERIC_SPLIT_HPP #include "binary_numeric_split_info.hpp" namespace mlpack { namespace tree { /** * The BinaryNumericSplit class implements the numeric feature splitting * strategy devised by Gama, Rocha, and Medas in the following paper: * * @code * @inproceedings{gama2003accurate, * title={Accurate Decision Trees for Mining High-Speed Data Streams}, * author={Gama, J. and Rocha, R. and Medas, P.}, * year={2003}, * booktitle={Proceedings of the Ninth ACM SIGKDD International Conference on * Knowledge Discovery and Data Mining (KDD '03)}, * pages={523--528} * } * @endcode * * This splitting procedure builds a binary tree on points it has seen so far, * and then EvaluateFitnessFunction() returns the best possible split in O(n) * time, where n is the number of samples seen so far. Every split with this * split type returns only two splits (greater than or equal to the split point, * and less than the split point). The Train() function should take O(1) time. * * @tparam FitnessFunction Fitness function to use for calculating gain. * @tparam ObservationType Type of observation used by this dimension. */ template class BinaryNumericSplit { public: //! The splitting information required by the BinaryNumericSplit. typedef BinaryNumericSplitInfo SplitInfo; /** * Create the BinaryNumericSplit object with the given number of classes. * * @param numClasses Number of classes in dataset. */ BinaryNumericSplit(const size_t numClasses); /** * Create the BinaryNumericSplit object with the given number of classes, * using information from the given other split for other parameters. In this * case, there are no other parameters, but this function is required by the * HoeffdingTree class. */ BinaryNumericSplit(const size_t numClasses, const BinaryNumericSplit& other); /** * Train on the given value with the given label. * * @param value The value to train on. * @param label The label to train on. */ void Train(ObservationType value, const size_t label); /** * Given the points seen so far, evaluate the fitness function, returning the * best possible gain of a binary split. Note that this takes O(n) time, * where n is the number of points seen so far. So this may not exactly be * fast... * * The best possible split will be stored in bestFitness, and the second best * possible split will be stored in secondBestFitness. * * @param bestFitness Fitness function value for best possible split. * @param secondBestFitness Fitness function value for second best possible * split. */ void EvaluateFitnessFunction(double& bestFitness, double& secondBestFitness); // Return the number of children if this node were to split on this feature. 
size_t NumChildren() const { return 2; } /** * Given that a split should happen, return the majority classes of the (two) * children and an initialized SplitInfo object. * * @param childMajorities Majority classes of the children after the split. * @param splitInfo Split information. */ void Split(arma::Col& childMajorities, SplitInfo& splitInfo); //! The majority class of the points seen so far. size_t MajorityClass() const; //! The probability of the majority class given the points seen so far. double MajorityProbability() const; //! Serialize the object. template void Serialize(Archive& ar, const unsigned int /* version */); private: //! The elements seen so far, in sorted order. std::multimap sortedElements; //! The classes we have seen so far (for majority calculations). arma::Col classCounts; //! A cached best split point. ObservationType bestSplit; //! If true, the cached best split point is accurate (that is, we have not //! seen any more samples since we calculated it). bool isAccurate; }; // Convenience typedef. template using BinaryDoubleNumericSplit = BinaryNumericSplit; } // namespace tree } // namespace mlpack // Include implementation. #include "binary_numeric_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/binary_numeric_split_impl.hpp000066400000000000000000000131171315013601400275730ustar00rootroot00000000000000/** * @file binary_numeric_split_impl.hpp * @author Ryan Curtin * * Implementation of the BinaryNumericSplit class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_BINARY_NUMERIC_SPLIT_IMPL_HPP #define MLPACK_METHODS_HOEFFDING_TREES_BINARY_NUMERIC_SPLIT_IMPL_HPP // In case it hasn't been included yet. #include "binary_numeric_split.hpp" namespace mlpack { namespace tree { template BinaryNumericSplit::BinaryNumericSplit( const size_t numClasses) : classCounts(numClasses), bestSplit(std::numeric_limits::min()), isAccurate(true) { // Zero out class counts. classCounts.zeros(); } template BinaryNumericSplit::BinaryNumericSplit( const size_t numClasses, const BinaryNumericSplit& /* other */) : classCounts(numClasses), bestSplit(std::numeric_limits::min()), isAccurate(true) { // Zero out class counts. classCounts.zeros(); } template void BinaryNumericSplit::Train( ObservationType value, const size_t label) { // Push it into the multimap, and update the class counts. sortedElements.insert(std::pair(value, label)); ++classCounts[label]; // Whatever we have cached is no longer valid. isAccurate = false; } template void BinaryNumericSplit:: EvaluateFitnessFunction(double& bestFitness, double& secondBestFitness) { // Unfortunately, we have to iterate over the map. bestSplit = std::numeric_limits::min(); // Initialize the sufficient statistics. arma::Mat counts(classCounts.n_elem, 2); counts.col(0).zeros(); counts.col(1) = classCounts; bestFitness = FitnessFunction::Evaluate(counts); secondBestFitness = 0.0; // Initialize to the first observation, so we don't calculate gain on the // first iteration (it will be 0). 
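  // (The loop below sweeps the stored points in sorted order, moving each
  // point from the right side of the candidate split to the left, and
  // re-evaluates the fitness function at every distinct value boundary.)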
ObservationType lastObservation = (*sortedElements.begin()).first; size_t lastClass = classCounts.n_elem; for (typename std::multimap::const_iterator it = sortedElements.begin(); it != sortedElements.end(); ++it) { // If this value is the same as the last, or if this is the first value, or // we have the same class as the previous observation, don't calculate the // gain---it can't be any better. (See Fayyad and Irani, 1991.) if (((*it).first != lastObservation) || ((*it).second != lastClass)) { lastObservation = (*it).first; lastClass = (*it).second; const double value = FitnessFunction::Evaluate(counts); if (value > bestFitness) { bestFitness = value; bestSplit = (*it).first; } else if (value > secondBestFitness) { secondBestFitness = value; } } // Move the point to the right side of the split. --counts((*it).second, 1); ++counts((*it).second, 0); } isAccurate = true; } template void BinaryNumericSplit::Split( arma::Col& childMajorities, SplitInfo& splitInfo) { if (!isAccurate) { double bestGain, secondBestGain; EvaluateFitnessFunction(bestGain, secondBestGain); } // Make one child for each side of the split. childMajorities.set_size(2); arma::Mat counts(classCounts.n_elem, 2); counts.col(0).zeros(); counts.col(1) = classCounts; double min = DBL_MAX; double max = -DBL_MAX; for (typename std::multimap::const_iterator it = sortedElements.begin();// (*it).first < bestSplit; ++it) it != sortedElements.end(); ++it) { // Move the point to the correct side of the split. if ((*it).first < bestSplit) { --counts((*it).second, 1); ++counts((*it).second, 0); } if ((*it).first < min) min = (*it).first; if ((*it).first > max) max = (*it).first; } // Calculate the majority classes of the children. arma::uword maxIndex; counts.unsafe_col(0).max(maxIndex); childMajorities[0] = size_t(maxIndex); counts.unsafe_col(1).max(maxIndex); childMajorities[1] = size_t(maxIndex); // Create the according SplitInfo object. splitInfo = SplitInfo(bestSplit); } template size_t BinaryNumericSplit::MajorityClass() const { arma::uword maxIndex; classCounts.max(maxIndex); return size_t(maxIndex); } template double BinaryNumericSplit:: MajorityProbability() const { return double(arma::max(classCounts)) / double(arma::accu(classCounts)); } template template void BinaryNumericSplit::Serialize( Archive& ar, const unsigned int /* version */) { // Serialize. ar & data::CreateNVP(sortedElements, "sortedElements"); ar & data::CreateNVP(classCounts, "classCounts"); } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/binary_numeric_split_info.hpp000066400000000000000000000025211315013601400275620ustar00rootroot00000000000000/** * @file binary_numeric_split_info.hpp * @author Ryan Curtin * * After a binary numeric split has been made, this holds information on the * split (just the split point). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_BINARY_NUMERIC_SPLIT_INFO_HPP #define MLPACK_METHODS_HOEFFDING_TREES_BINARY_NUMERIC_SPLIT_INFO_HPP #include namespace mlpack { namespace tree { template class BinaryNumericSplitInfo { public: BinaryNumericSplitInfo() { /* Nothing to do. */ } BinaryNumericSplitInfo(const ObservationType& splitPoint) : splitPoint(splitPoint) { /* Nothing to do. 
*/ } template size_t CalculateDirection(const eT& value) const { return (value < splitPoint) ? 0 : 1; } //! Serialize the split (save/load the split points). template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(splitPoint, "splitPoint"); } private: ObservationType splitPoint; }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/categorical_split_info.hpp000066400000000000000000000022241315013601400270310ustar00rootroot00000000000000/** * @file categorical_split_info.hpp * @author Ryan Curtin * * After a categorical split has been made, this holds information on the split. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_CATEGORICAL_SPLIT_INFO_HPP #define MLPACK_METHODS_HOEFFDING_TREES_CATEGORICAL_SPLIT_INFO_HPP #include namespace mlpack { namespace tree { class CategoricalSplitInfo { public: CategoricalSplitInfo(const size_t /* categories */) { } template static size_t CalculateDirection(const eT& value) { // We have a child for each categorical value, and value should be in the // range [0, categories). return size_t(value); } //! Serialize the object. (Nothing needs to be saved.) template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/gini_impurity.hpp000066400000000000000000000051361315013601400252230ustar00rootroot00000000000000/** * @file gini_impurity.hpp * @author Ryan Curtin * * The GiniImpurity class, which is a fitness function (FitnessFunction) for * streaming decision trees. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_GINI_INDEX_HPP #define MLPACK_METHODS_HOEFFDING_TREES_GINI_INDEX_HPP #include namespace mlpack { namespace tree { class GiniImpurity { public: static double Evaluate(const arma::Mat& counts) { // We need to sum over the difference between the un-split node and the // split nodes. First we'll calculate the number of elements in each split // and total. size_t numElem = 0; arma::vec splitCounts(counts.n_cols); for (size_t i = 0; i < counts.n_cols; ++i) { splitCounts[i] = arma::accu(counts.col(i)); numElem += splitCounts[i]; } // Corner case: if there are no elements, the impurity is zero. if (numElem == 0) return 0.0; arma::Col classCounts = arma::sum(counts, 1); // Calculate the Gini impurity of the un-split node. double impurity = 0.0; for (size_t i = 0; i < classCounts.n_elem; ++i) { const double f = ((double) classCounts[i] / (double) numElem); impurity += f * (1.0 - f); } // Now calculate the impurity of the split nodes and subtract them from the // overall impurity. 
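    // In other words, for a split placing n_j of the n points into child j,
    //   gain = Gini(parent) - sum_j (n_j / n) * Gini(child_j),
    // where Gini(S) = sum_c f_c * (1 - f_c) over the class frequencies f_c
    // of S.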
for (size_t i = 0; i < counts.n_cols; ++i) { if (splitCounts[i] > 0) { double splitImpurity = 0.0; for (size_t j = 0; j < counts.n_rows; ++j) { const double f = ((double) counts(j, i) / (double) splitCounts[i]); splitImpurity += f * (1.0 - f); } impurity -= ((double) splitCounts[i] / (double) numElem) * splitImpurity; } } return impurity; } /** * Return the range of the Gini impurity for the given number of classes. * (That is, the difference between the maximum possible value and the minimum * possible value.) */ static double Range(const size_t numClasses) { // The best possible case is that only one class exists, which gives a Gini // impurity of 0. The worst possible case is that the classes are evenly // distributed, which gives n * (1/n * (1 - 1/n)) = 1 - 1/n. return 1.0 - (1.0 / double(numClasses)); } }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/hoeffding_categorical_split.hpp000066400000000000000000000106601315013601400300320ustar00rootroot00000000000000/** * @file hoeffding_categorical_split.hpp * @author Ryan Curtin * * A class that contains the information necessary to perform a categorical * split for Hoeffding trees. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_CATEGORICAL_SPLIT_HPP #define MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_CATEGORICAL_SPLIT_HPP #include #include "categorical_split_info.hpp" namespace mlpack { namespace tree { /** * This is the standard Hoeffding-bound categorical feature proposed in the * paper below: * * @code * @inproceedings{domingos2000mining, * title={{Mining High-Speed Data Streams}}, * author={Domingos, P. and Hulten, G.}, * year={2000}, * booktitle={Proceedings of the Sixth ACM SIGKDD International Conference on * Knowledge Discovery and Data Mining (KDD '00)}, * pages={71--80} * } * @endcode * * This class will track the sufficient statistics of the training points it has * seen. The HoeffdingSplit class (and other related classes) can use this * class to track categorical features and split decision tree nodes. * * @tparam FitnessFunction Fitness function to use for calculating gain. */ template class HoeffdingCategoricalSplit { public: //! The type of split information required by the HoeffdingCategoricalSplit. typedef CategoricalSplitInfo SplitInfo; /** * Create the HoeffdingCategoricalSplit given a number of categories for this * dimension and a number of classes. * * @param numCategories Number of categories in this dimension. * @param numClasses Number of classes in this dimension. */ HoeffdingCategoricalSplit(const size_t numCategories, const size_t numClasses); /** * Create the HoeffdingCategoricalSplit given a number of categories for this * dimension and a number of classes and another HoeffdingCategoricalSplit to * take parameters from. In this particular case, there are no parameters to * take, but this constructor is required by the HoeffdingTree class. */ HoeffdingCategoricalSplit(const size_t numCategories, const size_t numClasses, const HoeffdingCategoricalSplit& other); /** * Train on the given value with the given label. * * @param value Value to train on. * @param label Label to train on. 
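 *
 * For example (an illustrative sketch, not from the mlpack sources), a split
 * over a dimension with 3 categories and 2 classes might be used like this:
 *
 * @code
 * HoeffdingCategoricalSplit<GiniImpurity> split(3, 2);
 * split.Train(0.0, 0); // Category 0, class 0.
 * split.Train(2.0, 1); // Category 2, class 1.
 * double best, secondBest;
 * split.EvaluateFitnessFunction(best, secondBest);
 * @endcode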
*/ template void Train(eT value, const size_t label); /** * Given the points seen so far, evaluate the fitness function, returning the * gain for the best possible split and the second best possible split. In * this splitting technique, we only split one possible way, so * secondBestFitness will always be 0. * * @param bestFitness The fitness function result for this split. * @param secondBestFitness This is always set to 0 (this split only splits * one way). */ void EvaluateFitnessFunction(double& bestFitness, double& secondBestFitness) const; //! Return the number of children, if the node were to split. size_t NumChildren() const { return sufficientStatistics.n_cols; } /** * Gather the information for a split: get the labels of the child majorities, * and initialize the SplitInfo object. * * @param childMajorities Majorities of child nodes to be created. * @param splitInfo Information for splitting. */ void Split(arma::Col& childMajorities, SplitInfo& splitInfo); //! Get the majority class seen so far. size_t MajorityClass() const; //! Get the probability of the majority class given the points seen so far. double MajorityProbability() const; //! Serialize the categorical split. template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(sufficientStatistics, "sufficientStatistics"); } private: //! The sufficient statistics for all points seen so far. Each column //! corresponds to a category, and contains a count of each of the classes //! seen for points in that category. arma::Mat sufficientStatistics; }; } // namespace tree } // namespace mlpack // Include implementation. #include "hoeffding_categorical_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/hoeffding_categorical_split_impl.hpp000066400000000000000000000060541315013601400310550ustar00rootroot00000000000000/** * @file hoeffding_categorical_split_impl.hpp * @author Ryan Curtin * * Implemental of the HoeffdingCategoricalSplit class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_CATEGORICAL_SPLIT_IMPL_HPP #define MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_CATEGORICAL_SPLIT_IMPL_HPP // In case it hasn't been included yet. #include "hoeffding_categorical_split.hpp" namespace mlpack { namespace tree { template HoeffdingCategoricalSplit::HoeffdingCategoricalSplit( const size_t numCategories, const size_t numClasses) : sufficientStatistics(numClasses, numCategories) { sufficientStatistics.zeros(); } template HoeffdingCategoricalSplit::HoeffdingCategoricalSplit( const size_t numCategories, const size_t numClasses, const HoeffdingCategoricalSplit& /* other */) : sufficientStatistics(numClasses, numCategories) { sufficientStatistics.zeros(); } template template void HoeffdingCategoricalSplit::Train(eT value, const size_t label) { // Add this to our counts. // 'value' should be categorical, so we should be able to cast to size_t... sufficientStatistics(label, size_t(value))++; } template void HoeffdingCategoricalSplit::EvaluateFitnessFunction( double& bestFitness, double& secondBestFitness) const { bestFitness = FitnessFunction::Evaluate(sufficientStatistics); secondBestFitness = 0.0; // We only split one possible way. 
} template void HoeffdingCategoricalSplit::Split( arma::Col& childMajorities, SplitInfo& splitInfo) { // We'll make one child for each category. childMajorities.set_size(sufficientStatistics.n_cols); for (size_t i = 0; i < sufficientStatistics.n_cols; ++i) { arma::uword maxIndex = 0; sufficientStatistics.unsafe_col(i).max(maxIndex); childMajorities[i] = size_t(maxIndex); } // Create the according SplitInfo object. splitInfo = SplitInfo(sufficientStatistics.n_cols); } template size_t HoeffdingCategoricalSplit::MajorityClass() const { // Calculate the class that we have seen the most of. arma::Col classCounts = arma::sum(sufficientStatistics, 1); arma::uword maxIndex = 0; classCounts.max(maxIndex); return size_t(maxIndex); } template double HoeffdingCategoricalSplit::MajorityProbability() const { arma::Col classCounts = arma::sum(sufficientStatistics, 1); return double(classCounts.max()) / double(arma::accu(classCounts)); } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/hoeffding_numeric_split.hpp000066400000000000000000000131321315013601400272140ustar00rootroot00000000000000/** * @file hoeffding_numeric_split.hpp * @author Ryan Curtin * * A numeric feature split for Hoeffding trees. This is a very simple * implementation based on a minor note in the paper "Mining High-Speed Data * Streams" by Pedro Domingos and Geoff Hulten. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_NUMERIC_SPLIT_HPP #define MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_NUMERIC_SPLIT_HPP #include #include "numeric_split_info.hpp" namespace mlpack { namespace tree { /** * The HoeffdingNumericSplit class implements the numeric feature splitting * strategy alluded to by Domingos and Hulten in the following paper: * * @code * @inproceedings{domingos2000mining, * title={{Mining High-Speed Data Streams}}, * author={Domingos, P. and Hulten, G.}, * year={2000}, * booktitle={Proceedings of the Sixth ACM SIGKDD International Conference on * Knowledge Discovery and Data Mining (KDD '00)}, * pages={71--80} * } * @endcode * * The strategy alluded to is very simple: we discretize the numeric features * that we see. But in this case, we don't know how many bins we have, which * makes things a little difficult. This class only makes binary splits, and * has a maximum number of bins. The binning strategy is simple: the split * caches the minimum and maximum value of points seen so far, and when the * number of points hits a predefined threshold, the cached minimum-maximum * range is equally split into bins, and splitting proceeds in the same way as * with the categorical splits. This is a simple and stupid strategy, so don't * expect it to be the best possible thing you can do. * * @tparam FitnessFunction Fitness function to use for calculating gain. * @tparam ObservationType Type of observations in this dimension. */ template class HoeffdingNumericSplit { public: //! The splitting information type required by the HoeffdingNumericSplit. typedef NumericSplitInfo SplitInfo; /** * Create the HoeffdingNumericSplit class, and specify some basic parameters * about how the binning should take place. * * @param numClasses Number of classes. * @param bins Number of bins. 
* @param observationsBeforeBinning Number of points to see before binning is * performed. */ HoeffdingNumericSplit(const size_t numClasses, const size_t bins = 10, const size_t observationsBeforeBinning = 100); /** * Create the HoeffdingNumericSplit class, using the parameters from the given * other split object. */ HoeffdingNumericSplit(const size_t numClasses, const HoeffdingNumericSplit& other); /** * Train the HoeffdingNumericSplit on the given observed value (remember that * this object only cares about the information for a single feature, not an * entire point). * * @param value Value in the dimension that this HoeffdingNumericSplit refers * to. * @param label Label of the given point. */ void Train(ObservationType value, const size_t label); /** * Evaluate the fitness function given what has been calculated so far. In * this case, if binning has not yet been performed, 0 will be returned (i.e., * no gain). Because this split can only split one possible way, * secondBestFitness (the fitness function for the second best possible split) * will be set to 0. * * @param bestFitness Value of the fitness function for the best possible * split. * @param secondBestFitness Value of the fitness function for the second best * possible split (always 0 for this split). */ void EvaluateFitnessFunction(double& bestFitness, double& secondBestFitness) const; //! Return the number of children if this node splits on this feature. size_t NumChildren() const { return bins; } /** * Return the majority class of each child to be created, if a split on this * dimension was performed. Also create the split object. */ void Split(arma::Col& childMajorities, SplitInfo& splitInfo) const; //! Return the majority class. size_t MajorityClass() const; //! Return the probability of the majority class. double MajorityProbability() const; //! Return the number of bins. size_t Bins() const { return bins; } //! Serialize the object. template void Serialize(Archive& ar, const unsigned int /* version */); private: //! Before binning, this holds the points we have seen so far. arma::Col observations; //! This holds the labels of the points before binning. arma::Col labels; //! The split points for the binning (length bins - 1). arma::Col splitPoints; //! The number of bins. size_t bins; //! The number of observations we must see before binning. size_t observationsBeforeBinning; //! The number of samples we have seen so far. size_t samplesSeen; //! After binning, this contains the sufficient statistics. arma::Mat sufficientStatistics; }; //! Convenience typedef. template using HoeffdingDoubleNumericSplit = HoeffdingNumericSplit; } // namespace tree } // namespace mlpack // Include implementation. #include "hoeffding_numeric_split_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/hoeffding_numeric_split_impl.hpp000066400000000000000000000170051315013601400302400ustar00rootroot00000000000000/** * @file hoeffding_numeric_split_impl.hpp * @author Ryan Curtin * * An implementation of the simple HoeffdingNumericSplit class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
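 *
 * As a sketch of the binning rule implemented below (all names are from this
 * file): once observationsBeforeBinning points have been cached, the cached
 * minimum and maximum are found and the bin boundaries are
 *
 *   splitPoints[i] = min + (i + 1) * (max - min) / bins,  i = 0, ..., bins - 2
 *
 * so the endpoints themselves are never used as split points.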
*/ #ifndef MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_NUMERIC_SPLIT_IMPL_HPP #define MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_NUMERIC_SPLIT_IMPL_HPP #include "hoeffding_numeric_split.hpp" namespace mlpack { namespace tree { template HoeffdingNumericSplit::HoeffdingNumericSplit( const size_t numClasses, const size_t bins, const size_t observationsBeforeBinning) : observations(observationsBeforeBinning - 1), labels(observationsBeforeBinning - 1), bins(bins), observationsBeforeBinning(observationsBeforeBinning), samplesSeen(0), sufficientStatistics(arma::zeros>(numClasses, bins)) { observations.zeros(); labels.zeros(); } template HoeffdingNumericSplit::HoeffdingNumericSplit( const size_t numClasses, const HoeffdingNumericSplit& other) : observations(other.observationsBeforeBinning - 1), labels(other.observationsBeforeBinning - 1), bins(other.bins), observationsBeforeBinning(other.observationsBeforeBinning), samplesSeen(0), sufficientStatistics(arma::zeros>(numClasses, bins)) { observations.zeros(); labels.zeros(); } template void HoeffdingNumericSplit::Train( ObservationType value, const size_t label) { if (samplesSeen < observationsBeforeBinning - 1) { // Add this to the samples we have seen. observations[samplesSeen] = value; labels[samplesSeen] = label; ++samplesSeen; return; } else if (samplesSeen == observationsBeforeBinning - 1) { // Now we need to make the bins. ObservationType min = value; ObservationType max = value; for (size_t i = 0; i < observationsBeforeBinning - 1; ++i) { if (observations[i] < min) min = observations[i]; else if (observations[i] > max) max = observations[i]; } // Now split these. We can't use linspace, because we don't want to include // the endpoints. splitPoints.resize(bins - 1); const ObservationType binWidth = (max - min) / bins; for (size_t i = 0; i < bins - 1; ++i) splitPoints[i] = min + (i + 1) * binWidth; ++samplesSeen; // Now, add all of the points we've seen to the sufficient statistics. for (size_t i = 0; i < observationsBeforeBinning - 1; ++i) { // What bin does the point fall into? size_t bin = 0; while (bin < bins - 1 && observations[i] > splitPoints[bin]) ++bin; sufficientStatistics(labels[i], bin)++; } } // If we've gotten to here, then we need to add the point to the sufficient // statistics. What bin does the point fall into? size_t bin = 0; while (bin < bins - 1 && value > splitPoints[bin]) ++bin; sufficientStatistics(label, bin)++; } template void HoeffdingNumericSplit:: EvaluateFitnessFunction(double& bestFitness, double& secondBestFitness) const { secondBestFitness = 0.0; // We can only split one way. if (samplesSeen < observationsBeforeBinning) bestFitness = 0.0; else bestFitness = FitnessFunction::Evaluate(sufficientStatistics); } template void HoeffdingNumericSplit::Split( arma::Col& childMajorities, SplitInfo& splitInfo) const { childMajorities.set_size(sufficientStatistics.n_cols); for (size_t i = 0; i < sufficientStatistics.n_cols; ++i) { arma::uword maxIndex = 0; sufficientStatistics.unsafe_col(i).max(maxIndex); childMajorities[i] = size_t(maxIndex); } // Create the SplitInfo object. splitInfo = SplitInfo(splitPoints); } template size_t HoeffdingNumericSplit:: MajorityClass() const { // If we haven't yet determined the bins, we must calculate this by hand. 
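  // ("By hand" means counting the raw labels cached before binning, rather
  // than summing rows of the not-yet-populated sufficientStatistics matrix.)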
if (samplesSeen < observationsBeforeBinning) { arma::Col classes(sufficientStatistics.n_rows); classes.zeros(); for (size_t i = 0; i < samplesSeen; ++i) classes[labels[i]]++; arma::uword majorityClass; classes.max(majorityClass); return size_t(majorityClass); } else { // We've calculated the bins, so we can just sum over the sufficient // statistics. arma::Col classCounts = arma::sum(sufficientStatistics, 1); arma::uword maxIndex = 0; classCounts.max(maxIndex); return size_t(maxIndex); } } template double HoeffdingNumericSplit:: MajorityProbability() const { // If we haven't yet determined the bins, we must calculate this by hand. if (samplesSeen < observationsBeforeBinning) { arma::Col classes(sufficientStatistics.n_rows); classes.zeros(); for (size_t i = 0; i < samplesSeen; ++i) classes[labels[i]]++; return double(classes.max()) / double(arma::accu(classes)); } else { // We've calculated the bins, so we can just sum over the sufficient // statistics. arma::Col classCounts = arma::sum(sufficientStatistics, 1); return double(classCounts.max()) / double(arma::sum(classCounts)); } } template template void HoeffdingNumericSplit::Serialize( Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(samplesSeen, "samplesSeen"); ar & CreateNVP(observationsBeforeBinning, "observationsBeforeBinning"); ar & CreateNVP(bins, "bins"); if (samplesSeen >= observationsBeforeBinning) { // The binning has happened, so we only need to save the resulting bins. ar & CreateNVP(splitPoints, "splitPoints"); ar & CreateNVP(sufficientStatistics, "sufficientStatistics"); if (Archive::is_loading::value) { // Clean other objects. observations.clear(); labels.clear(); } } else { // The binning has not happened yet, so we only need to save the information // required before binning. if (Archive::is_loading::value) { observations.zeros(observationsBeforeBinning); labels.zeros(observationsBeforeBinning); } // Save the number of classes. size_t numClasses; if (Archive::is_saving::value) numClasses = sufficientStatistics.n_rows; ar & data::CreateNVP(numClasses, "numClasses"); for (size_t i = 0; i < samplesSeen; ++i) { std::ostringstream oss; oss << "obs" << i; ar & CreateNVP(observations[i], oss.str()); std::ostringstream oss2; oss2 << "label" << i; ar & CreateNVP(labels[i], oss2.str()); } if (Archive::is_loading::value) { // Clean other objects. splitPoints.clear(); sufficientStatistics.zeros(numClasses, bins); } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/hoeffding_tree.hpp000066400000000000000000000333151315013601400253030ustar00rootroot00000000000000/** * @file hoeffding_split.hpp * @author Ryan Curtin * * An implementation of the standard Hoeffding tree by Pedro Domingos and Geoff * Hulten in ``Mining High-Speed Data Streams''. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_TREE_HPP #define MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_TREE_HPP #include #include #include "gini_impurity.hpp" #include "hoeffding_numeric_split.hpp" #include "hoeffding_categorical_split.hpp" namespace mlpack { namespace tree { /** * The HoeffdingTree object represents all of the necessary information for a * Hoeffding-bound-based decision tree. 
This class is able to train on samples * in streaming settings and batch settings, and perform splits based on the * Hoeffding bound. The Hoeffding tree (also known as the "very fast decision * tree" -- VFDT) is described in the following paper: * * @code * @inproceedings{domingos2000mining, * title={{Mining High-Speed Data Streams}}, * author={Domingos, P. and Hulten, G.}, * year={2000}, * booktitle={Proceedings of the Sixth ACM SIGKDD International Conference * on Knowledge Discovery and Data Mining (KDD '00)}, * pages={71--80} * } * @endcode * * The class is modular, and takes three template parameters. The first, * FitnessFunction, is the fitness function that should be used to determine * whether a split is beneficial; examples might be GiniImpurity or * InformationGain. The NumericSplitType determines how numeric attributes are * handled, and the CategoricalSplitType determines how categorical attributes * are handled. As far as the actual splitting goes, the meat of the splitting * procedure will be contained in those two classes. * * @tparam FitnessFunction Fitness function to use. * @tparam NumericSplitType Technique for splitting numeric features. * @tparam CategoricalSplitType Technique for splitting categorical features. */ template class NumericSplitType = HoeffdingDoubleNumericSplit, template class CategoricalSplitType = HoeffdingCategoricalSplit > class HoeffdingTree { public: //! Allow access to the numeric split type. typedef NumericSplitType NumericSplit; //! Allow access to the categorical split type. typedef CategoricalSplitType CategoricalSplit; /** * Construct the Hoeffding tree with the given parameters and given training * data. The tree may be trained either in batch mode (which looks at all * points before splitting, and propagates these points to the created * children for further training), or in streaming mode, where each point is * only considered once. (In general, batch mode will give better-performing * trees, but will have higher memory and runtime costs for the same dataset.) * * @param data Dataset to train on. * @param datasetInfo Information on the dataset (types of each feature). * @param labels Labels of each point in the dataset. * @param numClasses Number of classes in the dataset. * @param batchTraining Whether or not to train in batch. * @param successProbability Probability of success required in Hoeffding * bounds before a split can happen. * @param maxSamples Maximum number of samples before a split is forced (0 * never forces a split); ignored in batch training mode. * @param checkInterval Number of samples required before each split; ignored * in batch training mode. * @param minSamples If the node has seen this many points or fewer, no split * will be allowed. */ template HoeffdingTree(const MatType& data, const data::DatasetInfo& datasetInfo, const arma::Row& labels, const size_t numClasses, const bool batchTraining = true, const double successProbability = 0.95, const size_t maxSamples = 0, const size_t checkInterval = 100, const size_t minSamples = 100, const CategoricalSplitType& categoricalSplitIn = CategoricalSplitType(0, 0), const NumericSplitType& numericSplitIn = NumericSplitType(0)); /** * Construct the Hoeffding tree with the given parameters, but training on no * data. The dimensionMappings parameter is only used if it is desired that * this node does not create its own dimensionMappings object (for instance, * if this is a child of another node in the tree). * * @param dimensionality Dimensionality of the dataset. 
* @param numClasses Number of classes in the dataset. * @param datasetInfo Information on the dataset (types of each feature). * @param successProbability Probability of success required in Hoeffding * bound before a split can happen. * @param maxSamples Maximum number of samples before a split is forced. * @param checkInterval Number of samples required before each split check. * @param minSamples If the node has seen this many points or fewer, no split * will be allowed. * @param dimensionMappings Mappings from dimension indices to positions in * numeric and categorical split vectors. If left NULL, a new one will * be created. */ HoeffdingTree(const data::DatasetInfo& datasetInfo, const size_t numClasses, const double successProbability = 0.95, const size_t maxSamples = 0, const size_t checkInterval = 100, const size_t minSamples = 100, const CategoricalSplitType& categoricalSplitIn = CategoricalSplitType(0, 0), const NumericSplitType& numericSplitIn = NumericSplitType(0), std::unordered_map>* dimensionMappings = NULL); /** * Copy another tree (warning: this will duplicate the tree entirely, and may * use a lot of memory. Make sure it's what you want before you do it). * * @param other Tree to copy. */ HoeffdingTree(const HoeffdingTree& other); /** * Clean up memory. */ ~HoeffdingTree(); /** * Train on a set of points, either in streaming mode or in batch mode, with * the given labels. * * @param data Data points to train on. * @param label Labels of data points. * @param batchTraining If true, perform training in batch. */ template void Train(const MatType& data, const arma::Row& labels, const bool batchTraining = true); /** * Train on a single point in streaming mode, with the given label. * * @param point Point to train on. * @param label Label of point to train on. */ template void Train(const VecType& point, const size_t label); /** * Check if a split would satisfy the conditions of the Hoeffding bound with * the node's specified success probability. If so, the number of children * that would be created is returned. If not, 0 is returned. */ size_t SplitCheck(); //! Get the splitting dimension (size_t(-1) if no split). size_t SplitDimension() const { return splitDimension; } //! Get the majority class. size_t MajorityClass() const { return majorityClass; } //! Modify the majority class. size_t& MajorityClass() { return majorityClass; } //! Get the probability of the majority class (based on training samples). double MajorityProbability() const { return majorityProbability; } //! Modify the probability of the majority class. double& MajorityProbability() { return majorityProbability; } //! Get the number of children. size_t NumChildren() const { return children.size(); } //! Get a child. const HoeffdingTree& Child(const size_t i) const { return *children[i]; } //! Modify a child. HoeffdingTree& Child(const size_t i) { return *children[i]; } //! Get the confidence required for a split. double SuccessProbability() const { return successProbability; } //! Modify the confidence required for a split. void SuccessProbability(const double successProbability); //! Get the minimum number of samples for a split. size_t MinSamples() const { return minSamples; } //! Modify the minimum number of samples for a split. void MinSamples(const size_t minSamples); //! Get the maximum number of samples before a split is forced. size_t MaxSamples() const { return maxSamples; } //! Modify the maximum number of samples before a split is forced. void MaxSamples(const size_t maxSamples); //! 
Get the number of samples before a split check is performed. size_t CheckInterval() const { return checkInterval; } //! Modify the number of samples before a split check is performed. void CheckInterval(const size_t checkInterval); /** * Given a point and that this node is not a leaf, calculate the index of the * child node this point would go towards. This method is primarily used by * the Classify() function, but it can be used in a standalone sense too. * * @param point Point to classify. */ template size_t CalculateDirection(const VecType& point) const; /** * Classify the given point, using this node and the entire (sub)tree beneath * it. The predicted label is returned. * * @param point Point to classify. * @return Predicted label of point. */ template size_t Classify(const VecType& point) const; /** * Classify the given point and also return an estimate of the probability * that the prediction is correct. (This estimate is simply the probability * that a training point was from the majority class in the leaf that this * point binned to.) * * @param point Point to classify. * @param prediction Predicted label of point. * @param probability An estimate of the probability that the prediction is * correct. */ template void Classify(const VecType& point, size_t& prediction, double& probability) const; /** * Classify the given points, using this node and the entire (sub)tree beneath * it. The predicted labels for each point are returned. * * @param data Points to classify. * @param predictions Predicted labels for each point. */ template void Classify(const MatType& data, arma::Row& predictions) const; /** * Classify the given points, using this node and the entire (sub)tree beneath * it. The predicted labels for each point are returned, as well as an * estimate of the probability that the prediction is correct for each point. * This estimate is simply the MajorityProbability() for the leaf that each * point bins to. * * @param data Points to classify. * @param predictions Predicted labels for each point. * @param probabilities Probability estimates for each predicted label. */ template void Classify(const MatType& data, arma::Row& predictions, arma::rowvec& probabilities) const; /** * Given that this node should split, create the children. */ void CreateChildren(); //! Serialize the split. template void Serialize(Archive& ar, const unsigned int /* version */); private: // We need to keep some information for before we have split. //! Information for splitting of numeric features (used before split). std::vector> numericSplits; //! Information for splitting of categorical features (used before split). std::vector> categoricalSplits; //! This structure is owned by this node only if it is the root of the tree. std::unordered_map>* dimensionMappings; //! Indicates whether or not we own the mappings. bool ownsMappings; //! The number of samples seen so far by this node. size_t numSamples; //! The number of classes this node is trained on. size_t numClasses; //! The maximum number of samples we can see before splitting. size_t maxSamples; //! The number of samples that should be seen before checking for a split. size_t checkInterval; //! The minimum number of samples for splitting. size_t minSamples; //! The dataset information. const data::DatasetInfo* datasetInfo; //! Whether or not we own the dataset information. bool ownsInfo; //! The required probability of success for a split to be performed. double successProbability; // And we need to keep some information for after we have split. //! 
The dimension that this node has split on. size_t splitDimension; //! The majority class of this node. size_t majorityClass; //! The empirical probability of a point this node saw having the majority //! class. double majorityProbability; //! If the split is categorical, this holds the splitting information. typename CategoricalSplitType::SplitInfo categoricalSplit; //! If the split is numeric, this holds the splitting information. typename NumericSplitType::SplitInfo numericSplit; //! If the split has occurred, these are the children. std::vector children; }; } // namespace tree } // namespace mlpack #include "hoeffding_tree_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/hoeffding_tree_impl.hpp000066400000000000000000000620411315013601400263220ustar00rootroot00000000000000/** * @file hoeffding_split_impl.hpp * @author Ryan Curtin * * Implementation of the HoeffdingTree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_TREE_IMPL_HPP #define MLPACK_METHODS_HOEFFDING_TREES_HOEFFDING_TREE_IMPL_HPP // In case it hasn't been included yet. #include "hoeffding_tree.hpp" namespace mlpack { namespace tree { template class NumericSplitType, template class CategoricalSplitType> template HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::HoeffdingTree(const MatType& data, const data::DatasetInfo& datasetInfo, const arma::Row& labels, const size_t numClasses, const bool batchTraining, const double successProbability, const size_t maxSamples, const size_t checkInterval, const size_t minSamples, const CategoricalSplitType& categoricalSplitIn, const NumericSplitType& numericSplitIn) : dimensionMappings(new std::unordered_map>()), ownsMappings(true), numSamples(0), numClasses(numClasses), maxSamples((maxSamples == 0) ? size_t(-1) : maxSamples), checkInterval(checkInterval), minSamples(minSamples), datasetInfo(&datasetInfo), ownsInfo(false), successProbability(successProbability), splitDimension(size_t(-1)), categoricalSplit(0), numericSplit() { // Generate dimension mappings and create split objects. for (size_t i = 0; i < datasetInfo.Dimensionality(); ++i) { if (datasetInfo.Type(i) == data::Datatype::categorical) { categoricalSplits.push_back(CategoricalSplitType( datasetInfo.NumMappings(i), numClasses, categoricalSplitIn)); (*dimensionMappings)[i] = std::make_pair(data::Datatype::categorical, categoricalSplits.size() - 1); } else { numericSplits.push_back(NumericSplitType(numClasses, numericSplitIn)); (*dimensionMappings)[i] = std::make_pair(data::Datatype::numeric, numericSplits.size() - 1); } } // Now train. Train(data, labels, batchTraining); } template class NumericSplitType, template class CategoricalSplitType> HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::HoeffdingTree(const data::DatasetInfo& datasetInfo, const size_t numClasses, const double successProbability, const size_t maxSamples, const size_t checkInterval, const size_t minSamples, const CategoricalSplitType& categoricalSplitIn, const NumericSplitType& numericSplitIn, std::unordered_map>* dimensionMappingsIn) : dimensionMappings((dimensionMappingsIn != NULL) ? 
dimensionMappingsIn : new std::unordered_map>()), ownsMappings(dimensionMappingsIn == NULL), numSamples(0), numClasses(numClasses), maxSamples((maxSamples == 0) ? size_t(-1) : maxSamples), checkInterval(checkInterval), minSamples(minSamples), datasetInfo(&datasetInfo), ownsInfo(false), successProbability(successProbability), splitDimension(size_t(-1)), categoricalSplit(0), numericSplit() { // Do we need to generate the mappings too? if (ownsMappings) { for (size_t i = 0; i < datasetInfo.Dimensionality(); ++i) { if (datasetInfo.Type(i) == data::Datatype::categorical) { categoricalSplits.push_back(CategoricalSplitType( datasetInfo.NumMappings(i), numClasses, categoricalSplitIn)); (*dimensionMappings)[i] = std::make_pair(data::Datatype::categorical, categoricalSplits.size() - 1); } else { numericSplits.push_back(NumericSplitType(numClasses, numericSplitIn)); (*dimensionMappings)[i] = std::make_pair(data::Datatype::numeric, numericSplits.size() - 1); } } } else { for (size_t i = 0; i < datasetInfo.Dimensionality(); ++i) { if (datasetInfo.Type(i) == data::Datatype::categorical) { categoricalSplits.push_back(CategoricalSplitType( datasetInfo.NumMappings(i), numClasses, categoricalSplitIn)); } else { numericSplits.push_back(NumericSplitType(numClasses, numericSplitIn)); } } } } // Copy constructor. template class NumericSplitType, template class CategoricalSplitType> HoeffdingTree:: HoeffdingTree(const HoeffdingTree& other) : numericSplits(other.numericSplits), categoricalSplits(other.categoricalSplits), dimensionMappings(new std::unordered_map>(*other.dimensionMappings)), ownsMappings(true), numSamples(other.numSamples), numClasses(other.numClasses), maxSamples(other.maxSamples), checkInterval(other.checkInterval), minSamples(other.minSamples), datasetInfo(new data::DatasetInfo(*other.datasetInfo)), ownsInfo(true), successProbability(other.successProbability), splitDimension(other.splitDimension), majorityClass(other.majorityClass), majorityProbability(other.majorityProbability), categoricalSplit(other.categoricalSplit), numericSplit(other.numericSplit) { // Copy each of the children. for (size_t i = 0; i < other.children.size(); ++i) children.push_back(new HoeffdingTree(other.children[i])); } template class NumericSplitType, template class CategoricalSplitType> HoeffdingTree:: ~HoeffdingTree() { if (ownsMappings) delete dimensionMappings; if (ownsInfo) delete datasetInfo; for (size_t i = 0; i < children.size(); ++i) delete children[i]; } //! Train on a set of points. template class NumericSplitType, template class CategoricalSplitType> template void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::Train(const MatType& data, const arma::Row& labels, const bool batchTraining) { if (batchTraining) { // Pass all the points through the nodes, and then split only after that. checkInterval = data.n_cols; // Only split on the last sample. // Don't split if there are fewer than five points. size_t oldMaxSamples = maxSamples; maxSamples = std::max(size_t(data.n_cols - 1), size_t(5)); for (size_t i = 0; i < data.n_cols; ++i) Train(data.col(i), labels[i]); maxSamples = oldMaxSamples; // Now, if we did split, find out which points go to which child, and // perform the same batch training. if (children.size() > 0) { // We need to create a vector of indices that represent the points that // must go to each child, so we need children.size() vectors, but we don't // know how long they will be. 
Therefore, we will create vectors each of // size data.n_cols, but will probably not use all the memory we // allocated, and then pass subvectors to the submat() function. std::vector indices(children.size(), arma::uvec(data.n_cols)); arma::Col counts = arma::zeros>(children.size()); for (size_t i = 0; i < data.n_cols; ++i) { size_t direction = CalculateDirection(data.col(i)); size_t currentIndex = counts[direction]; indices[direction][currentIndex] = i; counts[direction]++; } // Now pass each of these submatrices to the children to perform // batch-mode training. for (size_t i = 0; i < children.size(); ++i) { // If we don't have any points that go to the child in question, don't // train that child. if (counts[i] == 0) continue; // The submatrix here is non-contiguous, but I think this will be faster // than copying the points to an ordered state. We still have to // assemble the labels vector, though. arma::Row childLabels = labels.cols( indices[i].subvec(0, counts[i] - 1)); // Unfortunately, limitations of Armadillo's non-contiguous subviews // prohibits us from successfully passing the non-contiguous subview to // Train(), since the col() function is not provided. So, // unfortunately, instead, we'll just extract the non-contiguous // submatrix. MatType childData = data.cols(indices[i].subvec(0, counts[i] - 1)); children[i]->Train(childData, childLabels, true); } } } else { // We aren't training in batch mode; loop through the points. for (size_t i = 0; i < data.n_cols; ++i) Train(data.col(i), labels[i]); } } //! Train on one point. template class NumericSplitType, template class CategoricalSplitType> template void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::Train(const VecType& point, const size_t label) { if (splitDimension == size_t(-1)) { ++numSamples; size_t numericIndex = 0; size_t categoricalIndex = 0; for (size_t i = 0; i < point.n_rows; ++i) { if (datasetInfo->Type(i) == data::Datatype::categorical) categoricalSplits[categoricalIndex++].Train(point[i], label); else if (datasetInfo->Type(i) == data::Datatype::numeric) numericSplits[numericIndex++].Train(point[i], label); } // Grab majority class from splits. if (categoricalSplits.size() > 0) { majorityClass = categoricalSplits[0].MajorityClass(); majorityProbability = categoricalSplits[0].MajorityProbability(); } else { majorityClass = numericSplits[0].MajorityClass(); majorityProbability = numericSplits[0].MajorityProbability(); } // Check for a split, if we should. if (numSamples % checkInterval == 0) { const size_t numChildren = SplitCheck(); if (numChildren > 0) { // We need to add a bunch of children. // Delete children, if we have them. children.clear(); CreateChildren(); } } } else { // Already split. Pass the training point to the relevant child. size_t direction = CalculateDirection(point); children[direction]->Train(point, label); } } template class NumericSplitType, template class CategoricalSplitType> size_t HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::SplitCheck() { // Do nothing if we've already split. if (splitDimension != size_t(-1)) return 0; // If not enough points have been seen, we cannot split. if (numSamples <= minSamples) return 0; // Check the fitness of each dimension. Then we'll use a Hoeffding bound // somehow. // Calculate epsilon, the value we need things to be greater than. 
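// Hoeffding bound: with probability successProbability, the true mean of a
// random variable with range R lies within
//   epsilon = sqrt(R^2 * ln(1 / (1 - successProbability)) / (2 * numSamples))
// of the empirical mean. So if the best observed gain exceeds the second best
// by more than epsilon, the best dimension is (with high probability) truly
// the best one to split on.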
const double rSquared = std::pow(FitnessFunction::Range(numClasses), 2.0); const double epsilon = std::sqrt(rSquared * std::log(1.0 / (1.0 - successProbability)) / (2 * numSamples)); // Find the best and second best possible splits. double largest = -DBL_MAX; size_t largestIndex = 0; double secondLargest = -DBL_MAX; for (size_t i = 0; i < categoricalSplits.size() + numericSplits.size(); ++i) { size_t type = dimensionMappings->at(i).first; size_t index = dimensionMappings->at(i).second; // Some split procedures can split multiple ways, but we only care about the // best two splits that can be done in every dimension. double bestGain = 0.0; double secondBestGain = 0.0; if (type == data::Datatype::categorical) categoricalSplits[index].EvaluateFitnessFunction(bestGain, secondBestGain); else if (type == data::Datatype::numeric) numericSplits[index].EvaluateFitnessFunction(bestGain, secondBestGain); // See if these gains are better than the previous. if (bestGain > largest) { secondLargest = largest; largest = bestGain; largestIndex = i; } else if (bestGain > secondLargest) { secondLargest = bestGain; } if (secondBestGain > secondLargest) { secondLargest = secondBestGain; } } // Are these far enough apart to split? if ((largest > 0.0) && ((largest - secondLargest > epsilon) || (numSamples > maxSamples) || (epsilon <= 0.05))) { // Split! splitDimension = largestIndex; const size_t type = dimensionMappings->at(largestIndex).first; const size_t index = dimensionMappings->at(largestIndex).second; if (type == data::Datatype::categorical) { // I don't know if this should be here. majorityClass = categoricalSplits[index].MajorityClass(); return categoricalSplits[index].NumChildren(); } else { majorityClass = numericSplits[index].MajorityClass(); return numericSplits[index].NumChildren(); } } else { return 0; // Don't split.
} } template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::SuccessProbability(const double successProbability) { this->successProbability = successProbability; for (size_t i = 0; i < children.size(); ++i) children[i]->SuccessProbability(successProbability); } template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::MinSamples(const size_t minSamples) { this->minSamples = minSamples; for (size_t i = 0; i < children.size(); ++i) children[i]->MinSamples(minSamples); } template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::MaxSamples(const size_t maxSamples) { this->maxSamples = maxSamples; for (size_t i = 0; i < children.size(); ++i) children[i]->MaxSamples(maxSamples); } template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::CheckInterval(const size_t checkInterval) { this->checkInterval = checkInterval; for (size_t i = 0; i < children.size(); ++i) children[i]->CheckInterval(checkInterval); } template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > template size_t HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::CalculateDirection(const VecType& point) const { // Don't call this before the node is split... if (datasetInfo->Type(splitDimension) == data::Datatype::numeric) return numericSplit.CalculateDirection(point[splitDimension]); else if (datasetInfo->Type(splitDimension) == data::Datatype::categorical) return categoricalSplit.CalculateDirection(point[splitDimension]); else return 0; // Not sure what to do here... } template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > template size_t HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::Classify(const VecType& point) const { if (children.size() == 0) { // If we're a leaf (or being considered a leaf), classify based on what we // know. return majorityClass; } else { // Otherwise, pass to the right child and let them classify. return children[CalculateDirection(point)]->Classify(point); } } template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > template void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::Classify(const VecType& point, size_t& prediction, double& probability) const { if (children.size() == 0) { // We are a leaf, so classify accordingly. prediction = majorityClass; probability = majorityProbability; } else { // Pass to the right child and let them do the classification. children[CalculateDirection(point)]->Classify(point, prediction, probability); } } //! Batch classification. template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > template void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::Classify(const MatType& data, arma::Row& predictions) const { predictions.set_size(data.n_cols); for (size_t i = 0; i < data.n_cols; ++i) predictions[i] = Classify(data.col(i)); } //! Batch classification with probabilities. 
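//! (Each returned probability is the majority-class frequency of the leaf
//! that the corresponding point reaches; see MajorityProbability().)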
template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > template void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::Classify(const MatType& data, arma::Row& predictions, arma::rowvec& probabilities) const { predictions.set_size(data.n_cols); probabilities.set_size(data.n_cols); for (size_t i = 0; i < data.n_cols; ++i) Classify(data.col(i), predictions[i], probabilities[i]); } template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::CreateChildren() { // Create the children. arma::Col childMajorities; if (dimensionMappings->at(splitDimension).first == data::Datatype::categorical) { categoricalSplits[dimensionMappings->at(splitDimension).second].Split( childMajorities, categoricalSplit); } else if (dimensionMappings->at(splitDimension).first == data::Datatype::numeric) { numericSplits[dimensionMappings->at(splitDimension).second].Split( childMajorities, numericSplit); } // We already know what the splitDimension will be. for (size_t i = 0; i < childMajorities.n_elem; ++i) { // We need to also give our split objects to the new children, so that // parameters for the splits can be passed down. But if we have no // categorical or numeric features, we can't pass anything but the // defaults... if (categoricalSplits.size() == 0) { // Pass a default categorical split. children.push_back(new HoeffdingTree(*datasetInfo, numClasses, successProbability, maxSamples, checkInterval, minSamples, CategoricalSplitType(0, numClasses), numericSplits[0], dimensionMappings)); } else if (numericSplits.size() == 0) { // Pass a default numeric split. children.push_back(new HoeffdingTree(*datasetInfo, numClasses, successProbability, maxSamples, checkInterval, minSamples, categoricalSplits[0], NumericSplitType(numClasses), dimensionMappings)); } else { // Pass both splits that we already have. children.push_back(new HoeffdingTree(*datasetInfo, numClasses, successProbability, maxSamples, checkInterval, minSamples, categoricalSplits[0], numericSplits[0], dimensionMappings)); } children[i]->MajorityClass() = childMajorities[i]; } // Eliminate now-unnecessary split information. numericSplits.clear(); categoricalSplits.clear(); } template< typename FitnessFunction, template class NumericSplitType, template class CategoricalSplitType > template void HoeffdingTree< FitnessFunction, NumericSplitType, CategoricalSplitType >::Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(splitDimension, "splitDimension"); // Clear memory for the mappings if necessary. if (Archive::is_loading::value && ownsMappings && dimensionMappings) delete dimensionMappings; ar & CreateNVP(dimensionMappings, "dimensionMappings"); // Special handling for const object. data::DatasetInfo* d = NULL; if (Archive::is_saving::value) d = const_cast(datasetInfo); ar & CreateNVP(d, "datasetInfo"); if (Archive::is_loading::value) { if (datasetInfo && ownsInfo) delete datasetInfo; datasetInfo = d; ownsInfo = true; ownsMappings = true; // We also own the mappings we loaded. // Clear the children. for (size_t i = 0; i < children.size(); ++i) delete children[i]; children.clear(); } ar & CreateNVP(majorityClass, "majorityClass"); ar & CreateNVP(majorityProbability, "majorityProbability"); // Depending on whether or not we have split yet, we may need to save // different things. 
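  // Before a split, the per-dimension split objects hold all of the training
  // state; after a split, only the chosen split's information and the children
  // matter. So the two branches below serialize different sets of members.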
if (splitDimension == size_t(-1)) { // We have not yet split. So we have to serialize the splits. ar & CreateNVP(numSamples, "numSamples"); ar & CreateNVP(numClasses, "numClasses"); ar & CreateNVP(maxSamples, "maxSamples"); ar & CreateNVP(successProbability, "successProbability"); // Serialize the splits, but not if we haven't seen any samples yet (in // which case we can just reinitialize). if (Archive::is_loading::value) { // Re-initialize all of the splits. numericSplits.clear(); categoricalSplits.clear(); for (size_t i = 0; i < datasetInfo->Dimensionality(); ++i) { if (datasetInfo->Type(i) == data::Datatype::categorical) categoricalSplits.push_back(CategoricalSplitType( datasetInfo->NumMappings(i), numClasses)); else numericSplits.push_back( NumericSplitType(numClasses)); } // Clear things we don't need. categoricalSplit = typename CategoricalSplitType:: SplitInfo(numClasses); numericSplit = typename NumericSplitType::SplitInfo(); } // There's no need to serialize if there's no information contained in the // splits. if (numSamples == 0) return; // Serialize numeric splits. for (size_t i = 0; i < numericSplits.size(); ++i) { std::ostringstream name; name << "numericSplit" << i; ar & CreateNVP(numericSplits[i], name.str()); } // Serialize categorical splits. for (size_t i = 0; i < categoricalSplits.size(); ++i) { std::ostringstream name; name << "categoricalSplit" << i; ar & CreateNVP(categoricalSplits[i], name.str()); } } else { // We have split, so we only need to save the split and the children. if (datasetInfo->Type(splitDimension) == data::Datatype::categorical) ar & CreateNVP(categoricalSplit, "categoricalSplit"); else ar & CreateNVP(numericSplit, "numericSplit"); // Serialize the children, because we have split. size_t numChildren; if (Archive::is_saving::value) numChildren = children.size(); ar & CreateNVP(numChildren, "numChildren"); if (Archive::is_loading::value) // If needed, allocate space. { children.resize(numChildren, NULL); for (size_t i = 0; i < numChildren; ++i) children[i] = new HoeffdingTree(data::DatasetInfo(0), 0); } for (size_t i = 0; i < numChildren; ++i) { std::ostringstream name; name << "child" << i; ar & data::CreateNVP(*children[i], name.str()); // The child doesn't actually own its own DatasetInfo. We do. The same // applies for the dimension mappings. children[i]->ownsInfo = false; children[i]->ownsMappings = false; } if (Archive::is_loading::value) { numericSplits.clear(); categoricalSplits.clear(); numSamples = 0; numClasses = 0; maxSamples = 0; successProbability = 0.0; } } } } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/hoeffding_tree_main.cpp000066400000000000000000000317271315013601400263070ustar00rootroot00000000000000/** * @file hoeffding_tree_main.cpp * @author Ryan Curtin * * A command-line executable that can build a streaming decision tree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include using namespace std; using namespace mlpack; using namespace mlpack::tree; using namespace mlpack::data; PROGRAM_INFO("Hoeffding trees", "This program implements Hoeffding trees, a form of streaming decision tree" " suited best for large (or streaming) datasets. 
This program supports " "both categorical and numeric data stored in the ARFF format. Given an " "input dataset, this program is able to train the tree with numerous " "training options, and save the model to a file. The program is also able " "to use a trained model or a model from file in order to predict classes " "for a given test set." "\n\n" "The training file and associated labels are specified with the " "--training_file and --labels_file options, respectively. The training " "file must be in ARFF format. The training may be performed in batch mode " "(like a typical decision tree algorithm) by specifying the --batch_mode " "option, but this may not be the best option for large datasets." "\n\n" "When a model is trained, it may be saved to a file with the " "--output_model_file (-M) option. A model may be loaded from file for " "further training or testing with the --input_model_file (-m) option." "\n\n" "A test file may be specified with the --test_file (-T) option, and if " "performance numbers are desired for that test set, labels may be specified" " with the --test_labels_file (-L) option. Predictions for each test point" " will be stored in the file specified by --predictions_file (-p) and " "probabilities for each predictions will be stored in the file specified by" " the --probabilities_file (-P) option."); PARAM_STRING_IN("training_file", "Training dataset file.", "t", ""); PARAM_STRING_IN("labels_file", "Labels for training dataset.", "l", ""); PARAM_DOUBLE_IN("confidence", "Confidence before splitting (between 0 and 1).", "c", 0.95); PARAM_INT_IN("max_samples", "Maximum number of samples before splitting.", "n", 5000); PARAM_INT_IN("min_samples", "Minimum number of samples before splitting.", "I", 100); PARAM_STRING_IN("input_model_file", "File to load trained tree from.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained tree to.", "M"); PARAM_STRING_IN("test_file", "File of testing data.", "T", ""); PARAM_STRING_IN("test_labels_file", "Labels of test data.", "L", ""); PARAM_STRING_OUT("predictions_file", "File to output label predictions for " "test data into.", "p"); PARAM_STRING_OUT("probabilities_file", "In addition to predicting labels, " "provide prediction probabilities in this file.", "P"); PARAM_STRING_IN("numeric_split_strategy", "The splitting strategy to use for " "numeric features: 'domingos' or 'binary'.", "N", "binary"); PARAM_FLAG("batch_mode", "If true, samples will be considered in batch instead " "of as a stream. This generally results in better trees but at the cost of" " memory usage and runtime.", "b"); PARAM_FLAG("info_gain", "If set, information gain is used instead of Gini " "impurity for calculating Hoeffding bounds.", "i"); PARAM_INT_IN("passes", "Number of passes to take over the dataset.", "s", 1); PARAM_INT_IN("bins", "If the 'domingos' split strategy is used, this specifies " "the number of bins for each numeric split.", "B", 10); PARAM_INT_IN("observations_before_binning", "If the 'domingos' split strategy " "is used, this specifies the number of samples observed before binning is " "performed.", "o", 100); // Helper function for once we have chosen a tree type. template void PerformActions(const typename TreeType::NumericSplit& numericSplit = typename TreeType::NumericSplit(0)); int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); // Check input parameters for validity. 
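// As a usage sketch (the file names here are hypothetical), a typical
// training run looks like
//   mlpack_hoeffding_tree -t train.arff -l labels.csv -M tree.xml
// and a later prediction run looks like
//   mlpack_hoeffding_tree -m tree.xml -T test.arff -p predictions.csv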
const string trainingFile = CLI::GetParam("training_file"); const string labelsFile = CLI::GetParam("labels_file"); const string inputModelFile = CLI::GetParam("input_model_file"); const string testFile = CLI::GetParam("test_file"); const string predictionsFile = CLI::GetParam("predictions_file"); const string probabilitiesFile = CLI::GetParam("probabilities_file"); const string numericSplitStrategy = CLI::GetParam("numeric_split_strategy"); if ((CLI::HasParam("predictions_file") || CLI::HasParam("probabilities_file")) && !CLI::HasParam("test_file")) Log::Fatal << "--test_file must be specified if --predictions_file or " << "--probabilities_file is specified." << endl; if (!CLI::HasParam("training_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "One of --training_file or --input_model_file must be " << "specified!" << endl; if (CLI::HasParam("training_file") && !CLI::HasParam("labels_file")) Log::Fatal << "If --training_file is specified, --labels_file must be " << "specified too!" << endl; if (!CLI::HasParam("training_file") && CLI::HasParam("batch_mode")) Log::Warn << "--batch_mode (-b) ignored; no training set provided." << endl; if (CLI::HasParam("passes") && CLI::HasParam("batch_mode")) Log::Warn << "--batch_mode (-b) ignored because --passes was specified." << endl; if (CLI::HasParam("info_gain")) { if (numericSplitStrategy == "domingos") { const size_t bins = (size_t) CLI::GetParam("bins"); const size_t observationsBeforeBinning = (size_t) CLI::GetParam("observations_before_binning"); HoeffdingDoubleNumericSplit ns(0, bins, observationsBeforeBinning); PerformActions>(ns); } else if (numericSplitStrategy == "binary") { PerformActions>(); } else { Log::Fatal << "Unrecognized numeric split strategy (" << numericSplitStrategy << ")! Must be 'domingos' or 'binary'." << endl; } } else { if (numericSplitStrategy == "domingos") { const size_t bins = (size_t) CLI::GetParam("bins"); const size_t observationsBeforeBinning = (size_t) CLI::GetParam("observations_before_binning"); HoeffdingDoubleNumericSplit ns(0, bins, observationsBeforeBinning); PerformActions>(ns); } else if (numericSplitStrategy == "binary") { PerformActions>(); } else { Log::Fatal << "Unrecognized numeric split strategy (" << numericSplitStrategy << ")! Must be 'domingos' or 'binary'." << endl; } } } template void PerformActions(const typename TreeType::NumericSplit& numericSplit) { // Load necessary parameters. const string trainingFile = CLI::GetParam("training_file"); const string labelsFile = CLI::GetParam("labels_file"); const double confidence = CLI::GetParam("confidence"); const size_t maxSamples = (size_t) CLI::GetParam("max_samples"); const size_t minSamples = (size_t) CLI::GetParam("min_samples"); const string inputModelFile = CLI::GetParam("input_model_file"); const string outputModelFile = CLI::GetParam("output_model_file"); const string testFile = CLI::GetParam("test_file"); const string predictionsFile = CLI::GetParam("predictions_file"); const string probabilitiesFile = CLI::GetParam("probabilities_file"); bool batchTraining = CLI::HasParam("batch_mode"); const size_t passes = (size_t) CLI::GetParam("passes"); if (passes > 1) batchTraining = false; // We already warned about this earlier. TreeType* tree = NULL; DatasetInfo datasetInfo; if (!CLI::HasParam("input_model_file")) { arma::mat trainingSet; data::Load(trainingFile, trainingSet, datasetInfo, true); for (size_t i = 0; i < trainingSet.n_rows; ++i) Log::Info << datasetInfo.NumMappings(i) << " mappings in dimension " << i << "." 
<< endl; arma::Col<size_t> labelsIn; data::Load(labelsFile, labelsIn, true, false); arma::Row<size_t> labels = labelsIn.t(); // Now create the decision tree. Timer::Start("tree_training"); if (passes > 1) Log::Info << "Taking " << passes << " passes over the dataset." << endl; tree = new TreeType(trainingSet, datasetInfo, labels, max(labels) + 1, batchTraining, confidence, maxSamples, 100, minSamples, typename TreeType::CategoricalSplit(0, 0), numericSplit); for (size_t i = 1; i < passes; ++i) tree->Train(trainingSet, labels, false); Timer::Stop("tree_training"); } else { tree = new TreeType(datasetInfo, 1, 1); data::Load(inputModelFile, "streamingDecisionTree", *tree, true); if (CLI::HasParam("training_file")) { arma::mat trainingSet; data::Load(trainingFile, trainingSet, datasetInfo, true); for (size_t i = 0; i < trainingSet.n_rows; ++i) Log::Info << datasetInfo.NumMappings(i) << " mappings in dimension " << i << "." << endl; arma::Col<size_t> labelsIn; data::Load(labelsFile, labelsIn, true, false); arma::Row<size_t> labels = labelsIn.t(); // Now train the decision tree. Timer::Start("tree_training"); if (passes > 1) { Log::Info << "Taking " << passes << " passes over the dataset." << endl; for (size_t i = 0; i < passes; ++i) tree->Train(trainingSet, labels, false); } else { tree->Train(trainingSet, labels, batchTraining); } Timer::Stop("tree_training"); } } if (CLI::HasParam("training_file")) { // Get training error. arma::mat trainingSet; data::Load(trainingFile, trainingSet, datasetInfo, true); arma::Row<size_t> predictions; tree->Classify(trainingSet, predictions); arma::Col<size_t> labelsIn; data::Load(labelsFile, labelsIn, true, false); arma::Row<size_t> labels = labelsIn.t(); size_t correct = 0; for (size_t i = 0; i < labels.n_elem; ++i) if (labels[i] == predictions[i]) ++correct; Log::Info << correct << " out of " << labels.n_elem << " correct " << "on training set (" << double(correct) / double(labels.n_elem) * 100.0 << ")." << endl; } // Get the number of nodes in the tree. std::queue<TreeType*> queue; queue.push(tree); size_t nodes = 0; while (!queue.empty()) { TreeType* node = queue.front(); queue.pop(); ++nodes; for (size_t i = 0; i < node->NumChildren(); ++i) queue.push(&node->Child(i)); } Log::Info << nodes << " nodes in the tree." << endl; // The tree is trained or loaded. Now do any testing, if needed. if (CLI::HasParam("test_file")) { arma::mat testSet; data::Load(testFile, testSet, datasetInfo, true); arma::Row<size_t> predictions; arma::rowvec probabilities; Timer::Start("tree_testing"); tree->Classify(testSet, predictions, probabilities); Timer::Stop("tree_testing"); if (CLI::HasParam("test_labels_file")) { string testLabelsFile = CLI::GetParam<string>("test_labels_file"); arma::Col<size_t> testLabelsIn; data::Load(testLabelsFile, testLabelsIn, true, false); arma::Row<size_t> testLabels = testLabelsIn.t(); size_t correct = 0; for (size_t i = 0; i < testLabels.n_elem; ++i) { if (predictions[i] == testLabels[i]) ++correct; } Log::Info << correct << " out of " << testLabels.n_elem << " correct " << "on test set (" << double(correct) / double(testLabels.n_elem) * 100.0 << ")." << endl; } if (CLI::HasParam("predictions_file")) data::Save(predictionsFile, predictions); if (CLI::HasParam("probabilities_file")) data::Save(probabilitiesFile, probabilities); } // Save the model to file, if requested. if (CLI::HasParam("output_model_file")) data::Save(outputModelFile, "streamingDecisionTree", *tree, true); // Clean up memory.
delete tree; } mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/information_gain.hpp000066400000000000000000000055651315013601400256600ustar00rootroot00000000000000/** * @file information_gain.hpp * @author Ryan Curtin * * An implementation of information gain, which can be used in place of Gini * impurity. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_INFORMATION_GAIN_HPP #define MLPACK_METHODS_HOEFFDING_TREES_INFORMATION_GAIN_HPP namespace mlpack { namespace tree { class InformationGain { public: /** * Given the sufficient statistics of a proposed split, calculate the * information gain if that split were to be used. The 'counts' matrix should * contain the number of points in each class in each column, so the size of * 'counts' is classes x children, where 'children' is the number of child * nodes in the proposed split. * * @param counts Matrix of sufficient statistics. */ static double Evaluate(const arma::Mat<size_t>& counts) { // Calculate the number of elements in the unsplit node and also in each // proposed child. size_t numElem = 0; arma::vec splitCounts(counts.n_cols); for (size_t i = 0; i < counts.n_cols; ++i) { splitCounts[i] = arma::accu(counts.col(i)); numElem += splitCounts[i]; } // Corner case: if there are no elements, the gain is zero. if (numElem == 0) return 0.0; arma::Col<size_t> classCounts = arma::sum(counts, 1); // Calculate the gain of the unsplit node. double gain = 0.0; for (size_t i = 0; i < classCounts.n_elem; ++i) { const double f = ((double) classCounts[i] / (double) numElem); if (f > 0.0) gain -= f * std::log2(f); } // Now calculate the impurity of the split nodes and subtract them from the // overall gain. for (size_t i = 0; i < counts.n_cols; ++i) { if (splitCounts[i] > 0) { double splitGain = 0.0; for (size_t j = 0; j < counts.n_rows; ++j) { const double f = ((double) counts(j, i) / (double) splitCounts[i]); if (f > 0.0) splitGain += f * std::log2(f); } gain += ((double) splitCounts[i] / (double) numElem) * splitGain; } } return gain; } /** * Return the range of the information gain for the given number of classes. * (That is, the difference between the maximum possible value and the minimum * possible value.) */ static double Range(const size_t numClasses) { // The worst possible case is a split that provides no gain (0). The best // possible case is an evenly-distributed unsplit node, with entropy // -n * (1/n * log2(1/n)) = log2(n), split into completely pure children // (entropy 0). So, the range is log2(n). return std::log2(numClasses); } }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/numeric_split_info.hpp000066400000000000000000000026311315013601400262200ustar00rootroot00000000000000/** * @file numeric_split_info.hpp * @author Ryan Curtin * * After a numeric split has been made, this holds information on the split. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information.
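 *
 * As a sketch of CalculateDirection() below: a value v is assigned to the
 * first bin index i with v <= splitPoints[i], or to the last bin (index
 * splitPoints.n_elem) if v exceeds every split point.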
*/ #ifndef MLPACK_METHODS_HOEFFDING_TREES_NUMERIC_SPLIT_INFO_HPP #define MLPACK_METHODS_HOEFFDING_TREES_NUMERIC_SPLIT_INFO_HPP #include namespace mlpack { namespace tree { template class NumericSplitInfo { public: NumericSplitInfo() { /* Nothing to do. */ } NumericSplitInfo(const arma::Col& splitPoints) : splitPoints(splitPoints) { /* Nothing to do. */ } template size_t CalculateDirection(const eT& value) const { // What bin does the point fall into? size_t bin = 0; while (bin < splitPoints.n_elem && value > splitPoints[bin]) ++bin; return bin; } //! Serialize the split (save/load the split points). template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(splitPoints, "splitPoints"); } private: arma::Col splitPoints; }; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/hoeffding_trees/typedef.hpp000066400000000000000000000012631315013601400237700ustar00rootroot00000000000000/** * @file typedef.hpp * @author Ryan Curtin * * Useful typedefs. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_HOEFFDING_TREES_TYPEDEF_HPP #define MLPACK_METHODS_HOEFFDING_TREES_TYPEDEF_HPP #include "streaming_decision_tree.hpp" #include "hoeffding_tree.hpp" namespace mlpack { namespace tree { typedef StreamingDecisionTree> HoeffdingTreeType; } // namespace tree } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kernel_pca/000077500000000000000000000000001315013601400205655ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/kernel_pca/CMakeLists.txt000066400000000000000000000010131315013601400233200ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES kernel_pca.hpp kernel_pca_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_subdirectory(kernel_rules) add_cli_executable(kernel_pca) mlpack-2.2.5/src/mlpack/methods/kernel_pca/kernel_pca.hpp000066400000000000000000000114101315013601400233760ustar00rootroot00000000000000/** * @file kernel_pca.hpp * @author Ajinkya Kale * @author Marcus Edel * * Defines the KernelPCA class to perform Kernel Principal Components Analysis * on the specified data set. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KERNEL_PCA_KERNEL_PCA_HPP #define MLPACK_METHODS_KERNEL_PCA_KERNEL_PCA_HPP #include #include namespace mlpack { namespace kpca { /** * This class performs kernel principal components analysis (Kernel PCA), for a * given kernel. This is a standard machine learning technique and is * well-documented on the Internet and in standard texts. It is often used as a * dimensionality reduction technique, and can also be useful in mapping * linearly inseparable classes of points to different spaces where they are * linearly separable. 
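 *
 * A minimal usage sketch (the bandwidth of 1.0 is an arbitrary example value,
 * not a recommendation):
 *
 * @code
 * arma::mat dataset; // Column-major: each column is one point.
 * KernelPCA<kernel::GaussianKernel> kpca(kernel::GaussianKernel(1.0));
 * kpca.Apply(dataset, 2); // Reduce to two dimensions, in-place.
 * @endcode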
* * The performance of the method is highly dependent on the kernel choice. * There are numerous available kernels in the mlpack::kernel namespace (see * files in mlpack/core/kernels/) and it is easy to write your own; see other * implementations for examples. */ template<typename KernelType, typename KernelRule = NaiveKernelRule<KernelType>> class KernelPCA { public: /** * Construct the KernelPCA object, optionally passing a kernel. Optionally, * the transformed data can be centered about the origin; to do this, pass * 'true' for centerTransformedData. This will take slightly longer (but not * much). * * @param kernel Kernel to be used for computation. * @param centerTransformedData Center transformed data. */ KernelPCA(const KernelType kernel = KernelType(), const bool centerTransformedData = false); /** * Apply Kernel Principal Components Analysis to the provided data set. * * @param data Data matrix. * @param transformedData Matrix to output results into. * @param eigval KPCA eigenvalues will be written to this vector. * @param eigvec KPCA eigenvectors will be written to this matrix. * @param newDimension New dimension for the dataset. */ void Apply(const arma::mat& data, arma::mat& transformedData, arma::vec& eigval, arma::mat& eigvec, const size_t newDimension); /** * Apply Kernel Principal Components Analysis to the provided data set. * * @param data Data matrix. * @param transformedData Matrix to output results into. * @param eigval KPCA eigenvalues will be written to this vector. * @param eigvec KPCA eigenvectors will be written to this matrix. */ void Apply(const arma::mat& data, arma::mat& transformedData, arma::vec& eigval, arma::mat& eigvec); /** * Apply Kernel Principal Component Analysis to the provided data set. * * @param data Data matrix. * @param transformedData Matrix to output results into. * @param eigval KPCA eigenvalues will be written to this vector. */ void Apply(const arma::mat& data, arma::mat& transformedData, arma::vec& eigval); /** * Apply dimensionality reduction using Kernel Principal Component Analysis * to the provided data set. The data matrix will be modified in-place. Note * that the dimension can be larger than the existing dimension because KPCA * works on the kernel matrix, not the covariance matrix. This means the new * dimension can be as large as the number of points (columns) in the dataset. * Note that if you specify newDimension to be larger than the current * dimension of the data (the number of rows), then it's not really * "dimensionality reduction"... * * @param data Data matrix. * @param newDimension New dimension for the dataset. */ void Apply(arma::mat& data, const size_t newDimension); //! Get the kernel. const KernelType& Kernel() const { return kernel; } //! Modify the kernel. KernelType& Kernel() { return kernel; } //! Return whether or not the transformed data is centered. bool CenterTransformedData() const { return centerTransformedData; } //! Modify whether or not the transformed data is centered. bool& CenterTransformedData() { return centerTransformedData; } private: //! The instantiated kernel. KernelType kernel; //! If true, the transformed data will be centered about the origin when //! Apply() is run. bool centerTransformedData; }; // class KernelPCA } // namespace kpca } // namespace mlpack // Include implementation.
#include "kernel_pca_impl.hpp" #endif // MLPACK_METHODS_KERNEL_PCA_KERNEL_PCA_HPP mlpack-2.2.5/src/mlpack/methods/kernel_pca/kernel_pca_impl.hpp000066400000000000000000000060571315013601400244320ustar00rootroot00000000000000/** * @file kernel_pca_impl.hpp * @author Ajinkya Kale * @author Marcus Edel * * Implementation of Kernel PCA class to perform Kernel Principal Components * Analysis on the specified data set. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KERNEL_PCA_KERNEL_PCA_IMPL_HPP #define MLPACK_METHODS_KERNEL_PCA_KERNEL_PCA_IMPL_HPP // In case it hasn't already been included. #include "kernel_pca.hpp" namespace mlpack { namespace kpca { template KernelPCA::KernelPCA(const KernelType kernel, const bool centerTransformedData) : kernel(kernel), centerTransformedData(centerTransformedData) { } //! Apply Kernel Principal Component Analysis to the provided data set. template void KernelPCA::Apply(const arma::mat& data, arma::mat& transformedData, arma::vec& eigval, arma::mat& eigvec, const size_t newDimension) { KernelRule::ApplyKernelMatrix(data, transformedData, eigval, eigvec, newDimension, kernel); // Center the transformed data, if the user asked for it. if (centerTransformedData) { arma::colvec transformedDataMean = arma::mean(transformedData, 1); transformedData = transformedData - (transformedDataMean * arma::ones(transformedData.n_cols)); } } //! Apply Kernel Principal Component Analysis to the provided data set. template void KernelPCA::Apply(const arma::mat& data, arma::mat& transformedData, arma::vec& eigval, arma::mat& eigvec) { Apply(data, transformedData, eigval, eigvec, data.n_cols); } //! Apply Kernel Principal Component Analysis to the provided data set. template void KernelPCA::Apply(const arma::mat& data, arma::mat& transformedData, arma::vec& eigVal) { arma::mat coeffs; Apply(data, transformedData, eigVal, coeffs, data.n_cols); } //! Use KPCA for dimensionality reduction. template void KernelPCA::Apply(arma::mat& data, const size_t newDimension) { arma::mat coeffs; arma::vec eigVal; Apply(data, data, eigVal, coeffs, newDimension); if (newDimension < coeffs.n_rows && newDimension > 0) data.shed_rows(newDimension, data.n_rows - 1); } } // namespace mlpack } // namespace kpca #endif mlpack-2.2.5/src/mlpack/methods/kernel_pca/kernel_pca_main.cpp000066400000000000000000000231101315013601400243750ustar00rootroot00000000000000/** * @file kernel_pca_main.cpp * @author Ajinkya Kale * * Executable for Kernel PCA. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "kernel_pca.hpp" using namespace mlpack; using namespace mlpack::kpca; using namespace mlpack::kernel; using namespace std; using namespace arma; PROGRAM_INFO("Kernel Principal Components Analysis", "This program performs Kernel Principal Components Analysis (KPCA) on the " "specified dataset with the specified kernel. 
This will transform the " "data onto the kernel principal components, and optionally reduce the " "dimensionality by ignoring the kernel principal components with the " "smallest eigenvalues." "\n\n" "For the case where a linear kernel is used, this reduces to regular " "PCA." "\n\n" "For example, the following will perform KPCA on the 'input.csv' file using" " the gaussian kernel and store the transformed date in the " "'transformed.csv' file." "\n\n" "$ kernel_pca -i input.csv -k gaussian -o transformed.csv" "\n\n" "The kernels that are supported are listed below:" "\n\n" " * 'linear': the standard linear dot product (same as normal PCA):\n" " K(x, y) = x^T y\n" "\n" " * 'gaussian': a Gaussian kernel; requires bandwidth:\n" " K(x, y) = exp(-(|| x - y || ^ 2) / (2 * (bandwidth ^ 2)))\n" "\n" " * 'polynomial': polynomial kernel; requires offset and degree:\n" " K(x, y) = (x^T y + offset) ^ degree\n" "\n" " * 'hyptan': hyperbolic tangent kernel; requires scale and offset:\n" " K(x, y) = tanh(scale * (x^T y) + offset)\n" "\n" " * 'laplacian': Laplacian kernel; requires bandwidth:\n" " K(x, y) = exp(-(|| x - y ||) / bandwidth)\n" "\n" " * 'epanechnikov': Epanechnikov kernel; requires bandwidth:\n" " K(x, y) = max(0, 1 - || x - y ||^2 / bandwidth^2)\n" "\n" " * 'cosine': cosine distance:\n" " K(x, y) = 1 - (x^T y) / (|| x || * || y ||)\n" "\n" "The parameters for each of the kernels should be specified with the " "options --bandwidth, --kernel_scale, --offset, or --degree (or a " "combination of those options)." "\n\n" "Optionally, the nystr\u00F6m method (\"Using the Nystroem method to speed up" " kernel machines\", 2001) can be used to calculate the kernel matrix by " "specifying the --nystroem_method (-n) option. This approach works by using" " a subset of the data as basis to reconstruct the kernel matrix; to specify" " the sampling scheme, the --sampling parameter is used, the sampling scheme" " for the nystr\u00F6m method can be chosen from the following list: kmeans," " random, ordered."); PARAM_STRING_IN_REQ("input_file", "Input dataset to perform KPCA on.", "i"); PARAM_STRING_OUT("output_file", "File to save modified dataset to.", "o"); PARAM_STRING_IN_REQ("kernel", "The kernel to use; see the above documentation " "for the list of usable kernels.", "k"); PARAM_INT_IN("new_dimensionality", "If not 0, reduce the dimensionality of " "the output dataset by ignoring the dimensions with the smallest " "eigenvalues.", "d", 0); PARAM_FLAG("center", "If set, the transformed data will be centered about the " "origin.", "c"); PARAM_FLAG("nystroem_method", "If set, the nystroem method will be used.", "n"); PARAM_STRING_IN("sampling", "Sampling scheme to use for the nystroem method: " "'kmeans', 'random', 'ordered'", "s", "kmeans"); PARAM_DOUBLE_IN("kernel_scale", "Scale, for 'hyptan' kernel.", "S", 1.0); PARAM_DOUBLE_IN("offset", "Offset, for 'hyptan' and 'polynomial' kernels.", "O", 0.0); PARAM_DOUBLE_IN("bandwidth", "Bandwidth, for 'gaussian' and 'laplacian' " "kernels.", "b", 1.0); PARAM_DOUBLE_IN("degree", "Degree of polynomial, for 'polynomial' kernel.", "D", 1.0); //! Run RunKPCA on the specified dataset for the given kernel type. template void RunKPCA(arma::mat& dataset, const bool centerTransformedData, const bool nystroem, const size_t newDim, const string& sampling, KernelType& kernel) { if (nystroem) { // Make sure the sampling scheme is valid. 
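// (The point-selection policy is a compile-time template parameter of
// NystroemKernelRule, so each valid string value gets its own explicitly
// instantiated branch below.)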
if (sampling == "kmeans") { KernelPCA > >kpca; kpca.Apply(dataset, newDim); } else if (sampling == "random") { KernelPCA > kpca; kpca.Apply(dataset, newDim); } else if (sampling == "ordered") { KernelPCA > kpca; kpca.Apply(dataset, newDim); } else { // Invalid sampling scheme. Log::Fatal << "Invalid sampling scheme ('" << sampling << "'); valid " << "choices are 'kmeans', 'random' and 'ordered'" << endl; } } else { KernelPCA kpca(kernel, centerTransformedData); kpca.Apply(dataset, newDim); } } int main(int argc, char** argv) { // Parse command line options. CLI::ParseCommandLine(argc, argv); if (!CLI::HasParam("output_file")) Log::Warn << "--output_file is not specified; no output will be saved!" << endl; // Load input dataset. mat dataset; const string inputFile = CLI::GetParam("input_file"); data::Load(inputFile, dataset, true); // Fatal on failure. // Get the new dimensionality, if it is necessary. size_t newDim = dataset.n_rows; if (CLI::GetParam("new_dimensionality") != 0) { newDim = CLI::GetParam("new_dimensionality"); if (newDim > dataset.n_rows) { Log::Fatal << "New dimensionality (" << newDim << ") cannot be greater than existing dimensionality (" << dataset.n_rows << ")!" << endl; } } // Get the kernel type and make sure it is valid. const string kernelType = CLI::GetParam("kernel"); const bool centerTransformedData = CLI::HasParam("center"); const bool nystroem = CLI::HasParam("nystroem_method"); const string sampling = CLI::GetParam("sampling"); if (kernelType == "linear") { LinearKernel kernel; RunKPCA(dataset, centerTransformedData, nystroem, newDim, sampling, kernel); } else if (kernelType == "gaussian") { const double bandwidth = CLI::GetParam("bandwidth"); GaussianKernel kernel(bandwidth); RunKPCA(dataset, centerTransformedData, nystroem, newDim, sampling, kernel); } else if (kernelType == "polynomial") { const double degree = CLI::GetParam("degree"); const double offset = CLI::GetParam("offset"); PolynomialKernel kernel(degree, offset); RunKPCA(dataset, centerTransformedData, nystroem, newDim, sampling, kernel); } else if (kernelType == "hyptan") { const double scale = CLI::GetParam("kernel_scale"); const double offset = CLI::GetParam("offset"); HyperbolicTangentKernel kernel(scale, offset); RunKPCA(dataset, centerTransformedData, nystroem, newDim, sampling, kernel); } else if (kernelType == "laplacian") { const double bandwidth = CLI::GetParam("bandwidth"); LaplacianKernel kernel(bandwidth); RunKPCA(dataset, centerTransformedData, nystroem, newDim, sampling, kernel); } else if (kernelType == "epanechnikov") { const double bandwidth = CLI::GetParam("bandwidth"); EpanechnikovKernel kernel(bandwidth); RunKPCA(dataset, centerTransformedData, nystroem, newDim, sampling, kernel); } else if (kernelType == "cosine") { CosineDistance kernel; RunKPCA(dataset, centerTransformedData, nystroem, newDim, sampling, kernel); } else { // Invalid kernel type. Log::Fatal << "Invalid kernel type ('" << kernelType << "'); valid choices " << "are 'linear', 'gaussian', 'polynomial', 'hyptan', 'laplacian', and " << "'cosine'." << endl; } // Save the output dataset. const string outputFile = CLI::GetParam("output_file"); if (outputFile != "") data::Save(outputFile, dataset, true); // Fatal on failure. 
} mlpack-2.2.5/src/mlpack/methods/kernel_pca/kernel_rules/000077500000000000000000000000001315013601400232575ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/kernel_pca/kernel_rules/CMakeLists.txt000066400000000000000000000007151315013601400260220ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES nystroem_method.hpp naive_method.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/kernel_pca/kernel_rules/naive_method.hpp000066400000000000000000000065371315013601400264410ustar00rootroot00000000000000/** * @file naive_method.hpp * @author Ajinkya Kale * * Use the naive method to construct the kernel matrix. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KERNEL_PCA_NAIVE_METHOD_HPP #define MLPACK_METHODS_KERNEL_PCA_NAIVE_METHOD_HPP #include namespace mlpack { namespace kpca { template<typename KernelType> class NaiveKernelRule { public: /** * Construct the exact kernel matrix. * * @param data Input data points. * @param transformedData Matrix to output results into. * @param eigval KPCA eigenvalues will be written to this vector. * @param eigvec KPCA eigenvectors will be written to this matrix. * @param rank Rank to be used for matrix approximation. * @param kernel Kernel to be used for computation. */ static void ApplyKernelMatrix(const arma::mat& data, arma::mat& transformedData, arma::vec& eigval, arma::mat& eigvec, const size_t /* unused */, KernelType kernel = KernelType()) { // Construct the kernel matrix. arma::mat kernelMatrix; // Resize the kernel matrix to the right size. kernelMatrix.set_size(data.n_cols, data.n_cols); // Note that we only need to calculate the upper triangular part of the // kernel matrix, since it is symmetric. This helps minimize the number of // kernel evaluations. for (size_t i = 0; i < data.n_cols; ++i) { for (size_t j = i; j < data.n_cols; ++j) { // Evaluate the kernel on these two points. kernelMatrix(i, j) = kernel.Evaluate(data.unsafe_col(i), data.unsafe_col(j)); } } // Copy to the lower triangular part of the matrix. for (size_t i = 1; i < data.n_cols; ++i) for (size_t j = 0; j < i; ++j) kernelMatrix(i, j) = kernelMatrix(j, i); // For PCA the data has to be centered. Even if the input data is centered, // it is not guaranteed that the data, when mapped to the kernel space, is // also centered. Since we actually never work in the feature space we // cannot center the data there. So, we perform a "pseudo-centering" using // the kernel matrix. arma::rowvec rowMean = arma::sum(kernelMatrix, 0) / kernelMatrix.n_cols; kernelMatrix.each_col() -= arma::sum(kernelMatrix, 1) / kernelMatrix.n_cols; kernelMatrix.each_row() -= rowMean; kernelMatrix += arma::sum(rowMean) / kernelMatrix.n_cols; // Eigendecompose the centered kernel matrix. arma::eig_sym(eigval, eigvec, kernelMatrix); // Swap the eigenvalues since they are ordered backwards (arma::eig_sym() // returns them in ascending order, and we need largest to smallest). for (size_t i = 0; i < floor(eigval.n_elem / 2.0); ++i) eigval.swap_rows(i, (eigval.n_elem - 1) - i); // Flip the coefficients to produce the same effect. eigvec = arma::fliplr(eigvec); transformedData = eigvec.t() * kernelMatrix; transformedData.each_col() /= arma::sqrt(eigval); } }; } // namespace kpca } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kernel_pca/kernel_rules/nystroem_method.hpp000066400000000000000000000057271315013601400272170ustar00rootroot00000000000000/** * @file nystroem_method.hpp * @author Marcus Edel * * Use the Nystroem method for approximating a kernel matrix. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KERNEL_PCA_NYSTROEM_METHOD_HPP #define MLPACK_METHODS_KERNEL_PCA_NYSTROEM_METHOD_HPP #include #include #include namespace mlpack { namespace kpca { template< typename KernelType, typename PointSelectionPolicy = kernel::KMeansSelection<> > class NystroemKernelRule { public: /** * Construct the kernel matrix approximation using the Nystroem method. * * @param data Input data points. * @param transformedData Matrix to output results into. * @param eigval KPCA eigenvalues will be written to this vector. * @param eigvec KPCA eigenvectors will be written to this matrix. * @param rank Rank to be used for matrix approximation. * @param kernel Kernel to be used for computation. */ static void ApplyKernelMatrix(const arma::mat& data, arma::mat& transformedData, arma::vec& eigval, arma::mat& eigvec, const size_t rank, KernelType kernel = KernelType()) { arma::mat G, v; kernel::NystroemMethod<KernelType, PointSelectionPolicy> nm(data, kernel, rank); nm.Apply(G); transformedData = G.t() * G; // Center the reconstructed approximation. math::Center(transformedData, transformedData); // For PCA the data has to be centered. Even if the input data is centered, // it is not guaranteed that the data, when mapped to the kernel space, is // also centered. Since we actually never work in the feature space we // cannot center the data there. So, we perform a "pseudo-centering" using // the kernel matrix. arma::colvec colMean = arma::sum(G, 1) / G.n_rows; G.each_row() -= arma::sum(G, 0) / G.n_rows; G.each_col() -= colMean; G += arma::sum(colMean) / G.n_rows; // Eigendecompose the centered kernel matrix. arma::eig_sym(eigval, eigvec, transformedData); // Swap the eigenvalues since they are ordered backwards (we need largest // to smallest). for (size_t i = 0; i < floor(eigval.n_elem / 2.0); ++i) eigval.swap_rows(i, (eigval.n_elem - 1) - i); // Flip the coefficients to produce the same effect. eigvec = arma::fliplr(eigvec); transformedData = eigvec.t() * G.t(); } }; } // namespace kpca } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/000077500000000000000000000000001315013601400177405ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/kmeans/CMakeLists.txt000066400000000000000000000021621315013601400225010ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack.
set(SOURCES allow_empty_clusters.hpp dual_tree_kmeans.hpp dual_tree_kmeans_impl.hpp dual_tree_kmeans_rules.hpp dual_tree_kmeans_rules_impl.hpp dual_tree_kmeans_statistic.hpp elkan_kmeans.hpp elkan_kmeans_impl.hpp hamerly_kmeans.hpp hamerly_kmeans_impl.hpp kill_empty_clusters.hpp kmeans.hpp kmeans_impl.hpp max_variance_new_cluster.hpp max_variance_new_cluster_impl.hpp naive_kmeans.hpp naive_kmeans_impl.hpp pelleg_moore_kmeans.hpp pelleg_moore_kmeans_impl.hpp pelleg_moore_kmeans_rules.hpp pelleg_moore_kmeans_rules_impl.hpp pelleg_moore_kmeans_statistic.hpp random_partition.hpp refined_start.hpp refined_start_impl.hpp sample_initialization.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(kmeans) mlpack-2.2.5/src/mlpack/methods/kmeans/allow_empty_clusters.hpp000066400000000000000000000044261315013601400247370ustar00rootroot00000000000000/** * @file allow_empty_clusters.hpp * @author Ryan Curtin * * This very simple policy is used when K-Means is allowed to return empty * clusters. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_ALLOW_EMPTY_CLUSTERS_HPP #define MLPACK_METHODS_KMEANS_ALLOW_EMPTY_CLUSTERS_HPP #include namespace mlpack { namespace kmeans { /** * Policy which allows K-Means to create empty clusters without any error being * reported. */ class AllowEmptyClusters { public: //! Default constructor required by EmptyClusterPolicy policy. AllowEmptyClusters() { } /** * This function allows empty clusters to persist simply by leaving the empty * cluster in its last position. * * @tparam MatType Type of data (arma::mat or arma::spmat). * @param data Dataset on which clustering is being performed. * @param emptyCluster Index of cluster which is empty. * @param oldCentroids Centroids of each cluster (one per column) at the start * of the iteration. * @param newCentroids Centroids of each cluster (one per column) at the end * of the iteration. * @param clusterCounts Number of points in each cluster. * @param assignments Cluster assignments of each point. * @param iteration Number of iteration. * * @return Number of points changed (0). */ template static inline force_inline size_t EmptyCluster( const MatType& /* data */, const size_t emptyCluster, const arma::mat& oldCentroids, arma::mat& newCentroids, arma::Col& /* clusterCounts */, MetricType& /* metric */, const size_t /* iteration */) { // Take the last iteration's centroid. newCentroids.col(emptyCluster) = oldCentroids.col(emptyCluster); return 0; // No points were changed. } //! Serialize the empty cluster policy (nothing to do). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/dual_tree_kmeans.hpp000066400000000000000000000144411315013601400237570ustar00rootroot00000000000000/** * @file dtnn_kmeans.hpp * @author Ryan Curtin * * An implementation of a Lloyd iteration which uses dual-tree nearest neighbor * search as a black box. 
The conditions under which this will perform best are * probably limited to the case where k is close to the number of points in the * dataset, and the number of iterations of the k-means algorithm will be few. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_DUAL_TREE_KMEANS_HPP #define MLPACK_METHODS_KMEANS_DUAL_TREE_KMEANS_HPP #include #include #include #include "dual_tree_kmeans_statistic.hpp" namespace mlpack { namespace kmeans { /** * An algorithm for an exact Lloyd iteration which simply uses dual-tree * nearest-neighbor search to find the nearest centroid for each point in the * dataset. The conditions under which this will perform best are probably * limited to the case where k is close to the number of points in the dataset, * and the number of iterations of the k-means algorithm will be few. */ template< typename MetricType, typename MatType, template class TreeType = tree::KDTree> class DualTreeKMeans { public: //! Convenience typedef. typedef TreeType Tree; template using NNSTreeType = TreeType; /** * Construct the DualTreeKMeans object, which will construct a tree on the * points. */ DualTreeKMeans(const MatType& dataset, MetricType& metric); /** * Delete the tree constructed by the DualTreeKMeans object. */ ~DualTreeKMeans(); /** * Run a single iteration of the dual-tree nearest neighbor algorithm for * k-means, updating the given centroids into the newCentroids matrix. * * @param centroids Current cluster centroids. * @param newCentroids New cluster centroids. * @param counts Current counts, to be overwritten with new counts. */ double Iterate(const arma::mat& centroids, arma::mat& newCentroids, arma::Col& counts); //! Return the number of distance calculations. size_t DistanceCalculations() const { return distanceCalculations; } //! Modify the number of distance calculations. size_t& DistanceCalculations() { return distanceCalculations; } private: //! The original dataset reference. const MatType& datasetOrig; // Maybe not necessary. //! The tree built on the points. Tree* tree; //! The dataset we are using. const MatType& dataset; //! The metric. MetricType metric; //! Track distance calculations. size_t distanceCalculations; //! Track iteration number. size_t iteration; //! Upper bounds on nearest centroid. arma::vec upperBounds; //! Lower bounds on second closest cluster distance for each point. arma::vec lowerBounds; //! Indicator of whether or not the point is pruned. std::vector prunedPoints; arma::Row assignments; std::vector visited; // Was the point visited this iteration? arma::mat lastIterationCentroids; // For sanity checks. arma::vec clusterDistances; // The amount the clusters moved last iteration. arma::mat interclusterDistances; // Static storage for intercluster distances. //! Update the bounds in the tree before the next iteration. //! centroids is the current (not yet searched) centroids. void UpdateTree(Tree& node, const arma::mat& centroids, const double parentUpperBound = 0.0, const double adjustedParentUpperBound = DBL_MAX, const double parentLowerBound = DBL_MAX, const double adjustedParentLowerBound = 0.0); //! Extract the centroids of the clusters. 
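//! (Contract, as inferred from the implementation: newCentroids accumulates
//! unnormalized per-cluster coordinate sums and newCounts the matching point
//! counts; Iterate() later divides the sums by the counts to get the means.)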
void ExtractCentroids(Tree& node, arma::mat& newCentroids, arma::Col& newCounts, const arma::mat& centroids); void CoalesceTree(Tree& node, const size_t child = 0); void DecoalesceTree(Tree& node); }; //! Utility function for hiding children. This actually does something, and is //! called if the tree is not a binary tree. template void HideChild(TreeType& node, const size_t child, const typename boost::disable_if_c< tree::TreeTraits::BinaryTree>::type* junk = 0); //! Utility function for hiding children. This is called when the tree is a //! binary tree, and does nothing, because we don't hide binary children in this //! way. template void HideChild(TreeType& node, const size_t child, const typename boost::enable_if_c< tree::TreeTraits::BinaryTree>::type* junk = 0); //! Utility function for restoring children to a non-binary tree. template void RestoreChildren(TreeType& node, const typename boost::disable_if_c::BinaryTree>::type* junk = 0); //! Utility function for restoring children to a binary tree. template void RestoreChildren(TreeType& node, const typename boost::enable_if_c::BinaryTree>::type* junk = 0); //! A template typedef for the DualTreeKMeans algorithm with the default tree //! type (a kd-tree). template using DefaultDualTreeKMeans = DualTreeKMeans; //! A template typedef for the DualTreeKMeans algorithm with the cover tree //! type. template using CoverTreeDualTreeKMeans = DualTreeKMeans; } // namespace kmeans } // namespace mlpack #include "dual_tree_kmeans_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/kmeans/dual_tree_kmeans_impl.hpp000066400000000000000000000555061315013601400250070ustar00rootroot00000000000000/** * @file dtnn_kmeans_impl.hpp * @author Ryan Curtin * * An implementation of a Lloyd iteration which uses dual-tree nearest neighbor * search as a black box. The conditions under which this will perform best are * probably limited to the case where k is close to the number of points in the * dataset, and the number of iterations of the k-means algorithm will be few. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_DTNN_KMEANS_IMPL_HPP #define MLPACK_METHODS_KMEANS_DTNN_KMEANS_IMPL_HPP // In case it hasn't been included yet. #include "dual_tree_kmeans.hpp" #include "dual_tree_kmeans_rules.hpp" namespace mlpack { namespace kmeans { //! Call the tree constructor that does mapping. template TreeType* BuildTree( const typename TreeType::Mat& dataset, std::vector& oldFromNew, const typename boost::enable_if_c< tree::TreeTraits::RearrangesDataset == true, TreeType* >::type = 0) { // This is a hack. I know this will be BinarySpaceTree, so force a leaf size // of two. return new TreeType(dataset, oldFromNew, 1); } //! Call the tree constructor that does not do mapping. 
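//! (Selection between the two BuildTree() overloads below is done at compile
//! time with boost::enable_if_c/disable_if_c on
//! tree::TreeTraits<TreeType>::RearrangesDataset, so the mapping version is
//! only instantiated for trees that reorder their points.)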
template TreeType* BuildTree( const typename TreeType::Mat& dataset, const std::vector& /* oldFromNew */, const typename boost::enable_if_c< tree::TreeTraits::RearrangesDataset == false, TreeType* >::type = 0) { return new TreeType(dataset); } template class TreeType> DualTreeKMeans::DualTreeKMeans( const MatType& dataset, MetricType& metric) : datasetOrig(dataset), tree(new Tree(const_cast(dataset))), dataset(tree->Dataset()), metric(metric), distanceCalculations(0), iteration(0), upperBounds(dataset.n_cols), lowerBounds(dataset.n_cols), prunedPoints(dataset.n_cols, false), // Fill with false. assignments(dataset.n_cols), visited(dataset.n_cols, false) // Fill with false. { for (size_t i = 0; i < dataset.n_cols; ++i) { prunedPoints[i] = false; visited[i] = false; } assignments.fill(size_t(-1)); upperBounds.fill(DBL_MAX); lowerBounds.fill(DBL_MAX); } template class TreeType> DualTreeKMeans::~DualTreeKMeans() { if (tree) delete tree; } // Run a single iteration. template class TreeType> double DualTreeKMeans::Iterate( const arma::mat& centroids, arma::mat& newCentroids, arma::Col& counts) { // Build a tree on the centroids. This will make a copy if necessary, which // is unfortunate, but I don't see a reasonable way around it. std::vector oldFromNewCentroids; Tree* centroidTree = BuildTree(centroids, oldFromNewCentroids); // Find the nearest neighbors of each of the clusters. We have to make our // own TreeType, which is a little bit abuse, but we know for sure the // TreeStatType we have will work. neighbor::NeighborSearch nns(std::move(*centroidTree)); // Reset information in the tree, if we need to. if (iteration > 0) { Timer::Start("knn"); // If the tree maps points, we need an intermediate result matrix. arma::mat* interclusterDistancesTemp = (tree::TreeTraits::RearrangesDataset) ? new arma::mat(1, centroids.n_elem) : &interclusterDistances; arma::Mat closestClusters; // We don't actually care about these. nns.Search(1, closestClusters, *interclusterDistancesTemp); distanceCalculations += nns.BaseCases() + nns.Scores(); // We need to do the unmapping ourselves, if the tree does mapping. if (tree::TreeTraits::RearrangesDataset) { for (size_t i = 0; i < interclusterDistances.n_elem; ++i) interclusterDistances[oldFromNewCentroids[i]] = (*interclusterDistancesTemp)[i]; delete interclusterDistancesTemp; } Timer::Stop("knn"); UpdateTree(*tree, centroids); for (size_t i = 0; i < dataset.n_cols; ++i) visited[i] = false; } else { // Not initialized yet. clusterDistances.set_size(centroids.n_cols + 1); interclusterDistances.set_size(1, centroids.n_cols); } // We won't use the KNN class here because we have our own set of rules. lastIterationCentroids = centroids; typedef DualTreeKMeansRules RuleType; RuleType rules(nns.ReferenceTree().Dataset(), dataset, assignments, upperBounds, lowerBounds, metric, prunedPoints, oldFromNewCentroids, visited); typename Tree::template BreadthFirstDualTreeTraverser traverser(rules); Timer::Start("tree_mod"); CoalesceTree(*tree); Timer::Stop("tree_mod"); // Set the number of pruned centroids in the root to 0. tree->Stat().Pruned() = 0; traverser.Traverse(*tree, nns.ReferenceTree()); distanceCalculations += rules.BaseCases() + rules.Scores(); Timer::Start("tree_mod"); DecoalesceTree(*tree); Timer::Stop("tree_mod"); // Now we need to extract the clusters. 
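// ExtractCentroids() fills newCentroids with per-cluster coordinate sums and
// counts with per-cluster point totals; the loop below divides the sums by
// the counts to produce the new means.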
newCentroids.zeros(centroids.n_rows, centroids.n_cols); counts.zeros(centroids.n_cols); ExtractCentroids(*tree, newCentroids, counts, centroids); // Now, calculate how far the clusters moved, after normalizing them. double residual = 0.0; clusterDistances[centroids.n_cols] = 0.0; for (size_t c = 0; c < centroids.n_cols; ++c) { if (counts[c] == 0) { clusterDistances[c] = 0; } else { newCentroids.col(c) /= counts(c); const double movement = metric.Evaluate(centroids.col(c), newCentroids.col(c)); clusterDistances[c] = movement; residual += std::pow(movement, 2.0); if (movement > clusterDistances[centroids.n_cols]) clusterDistances[centroids.n_cols] = movement; } } distanceCalculations += centroids.n_cols; delete centroidTree; ++iteration; return std::sqrt(residual); } template class TreeType> void DualTreeKMeans::UpdateTree( Tree& node, const arma::mat& centroids, const double parentUpperBound, const double adjustedParentUpperBound, const double parentLowerBound, const double adjustedParentLowerBound) { const bool prunedLastIteration = node.Stat().StaticPruned(); node.Stat().StaticPruned() = false; // Grab information from the parent, if we can. if (node.Parent() != NULL && node.Parent()->Stat().Pruned() == centroids.n_cols && node.Parent()->Stat().Owner() < centroids.n_cols) { // When taking bounds from the parent, note that the parent has already // adjusted the bounds according to the cluster movements, so we need to // de-adjust them since we'll adjust them again. Maybe there is a smarter // way to do this... node.Stat().UpperBound() = parentUpperBound; node.Stat().LowerBound() = parentLowerBound; node.Stat().Pruned() = node.Parent()->Stat().Pruned(); node.Stat().Owner() = node.Parent()->Stat().Owner(); } const double unadjustedUpperBound = node.Stat().UpperBound(); double adjustedUpperBound = adjustedParentUpperBound; const double unadjustedLowerBound = node.Stat().LowerBound(); double adjustedLowerBound = adjustedParentLowerBound; // Exhaustive lower bound check. Sigh. /* if (!prunedLastIteration) { for (size_t i = 0; i < node.NumDescendants(); ++i) { double closest = DBL_MAX; double secondClosest = DBL_MAX; arma::vec distances(centroids.n_cols); for (size_t j = 0; j < centroids.n_cols; ++j) { const double dist = metric.Evaluate(dataset.col(node.Descendant(i)), lastIterationCentroids.col(j)); distances(j) = dist; if (dist < closest) { secondClosest = closest; closest = dist; } else if (dist < secondClosest) secondClosest = dist; } if (closest - 1e-10 > node.Stat().UpperBound()) { Log::Warn << distances.t(); Log::Fatal << "Point " << node.Descendant(i) << " in " << node.Point(0) << "c" << node.NumDescendants() << " invalidates upper bound " << node.Stat().UpperBound() << " with closest cluster distance " << closest << ".\n"; } if (node.NumChildren() == 0) { if (secondClosest + 1e-10 < std::min(lowerBounds[node.Descendant(i)], node.Stat().LowerBound())) { Log::Warn << distances.t(); Log::Warn << node; Log::Fatal << "Point " << node.Descendant(i) << " in " << node.Point(0) << "c" << node.NumDescendants() << " invalidates lower bound " << std::min(lowerBounds[node.Descendant(i)], node.Stat().LowerBound()) << " (" << lowerBounds[node.Descendant(i)] << ", " << node.Stat().LowerBound() << ") with " << "second closest cluster distance " << secondClosest << ". 
cd " << closest << "; pruned " << prunedPoints[node.Descendant(i)] << " visited " << visited[node.Descendant(i)] << ".\n"; } } } } */ if ((node.Stat().Pruned() == centroids.n_cols) && (node.Stat().Owner() < centroids.n_cols)) { // Adjust bounds. node.Stat().UpperBound() += clusterDistances[node.Stat().Owner()]; node.Stat().LowerBound() -= clusterDistances[centroids.n_cols]; if (adjustedParentUpperBound < node.Stat().UpperBound()) node.Stat().UpperBound() = adjustedParentUpperBound; if (adjustedParentLowerBound > node.Stat().LowerBound()) node.Stat().LowerBound() = adjustedParentLowerBound; // Try to use the inter-cluster distances to produce a better lower bound, // if possible. const double interclusterBound = interclusterDistances[node.Stat().Owner()] / 2.0; if (interclusterBound > node.Stat().LowerBound()) { node.Stat().LowerBound() = interclusterBound; adjustedLowerBound = node.Stat().LowerBound(); } if (node.Stat().UpperBound() < node.Stat().LowerBound()) { node.Stat().StaticPruned() = true; } else { // Tighten bound. node.Stat().UpperBound() = std::min(node.Stat().UpperBound(), node.MaxDistance(centroids.col(node.Stat().Owner()))); adjustedUpperBound = node.Stat().UpperBound(); ++distanceCalculations; if (node.Stat().UpperBound() < node.Stat().LowerBound()) node.Stat().StaticPruned() = true; } } else { node.Stat().LowerBound() -= clusterDistances[centroids.n_cols]; } // Recurse into children, and if all the children (and all the points) are // pruned, then we can mark this as statically pruned. bool allChildrenPruned = true; for (size_t i = 0; i < node.NumChildren(); ++i) { UpdateTree(node.Child(i), centroids, unadjustedUpperBound, adjustedUpperBound, unadjustedLowerBound, adjustedLowerBound); if (!node.Child(i).Stat().StaticPruned()) allChildrenPruned = false; } bool allPointsPruned = true; if (tree::TreeTraits::HasSelfChildren && node.NumChildren() > 0) { // If this tree type has self-children, then we have already adjusted the // point bounds at a lower level, and we can determine if all of our points // are pruned simply by seeing if all of the children's points are pruned. // This particular line below additionally assumes that each node's points // are all contained in its first child. This is valid for the cover tree, // but maybe not others. allPointsPruned = node.Child(0).Stat().StaticPruned(); } else if (!node.Stat().StaticPruned()) { // Try to prune individual points. for (size_t i = 0; i < node.NumPoints(); ++i) { const size_t index = node.Point(i); if (!visited[index] && !prunedPoints[index]) { upperBounds[index] = DBL_MAX; // Reset the bounds. lowerBounds[index] = DBL_MAX; allPointsPruned = false; continue; // We didn't visit it and we don't have valid bounds -- so we // can't prune it. } if (prunedLastIteration) { // It was pruned last iteration but not this iteration. // Set the bounds correctly. upperBounds[index] += node.Stat().StaticUpperBoundMovement(); lowerBounds[index] -= node.Stat().StaticLowerBoundMovement(); } prunedPoints[index] = false; const size_t owner = assignments[index]; const double lowerBound = std::min(lowerBounds[index] - clusterDistances[centroids.n_cols], node.Stat().LowerBound()); const double pruningLowerBound = std::max(lowerBound, interclusterDistances[owner] / 2.0); if (upperBounds[index] + clusterDistances[owner] < pruningLowerBound) { prunedPoints[index] = true; upperBounds[index] += clusterDistances[owner]; lowerBounds[index] = pruningLowerBound; } else { // Attempt to tighten the bound. 
upperBounds[index] = metric.Evaluate(dataset.col(index), centroids.col(owner)); ++distanceCalculations; if (upperBounds[index] < pruningLowerBound) { prunedPoints[index] = true; lowerBounds[index] = pruningLowerBound; } else { // Point cannot be pruned. We may have to inspect the point at a // lower level, though. If that's the case, then we shouldn't // invalidate the bounds we've got -- it will happen at the lower // level. if (!tree::TreeTraits::HasSelfChildren || node.NumChildren() == 0) { upperBounds[index] = DBL_MAX; lowerBounds[index] = DBL_MAX; } allPointsPruned = false; } } } } /* if (node.Stat().StaticPruned() && !allChildrenPruned) { Log::Warn << node; for (size_t i = 0; i < node.NumChildren(); ++i) Log::Warn << "child " << i << ":\n" << node.Child(i); Log::Fatal << "Node is statically pruned but not all its children are!\n"; } */ // If all of the children and points are pruned, we may mark this node as // pruned. if (allChildrenPruned && allPointsPruned && !node.Stat().StaticPruned()) { node.Stat().StaticPruned() = true; node.Stat().Owner() = centroids.n_cols; // Invalid owner. node.Stat().Pruned() = size_t(-1); } if (!node.Stat().StaticPruned()) { node.Stat().UpperBound() = DBL_MAX; node.Stat().LowerBound() = DBL_MAX; node.Stat().Pruned() = size_t(-1); node.Stat().Owner() = centroids.n_cols; node.Stat().StaticPruned() = false; } else // The node is now pruned. { if (prunedLastIteration) { // Track total movement while pruned. node.Stat().StaticUpperBoundMovement() += clusterDistances[node.Stat().Owner()]; node.Stat().StaticLowerBoundMovement() += clusterDistances[centroids.n_cols]; } else { node.Stat().StaticUpperBoundMovement() = clusterDistances[node.Stat().Owner()]; node.Stat().StaticLowerBoundMovement() = clusterDistances[centroids.n_cols]; } } } template class TreeType> void DualTreeKMeans::ExtractCentroids( Tree& node, arma::mat& newCentroids, arma::Col& newCounts, const arma::mat& centroids) { // Does this node own points? if ((node.Stat().Pruned() == newCentroids.n_cols) || (node.Stat().StaticPruned() && node.Stat().Owner() < newCentroids.n_cols)) { const size_t owner = node.Stat().Owner(); newCentroids.col(owner) += node.Stat().Centroid() * node.NumDescendants(); newCounts[owner] += node.NumDescendants(); // Perform the sanity check here. /* for (size_t i = 0; i < node.NumDescendants(); ++i) { const size_t index = node.Descendant(i); arma::vec trueDistances(centroids.n_cols); for (size_t j = 0; j < centroids.n_cols; ++j) { const double dist = metric.Evaluate(dataset.col(index), centroids.col(j)); trueDistances[j] = dist; } arma::uword minIndex; const double minDist = trueDistances.min(minIndex); if (size_t(minIndex) != owner) { Log::Warn << node; Log::Warn << trueDistances.t(); Log::Fatal << "Point " << index << " of node " << node.Point(0) << "c" << node.NumDescendants() << " has true minimum cluster " << minIndex << " with " << "distance " << minDist << " but node is pruned with upper bound " << node.Stat().UpperBound() << " and owner " << node.Stat().Owner() << ".\n"; } } */ } else { // Check each point held in the node. // Only check at leaves. 
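// (With the default kd-tree, points are held only in the leaves, so each
// unpruned point is visited exactly once here.)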
if (node.NumChildren() == 0) { for (size_t i = 0; i < node.NumPoints(); ++i) { const size_t owner = assignments[node.Point(i)]; newCentroids.col(owner) += dataset.col(node.Point(i)); ++newCounts[owner]; /* const size_t index = node.Point(i); arma::vec trueDistances(centroids.n_cols); for (size_t j = 0; j < centroids.n_cols; ++j) { const double dist = metric.Evaluate(dataset.col(index), centroids.col(j)); trueDistances[j] = dist; } arma::uword minIndex; const double minDist = trueDistances.min(minIndex); if (size_t(minIndex) != owner) { Log::Warn << node; Log::Warn << trueDistances.t(); Log::Fatal << "Point " << index << " of node " << node.Point(0) << "c" << node.NumDescendants() << " has true minimum cluster " << minIndex << " with " << "distance " << minDist << " but was assigned to cluster " << assignments[node.Point(i)] << " with ub " << upperBounds[node.Point(i)] << " and lb " << lowerBounds[node.Point(i)] << "; pp " << (prunedPoints[node.Point(i)] ? "true" : "false") << ", visited " << (visited[node.Point(i)] ? "true" : "false") << ".\n"; } */ } } // The node is not entirely owned by a cluster. Recurse. for (size_t i = 0; i < node.NumChildren(); ++i) ExtractCentroids(node.Child(i), newCentroids, newCounts, centroids); } } template class TreeType> void DualTreeKMeans::CoalesceTree( Tree& node, const size_t child /* Which child are we? */) { // If all children except one are pruned, we can hide this node. if (node.NumChildren() == 0) return; // We can't do anything. // If this is the root node, we can't coalesce. if (node.Parent() != NULL) { // First, we should coalesce those nodes that aren't statically pruned. for (size_t i = node.NumChildren() - 1; i > 0; --i) { if (node.Child(i).Stat().StaticPruned()) HideChild(node, i); else CoalesceTree(node.Child(i), i); } if (node.Child(0).Stat().StaticPruned()) HideChild(node, 0); else CoalesceTree(node.Child(0), 0); // If we've pruned all but one child, then notPrunedIndex will contain the // index of that child, and we can coalesce this node entirely. Note that // the case where all children are statically pruned should not happen, // because then this node should itself be statically pruned. if (node.NumChildren() == 1) { node.Child(0).Parent() = node.Parent(); node.Parent()->ChildPtr(child) = node.ChildPtr(0); } } else { // We can't coalesce the root, so call the children individually and // coalesce them. for (size_t i = 0; i < node.NumChildren(); ++i) CoalesceTree(node.Child(i), i); } } template class TreeType> void DualTreeKMeans::DecoalesceTree(Tree& node) { node.Parent() = (Tree*) node.Stat().TrueParent(); RestoreChildren(node); for (size_t i = 0; i < node.NumChildren(); ++i) DecoalesceTree(node.Child(i)); } //! Utility function for hiding children in a non-binary tree. template void HideChild(TreeType& node, const size_t child, const typename boost::disable_if_c< tree::TreeTraits::BinaryTree>::type*) { // We're going to assume we have a Children() function open to us. If we // don't, then this won't work, I guess... node.Children().erase(node.Children().begin() + child); } //! Utility function for hiding children in a binary tree. template void HideChild(TreeType& node, const size_t child, const typename boost::enable_if_c< tree::TreeTraits::BinaryTree>::type*) { // If we're hiding the left child, then take the right child as the new left // child. if (child == 0) { node.ChildPtr(0) = node.ChildPtr(1); node.ChildPtr(1) = NULL; } else { node.ChildPtr(1) = NULL; } } //! Utility function for restoring children in a non-binary tree. 
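//! (CoalesceTree() cached the original child pointers in the node statistic;
//! RestoreChildren() simply copies them back via TrueChild() and
//! NumTrueChildren().)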
template void RestoreChildren(TreeType& node, const typename boost::disable_if_c< tree::TreeTraits::BinaryTree>::type*) { node.Children().clear(); node.Children().resize(node.Stat().NumTrueChildren()); for (size_t i = 0; i < node.Stat().NumTrueChildren(); ++i) node.Children()[i] = (TreeType*) node.Stat().TrueChild(i); } //! Utility function for restoring children in a binary tree. template void RestoreChildren(TreeType& node, const typename boost::enable_if_c< tree::TreeTraits::BinaryTree>::type*) { if (node.Stat().NumTrueChildren() > 0) { node.ChildPtr(0) = (TreeType*) node.Stat().TrueChild(0); node.ChildPtr(1) = (TreeType*) node.Stat().TrueChild(1); } } } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/dual_tree_kmeans_rules.hpp000066400000000000000000000051511315013601400251670ustar00rootroot00000000000000/** * @file dtnn_rules.hpp * @author Ryan Curtin * * A set of rules for the dual-tree k-means algorithm which uses dual-tree * nearest neighbor search. For the most part we'll call out to * NeighborSearchRules when we can. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_DUAL_TREE_KMEANS_RULES_HPP #define MLPACK_METHODS_KMEANS_DUAL_TREE_KMEANS_RULES_HPP #include namespace mlpack { namespace kmeans { template class DualTreeKMeansRules { public: DualTreeKMeansRules(const arma::mat& centroids, const arma::mat& dataset, arma::Row& assignments, arma::vec& upperBounds, arma::vec& lowerBounds, MetricType& metric, const std::vector& prunedPoints, const std::vector& oldFromNewCentroids, std::vector& visited); double BaseCase(const size_t queryIndex, const size_t referenceIndex); double Score(const size_t queryIndex, TreeType& referenceNode); double Score(TreeType& queryNode, TreeType& referenceNode); double Rescore(const size_t queryIndex, TreeType& referenceNode, const double oldScore); double Rescore(TreeType& queryNode, TreeType& referenceNode, const double oldScore); typedef typename tree::TraversalInfo TraversalInfoType; TraversalInfoType& TraversalInfo() { return traversalInfo; } const TraversalInfoType& TraversalInfo() const { return traversalInfo; } size_t BaseCases() const { return baseCases; } size_t& BaseCases() { return baseCases; } size_t Scores() const { return scores; } size_t& Scores() { return scores; } private: const arma::mat& centroids; const arma::mat& dataset; arma::Row& assignments; arma::vec& upperBounds; arma::vec& lowerBounds; MetricType& metric; const std::vector& prunedPoints; const std::vector& oldFromNewCentroids; std::vector& visited; size_t baseCases; size_t scores; TraversalInfoType traversalInfo; size_t lastQueryIndex; size_t lastReferenceIndex; size_t lastBaseCase; }; } // namespace kmeans } // namespace mlpack #include "dual_tree_kmeans_rules_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/kmeans/dual_tree_kmeans_rules_impl.hpp000066400000000000000000000274241315013601400262170ustar00rootroot00000000000000/** * @file dtnn_rules_impl.hpp * @author Ryan Curtin * * Implementation of DualTreeKMeansRules. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. 
If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_DUAL_TREE_KMEANS_RULES_IMPL_HPP #define MLPACK_METHODS_KMEANS_DUAL_TREE_KMEANS_RULES_IMPL_HPP #include "dual_tree_kmeans_rules.hpp" namespace mlpack { namespace kmeans { template DualTreeKMeansRules::DualTreeKMeansRules( const arma::mat& centroids, const arma::mat& dataset, arma::Row& assignments, arma::vec& upperBounds, arma::vec& lowerBounds, MetricType& metric, const std::vector& prunedPoints, const std::vector& oldFromNewCentroids, std::vector& visited) : centroids(centroids), dataset(dataset), assignments(assignments), upperBounds(upperBounds), lowerBounds(lowerBounds), metric(metric), prunedPoints(prunedPoints), oldFromNewCentroids(oldFromNewCentroids), visited(visited), baseCases(0), scores(0), lastQueryIndex(dataset.n_cols), lastReferenceIndex(centroids.n_cols) { // We must set the traversal info last query and reference node pointers to // something that is both invalid (i.e. not a tree node) and not NULL. We'll // use the this pointer. traversalInfo.LastQueryNode() = (TreeType*) this; traversalInfo.LastReferenceNode() = (TreeType*) this; } template inline force_inline double DualTreeKMeansRules::BaseCase( const size_t queryIndex, const size_t referenceIndex) { if (prunedPoints[queryIndex]) return 0.0; // Returning 0 shouldn't be a problem. // If we have already performed this base case, then do not perform it again. if ((lastQueryIndex == queryIndex) && (lastReferenceIndex == referenceIndex)) return lastBaseCase; // Any base cases imply that we will get a result. visited[queryIndex] = true; // Calculate the distance. ++baseCases; const double distance = metric.Evaluate(dataset.col(queryIndex), centroids.col(referenceIndex)); if (distance < upperBounds[queryIndex]) { lowerBounds[queryIndex] = upperBounds[queryIndex]; upperBounds[queryIndex] = distance; assignments[queryIndex] = (tree::TreeTraits::RearrangesDataset) ? oldFromNewCentroids[referenceIndex] : referenceIndex; } else if (distance < lowerBounds[queryIndex]) { lowerBounds[queryIndex] = distance; } // Cache this information for the next time BaseCase() is called. lastQueryIndex = queryIndex; lastReferenceIndex = referenceIndex; lastBaseCase = distance; return distance; } template inline double DualTreeKMeansRules::Score( const size_t queryIndex, TreeType& /* referenceNode */) { // If the query point has already been pruned, then don't recurse further. if (prunedPoints[queryIndex]) return DBL_MAX; // No pruning at this level; we're not likely to encounter a single query // point with a reference node.. return 0; } template inline double DualTreeKMeansRules::Score( TreeType& queryNode, TreeType& referenceNode) { if (queryNode.Stat().StaticPruned() == true) return DBL_MAX; // Pruned() for the root node must never be set to size_t(-1). if (queryNode.Stat().Pruned() == size_t(-1)) { queryNode.Stat().Pruned() = queryNode.Parent()->Stat().Pruned(); queryNode.Stat().LowerBound() = queryNode.Parent()->Stat().LowerBound(); queryNode.Stat().Owner() = queryNode.Parent()->Stat().Owner(); } if (queryNode.Stat().Pruned() == centroids.n_cols) return DBL_MAX; // This looks a lot like the hackery used in NeighborSearchRules to avoid // distance computations. We'll use the traversal info to see if a // parent-child or parent-parent prune is possible. 
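// The idea: reconstruct a bound on the distance between the centroids of the
// previously visited node pair from the cached last score, then loosen it by
// the parent and furthest-descendant distances of the current pair. If even
// this cheap bound exceeds the query node's upper bound, the pair can be
// pruned without computing any new distances.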
  const double queryParentDist = queryNode.ParentDistance();
  const double queryDescDist = queryNode.FurthestDescendantDistance();
  const double refParentDist = referenceNode.ParentDistance();
  const double refDescDist = referenceNode.FurthestDescendantDistance();
  const double lastScore = traversalInfo.LastScore();
  double adjustedScore;
  double score = 0.0;

  // We want to set adjustedScore to be the distance between the centroid of
  // the last query node and last reference node.  We will do this by
  // adjusting the last score.  In some cases, we can just use the last base
  // case.
  if (tree::TreeTraits<TreeType>::FirstPointIsCentroid)
  {
    adjustedScore = traversalInfo.LastBaseCase();
  }
  else if (lastScore == 0.0) // Nothing we can do here.
  {
    adjustedScore = 0.0;
  }
  else
  {
    // The last score is equal to the distance between the centroids minus the
    // radii of the query and reference bounds along the axis of the line
    // between the two centroids.  In the best case, these radii are the
    // furthest descendant distances, but that is not always true.  It would
    // take too long to calculate the exact radii, so we are forced to use
    // MinimumBoundDistance() as a lower-bound approximation.
    const double lastQueryDescDist =
        traversalInfo.LastQueryNode()->MinimumBoundDistance();
    const double lastRefDescDist =
        traversalInfo.LastReferenceNode()->MinimumBoundDistance();

    // Both radii must be added back in to recover the centroid-to-centroid
    // estimate.
    adjustedScore = lastScore + lastQueryDescDist + lastRefDescDist;
  }

  // Assemble an adjusted score.  For nearest neighbor search, this adjusted
  // score is a lower bound on MinDistance(queryNode, referenceNode) that is
  // assembled without actually calculating MinDistance().  For furthest
  // neighbor search, it is an upper bound on
  // MaxDistance(queryNode, referenceNode).  If the traversalInfo isn't
  // usable then the node should not be pruned by this.
  if (traversalInfo.LastQueryNode() == queryNode.Parent())
  {
    const double queryAdjust = queryParentDist + queryDescDist;
    adjustedScore -= queryAdjust;
  }
  else if (traversalInfo.LastQueryNode() == &queryNode)
  {
    adjustedScore -= queryDescDist;
  }
  else
  {
    // The last query node wasn't this query node or its parent.  So we force
    // the adjustedScore to be such that this combination can't be pruned
    // here, because we don't really know anything about it.
    //
    // It would be possible to modify this section to try and make a prune
    // based on the query descendant distance and the distance between the
    // query node and last traversal query node, but this case doesn't
    // actually happen for kd-trees or cover trees.
    adjustedScore = 0.0;
  }

  if (traversalInfo.LastReferenceNode() == referenceNode.Parent())
  {
    const double refAdjust = refParentDist + refDescDist;
    adjustedScore -= refAdjust;
  }
  else if (traversalInfo.LastReferenceNode() == &referenceNode)
  {
    adjustedScore -= refDescDist;
  }
  else
  {
    // The last reference node wasn't this reference node or its parent.  So
    // we force the adjustedScore to be such that this combination can't be
    // pruned here, because we don't really know anything about it.
    //
    // It would be possible to modify this section to try and make a prune
    // based on the reference descendant distance and the distance between
    // the reference node and last traversal reference node, but this case
    // doesn't actually happen for kd-trees or cover trees.
    adjustedScore = 0.0;
  }

  // Now, check if we can prune.
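  // Aside: a minimal numeric sketch of the traversal-info adjustment above
  // (illustrative only; the function below is hypothetical and excluded from
  // compilation).  If the last score was a lower bound of 10 between the
  // parents, with MinimumBoundDistance() radii 2 (query) and 3 (reference),
  // the centroid-to-centroid estimate is 10 + 2 + 3 = 15; subtracting each
  // side's ParentDistance() + FurthestDescendantDistance() then re-derives a
  // valid lower bound via the triangle inequality, with no new distance
  // computations.
#if 0
double AdjustedLowerBound(const double lastScore,
                          const double lastQueryRadius, // MinimumBoundDistance
                          const double lastRefRadius,   // MinimumBoundDistance
                          const double queryAdjust,  // parent + desc distance
                          const double refAdjust)    // parent + desc distance
{
  // Recover an estimate of the centroid-to-centroid distance, then shrink it
  // by how far this node's descendants can stray from the last pair.
  const double centroidEstimate = lastScore + lastQueryRadius + lastRefRadius;
  return centroidEstimate - queryAdjust - refAdjust;
}
#endif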
if (adjustedScore > queryNode.Stat().UpperBound()) { if (!(tree::TreeTraits::FirstPointIsCentroid && score == 0.0)) { // There isn't any need to set the traversal information because no // descendant combinations will be visited, and those are the only // combinations that would depend on the traversal information. if (adjustedScore < queryNode.Stat().LowerBound()) { // If this might affect the lower bound, make it more exact. queryNode.Stat().LowerBound() = std::min(queryNode.Stat().LowerBound(), queryNode.MinDistance(referenceNode)); ++scores; } queryNode.Stat().Pruned() += referenceNode.NumDescendants(); score = DBL_MAX; } } if (score != DBL_MAX) { // Get minimum and maximum distances. const math::Range distances = queryNode.RangeDistance(referenceNode); score = distances.Lo(); ++scores; if (distances.Lo() > queryNode.Stat().UpperBound()) { // The reference node can own no points in this query node. We may // improve the lower bound on pruned nodes, though. if (distances.Lo() < queryNode.Stat().LowerBound()) queryNode.Stat().LowerBound() = distances.Lo(); // This assumes that reference clusters don't appear elsewhere in the // tree. queryNode.Stat().Pruned() += referenceNode.NumDescendants(); score = DBL_MAX; } else if (distances.Hi() < queryNode.Stat().UpperBound()) { // Tighten upper bound. const double tighterBound = queryNode.MaxDistance(centroids.col(referenceNode.Descendant(0))); ++scores; // Count extra distance calculation. if (tighterBound <= queryNode.Stat().UpperBound()) { // We can improve the best estimate. queryNode.Stat().UpperBound() = tighterBound; // Remember that our upper bound does correspond to a cluster centroid, // so it does correspond to a cluster. We'll mark the cluster as the // owner, but note that the node is not truly owned unless // Stat().Pruned() is centroids.n_cols. queryNode.Stat().Owner() = (tree::TreeTraits::RearrangesDataset) ? oldFromNewCentroids[referenceNode.Descendant(0)] : referenceNode.Descendant(0); } } } // Is everything pruned? if (queryNode.Stat().Pruned() == centroids.n_cols - 1) { queryNode.Stat().Pruned() = centroids.n_cols; // Owner() is already set. return DBL_MAX; } // Set traversal information. traversalInfo.LastQueryNode() = &queryNode; traversalInfo.LastReferenceNode() = &referenceNode; traversalInfo.LastScore() = score; return score; } template inline double DualTreeKMeansRules::Rescore( const size_t /* queryIndex */, TreeType& /* referenceNode */, const double oldScore) { // No rescoring (for now). return oldScore; } template inline double DualTreeKMeansRules::Rescore( TreeType& queryNode, TreeType& referenceNode, const double oldScore) { if (oldScore == DBL_MAX) return DBL_MAX; // It's already pruned. // oldScore contains the minimum distance between queryNode and referenceNode. // In the time since Score() has been called, the upper bound *may* have // tightened. If it has tightened enough, we may prune this node now. if (oldScore > queryNode.Stat().UpperBound()) { // We may still be able to improve the lower bound on pruned nodes. if (oldScore < queryNode.Stat().LowerBound()) queryNode.Stat().LowerBound() = oldScore; // This assumes that reference clusters don't appear elsewhere in the tree. queryNode.Stat().Pruned() += referenceNode.NumDescendants(); return DBL_MAX; } // Also, check if everything has been pruned. if (queryNode.Stat().Pruned() == centroids.n_cols - 1) { queryNode.Stat().Pruned() = centroids.n_cols; // Owner() is already set. 
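    // Aside (illustrative): once k - 1 of the k centroids have been pruned
    // for this node, the single remaining candidate must own every descendant
    // point, so Pruned() is promoted to k and the node is never scored again.
    // E.g., with k = 5 and Pruned() == 4, the one unpruned centroid --
    // already stored in Owner() by the upper-bound updates above -- is the
    // nearest centroid for all points below this node.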
    return DBL_MAX;
  }

  return oldScore;
}

} // namespace kmeans
} // namespace mlpack

#endif

// mlpack-2.2.5/src/mlpack/methods/kmeans/dual_tree_kmeans_statistic.hpp

/**
 * @file dual_tree_kmeans_statistic.hpp
 * @author Ryan Curtin
 *
 * Statistic for dual-tree nearest neighbor search based k-means clustering.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_KMEANS_DTNN_STATISTIC_HPP
#define MLPACK_METHODS_KMEANS_DTNN_STATISTIC_HPP

#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

namespace mlpack {
namespace kmeans {

class DualTreeKMeansStatistic :
    public neighbor::NeighborSearchStat<neighbor::NearestNeighborSort>
{
 public:
  DualTreeKMeansStatistic() :
      neighbor::NeighborSearchStat<neighbor::NearestNeighborSort>(),
      upperBound(DBL_MAX),
      lowerBound(DBL_MAX),
      owner(size_t(-1)),
      pruned(size_t(-1)),
      staticPruned(false),
      staticUpperBoundMovement(0.0),
      staticLowerBoundMovement(0.0),
      centroid(),
      trueParent(NULL)
  {
    // Nothing to do.
  }

  template<typename TreeType>
  DualTreeKMeansStatistic(TreeType& node) :
      neighbor::NeighborSearchStat<neighbor::NearestNeighborSort>(),
      upperBound(DBL_MAX),
      lowerBound(DBL_MAX),
      owner(size_t(-1)),
      pruned(size_t(-1)),
      staticPruned(false),
      staticUpperBoundMovement(0.0),
      staticLowerBoundMovement(0.0),
      trueParent(node.Parent())
  {
    // Empirically calculate the centroid.
    centroid.zeros(node.Dataset().n_rows);
    for (size_t i = 0; i < node.NumPoints(); ++i)
    {
      // Correct handling of cover tree: don't double-count the point which
      // appears in the children.
      if (tree::TreeTraits<TreeType>::HasSelfChildren && i == 0 &&
          node.NumChildren() > 0)
        continue;
      centroid += node.Dataset().col(node.Point(i));
    }

    for (size_t i = 0; i < node.NumChildren(); ++i)
      centroid += node.Child(i).NumDescendants() *
          node.Child(i).Stat().Centroid();

    centroid /= node.NumDescendants();

    // Set the true children correctly.
    trueChildren.resize(node.NumChildren());
    for (size_t i = 0; i < node.NumChildren(); ++i)
      trueChildren[i] = &node.Child(i);
  }

  double UpperBound() const { return upperBound; }
  double& UpperBound() { return upperBound; }

  double LowerBound() const { return lowerBound; }
  double& LowerBound() { return lowerBound; }

  const arma::vec& Centroid() const { return centroid; }
  arma::vec& Centroid() { return centroid; }

  size_t Owner() const { return owner; }
  size_t& Owner() { return owner; }

  size_t Pruned() const { return pruned; }
  size_t& Pruned() { return pruned; }

  bool StaticPruned() const { return staticPruned; }
  bool& StaticPruned() { return staticPruned; }

  double StaticUpperBoundMovement() const { return staticUpperBoundMovement; }
  double& StaticUpperBoundMovement() { return staticUpperBoundMovement; }

  double StaticLowerBoundMovement() const { return staticLowerBoundMovement; }
  double& StaticLowerBoundMovement() { return staticLowerBoundMovement; }

  void* TrueParent() const { return trueParent; }
  void*& TrueParent() { return trueParent; }

  void* TrueChild(const size_t i) const { return trueChildren[i]; }
  void*& TrueChild(const size_t i) { return trueChildren[i]; }

  size_t NumTrueChildren() const { return trueChildren.size(); }

 private:
  double upperBound;
  double lowerBound;
  size_t owner;
  size_t pruned;
  bool staticPruned;
  double staticUpperBoundMovement;
  double staticLowerBoundMovement;
  arma::vec centroid;
  void* trueParent;
  std::vector<void*> trueChildren;
};

} // namespace kmeans
} // namespace mlpack

#endif

// mlpack-2.2.5/src/mlpack/methods/kmeans/elkan_kmeans.hpp

/**
 * @file elkan_kmeans.hpp
 * @author Ryan Curtin
 *
 * An implementation of Elkan's algorithm for exact Lloyd iterations.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_KMEANS_ELKAN_KMEANS_HPP
#define MLPACK_METHODS_KMEANS_ELKAN_KMEANS_HPP

namespace mlpack {
namespace kmeans {

template<typename MetricType, typename MatType>
class ElkanKMeans
{
 public:
  /**
   * Construct the ElkanKMeans object, which must store several sets of
   * bounds.
   */
  ElkanKMeans(const MatType& dataset, MetricType& metric);

  /**
   * Run a single iteration of Elkan's algorithm, updating the given centroids
   * into the newCentroids matrix.
   *
   * @param centroids Current cluster centroids.
   * @param newCentroids New cluster centroids.
   * @param counts Current counts, to be overwritten with new counts.
   */
  double Iterate(const arma::mat& centroids,
                 arma::mat& newCentroids,
                 arma::Col<size_t>& counts);

  size_t DistanceCalculations() const { return distanceCalculations; }

 private:
  //! The dataset.
  const MatType& dataset;
  //! The instantiated metric.
  MetricType& metric;

  //! Holds intra-cluster distances.
  arma::mat clusterDistances;
  //! Half the distance from a cluster to its nearest cluster (s(c)).
  arma::vec minClusterDistances;
  //! Holds the index of the cluster that owns each point.
  arma::Col<size_t> assignments;

  //! Upper bounds on the distance between each point and its closest cluster.
  arma::vec upperBounds;
  //! Lower bounds on the distance between each point and each cluster.
  arma::mat lowerBounds;

  //! Track distance calculations.
  size_t distanceCalculations;
};

} // namespace kmeans
} // namespace mlpack

// Include implementation.
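// Usage aside: ElkanKMeans is meant to be plugged into KMeans as the
// LloydStepType template parameter rather than driven by hand.  A minimal
// sketch (assuming the default policies declared in kmeans.hpp; excluded
// from compilation):
#if 0
#include <mlpack/methods/kmeans/kmeans.hpp>
#include <mlpack/methods/kmeans/elkan_kmeans.hpp>

using namespace mlpack::kmeans;

arma::mat data;       // Filled elsewhere.
arma::mat centroids;  // Will hold one centroid per column.
KMeans<mlpack::metric::EuclideanDistance, SampleInitialization,
    MaxVarianceNewCluster, ElkanKMeans> k;
k.Cluster(data, 3, centroids);  // Find 3 clusters with Elkan's iterations.
#endif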
#include "elkan_kmeans_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/kmeans/elkan_kmeans_impl.hpp000066400000000000000000000150011315013601400241170ustar00rootroot00000000000000/** * @file elkan_kmeans_impl.hpp * @author Ryan Curtin * * An implementation of Elkan's algorithm for exact Lloyd iterations. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_ELKAN_KMEANS_IMPL_HPP #define MLPACK_METHODS_KMEANS_ELKAN_KMEANS_IMPL_HPP // In case it hasn't been included yet. #include "elkan_kmeans.hpp" namespace mlpack { namespace kmeans { template ElkanKMeans::ElkanKMeans(const MatType& dataset, MetricType& metric) : dataset(dataset), metric(metric), distanceCalculations(0) { } // Run a single iteration of Elkan's algorithm for Lloyd iterations. template double ElkanKMeans::Iterate(const arma::mat& centroids, arma::mat& newCentroids, arma::Col& counts) { // Clear new centroids. newCentroids.zeros(centroids.n_rows, centroids.n_cols); counts.zeros(centroids.n_cols); // At the beginning of the iteration, we must compute the distances between // all centers. This is O(k^2). clusterDistances.set_size(centroids.n_cols, centroids.n_cols); // Self-distances are always 0, but we set them to DBL_MAX to avoid the self // being the closest cluster centroid. clusterDistances.diag().fill(DBL_MAX); // Initially set r(x) to true. std::vector mustRecalculate(dataset.n_cols, true); // If this is the first iteration, we must reset all the bounds. if (lowerBounds.n_rows != centroids.n_cols) { lowerBounds.set_size(centroids.n_cols, dataset.n_cols); assignments.set_size(dataset.n_cols); upperBounds.set_size(dataset.n_cols); lowerBounds.fill(0); upperBounds.fill(DBL_MAX); assignments.fill(0); } // Step 1: for all centers, compute between-cluster distances. For all // centers, compute s(c) = 1/2 min d(c, c'). for (size_t i = 0; i < centroids.n_cols; ++i) { for (size_t j = i + 1; j < centroids.n_cols; ++j) { const double distance = metric.Evaluate(centroids.col(i), centroids.col(j)); distanceCalculations++; clusterDistances(i, j) = distance; clusterDistances(j, i) = distance; } } // Now find the closest cluster to each other cluster. We multiply by 0.5 so // that this is equivalent to s(c) for each cluster c. minClusterDistances = 0.5 * arma::min(clusterDistances).t(); // Now loop over all points, and see which ones need to be updated. for (size_t i = 0; i < dataset.n_cols; ++i) { // Step 2: identify all points such that u(x) <= s(c(x)). if (upperBounds(i) <= minClusterDistances(assignments[i])) { // No change needed. This point must still belong to that cluster. counts(assignments[i])++; newCentroids.col(assignments[i]) += arma::vec(dataset.col(i)); continue; } else { for (size_t c = 0; c < centroids.n_cols; ++c) { // Step 3: for all remaining points x and centers c such that c != c(x), // u(x) > l(x, c) and u(x) > 0.5 d(c(x), c)... if (assignments[i] == c) continue; // Pruned because this cluster is already the assignment. if (upperBounds(i) <= lowerBounds(c, i)) continue; // Pruned by triangle inequality on lower bound. if (upperBounds(i) <= 0.5 * clusterDistances(assignments[i], c)) continue; // Pruned by triangle inequality on cluster distances. // Step 3a: if r(x) then compute d(x, c(x)) and assign r(x) = false. // Otherwise, d(x, c(x)) = u(x). 
double dist; if (mustRecalculate[i]) { mustRecalculate[i] = false; dist = metric.Evaluate(dataset.col(i), centroids.col(assignments[i])); lowerBounds(assignments[i], i) = dist; upperBounds(i) = dist; distanceCalculations++; // Check if we can prune again. if (upperBounds(i) <= lowerBounds(c, i)) continue; // Pruned by triangle inequality on lower bound. if (upperBounds(i) <= 0.5 * clusterDistances(assignments[i], c)) continue; // Pruned by triangle inequality on cluster distances. } else { dist = upperBounds(i); // This is equivalent to d(x, c(x)). } // Step 3b: if d(x, c(x)) > l(x, c) or d(x, c(x)) > 0.5 d(c(x), c)... if (dist > lowerBounds(c, i) || dist > 0.5 * clusterDistances(assignments[i], c)) { // Compute d(x, c). If d(x, c) < d(x, c(x)) then assign c(x) = c. const double pointDist = metric.Evaluate(dataset.col(i), centroids.col(c)); lowerBounds(c, i) = pointDist; distanceCalculations++; if (pointDist < dist) { upperBounds(i) = pointDist; assignments[i] = c; } } } } // At this point, we know the new cluster assignment. // Step 4: for each center c, let m(c) be the mean of the points assigned to // c. newCentroids.col(assignments[i]) += arma::vec(dataset.col(i)); counts[assignments[i]]++; } // Now, normalize and calculate the distance each cluster has moved. arma::vec moveDistances(centroids.n_cols); double cNorm = 0.0; // Cluster movement for residual. for (size_t c = 0; c < centroids.n_cols; ++c) { if (counts[c] > 0) newCentroids.col(c) /= counts[c]; moveDistances(c) = metric.Evaluate(newCentroids.col(c), centroids.col(c)); cNorm += std::pow(moveDistances(c), 2.0); distanceCalculations++; } for (size_t i = 0; i < dataset.n_cols; ++i) { // Step 5: for each point x and center c, assign // l(x, c) = max { l(x, c) - d(c, m(c)), 0 }. // But it doesn't actually matter if l(x, c) is positive. for (size_t c = 0; c < centroids.n_cols; ++c) lowerBounds(c, i) -= moveDistances(c); // Step 6: for each point x, assign // u(x) = u(x) + d(m(c(x)), c(x)) // r(x) = true (we are setting that at the start of every iteration). upperBounds(i) += moveDistances(assignments[i]); } return std::sqrt(cNorm); } } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/hamerly_kmeans.hpp000066400000000000000000000035221315013601400234520ustar00rootroot00000000000000/** * @file hamerly_kmeans.hpp * @author Ryan Curtin * * An implementation of Greg Hamerly's algorithm for k-means clustering. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_HAMERLY_KMEANS_HPP #define MLPACK_METHODS_KMEANS_HAMERLY_KMEANS_HPP namespace mlpack { namespace kmeans { template class HamerlyKMeans { public: /** * Construct the HamerlyKMeans object, which must store several sets of * bounds. */ HamerlyKMeans(const MatType& dataset, MetricType& metric); /** * Run a single iteration of Hamerly's algorithm, updating the given centroids * into the newCentroids matrix. * * @param centroids Current cluster centroids. * @param newCentroids New cluster centroids. * @param counts Current counts, to be overwritten with new counts. */ double Iterate(const arma::mat& centroids, arma::mat& newCentroids, arma::Col& counts); size_t DistanceCalculations() const { return distanceCalculations; } private: //! The dataset. const MatType& dataset; //! 
The instantiated metric. MetricType& metric; //! Minimum cluster distances from each cluster. arma::vec minClusterDistances; //! Upper bounds for each point. arma::vec upperBounds; //! Lower bounds for each point. arma::vec lowerBounds; //! Assignments for each point. arma::Col assignments; //! Track distance calculations. size_t distanceCalculations; }; } // namespace kmeans } // namespace mlpack // Include implementation. #include "hamerly_kmeans_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/kmeans/hamerly_kmeans_impl.hpp000066400000000000000000000124531315013601400244760ustar00rootroot00000000000000/** * @file hamerly_kmeans_impl.hpp * @author Ryan Curtin * * An implementation of Greg Hamerly's algorithm for k-means clustering. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_HAMERLY_KMEANS_IMPL_HPP #define MLPACK_METHODS_KMEANS_HAMERLY_KMEANS_IMPL_HPP // In case it hasn't been included yet. #include "hamerly_kmeans.hpp" namespace mlpack { namespace kmeans { template HamerlyKMeans::HamerlyKMeans(const MatType& dataset, MetricType& metric) : dataset(dataset), metric(metric), distanceCalculations(0) { // Nothing to do. } template double HamerlyKMeans::Iterate(const arma::mat& centroids, arma::mat& newCentroids, arma::Col& counts) { size_t hamerlyPruned = 0; // If this is the first iteration, we need to set all the bounds. if (minClusterDistances.n_elem != centroids.n_cols) { upperBounds.set_size(dataset.n_cols); upperBounds.fill(DBL_MAX); lowerBounds.zeros(dataset.n_cols); assignments.zeros(dataset.n_cols); minClusterDistances.set_size(centroids.n_cols); } // Reset new centroids. newCentroids.zeros(centroids.n_rows, centroids.n_cols); counts.zeros(centroids.n_cols); // Calculate minimum intra-cluster distance for each cluster. minClusterDistances.fill(DBL_MAX); for (size_t i = 0; i < centroids.n_cols; ++i) { for (size_t j = i + 1; j < centroids.n_cols; ++j) { const double dist = metric.Evaluate(centroids.col(i), centroids.col(j)) / 2.0; ++distanceCalculations; // Update bounds, if this intra-cluster distance is smaller. if (dist < minClusterDistances(i)) minClusterDistances(i) = dist; if (dist < minClusterDistances(j)) minClusterDistances(j) = dist; } } for (size_t i = 0; i < dataset.n_cols; ++i) { const double m = std::max(minClusterDistances(assignments[i]), lowerBounds(i)); // First bound test. if (upperBounds(i) <= m) { ++hamerlyPruned; newCentroids.col(assignments[i]) += dataset.col(i); ++counts(assignments[i]); continue; } // Tighten upper bound. upperBounds(i) = metric.Evaluate(dataset.col(i), centroids.col(assignments[i])); ++distanceCalculations; // Second bound test. if (upperBounds(i) <= m) { newCentroids.col(assignments[i]) += dataset.col(i); ++counts(assignments[i]); continue; } // The bounds failed. So test against all other clusters. // This is Hamerly's Point-All-Ctrs() function from the paper. // We have to reset the lower bound first. lowerBounds(i) = DBL_MAX; for (size_t c = 0; c < centroids.n_cols; ++c) { if (c == assignments[i]) continue; const double dist = metric.Evaluate(dataset.col(i), centroids.col(c)); // Is this a better cluster? At this point, upperBounds[i] = d(i, c(i)). if (dist < upperBounds(i)) { // lowerBounds holds the second closest cluster. 
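        // Aside: unlike Elkan's algorithm, which maintains k lower bounds per
        // point, Hamerly's method keeps only this single second-closest
        // bound, trading prune strength for O(n) rather than O(nk)
        // bound maintenance per iteration; the m = max(s(c(x)), l(x)) test
        // above is where that single bound pays off.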
        lowerBounds(i) = upperBounds(i);
        upperBounds(i) = dist;
        assignments[i] = c;
      }
      else if (dist < lowerBounds(i))
      {
        // This is a closer second-closest cluster.
        lowerBounds(i) = dist;
      }
    }
    distanceCalculations += centroids.n_cols - 1;

    // Update new centroids.
    newCentroids.col(assignments[i]) += dataset.col(i);
    ++counts(assignments[i]);
  }

  // Normalize centroids and calculate cluster movement (contains parts of
  // Move-Centers() and Update-Bounds()).
  double furthestMovement = 0.0;
  double secondFurthestMovement = 0.0;
  size_t furthestMovingCluster = 0;
  arma::vec centroidMovements(centroids.n_cols);
  double centroidMovement = 0.0;
  for (size_t c = 0; c < centroids.n_cols; ++c)
  {
    if (counts(c) > 0)
      newCentroids.col(c) /= counts(c);

    // Calculate movement.
    const double movement = metric.Evaluate(centroids.col(c),
        newCentroids.col(c));
    centroidMovements(c) = movement;
    centroidMovement += std::pow(movement, 2.0);
    ++distanceCalculations;

    if (movement > furthestMovement)
    {
      secondFurthestMovement = furthestMovement;
      furthestMovement = movement;
      furthestMovingCluster = c;
    }
    else if (movement > secondFurthestMovement)
    {
      secondFurthestMovement = movement;
    }
  }

  // Now update bounds (lines 3-8 of Update-Bounds()).
  for (size_t i = 0; i < dataset.n_cols; ++i)
  {
    upperBounds(i) += centroidMovements(assignments[i]);
    if (assignments[i] == furthestMovingCluster)
      lowerBounds(i) -= secondFurthestMovement;
    else
      lowerBounds(i) -= furthestMovement;
  }

  Log::Info << "Hamerly prunes: " << hamerlyPruned << ".\n";

  return std::sqrt(centroidMovement);
}

} // namespace kmeans
} // namespace mlpack

#endif

// mlpack-2.2.5/src/mlpack/methods/kmeans/kill_empty_clusters.hpp

/**
 * @file kill_empty_clusters.hpp
 * @author Ryan Curtin
 *
 * This very simple policy is used when K-Means should simply kill empty
 * clusters when they arise, instead of reinitializing them.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_KMEANS_KILL_EMPTY_CLUSTERS_HPP
#define MLPACK_METHODS_KMEANS_KILL_EMPTY_CLUSTERS_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace kmeans {

/**
 * Policy which allows K-Means to "kill" empty clusters without any error
 * being reported.  This means the centroids will be filled with DBL_MAX.
 */
class KillEmptyClusters
{
 public:
  //! Default constructor required by EmptyClusterPolicy policy.
  KillEmptyClusters() { }

  /**
   * This function sets an empty cluster found during k-means to all DBL_MAX
   * (i.e. an invalid "dead" cluster).
   *
   * @tparam MatType Type of data (arma::mat or arma::sp_mat).
   * @param data Dataset on which clustering is being performed.
   * @param emptyCluster Index of cluster which is empty.
   * @param oldCentroids Centroids of each cluster (one per column) at the
   *     start of the iteration.
   * @param newCentroids Centroids of each cluster (one per column) at the end
   *     of the iteration.
   * @param clusterCounts Number of points in each cluster.
   * @param assignments Cluster assignments of each point.
   * @param iteration Number of iteration.
   *
   * @return Number of points changed (0).
*/ template static inline force_inline size_t EmptyCluster( const MatType& /* data */, const size_t emptyCluster, const arma::mat& /* oldCentroids */, arma::mat& newCentroids, arma::Col& /* clusterCounts */, MetricType& /* metric */, const size_t /* iteration */) { // Kill the empty cluster. newCentroids.col(emptyCluster).fill(DBL_MAX); return 0; // No points were changed. } //! Serialize the empty cluster policy (nothing to do). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/kmeans.hpp000066400000000000000000000200471315013601400217320ustar00rootroot00000000000000/** * @file kmeans.hpp * @author Parikshit Ram (pram@cc.gatech.edu) * * K-Means clustering. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_KMEANS_HPP #define MLPACK_METHODS_KMEANS_KMEANS_HPP #include #include #include "sample_initialization.hpp" #include "max_variance_new_cluster.hpp" #include "naive_kmeans.hpp" #include namespace mlpack { namespace kmeans /** K-Means clustering. */ { /** * This class implements K-Means clustering, using a variety of possible * implementations of Lloyd's algorithm. * * Four template parameters can (optionally) be supplied: the distance metric to * use, the policy for how to find the initial partition of the data, the * actions to be taken when an empty cluster is encountered, and the * implementation of a single Lloyd step to use. * * A simple example of how to run K-Means clustering is shown below. * * @code * extern arma::mat data; // Dataset we want to run K-Means on. * arma::Row assignments; // Cluster assignments. * arma::mat centroids; // Cluster centroids. * * KMeans<> k; // Default options. * k.Cluster(data, 3, assignments, centroids); // 3 clusters. * * // Cluster using the Manhattan distance, 100 iterations maximum, saving only * // the centroids. * KMeans k(100); * k.Cluster(data, 6, centroids); // 6 clusters. * @endcode * * @tparam MetricType The distance metric to use for this KMeans; see * metric::LMetric for an example. * @tparam InitialPartitionPolicy Initial partitioning policy; must implement a * default constructor and either 'void Cluster(const arma::mat&, const * size_t, arma::Row&)' or 'void Cluster(const arma::mat&, const * size_t, arma::mat&)'. * @tparam EmptyClusterPolicy Policy for what to do on an empty cluster; must * implement a default constructor and 'void EmptyCluster(const arma::mat& * data, const size_t emptyCluster, const arma::mat& oldCentroids, * arma::mat& newCentroids, arma::Col& counts, MetricType& metric, * const size_t iteration)'. * @tparam LloydStepType Implementation of single Lloyd step to use. * * @see RandomPartition, SampleInitialization, RefinedStart, AllowEmptyClusters, * MaxVarianceNewCluster, NaiveKMeans, ElkanKMeans */ template class LloydStepType = NaiveKMeans, typename MatType = arma::mat> class KMeans { public: /** * Create a K-Means object and (optionally) set the parameters which K-Means * will be run with. * * @param maxIterations Maximum number of iterations allowed before giving up * (0 is valid, but the algorithm may never terminate). * @param metric Optional MetricType object; for when the metric has state * it needs to store. 
* @param partitioner Optional InitialPartitionPolicy object; for when a * specially initialized partitioning policy is required. * @param emptyClusterAction Optional EmptyClusterPolicy object; for when a * specially initialized empty cluster policy is required. */ KMeans(const size_t maxIterations = 1000, const MetricType metric = MetricType(), const InitialPartitionPolicy partitioner = InitialPartitionPolicy(), const EmptyClusterPolicy emptyClusterAction = EmptyClusterPolicy()); /** * Perform k-means clustering on the data, returning a list of cluster * assignments. Optionally, the vector of assignments can be set to an * initial guess of the cluster assignments; to do this, set initialGuess to * true. * * @tparam MatType Type of matrix (arma::mat or arma::sp_mat). * @param data Dataset to cluster. * @param clusters Number of clusters to compute. * @param assignments Vector to store cluster assignments in. * @param initialGuess If true, then it is assumed that assignments has a list * of initial cluster assignments. */ void Cluster(const MatType& data, const size_t clusters, arma::Row& assignments, const bool initialGuess = false); /** * Perform k-means clustering on the data, returning the centroids of each * cluster in the centroids matrix. Optionally, the initial centroids can be * specified by filling the centroids matrix with the initial centroids and * specifying initialGuess = true. * * @tparam MatType Type of matrix (arma::mat or arma::sp_mat). * @param data Dataset to cluster. * @param clusters Number of clusters to compute. * @param centroids Matrix in which centroids are stored. * @param initialGuess If true, then it is assumed that centroids contains the * initial cluster centroids. */ void Cluster(const MatType& data, const size_t clusters, arma::mat& centroids, const bool initialGuess = false); /** * Perform k-means clustering on the data, returning a list of cluster * assignments and also the centroids of each cluster. Optionally, the vector * of assignments can be set to an initial guess of the cluster assignments; * to do this, set initialAssignmentGuess to true. Another way to set initial * cluster guesses is to fill the centroids matrix with the centroid guesses, * and then set initialCentroidGuess to true. initialAssignmentGuess * supersedes initialCentroidGuess, so if both are set to true, the * assignments vector is used. * * @tparam MatType Type of matrix (arma::mat or arma::sp_mat). * @param data Dataset to cluster. * @param clusters Number of clusters to compute. * @param assignments Vector to store cluster assignments in. * @param centroids Matrix in which centroids are stored. * @param initialAssignmentGuess If true, then it is assumed that assignments * has a list of initial cluster assignments. * @param initialCentroidGuess If true, then it is assumed that centroids * contains the initial centroids of each cluster. */ void Cluster(const MatType& data, const size_t clusters, arma::Row& assignments, arma::mat& centroids, const bool initialAssignmentGuess = false, const bool initialCentroidGuess = false); //! Get the maximum number of iterations. size_t MaxIterations() const { return maxIterations; } //! Set the maximum number of iterations. size_t& MaxIterations() { return maxIterations; } //! Get the distance metric. const MetricType& Metric() const { return metric; } //! Modify the distance metric. MetricType& Metric() { return metric; } //! Get the initial partitioning policy. const InitialPartitionPolicy& Partitioner() const { return partitioner; } //! 
Modify the initial partitioning policy.
  InitialPartitionPolicy& Partitioner() { return partitioner; }

  //! Get the empty cluster policy.
  const EmptyClusterPolicy& EmptyClusterAction() const
  { return emptyClusterAction; }
  //! Modify the empty cluster policy.
  EmptyClusterPolicy& EmptyClusterAction() { return emptyClusterAction; }

  //! Serialize the k-means object.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int version);

 private:
  //! Maximum number of iterations before giving up.
  size_t maxIterations;
  //! Instantiated distance metric.
  MetricType metric;
  //! Instantiated initial partitioning policy.
  InitialPartitionPolicy partitioner;
  //! Instantiated empty cluster policy.
  EmptyClusterPolicy emptyClusterAction;
};

} // namespace kmeans
} // namespace mlpack

// Include implementation.
#include "kmeans_impl.hpp"

#endif // MLPACK_METHODS_KMEANS_KMEANS_HPP

// mlpack-2.2.5/src/mlpack/methods/kmeans/kmeans_impl.hpp

/**
 * @file kmeans_impl.hpp
 * @author Parikshit Ram (pram@cc.gatech.edu)
 * @author Ryan Curtin
 *
 * Implementation for the K-means method for getting an initial point.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "kmeans.hpp"

#include <mlpack/core/util/sfinae_utility.hpp>
#include <limits>

namespace mlpack {
namespace kmeans {

/**
 * This gives us a GivesCentroids object that we can use to tell whether or
 * not an InitialPartitionPolicy returns centroids or point assignments.
 */
HAS_MEM_FUNC(Cluster, GivesCentroidsCheck);

/**
 * 'value' is true if the InitialPartitionPolicy class has a member
 * Cluster(const arma::mat& data, const size_t clusters, arma::mat& centroids).
 */
template<typename InitialPartitionPolicy>
struct GivesCentroids
{
  static const bool value =
      // Non-static version.
      GivesCentroidsCheck<InitialPartitionPolicy,
          void(InitialPartitionPolicy::*)(const arma::mat&,
                                          const size_t,
                                          arma::mat&)>::value ||
      // Static version.
      GivesCentroidsCheck<InitialPartitionPolicy,
          void(*)(const arma::mat&, const size_t, arma::mat&)>::value;
};

//! Call the initial partition policy, if it returns assignments.  This
//! returns 'true' to indicate that assignments were given.
template<typename MatType, typename InitialPartitionPolicy>
bool GetInitialAssignmentsOrCentroids(
    InitialPartitionPolicy& ipp,
    const MatType& data,
    const size_t clusters,
    arma::Row<size_t>& assignments,
    arma::mat& /* centroids */,
    const typename boost::disable_if_c<
        GivesCentroids<InitialPartitionPolicy>::value == true>::type* = 0)
{
  ipp.Cluster(data, clusters, assignments);
  return true;
}

//! Call the initial partition policy, if it returns centroids.  This returns
//! 'false' to indicate that assignments were not given.
template<typename MatType, typename InitialPartitionPolicy>
bool GetInitialAssignmentsOrCentroids(
    InitialPartitionPolicy& ipp,
    const MatType& data,
    const size_t clusters,
    arma::Row<size_t>& /* assignments */,
    arma::mat& centroids,
    const typename boost::enable_if_c<
        GivesCentroids<InitialPartitionPolicy>::value == true>::type* = 0)
{
  ipp.Cluster(data, clusters, centroids);
  return false;
}

/**
 * Construct the K-Means object.
 */
template<typename MetricType,
         typename InitialPartitionPolicy,
         typename EmptyClusterPolicy,
         template<class, class> class LloydStepType,
         typename MatType>
KMeans<MetricType,
       InitialPartitionPolicy,
       EmptyClusterPolicy,
       LloydStepType,
       MatType>::
KMeans(const size_t maxIterations,
       const MetricType metric,
       const InitialPartitionPolicy partitioner,
       const EmptyClusterPolicy emptyClusterAction) :
    maxIterations(maxIterations),
    metric(metric),
    partitioner(partitioner),
    emptyClusterAction(emptyClusterAction)
{
  // Nothing to do.
}

/**
 * Perform k-means clustering on the data, returning a list of cluster
 * assignments.  This just forwards to the other function, which returns the
 * centroids too.
If this is properly inlined, there shouldn't be any * performance penalty whatsoever. */ template class LloydStepType, typename MatType> inline void KMeans< MetricType, InitialPartitionPolicy, EmptyClusterPolicy, LloydStepType, MatType>:: Cluster(const MatType& data, const size_t clusters, arma::Row& assignments, const bool initialGuess) { arma::mat centroids(data.n_rows, clusters); Cluster(data, clusters, assignments, centroids, initialGuess); } /** * Perform k-means clustering on the data, returning a list of cluster * assignments and the centroids of each cluster. */ template class LloydStepType, typename MatType> void KMeans< MetricType, InitialPartitionPolicy, EmptyClusterPolicy, LloydStepType, MatType>:: Cluster(const MatType& data, const size_t clusters, arma::mat& centroids, const bool initialGuess) { // Make sure we have more points than clusters. if (clusters > data.n_cols) Log::Warn << "KMeans::Cluster(): more clusters requested than points given." << std::endl; else if (clusters == 0) Log::Warn << "KMeans::Cluster(): zero clusters requested. This probably " << "isn't going to work. Brace for crash." << std::endl; // Check validity of initial guess. if (initialGuess) { if (centroids.n_cols != clusters) Log::Fatal << "KMeans::Cluster(): wrong number of initial cluster " << "centroids (" << centroids.n_cols << ", should be " << clusters << ")!" << std::endl; if (centroids.n_rows != data.n_rows) Log::Fatal << "KMeans::Cluster(): initial cluster centroids have wrong " << " dimensionality (" << centroids.n_rows << ", should be " << data.n_rows << ")!" << std::endl; } // Use the partitioner to come up with the partition assignments and calculate // the initial centroids. if (!initialGuess) { // The GetInitialAssignmentsOrCentroids() function will call the appropriate // function in the InitialPartitionPolicy to return either assignments or // centroids. We prefer centroids, but if assignments are returned, then we // have to calculate the initial centroids for the first iteration. arma::Row assignments; bool gotAssignments = GetInitialAssignmentsOrCentroids(partitioner, data, clusters, assignments, centroids); if (gotAssignments) { // The partitioner gives assignments, so we need to calculate centroids // from those assignments. arma::Row counts; counts.zeros(clusters); centroids.zeros(data.n_rows, clusters); for (size_t i = 0; i < data.n_cols; ++i) { centroids.col(assignments[i]) += arma::vec(data.col(i)); counts[assignments[i]]++; } for (size_t i = 0; i < clusters; ++i) if (counts[i] != 0) centroids.col(i) /= counts[i]; } } // Counts of points in each cluster. arma::Col counts(clusters); size_t iteration = 0; LloydStepType lloydStep(data, metric); arma::mat centroidsOther; double cNorm; do { // We have two centroid matrices. We don't want to copy anything, so, // depending on the iteration number, we use a different centroid matrix... if (iteration % 2 == 0) cNorm = lloydStep.Iterate(centroids, centroidsOther, counts); else cNorm = lloydStep.Iterate(centroidsOther, centroids, counts); // If we are not allowing empty clusters, then check that all of our // clusters have points. 
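    // Aside: the iteration-parity trick above is plain double buffering --
    // the two matrices alternate roles as "current" and "next" centroids so
    // no copy is ever made between Lloyd steps.  A minimal standalone sketch
    // of the same pattern (illustrative names; excluded from compilation):
#if 0
    arma::mat a, b;  // Two centroid buffers.
    for (size_t iter = 0; iter < maxIter; ++iter)
    {
      if (iter % 2 == 0)
        residual = step.Iterate(a, b, counts);  // Read a, write b.
      else
        residual = step.Iterate(b, a, counts);  // Read b, write a.
    }
#endif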
for (size_t i = 0; i < clusters; i++) { if (counts[i] == 0) { Log::Info << "Cluster " << i << " is empty.\n"; if (iteration % 2 == 0) emptyClusterAction.EmptyCluster(data, i, centroids, centroidsOther, counts, metric, iteration); else emptyClusterAction.EmptyCluster(data, i, centroidsOther, centroids, counts, metric, iteration); } } iteration++; Log::Info << "KMeans::Cluster(): iteration " << iteration << ", residual " << cNorm << ".\n"; if (std::isnan(cNorm) || std::isinf(cNorm)) cNorm = 1e-4; // Keep iterating. } while (cNorm > 1e-5 && iteration != maxIterations); // If we ended on an even iteration, then the centroids are in the // centroidsOther matrix, and we need to steal its memory (steal_mem() avoids // a copy if possible). if ((iteration - 1) % 2 == 0) centroids.steal_mem(centroidsOther); if (iteration != maxIterations) { Log::Info << "KMeans::Cluster(): converged after " << iteration << " iterations." << std::endl; } else { Log::Info << "KMeans::Cluster(): terminated after limit of " << iteration << " iterations." << std::endl; } Log::Info << lloydStep.DistanceCalculations() << " distance calculations." << std::endl; } /** * Perform k-means clustering on the data, returning a list of cluster * assignments and the centroids of each cluster. */ template class LloydStepType, typename MatType> void KMeans< MetricType, InitialPartitionPolicy, EmptyClusterPolicy, LloydStepType, MatType>:: Cluster(const MatType& data, const size_t clusters, arma::Row& assignments, arma::mat& centroids, const bool initialAssignmentGuess, const bool initialCentroidGuess) { // Now, the initial assignments. First determine if they are necessary. if (initialAssignmentGuess) { if (assignments.n_elem != data.n_cols) Log::Fatal << "KMeans::Cluster(): initial cluster assignments (length " << assignments.n_elem << ") not the same size as the dataset (size " << data.n_cols << ")!" << std::endl; // Calculate initial centroids. arma::Row counts; counts.zeros(clusters); centroids.zeros(data.n_rows, clusters); for (size_t i = 0; i < data.n_cols; ++i) { centroids.col(assignments[i]) += arma::vec(data.col(i)); counts[assignments[i]]++; } for (size_t i = 0; i < clusters; ++i) if (counts[i] != 0) centroids.col(i) /= counts[i]; } Cluster(data, clusters, centroids, initialAssignmentGuess || initialCentroidGuess); // Calculate final assignments. assignments.set_size(data.n_cols); for (size_t i = 0; i < data.n_cols; ++i) { // Find the closest centroid to this point. double minDistance = std::numeric_limits::infinity(); size_t closestCluster = centroids.n_cols; // Invalid value. for (size_t j = 0; j < centroids.n_cols; j++) { const double distance = metric.Evaluate(data.col(i), centroids.col(j)); if (distance < minDistance) { minDistance = distance; closestCluster = j; } } Log::Assert(closestCluster != centroids.n_cols); assignments[i] = closestCluster; } } template class LloydStepType, typename MatType> template void KMeans::Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(maxIterations, "max_iterations"); ar & data::CreateNVP(metric, "metric"); ar & data::CreateNVP(partitioner, "partitioner"); ar & data::CreateNVP(emptyClusterAction, "emptyClusterAction"); } } // namespace kmeans } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/kmeans/kmeans_main.cpp000066400000000000000000000321341315013601400227310ustar00rootroot00000000000000/** * @file kmeans_main.cpp * @author Ryan Curtin * * Executable for running K-Means. 
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "kmeans.hpp" #include "allow_empty_clusters.hpp" #include "kill_empty_clusters.hpp" #include "refined_start.hpp" #include "elkan_kmeans.hpp" #include "hamerly_kmeans.hpp" #include "pelleg_moore_kmeans.hpp" #include "dual_tree_kmeans.hpp" using namespace mlpack; using namespace mlpack::kmeans; using namespace std; // Define parameters for the executable. PROGRAM_INFO("K-Means Clustering", "This program performs K-Means clustering " "on the given dataset, storing the learned cluster assignments either as " "a column of labels in the file containing the input dataset or in a " "separate file. Empty clusters are not allowed by default; when a cluster " "becomes empty, the point furthest from the centroid of the cluster with " "maximum variance is taken to fill that cluster." "\n\n" "Optionally, the Bradley and Fayyad approach (\"Refining initial points for" " k-means clustering\", 1998) can be used to select initial points by " "specifying the --refined_start (-r) option. This approach works by taking" " random samples of the dataset; to specify the number of samples, the " "--samples parameter is used, and to specify the percentage of the dataset " "to be used in each sample, the --percentage parameter is used (it should " "be a value between 0.0 and 1.0)." "\n\n" "There are several options available for the algorithm used for each Lloyd " "iteration, specified with the --algorithm (-a) option. The standard O(kN)" " approach can be used ('naive'). Other options include the Pelleg-Moore " "tree-based algorithm ('pelleg-moore'), Elkan's triangle-inequality based " "algorithm ('elkan'), Hamerly's modification to Elkan's algorithm " "('hamerly'), the dual-tree k-means algorithm ('dualtree'), and the " "dual-tree k-means algorithm using the cover tree ('dualtree-covertree')." "\n\n" "The behavior for when an empty cluster is encountered can be modified with" " the --allow_empty_clusters (-e) option. When this option is specified " "and there is a cluster owning no points at the end of an iteration, that " "cluster's centroid will simply remain in its position from the previous " "iteration. If the --kill_empty_clusters (-E) option is specified, then " "when a cluster owns no points at the end of an iteration, the cluster " "centroid is simply filled with DBL_MAX, killing it and effectively " "reducing k for the rest of the computation. Note that the default option " "when neither empty cluster option is specified can be time-consuming to " "calculate; therefore, specifying -e or -E will often accelerate runtime." "\n\n" "As of October 2014, the --overclustering option has been removed. If you " "want this support back, let us know---file a bug at " "https://github.com/mlpack/mlpack/ or get in touch through another means."); // Required options. PARAM_STRING_IN_REQ("input_file", "Input dataset to perform clustering on.", "i"); PARAM_INT_IN_REQ("clusters", "Number of clusters to find (0 autodetects from " "initial centroids).", "c"); // Output options. PARAM_FLAG("in_place", "If specified, a column containing the learned cluster " "assignments will be added to the input dataset file. 
In this case, " "--outputFile is overridden.", "P"); PARAM_STRING_OUT("output_file", "File to write output labels or labeled data " "to.", "o"); PARAM_STRING_OUT("centroid_file", "If specified, the centroids of each cluster " "will be written to the given file.", "C"); // k-means configuration options. PARAM_FLAG("allow_empty_clusters", "Allow empty clusters to be persist.", "e"); PARAM_FLAG("kill_empty_clusters", "Remove empty clusters when they occur.", "E"); PARAM_FLAG("labels_only", "Only output labels into output file.", "l"); PARAM_INT_IN("max_iterations", "Maximum number of iterations before k-means " "terminates.", "m", 1000); PARAM_INT_IN("seed", "Random seed. If 0, 'std::time(NULL)' is used.", "s", 0); PARAM_STRING_IN("initial_centroids", "Start with the specified initial " "centroids.", "I", ""); // Parameters for "refined start" k-means. PARAM_FLAG("refined_start", "Use the refined initial point strategy by Bradley " "and Fayyad to choose initial points.", "r"); PARAM_INT_IN("samplings", "Number of samplings to perform for refined start " "(use when --refined_start is specified).", "S", 100); PARAM_DOUBLE_IN("percentage", "Percentage of dataset to use for each refined " "start sampling (use when --refined_start is specified).", "p", 0.02); PARAM_STRING_IN("algorithm", "Algorithm to use for the Lloyd iteration " "('naive', 'pelleg-moore', 'elkan', 'hamerly', 'dualtree', or " "'dualtree-covertree').", "a", "naive"); // Given the type of initial partition policy, figure out the empty cluster // policy and run k-means. template void FindEmptyClusterPolicy(const InitialPartitionPolicy& ipp); // Given the initial partitionining policy and empty cluster policy, figure out // the Lloyd iteration step type and run k-means. template void FindLloydStepType(const InitialPartitionPolicy& ipp); // Given the template parameters, sanitize/load input and run k-means. template class LloydStepType> void RunKMeans(const InitialPartitionPolicy& ipp); int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); // Initialize random seed. if (CLI::GetParam("seed") != 0) math::RandomSeed((size_t) CLI::GetParam("seed")); else math::RandomSeed((size_t) std::time(NULL)); // Now, start building the KMeans type that we'll be using. Start with the // initial partition policy. The call to FindEmptyClusterPolicy<> results in // a call to RunKMeans<> and the algorithm is completed. if (CLI::HasParam("refined_start")) { const int samplings = CLI::GetParam("samplings"); const double percentage = CLI::GetParam("percentage"); if (samplings < 0) Log::Fatal << "Number of samplings (" << samplings << ") must be " << "greater than 0!" << endl; if (percentage <= 0.0 || percentage > 1.0) Log::Fatal << "Percentage for sampling (" << percentage << ") must be " << "greater than 0.0 and less than or equal to 1.0!" << endl; FindEmptyClusterPolicy(RefinedStart(samplings, percentage)); } else { FindEmptyClusterPolicy(SampleInitialization()); } } // Given the type of initial partition policy, figure out the empty cluster // policy and run k-means. template void FindEmptyClusterPolicy(const InitialPartitionPolicy& ipp) { if (CLI::HasParam("allow_empty_clusters") && CLI::HasParam("kill_empty_clusters")) Log::Fatal << "Only one of --allow_empty_clusters (-e) or " << "--kill_empty_clusters (-E) may be specified!" 
<< endl; else if (CLI::HasParam("allow_empty_clusters")) FindLloydStepType(ipp); else if (CLI::HasParam("kill_empty_clusters")) FindLloydStepType(ipp); else FindLloydStepType(ipp); } // Given the initial partitionining policy and empty cluster policy, figure out // the Lloyd iteration step type and run k-means. template void FindLloydStepType(const InitialPartitionPolicy& ipp) { const string algorithm = CLI::GetParam("algorithm"); if (algorithm == "elkan") RunKMeans(ipp); else if (algorithm == "hamerly") RunKMeans(ipp); else if (algorithm == "pelleg-moore") RunKMeans(ipp); else if (algorithm == "dualtree") RunKMeans(ipp); else if (algorithm == "dualtree-covertree") RunKMeans(ipp); else if (algorithm == "naive") RunKMeans(ipp); else Log::Fatal << "Unknown algorithm: '" << algorithm << "'. Supported options" << " are 'naive', 'pelleg-moore', 'elkan', 'hamerly', 'dualtree', and " << "'dualtree-covertree'." << endl; } // Given the template parameters, sanitize/load input and run k-means. template class LloydStepType> void RunKMeans(const InitialPartitionPolicy& ipp) { // Now, do validation of input options. const string inputFile = CLI::GetParam("input_file"); int clusters = CLI::GetParam("clusters"); if (clusters < 0) { Log::Fatal << "Invalid number of clusters requested (" << clusters << ")! " << "Must be greater than or equal to 0." << endl; } else if (clusters == 0 && CLI::HasParam("initial_centroids")) { Log::Info << "Detecting number of clusters automatically from input " << "centroids." << endl; } else if (clusters == 0) { Log::Fatal << "Number of clusters requested is 0, and no initial centroids " << "provided!" << endl; } const int maxIterations = CLI::GetParam("max_iterations"); if (maxIterations < 0) { Log::Fatal << "Invalid value for maximum iterations (" << maxIterations << ")! Must be greater than or equal to 0." << endl; } // Make sure we have an output file if we're not doing the work in-place. if (!CLI::HasParam("in_place") && !CLI::HasParam("output_file") && !CLI::HasParam("centroid_file")) { Log::Warn << "--output_file, --in_place, and --centroid_file are not set; " << "no results will be saved." << std::endl; } // Load our dataset. arma::mat dataset; data::Load(inputFile, dataset, true); // Fatal upon failure. arma::mat centroids; const bool initialCentroidGuess = CLI::HasParam("initial_centroids"); // Load initial centroids if the user asked for it. if (initialCentroidGuess) { string initialCentroidsFile = CLI::GetParam("initial_centroids"); data::Load(initialCentroidsFile, centroids, true); if (clusters == 0) clusters = centroids.n_cols; if (CLI::HasParam("refined_start")) Log::Warn << "Initial centroids are specified, but will be ignored " << "because --refined_start is also specified!" << endl; else Log::Info << "Using initial centroid guesses from '" << initialCentroidsFile << "'." << endl; } Timer::Start("clustering"); KMeans kmeans(maxIterations, metric::EuclideanDistance(), ipp); if (CLI::HasParam("output_file") || CLI::HasParam("in_place")) { // We need to get the assignments. arma::Row assignments; kmeans.Cluster(dataset, clusters, assignments, centroids, false, initialCentroidGuess); Timer::Stop("clustering"); // Now figure out what to do with our results. if (CLI::HasParam("in_place")) { // Add the column of assignments to the dataset; but we have to convert // them to type double first. 
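      // Aside: the explicit loop below just spells out the element-wise cast
      // from size_t labels to doubles; an equivalent one-liner would be
      // Armadillo's converter, e.g. (sketch, excluded from compilation):
#if 0
      arma::rowvec converted = arma::conv_to<arma::rowvec>::from(assignments);
#endif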
arma::rowvec converted(assignments.n_elem); for (size_t i = 0; i < assignments.n_elem; i++) converted(i) = (double) assignments(i); dataset.insert_rows(dataset.n_rows, converted); // Save the dataset. data::Save(inputFile, dataset); } else { if (CLI::HasParam("labels_only")) { // Save only the labels. string outputFile = CLI::GetParam("output_file"); data::Save(outputFile, assignments); } else { // Convert the assignments to doubles. arma::rowvec converted(assignments.n_elem); for (size_t i = 0; i < assignments.n_elem; i++) converted(i) = (double) assignments(i); dataset.insert_rows(dataset.n_rows, converted); // Now save, in the different file. string outputFile = CLI::GetParam("output_file"); data::Save(outputFile, dataset); } } } else { // Just save the centroids. kmeans.Cluster(dataset, clusters, centroids, initialCentroidGuess); Timer::Stop("clustering"); } // Should we write the centroids to a file? if (CLI::HasParam("centroid_file")) data::Save(CLI::GetParam("centroid_file"), centroids); } mlpack-2.2.5/src/mlpack/methods/kmeans/max_variance_new_cluster.hpp000066400000000000000000000055711315013601400255300ustar00rootroot00000000000000/** * @file max_variance_new_cluster.hpp * @author Ryan Curtin * * An implementation of the EmptyClusterPolicy policy class for K-Means. When * an empty cluster is detected, the point furthest from the centroid of the * cluster with maximum variance is taken to be a new cluster. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_MAX_VARIANCE_NEW_CLUSTER_HPP #define MLPACK_METHODS_KMEANS_MAX_VARIANCE_NEW_CLUSTER_HPP #include namespace mlpack { namespace kmeans { /** * When an empty cluster is detected, this class takes the point furthest from * the centroid of the cluster with maximum variance as a new cluster. */ class MaxVarianceNewCluster { public: //! Default constructor required by EmptyClusterPolicy. MaxVarianceNewCluster() : iteration(size_t(-1)) { } /** * Take the point furthest from the centroid of the cluster with maximum * variance to be a new cluster. * * @tparam MatType Type of data (arma::mat or arma::sp_mat). * @param data Dataset on which clustering is being performed. * @param emptyCluster Index of cluster which is empty. * @param oldCentroids Centroids of each cluster (one per column), at the * start of the iteration. * @param newCentroids Centroids of each cluster (one per column), at the end * of the iteration. This will be modified! * @param clusterCounts Number of points in each cluster. * @param assignments Cluster assignments of each point. * * @return Number of points changed. */ template size_t EmptyCluster(const MatType& data, const size_t emptyCluster, const arma::mat& oldCentroids, arma::mat& newCentroids, arma::Col& clusterCounts, MetricType& metric, const size_t iteration); //! Serialize the object. template void Serialize(Archive& ar, const unsigned int version); private: //! Index of iteration for which variance is cached. size_t iteration; //! Cached variances for each cluster. arma::vec variances; //! Cached assignments for each point. arma::Row assignments; //! Called when we are on a new iteration. 
template void Precalculate(const MatType& data, const arma::mat& oldCentroids, arma::Col& clusterCounts, MetricType& metric); }; } // namespace kmeans } // namespace mlpack // Include implementation. #include "max_variance_new_cluster_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/kmeans/max_variance_new_cluster_impl.hpp000066400000000000000000000135561315013601400265530ustar00rootroot00000000000000/** * @file max_variance_new_cluster_impl.hpp * @author Ryan Curtin * * Implementation of MaxVarianceNewCluster class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_MAX_VARIANCE_NEW_CLUSTER_IMPL_HPP #define MLPACK_METHODS_KMEANS_MAX_VARIANCE_NEW_CLUSTER_IMPL_HPP // Just in case it has not been included. #include "max_variance_new_cluster.hpp" namespace mlpack { namespace kmeans { /** * Take action about an empty cluster. */ template size_t MaxVarianceNewCluster::EmptyCluster(const MatType& data, const size_t emptyCluster, const arma::mat& oldCentroids, arma::mat& newCentroids, arma::Col& clusterCounts, MetricType& metric, const size_t iteration) { // If necessary, calculate the variances and assignments. if (iteration != this->iteration || assignments.n_elem != data.n_cols) Precalculate(data, oldCentroids, clusterCounts, metric); this->iteration = iteration; // Now find the cluster with maximum variance. arma::uword maxVarCluster = 0; variances.max(maxVarCluster); // If the cluster with maximum variance has variance of 0, then we can't // continue. All the points are the same. if (variances[maxVarCluster] == 0.0) return 0; // Now, inside this cluster, find the point which is furthest away. size_t furthestPoint = data.n_cols; double maxDistance = -DBL_MAX; for (size_t i = 0; i < data.n_cols; ++i) { if (assignments[i] == maxVarCluster) { const double distance = std::pow(metric.Evaluate(data.col(i), newCentroids.col(maxVarCluster)), 2.0); if (distance > maxDistance) { maxDistance = distance; furthestPoint = i; } } } // Take that point and add it to the empty cluster. newCentroids.col(maxVarCluster) *= (double(clusterCounts[maxVarCluster]) / double(clusterCounts[maxVarCluster] - 1)); newCentroids.col(maxVarCluster) -= (1.0 / (clusterCounts[maxVarCluster] - 1.0)) * arma::vec(data.col(furthestPoint)); clusterCounts[maxVarCluster]--; clusterCounts[emptyCluster]++; newCentroids.col(emptyCluster) = arma::vec(data.col(furthestPoint)); assignments[furthestPoint] = emptyCluster; // Modify the variances, as necessary. variances[emptyCluster] = 0; // One has already been subtracted from clusterCounts[maxVarCluster]. If // EmptyCluster() is called again, we can't pull another point from // maxVarCluster (we'd be making an empty cluster), so force a call to // Precalculate() if EmptyCluster() is called again by changing // this->iteration. if (clusterCounts[maxVarCluster] <= 1) { variances[maxVarCluster] = 0; --this->iteration; } else { variances[maxVarCluster] = (1.0 / clusterCounts[maxVarCluster]) * ((clusterCounts[maxVarCluster] + 1) * variances[maxVarCluster] - maxDistance); } // Output some debugging information. Log::Debug << "Point " << furthestPoint << " assigned to empty cluster " << emptyCluster << ".\n"; return 1; // We only changed one point. } //! Serialize the object. 
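// (Usage note, shown only as a sketch: this method is reached through
// boost::serialization when a clustering model holding this policy is saved
// or loaded -- e.g. a hypothetical
//     data::Save("model.xml", "kmeans_model", kmeans);
// call -- and on load it simply discards the cached state, as explained
// below.)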
template void MaxVarianceNewCluster::Serialize(Archive& /* ar */, const unsigned int /* version */) { // Serialization is useless here, because the only thing we store is // precalculated quantities, and if we're serializing, our precalculations are // likely to be useless when we deserialize (because the user will be running // a different clustering, probably). So there is no need to store anything, // and if we are loading, we just reset the assignments array so // precalculation will happen next time EmptyCluster() is called. if (Archive::is_loading::value) assignments.set_size(0); } template void MaxVarianceNewCluster::Precalculate(const MatType& data, const arma::mat& oldCentroids, arma::Col& clusterCounts, MetricType& metric) { // We have to calculate the variances of each cluster and the assignments of // each point. This is most easily done by iterating through the entire // dataset. variances.zeros(oldCentroids.n_cols); assignments.set_size(data.n_cols); // Add the variance of each point's distance away from the cluster. I think // this is the sensible thing to do. for (size_t i = 0; i < data.n_cols; ++i) { // Find the closest centroid to this point. double minDistance = std::numeric_limits::infinity(); size_t closestCluster = oldCentroids.n_cols; // Invalid value. for (size_t j = 0; j < oldCentroids.n_cols; j++) { const double distance = metric.Evaluate(data.col(i), oldCentroids.col(j)); if (distance < minDistance) { minDistance = distance; closestCluster = j; } } assignments[i] = closestCluster; variances[closestCluster] += std::pow(metric.Evaluate(data.col(i), oldCentroids.col(closestCluster)), 2.0); } // Divide by the number of points in the cluster to produce the variance, // unless the cluster is empty or contains only one point, in which case we // set the variance to 0. for (size_t i = 0; i < clusterCounts.n_elem; ++i) if (clusterCounts[i] <= 1) variances[i] = 0; else variances[i] /= clusterCounts[i]; } } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/naive_kmeans.hpp000066400000000000000000000046051315013601400231160ustar00rootroot00000000000000/** * @file naive_kmeans.hpp * @author Ryan Curtin * * An implementation of a naively-implemented step of the Lloyd algorithm for * k-means clustering. This may still be the best choice for small datasets or * datasets with very high dimensionality. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_NAIVE_KMEANS_HPP #define MLPACK_METHODS_KMEANS_NAIVE_KMEANS_HPP namespace mlpack { namespace kmeans { /** * This is an implementation of a single iteration of Lloyd's algorithm for * k-means. If your intention is to run the full k-means algorithm, you are * looking for the mlpack::kmeans::KMeans class instead of this one. This class * is used by KMeans as the actual implementation of the Lloyd iteration. * * @param MetricType Type of metric used with this implementation. * @param MatType Matrix type (arma::mat or arma::sp_mat). */ template class NaiveKMeans { public: /** * Construct the NaiveKMeans object with the given dataset and metric. * * @param dataset Dataset. * @param metric Instantiated metric. 
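   *
   * A minimal usage sketch (variable names here are hypothetical, for
   * illustration only):
   *
   * @code
   * metric::EuclideanDistance metric;
   * NaiveKMeans<metric::EuclideanDistance, arma::mat> lloyd(dataset, metric);
   * arma::mat newCentroids;
   * arma::Col<size_t> counts;
   * const double movement = lloyd.Iterate(centroids, newCentroids, counts);
   * @endcode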
   */
  NaiveKMeans(const MatType& dataset, MetricType& metric);

  /**
   * Run a single iteration of the Lloyd algorithm, updating the given
   * centroids into the newCentroids matrix.  If any cluster is empty (that
   * is, if any cluster has no points assigned to it), then the centroid
   * associated with that cluster may be filled with invalid data (it will be
   * corrected later).
   *
   * @param centroids Current cluster centroids.
   * @param newCentroids New cluster centroids.
   * @param counts Number of points in each cluster at the end of the
   *      iteration.
   */
  double Iterate(const arma::mat& centroids,
                 arma::mat& newCentroids,
                 arma::Col<size_t>& counts);

  size_t DistanceCalculations() const { return distanceCalculations; }

 private:
  //! The dataset.
  const MatType& dataset;
  //! The instantiated metric.
  MetricType& metric;
  //! Number of distance calculations.
  size_t distanceCalculations;
};

} // namespace kmeans
} // namespace mlpack

// Include implementation.
#include "naive_kmeans_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/methods/kmeans/naive_kmeans_impl.hpp000066400000000000000000000053601315013601400241360ustar00rootroot00000000000000/**
 * @file naive_kmeans_impl.hpp
 * @author Ryan Curtin
 *
 * An implementation of a naively-implemented step of the Lloyd algorithm for
 * k-means clustering.  This may still be the best choice for small datasets
 * or datasets with very high dimensionality.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_KMEANS_NAIVE_KMEANS_IMPL_HPP
#define MLPACK_METHODS_KMEANS_NAIVE_KMEANS_IMPL_HPP

// In case it hasn't been included yet.
#include "naive_kmeans.hpp"

namespace mlpack {
namespace kmeans {

template<typename MetricType, typename MatType>
NaiveKMeans<MetricType, MatType>::NaiveKMeans(const MatType& dataset,
                                              MetricType& metric) :
    dataset(dataset),
    metric(metric),
    distanceCalculations(0)
{ /* Nothing to do. */ }

// Run a single iteration.
template<typename MetricType, typename MatType>
double NaiveKMeans<MetricType, MatType>::Iterate(const arma::mat& centroids,
                                                 arma::mat& newCentroids,
                                                 arma::Col<size_t>& counts)
{
  newCentroids.zeros(centroids.n_rows, centroids.n_cols);
  counts.zeros(centroids.n_cols);

  // Find the closest centroid to each point and update the new centroids.
  for (size_t i = 0; i < dataset.n_cols; i++)
  {
    // Find the closest centroid to this point.
    double minDistance = std::numeric_limits<double>::infinity();
    size_t closestCluster = centroids.n_cols; // Invalid value.

    for (size_t j = 0; j < centroids.n_cols; j++)
    {
      const double distance = metric.Evaluate(dataset.col(i),
          centroids.col(j));

      if (distance < minDistance)
      {
        minDistance = distance;
        closestCluster = j;
      }
    }

    Log::Assert(closestCluster != centroids.n_cols);

    // We now have the minimum distance centroid index.  Update that centroid.
    newCentroids.col(closestCluster) += arma::vec(dataset.col(i));
    counts(closestCluster)++;
  }

  // Now normalize the centroid.
  for (size_t i = 0; i < centroids.n_cols; ++i)
    if (counts(i) != 0)
      newCentroids.col(i) /= counts(i);

  distanceCalculations += centroids.n_cols * dataset.n_cols;

  // Calculate cluster distortion for this iteration.
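  // (The value returned below is the total centroid movement,
  //     sqrt(sum_c || oldCentroid_c - newCentroid_c ||^2),
  // which the calling KMeans implementation can compare against its
  // convergence tolerance.)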
double cNorm = 0.0; for (size_t i = 0; i < centroids.n_cols; ++i) { cNorm += std::pow(metric.Evaluate(centroids.col(i), newCentroids.col(i)), 2.0); } distanceCalculations += centroids.n_cols; return std::sqrt(cNorm); } } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/pelleg_moore_kmeans.hpp000066400000000000000000000055661315013601400244740ustar00rootroot00000000000000/** * @file pelleg_moore_kmeans.hpp * @author Ryan Curtin * * An implementation of Pelleg-Moore's 'blacklist' algorithm for k-means * clustering. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_HPP #define MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_HPP #include #include "pelleg_moore_kmeans_statistic.hpp" namespace mlpack { namespace kmeans { /** * An implementation of Pelleg-Moore's 'blacklist' algorithm for k-means * clustering. This algorithm builds a kd-tree on the data points and traverses * it in order to determine the closest clusters to each point. * * For more information on the algorithm, see * * @code * @inproceedings{pelleg1999accelerating, * title={Accelerating exact k-means algorithms with geometric reasoning}, * author={Pelleg, Dan and Moore, Andrew W.}, * booktitle={Proceedings of the Fifth ACM SIGKDD International Conference * on Knowledge Discovery and Data Mining (KDD '99)}, * pages={277--281}, * year={1999}, * organization={ACM} * } * @endcode */ template class PellegMooreKMeans { public: /** * Construct the PellegMooreKMeans object, which must construct a tree. */ PellegMooreKMeans(const MatType& dataset, MetricType& metric); /** * Delete the tree constructed by the PellegMooreKMeans object. */ ~PellegMooreKMeans(); /** * Run a single iteration of the Pelleg-Moore blacklist algorithm, updating * the given centroids into the newCentroids matrix. * * @param centroids Current cluster centroids. * @param newCentroids New cluster centroids. * @param counts Current counts, to be overwritten with new counts. */ double Iterate(const arma::mat& centroids, arma::mat& newCentroids, arma::Col& counts); //! Return the number of distance calculations. size_t DistanceCalculations() const { return distanceCalculations; } //! Modify the number of distance calculations. size_t& DistanceCalculations() { return distanceCalculations; } //! Convenience typedef for the tree. typedef tree::KDTree TreeType; private: //! The original dataset reference. const MatType& datasetOrig; // Maybe not necessary. //! The tree built on the points. TreeType* tree; //! The dataset we are using. const MatType& dataset; //! The metric. MetricType& metric; //! Track distance calculations. size_t distanceCalculations; }; } // namespace kmeans } // namespace mlpack #include "pelleg_moore_kmeans_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/kmeans/pelleg_moore_kmeans_impl.hpp000066400000000000000000000046651315013601400255140ustar00rootroot00000000000000/** * @file pelleg_moore_kmeans_impl.hpp * @author Ryan Curtin * * An implementation of Pelleg-Moore's 'blacklist' algorithm for k-means * clustering. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. 
If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_IMPL_HPP #define MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_IMPL_HPP #include "pelleg_moore_kmeans.hpp" #include "pelleg_moore_kmeans_rules.hpp" namespace mlpack { namespace kmeans { template PellegMooreKMeans::PellegMooreKMeans( const MatType& dataset, MetricType& metric) : datasetOrig(dataset), tree(new TreeType(const_cast(datasetOrig))), dataset(tree->Dataset()), metric(metric), distanceCalculations(0) { // Nothing to do. } template PellegMooreKMeans::~PellegMooreKMeans() { if (tree) delete tree; } // Run a single iteration. template double PellegMooreKMeans::Iterate( const arma::mat& centroids, arma::mat& newCentroids, arma::Col& counts) { newCentroids.zeros(centroids.n_rows, centroids.n_cols); counts.zeros(centroids.n_cols); // Create rules object. typedef PellegMooreKMeansRules RulesType; RulesType rules(dataset, centroids, newCentroids, counts, metric); // Use single-tree traverser. typename TreeType::template SingleTreeTraverser traverser(rules); // Now, do a traversal with a fake query index (since the query index is // irrelevant; we are checking each node with all clusters. traverser.Traverse(0, *tree); distanceCalculations += rules.DistanceCalculations(); // Now, calculate how far the clusters moved, after normalizing them. double residual = 0.0; for (size_t c = 0; c < centroids.n_cols; ++c) { if (counts[c] > 0) { newCentroids.col(c) /= counts(c); residual += std::pow(metric.Evaluate(centroids.col(c), newCentroids.col(c)), 2.0); } } distanceCalculations += centroids.n_cols; return std::sqrt(residual); } } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/pelleg_moore_kmeans_rules.hpp000066400000000000000000000101441315013601400256720ustar00rootroot00000000000000/** * @file kmeans_rules.hpp * @author Ryan Curtin * * Defines the pruning rules and base cases rules necessary to perform * single-tree k-means clustering using the Pelleg-Moore fast k-means algorithm, * which has been shoehorned to fit into the mlpack tree abstractions. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_RULES_HPP #define MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_RULES_HPP namespace mlpack { namespace kmeans { /** * The rules class for the single-tree Pelleg-Moore kd-tree traversal for * k-means clustering. Although TreeType is a free template parameter, this * particular implementation is specialized to trees with hyper-rectangle bounds * due to the pruning rule used to determine if one cluster dominates a node * with respect to another cluster. * * Our implementation here abuses the single-tree algorithm abstractions a * little bit. Instead of doing a traversal for a particular query point, in * this case we consider all clusters at once---so the query point is entirely * ignored during in BaseCase() and Score(). */ template class PellegMooreKMeansRules { public: /** * Create the PellegMooreKMeansRules object. * * @param dataset The dataset that the tree is built on. * @param centroids The current centroids. * @param newCentroids New centroids after this iteration (output). 
* @param counts Current cluster counts, to be replaced with new cluster * counts. * @param metric Instantiated metric. */ PellegMooreKMeansRules(const typename TreeType::Mat& dataset, const arma::mat& centroids, arma::mat& newCentroids, arma::Col& counts, MetricType& metric); /** * The BaseCase() function for this single-tree algorithm does nothing. * Instead, point-to-cluster comparisons are handled as necessary in Score(). * * @param queryIndex Index of query point (fake, will be ignored). * @param referenceIndex Index of reference point. */ double BaseCase(const size_t queryIndex, const size_t referenceIndex); /** * Determine if a cluster can be pruned, and if not, perform point-to-cluster * comparisons. The point-to-cluster comparisons are performed here and not * in BaseCase() because of the complexity of managing the blacklist. * * @param queryIndex Index of query point (fake, will be ignored). * @param referenceNode Node containing points in the dataset. */ double Score(const size_t queryIndex, TreeType& referenceNode); /** * Rescore to determine if a node can be pruned. In this case, a node can * never be pruned during rescoring, so this just returns oldScore. * * @param queryIndex Index of query point (fake, will be ignored). * @param referenceNode Node containing points in the dataset. * @param oldScore Resulting score from Score(). */ double Rescore(const size_t queryIndex, TreeType& referenceNode, const double oldScore); //! Get the number of distance calculations that have been performed. size_t DistanceCalculations() const { return distanceCalculations; } //! Modify the number of distance calculations that have been performed. size_t& DistanceCalculations() { return distanceCalculations; } private: //! The dataset. const typename TreeType::Mat& dataset; //! The clusters. const arma::mat& centroids; //! The new centroids. arma::mat& newCentroids; //! The counts of points in each cluster. arma::Col& counts; //! Instantiated metric. MetricType& metric; //! The number of O(d) distance calculations that have been performed. size_t distanceCalculations; }; } // namespace kmeans } // namespace mlpack // Include implementation. #include "pelleg_moore_kmeans_rules_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/kmeans/pelleg_moore_kmeans_rules_impl.hpp000066400000000000000000000136711315013601400267230ustar00rootroot00000000000000/** * @file pelleg_moore_kmeans_rules_impl.hpp * @author Ryan Curtin * * Implementation of the pruning rules and base cases necessary to perform * single-tree k-means clustering using the fast Pelleg-Moore k-means algorithm, * which has been shoehorned into the mlpack tree abstractions. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_RULES_IMPL_HPP #define MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_RULES_IMPL_HPP // In case it hasn't been included yet. #include "pelleg_moore_kmeans_rules.hpp" namespace mlpack { namespace kmeans { template PellegMooreKMeansRules::PellegMooreKMeansRules( const typename TreeType::Mat& dataset, const arma::mat& centroids, arma::mat& newCentroids, arma::Col& counts, MetricType& metric) : dataset(dataset), centroids(centroids), newCentroids(newCentroids), counts(counts), metric(metric), distanceCalculations(0) { // Nothing to do. 
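  // (Note: a fresh rules object is constructed for every Lloyd iteration;
  // the per-node blacklists persist in the tree's statistics instead, and
  // Score() lazily resets them by copying from the parent node.)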
} template inline force_inline double PellegMooreKMeansRules::BaseCase( const size_t /* queryIndex */, const size_t /* referenceIndex */) { return 0.0; } template double PellegMooreKMeansRules::Score( const size_t /* queryIndex */, TreeType& referenceNode) { // Obtain the parent's blacklist. If this is the root node, we'll start with // an empty blacklist. This means that after each iteration, we don't need to // reset any statistics. if (referenceNode.Parent() == NULL || referenceNode.Parent()->Stat().Blacklist().n_elem == 0) referenceNode.Stat().Blacklist().zeros(centroids.n_cols); else referenceNode.Stat().Blacklist() = referenceNode.Parent()->Stat().Blacklist(); // The query index is a fake index that we won't use, and the reference node // holds all of the points in the dataset. Our goal is to determine whether // or not this node is dominated by a single cluster. const size_t whitelisted = centroids.n_cols - arma::accu(referenceNode.Stat().Blacklist()); distanceCalculations += whitelisted; // Which cluster has minimum distance to the node? size_t closestCluster = centroids.n_cols; double minMinDistance = DBL_MAX; for (size_t i = 0; i < centroids.n_cols; ++i) { if (referenceNode.Stat().Blacklist()[i] == 0) { const double minDistance = referenceNode.MinDistance(centroids.col(i)); if (minDistance < minMinDistance) { minMinDistance = minDistance; closestCluster = i; } } } // Now, for every other whitelisted cluster, determine if the closest cluster // owns the point. This calculation is specific to hyperrectangle trees (but, // this implementation is specific to kd-trees, so that's okay). For // circular-bound trees, the condition should be simpler and can probably be // expressed as a comparison between minimum and maximum distances. size_t newBlacklisted = 0; for (size_t c = 0; c < centroids.n_cols; ++c) { if (referenceNode.Stat().Blacklist()[c] == 1 || c == closestCluster) continue; // This algorithm comes from the proof of Lemma 4 in the extended version // of the Pelleg-Moore paper (the CMU tech report, that is). It has been // adapted for speed. arma::vec cornerPoint(centroids.n_rows); for (size_t d = 0; d < referenceNode.Bound().Dim(); ++d) { if (centroids(d, c) > centroids(d, closestCluster)) cornerPoint(d) = referenceNode.Bound()[d].Hi(); else cornerPoint(d) = referenceNode.Bound()[d].Lo(); } const double closestDist = metric.Evaluate(cornerPoint, centroids.col(closestCluster)); const double otherDist = metric.Evaluate(cornerPoint, centroids.col(c)); distanceCalculations += 3; // One for cornerPoint, then two distances. if (closestDist < otherDist) { // The closest cluster dominates the node with respect to the cluster c. // So we can blacklist c. referenceNode.Stat().Blacklist()[c] = 1; ++newBlacklisted; } } if (whitelisted - newBlacklisted == 1) { // This node is dominated by the closest cluster. counts[closestCluster] += referenceNode.NumDescendants(); newCentroids.col(closestCluster) += referenceNode.NumDescendants() * referenceNode.Stat().Centroid(); return DBL_MAX; } // Perform the base case here. for (size_t i = 0; i < referenceNode.NumPoints(); ++i) { size_t bestCluster = centroids.n_cols; double bestDistance = DBL_MAX; for (size_t c = 0; c < centroids.n_cols; ++c) { if (referenceNode.Stat().Blacklist()[c] == 1) continue; ++distanceCalculations; // The reference index is the index of the data point. 
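      // (referenceNode.Point(i) maps the node-local index i back to a column
      // of the dataset, since the kd-tree build may have permuted the
      // points.)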
const double distance = metric.Evaluate(centroids.col(c), dataset.col(referenceNode.Point(i))); if (distance < bestDistance) { bestDistance = distance; bestCluster = c; } } // Add to resulting centroid. newCentroids.col(bestCluster) += dataset.col(referenceNode.Point(i)); ++counts(bestCluster); } // Otherwise, we're not sure, so we can't prune. Recursion order doesn't make // a difference, so we'll just return a score of 0. return 0.0; } template double PellegMooreKMeansRules::Rescore( const size_t /* queryIndex */, TreeType& /* referenceNode */, const double oldScore) { // There's no possible way that calling Rescore() can produce a prune now when // it couldn't before. return oldScore; } } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/pelleg_moore_kmeans_statistic.hpp000066400000000000000000000044771315013601400265630ustar00rootroot00000000000000/** * @file pelleg_moore_kmeans_statistic.hpp * @author Ryan Curtin * * A StatisticType for trees which holds the blacklist for various k-means * clusters. See the Pelleg and Moore paper for more details. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_STATISTIC_HPP #define MLPACK_METHODS_KMEANS_PELLEG_MOORE_KMEANS_STATISTIC_HPP namespace mlpack { namespace kmeans { /** * A statistic for trees which holds the blacklist for Pelleg-Moore k-means * clustering (which represents the clusters that cannot possibly own any points * in a node). */ class PellegMooreKMeansStatistic { public: //! Initialize the statistic without a node (this does nothing). PellegMooreKMeansStatistic() { } //! Initialize the statistic for a node; this calculates the centroid and //! caches it. template PellegMooreKMeansStatistic(TreeType& node) { centroid.zeros(node.Dataset().n_rows); // Hope it's a depth-first build procedure. Also, this won't work right for // trees that have self-children or stuff like that. for (size_t i = 0; i < node.NumChildren(); ++i) { centroid += node.Child(i).NumDescendants() * node.Child(i).Stat().Centroid(); } for (size_t i = 0; i < node.NumPoints(); ++i) { centroid += node.Dataset().col(node.Point(i)); } if (node.NumDescendants() > 0) centroid /= node.NumDescendants(); else centroid.fill(DBL_MAX); // Invalid centroid. What else can we do? } //! Get the cluster blacklist. const arma::uvec& Blacklist() const { return blacklist; } //! Modify the cluster blacklist. arma::uvec& Blacklist() { return blacklist; } //! Get the node's centroid. const arma::vec& Centroid() const { return centroid; } //! Modify the node's centroid (be careful!). arma::vec& Centroid() { return centroid; } private: //! The cluster blacklist for the node. arma::uvec blacklist; //! The centroid of the node, cached for use during prunes. arma::vec centroid; }; } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/random_partition.hpp000066400000000000000000000037761315013601400240370ustar00rootroot00000000000000/** * @file random_partition.hpp * @author Ryan Curtin * * Very simple partitioner which partitions the data randomly into the number of * desired clusters. Used as the default InitialPartitionPolicy for KMeans. 
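 *
 * For intuition, an illustrative example (not taken from the code): with 7
 * points and clusters = 3, Cluster() below produces a random permutation of
 * (0, 0, 0, 1, 1, 1, 2) -- an evenly spaced sequence truncated to integers,
 * then shuffled.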
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_RANDOM_PARTITION_HPP #define MLPACK_METHODS_KMEANS_RANDOM_PARTITION_HPP #include namespace mlpack { namespace kmeans { /** * A very simple partitioner which partitions the data randomly into the number * of desired clusters. It has no parameters, and so an instance of the class * is not even necessary. */ class RandomPartition { public: //! Empty constructor, required by the InitialPartitionPolicy policy. RandomPartition() { } /** * Partition the given dataset into the given number of clusters. Assignments * are random, and the number of points in each cluster should be equal (or * approximately equal). * * @tparam MatType Type of data (arma::mat or arma::sp_mat). * @param data Dataset to partition. * @param clusters Number of clusters to split dataset into. * @param assignments Vector to store cluster assignments into. Values will * be between 0 and (clusters - 1). */ template inline static void Cluster(const MatType& data, const size_t clusters, arma::Row& assignments) { // Implementation is so simple we'll put it here in the header file. assignments = arma::shuffle(arma::linspace>(0, (clusters - 1), data.n_cols)); } //! Serialize the partitioner (nothing to do). template void Serialize(Archive& /* ar */, const unsigned int /* version */) { } }; } } #endif mlpack-2.2.5/src/mlpack/methods/kmeans/refined_start.hpp000066400000000000000000000073621315013601400233120ustar00rootroot00000000000000/** * @file refined_start.hpp * @author Ryan Curtin * * An implementation of Bradley and Fayyad's "Refining Initial Points for * K-Means clustering". This class is meant to provide better initial points * for the k-means algorithm. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_REFINED_START_HPP #define MLPACK_METHODS_KMEANS_REFINED_START_HPP #include namespace mlpack { namespace kmeans { /** * A refined approach for choosing initial points for k-means clustering. This * approach runs k-means several times on random subsets of the data, and then * clusters those solutions to select refined initial cluster assignments. It * is an implementation of the following paper: * * @inproceedings{bradley1998refining, * title={Refining initial points for k-means clustering}, * author={Bradley, Paul S and Fayyad, Usama M}, * booktitle={Proceedings of the Fifteenth International Conference on Machine * Learning (ICML 1998)}, * volume={66}, * year={1998} * } */ class RefinedStart { public: /** * Create the RefinedStart object, optionally specifying parameters for the * number of samplings to perform and the percentage of the dataset to use in * each sampling. */ RefinedStart(const size_t samplings = 100, const double percentage = 0.02) : samplings(samplings), percentage(percentage) { } /** * Partition the given dataset into the given number of clusters according to * the random sampling scheme outlined in Bradley and Fayyad's paper, and * return centroids. * * @tparam MatType Type of data (arma::mat or arma::sp_mat). 
* @param data Dataset to partition. * @param clusters Number of clusters to split dataset into. * @param centroids Matrix to store centroids into. */ template void Cluster(const MatType& data, const size_t clusters, arma::mat& centroids) const; /** * Partition the given dataset into the given number of clusters according to * the random sampling scheme outlined in Bradley and Fayyad's paper, and * return point assignments. * * @tparam MatType Type of data (arma::mat or arma::sp_mat). * @param data Dataset to partition. * @param clusters Number of clusters to split dataset into. * @param assignments Vector to store cluster assignments into. Values will * be between 0 and (clusters - 1). */ template void Cluster(const MatType& data, const size_t clusters, arma::Row& assignments) const; //! Get the number of samplings that will be performed. size_t Samplings() const { return samplings; } //! Modify the number of samplings that will be performed. size_t& Samplings() { return samplings; } //! Get the percentage of the data used by each subsampling. double Percentage() const { return percentage; } //! Modify the percentage of the data used by each subsampling. double& Percentage() { return percentage; } //! Serialize the object. template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(samplings, "samplings"); ar & data::CreateNVP(percentage, "percentage"); } private: //! The number of samplings to perform. size_t samplings; //! The percentage of the data to use for each subsampling. double percentage; }; } // namespace kmeans } // namespace mlpack // Include implementation. #include "refined_start_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/kmeans/refined_start_impl.hpp000066400000000000000000000074741315013601400243370ustar00rootroot00000000000000/** * @file refined_start_impl.hpp * @author Ryan Curtin * * An implementation of Bradley and Fayyad's "Refining Initial Points for * K-Means clustering". This class is meant to provide better initial points * for the k-means algorithm. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_KMEANS_REFINED_START_IMPL_HPP #define MLPACK_METHODS_KMEANS_REFINED_START_IMPL_HPP // In case it hasn't been included yet. #include "refined_start.hpp" namespace mlpack { namespace kmeans { //! Partition the given dataset according to Bradley and Fayyad's algorithm. template void RefinedStart::Cluster(const MatType& data, const size_t clusters, arma::mat& centroids) const { // This will hold the sampled datasets. const size_t numPoints = size_t(percentage * data.n_cols); MatType sampledData(data.n_rows, numPoints); // vector is packed so each bool is 1 bit. std::vector pointsUsed(data.n_cols, false); arma::mat sampledCentroids(data.n_rows, samplings * clusters); for (size_t i = 0; i < samplings; ++i) { // First, assemble the sampled dataset. size_t curSample = 0; while (curSample < numPoints) { // Pick a random point in [0, numPoints). size_t sample = (size_t) math::RandInt(data.n_cols); if (!pointsUsed[sample]) { // This point isn't used yet. So we'll put it in our sample. pointsUsed[sample] = true; sampledData.col(curSample) = data.col(sample); ++curSample; } } // Now, using the sampled dataset, run k-means. 
In the case of an empty // cluster, we re-initialize that cluster as the point furthest away from // the cluster with maximum variance. This is not *exactly* what the paper // implements, but it is quite similar, and we'll call it "good enough". KMeans<> kmeans; kmeans.Cluster(sampledData, clusters, centroids); // Store the sampled centroids. sampledCentroids.cols(i * clusters, (i + 1) * clusters - 1) = centroids; pointsUsed.assign(data.n_cols, false); } // Now, we run k-means on the sampled centroids to get our final clusters. KMeans<> kmeans; kmeans.Cluster(sampledCentroids, clusters, centroids); } template void RefinedStart::Cluster(const MatType& data, const size_t clusters, arma::Row& assignments) const { // Perform the Bradley-Fayyad refined start algorithm, and get initial // centroids back. arma::mat centroids; Cluster(data, clusters, centroids); // Turn the final centroids into assignments. assignments.set_size(data.n_cols); for (size_t i = 0; i < data.n_cols; ++i) { // Find the closest centroid to this point. double minDistance = std::numeric_limits::infinity(); size_t closestCluster = clusters; for (size_t j = 0; j < clusters; ++j) { // This is restricted to the L2 distance, and unfortunately it would take // a lot of refactoring and redesign to make this more general... we would // probably need to have KMeans take a template template parameter for the // initial partition policy. It's not clear how to best do this. const double distance = metric::EuclideanDistance::Evaluate(data.col(i), centroids.col(j)); if (distance < minDistance) { minDistance = distance; closestCluster = j; } } // Assign the point to its closest cluster. assignments[i] = closestCluster; } } } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/kmeans/sample_initialization.hpp000066400000000000000000000031131315013601400250370ustar00rootroot00000000000000/** * @file sample_initialization.hpp * @author Ryan Curtin * * In order to construct initial centroids, randomly sample points from the * dataset. This tends to give better results than the RandomPartition * strategy. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef __MLPACK_METHODS_KMEANS_SAMPLE_INITIALIZATION_HPP #define __MLPACK_METHODS_KMEANS_SAMPLE_INITIALIZATION_HPP #include #include namespace mlpack { namespace kmeans { class SampleInitialization { public: //! Empty constructor, required by the InitialPartitionPolicy type definition. SampleInitialization() { } /** * Initialize the centroids matrix by randomly sampling points from the data * matrix. * * @param data Dataset. * @param clusters Number of clusters. * @param centroids Matrix to put initial centroids into. */ template inline static void Cluster(const MatType& data, const size_t clusters, arma::mat& centroids) { centroids.set_size(data.n_rows, clusters); for (size_t i = 0; i < clusters; ++i) { // Randomly sample a point. 
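      // (Note: sampling is with replacement -- RandInt() may return the same
      // index twice, so two initial centroids can coincide; any empty cluster
      // that results is later handled by the KMeans EmptyClusterPolicy.)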
const size_t index = math::RandInt(0, data.n_cols); centroids.col(i) = data.col(index); } } }; } // namespace kmeans } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/lars/000077500000000000000000000000001315013601400174235ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/lars/CMakeLists.txt000066400000000000000000000007531315013601400221700ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into the output library set(SOURCES lars.hpp lars_impl.hpp lars.cpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # append sources (with directory name) to list of all mlpack sources (used at the parent scope) set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(lars) mlpack-2.2.5/src/mlpack/methods/lars/lars.cpp000066400000000000000000000335551315013601400211030ustar00rootroot00000000000000/** * @file lars.cpp * @author Nishant Mehta (niche) * * Implementation of LARS and LASSO. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "lars.hpp" #include #include using namespace mlpack; using namespace mlpack::regression; LARS::LARS(const bool useCholesky, const double lambda1, const double lambda2, const double tolerance) : matGram(&matGramInternal), useCholesky(useCholesky), lasso((lambda1 != 0)), lambda1(lambda1), elasticNet((lambda1 != 0) && (lambda2 != 0)), lambda2(lambda2), tolerance(tolerance) { /* Nothing left to do. */ } LARS::LARS(const bool useCholesky, const arma::mat& gramMatrix, const double lambda1, const double lambda2, const double tolerance) : matGram(&gramMatrix), useCholesky(useCholesky), lasso((lambda1 != 0)), lambda1(lambda1), elasticNet((lambda1 != 0) && (lambda2 != 0)), lambda2(lambda2), tolerance(tolerance) { /* Nothing left to do */ } void LARS::Train(const arma::mat& matX, const arma::vec& y, arma::vec& beta, const bool transposeData) { Timer::Start("lars_regression"); // Clear any previous solution information. betaPath.clear(); lambdaPath.clear(); activeSet.clear(); isActive.clear(); ignoreSet.clear(); isIgnored.clear(); matUtriCholFactor.reset(); // This matrix may end up holding the transpose -- if necessary. arma::mat dataTrans; // dataRef is row-major. const arma::mat& dataRef = (transposeData ? dataTrans : matX); if (transposeData) dataTrans = trans(matX); // Compute X' * y. arma::vec vecXTy = trans(dataRef) * y; // Set up active set variables. In the beginning, the active set has size 0 // (all dimensions are inactive). isActive.resize(dataRef.n_cols, false); // Set up ignores set variables. Initialized empty. isIgnored.resize(dataRef.n_cols, false); // Initialize yHat and beta. beta = arma::zeros(dataRef.n_cols); arma::vec yHat = arma::zeros(dataRef.n_rows); arma::vec yHatDirection(dataRef.n_rows); bool lassocond = false; // Compute the initial maximum correlation among all dimensions. arma::vec corr = vecXTy; double maxCorr = 0; size_t changeInd = 0; for (size_t i = 0; i < vecXTy.n_elem; ++i) { if (fabs(corr(i)) > maxCorr) { maxCorr = fabs(corr(i)); changeInd = i; } } betaPath.push_back(beta); lambdaPath.push_back(maxCorr); // If the maximum correlation is too small, there is no reason to continue. 
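  // (This is the LASSO optimality condition at beta = 0: the all-zero
  // solution is optimal whenever lambda_1 >= || X^T y ||_inf, so if even the
  // largest absolute correlation is below lambda_1 we can stop immediately.)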
if (maxCorr < lambda1) { lambdaPath[0] = lambda1; Timer::Stop("lars_regression"); return; } // Compute the Gram matrix. If this is the elastic net problem, we will add // lambda2 * I_n to the matrix. if (matGram->n_elem != dataRef.n_cols * dataRef.n_cols) { // In this case, matGram should reference matGramInternal. matGramInternal = trans(dataRef) * dataRef; if (elasticNet && !useCholesky) matGramInternal += lambda2 * arma::eye(dataRef.n_cols, dataRef.n_cols); } // Main loop. while (((activeSet.size() + ignoreSet.size()) < dataRef.n_cols) && (maxCorr > tolerance)) { // Compute the maximum correlation among inactive dimensions. maxCorr = 0; for (size_t i = 0; i < dataRef.n_cols; i++) { if ((!isActive[i]) && (!isIgnored[i]) && (fabs(corr(i)) > maxCorr)) { maxCorr = fabs(corr(i)); changeInd = i; } } if (!lassocond) { if (useCholesky) { // vec newGramCol = vec(activeSet.size()); // for (size_t i = 0; i < activeSet.size(); i++) // { // newGramCol[i] = dot(matX.col(activeSet[i]), matX.col(changeInd)); // } // This is equivalent to the above 5 lines. arma::vec newGramCol = matGram->elem(changeInd * dataRef.n_cols + arma::conv_to::from(activeSet)); CholeskyInsert((*matGram)(changeInd, changeInd), newGramCol); } // Add variable to active set. Activate(changeInd); } // Compute signs of correlations. arma::vec s = arma::vec(activeSet.size()); for (size_t i = 0; i < activeSet.size(); i++) s(i) = corr(activeSet[i]) / fabs(corr(activeSet[i])); // Compute the "equiangular" direction in parameter space (betaDirection). // We use quotes because in the case of non-unit norm variables, this need // not be equiangular. arma::vec unnormalizedBetaDirection; double normalization; arma::vec betaDirection; if (useCholesky) { // Check for singularity. const double lastUtriElement = matUtriCholFactor( matUtriCholFactor.n_cols - 1, matUtriCholFactor.n_rows - 1); if (std::abs(lastUtriElement) > tolerance) { // Ok, no singularity. /** * Note that: * R^T R % S^T % S = (R % S)^T (R % S) * Now, for 1 the ones vector: * inv( (R % S)^T (R % S) ) 1 * = inv(R % S) inv((R % S)^T) 1 * = inv(R % S) Solve((R % S)^T, 1) * = inv(R % S) Solve(R^T, s) * = Solve(R % S, Solve(R^T, s) * = s % Solve(R, Solve(R^T, s)) */ unnormalizedBetaDirection = solve(trimatu(matUtriCholFactor), solve(trimatl(trans(matUtriCholFactor)), s)); normalization = 1.0 / sqrt(dot(s, unnormalizedBetaDirection)); betaDirection = normalization * unnormalizedBetaDirection; } else { // Singularity, so remove variable from active set, add to ignores set, // and look for new variable to add. Log::Warn << "Encountered singularity when adding variable " << changeInd << " to active set; permanently removing." << std::endl; Deactivate(activeSet.size() - 1); Ignore(changeInd); CholeskyDelete(matUtriCholFactor.n_rows - 1); continue; } } else { arma::mat matGramActive = arma::mat(activeSet.size(), activeSet.size()); for (size_t i = 0; i < activeSet.size(); i++) for (size_t j = 0; j < activeSet.size(); j++) matGramActive(i, j) = (*matGram)(activeSet[i], activeSet[j]); // Check for singularity. arma::mat matS = s * arma::ones(1, activeSet.size()); const bool solvedOk = solve(unnormalizedBetaDirection, matGramActive % trans(matS) % matS, arma::ones(activeSet.size(), 1)); if (solvedOk) { // Ok, no singularity. normalization = 1.0 / sqrt(sum(unnormalizedBetaDirection)); betaDirection = normalization * unnormalizedBetaDirection % s; } else { // Singularity, so remove variable from active set, add to ignores set, // and look for new variable to add. 
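        // (In the common case changeInd was just Activated above, so it sits
        // at the back of activeSet and Deactivate(activeSet.size() - 1)
        // removes exactly that variable.)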
Deactivate(activeSet.size() - 1); Ignore(changeInd); Log::Warn << "Encountered singularity when adding variable " << changeInd << " to active set; permanently removing." << std::endl; continue; } } // compute "equiangular" direction in output space ComputeYHatDirection(dataRef, betaDirection, yHatDirection); double gamma = maxCorr / normalization; // If not all variables are active. if ((activeSet.size() + ignoreSet.size()) < dataRef.n_cols) { // Compute correlations with direction. for (size_t ind = 0; ind < dataRef.n_cols; ind++) { if (isActive[ind] || isIgnored[ind]) continue; double dirCorr = dot(dataRef.col(ind), yHatDirection); double val1 = (maxCorr - corr(ind)) / (normalization - dirCorr); double val2 = (maxCorr + corr(ind)) / (normalization + dirCorr); if ((val1 > 0) && (val1 < gamma)) gamma = val1; if ((val2 > 0) && (val2 < gamma)) gamma = val2; } } // Bound gamma according to LASSO. if (lasso) { lassocond = false; double lassoboundOnGamma = DBL_MAX; size_t activeIndToKickOut = -1; for (size_t i = 0; i < activeSet.size(); i++) { double val = -beta(activeSet[i]) / betaDirection(i); if ((val > 0) && (val < lassoboundOnGamma)) { lassoboundOnGamma = val; activeIndToKickOut = i; } } if (lassoboundOnGamma < gamma) { gamma = lassoboundOnGamma; lassocond = true; changeInd = activeIndToKickOut; } } // Update the prediction. yHat += gamma * yHatDirection; // Update the estimator. for (size_t i = 0; i < activeSet.size(); i++) { beta(activeSet[i]) += gamma * betaDirection(i); } // Sanity check to make sure the kicked out dimension is actually zero. if (lassocond) { if (beta(activeSet[changeInd]) != 0) beta(activeSet[changeInd]) = 0; } betaPath.push_back(beta); if (lassocond) { // Index is in position changeInd in activeSet. if (useCholesky) CholeskyDelete(changeInd); Deactivate(changeInd); } corr = vecXTy - trans(dataRef) * yHat; if (elasticNet) corr -= lambda2 * beta; double curLambda = 0; for (size_t i = 0; i < activeSet.size(); i++) curLambda += fabs(corr(activeSet[i])); curLambda /= ((double) activeSet.size()); lambdaPath.push_back(curLambda); // Time to stop for LASSO? if (lasso) { if (curLambda <= lambda1) { InterpolateBeta(); break; } } } // Unfortunate copy... beta = betaPath.back(); Timer::Stop("lars_regression"); } void LARS::Predict(const arma::mat& points, arma::vec& predictions, const bool rowMajor) const { // We really only need to store beta internally... if (rowMajor) predictions = points * betaPath.back(); else predictions = (betaPath.back().t() * points).t(); } // Private functions. 
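// Invariant kept by Activate(), Deactivate(), and Ignore(): activeSet and
// ignoreSet hold column indices of X, while isActive and isIgnored are the
// corresponding size-n_cols membership indicators maintained in sync with
// them.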
void LARS::Deactivate(const size_t activeVarInd) { isActive[activeSet[activeVarInd]] = false; activeSet.erase(activeSet.begin() + activeVarInd); } void LARS::Activate(const size_t varInd) { isActive[varInd] = true; activeSet.push_back(varInd); } void LARS::Ignore(const size_t varInd) { isIgnored[varInd] = true; ignoreSet.push_back(varInd); } void LARS::ComputeYHatDirection(const arma::mat& matX, const arma::vec& betaDirection, arma::vec& yHatDirection) { yHatDirection.fill(0); for (size_t i = 0; i < activeSet.size(); i++) yHatDirection += betaDirection(i) * matX.col(activeSet[i]); } void LARS::InterpolateBeta() { int pathLength = betaPath.size(); // interpolate beta and stop double ultimateLambda = lambdaPath[pathLength - 1]; double penultimateLambda = lambdaPath[pathLength - 2]; double interp = (penultimateLambda - lambda1) / (penultimateLambda - ultimateLambda); betaPath[pathLength - 1] = (1 - interp) * (betaPath[pathLength - 2]) + interp * betaPath[pathLength - 1]; lambdaPath[pathLength - 1] = lambda1; } void LARS::CholeskyInsert(const arma::vec& newX, const arma::mat& X) { if (matUtriCholFactor.n_rows == 0) { matUtriCholFactor = arma::mat(1, 1); if (elasticNet) matUtriCholFactor(0, 0) = sqrt(dot(newX, newX) + lambda2); else matUtriCholFactor(0, 0) = norm(newX, 2); } else { arma::vec newGramCol = trans(X) * newX; CholeskyInsert(dot(newX, newX), newGramCol); } } void LARS::CholeskyInsert(double sqNormNewX, const arma::vec& newGramCol) { int n = matUtriCholFactor.n_rows; if (n == 0) { matUtriCholFactor = arma::mat(1, 1); if (elasticNet) matUtriCholFactor(0, 0) = sqrt(sqNormNewX + lambda2); else matUtriCholFactor(0, 0) = sqrt(sqNormNewX); } else { arma::mat matNewR = arma::mat(n + 1, n + 1); if (elasticNet) sqNormNewX += lambda2; arma::vec matUtriCholFactork = solve(trimatl(trans(matUtriCholFactor)), newGramCol); matNewR(arma::span(0, n - 1), arma::span(0, n - 1)) = matUtriCholFactor; matNewR(arma::span(0, n - 1), n) = matUtriCholFactork; matNewR(n, arma::span(0, n - 1)).fill(0.0); matNewR(n, n) = sqrt(sqNormNewX - dot(matUtriCholFactork, matUtriCholFactork)); matUtriCholFactor = matNewR; } } void LARS::GivensRotate(const arma::vec::fixed<2>& x, arma::vec::fixed<2>& rotatedX, arma::mat& matG) { if (x(1) == 0) { matG = arma::eye(2, 2); rotatedX = x; } else { double r = norm(x, 2); matG = arma::mat(2, 2); double scaledX1 = x(0) / r; double scaledX2 = x(1) / r; matG(0, 0) = scaledX1; matG(1, 0) = -scaledX2; matG(0, 1) = scaledX2; matG(1, 1) = scaledX1; rotatedX = arma::vec(2); rotatedX(0) = r; rotatedX(1) = 0; } } void LARS::CholeskyDelete(const size_t colToKill) { size_t n = matUtriCholFactor.n_rows; if (colToKill == (n - 1)) { matUtriCholFactor = matUtriCholFactor(arma::span(0, n - 2), arma::span(0, n - 2)); } else { matUtriCholFactor.shed_col(colToKill); // remove column colToKill n--; for (size_t k = colToKill; k < n; k++) { arma::mat matG; arma::vec::fixed<2> rotatedVec; GivensRotate(matUtriCholFactor(arma::span(k, k + 1), k), rotatedVec, matG); matUtriCholFactor(arma::span(k, k + 1), k) = rotatedVec; if (k < n - 1) { matUtriCholFactor(arma::span(k, k + 1), arma::span(k + 1, n - 1)) = matG * matUtriCholFactor(arma::span(k, k + 1), arma::span(k + 1, n - 1)); } } matUtriCholFactor.shed_row(n); } } mlpack-2.2.5/src/mlpack/methods/lars/lars.hpp000066400000000000000000000211111315013601400210710ustar00rootroot00000000000000/** * @file lars.hpp * @author Nishant Mehta (niche) * * Definition of the LARS class, which performs Least Angle Regression and the * LASSO. 
* * Only minor modifications of LARS are necessary to handle the constrained * version of the problem: * * \f[ * \min_{\beta} 0.5 || X \beta - y ||_2^2 + 0.5 \lambda_2 || \beta ||_2^2 * \f] * subject to \f$ ||\beta||_1 <= \tau \f$ * * Although this option currently is not implemented, it will be implemented * very soon. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LARS_LARS_HPP #define MLPACK_METHODS_LARS_LARS_HPP #include namespace mlpack { namespace regression { // beta is the estimator // yHat is the prediction from the current estimator /** * An implementation of LARS, a stage-wise homotopy-based algorithm for * l1-regularized linear regression (LASSO) and l1+l2 regularized linear * regression (Elastic Net). * * Let \f$ X \f$ be a matrix where each row is a point and each column is a * dimension and let \f$ y \f$ be a vector of responses. * * The Elastic Net problem is to solve * * \f[ \min_{\beta} 0.5 || X \beta - y ||_2^2 + \lambda_1 || \beta ||_1 + * 0.5 \lambda_2 || \beta ||_2^2 \f] * * where \f$ \beta \f$ is the vector of regression coefficients. * * If \f$ \lambda_1 > 0 \f$ and \f$ \lambda_2 = 0 \f$, the problem is the LASSO. * If \f$ \lambda_1 > 0 \f$ and \f$ \lambda_2 > 0 \f$, the problem is the * elastic net. * If \f$ \lambda_1 = 0 \f$ and \f$ \lambda_2 > 0 \f$, the problem is ridge * regression. * If \f$ \lambda_1 = 0 \f$ and \f$ \lambda_2 = 0 \f$, the problem is * unregularized linear regression. * * Note: This algorithm is not recommended for use (in terms of efficiency) * when \f$ \lambda_1 \f$ = 0. * * For more details, see the following papers: * * @code * @article{efron2004least, * title={Least angle regression}, * author={Efron, B. and Hastie, T. and Johnstone, I. and Tibshirani, R.}, * journal={The Annals of statistics}, * volume={32}, * number={2}, * pages={407--499}, * year={2004}, * publisher={Institute of Mathematical Statistics} * } * @endcode * * @code * @article{zou2005regularization, * title={Regularization and variable selection via the elastic net}, * author={Zou, H. and Hastie, T.}, * journal={Journal of the Royal Statistical Society Series B}, * volume={67}, * number={2}, * pages={301--320}, * year={2005}, * publisher={Royal Statistical Society} * } * @endcode */ class LARS { public: /** * Set the parameters to LARS. Both lambda1 and lambda2 default to 0. * * @param useCholesky Whether or not to use Cholesky decomposition when * solving linear system (as opposed to using the full Gram matrix). * @param lambda1 Regularization parameter for l1-norm penalty. * @param lambda2 Regularization parameter for l2-norm penalty. * @param tolerance Run until the maximum correlation of elements in (X^T y) * is less than this. */ LARS(const bool useCholesky, const double lambda1 = 0.0, const double lambda2 = 0.0, const double tolerance = 1e-16); /** * Set the parameters to LARS, and pass in a precalculated Gram matrix. Both * lambda1 and lambda2 default to 0. * * @param useCholesky Whether or not to use Cholesky decomposition when * solving linear system (as opposed to using the full Gram matrix). * @param gramMatrix Gram matrix. * @param lambda1 Regularization parameter for l1-norm penalty. * @param lambda2 Regularization parameter for l2-norm penalty. 
* @param tolerance Run until the maximum correlation of elements in (X^T y) * is less than this. */ LARS(const bool useCholesky, const arma::mat& gramMatrix, const double lambda1 = 0.0, const double lambda2 = 0.0, const double tolerance = 1e-16); /** * Run LARS. The input matrix (like all mlpack matrices) should be * column-major -- each column is an observation and each row is a dimension. * However, because LARS is more efficient on a row-major matrix, this method * will (internally) transpose the matrix. If this transposition is not * necessary (i.e., you want to pass in a row-major matrix), pass 'false' for * the transposeData parameter. * * @param data Column-major input data (or row-major input data if rowMajor = * true). * @param responses A vector of targets. * @param beta Vector to store the solution (the coefficients) in. * @param transposeData Set to false if the data is row-major. */ void Train(const arma::mat& data, const arma::vec& responses, arma::vec& beta, const bool transposeData = true); /** * Predict y_i for each data point in the given data matrix, using the * currently-trained LARS model (so make sure you run Regress() first). If * the data matrix is row-major (as opposed to the usual column-major format * for mlpack matrices), set rowMajor = true to avoid an extra transpose. * * @param points The data points to regress on. * @param predictions y, which will contained calculated values on completion. */ void Predict(const arma::mat& points, arma::vec& predictions, const bool rowMajor = false) const; //! Access the set of active dimensions. const std::vector& ActiveSet() const { return activeSet; } //! Access the set of coefficients after each iteration; the solution is the //! last element. const std::vector& BetaPath() const { return betaPath; } //! Access the set of values for lambda1 after each iteration; the solution is //! the last element. const std::vector& LambdaPath() const { return lambdaPath; } //! Access the upper triangular cholesky factor. const arma::mat& MatUtriCholFactor() const { return matUtriCholFactor; } /** * Serialize the LARS model. */ template void Serialize(Archive& ar, const unsigned int /* version */); private: //! Gram matrix. arma::mat matGramInternal; //! Pointer to the Gram matrix we will use. const arma::mat* matGram; //! Upper triangular cholesky factor; initially 0x0 matrix. arma::mat matUtriCholFactor; //! Whether or not to use Cholesky decomposition when solving linear system. bool useCholesky; //! True if this is the LASSO problem. bool lasso; //! Regularization parameter for l1 penalty. double lambda1; //! True if this is the elastic net problem. bool elasticNet; //! Regularization parameter for l2 penalty. double lambda2; //! Tolerance for main loop. double tolerance; //! Solution path. std::vector betaPath; //! Value of lambda_1 for each solution in solution path. std::vector lambdaPath; //! Active set of dimensions. std::vector activeSet; //! Active set membership indicator (for each dimension). std::vector isActive; // Set of variables that are ignored (if any). //! Set of ignored variables (for dimensions in span{active set dimensions}). std::vector ignoreSet; //! Membership indicator for set of ignored variables. std::vector isIgnored; /** * Remove activeVarInd'th element from active set. * * @param activeVarInd Index of element to remove from active set. */ void Deactivate(const size_t activeVarInd); /** * Add dimension varInd to active set. * * @param varInd Dimension to add to active set. 
   */
  void Activate(const size_t varInd);

  /**
   * Add dimension varInd to ignores set (never removed).
   *
   * @param varInd Dimension to add to ignores set.
   */
  void Ignore(const size_t varInd);

  // compute "equiangular" direction in output space
  void ComputeYHatDirection(const arma::mat& matX,
                            const arma::vec& betaDirection,
                            arma::vec& yHatDirection);

  // interpolate to compute last solution vector
  void InterpolateBeta();

  void CholeskyInsert(const arma::vec& newX, const arma::mat& X);

  void CholeskyInsert(double sqNormNewX, const arma::vec& newGramCol);

  void GivensRotate(const arma::vec::fixed<2>& x,
                    arma::vec::fixed<2>& rotatedX,
                    arma::mat& G);

  void CholeskyDelete(const size_t colToKill);
};

} // namespace regression
} // namespace mlpack

// Include implementation of Serialize().
#include "lars_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/methods/lars/lars_impl.hpp000066400000000000000000000031371315013601400221220ustar00rootroot00000000000000/**
 * @file lars_impl.hpp
 * @author Ryan Curtin
 *
 * Implementation of templated LARS functions.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_LARS_LARS_IMPL_HPP
#define MLPACK_METHODS_LARS_LARS_IMPL_HPP

//! In case it hasn't been included yet.
#include "lars.hpp"

namespace mlpack {
namespace regression {

/**
 * Serialize the LARS model.
 */
template<typename Archive>
void LARS::Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  // If we're loading, we have to use the internal storage.
  if (Archive::is_loading::value)
  {
    matGram = &matGramInternal;
    ar & CreateNVP(matGramInternal, "matGramInternal");
  }
  else
  {
    ar & CreateNVP(const_cast<arma::mat&>(*matGram), "matGramInternal");
  }

  ar & CreateNVP(matUtriCholFactor, "matUtriCholFactor");
  ar & CreateNVP(useCholesky, "useCholesky");
  ar & CreateNVP(lasso, "lasso");
  ar & CreateNVP(lambda1, "lambda1");
  ar & CreateNVP(elasticNet, "elasticNet");
  ar & CreateNVP(lambda2, "lambda2");
  ar & CreateNVP(tolerance, "tolerance");
  ar & CreateNVP(betaPath, "betaPath");
  ar & CreateNVP(lambdaPath, "lambdaPath");
  ar & CreateNVP(activeSet, "activeSet");
  ar & CreateNVP(isActive, "isActive");
  ar & CreateNVP(ignoreSet, "ignoreSet");
  ar & CreateNVP(isIgnored, "isIgnored");
}

} // namespace regression
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/lars/lars_main.cpp000066400000000000000000000212371315013601400221010ustar00rootroot00000000000000/**
 * @file lars_main.cpp
 * @author Nishant Mehta
 *
 * Executable for LARS.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/prereqs.hpp>
#include <mlpack/core/util/cli.hpp>
#include <mlpack/core/data/load.hpp>
#include <mlpack/core/data/save.hpp>

#include "lars.hpp"

PROGRAM_INFO("LARS", "An implementation of LARS: Least Angle Regression "
    "(Stagewise/laSso). This is a stage-wise homotopy-based algorithm for "
    "L1-regularized linear regression (LASSO) and L1+L2-regularized linear "
    "regression (Elastic Net).\n"
    "\n"
    "This program is able to train a LARS/LASSO/Elastic Net model or load a "
    "model from file, output regression predictions for a test set, and save "
    "the trained model to a file. The LARS algorithm is described in more "
    "detail below:\n"
    "\n"
    "Let X be a matrix where each row is a point and each column is a "
    "dimension, and let y be a vector of targets.\n"
    "\n"
    "The Elastic Net problem is to solve\n\n"
    "  min_beta 0.5 || X * beta - y ||_2^2 + lambda_1 ||beta||_1 +\n"
    "      0.5 lambda_2 ||beta||_2^2\n\n"
    "If --lambda1 > 0 and --lambda2 = 0, the problem is the LASSO.\n"
    "If --lambda1 > 0 and --lambda2 > 0, the problem is the Elastic Net.\n"
    "If --lambda1 = 0 and --lambda2 > 0, the problem is ridge regression.\n"
    "If --lambda1 = 0 and --lambda2 = 0, the problem is unregularized linear "
    "regression.\n"
    "\n"
    "For efficiency reasons, it is not recommended to use this algorithm with "
    "--lambda1 = 0. In that case, use the 'linear_regression' program, which "
    "implements both unregularized linear regression and ridge regression.\n"
    "\n"
    "To train a LARS/LASSO/Elastic Net model, the --input_file and "
    "--responses_file parameters must be given. The --lambda1, --lambda2, and "
    "--use_cholesky arguments control the training parameters. A trained "
    "model can be saved with the --output_model_file option, or, if training "
    "is not desired at all, a model can be loaded with --input_model_file. "
    "Any output predictions from a test file can be saved into the file "
    "specified by the --output_predictions option.");

PARAM_STRING_IN("input_file", "File containing covariates (X).", "i", "");
PARAM_STRING_IN("responses_file",
    "File containing y (responses/observations).", "r", "");

PARAM_STRING_IN("input_model_file", "File to load model from.", "m", "");
PARAM_STRING_OUT("output_model_file", "File to save model to.", "M");

PARAM_STRING_IN("test_file", "File containing points to regress on (test "
    "points).", "t", "");

// Kept for reverse compatibility until mlpack 3.0.0.
PARAM_STRING_OUT("output_predictions", "If --test_file is specified, this file "
    "is where the predicted responses will be saved.", "");
// This is the future name of the parameter.
PARAM_STRING_OUT("output_predictions_file", "If --test_file is specified, this "
    "file is where the predicted responses will be saved.", "o");

PARAM_DOUBLE_IN("lambda1", "Regularization parameter for l1-norm penalty.",
    "l", 0);
PARAM_DOUBLE_IN("lambda2", "Regularization parameter for l2-norm penalty.",
    "L", 0);
PARAM_FLAG("use_cholesky", "Use Cholesky decomposition during computation "
    "rather than explicitly computing the full Gram matrix.", "c");

using namespace arma;
using namespace std;
using namespace mlpack;
using namespace mlpack::regression;

int main(int argc, char* argv[])
{
  // Handle parameters.
  CLI::ParseCommandLine(argc, argv);

  double lambda1 = CLI::GetParam<double>("lambda1");
  double lambda2 = CLI::GetParam<double>("lambda2");
  bool useCholesky = CLI::HasParam("use_cholesky");

  // Reverse compatibility. We can remove these for mlpack 3.0.0.
  if (CLI::HasParam("output_predictions") &&
      CLI::HasParam("output_predictions_file"))
    Log::Fatal << "Cannot specify both --output_predictions and "
        << "--output_predictions_file!" << endl;

  if (CLI::HasParam("output_predictions"))
  {
    Log::Warn << "--output_predictions is deprecated and will be removed in "
        << "mlpack 3.0.0; use --output_predictions_file instead." << endl;

    CLI::GetParam<string>("output_predictions_file") =
        CLI::GetParam<string>("output_predictions");
  }

  // Check parameters -- make sure everything given makes sense. These checks
  // can be simplified to HasParam() after the reverse compatibility options
  // are removed.
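  // (For reference, everything this program does is also available through
  // the LARS class directly. A minimal sketch, assuming column-major data
  // and illustrative values:
  //
  //   arma::mat data(10, 100, arma::fill::randu);  // 10 dims, 100 points.
  //   arma::vec responses(100, arma::fill::randu);
  //   LARS lars(false, 0.4);  // useCholesky = false, lambda1 = 0.4.
  //   arma::vec beta;
  //   lars.Train(data, responses, beta);  // beta receives the coefficients.
  //   arma::vec predictions;
  //   lars.Predict(data, predictions);
  //
  // The checks below validate the command-line equivalents of those inputs.)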
if (CLI::HasParam("input_file") && !CLI::HasParam("responses_file")) Log::Fatal << "--input_file (-i) is specified, but --responses_file (-r) is" << " not!" << endl; if (CLI::HasParam("responses_file") && !CLI::HasParam("input_file")) Log::Fatal << "--responses_file (-r) is specified, but --input_file (-i) is" << " not!" << endl; if (!CLI::HasParam("input_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "No input data specified (with --input_file (-i) and " << "--responses_file (-r)), and no input model specified (with " << "--input_model_file (-m))!" << endl; if (CLI::HasParam("input_file") && CLI::HasParam("input_model_file")) Log::Fatal << "Both --input_file (-i) and --input_model_file (-m) are " << "specified, but only one may be specified!" << endl; if ((CLI::GetParam("output_predictions_file") == "") && !CLI::HasParam("output_model_file")) Log::Warn << "--output_predictions_file (-o) and --output_model_file (-M) " << "are not specified; no results will be saved!" << endl; if ((CLI::GetParam("output_predictions_file") == "") && !CLI::HasParam("test_file")) Log::Warn << "--output_predictions_file (-o) specified, but --test_file " << "(-t) is not; no results will be saved." << endl; if (CLI::HasParam("test_file") && (CLI::GetParam("output_predictions_file") == "")) Log::Warn << "--test_file (-t) specified, but --output_predictions_file " << "(-o) is not; no results will be saved." << endl; // Initialize the object. LARS lars(useCholesky, lambda1, lambda2); if (CLI::HasParam("input_file")) { // Load covariates. We can avoid LARS transposing our data by choosing to // not transpose this data. const string inputFile = CLI::GetParam("input_file"); mat matX; data::Load(inputFile, matX, true, false); // Load responses. The responses should be a one-dimensional vector, and it // seems more likely that these will be stored with one response per line // (one per row). So we should not transpose upon loading. const string responsesFile = CLI::GetParam("responses_file"); mat matY; // Will be a vector. data::Load(responsesFile, matY, true, false); // Make sure y is oriented the right way. if (matY.n_rows == 1) matY = trans(matY); if (matY.n_cols > 1) Log::Fatal << "Only one column or row allowed in responses file!" << endl; if (matY.n_elem != matX.n_rows) Log::Fatal << "Number of responses must be equal to number of rows of X!" << endl; vec beta; lars.Train(matX, matY.unsafe_col(0), beta, false /* do not transpose */); } else // We must have --input_model_file. { const string inputModelFile = CLI::GetParam("input_model_file"); data::Load(inputModelFile, "lars_model", lars, true); } if (CLI::HasParam("test_file")) { Log::Info << "Regressing on test points." << endl; const string testFile = CLI::GetParam("test_file"); // Load test points. mat testPoints; data::Load(testFile, testPoints, true, false); // Make sure the dimensionality is right. We haven't transposed, so, we // check n_cols not n_rows. if (testPoints.n_cols != lars.BetaPath().back().n_elem) Log::Fatal << "Dimensionality of test set (" << testPoints.n_cols << ") " << "is not equal to the dimensionality of the model (" << lars.BetaPath().back().n_elem << ")!" << endl; arma::vec predictions; lars.Predict(testPoints.t(), predictions, false); // Save test predictions. One per line, so, don't transpose on save. 
if (CLI::GetParam("output_predictions_file") != "") { const string outputPredictionsFile = CLI::GetParam("output_predictions_file"); data::Save(outputPredictionsFile, predictions, true, false); } } if (CLI::HasParam("output_model_file")) { const string outputModelFile = CLI::GetParam("output_model_file"); data::Save(outputModelFile, "lars_model", lars, true); } } mlpack-2.2.5/src/mlpack/methods/linear_regression/000077500000000000000000000000001315013601400221745ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/linear_regression/CMakeLists.txt000066400000000000000000000010501315013601400247300ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into the output library # Do not include test programs here set(SOURCES linear_regression.hpp linear_regression.cpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # append sources (with directory name) to list of all mlpack sources (used at # the parent scope) set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(linear_regression) mlpack-2.2.5/src/mlpack/methods/linear_regression/linear_regression.cpp000066400000000000000000000123621315013601400264160ustar00rootroot00000000000000/** * @file linear_regression.cpp * @author James Cline * @author Michael Fox * * Implementation of simple linear regression. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "linear_regression.hpp" #include using namespace mlpack; using namespace mlpack::regression; LinearRegression::LinearRegression(const arma::mat& predictors, const arma::vec& responses, const double lambda, const bool intercept, const arma::vec& weights) : lambda(lambda), intercept(intercept) { Train(predictors, responses, intercept, weights); } LinearRegression::LinearRegression(const LinearRegression& linearRegression) : parameters(linearRegression.parameters), lambda(linearRegression.lambda) { /* Nothing to do. */ } void LinearRegression::Train(const arma::mat& predictors, const arma::vec& responses, const bool intercept, const arma::vec& weights) { this->intercept = intercept; /* * We want to calculate the a_i coefficients of: * \sum_{i=0}^n (a_i * x_i^i) * In order to get the intercept value, we will add a row of ones. */ // We store the number of rows and columns of the predictors. // Reminder: Armadillo stores the data transposed from how we think of it, // that is, columns are actually rows (see: column major order). const size_t nCols = predictors.n_cols; arma::mat p = predictors; arma::vec r = responses; // Here we add the row of ones to the predictors. // The intercept is not penalized. Add an "all ones" row to design and set // intercept = false to get a penalized intercept. if (intercept) { p.insert_rows(0, arma::ones(1,nCols)); } if (weights.n_elem > 0) { p = p * diagmat(sqrt(weights)); r = sqrt(weights) % responses; } if (lambda != 0.0) { // Add the identity matrix to the predictors (this is equivalent to ridge // regression). See http://math.stackexchange.com/questions/299481/ for // more information. 
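    // Concretely, the block appended below turns the penalized problem
    //   min_b || X' b - y ||_2^2 + lambda || b ||_2^2
    // into the ordinary least-squares problem
    //   min_b || [X'; sqrt(lambda) * I] b - [y; 0] ||_2^2,
    // which is why zero responses are appended before solving, further down.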
p.insert_cols(nCols, predictors.n_rows); p.submat(p.n_rows - predictors.n_rows, nCols, p.n_rows - 1, nCols + predictors.n_rows - 1) = sqrt(lambda) * arma::eye(predictors.n_rows, predictors.n_rows); } // We compute the QR decomposition of the predictors. // We transpose the predictors because they are in column major order. arma::mat Q, R; arma::qr(Q, R, arma::trans(p)); // We compute the parameters, B, like so: // R * B = Q^T * responses // B = Q^T * responses * R^-1 // If lambda > 0, then we must add a bunch of empty responses. if (lambda == 0.0) { arma::solve(parameters, R, arma::trans(Q) * r); } else { // Copy responses into larger vector. r.insert_rows(nCols,p.n_cols - nCols); arma::solve(parameters, R, arma::trans(Q) * r); } } void LinearRegression::Predict(const arma::mat& points, arma::vec& predictions) const { if (intercept) { // We want to be sure we have the correct number of dimensions in the // dataset. Log::Assert(points.n_rows == parameters.n_rows - 1); // Get the predictions, but this ignores the intercept value // (parameters[0]). predictions = arma::trans(arma::trans(parameters.subvec(1, parameters.n_elem - 1)) * points); // Now add the intercept. predictions += parameters(0); } else { // We want to be sure we have the correct number of dimensions in the dataset. Log::Assert(points.n_rows == parameters.n_rows); predictions = arma::trans(arma::trans(parameters) * points); } } //! Compute the L2 squared error on the given predictors and responses. double LinearRegression::ComputeError(const arma::mat& predictors, const arma::vec& responses) const { // Get the number of columns and rows of the dataset. const size_t nCols = predictors.n_cols; const size_t nRows = predictors.n_rows; // Calculate the differences between actual responses and predicted responses. // We must also add the intercept (parameters(0)) to the predictions. arma::vec temp; if (intercept) { // Ensure that we have the correct number of dimensions in the dataset. if (nRows != parameters.n_rows - 1) { Log::Fatal << "The test data must have the same number of columns as the " "training file." << std::endl; } temp = responses - arma::trans( (arma::trans(parameters.subvec(1, parameters.n_elem - 1)) * predictors) + parameters(0)); } else { // Ensure that we have the correct number of dimensions in the dataset. if (nRows != parameters.n_rows) { Log::Fatal << "The test data must have the same number of columns as the " "training file." << std::endl; } temp = responses - arma::trans((arma::trans(parameters) * predictors)); } const double cost = arma::dot(temp, temp) / nCols; return cost; } mlpack-2.2.5/src/mlpack/methods/linear_regression/linear_regression.hpp000066400000000000000000000114521315013601400264220ustar00rootroot00000000000000/** * @file linear_regression.hpp * @author James Cline * @author Michael Fox * * Simple least-squares linear regression. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LINEAR_REGRESSION_LINEAR_REGRESSION_HPP #define MLPACK_METHODS_LINEAR_REGRESSION_LINEAR_REGRESSION_HPP #include namespace mlpack { namespace regression /** Regression methods. */ { /** * A simple linear regression algorithm using ordinary least squares. 
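 *
 * A minimal usage sketch (names are illustrative; data is column-major, with
 * one point per column):
 *
 * @code
 * arma::mat data(5, 100, arma::fill::randu); // 5 dimensions, 100 points.
 * arma::vec responses(100, arma::fill::randu);
 * LinearRegression lr(data, responses); // Training happens on construction.
 * arma::vec predictions;
 * lr.Predict(data, predictions); // Predict on the training points.
 * @endcode
 *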
* Optionally, this class can perform ridge regression, if the lambda parameter * is set to a number greater than zero. */ class LinearRegression { public: /** * Creates the model. * * @param predictors X, matrix of data points to create B with. * @param responses y, the measured data for each point in X. * @param lambda Regularization constant for ridge regression. * @param intercept Whether or not to include an intercept term. * @param weights Observation weights (for boosting). */ LinearRegression(const arma::mat& predictors, const arma::vec& responses, const double lambda = 0, const bool intercept = true, const arma::vec& weights = arma::vec()); /** * Copy constructor. * * @param linearRegression the other instance to copy parameters from. */ LinearRegression(const LinearRegression& linearRegression); /** * Empty constructor. This gives a non-working model, so make sure Train() is * called (or make sure the model parameters are set) before calling * Predict()! */ LinearRegression() : lambda(0.0), intercept(true) { } /** * Train the LinearRegression model on the given data. Careful! This will * completely ignore and overwrite the existing model. This particular * implementation does not have an incremental training algorithm. To set the * regularization parameter lambda, call Lambda() or set a different value in * the constructor. * * @param predictors X, the matrix of data points to train the model on. * @param responses y, the vector of responses to each data point. * @param intercept Whether or not to fit an intercept term. * @param weights Observation weights (for boosting). */ void Train(const arma::mat& predictors, const arma::vec& responses, const bool intercept = true, const arma::vec& weights = arma::vec()); /** * Calculate y_i for each data point in points. * * @param points the data points to calculate with. * @param predictions y, will contain calculated values on completion. */ void Predict(const arma::mat& points, arma::vec& predictions) const; /** * Calculate the L2 squared error on the given predictors and responses using * this linear regression model. This calculation returns * * \f[ * (1 / n) * \| y - X B \|^2_2 * \f] * * where \f$ y \f$ is the responses vector, \f$ X \f$ is the matrix of * predictors, and \f$ B \f$ is the parameters of the trained linear * regression model. * * As this number decreases to 0, the linear regression fit is better. * * @param points Matrix of predictors (X). * @param responses Vector of responses (y). */ double ComputeError(const arma::mat& points, const arma::vec& responses) const; //! Return the parameters (the b vector). const arma::vec& Parameters() const { return parameters; } //! Modify the parameters (the b vector). arma::vec& Parameters() { return parameters; } //! Return the Tikhonov regularization parameter for ridge regression. double Lambda() const { return lambda; } //! Modify the Tikhonov regularization parameter for ridge regression. double& Lambda() { return lambda; } //! Return whether or not an intercept term is used in the model. bool Intercept() const { return intercept; } /** * Serialize the model. */ template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(parameters, "parameters"); ar & data::CreateNVP(lambda, "lambda"); ar & data::CreateNVP(intercept, "intercept"); } private: /** * The calculated B. * Initialized and filled by constructor to hold the least squares solution. 
*/ arma::vec parameters; /** * The Tikhonov regularization parameter for ridge regression (0 for linear * regression). */ double lambda; //! Indicates whether first parameter is intercept. bool intercept; }; } // namespace linear_regression } // namespace mlpack #endif // MLPACK_METHODS_LINEAR_REGRESSION_HPP mlpack-2.2.5/src/mlpack/methods/linear_regression/linear_regression_main.cpp000066400000000000000000000203321315013601400274160ustar00rootroot00000000000000/** * @file linear_regression_main.cpp * @author James Cline * * Main function for least-squares linear regression. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "linear_regression.hpp" PROGRAM_INFO("Simple Linear Regression and Prediction", "An implementation of simple linear regression and simple ridge regression " "using ordinary least squares. This solves the problem\n\n" " y = X * b + e\n\n" "where X (--input_file) and y (the last column of --input_file, or " "--input_responses) are known and b is the desired variable. If the " "covariance matrix (X'X) is not invertible, or if the solution is " "overdetermined, then specify a Tikhonov regularization constant (--lambda)" " greater than 0, which will regularize the covariance matrix to make it " "invertible. The calculated b is saved to disk (--output_file).\n" "\n" "Optionally, the calculated value of b is used to predict the responses for" " another matrix X' (--test_file):\n\n" " y' = X' * b\n\n" "and these predicted responses, y', are saved to a file " "(--output_predictions). This type of regression is related to " "least-angle regression, which mlpack implements with the 'lars' " "executable."); PARAM_STRING_IN("training_file", "File containing training set X (regressors).", "t", ""); PARAM_STRING_IN("training_responses", "Optional file containing y (responses). " "If not given, the responses are assumed to be the last row of the input " "file.", "r", ""); PARAM_STRING_IN("input_model_file", "File containing existing model " "(parameters).", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained model to.", "M"); PARAM_STRING_IN("test_file", "File containing X' (test regressors).", "T", ""); // Keep for reverse compatibility. We can remove these for mlpack 3.0.0. PARAM_STRING_OUT("output_predictions", "If --test_file is specified, this file " "is where the predicted responses will be saved.", "p"); // This is the future name of the parameter. PARAM_STRING_OUT("output_predictions_file", "If --test_file is specified, this " "file is where the predicted responses will be saved.", "o"); PARAM_DOUBLE_IN("lambda", "Tikhonov regularization for ridge regression. If 0," " the method reduces to linear regression.", "l", 0.0); using namespace mlpack; using namespace mlpack::regression; using namespace arma; using namespace std; int main(int argc, char* argv[]) { // Handle parameters. CLI::ParseCommandLine(argc, argv); // Reverse compatibility. We can remove these for mlpack 3.0.0. if (CLI::HasParam("output_predictions") && CLI::HasParam("output_predictions_file")) Log::Fatal << "Cannot specify both --output_predictions and " << "--output_predictions_file!" 
<< endl; if (CLI::HasParam("output_predictions")) { Log::Warn << "--output_predictions (-p) is deprecated and will be removed " << "in mlpack 3.0.0; use --output_predictions_file (-o) instead." << endl; CLI::GetParam("output_predictions_file") = CLI::GetParam("output_predictions"); } const string inputModelFile = CLI::GetParam("input_model_file"); const string outputModelFile = CLI::GetParam("output_model_file"); const string outputPredictionsFile = CLI::GetParam("output_predictions_file"); const string trainingResponsesFile = CLI::GetParam("training_responses"); const string testFile = CLI::GetParam("test_file"); const string trainFile = CLI::GetParam("training_file"); const double lambda = CLI::GetParam("lambda"); if (testFile == "" && outputPredictionsFile != "") Log::Warn << "--output_predictions_file (-o) ignored because --test_file " << "(-T) is not specified." << endl; mat regressors; mat responses; LinearRegression lr; lr.Lambda() = lambda; bool computeModel = false; // We want to determine if an input file XOR model file were given. if (!CLI::HasParam("training_file")) { if (!CLI::HasParam("input_model_file")) Log::Fatal << "You must specify either --input_file or --model_file." << endl; else // The model file was specified, no problems. computeModel = false; } // The user specified an input file but no model file, no problems. else if (!CLI::HasParam("input_model_file")) computeModel = true; // The user specified both an input file and model file. // This is ambiguous -- which model should we use? A generated one or given // one? Report error and exit. else { Log::Fatal << "You must specify either --input_file or --model_file, not " << "both." << endl; } if (CLI::HasParam("test_file") && (CLI::GetParam("output_predictions_file") == "")) Log::Warn << "--test_file (-t) specified, but --output_predictions_file " << "(-o) is not; no results will be saved." << endl; // If they specified a model file, we also need a test file or we // have nothing to do. if (!computeModel && !CLI::HasParam("test_file")) { Log::Fatal << "When specifying --model_file, you must also specify " << "--test_file." << endl; } if (!computeModel && CLI::HasParam("lambda")) { Log::Warn << "--lambda ignored because no model is being trained." << endl; } if (outputModelFile == "" && outputPredictionsFile == "") { Log::Warn << "Neither --output_model_file nor --output_predictions_file are " << "specified; no output will be saved!" << endl; } // An input file was given and we need to generate the model. if (computeModel) { Timer::Start("load_regressors"); data::Load(trainFile, regressors, true); Timer::Stop("load_regressors"); // Are the responses in a separate file? if (!CLI::HasParam("training_responses")) { // The initial predictors for y, Nx1. responses = trans(regressors.row(regressors.n_rows - 1)); regressors.shed_row(regressors.n_rows - 1); } else { // The initial predictors for y, Nx1. Timer::Start("load_responses"); data::Load(trainingResponsesFile, responses, true); Timer::Stop("load_responses"); if (responses.n_rows == 1) responses = trans(responses); // Probably loaded backwards. if (responses.n_cols > 1) Log::Fatal << "The responses must have one column.\n"; if (responses.n_rows != regressors.n_cols) Log::Fatal << "The responses must have the same number of rows as the " "training file.\n"; } Timer::Start("regression"); lr = LinearRegression(regressors, responses.unsafe_col(0)); Timer::Stop("regression"); // Save the parameters. 
if (CLI::HasParam("output_model_file")) data::Save(outputModelFile, "linearRegressionModel", lr); } // Did we want to predict, too? if (CLI::HasParam("test_file")) { // A model file was passed in, so load it. if (!computeModel) { Timer::Start("load_model"); data::Load(inputModelFile, "linearRegressionModel", lr, true); Timer::Stop("load_model"); } // Load the test file data. arma::mat points; Timer::Start("load_test_points"); data::Load(testFile, points, true); Timer::Stop("load_test_points"); // Ensure that test file data has the right number of features. if ((lr.Parameters().n_elem - 1) != points.n_rows) { Log::Fatal << "The model was trained on " << lr.Parameters().n_elem - 1 << "-dimensional data, but the test points in '" << testFile << "' are " << points.n_rows << "-dimensional!" << endl; } // Perform the predictions using our model. arma::vec predictions; Timer::Start("prediction"); lr.Predict(points, predictions); Timer::Stop("prediction"); // Save predictions. if (outputPredictionsFile != "") data::Save(outputPredictionsFile, predictions, true, false); } } mlpack-2.2.5/src/mlpack/methods/local_coordinate_coding/000077500000000000000000000000001315013601400233065ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/local_coordinate_coding/CMakeLists.txt000066400000000000000000000013001315013601400260400ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into the output library # Do not include test programs here # In this library, these are specified twice, once here, and once for the individual library it belongs to, so make sure # that you have files in both sections set(SOURCES lcc.hpp lcc.cpp lcc_impl.hpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # append sources (with directory name) to list of all mlpack sources (used at the parent scope) set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(local_coordinate_coding) mlpack-2.2.5/src/mlpack/methods/local_coordinate_coding/lcc.cpp000066400000000000000000000175711315013601400245660ustar00rootroot00000000000000/** * @file lcc.cpp * @author Nishant Mehta * * Implementation of Local Coordinate Coding. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "lcc.hpp" #include namespace mlpack { namespace lcc { LocalCoordinateCoding::LocalCoordinateCoding( const size_t atoms, const double lambda, const size_t maxIterations, const double tolerance) : atoms(atoms), lambda(lambda), maxIterations(maxIterations), tolerance(tolerance) { // Nothing to do. } void LocalCoordinateCoding::Encode(const arma::mat& data, arma::mat& codes) { arma::mat invSqDists = 1.0 / (repmat(trans(sum(square(dictionary))), 1, data.n_cols) + repmat(sum(square(data)), atoms, 1) - 2 * trans(dictionary) * data); arma::mat dictGram = trans(dictionary) * dictionary; arma::mat dictGramTD(dictGram.n_rows, dictGram.n_cols); codes.set_size(atoms, data.n_cols); for (size_t i = 0; i < data.n_cols; i++) { // Report progress. if ((i % 100) == 0) { Log::Debug << "Optimization at point " << i << "." 
<< std::endl; } arma::vec invW = invSqDists.unsafe_col(i); arma::mat dictPrime = dictionary * diagmat(invW); arma::mat dictGramTD = diagmat(invW) * dictGram * diagmat(invW); bool useCholesky = false; regression::LARS lars(useCholesky, dictGramTD, 0.5 * lambda); // Run LARS for this point, by making an alias of the point and passing // that. arma::vec beta = codes.unsafe_col(i); lars.Train(dictPrime, data.unsafe_col(i), beta, false); beta %= invW; // Remember, beta is an alias of codes.col(i). } } void LocalCoordinateCoding::OptimizeDictionary(const arma::mat& data, const arma::mat& codes, const arma::uvec& adjacencies) { // Count number of atomic neighbors for each point x^i. arma::uvec neighborCounts = arma::zeros(data.n_cols, 1); if (adjacencies.n_elem > 0) { // This gets the column index. Intentional integer division. size_t curPointInd = (size_t) (adjacencies(0) / atoms); ++neighborCounts(curPointInd); size_t nextColIndex = (curPointInd + 1) * atoms; for (size_t l = 1; l < adjacencies.n_elem; l++) { // If l no longer refers to an element in this column, advance the column // number accordingly. if (adjacencies(l) >= nextColIndex) { curPointInd = (size_t) (adjacencies(l) / atoms); nextColIndex = (curPointInd + 1) * atoms; } ++neighborCounts(curPointInd); } } // Build dataPrime := [X x^1 ... x^1 ... x^n ... x^n] // where each x^i is repeated for the number of neighbors x^i has. arma::mat dataPrime = arma::zeros(data.n_rows, data.n_cols + adjacencies.n_elem); dataPrime(arma::span::all, arma::span(0, data.n_cols - 1)) = data; size_t curCol = data.n_cols; for (size_t i = 0; i < data.n_cols; i++) { if (neighborCounts(i) > 0) { dataPrime(arma::span::all, arma::span(curCol, curCol + neighborCounts(i) - 1)) = repmat(data.col(i), 1, neighborCounts(i)); } curCol += neighborCounts(i); } // Handle the case of inactive atoms (atoms not used in the given coding). std::vector inactiveAtoms; for (size_t j = 0; j < atoms; ++j) if (accu(codes.row(j) != 0) == 0) inactiveAtoms.push_back(j); const size_t nInactiveAtoms = inactiveAtoms.size(); const size_t nActiveAtoms = atoms - nInactiveAtoms; // Efficient construction of codes restricted to active atoms. arma::mat codesPrime = arma::zeros(nActiveAtoms, data.n_cols + adjacencies.n_elem); arma::vec wSquared = arma::ones(data.n_cols + adjacencies.n_elem, 1); if (nInactiveAtoms > 0) { Log::Warn << "There are " << nInactiveAtoms << " inactive atoms. They will be re-initialized randomly.\n"; // Create matrix holding only active codes. arma::mat activeCodes; math::RemoveRows(codes, inactiveAtoms, activeCodes); // Create reverse atom lookup for active atoms. arma::uvec atomReverseLookup(atoms); size_t inactiveOffset = 0; for (size_t i = 0; i < atoms; ++i) { if (inactiveAtoms[inactiveOffset] == i) ++inactiveOffset; else atomReverseLookup(i - inactiveOffset) = i; } codesPrime(arma::span::all, arma::span(0, data.n_cols - 1)) = activeCodes; // Fill the rest of codesPrime. for (size_t l = 0; l < adjacencies.n_elem; ++l) { // Recover the location in the codes matrix that this adjacency refers to. size_t atomInd = adjacencies(l) % atoms; size_t pointInd = (size_t) (adjacencies(l) / atoms); // Fill matrix. codesPrime(atomReverseLookup(atomInd), data.n_cols + l) = 1.0; wSquared(data.n_cols + l) = codes(atomInd, pointInd); } } else { // All atoms are active. codesPrime(arma::span::all, arma::span(0, data.n_cols - 1)) = codes; for (size_t l = 0; l < adjacencies.n_elem; ++l) { // Recover the location in the codes matrix that this adjacency refers to. 
size_t atomInd = adjacencies(l) % atoms; size_t pointInd = (size_t) (adjacencies(l) / atoms); // Fill matrix. codesPrime(atomInd, data.n_cols + l) = 1.0; wSquared(data.n_cols + l) = codes(atomInd, pointInd); } } wSquared.subvec(data.n_cols, wSquared.n_elem - 1) = lambda * abs(wSquared.subvec(data.n_cols, wSquared.n_elem - 1)); // Solve system. if (nInactiveAtoms == 0) { // No inactive atoms. We can solve directly. arma::mat A = codesPrime * diagmat(wSquared) * trans(codesPrime); arma::mat B = codesPrime * diagmat(wSquared) * trans(dataPrime); dictionary = trans(solve(A, B)); /* dictionary = trans(solve(codesPrime * diagmat(wSquared) * trans(codesPrime), codesPrime * diagmat(wSquared) * trans(dataPrime))); */ } else { // Inactive atoms must be reinitialized randomly, so we cannot solve // directly for the entire dictionary estimate. arma::mat dictionaryActive = trans(solve(codesPrime * diagmat(wSquared) * trans(codesPrime), codesPrime * diagmat(wSquared) * trans(dataPrime))); // Update all atoms. size_t currentInactiveIndex = 0; for (size_t i = 0; i < atoms; ++i) { if (inactiveAtoms[currentInactiveIndex] == i) { // This atom is inactive. Reinitialize it randomly. dictionary.col(i) = (data.col(math::RandInt(data.n_cols)) + data.col(math::RandInt(data.n_cols)) + data.col(math::RandInt(data.n_cols))); // Now normalize the atom. dictionary.col(i) /= norm(dictionary.col(i), 2); // Increment inactive atom counter. ++currentInactiveIndex; } else { // Update estimate. dictionary.col(i) = dictionaryActive.col(i - currentInactiveIndex); } } } } double LocalCoordinateCoding::Objective(const arma::mat& data, const arma::mat& codes, const arma::uvec& adjacencies) const { double weightedL1NormZ = 0; for (size_t l = 0; l < adjacencies.n_elem; l++) { // Map adjacency back to its location in the codes matrix. const size_t atomInd = adjacencies(l) % atoms; const size_t pointInd = (size_t) (adjacencies(l) / atoms); weightedL1NormZ += fabs(codes(atomInd, pointInd)) * arma::as_scalar( arma::sum(arma::square(dictionary.col(atomInd) - data.col(pointInd)))); } double froNormResidual = norm(data - dictionary * codes, "fro"); return std::pow(froNormResidual, 2.0) + lambda * weightedL1NormZ; } } // namespace lcc } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/local_coordinate_coding/lcc.hpp000066400000000000000000000177161315013601400245740ustar00rootroot00000000000000/** * @file lcc.hpp * @author Nishant Mehta * * Definition of the LocalCoordinateCoding class, which performs the Local * Coordinate Coding algorithm. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LOCAL_COORDINATE_CODING_LCC_HPP #define MLPACK_METHODS_LOCAL_COORDINATE_CODING_LCC_HPP #include #include // Include three simple dictionary initializers from sparse coding. #include "../sparse_coding/nothing_initializer.hpp" #include "../sparse_coding/data_dependent_random_initializer.hpp" #include "../sparse_coding/random_initializer.hpp" namespace mlpack { namespace lcc { /** * An implementation of Local Coordinate Coding (LCC) that codes data which * approximately lives on a manifold using a variation of l1-norm regularized * sparse coding; in LCC, the penalty on the absolute value of each point's * coefficient for each atom is weighted by the squared distance of that point * to that atom. 
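 * For example, if a point x lies far from atom d_j, then the penalty on x's
 * coefficient for d_j is scaled by the large factor ||x - d_j||^2, so each
 * point is encouraged to encode itself using only nearby atoms.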
* * Let d be the number of dimensions in the original space, m the number of * training points, and k the number of atoms in the dictionary (the dimension * of the learned feature space). The training data X is a d-by-m matrix where * each column is a point and each row is a dimension. The dictionary D is a * d-by-k matrix, and the sparse codes matrix Z is a k-by-m matrix. * This program seeks to minimize the objective: * min_{D,Z} ||X - D Z||_{Fro}^2 * + lambda sum_{i=1}^m sum_{j=1}^k dist(X_i,D_j)^2 Z_i^j * where lambda > 0. * * This problem is solved by an algorithm that alternates between a dictionary * learning step and a sparse coding step. The dictionary learning step updates * the dictionary D by solving a linear system (note that the objective is a * positive definite quadratic program). The sparse coding step involves * solving a large number of weighted l1-norm regularized linear regression * problems problems; this can be done efficiently using LARS, an algorithm * that can solve the LASSO (paper below). * * The papers are listed below. * * @code * @incollection{NIPS2009_0719, * title = {Nonlinear Learning using Local Coordinate Coding}, * author = {Kai Yu and Tong Zhang and Yihong Gong}, * booktitle = {Advances in Neural Information Processing Systems 22}, * editor = {Y. Bengio and D. Schuurmans and J. Lafferty and C. K. I. Williams * and A. Culotta}, * pages = {2223--2231}, * year = {2009} * } * @endcode * * @code * @article{efron2004least, * title={Least angle regression}, * author={Efron, B. and Hastie, T. and Johnstone, I. and Tibshirani, R.}, * journal={The Annals of statistics}, * volume={32}, * number={2}, * pages={407--499}, * year={2004}, * publisher={Institute of Mathematical Statistics} * } * @endcode */ class LocalCoordinateCoding { public: /** * Set the parameters to LocalCoordinateCoding, and train the dictionary. * This constructor will also initialize the dictionary using the given * DictionaryInitializer before training. * * If you want to initialize the dictionary to a custom matrix, consider * either writing your own DictionaryInitializer class (with void * Initialize(const arma::mat& data, arma::mat& dictionary) function), or call * the constructor that does not take a data matrix, then call Dictionary() to * set the dictionary matrix to a matrix of your choosing, and then call * Train() with sparse_coding::NothingInitializer (i.e. * Train(data)). * * @param data Data matrix. * @param atoms Number of atoms in dictionary. * @param lambda Regularization parameter for weighted l1-norm penalty. * @param maxIterations Maximum number of iterations for training (0 runs * until convergence). * @param tolerance Tolerance for the objective function. */ template< typename DictionaryInitializer = sparse_coding::DataDependentRandomInitializer > LocalCoordinateCoding(const arma::mat& data, const size_t atoms, const double lambda, const size_t maxIterations = 0, const double tolerance = 0.01, const DictionaryInitializer& initializer = DictionaryInitializer()); /** * Set the parameters to LocalCoordinateCoding. This constructor will not * train the model, and a subsequent call to Train() will be required before * the model can encode points with Encode(). * * @param atoms Number of atoms in dictionary. * @param lambda Regularization parameter for weighted l1-norm penalty. * @param maxIterations Maximum number of iterations for training (0 runs * until convergence). * @param tolerance Tolerance for the objective function. 
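   *
   * A minimal sketch (names are illustrative; data is column-major):
   *
   * @code
   * arma::mat data(10, 200, arma::fill::randu);
   * LocalCoordinateCoding lcc(data, 5, 0.1); // 5 atoms, lambda = 0.1.
   * arma::mat codes;
   * lcc.Encode(data, codes); // codes is now 5 x 200.
   * @endcode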
*/ LocalCoordinateCoding(const size_t atoms, const double lambda, const size_t maxIterations = 0, const double tolerance = 0.01); /** * Run local coordinate coding. * * @param nIterations Maximum number of iterations to run algorithm. * @param objTolerance Tolerance of objective function. When the objective * function changes by a value lower than this tolerance, the optimization * terminates. */ template< typename DictionaryInitializer = sparse_coding::DataDependentRandomInitializer > void Train(const arma::mat& data, const DictionaryInitializer& initializer = DictionaryInitializer()); /** * Code each point via distance-weighted LARS. * * @param data Matrix containing points to encode. * @param codes Output matrix to store codes in. */ void Encode(const arma::mat& data, arma::mat& codes); /** * Learn dictionary by solving linear system. * * @param adjacencies Indices of entries (unrolled column by column) of * the coding matrix Z that are non-zero (the adjacency matrix for the * bipartite graph of points and atoms) */ void OptimizeDictionary(const arma::mat& data, const arma::mat& codes, const arma::uvec& adjacencies); /** * Compute objective function given the list of adjacencies. */ double Objective(const arma::mat& data, const arma::mat& codes, const arma::uvec& adjacencies) const; //! Get the number of atoms. size_t Atoms() const { return atoms; } //! Modify the number of atoms. size_t& Atoms() { return atoms; } //! Accessor for dictionary. const arma::mat& Dictionary() const { return dictionary; } //! Mutator for dictionary. arma::mat& Dictionary() { return dictionary; } //! Get the L1 regularization parameter. double Lambda() const { return lambda; } //! Modify the L1 regularization parameter. double& Lambda() { return lambda; } //! Get the maximum number of iterations. size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations. size_t& MaxIterations() { return maxIterations; } //! Get the objective tolerance. double Tolerance() const { return tolerance; } //! Modify the objective tolerance. double& Tolerance() { return tolerance; } //! Serialize the model. template void Serialize(Archive& ar, const unsigned int /* version */); private: //! Number of atoms in dictionary. size_t atoms; //! Dictionary (columns are atoms). arma::mat dictionary; //! l1 regularization term. double lambda; //! Maximum number of iterations during training. size_t maxIterations; //! Tolerance for main objective. double tolerance; }; } // namespace lcc } // namespace mlpack // Include implementation. #include "lcc_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/local_coordinate_coding/lcc_impl.hpp000066400000000000000000000072141315013601400256050ustar00rootroot00000000000000/** * @file lcc_impl.hpp * @author Nishant Mehta * * Implementation of Local Coordinate Coding * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LOCAL_COORDINATE_CODING_LCC_IMPL_HPP #define MLPACK_METHODS_LOCAL_COORDINATE_CODING_LCC_IMPL_HPP // In case it hasn't been included yet. 
#include "lcc.hpp" namespace mlpack { namespace lcc { template LocalCoordinateCoding::LocalCoordinateCoding( const arma::mat& data, const size_t atoms, const double lambda, const size_t maxIterations, const double tolerance, const DictionaryInitializer& initializer) : atoms(atoms), lambda(lambda), maxIterations(maxIterations), tolerance(tolerance) { // Train the model. Train(data, initializer); } template void LocalCoordinateCoding::Train( const arma::mat& data, const DictionaryInitializer& initializer) { Timer::Start("local_coordinate_coding"); // Initialize the dictionary. initializer.Initialize(data, atoms, dictionary); double lastObjVal = DBL_MAX; // Take the initial coding step, which has to happen before entering the main // loop. Log::Info << "Initial Coding Step." << std::endl; arma::mat codes; Encode(data, codes); arma::uvec adjacencies = find(codes); Log::Info << " Sparsity level: " << 100.0 * ((double)(adjacencies.n_elem)) / ((double)(atoms * data.n_cols)) << "%.\n"; Log::Info << " Objective value: " << Objective(data, codes, adjacencies) << "." << std::endl; for (size_t t = 1; t != maxIterations; t++) { Log::Info << "Iteration " << t << " of " << maxIterations << "." << std::endl; // First step: optimize the dictionary. Log::Info << "Performing dictionary step..." << std::endl; OptimizeDictionary(data, codes, adjacencies); double dsObjVal = Objective(data, codes, adjacencies); Log::Info << " Objective value: " << dsObjVal << "." << std::endl; // Second step: perform the coding. Log::Info << "Performing coding step..." << std::endl; Encode(data, codes); adjacencies = find(codes); Log::Info << " Sparsity level: " << 100.0 * ((double) (adjacencies.n_elem)) / ((double)(atoms * data.n_cols)) << "%.\n"; // Terminate if the objective increased in the coding step. double curObjVal = Objective(data, codes, adjacencies); if (curObjVal > dsObjVal) { Log::Warn << "Objective increased in coding step! Terminating." << std::endl; break; } // Find the new objective value and improvement so we can check for // convergence. double improvement = lastObjVal - curObjVal; Log::Info << "Objective value: " << curObjVal << " (improvement " << std::scientific << improvement << ")." << std::endl; if (improvement < tolerance) { Log::Info << "Converged within tolerance " << tolerance << ".\n"; break; } lastObjVal = curObjVal; } Timer::Stop("local_coordinate_coding"); } template void LocalCoordinateCoding::Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(atoms, "atoms"); ar & data::CreateNVP(dictionary, "dictionary"); ar & data::CreateNVP(lambda, "lambda"); ar & data::CreateNVP(maxIterations, "maxIterations"); ar & data::CreateNVP(tolerance, "tolerance"); } } // namespace lcc } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/local_coordinate_coding/local_coordinate_coding_main.cpp000066400000000000000000000223571315013601400316530ustar00rootroot00000000000000/** * @file lcc_main.cpp * @author Nishant Mehta * * Executable for Local Coordinate Coding. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include #include #include #include #include "lcc.hpp" PROGRAM_INFO("Local Coordinate Coding", "An implementation of Local Coordinate Coding (LCC), which " "codes data that approximately lives on a manifold using a variation of l1-" "norm regularized sparse coding. Given a dense data matrix X with n points" " and d dimensions, LCC seeks to find a dense dictionary matrix D with k " "atoms in d dimensions, and a coding matrix Z with n points in k " "dimensions. Because of the regularization method used, the atoms in D " "should lie close to the manifold on which the data points lie." "\n\n" "The original data matrix X can then be reconstructed as D * Z. Therefore," " this program finds a representation of each point in X as a sparse linear" " combination of atoms in the dictionary D." "\n\n" "The coding is found with an algorithm which alternates between a " "dictionary step, which updates the dictionary D, and a coding step, which " "updates the coding matrix Z." "\n\n" "To run this program, the input matrix X must be specified (with -i), along" " with the number of atoms in the dictionary (-k). An initial dictionary " "may also be specified with the --initial_dictionary option. The l1-norm " "regularization parameter is specified with -l. For example, to run LCC on" " the dataset in data.csv using 200 atoms and an l1-regularization " "parameter of 0.1, saving the dictionary into dict.csv and the codes into " "codes.csv, use " "\n\n" "$ local_coordinate_coding -i data.csv -k 200 -l 0.1 -d dict.csv -c " "codes.csv" "\n\n" "The maximum number of iterations may be specified with the -n option. " "Optionally, the input data matrix X can be normalized before coding with " "the -N option."); // Training parameters. PARAM_STRING_IN("training_file", "Filename of the training data (X).", "t", ""); PARAM_INT_IN("atoms", "Number of atoms in the dictionary.", "k", 0); PARAM_DOUBLE_IN("lambda", "Weighted l1-norm regularization parameter.", "l", 0.0); PARAM_INT_IN("max_iterations", "Maximum number of iterations for LCC (0 " "indicates no limit).", "n", 0); PARAM_STRING_IN("initial_dictionary", "Filename for optional initial " "dictionary.", "i", ""); PARAM_FLAG("normalize", "If set, the input data matrix will be normalized " "before coding.", "N"); PARAM_DOUBLE_IN("tolerance", "Tolerance for objective function.", "o", 0.01); // Load/save a model. PARAM_STRING_IN("input_model_file", "File containing input LCC model.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained LCC model to.", "M"); // Test on another dataset. PARAM_STRING_IN("test_file", "File of test points to encode.", "T", ""); PARAM_STRING_OUT("dictionary_file", "Filename to save the output dictionary " "to.", "d"); PARAM_STRING_OUT("codes_file", "Filename to save the output codes to.", "c"); PARAM_INT_IN("seed", "Random seed. If 0, 'std::time(NULL)' is used.", "s", 0); using namespace arma; using namespace std; using namespace mlpack; using namespace mlpack::math; using namespace mlpack::lcc; using namespace mlpack::sparse_coding; // For NothingInitializer. int main(int argc, char* argv[]) { CLI::ParseCommandLine(argc, argv); if (CLI::GetParam("seed") != 0) RandomSeed((size_t) CLI::GetParam("seed")); else RandomSeed((size_t) std::time(NULL)); // Check for parameter validity. if (CLI::HasParam("input_model_file") && CLI::HasParam("initial_dictionary")) Log::Fatal << "Cannot specify both --input_model_file (-m) and " << "--initial_dictionary (-i)!" 
<< endl; if (CLI::HasParam("training_file") && !CLI::HasParam("atoms")) Log::Fatal << "If --training_file is specified, the number of atoms in the " << "dictionary must be specified with --atoms (-k)!" << endl; if (!CLI::HasParam("training_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "One of --training_file (-t) or --input_model_file (-m) must " << "be specified!" << endl; if (!CLI::HasParam("codes_file") && !CLI::HasParam("dictionary_file") && !CLI::HasParam("output_model_file")) Log::Warn << "Neither --codes_file (-c), --dictionary_file (-d), nor " << "--output_model_file (-M) are specified; no output will be saved." << endl; if (CLI::HasParam("codes_file") && !CLI::HasParam("test_file")) Log::Fatal << "--codes_file (-c) is specified, but no test matrix (" << "specified with --test_file or -T) is given to encode!" << endl; if (!CLI::HasParam("training_file")) { if (CLI::HasParam("atoms")) Log::Warn << "--atoms (-k) ignored because --training_file (-t) is not " << "specified." << endl; if (CLI::HasParam("lambda")) Log::Warn << "--lambda (-l) ignored because --training_file (-t) is not " << "specified." << endl; if (CLI::HasParam("initial_dictionary")) Log::Warn << "--initial_dictionary (-i) ignored because --training_file " << "(-t) is not specified." << endl; if (CLI::HasParam("max_iterations")) Log::Warn << "--max_iterations (-n) ignored because --training_file (-t) " << "is not specified." << endl; if (CLI::HasParam("normalize")) Log::Warn << "--normalize (-N) ignored because --training_file (-t) is " << "not specified." << endl; if (CLI::HasParam("tolerance")) Log::Warn << "--tolerance (-o) ignored because --training_file (-t) is " << "not specified." << endl; } // Do we have an existing model? LocalCoordinateCoding lcc(0, 0.0); if (CLI::HasParam("input_model_file")) { data::Load(CLI::GetParam("input_model_file"), "lcc_model", lcc, true); } if (CLI::HasParam("training_file")) { mat matX; data::Load(CLI::GetParam("training_file"), matX, true); // Normalize each point if the user asked for it. if (CLI::HasParam("normalize")) { Log::Info << "Normalizing data before coding..." << endl; for (size_t i = 0; i < matX.n_cols; ++i) matX.col(i) /= norm(matX.col(i), 2); } lcc.Lambda() = CLI::GetParam("lambda"); lcc.Atoms() = (size_t) CLI::GetParam("atoms"); lcc.MaxIterations() = (size_t) CLI::GetParam("max_iterations"); lcc.Tolerance() = CLI::GetParam("tolerance"); // Inform the user if we are overwriting their model. if (CLI::HasParam("input_model_file")) { Log::Info << "Using dictionary from existing model in '" << CLI::GetParam("input_model_file") << "' as initial " << "dictionary for training." << endl; lcc.Train(matX); } else if (CLI::HasParam("initial_dictionary")) { // Load initial dictionary directly into LCC object. data::Load(CLI::GetParam("initial_dictionary"), lcc.Dictionary(), true); // Validate the size of the initial dictionary. if (lcc.Dictionary().n_cols != lcc.Atoms()) { Log::Fatal << "The initial dictionary has " << lcc.Dictionary().n_cols << " atoms, but the number of atoms was specified to be " << lcc.Atoms() << "!" << endl; } if (lcc.Dictionary().n_rows != matX.n_rows) { Log::Fatal << "The initial dictionary has " << lcc.Dictionary().n_rows << " dimensions, but the data has " << matX.n_rows << " dimensions!" << endl; } // Train the model. lcc.Train(matX); } else { // Run with the default initialization. lcc.Train(matX); } } // Now, do we have any matrix to encode? 
if (CLI::HasParam("test_file")) { mat matY; data::Load(CLI::GetParam("test_file"), matY, true); if (matY.n_rows != lcc.Dictionary().n_rows) Log::Fatal << "Model was trained with a dimensionality of " << lcc.Dictionary().n_rows << ", but data in test file " << CLI::GetParam("test_file") << " has a dimensionality of " << matY.n_rows << "!" << endl; // Normalize each point if the user asked for it. if (CLI::HasParam("normalize")) { Log::Info << "Normalizing test data before coding..." << endl; for (size_t i = 0; i < matY.n_cols; ++i) matY.col(i) /= norm(matY.col(i), 2); } mat codes; lcc.Encode(matY, codes); if (CLI::HasParam("codes_file")) data::Save(CLI::GetParam("codes_file"), codes); } // Did the user want to save the dictionary? if (CLI::HasParam("dictionary_file")) data::Save(CLI::GetParam("dictionary_file"), lcc.Dictionary()); // Did the user want to save the model? if (CLI::HasParam("output_model_file")) data::Save(CLI::GetParam("output_model_file"), "lcc_model", lcc, false); // Non-fatal on failure. } mlpack-2.2.5/src/mlpack/methods/logistic_regression/000077500000000000000000000000001315013601400225375ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/logistic_regression/CMakeLists.txt000066400000000000000000000011761315013601400253040ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into the output library # Do not include test programs here set(SOURCES logistic_regression.hpp logistic_regression_impl.hpp logistic_regression_function.hpp logistic_regression_function_impl.hpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # append sources (with directory name) to list of all mlpack sources (used at # the parent scope) set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(logistic_regression) mlpack-2.2.5/src/mlpack/methods/logistic_regression/logistic_regression.hpp000066400000000000000000000251611315013601400273320ustar00rootroot00000000000000/** * @file logistic_regression.hpp * @author Sumedh Ghaisas * * The LogisticRegression class, which implements logistic regression. This * implements supports L2-regularization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LOGISTIC_REGRESSION_LOGISTIC_REGRESSION_HPP #define MLPACK_METHODS_LOGISTIC_REGRESSION_LOGISTIC_REGRESSION_HPP #include #include #include "logistic_regression_function.hpp" namespace mlpack { namespace regression { /** * The LogisticRegression class implements an L2-regularized logistic regression * model, and supports training with multiple optimizers and classification. * The class supports different observation types via the MatType template * parameter; for instance, logistic regression can be performed on sparse * datasets by specifying arma::sp_mat as the MatType parameter. * * @tparam MatType Type of data matrix. */ template class LogisticRegression { public: /** * Construct the LogisticRegression class with the given labeled training * data. This will train the model. Optionally, specify lambda, which is the * penalty parameter for L2-regularization. If not specified, it is set to 0, * which results in standard (unregularized) logistic regression. 
* * It is not possible to set a custom optimizer with this constructor. Either * use a constructor that does not train and call Train() with a custom * optimizer type, or use the constructor that takes an instantiated * optimizer. (This unfortunate situation is a language restriction of C++.) * * @param predictors Input training variables. * @param responses Outputs resulting from input training variables. * @param lambda L2-regularization parameter. */ LogisticRegression(const MatType& predictors, const arma::Row& responses, const double lambda = 0); /** * Construct the LogisticRegression class with the given labeled training * data. This will train the model. Optionally, specify lambda, which is the * penalty parameter for L2-regularization. If not specified, it is set to 0, * which results in standard (unregularized) logistic regression. * * It is not possible to set a custom optimizer with this constructor. Either * use a constructor that does not train and call Train() with a custom * optimizer type, or use the constructor that takes an instantiated * optimizer. (This unfortunate situation is a language restriction of C++.) * * @param predictors Input training variables. * @param responses Outputs results from input training variables. * @param initialPoint Initial model to train with. * @param lambda L2-regularization parameter. */ LogisticRegression(const MatType& predictors, const arma::Row& responses, const arma::vec& initialPoint, const double lambda = 0); /** * Construct the LogisticRegression class without performing any training. * The dimensionality of the data (which will be used to set the size of the * parameters vector) must be specified, and all of the parameters in the * model will be set to 0. Note that the dimensionality may be changed later * by directly modifying the parameters vector (using Parameters()). * * @param dimensionality Dimensionality of the data. * @param lambda L2-regularization parameter. */ LogisticRegression(const size_t dimensionality, const double lambda = 0); /** * Construct the LogisticRegression class with the given labeled training * data. This will train the model. This overload takes an already * instantiated optimizer (which holds the LogisticRegressionFunction error * function, which must also be instantiated), so that the optimizer can be * configured before the training is run by this constructor. The predictors * and responses and initial point are all taken from the error function * contained in the optimizer. * * @param optimizer Instantiated optimizer with instantiated error function. */ template class OptimizerType> LogisticRegression( OptimizerType>& optimizer); /** * Train the LogisticRegression model on the given input data. By default, * the L-BFGS optimization algorithm is used, but others can be specified * (such as mlpack::optimization::SGD). * * This will use the existing model parameters as a starting point for the * optimization. If this is not what you want, then you should access the * parameters vector directly with Parameters() and modify it as desired. * * @tparam OptimizerType Type of optimizer to use to train the model. * @param predictors Input training variables. * @param responses Outputs results from input training variables. */ template< template class OptimizerType = mlpack::optimization::L_BFGS > void Train(const MatType& predictors, const arma::Row& responses); /** * Train the LogisticRegression model with the given instantiated optimizer. 
* Using this overload allows configuring the instantiated optimizer before * training is performed. * * Note that the initial point of the optimizer * (optimizer.Function().GetInitialPoint()) will be used as the initial point * of the optimization, overwriting any existing trained model. If you don't * want to overwrite the existing model, set * optimizer.Function().GetInitialPoint() to the current parameters vector, * accessible via Parameters(). * * @param optimizer Instantiated optimizer with instantiated error function. */ template< template class OptimizerType = mlpack::optimization::L_BFGS > void Train(OptimizerType>& optimizer); //! Return the parameters (the b vector). const arma::vec& Parameters() const { return parameters; } //! Modify the parameters (the b vector). arma::vec& Parameters() { return parameters; } //! Return the lambda value for L2-regularization. const double& Lambda() const { return lambda; } //! Modify the lambda value for L2-regularization. double& Lambda() { return lambda; } /** * Predict the responses to a given set of predictors. The responses will be * either 0 or 1. Optionally, specify the decision boundary; logistic * regression returns a value between 0 and 1. If the value is greater than * the decision boundary, the response is taken to be 1; otherwise, it is 0. * By default the decision boundary is 0.5. * * This method is deprecated---you should use Classify() instead. * * @param predictors Input predictors. * @param responses Vector to put output predictions of responses into. * @param decisionBoundary Decision boundary (default 0.5). */ mlpack_deprecated void Predict(const MatType& predictors, arma::Row& responses, const double decisionBoundary = 0.5) const; /** * Classify the given point. The predicted label is returned. Optionally, * specify the decision boundary; logistic regression returns a value between * 0 and 1. If the value is greater than the decision boundary, the response * is taken to be 1; otherwise, it is 0. By default the decision boundary is * 0.5. * * @param point Point to classify. * @param decisionBoundary Decision boundary (default 0.5). * @return Predicted label of point. */ template size_t Classify(const VecType& point, const double decisionBoundary = 0.5) const; /** * Classify the given points, returning the predicted labels for each point. * Optionally, specify the decision boundary; logistic regression returns a * value between 0 and 1. If the value is greater than the decision boundary, * the response is taken to be 1; otherwise, it is 0. By default the decision * boundary is 0.5. * * @param dataset Set of points to classify. * @param labels Predicted labels for each point. * @param decisionBoundary Decision boundary (default 0.5). */ void Classify(const MatType& dataset, arma::Row& labels, const double decisionBoundary = 0.5) const; /** * Classify the given points, returning class probabilities for each point. * * @param dataset Set of points to classify. * @param probabilities Class probabilities for each point (output). */ void Classify(const MatType& dataset, arma::mat& probabilities) const; /** * Compute the accuracy of the model on the given predictors and responses, * optionally using the given decision boundary. The responses should be * either 0 or 1. Logistic regression returns a value between 0 and 1. If * the value is greater than the decision boundary, the response is taken to * be 1; otherwise, it is 0. By default, the decision boundary is 0.5. * * The accuracy is returned as a percentage, between 0 and 100. 
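 *
 * For example (a sketch; lr is an already-trained model and the test data
 * are hypothetical):
 *
 * @code
 * arma::mat testData;           // Column-major: one point per column.
 * arma::Row<size_t> testLabels; // One label (0 or 1) per point.
 * // ... load testData and testLabels ...
 * const double acc = lr.ComputeAccuracy(testData, testLabels);
 * Log::Info << "Test accuracy: " << acc << "%." << std::endl;
 * @endcode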
* * @param predictors Input predictors. * @param responses Vector of responses. * @param decisionBoundary Decision boundary (default 0.5). * @return Percentage of responses that are predicted correctly. */ double ComputeAccuracy(const MatType& predictors, const arma::Row& responses, const double decisionBoundary = 0.5) const; /** * Compute the error of the model. This returns the negative objective * function of the logistic regression log-likelihood function. For the model * to be optimal, the negative log-likelihood function should be minimized. * * @param predictors Input predictors. * @param responses Vector of responses. */ double ComputeError(const MatType& predictors, const arma::Row& responses) const; //! Serialize the model. template void Serialize(Archive& ar, const unsigned int /* version */); private: //! Vector of trained parameters (size: dimensionality plus one). arma::vec parameters; //! L2-regularization penalty parameter. double lambda; }; } // namespace regression } // namespace mlpack // Include implementation. #include "logistic_regression_impl.hpp" #endif // MLPACK_METHODS_LOGISTIC_REGRESSION_LOGISTIC_REGRESSION_HPP mlpack-2.2.5/src/mlpack/methods/logistic_regression/logistic_regression_function.hpp000066400000000000000000000121111315013601400312260ustar00rootroot00000000000000/** * @file logistic_regression_function.hpp * @author Sumedh Ghaisas * * Implementation of the logistic regression function, which is meant to be * optimized by a separate optimizer class that takes LogisticRegressionFunction * as its FunctionType class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LOGISTIC_REGRESSION_LOGISTIC_REGRESSION_FUNCTION_HPP #define MLPACK_METHODS_LOGISTIC_REGRESSION_LOGISTIC_REGRESSION_FUNCTION_HPP #include namespace mlpack { namespace regression { /** * The log-likelihood function for the logistic regression objective function. * This is used by various mlpack optimizers to train a logistic regression * model. */ template class LogisticRegressionFunction { public: LogisticRegressionFunction(const MatType& predictors, const arma::Row& responses, const double lambda = 0); LogisticRegressionFunction(const MatType& predictors, const arma::Row& responses, const arma::vec& initialPoint, const double lambda = 0); //! Return the initial point for the optimization. const arma::mat& InitialPoint() const { return initialPoint; } //! Modify the initial point for the optimization. arma::mat& InitialPoint() { return initialPoint; } //! Return the regularization parameter (lambda). const double& Lambda() const { return lambda; } //! Modify the regularization parameter (lambda). double& Lambda() { return lambda; } //! Return the matrix of predictors. const MatType& Predictors() const { return predictors; } //! Return the vector of responses. const arma::vec& Responses() const { return responses; } /** * Evaluate the logistic regression log-likelihood function with the given * parameters. Note that if a point has 0 probability of being classified * directly with the given parameters, then Evaluate() will return nan (this * is kind of a corner case and should not happen for reasonable models). * * The optimum (minimum) of this function is 0.0, and occurs when each point * is classified correctly with very high probability. 
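 *
 * Written out (a reference sketch matching the computation in the
 * implementation; \f$ w \f$ is the parameter vector with intercept
 * \f$ w_0 \f$ and \f$ \sigma \f$ is the sigmoid function):
 *
 * \f[
 * f(w) = -\sum_i \left[ y_i \log \sigma(w_0 + w_{1:d}^T x_i)
 *        + (1 - y_i) \log\left(1 - \sigma(w_0 + w_{1:d}^T x_i)\right) \right]
 *        + \frac{\lambda}{2} \| w_{1:d} \|_2^2
 * \f]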
* * @param parameters Vector of logistic regression parameters. */ double Evaluate(const arma::mat& parameters) const; /** * Evaluate the logistic regression log-likelihood function with the given * parameters, but using only one data point. This is useful for optimizers * such as SGD, which require a separable objective function. Note that if * the point has 0 probability of being classified correctly with the given * parameters, then Evaluate() will return nan (this is kind of a corner case * and should not happen for reasonable models). * * The optimum (minimum) of this function is 0.0, and occurs when the point is * classified correctly with very high probability. * * @param parameters Vector of logistic regression parameters. * @param i Index of point to use for objective function evaluation. */ double Evaluate(const arma::mat& parameters, const size_t i) const; /** * Evaluate the gradient of the logistic regression log-likelihood function * with the given parameters. * * @param parameters Vector of logistic regression parameters. * @param gradient Vector to output gradient into. */ void Gradient(const arma::mat& parameters, arma::mat& gradient) const; /** * Evaluate the gradient of the logistic regression log-likelihood function * with the given parameters, and with respect to only one point in the * dataset. This is useful for optimizers such as SGD, which require a * separable objective function. * * @param parameters Vector of logistic regression parameters. * @param i Index of points to use for objective function gradient evaluation. * @param gradient Vector to output gradient into. */ void Gradient(const arma::mat& parameters, const size_t i, arma::mat& gradient) const; //! Return the initial point for the optimization. const arma::mat& GetInitialPoint() const { return initialPoint; } //! Return the number of separable functions (the number of predictor points). size_t NumFunctions() const { return predictors.n_cols; } private: //! The initial point, from which to start the optimization. arma::mat initialPoint; //! The matrix of data points (predictors). const MatType& predictors; //! The vector of responses to the input data points. const arma::Row& responses; //! The regularization parameter for L2-regularization. double lambda; }; } // namespace regression } // namespace mlpack // Include implementation. #include "logistic_regression_function_impl.hpp" #endif // MLPACK_METHODS_LOGISTIC_REGRESSION_LOGISTIC_REGRESSION_FUNCTION_HPP mlpack-2.2.5/src/mlpack/methods/logistic_regression/logistic_regression_function_impl.hpp000066400000000000000000000144641315013601400322640ustar00rootroot00000000000000/** * @file logistic_regression_function.cpp * @author Sumedh Ghaisas * * Implementation of the LogisticRegressionFunction class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LOGISTIC_REGRESSION_FUNCTION_IMPL_HPP #define MLPACK_METHODS_LOGISTIC_REGRESSION_FUNCTION_IMPL_HPP // In case it hasn't been included yet. 
#include "logistic_regression_function.hpp" namespace mlpack { namespace regression { template LogisticRegressionFunction::LogisticRegressionFunction( const MatType& predictors, const arma::Row& responses, const double lambda) : predictors(predictors), responses(responses), lambda(lambda) { initialPoint = arma::zeros(predictors.n_rows + 1, 1); // Sanity check. if (responses.n_elem != predictors.n_cols) Log::Fatal << "LogisticRegressionFunction::LogisticRegressionFunction(): " << "predictors matrix has " << predictors.n_cols << " points, but " << "responses vector has " << responses.n_elem << " elements (should be" << " " << predictors.n_cols << ")!" << std::endl; } template LogisticRegressionFunction::LogisticRegressionFunction( const MatType& predictors, const arma::Row& responses, const arma::vec& initialPoint, const double lambda) : initialPoint(initialPoint), predictors(predictors), responses(responses), lambda(lambda) { //to check if initialPoint is compatible with predictors if (initialPoint.n_rows != (predictors.n_rows + 1) || initialPoint.n_cols != 1) this->initialPoint = arma::zeros(predictors.n_rows + 1, 1); } /** * Evaluate the logistic regression objective function given the estimated * parameters. */ template double LogisticRegressionFunction::Evaluate( const arma::mat& parameters) const { // The objective function is the log-likelihood function (w is the parameters // vector for the model; y is the responses; x is the predictors; sig() is the // sigmoid function): // f(w) = sum(y log(sig(w'x)) + (1 - y) log(sig(1 - w'x))). // We want to minimize this function. L2-regularization is just lambda // multiplied by the squared l2-norm of the parameters then divided by two. // For the regularization, we ignore the first term, which is the intercept // term. const double regularization = 0.5 * lambda * arma::dot(parameters.col(0).subvec(1, parameters.n_elem - 1), parameters.col(0).subvec(1, parameters.n_elem - 1)); // Calculate vectors of sigmoids. The intercept term is parameters(0, 0) and // does not need to be multiplied by any of the predictors. const arma::vec exponents = parameters(0, 0) + predictors.t() * parameters.col(0).subvec(1, parameters.n_elem - 1); const arma::vec sigmoid = 1.0 / (1.0 + arma::exp(-exponents)); // Assemble full objective function. Often the objective function and the // regularization as given are divided by the number of features, but this // doesn't actually affect the optimization result, so we'll just ignore those // terms for computational efficiency. double result = 0.0; for (size_t i = 0; i < responses.n_elem; ++i) { if (responses[i] == 1) result += log(sigmoid[i]); else result += log(1.0 - sigmoid[i]); } // Invert the result, because it's a minimization. return -result + regularization; } /** * Evaluate the logistic regression objective function, but with only one point. * This is useful for optimizers that use a separable objective function, such * as SGD. */ template double LogisticRegressionFunction::Evaluate( const arma::mat& parameters, const size_t i) const { // Calculate the regularization term. We must divide by the number of points, // so that sum(Evaluate(parameters, [1:points])) == Evaluate(parameters). const double regularization = lambda * (1.0 / (2.0 * predictors.n_cols)) * arma::dot(parameters.col(0).subvec(1, parameters.n_elem - 1), parameters.col(0).subvec(1, parameters.n_elem - 1)); // Calculate sigmoid. 
const double exponent = parameters(0, 0) + arma::dot(predictors.col(i), parameters.col(0).subvec(1, parameters.n_elem - 1)); const double sigmoid = 1.0 / (1.0 + std::exp(-exponent)); if (responses[i] == 1) return -log(sigmoid) + regularization; else return -log(1.0 - sigmoid) + regularization; } //! Evaluate the gradient of the logistic regression objective function. template void LogisticRegressionFunction::Gradient( const arma::mat& parameters, arma::mat& gradient) const { // Regularization term. arma::mat regularization; regularization = lambda * parameters.col(0).subvec(1, parameters.n_elem - 1); const arma::rowvec sigmoids = (1 / (1 + arma::exp(-parameters(0, 0) - parameters.col(0).subvec(1, parameters.n_elem - 1).t() * predictors))); gradient.set_size(parameters.n_elem); gradient[0] = -arma::accu(responses - sigmoids); gradient.col(0).subvec(1, parameters.n_elem - 1) = -predictors * (responses - sigmoids).t() + regularization; } /** * Evaluate the individual gradients of the logistic regression objective * function with respect to individual points. This is useful for optimizers * that use a separable objective function, such as SGD. */ template void LogisticRegressionFunction::Gradient( const arma::mat& parameters, const size_t i, arma::mat& gradient) const { // Calculate the regularization term. arma::mat regularization; regularization = lambda * parameters.col(0).subvec(1, parameters.n_elem - 1) / predictors.n_cols; const double sigmoid = 1.0 / (1.0 + std::exp(-parameters(0, 0) - arma::dot(predictors.col(i), parameters.col(0).subvec(1, parameters.n_elem - 1)))); gradient.set_size(parameters.n_elem); gradient[0] = -(responses[i] - sigmoid); gradient.col(0).subvec(1, parameters.n_elem - 1) = -predictors.col(i) * (responses[i] - sigmoid) + regularization; } } // namespace regression } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/logistic_regression/logistic_regression_impl.hpp000066400000000000000000000141331315013601400303500ustar00rootroot00000000000000/** * @file logistic_regression_impl.hpp * @author Sumedh Ghaisas * * Implementation of the LogisticRegression class. This implementation supports * L2-regularization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_LOGISTIC_REGRESSION_LOGISTIC_REGRESSION_IMPL_HPP #define MLPACK_METHODS_LOGISTIC_REGRESSION_LOGISTIC_REGRESSION_IMPL_HPP // In case it hasn't been included yet. #include "logistic_regression.hpp" namespace mlpack { namespace regression { template LogisticRegression::LogisticRegression( const MatType& predictors, const arma::Row& responses, const double lambda) : parameters(arma::zeros(predictors.n_rows + 1)), lambda(lambda) { Train(predictors, responses); } template LogisticRegression::LogisticRegression( const MatType& predictors, const arma::Row& responses, const arma::vec& initialPoint, const double lambda) : parameters(initialPoint), lambda(lambda) { Train(predictors, responses); } template LogisticRegression::LogisticRegression( const size_t dimensionality, const double lambda) : parameters(arma::zeros(dimensionality + 1)), lambda(lambda) { // No training to do here. 
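  // (Illustrative sketch, with hypothetical data: such a model is typically
  // trained afterwards, e.g.
  //
  //   LogisticRegression<> lr(10, 0.5); // 10 dimensions, lambda = 0.5.
  //   lr.Train(predictors, responses);  // predictors: 10 x n; responses: 1 x n.
  //
  // or its parameters are set directly through Parameters().)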
} template template class OptimizerType> LogisticRegression::LogisticRegression( OptimizerType>& optimizer) : parameters(optimizer.Function().GetInitialPoint()), lambda(optimizer.Function().Lambda()) { Train(optimizer); } template template class OptimizerType> void LogisticRegression::Train(const MatType& predictors, const arma::Row& responses) { LogisticRegressionFunction errorFunction(predictors, responses, lambda); errorFunction.InitialPoint() = parameters; OptimizerType> optimizer(errorFunction); // Train the model. Timer::Start("logistic_regression_optimization"); const double out = optimizer.Optimize(parameters); Timer::Stop("logistic_regression_optimization"); Log::Info << "LogisticRegression::LogisticRegression(): final objective of " << "trained model is " << out << "." << std::endl; } template template class OptimizerType> void LogisticRegression::Train( OptimizerType>& optimizer) { // Everything is good. Just train the model. parameters = optimizer.Function().GetInitialPoint(); Timer::Start("logistic_regression_optimization"); const double out = optimizer.Optimize(parameters); Timer::Stop("logistic_regression_optimization"); Log::Info << "LogisticRegression::LogisticRegression(): final objective of " << "trained model is " << out << "." << std::endl; } template void LogisticRegression::Predict(const MatType& predictors, arma::Row& responses, const double decisionBoundary) const { // Calculate sigmoid function for each point. The (1.0 - decisionBoundary) // term correctly sets an offset so that floor() returns 0 or 1 correctly. responses = arma::conv_to>::from((1.0 / (1.0 + arma::exp(-parameters(0) - predictors.t() * parameters.subvec(1, parameters.n_elem - 1)))) + (1.0 - decisionBoundary)); } template template size_t LogisticRegression::Classify(const VecType& point, const double decisionBoundary) const { return size_t(1.0 / (1.0 + std::exp(-parameters(0) - arma::dot(point, parameters.subvec(1, parameters.n_elem - 1)))) + (1.0 - decisionBoundary)); } template void LogisticRegression::Classify(const MatType& dataset, arma::Row& labels, const double decisionBoundary) const { Predict(dataset, labels, decisionBoundary); } template void LogisticRegression::Classify(const MatType& dataset, arma::mat& probabilities) const { // Set correct size of output matrix. probabilities.set_size(2, dataset.n_cols); probabilities.row(1) = 1.0 / (1.0 + arma::exp(-parameters(0) - dataset.t() * parameters.subvec(1, parameters.n_elem - 1))).t(); probabilities.row(0) = 1.0 - probabilities.row(1); } template double LogisticRegression::ComputeError( const MatType& predictors, const arma::Row& responses) const { // Construct a new error function. LogisticRegressionFunction<> newErrorFunction(predictors, responses, lambda); return newErrorFunction.Evaluate(parameters); } template double LogisticRegression::ComputeAccuracy( const MatType& predictors, const arma::Row& responses, const double decisionBoundary) const { // Predict responses using the current model. arma::Row tempResponses; Predict(predictors, tempResponses, decisionBoundary); // Count the number of responses that were correct. 
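  // (Equivalent one-liner, for reference:
  //    const size_t count = arma::accu(responses == tempResponses);
  //  the explicit loop below computes the same quantity.)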
size_t count = 0; for (size_t i = 0; i < responses.n_elem; i++) { if (responses(i) == tempResponses(i)) count++; } return (double) (count * 100) / responses.n_elem; } template template void LogisticRegression::Serialize( Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(parameters, "parameters"); ar & data::CreateNVP(lambda, "lambda"); } } // namespace regression } // namespace mlpack #endif // MLPACK_METHODS_LOGISTIC_REGRESSION_LOGISTIC_REGRESSION_IMPL_HPP mlpack-2.2.5/src/mlpack/methods/logistic_regression/logistic_regression_main.cpp000066400000000000000000000303661315013601400303340ustar00rootroot00000000000000/** * @file logistic_regression_main.cpp * @author Ryan Curtin * * Main executable for logistic regression. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "logistic_regression.hpp" #include #include using namespace std; using namespace mlpack; using namespace mlpack::regression; using namespace mlpack::optimization; PROGRAM_INFO("L2-regularized Logistic Regression and Prediction", "An implementation of L2-regularized logistic regression using either the " "L-BFGS optimizer or SGD (stochastic gradient descent). This solves the " "regression problem" "\n\n" " y = (1 / 1 + e^-(X * b))" "\n\n" "where y takes values 0 or 1." "\n\n" "This program allows loading a logistic regression model from a file (-i) " "or training a logistic regression model given training data (-t), or both " "those things at once. In addition, this program allows classification on " "a test dataset (-T) and will save the classification results to the given " "output file (-o). The logistic regression model itself may be saved with " "a file specified using the -m option." "\n\n" "The training data given with the -t option should have class labels as its" " last dimension (so, if the training data is in CSV format, labels should " "be the last column). Alternately, the -l (--labels_file) option may be " "used to specify a separate file of labels." "\n\n" "When a model is being trained, there are many options. L2 regularization " "(to prevent overfitting) can be specified with the -l option, and the " "optimizer used to train the model can be specified with the --optimizer " "option. Available options are 'sgd' (stochastic gradient descent), " "'lbfgs' (the L-BFGS optimizer), and 'minibatch-sgd' (minibatch stochastic " "gradient descent). There are also various parameters for the optimizer; " "the --max_iterations parameter specifies the maximum number of allowed " "iterations, and the --tolerance (-e) parameter specifies the tolerance " "for convergence. For the SGD and mini-batch SGD optimizers, the " "--step_size parameter controls the step size taken at each iteration by " "the optimizer. The batch size for mini-batch SGD is controlled with the " "--batch_size (-b) parameter. If the objective function for your data is " "oscillating between Inf and 0, the step size is probably too large. There" " are more parameters for the optimizers, but the C++ interface must be " "used to access these." "\n\n" "For SGD, an iteration refers to a single point, and for mini-batch SGD, an" " iteration refers to a single batch. 
So to take a single pass over the " "dataset with SGD, --max_iterations should be set to the number of points " "in the dataset." "\n\n" "Optionally, the model can be used to predict the responses for another " "matrix of data points, if --test_file is specified. The --test_file " "option can be specified without --input_file, so long as an existing " "logistic regression model is given with --model_file. The output " "predictions from the logistic regression model are stored in the file " "given with --output_predictions." "\n\n" "This implementation of logistic regression does not support the general " "multi-class case but instead only the two-class case. Any responses must " "be either 0 or 1."); // Training parameters. PARAM_STRING_IN("training_file", "A file containing the training set (the " "matrix of predictors, X).", "t", ""); PARAM_STRING_IN("labels_file", "A file containing labels (0 or 1) for the " "points in the training set (y).", "l", ""); // Optimizer parameters. PARAM_DOUBLE_IN("lambda", "L2-regularization parameter for training.", "L", 0.0); PARAM_STRING_IN("optimizer", "Optimizer to use for training ('lbfgs' or " "'sgd').", "O", "lbfgs"); PARAM_DOUBLE_IN("tolerance", "Convergence tolerance for optimizer.", "e", 1e-10); PARAM_INT_IN("max_iterations", "Maximum iterations for optimizer (0 indicates " "no limit).", "n", 10000); PARAM_DOUBLE_IN("step_size", "Step size for SGD and mini-batch SGD optimizers.", "s", 0.01); PARAM_INT_IN("batch_size", "Batch size for mini-batch SGD.", "b", 50); // Model loading/saving. PARAM_STRING_IN("input_model_file", "File containing existing model " "(parameters).", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained logistic regression" " model to.", "M"); // Testing. PARAM_STRING_IN("test_file", "File containing test dataset.", "T", ""); PARAM_STRING_OUT("output_file", "If --test_file is specified, this file is " "where the predictions for the test set will be saved.", "o"); PARAM_STRING_OUT("output_probabilities_file", "If --test_file is specified, " "this file is where the class probabilities for the test set will be " "saved.", "p"); PARAM_DOUBLE_IN("decision_boundary", "Decision boundary for prediction; if the " "logistic function for a point is less than the boundary, the class is " "taken to be 0; otherwise, the class is 1.", "d", 0.5); int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); // Collect command-line options. const string trainingFile = CLI::GetParam("training_file"); const string labelsFile = CLI::GetParam("labels_file"); const double lambda = CLI::GetParam("lambda"); const string optimizerType = CLI::GetParam("optimizer"); const double tolerance = CLI::GetParam("tolerance"); const double stepSize = CLI::GetParam("step_size"); const size_t batchSize = (size_t) CLI::GetParam("batch_size"); const size_t maxIterations = (size_t) CLI::GetParam("max_iterations"); const string inputModelFile = CLI::GetParam("input_model_file"); const string outputModelFile = CLI::GetParam("output_model_file"); const string testFile = CLI::GetParam("test_file"); const string outputFile = CLI::GetParam("output_file"); const string outputProbabilitiesFile = CLI::GetParam("output_probabilities_file"); const double decisionBoundary = CLI::GetParam("decision_boundary"); // One of inputFile and modelFile must be specified. if (trainingFile.empty() && inputModelFile.empty()) Log::Fatal << "One of --input_model or --training_file must be specified." 
<< endl; // If no output file is given, the user should know that the model will not be // saved, but only if a model is being trained. if (outputModelFile.empty() && !trainingFile.empty()) Log::Warn << "--output_model_file not given; trained model will not be " << "saved." << endl; // Tolerance needs to be positive. if (tolerance < 0.0) Log::Fatal << "Tolerance must be positive (received " << tolerance << ")." << endl; // Optimizer has to be L-BFGS or SGD. if (optimizerType != "lbfgs" && optimizerType != "sgd" && optimizerType != "minibatch-sgd") Log::Fatal << "--optimizer must be 'lbfgs', 'sgd', or 'minibatch-sgd'." << endl; // Lambda must be positive. if (lambda < 0.0) Log::Fatal << "L2-regularization parameter (--lambda) must be positive (" << "received " << lambda << ")." << endl; // Decision boundary must be between 0 and 1. if (decisionBoundary < 0.0 || decisionBoundary > 1.0) Log::Fatal << "Decision boundary (--decision_boundary) must be between 0.0 " << "and 1.0 (received " << decisionBoundary << ")." << endl; if ((stepSize < 0.0) && (optimizerType == "sgd" || optimizerType == "minibatch-sgd")) Log::Fatal << "Step size (--step_size) must be positive (received " << stepSize << ")." << endl; if (CLI::HasParam("step_size") && optimizerType == "lbfgs") Log::Warn << "Step size (--step_size) ignored because 'sgd' optimizer is " << "not being used." << endl; if (CLI::HasParam("batch_size") && optimizerType != "minibatch-sgd") Log::Warn << "Batch size (--batch_size) ignored because 'minibatch-sgd' " << "optimizer is not being used." << endl; // These are the matrices we might use. arma::mat regressors; arma::Mat responsesMat; arma::Row responses; arma::mat testSet; arma::Row predictions; // Load data matrix. if (!trainingFile.empty()) data::Load(trainingFile, regressors, true); // Load the model, if necessary. LogisticRegression<> model(0, 0); // Empty model. if (!inputModelFile.empty()) { data::Load(inputModelFile, "logistic_regression_model", model); } else { // Set the size of the parameters vector, if necessary. if (labelsFile.empty()) model.Parameters() = arma::zeros(regressors.n_rows - 1); else model.Parameters() = arma::zeros(regressors.n_rows); } // Check if the responses are in a separate file. if (!trainingFile.empty() && !labelsFile.empty()) { data::Load(labelsFile, responsesMat, true); if (responsesMat.n_cols == 1) responses = responsesMat.col(0).t(); else responses = responsesMat.row(0); if (responses.n_cols != regressors.n_cols) Log::Fatal << "The labels (--labels_file) must have the same number of " << "points as the training dataset (--training_file)." << endl; } else if (!trainingFile.empty()) { // The initial predictors for y, Nx1. responses = arma::conv_to>::from( regressors.row(regressors.n_rows - 1)); regressors.shed_row(regressors.n_rows - 1); } // Verify the labels. if (!trainingFile.empty() && max(responses) > 1) Log::Fatal << "The labels must be either 0 or 1, not " << max(responses) << "!" << endl; // Now, do the training. if (!trainingFile.empty()) { LogisticRegressionFunction<> lrf(regressors, responses, model.Parameters()); if (optimizerType == "sgd") { SGD> sgdOpt(lrf); sgdOpt.MaxIterations() = maxIterations; sgdOpt.Tolerance() = tolerance; sgdOpt.StepSize() = stepSize; Log::Info << "Training model with SGD optimizer." << endl; // This will train the model. 
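    // (Illustrative: this SGD branch corresponds to a command line such as
    //
    //   $ mlpack_logistic_regression -t train.csv -l labels.csv \
    //       --optimizer sgd -s 0.01 -n 100000 -M model.xml
    //
    // where the file names are hypothetical and the binary name assumes the
    // usual "mlpack_" prefix.)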
model.Train(sgdOpt); } else if (optimizerType == "lbfgs") { L_BFGS> lbfgsOpt(lrf); lbfgsOpt.MaxIterations() = maxIterations; lbfgsOpt.MinGradientNorm() = tolerance; Log::Info << "Training model with L-BFGS optimizer." << endl; // This will train the model. model.Train(lbfgsOpt); } else if (optimizerType == "minibatch-sgd") { MiniBatchSGD> mbsgdOpt(lrf); mbsgdOpt.BatchSize() = batchSize; mbsgdOpt.Tolerance() = tolerance; mbsgdOpt.StepSize() = stepSize; mbsgdOpt.MaxIterations() = maxIterations; Log::Info << "Training model with mini-batch SGD optimizer (batch size " << batchSize << ")." << endl; model.Train(mbsgdOpt); } } if (!testFile.empty()) { data::Load(testFile, testSet, true); // We must perform predictions on the test set. Training (and the // optimizer) are irrelevant here; we'll pass in the model we have. if (!outputFile.empty()) { Log::Info << "Predicting classes of points in '" << testFile << "'." << endl; model.Classify(testSet, predictions, decisionBoundary); data::Save(outputFile, predictions, false); } if (!outputProbabilitiesFile.empty()) { Log::Info << "Calculating class probabilities of points in '" << testFile << "'." << endl; arma::mat probabilities; model.Classify(testSet, probabilities); data::Save(outputProbabilitiesFile, probabilities, false); } } if (!outputModelFile.empty()) { Log::Info << "Saving model to '" << outputModelFile << "'." << endl; data::Save(outputModelFile, "logistic_regression_model", model, false); } } mlpack-2.2.5/src/mlpack/methods/lsh/000077500000000000000000000000001315013601400172505ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/lsh/CMakeLists.txt000066400000000000000000000011451315013601400220110ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES # LSH-search class lsh_search.hpp lsh_search_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) # The code to compute the approximate neighbor for the given query and reference # sets with p-stable LSH. add_cli_executable(lsh) mlpack-2.2.5/src/mlpack/methods/lsh/lsh_main.cpp000066400000000000000000000212201315013601400215430ustar00rootroot00000000000000/** * @file lsh_main.cpp * @author Parikshit Ram * * This file computes the approximate nearest-neighbors using 2-stable * Locality-sensitive Hashing. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include "lsh_search.hpp" using namespace std; using namespace mlpack; using namespace mlpack::neighbor; // Information about the program itself. PROGRAM_INFO("All K-Approximate-Nearest-Neighbor Search with LSH", "This program will calculate the k approximate-nearest-neighbors of a set " "of points using locality-sensitive hashing. You may specify a separate set" " of reference points and query points, or just a reference set which will " "be used as both the reference and query set. 
" "\n\n" "For example, the following will return 5 neighbors from the data for each " "point in 'input.csv' and store the distances in 'distances.csv' and the " "neighbors in the file 'neighbors.csv':" "\n\n" "$ lsh -k 5 -r input.csv -d distances.csv -n neighbors.csv " "\n\n" "The output files are organized such that row i and column j in the " "neighbors output file corresponds to the index of the point in the " "reference set which is the i'th nearest neighbor from the point in the " "query set with index j. Row i and column j in the distances output file " "corresponds to the distance between those two points." "\n\n" "Because this is approximate-nearest-neighbors search, results may be " "different from run to run. Thus, the --seed option can be specified to " "set the random seed."); // Define our input parameters that this program will take. PARAM_STRING_IN("reference_file", "File containing the reference dataset.", "r", ""); PARAM_STRING_OUT("distances_file", "File to output distances into.", "d"); PARAM_STRING_OUT("neighbors_file", "File to output neighbors into.", "n"); // We can load or save models. PARAM_STRING_IN("input_model_file", "File to load LSH model from. (Cannot be " "specified with --reference_file.)", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save LSH model to.", "M"); // For testing recall. PARAM_STRING_IN("true_neighbors_file", "File of true neighbors to compute " "recall with (the recall is printed when -v is specified).", "t", ""); PARAM_INT_IN("k", "Number of nearest neighbors to find.", "k", 0); PARAM_STRING_IN("query_file", "File containing query points (optional).", "q", ""); PARAM_INT_IN("projections", "The number of hash functions for each table", "K", 10); PARAM_INT_IN("tables", "The number of hash tables to be used.", "L", 30); PARAM_DOUBLE_IN("hash_width", "The hash width for the first-level hashing in " "the LSH preprocessing. By default, the LSH class automatically estimates " "a hash width for its use.", "H", 0.0); PARAM_INT_IN("num_probes", "Number of additional probes for multiprobe LSH; if " "0, traditional LSH is used.", "T", 0); PARAM_INT_IN("second_hash_size", "The size of the second level hash table.", "S", 99901); PARAM_INT_IN("bucket_size", "The size of a bucket in the second level hash.", "B", 500); PARAM_INT_IN("seed", "Random seed. If 0, 'std::time(NULL)' is used.", "s", 0); int main(int argc, char *argv[]) { // Give CLI the command line parameters the user passed in. CLI::ParseCommandLine(argc, argv); if (CLI::GetParam("seed") != 0) math::RandomSeed((size_t) CLI::GetParam("seed")); else math::RandomSeed((size_t) time(NULL)); // Get all the parameters. const string referenceFile = CLI::GetParam("reference_file"); const string distancesFile = CLI::GetParam("distances_file"); const string neighborsFile = CLI::GetParam("neighbors_file"); const string inputModelFile = CLI::GetParam("input_model_file"); const string outputModelFile = CLI::GetParam("output_model_file"); size_t k = CLI::GetParam("k"); size_t secondHashSize = CLI::GetParam("second_hash_size"); size_t bucketSize = CLI::GetParam("bucket_size"); if (CLI::HasParam("input_model_file") && CLI::HasParam("reference_file")) { Log::Fatal << "Cannot specify both --reference_file and --input_model_file!" << " Either create a new model with --reference_file or use an existing" << " model with --input_model_file." << endl; } if (!CLI::HasParam("input_model_file") && !CLI::HasParam("reference_file")) { Log::Fatal << "Must specify either --input_model_file or --reference_file!" 
<< endl; } if (!CLI::HasParam("neighbors_file") && !CLI::HasParam("distances_file") && !CLI::HasParam("output_model_file")) { Log::Warn << "Neither --neighbors_file, --distances_file, nor " << "--output_model_file are specified; no results will be saved." << endl; } if ((CLI::HasParam("query_file") && !CLI::HasParam("k")) || (!CLI::HasParam("query_file") && !CLI::HasParam("reference_file") && CLI::HasParam("k"))) { Log::Fatal << "Both --query_file or --reference_file and --k must be " << "specified if search is to be done!" << endl; } if (CLI::HasParam("input_model_file") && CLI::HasParam("k") && !CLI::HasParam("query_file")) { Log::Info << "Performing LSH-based approximate nearest neighbor search on " << "the reference dataset in the model stored in '" << inputModelFile << "'." << endl; } // These declarations are here so that the matrices don't go out of scope. arma::mat referenceData; arma::mat queryData; // Pick up the LSH-specific parameters. const size_t numProj = CLI::GetParam("projections"); const size_t numTables = CLI::GetParam("tables"); const double hashWidth = CLI::GetParam("hash_width"); const size_t numProbes = (size_t) CLI::GetParam("num_probes"); arma::Mat neighbors; arma::mat distances; if (hashWidth == 0.0) Log::Info << "Using LSH with " << numProj << " projections (K) and " << numTables << " tables (L) with default hash width." << endl; else Log::Info << "Using LSH with " << numProj << " projections (K) and " << numTables << " tables (L) with hash width(r): " << hashWidth << endl; LSHSearch<> allkann; if (CLI::HasParam("reference_file")) { data::Load(referenceFile, referenceData, true); Log::Info << "Loaded reference data from '" << referenceFile << "' (" << referenceData.n_rows << " x " << referenceData.n_cols << ")." << endl; Timer::Start("hash_building"); allkann.Train(referenceData, numProj, numTables, hashWidth, secondHashSize, bucketSize); Timer::Stop("hash_building"); } else if (CLI::HasParam("input_model_file")) { data::Load(inputModelFile, "lsh_model", allkann, true); // Fatal on fail. } if (CLI::HasParam("k")) { Log::Info << "Computing " << k << " distance approximate nearest neighbors." << endl; if (CLI::HasParam("query_file")) { if (CLI::GetParam("query_file") != "") { string queryFile = CLI::GetParam("query_file"); data::Load(queryFile, queryData, true); Log::Info << "Loaded query data from '" << queryFile << "' (" << queryData.n_rows << " x " << queryData.n_cols << ")." << endl; } allkann.Search(queryData, k, neighbors, distances, 0, numProbes); } else { allkann.Search(k, neighbors, distances, 0, numProbes); } } Log::Info << "Neighbors computed." << endl; // Compute recall, if desired. if (CLI::HasParam("true_neighbors_file")) { const string trueNeighborsFile = CLI::GetParam("true_neighbors_file"); // Load the true neighbors. arma::Mat trueNeighbors; data::Load(trueNeighborsFile, trueNeighbors, true); Log::Info << "Loaded true neighbor indices from '" << trueNeighborsFile << "'." << endl; // Compute recall and print it. double recallPercentage = 100 * allkann.ComputeRecall(neighbors, trueNeighbors); Log::Info << "Recall: " << recallPercentage << endl; } // Save output, if desired. 
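  // (Per the PROGRAM_INFO text above: in the saved matrices, row i and column
  // j hold the index of the i'th nearest neighbor of query point j, and the
  // corresponding distance, respectively.)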
if (CLI::HasParam("distances_file")) data::Save(distancesFile, distances); if (CLI::HasParam("neighbors_file")) data::Save(neighborsFile, neighbors); if (CLI::HasParam("output_model_file")) data::Save(outputModelFile, "lsh_model", allkann); } mlpack-2.2.5/src/mlpack/methods/lsh/lsh_search.hpp000066400000000000000000000444661315013601400221120ustar00rootroot00000000000000/** * @file lsh_search.hpp * @author Parikshit Ram * * Defines the LSHSearch class, which performs an approximate * nearest neighbor search for a queries in a query set * over a given dataset using Locality-sensitive hashing * with 2-stable distributions. * * The details of this method can be found in the following paper: * * @inproceedings{datar2004locality, * title={Locality-sensitive hashing scheme based on p-stable distributions}, * author={Datar, M. and Immorlica, N. and Indyk, P. and Mirrokni, V.S.}, * booktitle= * {Proceedings of the 12th Annual Symposium on Computational Geometry}, * pages={253--262}, * year={2004}, * organization={ACM} * } * * Additionally, the class implements Multiprobe LSH, which improves * approximation results during the search for approximate nearest neighbors. * The Multiprobe LSH algorithm was presented in the paper: * * @inproceedings{Lv2007multiprobe, * tile={Multi-probe LSH: efficient indexing for high-dimensional similarity * search}, * author={Lv, Qin and Josephson, William and Wang, Zhe and Charikar, Moses and * Li, Kai}, * booktitle={Proceedings of the 33rd international conference on Very large * data bases}, * year={2007}, * pages={950--961} * } * * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_LSH_SEARCH_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_LSH_SEARCH_HPP #include #include #include namespace mlpack { namespace neighbor { /** * The LSHSearch class; this class builds a hash on the reference set and uses * this hash to compute the distance-approximate nearest-neighbors of the given * queries. * * @tparam SortPolicy The sort policy for distances; see NearestNeighborSort. */ template class LSHSearch { public: /** * This function initializes the LSH class. It builds the hash on the * reference set with 2-stable distributions. See the individual functions * performing the hashing for details on how the hashing is done. * * @param referenceSet Set of reference points and the set of queries. * @param projections Cube of projection tables. For a cube of size (a, b, c) * we set numProj = a, numTables = c. b is the reference set * dimensionality. * @param hashWidth The width of hash for every table. If 0 (the default) is * provided, then the hash width is automatically obtained by computing * the average pairwise distance of 25 pairs. This should be a reasonable * upper bound on the nearest-neighbor distance in general. * @param secondHashSize The size of the second hash table. This should be a * large prime number. * @param bucketSize The size of the bucket in the second hash table. This is * the maximum number of points that can be hashed into single bucket. A * value of 0 indicates that there is no limit (so the second hash table * can be arbitrarily large---be careful!). 
*/ LSHSearch(const arma::mat& referenceSet, const arma::cube& projections, const double hashWidth = 0.0, const size_t secondHashSize = 99901, const size_t bucketSize = 500); /** * This function initializes the LSH class. It builds the hash one the * reference set using the provided projections. See the individual functions * performing the hashing for details on how the hashing is done. * * @param referenceSet Set of reference points and the set of queries. * @param numProj Number of projections in each hash table (anything between * 10-50 might be a decent choice). * @param numTables Total number of hash tables (anything between 10-20 * should suffice). * @param hashWidth The width of hash for every table. If 0 (the default) is * provided, then the hash width is automatically obtained by computing * the average pairwise distance of 25 pairs. This should be a reasonable * upper bound on the nearest-neighbor distance in general. * @param secondHashSize The size of the second hash table. This should be a * large prime number. * @param bucketSize The size of the bucket in the second hash table. This is * the maximum number of points that can be hashed into single bucket. A * value of 0 indicates that there is no limit (so the second hash table * can be arbitrarily large---be careful!). */ LSHSearch(const arma::mat& referenceSet, const size_t numProj, const size_t numTables, const double hashWidth = 0.0, const size_t secondHashSize = 99901, const size_t bucketSize = 500); /** * Create an untrained LSH model. Be sure to call Train() before calling * Search(); otherwise, an exception will be thrown when Search() is called. */ LSHSearch(); /** * Clean memory. */ ~LSHSearch(); /** * Train the LSH model on the given dataset. If a correctly-sized projection * cube is not provided, this means building new hash tables. Otherwise, we * use the projections provided by the user. * * @param referenceSet Set of reference points and the set of queries. * @param numProj Number of projections in each hash table (anything between * 10-50 might be a decent choice). * @param numTables Total number of hash tables (anything between 10-20 * should suffice). * @param hashWidth The width of hash for every table. If 0 (the default) is * provided, then the hash width is automatically obtained by computing * the average pairwise distance of 25 pairs. This should be a reasonable * upper bound on the nearest-neighbor distance in general. * @param secondHashSize The size of the second hash table. This should be a * large prime number. * @param bucketSize The size of the bucket in the second hash table. This is * the maximum number of points that can be hashed into single bucket. A * value of 0 indicates that there is no limit (so the second hash table * can be arbitrarily large---be careful!). * @param projections Cube of projection tables. For a cube of size (a, b, c) * we set numProj = a, numTables = c. b is the reference set * dimensionality. */ void Train(const arma::mat& referenceSet, const size_t numProj, const size_t numTables, const double hashWidth = 0.0, const size_t secondHashSize = 99901, const size_t bucketSize = 500, const arma::cube& projection = arma::cube()); /** * Compute the nearest neighbors of the points in the given query set and * store the output in the given matrices. The matrices will be set to the * size of n columns by k rows, where n is the number of points in the query * dataset and k is the number of neighbors being searched for. * * @param querySet Set of query points. 
* @param k Number of neighbors to search for. * @param resultingNeighbors Matrix storing lists of neighbors for each query * point. * @param distances Matrix storing distances of neighbors for each query * point. * @param numTablesToSearch This parameter allows the user to have control * over the number of hash tables to be searched. This allows * the user to pick the number of tables it can afford for the time * available without having to build hashing for every table size. * By default, this is set to zero in which case all tables are * considered. * @param T The number of additional probing bins to examine with multiprobe * LSH. If T = 0, classic single-probe LSH is run (default). */ void Search(const arma::mat& querySet, const size_t k, arma::Mat& resultingNeighbors, arma::mat& distances, const size_t numTablesToSearch = 0, const size_t T = 0); /** * Compute the nearest neighbors and store the output in the given matrices. * The matrices will be set to the size of n columns by k rows, where n is * the number of points in the query dataset and k is the number of neighbors * being searched for. * * @param k Number of neighbors to search for. * @param resultingNeighbors Matrix storing lists of neighbors for each query * point. * @param distances Matrix storing distances of neighbors for each query * point. * @param numTablesToSearch This parameter allows the user to have control * over the number of hash tables to be searched. This allows * the user to pick the number of tables it can afford for the time * available without having to build hashing for every table size. * By default, this is set to zero in which case all tables are * considered. */ void Search(const size_t k, arma::Mat& resultingNeighbors, arma::mat& distances, const size_t numTablesToSearch = 0, size_t T = 0); /** * Compute the recall (% of neighbors found) given the neighbors returned by * LSHSearch::Search and a "ground truth" set of neighbors. The recall * returned will be in the range [0, 1]. * * @param foundNeighbors Set of neighbors to compute recall of. * @param realNeighbors Set of "ground truth" neighbors to compute recall * against. */ static double ComputeRecall(const arma::Mat& foundNeighbors, const arma::Mat& realNeighbors); /** * Serialize the LSH model. * * @param ar Archive to serialize to. */ template void Serialize(Archive& ar, const unsigned int version); //! Return the number of distance evaluations performed. size_t DistanceEvaluations() const { return distanceEvaluations; } //! Modify the number of distance evaluations performed. size_t& DistanceEvaluations() { return distanceEvaluations; } //! Return the reference dataset. const arma::mat& ReferenceSet() const { return *referenceSet; } //! Get the number of projections. size_t NumProjections() const { return projections.n_slices; } //! Get the offsets 'b' for each of the projections. (One 'b' per column.) const arma::mat& Offsets() const { return offsets; } //! Get the weights of the second hash. const arma::vec& SecondHashWeights() const { return secondHashWeights; } //! Get the bucket size of the second hash. size_t BucketSize() const { return bucketSize; } //! Get the second hash table. const std::vector>& SecondHashTable() const { return secondHashTable; } //! Get the projection tables. const arma::cube& Projections() { return projections; } //! Change the projection tables (this retrains the LSH model). void Projections(const arma::cube& projTables) { // Simply call Train() with the given projection tables. 
Train(*referenceSet, numProj, numTables, hashWidth, secondHashSize, bucketSize, projTables); } private: /** * This function takes a query and hashes it into each of the hash tables to * get keys for the query and then the key is hashed to a bucket of the second * hash table and all the points (if any) in those buckets are collected as * the potential neighbor candidates. * * @param queryPoint The query point currently being processed. * @param referenceIndices The list of neighbor candidates obtained from * hashing the query into all the hash tables and eventually into * multiple buckets of the second hash table. * @param numTablesToSearch The number of tables to perform the search in. If * 0, all tables are searched. * @param T The number of additional probing bins for multiprobe LSH. If 0, * single-probe is used. */ template void ReturnIndicesFromTable(const VecType& queryPoint, arma::uvec& referenceIndices, size_t numTablesToSearch, const size_t T) const; /** * This is a helper function that computes the distance of the query to the * neighbor candidates and appropriately stores the best 'k' candidates. This * is specific to the monochromatic search case, where the query set is the * reference set. * * @param queryIndex The index of the query in question * @param referenceIndices The vector of indices of candidate neighbors for * the query. * @param k Number of neighbors to search for. * @param neighbors Matrix holding output neighbors. * @param distances Matrix holding output distances. */ void BaseCase(const size_t queryIndex, const arma::uvec& referenceIndices, const size_t k, arma::Mat& neighbors, arma::mat& distances) const; /** * This is a helper function that computes the distance of the query to the * neighbor candidates and appropriately stores the best 'k' candidates. This * is specific to bichromatic search, where the query set is not the same as * the reference set. * * @param queryIndex The index of the query in question * @param referenceIndices The vector of indices of candidate neighbors for * the query. * @param k Number of neighbors to search for. * @param querySet Set of query points. * @param neighbors Matrix holding output neighbors. * @param distances Matrix holding output distances. */ void BaseCase(const size_t queryIndex, const arma::uvec& referenceIndices, const size_t k, const arma::mat& querySet, arma::Mat& neighbors, arma::mat& distances) const; /** * This function implements the core idea behind Multiprobe LSH. It is called * by ReturnIndicesFromTables when T > 0. Given a query's code and its * projection location, GetAdditionalProbingBins will calculate the T most * likely alternative bin codes (other than queryCode) where a query's * neighbors might be found in. * * @param queryCode vector containing the numProj-dimensional query code. * @param queryCodeNotFloored vector containing the projection location of the * query. * @param T number of additional probing bins. * @param additionalProbingBins matrix. Each column will hold one additional * bin. */ void GetAdditionalProbingBins(const arma::vec& queryCode, const arma::vec& queryCodeNotFloored, const size_t T, arma::mat& additionalProbingBins) const; /** * Returns the score of a perturbation vector generated by perturbation set A. * The score of a pertubation set (vector) is the sum of scores of the * participating actions. * @param A perturbation set to compute the score of. * @param scores vector containing score of each perturbation. 
 */
  double PerturbationScore(const std::vector<bool>& A,
                           const arma::vec& scores) const;

  /**
   * Inline function used by GetAdditionalProbingBins. The vector shift
   * operation replaces the largest element of a vector A with (largest
   * element) + 1. Returns true if the resulting vector is valid, otherwise
   * false.
   *
   * @param A perturbation set to shift.
   */
  bool PerturbationShift(std::vector<bool>& A) const;

  /**
   * Inline function used by GetAdditionalProbingBins. The vector expansion
   * operation adds the element [1 + (largest_element)] to a vector A, where
   * largest_element is the largest element of A. Returns true if the
   * resulting vector is valid, otherwise false.
   *
   * @param A perturbation set to expand.
   */
  bool PerturbationExpand(std::vector<bool>& A) const;

  /**
   * Return true if perturbation set A is valid. A perturbation set is invalid
   * if it contains two (or more) actions for the same dimension, or dimensions
   * that are larger than the queryCode's dimensions.
   *
   * @param A perturbation set to validate.
   */
  bool PerturbationValid(const std::vector<bool>& A) const;

  //! Reference dataset.
  const arma::mat* referenceSet;
  //! If true, we own the reference set.
  bool ownsSet;

  //! The number of projections.
  size_t numProj;
  //! The number of hash tables.
  size_t numTables;

  //! The arma::cube containing the projection matrix of each table.
  arma::cube projections; // should be [numProj x dims] x numTables slices

  //! The list of the offsets 'b' for each of the projections for each table.
  arma::mat offsets; // should be numProj x numTables

  //! The hash width.
  double hashWidth;

  //! The big prime representing the size of the second hash.
  size_t secondHashSize;

  //! The weights of the second hash.
  arma::vec secondHashWeights;

  //! The bucket size of the second hash.
  size_t bucketSize;

  //! The final hash table; should be (< secondHashSize) vectors each with
  //! (<= bucketSize) elements.
  std::vector<arma::Col<size_t>> secondHashTable;

  //! The number of elements present in each hash bucket; should be
  //! secondHashSize.
  arma::Col<size_t> bucketContentSize;

  //! For a particular hash value, points to the row in secondHashTable
  //! corresponding to this value. Length secondHashSize.
  arma::Col<size_t> bucketRowInHashTable;

  //! The number of distance evaluations.
  size_t distanceEvaluations;

  //! Candidate represents a possible candidate neighbor (distance, index).
  typedef std::pair<double, size_t> Candidate;

  //! Compare two candidates based on the distance.
  struct CandidateCmp
  {
    bool operator()(const Candidate& c1, const Candidate& c2)
    {
      return !SortPolicy::IsBetter(c2.first, c1.first);
    };
  };

  //! Use a priority queue to represent the list of candidate neighbors.
  typedef std::priority_queue<Candidate, std::vector<Candidate>,
      CandidateCmp> CandidateList;
}; // class LSHSearch

} // namespace neighbor
} // namespace mlpack

//! Set the serialization version of the LSHSearch class.
BOOST_TEMPLATE_CLASS_VERSION(template<typename SortPolicy>,
    mlpack::neighbor::LSHSearch<SortPolicy>, 1);

// Include implementation.
#include "lsh_search_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/methods/lsh/lsh_search_impl.hpp000066400000000000000000001142421315013601400231210ustar00rootroot00000000000000/**
 * @file lsh_search_impl.hpp
 * @author Parikshit Ram
 *
 * Implementation of the LSHSearch class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
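 *
 * At a glance, the scheme implemented below hashes a point x into table i as
 *   h_i(x) = floor((P_i^T x + b_i) / hashWidth),
 * where P_i is table i's projection matrix and b_i its offset vector, and
 * then maps that integer key into a fixed-size second-level table via a
 * random weighted sum taken modulo secondHashSize.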
*/ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_LSH_SEARCH_IMPL_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_LSH_SEARCH_IMPL_HPP #include #include namespace mlpack { namespace neighbor { // Construct the object with random tables template LSHSearch:: LSHSearch(const arma::mat& referenceSet, const size_t numProj, const size_t numTables, const double hashWidthIn, const size_t secondHashSize, const size_t bucketSize) : referenceSet(NULL), // This will be set in Train(). ownsSet(false), numProj(numProj), numTables(numTables), hashWidth(hashWidthIn), secondHashSize(secondHashSize), bucketSize(bucketSize), distanceEvaluations(0) { // Pass work to training function. Train(referenceSet, numProj, numTables, hashWidthIn, secondHashSize, bucketSize); } // Construct the object with given tables template LSHSearch:: LSHSearch(const arma::mat& referenceSet, const arma::cube& projections, const double hashWidthIn, const size_t secondHashSize, const size_t bucketSize) : referenceSet(NULL), // This will be set in Train(). ownsSet(false), numProj(projections.n_cols), numTables(projections.n_slices), hashWidth(hashWidthIn), secondHashSize(secondHashSize), bucketSize(bucketSize), distanceEvaluations(0) { // Pass work to training function Train(referenceSet, numProj, numTables, hashWidthIn, secondHashSize, bucketSize, projections); } // Empty constructor. template LSHSearch::LSHSearch() : referenceSet(new arma::mat()), // Use an empty dataset. ownsSet(true), numProj(0), numTables(0), hashWidth(0), secondHashSize(99901), bucketSize(500), distanceEvaluations(0) { } // Destructor. template LSHSearch::~LSHSearch() { if (ownsSet) delete referenceSet; } // Train on a new reference set. template void LSHSearch::Train(const arma::mat& referenceSet, const size_t numProj, const size_t numTables, const double hashWidthIn, const size_t secondHashSize, const size_t bucketSize, const arma::cube &projection) { // Set new reference set. if (this->referenceSet && ownsSet) delete this->referenceSet; this->referenceSet = &referenceSet; this->ownsSet = false; // Set new parameters. this->numProj = numProj; this->numTables = numTables; this->hashWidth = hashWidthIn; this->secondHashSize = secondHashSize; this->bucketSize = bucketSize; if (hashWidth == 0.0) // The user has not provided any value. { const size_t numSamples = 25; // Compute a heuristic hash width from the data. for (size_t i = 0; i < numSamples; i++) { size_t p1 = (size_t) math::RandInt(referenceSet.n_cols); size_t p2 = (size_t) math::RandInt(referenceSet.n_cols); hashWidth += std::sqrt(metric::EuclideanDistance::Evaluate( referenceSet.unsafe_col(p1), referenceSet.unsafe_col(p2))); } hashWidth /= numSamples; } Log::Info << "Hash width chosen as: " << hashWidth << std::endl; // Hash building procedure: // The first level hash for a single table outputs a 'numProj'-dimensional // integer key for each point in the set -- (key, pointID). The key creation // details are presented below. // Step I: Prepare the second level hash. // Obtain the weights for the second hash. secondHashWeights = arma::floor(arma::randu(numProj) * (double) secondHashSize); // Instead of putting the points in the row corresponding to the bucket, we // chose the next empty row and keep track of the row in which the bucket // lies. This allows us to stack together and slice out the empty buckets at // the end of the hashing. bucketRowInHashTable.set_size(secondHashSize); bucketRowInHashTable.fill(secondHashSize); // Step II: The offsets for all projections in all tables. 
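  // One offset is drawn per (projection, table) pair; drawing the offsets
  // uniformly from [0, hashWidth] randomizes where the bucket boundaries
  // fall, as required by the standard LSH hash family
  // h(x) = floor((<a, x> + b) / hashWidth).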
// Since the 'offsets' are in [0, hashWidth], we obtain the 'offsets' // as randu(numProj, numTables) * hashWidth. offsets.randu(numProj, numTables); offsets *= hashWidth; // Step III: Obtain the 'numProj' projections for each table. projections.clear(); // Reset projections vector. if (projection.n_slices == 0) // Randomly generate the tables. { // For L2 metric, 2-stable distributions are used, and the normal Z ~ N(0, // 1) is a 2-stable distribution. // Build numTables random tables arranged in a cube. projections.randn(referenceSet.n_rows, numProj, numTables); } else if (projection.n_slices == numTables) // Take user-defined tables. { projections = projection; } else // The user gave something wrong. { throw std::invalid_argument("LSHSearch::Train(): number of projection " "tables provided must be equal to numProj"); } // We will store the second hash vectors in this matrix; the second hash // vector for table i will be held in row i. We have to use int and not // size_t, otherwise negative numbers are cast to 0. arma::Mat secondHashVectors(numTables, referenceSet.n_cols); for (size_t i = 0; i < numTables; i++) { // Step IV: create the 'numProj'-dimensional key for each point in each // table. // The following code performs the task of hashing each point to a // 'numProj'-dimensional integer key. Hence you get a ('numProj' x // 'referenceSet.n_cols') key matrix. // // For a single table, let the 'numProj' projections be denoted by 'proj_i' // and the corresponding offset be 'offset_i'. Then the key of a single // point is obtained as: // key = { floor( ( + offset_i) / 'hashWidth' ) forall i } arma::mat offsetMat = arma::repmat(offsets.unsafe_col(i), 1, referenceSet.n_cols); arma::mat hashMat = projections.slice(i).t() * (referenceSet); hashMat += offsetMat; hashMat /= hashWidth; // Step V: Putting the points in the 'secondHashTable' by hashing the key. // Now we hash every key, point ID to its corresponding bucket. We must // also normalize the hashes to the range [0, secondHashSize). arma::rowvec unmodVector = secondHashWeights.t() * arma::floor(hashMat); for (size_t j = 0; j < unmodVector.n_elem; ++j) { double shs = (double) secondHashSize; // Convenience cast. if (unmodVector[j] >= 0.0) { const size_t key = size_t(fmod(unmodVector[j], shs)); secondHashVectors(i, j) = key; } else { const double mod = fmod(-unmodVector[j], shs); const size_t key = (mod < 1.0) ? 0 : secondHashSize - size_t(mod); secondHashVectors(i, j) = key; } } } // Now, using the hash vectors for each table, count the number of rows we // have in the second hash table. arma::Row secondHashBinCounts(secondHashSize, arma::fill::zeros); for (size_t i = 0; i < secondHashVectors.n_elem; ++i) secondHashBinCounts[secondHashVectors[i]]++; // Enforce the maximum bucket size. const size_t effectiveBucketSize = (bucketSize == 0) ? SIZE_MAX : bucketSize; secondHashBinCounts.transform([effectiveBucketSize](size_t val) { return std::min(val, effectiveBucketSize); }); const size_t numRowsInTable = arma::accu(secondHashBinCounts > 0); bucketContentSize.zeros(numRowsInTable); secondHashTable.resize(numRowsInTable); // Next we must assign each point in each table to the right second hash // table. size_t currentRow = 0; for (size_t i = 0; i < numTables; ++i) { // Insert the point in the corresponding row to its bucket in the // 'secondHashTable'. for (size_t j = 0; j < secondHashVectors.n_cols; j++) { // This is the bucket number. size_t hashInd = (size_t) secondHashVectors(i, j); // The point ID is 'j'. 
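      // (A given point can therefore appear once per table in the second-level
      // hash, up to the bucketSize cap enforced below.)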
// If this is currently an empty bucket, start a new row keep track of // which row corresponds to the bucket. const size_t maxSize = secondHashBinCounts[hashInd]; if (bucketRowInHashTable[hashInd] == secondHashSize) { bucketRowInHashTable[hashInd] = currentRow; secondHashTable[currentRow].set_size(maxSize); currentRow++; } // If this vector in the hash table is not full, add the point. const size_t index = bucketRowInHashTable[hashInd]; if (bucketContentSize[index] < maxSize) secondHashTable[index](bucketContentSize[index]++) = j; } // Loop over all points in the reference set. } // Loop over tables. Log::Info << "Final hash table size: " << numRowsInTable << " rows, with a " << "maximum length of " << arma::max(secondHashBinCounts) << ", " << "totaling " << arma::accu(secondHashBinCounts) << " elements." << std::endl; } // Base case where the query set is the reference set. (So, we can't return // ourselves as the nearest neighbor.) template inline force_inline void LSHSearch::BaseCase(const size_t queryIndex, const arma::uvec& referenceIndices, const size_t k, arma::Mat& neighbors, arma::mat& distances) const { // Let's build the list of candidate neighbors for the given query point. // It will be initialized with k candidates: // (WorstDistance, referenceSet->n_cols) const Candidate def = std::make_pair(SortPolicy::WorstDistance(), referenceSet->n_cols); std::vector vect(k, def); CandidateList pqueue(CandidateCmp(), std::move(vect)); for (size_t j = 0; j < referenceIndices.n_elem; ++j) { const size_t referenceIndex = referenceIndices[j]; // If the points are the same, skip this point. if (queryIndex == referenceIndex) continue; const double distance = metric::EuclideanDistance::Evaluate( referenceSet->unsafe_col(queryIndex), referenceSet->unsafe_col(referenceIndex)); Candidate c = std::make_pair(distance, referenceIndex); // If this distance is better than the worst candidate, let's insert it. if (CandidateCmp()(c, pqueue.top())) { pqueue.pop(); pqueue.push(c); } } for (size_t j = 1; j <= k; j++) { neighbors(k - j, queryIndex) = pqueue.top().second; distances(k - j, queryIndex) = pqueue.top().first; pqueue.pop(); } } // Base case for bichromatic search. template inline force_inline void LSHSearch::BaseCase(const size_t queryIndex, const arma::uvec& referenceIndices, const size_t k, const arma::mat& querySet, arma::Mat& neighbors, arma::mat& distances) const { // Let's build the list of candidate neighbors for the given query point. // It will be initialized with k candidates: // (WorstDistance, referenceSet->n_cols) const Candidate def = std::make_pair(SortPolicy::WorstDistance(), referenceSet->n_cols); std::vector vect(k, def); CandidateList pqueue(CandidateCmp(), std::move(vect)); for (size_t j = 0; j < referenceIndices.n_elem; ++j) { const size_t referenceIndex = referenceIndices[j]; const double distance = metric::EuclideanDistance::Evaluate( querySet.unsafe_col(queryIndex), referenceSet->unsafe_col(referenceIndex)); Candidate c = std::make_pair(distance, referenceIndex); // If this distance is better than the worst candidate, let's insert it. 
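    // (The priority queue always holds exactly k candidates, ordered so that
    // the worst of them sits on top; a better candidate simply evicts it, so
    // the queue size stays fixed at k.)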
if (CandidateCmp()(c, pqueue.top())) { pqueue.pop(); pqueue.push(c); } } for (size_t j = 1; j <= k; j++) { neighbors(k - j, queryIndex) = pqueue.top().second; distances(k - j, queryIndex) = pqueue.top().first; pqueue.pop(); } } template inline force_inline double LSHSearch::PerturbationScore( const std::vector& A, const arma::vec& scores) const { double score = 0.0; for (size_t i = 0; i < A.size(); ++i) if (A[i]) score += scores(i); // add scores of non-zero indices return score; } template inline force_inline bool LSHSearch::PerturbationShift(std::vector& A) const { size_t maxPos = 0; for (size_t i = 0; i < A.size(); ++i) if (A[i] == 1) // Marked true. maxPos = i; if (maxPos + 1 < A.size()) // Otherwise, this is an invalid vector. { A[maxPos] = 0; A[maxPos + 1] = 1; return true; // valid } return false; // invalid } template inline force_inline bool LSHSearch::PerturbationExpand(std::vector& A) const { // Find the last '1' in A. size_t maxPos = 0; for (size_t i = 0; i < A.size(); ++i) if (A[i]) // Marked true. maxPos = i; if (maxPos + 1 < A.size()) // Otherwise, this is an invalid vector. { A[maxPos + 1] = 1; return true; } return false; } template inline force_inline bool LSHSearch::PerturbationValid( const std::vector& A) const { // Use check to mark dimensions we have seen before in A. If a dimension is // seen twice (or more), A is not a valid perturbation. std::vector check(numProj); if (A.size() > 2 * numProj) return false; // This should never happen. // Check that we only see each dimension once. If not, vector is not valid. for (size_t i = 0; i < A.size(); ++i) { // Only check dimensions that were included. if (!A[i]) continue; // If dimesnion is unseen thus far, mark it as seen. if (check[i % numProj] == false) check[i % numProj] = true; else return false; // If dimension was seen before, set is not valid. } // If we didn't fail, set is valid. return true; } // Compute additional probing bins for a query template void LSHSearch::GetAdditionalProbingBins( const arma::vec& queryCode, const arma::vec& queryCodeNotFloored, const size_t T, arma::mat& additionalProbingBins) const { // No additional bins requested. Our work is done. if (T == 0) return; // Each column of additionalProbingBins is the code of a bin. additionalProbingBins.set_size(numProj, T); // Copy the query's code, then in the end we will add/subtract according // to perturbations we calculated. for (size_t c = 0; c < T; ++c) additionalProbingBins.col(c) = queryCode; // Calculate query point's projection position. arma::mat projection = queryCodeNotFloored; // Use projection to calculate query's distance from hash limits. arma::vec limLow = projection - queryCode * hashWidth; arma::vec limHigh = hashWidth - limLow; // Calculate scores. score = distance^2. arma::vec scores(2 * numProj); scores.rows(0, numProj - 1) = arma::pow(limLow, 2); scores.rows(numProj, (2 * numProj) - 1) = arma::pow(limHigh, 2); // Actions vector describes what perturbation (-1/+1) corresponds to a score. arma::Col actions(2 * numProj); // will be [-1 ... 1 ...] actions.rows(0, numProj - 1) = // First numProj rows. -1 * arma::ones< arma::Col > (numProj); // -1s actions.rows(numProj, (2 * numProj) - 1) = // Last numProj rows. arma::ones< arma::Col > (numProj); // 1s // Acting dimension vector shows which coordinate to transform according to // actions (actions are described by actions vector above). arma::Col positions(2 * numProj); // Will be [0 1 2 ... 0 1 2 ...]. 
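  // For numProj = 3, for example, the three parallel vectors line up as
  //   scores:    [s(lo0) s(lo1) s(lo2) s(hi0) s(hi1) s(hi2)]
  //   actions:   [  -1     -1     -1     +1     +1     +1  ]
  //   positions: [   0      1      2      0      1      2  ]
  // so a single index j fully describes one (dimension, perturbation) pair.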
positions.rows(0, numProj - 1) = arma::linspace< arma::Col >(0, numProj - 1, numProj); positions.rows(numProj, 2 * numProj - 1) = arma::linspace< arma::Col >(0, numProj - 1, numProj); // Special case: No need to create heap for 1 or 2 codes. if (T <= 2) { // First, find location of minimum score, generate 1 perturbation vector, // and add its code to additionalProbingBins column 0. // Find location and value of smallest element of scores vector. double minscore = scores[0]; size_t minloc = 0; for (size_t s = 1; s < (2 * numProj); ++s) { if (minscore > scores[s]) { minscore = scores[s]; minloc = s; } } // Add or subtract 1 to dimension corresponding to minimum score. additionalProbingBins(positions[minloc], 0) += actions[minloc]; if (T == 1) return; // Done if asked for only 1 code. // Now, find location of second smallest score and generate one more vector. // The second perturbation vector still can't comprise of more than one // change in the bin codes, because of the way perturbation vectors // are generated: First we create the one with the smallest score (Ao) and // then we either add 1 extra dimension to it (Ae) or shift it by one (As). // Since As contains the second smallest score, and Ae contains both the // smallest and the second smallest, it's obvious that score(Ae) > // score(As). Therefore the second perturbation vector is ALWAYS the vector // containing only the second-lowest scoring perturbation. double minscore2 = scores[0]; size_t minloc2 = 0; for (size_t s = 0; s < (2 * numProj); ++s) // here we can't start from 1 { if (minscore2 > scores[s] && s != minloc) //second smallest { minscore2 = scores[s]; minloc2 = s; } } // Add or subtract 1 to create second-lowest scoring vector. additionalProbingBins(positions[minloc2], 1) += actions[minloc2]; return; } // General case: more than 2 perturbation vectors require use of minheap. // Sort everything in increasing order. arma::uvec sortidx = arma::sort_index(scores); scores = scores(sortidx); actions = actions(sortidx); positions = positions(sortidx); // Theory: // A probing sequence is a sequence of T probing bins where a query's // neighbors are most likely to be. Likelihood is dependent only on a bin's // score, which is the sum of scores of all dimension-action pairs, so we // need to calculate the T smallest sums of scores that are not conflicting. // // Method: // Store each perturbation set (pair of (dimension, action)) in a // std::vector. Create a minheap of scores, with each node pointing to its // relevant perturbation set. Each perturbation set popped from the minheap // is the next most likely perturbation set. // Transform perturbation set to perturbation vector by setting the // dimensions specified by the set to queryCode+action (action is {-1, 1}). // Perturbation sets (A) mark with 1 the (score, action, dimension) positions // included in a given perturbation vector. Other spaces are 0. std::vector Ao(2 * numProj); Ao[0] = 1; // Smallest vector includes only smallest score. std::vector< std::vector > perturbationSets; perturbationSets.push_back(Ao); // Storage of perturbation sets. std::priority_queue< std::pair, // contents: pairs of (score, index) std::vector< // container: vector of pairs std::pair >, std::greater< std::pair > // comparator of pairs > minHeap; // our minheap // Start by adding the lowest scoring set to the minheap. 
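  // Each subsequent pop yields the lowest-scoring set not yet emitted;
  // pushing that set's shifted and expanded children back onto the heap keeps
  // the enumeration in nondecreasing score order, which is the standard
  // multiprobe LSH construction.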
minHeap.push( std::make_pair(PerturbationScore(Ao, scores), 0) ); // Loop invariable: after pvec iterations, additionalProbingBins contains pvec // valid codes of the lowest-scoring bins (bins most likely to contain // neighbors of the query). for (size_t pvec = 0; pvec < T; ++pvec) { std::vector Ai; do { // Get the perturbation set corresponding to the minimum score. Ai = perturbationSets[ minHeap.top().second ]; minHeap.pop(); // .top() returns, .pop() removes // Shift operation on Ai (replace max with max+1). std::vector As = Ai; if (PerturbationShift(As) && PerturbationValid(As)) // Don't add invalid sets. { perturbationSets.push_back(As); // add shifted set to sets minHeap.push( std::make_pair(PerturbationScore(As, scores), perturbationSets.size() - 1)); } // Expand operation on Ai (add max+1 to set). std::vector Ae = Ai; if (PerturbationExpand(Ae) && PerturbationValid(Ae)) // Don't add invalid sets. { perturbationSets.push_back(Ae); // add expanded set to sets minHeap.push( std::make_pair(PerturbationScore(Ae, scores), perturbationSets.size() - 1)); } } while (!PerturbationValid(Ai));//Discard invalid perturbations // Found valid perturbation set Ai. Construct perturbation vector from set. for (size_t pos = 0; pos < Ai.size(); ++pos) // If Ai[pos] is marked, add action to probing vector. additionalProbingBins(positions(pos), pvec) += Ai[pos] ? actions(pos) : 0; } } template template void LSHSearch::ReturnIndicesFromTable( const VecType& queryPoint, arma::uvec& referenceIndices, size_t numTablesToSearch, const size_t T) const { // Decide on the number of tables to look into. if (numTablesToSearch == 0) // If no user input is given, search all. numTablesToSearch = numTables; // Sanity check to make sure that the existing number of tables is not // exceeded. if (numTablesToSearch > numTables) numTablesToSearch = numTables; // Hash the query in each of the 'numTablesToSearch' hash tables using the // 'numProj' projections for each table. This gives us 'numTablesToSearch' // keys for the query where each key is a 'numProj' dimensional integer // vector. // Compute the projection of the query in each table. arma::mat allProjInTables(numProj, numTablesToSearch); arma::mat queryCodesNotFloored(numProj, numTablesToSearch); for (size_t i = 0; i < numTablesToSearch; i++) queryCodesNotFloored.unsafe_col(i) = projections.slice(i).t() * queryPoint; queryCodesNotFloored += offsets.cols(0, numTablesToSearch - 1); allProjInTables = arma::floor(queryCodesNotFloored / hashWidth); // Use hashMat to store the primary probing codes and any additional codes // from multiprobe LSH. arma::Mat hashMat; hashMat.set_size(T + 1, numTablesToSearch); // Compute the primary hash value of each key of the query into a bucket of // the secondHashTable using the secondHashWeights. hashMat.row(0) = arma::conv_to> // Floor by typecasting ::from(secondHashWeights.t() * allProjInTables); // Mod to compute 2nd-level codes. for (size_t i = 0; i < numTablesToSearch; i++) hashMat(0, i) = (hashMat(0, i) % secondHashSize); // Compute hash codes of additional probing bins. if (T > 0) { for (size_t i = 0; i < numTablesToSearch; ++i) { // Construct this table's probing sequence of length T. arma::mat additionalProbingBins; GetAdditionalProbingBins(allProjInTables.unsafe_col(i), queryCodesNotFloored.unsafe_col(i), T, additionalProbingBins); // Map each probing bin to a bin in secondHashTable (just like we did for // the primary hash table). 
hashMat(arma::span(1, T), i) = // Compute code of rows 1:end of column i arma::conv_to< arma::Col >:: // floor by typecasting to size_t from( secondHashWeights.t() * additionalProbingBins ); for (size_t p = 1; p < T + 1; ++p) hashMat(p, i) = (hashMat(p, i) % secondHashSize); } } // Count number of points hashed in the same bucket as the query. size_t maxNumPoints = 0; for (size_t i = 0; i < numTablesToSearch; ++i) { for (size_t p = 0; p < T + 1; ++p) { const size_t hashInd = hashMat(p, i); // find query's bucket const size_t tableRow = bucketRowInHashTable[hashInd]; if (tableRow < secondHashSize) maxNumPoints += bucketContentSize[tableRow]; // count bucket contents } } // There are two ways to proceed here: // Either allocate a maxNumPoints-size vector, place all candidates, and run // unique on the vector to discard duplicates. // Or allocate a referenceSet->n_cols size vector (i.e. number of reference // points) of zeros, and mark found indices as 1. // Option 1 runs faster for small maxNumPoints but worse for larger values, so // we choose based on a heuristic. const float cutoff = 0.1; const float selectivity = static_cast(maxNumPoints) / static_cast(referenceSet->n_cols); if (selectivity > cutoff) { // Heuristic: larger maxNumPoints means we should use find() because it // should be faster. // Reference points hashed in the same bucket as the query are set to >0. arma::Col refPointsConsidered; refPointsConsidered.zeros(referenceSet->n_cols); for (size_t i = 0; i < numTablesToSearch; ++i) // for all tables { for (size_t p = 0; p < T + 1; ++p) // For entire probing sequence. { // get the sequence code size_t hashInd = hashMat(p, i); size_t tableRow = bucketRowInHashTable[hashInd]; if (tableRow < secondHashSize && bucketContentSize[tableRow] > 0) // Pick the indices in the bucket corresponding to hashInd. for (size_t j = 0; j < bucketContentSize[tableRow]; ++j) refPointsConsidered[ secondHashTable[tableRow](j) ]++; } } // Only keep reference points found in at least one bucket. referenceIndices = arma::find(refPointsConsidered > 0); return; } else { // Heuristic: smaller maxNumPoints means we should use unique() because it // should be faster. // Allocate space for the query's potential neighbors. arma::uvec refPointsConsideredSmall; refPointsConsideredSmall.zeros(maxNumPoints); // Retrieve candidates. size_t start = 0; for (size_t i = 0; i < numTablesToSearch; ++i) // For all tables { for (size_t p = 0; p < T + 1; ++p) { const size_t hashInd = hashMat(p, i); // Find the query's bucket. const size_t tableRow = bucketRowInHashTable[hashInd]; if (tableRow < secondHashSize) // Store all secondHashTable points in the candidates set. for (size_t j = 0; j < bucketContentSize[tableRow]; ++j) refPointsConsideredSmall(start++) = secondHashTable[tableRow](j); } } // Keep only one copy of each candidate. referenceIndices = arma::unique(refPointsConsideredSmall); return; } } // Search for nearest neighbors in a given query set. template void LSHSearch::Search(const arma::mat& querySet, const size_t k, arma::Mat& resultingNeighbors, arma::mat& distances, const size_t numTablesToSearch, const size_t T) { // Ensure the dimensionality of the query set is correct. if (querySet.n_rows != referenceSet->n_rows) { std::ostringstream oss; oss << "LSHSearch::Search(): dimensionality of query set (" << querySet.n_rows << ") is not equal to the dimensionality the model " << "was trained on (" << referenceSet->n_rows << ")!" 
<< std::endl; throw std::invalid_argument(oss.str()); } if (k > referenceSet->n_cols) { std::ostringstream oss; oss << "LSHSearch::Search(): requested " << k << " approximate nearest " << "neighbors, but reference set has " << referenceSet->n_cols << " points!" << std::endl; throw std::invalid_argument(oss.str()); } // Set the size of the neighbor and distance matrices. resultingNeighbors.set_size(k, querySet.n_cols); distances.set_size(k, querySet.n_cols); // If the user asked for 0 nearest neighbors... uh... we're done. if (k == 0) return; // If the user requested more than the available number of additional probing // bins, set Teffective to maximum T. Maximum T is 2^numProj - 1 size_t Teffective = T; if (T > ((size_t) ((1 << numProj) - 1))) { Teffective = (1 << numProj) - 1; Log::Warn << "Requested " << T << " additional bins are more than " << "theoretical maximum. Using " << Teffective << " instead." << std::endl; } // If the user set multiprobe, log it if (Teffective > 0) Log::Info << "Running multiprobe LSH with " << Teffective <<" additional probing bins per table per query." << std::endl; size_t avgIndicesReturned = 0; Timer::Start("computing_neighbors"); // Parallelization to process more than one query at a time. #ifdef _WIN32 // Tiny workaround: Visual Studio only implements OpenMP 2.0, which doesn't // support unsigned loop variables. If we're building for Visual Studio, use // the intmax_t type instead. #pragma omp parallel for \ shared(resultingNeighbors, distances) \ schedule(dynamic)\ reduction(+:avgIndicesReturned) for (intmax_t i = 0; i < (intmax_t) querySet.n_cols; ++i) #else #pragma omp parallel for \ shared(resultingNeighbors, distances) \ schedule(dynamic)\ reduction(+:avgIndicesReturned) for (size_t i = 0; i < querySet.n_cols; ++i) #endif { // Go through every query point. // Hash every query into every hash table and eventually into the // 'secondHashTable' to obtain the neighbor candidates. arma::uvec refIndices; ReturnIndicesFromTable(querySet.col(i), refIndices, numTablesToSearch, Teffective); // An informative book-keeping for the number of neighbor candidates // returned on average. // Make atomic to avoid race conditions when multiple threads are running // #pragma omp atomic avgIndicesReturned = avgIndicesReturned + refIndices.n_elem; // Sequentially go through all the candidates and save the best 'k' // candidates. BaseCase(i, refIndices, k, querySet, resultingNeighbors, distances); } Timer::Stop("computing_neighbors"); distanceEvaluations += avgIndicesReturned; avgIndicesReturned /= querySet.n_cols; Log::Info << avgIndicesReturned << " distinct indices returned on average." << std::endl; } // Search for approximate neighbors of the reference set. template void LSHSearch:: Search(const size_t k, arma::Mat& resultingNeighbors, arma::mat& distances, const size_t numTablesToSearch, size_t T) { // This is monochromatic search; the query set is the reference set. resultingNeighbors.set_size(k, referenceSet->n_cols); distances.set_size(k, referenceSet->n_cols); // If the user requested more than the available number of additional probing // bins, set Teffective to maximum T. Maximum T is 2^numProj - 1 size_t Teffective = T; if (T > ((size_t) ((1 << numProj) - 1))) { Teffective = (1 << numProj) - 1; Log::Warn << "Requested " << T << " additional bins are more than " << "theoretical maximum. Using " << Teffective << " instead." 
<< std::endl; } // If the user set multiprobe, log it if (T > 0) Log::Info << "Running multiprobe LSH with " << Teffective << " additional probing bins per table per query."<< std::endl; size_t avgIndicesReturned = 0; Timer::Start("computing_neighbors"); // Parallelization to process more than one query at a time. #ifdef _WIN32 // Tiny workaround: Visual Studio only implements OpenMP 2.0, which doesn't // support unsigned loop variables. If we're building for Visual Studio, use // the intmax_t type instead. #pragma omp parallel for \ shared(resultingNeighbors, distances) \ schedule(dynamic)\ reduction(+:avgIndicesReturned) for (intmax_t i = 0; i < (intmax_t) referenceSet->n_cols; ++i) #else #pragma omp parallel for \ shared(resultingNeighbors, distances) \ schedule(dynamic)\ reduction(+:avgIndicesReturned) for (size_t i = 0; i < referenceSet->n_cols; ++i) #endif { // Go through every query point. // Hash every query into every hash table and eventually into the // 'secondHashTable' to obtain the neighbor candidates. arma::uvec refIndices; ReturnIndicesFromTable(referenceSet->col(i), refIndices, numTablesToSearch, Teffective); // An informative book-keeping for the number of neighbor candidates // returned on average. // Make atomic to avoid race conditions when multiple threads are running. // #pragma omp atomic avgIndicesReturned += refIndices.n_elem; // Sequentially go through all the candidates and save the best 'k' // candidates. BaseCase(i, refIndices, k, resultingNeighbors, distances); } Timer::Stop("computing_neighbors"); distanceEvaluations += avgIndicesReturned; avgIndicesReturned /= referenceSet->n_cols; Log::Info << avgIndicesReturned << " distinct indices returned on average." << std::endl; } template double LSHSearch::ComputeRecall( const arma::Mat& foundNeighbors, const arma::Mat& realNeighbors) { if (foundNeighbors.n_rows != realNeighbors.n_rows || foundNeighbors.n_cols != realNeighbors.n_cols) throw std::invalid_argument("LSHSearch::ComputeRecall(): matrices provided" " must have equal size"); const size_t queries = foundNeighbors.n_cols; const size_t neighbors = foundNeighbors.n_rows; // Should be equal to k. // The recall is the set intersection of found and real neighbors. size_t found = 0; for (size_t col = 0; col < queries; ++col) for (size_t row = 0; row < neighbors; ++row) for (size_t nei = 0; nei < realNeighbors.n_rows; ++nei) if (realNeighbors(row, col) == foundNeighbors(nei, col)) { found++; break; } return ((double) found) / realNeighbors.n_elem; } template template void LSHSearch::Serialize(Archive& ar, const unsigned int version) { using data::CreateNVP; // If we are loading, we are going to own the reference set. if (Archive::is_loading::value) { if (ownsSet) delete referenceSet; ownsSet = true; } ar & CreateNVP(referenceSet, "referenceSet"); ar & CreateNVP(numProj, "numProj"); ar & CreateNVP(numTables, "numTables"); // Delete existing projections, if necessary. if (Archive::is_loading::value) projections.reset(); // Backward compatibility: older versions of LSHSearch stored the projection // tables in a std::vector. 
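  // (The 'version' argument is supplied by boost::serialization, based on the
  // BOOST_TEMPLATE_CLASS_VERSION macro in lsh_search.hpp; archives written
  // before that macro was bumped to 1 report version 0 here.)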
if (version == 0) { std::vector tmpProj; ar & CreateNVP(tmpProj, "projections"); projections.set_size(tmpProj[0].n_rows, tmpProj[0].n_cols, tmpProj.size()); for (size_t i = 0; i < tmpProj.size(); ++i) projections.slice(i) = tmpProj[i]; } else { ar & CreateNVP(projections, "projections"); } ar & CreateNVP(offsets, "offsets"); ar & CreateNVP(hashWidth, "hashWidth"); ar & CreateNVP(secondHashSize, "secondHashSize"); ar & CreateNVP(secondHashWeights, "secondHashWeights"); ar & CreateNVP(bucketSize, "bucketSize"); // needs specific handling for new version // Backward compatibility: in older versions of LSHSearch, the secondHashTable // was stored as an arma::Mat. So we need to properly load that, then // prune it down to size. if (version == 0) { arma::Mat tmpSecondHashTable; ar & CreateNVP(tmpSecondHashTable, "secondHashTable"); // The old secondHashTable was stored in row-major format, so we transpose // it. tmpSecondHashTable = tmpSecondHashTable.t(); secondHashTable.resize(tmpSecondHashTable.n_cols); for (size_t i = 0; i < tmpSecondHashTable.n_cols; ++i) { // Find length of each column. We know we are at the end of the list when // the value referenceSet->n_cols is seen. size_t len = 0; for ( ; len < tmpSecondHashTable.n_rows; ++len) if (tmpSecondHashTable(len, i) == referenceSet->n_cols) break; // Set the size of the new column correctly. secondHashTable[i].set_size(len); for (size_t j = 0; j < len; ++j) secondHashTable[i](j) = tmpSecondHashTable(j, i); } } else { size_t tables; if (Archive::is_saving::value) tables = secondHashTable.size(); ar & CreateNVP(tables, "numSecondHashTables"); // Set size of second hash table if needed. if (Archive::is_loading::value) { secondHashTable.clear(); secondHashTable.resize(tables); } for (size_t i = 0; i < secondHashTable.size(); ++i) { std::ostringstream oss; oss << "secondHashTable" << i; ar & CreateNVP(secondHashTable[i], oss.str()); } } // Backward compatibility: old versions of LSHSearch held bucketContentSize // for all possible buckets (of size secondHashSize), but now we hold a // compressed representation. if (version == 0) { // The vector was stored in the old uncompressed form. So we need to shrink // it. But we can't do that until we have bucketRowInHashTable, so we also // have to load that. arma::Col tmpBucketContentSize; ar & CreateNVP(tmpBucketContentSize, "bucketContentSize"); ar & CreateNVP(bucketRowInHashTable, "bucketRowInHashTable"); // Compress into a smaller vector by just dropping all of the zeros. bucketContentSize.set_size(secondHashTable.size()); for (size_t i = 0; i < tmpBucketContentSize.n_elem; ++i) if (tmpBucketContentSize[i] > 0) bucketContentSize[bucketRowInHashTable[i]] = tmpBucketContentSize[i]; } else { ar & CreateNVP(bucketContentSize, "bucketContentSize"); ar & CreateNVP(bucketRowInHashTable, "bucketRowInHashTable"); } ar & CreateNVP(distanceEvaluations, "distanceEvaluations"); } } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/matrix_completion/000077500000000000000000000000001315013601400222175ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/matrix_completion/CMakeLists.txt000066400000000000000000000007251315013601400247630ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES matrix_completion.hpp matrix_completion.cpp ) # Add directory name to sources. 
set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/matrix_completion/matrix_completion.cpp000066400000000000000000000074031315013601400264640ustar00rootroot00000000000000/** * @file matrix_completion_impl.hpp * @author Stephen Tu * * Implementation of MatrixCompletion class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "matrix_completion.hpp" namespace mlpack { namespace matrix_completion { MatrixCompletion::MatrixCompletion(const size_t m, const size_t n, const arma::umat& indices, const arma::vec& values, const size_t r) : m(m), n(n), indices(indices), values(values), sdp(indices.n_cols, 0, arma::randu(m + n, r)) { CheckValues(); InitSDP(); } MatrixCompletion::MatrixCompletion(const size_t m, const size_t n, const arma::umat& indices, const arma::vec& values, const arma::mat& initialPoint) : m(m), n(n), indices(indices), values(values), sdp(indices.n_cols, 0, initialPoint) { CheckValues(); InitSDP(); } MatrixCompletion::MatrixCompletion(const size_t m, const size_t n, const arma::umat& indices, const arma::vec& values) : m(m), n(n), indices(indices), values(values), sdp(indices.n_cols, 0, arma::randu(m + n, DefaultRank(m, n, indices.n_cols))) { CheckValues(); InitSDP(); } void MatrixCompletion::CheckValues() { if (indices.n_rows != 2) Log::Fatal << "MatrixCompletion::CheckValues(): matrix of constraint indices does " << "not have 2 rows!" << std::endl; if (indices.n_cols != values.n_elem) Log::Fatal << "MatrixCompletion::CheckValues(): the number of constraint indices " << "(columns of constraint indices matrix) does not match the number of " << "constraint values (length of constraint value vector)!" << std::endl; for (size_t i = 0; i < values.n_elem; i++) { if (indices(0, i) >= m || indices(1, i) >= n) Log::Fatal << "MatrixCompletion::CheckValues(): indices (" << indices(0, i) << ", " << indices(1, i) << ") are out of bounds for matrix of size " << m << " x " << "n!" << std::endl; } } void MatrixCompletion::InitSDP() { sdp.SDP().C().eye(m + n, m + n); sdp.SDP().SparseB() = 2. * values; const size_t p = indices.n_cols; for (size_t i = 0; i < p; i++) { sdp.SDP().SparseA()[i].zeros(m + n, m + n); sdp.SDP().SparseA()[i](indices(0, i), m + indices(1, i)) = 1.; sdp.SDP().SparseA()[i](m + indices(1, i), indices(0, i)) = 1.; } } void MatrixCompletion::Recover(arma::mat& recovered) { recovered = sdp.Function().GetInitialPoint(); sdp.Optimize(recovered); recovered = recovered * trans(recovered); recovered = recovered(arma::span(0, m - 1), arma::span(m, m + n - 1)); } size_t MatrixCompletion::DefaultRank(const size_t m, const size_t n, const size_t p) { // If r = O(sqrt(p)), then we are guaranteed an exact solution. // For more details, see // // On the rank of extreme matrices in semidefinite programs and the // multiplicity of optimal eigenvalues. // Pablo Moscato, Michael Norman, and Gabor Pataki. // Math Oper. Res., 23(2). 1998. const size_t mpn = m + n; float r = 0.5 + sqrt(0.25 + 2 * p); if (ceil(r) > mpn) r = mpn; // An upper bound on the dimension. 
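  // For example, p = 100 known entries gives r = 0.5 + sqrt(200.25) ~= 14.65,
  // so a rank of 15 would be used (unless m + n is smaller).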
return ceil(r); } } // namespace matrix_completion } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/matrix_completion/matrix_completion.hpp000066400000000000000000000112731315013601400264710ustar00rootroot00000000000000/** * @file matrix_completion.hpp * @author Stephen Tu * * A thin wrapper around nuclear norm minimization to solve * low rank matrix completion problems. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_MATRIX_COMPLETION_MATRIX_COMPLETION_HPP #define MLPACK_METHODS_MATRIX_COMPLETION_MATRIX_COMPLETION_HPP #include #include namespace mlpack { namespace matrix_completion { /** * This class implements the popular nuclear norm minimization heuristic for * matrix completion problems. That is, given known values M_ij's, the * following optimization problem (semi-definite program) is solved to fill in * the remaining unknown values of X * * min ||X||_* subj to X_ij = M_ij * * where ||X||_* denotes the nuclear norm (sum of singular values of X). * * For a theoretical treatment of the conditions necessary for exact recovery, * see the following paper: * * A Simpler Appoarch to Matrix Completion. * Benjamin Recht. JMLR 11. * http://arxiv.org/pdf/0910.0651v2.pdf * * An example of how to use this class is shown below: * * @code * size_t m, n; // size of unknown matrix * arma::umat indices; // contains the known indices [2 x n_entries] * arma::vec values; // contains the known values [n_entries] * arma::mat recovered; // will contain the completed matrix * * MatrixCompletion mc(m, n, indices, values); * mc.Recover(recovered); * @endcode * * @see LRSDP */ class MatrixCompletion { public: /** * Construct a matrix completion problem, specifying the maximum rank of the * solution. * * @param m Number of rows of original matrix. * @param n Number of columns of original matrix. * @param indices Matrix containing the indices of the known entries (must be * [2 x p]). * @param values Vector containing the values of the known entries (must be * length p). * @param r Maximum rank of solution. */ MatrixCompletion(const size_t m, const size_t n, const arma::umat& indices, const arma::vec& values, const size_t r); /** * Construct a matrix completion problem, specifying the initial point of the * optimization. * * @param m Number of rows of original matrix. * @param n Number of columns of original matrix. * @param indices Matrix containing the indices of the known entries (must be * [2 x p]). * @param values Vector containing the values of the known entries (must be * length p). * @param initialPoint Starting point for the SDP optimization. */ MatrixCompletion(const size_t m, const size_t n, const arma::umat& indices, const arma::vec& values, const arma::mat& initialPoint); /** * Construct a matrix completion problem. * * @param m Number of rows of original matrix. * @param n Number of columns of original matrix. * @param indices Matrix containing the indices of the known entries (must be * [2 x p]). * @param values Vector containing the values of the known entries (must be * length p). */ MatrixCompletion(const size_t m, const size_t n, const arma::umat& indices, const arma::vec& values); /** * Solve the underlying SDP to fill in the remaining values. * * @param recovered Will contain the completed matrix. 
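 *
 * Note that each call restarts the optimization from the SDP function's
 * initial point, so repeated calls redo the full solve rather than refining
 * the previous solution.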
*/ void Recover(arma::mat& recovered); //! Return the underlying SDP. const optimization::LRSDP>& Sdp() const { return sdp; } //! Modify the underlying SDP. optimization::LRSDP>& Sdp() { return sdp; } private: //! Number of rows in original matrix. size_t m; //! Number of columns in original matrix. size_t n; //! Matrix containing the indices of the known entries (has two rows). arma::umat indices; //! Vector containing the values of the known entries. arma::mat values; //! The underlying SDP to be solved. optimization::LRSDP> sdp; //! Validate the input matrices. void CheckValues(); //! Initialize the SDP. void InitSDP(); //! Select a rank of the matrix given that is of size m x n and has p known //! elements. static size_t DefaultRank(const size_t m, const size_t n, const size_t p); }; } // namespace matrix_completion } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/mean_shift/000077500000000000000000000000001315013601400205775ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/mean_shift/CMakeLists.txt000066400000000000000000000007541315013601400233450ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES mean_shift.hpp mean_shift_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(mean_shift) mlpack-2.2.5/src/mlpack/methods/mean_shift/mean_shift.hpp000066400000000000000000000134221315013601400234270ustar00rootroot00000000000000/** * @file mean_shift.hpp * @author Shangtong Zhang * * Mean Shift clustering * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_MEAN_SHIFT_MEAN_SHIFT_HPP #define MLPACK_METHODS_MEAN_SHIFT_MEAN_SHIFT_HPP #include #include #include #include #include namespace mlpack { namespace meanshift /** Mean shift clustering. */ { /** * This class implements mean shift clustering. For each point in dataset, * apply mean shift algorithm until maximum iterations or convergence. Then * remove duplicate centroids. * * A simple example of how to run mean shift clustering is shown below. * * @code * extern arma::mat data; // Dataset we want to run mean shift on. * arma::Col assignments; // Cluster assignments. * arma::mat centroids; // Cluster centroids. * * MeanShift<> meanShift(); * meanShift.Cluster(dataset, assignments, centroids); * @endcode * * @tparam UseKernel Use kernel or mean to calculate new centroid. * If false, KernelType will be ignored. * @tparam KernelType The kernel to use. * @tparam MatType The type of matrix the data is stored in. */ template class MeanShift { public: /** * Create a mean shift object and set the parameters which mean shift will be * run with. * * @param radius If distance of two centroids is less than it, one will be * removed. If this value isn't positive, an estimation will be given * when clustering. * @param maxIterations Maximum number of iterations allowed before giving up * iterations will terminate. * @param kernel Optional KernelType object. 
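   *
   * A non-positive radius asks Cluster() to call EstimateRadius() internally.
   * For instance (a minimal sketch, assuming the kernelized centroid update
   * with mlpack's GaussianKernel is desired and the radius should be
   * estimated automatically):
   *
   * @code
   * MeanShift<true, kernel::GaussianKernel> meanShift(0, 1000);
   * @endcode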
*/ MeanShift(const double radius = 0, const size_t maxIterations = 1000, const KernelType kernel = KernelType()); /** * Give an estimation of radius based on given dataset. * * @param data Dataset for estimation. * @param ratio Percentage of dataset to use for nearest neighbor search. */ double EstimateRadius(const MatType& data, const double ratio = 0.2); /** * Perform mean shift clustering on the data, returning a list of cluster * assignments and centroids. * * @tparam MatType Type of matrix. * @param data Dataset to cluster. * @param assignments Vector to store cluster assignments in. * @param centroids Matrix in which centroids are stored. */ void Cluster(const MatType& data, arma::Col& assignments, arma::mat& centroids, bool useSeeds = true); //! Get the maximum number of iterations. size_t MaxIterations() const { return maxIterations; } //! Set the maximum number of iterations. size_t& MaxIterations() { return maxIterations; } //! Get the radius. double Radius() const { return radius; } //! Set the radius. void Radius(double radius); //! Get the kernel. const KernelType& Kernel() const { return kernel; } //! Modify the kernel. KernelType& Kernel() { return kernel; } private: /** * To speed up, we can generate some seeds from data set and use * them as initial centroids rather than all the points in the data set. The * basic idea here is that we will place our points into hypercube bins of * side length binSize, and any bins that contain fewer than minFreq points * will be removed as possible seeds. Usually, 1 is a sufficient parameter * for minFreq, and the bin size can be set equal to the estimated radius. * * @param data The reference data set. * @param binSize Width of hypercube bins. * @param minFreq Minimum number of points in bin. * @param seed Matrix to store generated seeds in. */ void GenSeeds(const MatType& data, const double binSize, const int minFreq, MatType& seeds); /** * Use kernel to calculate new centroid given dataset and valid neighbors. * * @param data The whole dataset * @param neighbors Valid neighbors * @param distances Distances to neighbors # @param centroid Store calculated centroid */ template typename std::enable_if::type CalculateCentroid(const MatType& data, const std::vector& neighbors, const std::vector& distances, arma::colvec& centroid); /** * Use mean to calculate new centroid given dataset and valid neighbors. * * @param data The whole dataset * @param neighbors Valid neighbors * @param distances Distances to neighbors # @param centroid Store calculated centroid */ template typename std::enable_if::type CalculateCentroid(const MatType& data, const std::vector& neighbors, const std::vector&, /*unused*/ arma::colvec& centroid); /** * If distance of two centroids is less than radius, one will be removed. * Points with distance to current centroid less than radius will be used * to calculate new centroid. */ double radius; //! Maximum number of iterations before giving up. size_t maxIterations; //! Instantiated kernel. KernelType kernel; }; } // namespace meanshift } // namespace mlpack // Include implementation. #include "mean_shift_impl.hpp" #endif // MLPACK_METHODS_MEAN_SHIFT_MEAN_SHIFT_HPP mlpack-2.2.5/src/mlpack/methods/mean_shift/mean_shift_impl.hpp000066400000000000000000000173071315013601400244560ustar00rootroot00000000000000/** * @file mean_shift_impl.hpp * @author Shangtong Zhang * * Mean shift clustering implementation. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_MEAN_SHIFT_MEAN_SHIFT_IMPL_HPP #define MLPACK_METHODS_MEAN_SHIFT_MEAN_SHIFT_IMPL_HPP #include #include #include #include #include #include "map" // In case it hasn't been included yet. #include "mean_shift.hpp" namespace mlpack { namespace meanshift { /** * Construct the Mean Shift object. */ template MeanShift:: MeanShift(const double radius, const size_t maxIterations, const KernelType kernel) : radius(radius), maxIterations(maxIterations), kernel(kernel) { // Nothing to do. } template void MeanShift::Radius(double radius) { this->radius = radius; } // Estimate radius based on given dataset. template double MeanShift:: EstimateRadius(const MatType& data, double ratio) { neighbor::KNN neighborSearch(data); /** * For each point in dataset, select nNeighbors nearest points and get * nNeighbors distances. Use the maximum distance to estimate the duplicate * threshhold. */ const size_t nNeighbors = size_t(data.n_cols * ratio); arma::Mat neighbors; arma::mat distances; neighborSearch.Search(nNeighbors, neighbors, distances); // Get max distance for each point. arma::rowvec maxDistances = max(distances); // Calculate and return the radius. return sum(maxDistances) / (double) data.n_cols; } // Class to compare two vectors. template class less { public: bool operator()(const VecType& first, const VecType& second) const { for (size_t i = 0; i < first.n_rows; ++i) { if (first[i] == second[i]) continue; return first(i) < second(i); } return false; } }; // Generate seeds from given data set. template void MeanShift::GenSeeds( const MatType& data, const double binSize, const int minFreq, MatType& seeds) { typedef arma::colvec VecType; std::map > allSeeds; for (size_t i = 0; i < data.n_cols; ++i) { VecType binnedPoint = arma::floor(data.unsafe_col(i) / binSize); if (allSeeds.find(binnedPoint) == allSeeds.end()) allSeeds[binnedPoint] = 1; else allSeeds[binnedPoint]++; } // Remove seeds with too few points. First we count the number of seeds we // end up with, then we add them. std::map >::iterator it; size_t count = 0; for (it = allSeeds.begin(); it != allSeeds.end(); ++it) if (it->second >= minFreq) ++count; seeds.set_size(data.n_rows, count); count = 0; for (it = allSeeds.begin(); it != allSeeds.end(); ++it) { if (it->second >= minFreq) { seeds.col(count) = it->first; ++count; } } seeds *= binSize; } // Calculate new centroid with given kernel. template template typename std::enable_if::type MeanShift:: CalculateCentroid(const MatType& data, const std::vector& neighbors, const std::vector& distances, arma::colvec& centroid) { double sumWeight = 0; for (size_t i = 0; i < neighbors.size(); ++i) { if (distances[i] > 0) { double dist = distances[i] / radius; double weight = kernel.Gradient(dist) / dist; sumWeight += weight; centroid += weight * data.unsafe_col(neighbors[i]); } } if (sumWeight != 0) { centroid /= sumWeight; return true; } return false; } // Calculate new centroid by mean. template template typename std::enable_if::type MeanShift:: CalculateCentroid(const MatType& data, const std::vector& neighbors, const std::vector&, /*unused*/ arma::colvec& centroid) { for (size_t i = 0; i < neighbors.size(); ++i) centroid += data.unsafe_col(neighbors[i]); centroid /= neighbors.size(); return true; } /** * Perform Mean Shift clustering on the data set, returning a list of cluster * assignments and centroids. 
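 *
 * Each seed is shifted until either maxIterations is reached or the shift is
 * smaller than 1e-3 * radius; converged centroids that land within 'radius'
 * of an already-accepted centroid are discarded as duplicates.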
*/ template inline void MeanShift::Cluster( const MatType& data, arma::Col& assignments, arma::mat& centroids, bool useSeeds) { if (radius <= 0) { // An invalid radius is given; an estimation is needed. Radius(EstimateRadius(data)); } MatType seeds; const MatType* pSeeds = &data; if (useSeeds) { GenSeeds(data, radius, 1, seeds); pSeeds = &seeds; } // Holds all centroids before removing duplicate ones. arma::mat allCentroids(pSeeds->n_rows, pSeeds->n_cols); assignments.set_size(data.n_cols); range::RangeSearch<> rangeSearcher(data); math::Range validRadius(0, radius); std::vector > neighbors; std::vector > distances; // For each seed, perform mean shift algorithm. for (size_t i = 0; i < pSeeds->n_cols; ++i) { // Initial centroid is the seed itself. allCentroids.col(i) = pSeeds->unsafe_col(i); for (size_t completedIterations = 0; completedIterations < maxIterations; completedIterations++) { // Store new centroid in this. arma::colvec newCentroid = arma::zeros(pSeeds->n_rows); rangeSearcher.Search(allCentroids.unsafe_col(i), validRadius, neighbors, distances); if (neighbors[0].size() <= 1) break; // Calculate new centroid. if (!CalculateCentroid(data, neighbors[0], distances[0], newCentroid)) newCentroid = allCentroids.unsafe_col(i); // If the mean shift vector is small enough, it has converged. if (metric::EuclideanDistance::Evaluate(newCentroid, allCentroids.unsafe_col(i)) < 1e-3 * radius) { // Determine if the new centroid is duplicate with old ones. bool isDuplicated = false; for (size_t k = 0; k < centroids.n_cols; ++k) { const double distance = metric::EuclideanDistance::Evaluate( allCentroids.unsafe_col(i), centroids.unsafe_col(k)); if (distance < radius) { isDuplicated = true; break; } } if (!isDuplicated) centroids.insert_cols(centroids.n_cols, allCentroids.unsafe_col(i)); // Get out of the loop. break; } // Update the centroid. allCentroids.col(i) = newCentroid; } } // Assign centroids to each point. neighbor::KNN neighborSearcher(centroids); arma::mat neighborDistances; arma::Mat resultingNeighbors; neighborSearcher.Search(data, 1, resultingNeighbors, neighborDistances); assignments = resultingNeighbors.t(); } } // namespace meanshift } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/mean_shift/mean_shift_main.cpp000066400000000000000000000127121315013601400244270ustar00rootroot00000000000000/** * @file mean_shift_main.cpp * @author Shangtong Zhang * * Executable for running Mean Shift * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "mean_shift.hpp" using namespace mlpack; using namespace mlpack::meanshift; using namespace mlpack::kernel; using namespace std; // Define parameters for the executable. PROGRAM_INFO("Mean Shift Clustering", "This program performs mean shift " "clustering on the given dataset, storing the learned cluster assignments " "either as a column of labels in the file containing the input dataset or " "in a separate file."); // Required options. PARAM_STRING_IN("input_file", "Input dataset to perform clustering on.", "i", ""); // This is kept for reverse compatibility and may be removed in mlpack 3.0.0. // At that time, --input_file should be made a required parameter. PARAM_STRING_IN("inputFile", "Input dataset to perform clustering on.", "", ""); // Output options. 
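// (For illustration only, a typical invocation using the options below might
// look like:
//   mlpack_mean_shift -i data.csv -l -o labels.csv -C centroids.csv
// which writes the cluster labels and the centroids to separate files.)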
PARAM_FLAG("in_place", "If specified, a column containing the learned cluster " "assignments will be added to the input dataset file. In this case, " "--output_file is overridden.", "P"); PARAM_FLAG("labels_only", "If specified, only the output labels will be " "written to the file specified by --output_file.", "l"); PARAM_STRING_OUT("output_file", "File to write output labels or labeled data " "to.", "o"); PARAM_STRING_OUT("centroid_file", "If specified, the centroids of each cluster " "will be written to the given file.", "C"); // Mean shift configuration options. PARAM_INT_IN("max_iterations", "Maximum number of iterations before mean shift " "terminates.", "m", 1000); PARAM_DOUBLE_IN("radius", "If the distance between two centroids is less than " "the given radius, one will be removed. A radius of 0 or less means an " "estimate will be calculated and used for the radius.", "r", 0); int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); // This is for reverse compatibility and may be removed in mlpack 3.0.0. if (CLI::HasParam("inputFile") && CLI::HasParam("input_file")) Log::Fatal << "Cannot specify both --input_file and --inputFile!" << endl; if (CLI::HasParam("inputFile")) { Log::Warn << "--inputFile is deprecated and will be removed in mlpack " << "3.0.0; use --input_file instead." << endl; CLI::GetParam("input_file") = CLI::GetParam("inputFile"); } if (CLI::GetParam("input_file") == "") Log::Fatal << "--input_file must be specified!" << endl; const string inputFile = CLI::GetParam("input_file"); const double radius = CLI::GetParam("radius"); const int maxIterations = CLI::GetParam("max_iterations"); if (maxIterations < 0) { Log::Fatal << "Invalid value for maximum iterations (" << maxIterations << ")! Must be greater than or equal to 0." << endl; } // Make sure we have an output file if we're not doing the work in-place. if (!CLI::HasParam("in_place") && !CLI::HasParam("output_file") && !CLI::HasParam("centroid_file")) { Log::Warn << "--output_file, --in_place, and --centroid_file are not set; " << "no results will be saved." << endl; } if (CLI::HasParam("labels_only") && !CLI::HasParam("output_file")) Log::Warn << "--labels_only ignored because --output_file is not specified." << endl; arma::mat dataset; data::Load(inputFile, dataset, true); // Fatal upon failure. arma::mat centroids; arma::Col assignments; MeanShift<> meanShift(radius, maxIterations); Timer::Start("clustering"); Log::Info << "Performing mean shift clustering..." << endl; meanShift.Cluster(dataset, assignments, centroids); Timer::Stop("clustering"); Log::Info << "Found " << centroids.n_cols << " centroids." << endl; if (radius <= 0.0) Log::Info << "Estimated radius was " << meanShift.Radius() << ".\n"; if (CLI::HasParam("in_place")) { // Add the column of assignments to the dataset; but we have to convert them // to type double first. arma::vec converted(assignments.n_elem); for (size_t i = 0; i < assignments.n_elem; i++) converted(i) = (double) assignments(i); dataset.insert_rows(dataset.n_rows, trans(converted)); // Save the dataset. data::Save(inputFile, dataset); } else { if (!CLI::HasParam("labels_only")) { // Convert the assignments to doubles. arma::vec converted(assignments.n_elem); for (size_t i = 0; i < assignments.n_elem; i++) converted(i) = (double) assignments(i); dataset.insert_rows(dataset.n_rows, trans(converted)); // Now save, in the different file. 
string outputFile = CLI::GetParam("output_file"); if (outputFile != "") data::Save(outputFile, dataset); } else { string outputFile = CLI::GetParam("output_file"); if (outputFile != "") data::Save(outputFile, assignments, false, false); // No transpose. } } // Should we write the centroids to a file? if (CLI::HasParam("centroid_file")) data::Save(CLI::GetParam("centroid_file"), centroids); } mlpack-2.2.5/src/mlpack/methods/naive_bayes/000077500000000000000000000000001315013601400207475ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/naive_bayes/CMakeLists.txt000066400000000000000000000007751315013601400235200ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES naive_bayes_classifier.hpp naive_bayes_classifier_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(nbc) mlpack-2.2.5/src/mlpack/methods/naive_bayes/naive_bayes_classifier.hpp000066400000000000000000000136131315013601400261550ustar00rootroot00000000000000/** * @file naive_bayes_classifier.hpp * @author Parikshit Ram (pram@cc.gatech.edu) * * A Naive Bayes Classifier which parametrically estimates the distribution of * the features. It is assumed that the features have been sampled from a * Gaussian PDF. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NAIVE_BAYES_NAIVE_BAYES_CLASSIFIER_HPP #define MLPACK_METHODS_NAIVE_BAYES_NAIVE_BAYES_CLASSIFIER_HPP #include namespace mlpack { namespace naive_bayes /** The Naive Bayes Classifier. */ { /** * The simple Naive Bayes classifier. This class trains on the data by * calculating the sample mean and variance of the features with respect to each * of the labels, and also the class probabilities. The class labels are * assumed to be positive integers (starting with 0), and are expected to be the * last row of the data input to the constructor. * * Mathematically, it computes P(X_i = x_i | Y = y_j) for each feature X_i for * each of the labels y_j. Alongwith this, it also computes the class * probabilities P(Y = y_j). * * For classifying a data point (x_1, x_2, ..., x_n), it computes the following: * arg max_y(P(Y = y)*P(X_1 = x_1 | Y = y) * ... * P(X_n = x_n | Y = y)) * * Example use: * * @code * extern arma::mat training_data, testing_data; * NaiveBayesClassifier<> nbc(training_data, 5); * arma::vec results; * * nbc.Classify(testing_data, results); * @endcode */ template class NaiveBayesClassifier { public: /** * Initializes the classifier as per the input and then trains it by * calculating the sample mean and variances. * * Example use: * @code * extern arma::mat training_data, testing_data; * extern arma::Row labels; * NaiveBayesClassifier nbc(training_data, labels, 5); * @endcode * * @param data Training data points. * @param labels Labels corresponding to training data points. * @param classes Number of classes in this classifier. 
* @param incrementalVariance If true, an incremental algorithm is used to * calculate the variance; this can prevent loss of precision in some * cases, but will be somewhat slower to calculate. */ NaiveBayesClassifier(const MatType& data, const arma::Row& labels, const size_t classes, const bool incrementalVariance = false); /** * Initialize the Naive Bayes classifier without performing training. All of * the parameters of the model will be initialized to zero. Be sure to use * Train() before calling Classify(), otherwise the results may be * meaningless. */ NaiveBayesClassifier(const size_t dimensionality = 0, const size_t classes = 0); /** * Train the Naive Bayes classifier on the given dataset. If the incremental * algorithm is used, the current model is used as a starting point (this is * the default). If the incremental algorithm is not used, then the current * model is ignored and the new model will be trained only on the given data. * Note that even if the incremental algorithm is not used, the data must have * the same dimensionality and number of classes that the model was * initialized with. If you want to change the dimensionality or number of * classes, either re-initialize or call Means(), Variances(), and * Probabilities() individually to set them to the right size. * * @param data The dataset to train on. * @param incremental Whether or not to use the incremental algorithm for * training. */ void Train(const MatType& data, const arma::Row& labels, const bool incremental = true); /** * Train the Naive Bayes classifier on the given point. This will use the * incremental algorithm for updating the model parameters. The data must be * the same dimensionality as the existing model parameters. * * @param point Data point to train on. * @param label Label of data point. */ template void Train(const VecType& point, const size_t label); /** * Given a bunch of data points, this function evaluates the class of each of * those data points, and puts it in the vector 'results'. * * @code * arma::mat test_data; // each column is a test point * arma::Row results; * ... * nbc.Classify(test_data, &results); * @endcode * * @param data List of data points. * @param results Vector that class predictions will be placed into. */ void Classify(const MatType& data, arma::Row& results); //! Get the sample means for each class. const MatType& Means() const { return means; } //! Modify the sample means for each class. MatType& Means() { return means; } //! Get the sample variances for each class. const MatType& Variances() const { return variances; } //! Modify the sample variances for each class. MatType& Variances() { return variances; } //! Get the prior probabilities for each class. const arma::vec& Probabilities() const { return probabilities; } //! Modify the prior probabilities for each class. arma::vec& Probabilities() { return probabilities; } //! Serialize the classifier. template void Serialize(Archive& ar, const unsigned int /* version */); private: //! Sample mean for each class. MatType means; //! Sample variances for each class. MatType variances; //! Class probabilities. arma::vec probabilities; //! Number of training points seen so far. size_t trainingPoints; }; } // namespace naive_bayes } // namespace mlpack // Include implementation. 
#include "naive_bayes_classifier_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/naive_bayes/naive_bayes_classifier_impl.hpp000066400000000000000000000164431315013601400272020ustar00rootroot00000000000000/** * @file naive_bayes_classifier_impl.hpp * @author Parikshit Ram (pram@cc.gatech.edu) * @author Vahab Akbarzadeh (v.akbarzadeh@gmail.com) * * A Naive Bayes Classifier which parametrically estimates the distribution of * the features. This classifier makes its predictions based on the assumption * that the features have been sampled from a set of Gaussians with diagonal * covariance. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NAIVE_BAYES_NAIVE_BAYES_CLASSIFIER_IMPL_HPP #define MLPACK_METHODS_NAIVE_BAYES_NAIVE_BAYES_CLASSIFIER_IMPL_HPP #include // In case it hasn't been included already. #include "naive_bayes_classifier.hpp" namespace mlpack { namespace naive_bayes { template NaiveBayesClassifier::NaiveBayesClassifier( const MatType& data, const arma::Row& labels, const size_t classes, const bool incremental) : trainingPoints(0) // Set when we call Train(). { const size_t dimensionality = data.n_rows; // Perform training, after initializing the model to 0 (that is, if Train() // won't do that for us, which it won't if we're using the incremental // algorithm). if (incremental) { probabilities.zeros(classes); means.zeros(dimensionality, classes); variances.zeros(dimensionality, classes); } else { probabilities.set_size(classes); means.set_size(dimensionality, classes); variances.set_size(dimensionality, classes); } Train(data, labels, incremental); } template NaiveBayesClassifier::NaiveBayesClassifier(const size_t dimensionality, const size_t classes) : trainingPoints(0) { // Initialize model to 0. probabilities.zeros(classes); means.zeros(dimensionality, classes); variances.zeros(dimensionality, classes); } template void NaiveBayesClassifier::Train(const MatType& data, const arma::Row& labels, const bool incremental) { // Calculate the class probabilities as well as the sample mean and variance // for each of the features with respect to each of the labels. if (incremental) { // Use incremental algorithm. // Fist, de-normalize probabilities. probabilities *= trainingPoints; for (size_t j = 0; j < data.n_cols; ++j) { const size_t label = labels[j]; ++probabilities[label]; arma::vec delta = data.col(j) - means.col(label); means.col(label) += delta / probabilities[label]; variances.col(label) += delta % (data.col(j) - means.col(label)); } for (size_t i = 0; i < probabilities.n_elem; ++i) { if (probabilities[i] > 2) variances.col(i) /= (probabilities[i] - 1); } } else { // Set all parameters to zero probabilities.zeros(); means.zeros(); variances.zeros(); // Don't use incremental algorithm. This is a two-pass algorithm. It is // possible to calculate the means and variances using a faster one-pass // algorithm but there are some precision and stability issues. If this is // too slow, it's an option to use the faster algorithm by default and then // have this (and the incremental algorithm) be other options. // Calculate the means. for (size_t j = 0; j < data.n_cols; ++j) { const size_t label = labels[j]; ++probabilities[label]; means.col(label) += data.col(j); } // Normalize means. 
for (size_t i = 0; i < probabilities.n_elem; ++i) if (probabilities[i] != 0.0) means.col(i) /= probabilities[i]; // Calculate variances. for (size_t j = 0; j < data.n_cols; ++j) { const size_t label = labels[j]; variances.col(label) += square(data.col(j) - means.col(label)); } // Normalize variances. for (size_t i = 0; i < probabilities.n_elem; ++i) if (probabilities[i] > 1) variances.col(i) /= (probabilities[i] - 1); } // Ensure that the variances are invertible. for (size_t i = 0; i < variances.n_elem; ++i) if (variances[i] == 0.0) variances[i] = 1e-50; probabilities /= data.n_cols; trainingPoints += data.n_cols; } template template void NaiveBayesClassifier::Train(const VecType& point, const size_t label) { // We must use the incremental algorithm here. probabilities *= trainingPoints; probabilities[label]++; arma::vec delta = point - means.col(label); means.col(label) += delta / probabilities[label]; if (probabilities[label] > 2) variances.col(label) *= (probabilities[label] - 2); variances.col(label) += (delta % (point - means.col(label))); if (probabilities[label] > 1) variances.col(label) /= probabilities[label] - 1; trainingPoints++; probabilities /= trainingPoints; } template void NaiveBayesClassifier::Classify(const MatType& data, arma::Row& results) { // Check that the number of features in the test data is same as in the // training data. Log::Assert(data.n_rows == means.n_rows); arma::vec probs = arma::log(probabilities); arma::mat invVar = 1.0 / variances; arma::mat testProbs = arma::repmat(probs.t(), data.n_cols, 1); results.set_size(data.n_cols); // No need to fill with anything yet. Log::Info << "Running Naive Bayes classifier on " << data.n_cols << " data points with " << data.n_rows << " features each." << std::endl; // Calculate the joint probability for each of the data points for each of the // means.n_cols. // Loop over every class. for (size_t i = 0; i < means.n_cols; i++) { // This is an adaptation of gmm::phi() for the case where the covariance is // a diagonal matrix. arma::mat diffs = data - arma::repmat(means.col(i), 1, data.n_cols); arma::mat rhs = -0.5 * arma::diagmat(invVar.col(i)) * diffs; arma::vec exponents(diffs.n_cols); for (size_t j = 0; j < diffs.n_cols; ++j) // log(exp(value)) == value exponents(j) = arma::accu(diffs.col(j) % rhs.unsafe_col(j)); // Calculate probability as sum of logarithm to decrease floating point // errors. testProbs.col(i) += (data.n_rows / -2.0 * log(2 * M_PI) - 0.5 * log(arma::det(arma::diagmat(variances.col(i)))) + exponents); } // Now calculate the label. for (size_t i = 0; i < data.n_cols; ++i) { // Find the index of the class with maximum probability for this point. arma::uword maxIndex = 0; arma::vec pointProbs = testProbs.row(i).t(); pointProbs.max(maxIndex); results[i] = maxIndex; } return; } template template void NaiveBayesClassifier::Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(means, "means"); ar & data::CreateNVP(variances, "variances"); ar & data::CreateNVP(probabilities, "probabilities"); } } // namespace naive_bayes } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/naive_bayes/nbc_main.cpp000066400000000000000000000162611315013601400232270ustar00rootroot00000000000000/** * @author Parikshit Ram (pram@cc.gatech.edu) * @file nbc_main.cpp * * This program runs the Simple Naive Bayes Classifier. * * This classifier does parametric naive bayes classification assuming that the * features are sampled from a Gaussian distribution. 
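 *
 * A typical invocation might look like this (file names are placeholders;
 * the executable name follows the usual mlpack_ prefix that the build system
 * adds to CLI programs):
 *
 * @code
 * $ mlpack_nbc --training_file=train.csv --labels_file=labels.csv \
 *     --test_file=test.csv --output_file=predictions.csv
 * @endcode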
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include "naive_bayes_classifier.hpp" PROGRAM_INFO("Parametric Naive Bayes Classifier", "This program trains the Naive Bayes classifier on the given labeled " "training set, or loads a model from the given model file, and then may use" " that trained model to classify the points in a given test set." "\n\n" "Labels are expected to be the last row of the training set (--training_file)," " but labels can also be passed in separately as their own file " "(--labels_file). If training is not desired, a pre-existing model can be " "loaded with the --input_model_file (-m) option." "\n\n" "The '--incremental_variance' option can be used to force the training to " "use an incremental algorithm for calculating variance. This is slower, " "but can help avoid loss of precision in some cases." "\n\n" "If classifying a test set is desired, the test set should be in the file " "specified with the --test_file (-T) option, and the classifications will " "be saved to the file specified with the --output_file (-o) option. If " "saving a trained model is desired, the --output_model_file (-M) option " "should be given."); // Model loading/saving. PARAM_STRING_IN("input_model_file", "File containing input Naive Bayes model.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained Naive Bayes model " "to.", "M"); // Training parameters. PARAM_STRING_IN("training_file", "A file containing the training set.", "t", ""); PARAM_STRING_IN("labels_file", "A file containing labels for the training set.", "l", ""); PARAM_FLAG("incremental_variance", "The variance of each class will be " "calculated incrementally.", "I"); // Test parameters. PARAM_STRING_IN("test_file", "A file containing the test set.", "T", ""); PARAM_STRING_OUT("output_file", "The file in which the predicted labels for the" " test set will be written.", "o"); using namespace mlpack; using namespace mlpack::naive_bayes; using namespace std; using namespace arma; // A struct for saving the model with mappings. struct NBCModel { //! The model itself. NaiveBayesClassifier<> nbc; //! The mappings for labels. Col mappings; //! Serialize the model. template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(nbc, "nbc"); ar & data::CreateNVP(mappings, "mappings"); } }; int main(int argc, char* argv[]) { CLI::ParseCommandLine(argc, argv); // Check input parameters. if (CLI::HasParam("training_file") && CLI::HasParam("input_model_file")) Log::Fatal << "Cannot specify both --training_file (-t) and " << "--input_model_file (-m)!" << endl; if (!CLI::HasParam("training_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "Neither --training_file (-t) nor --input_model_file (-m) are" << " specified!" << endl; if (!CLI::HasParam("training_file") && CLI::HasParam("labels_file")) Log::Warn << "--labels_file (-l) ignored because --training_file (-t) is " << "not specified." << endl; if (!CLI::HasParam("training_file") && CLI::HasParam("incremental_variance")) Log::Warn << "--incremental_variance (-I) ignored because --training_file " << "(-t) is not specified." 
<< endl; if (!CLI::HasParam("output_file") && !CLI::HasParam("output_model_file")) Log::Warn << "Neither --output_file (-o) nor --output_model_file (-M) " << "specified; no output will be saved!" << endl; if (CLI::HasParam("output_file") && !CLI::HasParam("test_file")) Log::Warn << "--output_file (-o) ignored because no test file specified " << "with --test_file (-T)." << endl; if (!CLI::HasParam("output_file") && CLI::HasParam("test_file")) Log::Warn << "--test_file (-T) specified, but classification results will " << "not be saved because --output_file (-o) is not specified." << endl; // Either we have to train a model, or load a model. NBCModel model; if (CLI::HasParam("training_file")) { const string trainingFile = CLI::GetParam("training_file"); mat trainingData; data::Load(trainingFile, trainingData, true); Row labels; // Did the user pass in labels? const string labelsFilename = CLI::GetParam("labels_file"); if (labelsFilename != "") { // Load labels. mat rawLabels; data::Load(labelsFilename, rawLabels, true, false); // Do the labels need to be transposed? if (rawLabels.n_cols == 1) rawLabels = rawLabels.t(); data::NormalizeLabels(rawLabels.row(0), labels, model.mappings); } else { // Use the last row of the training data as the labels. Log::Info << "Using last dimension of training data as training labels." << endl; data::NormalizeLabels(trainingData.row(trainingData.n_rows - 1), labels, model.mappings); // Remove the label row. trainingData.shed_row(trainingData.n_rows - 1); } const bool incrementalVariance = CLI::HasParam("incremental_variance"); Timer::Start("nbc_training"); model.nbc = NaiveBayesClassifier<>(trainingData, labels, model.mappings.n_elem, incrementalVariance); Timer::Stop("nbc_training"); } else { // Load the model from file. data::Load(CLI::GetParam("input_model_file"), "nbc_model", model); } // Do we need to do testing? if (CLI::HasParam("test_file")) { const string testingDataFilename = CLI::GetParam("test_file"); mat testingData; data::Load(testingDataFilename, testingData, true); if (testingData.n_rows != model.nbc.Means().n_rows) Log::Fatal << "Test data dimensionality (" << testingData.n_rows << ") " << "must be the same as training data (" << model.nbc.Means().n_rows << ")!" << std::endl; // Time the running of the Naive Bayes Classifier. Row results; Timer::Start("nbc_testing"); model.nbc.Classify(testingData, results); Timer::Stop("nbc_testing"); if (CLI::HasParam("output_file")) { // Un-normalize labels to prepare output. Row rawResults; data::RevertLabels(results, model.mappings, rawResults); // Output results. const string outputFilename = CLI::GetParam("output_file"); data::Save(outputFilename, rawResults, true); } } if (CLI::HasParam("output_model_file")) data::Save(CLI::GetParam("output_model_file"), "nbc_model", model, false); } mlpack-2.2.5/src/mlpack/methods/nca/000077500000000000000000000000001315013601400172235ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/nca/CMakeLists.txt000066400000000000000000000010371315013601400217640ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES nca.hpp nca_impl.hpp nca_softmax_error_function.hpp nca_softmax_error_function_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). 
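# (PARENT_SCOPE on the set() below makes the updated MLPACK_SRCS visible in
# the parent directory, which is how each method's sources end up compiled
# into the mlpack library.)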
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable (nca) mlpack-2.2.5/src/mlpack/methods/nca/nca.hpp000066400000000000000000000074151315013601400205040ustar00rootroot00000000000000/** * @file nca.hpp * @author Ryan Curtin * * Declaration of NCA class (Neighborhood Components Analysis). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NCA_NCA_HPP #define MLPACK_METHODS_NCA_NCA_HPP #include #include #include #include "nca_softmax_error_function.hpp" namespace mlpack { namespace nca /** Neighborhood Components Analysis. */ { /** * An implementation of Neighborhood Components Analysis, both a linear * dimensionality reduction technique and a distance learning technique. The * method seeks to improve k-nearest-neighbor classification on a dataset by * scaling the dimensions. The method is nonparametric, and does not require a * value of k. It works by using stochastic ("soft") neighbor assignments and * using optimization techniques over the gradient of the accuracy of the * neighbor assignments. * * For more details, see the following published paper: * * @code * @inproceedings{Goldberger2004, * author = {Goldberger, Jacob and Roweis, Sam and Hinton, Geoff and * Salakhutdinov, Ruslan}, * booktitle = {Advances in Neural Information Processing Systems 17}, * pages = {513--520}, * publisher = {MIT Press}, * title = {{Neighbourhood Components Analysis}}, * year = {2004} * } * @endcode */ template class OptimizerType = optimization::SGD> class NCA { public: /** * Construct the Neighborhood Components Analysis object. This simply stores * the reference to the dataset and labels as well as the parameters for * optimization before the actual optimization is performed. * * @param dataset Input dataset. * @param labels Input dataset labels. * @param stepSize Step size for stochastic gradient descent. * @param maxIterations Maximum iterations for stochastic gradient descent. * @param tolerance Tolerance for termination of stochastic gradient descent. * @param shuffle Whether or not to shuffle the dataset during SGD. * @param metric Instantiated metric to use. */ NCA(const arma::mat& dataset, const arma::Row& labels, MetricType metric = MetricType()); /** * Perform Neighborhood Components Analysis. The output distance learning * matrix is written into the passed reference. If LearnDistance() is called * with an outputMatrix which has the correct size (dataset.n_rows x * dataset.n_rows), that matrix will be used as the starting point for * optimization. * * @param output_matrix Covariance matrix of Mahalanobis distance. */ void LearnDistance(arma::mat& outputMatrix); //! Get the dataset reference. const arma::mat& Dataset() const { return dataset; } //! Get the labels reference. const arma::Row& Labels() const { return labels; } //! Get the optimizer. const OptimizerType >& Optimizer() const { return optimizer; } OptimizerType >& Optimizer() { return optimizer; } private: //! Dataset reference. const arma::mat& dataset; //! Labels reference. const arma::Row& labels; //! Metric to be used. MetricType metric; //! The function to optimize. SoftmaxErrorFunction errorFunction; //! The optimizer to use. OptimizerType > optimizer; }; } // namespace nca } // namespace mlpack // Include the implementation. 
#include "nca_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/nca/nca_impl.hpp000066400000000000000000000030721315013601400215200ustar00rootroot00000000000000/** * @file nca_impl.hpp * @author Ryan Curtin * * Implementation of templated NCA class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NCA_NCA_IMPL_HPP #define MLPACK_METHODS_NCA_NCA_IMPL_HPP // In case it was not already included. #include "nca.hpp" namespace mlpack { namespace nca { // Just set the internal matrix reference. template class OptimizerType> NCA::NCA(const arma::mat& dataset, const arma::Row& labels, MetricType metric) : dataset(dataset), labels(labels), metric(metric), errorFunction(dataset, labels, metric), optimizer(OptimizerType >(errorFunction)) { /* Nothing to do. */ } template class OptimizerType> void NCA::LearnDistance(arma::mat& outputMatrix) { // See if we were passed an initialized matrix. if ((outputMatrix.n_rows != dataset.n_rows) || (outputMatrix.n_cols != dataset.n_rows)) outputMatrix.eye(dataset.n_rows, dataset.n_rows); Timer::Start("nca_sgd_optimization"); optimizer.Optimize(outputMatrix); Timer::Stop("nca_sgd_optimization"); } } // namespace nca } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/nca/nca_main.cpp000066400000000000000000000277571315013601400215160ustar00rootroot00000000000000/** * @file nca_main.cpp * @author Ryan Curtin * * Executable for Neighborhood Components Analysis. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include #include "nca.hpp" #include #include // Define parameters. PROGRAM_INFO("Neighborhood Components Analysis (NCA)", "This program implements Neighborhood Components Analysis, both a linear " "dimensionality reduction technique and a distance learning technique. The" " method seeks to improve k-nearest-neighbor classification on a dataset " "by scaling the dimensions. The method is nonparametric, and does not " "require a value of k. It works by using stochastic (\"soft\") neighbor " "assignments and using optimization techniques over the gradient of the " "accuracy of the neighbor assignments." "\n\n" "To work, this algorithm needs labeled data. It can be given as the last " "row of the input dataset (--input_file), or alternatively in a separate " "file (--labels_file)." "\n\n" "This implementation of NCA uses stochastic gradient descent, mini-batch " "stochastic gradient descent, or the L_BFGS optimizer. These optimizers do" " not guarantee global convergence for a nonconvex objective function " "(NCA's objective function is nonconvex), so the final results could depend" " on the random seed or other optimizer parameters." "\n\n" "Stochastic gradient descent, specified by --optimizer \"sgd\", depends " "primarily on two parameters: the step size (--step_size) and the maximum " "number of iterations (--max_iterations). In addition, a normalized " "starting point can be used (--normalize), which is necessary if many " "warnings of the form 'Denominator of p_i is 0!' are given. 
Tuning the " "step size can be a tedious affair. In general, the step size is too large" " if the objective is not mostly uniformly decreasing, or if zero-valued " "denominator warnings are being issued. The step size is too small if the " "objective is changing very slowly. Setting the termination condition can " "be done easily once a good step size parameter is found; either increase " "the maximum iterations to a large number and allow SGD to find a minimum, " "or set the maximum iterations to 0 (allowing infinite iterations) and set " "the tolerance (--tolerance) to define the maximum allowed difference " "between objectives for SGD to terminate. Be careful---setting the " "tolerance instead of the maximum iterations can take a very long time and " "may actually never converge due to the properties of the SGD optimizer. " "Note that a single iteration of SGD refers to a single point, so to take " "a single pass over the dataset, set --max_iterations equal to the number " "of points in the dataset." "\n\n" "The mini-batch SGD optimizer, specified by --optimizer \"minibatch-sgd\", " "has the same parameters as SGD, but the batch size may also be specified " "with the --batch_size (-b) option. Each iteration of mini-batch SGD " "refers to a single mini-batch." "\n\n" "The L-BFGS optimizer, specified by --optimizer \"lbfgs\", uses a " "back-tracking line search algorithm to minimize a function. The " "following parameters are used by L-BFGS: --num_basis (specifies the number" " of memory points used by L-BFGS), --max_iterations, --armijo_constant, " "--wolfe, --tolerance (the optimization is terminated when the gradient " "norm is below this value), --max_line_search_trials, --min_step and " "--max_step (which both refer to the line search routine). For more " "details on the L-BFGS optimizer, consult either the mlpack L-BFGS " "documentation (in lbfgs.hpp) or the vast set of published literature on " "L-BFGS." "\n\n" "By default, the SGD optimizer is used."); PARAM_STRING_IN_REQ("input_file", "Input dataset to run NCA on.", "i"); PARAM_STRING_OUT("output_file", "Output file for learned distance matrix.", "o"); PARAM_STRING_IN("labels_file", "File of labels for input dataset.", "l", ""); PARAM_STRING_IN("optimizer", "Optimizer to use; 'sgd', 'minibatch-sgd', or " "'lbfgs'.", "O", "sgd"); PARAM_FLAG("normalize", "Use a normalized starting point for optimization. This" " is useful for when points are far apart, or when SGD is returning NaN.", "N"); PARAM_INT_IN("max_iterations", "Maximum number of iterations for SGD or L-BFGS " "(0 indicates no limit).", "n", 500000); PARAM_DOUBLE_IN("tolerance", "Maximum tolerance for termination of SGD or " "L-BFGS.", "t", 1e-7); PARAM_DOUBLE_IN("step_size", "Step size for stochastic gradient descent " "(alpha).", "a", 0.01); PARAM_FLAG("linear_scan", "Don't shuffle the order in which data points are " "visited for SGD or mini-batch SGD.", "L"); PARAM_INT_IN("batch_size", "Batch size for mini-batch SGD.", "b", 50); PARAM_INT_IN("num_basis", "Number of memory points to be stored for L-BFGS.", "B", 5); PARAM_DOUBLE_IN("armijo_constant", "Armijo constant for L-BFGS.", "A", 1e-4); PARAM_DOUBLE_IN("wolfe", "Wolfe condition parameter for L-BFGS.", "w", 0.9); PARAM_INT_IN("max_line_search_trials", "Maximum number of line search trials " "for L-BFGS.", "T", 50); PARAM_DOUBLE_IN("min_step", "Minimum step of line search for L-BFGS.", "m", 1e-20); PARAM_DOUBLE_IN("max_step", "Maximum step of line search for L-BFGS.", "M", 1e20); PARAM_INT_IN("seed", "Random seed. 
If 0, 'std::time(NULL)' is used.", "s", 0); using namespace mlpack; using namespace mlpack::nca; using namespace mlpack::metric; using namespace mlpack::optimization; using namespace std; int main(int argc, char* argv[]) { // Parse command line. CLI::ParseCommandLine(argc, argv); if (CLI::GetParam("seed") != 0) math::RandomSeed((size_t) CLI::GetParam("seed")); else math::RandomSeed((size_t) std::time(NULL)); const string inputFile = CLI::GetParam("input_file"); const string labelsFile = CLI::GetParam("labels_file"); const string outputFile = CLI::GetParam("output_file"); if (outputFile == "") Log::Warn << "--output_file (-o) not specified; no output will be saved!" << endl; const string optimizerType = CLI::GetParam("optimizer"); if ((optimizerType != "sgd") && (optimizerType != "lbfgs") && (optimizerType != "minibatch-sgd")) { Log::Fatal << "Optimizer type '" << optimizerType << "' unknown; must be " << "'sgd', 'minibatch-sgd', or 'lbfgs'!" << endl; } // Warn on unused parameters. if (optimizerType == "sgd" || optimizerType == "minibatch-sgd") { if (CLI::HasParam("num_basis")) Log::Warn << "Parameter --num_basis ignored (not using 'lbfgs' " << "optimizer)." << endl; if (CLI::HasParam("armijo_constant")) Log::Warn << "Parameter --armijo_constant ignored (not using 'lbfgs' " << "optimizer)." << endl; if (CLI::HasParam("wolfe")) Log::Warn << "Parameter --wolfe ignored (not using 'lbfgs' optimizer).\n"; if (CLI::HasParam("max_line_search_trials")) Log::Warn << "Parameter --max_line_search_trials ignored (not using " << "'lbfgs' optimizer." << endl; if (CLI::HasParam("min_step")) Log::Warn << "Parameter --min_step ignored (not using 'lbfgs' optimizer)." << endl; if (CLI::HasParam("max_step")) Log::Warn << "Parameter --max_step ignored (not using 'lbfgs' optimizer)." << endl; if (optimizerType == "sgd" && CLI::HasParam("batch_size")) Log::Warn << "Parameter --batch_size ignored (not using 'minibatch-sgd' " << "optimizer." << endl; } else if (optimizerType == "lbfgs") { if (CLI::HasParam("step_size")) Log::Warn << "Parameter --step_size ignored (not using 'sgd' or " << "'minibatch-sgd' optimizer)." << endl; if (CLI::HasParam("linear_scan")) Log::Warn << "Parameter --linear_scan ignored (not using 'sgd' or " << "'minibatch-sgd' optimizer)." << endl; if (CLI::HasParam("batch_size")) Log::Warn << "Parameter --batch_size ignored (not using 'minibatch-sgd' " << "optimizer)." << endl; } const double stepSize = CLI::GetParam("step_size"); const size_t maxIterations = (size_t) CLI::GetParam("max_iterations"); const double tolerance = CLI::GetParam("tolerance"); const bool normalize = CLI::HasParam("normalize"); const bool shuffle = !CLI::HasParam("linear_scan"); const int numBasis = CLI::GetParam("num_basis"); const double armijoConstant = CLI::GetParam("armijo_constant"); const double wolfe = CLI::GetParam("wolfe"); const int maxLineSearchTrials = CLI::GetParam("max_line_search_trials"); const double minStep = CLI::GetParam("min_step"); const double maxStep = CLI::GetParam("max_step"); const size_t batchSize = (size_t) CLI::GetParam("batch_size"); // Load data. arma::mat data; data::Load(inputFile, data, true); // Do we want to load labels separately? arma::umat rawLabels(1, data.n_cols); if (labelsFile != "") { data::Load(labelsFile, rawLabels, true); if (rawLabels.n_cols == 1) rawLabels = trans(rawLabels); if (rawLabels.n_rows > 1) Log::Fatal << "Labels must have only one column or row!" << endl; } else { Log::Info << "Using last column of input dataset as labels." 
<< endl; for (size_t i = 0; i < data.n_cols; i++) rawLabels[i] = (int) data(data.n_rows - 1, i); data.shed_row(data.n_rows - 1); } // Now, normalize the labels. arma::uvec mappings; arma::Row labels; data::NormalizeLabels(rawLabels.row(0), labels, mappings); arma::mat distance; // Normalize the data, if necessary. if (normalize) { // Find the minimum and maximum values for each dimension. arma::vec ranges = arma::max(data, 1) - arma::min(data, 1); for (size_t d = 0; d < ranges.n_elem; ++d) if (ranges[d] == 0.0) ranges[d] = 1; // A range of 0 produces NaN later on. distance = diagmat(1.0 / ranges); Log::Info << "Using normalized starting point for optimization." << endl; } else { distance.eye(); } // Now create the NCA object and run the optimization. if (optimizerType == "sgd") { NCA > nca(data, labels); nca.Optimizer().StepSize() = stepSize; nca.Optimizer().MaxIterations() = maxIterations; nca.Optimizer().Tolerance() = tolerance; nca.Optimizer().Shuffle() = shuffle; nca.LearnDistance(distance); } else if (optimizerType == "lbfgs") { NCA, L_BFGS> nca(data, labels); nca.Optimizer().NumBasis() = numBasis; nca.Optimizer().MaxIterations() = maxIterations; nca.Optimizer().ArmijoConstant() = armijoConstant; nca.Optimizer().Wolfe() = wolfe; nca.Optimizer().MinGradientNorm() = tolerance; nca.Optimizer().MaxLineSearchTrials() = maxLineSearchTrials; nca.Optimizer().MinStep() = minStep; nca.Optimizer().MaxStep() = maxStep; nca.LearnDistance(distance); } else if (optimizerType == "minibatch-sgd") { NCA, MiniBatchSGD> nca(data, labels); nca.Optimizer().StepSize() = stepSize; nca.Optimizer().MaxIterations() = maxIterations; nca.Optimizer().Tolerance() = tolerance; nca.Optimizer().Shuffle() = shuffle; nca.Optimizer().BatchSize() = batchSize; nca.LearnDistance(distance); } // Save the output. if (outputFile != "") data::Save(outputFile, distance, true); } mlpack-2.2.5/src/mlpack/methods/nca/nca_softmax_error_function.hpp000066400000000000000000000137131315013601400253610ustar00rootroot00000000000000/** * @file nca_softmax_error_function.hpp * @author Ryan Curtin * * Implementation of the stochastic neighbor assignment probability error * function (the "softmax error"). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NCA_NCA_SOFTMAX_ERROR_FUNCTION_HPP #define MLPACK_METHODS_NCA_NCA_SOFTMAX_ERROR_FUNCTION_HPP #include namespace mlpack { namespace nca { /** * The "softmax" stochastic neighbor assignment probability function. * * The actual function is * * p_ij = (exp(-|| A x_i - A x_j || ^ 2)) / * (sum_{k != i} (exp(-|| A x_i - A x_k || ^ 2))) * * where x_n represents a point and A is the current scaling matrix. * * This class is more flexible than the original paper, allowing an arbitrary * metric function to be used in place of || A x_i - A x_j ||^2, meaning that * the squared Euclidean distance is not the only allowed metric for NCA. * However, that is probably the best way to use this class. * * In addition to the standard Evaluate() and Gradient() functions which mlpack * optimizers use, overloads of Evaluate() and Gradient() are given which only * operate on one point in the dataset. This is useful for optimizers like * stochastic gradient descent (see mlpack::optimization::SGD). 
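 *
 * A brief sketch of plugging this function into an optimizer (assuming
 * dataset is an arma::mat and labels is an arma::Row<size_t> already in
 * scope; optimizer parameters are left at their defaults):
 *
 * @code
 * SoftmaxErrorFunction<metric::SquaredEuclideanDistance> errorFunction(
 *     dataset, labels);
 * optimization::SGD<SoftmaxErrorFunction<metric::SquaredEuclideanDistance>>
 *     sgd(errorFunction);
 * arma::mat coordinates = errorFunction.GetInitialPoint();
 * sgd.Optimize(coordinates);
 * @endcode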
*/ template class SoftmaxErrorFunction { public: /** * Initialize with the given kernel; useful when the kernel has some state to * store, which is set elsewhere. If no kernel is given, an empty kernel is * used; this way, you can call the constructor with no arguments. A * reference to the dataset we will be optimizing over is also required. * * @param dataset Matrix containing the dataset. * @param labels Vector of class labels for each point in the dataset. * @param kernel Instantiated kernel (optional). */ SoftmaxErrorFunction(const arma::mat& dataset, const arma::Row& labels, MetricType metric = MetricType()); /** * Evaluate the softmax function for the given covariance matrix. This is the * non-separable implementation, where the objective function is not * decomposed into the sum of several objective functions. * * @param covariance Covariance matrix of Mahalanobis distance. */ double Evaluate(const arma::mat& covariance); /** * Evaluate the softmax objective function for the given covariance matrix on * only one point of the dataset. This is the separable implementation, where * the objective function is decomposed into the sum of many objective * functions, and here, only one of those constituent objective functions is * returned. * * @param covariance Covariance matrix of Mahalanobis distance. * @param i Index of point to use for objective function. */ double Evaluate(const arma::mat& covariance, const size_t i); /** * Evaluate the gradient of the softmax function for the given covariance * matrix. This is the non-separable implementation, where the objective * function is not decomposed into the sum of several objective functions. * * @param covariance Covariance matrix of Mahalanobis distance. * @param gradient Matrix to store the calculated gradient in. */ void Gradient(const arma::mat& covariance, arma::mat& gradient); /** * Evaluate the gradient of the softmax function for the given covariance * matrix on only one point of the dataset. This is the separable * implementation, where the objective function is decomposed into the sum of * many objective functions, and here, only one of those constituent objective * functions is returned. * * @param covariance Covariance matrix of Mahalanobis distance. * @param i Index of point to use for objective function. * @param gradient Matrix to store the calculated gradient in. */ void Gradient(const arma::mat& covariance, const size_t i, arma::mat& gradient); /** * Get the initial point. */ const arma::mat GetInitialPoint() const; /** * Get the number of functions the objective function can be decomposed into. * This is just the number of points in the dataset. */ size_t NumFunctions() const { return dataset.n_cols; } private: //! The dataset. const arma::mat& dataset; //! Labels for each point in the dataset. const arma::Row& labels; //! The instantiated metric. MetricType metric; //! Last coordinates. Used for the non-separable Evaluate() and Gradient(). arma::mat lastCoordinates; //! Stretched dataset. Kept internal to avoid memory reallocations. arma::mat stretchedDataset; //! Holds calculated p_i, for the non-separable Evaluate() and Gradient(). arma::vec p; //! Holds denominators for calculation of p_ij, for the non-separable //! Evaluate() and Gradient(). arma::vec denominators; //! False if nothing has ever been precalculated (only at construction time). 
bool precalculated; /** * Precalculate the denominators and numerators that will make up the p_ij, * but only if the coordinates matrix is different than the last coordinates * the Precalculate() method was run with. This method is only called by the * non-separable Evaluate() and Gradient(). * * This will update last_coordinates_ and stretched_dataset_, and also * calculate the p_i and denominators_ which are used in the calculation of * p_i or p_ij. The calculation will be O((n * (n + 1)) / 2), which is not * great. * * @param coordinates Coordinates matrix to use for precalculation. */ void Precalculate(const arma::mat& coordinates); }; } // namespace nca } // namespace mlpack // Include implementation. #include "nca_softmax_error_function_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/nca/nca_softmax_error_function_impl.hpp000066400000000000000000000224141315013601400264000ustar00rootroot00000000000000/** * @file nca_softmax_impl.h * @author Ryan Curtin * * Implementation of the Softmax error function. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NCA_NCA_SOFTMAX_ERROR_FUNCTCLIN_IMPL_H #define MLPACK_METHODS_NCA_NCA_SOFTMAX_ERROR_FUNCTCLIN_IMPL_H // In case it hasn't been included already. #include "nca_softmax_error_function.hpp" namespace mlpack { namespace nca { // Initialize with the given kernel. template SoftmaxErrorFunction::SoftmaxErrorFunction( const arma::mat& dataset, const arma::Row& labels, MetricType metric) : dataset(dataset), labels(labels), metric(metric), precalculated(false) { /* nothing to do */ } //! The non-separable implementation, which uses Precalculate() to save time. template double SoftmaxErrorFunction::Evaluate(const arma::mat& coordinates) { // Calculate the denominators and numerators, if necessary. Precalculate(coordinates); return -accu(p); // Sum of p_i for all i. We negate because our solver // minimizes, not maximizes. }; //! The separated objective function, which does not use Precalculate(). template double SoftmaxErrorFunction::Evaluate(const arma::mat& coordinates, const size_t i) { // Unfortunately each evaluation will take O(N) time because it requires a // scan over all points in the dataset. Our objective is to compute p_i. double denominator = 0; double numerator = 0; // It's quicker to do this now than one point at a time later. stretchedDataset = coordinates * dataset; for (size_t k = 0; k < dataset.n_cols; ++k) { // Don't consider the case where the points are the same. if (k == i) continue; // We want to evaluate exp(-D(A x_i, A x_k)). double eval = std::exp(-metric.Evaluate(stretchedDataset.unsafe_col(i), stretchedDataset.unsafe_col(k))); // If they are in the same class, update the numerator. if (labels[i] == labels[k]) numerator += eval; denominator += eval; } // Now the result is just a simple division, but we have to be sure that the // denominator is not 0. if (denominator == 0.0) { Log::Warn << "Denominator of p_" << i << " is 0!" << std::endl; return 0; } return -(numerator / denominator); // Negate because the optimizer is a // minimizer. } //! The non-separable implementation, where Precalculate() is used. template void SoftmaxErrorFunction::Gradient(const arma::mat& coordinates, arma::mat& gradient) { // Calculate the denominators and numerators, if necessary. 
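// (Precalculate() refreshes the stretched dataset A * X, each p_i, and each
// denominator sum_{k != i} exp(-d(A x_i, A x_k)), but only when the
// coordinates differ from the ones it saw last; see its definition below.)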
Precalculate(coordinates); // Now, we handle the summation over i: // sum_i (p_i sum_k (p_ik x_ik x_ik^T) - // sum_{j in class of i} (p_ij x_ij x_ij^T) // We can algebraically manipulate the whole thing to produce a more // memory-friendly way to calculate this. Looping over each i and k (again // O((n * (n + 1)) / 2) as with the last step, we can add the following to the // sum: // // if class of i is the same as the class of k, add // (((p_i - (1 / p_i)) p_ik) + ((p_k - (1 / p_k)) p_ki)) x_ik x_ik^T // otherwise, add // (p_i p_ik + p_k p_ki) x_ik x_ik^T arma::mat sum; sum.zeros(stretchedDataset.n_rows, stretchedDataset.n_rows); for (size_t i = 0; i < stretchedDataset.n_cols; i++) { for (size_t k = (i + 1); k < stretchedDataset.n_cols; k++) { // Calculate p_ik and p_ki first. double eval = exp(-metric.Evaluate(stretchedDataset.unsafe_col(i), stretchedDataset.unsafe_col(k))); double p_ik = 0, p_ki = 0; p_ik = eval / denominators(i); p_ki = eval / denominators(k); // Subtract x_i from x_k. We are not using stretched points here. arma::vec x_ik = dataset.col(i) - dataset.col(k); arma::mat secondTerm = (x_ik * trans(x_ik)); if (labels[i] == labels[k]) sum += ((p[i] - 1) * p_ik + (p[k] - 1) * p_ki) * secondTerm; else sum += (p[i] * p_ik + p[k] * p_ki) * secondTerm; } } // Assemble the final gradient. gradient = -2 * coordinates * sum; } //! The separable implementation. template void SoftmaxErrorFunction::Gradient(const arma::mat& coordinates, const size_t i, arma::mat& gradient) { // We will need to calculate p_i before this evaluation is done, so these two // variables will hold the information necessary for that. double numerator = 0; double denominator = 0; // The gradient involves two matrix terms which are eventually combined into // one. arma::mat firstTerm; arma::mat secondTerm; firstTerm.zeros(coordinates.n_rows, coordinates.n_cols); secondTerm.zeros(coordinates.n_rows, coordinates.n_cols); // Compute the stretched dataset. stretchedDataset = coordinates * dataset; for (size_t k = 0; k < dataset.n_cols; ++k) { // Don't consider the case where the points are the same. if (i == k) continue; // Calculate the numerator of p_ik. double eval = exp(-metric.Evaluate(stretchedDataset.unsafe_col(i), stretchedDataset.unsafe_col(k))); // If the points are in the same class, we must add to the second term of // the gradient as well as the numerator of p_i. We will divide by the // denominator of p_ik later. For x_ik we are not using stretched points. arma::vec x_ik = dataset.col(i) - dataset.col(k); if (labels[i] == labels[k]) { numerator += eval; secondTerm += eval * x_ik * trans(x_ik); } // We always have to add to the denominator of p_i and the first term of the // gradient computation. We will divide by the denominator of p_ik later. denominator += eval; firstTerm += eval * x_ik * trans(x_ik); } // Calculate p_i. double p = 0; if (denominator == 0) { Log::Warn << "Denominator of p_" << i << " is 0!" << std::endl; // If the denominator is zero, then all p_ik should be zero and there is // no gradient contribution from this point. gradient.zeros(coordinates.n_rows, coordinates.n_rows); return; } else { p = numerator / denominator; firstTerm /= denominator; secondTerm /= denominator; } // Now multiply the first term by p_i, and add the two together and multiply // all by 2 * A. We negate it though, because our optimizer is a minimizer. 
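// Written out, the contribution of point i to the gradient is
//   d(f_i)/dA = -2 A (p_i * sum_k (p_ik x_ik x_ik^T)
//                       - sum_{j in class of i} (p_ij x_ij x_ij^T)),
// where firstTerm and secondTerm hold the two sums, each already divided by
// the common denominator of the p_ik.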
gradient = -2 * coordinates * (p * firstTerm - secondTerm); } template const arma::mat SoftmaxErrorFunction::GetInitialPoint() const { return arma::eye(dataset.n_rows, dataset.n_rows); } template void SoftmaxErrorFunction::Precalculate( const arma::mat& coordinates) { // Ensure it is the right size. lastCoordinates.set_size(coordinates.n_rows, coordinates.n_cols); // Make sure the calculation is necessary. if ((accu(coordinates == lastCoordinates) == coordinates.n_elem) && precalculated) return; // No need to calculate; we already have this stuff saved. // Coordinates are different; save the new ones, and stretch the dataset. lastCoordinates = coordinates; stretchedDataset = coordinates * dataset; // For each point i, we must evaluate the softmax function: // p_ij = exp( -K(x_i, x_j) ) / ( sum_{k != i} ( exp( -K(x_i, x_k) ))) // p_i = sum_{j in class of i} p_ij // We will do this by keeping track of the denominators for each i as well as // the numerators (the sum for all j in class of i). This will be on the // order of O((n * (n + 1)) / 2), which really isn't all that great. p.zeros(stretchedDataset.n_cols); denominators.zeros(stretchedDataset.n_cols); for (size_t i = 0; i < stretchedDataset.n_cols; i++) { for (size_t j = (i + 1); j < stretchedDataset.n_cols; j++) { // Evaluate exp(-d(x_i, x_j)). double eval = exp(-metric.Evaluate(stretchedDataset.unsafe_col(i), stretchedDataset.unsafe_col(j))); // Add this to the denominators of both p_i and p_j: K(i, j) = K(j, i). denominators[i] += eval; denominators[j] += eval; // If i and j are the same class, add to numerator of both. if (labels[i] == labels[j]) { p[i] += eval; p[j] += eval; } } } // Divide p_i by their denominators. p /= denominators; // Clean up any bad values. for (size_t i = 0; i < stretchedDataset.n_cols; i++) { if (denominators[i] == 0.0) { Log::Debug << "Denominator of p_{" << i << ", j} is 0." << std::endl; // Set to usable values. denominators[i] = std::numeric_limits::infinity(); p[i] = 0; } } // We've done a precalculation. Mark it as done. precalculated = true; } } // namespace nca } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/000077500000000000000000000000001315013601400216045ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/neighbor_search/CMakeLists.txt000066400000000000000000000035511315013601400243500ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES neighbor_search.hpp neighbor_search_impl.hpp neighbor_search_rules.hpp neighbor_search_rules_impl.hpp neighbor_search_stat.hpp ns_model.hpp ns_model_impl.hpp sort_policies/nearest_neighbor_sort.hpp sort_policies/nearest_neighbor_sort_impl.hpp sort_policies/furthest_neighbor_sort.hpp sort_policies/furthest_neighbor_sort_impl.hpp typedef.hpp unmap.hpp unmap.cpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) # Add mlpack_knn and mlpack_kfn executables. add_cli_executable(knn) add_cli_executable(kfn) if (BUILD_CLI_EXECUTABLES) # -- mlpack_knn/mlpack_kfn compatibility start -- # Make a copy of mlpack_knn/mlpack_kfn both on Windows and *unix for # compatibility. This should be removed by mlpack 3.0.0. 
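# (On Windows the extension resolved below is ".exe"; on most Unix-like
# systems it is empty, so the copies are named mlpack_allknn.exe /
# mlpack_allkfn.exe or simply mlpack_allknn / mlpack_allkfn.)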
get_property(knn_loc TARGET mlpack_knn PROPERTY LOCATION) get_filename_component(knn_ext ${knn_loc} EXT) add_custom_command(TARGET mlpack_knn POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy $ $/mlpack_allknn${knn_ext} ) get_property(kfn_loc TARGET mlpack_kfn PROPERTY LOCATION) get_filename_component(kfn_ext ${kfn_loc} EXT) add_custom_command(TARGET mlpack_kfn POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy $ $/mlpack_allkfn${kfn_ext} ) install(PROGRAMS $/mlpack_allkfn${kfn_ext} DESTINATION bin) install(PROGRAMS $/mlpack_allknn${knn_ext} DESTINATION bin) endif () mlpack-2.2.5/src/mlpack/methods/neighbor_search/kfn_main.cpp000066400000000000000000000373451315013601400241060ustar00rootroot00000000000000/** * @file kfn_main.cpp * @author Ryan Curtin * * Implementation of the KFN executable. Allows some number of standard * options. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include "neighbor_search.hpp" #include "unmap.hpp" #include "ns_model.hpp" using namespace std; using namespace mlpack; using namespace mlpack::neighbor; using namespace mlpack::tree; using namespace mlpack::metric; // Information about the program itself. PROGRAM_INFO("All K-Furthest-Neighbors", "This program will calculate the all k-furthest-neighbors of a set of " "points. You may specify a separate set of reference points and query " "points, or just a reference set which will be used as both the reference " "and query set." "\n\n" "For example, the following will calculate the 5 furthest neighbors of each" "point in 'input.csv' and store the distances in 'distances.csv' and the " "neighbors in the file 'neighbors.csv':" "\n\n" "$ mlpack_kfn --k=5 --reference_file=input.csv " "--distances_file=distances.csv\n --neighbors_file=neighbors.csv" "\n\n" "The output files are organized such that row i and column j in the " "neighbors output file corresponds to the index of the point in the " "reference set which is the i'th furthest neighbor from the point in the " "query set with index j. Row i and column j in the distances output file " "corresponds to the distance between those two points."); // Define our input parameters that this program will take. PARAM_STRING_IN("reference_file", "File containing the reference dataset.", "r", ""); PARAM_STRING_OUT("distances_file", "File to output distances into.", "d"); PARAM_STRING_OUT("neighbors_file", "File to output neighbors into.", "n"); PARAM_STRING_IN("true_distances_file", "File of true distances to compute " "the effective error (average relative error) (it is printed when -v is " "specified).", "D", ""); PARAM_STRING_IN("true_neighbors_file", "File of true neighbors to compute the " "recall (it is printed when -v is specified).", "T", ""); // The option exists to load or save models. PARAM_STRING_IN("input_model_file", "File containing pre-trained kFN model.", "m", ""); PARAM_STRING_OUT("output_model_file", "If specified, the kFN model will be " "saved to the given file.", "M"); // The user may specify a query file of query points and a number of furthest // neighbors to search for. 
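// (If no query file is given, the reference set itself is used as the query
// set, as described in the program info above.)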
PARAM_STRING_IN("query_file", "File containing query points (optional).", "q", ""); PARAM_INT_IN("k", "Number of furthest neighbors to find.", "k", 0); // The user may specify the type of tree to use, and a few pararmeters for tree // building. PARAM_STRING_IN("tree_type", "Type of tree to use: 'kd', 'vp', 'rp', 'max-rp', " "'ub', 'cover', 'r', 'r-star', 'x', 'ball', 'hilbert-r', 'r-plus', " "'r-plus-plus', 'oct'.", "t", "kd"); PARAM_INT_IN("leaf_size", "Leaf size for tree building (used for kd-trees, " "vp trees, random projection trees, UB trees, R trees, R* trees, X trees, " "Hilbert R trees, R+ trees, R++ trees, and octrees).", "l", 20); PARAM_FLAG("random_basis", "Before tree-building, project the data onto a " "random orthogonal basis.", "R"); PARAM_INT_IN("seed", "Random seed (if 0, std::time(NULL) is used).", "s", 0); // Search settings. PARAM_STRING_IN("algorithm", "Type of neighbor search: 'naive', 'single_tree', " "'dual_tree', 'greedy'.", "a", "dual_tree"); PARAM_FLAG("naive", "(Deprecated) If true, O(n^2) naive mode is used for " "computation. Will be removed in mlpack 3.0.0. Use '--algorithm naive' " "instead.", "N"); PARAM_FLAG("single_mode", "(Deprecated) If true, single-tree search is used " "(as opposed to dual-tree search). Will be removed in mlpack 3.0.0. Use " "'--algorithm single_tree' instead.", "S"); PARAM_DOUBLE_IN("epsilon", "If specified, will do approximate furthest neighbor" " search with given relative error. Must be in the range [0,1).", "e", 0); PARAM_DOUBLE_IN("percentage", "If specified, will do approximate furthest " "neighbor search. Must be in the range (0,1] (decimal form). Resultant " "neighbors will be at least (p*100) % of the distance as the true furthest " "neighbor.", "p", 1); // Convenience typedef. typedef NSModel KFNModel; int main(int argc, char *argv[]) { // Give CLI the command line parameters the user passed in. CLI::ParseCommandLine(argc, argv); if (CLI::GetParam("seed") != 0) math::RandomSeed((size_t) CLI::GetParam("seed")); else math::RandomSeed((size_t) std::time(NULL)); // A user cannot specify both reference data and a model. if (CLI::HasParam("reference_file") && CLI::HasParam("input_model_file")) Log::Fatal << "Only one of --reference_file (-r) or --input_model_file (-m)" << " may be specified!" << endl; // A user must specify one of them... if (!CLI::HasParam("reference_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "No model specified (--input_model_file) and no reference " << "data specified (--reference_file)! One must be provided." << endl; if (CLI::HasParam("input_model_file")) { // Notify the user of parameters that will be ignored. if (CLI::HasParam("tree_type")) Log::Warn << "--tree_type (-t) will be ignored because --input_model_file" << " is specified." << endl; if (CLI::HasParam("random_basis")) Log::Warn << "--random_basis (-R) will be ignored because " << "--input_model_file is specified." << endl; // Notify the user of parameters that will be only be considered for query // tree. if (CLI::HasParam("leaf_size")) Log::Warn << "--leaf_size (-l) will only be considered for the query " "tree, because --input_model_file is specified." << endl; } // The user should give something to do... if (!CLI::HasParam("k") && !CLI::HasParam("output_model_file")) Log::Warn << "Neither -k nor --output_model_file are specified, so no " << "results from this program will be saved!" << endl; // If the user specifies k but no output files, they should be warned. 
if (CLI::HasParam("k") && !(CLI::HasParam("neighbors_file") || CLI::HasParam("distances_file"))) Log::Warn << "Neither --neighbors_file nor --distances_file is specified, " << "so the furthest neighbor search results will not be saved!" << endl; // If the user specifies output files but no k, they should be warned. if ((CLI::HasParam("neighbors_file") || CLI::HasParam("distances_file")) && !CLI::HasParam("k")) Log::Warn << "An output file for furthest neighbor search is given (" << "--neighbors_file or --distances_file), but furthest neighbor search" << " is not being performed because k (--k) is not specified! No " << "results will be saved." << endl; if (!CLI::HasParam("k") && CLI::HasParam("true_neighbors_file")) Log::Warn << "--true_neighbors_file (-T) ignored because no search is being" << " performed (--k is not specified)." << endl; if (!CLI::HasParam("k") && CLI::HasParam("true_distances_file")) Log::Warn << "--true_distances_file (-D) ignored because no search is being" << " performed (--k is not specified)." << endl; // Sanity check on leaf size. const int lsInt = CLI::GetParam("leaf_size"); if (lsInt < 1) Log::Fatal << "Invalid leaf size: " << lsInt << ". Must be greater than 0." << endl; // Sanity check on epsilon. double epsilon = CLI::GetParam("epsilon"); if (epsilon < 0 || epsilon >= 1) Log::Fatal << "Invalid epsilon: " << epsilon << ". Must be in the range " << "[0,1)." << endl; // Sanity check on percentage. const double percentage = CLI::GetParam("percentage"); if (percentage <= 0 || percentage > 1) Log::Fatal << "Invalid percentage: " << percentage << ". Must be in the " << "range (0,1] (decimal form)." << endl; if (CLI::HasParam("percentage") && CLI::HasParam("epsilon")) Log::Fatal << "Cannot provide both epsilon and percentage." << endl; if (CLI::HasParam("percentage")) epsilon = 1 - percentage; // We either have to load the reference data, or we have to load the model. NSModel kfn; const string algorithm = CLI::GetParam("algorithm"); NeighborSearchMode searchMode = DUAL_TREE_MODE; if (algorithm == "naive") searchMode = NAIVE_MODE; else if (algorithm == "single_tree") searchMode = SINGLE_TREE_MODE; else if (algorithm == "dual_tree") searchMode = DUAL_TREE_MODE; else if (algorithm == "greedy") searchMode = GREEDY_SINGLE_TREE_MODE; else Log::Fatal << "Unknown neighbor search algorithm '" << algorithm << "'; " << "valid choices are 'naive', 'single_tree', 'dual_tree' and 'greedy'." << endl; if (CLI::HasParam("single_mode")) { searchMode = SINGLE_TREE_MODE; Log::Warn << "--single_mode is deprecated. Will be removed in mlpack " "3.0.0. Use '--algorithm single_tree' instead." << endl; if (CLI::HasParam("algorithm") && algorithm != "single_tree") Log::Fatal << "Contradiction between options --algorithm " << algorithm << " and --single_mode." << endl; } if (CLI::HasParam("naive")) { searchMode = NAIVE_MODE; Log::Warn << "--naive is deprecated. Will be removed in mlpack 3.0.0. Use " "'--algorithm naive' instead." << endl; if (CLI::HasParam("algorithm") && algorithm != "naive") Log::Fatal << "Contradiction between options --algorithm " << algorithm << " and --naive." << endl; if (CLI::HasParam("single_mode")) Log::Warn << "--single_mode ignored because --naive is present." << endl; } if (CLI::HasParam("reference_file")) { // Get all the parameters. 
    const string referenceFile = CLI::GetParam<string>("reference_file");
    const string treeType = CLI::GetParam<string>("tree_type");
    const bool randomBasis = CLI::HasParam("random_basis");

    KFNModel::TreeTypes tree = KFNModel::KD_TREE;
    if (treeType == "kd")
      tree = KFNModel::KD_TREE;
    else if (treeType == "cover")
      tree = KFNModel::COVER_TREE;
    else if (treeType == "r")
      tree = KFNModel::R_TREE;
    else if (treeType == "r-star")
      tree = KFNModel::R_STAR_TREE;
    else if (treeType == "ball")
      tree = KFNModel::BALL_TREE;
    else if (treeType == "x")
      tree = KFNModel::X_TREE;
    else if (treeType == "hilbert-r")
      tree = KFNModel::HILBERT_R_TREE;
    else if (treeType == "r-plus")
      tree = KFNModel::R_PLUS_TREE;
    else if (treeType == "r-plus-plus")
      tree = KFNModel::R_PLUS_PLUS_TREE;
    else if (treeType == "vp")
      tree = KFNModel::VP_TREE;
    else if (treeType == "rp")
      tree = KFNModel::RP_TREE;
    else if (treeType == "max-rp")
      tree = KFNModel::MAX_RP_TREE;
    else if (treeType == "ub")
      tree = KFNModel::UB_TREE;
    else if (treeType == "oct")
      tree = KFNModel::OCTREE;
    else
      Log::Fatal << "Unknown tree type '" << treeType << "'; valid choices are "
          << "'kd', 'vp', 'rp', 'max-rp', 'ub', 'cover', 'r', 'r-star', 'x', "
          << "'ball', 'hilbert-r', 'r-plus', 'r-plus-plus', and 'oct'." << endl;

    kfn.TreeType() = tree;
    kfn.RandomBasis() = randomBasis;

    arma::mat referenceSet;
    data::Load(referenceFile, referenceSet, true);
    Log::Info << "Loaded reference data from '" << referenceFile << "' ("
        << referenceSet.n_rows << "x" << referenceSet.n_cols << ")." << endl;

    kfn.BuildModel(std::move(referenceSet), size_t(lsInt), searchMode, epsilon);
  }
  else
  {
    // Load the model from file.
    const string inputModelFile = CLI::GetParam<string>("input_model_file");
    data::Load(inputModelFile, "kfn_model", kfn, true); // Fatal on failure.

    // Adjust search mode.
    kfn.SetSearchMode(searchMode);
    kfn.Epsilon() = epsilon;

    // If leaf_size wasn't provided, let's consider the current value in the
    // loaded model.  Else, update it (only considered when building the query
    // tree).
    if (CLI::HasParam("leaf_size"))
      kfn.LeafSize() = size_t(lsInt);

    Log::Info << "Loaded kFN model from '" << inputModelFile << "' (trained on "
        << kfn.Dataset().n_rows << "x" << kfn.Dataset().n_cols << " dataset)."
        << endl;
  }

  // Perform search, if desired.
  if (CLI::HasParam("k"))
  {
    const string queryFile = CLI::GetParam<string>("query_file");
    const size_t k = (size_t) CLI::GetParam<int>("k");

    arma::mat queryData;
    if (queryFile != "")
    {
      data::Load(queryFile, queryData, true);
      Log::Info << "Loaded query data from '" << queryFile << "' ("
          << queryData.n_rows << "x" << queryData.n_cols << ")." << endl;
    }

    // Sanity check on k value: must be greater than 0, must be less than or
    // equal to the number of reference points.  Since it is unsigned, we only
    // test the upper bound.
    if (k > kfn.Dataset().n_cols)
    {
      Log::Fatal << "Invalid k: " << k << "; must be greater than 0 and less "
          << "than or equal to the number of reference points ("
          << kfn.Dataset().n_cols << ")." << endl;
    }

    // Now run the search.
    arma::Mat<size_t> neighbors;
    arma::mat distances;
    if (CLI::HasParam("query_file"))
      kfn.Search(std::move(queryData), k, neighbors, distances);
    else
      kfn.Search(k, neighbors, distances);
    Log::Info << "Search complete." << endl;

    // Save output, if desired.
    if (CLI::HasParam("neighbors_file"))
      data::Save(CLI::GetParam<string>("neighbors_file"), neighbors);
    if (CLI::HasParam("distances_file"))
      data::Save(CLI::GetParam<string>("distances_file"), distances);

    // Calculate the effective error, if desired.
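    // (The effective error is the average relative error of the returned
    // distances against the supplied true distances; KFN::EffectiveError()
    // below ignores entries whose true distance is zero.)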
if (CLI::HasParam("true_distances_file")) { if (kfn.Epsilon() == 0) Log::Warn << "--true_distances_file (-D) specified, but the search is " << "exact, so there is no need to calculate the error!" << endl; const string trueDistancesFile = CLI::GetParam( "true_distances_file"); arma::mat trueDistances; data::Load(trueDistancesFile, trueDistances, true); if (trueDistances.n_rows != distances.n_rows || trueDistances.n_cols != distances.n_cols) Log::Fatal << "The true distances file must have the same number of " << "values than the set of distances being queried!" << endl; Log::Info << "Effective error: " << KFN::EffectiveError(distances, trueDistances) << endl; } // Calculate the recall, if desired. if (CLI::HasParam("true_neighbors_file")) { if (kfn.Epsilon() == 0) Log::Warn << "--true_neighbors_file (-T) specified, but the search is " << "exact, so there is no need to calculate the recall!" << endl; const string trueNeighborsFile = CLI::GetParam( "true_neighbors_file"); arma::Mat trueNeighbors; data::Load(trueNeighborsFile, trueNeighbors, true); if (trueNeighbors.n_rows != neighbors.n_rows || trueNeighbors.n_cols != neighbors.n_cols) Log::Fatal << "The true neighbors file must have the same number of " << "values than the set of neighbors being queried!" << endl; Log::Info << "Recall: " << KFN::Recall(neighbors, trueNeighbors) << endl; } } if (CLI::HasParam("output_model_file")) { const string outputModelFile = CLI::GetParam("output_model_file"); data::Save(outputModelFile, "kfn_model", kfn); } } mlpack-2.2.5/src/mlpack/methods/neighbor_search/knn_main.cpp000066400000000000000000000410661315013601400241110ustar00rootroot00000000000000/** * @file knn_main.cpp * @author Ryan Curtin * * Implementation of the AllkNN executable. Allows some number of standard * options. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include #include #include "neighbor_search.hpp" #include "unmap.hpp" #include "ns_model.hpp" using namespace std; using namespace mlpack; using namespace mlpack::neighbor; using namespace mlpack::tree; using namespace mlpack::metric; // Information about the program itself. PROGRAM_INFO("k-Nearest-Neighbors", "This program will calculate the k-nearest-neighbors of a set of " "points using kd-trees or cover trees (cover tree support is experimental " "and may be slow). You may specify a separate set of " "reference points and query points, or just a reference set which will be " "used as both the reference and query set." "\n\n" "For example, the following will calculate the 5 nearest neighbors of each" "point in 'input.csv' and store the distances in 'distances.csv' and the " "neighbors in the file 'neighbors.csv':" "\n\n" "$ mlpack_knn --k=5 --reference_file=input.csv " "--distances_file=distances.csv\n --neighbors_file=neighbors.csv" "\n\n" "The output files are organized such that row i and column j in the " "neighbors output file corresponds to the index of the point in the " "reference set which is the i'th nearest neighbor from the point in the " "query set with index j. Row i and column j in the distances output file " "corresponds to the distance between those two points."); // Define our input parameters that this program will take. 
PARAM_STRING_IN("reference_file", "File containing the reference dataset.", "r", ""); PARAM_STRING_OUT("distances_file", "File to output distances into.", "d"); PARAM_STRING_OUT("neighbors_file", "File to output neighbors into.", "n"); PARAM_STRING_IN("true_distances_file", "File of true distances to compute " "the effective error (average relative error) (it is printed when -v is " "specified).", "D", ""); PARAM_STRING_IN("true_neighbors_file", "File of true neighbors to compute the " "recall (it is printed when -v is specified).", "T", ""); // The option exists to load or save models. PARAM_STRING_IN("input_model_file", "File containing pre-trained kNN model.", "m", ""); PARAM_STRING_OUT("output_model_file", "If specified, the kNN model will be " "saved to the given file.", "M"); // The user may specify a query file of query points and a number of nearest // neighbors to search for. PARAM_STRING_IN("query_file", "File containing query points (optional).", "q", ""); PARAM_INT_IN("k", "Number of nearest neighbors to find.", "k", 0); // The user may specify the type of tree to use, and a few parameters for tree // building. PARAM_STRING_IN("tree_type", "Type of tree to use: 'kd', 'vp', 'rp', 'max-rp', " "'ub', 'cover', 'r', 'r-star', 'x', 'ball', 'hilbert-r', 'r-plus', " "'r-plus-plus', 'spill', 'oct'.", "t", "kd"); PARAM_INT_IN("leaf_size", "Leaf size for tree building (used for kd-trees, vp " "trees, random projection trees, UB trees, R trees, R* trees, X trees, " "Hilbert R trees, R+ trees, R++ trees, spill trees, and octrees).", "l", 20); PARAM_DOUBLE_IN("tau", "Overlapping size (only valid for spill trees).", "u", 0); PARAM_DOUBLE_IN("rho", "Balance threshold (only valid for spill trees).", "b", 0.7); PARAM_FLAG("random_basis", "Before tree-building, project the data onto a " "random orthogonal basis.", "R"); PARAM_INT_IN("seed", "Random seed (if 0, std::time(NULL) is used).", "s", 0); // Search settings. PARAM_STRING_IN("algorithm", "Type of neighbor search: 'naive', 'single_tree', " "'dual_tree', 'greedy'.", "a", "dual_tree"); PARAM_FLAG("naive", "(Deprecated) If true, O(n^2) naive mode is used for " "computation. Will be removed in mlpack 3.0.0. Use '--algorithm naive' " "instead.", "N"); PARAM_FLAG("single_mode", "(Deprecated) If true, single-tree search is used " "(as opposed to dual-tree search). Will be removed in mlpack 3.0.0. Use " "'--algorithm single_tree' instead.", "S"); PARAM_DOUBLE_IN("epsilon", "If specified, will do approximate nearest neighbor " "search with given relative error.", "e", 0); // Convenience typedef. typedef NSModel KNNModel; int main(int argc, char *argv[]) { // Give CLI the command line parameters the user passed in. CLI::ParseCommandLine(argc, argv); if (CLI::GetParam("seed") != 0) math::RandomSeed((size_t) CLI::GetParam("seed")); else math::RandomSeed((size_t) std::time(NULL)); // A user cannot specify both reference data and a model. if (CLI::HasParam("reference_file") && CLI::HasParam("input_model_file")) Log::Fatal << "Only one of --reference_file (-r) or --input_model_file (-m)" << " may be specified!" << endl; // A user must specify one of them... if (!CLI::HasParam("reference_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "No model specified (--input_model_file) and no reference " << "data specified (--reference_file)! One must be provided." << endl; if (CLI::HasParam("input_model_file")) { // Notify the user of parameters that will be ignored. 
if (CLI::HasParam("tree_type")) Log::Warn << "--tree_type (-t) will be ignored because --input_model_file" << " is specified." << endl; if (CLI::HasParam("random_basis")) Log::Warn << "--random_basis (-R) will be ignored because " << "--input_model_file is specified." << endl; if (CLI::HasParam("tau")) Log::Warn << "--tau (-u) will be ignored because --input_model_file is " "specified." << endl; if (CLI::HasParam("rho")) Log::Warn << "--rho (-b) will be ignored because --input_model_file is " "specified." << endl; // Notify the user of parameters that will be only be considered for query // tree. if (CLI::HasParam("leaf_size")) Log::Warn << "--leaf_size (-l) will only be considered for the query " "tree, because --input_model_file is specified." << endl; } // The user should give something to do... if (!CLI::HasParam("k") && !CLI::HasParam("output_model_file")) Log::Warn << "Neither -k nor --output_model_file are specified, so no " << "results from this program will be saved!" << endl; // If the user specifies k but no output files, they should be warned. if (CLI::HasParam("k") && !(CLI::HasParam("neighbors_file") || CLI::HasParam("distances_file"))) Log::Warn << "Neither --neighbors_file nor --distances_file is specified, " << "so the nearest neighbor search results will not be saved!" << endl; // If the user specifies output files but no k, they should be warned. if ((CLI::HasParam("neighbors_file") || CLI::HasParam("distances_file")) && !CLI::HasParam("k")) Log::Warn << "An output file for nearest neighbor search is given (" << "--neighbors_file or --distances_file), but nearest neighbor search " << "is not being performed because k (--k) is not specified! No " << "results will be saved." << endl; if (!CLI::HasParam("k") && CLI::HasParam("true_neighbors_file")) Log::Warn << "--true_neighbors_file (-T) ignored because no search is being" << " performed (--k is not specified)." << endl; if (!CLI::HasParam("k") && CLI::HasParam("true_distances_file")) Log::Warn << "--true_distances_file (-D) ignored because no search is being" << " performed (--k is not specified)." << endl; // Sanity check on leaf size. const int lsInt = CLI::GetParam("leaf_size"); if (lsInt < 1) Log::Fatal << "Invalid leaf size: " << lsInt << ". Must be greater " "than 0." << endl; // Sanity check on tau. const double tau = CLI::GetParam("tau"); if (tau < 0) Log::Fatal << "Invalid tau: " << tau << ". Must be non-negative. " << endl; if (CLI::HasParam("tau") && "spill" != CLI::GetParam("tree_type")) Log::Fatal << "Tau parameter is only valid for spill trees." << endl; // Sanity check on rho. const double rho = CLI::GetParam("rho"); if (rho < 0 || rho > 1) Log::Fatal << "Invalid rho: " << rho << ". Must be in the range [0,1]. " << endl; if (CLI::HasParam("rho") && "spill" != CLI::GetParam("tree_type")) Log::Fatal << "Rho parameter is only valid for spill trees." << endl; // Sanity check on epsilon. const double epsilon = CLI::GetParam("epsilon"); if (epsilon < 0) Log::Fatal << "Invalid epsilon: " << epsilon << ". Must be non-negative. " << endl; // We either have to load the reference data, or we have to load the model. 
  NSModel<NearestNeighborSort> knn;

  const string algorithm = CLI::GetParam<string>("algorithm");
  NeighborSearchMode searchMode = DUAL_TREE_MODE;
  if (algorithm == "naive")
    searchMode = NAIVE_MODE;
  else if (algorithm == "single_tree")
    searchMode = SINGLE_TREE_MODE;
  else if (algorithm == "dual_tree")
    searchMode = DUAL_TREE_MODE;
  else if (algorithm == "greedy")
    searchMode = GREEDY_SINGLE_TREE_MODE;
  else
    Log::Fatal << "Unknown neighbor search algorithm '" << algorithm << "'; "
        << "valid choices are 'naive', 'single_tree', 'dual_tree' and 'greedy'."
        << endl;

  if (CLI::HasParam("single_mode"))
  {
    searchMode = SINGLE_TREE_MODE;
    Log::Warn << "--single_mode is deprecated.  Will be removed in mlpack "
        "3.0.0.  Use '--algorithm single_tree' instead." << endl;
    if (CLI::HasParam("algorithm") && algorithm != "single_tree")
      Log::Fatal << "Contradiction between options --algorithm " << algorithm
          << " and --single_mode." << endl;
  }

  if (CLI::HasParam("naive"))
  {
    searchMode = NAIVE_MODE;
    Log::Warn << "--naive is deprecated.  Will be removed in mlpack 3.0.0.  "
        "Use '--algorithm naive' instead." << endl;
    if (CLI::HasParam("algorithm") && algorithm != "naive")
      Log::Fatal << "Contradiction between options --algorithm " << algorithm
          << " and --naive." << endl;
    if (CLI::HasParam("single_mode"))
      Log::Warn << "--single_mode ignored because --naive is present." << endl;
  }

  if (CLI::HasParam("reference_file"))
  {
    // Get all the parameters.
    const string referenceFile = CLI::GetParam<string>("reference_file");
    const string treeType = CLI::GetParam<string>("tree_type");
    const bool randomBasis = CLI::HasParam("random_basis");

    KNNModel::TreeTypes tree = KNNModel::KD_TREE;
    if (treeType == "kd")
      tree = KNNModel::KD_TREE;
    else if (treeType == "cover")
      tree = KNNModel::COVER_TREE;
    else if (treeType == "r")
      tree = KNNModel::R_TREE;
    else if (treeType == "r-star")
      tree = KNNModel::R_STAR_TREE;
    else if (treeType == "ball")
      tree = KNNModel::BALL_TREE;
    else if (treeType == "x")
      tree = KNNModel::X_TREE;
    else if (treeType == "hilbert-r")
      tree = KNNModel::HILBERT_R_TREE;
    else if (treeType == "r-plus")
      tree = KNNModel::R_PLUS_TREE;
    else if (treeType == "r-plus-plus")
      tree = KNNModel::R_PLUS_PLUS_TREE;
    else if (treeType == "spill")
      tree = KNNModel::SPILL_TREE;
    else if (treeType == "vp")
      tree = KNNModel::VP_TREE;
    else if (treeType == "rp")
      tree = KNNModel::RP_TREE;
    else if (treeType == "max-rp")
      tree = KNNModel::MAX_RP_TREE;
    else if (treeType == "ub")
      tree = KNNModel::UB_TREE;
    else if (treeType == "oct")
      tree = KNNModel::OCTREE;
    else
      Log::Fatal << "Unknown tree type '" << treeType << "'; valid choices are "
          << "'kd', 'vp', 'rp', 'max-rp', 'ub', 'cover', 'r', 'r-star', 'x', "
          << "'ball', 'hilbert-r', 'r-plus', 'r-plus-plus', 'spill', and "
          << "'oct'." << endl;

    knn.TreeType() = tree;
    knn.RandomBasis() = randomBasis;
    knn.LeafSize() = size_t(lsInt);
    knn.Tau() = tau;
    knn.Rho() = rho;

    arma::mat referenceSet;
    data::Load(referenceFile, referenceSet, true);
    Log::Info << "Loaded reference data from '" << referenceFile << "' ("
        << referenceSet.n_rows << " x " << referenceSet.n_cols << ")." << endl;

    knn.BuildModel(std::move(referenceSet), size_t(lsInt), searchMode, epsilon);
  }
  else
  {
    // Load the model from file.
    const string inputModelFile = CLI::GetParam<string>("input_model_file");
    data::Load(inputModelFile, "knn_model", knn, true); // Fatal on failure.

    // Adjust search mode.
    knn.SetSearchMode(searchMode);
    knn.Epsilon() = epsilon;

    // If leaf_size wasn't provided, let's consider the current value in the
    // loaded model.  Else, update it (only considered when building the query
    // tree).
if (CLI::HasParam("leaf_size")) knn.LeafSize() = size_t(lsInt); Log::Info << "Loaded kNN model from '" << inputModelFile << "' (trained on " << knn.Dataset().n_rows << "x" << knn.Dataset().n_cols << " dataset)." << endl; } // Perform search, if desired. if (CLI::HasParam("k")) { const string queryFile = CLI::GetParam("query_file"); const size_t k = (size_t) CLI::GetParam("k"); arma::mat queryData; if (queryFile != "") { data::Load(queryFile, queryData, true); Log::Info << "Loaded query data from '" << queryFile << "' (" << queryData.n_rows << "x" << queryData.n_cols << ")." << endl; } // Sanity check on k value: must be greater than 0, must be less than the // number of reference points. Since it is unsigned, we only test the upper // bound. if (k > knn.Dataset().n_cols) { Log::Fatal << "Invalid k: " << k << "; must be greater than 0 and less "; Log::Fatal << "than or equal to the number of reference points ("; Log::Fatal << knn.Dataset().n_cols << ")." << endl; } // Now run the search. arma::Mat neighbors; arma::mat distances; if (CLI::HasParam("query_file")) knn.Search(std::move(queryData), k, neighbors, distances); else knn.Search(k, neighbors, distances); Log::Info << "Search complete." << endl; // Save output, if desired. if (CLI::HasParam("neighbors_file")) data::Save(CLI::GetParam("neighbors_file"), neighbors); if (CLI::HasParam("distances_file")) data::Save(CLI::GetParam("distances_file"), distances); // Calculate the effective error, if desired. if (CLI::HasParam("true_distances_file")) { if (knn.TreeType() != KNNModel::SPILL_TREE && knn.Epsilon() == 0) Log::Warn << "--true_distances_file (-D) specified, but the search is " << "exact, so there is no need to calculate the error!" << endl; const string trueDistancesFile = CLI::GetParam( "true_distances_file"); arma::mat trueDistances; data::Load(trueDistancesFile, trueDistances, true); if (trueDistances.n_rows != distances.n_rows || trueDistances.n_cols != distances.n_cols) Log::Fatal << "The true distances file must have the same number of " << "values than the set of distances being queried!" << endl; Log::Info << "Effective error: " << KNN::EffectiveError(distances, trueDistances) << endl; } // Calculate the recall, if desired. if (CLI::HasParam("true_neighbors_file")) { if (knn.TreeType() != KNNModel::SPILL_TREE && knn.Epsilon() == 0) Log::Warn << "--true_neighbors_file (-T) specified, but the search is " << "exact, so there is no need to calculate the recall!" << endl; const string trueNeighborsFile = CLI::GetParam( "true_neighbors_file"); arma::Mat trueNeighbors; data::Load(trueNeighborsFile, trueNeighbors, true); if (trueNeighbors.n_rows != neighbors.n_rows || trueNeighbors.n_cols != neighbors.n_cols) Log::Fatal << "The true neighbors file must have the same number of " << "values than the set of neighbors being queried!" << endl; Log::Info << "Recall: " << KNN::Recall(neighbors, trueNeighbors) << endl; } } if (CLI::HasParam("output_model_file")) { const string outputModelFile = CLI::GetParam("output_model_file"); data::Save(outputModelFile, "knn_model", knn); } } mlpack-2.2.5/src/mlpack/methods/neighbor_search/neighbor_search.hpp000066400000000000000000000626241315013601400254510ustar00rootroot00000000000000/** * @file neighbor_search.hpp * @author Ryan Curtin * * Defines the NeighborSearch class, which performs an abstract * nearest-neighbor-like query on two datasets. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
 * You should have received a copy of the 3-clause BSD license along with
 * mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_NEIGHBOR_SEARCH_HPP
#define MLPACK_METHODS_NEIGHBOR_SEARCH_NEIGHBOR_SEARCH_HPP

#include <mlpack/core.hpp>
#include <vector>
#include <string>

#include <mlpack/core/metrics/lmetric.hpp>
#include <mlpack/core/tree/binary_space_tree.hpp>
#include <mlpack/core/tree/rectangle_tree.hpp>
#include <mlpack/core/tree/spill_tree.hpp>

#include "neighbor_search_stat.hpp"
#include "sort_policies/nearest_neighbor_sort.hpp"
#include "neighbor_search_rules.hpp"

namespace mlpack {
namespace neighbor /** Neighbor-search routines.  These include
                    * all-nearest-neighbors and all-furthest-neighbors
                    * searches. */ {

// Forward declaration.
template<typename SortPolicy>
class TrainVisitor;

//! NeighborSearchMode represents the different neighbor search modes
//! available.
enum NeighborSearchMode
{
  NAIVE_MODE,
  SINGLE_TREE_MODE,
  DUAL_TREE_MODE,
  GREEDY_SINGLE_TREE_MODE
};

/**
 * The NeighborSearch class is a template class for performing distance-based
 * neighbor searches.  It takes a query dataset and a reference dataset (or
 * just a reference dataset) and, for each point in the query dataset, finds
 * the k neighbors in the reference dataset which have the 'best' distance
 * according to a given sorting policy.  A constructor is given which takes
 * only a reference dataset, and if that constructor is used, the given
 * reference dataset is also used as the query dataset.
 *
 * The template parameters SortPolicy and Metric define the sort function used
 * and the metric (distance function) used.  More information on those classes
 * can be found in the NearestNeighborSort class and the kernel::ExampleKernel
 * class.
 *
 * @tparam SortPolicy The sort policy for distances; see NearestNeighborSort.
 * @tparam MetricType The metric to use for computation.
 * @tparam MatType The type of data matrix.
 * @tparam TreeType The tree type to use; must adhere to the TreeType API.
 * @tparam DualTreeTraversalType The type of dual tree traversal to use
 *     (defaults to the tree's default traverser).
 * @tparam SingleTreeTraversalType The type of single tree traversal to use
 *     (defaults to the tree's default traverser).
 */
template<typename SortPolicy = NearestNeighborSort,
         typename MetricType = metric::EuclideanDistance,
         typename MatType = arma::mat,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType = tree::KDTree,
         template<typename RuleType> class DualTreeTraversalType =
             TreeType<MetricType,
                      NeighborSearchStat<SortPolicy>,
                      MatType>::template DualTreeTraverser,
         template<typename RuleType> class SingleTreeTraversalType =
             TreeType<MetricType,
                      NeighborSearchStat<SortPolicy>,
                      MatType>::template SingleTreeTraverser>
class NeighborSearch
{
 public:
  //! Convenience typedef.
  typedef TreeType<MetricType, NeighborSearchStat<SortPolicy>, MatType> Tree;

  /**
   * Initialize the NeighborSearch object, passing a reference dataset (this
   * is the dataset which is searched).  Optionally, perform the computation
   * in a different mode.  An initialized distance metric can be given, for
   * cases where the metric has internal data (i.e. the
   * distance::MahalanobisDistance class).
   *
   * This method will copy the matrices to internal copies, which are
   * rearranged during tree-building.  You can avoid this extra copy by
   * pre-constructing the trees and passing them using a different
   * constructor, or by using the constructor that takes an rvalue reference
   * to the dataset.
   *
   * @param referenceSet Set of reference points.
   * @param mode Neighbor search mode.
   * @param epsilon Relative approximate error (non-negative).
   * @param metric An optional instance of the MetricType class.
   */
  NeighborSearch(const MatType& referenceSet,
                 const NeighborSearchMode mode = DUAL_TREE_MODE,
                 const double epsilon = 0,
                 const MetricType metric = MetricType());

  /**
   * Initialize the NeighborSearch object, taking ownership of the reference
   * dataset (this is the dataset which is searched).
 * Optionally, perform the computation in a different mode.  An initialized
   * distance metric can be given, for cases where the metric has internal
   * data (i.e. the distance::MahalanobisDistance class).
   *
   * This method will not copy the data matrix, but will take ownership of it,
   * and depending on the type of tree used, may rearrange the points.  If you
   * would rather a copy be made, consider using the constructor that takes a
   * const reference to the data instead.
   *
   * @param referenceSet Set of reference points.
   * @param mode Neighbor search mode.
   * @param epsilon Relative approximate error (non-negative).
   * @param metric An optional instance of the MetricType class.
   */
  NeighborSearch(MatType&& referenceSet,
                 const NeighborSearchMode mode = DUAL_TREE_MODE,
                 const double epsilon = 0,
                 const MetricType metric = MetricType());

  /**
   * Initialize the NeighborSearch object with a copy of the given
   * pre-constructed reference tree (this is the tree built on the points that
   * will be searched).  Optionally, choose to use single-tree mode.  Naive
   * mode is not available as an option for this constructor.  Additionally,
   * an instantiated distance metric can be given, for cases where the
   * distance metric holds data.
   *
   * This method will copy the given tree.  You can avoid this copy by using
   * the constructor that takes an rvalue reference to the tree.
   *
   * @note
   * Mapping the points of the matrix back to their original indices is not
   * done when this constructor is used, so if the tree type you are using
   * maps points (like BinarySpaceTree), then you will have to perform the
   * re-mapping manually.
   * @endnote
   *
   * @param referenceTree Pre-built tree for reference points.
   * @param mode Neighbor search mode.
   * @param epsilon Relative approximate error (non-negative).
   * @param metric Instantiated distance metric.
   */
  NeighborSearch(const Tree& referenceTree,
                 const NeighborSearchMode mode = DUAL_TREE_MODE,
                 const double epsilon = 0,
                 const MetricType metric = MetricType());

  /**
   * Initialize the NeighborSearch object with the given pre-constructed
   * reference tree (this is the tree built on the points that will be
   * searched).  Optionally, choose to use single-tree mode.  Naive mode is
   * not available as an option for this constructor.  Additionally, an
   * instantiated distance metric can be given, for cases where the distance
   * metric holds data.
   *
   * This method will take ownership of the given tree.  There is no copying
   * of the data matrices (because tree-building is not necessary), so this is
   * the constructor to use when copies absolutely must be avoided.
   *
   * @note
   * Mapping the points of the matrix back to their original indices is not
   * done when this constructor is used, so if the tree type you are using
   * maps points (like BinarySpaceTree), then you will have to perform the
   * re-mapping manually.
   * @endnote
   *
   * @param referenceTree Pre-built tree for reference points.
   * @param mode Neighbor search mode.
   * @param epsilon Relative approximate error (non-negative).
   * @param metric Instantiated distance metric.
   */
  NeighborSearch(Tree&& referenceTree,
                 const NeighborSearchMode mode = DUAL_TREE_MODE,
                 const double epsilon = 0,
                 const MetricType metric = MetricType());

  /**
   * Create a NeighborSearch object without any reference data.  If Search()
   * is called before a reference set is set with Train(), an exception will
   * be thrown.
   *
   * @param mode Neighbor search mode.
   * @param epsilon Relative approximate error (non-negative).
   * @param metric Instantiated metric.
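   *
   * A minimal usage sketch (the variable names are illustrative, not part of
   * the API): construct with no data, then hand the data over via Train():
   *
   * @code
   * KNN knn; // KNN is the NeighborSearch<NearestNeighborSort> typedef
   *          // (see typedef.hpp).
   * knn.Train(std::move(dataset)); // Builds the tree, taking ownership.
   * @endcode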
   */
  NeighborSearch(const NeighborSearchMode mode = DUAL_TREE_MODE,
                 const double epsilon = 0,
                 const MetricType metric = MetricType());

  /**
   * Initialize the NeighborSearch object, passing a reference dataset (this
   * is the dataset which is searched).  Optionally, perform the computation
   * in naive mode or single-tree mode.  An initialized distance metric can be
   * given, for cases where the metric has internal data (i.e. the
   * distance::MahalanobisDistance class).
   *
   * Deprecated.  Will be removed in mlpack 3.0.0.
   *
   * This method will copy the matrices to internal copies, which are
   * rearranged during tree-building.  You can avoid this extra copy by
   * pre-constructing the trees and passing them using a different
   * constructor, or by using the constructor that takes an rvalue reference
   * to the dataset.
   *
   * @param referenceSet Set of reference points.
   * @param naive If true, O(n^2) naive search will be used (as opposed to
   *     dual-tree search).  This overrides singleMode (if it is set to true).
   * @param singleMode If true, single-tree search will be used (as opposed to
   *     dual-tree search).
   * @param epsilon Relative approximate error (non-negative).
   * @param metric An optional instance of the MetricType class.
   */
  mlpack_deprecated NeighborSearch(const MatType& referenceSet,
                                   const bool naive,
                                   const bool singleMode = false,
                                   const double epsilon = 0,
                                   const MetricType metric = MetricType());

  /**
   * Initialize the NeighborSearch object, taking ownership of the reference
   * dataset (this is the dataset which is searched).  Optionally, perform the
   * computation in naive mode or single-tree mode.  An initialized distance
   * metric can be given, for cases where the metric has internal data (i.e.
   * the distance::MahalanobisDistance class).
   *
   * Deprecated.  Will be removed in mlpack 3.0.0.
   *
   * This method will not copy the data matrix, but will take ownership of it,
   * and depending on the type of tree used, may rearrange the points.  If you
   * would rather a copy be made, consider using the constructor that takes a
   * const reference to the data instead.
   *
   * @param referenceSet Set of reference points.
   * @param naive If true, O(n^2) naive search will be used (as opposed to
   *     dual-tree search).  This overrides singleMode (if it is set to true).
   * @param singleMode If true, single-tree search will be used (as opposed to
   *     dual-tree search).
   * @param epsilon Relative approximate error (non-negative).
   * @param metric An optional instance of the MetricType class.
   */
  mlpack_deprecated NeighborSearch(MatType&& referenceSet,
                                   const bool naive,
                                   const bool singleMode = false,
                                   const double epsilon = 0,
                                   const MetricType metric = MetricType());

  /**
   * Initialize the NeighborSearch object with the given pre-constructed
   * reference tree (this is the tree built on the points that will be
   * searched).  Optionally, choose to use single-tree mode.  Naive mode is
   * not available as an option for this constructor.  Additionally, an
   * instantiated distance metric can be given, for cases where the distance
   * metric holds data.
   *
   * Deprecated.  Will be removed in mlpack 3.0.0.
   *
   * This method won't take ownership of the given tree.  There is no copying
   * of the data matrices in this constructor (because tree-building is not
   * necessary), so this is the constructor to use when copies absolutely must
   * be avoided.
   *
   * @note
   * Mapping the points of the matrix back to their original indices is not
   * done when this constructor is used, so if the tree type you are using
   * maps points (like BinarySpaceTree), then you will have to perform the
   * re-mapping manually.
 * @endnote
   *
   * @param referenceTree Pre-built tree for reference points.
   * @param singleMode Whether single-tree computation should be used (as
   *     opposed to dual-tree computation).
   * @param epsilon Relative approximate error (non-negative).
   * @param metric Instantiated distance metric.
   */
  mlpack_deprecated NeighborSearch(Tree* referenceTree,
                                   const bool singleMode,
                                   const double epsilon = 0,
                                   const MetricType metric = MetricType());

  /**
   * Create a NeighborSearch object without any reference data.  If Search()
   * is called before a reference set is set with Train(), an exception will
   * be thrown.
   *
   * Deprecated.  Will be removed in mlpack 3.0.0.
   *
   * @param naive Whether to use naive search.
   * @param singleMode Whether single-tree computation should be used (as
   *     opposed to dual-tree computation).
   * @param epsilon Relative approximate error (non-negative).
   * @param metric Instantiated metric.
   */
  mlpack_deprecated NeighborSearch(const bool naive,
                                   const bool singleMode = false,
                                   const double epsilon = 0,
                                   const MetricType metric = MetricType());

  /**
   * Copy the given NeighborSearch object (this may consume a lot of memory;
   * be careful!).
   */
  NeighborSearch(const NeighborSearch& other);

  /**
   * Take possession of the given NeighborSearch object.
   */
  NeighborSearch(NeighborSearch&& other);

  /**
   * Delete the NeighborSearch object.  The tree is the only member we are
   * responsible for deleting.  The others will take care of themselves.
   */
  ~NeighborSearch();

  /**
   * Set the reference set to a new reference set, and build a tree if
   * necessary.  This method is called 'Train()' in order to match the rest of
   * the mlpack abstractions, even though calling this "training" is maybe a
   * bit of a stretch.
   *
   * @param referenceSet New set of reference data.
   */
  void Train(const MatType& referenceSet);

  /**
   * Set the reference set to a new reference set, taking ownership of the
   * set, and build a tree if necessary.  This method is called 'Train()' in
   * order to match the rest of the mlpack abstractions, even though calling
   * this "training" is maybe a bit of a stretch.
   *
   * @param referenceSet New set of reference data.
   */
  void Train(MatType&& referenceSet);

  /**
   * Set the reference tree to a new reference tree.
   *
   * This method is deprecated and will be removed in mlpack 3.0.0!  Train()
   * methods taking a reference to the reference tree are preferred.
   *
   * @param referenceTree Pre-built tree for reference points.
   */
  mlpack_deprecated void Train(Tree* referenceTree);

  /**
   * Set the reference tree as a copy of the given reference tree.
   *
   * This method will copy the given tree.  You can avoid this copy by using
   * the Train() method that takes an rvalue reference to the tree.
   *
   * @param referenceTree Pre-built tree for reference points.
   */
  void Train(const Tree& referenceTree);

  /**
   * Set the reference tree to a new reference tree.
   *
   * This method will take ownership of the given tree.
   *
   * @param referenceTree Pre-built tree for reference points.
   */
  void Train(Tree&& referenceTree);

  /**
   * For each point in the query set, compute the nearest neighbors and store
   * the output in the given matrices.  The matrices will be set to the size
   * of n columns by k rows, where n is the number of points in the query
   * dataset and k is the number of neighbors being searched for.
   *
   * If querySet contains only a few query points, the extra cost of building
   * a tree on the points for dual-tree search may not be warranted, and it
   * may be worthwhile to set singleMode = true (either in the constructor or
   * with SingleMode()).
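   *
   * For example (a sketch; the data and variable names are arbitrary):
   *
   * @code
   * KNN knn(referenceData);
   * arma::Mat<size_t> neighbors;
   * arma::mat distances;
   * knn.Search(queryData, 3, neighbors, distances);
   * @endcode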
 *
   * @param querySet Set of query points (can be just one point).
   * @param k Number of neighbors to search for.
   * @param neighbors Matrix storing lists of neighbors for each query point.
   * @param distances Matrix storing distances of neighbors for each query
   *     point.
   */
  void Search(const MatType& querySet,
              const size_t k,
              arma::Mat<size_t>& neighbors,
              arma::mat& distances);

  /**
   * Given a pre-built query tree, search for the nearest neighbors of each
   * point in the query tree, storing the output in the given matrices.  The
   * matrices will be set to the size of n columns by k rows, where n is the
   * number of points in the query dataset and k is the number of neighbors
   * being searched for.
   *
   * This method is deprecated and will be removed in mlpack 3.0.0!  The
   * Search() method taking a reference to the query tree is preferred.
   *
   * Note that if you are calling Search() multiple times with a single query
   * tree, you need to reset the bounds in the statistic of each query node,
   * otherwise the result may be wrong!  You can do this by calling
   * TreeType::Stat()::Reset() on each node in the query tree.
   *
   * @param queryTree Tree built on query points.
   * @param k Number of neighbors to search for.
   * @param neighbors Matrix storing lists of neighbors for each query point.
   * @param distances Matrix storing distances of neighbors for each query
   *     point.
   * @param sameSet Denotes whether or not the reference and query sets are
   *     the same.
   */
  mlpack_deprecated void Search(Tree* queryTree,
                                const size_t k,
                                arma::Mat<size_t>& neighbors,
                                arma::mat& distances,
                                bool sameSet = false);

  /**
   * Given a pre-built query tree, search for the nearest neighbors of each
   * point in the query tree, storing the output in the given matrices.  The
   * matrices will be set to the size of n columns by k rows, where n is the
   * number of points in the query dataset and k is the number of neighbors
   * being searched for.
   *
   * Note that if you are calling Search() multiple times with a single query
   * tree, you need to reset the bounds in the statistic of each query node,
   * otherwise the result may be wrong!  You can do this by calling
   * TreeType::Stat()::Reset() on each node in the query tree.
   *
   * @param queryTree Tree built on query points.
   * @param k Number of neighbors to search for.
   * @param neighbors Matrix storing lists of neighbors for each query point.
   * @param distances Matrix storing distances of neighbors for each query
   *     point.
   * @param sameSet Denotes whether or not the reference and query sets are
   *     the same.
   */
  void Search(Tree& queryTree,
              const size_t k,
              arma::Mat<size_t>& neighbors,
              arma::mat& distances,
              bool sameSet = false);

  /**
   * Search for the nearest neighbors of every point in the reference set.
   * This is basically equivalent to calling any other overload of Search()
   * with the reference set as the query set; so, this lets you do
   * all-k-nearest-neighbors search.  The results are stored in the given
   * matrices.  The matrices will be set to the size of n columns by k rows,
   * where n is the number of points in the query dataset and k is the number
   * of neighbors being searched for.
   *
   * @param k Number of neighbors to search for.
   * @param neighbors Matrix storing lists of neighbors for each query point.
   * @param distances Matrix storing distances of neighbors for each query
   *     point.
   */
  void Search(const size_t k,
              arma::Mat<size_t>& neighbors,
              arma::mat& distances);

  /**
   * Calculate the average relative error (effective error) between the
   * distances calculated and the true distances provided.  The input matrices
   * must have the same size.
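   * Concretely, the value returned is (roughly) the mean over all counted
   * entries of |d_found - d_true| / d_true, so an exact search yields 0 and
   * distances that are off by 10% on average yield about 0.1.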
 *
   * Cases where the true distance is zero (the same point) or the calculated
   * distance is SortPolicy::WorstDistance() (didn't find enough points) will
   * be ignored.
   *
   * @param foundDistances Matrix storing lists of calculated distances for
   *     each query point.
   * @param realDistances Matrix storing lists of true best distances for each
   *     query point.
   * @return Average relative error.
   */
  static double EffectiveError(arma::mat& foundDistances,
                               arma::mat& realDistances);

  /**
   * Calculate the recall (% of neighbors found) given the list of found
   * neighbors and the true set of neighbors.  The recall returned will be in
   * the range [0, 1].
   *
   * @param foundNeighbors Matrix storing lists of calculated neighbors for
   *     each query point.
   * @param realNeighbors Matrix storing lists of true best neighbors for each
   *     query point.
   * @return Recall.
   */
  static double Recall(arma::Mat<size_t>& foundNeighbors,
                       arma::Mat<size_t>& realNeighbors);

  //! Return the total number of base case evaluations performed during the
  //! last search.
  size_t BaseCases() const { return baseCases; }
  //! Return the number of node combination scores during the last search.
  size_t Scores() const { return scores; }

  //! Access whether or not search is done in naive linear scan mode.
  //! Deprecated.  Will be replaced in mlpack 3.0.0, by a new method:
  //! NeighborSearchMode SearchMode().
  bool Naive() const { return naive; }
  //! Modify whether or not search is done in naive linear scan mode.
  //! Deprecated.  Will be replaced in mlpack 3.0.0, by a new method:
  //! NeighborSearchMode& SearchMode().
  bool& Naive() { return naive; }

  //! Access whether or not search is done in single-tree mode.
  //! Deprecated.  Will be replaced in mlpack 3.0.0, by a new method:
  //! NeighborSearchMode SearchMode().
  bool SingleMode() const { return singleMode; }
  //! Modify whether or not search is done in single-tree mode.
  //! Deprecated.  Will be replaced in mlpack 3.0.0, by a new method:
  //! NeighborSearchMode& SearchMode().
  bool& SingleMode() { return singleMode; }

  //! Access whether or not search is done in greedy mode.
  //! Deprecated.  Will be replaced in mlpack 3.0.0, by a new method:
  //! NeighborSearchMode SearchMode().
  bool Greedy() const { return greedy; }
  //! Modify whether or not search is done in greedy mode.
  //! Deprecated.  Will be replaced in mlpack 3.0.0, by a new method:
  //! NeighborSearchMode& SearchMode().
  bool& Greedy() { return greedy; }

  //! Access the relative error to be considered in approximate search.
  double Epsilon() const { return epsilon; }
  //! Modify the relative error to be considered in approximate search.
  double& Epsilon() { return epsilon; }

  //! Access the reference dataset.
  const MatType& ReferenceSet() const { return *referenceSet; }

  //! Access the reference tree.
  const Tree& ReferenceTree() const { return *referenceTree; }
  //! Modify the reference tree.
  Tree& ReferenceTree() { return *referenceTree; }

  //! Serialize the NeighborSearch model.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */);

 private:
  //! Permutations of reference points during tree building.
  std::vector<size_t> oldFromNewReferences;
  //! Pointer to the root of the reference tree.
  Tree* referenceTree;
  //! Reference dataset.  In some situations we may be the owner of this.
  const MatType* referenceSet;

  //! If true, this object created the trees and is responsible for them.
  bool treeOwner;
  //! If true, we own the reference set.
  bool setOwner;

  //! Indicates the neighbor search mode.
  NeighborSearchMode searchMode;
  //! Indicates if O(n^2) naive search is being used.
  bool naive;
  //! Indicates if single-tree search is being used (as opposed to dual-tree).
  bool singleMode;
  //! Indicates if greedy search is being used.
  bool greedy;
  //! Indicates the relative error to be considered in approximate search.
  double epsilon;

  //! Instantiation of metric.
  MetricType metric;

  //! The total number of base cases.
  size_t baseCases;
  //! The total number of scores (applicable for non-naive search).
  size_t scores;

  //! If this is true, the reference tree bounds need to be reset on a call to
  //! Search() without a query set.
  bool treeNeedsReset;

  //! Updates searchMode to be according to naive, singleMode and greedy
  //! booleans.  This is only necessary until the modifiers Naive(),
  //! SingleMode() and Greedy() are removed in mlpack 3.0.0.
  void UpdateSearchMode();

  //! Updates naive, singleMode and greedy flags according to searchMode.
  //! This is only necessary until the modifiers Naive(), SingleMode() and
  //! Greedy() are removed in mlpack 3.0.0.
  void UpdateSearchModeFlags();

  //! The NSModel class should have access to internal members.
  template<typename SortPol>
  friend class TrainVisitor;
}; // class NeighborSearch

} // namespace neighbor
} // namespace mlpack

// Include implementation.
#include "neighbor_search_impl.hpp"

// Include convenience typedefs.
#include "typedef.hpp"

#endif
mlpack-2.2.5/src/mlpack/methods/neighbor_search/neighbor_search_impl.hpp000066400000000000000000001301201315013601400264650ustar00rootroot00000000000000/**
 * @file neighbor_search_impl.hpp
 * @author Ryan Curtin
 *
 * Implementation of Neighbor-Search class to perform all-nearest-neighbors on
 * two specified data sets.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_NEIGHBOR_SEARCH_IMPL_HPP
#define MLPACK_METHODS_NEIGHBOR_SEARCH_NEIGHBOR_SEARCH_IMPL_HPP

#include <mlpack/core.hpp>
#include <boost/utility/enable_if.hpp>
#include <mlpack/core/tree/greedy_single_tree_traverser.hpp>

#include "neighbor_search_rules.hpp"

#include <mlpack/core/tree/spill_tree/is_spill_tree.hpp>

namespace mlpack {
namespace neighbor {

//! Call the tree constructor that does mapping.
template<typename TreeType, typename MatType>
TreeType* BuildTree(
    const MatType& dataset,
    std::vector<size_t>& oldFromNew,
    typename boost::enable_if_c<
        tree::TreeTraits<TreeType>::RearrangesDataset == true, TreeType*
    >::type = 0)
{
  return new TreeType(dataset, oldFromNew);
}

//! Call the tree constructor that does not do mapping.
template<typename TreeType, typename MatType>
TreeType* BuildTree(
    const MatType& dataset,
    const std::vector<size_t>& /* oldFromNew */,
    const typename boost::enable_if_c<
        tree::TreeTraits<TreeType>::RearrangesDataset == false, TreeType*
    >::type = 0)
{
  return new TreeType(dataset);
}

//! Call the tree constructor that does mapping.
template<typename TreeType, typename MatType>
TreeType* BuildTree(
    MatType&& dataset,
    std::vector<size_t>& oldFromNew,
    typename boost::enable_if_c<
        tree::TreeTraits<TreeType>::RearrangesDataset == true, TreeType*
    >::type = 0)
{
  return new TreeType(std::move(dataset), oldFromNew);
}

//! Call the tree constructor that does not do mapping.
template<typename TreeType, typename MatType>
TreeType* BuildTree(
    MatType&& dataset,
    std::vector<size_t>& /* oldFromNew */,
    const typename boost::enable_if_c<
        tree::TreeTraits<TreeType>::RearrangesDataset == false, TreeType*
    >::type = 0)
{
  return new TreeType(std::move(dataset));
}

// Construct the object.
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::NeighborSearch(
    const MatType& referenceSetIn,
    const NeighborSearchMode mode,
    const double epsilon,
    const MetricType metric) :
    referenceTree(mode == NAIVE_MODE ?
        NULL : BuildTree<Tree>(referenceSetIn, oldFromNewReferences)),
    referenceSet(mode == NAIVE_MODE ? &referenceSetIn :
        &referenceTree->Dataset()),
    treeOwner(mode != NAIVE_MODE),
    setOwner(false),
    searchMode(mode),
    epsilon(epsilon),
    metric(metric),
    baseCases(0),
    scores(0),
    treeNeedsReset(false)
{
  // Update naive, singleMode and greedy flags according to searchMode.
  UpdateSearchModeFlags();

  if (epsilon < 0)
    throw std::invalid_argument("epsilon must be non-negative");
}

// Construct the object.
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::NeighborSearch(
    MatType&& referenceSetIn,
    const NeighborSearchMode mode,
    const double epsilon,
    const MetricType metric) :
    referenceTree(mode == NAIVE_MODE ? NULL :
        BuildTree<Tree>(std::move(referenceSetIn), oldFromNewReferences)),
    referenceSet(mode == NAIVE_MODE ? new MatType(std::move(referenceSetIn)) :
        &referenceTree->Dataset()),
    treeOwner(mode != NAIVE_MODE),
    setOwner(mode == NAIVE_MODE),
    searchMode(mode),
    epsilon(epsilon),
    metric(metric),
    baseCases(0),
    scores(0),
    treeNeedsReset(false)
{
  // Update naive, singleMode and greedy flags according to searchMode.
  UpdateSearchModeFlags();

  if (epsilon < 0)
    throw std::invalid_argument("epsilon must be non-negative");
}

// Construct the object.
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::NeighborSearch(
    const Tree& referenceTree,
    const NeighborSearchMode mode,
    const double epsilon,
    const MetricType metric) :
    referenceTree(new Tree(referenceTree)),
    referenceSet(&this->referenceTree->Dataset()),
    treeOwner(true),
    setOwner(false),
    searchMode(mode),
    epsilon(epsilon),
    metric(metric),
    baseCases(0),
    scores(0),
    treeNeedsReset(false)
{
  // Update naive, singleMode and greedy flags according to searchMode.
  UpdateSearchModeFlags();

  if (epsilon < 0)
    throw std::invalid_argument("epsilon must be non-negative");
}

// Construct the object.
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::NeighborSearch(
    Tree&& referenceTree,
    const NeighborSearchMode mode,
    const double epsilon,
    const MetricType metric) :
    referenceTree(new Tree(std::move(referenceTree))),
    referenceSet(&this->referenceTree->Dataset()),
    treeOwner(true),
    setOwner(false),
    searchMode(mode),
    epsilon(epsilon),
    metric(metric),
    baseCases(0),
    scores(0),
    treeNeedsReset(false)
{
  // Update naive, singleMode and greedy flags according to searchMode.
  UpdateSearchModeFlags();

  if (epsilon < 0)
    throw std::invalid_argument("epsilon must be non-negative");
}

// Construct the object without a reference dataset.
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::NeighborSearch(
    const NeighborSearchMode mode,
    const double epsilon,
    const MetricType metric) :
    referenceTree(NULL),
    referenceSet(new MatType()), // Empty matrix.
    treeOwner(false),
    setOwner(true),
    searchMode(mode),
    epsilon(epsilon),
    metric(metric),
    baseCases(0),
    scores(0),
    treeNeedsReset(false)
{
  // Update naive, singleMode and greedy flags according to searchMode.
  UpdateSearchModeFlags();

  if (epsilon < 0)
    throw std::invalid_argument("epsilon must be non-negative");

  // Build the tree on the empty dataset, if necessary.
  if (mode != NAIVE_MODE)
  {
    referenceTree = BuildTree<Tree>(*referenceSet, oldFromNewReferences);
    treeOwner = true;
  }
}

// Construct the object.
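// The constructors below implement the deprecated (naive, singleMode)
// interface, kept for mlpack 2.x compatibility; they set the boolean flags
// directly and then derive searchMode from them via UpdateSearchMode().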
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::NeighborSearch(
    const MatType& referenceSetIn,
    const bool naive,
    const bool singleMode,
    const double epsilon,
    const MetricType metric) :
    referenceTree(naive ? NULL :
        BuildTree<Tree>(referenceSetIn, oldFromNewReferences)),
    referenceSet(naive ? &referenceSetIn : &referenceTree->Dataset()),
    treeOwner(!naive), // False if a tree was passed.  If naive, then no trees.
    setOwner(false),
    naive(naive),
    singleMode(!naive && singleMode), // No single mode if naive.
    greedy(false),
    epsilon(epsilon),
    metric(metric),
    baseCases(0),
    scores(0),
    treeNeedsReset(false)
{
  // Update searchMode according to naive, singleMode and greedy flags.
  UpdateSearchMode();

  if (epsilon < 0)
    throw std::invalid_argument("epsilon must be non-negative");
}

// Construct the object.
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::NeighborSearch(
    MatType&& referenceSetIn,
    const bool naive,
    const bool singleMode,
    const double epsilon,
    const MetricType metric) :
    referenceTree(naive ? NULL :
        BuildTree<Tree>(std::move(referenceSetIn), oldFromNewReferences)),
    referenceSet(naive ? new MatType(std::move(referenceSetIn)) :
        &referenceTree->Dataset()),
    treeOwner(!naive),
    setOwner(naive),
    naive(naive),
    singleMode(!naive && singleMode),
    greedy(false),
    epsilon(epsilon),
    metric(metric),
    baseCases(0),
    scores(0),
    treeNeedsReset(false)
{
  // Update searchMode according to naive, singleMode and greedy flags.
  UpdateSearchMode();

  if (epsilon < 0)
    throw std::invalid_argument("epsilon must be non-negative");
}

// Construct the object.
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::NeighborSearch(
    Tree* referenceTree,
    const bool singleMode,
    const double epsilon,
    const MetricType metric) :
    referenceTree(referenceTree),
    referenceSet(&referenceTree->Dataset()),
    treeOwner(false),
    setOwner(false),
    naive(false),
    singleMode(singleMode),
    greedy(false),
    epsilon(epsilon),
    metric(metric),
    baseCases(0),
    scores(0),
    treeNeedsReset(false)
{
  // Update searchMode according to naive, singleMode and greedy flags.
  UpdateSearchMode();

  if (epsilon < 0)
    throw std::invalid_argument("epsilon must be non-negative");
}

// Construct the object without a reference dataset.
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::NeighborSearch(
    const bool naive,
    const bool singleMode,
    const double epsilon,
    const MetricType metric) :
    referenceTree(NULL),
    referenceSet(new MatType()), // Empty matrix.
    treeOwner(false),
    setOwner(true),
    naive(naive),
    singleMode(singleMode),
    greedy(false),
    epsilon(epsilon),
    metric(metric),
    baseCases(0),
    scores(0),
    treeNeedsReset(false)
{
  // Update searchMode according to naive, singleMode and greedy flags.
  UpdateSearchMode();

  if (epsilon < 0)
    throw std::invalid_argument("epsilon must be non-negative");

  // Build the tree on the empty dataset, if necessary.
  if (!naive)
  {
    referenceTree = BuildTree<Tree>(*referenceSet, oldFromNewReferences);
    treeOwner = true;
  }
}

// Copy constructor.
template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::
NeighborSearch(const NeighborSearch& other) :
    oldFromNewReferences(other.oldFromNewReferences),
    referenceTree(other.naive ? NULL : new Tree(*other.referenceTree)),
    referenceSet(other.naive ?
new MatType(*other.referenceSet) : &referenceTree->Dataset()), treeOwner(!other.naive), setOwner(other.naive), naive(other.naive), singleMode(other.singleMode), epsilon(other.epsilon), metric(other.metric), baseCases(other.baseCases), scores(other.scores), treeNeedsReset(other.treeNeedsReset) { // Nothing to do. } // Move constructor. template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> NeighborSearch:: NeighborSearch(NeighborSearch&& other) : oldFromNewReferences(std::move(other.oldFromNewReferences)), referenceTree(other.referenceTree), referenceSet(other.referenceSet), treeOwner(other.treeOwner), setOwner(other.setOwner), naive(other.naive), singleMode(other.singleMode), epsilon(other.epsilon), metric(std::move(other.metric)), baseCases(other.baseCases), scores(other.scores), treeNeedsReset(other.treeNeedsReset) { other.referenceTree = NULL; other.referenceSet = new arma::mat(); // Empty dataset. other.treeOwner = false; other.setOwner = true; other.baseCases = 0; other.scores = 0; } // Clean memory. template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> NeighborSearch::~NeighborSearch() { if (treeOwner && referenceTree) delete referenceTree; if (setOwner && referenceSet) delete referenceSet; } template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> void NeighborSearch::Train( const MatType& referenceSet) { // Update searchMode. UpdateSearchMode(); // Clean up the old tree, if we built one. if (treeOwner && referenceTree) { oldFromNewReferences.clear(); delete referenceTree; } // We may need to rebuild the tree. if (searchMode != NAIVE_MODE) { referenceTree = BuildTree(referenceSet, oldFromNewReferences); treeOwner = true; } else { treeOwner = false; } // Delete the old reference set, if we owned it. if (setOwner && this->referenceSet) delete this->referenceSet; if (searchMode != NAIVE_MODE) this->referenceSet = &referenceTree->Dataset(); else this->referenceSet = &referenceSet; setOwner = false; // We don't own the set in either case. } template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> void NeighborSearch::Train(MatType&& referenceSetIn) { // Update searchMode. UpdateSearchMode(); // Clean up the old tree, if we built one. if (treeOwner && referenceTree) { oldFromNewReferences.clear(); delete referenceTree; } // We may need to rebuild the tree. if (searchMode != NAIVE_MODE) { referenceTree = BuildTree(std::move(referenceSetIn), oldFromNewReferences); treeOwner = true; } else { treeOwner = false; } // Delete the old reference set, if we owned it. if (setOwner && referenceSet) delete referenceSet; if (searchMode != NAIVE_MODE) { referenceSet = &referenceTree->Dataset(); setOwner = false; } else { referenceSet = new MatType(std::move(referenceSetIn)); setOwner = true; } } template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> void NeighborSearch::Train(Tree* referenceTree) { // Update searchMode. 
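  // A hedged usage sketch for this overload (hypothetical variable names, not
  // from the original sources): the caller keeps ownership of the tree it
  // passes in.
  //
  //   KNN::Tree tree(data);  // Build a tree outside the model.
  //   knn.Train(&tree);      // knn now searches this tree...
  //   // ...but will not delete it, so `tree` must outlive `knn`.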
UpdateSearchMode(); if (searchMode == NAIVE_MODE) throw std::invalid_argument("cannot train on given reference tree when " "naive search (without trees) is desired"); if (treeOwner && this->referenceTree) { oldFromNewReferences.clear(); delete this->referenceTree; } if (setOwner && referenceSet) delete this->referenceSet; this->referenceTree = referenceTree; this->referenceSet = &referenceTree->Dataset(); treeOwner = false; setOwner = false; } template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> void NeighborSearch::Train( const Tree& referenceTree) { // Update searchMode according to naive, singleMode and greedy flags. UpdateSearchMode(); if (naive) throw std::invalid_argument("cannot train on given reference tree when " "naive search (without trees) is desired"); if (treeOwner && this->referenceTree) { oldFromNewReferences.clear(); delete this->referenceTree; } if (setOwner && referenceSet) delete this->referenceSet; this->referenceTree = new Tree(referenceTree); this->referenceSet = &this->referenceTree->Dataset(); treeOwner = true; setOwner = false; } template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> void NeighborSearch::Train(Tree&& referenceTree) { // Update searchMode according to naive, singleMode and greedy flags. UpdateSearchMode(); if (naive) throw std::invalid_argument("cannot train on given reference tree when " "naive search (without trees) is desired"); if (treeOwner && this->referenceTree) { oldFromNewReferences.clear(); delete this->referenceTree; } if (setOwner && referenceSet) delete this->referenceSet; this->referenceTree = new Tree(std::move(referenceTree)); this->referenceSet = &this->referenceTree->Dataset(); treeOwner = true; setOwner = false; } /** * Computes the best neighbors and stores them in resultingNeighbors and * distances. */ template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> void NeighborSearch::Search( const MatType& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances) { // Update searchMode. UpdateSearchMode(); if (k > referenceSet->n_cols) { std::stringstream ss; ss << "requested value of k (" << k << ") is greater than the number of " << "points in the reference set (" << referenceSet->n_cols << ")"; throw std::invalid_argument(ss.str()); } Timer::Start("computing_neighbors"); baseCases = 0; scores = 0; // This will hold mappings for query points, if necessary. std::vector oldFromNewQueries; // If we have built the trees ourselves, then we will have to map all the // indices back to their original indices when this computation is finished. // To avoid an extra copy, we will store the neighbors and distances in a // separate matrix. arma::Mat* neighborPtr = &neighbors; arma::mat* distancePtr = &distances; // Mapping is only necessary if the tree rearranges points. if (tree::TreeTraits::RearrangesDataset) { if (searchMode == DUAL_TREE_MODE) { distancePtr = new arma::mat; // Query indices need to be mapped. neighborPtr = new arma::Mat; } else if (!oldFromNewReferences.empty()) neighborPtr = new arma::Mat; // Reference indices need mapping. } // Set the size of the neighbor and distance matrices. neighborPtr->set_size(k, querySet.n_cols); distancePtr->set_size(k, querySet.n_cols); typedef NeighborSearchRules RuleType; switch(searchMode) { case NAIVE_MODE: { // Create the helper object for the tree traversal. 
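      // (Explanatory note: the rules object holds one candidate list per
      // query point -- a k-element priority queue seeded with the worst
      // possible distance -- and exposes BaseCase() and Score().  Every
      // search mode below reuses the same rules type and differs only in how
      // the point and node pairs are enumerated.)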
RuleType rules(*referenceSet, querySet, k, metric, epsilon); // The naive brute-force traversal. for (size_t i = 0; i < querySet.n_cols; ++i) for (size_t j = 0; j < referenceSet->n_cols; ++j) rules.BaseCase(i, j); baseCases += querySet.n_cols * referenceSet->n_cols; rules.GetResults(*neighborPtr, *distancePtr); break; } case SINGLE_TREE_MODE: { // Create the helper object for the tree traversal. RuleType rules(*referenceSet, querySet, k, metric, epsilon); // Create the traverser. SingleTreeTraversalType traverser(rules); // Now have it traverse for each point. for (size_t i = 0; i < querySet.n_cols; ++i) traverser.Traverse(i, *referenceTree); scores += rules.Scores(); baseCases += rules.BaseCases(); Log::Info << rules.Scores() << " node combinations were scored." << std::endl; Log::Info << rules.BaseCases() << " base cases were calculated." << std::endl; rules.GetResults(*neighborPtr, *distancePtr); break; } case DUAL_TREE_MODE: { // Build the query tree. Timer::Stop("computing_neighbors"); Timer::Start("tree_building"); Tree* queryTree = BuildTree(querySet, oldFromNewQueries); Timer::Stop("tree_building"); Timer::Start("computing_neighbors"); // Create the helper object for the tree traversal. RuleType rules(*referenceSet, queryTree->Dataset(), k, metric, epsilon); // Create the traverser. DualTreeTraversalType traverser(rules); traverser.Traverse(*queryTree, *referenceTree); scores += rules.Scores(); baseCases += rules.BaseCases(); Log::Info << rules.Scores() << " node combinations were scored." << std::endl; Log::Info << rules.BaseCases() << " base cases were calculated." << std::endl; rules.GetResults(*neighborPtr, *distancePtr); delete queryTree; break; } case GREEDY_SINGLE_TREE_MODE: { // Create the helper object for the tree traversal. RuleType rules(*referenceSet, querySet, k, metric); // Create the traverser. tree::GreedySingleTreeTraverser traverser(rules); // Now have it traverse for each point. for (size_t i = 0; i < querySet.n_cols; ++i) traverser.Traverse(i, *referenceTree); scores += rules.Scores(); baseCases += rules.BaseCases(); Log::Info << rules.Scores() << " node combinations were scored." << std::endl; Log::Info << rules.BaseCases() << " base cases were calculated." << std::endl; rules.GetResults(*neighborPtr, *distancePtr); break; } } Timer::Stop("computing_neighbors"); // Map points back to original indices, if necessary. if (tree::TreeTraits::RearrangesDataset) { if (searchMode == DUAL_TREE_MODE && !oldFromNewReferences.empty()) { // We must map both query and reference indices. neighbors.set_size(k, querySet.n_cols); distances.set_size(k, querySet.n_cols); for (size_t i = 0; i < distances.n_cols; i++) { // Map distances (copy a column). distances.col(oldFromNewQueries[i]) = distancePtr->col(i); // Map indices of neighbors. for (size_t j = 0; j < distances.n_rows; j++) { neighbors(j, oldFromNewQueries[i]) = oldFromNewReferences[(*neighborPtr)(j, i)]; } } // Finished with temporary matrices. delete neighborPtr; delete distancePtr; } else if (searchMode == DUAL_TREE_MODE) { // We must map query indices only. neighbors.set_size(k, querySet.n_cols); distances.set_size(k, querySet.n_cols); for (size_t i = 0; i < distances.n_cols; ++i) { // Map distances (copy a column). const size_t queryMapping = oldFromNewQueries[i]; distances.col(queryMapping) = distancePtr->col(i); neighbors.col(queryMapping) = neighborPtr->col(i); } // Finished with temporary matrices. 
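      // (An illustrative example of the mapping just performed, not from the
      // original sources: if tree building moved query point 4 to column 0,
      // then oldFromNewQueries[0] == 4, and column 0 of the temporary results
      // is copied back into column 4 of the user-facing matrices; neighbor
      // indices stored in the temporary matrix are translated through
      // oldFromNewReferences the same way.)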
      delete neighborPtr;
      delete distancePtr;
    }
    else if (!oldFromNewReferences.empty())
    {
      // We must map reference indices only.
      neighbors.set_size(k, querySet.n_cols);

      // Map indices of neighbors.
      for (size_t i = 0; i < neighbors.n_cols; i++)
        for (size_t j = 0; j < neighbors.n_rows; j++)
          neighbors(j, i) = oldFromNewReferences[(*neighborPtr)(j, i)];

      // Finished with temporary matrix.
      delete neighborPtr;
    }
  }
} // Search()

template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
void NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::Search(
    Tree* queryTree,
    const size_t k,
    arma::Mat<size_t>& neighbors,
    arma::mat& distances,
    bool sameSet)
{
  Search(*queryTree, k, neighbors, distances, sameSet);
}

template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
void NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::Search(
    Tree& queryTree,
    const size_t k,
    arma::Mat<size_t>& neighbors,
    arma::mat& distances,
    bool sameSet)
{
  // Update searchMode.
  UpdateSearchMode();

  if (k > referenceSet->n_cols)
  {
    std::stringstream ss;
    ss << "requested value of k (" << k << ") is greater than the number of "
        << "points in the reference set (" << referenceSet->n_cols << ")";
    throw std::invalid_argument(ss.str());
  }

  // Make sure we are in dual-tree mode.
  if (searchMode != DUAL_TREE_MODE)
    throw std::invalid_argument("cannot call NeighborSearch::Search() with a "
        "query tree when the search mode is not dual-tree");

  Timer::Start("computing_neighbors");
  baseCases = 0;
  scores = 0;

  // Get a reference to the query set.
  const MatType& querySet = queryTree.Dataset();

  // We won't need to map query indices, but will we need to map distances?
  arma::Mat<size_t>* neighborPtr = &neighbors;

  if (!oldFromNewReferences.empty() &&
      tree::TreeTraits<Tree>::RearrangesDataset)
    neighborPtr = new arma::Mat<size_t>;

  neighborPtr->set_size(k, querySet.n_cols);
  distances.set_size(k, querySet.n_cols);

  // Create the helper object for the traversal.
  typedef NeighborSearchRules<SortPolicy, MetricType, Tree> RuleType;
  RuleType rules(*referenceSet, querySet, k, metric, epsilon, sameSet);

  // Create the traverser.
  DualTreeTraversalType<RuleType> traverser(rules);
  traverser.Traverse(queryTree, *referenceTree);

  scores += rules.Scores();
  baseCases += rules.BaseCases();

  Log::Info << rules.Scores() << " node combinations were scored."
      << std::endl;
  Log::Info << rules.BaseCases() << " base cases were calculated."
      << std::endl;

  rules.GetResults(*neighborPtr, distances);

  Timer::Stop("computing_neighbors");

  // Do we need to map indices?
  if (!oldFromNewReferences.empty() &&
      tree::TreeTraits<Tree>::RearrangesDataset)
  {
    // We must map reference indices only.
    neighbors.set_size(k, querySet.n_cols);

    // Map indices of neighbors.
    for (size_t i = 0; i < neighbors.n_cols; i++)
      for (size_t j = 0; j < neighbors.n_rows; j++)
        neighbors(j, i) = oldFromNewReferences[(*neighborPtr)(j, i)];

    // Finished with temporary matrix.
    delete neighborPtr;
  }
}

template<typename SortPolicy,
         typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType,
         template<typename> class DualTreeTraversalType,
         template<typename> class SingleTreeTraversalType>
void NeighborSearch<SortPolicy, MetricType, MatType, TreeType,
DualTreeTraversalType, SingleTreeTraversalType>::Search(
    const size_t k,
    arma::Mat<size_t>& neighbors,
    arma::mat& distances)
{
  // Update searchMode.
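  // (A note on this monochromatic overload, explanatory rather than from the
  // original sources: the reference set doubles as the query set, so the
  // rules below are constructed with the flag that keeps a point from being
  // returned as its own neighbor.  A hypothetical call:
  //
  //   arma::Mat<size_t> neighbors;
  //   arma::mat distances;
  //   knn.Search(3, neighbors, distances);  // 3 neighbors of every point.)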
UpdateSearchMode(); if (k > referenceSet->n_cols) { std::stringstream ss; ss << "requested value of k (" << k << ") is greater than the number of " << "points in the reference set (" << referenceSet->n_cols << ")"; throw std::invalid_argument(ss.str()); } Timer::Start("computing_neighbors"); baseCases = 0; scores = 0; arma::Mat* neighborPtr = &neighbors; arma::mat* distancePtr = &distances; if (!oldFromNewReferences.empty() && tree::TreeTraits::RearrangesDataset) { // We will always need to rearrange in this case. distancePtr = new arma::mat; neighborPtr = new arma::Mat; } // Initialize results. neighborPtr->set_size(k, referenceSet->n_cols); distancePtr->set_size(k, referenceSet->n_cols); // Create the helper object for the traversal. typedef NeighborSearchRules RuleType; RuleType rules(*referenceSet, *referenceSet, k, metric, epsilon, true /* don't return the same point as nearest neighbor */); switch (searchMode) { case NAIVE_MODE: { // The naive brute-force solution. for (size_t i = 0; i < referenceSet->n_cols; ++i) for (size_t j = 0; j < referenceSet->n_cols; ++j) rules.BaseCase(i, j); baseCases += referenceSet->n_cols * referenceSet->n_cols; break; } case SINGLE_TREE_MODE: { // Create the traverser. SingleTreeTraversalType traverser(rules); // Now have it traverse for each point. for (size_t i = 0; i < referenceSet->n_cols; ++i) traverser.Traverse(i, *referenceTree); scores += rules.Scores(); baseCases += rules.BaseCases(); Log::Info << rules.Scores() << " node combinations were scored." << std::endl; Log::Info << rules.BaseCases() << " base cases were calculated." << std::endl; break; } case DUAL_TREE_MODE: { // The dual-tree monochromatic search case may require resetting the // bounds in the tree. if (treeNeedsReset) { std::stack nodes; nodes.push(referenceTree); while (!nodes.empty()) { Tree* node = nodes.top(); nodes.pop(); // Reset bounds of this node. node->Stat().Reset(); // Then add the children. for (size_t i = 0; i < node->NumChildren(); ++i) nodes.push(&node->Child(i)); } } // Create the traverser. DualTreeTraversalType traverser(rules); if (tree::IsSpillTree::value) { // For Dual Tree Search on SpillTree, the queryTree must be built with // non overlapping (tau = 0). Tree queryTree(*referenceSet); traverser.Traverse(queryTree, *referenceTree); } else { traverser.Traverse(*referenceTree, *referenceTree); // Next time we perform this search, we'll need to reset the tree. treeNeedsReset = true; } scores += rules.Scores(); baseCases += rules.BaseCases(); Log::Info << rules.Scores() << " node combinations were scored." << std::endl; Log::Info << rules.BaseCases() << " base cases were calculated." << std::endl; // Next time we perform this search, we'll need to reset the tree. treeNeedsReset = true; break; } case GREEDY_SINGLE_TREE_MODE: { // Create the traverser. tree::GreedySingleTreeTraverser traverser(rules); // Now have it traverse for each point. for (size_t i = 0; i < referenceSet->n_cols; ++i) traverser.Traverse(i, *referenceTree); scores += rules.Scores(); baseCases += rules.BaseCases(); Log::Info << rules.Scores() << " node combinations were scored." << std::endl; Log::Info << rules.BaseCases() << " base cases were calculated." << std::endl; break; } } rules.GetResults(*neighborPtr, *distancePtr); Timer::Stop("computing_neighbors"); // Do we need to map the reference indices? 
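  // (Unlike the bichromatic case, the same permutation applies to both sides
  // here: each result column is moved to position oldFromNewReferences[i],
  // and every stored neighbor index is translated through
  // oldFromNewReferences as well -- an explanatory note, not original text.)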
if (!oldFromNewReferences.empty() && tree::TreeTraits::RearrangesDataset) { neighbors.set_size(k, referenceSet->n_cols); distances.set_size(k, referenceSet->n_cols); for (size_t i = 0; i < distances.n_cols; ++i) { // Map distances (copy a column). const size_t refMapping = oldFromNewReferences[i]; distances.col(refMapping) = distancePtr->col(i); // Map each neighbor's index. for (size_t j = 0; j < distances.n_rows; ++j) neighbors(j, refMapping) = oldFromNewReferences[(*neighborPtr)(j, i)]; } // Finished with temporary matrices. delete neighborPtr; delete distancePtr; } } //! Calculate the average relative error. template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> double NeighborSearch::EffectiveError( arma::mat& foundDistances, arma::mat& realDistances) { if (foundDistances.n_rows != realDistances.n_rows || foundDistances.n_cols != realDistances.n_cols) throw std::invalid_argument("matrices provided must have equal size"); double effectiveError = 0; size_t numCases = 0; for (size_t i = 0; i < foundDistances.n_elem; i++) { if (realDistances(i) != 0 && foundDistances(i) != SortPolicy::WorstDistance()) { effectiveError += fabs(foundDistances(i) - realDistances(i)) / realDistances(i); numCases++; } } if (numCases) effectiveError /= numCases; return effectiveError; } //! Calculate the recall. template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> double NeighborSearch::Recall( arma::Mat& foundNeighbors, arma::Mat& realNeighbors) { if (foundNeighbors.n_rows != realNeighbors.n_rows || foundNeighbors.n_cols != realNeighbors.n_cols) throw std::invalid_argument("matrices provided must have equal size"); size_t found = 0; for (size_t col = 0; col < foundNeighbors.n_cols; ++col) for (size_t row = 0; row < foundNeighbors.n_rows; ++row) for (size_t nei = 0; nei < realNeighbors.n_rows; ++nei) if (foundNeighbors(row, col) == realNeighbors(nei, col)) { found++; break; } return ((double) found) / realNeighbors.n_elem; } //! Serialize the NeighborSearch model. template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> template void NeighborSearch::Serialize( Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // Update searchMode. UpdateSearchMode(); // Serialize preferences for search. ar & CreateNVP(searchMode, "searchMode"); ar & CreateNVP(naive, "naive"); ar & CreateNVP(singleMode, "singleMode"); ar & CreateNVP(treeNeedsReset, "treeNeedsReset"); // If we are doing naive search, we serialize the dataset. Otherwise we // serialize the tree. if (searchMode == NAIVE_MODE) { // Delete the current reference set, if necessary and if we are loading. if (Archive::is_loading::value) { if (setOwner && referenceSet) delete referenceSet; setOwner = true; // We will own the reference set when we load it. } ar & CreateNVP(referenceSet, "referenceSet"); ar & CreateNVP(metric, "metric"); // If we are loading, set the tree to NULL and clean up memory if necessary. if (Archive::is_loading::value) { if (treeOwner && referenceTree) delete referenceTree; referenceTree = NULL; oldFromNewReferences.clear(); treeOwner = false; } } else { // Delete the current reference tree, if necessary and if we are loading. if (Archive::is_loading::value) { if (treeOwner && referenceTree) delete referenceTree; // After we load the tree, we will own it. 
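      // (A hedged round-trip sketch, assuming the usual boost::serialization
      // archives that mlpack's data::Load()/data::Save() wrap:
      //
      //   std::ofstream ofs("knn.xml");
      //   boost::archive::xml_oarchive ar(ofs);
      //   knn.Serialize(ar, 0);  // Saves the tree, or the dataset if naive.
      //
      // On load, the ownership flags are flipped so that the destructor
      // frees the freshly allocated tree and dataset.)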
treeOwner = true; } ar & CreateNVP(referenceTree, "referenceTree"); ar & CreateNVP(oldFromNewReferences, "oldFromNewReferences"); // If we are loading, set the dataset accordingly and clean up memory if // necessary. if (Archive::is_loading::value) { if (setOwner && referenceSet) delete referenceSet; referenceSet = &referenceTree->Dataset(); metric = referenceTree->Metric(); // Get the metric from the tree. setOwner = false; } } // Reset base cases and scores. if (Archive::is_loading::value) { baseCases = 0; scores = 0; } } //! Updates naive, singleMode and greedy flags according to searchMode. This is //! only necessary until the modifiers Naive(), SingleMode() and Greedy() are //! removed in mlpack 3.0.0. template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> void NeighborSearch::UpdateSearchModeFlags() { switch (searchMode) { case NAIVE_MODE: naive = true; singleMode = false; greedy = false; break; case SINGLE_TREE_MODE: naive = false; singleMode = true; greedy = false; break; case DUAL_TREE_MODE: naive = false; singleMode = false; greedy = false; break; case GREEDY_SINGLE_TREE_MODE: naive = false; singleMode = true; greedy = true; break; } } //! Updates searchMode to be according to naive, singleMode and greedy booleans. //! This is only necessary until the modifiers Naive(), SingleMode() and //! Greedy() are removed in mlpack 3.0.0. template class TreeType, template class DualTreeTraversalType, template class SingleTreeTraversalType> void NeighborSearch::UpdateSearchMode() { if (naive) searchMode = NAIVE_MODE; else if (singleMode && greedy) searchMode = GREEDY_SINGLE_TREE_MODE; else if (singleMode) searchMode = SINGLE_TREE_MODE; else searchMode = DUAL_TREE_MODE; } } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/neighbor_search_rules.hpp000066400000000000000000000207351315013601400266600ustar00rootroot00000000000000/** * @file neighbor_search_rules.hpp * @author Ryan Curtin * * Defines the pruning rules and base case rules necessary to perform a * tree-based search (with an arbitrary tree) for the NeighborSearch class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_NEIGHBOR_SEARCH_RULES_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_NEIGHBOR_SEARCH_RULES_HPP #include namespace mlpack { namespace neighbor { /** * The NeighborSearchRules class is a template helper class used by * NeighborSearch class when performing distance-based neighbor searches. For * each point in the query dataset, it keeps track of the k neighbors in the * reference dataset which have the 'best' distance according to a given sorting * policy. * * @tparam SortPolicy The sort policy for distances. * @tparam MetricType The metric to use for computation. * @tparam TreeType The tree type to use; must adhere to the TreeType API. */ template class NeighborSearchRules { public: /** * Construct the NeighborSearchRules object. This is usually done from within * the NeighborSearch class at search time. * * @param referenceSet Set of reference data. * @param querySet Set of query data. * @param k Number of neighbors to search for. * @param metric Instantiated metric. * @param epsilon Relative approximate error. 
   * @param sameSet If true, the query and reference set are taken to be the
   *      same, and a query point will not return itself in the results.
   */
  NeighborSearchRules(const typename TreeType::Mat& referenceSet,
                      const typename TreeType::Mat& querySet,
                      const size_t k,
                      MetricType& metric,
                      const double epsilon = 0,
                      const bool sameSet = false);

  /**
   * Store the list of candidates for each query point in the given matrices.
   *
   * @param neighbors Matrix storing lists of neighbors for each query point.
   * @param distances Matrix storing distances of neighbors for each query
   *      point.
   */
  void GetResults(arma::Mat<size_t>& neighbors, arma::mat& distances);

  /**
   * Get the distance from the query point to the reference point.
   * This will update the list of candidates with the new point if appropriate
   * and will track the number of base cases (number of points evaluated).
   *
   * @param queryIndex Index of query point.
   * @param referenceIndex Index of reference point.
   */
  double BaseCase(const size_t queryIndex, const size_t referenceIndex);

  /**
   * Get the score for recursion order. A low score indicates priority for
   * recursion, while DBL_MAX indicates that the node should not be recursed
   * into at all (it should be pruned).
   *
   * @param queryIndex Index of query point.
   * @param referenceNode Candidate node to be recursed into.
   */
  double Score(const size_t queryIndex, TreeType& referenceNode);

  /**
   * Get the child node with the best score.
   *
   * @param queryIndex Index of query point.
   * @param referenceNode Candidate node to be recursed into.
   */
  size_t GetBestChild(const size_t queryIndex, TreeType& referenceNode);

  /**
   * Get the child node with the best score.
   *
   * @param queryNode Node to be considered.
   * @param referenceNode Candidate node to be recursed into.
   */
  size_t GetBestChild(const TreeType& queryNode, TreeType& referenceNode);

  /**
   * Re-evaluate the score for recursion order. A low score indicates priority
   * for recursion, while DBL_MAX indicates that the node should not be
   * recursed into at all (it should be pruned). This is used when the score
   * has already been calculated, but another recursion may have modified the
   * bounds for pruning. So the old score is checked against the new pruning
   * bound.
   *
   * @param queryIndex Index of query point.
   * @param referenceNode Candidate node to be recursed into.
   * @param oldScore Old score produced by Score() (or Rescore()).
   */
  double Rescore(const size_t queryIndex,
                 TreeType& referenceNode,
                 const double oldScore) const;

  /**
   * Get the score for recursion order. A low score indicates priority for
   * recursion, while DBL_MAX indicates that the node should not be recursed
   * into at all (it should be pruned).
   *
   * @param queryNode Candidate query node to recurse into.
   * @param referenceNode Candidate reference node to recurse into.
   */
  double Score(TreeType& queryNode, TreeType& referenceNode);

  /**
   * Re-evaluate the score for recursion order. A low score indicates priority
   * for recursion, while DBL_MAX indicates that the node should not be
   * recursed into at all (it should be pruned). This is used when the score
   * has already been calculated, but another recursion may have modified the
   * bounds for pruning. So the old score is checked against the new pruning
   * bound.
   *
   * @param queryNode Candidate query node to recurse into.
   * @param referenceNode Candidate reference node to recurse into.
   * @param oldScore Old score produced by Score() (or Rescore()).
   */
  double Rescore(TreeType& queryNode,
                 TreeType& referenceNode,
                 const double oldScore) const;

  //!
Get the number of base cases that have been performed. size_t BaseCases() const { return baseCases; } //! Modify the number of base cases that have been performed. size_t& BaseCases() { return baseCases; } //! Get the number of scores that have been performed. size_t Scores() const { return scores; } //! Modify the number of scores that have been performed. size_t& Scores() { return scores; } //! Convenience typedef. typedef typename tree::TraversalInfo TraversalInfoType; //! Get the traversal info. const TraversalInfoType& TraversalInfo() const { return traversalInfo; } //! Modify the traversal info. TraversalInfoType& TraversalInfo() { return traversalInfo; } protected: //! The reference set. const typename TreeType::Mat& referenceSet; //! The query set. const typename TreeType::Mat& querySet; //! Candidate represents a possible candidate neighbor (distance, index). typedef std::pair Candidate; //! Compare two candidates based on the distance. struct CandidateCmp { bool operator()(const Candidate& c1, const Candidate& c2) { return !SortPolicy::IsBetter(c2.first, c1.first); }; }; //! Use a priority queue to represent the list of candidate neighbors. typedef std::priority_queue, CandidateCmp> CandidateList; //! Set of candidate neighbors for each point. std::vector candidates; //! Number of neighbors to search for. const size_t k; //! The instantiated metric. MetricType& metric; //! Denotes whether or not the reference and query sets are the same. bool sameSet; //! Relative error to be considered in approximate search. const double epsilon; //! The last query point BaseCase() was called with. size_t lastQueryIndex; //! The last reference point BaseCase() was called with. size_t lastReferenceIndex; //! The last base case result. double lastBaseCase; //! The number of base cases that have been performed. size_t baseCases; //! The number of scores that have been performed. size_t scores; //! Traversal info for the parent combination; this is updated by the //! traversal before each call to Score(). TraversalInfoType traversalInfo; /** * Recalculate the bound for a given query node. */ double CalculateBound(TreeType& queryNode) const; /** * Helper function to insert a point into the list of candidate points. * * @param queryIndex Index of point whose neighbors we are inserting into. * @param neighbor Index of reference point which is being inserted. * @param distance Distance from query point to reference point. */ void InsertNeighbor(const size_t queryIndex, const size_t neighbor, const double distance); }; } // namespace neighbor } // namespace mlpack // Include implementation. #include "neighbor_search_rules_impl.hpp" #endif // MLPACK_METHODS_NEIGHBOR_SEARCH_NEIGHBOR_SEARCH_RULES_HPP mlpack-2.2.5/src/mlpack/methods/neighbor_search/neighbor_search_rules_impl.hpp000066400000000000000000000457741315013601400277130ustar00rootroot00000000000000/** * @file neighbor_search_rules_impl.hpp * @author Ryan Curtin * * Implementation of NeighborSearchRules. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_NEAREST_NEIGHBOR_RULES_IMPL_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_NEAREST_NEIGHBOR_RULES_IMPL_HPP // In case it hasn't been included yet. 
#include "neighbor_search_rules.hpp" #include namespace mlpack { namespace neighbor { template NeighborSearchRules::NeighborSearchRules( const typename TreeType::Mat& referenceSet, const typename TreeType::Mat& querySet, const size_t k, MetricType& metric, const double epsilon, const bool sameSet) : referenceSet(referenceSet), querySet(querySet), k(k), metric(metric), sameSet(sameSet), epsilon(epsilon), lastQueryIndex(querySet.n_cols), lastReferenceIndex(referenceSet.n_cols), baseCases(0), scores(0) { // We must set the traversal info last query and reference node pointers to // something that is both invalid (i.e. not a tree node) and not NULL. We'll // use the this pointer. traversalInfo.LastQueryNode() = (TreeType*) this; traversalInfo.LastReferenceNode() = (TreeType*) this; // Let's build the list of candidate neighbors for each query point. // It will be initialized with k candidates: (WorstDistance, size_t() - 1) // The list of candidates will be updated when visiting new points with the // BaseCase() method. const Candidate def = std::make_pair(SortPolicy::WorstDistance(), size_t() - 1); std::vector vect(k, def); CandidateList pqueue(CandidateCmp(), std::move(vect)); candidates.reserve(querySet.n_cols); for (size_t i = 0; i < querySet.n_cols; i++) candidates.push_back(pqueue); } template void NeighborSearchRules::GetResults( arma::Mat& neighbors, arma::mat& distances) { neighbors.set_size(k, querySet.n_cols); distances.set_size(k, querySet.n_cols); for (size_t i = 0; i < querySet.n_cols; i++) { CandidateList& pqueue = candidates[i]; for (size_t j = 1; j <= k; j++) { neighbors(k - j, i) = pqueue.top().second; distances(k - j, i) = pqueue.top().first; pqueue.pop(); } } }; template inline force_inline // Absolutely MUST be inline so optimizations can happen. double NeighborSearchRules:: BaseCase(const size_t queryIndex, const size_t referenceIndex) { // If the datasets are the same, then this search is only using one dataset // and we should not return identical points. if (sameSet && (queryIndex == referenceIndex)) return 0.0; // If we have already performed this base case, then do not perform it again. if ((lastQueryIndex == queryIndex) && (lastReferenceIndex == referenceIndex)) return lastBaseCase; double distance = metric.Evaluate(querySet.col(queryIndex), referenceSet.col(referenceIndex)); ++baseCases; InsertNeighbor(queryIndex, referenceIndex, distance); // Cache this information for the next time BaseCase() is called. lastQueryIndex = queryIndex; lastReferenceIndex = referenceIndex; lastBaseCase = distance; return distance; } template inline double NeighborSearchRules::Score( const size_t queryIndex, TreeType& referenceNode) { ++scores; // Count number of Score() calls. double distance; if (tree::TreeTraits::FirstPointIsCentroid) { // The first point in the tree is the centroid. So we can then calculate // the base case between that and the query point. double baseCase = -1.0; if (tree::TreeTraits::HasSelfChildren) { // If the parent node is the same, then we have already calculated the // base case. if ((referenceNode.Parent() != NULL) && (referenceNode.Point(0) == referenceNode.Parent()->Point(0))) baseCase = referenceNode.Parent()->Stat().LastDistance(); else baseCase = BaseCase(queryIndex, referenceNode.Point(0)); // Save this evaluation. 
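      // (Explanatory note: for trees with self-children, such as cover
      // trees, a node and its parent can share point 0, so the parent's
      // cached distance is exactly the base case that would otherwise be
      // recomputed here; storing it in the statistic below saves one metric
      // evaluation per such node.)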
referenceNode.Stat().LastDistance() = baseCase; } distance = SortPolicy::CombineBest(baseCase, referenceNode.FurthestDescendantDistance()); } else { distance = SortPolicy::BestPointToNodeDistance(querySet.col(queryIndex), &referenceNode); } // Compare against the best k'th distance for this query point so far. double bestDistance = candidates[queryIndex].top().first; bestDistance = SortPolicy::Relax(bestDistance, epsilon); return (SortPolicy::IsBetter(distance, bestDistance)) ? SortPolicy::ConvertToScore(distance) : DBL_MAX; } template inline size_t NeighborSearchRules:: GetBestChild(const size_t queryIndex, TreeType& referenceNode) { ++scores; return SortPolicy::GetBestChild(querySet.col(queryIndex), referenceNode); } template inline size_t NeighborSearchRules:: GetBestChild(const TreeType& queryNode, TreeType& referenceNode) { ++scores; return SortPolicy::GetBestChild(queryNode, referenceNode); } template inline double NeighborSearchRules::Rescore( const size_t queryIndex, TreeType& /* referenceNode */, const double oldScore) const { // If we are already pruning, still prune. if (oldScore == DBL_MAX) return oldScore; const double distance = SortPolicy::ConvertToDistance(oldScore); // Just check the score again against the distances. double bestDistance = candidates[queryIndex].top().first; bestDistance = SortPolicy::Relax(bestDistance, epsilon); return (SortPolicy::IsBetter(distance, bestDistance)) ? oldScore : DBL_MAX; } template inline double NeighborSearchRules::Score( TreeType& queryNode, TreeType& referenceNode) { ++scores; // Count number of Score() calls. // Update our bound. const double bestDistance = CalculateBound(queryNode); // Use the traversal info to see if a parent-child or parent-parent prune is // possible. This is a looser bound than we could make, but it might be // sufficient. const double queryParentDist = queryNode.ParentDistance(); const double queryDescDist = queryNode.FurthestDescendantDistance(); const double refParentDist = referenceNode.ParentDistance(); const double refDescDist = referenceNode.FurthestDescendantDistance(); const double score = traversalInfo.LastScore(); double adjustedScore; // We want to set adjustedScore to be the distance between the centroid of the // last query node and last reference node. We will do this by adjusting the // last score. In some cases, we can just use the last base case. if (tree::TreeTraits::FirstPointIsCentroid) { adjustedScore = traversalInfo.LastBaseCase(); } else if (score == 0.0) // Nothing we can do here. { adjustedScore = 0.0; } else { // The last score is equal to the distance between the centroids minus the // radii of the query and reference bounds along the axis of the line // between the two centroids. In the best case, these radii are the // furthest descendant distances, but that is not always true. It would // take too long to calculate the exact radii, so we are forced to use // MinimumBoundDistance() as a lower-bound approximation. const double lastQueryDescDist = traversalInfo.LastQueryNode()->MinimumBoundDistance(); const double lastRefDescDist = traversalInfo.LastReferenceNode()->MinimumBoundDistance(); adjustedScore = SortPolicy::CombineWorst(score, lastQueryDescDist); adjustedScore = SortPolicy::CombineWorst(adjustedScore, lastRefDescDist); } // Assemble an adjusted score. For nearest neighbor search, this adjusted // score is a lower bound on MinDistance(queryNode, referenceNode) that is // assembled without actually calculating MinDistance(). 
For furthest // neighbor search, it is an upper bound on // MaxDistance(queryNode, referenceNode). If the traversalInfo isn't usable // then the node should not be pruned by this. if (traversalInfo.LastQueryNode() == queryNode.Parent()) { const double queryAdjust = queryParentDist + queryDescDist; adjustedScore = SortPolicy::CombineBest(adjustedScore, queryAdjust); } else if (traversalInfo.LastQueryNode() == &queryNode) { adjustedScore = SortPolicy::CombineBest(adjustedScore, queryDescDist); } else { // The last query node wasn't this query node or its parent. So we force // the adjustedScore to be such that this combination can't be pruned here, // because we don't really know anything about it. // It would be possible to modify this section to try and make a prune based // on the query descendant distance and the distance between the query node // and last traversal query node, but this case doesn't actually happen for // kd-trees or cover trees. adjustedScore = SortPolicy::BestDistance(); } if (traversalInfo.LastReferenceNode() == referenceNode.Parent()) { const double refAdjust = refParentDist + refDescDist; adjustedScore = SortPolicy::CombineBest(adjustedScore, refAdjust); } else if (traversalInfo.LastReferenceNode() == &referenceNode) { adjustedScore = SortPolicy::CombineBest(adjustedScore, refDescDist); } else { // The last reference node wasn't this reference node or its parent. So we // force the adjustedScore to be such that this combination can't be pruned // here, because we don't really know anything about it. // It would be possible to modify this section to try and make a prune based // on the reference descendant distance and the distance between the // reference node and last traversal reference node, but this case doesn't // actually happen for kd-trees or cover trees. adjustedScore = SortPolicy::BestDistance(); } // Can we prune? if (!SortPolicy::IsBetter(adjustedScore, bestDistance)) { if (!(tree::TreeTraits::FirstPointIsCentroid && score == 0.0)) { // There isn't any need to set the traversal information because no // descendant combinations will be visited, and those are the only // combinations that would depend on the traversal information. return DBL_MAX; } } double distance; if (tree::TreeTraits::FirstPointIsCentroid) { // The first point in the node is the centroid, so we can calculate the // distance between the two points using BaseCase() and then find the // bounds. This is potentially loose for non-ball bounds. double baseCase = -1.0; if (tree::TreeTraits::HasSelfChildren && (traversalInfo.LastQueryNode()->Point(0) == queryNode.Point(0)) && (traversalInfo.LastReferenceNode()->Point(0) == referenceNode.Point(0))) { // We already calculated it. baseCase = traversalInfo.LastBaseCase(); } else { baseCase = BaseCase(queryNode.Point(0), referenceNode.Point(0)); } distance = SortPolicy::CombineBest(baseCase, queryNode.FurthestDescendantDistance() + referenceNode.FurthestDescendantDistance()); lastQueryIndex = queryNode.Point(0); lastReferenceIndex = referenceNode.Point(0); lastBaseCase = baseCase; traversalInfo.LastBaseCase() = baseCase; } else { distance = SortPolicy::BestNodeToNodeDistance(&queryNode, &referenceNode); } if (SortPolicy::IsBetter(distance, bestDistance)) { // Set traversal information. 
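    // (Explanatory note: the traversal info is recorded only when the pair
    // survives pruning, because only descendant combinations -- the ones
    // that read this cache -- will be visited afterwards.  An illustrative
    // prune with nearest-neighbor sort: if the parent pair scored 10, the
    // child lies at most 2 from its parent, and its descendants lie within
    // 3, then no descendant pair can be closer than 10 - 2 - 3 = 5.)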
traversalInfo.LastQueryNode() = &queryNode; traversalInfo.LastReferenceNode() = &referenceNode; traversalInfo.LastScore() = distance; return SortPolicy::ConvertToScore(distance); } else { // There isn't any need to set the traversal information because no // descendant combinations will be visited, and those are the only // combinations that would depend on the traversal information. return DBL_MAX; } } template inline double NeighborSearchRules::Rescore( TreeType& queryNode, TreeType& /* referenceNode */, const double oldScore) const { if (oldScore == DBL_MAX || oldScore == 0.0) return oldScore; const double distance = SortPolicy::ConvertToDistance(oldScore); // Update our bound. const double bestDistance = CalculateBound(queryNode); return (SortPolicy::IsBetter(distance, bestDistance)) ? oldScore : DBL_MAX; } // Calculate the bound for a given query node in its current state and update // it. template inline double NeighborSearchRules:: CalculateBound(TreeType& queryNode) const { // This is an adapted form of the B(N_q) function in the paper // ``Tree-Independent Dual-Tree Algorithms'' by Curtin et. al.; the goal is to // place a bound on the worst possible distance a point combination could have // to improve any of the current neighbor estimates. If the best possible // distance between two nodes is greater than this bound, then the node // combination can be pruned (see Score()). // There are a couple ways we can assemble a bound. For simplicity, this is // described for nearest neighbor search (SortPolicy = NearestNeighborSort), // but the code that is written is adapted for whichever SortPolicy. // First, we can consider the current worst neighbor candidate distance of any // descendant point. This is assembled with 'worstDistance' by looping // through the points held by the query node, and then by taking the cached // worst distance from any child nodes (Stat().FirstBound()). This // corresponds roughly to B_1(N_q) in the paper. // The other way of bounding is to use the triangle inequality. To do this, // we find the current best kth-neighbor candidate distance of any descendant // query point, and use the triangle inequality to place a bound on the // distance that candidate would have to any other descendant query point. // This corresponds roughly to B_2(N_q) in the paper, and is the bounding // style for cover trees. // Then, to assemble the final bound, since both bounds are valid, we simply // take the better of the two. double worstDistance = SortPolicy::BestDistance(); double bestDistance = SortPolicy::WorstDistance(); double bestPointDistance = SortPolicy::WorstDistance(); double auxDistance = SortPolicy::WorstDistance(); // Loop over points held in the node. for (size_t i = 0; i < queryNode.NumPoints(); ++i) { const double distance = candidates[queryNode.Point(i)].top().first; if (SortPolicy::IsBetter(worstDistance, distance)) worstDistance = distance; if (SortPolicy::IsBetter(distance, bestPointDistance)) bestPointDistance = distance; } auxDistance = bestPointDistance; // Loop over children of the node, and use their cached information to // assemble bounds. for (size_t i = 0; i < queryNode.NumChildren(); ++i) { const double firstBound = queryNode.Child(i).Stat().FirstBound(); const double auxBound = queryNode.Child(i).Stat().AuxBound(); if (SortPolicy::IsBetter(worstDistance, firstBound)) worstDistance = firstBound; if (SortPolicy::IsBetter(auxBound, auxDistance)) auxDistance = auxBound; } // Add triangle inequality adjustment to best distance. 
  // It is possible this could be tighter for certain types of trees.
  bestDistance = SortPolicy::CombineWorst(auxDistance,
      2 * queryNode.FurthestDescendantDistance());

  // Add triangle inequality adjustment to best distance of points in node.
  bestPointDistance = SortPolicy::CombineWorst(bestPointDistance,
      queryNode.FurthestPointDistance() +
      queryNode.FurthestDescendantDistance());

  if (SortPolicy::IsBetter(bestPointDistance, bestDistance))
    bestDistance = bestPointDistance;

  // At this point:
  // worstDistance holds the value of B_1(N_q).
  // bestDistance holds the value of B_2(N_q).
  // auxDistance holds the value of B_aux(N_q).

  // Now consider the parent bounds.
  if (queryNode.Parent() != NULL)
  {
    // The parent's worst distance bound implies that the bound for this node
    // must be at least as good. Thus, if the parent worst distance bound is
    // better, then take it.
    if (SortPolicy::IsBetter(queryNode.Parent()->Stat().FirstBound(),
        worstDistance))
      worstDistance = queryNode.Parent()->Stat().FirstBound();

    // The parent's best distance bound implies that the bound for this node
    // must be at least as good. Thus, if the parent best distance bound is
    // better, then take it.
    if (SortPolicy::IsBetter(queryNode.Parent()->Stat().SecondBound(),
        bestDistance))
      bestDistance = queryNode.Parent()->Stat().SecondBound();
  }

  // Could the existing bounds be better?
  if (SortPolicy::IsBetter(queryNode.Stat().FirstBound(), worstDistance))
    worstDistance = queryNode.Stat().FirstBound();
  if (SortPolicy::IsBetter(queryNode.Stat().SecondBound(), bestDistance))
    bestDistance = queryNode.Stat().SecondBound();

  // Cache bounds for later.
  queryNode.Stat().FirstBound() = worstDistance;
  queryNode.Stat().SecondBound() = bestDistance;
  queryNode.Stat().AuxBound() = auxDistance;

  worstDistance = SortPolicy::Relax(worstDistance, epsilon);

  // We can't consider B_2 for Spill Trees.
  if (tree::IsSpillTree<TreeType>::value)
    return worstDistance;

  if (SortPolicy::IsBetter(worstDistance, bestDistance))
    return worstDistance;
  else
    return bestDistance;
}

/**
 * Helper function to insert a point into the list of candidate points.
 *
 * @param queryIndex Index of point whose neighbors we are inserting into.
 * @param neighbor Index of reference point which is being inserted.
 * @param distance Distance from query point to reference point.
 */
template<typename SortPolicy, typename MetricType, typename TreeType>
inline void NeighborSearchRules<SortPolicy, MetricType, TreeType>::
InsertNeighbor(
    const size_t queryIndex,
    const size_t neighbor,
    const double distance)
{
  CandidateList& pqueue = candidates[queryIndex];
  Candidate c = std::make_pair(distance, neighbor);

  if (CandidateCmp()(c, pqueue.top()))
  {
    pqueue.pop();
    pqueue.push(c);
  }
}

} // namespace neighbor
} // namespace mlpack

#endif // MLPACK_METHODS_NEIGHBOR_SEARCH_NEAREST_NEIGHBOR_RULES_IMPL_HPP
mlpack-2.2.5/src/mlpack/methods/neighbor_search/neighbor_search_stat.hpp000066400000000000000000000067421315013601400265020ustar00rootroot00000000000000/**
 * @file neighbor_search_stat.hpp
 * @author Ryan Curtin
 *
 * Defines the NeighborSearchStat class, the statistic stored in each tree
 * node during nearest-neighbor-like queries.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_NEIGHBOR_SEARCH_STAT_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_NEIGHBOR_SEARCH_STAT_HPP #include namespace mlpack { namespace neighbor { /** * Extra data for each node in the tree. For neighbor searches, each node only * needs to store a bound on neighbor distances. */ template class NeighborSearchStat { private: //! The first bound on the node's neighbor distances (B_1). This represents //! the worst candidate distance of any descendants of this node. double firstBound; //! The second bound on the node's neighbor distances (B_2). This represents //! a bound on the worst distance of any descendants of this node assembled //! using the best descendant candidate distance modified by the furthest //! descendant distance. double secondBound; //! The aux bound on the node's neighbor distances (B_aux). This represents //! the best descendant candidate distance (used to calculate secondBound). double auxBound; //! The last distance evaluation. double lastDistance; public: /** * Initialize the statistic with the worst possible distance according to * our sorting policy. */ NeighborSearchStat() : firstBound(SortPolicy::WorstDistance()), secondBound(SortPolicy::WorstDistance()), auxBound(SortPolicy::WorstDistance()), lastDistance(0.0) { } /** * Initialization for a fully initialized node. In this case, we don't need * to worry about the node. */ template NeighborSearchStat(TreeType& /* node */) : firstBound(SortPolicy::WorstDistance()), secondBound(SortPolicy::WorstDistance()), auxBound(SortPolicy::WorstDistance()), lastDistance(0.0) { } /** * Reset statistic parameters to initial values. */ void Reset() { firstBound = SortPolicy::WorstDistance(); secondBound = SortPolicy::WorstDistance(); auxBound = SortPolicy::WorstDistance(); lastDistance = 0.0; } //! Get the first bound. double FirstBound() const { return firstBound; } //! Modify the first bound. double& FirstBound() { return firstBound; } //! Get the second bound. double SecondBound() const { return secondBound; } //! Modify the second bound. double& SecondBound() { return secondBound; } //! Get the aux bound. double AuxBound() const { return auxBound; } //! Modify the aux bound. double& AuxBound() { return auxBound; } //! Get the last distance calculation. double LastDistance() const { return lastDistance; } //! Modify the last distance calculation. double& LastDistance() { return lastDistance; } //! Serialize the statistic to/from an archive. template void Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(firstBound, "firstBound"); ar & CreateNVP(secondBound, "secondBound"); ar & CreateNVP(auxBound, "auxBound"); ar & CreateNVP(lastDistance, "lastDistance"); } }; } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/ns_model.hpp000066400000000000000000000317201315013601400241200ustar00rootroot00000000000000/** * @file ns_model.hpp * @author Ryan Curtin * * This is a model for nearest or furthest neighbor search. It is useful in * that it provides an easy way to serialize a model, abstracts away the * different types of trees, and also reflects the NeighborSearch API and * automatically directs to the right tree type. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_NS_MODEL_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_NS_MODEL_HPP #include #include #include #include #include #include #include "neighbor_search.hpp" namespace mlpack { namespace neighbor { /** * Alias template for euclidean neighbor search. */ template class TreeType> using NSType = NeighborSearch, arma::mat>::template DualTreeTraverser>; template struct NSModelName { static const std::string Name() { return "neighbor_search_model"; } }; template<> struct NSModelName { static const std::string Name() { return "nearest_neighbor_search_model"; } }; template<> struct NSModelName { static const std::string Name() { return "furthest_neighbor_search_model"; } }; /** * MonoSearchVisitor executes a monochromatic neighbor search on the given * NSType. We don't make any difference for different instantiations of NSType. */ class MonoSearchVisitor : public boost::static_visitor { private: //! Number of neighbors to search for. const size_t k; //! Result matrix for neighbors. arma::Mat& neighbors; //! Result matrix for distances. arma::mat& distances; public: //! Perform monochromatic nearest neighbor search. template void operator()(NSType* ns) const; //! Construct the MonoSearchVisitor object with the given parameters. MonoSearchVisitor(const size_t k, arma::Mat& neighbors, arma::mat& distances) : k(k), neighbors(neighbors), distances(distances) {}; }; /** * BiSearchVisitor executes a bichromatic neighbor search on the given NSType. * We use template specialization to differentiate those tree types that * accept leafSize as a parameter. In these cases, before doing neighbor search, * a query tree with proper leafSize is built from the querySet. */ template class BiSearchVisitor : public boost::static_visitor { private: //! The query set for the bichromatic search. const arma::mat& querySet; //! The number of neighbors to search for. const size_t k; //! The result matrix for neighbors. arma::Mat& neighbors; //! The result matrix for distances. arma::mat& distances; //! The number of points in a leaf (for BinarySpaceTrees). const size_t leafSize; //! Overlapping size (for spill trees). const double tau; //! Balance threshold (for spill trees). const double rho; //! Bichromatic neighbor search on the given NSType considering the leafSize. template void SearchLeaf(NSType* ns) const; public: //! Alias template necessary for visual c++ compiler. template class TreeType> using NSTypeT = NSType; //! Default Bichromatic neighbor search on the given NSType instance. template class TreeType> void operator()(NSTypeT* ns) const; //! Bichromatic neighbor search on the given NSType specialized for KDTrees. void operator()(NSTypeT* ns) const; //! Bichromatic neighbor search on the given NSType specialized for BallTrees. void operator()(NSTypeT* ns) const; //! Bichromatic neighbor search specialized for SPTrees. void operator()(SpillKNN* ns) const; //! Bichromatic neighbor search specialized for octrees. void operator()(NSTypeT* ns) const; //! Construct the BiSearchVisitor. BiSearchVisitor(const arma::mat& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances, const size_t leafSize, const double tau, const double rho); }; /** * TrainVisitor sets the reference set to a new reference set on the given * NSType. We use template specialization to differentiate those tree types that * accept leafSize as a parameter. In these cases, a reference tree with proper * leafSize is built from the referenceSet. 
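 *
 * (A hedged usage sketch, not from the original sources: NSModel applies
 * this visitor roughly as
 *
 *   TrainVisitor<SortPolicy> tn(std::move(referenceSet), leafSize, tau, rho);
 *   boost::apply_visitor(tn, nSearch);
 *
 * which dispatches to whichever specialization matches the tree type the
 * variant currently holds.)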
*/ template class TrainVisitor : public boost::static_visitor { private: //! The reference set to use for training. arma::mat&& referenceSet; //! The leaf size, used only by BinarySpaceTree. size_t leafSize; //! Overlapping size (for spill trees). const double tau; //! Balance threshold (for spill trees). const double rho; //! Train on the given NSType considering the leafSize. template void TrainLeaf(NSType* ns) const; public: //! Alias template necessary for visual c++ compiler. template class TreeType> using NSTypeT = NSType; //! Default Train on the given NSType instance. template class TreeType> void operator()(NSTypeT* ns) const; //! Train on the given NSType specialized for KDTrees. void operator()(NSTypeT* ns) const; //! Train on the given NSType specialized for BallTrees. void operator()(NSTypeT* ns) const; //! Train specialized for SPTrees. void operator()(SpillKNN* ns) const; //! Train specialized for octrees. void operator()(NSTypeT* ns) const; //! Construct the TrainVisitor object with the given reference set, leafSize //! for BinarySpaceTrees, and tau and rho for spill trees. TrainVisitor(arma::mat&& referenceSet, const size_t leafSize, const double tau, const double rho); }; /** * SearchModeVisitor exposes the SearchMode() method of the given NSType. */ class SearchModeVisitor : public boost::static_visitor { public: //! Return the search mode. template NeighborSearchMode operator()(NSType* ns) const; }; /** * SetSearchModeVisitor modifies the SearchMode method of the given NSType. */ class SetSearchModeVisitor : public boost::static_visitor { NeighborSearchMode searchMode; public: //! Construct the SetSearchModeVisitor object with the given mode. SetSearchModeVisitor(const NeighborSearchMode searchMode) : searchMode(searchMode) {}; //! Set the search mode. template void operator()(NSType* ns) const; }; /** * EpsilonVisitor exposes the Epsilon method of the given NSType. */ class EpsilonVisitor : public boost::static_visitor { public: //! Return epsilon, the approximation parameter. template double& operator()(NSType *ns) const; }; /** * ReferenceSetVisitor exposes the referenceSet of the given NSType. */ class ReferenceSetVisitor : public boost::static_visitor { public: //! Return the reference set. template const arma::mat& operator()(NSType *ns) const; }; /** * DeleteVisitor deletes the given NSType instance. */ class DeleteVisitor : public boost::static_visitor { public: //! Delete the NSType object. template void operator()(NSType *ns) const; }; /** * The NSModel class provides an easy way to serialize a model, abstracts away * the different types of trees, and also reflects the NeighborSearch API. This * class is meant to be used by the command-line mlpack_knn and mlpack_kfn * programs, and thus does not have the same complete functionality and * flexibility as the NeighborSearch class. So if you are using it outside of * mlpack_knn and mlpack_kfn, be aware that it is limited! * * @tparam SortPolicy The sort policy for distances; see NearestNeighborSort. */ template class NSModel { public: //! Enum type to identify each accepted tree type. enum TreeTypes { KD_TREE, COVER_TREE, R_TREE, R_STAR_TREE, BALL_TREE, X_TREE, HILBERT_R_TREE, R_PLUS_TREE, R_PLUS_PLUS_TREE, VP_TREE, RP_TREE, MAX_RP_TREE, SPILL_TREE, UB_TREE, OCTREE }; private: //! Tree type considered for neighbor search. TreeTypes treeType; //! For tree types that accept the maxLeafSize parameter. size_t leafSize; //! Overlapping size (for spill trees). double tau; //! Balance threshold (for spill trees). 
double rho; //! If true, random projections are used. bool randomBasis; //! This is the random projection matrix; only used if randomBasis is true. arma::mat q; /** * nSearch holds an instance of the NeigborSearch class for the current * treeType. It is initialized every time BuildModel is executed. * We access to the contained value through the visitor classes defined above. */ boost::variant*, NSType*, NSType*, NSType*, NSType*, NSType*, NSType*, NSType*, NSType*, NSType*, NSType*, NSType*, SpillKNN*, NSType*, NSType*> nSearch; public: /** * Initialize the NSModel with the given type and whether or not a random * basis should be used. */ NSModel(TreeTypes treeType = TreeTypes::KD_TREE, bool randomBasis = false); //! Clean memory, if necessary. ~NSModel(); //! Serialize the neighbor search model. template void Serialize(Archive& ar, const unsigned int /* version */); //! Expose the dataset. const arma::mat& Dataset() const; //! Access the search mode. NeighborSearchMode SearchMode() const; //! Modify the search mode. void SetSearchMode(const NeighborSearchMode mode); //! Expose Epsilon. double Epsilon() const; double& Epsilon(); //! Expose leafSize. size_t LeafSize() const { return leafSize; } size_t& LeafSize() { return leafSize; } //! Expose tau. double Tau() const { return tau; } double& Tau() { return tau; } //! Expose rho. double Rho() const { return rho; } double& Rho() { return rho; } //! Expose treeType. TreeTypes TreeType() const { return treeType; } TreeTypes& TreeType() { return treeType; } //! Expose randomBasis. bool RandomBasis() const { return randomBasis; } bool& RandomBasis() { return randomBasis; } //! Build the reference tree. void BuildModel(arma::mat&& referenceSet, const size_t leafSize, const NeighborSearchMode searchMode, const double epsilon = 0); //! Perform neighbor search. The query set will be reordered. void Search(arma::mat&& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances); //! Perform monochromatic neighbor search. void Search(const size_t k, arma::Mat& neighbors, arma::mat& distances); //! Return a string representation of the current tree type. std::string TreeName() const; }; } // namespace neighbor } // namespace mlpack //! Set the serialization version of the NSModel class. BOOST_TEMPLATE_CLASS_VERSION(template, mlpack::neighbor::NSModel, 1); // Include implementation. #include "ns_model_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/ns_model_impl.hpp000066400000000000000000000443471315013601400251520ustar00rootroot00000000000000/** * @file ns_model_impl.hpp * @author Ryan Curtin * * This is a model for nearest or furthest neighbor search. It is useful in * that it provides an easy way to serialize a model, abstracts away the * different types of trees, and also reflects the NeighborSearch API and * automatically directs to the right tree type. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_NS_MODEL_IMPL_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_NS_MODEL_IMPL_HPP // In case it hasn't been included yet. #include "ns_model.hpp" #include namespace mlpack { namespace neighbor { //! Monochromatic neighbor search on the given NSType instance. 
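// ---------------------------------------------------------------------------
// Illustrative aside (added for exposition; hypothetical types, not part of
// the original source): every visitor in this file follows the same
// boost::variant dispatch pattern.  A minimal, self-contained sketch of that
// pattern, assuming <boost/variant.hpp> and <stdexcept> are already available
// via ns_model.hpp:
namespace example {

struct ModelA { void Search() const { /* ... */ } };
struct ModelB { void Search() const { /* ... */ } };

class ExampleSearchVisitor : public boost::static_visitor<void>
{
 public:
  template<typename ModelType>
  void operator()(ModelType* model) const
  {
    if (model)
      return model->Search();
    throw std::runtime_error("no model initialized");
  }
};

inline void ExampleDispatch()
{
  ModelA a;
  boost::variant<ModelA*, ModelB*> model = &a;
  boost::apply_visitor(ExampleSearchVisitor(), model); // Calls ModelA::Search().
}

} // namespace example
// ---------------------------------------------------------------------------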
template void MonoSearchVisitor::operator()(NSType *ns) const { if (ns) return ns->Search(k, neighbors, distances); throw std::runtime_error("no neighbor search model initialized"); } //! Save parameters for bichromatic neighbor search. template BiSearchVisitor::BiSearchVisitor(const arma::mat& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances, const size_t leafSize, const double tau, const double rho) : querySet(querySet), k(k), neighbors(neighbors), distances(distances), leafSize(leafSize), tau(tau), rho(rho) {} //! Default Bichromatic neighbor search on the given NSType instance. template template class TreeType> void BiSearchVisitor::operator()(NSTypeT* ns) const { if (ns) return ns->Search(querySet, k, neighbors, distances); throw std::runtime_error("no neighbor search model initialized"); } //! Bichromatic neighbor search on the given NSType specialized for KDTrees. template void BiSearchVisitor::operator()(NSTypeT* ns) const { if (ns) return SearchLeaf(ns); throw std::runtime_error("no neighbor search model initialized"); } //! Bichromatic neighbor search on the given NSType specialized for BallTrees. template void BiSearchVisitor::operator()(NSTypeT* ns) const { if (ns) return SearchLeaf(ns); throw std::runtime_error("no neighbor search model initialized"); } //! Bichromatic neighbor search specialized for SPTrees. template void BiSearchVisitor::operator()(SpillKNN* ns) const { if (ns) { if (!ns->Naive() && !ns->SingleMode()) { // For Dual Tree Search on SpillTrees, the queryTree must be built with // non overlapping (tau = 0). typename SpillKNN::Tree queryTree(std::move(querySet), 0 /* tau*/, leafSize, rho); ns->Search(queryTree, k, neighbors, distances); } else ns->Search(querySet, k, neighbors, distances); } else throw std::runtime_error("no neighbor search model initialized"); } //! Bichromatic neighbor search specialized for octrees. template void BiSearchVisitor::operator()(NSTypeT* ns) const { if (ns) return SearchLeaf(ns); throw std::runtime_error("no neighbor search model initialized"); } //! Bichromatic neighbor search on the given NSType considering the leafSize. template template void BiSearchVisitor::SearchLeaf(NSType* ns) const { if (!ns->Naive() && !ns->SingleMode()) { std::vector oldFromNewQueries; typename NSType::Tree queryTree(std::move(querySet), oldFromNewQueries, leafSize); arma::Mat neighborsOut; arma::mat distancesOut; ns->Search(queryTree, k, neighborsOut, distancesOut); // Unmap the query points. distances.set_size(distancesOut.n_rows, distancesOut.n_cols); neighbors.set_size(neighborsOut.n_rows, neighborsOut.n_cols); for (size_t i = 0; i < neighborsOut.n_cols; ++i) { neighbors.col(oldFromNewQueries[i]) = neighborsOut.col(i); distances.col(oldFromNewQueries[i]) = distancesOut.col(i); } } else ns->Search(querySet, k, neighbors, distances); } //! Save parameters for Train. template TrainVisitor::TrainVisitor(arma::mat&& referenceSet, const size_t leafSize, const double tau, const double rho) : referenceSet(std::move(referenceSet)), leafSize(leafSize), tau(tau), rho(rho) {} //! Default Train on the given NSType instance. template template class TreeType> void TrainVisitor::operator()(NSTypeT* ns) const { if (ns) return ns->Train(std::move(referenceSet)); throw std::runtime_error("no neighbor search model initialized"); } //! Train on the given NSType specialized for KDTrees. template void TrainVisitor::operator()(NSTypeT* ns) const { if (ns) return TrainLeaf(ns); throw std::runtime_error("no neighbor search model initialized"); } //! 
Train on the given NSType specialized for BallTrees. template void TrainVisitor::operator()(NSTypeT* ns) const { if (ns) return TrainLeaf(ns); throw std::runtime_error("no neighbor search model initialized"); } //! Train specialized for SPTrees. template void TrainVisitor::operator()(SpillKNN* ns) const { if (ns) { if (ns->Naive()) ns->Train(std::move(referenceSet)); else { typename SpillKNN::Tree tree(std::move(referenceSet), tau, leafSize, rho); ns->Train(std::move(tree)); } } else throw std::runtime_error("no neighbor search model initialized"); } //! Train specialized for Octrees. template void TrainVisitor::operator()(NSTypeT* ns) const { if (ns) return TrainLeaf(ns); throw std::runtime_error("no neighbor search model initialized"); } //! Train on the given NSType considering the leafSize. template template void TrainVisitor::TrainLeaf(NSType* ns) const { if (ns->Naive()) ns->Train(std::move(referenceSet)); else { std::vector oldFromNewReferences; typename NSType::Tree referenceTree(std::move(referenceSet), oldFromNewReferences, leafSize); ns->Train(std::move(referenceTree)); // Set the mappings. ns->oldFromNewReferences = std::move(oldFromNewReferences); } } //! Set the search mode. template void SetSearchModeVisitor::operator()(NSType* ns) const { if (ns) { switch (searchMode) { case NAIVE_MODE: ns->Naive() = true; ns->SingleMode() = false; ns->Greedy() = false; break; case SINGLE_TREE_MODE: ns->Naive() = false; ns->SingleMode() = true; ns->Greedy() = false; break; case DUAL_TREE_MODE: ns->Naive() = false; ns->SingleMode() = false; ns->Greedy() = false; break; case GREEDY_SINGLE_TREE_MODE: ns->Naive() = false; ns->SingleMode() = true; ns->Greedy() = true; break; } } else throw std::runtime_error("no neighbor search model initialized"); } //! Return the search mode. template NeighborSearchMode SearchModeVisitor::operator()(NSType* ns) const { if (ns) { if (ns->Naive()) return NAIVE_MODE; else if (ns->SingleMode() && ns->Greedy()) return GREEDY_SINGLE_TREE_MODE; else if (ns->SingleMode()) return SINGLE_TREE_MODE; else return DUAL_TREE_MODE; } else throw std::runtime_error("no neighbor search model initialized"); } //! Expose the Epsilon method of the given NSType. template double& EpsilonVisitor::operator()(NSType* ns) const { if (ns) return ns->Epsilon(); throw std::runtime_error("no neighbor search model initialized"); } //! Expose the referenceSet of the given NSType. template const arma::mat& ReferenceSetVisitor::operator()(NSType* ns) const { if (ns) return ns->ReferenceSet(); throw std::runtime_error("no neighbor search model initialized"); } //! Clean memory, if necessary. template void DeleteVisitor::operator()(NSType* ns) const { if (ns) delete ns; } /** * Initialize the NSModel with the given type and whether or not a random * basis should be used. */ template NSModel::NSModel(TreeTypes treeType, bool randomBasis) : treeType(treeType), leafSize(20), tau(0), rho(0.7), randomBasis(randomBasis) { // Nothing to do. } //! Clean memory, if necessary. template NSModel::~NSModel() { boost::apply_visitor(DeleteVisitor(), nSearch); } /** * Non-intrusive serialization for NeighborSearch class. We need this definition * because we are going to use the serialize function for boost variant, which * will look for a serialize function for its member types. */ template class TreeType, template class TraversalType, template class SingleTreeTraversalType> void serialize( Archive& ar, NeighborSearch& ns, const unsigned int version) { ns.Serialize(ar, version); } //! Serialize the kNN model. 
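// Illustrative aside (hypothetical round trip, not part of the original
// source): assuming mlpack's usual data::Save()/data::Load() overloads for
// serializable objects, a trained model can be persisted and restored under
// the name produced by NSModelName<SortPolicy>::Name(), as sketched here.
inline void ExampleModelRoundTrip()
{
  NSModel<NearestNeighborSort> model(NSModel<NearestNeighborSort>::KD_TREE);
  arma::mat references = arma::randu<arma::mat>(3, 100);
  model.BuildModel(std::move(references), 20 /* leafSize */, DUAL_TREE_MODE);

  // The name must match NSModelName<SortPolicy>::Name().
  data::Save("knn_model.xml", "nearest_neighbor_search_model", model);

  NSModel<NearestNeighborSort> reloaded;
  data::Load("knn_model.xml", "nearest_neighbor_search_model", reloaded);
}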
template template void NSModel::Serialize(Archive& ar, const unsigned int version) { ar & data::CreateNVP(treeType, "treeType"); // Backward compatibility: older versions of NSModel didn't include these // parameters. if (version > 0) { ar & data::CreateNVP(leafSize, "leafSize"); ar & data::CreateNVP(tau, "tau"); ar & data::CreateNVP(rho, "rho"); } ar & data::CreateNVP(randomBasis, "randomBasis"); ar & data::CreateNVP(q, "q"); // This should never happen, but just in case, be clean with memory. if (Archive::is_loading::value) boost::apply_visitor(DeleteVisitor(), nSearch); const std::string& name = NSModelName::Name(); ar & data::CreateNVP(nSearch, name); } //! Expose the dataset. template const arma::mat& NSModel::Dataset() const { return boost::apply_visitor(ReferenceSetVisitor(), nSearch); } //! Access the search mode. template NeighborSearchMode NSModel::SearchMode() const { return boost::apply_visitor(SearchModeVisitor(), nSearch); } //! Modify the search mode. template void NSModel::SetSearchMode(const NeighborSearchMode mode) { return boost::apply_visitor(SetSearchModeVisitor(mode), nSearch); } template double NSModel::Epsilon() const { return boost::apply_visitor(EpsilonVisitor(), nSearch); } template double& NSModel::Epsilon() { return boost::apply_visitor(EpsilonVisitor(), nSearch); } //! Build the reference tree. template void NSModel::BuildModel(arma::mat&& referenceSet, const size_t leafSize, const NeighborSearchMode searchMode, const double epsilon) { this->leafSize = leafSize; // Initialize random basis if necessary. if (randomBasis) { Log::Info << "Creating random basis..." << std::endl; while (true) { // [Q, R] = qr(randn(d, d)); // Q = Q * diag(sign(diag(R))); arma::mat r; if (arma::qr(q, r, arma::randn(referenceSet.n_rows, referenceSet.n_rows))) { arma::vec rDiag(r.n_rows); for (size_t i = 0; i < rDiag.n_elem; ++i) { if (r(i, i) < 0) rDiag(i) = -1; else if (r(i, i) > 0) rDiag(i) = 1; else rDiag(i) = 0; } q *= arma::diagmat(rDiag); // Check if the determinant is positive. if (arma::det(q) >= 0) break; } } } // Clean memory, if necessary. boost::apply_visitor(DeleteVisitor(), nSearch); // Do we need to modify the reference set? if (randomBasis) referenceSet = q * referenceSet; if (searchMode != NAIVE_MODE) { Timer::Start("tree_building"); Log::Info << "Building reference tree..." 
<< std::endl; } switch (treeType) { case KD_TREE: nSearch = new NSType(searchMode, epsilon); break; case COVER_TREE: nSearch = new NSType(searchMode, epsilon); break; case R_TREE: nSearch = new NSType(searchMode, epsilon); break; case R_STAR_TREE: nSearch = new NSType(searchMode, epsilon); break; case BALL_TREE: nSearch = new NSType(searchMode, epsilon); break; case X_TREE: nSearch = new NSType(searchMode, epsilon); break; case HILBERT_R_TREE: nSearch = new NSType(searchMode, epsilon); break; case R_PLUS_TREE: nSearch = new NSType(searchMode, epsilon); break; case R_PLUS_PLUS_TREE: nSearch = new NSType(searchMode, epsilon); break; case VP_TREE: nSearch = new NSType(searchMode, epsilon); break; case RP_TREE: nSearch = new NSType(searchMode, epsilon); break; case MAX_RP_TREE: nSearch = new NSType(searchMode, epsilon); break; case SPILL_TREE: nSearch = new SpillKNN(searchMode, epsilon); break; case UB_TREE: nSearch = new NSType(searchMode, epsilon); break; case OCTREE: nSearch = new NSType(searchMode, epsilon); break; } TrainVisitor tn(std::move(referenceSet), leafSize, tau, rho); boost::apply_visitor(tn, nSearch); if (searchMode != NAIVE_MODE) { Timer::Stop("tree_building"); Log::Info << "Tree built." << std::endl; } } //! Perform neighbor search. The query set will be reordered. template void NSModel::Search(arma::mat&& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances) { // We may need to map the query set randomly. if (randomBasis) querySet = q * querySet; Log::Info << "Searching for " << k << " neighbors with "; switch (SearchMode()) { case NAIVE_MODE: Log::Info << "brute-force (naive) search..." << std::endl; break; case SINGLE_TREE_MODE: Log::Info << "single-tree " << TreeName() << " search..." << std::endl; break; case DUAL_TREE_MODE: Log::Info << "dual-tree " << TreeName() << " search..." << std::endl; break; case GREEDY_SINGLE_TREE_MODE: Log::Info << "greedy single-tree " << TreeName() << " search..." << std::endl; break; } BiSearchVisitor search(querySet, k, neighbors, distances, leafSize, tau, rho); boost::apply_visitor(search, nSearch); } //! Perform neighbor search. template void NSModel::Search(const size_t k, arma::Mat& neighbors, arma::mat& distances) { Log::Info << "Searching for " << k << " neighbors with "; switch (SearchMode()) { case NAIVE_MODE: Log::Info << "brute-force (naive) search..." << std::endl; break; case SINGLE_TREE_MODE: Log::Info << "single-tree " << TreeName() << " search..." << std::endl; break; case DUAL_TREE_MODE: Log::Info << "dual-tree " << TreeName() << " search..." << std::endl; break; case GREEDY_SINGLE_TREE_MODE: Log::Info << "greedy single-tree " << TreeName() << " search..." << std::endl; break; } if (Epsilon() != 0 && SearchMode() != NAIVE_MODE) Log::Info << "Maximum of " << Epsilon() * 100 << "% relative error." << std::endl; MonoSearchVisitor search(k, neighbors, distances); boost::apply_visitor(search, nSearch); } //! Get the name of the tree type. 
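// Illustrative aside (hypothetical helper, not part of the original source):
// the random-basis branch of BuildModel() above is the classic "QR of a
// Gaussian matrix" recipe,
//   [Q, R] = qr(randn(d, d));  Q = Q * diag(sign(diag(R)));
// retried until det(Q) >= 0.
inline arma::mat ExampleRandomBasis(const size_t d)
{
  arma::mat q, r;
  while (true)
  {
    if (arma::qr(q, r, arma::randn<arma::mat>(d, d)))
    {
      // Flip the sign of each column whose R diagonal entry is negative.
      arma::vec rDiag(r.n_rows);
      for (size_t i = 0; i < rDiag.n_elem; ++i)
        rDiag(i) = (r(i, i) < 0) ? -1 : ((r(i, i) > 0) ? 1 : 0);
      q *= arma::diagmat(rDiag);

      if (arma::det(q) >= 0)
        return q;
    }
  }
}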
template std::string NSModel::TreeName() const { switch (treeType) { case KD_TREE: return "kd-tree"; case COVER_TREE: return "cover tree"; case R_TREE: return "R tree"; case R_STAR_TREE: return "R* tree"; case BALL_TREE: return "ball tree"; case X_TREE: return "X tree"; case HILBERT_R_TREE: return "Hilbert R tree"; case R_PLUS_TREE: return "R+ tree"; case R_PLUS_PLUS_TREE: return "R++ tree"; case SPILL_TREE: return "Spill tree"; case VP_TREE: return "vantage point tree"; case RP_TREE: return "random projection tree (mean split)"; case MAX_RP_TREE: return "random projection tree (max split)"; case UB_TREE: return "UB tree"; case OCTREE: return "octree"; default: return "unknown tree"; } } } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/sort_policies/000077500000000000000000000000001315013601400244625ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/neighbor_search/sort_policies/furthest_neighbor_sort.hpp000066400000000000000000000153461315013601400317740ustar00rootroot00000000000000/** * @file furthest_neighbor_sort.hpp * @author Ryan Curtin * * Implementation of the SortPolicy class for NeighborSearch; in this case, the * furthest neighbors are those that are most important. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_FURTHEST_NEIGHBOR_SORT_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_FURTHEST_NEIGHBOR_SORT_HPP #include namespace mlpack { namespace neighbor { /** * This class implements the necessary methods for the SortPolicy template * parameter of the NeighborSearch class. The sorting policy here is that the * minimum distance is the best (so, when used with NeighborSearch, the output * is furthest neighbors). */ class FurthestNeighborSort { public: /** * Return whether or not value is "better" than ref. In this case, that means * that the value is greater than or equal to the reference. * * @param value Value to compare * @param ref Value to compare with * * @return bool indicating whether or not (value >= ref). */ static inline bool IsBetter(const double value, const double ref) { return (value >= ref); } /** * Return the best possible distance between two nodes. In our case, this is * the maximum distance between the two tree nodes using the given distance * function. */ template static double BestNodeToNodeDistance(const TreeType* queryNode, const TreeType* referenceNode); /** * Return the best possible distance between two nodes, given that the * distance between the centers of the two nodes has already been calculated. * This is used in conjunction with trees that have self-children (like cover * trees). */ template static double BestNodeToNodeDistance(const TreeType* queryNode, const TreeType* referenceNode, const double centerToCenterDistance); /** * Return the best possible distance between the query node and the reference * child node given the base case distance between the query node and the * reference node. TreeType::ParentDistance() must be implemented to use * this. * * @param queryNode Query node. * @param referenceNode Reference node. * @param referenceChildNode Child of reference node which is being inspected. * @param centerToCenterDistance Distance between centers of query node and * reference node. 
*/ template static double BestNodeToNodeDistance(const TreeType* queryNode, const TreeType* referenceNode, const TreeType* referenceChildNode, const double centerToCenterDistance); /** * Return the best possible distance between a node and a point. In our case, * this is the maximum distance between the tree node and the point using the * given distance function. */ template static double BestPointToNodeDistance(const VecType& queryPoint, const TreeType* referenceNode); /** * Return the best possible distance between a point and a node, given that * the distance between the point and the center of the node has already been * calculated. This is used in conjunction with trees that have * self-children (like cover trees). */ template static double BestPointToNodeDistance(const VecType& queryPoint, const TreeType* referenceNode, const double pointToCenterDistance); /** * Return the best child according to this sort policy. In this case it will * return the one with the maximum distance. */ template static size_t GetBestChild(const VecType& queryPoint, TreeType& referenceNode) { return referenceNode.GetFurthestChild(queryPoint); }; /** * Return the best child according to this sort policy. In this case it will * return the one with the maximum distance. */ template static size_t GetBestChild(const TreeType& queryNode, TreeType& referenceNode) { return referenceNode.GetFurthestChild(queryNode); }; /** * Return what should represent the worst possible distance with this * particular sort policy. In our case, this should be the minimum possible * distance, 0. * * @return 0 */ static inline double WorstDistance() { return 0; } /** * Return what should represent the best possible distance with this * particular sort policy. In our case, this should be the maximum possible * distance, DBL_MAX. * * @return DBL_MAX */ static inline double BestDistance() { return DBL_MAX; } /** * Return the best combination of the two distances. */ static inline double CombineBest(const double a, const double b) { if (a == DBL_MAX || b == DBL_MAX) return DBL_MAX; return a + b; } /** * Return the worst combination of the two distances. */ static inline double CombineWorst(const double a, const double b) { return std::max(a - b, 0.0); } /** * Return the given value relaxed. * * @param value Value to relax. * @param epsilon Relative error (non-negative). * * @return double Value relaxed. */ static inline double Relax(const double value, const double epsilon) { if (value == 0) return 0; if (value == DBL_MAX || epsilon >= 1) return DBL_MAX; return (1 / (1 - epsilon)) * value; } /** * Convert the given distance to a score. Lower scores are better, but for * furthest neighbor search, larger distances are better. Therefore we must * invert the given distance. */ static inline double ConvertToScore(const double distance) { if (distance == DBL_MAX) return 0.0; else if (distance == 0.0) return DBL_MAX; else return (1.0 / distance); } /** * Convert the given score back to a distance. This is the inverse of the * operation of converting a distance to a score, and again, for furthest * neighbor search, corresponds to inverting the value. */ static inline double ConvertToDistance(const double score) { return ConvertToScore(score); } }; } // namespace neighbor } // namespace mlpack // Include implementation of templated functions. 
#include "furthest_neighbor_sort_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/sort_policies/furthest_neighbor_sort_impl.hpp000066400000000000000000000043671315013601400330160ustar00rootroot00000000000000/*** * @file furthest_neighbor_sort_impl.hpp * @author Ryan Curtin * * Implementation of templated methods for the FurthestNeighborSort SortPolicy * class for the NeighborSearch class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_FURTHEST_NEIGHBOR_SORT_IMPL_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_FURTHEST_NEIGHBOR_SORT_IMPL_HPP namespace mlpack { namespace neighbor { template inline double FurthestNeighborSort::BestNodeToNodeDistance( const TreeType* queryNode, const TreeType* referenceNode) { // This is not implemented yet for the general case because the trees do not // accept arbitrary distance metrics. return queryNode->MaxDistance(*referenceNode); } template inline double FurthestNeighborSort::BestNodeToNodeDistance( const TreeType* queryNode, const TreeType* referenceNode, const double centerToCenterDistance) { return queryNode->MaxDistance(*referenceNode, centerToCenterDistance); } template inline double FurthestNeighborSort::BestNodeToNodeDistance( const TreeType* queryNode, const TreeType* referenceNode, const TreeType* referenceChildNode, const double centerToCenterDistance) { return queryNode->MaxDistance(*referenceNode, centerToCenterDistance) + referenceChildNode->ParentDistance(); } template inline double FurthestNeighborSort::BestPointToNodeDistance( const VecType& point, const TreeType* referenceNode) { // This is not implemented yet for the general case because the trees do not // accept arbitrary distance metrics. return referenceNode->MaxDistance(point); } template inline double FurthestNeighborSort::BestPointToNodeDistance( const VecType& point, const TreeType* referenceNode, const double pointToCenterDistance) { return referenceNode->MaxDistance(point, pointToCenterDistance); } } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/sort_policies/nearest_neighbor_sort.hpp000066400000000000000000000153031315013601400315620ustar00rootroot00000000000000/** * @file nearest_neighbor_sort.hpp * @author Ryan Curtin * * Implementation of the SortPolicy class for NeighborSearch; in this case, the * nearest neighbors are those that are most important. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_NEAREST_NEIGHBOR_SORT_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_NEAREST_NEIGHBOR_SORT_HPP #include namespace mlpack { namespace neighbor { /** * This class implements the necessary methods for the SortPolicy template * parameter of the NeighborSearch class. The sorting policy here is that the * minimum distance is the best (so, when used with NeighborSearch, the output * is nearest neighbors). * * This class is also meant to serve as a guide to implement a custom * SortPolicy. All of the methods implemented here must be implemented by any * other SortPolicy classes. 
*/ class NearestNeighborSort { public: /** * Return whether or not value is "better" than ref. In this case, that means * that the value is less than or equal to the reference. * * @param value Value to compare * @param ref Value to compare with * * @return bool indicating whether or not (value <= ref). */ static inline bool IsBetter(const double value, const double ref) { return (value <= ref); } /** * Return the best possible distance between two nodes. In our case, this is * the minimum distance between the two tree nodes using the given distance * function. */ template static double BestNodeToNodeDistance(const TreeType* queryNode, const TreeType* referenceNode); /** * Return the best possible distance between two nodes, given that the * distance between the centers of the two nodes has already been calculated. * This is used in conjunction with trees that have self-children (like cover * trees). */ template static double BestNodeToNodeDistance(const TreeType* queryNode, const TreeType* referenceNode, const double centerToCenterDistance); /** * Return the best possible distance between the query node and the reference * child node given the base case distance between the query node and the * reference node. TreeType::ParentDistance() must be implemented to use * this. * * @param queryNode Query node. * @param referenceNode Reference node. * @param referenceChildNode Child of reference node which is being inspected. * @param centerToCenterDistance Distance between centers of query node and * reference node. */ template static double BestNodeToNodeDistance(const TreeType* queryNode, const TreeType* referenceNode, const TreeType* referenceChildNode, const double centerToCenterDistance); /** * Return the best possible distance between a node and a point. In our case, * this is the minimum distance between the tree node and the point using the * given distance function. */ template static double BestPointToNodeDistance(const VecType& queryPoint, const TreeType* referenceNode); /** * Return the best possible distance between a point and a node, given that * the distance between the point and the center of the node has already been * calculated. This is used in conjunction with trees that have * self-children (like cover trees). */ template static double BestPointToNodeDistance(const VecType& queryPoint, const TreeType* referenceNode, const double pointToCenterDistance); /** * Return the best child according to this sort policy. In this case it will * return the one with the minimum distance. */ template static size_t GetBestChild(const VecType& queryPoint, TreeType& referenceNode) { return referenceNode.GetNearestChild(queryPoint); }; /** * Return the best child according to this sort policy. In this case it will * return the one with the minimum distance. */ template static size_t GetBestChild(const TreeType& queryNode, TreeType& referenceNode) { return referenceNode.GetNearestChild(queryNode); }; /** * Return what should represent the worst possible distance with this * particular sort policy. In our case, this should be the maximum possible * distance, DBL_MAX. * * @return DBL_MAX */ static inline double WorstDistance() { return DBL_MAX; } /** * Return what should represent the best possible distance with this * particular sort policy. In our case, this should be the minimum possible * distance, 0.0. * * @return 0.0 */ static inline double BestDistance() { return 0.0; } /** * Return the best combination of the two distances. 
*/ static inline double CombineBest(const double a, const double b) { return std::max(a - b, 0.0); } /** * Return the worst combination of the two distances. */ static inline double CombineWorst(const double a, const double b) { if (a == DBL_MAX || b == DBL_MAX) return DBL_MAX; return a + b; } /** * Return the given value relaxed. * * @param value Value to relax. * @param epsilon Relative error (non-negative). * * @return double Value relaxed. */ static inline double Relax(const double value, const double epsilon) { if (value == DBL_MAX) return DBL_MAX; return (1 / (1 + epsilon)) * value; } /** * Convert the given distance into a score. Lower scores are better, so in * the case of nearest neighbor sort where lower distances are better, we just * return the distance. */ static inline double ConvertToScore(const double distance) { return distance; } /** * Convert the given score to a distance. This is the inverse of the * operation provided by ConvertToScore(). For nearest neighbor search, there * is no need for any change. */ static inline double ConvertToDistance(const double score) { return score; } }; } // namespace neighbor } // namespace mlpack // Include implementation of templated functions. #include "nearest_neighbor_sort_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/sort_policies/nearest_neighbor_sort_impl.hpp000066400000000000000000000043321315013601400326030ustar00rootroot00000000000000/** * @file nearest_neighbor_sort_impl.hpp * @author Ryan Curtin * * Implementation of templated methods for the NearestNeighborSort SortPolicy * class for the NeighborSearch class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_NEIGHBOR_NEAREST_NEIGHBOR_SORT_IMPL_HPP #define MLPACK_NEIGHBOR_NEAREST_NEIGHBOR_SORT_IMPL_HPP namespace mlpack { namespace neighbor { template inline double NearestNeighborSort::BestNodeToNodeDistance( const TreeType* queryNode, const TreeType* referenceNode) { // This is not implemented yet for the general case because the trees do not // accept arbitrary distance metrics. return queryNode->MinDistance(*referenceNode); } template inline double NearestNeighborSort::BestNodeToNodeDistance( const TreeType* queryNode, const TreeType* referenceNode, const double centerToCenterDistance) { return queryNode->MinDistance(*referenceNode, centerToCenterDistance); } template inline double NearestNeighborSort::BestNodeToNodeDistance( const TreeType* queryNode, const TreeType* /* referenceNode */, const TreeType* referenceChildNode, const double centerToCenterDistance) { return queryNode->MinDistance(*referenceChildNode, centerToCenterDistance) - referenceChildNode->ParentDistance(); } template inline double NearestNeighborSort::BestPointToNodeDistance( const VecType& point, const TreeType* referenceNode) { // This is not implemented yet for the general case because the trees do not // accept arbitrary distance metrics. 
return referenceNode->MinDistance(point); } template inline double NearestNeighborSort::BestPointToNodeDistance( const VecType& point, const TreeType* referenceNode, const double pointToCenterDistance) { return referenceNode->MinDistance(point, pointToCenterDistance); } } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/typedef.hpp000066400000000000000000000061541315013601400237630ustar00rootroot00000000000000/** * @file typedef.hpp * @author Ryan Curtin * * Simple typedefs describing template instantiations of the NeighborSearch * class which are commonly used. This is meant to be included by * neighbor_search.h but is a separate file for simplicity. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_NEIGHBOR_SEARCH_TYPEDEF_H #define MLPACK_NEIGHBOR_SEARCH_TYPEDEF_H // In case someone included this directly. #include "neighbor_search.hpp" #include #include "sort_policies/nearest_neighbor_sort.hpp" #include "sort_policies/furthest_neighbor_sort.hpp" namespace mlpack { namespace neighbor { /** * The KNN class is the k-nearest-neighbors method. It returns L2 distances * (Euclidean distances) for each of the k nearest neighbors. */ typedef NeighborSearch KNN; /** * The KFN class is the k-furthest-neighbors method. It returns L2 distances * (Euclidean distances) for each of the k furthest neighbors. */ typedef NeighborSearch KFN; /** * The DefeatistKNN class is the k-nearest-neighbors method considering * defeatist search. It returns L2 distances (Euclidean distances) for each of * the k nearest neighbors found. * @tparam TreeType The tree type to use; must adhere to the TreeType API, * and implement Defeatist Traversers. */ template class TreeType = tree::SPTree> using DefeatistKNN = NeighborSearch< NearestNeighborSort, metric::EuclideanDistance, arma::mat, TreeType, TreeType, arma::mat>::template DefeatistDualTreeTraverser, TreeType, arma::mat>::template DefeatistSingleTreeTraverser>; /** * The SpillKNN class is the k-nearest-neighbors method considering defeatist * search on SPTree. It returns L2 distances (Euclidean distances) for each of * the k nearest neighbors found. */ typedef DefeatistKNN SpillKNN; /** * @deprecated * The AllkNN class is the k-nearest-neighbors method. It returns L2 distances * (Euclidean distances) for each of the k nearest neighbors. This typedef will * be removed in mlpack 3.0.0; use the KNN typedef instead. */ typedef NeighborSearch AllkNN; /** * @deprecated * The AllkFN class is the k-furthest-neighbors method. It returns L2 * distances (Euclidean distances) for each of the k furthest neighbors. This * typedef will be removed in mlpack 3.0.0; use the KFN typedef instead. */ typedef NeighborSearch AllkFN; } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/neighbor_search/unmap.cpp000066400000000000000000000041521315013601400234320ustar00rootroot00000000000000/** * @file unmap.cpp * @author Ryan Curtin * * Auxiliary function to unmap neighbor search results. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
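 *
 * For illustration (hypothetical variables, not part of the original source),
 * the dual-tree overload defined below is typically used like this after a
 * search on tree-mapped data:
 *
 * @code
 * arma::Mat<size_t> neighborsOut;
 * arma::mat distancesOut;
 * mlpack::neighbor::Unmap(neighbors, distances, referenceMap, queryMap,
 *     neighborsOut, distancesOut);
 * @endcode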
*/ #include "unmap.hpp" namespace mlpack { namespace neighbor { // Useful in the dual-tree setting. void Unmap(const arma::Mat& neighbors, const arma::mat& distances, const std::vector& referenceMap, const std::vector& queryMap, arma::Mat& neighborsOut, arma::mat& distancesOut, const bool squareRoot) { // Set matrices to correct size. neighborsOut.set_size(neighbors.n_rows, neighbors.n_cols); distancesOut.set_size(distances.n_rows, distances.n_cols); // Unmap distances. for (size_t i = 0; i < distances.n_cols; ++i) { // Map columns to the correct place. The ternary operator does not work // here... if (squareRoot) distancesOut.col(queryMap[i]) = sqrt(distances.col(i)); else distancesOut.col(queryMap[i]) = distances.col(i); // Map indices of neighbors. for (size_t j = 0; j < distances.n_rows; ++j) neighborsOut(j, queryMap[i]) = referenceMap[neighbors(j, i)]; } } // Useful in the single-tree setting. void Unmap(const arma::Mat& neighbors, const arma::mat& distances, const std::vector& referenceMap, arma::Mat& neighborsOut, arma::mat& distancesOut, const bool squareRoot) { // Set matrices to correct size. neighborsOut.set_size(neighbors.n_rows, neighbors.n_cols); // Take square root of distances, if necessary. if (squareRoot) distancesOut = sqrt(distances); else distancesOut = distances; // Map neighbors back to original locations. for (size_t j = 0; j < neighbors.n_elem; ++j) neighborsOut[j] = referenceMap[neighbors[j]]; } } // namespace neighbor } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/neighbor_search/unmap.hpp000066400000000000000000000052051315013601400234370ustar00rootroot00000000000000/** * @file unmap.hpp * @author Ryan Curtin * * Convenience methods to unmap results. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NEIGHBOR_SEARCH_UNMAP_HPP #define MLPACK_METHODS_NEIGHBOR_SEARCH_UNMAP_HPP #include namespace mlpack { namespace neighbor { /** * Assuming that the datasets have been mapped using the referenceMap and the * queryMap (such as during kd-tree construction), unmap the columns of the * distances and neighbors matrices into neighborsOut and distancesOut, and also * unmap the entries in each row of neighbors. This is useful for the dual-tree * case. * * @param neighbors Matrix of neighbors resulting from neighbor search. * @param distances Matrix of distances resulting from neighbor search. * @param referenceMap Mapping of reference set to old points. * @param queryMap Mapping of query set to old points. * @param neighborsOut Matrix to store unmapped neighbors into. * @param distancesOut Matrix to store unmapped distances into. * @param squareRoot If true, take the square root of the distances. */ void Unmap(const arma::Mat& neighbors, const arma::mat& distances, const std::vector& referenceMap, const std::vector& queryMap, arma::Mat& neighborsOut, arma::mat& distancesOut, const bool squareRoot = false); /** * Assuming that the datasets have been mapped using referenceMap (such as * during kd-tree construction), unmap the columns of the distances and * neighbors matrices into neighborsOut and distancesOut, and also unmap the * entries in each row of neighbors. This is useful for the single-tree case. * * @param neighbors Matrix of neighbors resulting from neighbor search. 
* @param distances Matrix of distances resulting from neighbor search. * @param referenceMap Mapping of reference set to old points. * @param neighborsOut Matrix to store unmapped neighbors into. * @param distancesOut Matrix to store unmapped distances into. * @param squareRoot If true, take the square root of the distances. */ void Unmap(const arma::Mat& neighbors, const arma::mat& distances, const std::vector& referenceMap, arma::Mat& neighborsOut, arma::mat& distancesOut, const bool squareRoot = false); } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/nmf/000077500000000000000000000000001315013601400172425ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/nmf/CMakeLists.txt000066400000000000000000000000301315013601400217730ustar00rootroot00000000000000add_cli_executable(nmf) mlpack-2.2.5/src/mlpack/methods/nmf/nmf_main.cpp000066400000000000000000000125461315013601400215420ustar00rootroot00000000000000/** * @file nmf_main.cpp * @author Mohan Rajendran * * Main executable to run NMF. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include #include #include using namespace mlpack; using namespace mlpack::amf; using namespace std; // Document program. PROGRAM_INFO("Non-negative Matrix Factorization", "This program performs " "non-negative matrix factorization on the given dataset, storing the " "resulting decomposed matrices in the specified files. For an input " "dataset V, NMF decomposes V into two matrices W and H such that " "\n\n" "V = W * H" "\n\n" "where all elements in W and H are non-negative. If V is of size (n x m)," " then W will be of size (n x r) and H will be of size (r x m), where r is " "the rank of the factorization (specified by --rank)." "\n\n" "Optionally, the desired update rules for each NMF iteration can be chosen " "from the following list:" "\n\n" " - multdist: multiplicative distance-based update rules (Lee and Seung " "1999)\n" " - multdiv: multiplicative divergence-based update rules (Lee and Seung " "1999)\n" " - als: alternating least squares update rules (Paatero and Tapper 1994)" "\n\n" "The maximum number of iterations is specified with --max_iterations, and " "the minimum residue required for algorithm termination is specified with " "--min_residue."); // Parameters for program. PARAM_STRING_IN_REQ("input_file", "Input dataset to perform NMF on.", "i"); PARAM_STRING_OUT("w_file", "File to save the calculated W matrix to.", "W"); PARAM_STRING_OUT("h_file", "File to save the calculated H matrix to.", "H"); PARAM_INT_IN_REQ("rank", "Rank of the factorization.", "r"); PARAM_INT_IN("max_iterations", "Number of iterations before NMF terminates (0 " "runs until convergence.", "m", 10000); PARAM_INT_IN("seed", "Random seed. If 0, 'std::time(NULL)' is used.", "s", 0); PARAM_DOUBLE_IN("min_residue", "The minimum root mean square residue allowed " "for each iteration, below which the program terminates.", "e", 1e-5); PARAM_STRING_IN("update_rules", "Update rules for each iteration; ( multdist | " "multdiv | als ).", "u", "multdist"); int main(int argc, char** argv) { // Parse command line. CLI::ParseCommandLine(argc, argv); // Initialize random seed. 
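  // (Illustrative aside, not part of the original source: this executable is
  // a thin wrapper around the AMF class. Assuming mlpack's AMF API, the
  // default update rule corresponds to
  //
  //   SimpleResidueTermination srt(minResidue, maxIterations);
  //   AMF<> amf(srt);         // Multiplicative distance-based updates.
  //   amf.Apply(V, r, W, H);  // Factorize V ~ W * H.
  //
  // as done for each choice of --update_rules below.)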
if (CLI::GetParam("seed") != 0) math::RandomSeed((size_t) CLI::GetParam("seed")); else math::RandomSeed((size_t) std::time(NULL)); // Gather parameters. const string inputFile = CLI::GetParam("input_file"); const string hOutputFile = CLI::GetParam("h_file"); const string wOutputFile = CLI::GetParam("w_file"); const size_t r = CLI::GetParam("rank"); const size_t maxIterations = CLI::GetParam("max_iterations"); const double minResidue = CLI::GetParam("min_residue"); const string updateRules = CLI::GetParam("update_rules"); // Validate rank. if (r < 1) { Log::Fatal << "The rank of the factorization cannot be less than 1." << std::endl; } if ((updateRules != "multdist") && (updateRules != "multdiv") && (updateRules != "als")) { Log::Fatal << "Invalid update rules ('" << updateRules << "'); must be '" << "multdist', 'multdiv', or 'als'." << std::endl; } if (hOutputFile == "" && wOutputFile == "") { Log::Warn << "Neither --h_file nor --w_file are specified, so no output " << "will be saved!" << endl; } // Load input dataset. arma::mat V; data::Load(inputFile, V, true); arma::mat W; arma::mat H; // Perform NMF with the specified update rules. if (updateRules == "multdist") { Log::Info << "Performing NMF with multiplicative distance-based update " << "rules." << std::endl; SimpleResidueTermination srt(minResidue, maxIterations); AMF<> amf(srt); amf.Apply(V, r, W, H); } else if (updateRules == "multdiv") { Log::Info << "Performing NMF with multiplicative divergence-based update " << "rules." << std::endl; SimpleResidueTermination srt(minResidue, maxIterations); AMF amf(srt); amf.Apply(V, r, W, H); } else if (updateRules == "als") { Log::Info << "Performing NMF with alternating least squared update rules." << std::endl; SimpleResidueTermination srt(minResidue, maxIterations); AMF amf(srt); amf.Apply(V, r, W, H); } // Save results. if (wOutputFile != "") data::Save(wOutputFile, W, false); if (hOutputFile != "") data::Save(hOutputFile, H, false); } mlpack-2.2.5/src/mlpack/methods/nystroem_method/000077500000000000000000000000001315013601400217025ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/nystroem_method/CMakeLists.txt000066400000000000000000000010331315013601400244370ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES nystroem_method.hpp nystroem_method_impl.hpp ordered_selection.hpp random_selection.hpp kmeans_selection.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/nystroem_method/kmeans_selection.hpp000066400000000000000000000032441315013601400257410ustar00rootroot00000000000000/** * @file kmeans_selection.hpp * @author Marcus Edel * * Use the centroids of the K-Means clustering method for use in the Nystroem * method of kernel matrix approximation. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
 */
#ifndef MLPACK_METHODS_NYSTROEM_METHOD_KMEANS_SELECTION_HPP
#define MLPACK_METHODS_NYSTROEM_METHOD_KMEANS_SELECTION_HPP

#include <mlpack/core.hpp>
#include <mlpack/methods/kmeans/kmeans.hpp>

namespace mlpack {
namespace kernel {

/**
 * Implementation of the kmeans sampling scheme.
 *
 * @tparam ClusteringType Type of clustering.
 * @tparam maxIterations Maximum number of iterations allowed before giving up.
 */
template<typename ClusteringType = kmeans::KMeans<>, size_t maxIterations = 5>
class KMeansSelection
{
 public:
  /**
   * Use the K-Means clustering method to select the specified number of points
   * in the dataset. You are responsible for deleting the returned matrix!
   *
   * @param data Dataset to sample from.
   * @param m Number of points to select.
   * @return Matrix pointer in which centroids are stored.
   */
  const static arma::mat* Select(const arma::mat& data, const size_t m)
  {
    arma::Row<size_t> assignments;
    arma::mat* centroids = new arma::mat;

    // Perform the K-Means clustering method.
    ClusteringType kmeans(maxIterations);
    kmeans.Cluster(data, m, assignments, *centroids);

    return centroids;
  }
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/nystroem_method/nystroem_method.hpp000066400000000000000000000050751315013601400256400ustar00rootroot00000000000000/**
 * @file nystroem_method.hpp
 * @author Ryan Curtin
 * @author Marcus Edel
 *
 * Implementation of the Nystroem method for approximating a kernel matrix.
 * There are many variations on how to do this, so template parameters allow
 * the selection of many different techniques.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_NYSTROEM_METHOD_NYSTROEM_METHOD_HPP
#define MLPACK_METHODS_NYSTROEM_METHOD_NYSTROEM_METHOD_HPP

#include <mlpack/core.hpp>
#include "kmeans_selection.hpp"

namespace mlpack {
namespace kernel {

template<
    typename KernelType,
    typename PointSelectionPolicy = KMeansSelection<>
>
class NystroemMethod
{
 public:
  /**
   * Create the NystroemMethod object. The constructor here does not really do
   * anything.
   *
   * @param data Data matrix.
   * @param kernel Kernel to be used for computation.
   * @param rank Rank to be used for matrix approximation.
   */
  NystroemMethod(const arma::mat& data, KernelType& kernel, const size_t rank);

  /**
   * Apply the low-rank factorization to obtain an output matrix G such that
   * K' = G * G^T.
   *
   * @param output Matrix to store kernel approximation into.
   */
  void Apply(arma::mat& output);

  /**
   * Construct the kernel matrix with the matrix that contains the selected
   * points.
   *
   * @param data Data matrix pointer.
   * @param miniKernel Matrix to store the constructed mini-kernel matrix in.
   * @param semiKernel Matrix to store the constructed semi-kernel matrix in.
   */
  void GetKernelMatrix(const arma::mat* data,
                       arma::mat& miniKernel,
                       arma::mat& semiKernel);

  /**
   * Construct the kernel matrix with the selected points.
   *
   * @param selectedPoints Indices of selected points.
   * @param miniKernel Matrix to store the constructed mini-kernel matrix in.
   * @param semiKernel Matrix to store the constructed semi-kernel matrix in.
   */
  void GetKernelMatrix(const arma::Col<size_t>& selectedPoints,
                       arma::mat& miniKernel,
                       arma::mat& semiKernel);

 private:
  //! The reference dataset.
  const arma::mat& data;
  //! The locally stored kernel, if it is necessary.
  KernelType& kernel;
  //! Rank used for matrix approximation.
  const size_t rank;
};

} // namespace kernel
} // namespace mlpack

// Include implementation.
#include "nystroem_method_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/nystroem_method/nystroem_method_impl.hpp000066400000000000000000000061671315013601400266660ustar00rootroot00000000000000/** * @file nystroem_method_impl.hpp * @author Ryan Curtin * @author Marcus Edel * * Implementation of the Nystroem method for approximating a kernel matrix. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NYSTROEM_METHOD_NYSTROEM_METHOD_IMPL_HPP #define MLPACK_METHODS_NYSTROEM_METHOD_NYSTROEM_METHOD_IMPL_HPP // In case it hasn't been included yet. #include "nystroem_method.hpp" namespace mlpack { namespace kernel { template NystroemMethod::NystroemMethod( const arma::mat& data, KernelType& kernel, const size_t rank) : data(data), kernel(kernel), rank(rank) { } template void NystroemMethod::GetKernelMatrix( const arma::mat* selectedData, arma::mat& miniKernel, arma::mat& semiKernel) { // Assemble mini-kernel matrix. for (size_t i = 0; i < rank; ++i) for (size_t j = 0; j < rank; ++j) miniKernel(i, j) = kernel.Evaluate(selectedData->col(i), selectedData->col(j)); // Construct semi-kernel matrix with interactions between selected data and // all points. for (size_t i = 0; i < data.n_cols; ++i) for (size_t j = 0; j < rank; ++j) semiKernel(i, j) = kernel.Evaluate(data.col(i), selectedData->col(j)); // Clean the memory. delete selectedData; } template void NystroemMethod::GetKernelMatrix( const arma::Col& selectedPoints, arma::mat& miniKernel, arma::mat& semiKernel) { // Assemble mini-kernel matrix. for (size_t i = 0; i < rank; ++i) for (size_t j = 0; j < rank; ++j) miniKernel(i, j) = kernel.Evaluate(data.col(selectedPoints(i)), data.col(selectedPoints(j))); // Construct semi-kernel matrix with interactions between selected points and // all points. for (size_t i = 0; i < data.n_cols; ++i) for (size_t j = 0; j < rank; ++j) semiKernel(i, j) = kernel.Evaluate(data.col(i), data.col(selectedPoints(j))); } template void NystroemMethod::Apply(arma::mat& output) { arma::mat miniKernel(rank, rank); arma::mat semiKernel(data.n_cols, rank); GetKernelMatrix(PointSelectionPolicy::Select(data, rank), miniKernel, semiKernel); // Singular value decomposition mini-kernel matrix. arma::mat U, V; arma::vec s; arma::svd(U, s, V, miniKernel); // Construct the output matrix. arma::mat normalization = arma::diagmat(1.0 / sqrt(s)); output = semiKernel * U * normalization * V; } } // namespace kernel } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/nystroem_method/ordered_selection.hpp000066400000000000000000000023541315013601400261100ustar00rootroot00000000000000/** * @file ordered_selection.hpp * @author Ryan Curtin * * Select the first points of the dataset for use in the Nystroem method of * kernel matrix approximation. This is mostly for testing, but might have * other uses. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
 */
#ifndef MLPACK_METHODS_NYSTROEM_METHOD_ORDERED_SELECTION_HPP
#define MLPACK_METHODS_NYSTROEM_METHOD_ORDERED_SELECTION_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace kernel {

class OrderedSelection
{
 public:
  /**
   * Select the first m points of the dataset.
   *
   * @param data Dataset to sample from.
   * @param m Number of points to select.
   * @return Indices of selected points from the dataset.
   */
  const static arma::Col<size_t> Select(const arma::mat& /* unused */,
                                        const size_t m)
  {
    // This generates [0 1 2 3 ... (m - 1)].
    return arma::linspace<arma::Col<size_t>>(0, m - 1, m);
  }
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/nystroem_method/random_selection.hpp000066400000000000000000000023731315013601400257450ustar00rootroot00000000000000/**
 * @file random_selection.hpp
 * @author Ryan Curtin
 *
 * Randomly select some points (with replacement) to use for the Nystroem
 * method. Replacement is suboptimal, but for rank << number of points,
 * duplicate selections are unlikely.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_NYSTROEM_METHOD_RANDOM_SELECTION_HPP
#define MLPACK_METHODS_NYSTROEM_METHOD_RANDOM_SELECTION_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace kernel {

class RandomSelection
{
 public:
  /**
   * Randomly select the specified number of points in the dataset.
   *
   * @param data Dataset to sample from.
   * @param m Number of points to select.
   * @return Indices of selected points from the dataset.
   */
  const static arma::Col<size_t> Select(const arma::mat& data, const size_t m)
  {
    arma::Col<size_t> selectedPoints(m);
    for (size_t i = 0; i < m; ++i)
      selectedPoints(i) = math::RandInt(0, data.n_cols);

    return selectedPoints;
  }
};

} // namespace kernel
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/pca/000077500000000000000000000000001315013601400172255ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/pca/CMakeLists.txt000066400000000000000000000010001315013601400217560ustar00rootroot00000000000000# Define the files we need to compile
# Anything not in this list will not be compiled into mlpack.
set(SOURCES
  pca.hpp
  pca_impl.hpp
)

# Add directory name to sources.
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

# Append sources (with directory name) to list of all mlpack sources (used at
# the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/pca/decomposition_policies/exact_svd_method.hpp000066400000000000000000000044521315013601400300260ustar00rootroot00000000000000/** * @file exact_svd_method.hpp * @author Ajinkya Kale * @author Ryan Curtin * @author Marcus Edel * * Implementation of the exact svd method for use in the Principal Components * Analysis method. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_PCA_DECOMPOSITION_POLICIES_EXACT_SVD_METHOD_HPP #define MLPACK_METHODS_PCA_DECOMPOSITION_POLICIES_EXACT_SVD_METHOD_HPP #include namespace mlpack { namespace pca { /** * Implementation of the exact SVD policy. */ class ExactSVDPolicy { public: /** * Apply Principal Component Analysis to the provided data set using the * exact SVD method. * * @param data Data matrix. * @param centeredData Centered data matrix. * @param transformedData Matrix to put results of PCA into. * @param eigVal Vector to put eigenvalues into. * @param eigvec Matrix to put eigenvectors (loadings) into. * @param rank Rank of the decomposition. */ void Apply(const arma::mat& data, const arma::mat& centeredData, arma::mat& transformedData, arma::vec& eigVal, arma::mat& eigvec, const size_t /* rank */) { // This matrix will store the right singular values; we do not need them. arma::mat v; // Do singular value decomposition. Use the economical singular value // decomposition if the columns are much larger than the rows. if (data.n_rows < data.n_cols) { // Do economical singular value decomposition and compute only the left // singular vectors. arma::svd_econ(eigvec, eigVal, v, centeredData, 'l'); } else { arma::svd(eigvec, eigVal, v, centeredData); } // Now we must square the singular values to get the eigenvalues. // In addition we must divide by the number of points, because the // covariance matrix is X * X' / (N - 1). eigVal %= eigVal / (data.n_cols - 1); // Project the samples to the principals. transformedData = arma::trans(eigvec) * centeredData; } }; } // namespace pca } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/pca/decomposition_policies/quic_svd_method.hpp000066400000000000000000000060431315013601400276610ustar00rootroot00000000000000/** * @file quic_svd_method.hpp * @author Marcus Edel * * Implementation of the QUIC-SVD policy for use in the Principal Components * Analysis method. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_PCA_DECOMPOSITION_POLICIES_QUIC_SVD_METHOD_HPP #define MLPACK_METHODS_PCA_DECOMPOSITION_POLICIES_QUIC_SVD_METHOD_HPP #include #include namespace mlpack { namespace pca { /** * Implementation of the QUIC-SVD policy. */ class QUICSVDPolicy { public: /** * Use QUIC-SVD method to perform the principal components analysis (PCA). * * @param epsilon Error tolerance fraction for calculated subspace. * @param delta Cumulative probability for Monte Carlo error lower bound. 
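   *
   * As an illustrative sketch (the parameter values here are arbitrary), this
   * policy is typically handed to PCAType:
   *
   * @code
   * QUICSVDPolicy quic(0.05, 0.1);
   * PCAType<QUICSVDPolicy> pca(false, quic);
   * @endcode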
*/ QUICSVDPolicy(const double epsilon = 0.03, const double delta = 0.1) : epsilon(epsilon), delta(delta) { /* Nothing to do here */ } /** * Apply Principal Component Analysis to the provided data set using the * QUIC-SVD method. * * @param data Data matrix. * @param centeredData Centered data matrix. * @param transformedData Matrix to put results of PCA into. * @param eigVal Vector to put eigenvalues into. * @param eigvec Matrix to put eigenvectors (loadings) into. * @param rank Rank of the decomposition. */ void Apply(const arma::mat& data, const arma::mat& centeredData, arma::mat& transformedData, arma::vec& eigVal, arma::mat& eigvec, const size_t /* rank */) { // This matrix will store the right singular values; we do not need them. arma::mat v, sigma; // Do singular value decomposition using the QUIC-SVD algorithm. svd::QUIC_SVD quicsvd(centeredData, eigvec, v, sigma, epsilon, delta); // Now we must square the singular values to get the eigenvalues. // In addition we must divide by the number of points, because the // covariance matrix is X * X' / (N - 1). eigVal = arma::pow(arma::diagvec(sigma), 2) / (data.n_cols - 1); // Project the samples to the principals. transformedData = arma::trans(eigvec) * centeredData; } //! Get the error tolerance fraction for calculated subspace. double Epsilon() const { return epsilon; } //! Modify the error tolerance fraction for calculated subspace. double& Epsilon() { return epsilon; } //! Get the cumulative probability for Monte Carlo error lower bound. double Delta() const { return delta; } //! Modify the cumulative probability for Monte Carlo error lower bound. double& Delta() { return delta; } private: //! Error tolerance fraction for calculated subspace. double epsilon; //! Cumulative probability for Monte Carlo error lower bound. double delta; }; } // namespace pca } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/pca/decomposition_policies/randomized_svd_method.hpp000066400000000000000000000064341315013601400310600ustar00rootroot00000000000000/** * @file randomized_svd_method.hpp * @author Marcus Edel * * Implementation of the randomized svd method for use in the Principal * Components Analysis method. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_PCA_DECOMPOSITION_POLICIES_RANDOMIZED_SVD_METHOD_HPP #define MLPACK_METHODS_PCA_DECOMPOSITION_POLICIES_RANDOMIZED_SVD_METHOD_HPP #include #include #include namespace mlpack { namespace pca { /** * Implementation of the randomized SVD policy. */ class RandomizedSVDPolicy { public: /** * Use randomized SVD method to perform the principal components analysis * (PCA). * * @param iteratedPower Size of the normalized power iterations * (Default: rank + 2). * @param maxIterations Number of iterations for the power method * (Default: 2). */ RandomizedSVDPolicy(const size_t iteratedPower = 0, const size_t maxIterations = 2) : iteratedPower(iteratedPower), maxIterations(maxIterations) { /* Nothing to do here */ } /** * Apply Principal Component Analysis to the provided data set using the * randomized SVD. * * @param data Data matrix. * @param centeredData Centered data matrix. * @param transformedData Matrix to put results of PCA into. * @param eigVal Vector to put eigenvalues into. * @param eigvec Matrix to put eigenvectors (loadings) into. 
* @param rank Rank of the decomposition. */ void Apply(const arma::mat& data, const arma::mat& centeredData, arma::mat& transformedData, arma::vec& eigVal, arma::mat& eigvec, const size_t rank) { // This matrix will store the right singular values; we do not need them. arma::mat v; // Do singular value decomposition using the randomized SVD algorithm. svd::RandomizedSVD rsvd(iteratedPower, maxIterations); rsvd.Apply(data, eigvec, eigVal, v, rank); // Now we must square the singular values to get the eigenvalues. // In addition we must divide by the number of points, because the // covariance matrix is X * X' / (N - 1). eigVal %= eigVal / (data.n_cols - 1); // Project the samples to the principals. transformedData = arma::trans(eigvec) * centeredData; } //! Get the size of the normalized power iterations. size_t IteratedPower() const { return iteratedPower; } //! Modify the size of the normalized power iterations. size_t& IteratedPower() { return iteratedPower; } //! Get the number of iterations for the power method. size_t MaxIterations() const { return maxIterations; } //! Modify the number of iterations for the power method. size_t& MaxIterations() { return maxIterations; } private: //! Locally stored size of the normalized power iterations. size_t iteratedPower; //! Locally stored number of iterations for the power method. size_t maxIterations; }; } // namespace pca } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/pca/pca.hpp000066400000000000000000000127741315013601400205140ustar00rootroot00000000000000/** * @file pca.hpp * @author Ajinkya Kale * @author Ryan Curtin * @author Marcus Edel * * Defines the PCA class to perform Principal Components Analysis on the * specified data set. There are many variations on how to do this, so * template parameters allow the selection of different techniques. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_PCA_PCA_HPP #define MLPACK_METHODS_PCA_PCA_HPP #include #include namespace mlpack { namespace pca { /** * This class implements principal components analysis (PCA). This is a * common, widely-used technique that is often used for either dimensionality * reduction or transforming data into a better basis. Further information on * PCA can be found in almost any statistics or machine learning textbook, and * all over the internet. Note this class will be changed to have the name PCA * in mlpack 3.0.0 */ template class PCAType { public: /** * Create the PCA object, specifying if the data should be scaled in each * dimension by standard deviation when PCA is performed. * * @param scaleData Whether or not to scale the data. */ PCAType(const bool scaleData = false, const DecompositionPolicy& decomposition = DecompositionPolicy()); /** * Apply Principal Component Analysis to the provided data set. It is safe * to pass the same matrix reference for both data and transformedData. * * @param data Data matrix. * @param transformedData Matrix to put results of PCA into. * @param eigval Vector to put eigenvalues into. * @param eigvec Matrix to put eigenvectors (loadings) into. */ void Apply(const arma::mat& data, arma::mat& transformedData, arma::vec& eigVal, arma::mat& eigvec); /** * Apply Principal Component Analysis to the provided data set. 
It is safe * to pass the same matrix reference for both data and transformedData. * * @param data Data matrix. * @param transformedData Matrix to store results of PCA in. * @param eigVal Vector to put eigenvalues into. */ void Apply(const arma::mat& data, arma::mat& transformedData, arma::vec& eigVal); /** * Use PCA for dimensionality reduction on the given dataset. This will save * the newDimension largest principal components of the data and remove the * rest. The parameter returned is the amount of variance of the data that * is retained; this is a value between 0 and 1. For instance, a value of * 0.9 indicates that 90% of the variance present in the data was retained. * * @param data Data matrix. * @param newDimension New dimension of the data. * @return Amount of the variance of the data retained (between 0 and 1). */ double Apply(arma::mat& data, const size_t newDimension); //! This overload is here to make sure int gets casted right to size_t. inline double Apply(arma::mat& data, const int newDimension) { return Apply(data, size_t(newDimension)); } /** * Use PCA for dimensionality reduction on the given dataset. This will save * as many dimensions as necessary to retain at least the given amount of * variance (specified by parameter varRetained). The amount should be * between 0 and 1; if the amount is 0, then only 1 dimension will be * retained. If the amount is 1, then all dimensions will be retained. * * The method returns the actual amount of variance retained, which will * always be greater than or equal to the varRetained parameter. * * @param data Data matrix. * @param varRetained Lower bound on amount of variance to retain; should be * between 0 and 1. * @return Actual amount of variance retained (between 0 and 1). */ double Apply(arma::mat& data, const double varRetained); //! Get whether or not this PCA object will scale (by standard deviation) //! the data when PCA is performed. bool ScaleData() const { return scaleData; } //! Modify whether or not this PCA object will scale (by standard deviation) //! the data when PCA is performed. bool& ScaleData() { return scaleData; } private: //! Scaling the data is when we reduce the variance of each dimension to 1. void ScaleData(arma::mat& centeredData) { if (scaleData) { // Scaling the data is when we reduce the variance of each dimension // to 1. We do this by dividing each dimension by its standard // deviation. arma::vec stdDev = arma::stddev( centeredData, 0, 1 /* for each dimension */); // If there are any zeroes, make them very small. for (size_t i = 0; i < stdDev.n_elem; ++i) if (stdDev[i] == 0) stdDev[i] = 1e-50; centeredData /= arma::repmat(stdDev, 1, centeredData.n_cols); } } //! Whether or not the data will be scaled by standard deviation when PCA is //! performed. bool scaleData; //! Decomposition method used to perform principal components analysis. DecompositionPolicy decomposition; }; // class PCA //! 3.0.0 TODO: break reverse-compatibility by changing PCAType to PCA. typedef PCAType PCA; } // namespace pca } // namespace mlpack // Include implementation. #include "pca_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/pca/pca_impl.hpp000066400000000000000000000133551315013601400215310ustar00rootroot00000000000000/** * @file pca_impl.hpp * @author Ajinkya Kale * @author Ryan Curtin * @author Marcus Edel * * Implementation of PCA class to perform Principal Components Analysis on the * specified data set. 
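 *
 * A minimal usage sketch (hypothetical data; the default decomposition policy
 * is the exact SVD):
 *
 * @code
 * arma::mat dataset; // One point per column.
 * mlpack::pca::PCA p;
 * // Reduce to 3 dimensions; returns the fraction of variance retained.
 * const double varRetained = p.Apply(dataset, 3);
 * @endcode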
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_PCA_PCA_IMPL_HPP
#define MLPACK_METHODS_PCA_PCA_IMPL_HPP

#include
#include
#include "pca.hpp"

using namespace std;

namespace mlpack {
namespace pca {

template<typename DecompositionPolicy>
PCAType<DecompositionPolicy>::PCAType(
    const bool scaleData,
    const DecompositionPolicy& decomposition) :
    scaleData(scaleData),
    decomposition(decomposition)
{ }

/**
 * Apply Principal Component Analysis to the provided data set.
 *
 * @param data Data matrix.
 * @param transformedData Data with PCA applied.
 * @param eigVal Vector to store the eigenvalues in.
 * @param eigvec Matrix to store the eigenvectors (loadings) in.
 */
template<typename DecompositionPolicy>
void PCAType<DecompositionPolicy>::Apply(const arma::mat& data,
                                         arma::mat& transformedData,
                                         arma::vec& eigVal,
                                         arma::mat& eigvec)
{
  Timer::Start("pca");

  // Center the data into a temporary matrix.
  arma::mat centeredData;
  math::Center(data, centeredData);

  // Scale the data if the user asks for it.
  ScaleData(centeredData);

  decomposition.Apply(data, centeredData, transformedData, eigVal, eigvec,
      data.n_rows);

  Timer::Stop("pca");
}

/**
 * Apply Principal Component Analysis to the provided data set.
 *
 * @param data Data matrix.
 * @param transformedData Data with PCA applied.
 * @param eigVal Vector to store the eigenvalues in.
 */
template<typename DecompositionPolicy>
void PCAType<DecompositionPolicy>::Apply(const arma::mat& data,
                                         arma::mat& transformedData,
                                         arma::vec& eigVal)
{
  arma::mat eigvec;
  Apply(data, transformedData, eigVal, eigvec);
}

/**
 * Use PCA for dimensionality reduction on the given dataset. This will save
 * the newDimension largest principal components of the data and remove the
 * rest. The parameter returned is the amount of variance of the data that is
 * retained; this is a value between 0 and 1. For instance, a value of 0.9
 * indicates that 90% of the variance present in the data was retained.
 *
 * @param data Data matrix.
 * @param newDimension New dimension of the data.
 * @return Amount of the variance of the data retained (between 0 and 1).
 */
template<typename DecompositionPolicy>
double PCAType<DecompositionPolicy>::Apply(arma::mat& data,
                                           const size_t newDimension)
{
  // Parameter validation.
  if (newDimension == 0)
    Log::Fatal << "PCA::Apply(): newDimension (" << newDimension << ") cannot "
        << "be zero!" << endl;
  if (newDimension > data.n_rows)
    Log::Fatal << "PCA::Apply(): newDimension (" << newDimension << ") cannot "
        << "be greater than the existing dimensionality of the data ("
        << data.n_rows << ")!" << endl;

  arma::mat eigvec;
  arma::vec eigVal;

  Timer::Start("pca");

  // Center the data into a temporary matrix.
  arma::mat centeredData;
  math::Center(data, centeredData);

  // Scale the data if the user asks for it.
  ScaleData(centeredData);

  decomposition.Apply(data, centeredData, data, eigVal, eigvec, newDimension);

  if (newDimension < eigvec.n_rows)
    // Drop unnecessary rows.
    data.shed_rows(newDimension, data.n_rows - 1);

  // The svd method returns only non-zero eigenvalues, so we have to calculate
  // the right dimension before calculating the amount of variance retained.
  const size_t eigDim = std::min(newDimension - 1, (size_t) eigVal.n_elem - 1);

  Timer::Stop("pca");

  // Calculate the total amount of variance retained.
  return (sum(eigVal.subvec(0, eigDim)) / sum(eigVal));
}

/**
 * Use PCA for dimensionality reduction on the given dataset.
This will save * as many dimensions as necessary to retain at least the given amount of * variance (specified by parameter varRetained). The amount should be * between 0 and 1; if the amount is 0, then only 1 dimension will be * retained. If the amount is 1, then all dimensions will be retained. * * The method returns the actual amount of variance retained, which will * always be greater than or equal to the varRetained parameter. */ template double PCAType::Apply(arma::mat& data, const double varRetained) { // Parameter validation. if (varRetained < 0) Log::Fatal << "PCA::Apply(): varRetained (" << varRetained << ") must be " << "greater than or equal to 0." << endl; if (varRetained > 1) Log::Fatal << "PCA::Apply(): varRetained (" << varRetained << ") should be " << "less than or equal to 1." << endl; arma::mat eigvec; arma::vec eigVal; Apply(data, data, eigVal, eigvec); // Calculate the dimension we should keep. size_t newDimension = 0; double varSum = 0.0; eigVal /= arma::sum(eigVal); // Normalize eigenvalues. while ((varSum < varRetained) && (newDimension < eigVal.n_elem)) { varSum += eigVal[newDimension]; ++newDimension; } // varSum is the actual variance we will retain. if (newDimension < eigVal.n_elem) data.shed_rows(newDimension, data.n_rows - 1); return varSum; } } // namespace pca } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/pca/pca_main.cpp000066400000000000000000000111721315013601400215020ustar00rootroot00000000000000/** * @file pca_main.cpp * @author Ryan Curtin * @author Marcus Edel * * Main executable to run PCA. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "pca.hpp" #include #include #include using namespace mlpack; using namespace mlpack::pca; using namespace std; // Document program. PROGRAM_INFO("Principal Components Analysis", "This program performs principal " "components analysis on the given dataset using the exact, randomized or " "QUIC SVD method. It will transform the data onto its principal components," " optionally performing dimensionality reduction by ignoring the principal " "components with the smallest eigenvalues."); // Parameters for program. PARAM_STRING_IN_REQ("input_file", "Input dataset to perform PCA on.", "i"); PARAM_STRING_OUT("output_file", "File to save modified dataset to.", "o"); PARAM_INT_IN("new_dimensionality", "Desired dimensionality of output dataset. " "If 0, no dimensionality reduction is performed.", "d", 0); PARAM_DOUBLE_IN("var_to_retain", "Amount of variance to retain; should be " "between 0 and 1. If 1, all variance is retained. Overrides -d.", "r", 0); PARAM_FLAG("scale", "If set, the data will be scaled before running PCA, such " "that the variance of each feature is 1.", "s"); PARAM_STRING_IN("decomposition_method", "Method used for the principal" "components analysis: 'exact', 'randomized', 'quic'.", "c", "exact"); //! Run RunPCA on the specified dataset with the given decomposition method. template void RunPCA(arma::mat& dataset, const size_t newDimension, const size_t scale, const double varToRetain) { PCAType p(scale); Log::Info << "Performing PCA on dataset..." 
<< endl; double varRetained; if (varToRetain != 0) { if (newDimension != 0) Log::Warn << "New dimensionality (-d) ignored because --var_to_retain " << "(-r) was specified." << endl; varRetained = p.Apply(dataset, varToRetain); } else { varRetained = p.Apply(dataset, newDimension); } Log::Info << (varRetained * 100) << "% of variance retained (" << dataset.n_rows << " dimensions)." << endl; } int main(int argc, char** argv) { // Parse commandline. CLI::ParseCommandLine(argc, argv); // Load input dataset. string inputFile = CLI::GetParam("input_file"); arma::mat dataset; data::Load(inputFile, dataset); // Issue a warning if the user did not specify an output file. if (!CLI::HasParam("output_file")) Log::Warn << "--output_file is not specified; no output will be " << "saved." << endl; // Find out what dimension we want. size_t newDimension = dataset.n_rows; // No reduction, by default. if (CLI::GetParam("new_dimensionality") != 0) { // Validate the parameter. newDimension = (size_t) CLI::GetParam("new_dimensionality"); if (newDimension > dataset.n_rows) { Log::Fatal << "New dimensionality (" << newDimension << ") cannot be greater than existing dimensionality (" << dataset.n_rows << ")!" << endl; } } // Get the options for running PCA. const size_t scale = CLI::HasParam("scale"); const double varToRetain = CLI::GetParam("var_to_retain"); const string decompositionMethod = CLI::GetParam( "decomposition_method"); // Perform PCA. if (decompositionMethod == "exact") { RunPCA(dataset, newDimension, scale, varToRetain); } else if (decompositionMethod == "randomized") { RunPCA(dataset, newDimension, scale, varToRetain); } else if (decompositionMethod == "quic") { RunPCA(dataset, newDimension, scale, varToRetain); } else { // Invalid decomposition method. Log::Fatal << "Invalid decomposition method ('" << decompositionMethod << "'); valid choices are 'exact', 'randomized', 'quic'." << endl; } // Now save the results. string outputFile = CLI::GetParam("output_file"); if (outputFile != "") data::Save(outputFile, dataset); } mlpack-2.2.5/src/mlpack/methods/perceptron/000077500000000000000000000000001315013601400206435ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/perceptron/CMakeLists.txt000066400000000000000000000011371315013601400234050ustar00rootroot00000000000000cmake_minimum_required(VERSION 2.8) # Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES perceptron.hpp perceptron_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_subdirectory(initialization_methods) add_subdirectory(learning_policies) add_cli_executable(perceptron) mlpack-2.2.5/src/mlpack/methods/perceptron/initialization_methods/000077500000000000000000000000001315013601400254155ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/perceptron/initialization_methods/CMakeLists.txt000066400000000000000000000007061315013601400301600ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES random_init.hpp zero_init.hpp ) # Add directory name to sources. 
set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/perceptron/initialization_methods/random_init.hpp000066400000000000000000000022341315013601400304320ustar00rootroot00000000000000/** * @file random_init.hpp * @author Udit Saxena * * Random initialization for perceptron weights. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_PERCEPTRON_INITIALIZATION_METHODS_RANDOM_INIT_HPP #define MLPACK_METHODS_PERCEPTRON_INITIALIZATION_METHODS_RANDOM_INIT_HPP #include namespace mlpack { namespace perceptron { /** * This class is used to initialize weights for the weightVectors matrix in a * random manner. */ class RandomInitialization { public: RandomInitialization() { } inline static void Initialize(arma::mat& weights, arma::vec& biases, const size_t numFeatures, const size_t numClasses) { weights.randu(numFeatures, numClasses); biases.randu(numClasses); } }; // class RandomInitialization } // namespace perceptron } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/perceptron/initialization_methods/zero_init.hpp000066400000000000000000000022051315013601400301270ustar00rootroot00000000000000/** * @file zero_init.hpp * @author Udit Saxena * * Implementation of ZeroInitialization policy for perceptrons. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_PERCEPTRON_INITIALIZATION_METHODS_ZERO_INIT_HPP #define MLPACK_METHODS_PERCEPTRON_INITIALIZATION_METHODS_ZERO_INIT_HPP #include namespace mlpack { namespace perceptron { /** * This class is used to initialize the matrix weightVectors to zero. */ class ZeroInitialization { public: ZeroInitialization() { } inline static void Initialize(arma::mat& weights, arma::vec& biases, const size_t numFeatures, const size_t numClasses) { weights.zeros(numFeatures, numClasses); biases.zeros(numClasses); } }; // class ZeroInitialization } // namespace perceptron } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/perceptron/learning_policies/000077500000000000000000000000001315013601400243315ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/perceptron/learning_policies/CMakeLists.txt000066400000000000000000000006771315013601400271030ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into mlpack. set(SOURCES simple_weight_update.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). 
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/perceptron/learning_policies/simple_weight_update.hpp000066400000000000000000000044761315013601400312570ustar00rootroot00000000000000/** * @file simple_weight_update.hpp * @author Udit Saxena * * Simple weight update rule for the perceptron. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef _MLPACK_METHODS_PERCEPTRON_LEARNING_POLICIES_SIMPLE_WEIGHT_UPDATE_HPP #define _MLPACK_METHODS_PERCEPTRON_LEARNING_POLICIES_SIMPLE_WEIGHT_UPDATE_HPP #include /** * This class is used to update the weightVectors matrix according to the simple * update rule as discussed by Rosenblatt: * * if a vector x has been incorrectly classified by a weight w, * then w = w - x * and w'= w'+ x * * where w' is the weight vector which correctly classifies x. */ namespace mlpack { namespace perceptron { class SimpleWeightUpdate { public: /** * This function is called to update the weightVectors matrix. It decreases * the weights of the incorrectly classified class while increasing the weight * of the correct class it should have been classified to. * * @tparam Type of vector (should be an Armadillo vector like arma::vec or * arma::sp_vec or something similar). * @param trainingPoint Point that was misclassified. * @param weights Matrix of weights. * @param biases Vector of biases. * @param incorrectClass Index of class that the point was incorrectly * classified as. * @param correctClass Index of the true class of the point. * @param instanceWeight Weight to be given to this particular point during * training (this is useful for boosting). */ template void UpdateWeights(const VecType& trainingPoint, arma::mat& weights, arma::vec& biases, const size_t incorrectClass, const size_t correctClass, const double instanceWeight = 1.0) { weights.col(incorrectClass) -= instanceWeight * trainingPoint; biases(incorrectClass) -= instanceWeight; weights.col(correctClass) += instanceWeight * trainingPoint; biases(correctClass) += instanceWeight; } }; } // namespace perceptron } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/perceptron/perceptron.hpp000066400000000000000000000133551315013601400235440ustar00rootroot00000000000000/** * @file perceptron.hpp * @author Udit Saxena * * Definition of Perceptron class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_PERCEPTRON_PERCEPTRON_HPP #define MLPACK_METHODS_PERCEPTRON_PERCEPTRON_HPP #include #include "initialization_methods/zero_init.hpp" #include "initialization_methods/random_init.hpp" #include "learning_policies/simple_weight_update.hpp" namespace mlpack { namespace perceptron { /** * This class implements a simple perceptron (i.e., a single layer neural * network). It converges if the supplied training dataset is linearly * separable. * * @tparam LearnPolicy Options of SimpleWeightUpdate and GradientDescent. * @tparam WeightInitializationPolicy Option of ZeroInitialization and * RandomInitialization. 
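 *
 * A brief training/classification sketch (the data, labels, and sizes here
 * are placeholders):
 *
 * @code
 * arma::mat trainData, testData; // One point per column.
 * arma::Row<size_t> labels; // One label in [0, numClasses) per point.
 * Perceptron<> p(trainData, labels, 2, 1000); // 2 classes, <= 1000 passes.
 * arma::Row<size_t> predictions(testData.n_cols);
 * p.Classify(testData, predictions);
 * @endcode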
*/ template class Perceptron { public: /** * Constructor: create the perceptron with the given number of classes and * initialize the weight matrix, but do not perform any training. (Call the * Train() function to perform training.) * * @param numClasses Number of classes in the dataset. * @param dimensionality Dimensionality of the dataset. * @param maxIterations Maximum number of iterations for the perceptron * learning algorithm. */ Perceptron(const size_t numClasses = 0, const size_t dimensionality = 0, const size_t maxIterations = 1000); /** * Constructor: constructs the perceptron by building the weights matrix, * which is later used in classification. The number of classes should be * specified separately, and the labels vector should contain values in the * range [0, numClasses - 1]. The data::NormalizeLabels() function can be * used if the labels vector does not contain values in the required range. * * @param data Input, training data. * @param labels Labels of dataset. * @param numClasses Number of classes in the dataset. * @param maxIterations Maximum number of iterations for the perceptron * learning algorithm. */ Perceptron(const MatType& data, const arma::Row& labels, const size_t numClasses, const size_t maxIterations = 1000); /** * Alternate constructor which copies parameters from an already initiated * perceptron. * * @param other The other initiated Perceptron object from which we copy the * values from. * @param data The data on which to train this Perceptron object on. * @param D Weight vector to use while training. For boosting purposes. * @param labels The labels of data. */ Perceptron(const Perceptron<>& other, const MatType& data, const arma::Row& labels, const arma::rowvec& instanceWeights); /** * Train the perceptron on the given data for up to the maximum number of * iterations (specified in the constructor or through MaxIterations()). A * single iteration corresponds to a single pass through the data, so if you * want to pass through the dataset only once, set MaxIterations() to 1. * * This training does not reset the model weights, so you can call Train() on * multiple datasets sequentially. * * @param data Dataset on which training should be performed. * @param labels Labels of the dataset. Make sure that these labels don't * contain any values greater than NumClasses()! * @param instanceWeights Cost matrix. Stores the cost of mispredicting * instances. This is useful for boosting. */ void Train(const MatType& data, const arma::Row& labels, const arma::rowvec& instanceWeights = arma::rowvec()); /** * Classification function. After training, use the weights matrix to * classify test, and put the predicted classes in predictedLabels. * * @param test Testing data or data to classify. * @param predictedLabels Vector to store the predicted classes after * classifying test. */ void Classify(const MatType& test, arma::Row& predictedLabels); /** * Serialize the perceptron. */ template void Serialize(Archive& ar, const unsigned int /* version */); //! Get the maximum number of iterations. size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations. size_t& MaxIterations() { return maxIterations; } //! Get the number of classes this perceptron has been trained for. size_t NumClasses() const { return weights.n_cols; } //! Get the weight matrix. const arma::mat& Weights() const { return weights; } //! Modify the weight matrix. You had better know what you are doing! arma::mat& Weights() { return weights; } //! Get the biases. 
const arma::vec& Biases() const { return biases; } //! Modify the biases. You had better know what you are doing! arma::vec& Biases() { return biases; } private: //! The maximum number of iterations during training. size_t maxIterations; /** * Stores the weights for each of the input class labels. Each column * corresponds to the weights for one class label, and each row corresponds to * the weights for one dimension of the input data. The biases are held in a * separate vector. */ arma::mat weights; //! The biases for each class. arma::vec biases; }; } // namespace perceptron } // namespace mlpack #include "perceptron_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/perceptron/perceptron_impl.hpp000066400000000000000000000143231315013601400245610ustar00rootroot00000000000000/** * @file perceptron_impl.hpp * @author Udit Saxena * * Implementation of Perceptron class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_PERCEPTRON_PERCEPTRON_IMPL_HPP #define MLPACK_METHODS_PERCEPTRON_PERCEPTRON_IMPL_HPP #include "perceptron.hpp" namespace mlpack { namespace perceptron { /** * Construct the perceptron with the given number of classes and maximum number * of iterations. */ template< typename LearnPolicy, typename WeightInitializationPolicy, typename MatType > Perceptron::Perceptron( const size_t numClasses, const size_t dimensionality, const size_t maxIterations) : maxIterations(maxIterations) { WeightInitializationPolicy wip; wip.Initialize(weights, biases, dimensionality, numClasses); } /** * Constructor - constructs the perceptron. Or rather, builds the weights * matrix, which is later used in classification. It adds a bias input vector * of 1 to the input data to take care of the bias weights. * * @param data Input, training data. * @param labels Labels of dataset. * @param maxIterations Maximum number of iterations for the perceptron learning * algorithm. */ template< typename LearnPolicy, typename WeightInitializationPolicy, typename MatType > Perceptron::Perceptron( const MatType& data, const arma::Row& labels, const size_t numClasses, const size_t maxIterations) : maxIterations(maxIterations) { WeightInitializationPolicy wip; wip.Initialize(weights, biases, data.n_rows, numClasses); // Start training. Train(data, labels); } /** * Alternate constructor which copies parameters from an already initiated * perceptron. * * @param other The other initiated Perceptron object from which we copy the * values from. * @param data The data on which to train this Perceptron object on. * @param instanceWeights Weight vector to use while training. For boosting * purposes. * @param labels The labels of data. */ template< typename LearnPolicy, typename WeightInitializationPolicy, typename MatType > Perceptron::Perceptron( const Perceptron<>& other, const MatType& data, const arma::Row& labels, const arma::rowvec& instanceWeights) : maxIterations(other.maxIterations) { // Insert a row of ones at the top of the training data set. WeightInitializationPolicy wip; wip.Initialize(weights, biases, data.n_rows, other.NumClasses()); Train(data, labels, instanceWeights); } /** * Classification function. After training, use the weights matrix to classify * test, and put the predicted classes in predictedLabels. * * @param test Testing data or data to classify. 
* @param predictedLabels Vector to store the predicted classes after * classifying test. */ template< typename LearnPolicy, typename WeightInitializationPolicy, typename MatType > void Perceptron::Classify( const MatType& test, arma::Row& predictedLabels) { arma::vec tempLabelMat; arma::uword maxIndex = 0; // Could probably be faster if done in batch. for (size_t i = 0; i < test.n_cols; i++) { tempLabelMat = weights.t() * test.col(i) + biases; tempLabelMat.max(maxIndex); predictedLabels(0, i) = maxIndex; } } /** * Training function. It trains on trainData using the cost matrix * instanceWeights. * * @param data Data to train on. * @param labels Labels of data. * @param instanceWeights Cost matrix. Stores the cost of mispredicting * instances. This is useful for boosting. */ template< typename LearnPolicy, typename WeightInitializationPolicy, typename MatType > void Perceptron::Train( const MatType& data, const arma::Row& labels, const arma::rowvec& instanceWeights) { size_t j, i = 0; bool converged = false; size_t tempLabel; arma::uword maxIndexRow, maxIndexCol; arma::mat tempLabelMat; LearnPolicy LP; const bool hasWeights = (instanceWeights.n_elem > 0); while ((i < maxIterations) && (!converged)) { // This outer loop is for each iteration, and we use the 'converged' // variable for noting whether or not convergence has been reached. i++; converged = true; // Now this inner loop is for going through the dataset in each iteration. for (j = 0; j < data.n_cols; j++) { // Multiply for each variable and check whether the current weight vector // correctly classifies this. tempLabelMat = weights.t() * data.col(j) + biases; tempLabelMat.max(maxIndexRow, maxIndexCol); // Check whether prediction is correct. if (maxIndexRow != labels(0, j)) { // Due to incorrect prediction, convergence set to false. converged = false; tempLabel = labels(0, j); // Send maxIndexRow for knowing which weight to update, send j to know // the value of the vector to update it with. Send tempLabel to know // the correct class. if (hasWeights) LP.UpdateWeights(data.col(j), weights, biases, maxIndexRow, tempLabel, instanceWeights(j)); else LP.UpdateWeights(data.col(j), weights, biases, maxIndexRow, tempLabel); } } } } //! Serialize the perceptron. template template void Perceptron::Serialize( Archive& ar, const unsigned int /* version */) { // We just need to serialize the maximum number of iterations, the weights, // and the biases. ar & data::CreateNVP(maxIterations, "maxIterations"); ar & data::CreateNVP(weights, "weights"); ar & data::CreateNVP(biases, "biases"); } } // namespace perceptron } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/perceptron/perceptron_main.cpp000066400000000000000000000243501315013601400245400ustar00rootroot00000000000000/** * @file perceptron_main.cpp * @author Udit Saxena * * This program runs the Simple Perceptron Classifier. * * Perceptrons are simple single-layer binary classifiers, which solve linearly * separable problems with a linear decision boundary. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
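 *
 * For reference, the rule this tool ends up applying (see the Perceptron
 * class) predicts the class c of a point x as
 *
 *   c = argmax_k (w_k' x + b_k)
 *
 * where w_k is the k-th column of the weight matrix and b_k is the k-th bias.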
*/ #include #include #include #include #include #include "perceptron.hpp" using namespace mlpack; using namespace mlpack::perceptron; using namespace std; using namespace arma; PROGRAM_INFO("Perceptron", "This program implements a perceptron, which is a single level neural " "network. The perceptron makes its predictions based on a linear predictor " "function combining a set of weights with the feature vector. The " "perceptron learning rule is able to converge, given enough iterations " "using the --max_iterations (-n) parameter, if the data supplied is " "linearly separable. The perceptron is parameterized by a matrix of weight" " vectors that denote the numerical weights of the neural network." "\n\n" "This program allows loading a perceptron from a model (-m) or training a " "perceptron given training data (-t), or both those things at once. In " "addition, this program allows classification on a test dataset (-T) and " "will save the classification results to the given output file (-o). The " "perceptron model itself may be saved with a file specified using the -M " "option." "\n\n" "The training data given with the -t option should have class labels as its" " last dimension (so, if the training data is in CSV format, labels should " "be the last column). Alternately, the -l (--labels_file) option may be " "used to specify a separate file of labels." "\n\n" "All these options make it easy to train a perceptron, and then re-use that" " perceptron for later classification. The invocation below trains a " "perceptron on 'training_data.csv' (and 'training_labels.csv)' and saves " "the model to 'perceptron.xml'." "\n\n" "$ perceptron -t training_data.csv -l training_labels.csv -M perceptron.xml" "\n\n" "Then, this model can be re-used for classification on 'test_data.csv'. " "The example below does precisely that, saving the predicted classes to " "'predictions.csv'." "\n\n" "$ perceptron -m perceptron.xml -T test_data.csv -o predictions.csv" "\n\n" "Note that all of the options may be specified at once: predictions may be " "calculated right after training a model, and model training can occur even" " if an existing perceptron model is passed with -m (--input_model_file). " "However, note that the number of classes and the dimensionality of all " "data must match. So you cannot pass a perceptron model trained on 2 " "classes and then re-train with a 4-class dataset. Similarly, attempting " "classification on a 3-dimensional dataset with a perceptron that has been " "trained on 8 dimensions will cause an error." ); // Training parameters. PARAM_STRING_IN("training_file", "A file containing the training set.", "t", ""); PARAM_STRING_IN("labels_file", "A file containing labels for the training set.", "l", ""); PARAM_INT_IN("max_iterations","The maximum number of iterations the perceptron " "is to be run", "n", 1000); // Model loading/saving. PARAM_STRING_IN("input_model_file", "File containing input perceptron model.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained perceptron model " "to.", "M"); // Testing/classification parameters. PARAM_STRING_IN("test_file", "A file containing the test set.", "T", ""); PARAM_STRING_OUT("output_file", "The file in which the predicted labels for the" " test set will be written.", "o"); // When we save a model, we must also save the class mappings. So we use this // auxiliary structure to store both the perceptron and the mapping, and we'll // save this. 
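// A sketch of the intended round trip (main() below contains the real calls):
//
//   PerceptronModel pm(p, mappings);
//   data::Save(outputModelFile, "perceptron_model", pm); // On save.
//   data::Load(inputModelFile, "perceptron_model", pm, true); // On load.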
class PerceptronModel { private: Perceptron<>& p; Col& map; public: PerceptronModel(Perceptron<>& p, Col& map) : p(p), map(map) { } template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(p, "perceptron"); ar & data::CreateNVP(map, "mappings"); } }; int main(int argc, char** argv) { CLI::ParseCommandLine(argc, argv); // First, get all parameters and validate them. const string trainingDataFile = CLI::GetParam("training_file"); const string labelsFile = CLI::GetParam("labels_file"); const string inputModelFile = CLI::GetParam("input_model_file"); const string testDataFile = CLI::GetParam("test_file"); const string outputModelFile = CLI::GetParam("output_model_file"); const string outputFile = CLI::GetParam("output_file"); const size_t maxIterations = (size_t) CLI::GetParam("max_iterations"); // We must either load a model or train a model. if (!CLI::HasParam("input_model_file") && !CLI::HasParam("training_file")) Log::Fatal << "Either an input model must be specified with " << "--input_model_file or training data must be given " << "(--training_file)!" << endl; // If the user isn't going to save the output model or any predictions, we // should issue a warning. if (!CLI::HasParam("output_model_file") && !CLI::HasParam("test_file")) Log::Warn << "Output will not be saved! (Neither --test_file nor " << "--output_model_file are specified.)" << endl; if (testDataFile == "" && outputFile != "") Log::Warn << "--output_file will be ignored because --test_file is not " << "specified." << endl; if (CLI::HasParam("test_file") && !CLI::HasParam("output_file")) Log::Fatal << "--output_file must be specified with --test_file." << endl; // Now, load our model, if there is one. Perceptron<>* p = NULL; Col mappings; if (CLI::HasParam("input_model_file")) { Log::Info << "Loading saved perceptron from model file '" << inputModelFile << "'." << endl; // The parameters here are invalid, but we are about to load the model // anyway... p = new Perceptron<>(0, 0); PerceptronModel pm(*p, mappings); // Also load class mappings. data::Load(inputModelFile, "perceptron_model", pm, true); } // Next, load the training data and labels (if they have been given). if (CLI::HasParam("training_file")) { Log::Info << "Training perceptron on dataset '" << trainingDataFile; if (labelsFile != "") Log::Info << "' with labels in '" << labelsFile << "'"; else Log::Info << "'"; Log::Info << " for a maximum of " << maxIterations << " iterations." << endl; mat trainingData; data::Load(trainingDataFile, trainingData, true); // Load labels. mat labelsIn; // Did the user pass in labels? if (CLI::HasParam("labels_file")) { // Load labels. const string labelsFile = CLI::GetParam("labels_file"); data::Load(labelsFile, labelsIn, true); } else { // Use the last row of the training data as the labels. Log::Info << "Using the last dimension of training set as labels." << endl; labelsIn = trainingData.row(trainingData.n_rows - 1).t(); trainingData.shed_row(trainingData.n_rows - 1); } // Do the labels need to be transposed? if (labelsIn.n_cols == 1) labelsIn = labelsIn.t(); // Normalize the labels. Row labels; data::NormalizeLabels(labelsIn.row(0), labels, mappings); // Now, if we haven't already created a perceptron, do it. Otherwise, make // sure the dimensions are right, then continue training. if (p == NULL) { // Create and train the classifier. Timer::Start("training"); p = new Perceptron<>(trainingData, labels, max(labels) + 1, maxIterations); Timer::Stop("training"); } else { // Check dimensionality. 
      if (p->Weights().n_rows != trainingData.n_rows)
      {
        Log::Fatal << "Perceptron from '" << inputModelFile << "' is built on "
            << "data with " << p->Weights().n_rows << " dimensions, but data in"
            << " '" << trainingDataFile << "' has " << trainingData.n_rows
            << " dimensions!" << endl;
      }

      // Check the number of labels.
      if (max(labels) + 1 > p->Weights().n_cols)
      {
        Log::Fatal << "Perceptron from '" << inputModelFile << "' has "
            << p->Weights().n_cols << " classes, but the training data has "
            << max(labels) + 1 << " classes!" << endl;
      }

      // Now train.
      Timer::Start("training");
      p->MaxIterations() = maxIterations;
      p->Train(trainingData, labels.t());
      Timer::Stop("training");
    }
  }

  // Now, the training procedure is complete. Do we have any test data?
  if (CLI::HasParam("test_file"))
  {
    Log::Info << "Classifying dataset '" << testDataFile << "'." << endl;
    mat testData;
    data::Load(testDataFile, testData, true);

    if (testData.n_rows != p->Weights().n_rows)
    {
      Log::Fatal << "Test data dimensionality (" << testData.n_rows << ") must "
          << "be the same as the dimensionality of the perceptron ("
          << p->Weights().n_rows << ")!" << endl;
    }

    // Time the running of the perceptron classifier.
    Row<size_t> predictedLabels(testData.n_cols);
    Timer::Start("testing");
    p->Classify(testData, predictedLabels);
    Timer::Stop("testing");

    // Un-normalize labels to prepare output.
    Row<size_t> results;
    data::RevertLabels(predictedLabels, mappings, results);

    // Save the predicted labels.
    if (outputFile != "")
      data::Save(outputFile, results, false /* non-fatal */);
  }

  // Lastly, do we need to save the output model?
  if (CLI::HasParam("output_model_file"))
  {
    PerceptronModel pm(*p, mappings);
    data::Save(outputModelFile, "perceptron_model", pm);
  }

  // Clean up memory.
  delete p;
}
mlpack-2.2.5/src/mlpack/methods/preprocess/000077500000000000000000000000001315013601400206475ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/preprocess/CMakeLists.txt000066400000000000000000000012151315013601400234060ustar00rootroot00000000000000# Define the files we need to compile.
# Anything not in this list will not be compiled into mlpack.
set(SOURCES
)

# Add directory name to sources.
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()
# Append sources (with directory name) to list of all mlpack sources (used at
# the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)

#add_cli_executable(preprocess_stats)
add_cli_executable(preprocess_split)
add_cli_executable(preprocess_binarize)
add_cli_executable(preprocess_describe)
#add_cli_executable(preprocess_scan)
add_cli_executable(preprocess_imputer)
mlpack-2.2.5/src/mlpack/methods/preprocess/preprocess_binarize_main.cpp000066400000000000000000000066531315013601400264330ustar00rootroot00000000000000/**
 * @file preprocess_binarize_main.cpp
 * @author Keon Kim
 *
 * Binarize CLI executable.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include #include #include #include #include

PROGRAM_INFO("Binarize Data", "This utility takes a dataset and binarizes the "
    "variables to either 0 or 1 given a threshold. The user can apply "
    "binarization to a single dimension or to the whole dataset. A dimension "
    "can be specified using the "
    "--dimension (-d) option. 
Threshold can also be specified with the " "--threshold (-t) option; The default is 0.0." "\n\n" "The program does not modify the original file, but instead makes a " "separate file to save the binarized data; The program requires you to " "specify the file name with --output_file (-o)." "\n\n" "For example, if we want to make all variables greater than 5 in dataset " "to 1 and ones that are less than or equal to 5.0 to 0, and save the " "result to result.csv, we could run" "\n\n" "$ mlpack_preprocess_binarize -i dataset.csv -t 5 -o result.csv" "\n\n" "But if we want to apply this to only the first (0th) dimension of the " "dataset, we could run" "\n\n" "$ mlpack_preprocess_binarize -i dataset.csv -t 5 -d 0 -o result.csv"); // Define parameters for data. PARAM_STRING_IN_REQ("input_file", "File containing data.", "i"); // Define optional parameters. PARAM_STRING_OUT("output_file", "File to save the output.", "o"); PARAM_INT_IN("dimension", "Dimension to apply the binarization. If not set, the" " program will binarize every dimension by default.", "d", 0); PARAM_DOUBLE_IN("threshold", "Threshold to be applied for binarization. If not " "set, the threshold defaults to 0.0.", "t", 0.0); using namespace mlpack; using namespace arma; using namespace std; int main(int argc, char** argv) { // Parse command line options. CLI::ParseCommandLine(argc, argv); const string inputFile = CLI::GetParam("input_file"); const string outputFile = CLI::GetParam("output_file"); const size_t dimension = (size_t) CLI::GetParam("dimension"); const double threshold = CLI::GetParam("threshold"); // Check on data parameters. if (!CLI::HasParam("dimension")) Log::Warn << "You did not specify --dimension, so the program will perform " << "binarize on every dimensions." << endl; if (!CLI::HasParam("threshold")) Log::Warn << "You did not specify --threshold, so the threshold will be " << "automatically set to '0.0'." << endl; if (!CLI::HasParam("output_file")) Log::Warn << "You did not specify --output_file, so no result will be " << "saved." << endl; // Load the data. arma::mat input; arma::mat output; data::Load(inputFile, input, true); Timer::Start("binarize"); if (CLI::HasParam("dimension")) { data::Binarize(input, output, threshold, dimension); } else { // binarize the whole data data::Binarize(input, output, threshold); } Timer::Stop("binarize"); if (CLI::HasParam("output_file")) data::Save(outputFile, output, false); } mlpack-2.2.5/src/mlpack/methods/preprocess/preprocess_describe_main.cpp000066400000000000000000000175551315013601400264210ustar00rootroot00000000000000/** * @file preprocess_describe_main.cpp * @author Keon Kim * * Descriptive Statistics Class and CLI executable. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include using namespace mlpack; using namespace mlpack::data; using namespace std; using namespace boost; PROGRAM_INFO("Descriptive Statistics", "This utility takes a dataset and " "prints out the descriptive statistics of the data. Descriptive statistics " "is the discipline of quantitatively describing the main features of a " "collection of information, or the quantitative description itself. The " "program does not modify the original file, but instead prints out the " "statistics to the console. 
The printed result will look like a table."
    "\n\n"
    "Optionally, the width and precision of the output can be adjusted with "
    "the --width (-w) and --precision (-p) options. A user can also select a "
    "specific dimension to analyze if the dataset has many dimensions. "
    "--population (-P) is a flag which can be used when the user wants the "
    "dataset to be considered as a population. Otherwise, the dataset will "
    "be considered as a sample."
    "\n\n"
    "So, for a simple example where we want to print out statistical facts "
    "about dataset.csv and keep the default settings, we could run"
    "\n\n"
    "$ mlpack_preprocess_describe -i dataset.csv -v"
    "\n\n"
    "If we want to customize the width to 10 and precision to 5 and consider "
    "the dataset as a population, we could run"
    "\n\n"
    "$ mlpack_preprocess_describe -i dataset.csv -w 10 -p 5 -P -v");

// Define parameters for data.
PARAM_STRING_IN_REQ("input_file", "File containing data.", "i");
PARAM_INT_IN("dimension", "Dimension of the data. Use this to specify a "
    "single dimension to analyze.", "d", 0);
PARAM_INT_IN("precision", "Precision of the output statistics.", "p", 4);
PARAM_INT_IN("width", "Width of the output table.", "w", 8);
PARAM_FLAG("population", "If specified, the program will calculate statistics "
    "assuming the dataset is the population. By default, the program will "
    "assume the dataset as a sample.", "P");
PARAM_FLAG("row_major", "If specified, the program will calculate statistics "
    "across rows, not across columns. (Remember that in mlpack, a column "
    "represents a point, so this option is generally not necessary.)", "r");

/**
 * Calculates the sum of deviations to the nth power.
 *
 * @param input Vector that captures a dimension of a dataset.
 * @param fMean Mean of the given vector.
 * @param n Degree of the power.
 * @return Sum of nth-power deviations.
 */
double SumNthPowerDeviations(const arma::rowvec& input,
                             const double& fMean,
                             size_t n)
{
  return arma::sum(arma::pow(input - fMean, static_cast<int>(n)));
}

/**
 * Calculates the skewness of the given vector.
 *
 * @param input Vector that captures a dimension of a dataset.
 * @param fStd Standard deviation of the given vector.
 * @param fMean Mean of the given vector.
 * @param population Whether the input should be treated as a population
 *     (rather than a sample).
 * @return Skewness of the given vector.
 */
double Skewness(const arma::rowvec& input,
                const double& fStd,
                const double& fMean,
                const bool population)
{
  double skewness = 0;
  const double S3 = pow(fStd, 3);
  const double M3 = SumNthPowerDeviations(input, fMean, 3);
  const double n = input.n_elem;
  if (population)
  {
    // Calculate population skewness.
    skewness = M3 / (n * S3);
  }
  else
  {
    // Calculate sample skewness.
    skewness = n * M3 / ((n - 1) * (n - 2) * S3);
  }
  return skewness;
}

/**
 * Calculates the excess kurtosis of the given vector.
 *
 * @param input Vector that captures a dimension of a dataset.
 * @param fStd Standard deviation of the given vector.
 * @param fMean Mean of the given vector.
 * @param population Whether the input should be treated as a population
 *     (rather than a sample).
 * @return Excess kurtosis of the given vector.
 */
double Kurtosis(const arma::rowvec& input,
                const double& fStd,
                const double& fMean,
                const bool population)
{
  double kurtosis = 0;
  const double M4 = SumNthPowerDeviations(input, fMean, 4);
  const double n = input.n_elem;
  if (population)
  {
    // Calculate population excess kurtosis.
    const double M2 = SumNthPowerDeviations(input, fMean, 2);
    kurtosis = n * (M4 / pow(M2, 2)) - 3;
  }
  else
  {
    // Calculate sample excess kurtosis.
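    // Written out, the sample (G2) estimator assembled below is:
    //
    //   G2 = [n (n + 1) / ((n - 1)(n - 2)(n - 3))] * (M4 / s^4)
    //        - 3 (n - 1)^2 / ((n - 2)(n - 3))
    //
    // where M4 is the sum of fourth-power deviations and s is the sample
    // standard deviation.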
    const double S4 = pow(fStd, 4);
    const double norm3 = (3 * (n - 1) * (n - 1)) / ((n - 2) * (n - 3));
    const double normC = (n * (n + 1)) / ((n - 1) * (n - 2) * (n - 3));
    const double normM = M4 / S4;
    kurtosis = normC * normM - norm3;
  }
  return kurtosis;
}

/**
 * Calculates the standard error of the mean.
 *
 * @param size Number of elements in the sample.
 * @param fStd Standard deviation of the given vector.
 * @return Standard error of the mean of the given vector.
 */
double StandardError(const size_t size, const double& fStd)
{
  return fStd / sqrt(size);
}

int main(int argc, char** argv)
{
  // Parse command line options.
  CLI::ParseCommandLine(argc, argv);
  const string inputFile = CLI::GetParam<string>("input_file");
  const size_t dimension = static_cast<size_t>(CLI::GetParam<int>("dimension"));
  const size_t precision = static_cast<size_t>(CLI::GetParam<int>("precision"));
  const size_t width = static_cast<size_t>(CLI::GetParam<int>("width"));
  const bool population = CLI::HasParam("population");
  const bool rowMajor = CLI::HasParam("row_major");

  // Load the data.
  arma::mat data;
  data::Load(inputFile, data);

  // Generate the boost::format recipe.
  const string widthPrecision("%-" + to_string(width) + "." +
      to_string(precision));
  const string widthOnly("%-" + to_string(width) + ".");
  string stringFormat = "";
  string numberFormat = "";
  // We are going to print 11 different categories.
  for (size_t i = 0; i < 11; ++i)
  {
    stringFormat += widthOnly + "s";
    numberFormat += widthPrecision + "f";
  }

  Timer::Start("statistics");
  // Print the headers.
  Log::Info << boost::format(stringFormat)
      % "dim" % "var" % "mean" % "std" % "median" % "min" % "max"
      % "range" % "skew" % "kurt" % "SE" << endl;

  // Lambda function to print out the results.
  auto PrintStatResults = [&](size_t dim, bool rowMajor)
  {
    arma::rowvec feature;
    if (rowMajor)
      feature = arma::conv_to<arma::rowvec>::from(data.col(dim));
    else
      feature = data.row(dim);

    // The 'f' at the front of the variable names means "feature".
    const double fMax = arma::max(feature);
    const double fMin = arma::min(feature);
    const double fMean = arma::mean(feature);
    const double fStd = arma::stddev(feature, population);

    // Print statistics of the given dimension.
    Log::Info << boost::format(numberFormat)
        % dim
        % arma::var(feature, population)
        % fMean
        % fStd
        % arma::median(feature)
        % fMin
        % fMax
        % (fMax - fMin) // range
        % Skewness(feature, fStd, fMean, population)
        % Kurtosis(feature, fStd, fMean, population)
        % StandardError(feature.n_elem, fStd)
        << endl;
  };

  // If the user specified a dimension, describe the statistics of that
  // dimension only; otherwise, describe all dimensions.
  if (CLI::HasParam("dimension"))
  {
    PrintStatResults(dimension, rowMajor);
  }
  else
  {
    const size_t dimensions = rowMajor ? data.n_cols : data.n_rows;
    for (size_t i = 0; i < dimensions; ++i)
    {
      PrintStatResults(i, rowMajor);
    }
  }
  Timer::Stop("statistics");
}
mlpack-2.2.5/src/mlpack/methods/preprocess/preprocess_imputer_main.cpp000066400000000000000000000166441315013601400263220ustar00rootroot00000000000000/**
 * @file preprocess_imputer_main.cpp
 * @author Keon Kim
 *
 * A utility that provides imputation strategies for missing values.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
// (The original include list was lost in extraction; at minimum, the code
// below needs these headers.)
#include <mlpack/core.hpp>
#include <mlpack/core/data/imputer.hpp>
#include <mlpack/core/data/dataset_mapper.hpp>
#include <mlpack/core/data/map_policies/missing_policy.hpp>
#include <mlpack/core/data/imputation_methods/mean_imputation.hpp>
#include <mlpack/core/data/imputation_methods/median_imputation.hpp>
#include <mlpack/core/data/imputation_methods/custom_imputation.hpp>
#include <mlpack/core/data/imputation_methods/listwise_deletion.hpp>

PROGRAM_INFO("Impute Data", "This utility takes a dataset and converts a "
    "user-defined missing value to another value, to provide more meaningful "
    "analysis."
    "\n\n"
    "The program does not modify the original file, but instead makes a "
    "separate file to save the output data; you can save the output by "
    "specifying the file name with --output_file (-o)."
    "\n\n"
    "For example, if we consider 'NULL' in dimension 0 to be a missing "
    "variable and want to delete every row containing a NULL in the "
    "column-wise dataset, and save the result to result.csv, we could run"
    "\n\n"
    "$ mlpack_preprocess_imputer -i dataset.csv -o result.csv -m NULL -d 0 \n"
    "> -s listwise_deletion");

PARAM_STRING_IN_REQ("input_file", "File containing data.", "i");
PARAM_STRING_OUT("output_file", "File to save output to.", "o");
PARAM_STRING_IN("missing_value", "User-defined missing value.", "m", "");
PARAM_STRING_IN("strategy", "Imputation strategy to be applied. Strategies "
    "should be one of 'custom', 'mean', 'median', and 'listwise_deletion'.",
    "s", "");
PARAM_DOUBLE_IN("custom_value", "User-defined custom value.", "c", 0.0);
PARAM_INT_IN("dimension", "The dimension to apply imputation to.", "d", 0);

using namespace mlpack;
using namespace arma;
using namespace std;
using namespace data;

int main(int argc, char** argv)
{
  CLI::ParseCommandLine(argc, argv);

  const string inputFile = CLI::GetParam<string>("input_file");
  const string outputFile = CLI::GetParam<string>("output_file");
  const string missingValue = CLI::GetParam<string>("missing_value");
  const double customValue = CLI::GetParam<double>("custom_value");
  const size_t dimension = (size_t) CLI::GetParam<int>("dimension");
  string strategy = CLI::GetParam<string>("strategy");

  // The program needs a user-defined missing value.
  // Missing values can be any string such as "1", "a", "NULL".
  if (!CLI::HasParam("missing_value"))
    Log::Fatal << "--missing_value must be specified in order to perform "
        << "any imputation strategies." << endl;

  // If a custom value is specified but an imputation strategy is not, set the
  // imputation strategy to "custom".
  if (CLI::HasParam("custom_value") && !CLI::HasParam("strategy"))
  {
    strategy = "custom";
    Log::Warn << "--custom_value is specified without --strategy, so "
        << "--strategy is automatically set to 'custom'." << endl;
  }

  if (!CLI::HasParam("strategy") && !CLI::HasParam("custom_value"))
    Log::Fatal << "--strategy must be specified in order to perform "
        << "imputation." << endl;

  if (!CLI::HasParam("output_file"))
    Log::Warn << "--output_file is not specified, so no "
        << "results from this program will be saved!" << endl;

  if (!CLI::HasParam("dimension"))
    Log::Warn << "--dimension is not specified, so the imputation will be "
        << "applied to all dimensions." << endl;

  // A custom value and any other imputation strategy cannot be specified at
  // the same time.
  if (CLI::HasParam("custom_value") && CLI::HasParam("strategy") &&
      strategy != "custom")
    Log::Fatal << "--custom_value cannot be specified with imputation "
        << "strategies other than the 'custom' strategy." << endl;

  // --custom_value must be specified when using the "custom" strategy.
  if ((strategy == "custom") && !CLI::HasParam("custom_value"))
    Log::Fatal << "--custom_value must be specified when using the 'custom' "
        << "strategy." << endl;

  arma::mat input;
  // Policy tells how the DatasetMapper should map the values.
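  // MissingPolicy maps each value in missingSet (here just the single
  // user-specified string) to NaN when the file is loaded, so the Imputer can
  // later locate and replace those entries.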
  std::set<string> missingSet;
  missingSet.insert(missingValue);
  MissingPolicy policy(missingSet);
  using MapperType = DatasetMapper<MissingPolicy>;
  DatasetMapper<MissingPolicy> info(policy);

  Load(inputFile, input, info, true, true);

  // Print how many mappings exist in each dimension.
  std::vector<size_t> dirtyDimensions;
  for (size_t i = 0; i < input.n_rows; ++i)
  {
    size_t numMappings = info.NumMappings(i);
    if (numMappings > 0)
    {
      Log::Info << "Replacing " << numMappings << " values in dimension " << i
          << "." << endl;
      dirtyDimensions.push_back(i);
    }
  }

  if (dirtyDimensions.size() == 0)
  {
    Log::Warn << "The file does not contain any user-defined missing "
        << "variables. The program did not perform any imputation." << endl;
  }
  else if (CLI::HasParam("dimension") &&
      !(std::find(dirtyDimensions.begin(), dirtyDimensions.end(), dimension)
      != dirtyDimensions.end()))
  {
    Log::Warn << "The given dimension of the file does not contain any "
        << "user-defined missing variables. The program did not perform any "
        << "imputation." << endl;
  }
  else
  {
    // Collect the dimensions to impute: either the single dimension the user
    // asked for, or every dimension that contains a missing value.
    std::vector<size_t> targetDimensions;
    if (CLI::HasParam("dimension"))
    {
      Log::Info << "Performing '" << strategy << "' imputation strategy "
          << "to replace '" << missingValue << "' on dimension " << dimension
          << "." << endl;
      targetDimensions.push_back(dimension);
    }
    else
    {
      Log::Info << "Performing '" << strategy << "' imputation strategy "
          << "to replace '" << missingValue << "' on all dimensions." << endl;
      targetDimensions = dirtyDimensions;
    }

    // Each strategy instantiates a different Imputer type, so the imputation
    // is performed inside the branch for the matching strategy.
    Timer::Start("imputation");
    if (strategy == "mean")
    {
      Imputer<double, MapperType, MeanImputation<double>> imputer(info);
      for (size_t i : targetDimensions)
        imputer.Impute(input, missingValue, i);
    }
    else if (strategy == "median")
    {
      Imputer<double, MapperType, MedianImputation<double>> imputer(info);
      for (size_t i : targetDimensions)
        imputer.Impute(input, missingValue, i);
    }
    else if (strategy == "listwise_deletion")
    {
      Imputer<double, MapperType, ListwiseDeletion<double>> imputer(info);
      for (size_t i : targetDimensions)
        imputer.Impute(input, missingValue, i);
    }
    else if (strategy == "custom")
    {
      CustomImputation<double> strat(customValue);
      Imputer<double, MapperType, CustomImputation<double>> imputer(info,
          strat);
      for (size_t i : targetDimensions)
        imputer.Impute(input, missingValue, i);
    }
    else
    {
      Log::Fatal << "'" << strategy << "' imputation strategy does not exist!"
          << endl;
    }
    Timer::Stop("imputation");

    if (!outputFile.empty())
    {
      Log::Info << "Saving results to '" << outputFile << "'." << endl;
      Save(outputFile, input, false);
    }
  }
}
mlpack-2.2.5/src/mlpack/methods/preprocess/preprocess_split_main.cpp000066400000000000000000000152451315013601400257660ustar00rootroot00000000000000/**
 * @file preprocess_split_main.cpp
 * @author Keon Kim
 *
 * Split data CLI executable.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
// (The original include list was lost in extraction; at minimum, the code
// below needs these headers.)
#include <mlpack/core.hpp>
#include <mlpack/core/data/split_data.hpp>

PROGRAM_INFO("Split Data", "This utility takes a dataset and optionally "
    "labels and splits them into a training set and a test set. Before the "
    "split, the points in the dataset are randomly reordered. The percentage "
    "of the dataset to be used as the test set can be specified with the "
    "--test_ratio (-r) option; the default is 0.2 (20%)."
    "\n\n"
    "The program does not modify the original file, but instead makes "
    "separate files to save the training and test data; the program requires "
    "you to specify the file names with --training_file (-t) and --test_file "
    "(-T)."
    "\n\n"
    "Optionally, labels can also be split along with the data by specifying "
    "the --input_labels_file (-I) option. 
Splitting labels works the same way " "as splitting the data. The output training and test labels will be saved " "to the files specified by --training_labels_file (-l) and " "--test_labels_file (-L), respectively." "\n\n" "So, a simple example where we want to split dataset.csv into " "train.csv and test.csv with 60% of the data in the training set and 40% " "of the dataset in the test set, we could run" "\n\n" "$ mlpack_preprocess_split -i dataset.csv -t train.csv -T test.csv -r 0.4" "\n\n" "If we had a dataset in dataset.csv and associated labels in labels.csv, " "and we wanted to split these into training_set.csv, training_labels.csv, " "test_set.csv, and test_labels.csv, with 30% of the data in the test set, " "we could run" "\n\n" "$ mlpack_preprocess_split -i dataset.csv -I labels.csv -r 0.3\n" "> -t training_set.csv -l training_labels.csv -T test_set.csv\n" "> -L test_labels.csv"); // Define parameters for data. PARAM_STRING_IN_REQ("input_file", "File containing data,", "i"); PARAM_STRING_OUT("training_file", "File name to save train data", "t"); PARAM_STRING_OUT("test_file", "File name to save test data", "T"); // Define optional parameters. PARAM_STRING_IN("input_labels_file", "File containing labels", "I", ""); PARAM_STRING_OUT("training_labels_file", "File name to save train label", "l"); PARAM_STRING_OUT("test_labels_file", "File name to save test label", "L"); // Define optional test ratio, default is 0.2 (Test 20% Train 80%). PARAM_DOUBLE_IN("test_ratio", "Ratio of test set; if not set," "the ratio defaults to 0.2", "r", 0.2); PARAM_INT_IN("seed", "Random seed (0 for std::time(NULL)).", "s", 0); using namespace mlpack; using namespace arma; using namespace std; int main(int argc, char** argv) { // Parse command line options. CLI::ParseCommandLine(argc, argv); const string inputFile = CLI::GetParam("input_file"); const string inputLabels = CLI::GetParam("input_labels_file"); const string trainingFile = CLI::GetParam("training_file"); const string testFile = CLI::GetParam("test_file"); const string trainingLabelsFile = CLI::GetParam("training_labels_file"); const string testLabelsFile = CLI::GetParam("test_labels_file"); const double testRatio = CLI::GetParam("test_ratio"); if (CLI::GetParam("seed") == 0) mlpack::math::RandomSeed(std::time(NULL)); else mlpack::math::RandomSeed((size_t) CLI::GetParam("seed")); // Make sure the user specified output filenames. if (trainingFile == "") Log::Warn << "--training_file (-t) is not specified; no training set will " << "be saved!" << endl; if (testFile == "") Log::Warn << "--test_file (-T) is not specified; no test set will be saved!" << endl; // Check on label parameters. if (CLI::HasParam("input_labels_file")) { if (!CLI::HasParam("training_labels_file")) { Log::Warn << "--training_labels_file (-l) is not specified; no training " << "set labels will be saved!" << endl; } if (!CLI::HasParam("test_labels_file")) { Log::Warn << "--test_labels_file (-L) is not specified; no test set " << "labels will be saved!" << endl; } } else { if (CLI::HasParam("training_labels_file")) Log::Warn << "--training_labels_file ignored because --input_labels is " << "not specified." << endl; if (CLI::HasParam("test_labels_file")) Log::Warn << "--test_labels_file ignored because --input_labels is not " << "specified." << endl; } // Check test_ratio. if (CLI::HasParam("test_ratio")) { if ((testRatio < 0.0) || (testRatio > 1.0)) { Log::Fatal << "Invalid parameter for test_ratio; " << "--test_ratio must be between 0.0 and 1.0." 
          << endl;
    }
  }
  else // If test_ratio is not set, warn the user.
  {
    Log::Warn << "You did not specify --test_ratio, so it will be "
        << "automatically set to 0.2." << endl;
  }

  // Load the data.
  arma::mat data;
  data::Load(inputFile, data, true);

  // If parameters for labels exist, we must split the labels too.
  if (CLI::HasParam("input_labels_file"))
  {
    arma::mat labels;
    data::Load(inputLabels, labels, true);
    arma::rowvec labelsRow = labels.row(0);

    const auto value = data::Split(data, labelsRow, testRatio);
    Log::Info << "Training data contains " << get<0>(value).n_cols
        << " points." << endl;
    Log::Info << "Test data contains " << get<1>(value).n_cols
        << " points." << endl;

    if (trainingFile != "")
      data::Save(trainingFile, get<0>(value), false);
    if (testFile != "")
      data::Save(testFile, get<1>(value), false);
    if (trainingLabelsFile != "")
      data::Save(trainingLabelsFile, get<2>(value), false);
    if (testLabelsFile != "")
      data::Save(testLabelsFile, get<3>(value), false);
  }
  else // We have no labels, so just split the dataset.
  {
    const auto value = data::Split(data, testRatio);
    Log::Info << "Training data contains " << get<0>(value).n_cols
        << " points." << endl;
    Log::Info << "Test data contains " << get<1>(value).n_cols
        << " points." << endl;

    if (trainingFile != "")
      data::Save(trainingFile, get<0>(value), false);
    if (testFile != "")
      data::Save(testFile, get<1>(value), false);
  }
}
mlpack-2.2.5/src/mlpack/methods/quic_svd/000077500000000000000000000000001315013601400202775ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/quic_svd/CMakeLists.txt000066400000000000000000000007031315013601400230370ustar00rootroot00000000000000# Define the files we need to compile.
# Anything not in this list will not be compiled into mlpack.
set(SOURCES
  quic_svd.hpp
  quic_svd.cpp
)

# Add directory name to sources.
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

# Append sources (with directory name) to list of all mlpack sources (used at
# the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)
mlpack-2.2.5/src/mlpack/methods/quic_svd/quic_svd.cpp000066400000000000000000000045751315013601400226300ustar00rootroot00000000000000/**
 * @file quic_svd.cpp
 * @author Siddharth Agrawal
 *
 * An implementation of QUIC-SVD.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
// In case it hasn't been included yet.
#include "quic_svd.hpp"

using namespace mlpack::tree;

namespace mlpack {
namespace svd {

QUIC_SVD::QUIC_SVD(const arma::mat& dataset,
                   arma::mat& u,
                   arma::mat& v,
                   arma::mat& sigma,
                   const double epsilon,
                   const double delta) :
    dataset(dataset)
{
  // Since columns are sampled in the implementation, the matrix is transposed
  // if necessary for maximum speedup.
  CosineTree* ctree;
  if (dataset.n_cols > dataset.n_rows)
    ctree = new CosineTree(dataset, epsilon, delta);
  else
    ctree = new CosineTree(dataset.t(), epsilon, delta);

  // Get the subspace basis by creating the cosine tree.
  ctree->GetFinalBasis(basis);

  // Delete the cosine tree.
  delete ctree;

  // Use the ExtractSVD algorithm mentioned in the paper to extract the SVD of
  // the original dataset in the obtained subspace.
  ExtractSVD(u, v, sigma);
}

void QUIC_SVD::ExtractSVD(arma::mat& u,
                          arma::mat& v,
                          arma::mat& sigma)
{
  // Calculate A * V_hat, necessary for further calculations.
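  // Here V_hat is the subspace basis returned by the cosine tree, so this
  // product projects the (possibly transposed) dataset into the subspace in
  // which the SVD will be computed.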
arma::mat projectedMat; if (dataset.n_cols > dataset.n_rows) projectedMat = dataset.t() * basis; else projectedMat = dataset * basis; // Calculate the squared projected matrix. arma::mat projectedMatSquared = projectedMat.t() * projectedMat; // Calculate the SVD of the above matrix. arma::mat uBar, vBar; arma::vec sigmaBar; arma::svd(uBar, sigmaBar, vBar, projectedMatSquared); // Calculate the approximate SVD of the original matrix, using the SVD of the // squared projected matrix. v = basis * vBar; sigma = arma::sqrt(diagmat(sigmaBar)); u = projectedMat * vBar * sigma.i(); // Since columns are sampled, the unitary matrices have to be exchanged, if // the transposed matrix is not passed. if (dataset.n_cols > dataset.n_rows) { arma::mat tempMat = u; u = v; v = tempMat; } } } // namespace svd } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/quic_svd/quic_svd.hpp000066400000000000000000000070751315013601400226360ustar00rootroot00000000000000/** * @file quic_svd.hpp * @author Siddharth Agrawal * * An implementation of QUIC-SVD. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_QUIC_SVD_QUIC_SVD_HPP #define MLPACK_METHODS_QUIC_SVD_QUIC_SVD_HPP #include #include namespace mlpack { namespace svd { /** * QUIC-SVD is a matrix factorization technique, which operates in a subspace * such that A's approximation in that subspace has minimum error(A being the * data matrix). The subspace is constructed using a cosine tree, which ensures * minimum representative rank(and thus a fast running time). It follows a * splitting policy based on Length-squared(LS) sampling and constructs the * child nodes based on the absolute cosines of the remaining points relative to * the pivot. The centroids of the points in the child nodes are added to the * subspace span in each step. Each node is then placed into a queue prioritized * by its residual error. The subspace approximation error of A after each step * is calculated using a Monte Carlo estimate. If the error is below a certain * threshold, the method proceeds to calculate the Singular Value Decomposition * in the obtained subspace. Otherwise, the same procedure is repeated until we * obtain a subspace of sufficiently low error. Technical details can be found * in the following paper: * * http://www.cc.gatech.edu/~isbell/papers/isbell-quicsvd-nips-2008.pdf * * An example of how to use the interface is shown below: * * @code * arma::mat data; // Data matrix. * * const double epsilon = 0.01; // Relative error limit of data in subspace. * const double delta = 0.1 // Lower error bound for Monte Carlo estimate. * * arma::mat u, v, sigma; // Matrices for the factors. data = u * sigma * v.t() * * // Get the factorization in the constructor. * QUIC_SVD(data, u, v, sigma, epsilon, delta); * @endcode */ class QUIC_SVD { public: /** * Constructor which implements the QUIC-SVD algorithm. The function calls the * CosineTree constructor to create a subspace basis, where the original * matrix's projection has minimum reconstruction error. The constructor then * uses the ExtractSVD() function to calculate the SVD of the original dataset * in that subspace. * * @param dataset Matrix for which SVD is calculated. * @param u First unitary matrix. * @param v Second unitary matrix. 
* @param sigma Diagonal matrix of singular values. * @param epsilon Error tolerance fraction for calculated subspace. * @param delta Cumulative probability for Monte Carlo error lower bound. */ QUIC_SVD(const arma::mat& dataset, arma::mat& u, arma::mat& v, arma::mat& sigma, const double epsilon = 0.03, const double delta = 0.1); /** * This function uses the vector subspace created using a cosine tree to * calculate an approximate SVD of the original matrix. * * @param u First unitary matrix. * @param v Second unitary matrix. * @param sigma Diagonal matrix of singular values. */ void ExtractSVD(arma::mat& u, arma::mat& v, arma::mat& sigma); private: //! Matrix for which cosine tree is constructed. const arma::mat& dataset; //! Subspace basis of the input dataset. arma::mat basis; }; } // namespace svd } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/radical/000077500000000000000000000000001315013601400200615ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/radical/CMakeLists.txt000066400000000000000000000007461315013601400226300ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into the output library set(SOURCES radical.hpp radical.cpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # append sources (with directory name) to list of all mlpack sources (used at the parent scope) set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(radical) mlpack-2.2.5/src/mlpack/methods/radical/radical.cpp000066400000000000000000000124261315013601400221710ustar00rootroot00000000000000/** * @file radical.cpp * @author Nishant Mehta * * Implementation of Radical class * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "radical.hpp" #include #include using namespace std; using namespace arma; using namespace mlpack; using namespace mlpack::radical; // Set the parameters to RADICAL. Radical::Radical(const double noiseStdDev, const size_t replicates, const size_t angles, const size_t sweeps, const size_t m) : noiseStdDev(noiseStdDev), replicates(replicates), angles(angles), sweeps(sweeps), m(m) { // Nothing to do here. } void Radical::CopyAndPerturb(mat& xNew, const mat& x) const { Timer::Start("radical_copy_and_perturb"); xNew = repmat(x, replicates, 1) + noiseStdDev * randn(replicates * x.n_rows, x.n_cols); Timer::Stop("radical_copy_and_perturb"); } double Radical::Vasicek(vec& z) const { z = sort(z); // Apparently slower. /* vec logs = log(z.subvec(m, z.n_elem - 1) - z.subvec(0, z.n_elem - 1 - m)); //vec val = sum(log(z.subvec(m, nPoints - 1) - z.subvec(0, nPoints - 1 - m))); return (double) sum(logs); */ // Apparently faster. 
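  // Accumulate sum_i log(z(i + m) - z(i)) over the m-spacings of the sorted
  // sample.  The additive and multiplicative constants of the full Vasicek
  // estimator are omitted, since RADICAL only compares these values across
  // candidate rotations and the constants do not change the minimizer.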
double sum = 0; uword range = z.n_elem - m; for (uword i = 0; i < range; i++) { sum += log(z(i + m) - z(i)); } return sum; } double Radical::DoRadical2D(const mat& matX) { CopyAndPerturb(perturbed, matX); mat::fixed<2, 2> matJacobi; vec values(angles); for (size_t i = 0; i < angles; i++) { const double theta = (i / (double) angles) * M_PI / 2.0; const double cosTheta = cos(theta); const double sinTheta = sin(theta); matJacobi(0, 0) = cosTheta; matJacobi(1, 0) = -sinTheta; matJacobi(0, 1) = sinTheta; matJacobi(1, 1) = cosTheta; candidate = perturbed * matJacobi; vec candidateY1 = candidate.unsafe_col(0); vec candidateY2 = candidate.unsafe_col(1); values(i) = Vasicek(candidateY1) + Vasicek(candidateY2); } uword indOpt = 0; values.min(indOpt); // we ignore the return value; we don't care about it return (indOpt / (double) angles) * M_PI / 2.0; } void Radical::DoRadical(const mat& matXT, mat& matY, mat& matW) { // matX is nPoints by nDims (although less intuitive than columns being // points, and although this is the transpose of the ICA literature, this // choice is for computational efficiency when repeatedly generating // two-dimensional coordinate projections for Radical2D). Timer::Start("radical_transpose_data"); mat matX = trans(matXT); Timer::Stop("radical_transpose_data"); // If m was not specified, initialize m as recommended in // (Learned-Miller and Fisher, 2003). if (m < 1) m = floor(sqrt((double) matX.n_rows)); const size_t nDims = matX.n_cols; const size_t nPoints = matX.n_rows; Timer::Start("radical_whiten_data"); mat matXWhitened; mat matWhitening; WhitenFeatureMajorMatrix(matX, matY, matWhitening); Timer::Stop("radical_whiten_data"); // matY is now the whitened form of matX. // In the RADICAL code, they do not copy and perturb initially, although the // paper does. We follow the code as it should match their reported results // and likely does a better job bouncing out of local optima. //GeneratePerturbedX(X, X); // Initialize the unmixing matrix to the whitening matrix. Timer::Start("radical_do_radical"); matW = matWhitening; mat matYSubspace(nPoints, 2); mat matJ = eye(nDims, nDims); for (size_t sweepNum = 0; sweepNum < sweeps; sweepNum++) { Log::Info << "RADICAL: sweep " << sweepNum << "." << std::endl; for (size_t i = 0; i < nDims - 1; i++) { for (size_t j = i + 1; j < nDims; j++) { Log::Debug << "RADICAL 2D on dimensions " << i << " and " << j << "." << std::endl; matYSubspace.col(0) = matY.col(i); matYSubspace.col(1) = matY.col(j); const double thetaOpt = DoRadical2D(matYSubspace); const double cosThetaOpt = cos(thetaOpt); const double sinThetaOpt = sin(thetaOpt); // Set elements of transformation matrix. matJ(i, i) = cosThetaOpt; matJ(j, i) = -sinThetaOpt; matJ(i, j) = sinThetaOpt; matJ(j, j) = cosThetaOpt; matY *= matJ; // Unset elements of transformation matrix, so matJ = eye() again. matJ(i, i) = 1; matJ(j, i) = 0; matJ(i, j) = 0; matJ(j, j) = 1; } } } Timer::Stop("radical_do_radical"); // The final transposes provide W and Y in the typical form from the ICA // literature. 
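  // (That is, points become columns again, so that Y = W * X holds with Y of
  // size nDims x nPoints.)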
Timer::Start("radical_transpose_data"); matW = trans(matW); matY = trans(matY); Timer::Stop("radical_transpose_data"); } void mlpack::radical::WhitenFeatureMajorMatrix(const mat& matX, mat& matXWhitened, mat& matWhitening) { mat matU, matV; vec s; svd(matU, s, matV, cov(matX)); matWhitening = matU * diagmat(1 / sqrt(s)) * trans(matV); matXWhitened = matX * matWhitening; } mlpack-2.2.5/src/mlpack/methods/radical/radical.hpp000066400000000000000000000114431315013601400221740ustar00rootroot00000000000000/** * @file radical.hpp * @author Nishant Mehta * * Declaration of Radical class (RADICAL is Robust, Accurate, Direct ICA * aLgorithm). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RADICAL_RADICAL_HPP #define MLPACK_METHODS_RADICAL_RADICAL_HPP #include namespace mlpack { namespace radical { /** * An implementation of RADICAL, an algorithm for independent component * analysis (ICA). * * Let X be a matrix where each column is a point and each row a dimension. * The goal is to find a square unmixing matrix W such that Y = W X and * the rows of Y are independent components. * * For more details, see the following paper: * * @code * @article{learned2003ica, * title = {ICA Using Spacings Estimates of Entropy}, * author = {Learned-Miller, E.G. and Fisher III, J.W.}, * journal = {Journal of Machine Learning Research}, * volume = {4}, * pages = {1271--1295}, * year = {2003} * } * @endcode */ class Radical { public: /** * Set the parameters to RADICAL. * * @param noiseStdDev Standard deviation of the Gaussian noise added to the * replicates of the data points during Radical2D * @param replicates Number of Gaussian-perturbed replicates to use (per * point) in Radical2D * @param angles Number of angles to consider in brute-force search during * Radical2D * @param sweeps Number of sweeps. Each sweep calls Radical2D once for each * pair of dimensions * @param m The variable m from Vasicek's m-spacing estimator of entropy. */ Radical(const double noiseStdDev = 0.175, const size_t replicates = 30, const size_t angles = 150, const size_t sweeps = 0, const size_t m = 0); /** * Run RADICAL. * * @param matX Input data into the algorithm - a matrix where each column is * a point and each row is a dimension. * @param matY Estimated independent components - a matrix where each column * is a point and each row is an estimated independent component. * @param matW Estimated unmixing matrix, where matY = matW * matX. */ void DoRadical(const arma::mat& matX, arma::mat& matY, arma::mat& matW); /** * Vasicek's m-spacing estimator of entropy, with overlap modification from * (Learned-Miller and Fisher, 2003). * * @param x Empirical sample (one-dimensional) over which to estimate entropy. */ double Vasicek(arma::vec& x) const; /** * Make replicates of each data point (the number of replicates is set in * either the constructor or with Replicates()) and perturb data with Gaussian * noise with standard deviation noiseStdDev. */ void CopyAndPerturb(arma::mat& xNew, const arma::mat& x) const; //! Two-dimensional version of RADICAL. double DoRadical2D(const arma::mat& matX); //! Get the standard deviation of the additive Gaussian noise. double NoiseStdDev() const { return noiseStdDev; } //! Modify the standard deviation of the additive Gaussian noise. 
double& NoiseStdDev() { return noiseStdDev; } //! Get the number of Gaussian-perturbed replicates used per point. size_t Replicates() const { return replicates; } //! Modify the number of Gaussian-perturbed replicates used per point. size_t& Replicates() { return replicates; } //! Get the number of angles considered during brute-force search. size_t Angles() const { return angles; } //! Modify the number of angles considered during brute-force search. size_t& Angles() { return angles; } //! Get the number of sweeps. size_t Sweeps() const { return sweeps; } //! Modify the number of sweeps. size_t& Sweeps() { return sweeps; } private: //! Standard deviation of the Gaussian noise added to the replicates of //! the data points during Radical2D. double noiseStdDev; //! Number of Gaussian-perturbed replicates to use (per point) in Radical2D. size_t replicates; //! Number of angles to consider in brute-force search during Radical2D. size_t angles; //! Number of sweeps; each sweep calls Radical2D once for each pair of //! dimensions. size_t sweeps; //! Value of m to use for Vasicek's m-spacing estimator of entropy. size_t m; //! Internal matrix, held as member variable to prevent memory reallocations. arma::mat perturbed; //! Internal matrix, held as member variable to prevent memory reallocations. arma::mat candidate; }; void WhitenFeatureMajorMatrix(const arma::mat& matX, arma::mat& matXWhitened, arma::mat& matWhitening); } // namespace radical } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/radical/radical_main.cpp000066400000000000000000000120771315013601400231770ustar00rootroot00000000000000/** * @file radical_main.cpp * @author Nishant Mehta * * Executable for RADICAL. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include "radical.hpp" PROGRAM_INFO("RADICAL", "An implementation of RADICAL, a method for independent" "component analysis (ICA). Assuming that we have an input matrix X, the" "goal is to find a square unmixing matrix W such that Y = W * X and the " "dimensions of Y are independent components. If the algorithm is running" "particularly slowly, try reducing the number of replicates."); PARAM_STRING_IN_REQ("input_file", "Input dataset filename for ICA.", "i"); // Kept for reverse compatibility until mlpack 3.0.0. PARAM_STRING_OUT("output_ic", "File to save independent components to " "(deprecated: use --output_ic_file).", ""); PARAM_STRING_OUT("output_unmixing", "File to save unmixing matrix to " "(deprecated: use --output_unmixing_file).", ""); // These are the new parameter names. PARAM_STRING_OUT("output_ic_file", "File to save independent components to.", "o"); PARAM_STRING_OUT("output_unmixing_file", "File to save unmixing matrix to.", "u"); PARAM_DOUBLE_IN("noise_std_dev", "Standard deviation of Gaussian noise.", "n", 0.175); PARAM_INT_IN("replicates", "Number of Gaussian-perturbed replicates to use " "(per point) in Radical2D.", "r", 30); PARAM_INT_IN("angles", "Number of angles to consider in brute-force search " "during Radical2D.", "a", 150); PARAM_INT_IN("sweeps", "Number of sweeps; each sweep calls Radical2D once for " "each pair of dimensions.", "S", 0); PARAM_INT_IN("seed", "Random seed. 
If 0, 'std::time(NULL)' is used.", "s", 0); PARAM_FLAG("objective", "If set, an estimate of the final objective function " "is printed.", "O"); using namespace mlpack; using namespace mlpack::radical; using namespace mlpack::math; using namespace std; using namespace arma; int main(int argc, char* argv[]) { // Handle parameters. CLI::ParseCommandLine(argc, argv); // Reverse compatibility. We can remove these for mlpack 3.0.0. if (CLI::HasParam("output_ic") && CLI::HasParam("output_ic_file")) Log::Fatal << "Cannot specify both --output_ic and --output_ic_file!" << endl; if (CLI::HasParam("output_unmixing") && CLI::HasParam("output_unmixing_file")) Log::Fatal << "Cannot specify both --output_unmixing and " << "--output_unmixing_file!" << endl; if (CLI::HasParam("output_ic")) { Log::Warn << "--output_ic is deprecated and will be removed in mlpack " << "3.0.0; use --output_ic_file instead." << endl; CLI::GetParam("output_ic_file") = CLI::GetParam("output_ic"); } if (CLI::HasParam("output_unmixing")) { Log::Warn << "--output_unmixing is deprecated and will be removed in mlpack" << " 3.0.0; use --output_unmixing_file instead." << endl; CLI::GetParam("output_unmixing_file") = CLI::GetParam("output_unmixing"); } // Set random seed. if (CLI::GetParam("seed") != 0) RandomSeed((size_t) CLI::GetParam("seed")); else RandomSeed((size_t) std::time(NULL)); if ((CLI::GetParam("output_ic_file") == "") && (CLI::GetParam("output_unmixing_file") == "")) Log::Warn << "Neither --output_ic_file nor --output_unmixing_file were " << "specified; no output will be saved!" << endl; // Load the data. const string matXFilename = CLI::GetParam("input_file"); mat matX; data::Load(matXFilename, matX); // Load parameters. double noiseStdDev = CLI::GetParam("noise_std_dev"); size_t nReplicates = CLI::GetParam("replicates"); size_t nAngles = CLI::GetParam("angles"); size_t nSweeps = CLI::GetParam("sweeps"); if (nSweeps == 0) { nSweeps = matX.n_rows - 1; } // Run RADICAL. Radical rad(noiseStdDev, nReplicates, nAngles, nSweeps); mat matY; mat matW; rad.DoRadical(matX, matY, matW); // Save results. const string matYFilename = CLI::GetParam("output_ic_file"); if (matYFilename != "") data::Save(matYFilename, matY); const string matWFilename = CLI::GetParam("output_unmixing_file"); if (matWFilename != "") data::Save(matWFilename, matW); if (CLI::HasParam("objective")) { // Compute and print objective. mat matYT = trans(matY); double valEst = 0; for (size_t i = 0; i < matYT.n_cols; i++) { vec y = vec(matYT.col(i)); valEst += rad.Vasicek(y); } // Force output even if --verbose is not given. const bool ignoring = Log::Info.ignoreInput; Log::Info.ignoreInput = false; Log::Info << "Objective (estimate): " << valEst << "." << endl; Log::Info.ignoreInput = ignoring; } } mlpack-2.2.5/src/mlpack/methods/randomized_svd/000077500000000000000000000000001315013601400214725ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/randomized_svd/CMakeLists.txt000066400000000000000000000007171315013601400242370ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES randomized_svd.hpp randomized_svd.cpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). 
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/randomized_svd/randomized_svd.cpp000066400000000000000000000072401315013601400252110ustar00rootroot00000000000000/** * @file randomized_svd.cpp * @author Marcus Edel * * Implementation of the randomized SVD method. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "randomized_svd.hpp" namespace mlpack { namespace svd { RandomizedSVD::RandomizedSVD(const arma::mat& data, arma::mat& u, arma::vec& s, arma::mat& v, const size_t iteratedPower, const size_t maxIterations, const size_t rank, const double eps) : iteratedPower(iteratedPower), maxIterations(maxIterations), eps(eps) { if (rank == 0) { Apply(data, u, s, v, data.n_rows); } else { Apply(data, u, s, v, rank); } } RandomizedSVD::RandomizedSVD(const size_t iteratedPower, const size_t maxIterations, const double eps) : iteratedPower(iteratedPower), maxIterations(maxIterations), eps(eps) { /* Nothing to do here */ } void RandomizedSVD::Apply(const arma::mat& data, arma::mat& u, arma::vec& s, arma::mat& v, const size_t rank) { if (iteratedPower == 0) iteratedPower = rank + 2; // Center the data into a temporary matrix. arma::vec rowMean = arma::sum(data, 1) / data.n_cols + eps; arma::mat R, Q, Qdata; // Apply the centered data matrix to a random matrix, obtaining Q. if (data.n_cols >= data.n_rows) { R = arma::randn(data.n_rows, iteratedPower); Q = (data.t() * R) - arma::repmat(arma::trans(R.t() * rowMean), data.n_cols, 1); } else { R = arma::randn(data.n_cols, iteratedPower); Q = (data * R) - (rowMean * (arma::ones(1, data.n_cols) * R)); } // Form a matrix Q whose columns constitute a // well-conditioned basis for the columns of the earlier Q. if (maxIterations == 0) { arma::qr_econ(Q, v, Q); } else { arma::lu(Q, v, Q); } // Perform normalized power iterations. for (size_t i = 0; i < maxIterations; ++i) { if (data.n_cols >= data.n_rows) { Q = (data * Q) - rowMean * (arma::ones(1, data.n_cols) * Q); arma::lu(Q, v, Q); Q = (data.t() * Q) - arma::repmat(rowMean.t() * Q, data.n_cols, 1); } else { Q = (data.t() * Q) - arma::repmat(rowMean.t() * Q, data.n_cols, 1); arma::lu(Q, v, Q); Q = (data * Q) - (rowMean * (arma::ones(1, data.n_cols) * Q)); } // Computing the LU decomposition is more efficient than computing the QR // decomposition, so we only use it in the last iteration, a pivoted QR // decomposition which renormalizes Q, ensuring that the columns of Q are // orthonormal. if (i < (maxIterations - 1)) { arma::lu(Q, v, Q); } else { arma::qr_econ(Q, v, Q); } } // Do economical singular value decomposition and compute only the // approximations of the left singular vectors by using the centered data // applied to Q. if (data.n_cols >= data.n_rows) { Qdata = (data * Q) - rowMean * (arma::ones(1, data.n_cols) * Q); arma::svd_econ(u, s, v, Qdata); v = Q * v; } else { Qdata = (Q.t() * data) - arma::repmat(Q.t() * rowMean, 1, data.n_cols); arma::svd_econ(u, s, v, Qdata); u = Q * u; } } } // namespace svd } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/randomized_svd/randomized_svd.hpp000066400000000000000000000112001315013601400252050ustar00rootroot00000000000000/** * @file randomized_svd.hpp * @author Marcus Edel * * An implementation of the randomized SVD method. 
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RANDOMIZED_SVD_RANDOMIZED_SVD_HPP #define MLPACK_METHODS_RANDOMIZED_SVD_RANDOMIZED_SVD_HPP #include namespace mlpack { namespace svd { /** * Randomized SVD is a matrix factorization that is based on randomized matrix * approximation techniques, developed in in "Finding structure with randomness: * Probabilistic algorithms for constructing approximate matrix decompositions". * * For more information, see the following. * * @code * @article{Halko2011, * author = {Halko, N. and Martinsson, P. G. and Tropp, J. A.}, * title = {Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions}, * journal = {SIAM Rev.}, * volume = {53}, * year = {2011}, * } * @endcode * * @code * @article{Szlam2014, * author = {Arthur Szlam Yuval Kluger and Mark Tygert}, * title = {An implementation of a randomized algorithm for principal component analysis}, * journal = {CoRR}, * volume = {abs/1412.3510}, * year = {2014}, * } * @endcode * * An example of how to use the interface is shown below: * * @code * arma::mat data; // Rating data in the form of coordinate list. * * const size_t rank = 20; // Rank used for the decomposition. * * // Make a RandomizedSVD object. * RandomizedSVD rSVD(); * * arma::mat u, s, v; * * // Use the Apply() method to get a factorization. * rSVD.Apply(data, u, s, v, rank); * @endcode */ class RandomizedSVD { public: /** * Create object for the randomized SVD method. * * @param data Data matrix. * @param u First unitary matrix. * @param v Second unitary matrix. * @param sigma Diagonal matrix of singular values. * @param iteratedPower Size of the normalized power iterations * (Default: rank + 2). * @param maxIterations Number of iterations for the power method * (Default: 2). * @param rank Rank of the approximation (Default: number of rows.) * @param eps The eps coefficient to avoid division by zero (numerical * stability). */ RandomizedSVD(const arma::mat& data, arma::mat& u, arma::vec& s, arma::mat& v, const size_t iteratedPower = 0, const size_t maxIterations = 2, const size_t rank = 0, const double eps = 1e-7); /** * Create object for the randomized SVD method. * * @param iteratedPower Size of the normalized power iterations * (Default: rank + 2). * @param maxIterations Number of iterations for the power method * (Default: 2). * @param eps The eps coefficient to avoid division by zero (numerical * stability). */ RandomizedSVD(const size_t iteratedPower = 0, const size_t maxIterations = 2, const double eps = 1e-7); /** * Apply Principal Component Analysis to the provided data set using the * randomized SVD. * * @param data Data matrix. * @param u First unitary matrix. * @param v Second unitary matrix. * @param sigma Diagonal matrix of singular values. * @param rank Rank of the approximation. */ void Apply(const arma::mat& data, arma::mat& u, arma::vec& s, arma::mat& v, const size_t rank); //! Get the size of the normalized power iterations. size_t IteratedPower() const { return iteratedPower; } //! Modify the size of the normalized power iterations. size_t& IteratedPower() { return iteratedPower; } //! Get the number of iterations for the power method. size_t MaxIterations() const { return maxIterations; } //! 
Modify the number of iterations for the power method. size_t& MaxIterations() { return maxIterations; } //! Get the value used for decomposition stability. double Epsilon() const { return eps; } //! Modify the value used for decomposition stability. double& Epsilon() { return eps; } private: //! Locally stored size of the normalized power iterations. size_t iteratedPower; //! Locally stored number of iterations for the power method. size_t maxIterations; //! The value used for numerical stability. double eps; }; } // namespace svd } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/range_search/000077500000000000000000000000001315013601400211035ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/range_search/CMakeLists.txt000066400000000000000000000011441315013601400236430ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES range_search.hpp range_search_impl.hpp range_search_rules.hpp range_search_rules_impl.hpp range_search_stat.hpp rs_model.hpp rs_model_impl.hpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(range_search) mlpack-2.2.5/src/mlpack/methods/range_search/range_search.hpp000066400000000000000000000326451315013601400242470ustar00rootroot00000000000000/** * @file range_search.hpp * @author Ryan Curtin * * Defines the RangeSearch class, which performs a generalized range search on * points. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_HPP #define MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_HPP #include #include #include #include "range_search_stat.hpp" namespace mlpack { namespace range /** Range-search routines. */ { //! Forward declaration. class TrainVisitor; /** * The RangeSearch class is a template class for performing range searches. It * is implemented in the style of a generalized tree-independent dual-tree * algorithm; for more details on the actual algorithm, see the RangeSearchRules * class. * * @tparam MetricType Metric to use for range search calculations. * @tparam MatType Type of data to use. * @tparam TreeType Type of tree to use; must satisfy the TreeType policy API. */ template class TreeType = tree::KDTree> class RangeSearch { public: //! Convenience typedef. typedef TreeType Tree; /** * Initialize the RangeSearch object with a given reference dataset (this is * the dataset which is searched). Optionally, perform the computation in * naive mode or single-tree mode. Additionally, an instantiated metric can be * given, for cases where the distance metric holds data. * * This method will copy the matrices to internal copies, which are rearranged * during tree-building. You can avoid this extra copy by pre-constructing * the trees and passing them using a different constructor. * * @param referenceSet Reference dataset. * @param naive Whether the computation should be done in O(n^2) naive mode. * @param singleMode Whether single-tree computation should be used (as * opposed to dual-tree computation). 
* @param metric Instantiated distance metric. */ RangeSearch(const MatType& referenceSet, const bool naive = false, const bool singleMode = false, const MetricType metric = MetricType()); /** * Initialize the RangeSearch object with the given reference dataset (this is * the dataset which is searched), taking ownership of the matrix. * Optionally, perform the computation in naive mode or single-tree mode. * Additionally, an instantiated metric can be given, for cases where the * distance metric holds data. * * This method will not copy the data matrix, but will take ownership of it, * and depending on the type of tree used, may rearrange the points. If you * would rather a copy be made, consider using the constructor that takes a * const reference to the data instead. * * @param referenceSet Set of reference points. * @param naive If true, brute force naive search will be used (as opposed to * dual-tree search). This overrides singleMode (if it is set to true). * @param singleMode If true, single-tree search will be used (as opposed to * dual-tree search). * @param metric An optional instance of the MetricType class. */ RangeSearch(MatType&& referenceSet, const bool naive = false, const bool singleMode = false, const MetricType metric = MetricType()); /** * Initialize the RangeSearch object with the given pre-constructed reference * tree (this is the tree built on the reference set, which is the set that is * searched). Optionally, choose to use single-tree mode, which will not * build a tree on query points. Naive mode is not available as an option for * this constructor. Additionally, an instantiated distance metric can be * given, for cases where the distance metric holds data. * * There is no copying of the data matrices in this constructor (because * tree-building is not necessary), so this is the constructor to use when * copies absolutely must be avoided. * * @note * Because tree-building (at least with BinarySpaceTree) modifies the ordering * of a matrix, be aware that mapping of the points back to their original * indices is not done when this constructor is used. * @endnote * * @param referenceTree Pre-built tree for reference points. * @param referenceSet Set of reference points corresponding to referenceTree. * @param singleMode Whether single-tree computation should be used (as * opposed to dual-tree computation). * @param metric Instantiated distance metric. */ RangeSearch(Tree* referenceTree, const bool singleMode = false, const MetricType metric = MetricType()); /** * Initialize the RangeSearch object without any reference data. If the * monochromatic Search() is called before a reference set is set with * Train(), no results will be returned (since the reference set is empty). * * @param naive Whether to use naive search. * @param singleMode Whether single-tree computation should be used (as * opposed to dual-tree computation). * @param metric Instantiated metric. */ RangeSearch(const bool naive = false, const bool singleMode = false, const MetricType metric = MetricType()); /** * Copy constructor: this will copy any trees, so it may not be a great idea * to call this! */ RangeSearch(const RangeSearch& other); /** * Move constructor: take possession of all the members of the other model. */ RangeSearch(RangeSearch&& other); /** * Destroy the RangeSearch object. If trees were created, they will be * deleted. */ ~RangeSearch(); /** * Set the reference set to a new reference set, and build a tree if * necessary. 
This method is called 'Train()' in order to match the rest of * the mlpack abstractions, even though calling this "training" is maybe a bit * of a stretch. * * @param referenceSet New set of reference data. */ void Train(const MatType& referenceSet); /** * Set the reference set to a new reference set, taking ownership of the set. * A tree is built if necessary. This method is called 'Train()' in order to * match the rest of the mlpack abstractions, even though calling this * "training" is maybe a bit of a stretch. * * @param referenceSet New set of reference data. */ void Train(MatType&& referenceSet); /** * Set the reference tree to a new reference tree. */ void Train(Tree* referenceTree); /** * Search for all reference points in the given range for each point in the * query set, returning the results in the neighbors and distances objects. * Each entry in the external vector corresponds to a query point. Each of * these entries holds a vector which contains the indices and distances of * the reference points falling into the given range. * * That is: * * - neighbors.size() and distances.size() both equal the number of query * points. * * - neighbors[i] contains the indices of all the points in the reference set * which have distances inside the given range to query point i. * * - distances[i] contains all of the distances corresponding to the indices * contained in neighbors[i]. * * - neighbors[i] and distances[i] are not sorted in any particular order. * * @param querySet Set of query points to search with. * @param range Range of distances in which to search. * @param neighbors Object which will hold the list of neighbors for each * point which fell into the given range, for each query point. * @param distances Object which will hold the list of distances for each * point which fell into the given range, for each query point. */ void Search(const MatType& querySet, const math::Range& range, std::vector>& neighbors, std::vector>& distances); /** * Given a pre-built query tree, search for all reference points in the given * range for each point in the query set, returning the results in the * neighbors and distances objects. * * Each entry in the external vector corresponds to a query point. Each of * these entries holds a vector which contains the indices and distances of * the reference points falling into the given range. * * That is: * * - neighbors.size() and distances.size() both equal the number of query * points. * * - neighbors[i] contains the indices of all the points in the reference set * which have distances inside the given range to query point i. * * - distances[i] contains all of the distances corresponding to the indices * contained in neighbors[i]. * * - neighbors[i] and distances[i] are not sorted in any particular order. * * If either naive or singleMode are set to true, this will throw an * invalid_argument exception; passing in a query tree implies dual-tree * search. * * If you want to use the reference tree as the query tree, instead call the * overload of Search() that does not take a query set. * * @param queryTree Tree built on query points. * @param range Range of distances in which to search. * @param neighbors Object which will hold the list of neighbors for each * point which fell into the given range, for each query point. * @param distances Object which will hold the list of distances for each * point which fell into the given range, for each query point. 
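   *
   * For example, a dual-tree search against a pre-built query tree might look
   * like the following sketch (the names here are illustrative, and it
   * assumes a RangeSearch object 'rs' already trained on a reference set):
   *
   * @code
   * arma::mat queryData; // Filled with query points.
   * RangeSearch<>::Tree queryTree(queryData);
   * std::vector<std::vector<size_t>> neighbors;
   * std::vector<std::vector<double>> distances;
   * rs.Search(&queryTree, math::Range(0.5, 1.5), neighbors, distances);
   * @endcode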
*/ void Search(Tree* queryTree, const math::Range& range, std::vector>& neighbors, std::vector>& distances); /** * Search for all points in the given range for each point in the reference * set (which was passed to the constructor), returning the results in the * neighbors and distances objects. This means that the query set and the * reference set are the same. * * Each entry in the external vector corresponds to a query point. Each of * these entries holds a vector which contains the indices and distances of * the reference points falling into the given range. * * That is: * * - neighbors.size() and distances.size() both equal the number of query * points. * * - neighbors[i] contains the indices of all the points in the reference set * which have distances inside the given range to query point i. * * - distances[i] contains all of the distances corresponding to the indices * contained in neighbors[i]. * * - neighbors[i] and distances[i] are not sorted in any particular order. * * @param queryTree Tree built on query points. * @param range Range of distances in which to search. * @param neighbors Object which will hold the list of neighbors for each * point which fell into the given range, for each query point. * @param distances Object which will hold the list of distances for each * point which fell into the given range, for each query point. */ void Search(const math::Range& range, std::vector>& neighbors, std::vector>& distances); //! Get whether single-tree search is being used. bool SingleMode() const { return singleMode; } //! Modify whether single-tree search is being used. bool& SingleMode() { return singleMode; } //! Get whether naive search is being used. bool Naive() const { return naive; } //! Modify whether naive search is being used. bool& Naive() { return naive; } //! Get the number of base cases during the last search. size_t BaseCases() const { return baseCases; } //! Get the number of scores during the last search. size_t Scores() const { return scores; } //! Serialize the model. template void Serialize(Archive& ar, const unsigned int version); //! Return the reference set. const MatType& ReferenceSet() const { return *referenceSet; } //! Return the reference tree (or NULL if in naive mode). Tree* ReferenceTree() { return referenceTree; } private: //! Mappings to old reference indices (used when this object builds trees). std::vector oldFromNewReferences; //! Reference tree. Tree* referenceTree; //! Reference set (data should be accessed using this). In some situations we //! may be the owner of this. const MatType* referenceSet; //! If true, this object is responsible for deleting the trees. bool treeOwner; //! If true, we own the reference set. bool setOwner; //! If true, O(n^2) naive computation is used. bool naive; //! If true, single-tree computation is used. bool singleMode; //! Instantiated distance metric. MetricType metric; //! The total number of base cases during the last search. size_t baseCases; //! The total number of scores during the last search. size_t scores; //! For access to mappings when building models. friend class TrainVisitor; }; } // namespace range } // namespace mlpack // Include implementation. #include "range_search_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/range_search/range_search_impl.hpp000066400000000000000000000546041315013601400252670ustar00rootroot00000000000000/** * @file range_search_impl.hpp * @author Ryan Curtin * * Implementation of the RangeSearch class. 
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_IMPL_HPP
#define MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_IMPL_HPP

// Just in case it hasn't been included.
#include "range_search.hpp"

// The rules for traversal.
#include "range_search_rules.hpp"

namespace mlpack {
namespace range {

//! Call the tree constructor that does mapping.
template<typename TreeType>
TreeType* BuildTree(
    typename TreeType::Mat& dataset,
    std::vector<size_t>& oldFromNew,
    typename boost::enable_if_c<
        tree::TreeTraits<TreeType>::RearrangesDataset == true, TreeType*
    >::type = 0)
{
  return new TreeType(dataset, oldFromNew);
}

//! Call the tree constructor that does not do mapping.
template<typename TreeType>
TreeType* BuildTree(
    const typename TreeType::Mat& dataset,
    const std::vector<size_t>& /* oldFromNew */,
    const typename boost::enable_if_c<
        tree::TreeTraits<TreeType>::RearrangesDataset == false, TreeType*
    >::type = 0)
{
  return new TreeType(dataset);
}

//! Call the tree constructor that does mapping, taking ownership of the data.
template<typename TreeType>
TreeType* BuildTree(
    typename TreeType::Mat&& dataset,
    std::vector<size_t>& oldFromNew,
    const typename boost::enable_if_c<
        tree::TreeTraits<TreeType>::RearrangesDataset == true, TreeType*
    >::type = 0)
{
  return new TreeType(std::move(dataset), oldFromNew);
}

//! Call the tree constructor that does not do mapping, taking ownership of
//! the data.
template<typename TreeType>
TreeType* BuildTree(
    typename TreeType::Mat&& dataset,
    const std::vector<size_t>& /* oldFromNew */,
    const typename boost::enable_if_c<
        tree::TreeTraits<TreeType>::RearrangesDataset == false, TreeType*
    >::type = 0)
{
  return new TreeType(std::move(dataset));
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
RangeSearch<MetricType, MatType, TreeType>::RangeSearch(
    const MatType& referenceSetIn,
    const bool naive,
    const bool singleMode,
    const MetricType metric) :
    referenceTree(naive ? NULL :
        BuildTree<Tree>(const_cast<MatType&>(referenceSetIn),
                        oldFromNewReferences)),
    referenceSet(naive ? &referenceSetIn : &referenceTree->Dataset()),
    treeOwner(!naive), // If in naive mode, we are not building any trees.
    setOwner(false),
    naive(naive),
    singleMode(!naive && singleMode), // Naive overrides single mode.
    metric(metric),
    baseCases(0),
    scores(0)
{
  // Nothing to do.
}

// Move constructor.
template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
RangeSearch<MetricType, MatType, TreeType>::RangeSearch(
    MatType&& referenceSet,
    const bool naive,
    const bool singleMode,
    const MetricType metric) :
    referenceTree(naive ? NULL :
        BuildTree<Tree>(std::move(referenceSet), oldFromNewReferences)),
    referenceSet(naive ? new MatType(std::move(referenceSet)) :
        &referenceTree->Dataset()),
    treeOwner(!naive),
    setOwner(naive),
    naive(naive),
    singleMode(!naive && singleMode),
    metric(metric),
    baseCases(0),
    scores(0)
{
  // Nothing to do.
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
RangeSearch<MetricType, MatType, TreeType>::RangeSearch(
    Tree* referenceTree,
    const bool singleMode,
    const MetricType metric) :
    referenceTree(referenceTree),
    referenceSet(&referenceTree->Dataset()),
    treeOwner(false),
    setOwner(false),
    naive(false),
    singleMode(singleMode),
    metric(metric),
    baseCases(0),
    scores(0)
{
  // Nothing else to initialize.
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
RangeSearch<MetricType, MatType, TreeType>::RangeSearch(
    const bool naive,
    const bool singleMode,
    const MetricType metric) :
    referenceTree(NULL),
    referenceSet(new MatType()), // Empty matrix.
    treeOwner(false),
    setOwner(true),
    naive(naive),
    singleMode(singleMode),
    metric(metric),
    baseCases(0),
    scores(0)
{
  // Build the tree on the empty dataset, if necessary.
  if (!naive)
  {
    referenceTree = BuildTree<Tree>(const_cast<MatType&>(*referenceSet),
        oldFromNewReferences);
    treeOwner = true;
  }
}

// Copy constructor.
template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
RangeSearch<MetricType, MatType, TreeType>::RangeSearch(
    const RangeSearch& other) :
    oldFromNewReferences(other.oldFromNewReferences),
    referenceTree(other.naive ? NULL : new Tree(*other.referenceTree)),
    referenceSet(other.naive ? new MatType(*other.referenceSet) :
        &referenceTree->Dataset()),
    treeOwner(!other.naive),
    setOwner(other.naive),
    naive(other.naive),
    singleMode(other.singleMode),
    metric(other.metric),
    baseCases(other.baseCases),
    scores(other.scores)
{
  // Nothing to do.
}

// Move constructor.
template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
RangeSearch<MetricType, MatType, TreeType>::RangeSearch(RangeSearch&& other) :
    oldFromNewReferences(std::move(other.oldFromNewReferences)),
    referenceTree(other.naive ? NULL : std::move(other.referenceTree)),
    referenceSet(other.naive ? std::move(other.referenceSet) :
        &referenceTree->Dataset()),
    treeOwner(other.treeOwner),
    setOwner(other.setOwner),
    naive(other.naive),
    singleMode(other.singleMode),
    metric(std::move(other.metric)),
    baseCases(other.baseCases),
    scores(other.scores)
{
  // Reset the other object so its destructor does not free our data.
  other.referenceTree = NULL;
  other.referenceSet = new arma::mat(); // Empty dataset.
  other.treeOwner = false;
  other.setOwner = true;
  other.baseCases = 0;
  other.scores = 0;
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
RangeSearch<MetricType, MatType, TreeType>::~RangeSearch()
{
  if (treeOwner && referenceTree)
    delete referenceTree;
  if (setOwner && referenceSet)
    delete referenceSet;
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
void RangeSearch<MetricType, MatType, TreeType>::Train(
    const MatType& referenceSet)
{
  // Clean up the old tree, if we built one.
  if (treeOwner && referenceTree)
    delete referenceTree;

  // Rebuild the tree, if necessary.
  if (!naive)
  {
    referenceTree = BuildTree<Tree>(const_cast<MatType&>(referenceSet),
        oldFromNewReferences);
    treeOwner = true;
  }
  else
  {
    treeOwner = false;
  }

  // Delete the old reference set, if we owned it.
  if (setOwner && this->referenceSet)
    delete this->referenceSet;

  if (!naive)
    this->referenceSet = &referenceTree->Dataset();
  else
    this->referenceSet = &referenceSet;
  setOwner = false;
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
void RangeSearch<MetricType, MatType, TreeType>::Train(MatType&& referenceSet)
{
  // Clean up the old tree, if we built one.
  if (treeOwner && referenceTree)
    delete referenceTree;

  // We may need to rebuild the tree.
  if (!naive)
  {
    referenceTree = BuildTree<Tree>(std::move(referenceSet),
        oldFromNewReferences);
    treeOwner = true;
  }
  else
  {
    treeOwner = false;
  }

  // Delete the old reference set, if we owned it.
  if (setOwner && this->referenceSet)
    delete this->referenceSet;

  if (!naive)
  {
    this->referenceSet = &referenceTree->Dataset();
    setOwner = false;
  }
  else
  {
    this->referenceSet = new MatType(std::move(referenceSet));
    setOwner = true;
  }
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
void RangeSearch<MetricType, MatType, TreeType>::Train(Tree* referenceTree)
{
  if (naive)
    throw std::invalid_argument("cannot train on given reference tree when "
        "naive search (without trees) is desired");

  if (treeOwner && referenceTree)
    delete this->referenceTree;
  if (setOwner && referenceSet)
    delete this->referenceSet;

  this->referenceTree = referenceTree;
  this->referenceSet = &referenceTree->Dataset();
  treeOwner = false;
  setOwner = false;
}
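// As an illustration of the Train(Tree*) overload above, a tree built
// externally can be handed to an existing RangeSearch object; the object does
// not take ownership of it (a sketch only; 'data' stands in for a
// user-provided arma::mat):
//
//   RangeSearch<> rs(false, false); // No data yet; not naive, not single.
//   std::vector<size_t> oldFromNew;
//   RangeSearch<>::Tree tree(data, oldFromNew); // Build a kd-tree by hand.
//   rs.Train(&tree); // rs now searches against 'tree' but will not delete it.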
template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
void RangeSearch<MetricType, MatType, TreeType>::Search(
    const MatType& querySet,
    const math::Range& range,
    std::vector<std::vector<size_t>>& neighbors,
    std::vector<std::vector<double>>& distances)
{
  if (querySet.n_rows != referenceSet->n_rows)
  {
    std::ostringstream oss;
    oss << "RangeSearch::Search(): dimensionalities of query set ("
        << querySet.n_rows << ") and reference set (" << referenceSet->n_rows
        << ") do not match!";
    throw std::invalid_argument(oss.str());
  }

  // If there are no points, there is no search to be done.
  if (referenceSet->n_cols == 0)
    return;

  Timer::Start("range_search/computing_neighbors");

  // This will hold mappings for query points, if necessary.
  std::vector<size_t> oldFromNewQueries;

  // If we have built the trees ourselves, then we will have to map all the
  // indices back to their original indices when this computation is finished.
  // To avoid extra copies, we will store the unmapped neighbors and distances
  // in a separate object.
  std::vector<std::vector<size_t>>* neighborPtr = &neighbors;
  std::vector<std::vector<double>>* distancePtr = &distances;

  // Mapping is only necessary if the tree rearranges points.
  if (tree::TreeTraits<Tree>::RearrangesDataset)
  {
    // Query indices only need to be mapped if we are building the query tree
    // ourselves.
    if (!singleMode && !naive)
    {
      distancePtr = new std::vector<std::vector<double>>;
      neighborPtr = new std::vector<std::vector<size_t>>;
    }
    // Reference indices only need to be mapped if we built the reference tree
    // ourselves.
    else if (treeOwner)
      neighborPtr = new std::vector<std::vector<size_t>>;
  }

  // Resize each vector.
  neighborPtr->clear(); // Just in case there was anything in it.
  neighborPtr->resize(querySet.n_cols);
  distancePtr->clear();
  distancePtr->resize(querySet.n_cols);

  // Create the helper object for the traversal.
  typedef RangeSearchRules<MetricType, Tree> RuleType;

  // Reset counts.
  baseCases = 0;
  scores = 0;

  if (naive)
  {
    RuleType rules(*referenceSet, querySet, range, *neighborPtr, *distancePtr,
        metric);

    // The naive brute-force solution.
    for (size_t i = 0; i < querySet.n_cols; ++i)
      for (size_t j = 0; j < referenceSet->n_cols; ++j)
        rules.BaseCase(i, j);

    baseCases += (querySet.n_cols * referenceSet->n_cols);
  }
  else if (singleMode)
  {
    // Create the traverser.
    RuleType rules(*referenceSet, querySet, range, *neighborPtr, *distancePtr,
        metric);
    typename Tree::template SingleTreeTraverser<RuleType> traverser(rules);

    // Now have it traverse for each point.
    for (size_t i = 0; i < querySet.n_cols; ++i)
      traverser.Traverse(i, *referenceTree);

    baseCases += rules.BaseCases();
    scores += rules.Scores();
  }
  else // Dual-tree recursion.
  {
    // Build the query tree.
    Timer::Stop("range_search/computing_neighbors");
    Timer::Start("range_search/tree_building");
    Tree* queryTree = BuildTree<Tree>(const_cast<MatType&>(querySet),
        oldFromNewQueries);
    Timer::Stop("range_search/tree_building");
    Timer::Start("range_search/computing_neighbors");

    // Create the traverser.
    RuleType rules(*referenceSet, queryTree->Dataset(), range, *neighborPtr,
        *distancePtr, metric);
    typename Tree::template DualTreeTraverser<RuleType> traverser(rules);

    traverser.Traverse(*queryTree, *referenceTree);

    baseCases += rules.BaseCases();
    scores += rules.Scores();

    // Clean up tree memory.
    delete queryTree;
  }

  Timer::Stop("range_search/computing_neighbors");

  // Map points back to original indices, if necessary.
  if (tree::TreeTraits<Tree>::RearrangesDataset)
  {
    if (!singleMode && !naive && treeOwner)
    {
      // We must map both query and reference indices.
      neighbors.clear();
      neighbors.resize(querySet.n_cols);
      distances.clear();
      distances.resize(querySet.n_cols);

      for (size_t i = 0; i < distances.size(); i++)
      {
        // Map distances (copy a column).
        const size_t queryMapping = oldFromNewQueries[i];
        distances[queryMapping] = (*distancePtr)[i];

        // Copy each neighbor individually, because we need to map it.
        neighbors[queryMapping].resize(distances[queryMapping].size());
        for (size_t j = 0; j < distances[queryMapping].size(); j++)
        {
          neighbors[queryMapping][j] =
              oldFromNewReferences[(*neighborPtr)[i][j]];
        }
      }

      // Finished with temporary objects.
      delete neighborPtr;
      delete distancePtr;
    }
    else if (!singleMode && !naive)
    {
      // We must map query indices only.
      neighbors.clear();
      neighbors.resize(querySet.n_cols);
      distances.clear();
      distances.resize(querySet.n_cols);

      for (size_t i = 0; i < distances.size(); ++i)
      {
        // Map distances and neighbors (copy a column).
        const size_t queryMapping = oldFromNewQueries[i];
        distances[queryMapping] = (*distancePtr)[i];
        neighbors[queryMapping] = (*neighborPtr)[i];
      }

      // Finished with temporary objects.
      delete neighborPtr;
      delete distancePtr;
    }
    else if (treeOwner)
    {
      // We must map reference indices only.
      neighbors.clear();
      neighbors.resize(querySet.n_cols);

      for (size_t i = 0; i < neighbors.size(); i++)
      {
        neighbors[i].resize((*neighborPtr)[i].size());
        for (size_t j = 0; j < neighbors[i].size(); j++)
          neighbors[i][j] = oldFromNewReferences[(*neighborPtr)[i][j]];
      }

      // Finished with temporary object.
      delete neighborPtr;
    }
  }
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
void RangeSearch<MetricType, MatType, TreeType>::Search(
    Tree* queryTree,
    const math::Range& range,
    std::vector<std::vector<size_t>>& neighbors,
    std::vector<std::vector<double>>& distances)
{
  // If there are no points, there is no search to be done.
  if (referenceSet->n_cols == 0)
    return;

  Timer::Start("range_search/computing_neighbors");

  // Get a reference to the query set.
  const MatType& querySet = queryTree->Dataset();

  // Make sure we are in dual-tree mode.
  if (singleMode || naive)
    throw std::invalid_argument("cannot call RangeSearch::Search() with a "
        "query tree when naive or singleMode are set to true");

  // We won't need to map query indices, but we may need to map reference
  // indices, so a temporary results object may be necessary.
  std::vector<std::vector<size_t>>* neighborPtr = &neighbors;
  if (treeOwner && tree::TreeTraits<Tree>::RearrangesDataset)
    neighborPtr = new std::vector<std::vector<size_t>>;

  // Resize each vector.
  neighborPtr->clear(); // Just in case there was anything in it.
  neighborPtr->resize(querySet.n_cols);
  distances.clear();
  distances.resize(querySet.n_cols);

  // Create the helper object for the traversal.
  typedef RangeSearchRules<MetricType, Tree> RuleType;
  RuleType rules(*referenceSet, queryTree->Dataset(), range, *neighborPtr,
      distances, metric);

  // Create the traverser.
  typename Tree::template DualTreeTraverser<RuleType> traverser(rules);
  traverser.Traverse(*queryTree, *referenceTree);

  Timer::Stop("range_search/computing_neighbors");

  baseCases = rules.BaseCases();
  scores = rules.Scores();

  // Do we need to map indices?
  if (treeOwner && tree::TreeTraits<Tree>::RearrangesDataset)
  {
    // We must map reference indices only.
    neighbors.clear();
    neighbors.resize(querySet.n_cols);

    for (size_t i = 0; i < neighbors.size(); i++)
    {
      neighbors[i].resize((*neighborPtr)[i].size());
      for (size_t j = 0; j < neighbors[i].size(); j++)
        neighbors[i][j] = oldFromNewReferences[(*neighborPtr)[i][j]];
    }

    // Finished with temporary object.
    delete neighborPtr;
  }
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
void RangeSearch<MetricType, MatType, TreeType>::Search(
    const math::Range& range,
    std::vector<std::vector<size_t>>& neighbors,
    std::vector<std::vector<double>>& distances)
{
  // If there are no points, there is no search to be done.
  if (referenceSet->n_cols == 0)
    return;

  Timer::Start("range_search/computing_neighbors");

  // Here, we will use the query set as the reference set.
  std::vector<std::vector<size_t>>* neighborPtr = &neighbors;
  std::vector<std::vector<double>>* distancePtr = &distances;

  if (tree::TreeTraits<Tree>::RearrangesDataset && treeOwner)
  {
    // We will always need to rearrange in this case.
    distancePtr = new std::vector<std::vector<double>>;
    neighborPtr = new std::vector<std::vector<size_t>>;
  }

  // Resize each vector.
  neighborPtr->clear(); // Just in case there was anything in it.
  neighborPtr->resize(referenceSet->n_cols);
  distancePtr->clear();
  distancePtr->resize(referenceSet->n_cols);

  // Create the helper object for the traversal.
  typedef RangeSearchRules<MetricType, Tree> RuleType;
  RuleType rules(*referenceSet, *referenceSet, range, *neighborPtr,
      *distancePtr, metric, true /* don't return the query in the results */);

  if (naive)
  {
    // The naive brute-force solution.
    for (size_t i = 0; i < referenceSet->n_cols; ++i)
      for (size_t j = 0; j < referenceSet->n_cols; ++j)
        rules.BaseCase(i, j);

    baseCases = (referenceSet->n_cols * referenceSet->n_cols);
    scores = 0;
  }
  else if (singleMode)
  {
    // Create the traverser.
    typename Tree::template SingleTreeTraverser<RuleType> traverser(rules);

    // Now have it traverse for each point.
    for (size_t i = 0; i < referenceSet->n_cols; ++i)
      traverser.Traverse(i, *referenceTree);

    baseCases = rules.BaseCases();
    scores = rules.Scores();
  }
  else // Dual-tree recursion.
  {
    // Create the traverser.
    typename Tree::template DualTreeTraverser<RuleType> traverser(rules);

    traverser.Traverse(*referenceTree, *referenceTree);

    baseCases = rules.BaseCases();
    scores = rules.Scores();
  }

  Timer::Stop("range_search/computing_neighbors");

  // Do we need to map the reference indices?
  if (treeOwner && tree::TreeTraits<Tree>::RearrangesDataset)
  {
    neighbors.clear();
    neighbors.resize(referenceSet->n_cols);
    distances.clear();
    distances.resize(referenceSet->n_cols);

    for (size_t i = 0; i < distances.size(); i++)
    {
      // Map distances (copy a column).
      const size_t refMapping = oldFromNewReferences[i];
      distances[refMapping] = (*distancePtr)[i];

      // Copy each neighbor individually, because we need to map it.
      neighbors[refMapping].resize(distances[refMapping].size());
      for (size_t j = 0; j < distances[refMapping].size(); j++)
        neighbors[refMapping][j] = oldFromNewReferences[(*neighborPtr)[i][j]];
    }

    // Finished with temporary objects.
    delete neighborPtr;
    delete distancePtr;
  }
}

template<typename MetricType,
         typename MatType,
         template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
template<typename Archive>
void RangeSearch<MetricType, MatType, TreeType>::Serialize(
    Archive& ar,
    const unsigned int /* version */)
{
  using data::CreateNVP;

  // Serialize preferences for search.
  ar & CreateNVP(naive, "naive");
  ar & CreateNVP(singleMode, "singleMode");

  // Reset base cases and scores if we are loading.
  if (Archive::is_loading::value)
  {
    baseCases = 0;
    scores = 0;
  }

  // If we are doing naive search, we serialize the dataset.  Otherwise we
  // serialize the tree.
  if (naive)
  {
    if (Archive::is_loading::value)
    {
      if (setOwner && referenceSet)
        delete referenceSet;

      setOwner = true;
    }

    ar & CreateNVP(referenceSet, "referenceSet");
    ar & CreateNVP(metric, "metric");

    // If we are loading, set the tree to NULL and clean up memory if
    // necessary.
    if (Archive::is_loading::value)
    {
      if (treeOwner && referenceTree)
        delete referenceTree;

      referenceTree = NULL;
      oldFromNewReferences.clear();
      treeOwner = false;
    }
  }
  else
  {
    // Delete the current reference tree, if necessary and if we are loading.
    if (Archive::is_loading::value)
    {
      if (treeOwner && referenceTree)
        delete referenceTree;

      // After we load the tree, we will own it.
      treeOwner = true;
    }

    ar & CreateNVP(referenceTree, "referenceTree");
    ar & CreateNVP(oldFromNewReferences, "oldFromNewReferences");

    // If we are loading, set the dataset accordingly and clean up memory if
    // necessary.
    if (Archive::is_loading::value)
    {
      if (setOwner && referenceSet)
        delete referenceSet;

      referenceSet = &referenceTree->Dataset();
      metric = referenceTree->Metric(); // Get the metric from the tree.
      setOwner = false;
    }
  }
}

} // namespace range
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/range_search/range_search_main.cpp
/**
 * @file range_search_main.cpp
 * @author Ryan Curtin
 * @author Matthew Amidon
 *
 * Implementation of the RangeSearch executable.  Allows some number of
 * standard options.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/tree/binary_space_tree.hpp>
#include <mlpack/core/tree/cover_tree.hpp>
#include <mlpack/core/tree/rectangle_tree.hpp>
#include <mlpack/core/tree/octree.hpp>

#include "range_search.hpp"
#include "rs_model.hpp"

using namespace std;
using namespace mlpack;
using namespace mlpack::range;
using namespace mlpack::tree;
using namespace mlpack::metric;

// Information about the program itself.
PROGRAM_INFO("Range Search",
    "This program implements range search with a Euclidean distance metric. "
    "For a given query point, a given range, and a given set of reference "
    "points, the program will return all of the reference points with "
    "distance to the query point in the given range.  This is performed for "
    "an entire set of query points.  You may specify a separate set of "
    "reference and query points, or only a reference set -- which is then "
    "used as both the reference and query set.  The given range is taken to "
    "be inclusive (that is, points with a distance exactly equal to the "
    "minimum and maximum of the range are included in the results)."
    "\n\n"
    "For example, the following will calculate the points within the range "
    "[2, 5] of each point in 'input.csv' and store the distances in "
    "'distances.csv' and the neighbors in 'neighbors.csv':"
    "\n\n"
    "$ range_search --min=2 --max=5 --reference_file=input.csv\n"
    "  --distances_file=distances.csv --neighbors_file=neighbors.csv"
    "\n\n"
    "The output files are organized such that line i corresponds to the "
    "points found for query point i.  Because sometimes 0 points may be found "
    "in the given range, lines of the output files may be empty.  The points "
    "are not ordered in any specific manner."
    "\n\n"
    "Because the number of points returned for each query point may differ, "
    "the resultant CSV-like files may not be loadable by many programs.  "
    "However, at this time a better way to store this non-square result is "
    "not known.  As a result, any output files will be written as CSVs in "
    "this manner, regardless of the given extension.");

// Define our input parameters that this program will take.
PARAM_STRING_IN("reference_file", "File containing the reference dataset.",
    "r", "");
PARAM_STRING_OUT("distances_file", "File to output distances into.", "d");
PARAM_STRING_OUT("neighbors_file", "File to output neighbors into.", "n");

// The option exists to load or save models.
PARAM_STRING_IN("input_model_file", "File containing pre-trained range search "
    "model.", "m", "");
PARAM_STRING_OUT("output_model_file", "If specified, the range search model "
    "will be saved to the given file.", "M");

// The user may specify a query file of query points and a range to search for.
PARAM_STRING_IN("query_file", "File containing query points (optional).", "q",
    "");
PARAM_DOUBLE_IN("max", "Upper bound in range (if not specified, +inf will be "
    "used).", "U", 0.0);
PARAM_DOUBLE_IN("min", "Lower bound in range.", "L", 0.0);
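// For instance, a model can be built and saved once, then reused for several
// searches (an illustrative invocation only; the file names are
// placeholders):
//
//   $ range_search -r data.csv -M model.xml
//   $ range_search -m model.xml --min=0 --max=2 \
//         -n neighbors.csv -d distances.csv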
// The user may specify the type of tree to use, and a few parameters for tree
// building.
PARAM_STRING_IN("tree_type", "Type of tree to use: 'kd', 'vp', 'rp', 'max-rp',"
    " 'ub', 'cover', 'r', 'r-star', 'x', 'ball', 'hilbert-r', 'r-plus', "
    "'r-plus-plus', 'oct'.", "t", "kd");
PARAM_INT_IN("leaf_size", "Leaf size for tree building (used for kd-trees, "
    "vp trees, random projection trees, UB trees, R trees, R* trees, X trees, "
    "Hilbert R trees, R+ trees, R++ trees, and octrees).", "l", 20);
PARAM_FLAG("random_basis", "Before tree-building, project the data onto a "
    "random orthogonal basis.", "R");
PARAM_INT_IN("seed", "Random seed (if 0, std::time(NULL) is used).", "s", 0);

// Search settings.
PARAM_FLAG("naive", "If true, O(n^2) naive mode is used for computation.",
    "N");
PARAM_FLAG("single_mode", "If true, single-tree search is used (as opposed to "
    "dual-tree search).", "S");

typedef RangeSearch<> RSType;
typedef tree::CoverTree<metric::EuclideanDistance, RangeSearchStat>
    CoverTreeType;
typedef RangeSearch<metric::EuclideanDistance, arma::mat,
    tree::StandardCoverTree> RSCoverType;

int main(int argc, char *argv[])
{
  // Give CLI the command line parameters the user passed in.
  CLI::ParseCommandLine(argc, argv);

  if (CLI::GetParam<int>("seed") != 0)
    math::RandomSeed((size_t) CLI::GetParam<int>("seed"));
  else
    math::RandomSeed((size_t) std::time(NULL));

  // A user cannot specify both reference data and a model.
  if (CLI::HasParam("reference_file") && CLI::HasParam("input_model_file"))
    Log::Fatal << "Only one of --reference_file (-r) or --input_model_file "
        << "(-m) may be specified!" << endl;

  // A user must specify one of them...
  if (!CLI::HasParam("reference_file") && !CLI::HasParam("input_model_file"))
    Log::Fatal << "No model specified (--input_model_file) and no reference "
        << "data specified (--reference_file)!  One must be provided." << endl;

  if (CLI::HasParam("input_model_file"))
  {
    // Notify the user of parameters that will be ignored.
    if (CLI::HasParam("tree_type"))
      Log::Warn << "--tree_type (-t) will be ignored because "
          << "--input_model_file is specified." << endl;
    if (CLI::HasParam("leaf_size"))
      Log::Warn << "--leaf_size (-l) will be ignored because "
          << "--input_model_file is specified." << endl;
    if (CLI::HasParam("random_basis"))
      Log::Warn << "--random_basis (-R) will be ignored because "
          << "--input_model_file is specified." << endl;
    if (CLI::HasParam("naive"))
      Log::Warn << "--naive (-N) will be ignored because --input_model_file "
          << "is specified." << endl;
  }

  // The user must give something to do...
  if (!CLI::HasParam("min") && !CLI::HasParam("max") &&
      !CLI::HasParam("output_model_file"))
    Log::Warn << "Neither --min, --max, nor --output_model_file are "
        << "specified, so no results from this program will be saved!" << endl;

  // If the user specifies a range but not output files, they should be warned.
  if ((CLI::HasParam("min") || CLI::HasParam("max")) &&
      !(CLI::HasParam("neighbors_file") || CLI::HasParam("distances_file")))
    Log::Warn << "Neither --neighbors_file nor --distances_file is specified, "
        << "so the range search results will not be saved!" << endl;

  // If the user specifies output files but no range, they should be warned.
  if ((CLI::HasParam("neighbors_file") || CLI::HasParam("distances_file")) &&
      !(CLI::HasParam("min") || CLI::HasParam("max")))
    Log::Warn << "An output file for range search is given (--neighbors_file "
        << "or --distances_file), but range search is not being performed "
        << "because neither --min nor --max are specified!  No results will "
        << "be saved." << endl;

  // Sanity check on leaf size.
  int lsInt = CLI::GetParam<int>("leaf_size");
  if (lsInt < 1)
    Log::Fatal << "Invalid leaf size: " << lsInt << ".  Must be greater than 0."
        << endl;
  // We either have to load the reference data, or we have to load the model.
  RSModel rs;
  const bool naive = CLI::HasParam("naive");
  const bool singleMode = CLI::HasParam("single_mode");
  if (CLI::HasParam("reference_file"))
  {
    // Get all the parameters.
    const string referenceFile = CLI::GetParam<string>("reference_file");
    const string treeType = CLI::GetParam<string>("tree_type");
    const bool randomBasis = CLI::HasParam("random_basis");

    RSModel::TreeTypes tree = RSModel::KD_TREE;
    if (treeType == "kd")
      tree = RSModel::KD_TREE;
    else if (treeType == "cover")
      tree = RSModel::COVER_TREE;
    else if (treeType == "r")
      tree = RSModel::R_TREE;
    else if (treeType == "r-star")
      tree = RSModel::R_STAR_TREE;
    else if (treeType == "ball")
      tree = RSModel::BALL_TREE;
    else if (treeType == "x")
      tree = RSModel::X_TREE;
    else if (treeType == "hilbert-r")
      tree = RSModel::HILBERT_R_TREE;
    else if (treeType == "r-plus")
      tree = RSModel::R_PLUS_TREE;
    else if (treeType == "r-plus-plus")
      tree = RSModel::R_PLUS_PLUS_TREE;
    else if (treeType == "vp")
      tree = RSModel::VP_TREE;
    else if (treeType == "rp")
      tree = RSModel::RP_TREE;
    else if (treeType == "max-rp")
      tree = RSModel::MAX_RP_TREE;
    else if (treeType == "ub")
      tree = RSModel::UB_TREE;
    else if (treeType == "oct")
      tree = RSModel::OCTREE;
    else
      Log::Fatal << "Unknown tree type '" << treeType << "'; valid choices "
          << "are 'kd', 'vp', 'rp', 'max-rp', 'ub', 'cover', 'r', 'r-star', "
          << "'x', 'ball', 'hilbert-r', 'r-plus', 'r-plus-plus', and 'oct'."
          << endl;

    rs.TreeType() = tree;
    rs.RandomBasis() = randomBasis;

    arma::mat referenceSet;
    data::Load(referenceFile, referenceSet, true);
    Log::Info << "Loaded reference data from '" << referenceFile << "' ("
        << referenceSet.n_rows << "x" << referenceSet.n_cols << ")." << endl;

    const size_t leafSize = size_t(lsInt);

    rs.BuildModel(std::move(referenceSet), leafSize, naive, singleMode);
  }
  else
  {
    // Load the model from file.
    const string inputModelFile = CLI::GetParam<string>("input_model_file");
    data::Load(inputModelFile, "rs_model", rs, true); // Fatal on failure.

    Log::Info << "Loaded range search model from '" << inputModelFile << "' ("
        << "trained on " << rs.Dataset().n_rows << "x" << rs.Dataset().n_cols
        << " dataset)." << endl;

    // Adjust singleMode and naive if necessary.
    rs.SingleMode() = CLI::HasParam("single_mode");
    rs.Naive() = CLI::HasParam("naive");
    rs.LeafSize() = size_t(lsInt);
  }

  // Perform search, if desired.
  if (CLI::HasParam("min") || CLI::HasParam("max"))
  {
    const string queryFile = CLI::GetParam<string>("query_file");
    const double min = CLI::GetParam<double>("min");
    const double max = CLI::HasParam("max") ?
        CLI::GetParam<double>("max") : DBL_MAX;

    math::Range r(min, max);

    arma::mat queryData;
    if (queryFile != "")
    {
      data::Load(queryFile, queryData, true);
      Log::Info << "Loaded query data from '" << queryFile << "' ("
          << queryData.n_rows << "x" << queryData.n_cols << ")." << endl;
    }

    // Naive mode overrides single mode.
    if (singleMode && naive)
      Log::Warn << "--single_mode ignored because --naive is present." << endl;

    // Now run the search.
    vector<vector<size_t>> neighbors;
    vector<vector<double>> distances;

    if (CLI::HasParam("query_file"))
      rs.Search(std::move(queryData), r, neighbors, distances);
    else
      rs.Search(r, neighbors, distances);

    Log::Info << "Search complete." << endl;

    // Save output, if desired.  We have to do this by hand.
    if (CLI::HasParam("distances_file"))
    {
      const string distancesFile = CLI::GetParam<string>("distances_file");
      fstream distancesStr(distancesFile.c_str(), fstream::out);
      if (!distancesStr.is_open())
      {
        Log::Warn << "Cannot open file '" << distancesFile << "' to save "
            << "output distances to!" << endl;
      }
      else
      {
        // Loop over each point.
        for (size_t i = 0; i < distances.size(); ++i)
        {
          // Store the distances of each point.  We may have 0 points to
          // store, so we must account for that possibility.
          for (size_t j = 0; j + 1 < distances[i].size(); ++j)
            distancesStr << distances[i][j] << ", ";
          if (distances[i].size() > 0)
            distancesStr << distances[i][distances[i].size() - 1];
          distancesStr << endl;
        }

        distancesStr.close();
      }
    }

    if (CLI::HasParam("neighbors_file"))
    {
      const string neighborsFile = CLI::GetParam<string>("neighbors_file");
      fstream neighborsStr(neighborsFile.c_str(), fstream::out);
      if (!neighborsStr.is_open())
      {
        Log::Warn << "Cannot open file '" << neighborsFile << "' to save "
            << "output neighbor indices to!" << endl;
      }
      else
      {
        // Loop over each point.
        for (size_t i = 0; i < neighbors.size(); ++i)
        {
          // Store the neighbors of each point.  We may have 0 points to
          // store, so we must account for that possibility.
          for (size_t j = 0; j + 1 < neighbors[i].size(); ++j)
            neighborsStr << neighbors[i][j] << ", ";
          if (neighbors[i].size() > 0)
            neighborsStr << neighbors[i][neighbors[i].size() - 1];
          neighborsStr << endl;
        }

        neighborsStr.close();
      }
    }
  }

  // Save the output model, if desired.
  if (CLI::HasParam("output_model_file"))
  {
    const string outputModelFile = CLI::GetParam<string>("output_model_file");
    data::Save(outputModelFile, "rs_model", rs);
  }
}
mlpack-2.2.5/src/mlpack/methods/range_search/range_search_rules.hpp
/**
 * @file range_search_rules.hpp
 * @author Ryan Curtin
 *
 * Rules for range search, so that it can be done with arbitrary tree types.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_RULES_HPP
#define MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_RULES_HPP

#include <mlpack/core/tree/traversal_info.hpp>

namespace mlpack {
namespace range {

/**
 * The RangeSearchRules class is a template helper class used by the
 * RangeSearch class when performing range searches.
 *
 * @tparam MetricType The metric to use for computation.
 * @tparam TreeType The tree type to use; must adhere to the TreeType API.
 */
template<typename MetricType, typename TreeType>
class RangeSearchRules
{
 public:
  /**
   * Construct the RangeSearchRules object.  This is usually done from within
   * the RangeSearch class at search time.
   *
   * @param referenceSet Set of reference data.
   * @param querySet Set of query data.
   * @param range Range to search for.
   * @param neighbors Vector to store resulting neighbors in.
   * @param distances Vector to store resulting distances in.
   * @param metric Instantiated metric.
   * @param sameSet If true, the query and reference set are taken to be the
   *      same, and a query point will not return itself in the results.
   */
  RangeSearchRules(const arma::mat& referenceSet,
                   const arma::mat& querySet,
                   const math::Range& range,
                   std::vector<std::vector<size_t>>& neighbors,
                   std::vector<std::vector<double>>& distances,
                   MetricType& metric,
                   const bool sameSet = false);

  /**
   * Compute the base case between the given query point and reference point.
   *
   * @param queryIndex Index of query point.
   * @param referenceIndex Index of reference point.
   */
  double BaseCase(const size_t queryIndex, const size_t referenceIndex);

  /**
   * Get the score for recursion order.  A low score indicates priority for
   * recursion, while DBL_MAX indicates that the node should not be recursed
   * into at all (it should be pruned).
   *
   * @param queryIndex Index of query point.
   * @param referenceNode Candidate node to be recursed into.
   */
  double Score(const size_t queryIndex, TreeType& referenceNode);

  /**
   * Re-evaluate the score for recursion order.  A low score indicates priority
   * for recursion, while DBL_MAX indicates that the node should not be
   * recursed into at all (it should be pruned).  This is used when the score
   * has already been calculated, but another recursion may have modified the
   * bounds for pruning.  So the old score is checked against the new pruning
   * bound.
   *
   * @param queryIndex Index of query point.
   * @param referenceNode Candidate node to be recursed into.
   * @param oldScore Old score produced by Score() (or Rescore()).
   */
  double Rescore(const size_t queryIndex,
                 TreeType& referenceNode,
                 const double oldScore) const;

  /**
   * Get the score for recursion order.  A low score indicates priority for
   * recursion, while DBL_MAX indicates that the node should not be recursed
   * into at all (it should be pruned).
   *
   * @param queryNode Candidate query node to recurse into.
   * @param referenceNode Candidate reference node to recurse into.
   */
  double Score(TreeType& queryNode, TreeType& referenceNode);

  /**
   * Re-evaluate the score for recursion order.  A low score indicates priority
   * for recursion, while DBL_MAX indicates that the node should not be
   * recursed into at all (it should be pruned).  This is used when the score
   * has already been calculated, but another recursion may have modified the
   * bounds for pruning.  So the old score is checked against the new pruning
   * bound.
   *
   * @param queryNode Candidate query node to recurse into.
   * @param referenceNode Candidate reference node to recurse into.
   * @param oldScore Old score produced by Score() (or Rescore()).
   */
  double Rescore(TreeType& queryNode,
                 TreeType& referenceNode,
                 const double oldScore) const;

  typedef typename tree::TraversalInfo<TreeType> TraversalInfoType;

  const TraversalInfoType& TraversalInfo() const { return traversalInfo; }
  TraversalInfoType& TraversalInfo() { return traversalInfo; }

  //! Get the number of base cases.
  size_t BaseCases() const { return baseCases; }
  //! Get the number of scores (that is, calls to RangeDistance()).
  size_t Scores() const { return scores; }

 private:
  //! The reference set.
  const arma::mat& referenceSet;

  //! The query set.
  const arma::mat& querySet;

  //! The range of distances for which we are searching.
  const math::Range& range;

  //! The vector the resultant neighbor indices should be stored in.
  std::vector<std::vector<size_t>>& neighbors;

  //! The vector the resultant neighbor distances should be stored in.
  std::vector<std::vector<double>>& distances;

  //! The instantiated metric.
  MetricType& metric;

  //! If true, the query and reference set are taken to be the same.
  bool sameSet;

  //! The last query index.
  size_t lastQueryIndex;
  //! The last reference index.
  size_t lastReferenceIndex;

  //! Add all the points in the given node to the results for the given query
  //! point.  If the base case has already been calculated, we make sure to
  //! not add that to the results twice.
  void AddResult(const size_t queryIndex, TreeType& referenceNode);

  TraversalInfoType traversalInfo;

  //! The number of base cases.
  size_t baseCases;
  //! The number of scores.
  size_t scores;
};

} // namespace range
} // namespace mlpack

// Include implementation.
#include "range_search_rules_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/methods/range_search/range_search_rules_impl.hpp
/**
 * @file range_search_rules_impl.hpp
 * @author Ryan Curtin
 *
 * Implementation of rules for range search with generic trees.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_RULES_IMPL_HPP
#define MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_RULES_IMPL_HPP

// In case it hasn't been included yet.
#include "range_search_rules.hpp"

namespace mlpack {
namespace range {

template<typename MetricType, typename TreeType>
RangeSearchRules<MetricType, TreeType>::RangeSearchRules(
    const arma::mat& referenceSet,
    const arma::mat& querySet,
    const math::Range& range,
    std::vector<std::vector<size_t>>& neighbors,
    std::vector<std::vector<double>>& distances,
    MetricType& metric,
    const bool sameSet) :
    referenceSet(referenceSet),
    querySet(querySet),
    range(range),
    neighbors(neighbors),
    distances(distances),
    metric(metric),
    sameSet(sameSet),
    lastQueryIndex(querySet.n_cols),
    lastReferenceIndex(referenceSet.n_cols),
    baseCases(0),
    scores(0)
{
  // Nothing to do.
}

//! The base case.  Evaluate the distance between the two points and add to
//! the results if necessary.
template<typename MetricType, typename TreeType>
inline force_inline
double RangeSearchRules<MetricType, TreeType>::BaseCase(
    const size_t queryIndex,
    const size_t referenceIndex)
{
  // If the datasets are the same, don't return a point as its own neighbor.
  if (sameSet && (queryIndex == referenceIndex))
    return 0.0;

  // If we have just performed this base case, don't do it again.
  if ((lastQueryIndex == queryIndex) &&
      (lastReferenceIndex == referenceIndex))
    return 0.0; // No value to return... this shouldn't do anything bad.

  const double distance = metric.Evaluate(querySet.unsafe_col(queryIndex),
      referenceSet.unsafe_col(referenceIndex));
  ++baseCases;

  // Update last indices, so we don't accidentally perform a base case twice.
  lastQueryIndex = queryIndex;
  lastReferenceIndex = referenceIndex;

  if (range.Contains(distance))
  {
    neighbors[queryIndex].push_back(referenceIndex);
    distances[queryIndex].push_back(distance);
  }

  return distance;
}

//! Single-tree scoring function.
template<typename MetricType, typename TreeType>
double RangeSearchRules<MetricType, TreeType>::Score(const size_t queryIndex,
                                                     TreeType& referenceNode)
{
  // We must get the minimum and maximum distances and store them in this
  // object.
  math::Range distances;

  if (tree::TreeTraits<TreeType>::FirstPointIsCentroid)
  {
    // In this situation, we calculate the base case.  So we should check to
    // be sure we haven't already done that.
    double baseCase;
    if (tree::TreeTraits<TreeType>::HasSelfChildren &&
        (referenceNode.Parent() != NULL) &&
        (referenceNode.Point(0) == referenceNode.Parent()->Point(0)))
    {
      // If the tree has self-children and this is a self-child, the base case
      // was already calculated.
      baseCase = referenceNode.Parent()->Stat().LastDistance();
      lastQueryIndex = queryIndex;
      lastReferenceIndex = referenceNode.Point(0);
    }
    else
    {
      // We must calculate the base case by hand.
      baseCase = BaseCase(queryIndex, referenceNode.Point(0));
    }

    // This bound may be loose for trees without ball-shaped bounds.
    distances.Lo() = baseCase - referenceNode.FurthestDescendantDistance();
    distances.Hi() = baseCase + referenceNode.FurthestDescendantDistance();

    // Update last distance calculation.
    referenceNode.Stat().LastDistance() = baseCase;
  }
  else
  {
    distances = referenceNode.RangeDistance(querySet.unsafe_col(queryIndex));
    ++scores;
  }

  // If the ranges do not overlap, prune this node.
  if (!distances.Contains(range))
    return DBL_MAX;

  // In this case, all of the points in the reference node will be part of
  // the results.
  if ((distances.Lo() >= range.Lo()) && (distances.Hi() <= range.Hi()))
  {
    AddResult(queryIndex, referenceNode);
    return DBL_MAX; // We don't need to go any deeper.
  }

  // Otherwise the score doesn't matter.  Recursion order is irrelevant in
  // range search.
  return 0.0;
}

//! Single-tree rescoring function.
template<typename MetricType, typename TreeType>
double RangeSearchRules<MetricType, TreeType>::Rescore(
    const size_t /* queryIndex */,
    TreeType& /* referenceNode */,
    const double oldScore) const
{
  // If it wasn't pruned before, it isn't pruned now.
  return oldScore;
}

//! Dual-tree scoring function.
template<typename MetricType, typename TreeType>
double RangeSearchRules<MetricType, TreeType>::Score(TreeType& queryNode,
                                                     TreeType& referenceNode)
{
  math::Range distances;
  if (tree::TreeTraits<TreeType>::FirstPointIsCentroid)
  {
    // It is possible that the base case has already been calculated.
    double baseCase = 0.0;
    if ((traversalInfo.LastQueryNode() != NULL) &&
        (traversalInfo.LastReferenceNode() != NULL) &&
        (traversalInfo.LastQueryNode()->Point(0) == queryNode.Point(0)) &&
        (traversalInfo.LastReferenceNode()->Point(0) ==
            referenceNode.Point(0)))
    {
      baseCase = traversalInfo.LastBaseCase();

      // Make sure that if BaseCase() is called, we don't duplicate results.
      lastQueryIndex = queryNode.Point(0);
      lastReferenceIndex = referenceNode.Point(0);
    }
    else
    {
      // We must calculate the base case.
      baseCase = BaseCase(queryNode.Point(0), referenceNode.Point(0));
    }

    distances.Lo() = baseCase - queryNode.FurthestDescendantDistance()
        - referenceNode.FurthestDescendantDistance();
    distances.Hi() = baseCase + queryNode.FurthestDescendantDistance()
        + referenceNode.FurthestDescendantDistance();

    // Update the last distances performed for the query and reference node.
    traversalInfo.LastBaseCase() = baseCase;
  }
  else
  {
    // Just perform the calculation.
    distances = referenceNode.RangeDistance(queryNode);
    ++scores;
  }

  // If the ranges do not overlap, prune this node.
  if (!distances.Contains(range))
    return DBL_MAX;

  // In this case, all of the points in the reference node will be part of
  // all the results for each point in the query node.
  if ((distances.Lo() >= range.Lo()) && (distances.Hi() <= range.Hi()))
  {
    for (size_t i = 0; i < queryNode.NumDescendants(); ++i)
      AddResult(queryNode.Descendant(i), referenceNode);
    return DBL_MAX; // We don't need to go any deeper.
  }

  // Otherwise the score doesn't matter.  Recursion order is irrelevant in
  // range search.
  traversalInfo.LastQueryNode() = &queryNode;
  traversalInfo.LastReferenceNode() = &referenceNode;
  return 0.0;
}

//! Dual-tree rescoring function.
template<typename MetricType, typename TreeType>
double RangeSearchRules<MetricType, TreeType>::Rescore(
    TreeType& /* queryNode */,
    TreeType& /* referenceNode */,
    const double oldScore) const
{
  // If it wasn't pruned before, it isn't pruned now.
  return oldScore;
}

//! Add all the points in the given node to the results for the given query
//! point.
template<typename MetricType, typename TreeType>
void RangeSearchRules<MetricType, TreeType>::AddResult(
    const size_t queryIndex,
    TreeType& referenceNode)
{
  // Some types of trees calculate the base case evaluation before Score() is
  // called, so if the base case has already been calculated, then we must
  // avoid adding that point to the results again.
  size_t baseCaseMod = 0;
  if (tree::TreeTraits<TreeType>::FirstPointIsCentroid &&
      (queryIndex == lastQueryIndex) &&
      (referenceNode.Point(0) == lastReferenceIndex))
  {
    baseCaseMod = 1;
  }

  // Resize distances and neighbors vectors appropriately.  We have to use
  // reserve() and not resize(), because we don't know if we will encounter
  // the case where the datasets and points are the same (and we skip in that
  // case).
  const size_t oldSize = neighbors[queryIndex].size();
  neighbors[queryIndex].reserve(oldSize + referenceNode.NumDescendants() -
      baseCaseMod);
  distances[queryIndex].reserve(oldSize + referenceNode.NumDescendants() -
      baseCaseMod);

  for (size_t i = baseCaseMod; i < referenceNode.NumDescendants(); ++i)
  {
    if ((&referenceSet == &querySet) &&
        (queryIndex == referenceNode.Descendant(i)))
      continue;

    const double distance = metric.Evaluate(querySet.unsafe_col(queryIndex),
        referenceNode.Dataset().unsafe_col(referenceNode.Descendant(i)));
    neighbors[queryIndex].push_back(referenceNode.Descendant(i));
    distances[queryIndex].push_back(distance);
  }
}

} // namespace range
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/range_search/range_search_stat.hpp
/**
 * @file range_search_stat.hpp
 * @author Ryan Curtin
 *
 * Statistic class for RangeSearch, which just holds the last visited node and
 * the corresponding base case result.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_STAT_HPP
#define MLPACK_METHODS_RANGE_SEARCH_RANGE_SEARCH_STAT_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace range {

/**
 * Statistic class for RangeSearch, to be set to the StatisticType of the tree
 * type that range search is being performed with.  This class just holds the
 * last visited node and the corresponding base case result.
 */
class RangeSearchStat
{
 public:
  /**
   * Initialize the statistic.
   */
  RangeSearchStat() : lastDistance(0.0) { }

  /**
   * Initialize the statistic given a tree node that this statistic belongs
   * to.  In this case, we ignore the node.
   */
  template<typename TreeType>
  RangeSearchStat(TreeType& /* node */) :
      lastDistance(0.0) { }

  //! Get the last distance evaluation.
  double LastDistance() const { return lastDistance; }
  //! Modify the last distance evaluation.
  double& LastDistance() { return lastDistance; }

  //! Serialize the statistic.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    ar & data::CreateNVP(lastDistance, "lastDistance");
  }

 private:
  //! The last distance evaluation.
  double lastDistance;
};

} // namespace range
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/range_search/rs_model.hpp
/**
 * @file rs_model.hpp
 * @author Ryan Curtin
 *
 * This is a model for range search.  It is useful in that it provides an easy
 * way to serialize a model, abstracts away the different types of trees, and
 * also reflects the RangeSearch API and automatically directs to the right
 * tree types.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
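 *
 * A rough usage sketch (illustrative only; 'dataset' and 'queries' stand in
 * for user-provided arma::mat objects):
 *
 * @code
 * RSModel model(RSModel::KD_TREE, false); // No random basis.
 * model.BuildModel(std::move(dataset), 20, false, false); // leafSize = 20;
 *                                                         // not naive, not
 *                                                         // single-tree.
 * std::vector<std::vector<size_t>> neighbors;
 * std::vector<std::vector<double>> distances;
 * model.Search(std::move(queries), math::Range(0.0, 2.0), neighbors,
 *     distances);
 * @endcode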
 */
#ifndef MLPACK_METHODS_RANGE_SEARCH_RS_MODEL_HPP
#define MLPACK_METHODS_RANGE_SEARCH_RS_MODEL_HPP

#include <mlpack/core/tree/binary_space_tree.hpp>
#include <mlpack/core/tree/cover_tree.hpp>
#include <mlpack/core/tree/rectangle_tree.hpp>
#include <mlpack/core/tree/octree.hpp>
#include <boost/variant.hpp>

#include "range_search.hpp"

namespace mlpack {
namespace range {

/**
 * Alias template for RangeSearch.
 */
template<template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
using RSType = RangeSearch<metric::EuclideanDistance, arma::mat, TreeType>;

struct RSModelName
{
  static const std::string Name() { return "range_search_model"; }
};

/**
 * MonoSearchVisitor executes a monochromatic range search on the given
 * RSType.  The search is performed on the reference set itself; there is no
 * separate query set.
 */
class MonoSearchVisitor : public boost::static_visitor<void>
{
 private:
  //! The range to search for.
  const math::Range& range;
  //! Output neighbors.
  std::vector<std::vector<size_t>>& neighbors;
  //! Output distances.
  std::vector<std::vector<double>>& distances;

 public:
  //! Perform monochromatic search with the given RangeSearch object.
  template<typename RSType>
  void operator()(RSType* rs) const;

  //! Construct the MonoSearchVisitor with the given parameters.
  MonoSearchVisitor(const math::Range& range,
                    std::vector<std::vector<size_t>>& neighbors,
                    std::vector<std::vector<double>>& distances) :
      range(range),
      neighbors(neighbors),
      distances(distances)
  { };
};

/**
 * BiSearchVisitor executes a bichromatic range search on the given RSType.
 * We use template specialization to differentiate those tree types that
 * accept leafSize as a parameter.  In these cases, before doing range search,
 * a query tree with the proper leafSize is built from the querySet.
 */
class BiSearchVisitor : public boost::static_visitor<void>
{
 private:
  //! The query set for the bichromatic search.
  const arma::mat& querySet;
  //! Range to search neighbors for.
  const math::Range& range;
  //! The result vector for neighbors.
  std::vector<std::vector<size_t>>& neighbors;
  //! The result vector for distances.
  std::vector<std::vector<double>>& distances;
  //! The number of points in a leaf (for BinarySpaceTrees).
  const size_t leafSize;

  //! Bichromatic range search on the given RSType considering the leafSize.
  template<typename RSType>
  void SearchLeaf(RSType* rs) const;

 public:
  //! Alias template necessary for the Visual C++ compiler.
  template<template<typename TreeMetricType,
                    typename TreeStatType,
                    typename TreeMatType> class TreeType>
  using RSTypeT = RSType<TreeType>;

  //! Default bichromatic range search on the given RSType instance.
  template<template<typename TreeMetricType,
                    typename TreeStatType,
                    typename TreeMatType> class TreeType>
  void operator()(RSTypeT<TreeType>* rs) const;

  //! Bichromatic range search on the given RSType specialized for KDTrees.
  void operator()(RSTypeT<tree::KDTree>* rs) const;

  //! Bichromatic range search on the given RSType specialized for BallTrees.
  void operator()(RSTypeT<tree::BallTree>* rs) const;

  //! Bichromatic range search specialized for octrees.
  void operator()(RSTypeT<tree::Octree>* rs) const;

  //! Construct the BiSearchVisitor.
  BiSearchVisitor(const arma::mat& querySet,
                  const math::Range& range,
                  std::vector<std::vector<size_t>>& neighbors,
                  std::vector<std::vector<double>>& distances,
                  const size_t leafSize);
};

/**
 * TrainVisitor sets the reference set to a new reference set on the given
 * RSType.  We use template specialization to differentiate those tree types
 * that accept leafSize as a parameter.  In these cases, a reference tree with
 * the proper leafSize is built from the referenceSet.
 */
class TrainVisitor : public boost::static_visitor<void>
{
 private:
  //! The reference set to use for training.
  arma::mat&& referenceSet;
  //! The leaf size, used only by BinarySpaceTree.
  size_t leafSize;

  //! Train on the given RSType considering the leafSize.
  template<typename RSType>
  void TrainLeaf(RSType* rs) const;

 public:
  //! Alias template necessary for the Visual C++ compiler.
  template<template<typename TreeMetricType,
                    typename TreeStatType,
                    typename TreeMatType> class TreeType>
  using RSTypeT = RSType<TreeType>;

  //! Default Train on the given RSType instance.
  template<template<typename TreeMetricType,
                    typename TreeStatType,
                    typename TreeMatType> class TreeType>
  void operator()(RSTypeT<TreeType>* rs) const;

  //! Train on the given RSType specialized for KDTrees.
  void operator()(RSTypeT<tree::KDTree>* rs) const;

  //! Train on the given RSType specialized for BallTrees.
  void operator()(RSTypeT<tree::BallTree>* rs) const;

  //! Train specialized for octrees.
  void operator()(RSTypeT<tree::Octree>* rs) const;

  //! Construct the TrainVisitor object with the given reference set and leaf
  //! size.
  TrainVisitor(arma::mat&& referenceSet, const size_t leafSize);
};

/**
 * ReferenceSetVisitor exposes the referenceSet of the given RSType.
 */
class ReferenceSetVisitor : public boost::static_visitor<const arma::mat&>
{
 public:
  //! Return the reference set.
  template<typename RSType>
  const arma::mat& operator()(RSType* rs) const;
};

/**
 * DeleteVisitor deletes the given RSType instance.
 */
class DeleteVisitor : public boost::static_visitor<void>
{
 public:
  //! Delete the RSType object.
  template<typename RSType>
  void operator()(RSType* rs) const;
};

/**
 * Exposes the Serialize() method of the given RSType.
 */
template<typename Archive>
class SerializeVisitor : public boost::static_visitor<void>
{
 private:
  //! Archive to serialize to.
  Archive& ar;
  //! Name of the model to serialize.
  const std::string& name;

 public:
  //! Serialize the given model.
  template<typename RSType>
  void operator()(RSType* rs) const;

  //! Construct the SerializeVisitor with the given archive and name.
  SerializeVisitor(Archive& ar, const std::string& name);
};

/**
 * SingleModeVisitor exposes the SingleMode() method of the given RSType.
 */
class SingleModeVisitor : public boost::static_visitor<bool&>
{
 public:
  /**
   * Get a reference to the singleMode parameter of the given RangeSearch
   * object.
   */
  template<typename RSType>
  bool& operator()(RSType* rs) const;
};

/**
 * NaiveVisitor exposes the Naive() method of the given RSType.
 */
class NaiveVisitor : public boost::static_visitor<bool&>
{
 public:
  /**
   * Get a reference to the naive parameter of the given RangeSearch object.
   */
  template<typename RSType>
  bool& operator()(RSType* rs) const;
};

/**
 * The RSModel class provides an abstraction for the RangeSearch class,
 * abstracting away the type of tree that is used.
 */
class RSModel
{
 public:
  enum TreeTypes
  {
    KD_TREE,
    COVER_TREE,
    R_TREE,
    R_STAR_TREE,
    BALL_TREE,
    X_TREE,
    HILBERT_R_TREE,
    R_PLUS_TREE,
    R_PLUS_PLUS_TREE,
    VP_TREE,
    RP_TREE,
    MAX_RP_TREE,
    UB_TREE,
    OCTREE
  };

 private:
  //! The type of tree being used.
  TreeTypes treeType;
  //! The leaf size of the tree being used (not applicable to all tree types).
  size_t leafSize;

  //! If true, we randomly project the data into a new basis before search.
  bool randomBasis;
  //! Random projection matrix.
  arma::mat q;

  /**
   * rSearch holds an instance of the RangeSearch class for the current
   * treeType.  It is initialized every time BuildModel() is executed.
   * We access the contained value through the visitor classes defined above.
   */
  boost::variant<RSType<tree::KDTree>*,
                 RSType<tree::StandardCoverTree>*,
                 RSType<tree::RTree>*,
                 RSType<tree::RStarTree>*,
                 RSType<tree::BallTree>*,
                 RSType<tree::XTree>*,
                 RSType<tree::HilbertRTree>*,
                 RSType<tree::RPlusTree>*,
                 RSType<tree::RPlusPlusTree>*,
                 RSType<tree::VPTree>*,
                 RSType<tree::RPTree>*,
                 RSType<tree::MaxRPTree>*,
                 RSType<tree::UBTree>*,
                 RSType<tree::Octree>*> rSearch;

 public:
  /**
   * Initialize the RSModel with the given type and whether or not a random
   * basis should be used.
   *
   * @param treeType Type of tree to use.
   * @param randomBasis Whether or not to use a random basis.
   */
  RSModel(const TreeTypes treeType = TreeTypes::KD_TREE,
          const bool randomBasis = false);

  //! Copy constructor.
  RSModel(const RSModel& other);

  //! Move constructor.
  RSModel(RSModel&& other);

  //! Copy operator.
  RSModel& operator=(const RSModel& other);

  //! Move operator.
  RSModel& operator=(RSModel&& other);

  /**
   * Clean memory, if necessary.
   */
  ~RSModel();

  //! Serialize the range search model.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */);

  //! Expose the dataset.
  const arma::mat& Dataset() const;

  //! Get whether the model is in single-tree search mode.
  bool SingleMode() const;
  //! Modify whether the model is in single-tree search mode.
  bool& SingleMode();

  //! Get whether the model is in naive search mode.
  bool Naive() const;
  //! Modify whether the model is in naive search mode.
  bool& Naive();

  //! Get the leaf size (applicable to everything but the cover tree).
  size_t LeafSize() const { return leafSize; }
  //! Modify the leaf size (applicable to everything but the cover tree).
  size_t& LeafSize() { return leafSize; }

  //! Get the type of tree.
  TreeTypes TreeType() const { return treeType; }
  //! Modify the type of tree (don't do this after the model has been built).
  TreeTypes& TreeType() { return treeType; }

  //! Get whether a random basis is used.
  bool RandomBasis() const { return randomBasis; }
  //! Modify whether a random basis is used (don't do this after the model has
  //! been built).
  bool& RandomBasis() { return randomBasis; }

  /**
   * Build the reference tree on the given dataset with the given parameters.
   * This takes possession of the reference set to avoid a copy.
   *
   * @param referenceSet Set of reference points.
   * @param leafSize Leaf size of tree (ignored for the cover tree).
   * @param naive Whether naive search should be used.
   * @param singleMode Whether single-tree search should be used.
   */
  void BuildModel(arma::mat&& referenceSet,
                  const size_t leafSize,
                  const bool naive,
                  const bool singleMode);

  /**
   * Perform range search.  This takes possession of the query set, so the
   * query set will not be usable after the search.  For more information on
   * the output format, see RangeSearch<>::Search().
   *
   * @param querySet Set of query points.
   * @param range Range to search for.
   * @param neighbors Output: neighbors falling within the desired range.
   * @param distances Output: distances of neighbors.
   */
  void Search(arma::mat&& querySet,
              const math::Range& range,
              std::vector<std::vector<size_t>>& neighbors,
              std::vector<std::vector<double>>& distances);

  /**
   * Perform monochromatic range search, with the reference set as the query
   * set.  For more information on the output format, see
   * RangeSearch<>::Search().
   *
   * @param range Range to search for.
   * @param neighbors Output: neighbors falling within the desired range.
   * @param distances Output: distances of neighbors.
   */
  void Search(const math::Range& range,
              std::vector<std::vector<size_t>>& neighbors,
              std::vector<std::vector<double>>& distances);

 private:
  /**
   * Return a string representing the name of the tree.  This is used for
   * logging output.
   */
  std::string TreeName() const;

  /**
   * Clean up memory.
   */
  void CleanMemory();
};

} // namespace range
} // namespace mlpack

// Include implementation (of Serialize() and inline functions).
#include "rs_model_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/methods/range_search/rs_model_impl.hpp
/**
 * @file rs_model_impl.hpp
 * @author Ryan Curtin
 *
 * Implementation of Serialize() and inline functions for RSModel.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_RANGE_SEARCH_RS_MODEL_IMPL_HPP
#define MLPACK_METHODS_RANGE_SEARCH_RS_MODEL_IMPL_HPP

// In case it hasn't been included yet.
#include "rs_model.hpp"

#include <mlpack/core/math/random_basis.hpp>

namespace mlpack {
namespace range {
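// A note on the pattern used throughout this file: every RSModel operation is
// dispatched to the active tree type through boost::apply_visitor.  A minimal
// sketch of that mechanism (illustrative only; 'ExampleVisitor' is not part
// of mlpack):
//
//   struct ExampleVisitor : public boost::static_visitor<void>
//   {
//     template<typename RSType>
//     void operator()(RSType* rs) const { /* act on the typed pointer */ }
//   };
//
//   boost::apply_visitor(ExampleVisitor(), rSearch);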
/**
 * Initialize the RSModel with the given tree type and whether or not a random
 * basis should be used.
 */
inline RSModel::RSModel(TreeTypes treeType, bool randomBasis) :
    treeType(treeType),
    leafSize(0),
    randomBasis(randomBasis)
{
  // Nothing to do.
}

// Copy constructor.
inline RSModel::RSModel(const RSModel& other) :
    treeType(other.treeType),
    leafSize(other.leafSize),
    randomBasis(other.randomBasis),
    rSearch(other.rSearch)
{
  // Nothing to do.
}

// Move constructor.
inline RSModel::RSModel(RSModel&& other) :
    treeType(other.treeType),
    leafSize(other.leafSize),
    randomBasis(other.randomBasis),
    rSearch(other.rSearch)
{
  // Reset the other model.
  other.treeType = TreeTypes::KD_TREE;
  other.leafSize = 0;
  other.randomBasis = false;
  other.rSearch = decltype(other.rSearch)();
}

// Copy operator.
inline RSModel& RSModel::operator=(const RSModel& other)
{
  boost::apply_visitor(DeleteVisitor(), rSearch);

  treeType = other.treeType;
  leafSize = other.leafSize;
  randomBasis = other.randomBasis;
  rSearch = other.rSearch;

  return *this;
}

// Move operator.
inline RSModel& RSModel::operator=(RSModel&& other)
{
  boost::apply_visitor(DeleteVisitor(), rSearch);

  treeType = other.treeType;
  leafSize = other.leafSize;
  randomBasis = other.randomBasis;
  rSearch = other.rSearch;

  // Reset the other model.
  other.treeType = TreeTypes::KD_TREE;
  other.leafSize = 0;
  other.randomBasis = false;
  other.rSearch = decltype(other.rSearch)();

  return *this;
}

// Clean memory, if necessary.
inline RSModel::~RSModel()
{
  boost::apply_visitor(DeleteVisitor(), rSearch);
}

inline void RSModel::BuildModel(arma::mat&& referenceSet,
                                const size_t leafSize,
                                const bool naive,
                                const bool singleMode)
{
  // Initialize random basis if necessary.
  if (randomBasis)
  {
    Log::Info << "Creating random basis..." << std::endl;
    math::RandomBasis(q, referenceSet.n_rows);
  }

  this->leafSize = leafSize;

  // Clean memory, if necessary.
  boost::apply_visitor(DeleteVisitor(), rSearch);

  // Do we need to modify the reference set?
  if (randomBasis)
    referenceSet = q * referenceSet;

  if (!naive)
  {
    Timer::Start("tree_building");
    Log::Info << "Building reference tree..." << std::endl;
  }

  switch (treeType)
  {
    case KD_TREE:
      rSearch = new RSType<tree::KDTree>(naive, singleMode);
      break;
    case COVER_TREE:
      rSearch = new RSType<tree::StandardCoverTree>(naive, singleMode);
      break;
    case R_TREE:
      rSearch = new RSType<tree::RTree>(naive, singleMode);
      break;
    case R_STAR_TREE:
      rSearch = new RSType<tree::RStarTree>(naive, singleMode);
      break;
    case BALL_TREE:
      rSearch = new RSType<tree::BallTree>(naive, singleMode);
      break;
    case X_TREE:
      rSearch = new RSType<tree::XTree>(naive, singleMode);
      break;
    case HILBERT_R_TREE:
      rSearch = new RSType<tree::HilbertRTree>(naive, singleMode);
      break;
    case R_PLUS_TREE:
      rSearch = new RSType<tree::RPlusTree>(naive, singleMode);
      break;
    case R_PLUS_PLUS_TREE:
      rSearch = new RSType<tree::RPlusPlusTree>(naive, singleMode);
      break;
    case VP_TREE:
      rSearch = new RSType<tree::VPTree>(naive, singleMode);
      break;
    case RP_TREE:
      rSearch = new RSType<tree::RPTree>(naive, singleMode);
      break;
    case MAX_RP_TREE:
      rSearch = new RSType<tree::MaxRPTree>(naive, singleMode);
      break;
    case UB_TREE:
      rSearch = new RSType<tree::UBTree>(naive, singleMode);
      break;
    case OCTREE:
      rSearch = new RSType<tree::Octree>(naive, singleMode);
      break;
  }

  TrainVisitor tn(std::move(referenceSet), leafSize);
  boost::apply_visitor(tn, rSearch);

  if (!naive)
  {
    Timer::Stop("tree_building");
    Log::Info << "Tree built." << std::endl;
  }
}

// Perform range search.
inline void RSModel::Search(arma::mat&& querySet,
                            const math::Range& range,
                            std::vector<std::vector<size_t>>& neighbors,
                            std::vector<std::vector<double>>& distances)
{
  // We may need to map the query set randomly.
  if (randomBasis)
    querySet = q * querySet;

  Log::Info << "Search for points in the range [" << range.Lo() << ", "
      << range.Hi() << "] with ";
  if (!Naive() && !SingleMode())
    Log::Info << "dual-tree " << TreeName() << " search..." << std::endl;
  else if (!Naive())
    Log::Info << "single-tree " << TreeName() << " search..." << std::endl;
  else
    Log::Info << "brute-force (naive) search..." << std::endl;
<< std::endl; BiSearchVisitor search(querySet, range, neighbors, distances, leafSize); boost::apply_visitor(search, rSearch); } // Perform range search (monochromatic case). inline void RSModel::Search(const math::Range& range, std::vector>& neighbors, std::vector>& distances) { Log::Info << "Search for points in the range [" << range.Lo() << ", " << range.Hi() << "] with "; if (!Naive() && !SingleMode()) Log::Info << "dual-tree " << TreeName() << " search..." << std::endl; else if (!Naive()) Log::Info << "single-tree " << TreeName() << " search..." << std::endl; else Log::Info << "brute-force (naive) search..." << std::endl; MonoSearchVisitor search(range, neighbors, distances); boost::apply_visitor(search, rSearch); } // Get the name of the tree type. inline std::string RSModel::TreeName() const { switch (treeType) { case KD_TREE: return "kd-tree"; case COVER_TREE: return "cover tree"; case R_TREE: return "R tree"; case R_STAR_TREE: return "R* tree"; case BALL_TREE: return "ball tree"; case X_TREE: return "X tree"; case HILBERT_R_TREE: return "Hilbert R tree"; case R_PLUS_TREE: return "R+ tree"; case R_PLUS_PLUS_TREE: return "R++ tree"; case VP_TREE: return "vantage point tree"; case RP_TREE: return "random projection tree (mean split)"; case MAX_RP_TREE: return "random projection tree (max split)"; case UB_TREE: return "UB tree"; case OCTREE: return "octree"; default: return "unknown tree"; } } // Clean memory. inline void RSModel::CleanMemory() { boost::apply_visitor(DeleteVisitor(), rSearch); } //! Monochromatic range search on the given RSType instance. template void MonoSearchVisitor::operator()(RSType* rs) const { if (rs) return rs->Search(range, neighbors, distances); throw std::runtime_error("no range search model initialized"); } //! Save parameters for bichromatic range search. BiSearchVisitor::BiSearchVisitor(const arma::mat& querySet, const math::Range& range, std::vector>& neighbors, std::vector>& distances, const size_t leafSize): querySet(querySet), range(range), neighbors(neighbors), distances(distances), leafSize(leafSize) {} //! Default Bichromatic range search on the given RSType instance. template class TreeType> void BiSearchVisitor::operator()(RSTypeT* rs) const { if (rs) return rs->Search(querySet, range, neighbors, distances); throw std::runtime_error("no range search model initialized"); } //! Bichromatic range search on the given RSType specialized for KDTrees. void BiSearchVisitor::operator()(RSTypeT* rs) const { if (rs) return SearchLeaf(rs); throw std::runtime_error("no range search model initialized"); } //! Bichromatic range search on the given RSType specialized for BallTrees. void BiSearchVisitor::operator()(RSTypeT* rs) const { if (rs) return SearchLeaf(rs); throw std::runtime_error("no range search model initialized"); } //! Bichromatic range search specialized for Ocrees. void BiSearchVisitor::operator()(RSTypeT* rs) const { if (rs) return SearchLeaf(rs); throw std::runtime_error("no range search model initialized"); } //! Bichromatic range search on the given RSType considering the leafSize. template void BiSearchVisitor::SearchLeaf(RSType* rs) const { if (!rs->Naive() && !rs->SingleMode()) { // Build a second tree and search. Timer::Start("tree_building"); Log::Info << "Building query tree..." << std::endl; std::vector oldFromNewQueries; typename RSType::Tree queryTree(std::move(querySet), oldFromNewQueries, leafSize); Log::Info << "Tree built." 
<< std::endl; Timer::Stop("tree_building"); std::vector> neighborsOut; std::vector> distancesOut; rs->Search(&queryTree, range, neighborsOut, distancesOut); // Remap the query points. neighbors.resize(queryTree.Dataset().n_cols); distances.resize(queryTree.Dataset().n_cols); for (size_t i = 0; i < queryTree.Dataset().n_cols; ++i) { neighbors[oldFromNewQueries[i]] = neighborsOut[i]; distances[oldFromNewQueries[i]] = distancesOut[i]; } } else rs->Search(querySet, range, neighbors, distances); } //! Save parameters for Train. TrainVisitor::TrainVisitor(arma::mat&& referenceSet, const size_t leafSize) : referenceSet(std::move(referenceSet)), leafSize(leafSize) {} //! Default Train on the given RSType instance. template class TreeType> void TrainVisitor::operator()(RSTypeT* rs) const { if (rs) return rs->Train(std::move(referenceSet)); throw std::runtime_error("no range search model initialized"); } //! Train on the given RSType specialized for KDTrees. void TrainVisitor::operator()(RSTypeT* rs) const { if (rs) return TrainLeaf(rs); throw std::runtime_error("no range search model initialized"); } //! Train on the given RSType specialized for BallTrees. void TrainVisitor::operator()(RSTypeT* rs) const { if (rs) return TrainLeaf(rs); throw std::runtime_error("no range search model initialized"); } //! Train specialized for Octrees. void TrainVisitor::operator()(RSTypeT* rs) const { if (rs) return TrainLeaf(rs); throw std::runtime_error("no range search model initialized"); } //! Train on the given RSType considering the leafSize. template void TrainVisitor::TrainLeaf(RSType* rs) const { if (rs->Naive()) rs->Train(std::move(referenceSet)); else { std::vector oldFromNewReferences; typename RSType::Tree* tree = new typename RSType::Tree(std::move(referenceSet), oldFromNewReferences, leafSize); rs->Train(tree); // Give the model ownership of the tree and the mappings. rs->treeOwner = true; rs->oldFromNewReferences = std::move(oldFromNewReferences); } } //! Expose the referenceSet of the given RSType. template const arma::mat& ReferenceSetVisitor::operator()(RSType* rs) const { if (rs) return rs->ReferenceSet(); throw std::runtime_error("no range search model initialized"); } //! For cleaning memory template void DeleteVisitor::operator()(RSType* rs) const { if (rs) delete rs; } //! Save parameters for serializing template SerializeVisitor::SerializeVisitor(Archive& ar, const std::string& name) : ar(ar), name(name) {} //! Serializes the given RSType instance. template template void SerializeVisitor::operator()(RSType* rs) const { ar & data::CreateNVP(rs, name); } //! Return whether single mode enabled template bool& SingleModeVisitor::operator()(RSType* rs) const { if (rs) return rs->SingleMode(); throw std::runtime_error("no range search model initialized"); } //! Exposes Naive() function of given RSType template bool& NaiveVisitor::operator()(RSType* rs) const { if (rs) return rs->Naive(); throw std::runtime_error("no range search model initialized"); } // Serialize the model. template void RSModel::Serialize(Archive& ar, const unsigned int /* version */) { using data::CreateNVP; ar & CreateNVP(treeType, "treeType"); ar & CreateNVP(randomBasis, "randomBasis"); ar & CreateNVP(q, "q"); // This should never happen, but just in case... if (Archive::is_loading::value) boost::apply_visitor(DeleteVisitor(), rSearch); // We'll only need to serialize one of the model objects, based on the type. 
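// (treeType, serialized above, is what selects the live alternative of the
// boost::variant on load; RSModelName below only supplies a stable archive
// tag for whichever RangeSearch object that is.)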
const std::string& name = RSModelName::Name(); SerializeVisitor s(ar, name); boost::apply_visitor(s, rSearch); } inline const arma::mat& RSModel::Dataset() const { return boost::apply_visitor(ReferenceSetVisitor(), rSearch); } inline bool RSModel::SingleMode() const { return boost::apply_visitor(SingleModeVisitor(), rSearch); } inline bool& RSModel::SingleMode() { return boost::apply_visitor(SingleModeVisitor(), rSearch); } inline bool RSModel::Naive() const { return boost::apply_visitor(NaiveVisitor(), rSearch); } inline bool& RSModel::Naive() { return boost::apply_visitor(NaiveVisitor(), rSearch); } } // namespace range } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/rann/000077500000000000000000000000001315013601400174205ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/rann/CMakeLists.txt000066400000000000000000000026061315013601400221640ustar00rootroot00000000000000# Define the files we need to compile # Anything not in this list will not be compiled into the output library # Do not include test programs here set(SOURCES # rank-approximate search files ra_search.hpp ra_search_impl.hpp # rank-approximate search rules ra_search_rules.hpp ra_search_rules_impl.hpp # query statistic ra_query_stat.hpp # typedefs ra_typedef.hpp # utilities ra_util.hpp ra_util.cpp # model ra_model.hpp ra_model_impl.hpp ) # add directory name to sources set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at the parent scope) set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) # The code to compute the rank-approximate neighbor for the given query and # reference sets. add_cli_executable(krann) if (BUILD_CLI_EXECUTABLES) # Compatibility: retain mlpack_allkrann until mlpack 3.0.0. get_property(krann_loc TARGET mlpack_krann PROPERTY LOCATION) get_filename_component(krann_ext ${krann_loc} EXT) add_custom_command(TARGET mlpack_krann POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy $ $/mlpack_allkrann${krann_ext} ) install(PROGRAMS $/mlpack_allkrann${krann_ext} DESTINATION bin) endif () mlpack-2.2.5/src/mlpack/methods/rann/krann_main.cpp000066400000000000000000000274521315013601400222530ustar00rootroot00000000000000/** * @file allkrann_main.cpp * @author Parikshit Ram * * Implementation of the kRANN executable. Allows some number of standard * options. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "ra_search.hpp" #include "ra_model.hpp" #include using namespace std; using namespace mlpack; using namespace mlpack::neighbor; using namespace mlpack::tree; using namespace mlpack::metric; // Information about the program itself. PROGRAM_INFO("K-Rank-Approximate-Nearest-Neighbors (kRANN)", "This program will calculate the k rank-approximate-nearest-neighbors of a " "set of points. You may specify a separate set of reference points and " "query points, or just a reference set which will be used as both the " "reference and query set. You must specify the rank approximation (in \%) " "(and optionally the success probability)." 
"\n\n" "For example, the following will return 5 neighbors from the top 0.1\% of " "the data (with probability 0.95) for each point in 'input.csv' and store " "the distances in 'distances.csv' and the neighbors in the file " "'neighbors.csv':" "\n\n" "$ allkrann -k 5 -r input.csv -d distances.csv -n neighbors.csv --tau 0.1" "\n\n" "Note that tau must be set such that the number of points in the " "corresponding percentile of the data is greater than k. Thus, if we " "choose tau = 0.1 with a dataset of 1000 points and k = 5, then we are " "attempting to choose 5 nearest neighbors out of the closest 1 point -- " "this is invalid and the program will terminate with an error message." "\n\n" "The output files are organized such that row i and column j in the " "neighbors output file corresponds to the index of the point in the " "reference set which is the i'th nearest neighbor from the point in the " "query set with index j. Row i and column j in the distances output file " "corresponds to the distance between those two points."); // Define our input parameters that this program will take. PARAM_STRING_IN("reference_file", "File containing the reference dataset.", "r", ""); PARAM_STRING_OUT("distances_file", "File to output distances into.", "d"); PARAM_STRING_OUT("neighbors_file", "File to output neighbors into.", "n"); // The option exists to load or save models. PARAM_STRING_IN("input_model_file", "File containing pre-trained kNN model.", "m", ""); PARAM_STRING_OUT("output_model_file", "If specified, the kNN model will be " "saved to the given file.", "M"); // The user may specify a query file of query points and a number of nearest // neighbors to search for. PARAM_STRING_IN("query_file", "File containing query points (optional).", "q", ""); PARAM_INT_IN("k", "Number of nearest neighbors to find.", "k", 0); // The user may specify the type of tree to use, and a few parameters for tree // building. PARAM_STRING_IN("tree_type", "Type of tree to use: 'kd', 'ub', 'cover', 'r', " "'x', 'r-star', 'hilbert-r', 'r-plus', 'r-plus-plus', 'oct'.", "t", "kd"); PARAM_INT_IN("leaf_size", "Leaf size for tree building (used for kd-trees, " "UB trees, R trees, R* trees, X trees, Hilbert R trees, R+ trees, " "R++ trees, and octrees).", "l", 20); PARAM_FLAG("random_basis", "Before tree-building, project the data onto a " "random orthogonal basis.", "R"); PARAM_INT_IN("seed", "Random seed (if 0, std::time(NULL) is used).", "s", 0); // Search options. PARAM_DOUBLE_IN("tau", "The allowed rank-error in terms of the percentile of " "the data.", "T", 5); PARAM_DOUBLE_IN("alpha", "The desired success probability.", "a", 0.95); PARAM_FLAG("naive", "If true, sampling will be done without using a tree.", "N"); PARAM_FLAG("single_mode", "If true, single-tree search is used (as opposed to " "dual-tree search.", "s"); PARAM_FLAG("sample_at_leaves", "The flag to trigger sampling at leaves.", "L"); PARAM_FLAG("first_leaf_exact", "The flag to trigger sampling only after " "exactly exploring the first leaf.", "X"); PARAM_INT_IN("single_sample_limit", "The limit on the maximum number of " "samples (and hence the largest node you can approximate).", "S", 20); // Convenience typedef. typedef RAModel RANNModel; int main(int argc, char *argv[]) { // Give CLI the command line parameters the user passed in. CLI::ParseCommandLine(argc, argv); if (CLI::GetParam("seed") != 0) math::RandomSeed((size_t) CLI::GetParam("seed")); else math::RandomSeed((size_t) std::time(NULL)); // A user cannot specify both reference data and a model. 
if (CLI::HasParam("reference_file") && CLI::HasParam("input_model_file")) Log::Fatal << "Only one of --reference_file (-r) or --input_model_file (-m)" << " may be specified!" << endl; // A user must specify one of them... if (!CLI::HasParam("reference_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "No model specified (--input_model_file) and no reference " << "data specified (--reference_file)! One must be provided." << endl; if (CLI::HasParam("input_model_file")) { // Notify the user of parameters that will be ignored. if (CLI::HasParam("tree_type")) Log::Warn << "--tree_type (-t) will be ignored because --input_model_file" << " is specified." << endl; if (CLI::HasParam("leaf_size")) Log::Warn << "--leaf_size (-l) will be ignored because --input_model_file" << " is specified." << endl; if (CLI::HasParam("random_basis")) Log::Warn << "--random_basis (-R) will be ignored because " << "--input_model_file is specified." << endl; if (CLI::HasParam("naive")) Log::Warn << "--naive (-N) will be ignored because --input_model_file is " << "specified." << endl; } // The user should give something to do... if (!CLI::HasParam("k") && !CLI::HasParam("output_model_file")) Log::Warn << "Neither -k nor --output_model_file are specified, so no " << "results from this program will be saved!" << endl; // If the user specifies k but no output files, they should be warned. if (CLI::HasParam("k") && !(CLI::HasParam("neighbors_file") || CLI::HasParam("distances_file"))) Log::Warn << "Neither --neighbors_file nor --distances_file is specified, " << "so the nearest neighbor search results will not be saved!" << endl; // If the user specifies output files but no k, they should be warned. if ((CLI::HasParam("neighbors_file") || CLI::HasParam("distances_file")) && !CLI::HasParam("k")) Log::Warn << "An output file for nearest neighbor search is given (" << "--neighbors_file or --distances_file), but nearest neighbor search " << "is not being performed because k (--k) is not specified! No " << "results will be saved." << endl; // Sanity check on leaf size. const int lsInt = CLI::GetParam("leaf_size"); if (lsInt < 1) { Log::Fatal << "Invalid leaf size: " << lsInt << ". Must be greater " "than 0." << endl; } // We either have to load the reference data, or we have to load the model. RANNModel rann; const bool naive = CLI::HasParam("naive"); const bool singleMode = CLI::HasParam("single_mode"); if (CLI::HasParam("reference_file")) { // Get all the parameters. const string referenceFile = CLI::GetParam("reference_file"); const string treeType = CLI::GetParam("tree_type"); const bool randomBasis = CLI::HasParam("random_basis"); RANNModel::TreeTypes tree = RANNModel::KD_TREE; if (treeType == "kd") tree = RANNModel::KD_TREE; else if (treeType == "cover") tree = RANNModel::COVER_TREE; else if (treeType == "r") tree = RANNModel::R_TREE; else if (treeType == "r-star") tree = RANNModel::R_STAR_TREE; else if (treeType == "x") tree = RANNModel::X_TREE; else if (treeType == "hilbert-r") tree = RANNModel::HILBERT_R_TREE; else if (treeType == "r-plus") tree = RANNModel::R_PLUS_TREE; else if (treeType == "r-plus-plus") tree = RANNModel::R_PLUS_PLUS_TREE; else if (treeType == "ub") tree = RANNModel::UB_TREE; else if (treeType == "oct") tree = RANNModel::OCTREE; else Log::Fatal << "Unknown tree type '" << treeType << "'; valid choices are " << "'kd', 'ub', 'cover', 'r', 'r-star', 'x', 'hilbert-r', " << "'r-plus', 'r-plus-plus', 'oct'." 
        << endl;

    rann.TreeType() = tree;
    rann.RandomBasis() = randomBasis;

    arma::mat referenceSet;
    data::Load(referenceFile, referenceSet, true);
    Log::Info << "Loaded reference data from '" << referenceFile << "' ("
        << referenceSet.n_rows << " x " << referenceSet.n_cols << ")."
        << endl;

    rann.BuildModel(std::move(referenceSet), size_t(lsInt), naive, singleMode);
  }
  else
  {
    // Load the model from file.
    const string inputModelFile = CLI::GetParam<string>("input_model_file");
    data::Load(inputModelFile, "rann_model", rann, true); // Fatal on failure.

    Log::Info << "Loaded rank-approximate kNN model from '" << inputModelFile
        << "' (trained on " << rann.Dataset().n_rows << "x"
        << rann.Dataset().n_cols << " dataset)." << endl;

    // Adjust singleMode and naive if necessary.
    rann.SingleMode() = CLI::HasParam("single_mode");
    rann.Naive() = CLI::HasParam("naive");
    rann.LeafSize() = size_t(lsInt);
  }

  // Apply the parameters for search.
  if (CLI::HasParam("tau"))
    rann.Tau() = CLI::GetParam<double>("tau");
  if (CLI::HasParam("alpha"))
    rann.Alpha() = CLI::GetParam<double>("alpha");
  if (CLI::HasParam("single_sample_limit"))
    rann.SingleSampleLimit() = CLI::GetParam<int>("single_sample_limit");
  rann.SampleAtLeaves() = CLI::HasParam("sample_at_leaves");
  rann.FirstLeafExact() = CLI::HasParam("first_leaf_exact");

  // Perform search, if desired.
  if (CLI::HasParam("k"))
  {
    const string queryFile = CLI::GetParam<string>("query_file");
    const size_t k = (size_t) CLI::GetParam<int>("k");

    arma::mat queryData;
    if (queryFile != "")
    {
      data::Load(queryFile, queryData, true);
      Log::Info << "Loaded query data from '" << queryFile << "' ("
          << queryData.n_rows << "x" << queryData.n_cols << ")." << endl;
    }

    // Sanity check on k value: must be greater than 0, must be less than the
    // number of reference points.  Since it is unsigned, we only test the
    // upper bound.
    if (k > rann.Dataset().n_cols)
    {
      Log::Fatal << "Invalid k: " << k << "; must be greater than 0 and less ";
      Log::Fatal << "than or equal to the number of reference points (";
      Log::Fatal << rann.Dataset().n_cols << ")." << endl;
    }

    // Naive mode overrides single mode.
    if (singleMode && naive)
    {
      Log::Warn << "--single_mode ignored because --naive is present." << endl;
    }

    arma::Mat<size_t> neighbors;
    arma::mat distances;
    if (CLI::HasParam("query_file"))
      rann.Search(std::move(queryData), k, neighbors, distances);
    else
      rann.Search(k, neighbors, distances);

    Log::Info << "Search complete." << endl;

    // Save output, if desired.
    if (CLI::HasParam("neighbors_file"))
      data::Save(CLI::GetParam<string>("neighbors_file"), neighbors);
    if (CLI::HasParam("distances_file"))
      data::Save(CLI::GetParam<string>("distances_file"), distances);
  }

  if (CLI::HasParam("output_model_file"))
  {
    const string outputModelFile = CLI::GetParam<string>("output_model_file");
    data::Save(outputModelFile, "rann_model", rann);
  }
}
mlpack-2.2.5/src/mlpack/methods/rann/ra_model.hpp000066400000000000000000000136251315013601400217200ustar00rootroot00000000000000/**
 * @file ra_model.hpp
 * @author Ryan Curtin
 *
 * This is a model for rank-approximate nearest neighbor search.  It provides
 * an easy way to serialize a rank-approximate neighbor search model by
 * abstracting the types of trees and reflecting the RASearch API.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
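 *
 * A rough usage sketch (the data and variable names here are hypothetical;
 * NearestNeighborSort is the sort policy the mlpack_krann program uses):
 *
 * @code
 * arma::mat dataset = arma::randu<arma::mat>(10, 1000); // 1000 10-d points.
 * RAModel<NearestNeighborSort> model(RAModel<NearestNeighborSort>::KD_TREE,
 *     false); // No random basis.
 * model.BuildModel(std::move(dataset), 20, false, false);
 * model.Tau() = 5;      // Top 5% of neighbors...
 * model.Alpha() = 0.95; // ...with success probability 0.95.
 *
 * arma::Mat<size_t> neighbors;
 * arma::mat distances;
 * model.Search(3, neighbors, distances); // 3 approximate nearest neighbors.
 * @endcode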
*/ #ifndef MLPACK_METHODS_RANN_RA_MODEL_HPP #define MLPACK_METHODS_RANN_RA_MODEL_HPP #include #include #include #include #include "ra_search.hpp" namespace mlpack { namespace neighbor { /** * The RAModel class provides an abstraction for the RASearch class, abstracting * away the TreeType parameter and allowing it to be specified at runtime in * this class. This class is written for the sake of the 'allkrann' program, * but is not necessarily restricted to that use. * * @param SortPolicy Sorting policy for neighbor searching (see RASearch). */ template class RAModel { public: /** * The list of tree types we can use with RASearch. Does not include ball * trees; see #338. */ enum TreeTypes { KD_TREE, COVER_TREE, R_TREE, R_STAR_TREE, X_TREE, HILBERT_R_TREE, R_PLUS_TREE, R_PLUS_PLUS_TREE, UB_TREE, OCTREE }; private: //! The type of tree being used. TreeTypes treeType; //! The leaf size of the tree being used (useful only for the kd-tree). size_t leafSize; //! If true, randomly project into a new basis. bool randomBasis; //! The basis to project into. arma::mat q; //! Typedef the RASearch class we'll use. template class TreeType> using RAType = RASearch; //! Non-NULL if the kd-tree is used. RAType* kdTreeRA; //! Non-NULL if the cover tree is used. RAType* coverTreeRA; //! Non-NULL if the R tree is used. RAType* rTreeRA; //! Non-NULL if the R* tree is used. RAType* rStarTreeRA; //! Non-NULL if the X tree is used. RAType* xTreeRA; //! Non-NULL if the Hilbert R tree is used. RAType* hilbertRTreeRA; //! Non-NULL if the R+ tree is used. RAType* rPlusTreeRA; //! Non-NULL if the R++ tree is used. RAType* rPlusPlusTreeRA; //! Non-NULL if the UB tree is used. RAType* ubTreeRA; //! Non-NULL if the octree is used. RAType* octreeRA; public: /** * Initialize the RAModel with the given type and whether or not a random * basis should be used. */ RAModel(TreeTypes treeType = TreeTypes::KD_TREE, bool randomBasis = false); //! Clean memory, if necessary. ~RAModel(); //! Serialize the model. template void Serialize(Archive& ar, const unsigned int /* version */); //! Expose the dataset. const arma::mat& Dataset() const; //! Get whether or not single-tree search is being used. bool SingleMode() const; //! Modify whether or not single-tree search is being used. bool& SingleMode(); //! Get whether or not naive search is being used. bool Naive() const; //! Modify whether or not naive search is being used. bool& Naive(); //! Get the rank-approximation in percentile of the data. double Tau() const; //! Modify the rank-approximation in percentile of the data. double& Tau(); //! Get the desired success probability. double Alpha() const; //! Modify the desired success probability. double& Alpha(); //! Get whether or not sampling is done at the leaves. bool SampleAtLeaves() const; //! Modify whether or not sampling is done at the leaves. bool& SampleAtLeaves(); //! Get whether or not we traverse to the first leaf without approximation. bool FirstLeafExact() const; //! Modify whether or not we traverse to the first leaf without approximation. bool& FirstLeafExact(); //! Get the limit on the size of a node that can be approximated. size_t SingleSampleLimit() const; //! Modify the limit on the size of a node that can be approximation. size_t& SingleSampleLimit(); //! Get the leaf size (only relevant when the kd-tree is used). size_t LeafSize() const; //! Modify the leaf size (only relevant when the kd-tree is used). size_t& LeafSize(); //! Get the type of tree being used. TreeTypes TreeType() const; //! 
Modify the type of tree being used. TreeTypes& TreeType(); //! Get whether or not a random basis is being used. bool RandomBasis() const; //! Modify whether or not a random basis is being used. Be sure to rebuild //! the model using BuildModel(). bool& RandomBasis(); //! Build the reference tree. void BuildModel(arma::mat&& referenceSet, const size_t leafSize, const bool naive, const bool singleMode); //! Perform rank-approximate neighbor search, taking ownership of the query //! set. void Search(arma::mat&& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances); /** * Perform rank-approximate neighbor search, using the reference set as the * query set. */ void Search(const size_t k, arma::Mat& neighbors, arma::mat& distances); //! Get the name of the tree type. std::string TreeName() const; }; } // namespace neighbor } // namespace mlpack #include "ra_model_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/rann/ra_model_impl.hpp000066400000000000000000000611211315013601400227350ustar00rootroot00000000000000/** * @file ra_model_impl.hpp * @author Ryan Curtin * * Implementation of the RAModel class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RANN_RA_MODEL_IMPL_HPP #define MLPACK_METHODS_RANN_RA_MODEL_IMPL_HPP // In case it hasn't been included yet. #include "ra_model.hpp" #include namespace mlpack { namespace neighbor { template RAModel::RAModel(const TreeTypes treeType, const bool randomBasis) : treeType(treeType), leafSize(20), randomBasis(randomBasis), kdTreeRA(NULL), coverTreeRA(NULL), rTreeRA(NULL), rStarTreeRA(NULL), xTreeRA(NULL), hilbertRTreeRA(NULL), rPlusTreeRA(NULL), rPlusPlusTreeRA(NULL), ubTreeRA(NULL), octreeRA(NULL) { // Nothing to do. } template RAModel::~RAModel() { delete kdTreeRA; delete coverTreeRA; delete rTreeRA; delete rStarTreeRA; delete xTreeRA; delete hilbertRTreeRA; delete rPlusTreeRA; delete rPlusPlusTreeRA; delete ubTreeRA; delete octreeRA; } template template void RAModel::Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(treeType, "treeType"); ar & data::CreateNVP(randomBasis, "randomBasis"); ar & data::CreateNVP(q, "q"); // This should never happen, but just in case, be clean with memory. if (Archive::is_loading::value) { delete kdTreeRA; delete coverTreeRA; delete rTreeRA; delete rStarTreeRA; delete xTreeRA; delete hilbertRTreeRA; delete rPlusTreeRA; delete rPlusPlusTreeRA; delete ubTreeRA; delete octreeRA; // Set all the pointers to NULL. kdTreeRA = NULL; coverTreeRA = NULL; rTreeRA = NULL; rStarTreeRA = NULL; xTreeRA = NULL; hilbertRTreeRA = NULL; rPlusPlusTreeRA = NULL; rPlusTreeRA = NULL; ubTreeRA = NULL; } // We only need to serialize one of the kRANN objects. 
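// (Only the pointer matching treeType is non-NULL here, and every case below
// archives its object under the same name, "ra_model"; together with the
// treeType entry serialized above, that is enough to reload the right
// instantiation later.)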
switch (treeType) { case KD_TREE: ar & data::CreateNVP(kdTreeRA, "ra_model"); break; case COVER_TREE: ar & data::CreateNVP(coverTreeRA, "ra_model"); break; case R_TREE: ar & data::CreateNVP(rTreeRA, "ra_model"); break; case R_STAR_TREE: ar & data::CreateNVP(rStarTreeRA, "ra_model"); break; case X_TREE: ar & data::CreateNVP(xTreeRA, "ra_model"); break; case HILBERT_R_TREE: ar & data::CreateNVP(hilbertRTreeRA, "ra_model"); break; case R_PLUS_TREE: ar & data::CreateNVP(rPlusTreeRA, "ra_model"); break; case R_PLUS_PLUS_TREE: ar & data::CreateNVP(rPlusPlusTreeRA, "ra_model"); break; case UB_TREE: ar & data::CreateNVP(ubTreeRA, "ra_model"); break; case OCTREE: ar & data::CreateNVP(octreeRA, "ra_model"); break; } } template const arma::mat& RAModel::Dataset() const { if (kdTreeRA) return kdTreeRA->ReferenceSet(); else if (coverTreeRA) return coverTreeRA->ReferenceSet(); else if (rTreeRA) return rTreeRA->ReferenceSet(); else if (rStarTreeRA) return rStarTreeRA->ReferenceSet(); else if (xTreeRA) return xTreeRA->ReferenceSet(); else if (hilbertRTreeRA) return hilbertRTreeRA->ReferenceSet(); else if (rPlusTreeRA) return rPlusTreeRA->ReferenceSet(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->ReferenceSet(); else if (ubTreeRA) return ubTreeRA->ReferenceSet(); else if (octreeRA) return octreeRA->ReferenceSet(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template bool RAModel::Naive() const { if (kdTreeRA) return kdTreeRA->Naive(); else if (coverTreeRA) return coverTreeRA->Naive(); else if (rTreeRA) return rTreeRA->Naive(); else if (rStarTreeRA) return rStarTreeRA->Naive(); else if (xTreeRA) return xTreeRA->Naive(); else if (hilbertRTreeRA) return hilbertRTreeRA->Naive(); else if (rPlusTreeRA) return rPlusTreeRA->Naive(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->Naive(); else if (ubTreeRA) return ubTreeRA->Naive(); else if (octreeRA) return octreeRA->Naive(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template bool& RAModel::Naive() { if (kdTreeRA) return kdTreeRA->Naive(); else if (coverTreeRA) return coverTreeRA->Naive(); else if (rTreeRA) return rTreeRA->Naive(); else if (rStarTreeRA) return rStarTreeRA->Naive(); else if (xTreeRA) return xTreeRA->Naive(); else if (hilbertRTreeRA) return hilbertRTreeRA->Naive(); else if (rPlusTreeRA) return rPlusTreeRA->Naive(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->Naive(); else if (ubTreeRA) return ubTreeRA->Naive(); else if (octreeRA) return octreeRA->Naive(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template bool RAModel::SingleMode() const { if (kdTreeRA) return kdTreeRA->SingleMode(); else if (coverTreeRA) return coverTreeRA->SingleMode(); else if (rTreeRA) return rTreeRA->SingleMode(); else if (rStarTreeRA) return rStarTreeRA->SingleMode(); else if (xTreeRA) return xTreeRA->SingleMode(); else if (hilbertRTreeRA) return hilbertRTreeRA->SingleMode(); else if (rPlusTreeRA) return rPlusTreeRA->SingleMode(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->SingleMode(); else if (ubTreeRA) return ubTreeRA->SingleMode(); else if (octreeRA) return octreeRA->SingleMode(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template bool& RAModel::SingleMode() { if (kdTreeRA) return kdTreeRA->SingleMode(); else if (coverTreeRA) return coverTreeRA->SingleMode(); else if (rTreeRA) return rTreeRA->SingleMode(); else if (rStarTreeRA) return 
rStarTreeRA->SingleMode(); else if (xTreeRA) return xTreeRA->SingleMode(); else if (hilbertRTreeRA) return hilbertRTreeRA->SingleMode(); else if (rPlusTreeRA) return rPlusTreeRA->SingleMode(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->SingleMode(); else if (ubTreeRA) return ubTreeRA->SingleMode(); else if (octreeRA) return octreeRA->SingleMode(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template double RAModel::Tau() const { if (kdTreeRA) return kdTreeRA->Tau(); else if (coverTreeRA) return coverTreeRA->Tau(); else if (rTreeRA) return rTreeRA->Tau(); else if (rStarTreeRA) return rStarTreeRA->Tau(); else if (xTreeRA) return xTreeRA->Tau(); else if (hilbertRTreeRA) return hilbertRTreeRA->Tau(); else if (rPlusTreeRA) return rPlusTreeRA->Tau(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->Tau(); else if (ubTreeRA) return ubTreeRA->Tau(); else if (octreeRA) return octreeRA->Tau(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template double& RAModel::Tau() { if (kdTreeRA) return kdTreeRA->Tau(); else if (coverTreeRA) return coverTreeRA->Tau(); else if (rTreeRA) return rTreeRA->Tau(); else if (rStarTreeRA) return rStarTreeRA->Tau(); else if (xTreeRA) return xTreeRA->Tau(); else if (hilbertRTreeRA) return hilbertRTreeRA->Tau(); else if (rPlusTreeRA) return rPlusTreeRA->Tau(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->Tau(); else if (ubTreeRA) return ubTreeRA->Tau(); else if (octreeRA) return octreeRA->Tau(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template double RAModel::Alpha() const { if (kdTreeRA) return kdTreeRA->Alpha(); else if (coverTreeRA) return coverTreeRA->Alpha(); else if (rTreeRA) return rTreeRA->Alpha(); else if (rStarTreeRA) return rStarTreeRA->Alpha(); else if (xTreeRA) return xTreeRA->Alpha(); else if (hilbertRTreeRA) return hilbertRTreeRA->Alpha(); else if (rPlusTreeRA) return rPlusTreeRA->Alpha(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->Alpha(); else if (ubTreeRA) return ubTreeRA->Alpha(); else if (octreeRA) return octreeRA->Alpha(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template double& RAModel::Alpha() { if (kdTreeRA) return kdTreeRA->Alpha(); else if (coverTreeRA) return coverTreeRA->Alpha(); else if (rTreeRA) return rTreeRA->Alpha(); else if (rStarTreeRA) return rStarTreeRA->Alpha(); else if (xTreeRA) return xTreeRA->Alpha(); else if (hilbertRTreeRA) return hilbertRTreeRA->Alpha(); else if (rPlusTreeRA) return rPlusTreeRA->Alpha(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->Alpha(); else if (ubTreeRA) return ubTreeRA->Alpha(); else if (octreeRA) return octreeRA->Alpha(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template bool RAModel::SampleAtLeaves() const { if (kdTreeRA) return kdTreeRA->SampleAtLeaves(); else if (coverTreeRA) return coverTreeRA->SampleAtLeaves(); else if (rTreeRA) return rTreeRA->SampleAtLeaves(); else if (rStarTreeRA) return rStarTreeRA->SampleAtLeaves(); else if (xTreeRA) return xTreeRA->SampleAtLeaves(); else if (hilbertRTreeRA) return hilbertRTreeRA->SampleAtLeaves(); else if (rPlusTreeRA) return rPlusTreeRA->SampleAtLeaves(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->SampleAtLeaves(); else if (ubTreeRA) return ubTreeRA->SampleAtLeaves(); else if (octreeRA) return octreeRA->SampleAtLeaves(); throw std::runtime_error("no 
rank-approximate nearest neighbor search model " "initialized"); } template bool& RAModel::SampleAtLeaves() { if (kdTreeRA) return kdTreeRA->SampleAtLeaves(); else if (coverTreeRA) return coverTreeRA->SampleAtLeaves(); else if (rTreeRA) return rTreeRA->SampleAtLeaves(); else if (rStarTreeRA) return rStarTreeRA->SampleAtLeaves(); else if (xTreeRA) return xTreeRA->SampleAtLeaves(); else if (hilbertRTreeRA) return hilbertRTreeRA->SampleAtLeaves(); else if (rPlusTreeRA) return rPlusTreeRA->SampleAtLeaves(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->SampleAtLeaves(); else if (ubTreeRA) return ubTreeRA->SampleAtLeaves(); else if (octreeRA) return octreeRA->SampleAtLeaves(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template bool RAModel::FirstLeafExact() const { if (kdTreeRA) return kdTreeRA->FirstLeafExact(); else if (coverTreeRA) return coverTreeRA->FirstLeafExact(); else if (rTreeRA) return rTreeRA->FirstLeafExact(); else if (rStarTreeRA) return rStarTreeRA->FirstLeafExact(); else if (xTreeRA) return xTreeRA->FirstLeafExact(); else if (hilbertRTreeRA) return hilbertRTreeRA->FirstLeafExact(); else if (rPlusTreeRA) return rPlusTreeRA->FirstLeafExact(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->FirstLeafExact(); else if (ubTreeRA) return ubTreeRA->FirstLeafExact(); else if (octreeRA) return octreeRA->FirstLeafExact(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template bool& RAModel::FirstLeafExact() { if (kdTreeRA) return kdTreeRA->FirstLeafExact(); else if (coverTreeRA) return coverTreeRA->FirstLeafExact(); else if (rTreeRA) return rTreeRA->FirstLeafExact(); else if (rStarTreeRA) return rStarTreeRA->FirstLeafExact(); else if (xTreeRA) return xTreeRA->FirstLeafExact(); else if (hilbertRTreeRA) return hilbertRTreeRA->FirstLeafExact(); else if (rPlusTreeRA) return rPlusTreeRA->FirstLeafExact(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->FirstLeafExact(); else if (ubTreeRA) return ubTreeRA->FirstLeafExact(); else if (octreeRA) return octreeRA->FirstLeafExact(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template size_t RAModel::SingleSampleLimit() const { if (kdTreeRA) return kdTreeRA->SingleSampleLimit(); else if (coverTreeRA) return coverTreeRA->SingleSampleLimit(); else if (rTreeRA) return rTreeRA->SingleSampleLimit(); else if (rStarTreeRA) return rStarTreeRA->SingleSampleLimit(); else if (xTreeRA) return xTreeRA->SingleSampleLimit(); else if (hilbertRTreeRA) return hilbertRTreeRA->SingleSampleLimit(); else if (rPlusTreeRA) return rPlusTreeRA->SingleSampleLimit(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->SingleSampleLimit(); else if (ubTreeRA) return ubTreeRA->SingleSampleLimit(); else if (octreeRA) return octreeRA->SingleSampleLimit(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template size_t& RAModel::SingleSampleLimit() { if (kdTreeRA) return kdTreeRA->SingleSampleLimit(); else if (coverTreeRA) return coverTreeRA->SingleSampleLimit(); else if (rTreeRA) return rTreeRA->SingleSampleLimit(); else if (rStarTreeRA) return rStarTreeRA->SingleSampleLimit(); else if (xTreeRA) return xTreeRA->SingleSampleLimit(); else if (hilbertRTreeRA) return hilbertRTreeRA->SingleSampleLimit(); else if (rPlusTreeRA) return rPlusTreeRA->SingleSampleLimit(); else if (rPlusPlusTreeRA) return rPlusPlusTreeRA->SingleSampleLimit(); else if (ubTreeRA) return 
ubTreeRA->SingleSampleLimit(); else if (octreeRA) return octreeRA->SingleSampleLimit(); throw std::runtime_error("no rank-approximate nearest neighbor search model " "initialized"); } template size_t RAModel::LeafSize() const { return leafSize; } template size_t& RAModel::LeafSize() { return leafSize; } template typename RAModel::TreeTypes RAModel::TreeType() const { return treeType; } template typename RAModel::TreeTypes& RAModel::TreeType() { return treeType; } template bool RAModel::RandomBasis() const { return randomBasis; } template bool& RAModel::RandomBasis() { return randomBasis; } template void RAModel::BuildModel(arma::mat&& referenceSet, const size_t leafSize, const bool naive, const bool singleMode) { // Initialize random basis, if necessary. if (randomBasis) { Log::Info << "Creating random basis..." << std::endl; math::RandomBasis(q, referenceSet.n_rows); } // Clean memory, if necessary. delete kdTreeRA; delete coverTreeRA; delete rTreeRA; delete rStarTreeRA; delete xTreeRA; delete hilbertRTreeRA; delete rPlusTreeRA; delete rPlusPlusTreeRA; delete ubTreeRA; delete octreeRA; this->leafSize = leafSize; if (randomBasis) referenceSet = q * referenceSet; if (!naive) { Timer::Start("tree_building"); Log::Info << "Building reference tree..." << std::endl; } switch (treeType) { case KD_TREE: // Build tree, if necessary. if (naive) { kdTreeRA = new RAType(std::move(referenceSet), naive, singleMode); } else { std::vector oldFromNewReferences; typename RAType::Tree* kdTree = new typename RAType::Tree(std::move(referenceSet), oldFromNewReferences, leafSize); kdTreeRA = new RAType(kdTree, singleMode); // Give the model ownership of the tree. kdTreeRA->treeOwner = true; kdTreeRA->oldFromNewReferences = oldFromNewReferences; } break; case COVER_TREE: coverTreeRA = new RAType(std::move(referenceSet), naive, singleMode); break; case R_TREE: rTreeRA = new RAType(std::move(referenceSet), naive, singleMode); break; case R_STAR_TREE: rStarTreeRA = new RAType(std::move(referenceSet), naive, singleMode); break; case X_TREE: xTreeRA = new RAType(std::move(referenceSet), naive, singleMode); break; case HILBERT_R_TREE: hilbertRTreeRA = new RAType(std::move(referenceSet), naive, singleMode); break; case R_PLUS_TREE: rPlusTreeRA = new RAType(std::move(referenceSet), naive, singleMode); break; case R_PLUS_PLUS_TREE: rPlusPlusTreeRA = new RAType(std::move(referenceSet), naive, singleMode); break; case UB_TREE: ubTreeRA = new RAType(std::move(referenceSet), naive, singleMode); break; case OCTREE: // Build tree, if necessary. if (naive) { octreeRA = new RAType(std::move(referenceSet), naive, singleMode); } else { std::vector oldFromNewReferences; typename RAType::Tree* octree = new typename RAType::Tree(std::move(referenceSet), oldFromNewReferences, leafSize); octreeRA = new RAType(octree, singleMode); // Give the model ownership of the tree. octreeRA->treeOwner = true; octreeRA->oldFromNewReferences = oldFromNewReferences; } break; } if (!naive) { Timer::Stop("tree_building"); Log::Info << "Tree built." << std::endl; } } template void RAModel::Search(arma::mat&& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances) { // Apply the random basis if necessary. 
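// (q is the orthogonal basis generated by math::RandomBasis() in
// BuildModel().  Projecting the queries through the same q that was applied
// to the reference set preserves Euclidean distances, so the rotation does
// not change which neighbors are found.)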
if (randomBasis) querySet = q * querySet; Log::Info << "Searching for " << k << " approximate nearest neighbors with "; if (!Naive() && !SingleMode()) Log::Info << "dual-tree rank-approximate " << TreeName() << " search..."; else if (!Naive()) Log::Info << "single-tree rank-approximate " << TreeName() << " search..."; else Log::Info << "brute-force (naive) rank-approximate search..."; Log::Info << std::endl; switch (treeType) { case KD_TREE: if (!kdTreeRA->Naive() && !kdTreeRA->SingleMode()) { // Build a second tree and search. Timer::Start("tree_building"); Log::Info << "Building query tree..." << std::endl; std::vector oldFromNewQueries; typename RAType::Tree queryTree(std::move(querySet), oldFromNewQueries, leafSize); Log::Info << "Tree built." << std::endl; Timer::Stop("tree_building"); arma::Mat neighborsOut; arma::mat distancesOut; kdTreeRA->Search(&queryTree, k, neighborsOut, distancesOut); // Unmap the query points. distances.set_size(distancesOut.n_rows, distancesOut.n_cols); neighbors.set_size(neighborsOut.n_rows, neighborsOut.n_cols); for (size_t i = 0; i < neighborsOut.n_cols; ++i) { neighbors.col(oldFromNewQueries[i]) = neighborsOut.col(i); distances.col(oldFromNewQueries[i]) = distancesOut.col(i); } } else { // Search without building a second tree. kdTreeRA->Search(querySet, k, neighbors, distances); } break; case COVER_TREE: // No mapping necessary. coverTreeRA->Search(querySet, k, neighbors, distances); break; case R_TREE: // No mapping necessary. rTreeRA->Search(querySet, k, neighbors, distances); break; case R_STAR_TREE: // No mapping necessary. rStarTreeRA->Search(querySet, k, neighbors, distances); break; case X_TREE: // No mapping necessary. xTreeRA->Search(querySet, k, neighbors, distances); break; case HILBERT_R_TREE: // No mapping necessary. hilbertRTreeRA->Search(querySet, k, neighbors, distances); break; case R_PLUS_TREE: // No mapping necessary. rPlusTreeRA->Search(querySet, k, neighbors, distances); break; case R_PLUS_PLUS_TREE: // No mapping necessary. rPlusPlusTreeRA->Search(querySet, k, neighbors, distances); break; case UB_TREE: // No mapping necessary. ubTreeRA->Search(querySet, k, neighbors, distances); break; case OCTREE: if (!octreeRA->Naive() && !octreeRA->SingleMode()) { // Build a second tree and search. Timer::Start("tree_building"); Log::Info << "Building query tree..." << std::endl; std::vector oldFromNewQueries; typename RAType::Tree queryTree(std::move(querySet), oldFromNewQueries, leafSize); Log::Info << "Tree built." << std::endl; Timer::Stop("tree_building"); arma::Mat neighborsOut; arma::mat distancesOut; octreeRA->Search(&queryTree, k, neighborsOut, distancesOut); // Unmap the query points. distances.set_size(distancesOut.n_rows, distancesOut.n_cols); neighbors.set_size(neighborsOut.n_rows, neighborsOut.n_cols); for (size_t i = 0; i < neighborsOut.n_cols; ++i) { neighbors.col(oldFromNewQueries[i]) = neighborsOut.col(i); distances.col(oldFromNewQueries[i]) = distancesOut.col(i); } } else { // Search without building a second tree. 
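// (No query-side remapping is needed on this branch: the unmapping above is
// only required when a second tree is built, because tree construction
// reorders the points of the query matrix.)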
octreeRA->Search(querySet, k, neighbors, distances); } break; } } template void RAModel::Search(const size_t k, arma::Mat& neighbors, arma::mat& distances) { Log::Info << "Searching for " << k << " approximate nearest neighbors with "; if (!Naive() && !SingleMode()) Log::Info << "dual-tree rank-approximate " << TreeName() << " search..."; else if (!Naive()) Log::Info << "single-tree rank-approximate " << TreeName() << " search..."; else Log::Info << "brute-force (naive) rank-approximate search..."; Log::Info << std::endl; switch (treeType) { case KD_TREE: kdTreeRA->Search(k, neighbors, distances); break; case COVER_TREE: coverTreeRA->Search(k, neighbors, distances); break; case R_TREE: rTreeRA->Search(k, neighbors, distances); break; case R_STAR_TREE: rStarTreeRA->Search(k, neighbors, distances); break; case X_TREE: xTreeRA->Search(k, neighbors, distances); break; case HILBERT_R_TREE: hilbertRTreeRA->Search(k, neighbors, distances); break; case R_PLUS_TREE: rPlusTreeRA->Search(k, neighbors, distances); break; case R_PLUS_PLUS_TREE: rPlusPlusTreeRA->Search(k, neighbors, distances); break; case UB_TREE: ubTreeRA->Search(k, neighbors, distances); break; case OCTREE: octreeRA->Search(k, neighbors, distances); break; } } template std::string RAModel::TreeName() const { switch (treeType) { case KD_TREE: return "kd-tree"; case COVER_TREE: return "cover tree"; case R_TREE: return "R tree"; case R_STAR_TREE: return "R* tree"; case X_TREE: return "X tree"; case HILBERT_R_TREE: return "Hilbert R tree"; case R_PLUS_TREE: return "R+ tree"; case R_PLUS_PLUS_TREE: return "R++ tree"; case UB_TREE: return "UB tree"; case OCTREE: return "octree"; default: return "unknown tree"; } } } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/rann/ra_query_stat.hpp000066400000000000000000000045341315013601400230210ustar00rootroot00000000000000/** * @file ra_query_stat.hpp * @author Parikshit Ram * * Defines the RAQueryStat class, which is the statistic used for * rank-approximate nearest neighbor search (RASearch). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RANN_RA_QUERY_STAT_HPP #define MLPACK_METHODS_RANN_RA_QUERY_STAT_HPP #include #include #include #include namespace mlpack { namespace neighbor { /** * Extra data for each node in the tree. For neighbor searches, each node only * needs to store a bound on neighbor distances. * * Every query is required to make a minimum number of samples to guarantee the * desired approximation error. The 'numSamplesMade' keeps track of the minimum * number of samples made by all queries in the node in question. */ template class RAQueryStat { public: /** * Initialize the statistic with the worst possible distance according to our * sorting policy. */ RAQueryStat() : bound(SortPolicy::WorstDistance()), numSamplesMade(0) { } /** * Initialization for a node. */ template RAQueryStat(const TreeType& /* node */) : bound(SortPolicy::WorstDistance()), numSamplesMade(0) { } //! Get the bound. double Bound() const { return bound; } //! Modify the bound. double& Bound() { return bound; } //! Get the number of samples made. size_t NumSamplesMade() const { return numSamplesMade; } //! Modify the number of samples made. size_t& NumSamplesMade() { return numSamplesMade; } //! Serialize the statistic. 
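//! (Each field is written with data::CreateNVP(), mlpack's name-value-pair
//! wrapper for boost::serialization, so it gets a stable tag in the archive.)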
template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(bound, "bound"); ar & data::CreateNVP(numSamplesMade, "numSamplesMade"); } private: //! The bound on the node's neighbor distances. double bound; //! The minimum number of samples made by any query in this node. size_t numSamplesMade; }; } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/rann/ra_search.hpp000066400000000000000000000505611315013601400220670ustar00rootroot00000000000000/** * @file ra_search.hpp * @author Parikshit Ram * * Defines the RASearch class, which performs an abstract rank-approximate * nearest/farthest neighbor query on two datasets. * * The details of this method can be found in the following paper: * * @inproceedings{ram2009rank, * title={{Rank-Approximate Nearest Neighbor Search: Retaining Meaning and * Speed in High Dimensions}}, * author={{Ram, P. and Lee, D. and Ouyang, H. and Gray, A. G.}}, * booktitle={{Advances of Neural Information Processing Systems}}, * year={2009} * } * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RANN_RA_SEARCH_HPP #define MLPACK_METHODS_RANN_RA_SEARCH_HPP #include #include #include #include #include "ra_query_stat.hpp" #include "ra_util.hpp" namespace mlpack { namespace neighbor { // Forward declaration. template class RAModel; /** * The RASearch class: This class provides a generic manner to perform * rank-approximate search via random-sampling. If the 'naive' option is chosen, * this rank-approximate search will be done by randomly sampling from the whole * set. If the 'naive' option is not chosen, the sampling is done in a * stratified manner in the tree as mentioned in the algorithms in Figure 2 of * the following paper: * * @inproceedings{ram2009rank, * title={{Rank-Approximate Nearest Neighbor Search: Retaining Meaning and * Speed in High Dimensions}}, * author={{Ram, P. and Lee, D. and Ouyang, H. and Gray, A. G.}}, * booktitle={{Advances of Neural Information Processing Systems}}, * year={2009} * } * * RASearch is currently known to not work with ball trees (#356). * * @tparam SortPolicy The sort policy for distances; see NearestNeighborSort. * @tparam MetricType The metric to use for computation. * @tparam TreeType The tree type to use. */ template class TreeType = tree::KDTree> class RASearch { public: //! Convenience typedef. typedef TreeType, MatType> Tree; /** * Initialize the RASearch object, passing both a reference dataset (this is * the dataset that will be searched). Optionally, perform the computation in * naive mode or single-tree mode. An initialized distance metric can be * given, for cases where the metric has internal data (i.e. the * distance::MahalanobisDistance class). * * This method will copy the matrices to internal copies, which are rearranged * during tree-building. You can avoid this extra copy by pre-constructing * the trees and using the appropriate constructor, or by using the * constructor that takes an rvalue reference to the data with std::move(). * * tau, the rank-approximation parameter, specifies that we are looking for k * neighbors with probability alpha of being in the top tau percent of nearest * neighbors. 
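 *
 * Put differently: with N reference points the guarantee covers roughly the
 * top ceil(tau * N / 100) candidates, so tau is only sensible when that count
 * reaches k.  A hedged sketch of that feasibility check (the function name is
 * illustrative only, not part of this API):
 *
 * @code
 * bool TauIsLargeEnough(const double tau, const size_t n, const size_t k)
 * {
 *   // Number of points in the top tau percent of an n-point dataset.
 *   // (Exact boundary handling may differ in the real implementation.)
 *   const size_t topCount = (size_t) std::ceil(tau * n / 100.0);
 *   return topCount >= k;
 * }
 * @endcode
 *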
So, as an example, if our dataset has 1000 points, and we want * 5 nearest neighbors with 95% probability of being in the top 5% of nearest * neighbors (or, the top 50 nearest neighbors), we set k = 5, tau = 5, and * alpha = 0.95. * * The method will fail (and throw a std::invalid_argument exception) if the * value of tau is too low: tau must be set such that the number of points in * the corresponding percentile of the data is greater than k. Thus, if we * choose tau = 0.1 with a dataset of 1000 points and k = 5, then we are * attempting to choose 5 nearest neighbors out of the closest 1 point -- this * is invalid. * * @param referenceSet Set of reference points. * @param naive If true, the rank-approximate search will be performed by * directly sampling the whole set instead of using the stratified * sampling on the tree. * @param singleMode If true, single-tree search will be used (as opposed to * dual-tree search). This is useful when Search() will be called with * few query points. * @param metric An optional instance of the MetricType class. * @param tau The rank-approximation in percentile of the data. The default * value is 5%. * @param alpha The desired success probability. The default value is 0.95. * @param sampleAtLeaves Sample at leaves for faster but less accurate * computation. This defaults to 'false'. * @param firstLeafExact Traverse to the first leaf without approximation. * This can ensure that the query definitely finds its (near) duplicate * if there exists one. This defaults to 'false' for now. * @param singleSampleLimit The limit on the largest node that can be * approximated by sampling. This defaults to 20. */ RASearch(const MatType& referenceSet, const bool naive = false, const bool singleMode = false, const double tau = 5, const double alpha = 0.95, const bool sampleAtLeaves = false, const bool firstLeafExact = false, const size_t singleSampleLimit = 20, const MetricType metric = MetricType()); /** * Initialize the RASearch object, passing both a reference dataset (this is * the dataset that will be searched). Optionally, perform the computation in * naive mode or single-tree mode. An initialized distance metric can be * given, for cases where the metric has internal data (i.e. the * distance::MahalanobisDistance class). * * This method will take ownership of the given reference set, avoiding a * copy. If you need to use the reference set for other purposes, too, * consider using the constructor that takes a const reference. * * tau, the rank-approximation parameter, specifies that we are looking for k * neighbors with probability alpha of being in the top tau percent of nearest * neighbors. So, as an example, if our dataset has 1000 points, and we want * 5 nearest neighbors with 95% probability of being in the top 5% of nearest * neighbors (or, the top 50 nearest neighbors), we set k = 5, tau = 5, and * alpha = 0.95. * * The method will fail (and throw a std::invalid_argument exception) if the * value of tau is too low: tau must be set such that the number of points in * the corresponding percentile of the data is greater than k. Thus, if we * choose tau = 0.1 with a dataset of 1000 points and k = 5, then we are * attempting to choose 5 nearest neighbors out of the closest 1 point -- this * is invalid. * * @param referenceSet Set of reference points. * @param naive If true, the rank-approximate search will be performed by * directly sampling the whole set instead of using the stratified * sampling on the tree. 
* @param singleMode If true, single-tree search will be used (as opposed to * dual-tree search). This is useful when Search() will be called with * few query points. * @param metric An optional instance of the MetricType class. * @param tau The rank-approximation in percentile of the data. The default * value is 5%. * @param alpha The desired success probability. The default value is 0.95. * @param sampleAtLeaves Sample at leaves for faster but less accurate * computation. This defaults to 'false'. * @param firstLeafExact Traverse to the first leaf without approximation. * This can ensure that the query definitely finds its (near) duplicate * if there exists one. This defaults to 'false' for now. * @param singleSampleLimit The limit on the largest node that can be * approximated by sampling. This defaults to 20. */ RASearch(MatType&& referenceSet, const bool naive = false, const bool singleMode = false, const double tau = 5, const double alpha = 0.95, const bool sampleAtLeaves = false, const bool firstLeafExact = false, const size_t singleSampleLimit = 20, const MetricType metric = MetricType()); /** * Initialize the RASearch object with the given pre-constructed reference * tree. It is assumed that the points in the tree's dataset correspond to * the reference set. Optionally, choose to use single-tree mode. Naive mode * is not available as an option for this constructor; instead, to run naive * computation, use a different constructor. Additionally, an instantiated * distance metric can be given, for cases where the distance metric holds * data. * * There is no copying of the data matrices in this constructor (because * tree-building is not necessary), so this is the constructor to use when * copies absolutely must be avoided. * * tau, the rank-approximation parameter, specifies that we are looking for k * neighbors with probability alpha of being in the top tau percent of nearest * neighbors. So, as an example, if our dataset has 1000 points, and we want * 5 nearest neighbors with 95% probability of being in the top 5% of nearest * neighbors (or, the top 50 nearest neighbors), we set k = 5, tau = 5, and * alpha = 0.95. * * The method will fail (and throw a std::invalid_argument exception) if the * value of tau is too low: tau must be set such that the number of points in * the corresponding percentile of the data is greater than k. Thus, if we * choose tau = 0.1 with a dataset of 1000 points and k = 5, then we are * attempting to choose 5 nearest neighbors out of the closest 1 point -- this * is invalid. * * @note * Tree-building may (at least with BinarySpaceTree) modify the ordering * of a matrix, so be aware that the results you get from Search() will * correspond to the modified matrix. * @endnote * * @param referenceTree Pre-built tree for reference points. * @param singleMode Whether single-tree computation should be used (as * opposed to dual-tree computation). * @param tau The rank-approximation in percentile of the data. The default * value is 5%. * @param alpha The desired success probability. The default value is 0.95. * @param sampleAtLeaves Sample at leaves for faster but less accurate * computation. This defaults to 'false'. * @param firstLeafExact Traverse to the first leaf without approximation. * This can ensure that the query definitely finds its (near) duplicate * if there exists one. This defaults to 'false' for now. * @param singleSampleLimit The limit on the largest node that can be * approximated by sampling. This defaults to 20. 
* @param metric Instantiated distance metric. */ RASearch(Tree* referenceTree, const bool singleMode = false, const double tau = 5, const double alpha = 0.95, const bool sampleAtLeaves = false, const bool firstLeafExact = false, const size_t singleSampleLimit = 20, const MetricType metric = MetricType()); /** * Create an RASearch object with no reference data. If Search() is called * before a reference set is set with Train(), an exception will be thrown. * * @param naive Whether naive (brute-force) search should be used. * @param singleMode Whether single-tree computation should be used (as * opposed to dual-tree computation). * @param tau The rank-approximation in percentile of the data. The default * value is 5%. * @param alpha The desired success probability. The default value is 0.95. * @param sampleAtLeaves Sample at leaves for faster but less accurate * computation. This defaults to 'false'. * @param firstLeafExact Traverse to the first leaf without approximation. * This can ensure that the query definitely finds its (near) duplicate * if there exists one. This defaults to 'false' for now. * @param singleSampleLimit The limit on the largest node that can be * approximated by sampling. This defaults to 20. * @param metric Instantiated distance metric. */ RASearch(const bool naive = false, const bool singleMode = false, const double tau = 5, const double alpha = 0.95, const bool sampleAtLeaves = false, const bool firstLeafExact = false, const size_t singleSampleLimit = 20, const MetricType metric = MetricType()); /** * Delete the RASearch object. The tree and the dataset are the only members * we may be responsible for deleting; the others will take care of * themselves. */ ~RASearch(); /** * "Train" the model on the given reference set. If tree-based search is * being used (if Naive() is false), this means rebuilding the reference tree. * This particular method will make a copy of the given reference data. To * avoid that copy, use the Train() method that takes an rvalue reference with * std::move(). * * @param referenceSet New reference set to use. */ void Train(const MatType& referenceSet); /** * "Train" the model on the given reference set, taking ownership of the data * matrix. If tree-based search is being used (if Naive() is false), this * also means rebuilding the reference tree. If you need to keep a copy of * the reference data, use the Train() method that takes a const reference to * the data. * * @param referenceSet New reference set to use. */ void Train(MatType&& referenceSet); /** * Compute the rank approximate nearest neighbors of each query point in the * query set and store the output in the given matrices. The matrices will be * set to the size of n columns by k rows, where n is the number of points in * the query dataset and k is the number of neighbors being searched for. * * If querySet is small or only contains one point, it can be faster to do * single-tree search; single-tree search can be set with the SingleMode() * function or in the constructor. * * @param querySet Set of query points (can be a single point). * @param k Number of neighbors to search for. * @param neighbors Matrix storing lists of neighbors for each query point. * @param distances Matrix storing distances of neighbors for each query * point. */ void Search(const MatType& querySet, const size_t k, arma::Mat<size_t>& neighbors, arma::mat& distances); /** * Compute the rank approximate nearest neighbors of each point in the * pre-built query tree and store the output in the given matrices.
The * matrices will be set to the size of n columns by k rows, where n is the * number of points in the query dataset and k is the number of neighbors * being searched for. * * If singleMode or naive is enabled, then this method will throw a * std::invalid_argument exception; calling this function implies a dual-tree * algorithm. * * @note * If the tree type you are using modifies the data matrix, be aware that the * results returned from this function will be with respect to the modified * data matrix. * @endnote * * @param queryTree Tree built on query points. * @param k Number of neighbors to search for. * @param neighbors Matrix storing lists of neighbors for each query point. * @param distances Matrix storing distances of neighbors for each query * point. */ void Search(Tree* queryTree, const size_t k, arma::Mat& neighbors, arma::mat& distances); /** * Compute the rank approximate nearest neighbors of each point in the * reference set (that is, the query set is taken to be the reference set), * and store the output in the given matrices. The matrices will be set to * the size of n columns by k rows, where n is the number of points in the * query dataset and k is the number of neighbors being searched for. * * @param k Number of neighbors to search for. * @param neighbors Matrix storing lists of neighbors for each point. * @param distances Matrix storing distances of neighbors for each query * point. */ void Search(const size_t k, arma::Mat& neighbors, arma::mat& distances); /** * This function recursively resets the RAQueryStat of the given query tree to * set 'bound' to SortPolicy::WorstDistance and 'numSamplesMade' to 0. This * allows a user to perform multiple searches with the same query tree, * possibly with different levels of approximation without requiring to build * a new pair of trees for every new (approximate) search. * * If Search() is called multiple times with the same query tree without * calling ResetQueryTree(), the results may not satisfy the theoretical * guarantees provided by the rank-approximate neighbor search algorithm. * * @param queryTree Tree whose statistics should be reset. */ void ResetQueryTree(Tree* queryTree) const; //! Access the reference set. const MatType& ReferenceSet() const { return *referenceSet; } //! Get whether or not naive (brute-force) search is used. bool Naive() const { return naive; } //! Modify whether or not naive (brute-force) search is used. bool& Naive() { return naive; } //! Get whether or not single-tree search is used. bool SingleMode() const { return singleMode; } //! Modify whether or not single-tree search is used. bool& SingleMode() { return singleMode; } //! Get the rank-approximation in percentile of the data. double Tau() const { return tau; } //! Modify the rank-approximation in percentile of the data. double& Tau() { return tau; } //! Get the desired success probability. double Alpha() const { return alpha; } //! Modify the desired success probability. double& Alpha() { return alpha; } //! Get whether or not sampling is done at the leaves. bool SampleAtLeaves() const { return sampleAtLeaves; } //! Modify whether or not sampling is done at the leaves. bool& SampleAtLeaves() { return sampleAtLeaves; } //! Get whether or not we traverse to the first leaf without approximation. bool FirstLeafExact() const { return firstLeafExact; } //! Modify whether or not we traverse to the first leaf without approximation. bool& FirstLeafExact() { return firstLeafExact; } //! Get the limit on the size of a node that can be approximated. 
size_t SingleSampleLimit() const { return singleSampleLimit; } //! Modify the limit on the size of a node that can be approximated. size_t& SingleSampleLimit() { return singleSampleLimit; } //! Serialize the object. template<typename Archive> void Serialize(Archive& ar, const unsigned int /* version */); private: //! Permutations of reference points during tree building. std::vector<size_t> oldFromNewReferences; //! Pointer to the root of the reference tree. Tree* referenceTree; //! Reference dataset. In some situations we may own this dataset. const MatType* referenceSet; //! If true, this object created the trees and is responsible for them. bool treeOwner; //! If true, we are responsible for deleting the dataset. bool setOwner; //! Indicates if naive random sampling on the set is being used. bool naive; //! Indicates if single-tree search is being used (as opposed to dual-tree). bool singleMode; //! The rank-approximation in percentile of the data (between 0 and 100). double tau; //! The desired success probability (between 0 and 1). double alpha; //! Whether or not sampling is done at the leaves. Faster, but less accurate. bool sampleAtLeaves; //! If true, we will traverse to the first leaf without approximation. bool firstLeafExact; //! The limit on the number of points in the largest node that can be //! approximated by sampling. size_t singleSampleLimit; //! Instantiated distance metric. MetricType metric; //! RAModel can modify internal members as necessary. friend class RAModel; }; // class RASearch } // namespace neighbor } // namespace mlpack // Include implementation. #include "ra_search_impl.hpp" // Include convenient typedefs. #include "ra_typedef.hpp" #endif mlpack-2.2.5/src/mlpack/methods/rann/ra_search_impl.hpp000066400000000000000000000552021315013601400231050ustar00rootroot00000000000000/** * @file ra_search_impl.hpp * @author Parikshit Ram * * Implementation of the RASearch class to perform rank-approximate * all-nearest-neighbors on two specified data sets. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RANN_RA_SEARCH_IMPL_HPP #define MLPACK_METHODS_RANN_RA_SEARCH_IMPL_HPP #include <mlpack/core.hpp> #include "ra_search_rules.hpp" namespace mlpack { namespace neighbor { namespace aux { //! Call the tree constructor that does mapping. template<typename TreeType> TreeType* BuildTree( const typename TreeType::Mat& dataset, std::vector<size_t>& oldFromNew, typename boost::enable_if_c< tree::TreeTraits<TreeType>::RearrangesDataset == true, TreeType* >::type = 0) { return new TreeType(dataset, oldFromNew); } //! Call the tree constructor that does not do mapping. template<typename TreeType> TreeType* BuildTree( const typename TreeType::Mat& dataset, const std::vector<size_t>& /* oldFromNew */, const typename boost::enable_if_c< tree::TreeTraits<TreeType>::RearrangesDataset == false, TreeType* >::type = 0) { return new TreeType(dataset); } //! Call the tree constructor that does mapping. template<typename TreeType> TreeType* BuildTree( typename TreeType::Mat&& dataset, std::vector<size_t>& oldFromNew, typename boost::enable_if_c< tree::TreeTraits<TreeType>::RearrangesDataset == true, TreeType* >::type = 0) { return new TreeType(std::move(dataset), oldFromNew); } //! Call the tree constructor that does not do mapping.
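// (Overload-resolution note: for any given TreeType, boost::enable_if_c
// leaves exactly one BuildTree() overload viable, depending on whether
// tree::TreeTraits<TreeType>::RearrangesDataset is true or false; the
// lvalue/rvalue overloads then select between copying and moving the dataset
// into the tree.)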
template TreeType* BuildTree( typename TreeType::Mat&& dataset, const std::vector& /* oldFromNew */, const typename boost::enable_if_c< tree::TreeTraits::RearrangesDataset == false, TreeType* >::type = 0) { return new TreeType(std::move(dataset)); } } // namespace aux // Construct the object. template class TreeType> RASearch:: RASearch(const MatType& referenceSetIn, const bool naive, const bool singleMode, const double tau, const double alpha, const bool sampleAtLeaves, const bool firstLeafExact, const size_t singleSampleLimit, const MetricType metric) : referenceTree(naive ? NULL : aux::BuildTree( const_cast(referenceSetIn), oldFromNewReferences)), referenceSet(naive ? &referenceSetIn : &referenceTree->Dataset()), treeOwner(!naive), setOwner(false), naive(naive), singleMode(!naive && singleMode), // No single mode if naive. tau(tau), alpha(alpha), sampleAtLeaves(sampleAtLeaves), firstLeafExact(firstLeafExact), singleSampleLimit(singleSampleLimit), metric(metric) { // Nothing to do. } // Construct the object, taking ownership of the data matrix. template class TreeType> RASearch:: RASearch(MatType&& referenceSetIn, const bool naive, const bool singleMode, const double tau, const double alpha, const bool sampleAtLeaves, const bool firstLeafExact, const size_t singleSampleLimit, const MetricType metric) : referenceTree(naive ? NULL : aux::BuildTree( std::move(referenceSetIn), oldFromNewReferences)), referenceSet(naive ? new MatType(std::move(referenceSetIn)) : &referenceTree->Dataset()), treeOwner(!naive), setOwner(naive), naive(naive), singleMode(!naive && singleMode), // No single mode if naive. tau(tau), alpha(alpha), sampleAtLeaves(sampleAtLeaves), firstLeafExact(firstLeafExact), singleSampleLimit(singleSampleLimit), metric(metric) { // Nothing to do. } // Construct the object. template class TreeType> RASearch:: RASearch(Tree* referenceTree, const bool singleMode, const double tau, const double alpha, const bool sampleAtLeaves, const bool firstLeafExact, const size_t singleSampleLimit, const MetricType metric) : referenceTree(referenceTree), referenceSet(&referenceTree->Dataset()), treeOwner(false), setOwner(false), naive(false), singleMode(singleMode), tau(tau), alpha(alpha), sampleAtLeaves(sampleAtLeaves), firstLeafExact(firstLeafExact), singleSampleLimit(singleSampleLimit), metric(metric) // Nothing else to initialize. { } // Empty constructor. template class TreeType> RASearch:: RASearch(const bool naive, const bool singleMode, const double tau, const double alpha, const bool sampleAtLeaves, const bool firstLeafExact, const size_t singleSampleLimit, const MetricType metric) : referenceTree(NULL), referenceSet(new MatType()), treeOwner(false), setOwner(true), naive(naive), singleMode(singleMode), tau(tau), alpha(alpha), sampleAtLeaves(sampleAtLeaves), firstLeafExact(firstLeafExact), singleSampleLimit(singleSampleLimit), metric(metric) { // Build the tree on the empty dataset, if necessary. if (!naive) { referenceTree = aux::BuildTree(*referenceSet, oldFromNewReferences); treeOwner = true; } } /** * The tree and the dataset are the only members we may be responsible for * deleting. The others will take care of themselves. */ template class TreeType> RASearch:: ~RASearch() { if (treeOwner && referenceTree) delete referenceTree; if (setOwner) delete referenceSet; } // Train on a new reference set. template class TreeType> void RASearch::Train( const MatType& referenceSet) { // Clean up the old tree, if we built one. 
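  // (Ownership sketch, with hypothetical names: after
  //
  //   RASearch<> ra(oldData); // builds a tree; 'ra' owns it
  //   ra.Train(newData);      // the old tree is deleted just below
  //
  // the tree built on 'oldData' is freed before a new one is built on
  // 'newData'. A tree handed in through the tree constructor is never owned
  // by this object, so Train() will not delete it.)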
if (treeOwner && referenceTree) delete referenceTree; // We may need to rebuild the tree. if (!naive) { referenceTree = aux::BuildTree(referenceSet, oldFromNewReferences); treeOwner = true; } else { treeOwner = false; } // Delete the old reference set, if we owned it. if (setOwner && this->referenceSet) delete this->referenceSet; if (!naive) this->referenceSet = &referenceTree->Dataset(); else this->referenceSet = &referenceSet; setOwner = false; // We don't own the set in either case. } // Train on a new reference set. template class TreeType> void RASearch::Train( MatType&& referenceSet) { // Clean up the old tree, if we built one. if (treeOwner && referenceTree) delete referenceTree; // We may need to rebuild the tree. if (!naive) { referenceTree = aux::BuildTree(std::move(referenceSet), oldFromNewReferences); treeOwner = true; } else { treeOwner = false; } // Delete the old reference set, if we owned it. if (setOwner && this->referenceSet) delete this->referenceSet; if (!naive) { this->referenceSet = &referenceTree->Dataset(); setOwner = false; } else { this->referenceSet = new MatType(std::move(referenceSet)); setOwner = true; } } /** * Computes the best neighbors and stores them in resultingNeighbors and * distances. */ template class TreeType> void RASearch:: Search(const MatType& querySet, const size_t k, arma::Mat& neighbors, arma::mat& distances) { if (k > referenceSet->n_cols) { std::stringstream ss; ss << "requested value of k (" << k << ") is greater than the number of " << "points in the reference set (" << referenceSet->n_cols << ")"; throw std::invalid_argument(ss.str()); } Timer::Start("computing_neighbors"); // This will hold mappings for query points, if necessary. std::vector oldFromNewQueries; // If we have built the trees ourselves, then we will have to map all the // indices back to their original indices when this computation is finished. // To avoid an extra copy, we will store the neighbors and distances in a // separate matrix. arma::Mat* neighborPtr = &neighbors; arma::mat* distancePtr = &distances; // Mapping is only required if this tree type rearranges points and we are not // in naive mode. if (tree::TreeTraits::RearrangesDataset) { if (!singleMode && !naive) { distancePtr = new arma::mat; // Query indices need to be mapped. neighborPtr = new arma::Mat; } else if (treeOwner) neighborPtr = new arma::Mat; // All indices need mapping. } // Set the size of the neighbor and distance matrices. neighborPtr->set_size(k, querySet.n_cols); distancePtr->set_size(k, querySet.n_cols); typedef RASearchRules RuleType; if (naive) { RuleType rules(*referenceSet, querySet, k, metric, tau, alpha, naive, sampleAtLeaves, firstLeafExact, singleSampleLimit, false); // Find how many samples from the reference set we need and sample uniformly // from the reference set without replacement. const size_t numSamples = RAUtil::MinimumSamplesReqd(referenceSet->n_cols, k, tau, alpha); arma::uvec distinctSamples; math::ObtainDistinctSamples(0, referenceSet->n_cols, numSamples, distinctSamples); // Run the base case on each combination of query point and sampled // reference point. 
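  // (For intuition, using the example values from the class documentation,
  // n = 1000, k = 5, tau = 5, and alpha = 0.95: MinimumSamplesReqd() returns
  // the smallest m such that m uniform draws contain at least k points from
  // the top tau percent with probability at least alpha, and each query point
  // is then compared against only those m sampled reference points.)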
for (size_t i = 0; i < querySet.n_cols; ++i) for (size_t j = 0; j < distinctSamples.n_elem; ++j) rules.BaseCase(i, (size_t) distinctSamples[j]); rules.GetResults(*neighborPtr, *distancePtr); } else if (singleMode) { RuleType rules(*referenceSet, querySet, k, metric, tau, alpha, naive, sampleAtLeaves, firstLeafExact, singleSampleLimit, false); // If the reference root node is a leaf, then the sampling has already been // done in the RASearchRules constructor. This happens when naive = true. if (!referenceTree->IsLeaf()) { Log::Info << "Performing single-tree traversal..." << std::endl; // Create the traverser. typename Tree::template SingleTreeTraverser traverser(rules); // Now have it traverse for each point. for (size_t i = 0; i < querySet.n_cols; ++i) traverser.Traverse(i, *referenceTree); Log::Info << "Single-tree traversal complete." << std::endl; Log::Info << "Average number of distance calculations per query point: " << (rules.NumDistComputations() / querySet.n_cols) << "." << std::endl; } rules.GetResults(*neighborPtr, *distancePtr); } else // Dual-tree recursion. { Log::Info << "Performing dual-tree traversal..." << std::endl; // Build the query tree. Timer::Stop("computing_neighbors"); Timer::Start("tree_building"); Tree* queryTree = aux::BuildTree(const_cast(querySet), oldFromNewQueries); Timer::Stop("tree_building"); Timer::Start("computing_neighbors"); RuleType rules(*referenceSet, queryTree->Dataset(), k, metric, tau, alpha, naive, sampleAtLeaves, firstLeafExact, singleSampleLimit, false); typename Tree::template DualTreeTraverser traverser(rules); Log::Info << "Query statistic pre-search: " << queryTree->Stat().NumSamplesMade() << std::endl; traverser.Traverse(*queryTree, *referenceTree); Log::Info << "Dual-tree traversal complete." << std::endl; Log::Info << "Average number of distance calculations per query point: " << (rules.NumDistComputations() / querySet.n_cols) << "." << std::endl; rules.GetResults(*neighborPtr, *distancePtr); delete queryTree; } Timer::Stop("computing_neighbors"); // Map points back to original indices, if necessary. if (tree::TreeTraits::RearrangesDataset) { if (!singleMode && !naive && treeOwner) { // We must map both query and reference indices. neighbors.set_size(k, querySet.n_cols); distances.set_size(k, querySet.n_cols); for (size_t i = 0; i < distances.n_cols; i++) { // Map distances (copy a column). distances.col(oldFromNewQueries[i]) = distancePtr->col(i); // Map indices of neighbors. for (size_t j = 0; j < distances.n_rows; j++) { neighbors(j, oldFromNewQueries[i]) = oldFromNewReferences[(*neighborPtr)(j, i)]; } } // Finished with temporary matrices. delete neighborPtr; delete distancePtr; } else if (!singleMode && !naive) { // We must map query indices only. neighbors.set_size(k, querySet.n_cols); distances.set_size(k, querySet.n_cols); for (size_t i = 0; i < distances.n_cols; ++i) { // Map distances (copy a column). const size_t queryMapping = oldFromNewQueries[i]; distances.col(queryMapping) = distancePtr->col(i); neighbors.col(queryMapping) = neighborPtr->col(i); } // Finished with temporary matrices. delete neighborPtr; delete distancePtr; } else if (treeOwner) { // We must map reference indices only. neighbors.set_size(k, querySet.n_cols); // Map indices of neighbors. for (size_t i = 0; i < neighbors.n_cols; i++) for (size_t j = 0; j < neighbors.n_rows; j++) neighbors(j, i) = oldFromNewReferences[(*neighborPtr)(j, i)]; // Finished with temporary matrix. 
delete neighborPtr; } } } template class TreeType> void RASearch::Search( Tree* queryTree, const size_t k, arma::Mat& neighbors, arma::mat& distances) { Timer::Start("computing_neighbors"); // Get a reference to the query set. const MatType& querySet = queryTree->Dataset(); // Make sure we are in dual-tree mode. if (singleMode || naive) throw std::invalid_argument("cannot call NeighborSearch::Search() with a " "query tree when naive or singleMode are set to true"); // We won't need to map query indices, but will we need to map distances? arma::Mat* neighborPtr = &neighbors; if (treeOwner && tree::TreeTraits::RearrangesDataset) neighborPtr = new arma::Mat; neighborPtr->set_size(k, querySet.n_cols); distances.set_size(k, querySet.n_cols); // Create the helper object for the tree traversal. typedef RASearchRules RuleType; RuleType rules(*referenceSet, queryTree->Dataset(), k, metric, tau, alpha, naive, sampleAtLeaves, firstLeafExact, singleSampleLimit, false); // Create the traverser. typename Tree::template DualTreeTraverser traverser(rules); traverser.Traverse(*queryTree, *referenceTree); rules.GetResults(*neighborPtr, distances); Timer::Stop("computing_neighbors"); // Do we need to map indices? if (treeOwner && tree::TreeTraits::RearrangesDataset) { // We must map reference indices only. neighbors.set_size(k, querySet.n_cols); // Map indices of neighbors. for (size_t i = 0; i < neighbors.n_cols; i++) for (size_t j = 0; j < neighbors.n_rows; j++) neighbors(j, i) = oldFromNewReferences[(*neighborPtr)(j, i)]; // Finished with temporary matrix. delete neighborPtr; } } template class TreeType> void RASearch::Search( const size_t k, arma::Mat& neighbors, arma::mat& distances) { Timer::Start("computing_neighbors"); arma::Mat* neighborPtr = &neighbors; arma::mat* distancePtr = &distances; if (tree::TreeTraits::RearrangesDataset && treeOwner) { // We will always need to rearrange in this case. distancePtr = new arma::mat; neighborPtr = new arma::Mat; } // Initialize results. neighborPtr->set_size(k, referenceSet->n_cols); distancePtr->set_size(k, referenceSet->n_cols); // Create the helper object for the tree traversal. typedef RASearchRules RuleType; RuleType rules(*referenceSet, *referenceSet, k, metric, tau, alpha, naive, sampleAtLeaves, firstLeafExact, singleSampleLimit, true /* same sets */); if (naive) { // Find how many samples from the reference set we need and sample uniformly // from the reference set without replacement. const size_t numSamples = RAUtil::MinimumSamplesReqd(referenceSet->n_cols, k, tau, alpha); arma::uvec distinctSamples; math::ObtainDistinctSamples(0, referenceSet->n_cols, numSamples, distinctSamples); // The naive brute-force solution. for (size_t i = 0; i < referenceSet->n_cols; ++i) for (size_t j = 0; j < referenceSet->n_cols; ++j) rules.BaseCase(i, j); } else if (singleMode) { // Create the traverser. typename Tree::template SingleTreeTraverser traverser(rules); // Now have it traverse for each point. for (size_t i = 0; i < referenceSet->n_cols; ++i) traverser.Traverse(i, *referenceTree); } else { // Create the traverser. typename Tree::template DualTreeTraverser traverser(rules); traverser.Traverse(*referenceTree, *referenceTree); } rules.GetResults(*neighborPtr, *distancePtr); Timer::Stop("computing_neighbors"); // Do we need to map the reference indices? 
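  // (Trees that rearrange the dataset permute its columns during
  // construction; oldFromNewReferences[i] holds the original index of the
  // point that now sits at column i, so both the output columns and the
  // stored neighbor indices are translated back below.)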
if (treeOwner && tree::TreeTraits::RearrangesDataset) { neighbors.set_size(k, referenceSet->n_cols); distances.set_size(k, referenceSet->n_cols); for (size_t i = 0; i < distances.n_cols; ++i) { // Map distances (copy a column). const size_t refMapping = oldFromNewReferences[i]; distances.col(refMapping) = distancePtr->col(i); // Map each neighbor's index. for (size_t j = 0; j < distances.n_rows; ++j) neighbors(j, refMapping) = oldFromNewReferences[(*neighborPtr)(j, i)]; } // Finished with temporary matrices. delete neighborPtr; delete distancePtr; } } template class TreeType> void RASearch::ResetQueryTree( Tree* queryNode) const { queryNode->Stat().Bound() = SortPolicy::WorstDistance(); queryNode->Stat().NumSamplesMade() = 0; for (size_t i = 0; i < queryNode->NumChildren(); i++) ResetQueryTree(&queryNode->Child(i)); } template class TreeType> template void RASearch::Serialize( Archive& ar, const unsigned int /* version */) { using data::CreateNVP; // Serialize preferences for search. ar & CreateNVP(naive, "naive"); ar & CreateNVP(singleMode, "singleMode"); ar & CreateNVP(tau, "tau"); ar & CreateNVP(alpha, "alpha"); ar & CreateNVP(sampleAtLeaves, "sampleAtLeaves"); ar & CreateNVP(firstLeafExact, "firstLeafExact"); ar & CreateNVP(singleSampleLimit, "singleSampleLimit"); // If we are doing naive search, we serialize the dataset. Otherwise we // serialize the tree. if (naive) { if (Archive::is_loading::value) { if (setOwner && referenceSet) delete referenceSet; setOwner = true; } ar & CreateNVP(referenceSet, "referenceSet"); ar & CreateNVP(metric, "metric"); // If we are loading, set the tree to NULL and clean up memory if necessary. if (Archive::is_loading::value) { if (treeOwner && referenceTree) delete referenceTree; referenceTree = NULL; oldFromNewReferences.clear(); treeOwner = false; } } else { // Delete the current reference tree, if necessary and if we are loading. if (Archive::is_loading::value) { if (treeOwner && referenceTree) delete referenceTree; // After we load the tree, we will own it. treeOwner = true; } ar & CreateNVP(referenceTree, "referenceTree"); ar & CreateNVP(oldFromNewReferences, "oldFromNewReferences"); // If we are loading, set the dataset accordingly and clean up memory if // necessary. if (Archive::is_loading::value) { if (setOwner && referenceSet) delete referenceSet; referenceSet = &referenceTree->Dataset(); metric = referenceTree->Metric(); setOwner = false; } } } } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/rann/ra_search_rules.hpp000066400000000000000000000313051315013601400232740ustar00rootroot00000000000000/** * @file ra_search_rules.hpp * @author Parikshit Ram * * Defines the pruning rules and base case rules necessary to perform a * tree-based rank-approximate search (with an arbitrary tree) for the RASearch * class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RANN_RA_SEARCH_RULES_HPP #define MLPACK_METHODS_RANN_RA_SEARCH_RULES_HPP #include namespace mlpack { namespace neighbor { /** * The RASearchRules class is a template helper class used by RASearch class * when performing rank-approximate search via random-sampling. * * @tparam SortPolicy The sort policy for distances. * @tparam MetricType The metric to use for computation. 
* @tparam TreeType The tree type to use; must adhere to the TreeType API. */ template class RASearchRules { public: /** * Construct the RASearchRules object. This is usually done from within * the RASearch class at search time. * * @param referenceSet Set of reference data. * @param querySet Set of query data. * @param k Number of neighbors to search for. * @param metric Instantiated metric. * @param tau The rank-approximation in percentile of the data. * @param alpha The desired success probability. * @param naive If true, the rank-approximate search will be performed by * directly sampling the whole set instead of using the stratified * sampling on the tree. * @param sampleAtLeaves Sample at leaves for faster but less accurate * computation. * @param firstLeafExact Traverse to the first leaf without approximation. * @param singleSampleLimit The limit on the largest node that can be * approximated by sampling. * @param sameSet If true, the query and reference set are taken to be the * same, and a query point will not return itself in the results. */ RASearchRules(const arma::mat& referenceSet, const arma::mat& querySet, const size_t k, MetricType& metric, const double tau = 5, const double alpha = 0.95, const bool naive = false, const bool sampleAtLeaves = false, const bool firstLeafExact = false, const size_t singleSampleLimit = 20, const bool sameSet = false); /** * Store the list of candidates for each query point in the given matrices. * * @param neighbors Matrix storing lists of neighbors for each query point. * @param distances Matrix storing distances of neighbors for each query * point. */ void GetResults(arma::Mat& neighbors, arma::mat& distances); /** * Get the distance from the query point to the reference point. * This will update the list of candidates with the new point if appropriate. * * @param queryIndex Index of query point. * @param referenceIndex Index of reference point. */ double BaseCase(const size_t queryIndex, const size_t referenceIndex); /** * Get the score for recursion order. A low score indicates priority for * recursion, while DBL_MAX indicates that the node should not be recursed * into at all (it should be pruned). * * For rank-approximation, the scoring function first checks if pruning * by distance is possible. * If yes, then the node is given the score of * 'DBL_MAX' and the expected number of samples from that node are * added to the number of samples made for the query. * * If no, then the function tries to see if the node can be pruned by * approximation. If number of samples required from this node is small * enough, then that number of samples are acquired from this node * and the score is set to be 'DBL_MAX'. * * If the pruning by approximation is not possible either, the algorithm * continues with the usual tree-traversal. * * @param queryIndex Index of query point. * @param referenceNode Candidate node to be recursed into. */ double Score(const size_t queryIndex, TreeType& referenceNode); /** * Get the score for recursion order. A low score indicates priority for * recursion, while DBL_MAX indicates that the node should not be recursed * into at all (it should be pruned). * * For rank-approximation, the scoring function first checks if pruning * by distance is possible. * If yes, then the node is given the score of * 'DBL_MAX' and the expected number of samples from that node are * added to the number of samples made for the query. * * If no, then the function tries to see if the node can be pruned by * approximation. 
If the number of samples required from this node is small * enough, then that many samples are acquired from this node * and the score is set to 'DBL_MAX'. * * If the pruning by approximation is not possible either, the algorithm * continues with the usual tree-traversal. * * @param queryIndex Index of query point. * @param referenceNode Candidate node to be recursed into. * @param baseCaseResult Result of BaseCase(queryIndex, referenceNode). */ double Score(const size_t queryIndex, TreeType& referenceNode, const double baseCaseResult); /** * Re-evaluate the score for recursion order. A low score indicates priority * for recursion, while DBL_MAX indicates that the node should not be * recursed into at all (it should be pruned). This is used when the score * has already been calculated, but another recursion may have modified the * bounds for pruning. So the old score is checked against the new pruning * bound. * * For rank-approximation, it also checks whether the number of samples still * needed for a query to satisfy the rank constraint is small enough at this * point of the algorithm; if so, this node is approximated by sampling * and given a new score of 'DBL_MAX'. * * @param queryIndex Index of query point. * @param referenceNode Candidate node to be recursed into. * @param oldScore Old score produced by Score() (or Rescore()). */ double Rescore(const size_t queryIndex, TreeType& referenceNode, const double oldScore); /** * Get the score for recursion order. A low score indicates priority for * recursion, while DBL_MAX indicates that the node should not be recursed * into at all (it should be pruned). * * For the rank-approximation, we check if the referenceNode can be * approximated by sampling. If it can be, enough samples are made for * every query in the queryNode. No further query-tree traversal is * performed. * * The 'NumSamplesMade' query stat is propagated up the tree. Then, * if pruning occurs (by distance or by sampling), the 'NumSamplesMade' * stat is not propagated down the tree. If no pruning occurs, the * stat is propagated down the tree. * * @param queryNode Candidate query node to recurse into. * @param referenceNode Candidate reference node to recurse into. */ double Score(TreeType& queryNode, TreeType& referenceNode); /** * Get the score for recursion order, passing the base case result (in the * situation where it may be needed to calculate the recursion order). A low * score indicates priority for recursion, while DBL_MAX indicates that the * node should not be recursed into at all (it should be pruned). * * For the rank-approximation, we check if the referenceNode can be * approximated by sampling. If it can be, enough samples are made for * every query in the queryNode. No further query-tree traversal is * performed. * * The 'NumSamplesMade' query stat is propagated up the tree. Then, * if pruning occurs (by distance or by sampling), the 'NumSamplesMade' * stat is not propagated down the tree. If no pruning occurs, the * stat is propagated down the tree. * * @param queryNode Candidate query node to recurse into. * @param referenceNode Candidate reference node to recurse into. * @param baseCaseResult Result of BaseCase(queryIndex, referenceNode). */ double Score(TreeType& queryNode, TreeType& referenceNode, const double baseCaseResult); /** * Re-evaluate the score for recursion order. A low score indicates priority * for recursion, while DBL_MAX indicates that the node should not be * recursed into at all (it should be pruned).
This is used when the score * has already been calculated, but another recursion may have modified the * bounds for pruning. So the old score is checked against the new pruning * bound. * * For the rank-approximation, we check if the referenceNode can be * approximated by sampling. If it can be, enough samples are made for * every query in the queryNode. No further query-tree traversal is * performed. * * The 'NumSamplesMade' query stat is propagated up the tree. Then, * if pruning occurs (by distance or by sampling), the 'NumSamplesMade' * stat is not propagated down the tree. If no pruning occurs, the * stat is propagated down the tree. * * @param queryNode Candidate query node to recurse into. * @param referenceNode Candidate reference node to recurse into. * @param oldScore Old score produced by Score() (or Rescore()). */ double Rescore(TreeType& queryNode, TreeType& referenceNode, const double oldScore); size_t NumDistComputations() { return numDistComputations; } size_t NumEffectiveSamples() { if (numSamplesMade.n_elem == 0) return 0; else return arma::sum(numSamplesMade); } typedef typename tree::TraversalInfo<TreeType> TraversalInfoType; const TraversalInfoType& TraversalInfo() const { return traversalInfo; } TraversalInfoType& TraversalInfo() { return traversalInfo; } private: //! The reference set. const arma::mat& referenceSet; //! The query set. const arma::mat& querySet; //! Candidate represents a possible candidate neighbor (distance, index). typedef std::pair<double, size_t> Candidate; //! Compare two candidates based on the distance. struct CandidateCmp { bool operator()(const Candidate& c1, const Candidate& c2) { return !SortPolicy::IsBetter(c2.first, c1.first); } }; //! Use a priority queue to represent the list of candidate neighbors. typedef std::priority_queue<Candidate, std::vector<Candidate>, CandidateCmp> CandidateList; //! Set of candidate neighbors for each point. std::vector<CandidateList> candidates; //! Number of neighbors to search for. const size_t k; //! The instantiated metric. MetricType& metric; //! Whether to sample at leaves or just use all of it. bool sampleAtLeaves; //! Whether to do exact computation on the first leaf before any sampling. bool firstLeafExact; //! The limit on the largest node that can be approximated by sampling. size_t singleSampleLimit; //! The minimum number of samples required per query. size_t numSamplesReqd; //! The number of samples made for every query. arma::Col<size_t> numSamplesMade; //! The sampling ratio. double samplingRatio; // TO REMOVE: just for testing size_t numDistComputations; //! If the query and reference set are identical, this is true. bool sameSet; TraversalInfoType traversalInfo; /** * Helper function to insert a point into the list of candidate points. * * @param queryIndex Index of point whose neighbors we are inserting into. * @param neighbor Index of reference point which is being inserted. * @param distance Distance from query point to reference point. */ void InsertNeighbor(const size_t queryIndex, const size_t neighbor, const double distance); /** * Perform actual scoring for single-tree case. */ double Score(const size_t queryIndex, TreeType& referenceNode, const double distance, const double bestDistance); /** * Perform actual scoring for dual-tree case. */ double Score(TreeType& queryNode, TreeType& referenceNode, const double distance, const double bestDistance); static_assert(tree::TreeTraits<TreeType>::UniqueNumDescendants, "TreeType " "must provide a unique number of descendant points."); }; // class RASearchRules } // namespace neighbor } // namespace mlpack // Include implementation.
#include "ra_search_rules_impl.hpp" #endif // MLPACK_METHODS_RANN_RA_SEARCH_RULES_HPP mlpack-2.2.5/src/mlpack/methods/rann/ra_search_rules_impl.hpp000066400000000000000000000752461315013601400243310ustar00rootroot00000000000000/** * @file ra_search_rules_impl.hpp * @author Parikshit Ram * * Implementation of RASearchRules. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RANN_RA_SEARCH_RULES_IMPL_HPP #define MLPACK_METHODS_RANN_RA_SEARCH_RULES_IMPL_HPP // In case it hasn't been included yet. #include "ra_search_rules.hpp" namespace mlpack { namespace neighbor { template RASearchRules:: RASearchRules(const arma::mat& referenceSet, const arma::mat& querySet, const size_t k, MetricType& metric, const double tau, const double alpha, const bool naive, const bool sampleAtLeaves, const bool firstLeafExact, const size_t singleSampleLimit, const bool sameSet) : referenceSet(referenceSet), querySet(querySet), k(k), metric(metric), sampleAtLeaves(sampleAtLeaves), firstLeafExact(firstLeafExact), singleSampleLimit(singleSampleLimit), sameSet(sameSet) { // Validate tau to make sure that the rank approximation is greater than the // number of neighbors requested. // The rank approximation. const size_t n = referenceSet.n_cols; const size_t t = (size_t) std::ceil(tau * (double) n / 100.0); if (t < k) { Log::Warn << "Rank-approximation percentile " << tau << " corresponds to " << t << " points, which is less than k (" << k << ")."; Log::Fatal << "Cannot return " << k << " approximate nearest neighbors " << "from the nearest " << t << " points. Increase tau!" << std::endl; } else if (t == k) Log::Warn << "Rank-approximation percentile " << tau << " corresponds to " << t << " points; because k = " << k << ", this is exact search!" << std::endl; Timer::Start("computing_number_of_samples_reqd"); numSamplesReqd = RAUtil::MinimumSamplesReqd(n, k, tau, alpha); Timer::Stop("computing_number_of_samples_reqd"); // Initialize some statistics to be collected during the search. numSamplesMade = arma::zeros >(querySet.n_cols); numDistComputations = 0; samplingRatio = (double) numSamplesReqd / (double) n; Log::Info << "Minimum samples required per query: " << numSamplesReqd << ", sampling ratio: " << samplingRatio << std::endl; // Let's build the list of candidate neighbors for each query point. // It will be initialized with k candidates: (WorstDistance, size_t() - 1) // The list of candidates will be updated when visiting new points with the // BaseCase() method. const Candidate def = std::make_pair(SortPolicy::WorstDistance(), size_t() - 1); std::vector vect(k, def); CandidateList pqueue(CandidateCmp(), std::move(vect)); candidates.reserve(querySet.n_cols); for (size_t i = 0; i < querySet.n_cols; i++) candidates.push_back(pqueue); if (naive)// No tree traversal; just do naive sampling here. { // Sample enough points. 
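    // (numSamplesReqd was computed above via RAUtil::MinimumSamplesReqd();
    // every query point independently draws that many distinct reference
    // indices and runs BaseCase() on each, which is all of the work that
    // naive mode performs.)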
arma::uvec distinctSamples; for (size_t i = 0; i < querySet.n_cols; ++i) { math::ObtainDistinctSamples(0, n, numSamplesReqd, distinctSamples); for (size_t j = 0; j < distinctSamples.n_elem; j++) BaseCase(i, (size_t) distinctSamples[j]); } } } template void RASearchRules::GetResults( arma::Mat& neighbors, arma::mat& distances) { neighbors.set_size(k, querySet.n_cols); distances.set_size(k, querySet.n_cols); for (size_t i = 0; i < querySet.n_cols; i++) { CandidateList& pqueue = candidates[i]; for (size_t j = 1; j <= k; j++) { neighbors(k - j, i) = pqueue.top().second; distances(k - j, i) = pqueue.top().first; pqueue.pop(); } } }; template inline force_inline double RASearchRules::BaseCase( const size_t queryIndex, const size_t referenceIndex) { // If the datasets are the same, then this search is only using one dataset // and we should not return identical points. if (sameSet && (queryIndex == referenceIndex)) return 0.0; double distance = metric.Evaluate(querySet.unsafe_col(queryIndex), referenceSet.unsafe_col(referenceIndex)); InsertNeighbor(queryIndex, referenceIndex, distance); numSamplesMade[queryIndex]++; // TO REMOVE numDistComputations++; return distance; } template inline double RASearchRules::Score( const size_t queryIndex, TreeType& referenceNode) { const arma::vec queryPoint = querySet.unsafe_col(queryIndex); const double distance = SortPolicy::BestPointToNodeDistance(queryPoint, &referenceNode); const double bestDistance = candidates[queryIndex].top().first; return Score(queryIndex, referenceNode, distance, bestDistance); } template inline double RASearchRules::Score( const size_t queryIndex, TreeType& referenceNode, const double baseCaseResult) { const arma::vec queryPoint = querySet.unsafe_col(queryIndex); const double distance = SortPolicy::BestPointToNodeDistance(queryPoint, &referenceNode, baseCaseResult); const double bestDistance = candidates[queryIndex].top().first; return Score(queryIndex, referenceNode, distance, bestDistance); } template inline double RASearchRules::Score( const size_t queryIndex, TreeType& referenceNode, const double distance, const double bestDistance) { // If this is better than the best distance we've seen so far, maybe there // will be something down this node. Also check if enough samples are already // made for this query. if (SortPolicy::IsBetter(distance, bestDistance) && numSamplesMade[queryIndex] < numSamplesReqd) { // We cannot prune this node; try approximating it by sampling. // If we are required to visit the first leaf (to find possible duplicates), // make sure we do not approximate. if (numSamplesMade[queryIndex] > 0 || !firstLeafExact) { // Check if this node can be approximated by sampling. size_t samplesReqd = (size_t) std::ceil(samplingRatio * (double) referenceNode.NumDescendants()); samplesReqd = std::min(samplesReqd, numSamplesReqd - numSamplesMade[queryIndex]); if (samplesReqd > singleSampleLimit && !referenceNode.IsLeaf()) { // If too many samples required and not at a leaf, then can't prune. return distance; } else { if (!referenceNode.IsLeaf()) { // Then samplesReqd <= singleSampleLimit. // Hence, approximate the node by sampling enough number of points. arma::uvec distinctSamples; math::ObtainDistinctSamples(0, referenceNode.NumDescendants(), samplesReqd, distinctSamples); for (size_t i = 0; i < distinctSamples.n_elem; i++) // The counting of the samples are done in the 'BaseCase' function // so no book-keeping is required here. 
BaseCase(queryIndex, referenceNode.Descendant(distinctSamples[i])); // Node approximated, so we can prune it. return DBL_MAX; } else // We are at a leaf. { if (sampleAtLeaves) // If allowed to sample at leaves. { // Approximate node by sampling enough number of points. arma::uvec distinctSamples; math::ObtainDistinctSamples(0, referenceNode.NumDescendants(), samplesReqd, distinctSamples); for (size_t i = 0; i < distinctSamples.n_elem; i++) // The counting of the samples are done in the 'BaseCase' function // so no book-keeping is required here. BaseCase(queryIndex, referenceNode.Descendant(distinctSamples[i])); // (Leaf) node approximated, so we can prune it. return DBL_MAX; } else { // Not allowed to sample from leaves, so cannot prune. return distance; } } } } else { // Try first to visit the first leaf to boost your accuracy and find // (near) duplicates if they exist. return distance; } } else { // Either there cannot be anything better in this node, or enough number of // samples are already made. So prune it. // Add 'fake' samples from this node; they are fake because the distances to // these samples need not be computed. // If enough samples are already made, this step does not change the result // of the search. numSamplesMade[queryIndex] += (size_t) std::floor( samplingRatio * (double) referenceNode.NumDescendants()); return DBL_MAX; } } template inline double RASearchRules:: Rescore(const size_t queryIndex, TreeType& referenceNode, const double oldScore) { // If we are already pruning, still prune. if (oldScore == DBL_MAX) return oldScore; // Just check the score again against the distances. const double bestDistance = candidates[queryIndex].top().first; // If this is better than the best distance we've seen so far, // maybe there will be something down this node. // Also check if enough samples are already made for this query. if (SortPolicy::IsBetter(oldScore, bestDistance) && numSamplesMade[queryIndex] < numSamplesReqd) { // We cannot prune this node; thus, we try approximating this node by // sampling. // Here, we assume that since we are re-scoring, the algorithm has already // sampled some candidates, and if specified, also traversed to the first // leaf. So no check regarding that is made any more. // Check if this node can be approximated by sampling. size_t samplesReqd = (size_t) std::ceil(samplingRatio * (double) referenceNode.NumDescendants()); samplesReqd = std::min(samplesReqd, numSamplesReqd - numSamplesMade[queryIndex]); if (samplesReqd > singleSampleLimit && !referenceNode.IsLeaf()) { // If too many samples are required and we are not at a leaf, then we // can't prune. return oldScore; } else { if (!referenceNode.IsLeaf()) { // Then, samplesReqd <= singleSampleLimit. Hence, approximate the node // by sampling enough number of points. arma::uvec distinctSamples; math::ObtainDistinctSamples(0, referenceNode.NumDescendants(), samplesReqd, distinctSamples); for (size_t i = 0; i < distinctSamples.n_elem; i++) // The counting of the samples are done in the 'BaseCase' function so // no book-keeping is required here. BaseCase(queryIndex, referenceNode.Descendant(distinctSamples[i])); // Node approximated, so we can prune it. return DBL_MAX; } else // We are at a leaf. { if (sampleAtLeaves) { // Approximate node by sampling enough points. 
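          // (This is the sampleAtLeaves = true fast path: rather than
          // exhaustively scanning the leaf, only samplesReqd of its
          // descendants are evaluated, which is the "faster but less
          // accurate" trade-off noted in the constructor documentation.)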
arma::uvec distinctSamples; math::ObtainDistinctSamples(0, referenceNode.NumDescendants(), samplesReqd, distinctSamples); for (size_t i = 0; i < distinctSamples.n_elem; i++) // The counting of the samples are done in the 'BaseCase' function // so no book-keeping is required here. BaseCase(queryIndex, referenceNode.Descendant(distinctSamples[i])); // (Leaf) node approximated, so we can prune it. return DBL_MAX; } else { // We cannot sample from leaves, so we cannot prune. return oldScore; } } } } else { // Either there cannot be anything better in this node, or enough number of // samples are already made, so prune it. // Add 'fake' samples from this node; they are fake because the distances to // these samples need not be computed. If enough samples are already made, // this step does not change the result of the search. numSamplesMade[queryIndex] += (size_t) std::floor(samplingRatio * (double) referenceNode.NumDescendants()); return DBL_MAX; } } // Rescore(point, node, oldScore) template inline double RASearchRules::Score( TreeType& queryNode, TreeType& referenceNode) { // First try to find the distance bound to check if we can prune by distance. // Calculate the best node-to-node distance. const double distance = SortPolicy::BestNodeToNodeDistance(&queryNode, &referenceNode); double pointBound = DBL_MAX; double childBound = DBL_MAX; const double maxDescendantDistance = queryNode.FurthestDescendantDistance(); for (size_t i = 0; i < queryNode.NumPoints(); i++) { const double bound = candidates[queryNode.Point(i)].top().first + maxDescendantDistance; if (bound < pointBound) pointBound = bound; } for (size_t i = 0; i < queryNode.NumChildren(); i++) { const double bound = queryNode.Child(i).Stat().Bound(); if (bound < childBound) childBound = bound; } // Update the bound. queryNode.Stat().Bound() = std::min(pointBound, childBound); const double bestDistance = queryNode.Stat().Bound(); return Score(queryNode, referenceNode, distance, bestDistance); } template inline double RASearchRules::Score( TreeType& queryNode, TreeType& referenceNode, const double baseCaseResult) { // First try to find the distance bound to check if we can prune // by distance. // Find the best node-to-node distance. const double distance = SortPolicy::BestNodeToNodeDistance(&queryNode, &referenceNode, baseCaseResult); double pointBound = DBL_MAX; double childBound = DBL_MAX; const double maxDescendantDistance = queryNode.FurthestDescendantDistance(); for (size_t i = 0; i < queryNode.NumPoints(); i++) { const double bound = candidates[queryNode.Point(i)].top().first + maxDescendantDistance; if (bound < pointBound) pointBound = bound; } for (size_t i = 0; i < queryNode.NumChildren(); i++) { const double bound = queryNode.Child(i).Stat().Bound(); if (bound < childBound) childBound = bound; } // update the bound queryNode.Stat().Bound() = std::min(pointBound, childBound); const double bestDistance = queryNode.Stat().Bound(); return Score(queryNode, referenceNode, distance, bestDistance); } template inline double RASearchRules::Score( TreeType& queryNode, TreeType& referenceNode, const double distance, const double bestDistance) { // Update the number of samples made for this node -- propagate up from child // nodes if child nodes have made samples that the parent node is not aware // of. Remember, we must propagate down samples made to the child nodes if // 'queryNode' descend is deemed necessary. // Only update from children if a non-leaf node, obviously. 
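  // (Taking the minimum over the children below is deliberately conservative:
  // the parent may only claim as many samples as its least-sampled child, so
  // no query point is ever credited with samples it has not actually made.)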
if (!queryNode.IsLeaf()) { size_t numSamplesMadeInChildNodes = std::numeric_limits::max(); // Find the minimum number of samples made among all children. for (size_t i = 0; i < queryNode.NumChildren(); i++) { const size_t numSamples = queryNode.Child(i).Stat().NumSamplesMade(); if (numSamples < numSamplesMadeInChildNodes) numSamplesMadeInChildNodes = numSamples; } // The number of samples made for a node is propagated up from the child // nodes if the child nodes have made samples that the parent (which is the // current 'queryNode') is not aware of. queryNode.Stat().NumSamplesMade() = std::max( queryNode.Stat().NumSamplesMade(), numSamplesMadeInChildNodes); } // Now check if the node-pair interaction can be pruned. // If this is better than the best distance we've seen so far, maybe there // will be something down this node. Also check if enough samples are already // made for this 'queryNode'. if (SortPolicy::IsBetter(distance, bestDistance) && queryNode.Stat().NumSamplesMade() < numSamplesReqd) { // We cannot prune this node; try approximating this node by sampling. // If we are required to visit the first leaf (to find possible duplicates), // make sure we do not approximate. if (queryNode.Stat().NumSamplesMade() > 0 || !firstLeafExact) { // Check if this node can be approximated by sampling. size_t samplesReqd = (size_t) std::ceil(samplingRatio * (double) referenceNode.NumDescendants()); samplesReqd = std::min(samplesReqd, numSamplesReqd - queryNode.Stat().NumSamplesMade()); if (samplesReqd > singleSampleLimit && !referenceNode.IsLeaf()) { // If too many samples are required and we are not at a leaf, then we // can't prune. Since query tree descent is necessary now, propagate // the number of samples made down to the children. // Iterate through all children and propagate the number of samples made // to the children. Only update if the parent node has made samples the // children have not seen. for (size_t i = 0; i < queryNode.NumChildren(); i++) queryNode.Child(i).Stat().NumSamplesMade() = std::max( queryNode.Stat().NumSamplesMade(), queryNode.Child(i).Stat().NumSamplesMade()); return distance; } else { if (!referenceNode.IsLeaf()) { // Then samplesReqd <= singleSampleLimit. Hence, approximate node by // sampling enough number of points for every query in the query node. arma::uvec distinctSamples; for (size_t i = 0; i < queryNode.NumDescendants(); ++i) { const size_t queryIndex = queryNode.Descendant(i); math::ObtainDistinctSamples(0, referenceNode.NumDescendants(), samplesReqd, distinctSamples); for (size_t j = 0; j < distinctSamples.n_elem; j++) // The counting of the samples are done in the 'BaseCase' function // so no book-keeping is required here. BaseCase(queryIndex, referenceNode.Descendant(distinctSamples[j])); } // Update the number of samples made for the queryNode and also update // the number of sample made for the child nodes. queryNode.Stat().NumSamplesMade() += samplesReqd; // Since we are not going to descend down the query tree for this // reference node, there is no point updating the number of samples // made for the child nodes of this query node. // Node is approximated, so we can prune it. return DBL_MAX; } else { if (sampleAtLeaves) { // Approximate node by sampling enough number of points for every // query in the query node. 
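          // (Each query point owned by queryNode draws its own set of
          // distinct reference samples; every BaseCase() call below updates
          // that query's candidate queue and increments its numSamplesMade
          // counter.)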
arma::uvec distinctSamples; for (size_t i = 0; i < queryNode.NumDescendants(); ++i) { const size_t queryIndex = queryNode.Descendant(i); math::ObtainDistinctSamples(0, referenceNode.NumDescendants(), samplesReqd, distinctSamples); for (size_t j = 0; j < distinctSamples.n_elem; j++) // The counting of the samples are done in the 'BaseCase' // function so no book-keeping is required here. BaseCase(queryIndex, referenceNode.Descendant(distinctSamples[j])); } // Update the number of samples made for the queryNode and also // update the number of sample made for the child nodes. queryNode.Stat().NumSamplesMade() += samplesReqd; // Since we are not going to descend down the query tree for this // reference node, there is no point updating the number of samples // made for the child nodes of this query node. // (Leaf) node is approximated, so we can prune it. return DBL_MAX; } else { // We cannot sample from leaves, so we cannot prune. Propagate the // number of samples made down to the children. // Go through all children and propagate the number of // samples made to the children. for (size_t i = 0; i < queryNode.NumChildren(); i++) queryNode.Child(i).Stat().NumSamplesMade() = std::max( queryNode.Stat().NumSamplesMade(), queryNode.Child(i).Stat().NumSamplesMade()); return distance; } } } } else { // We must first visit the first leaf to boost accuracy. // Go through all children and propagate the number of // samples made to the children. for (size_t i = 0; i < queryNode.NumChildren(); i++) queryNode.Child(i).Stat().NumSamplesMade() = std::max( queryNode.Stat().NumSamplesMade(), queryNode.Child(i).Stat().NumSamplesMade()); return distance; } } else { // Either there cannot be anything better in this node, or enough number of // samples are already made, so prune it. // Add 'fake' samples from this node; fake because the distances to // these samples need not be computed. If enough samples are already made, // this step does not change the result of the search since this queryNode // will never be descended anymore. queryNode.Stat().NumSamplesMade() += (size_t) std::floor(samplingRatio * (double) referenceNode.NumDescendants()); // Since we are not going to descend down the query tree for this reference // node, there is no point updating the number of samples made for the child // nodes of this query node. return DBL_MAX; } } template inline double RASearchRules:: Rescore(TreeType& queryNode, TreeType& referenceNode, const double oldScore) { if (oldScore == DBL_MAX) return oldScore; // First try to find the distance bound to check if we can prune by distance. double pointBound = DBL_MAX; double childBound = DBL_MAX; const double maxDescendantDistance = queryNode.FurthestDescendantDistance(); for (size_t i = 0; i < queryNode.NumPoints(); i++) { const double bound = candidates[queryNode.Point(i)].top().first + maxDescendantDistance; if (bound < pointBound) pointBound = bound; } for (size_t i = 0; i < queryNode.NumChildren(); i++) { const double bound = queryNode.Child(i).Stat().Bound(); if (bound < childBound) childBound = bound; } // Update the bound. queryNode.Stat().Bound() = std::min(pointBound, childBound); const double bestDistance = queryNode.Stat().Bound(); // Now check if the node-pair interaction can be pruned by sampling. // Update the number of samples made for that node. Propagate up from child // nodes if child nodes have made samples that the parent node is not aware // of. Remember, we must propagate down samples made to the child nodes if // the parent samples. 
  // Only update from children if a non-leaf node, obviously.
  if (!queryNode.IsLeaf())
  {
    size_t numSamplesMadeInChildNodes = std::numeric_limits<size_t>::max();

    // Find the minimum number of samples made among all children.
    for (size_t i = 0; i < queryNode.NumChildren(); i++)
    {
      const size_t numSamples = queryNode.Child(i).Stat().NumSamplesMade();
      if (numSamples < numSamplesMadeInChildNodes)
        numSamplesMadeInChildNodes = numSamples;
    }

    // The number of samples made for a node is propagated up from the child
    // nodes if the child nodes have made samples that the parent (which is the
    // current 'queryNode') is not aware of.
    queryNode.Stat().NumSamplesMade() = std::max(
        queryNode.Stat().NumSamplesMade(), numSamplesMadeInChildNodes);
  }

  // Now check if the node-pair interaction can be pruned by sampling.
  // If this is better than the best distance we've seen so far, maybe there
  // will be something down this node.  Also check if enough samples have
  // already been made for this query.
  if (SortPolicy::IsBetter(oldScore, bestDistance) &&
      queryNode.Stat().NumSamplesMade() < numSamplesReqd)
  {
    // We cannot prune this node, so approximate by sampling.

    // Here we assume that since we are re-scoring, the algorithm has already
    // sampled some candidates, and if specified, also traversed to the first
    // leaf.  So no checks regarding that are made any more.
    size_t samplesReqd = (size_t) std::ceil(
        samplingRatio * (double) referenceNode.NumDescendants());
    samplesReqd = std::min(samplesReqd,
        numSamplesReqd - queryNode.Stat().NumSamplesMade());

    if (samplesReqd > singleSampleLimit && !referenceNode.IsLeaf())
    {
      // If too many samples are required and we are not at a leaf, then we
      // can't prune.

      // Since query tree descent is necessary now, propagate the number of
      // samples made down to the children.

      // Go through all children and propagate the number of samples made to
      // the children.  Only update if the parent node has made samples the
      // children have not seen.
      for (size_t i = 0; i < queryNode.NumChildren(); i++)
        queryNode.Child(i).Stat().NumSamplesMade() = std::max(
            queryNode.Stat().NumSamplesMade(),
            queryNode.Child(i).Stat().NumSamplesMade());

      return oldScore;
    }
    else
    {
      if (!referenceNode.IsLeaf()) // If not a leaf,
      {
        // then samplesReqd <= singleSampleLimit.  Hence, approximate the node
        // by sampling enough points for every query in the query node.
        arma::uvec distinctSamples;
        for (size_t i = 0; i < queryNode.NumDescendants(); ++i)
        {
          const size_t queryIndex = queryNode.Descendant(i);
          math::ObtainDistinctSamples(0, referenceNode.NumDescendants(),
              samplesReqd, distinctSamples);
          for (size_t j = 0; j < distinctSamples.n_elem; j++)
            // The counting of the samples is done in the 'BaseCase'
            // function, so no book-keeping is required here.
            BaseCase(queryIndex, referenceNode.Descendant(distinctSamples[j]));
        }

        // Update the number of samples made for the query node and also update
        // the number of samples made for the child nodes.
        queryNode.Stat().NumSamplesMade() += samplesReqd;

        // Since we are not going to descend down the query tree for this
        // reference node, there is no point updating the number of samples
        // made for the child nodes of this query node.

        // Node approximated, so we can prune it.
        return DBL_MAX;
      }
      else // We are at a leaf.
      {
        if (sampleAtLeaves)
        {
          // Approximate the node by sampling enough points for every query in
          // the query node.
          arma::uvec distinctSamples;
          for (size_t i = 0; i < queryNode.NumDescendants(); ++i)
          {
            const size_t queryIndex = queryNode.Descendant(i);
            math::ObtainDistinctSamples(0, referenceNode.NumDescendants(),
                samplesReqd, distinctSamples);
            for (size_t j = 0; j < distinctSamples.n_elem; j++)
              // The counting of the samples is done in BaseCase(), so no
              // book-keeping is required here.
              BaseCase(queryIndex,
                  referenceNode.Descendant(distinctSamples[j]));
          }

          // Update the number of samples made for the query node and also
          // update the number of samples made for the child nodes.
          queryNode.Stat().NumSamplesMade() += samplesReqd;

          // Since we are not going to descend down the query tree for this
          // reference node, there is no point updating the number of samples
          // made for the child nodes of this query node.

          // (Leaf) node approximated, so we can prune it.
          return DBL_MAX;
        }
        else
        {
          // We cannot sample from leaves, so we cannot prune.
          // Propagate the number of samples made down to the children.
          for (size_t i = 0; i < queryNode.NumChildren(); i++)
            queryNode.Child(i).Stat().NumSamplesMade() = std::max(
                queryNode.Stat().NumSamplesMade(),
                queryNode.Child(i).Stat().NumSamplesMade());

          return oldScore;
        }
      }
    }
  }
  else
  {
    // Either there cannot be anything better in this node, or enough samples
    // have already been made, so prune it.

    // Add 'fake' samples from this node; fake because the distances to
    // these samples need not be computed.  If enough samples are already made,
    // this step does not change the result of the search since this query node
    // will never be descended again.
    queryNode.Stat().NumSamplesMade() += (size_t) std::floor(samplingRatio *
        (double) referenceNode.NumDescendants());

    // Since we are not going to descend down the query tree for this reference
    // node, there is no point updating the number of samples made for the
    // child nodes of this query node.
    return DBL_MAX;
  }
} // Rescore(node, node, oldScore)

/**
 * Helper function to insert a point into the list of candidate points.
 *
 * @param queryIndex Index of point whose neighbors we are inserting into.
 * @param neighbor Index of reference point which is being inserted.
 * @param distance Distance from query point to reference point.
 */
template<typename SortPolicy, typename MetricType, typename TreeType>
inline void RASearchRules<SortPolicy, MetricType, TreeType>::
InsertNeighbor(
    const size_t queryIndex,
    const size_t neighbor,
    const double distance)
{
  CandidateList& pqueue = candidates[queryIndex];
  Candidate c = std::make_pair(distance, neighbor);
  if (CandidateCmp()(c, pqueue.top()))
  {
    pqueue.pop();
    pqueue.push(c);
  }
}

} // namespace neighbor
} // namespace mlpack

#endif // MLPACK_METHODS_RANN_RA_SEARCH_RULES_IMPL_HPP
mlpack-2.2.5/src/mlpack/methods/rann/ra_typedef.hpp000066400000000000000000000055401315013601400222570ustar00rootroot00000000000000/**
 * @file ra_typedef.hpp
 * @author Parikshit Ram
 *
 * Simple typedefs describing template instantiations of the RASearch
 * class which are commonly used.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_RANN_RA_TYPEDEF_HPP
#define MLPACK_RANN_RA_TYPEDEF_HPP

// In case someone included this directly.
#include "ra_search.hpp"

#include
#include
#include

namespace mlpack {
namespace neighbor {

/**
 * The KRANN class is the k-rank-approximate-nearest-neighbors method.  It
 * returns L2 distances for each of the k rank-approximate nearest-neighbors.
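 *
 * A minimal usage sketch (the constructor and Search() signatures are assumed
 * here to follow the usual mlpack neighbor-search API; see ra_search.hpp):
 *
 * @code
 * arma::mat referenceData; // Column-major: one point per column.
 * // ... fill referenceData ...
 * KRANN rann(referenceData);
 * arma::Mat<size_t> neighbors;
 * arma::mat distances;
 * rann.Search(5, neighbors, distances); // 5 approximate nearest neighbors.
 * @endcode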
 *
 * The approximation is controlled with two parameters (see allkrann_main.cpp)
 * which can be specified at search time.  So the tree building is done only
 * once, while the search can be performed multiple times with different
 * approximation levels.
 */
typedef RASearch<> KRANN;

/**
 * The KRAFN class is the k-rank-approximate-farthest-neighbors method.  It
 * returns L2 distances for each of the k rank-approximate farthest-neighbors.
 *
 * The approximation is controlled with two parameters (see allkrann_main.cpp)
 * which can be specified at search time.  So the tree building is done only
 * once, while the search can be performed multiple times with different
 * approximation levels.
 */
typedef RASearch<FurthestNeighborSort> KRAFN;

/**
 * @deprecated
 * The AllkRANN class is the all-k-rank-approximate-nearest-neighbors method.
 * It returns L2 distances for each of the k rank-approximate
 * nearest-neighbors.
 *
 * The approximation is controlled with two parameters (see allkrann_main.cpp)
 * which can be specified at search time.  So the tree building is done only
 * once, while the search can be performed multiple times with different
 * approximation levels.
 *
 * This typedef will be removed in mlpack 3.0.0; use the KRANN typedef instead.
 */
typedef RASearch<> AllkRANN;

/**
 * @deprecated
 * The AllkRAFN class is the all-k-rank-approximate-farthest-neighbors method.
 * It returns L2 distances for each of the k rank-approximate
 * farthest-neighbors.
 *
 * The approximation is controlled with two parameters (see allkrann_main.cpp)
 * which can be specified at search time.  So the tree building is done only
 * once, while the search can be performed multiple times with different
 * approximation levels.
 *
 * This typedef will be removed in mlpack 3.0.0; use the KRAFN typedef instead.
 */
typedef RASearch<FurthestNeighborSort> AllkRAFN;

} // namespace neighbor
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/rann/ra_util.cpp000066400000000000000000000104011315013601400215610ustar00rootroot00000000000000/**
 * @file ra_util.cpp
 * @author Parikshit Ram
 * @author Ryan Curtin
 *
 * Utilities for rank-approximate neighbor search.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "ra_util.hpp"

using namespace mlpack;
using namespace mlpack::neighbor;

size_t mlpack::neighbor::RAUtil::MinimumSamplesReqd(const size_t n,
                                                    const size_t k,
                                                    const double tau,
                                                    const double alpha)
{
  size_t ub = n; // The upper bound on the binary search.
  size_t lb = k; // The lower bound on the binary search.
  size_t m = lb; // The minimum number of random samples.

  // The rank-approximation.
  const size_t t = (size_t) std::ceil(tau * (double) n / 100.0);

  double prob;
  Log::Assert(alpha <= 1.0);

  // Go through all values of sample sizes to find the minimum number of
  // samples required to satisfy the desired bound.
  bool done = false;

  // This performs a binary search on the integer values between 'lb = k'
  // and 'ub = n' to find the minimum number of samples 'm' required to obtain
  // the desired success probability 'alpha'.
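  //
  // Worked illustration (values hypothetical, ignoring the 0.001 tolerance
  // and the final 'm + 1' adjustment below): for n = 1000, k = 1, tau = 5
  // (so t = 50) and alpha = 0.95, SuccessProbability() reduces to
  // 1 - (1 - t/n)^m = 1 - 0.95^m, and the search converges near m = 59, the
  // smallest m with 1 - 0.95^m >= 0.95.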
do { prob = SuccessProbability(n, k, m, t); if (prob > alpha) { if (prob - alpha < 0.001 || ub < lb + 2) { done = true; break; } else ub = m; } else { if (prob < alpha) { if (m == lb) { m++; continue; } else lb = m; } else { done = true; break; } } m = (ub + lb) / 2; } while (!done); return (std::min(m + 1, n)); } double mlpack::neighbor::RAUtil::SuccessProbability(const size_t n, const size_t k, const size_t m, const size_t t) { if (k == 1) { if (m > n - t) return 1.0; double eps = (double) t / (double) n; return 1.0 - std::pow(1.0 - eps, (double) m); } // Faster implementation for topK = 1. else { if (m < k) return 0.0; if (m > n - t + k - 1) return 1.0; double eps = (double) t / (double) n; double sum = 0.0; // The probability that 'k' of the 'm' samples lie within the top 't' // of the neighbors is given by: // sum_{j = k}^m Choose(m, j) (t/n)^j (1 - t/n)^{m - j} // which is also equal to // 1 - sum_{j = 0}^{k - 1} Choose(m, j) (t/n)^j (1 - t/n)^{m - j} // // So this is a m - k term summation or a k term summation. So if // m > 2k, do the k term summation, otherwise do the m term summation. size_t lb; size_t ub; bool topHalf; if (2 * k < m) { // Compute 1 - sum_{j = 0}^{k - 1} Choose(m, j) eps^j (1 - eps)^{m - j} // eps = t/n. // // Choosing 'lb' as 1 and 'ub' as k so as to sum from 1 to (k - 1), and // add the term (1 - eps)^m term separately. lb = 1; ub = k; topHalf = true; sum = std::pow(1 - eps, (double) m); } else { // Compute sum_{j = k}^m Choose(m, j) eps^j (1 - eps)^{m - j} // eps = t/n. // // Choosing 'lb' as k and 'ub' as m so as to sum from k to (m - 1), and // add the term eps^m term separately. lb = k; ub = m; topHalf = false; sum = std::pow(eps, (double) m); } for (size_t j = lb; j < ub; j++) { // Compute Choose(m, j). double mCj = (double) m; size_t jTrans; // If j < m - j, compute Choose(m, j). // If j > m - j, compute Choose(m, m - j). if (topHalf) jTrans = j; else jTrans = m - j; for(size_t i = 2; i <= jTrans; i++) { mCj *= (double) (m - (i - 1)); mCj /= (double) i; } sum += (mCj * std::pow(eps, (double) j) * std::pow(1.0 - eps, (double) (m - j))); } if (topHalf) sum = 1.0 - sum; return sum; } // For k > 1. } mlpack-2.2.5/src/mlpack/methods/rann/ra_util.hpp000066400000000000000000000045641315013601400216010ustar00rootroot00000000000000/** * @file ra_util.hpp * @author Parikshit Ram * @author Ryan Curtin * * Utilities for rank-approximate neighbor search. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_RANN_RA_UTIL_HPP #define MLPACK_METHODS_RANN_RA_UTIL_HPP #include namespace mlpack { namespace neighbor { class RAUtil { public: /** * Compute the minimum number of samples required to guarantee * the given rank-approximation and success probability. * * @param n Size of the set to be sampled from. * @param k The number of neighbors required within the rank-approximation. * @param tau The rank-approximation in percentile of the data. * @param alpha The success probability desired. */ static size_t MinimumSamplesReqd(const size_t n, const size_t k, const double tau, const double alpha); /** * Compute the success probability of obtaining 'k'-neighbors from a * set of size 'n' within the top 't' neighbors if 'm' samples are made. * * @param n Size of the set being sampled from. 
* @param k The number of neighbors required within the rank-approximation. * @param m The number of random samples. * @param t The desired rank-approximation. */ static double SuccessProbability(const size_t n, const size_t k, const size_t m, const size_t t); /** * Pick up desired number of samples (with replacement) from a given range * of integers so that only the distinct samples are returned from * the range [0 - specified upper bound) * * @param numSamples Number of random samples. * @param rangeUpperBound The upper bound on the range of integers. * @param distinctSamples The list of the distinct samples. */ static void ObtainDistinctSamples(const size_t numSamples, const size_t rangeUpperBound, arma::uvec& distinctSamples); }; } // namespace neighbor } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/regularized_svd/000077500000000000000000000000001315013601400216535ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/regularized_svd/CMakeLists.txt000066400000000000000000000010241315013601400244100ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES regularized_svd.hpp regularized_svd_impl.hpp regularized_svd_function.hpp regularized_svd_function.cpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/regularized_svd/regularized_svd.hpp000066400000000000000000000073441315013601400255650ustar00rootroot00000000000000/** * @file regularized_svd.hpp * @author Siddharth Agrawal * * An implementation of Regularized SVD. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_REGULARIZED_SVD_REGULARIZED_SVD_HPP #define MLPACK_METHODS_REGULARIZED_SVD_REGULARIZED_SVD_HPP #include #include #include #include "regularized_svd_function.hpp" namespace mlpack { namespace svd { /** * Regularized SVD is a matrix factorization technique that seeks to reduce the * error on the training set, that is on the examples for which the ratings have * been provided by the users. It is a fairly straightforward technique where * the user and item matrices are updated with the help of Stochastic Gradient * Descent(SGD) updates. The updates also penalize the learning of large feature * values by means of regularization. More details can be found in the following * links: * * http://sifter.org/~simon/journal/20061211.html * http://www.cs.uic.edu/~liub/KDD-cup-2007/proceedings/Regular-Paterek.pdf * * An example of how to use the interface is shown below: * * @code * arma::mat data; // Rating data in the form of coordinate list. * * const size_t rank = 20; // Rank used for the decomposition. * const size_t iterations = 10; // Number of iterations used for optimization. * * const double alpha = 0.01 // Learning rate for the SGD optimizer. * const double lambda = 0.1 // Regularization parameter for the optimization. * * // Make a RegularizedSVD object. * RegularizedSVD<> rSVD(iterations, alpha, lambda); * * arma::mat u, v; // User and item matrices. 
 *
 * // Use the Apply() method to get a factorization.
 * rSVD.Apply(data, rank, u, v);
 * @endcode
 */
template<
  template<typename> class OptimizerType = mlpack::optimization::SGD
>
class RegularizedSVD
{
 public:
  /**
   * Constructor for Regularized SVD.  Obtains the user and item matrices after
   * training on the passed data.  The constructor initiates an object of class
   * RegularizedSVDFunction for optimization.  It uses the SGD optimizer by
   * default.  The optimizer uses a template specialization of Optimize().
   *
   * @param iterations Number of optimization iterations.
   * @param alpha Learning rate for the SGD optimizer.
   * @param lambda Regularization parameter for the optimization.
   */
  RegularizedSVD(const size_t iterations = 10,
                 const double alpha = 0.01,
                 const double lambda = 0.02);

  /**
   * Obtains the user and item matrices using the provided data and rank.
   *
   * @param data Rating data matrix.
   * @param rank Rank parameter to be used for optimization.
   * @param u Item matrix obtained on decomposition.
   * @param v User matrix obtained on decomposition.
   */
  void Apply(const arma::mat& data,
             const size_t rank,
             arma::mat& u,
             arma::mat& v);

 private:
  //! Number of optimization iterations.
  size_t iterations;
  //! Learning rate for the SGD optimizer.
  double alpha;
  //! Regularization parameter for the optimization.
  double lambda;
};

} // namespace svd
} // namespace mlpack

namespace mlpack {
namespace cf {

//! Factorizer traits of Regularized SVD.
template<>
class FactorizerTraits<svd::RegularizedSVD<>>
{
 public:
  //! Data provided to RegularizedSVD need not be cleaned.
  static const bool UsesCoordinateList = true;
};

} // namespace cf
} // namespace mlpack

// Include implementation.
#include "regularized_svd_impl.hpp"

#endif
mlpack-2.2.5/src/mlpack/methods/regularized_svd/regularized_svd_function.cpp000066400000000000000000000151171315013601400274620ustar00rootroot00000000000000/**
 * @file regularized_svd_function.cpp
 * @author Siddharth Agrawal
 *
 * An implementation of the RegularizedSVDFunction class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include "regularized_svd_function.hpp"

namespace mlpack {
namespace svd {

RegularizedSVDFunction::RegularizedSVDFunction(const arma::mat& data,
                                               const size_t rank,
                                               const double lambda) :
    data(data),
    rank(rank),
    lambda(lambda)
{
  // Number of users and items in the data.
  numUsers = max(data.row(0)) + 1;
  numItems = max(data.row(1)) + 1;

  // Initialize the parameters.
  initialPoint.randu(rank, numUsers + numItems);
}

double RegularizedSVDFunction::Evaluate(const arma::mat& parameters) const
{
  // The cost for the optimization is as follows:
  //     f(u, v) = sum((rating(i, j) - u(i).t() * v(j))^2)
  // The sum is over all the ratings in the rating matrix.
  // 'i' points to the user and 'j' points to the item being considered.
  // The regularization term is added to the above cost, where the vectors u(i)
  // and v(j) are regularized for each rating they contribute to.
  double cost = 0.0;

  for (size_t i = 0; i < data.n_cols; i++)
  {
    // Indices for accessing the correct parameter columns.
    const size_t user = data(0, i);
    const size_t item = data(1, i) + numUsers;

    // Calculate the squared error in the prediction.
    const double rating = data(2, i);
    double ratingError = rating - arma::dot(parameters.col(user),
                                            parameters.col(item));
    double ratingErrorSquared = ratingError * ratingError;

    // Calculate the regularization penalty corresponding to the parameters.
    double userVecNorm = arma::norm(parameters.col(user), 2);
    double itemVecNorm = arma::norm(parameters.col(item), 2);
    double regularizationError = lambda * (userVecNorm * userVecNorm +
                                           itemVecNorm * itemVecNorm);

    cost += (ratingErrorSquared + regularizationError);
  }

  return cost;
}

double RegularizedSVDFunction::Evaluate(const arma::mat& parameters,
                                        const size_t i) const
{
  // Indices for accessing the correct parameter columns.
  const size_t user = data(0, i);
  const size_t item = data(1, i) + numUsers;

  // Calculate the squared error in the prediction.
  const double rating = data(2, i);
  double ratingError = rating - arma::dot(parameters.col(user),
                                          parameters.col(item));
  double ratingErrorSquared = ratingError * ratingError;

  // Calculate the regularization penalty corresponding to the parameters.
  double userVecNorm = arma::norm(parameters.col(user), 2);
  double itemVecNorm = arma::norm(parameters.col(item), 2);
  double regularizationError = lambda * (userVecNorm * userVecNorm +
                                         itemVecNorm * itemVecNorm);

  return (ratingErrorSquared + regularizationError);
}

void RegularizedSVDFunction::Gradient(const arma::mat& parameters,
                                      arma::mat& gradient) const
{
  // For an example with rating corresponding to user 'i' and item 'j', the
  // gradients for the parameters are as follows:
  //     grad(u(i)) = lambda * u(i) - error * v(j)
  //     grad(v(j)) = lambda * v(j) - error * u(i)
  // 'error' is the prediction error for that example, which is:
  //     rating(i, j) - u(i).t() * v(j)
  // The full gradient is calculated by summing the contributions over all the
  // training examples.
  gradient.zeros(rank, numUsers + numItems);

  for (size_t i = 0; i < data.n_cols; i++)
  {
    // Indices for accessing the correct parameter columns.
    const size_t user = data(0, i);
    const size_t item = data(1, i) + numUsers;

    // Prediction error for the example.
    const double rating = data(2, i);
    double ratingError = rating - arma::dot(parameters.col(user),
                                            parameters.col(item));

    // Gradient is non-zero only for the parameter columns corresponding to the
    // example.
    gradient.col(user) += 2 * (lambda * parameters.col(user) -
                               ratingError * parameters.col(item));
    gradient.col(item) += 2 * (lambda * parameters.col(item) -
                               ratingError * parameters.col(user));
  }
}

} // namespace svd
} // namespace mlpack

// Template specialization for the SGD optimizer.
namespace mlpack {
namespace optimization {

template<>
double SGD<svd::RegularizedSVDFunction>::Optimize(arma::mat& parameters)
{
  // Find the number of functions to use.
  const size_t numFunctions = function.NumFunctions();

  // To keep track of where we are and how things are going.
  size_t currentFunction = 0;
  double overallObjective = 0;

  // Calculate the first objective function.
  for (size_t i = 0; i < numFunctions; i++)
    overallObjective += function.Evaluate(parameters, i);

  const arma::mat data = function.Dataset();

  // Now iterate!
  for (size_t i = 1; i != maxIterations; i++, currentFunction++)
  {
    // Is this iteration the start of a sequence?
    if ((currentFunction % numFunctions) == 0)
    {
      // Reset the counter variables.
      overallObjective = 0;
      currentFunction = 0;
    }

    const size_t numUsers = function.NumUsers();

    // Indices for accessing the correct parameter columns.
    const size_t user = data(0, currentFunction);
    const size_t item = data(1, currentFunction) + numUsers;

    // Prediction error for the example.
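    // The update applied below, for a single rating r(i, j) with error
    // e = r(i, j) - dot(u(i), v(j)), is (alpha being the step size):
    //     u(i) <- u(i) - alpha * (lambda * u(i) - e * v(j))
    //     v(j) <- v(j) - alpha * (lambda * v(j) - e * u(i))
    // i.e. one stochastic step on the regularized squared error for this
    // single rating.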
const double rating = data(2, currentFunction); double ratingError = rating - arma::dot(parameters.col(user), parameters.col(item)); double lambda = function.Lambda(); // Gradient is non-zero only for the parameter columns corresponding to the // example. parameters.col(user) -= stepSize * (lambda * parameters.col(user) - ratingError * parameters.col(item)); parameters.col(item) -= stepSize * (lambda * parameters.col(item) - ratingError * parameters.col(user)); // Now add that to the overall objective function. overallObjective += function.Evaluate(parameters, currentFunction); } return overallObjective; } } // namespace optimization } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/regularized_svd/regularized_svd_function.hpp000066400000000000000000000072331315013601400274670ustar00rootroot00000000000000/** * @file regularized_svd_function.hpp * @author Siddharth Agrawal * * An implementation of the RegularizedSVDFunction class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_REGULARIZED_SVD_REGULARIZED_FUNCTION_SVD_HPP #define MLPACK_METHODS_REGULARIZED_SVD_REGULARIZED_FUNCTION_SVD_HPP #include #include namespace mlpack { namespace svd { class RegularizedSVDFunction { public: /** * Constructor for RegularizedSVDFunction class. The constructor calculates * the number of users and items in the passed data. It also randomly * initializes the parameter values. * * @param data Dataset for which SVD is calculated. * @param rank Rank used for matrix factorization. * @param lambda Regularization parameter used for optimization. */ RegularizedSVDFunction(const arma::mat& data, const size_t rank, const double lambda); /** * Evaluates the cost function over all examples in the data. * * @param parameters Parameters(user/item matrices) of the decomposition. */ double Evaluate(const arma::mat& parameters) const; /** * Evaluates the cost function for one training example. Useful for the SGD * optimizer abstraction which uses one training example at a time. * * @param parameters Parameters(user/item matrices) of the decomposition. * @param i Index of the training example to be used. */ double Evaluate(const arma::mat& parameters, const size_t i) const; /** * Evaluates the full gradient of the cost function over all the training * examples. * * @param parameters Parameters(user/item matrices) of the decomposition. * @param gradient Calculated gradient for the parameters. */ void Gradient(const arma::mat& parameters, arma::mat& gradient) const; //! Return the initial point for the optimization. const arma::mat& GetInitialPoint() const { return initialPoint; } //! Return the dataset passed into the constructor. const arma::mat& Dataset() const { return data; } //! Return the number of training examples. Useful for SGD optimizer. size_t NumFunctions() const { return data.n_cols; } //! Return the number of users in the data. size_t NumUsers() const { return numUsers; } //! Return the number of items in the data. size_t NumItems() const { return numItems; } //! Return the regularization parameters. double Lambda() const { return lambda; } //! Return the rank used for the factorization. size_t Rank() const { return rank; } private: //! Rating data. const arma::mat& data; //! Initial parameter point. arma::mat initialPoint; //! 
Rank used for matrix factorization. size_t rank; //! Regularization parameter for the optimization. double lambda; //! Number of users in the given dataset. size_t numUsers; //! Number of items in the given dataset. size_t numItems; }; } // namespace svd } // namespace mlpack namespace mlpack { namespace optimization { /** * Template specialization for SGD optimizer. Used because the gradient * affects only a small number of parameters per example, and thus the normal * abstraction does not work as fast as we might like it to. */ template<> double SGD::Optimize( arma::mat& parameters); } // namespace optimization } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/regularized_svd/regularized_svd_impl.hpp000066400000000000000000000036531315013601400266050ustar00rootroot00000000000000/** * @file regularized_svd_impl.hpp * @author Siddharth Agrawal * * An implementation of Regularized SVD. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_REGULARIZED_SVD_REGULARIZED_SVD_IMPL_HPP #define MLPACK_METHODS_REGULARIZED_SVD_REGULARIZED_SVD_IMPL_HPP namespace mlpack { namespace svd { template class OptimizerType> RegularizedSVD::RegularizedSVD(const size_t iterations, const double alpha, const double lambda) : iterations(iterations), alpha(alpha), lambda(lambda) { // Nothing to do. } template class OptimizerType> void RegularizedSVD::Apply(const arma::mat& data, const size_t rank, arma::mat& u, arma::mat& v) { // Make the optimizer object using a RegularizedSVDFunction object. RegularizedSVDFunction rSVDFunc(data, rank, lambda); mlpack::optimization::SGD optimizer(rSVDFunc, alpha, iterations * data.n_cols); // Get optimized parameters. arma::mat parameters = rSVDFunc.GetInitialPoint(); optimizer.Optimize(parameters); // Constants for extracting user and item matrices. const size_t numUsers = max(data.row(0)) + 1; const size_t numItems = max(data.row(1)) + 1; // Extract user and item matrices from the optimized parameters. u = parameters.submat(0, numUsers, rank - 1, numUsers + numItems - 1).t(); v = parameters.submat(0, 0, rank - 1, numUsers - 1); } } // namespace svd } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/softmax_regression/000077500000000000000000000000001315013601400224035ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/softmax_regression/CMakeLists.txt000066400000000000000000000011101315013601400251340ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. set(SOURCES softmax_regression.hpp softmax_regression_impl.hpp softmax_regression_function.hpp softmax_regression_function.cpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) add_cli_executable(softmax_regression) mlpack-2.2.5/src/mlpack/methods/softmax_regression/softmax_regression.hpp000066400000000000000000000206651315013601400270460ustar00rootroot00000000000000/** * @file softmax_regression.hpp * @author Siddharth Agrawal * * An implementation of softmax regression. 
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_SOFTMAX_REGRESSION_SOFTMAX_REGRESSION_HPP
#define MLPACK_METHODS_SOFTMAX_REGRESSION_SOFTMAX_REGRESSION_HPP

#include
#include

#include "softmax_regression_function.hpp"

namespace mlpack {
namespace regression {

/**
 * Softmax Regression is a classifier which can be used for classification when
 * the data available can take two or more class values.  It is a
 * generalization of Logistic Regression (which is used only for binary
 * classification).  The model has a different set of parameters for each
 * class, but can be easily converted into a vectorized implementation as has
 * been done in this module.  The model can be used for direct classification
 * of feature data or in conjunction with unsupervised learning methods.  More
 * technical details about the model can be found on the following webpage:
 *
 * http://ufldl.stanford.edu/wiki/index.php/Softmax_Regression
 *
 * An example on how to use the interface is shown below:
 *
 * @code
 * arma::mat train_data; // Training data matrix.
 * arma::Row<size_t> labels; // Labels associated with the data.
 * const size_t numClasses = 10; // Number of classes.
 *
 * // Train the model using default options.
 * SoftmaxRegression<> regressor1(train_data, labels, numClasses);
 *
 * const size_t numBasis = 5; // Parameter required for L-BFGS algorithm.
 * const size_t numIterations = 100; // Maximum number of iterations.
 *
 * // Use an instantiated optimizer for the training.
 * SoftmaxRegressionFunction srf(train_data, labels, numClasses);
 * L_BFGS<SoftmaxRegressionFunction> optimizer(srf, numBasis, numIterations);
 * SoftmaxRegression<L_BFGS> regressor2(optimizer);
 *
 * arma::mat test_data; // Test data matrix.
 * arma::Row<size_t> predictions1, predictions2; // To store predictions in.
 *
 * // Obtain predictions from both the learned models.
 * regressor1.Classify(test_data, predictions1);
 * regressor2.Classify(test_data, predictions2);
 * @endcode
 */
template<
  template<typename> class OptimizerType = mlpack::optimization::L_BFGS
>
class SoftmaxRegression
{
 public:
  /**
   * Initialize the SoftmaxRegression without performing training.  The default
   * value of lambda is 0.0001.  Be sure to use Train() before calling
   * Classify() or ComputeAccuracy(), otherwise the results may be
   * meaningless.
   *
   * @param inputSize Size of the input feature vector.
   * @param numClasses Number of classes for classification.
   * @param fitIntercept Add intercept term or not.
   */
  SoftmaxRegression(const size_t inputSize,
                    const size_t numClasses,
                    const bool fitIntercept = false);

  /**
   * Construct the SoftmaxRegression class with the provided data and labels.
   * This will train the model.  Optionally, the parameter 'lambda' can be
   * passed, which controls the amount of L2-regularization in the objective
   * function.  By default, the model takes a small value.
   *
   * @param data Input training features.  Each column is associated with one
   *     sample.
   * @param labels Labels associated with the feature data.
   * @param numClasses Number of classes for classification.
   * @param lambda L2-regularization constant.
   * @param fitIntercept Add intercept term or not.
*/ SoftmaxRegression(const arma::mat& data, const arma::Row& labels, const size_t numClasses, const double lambda = 0.0001, const bool fitIntercept = false); /** * Construct the softmax regression model with the given training data. This * will train the model. This overload takes an already instantiated optimizer * and uses it to train the model. The optimizer should hold an instantiated * SoftmaxRegressionFunction object for the function to operate upon. This * option should be preferred when the optimizer options are to be changed. * * @param optimizer Instantiated optimizer with instantiated error function. */ SoftmaxRegression(OptimizerType& optimizer); /** * Predict the class labels for the provided feature points. The function * calculates the probabilities for every class, given a data point. It then * chooses the class which has the highest probability among all. * * This method is deprecated and will be removed in mlpack 3.0.0. You should * use Classify() instead. * * @param testData Matrix of data points for which predictions are to be made. * @param predictions Vector to store the predictions in. */ mlpack_deprecated void Predict(const arma::mat& testData, arma::Row& predictions) const; /** * Classify the given points, returning the predicted labels for each point. * The function calculates the probabilities for every class, given a data * point. It then chooses the class which has the highest probability among * all. * * @param dataset Set of points to classify. * @param labels Predicted labels for each point. */ void Classify(const arma::mat& dataset, arma::Row& labels) const; /** * Computes accuracy of the learned model given the feature data and the * labels associated with each data point. Predictions are made using the * provided data and are compared with the actual labels. * * @param testData Matrix of data points using which predictions are made. * @param labels Vector of labels associated with the data. */ double ComputeAccuracy(const arma::mat& testData, const arma::Row& labels) const; /** * Train the softmax regression model with the given optimizer. * The optimizer should hold an instantiated * SoftmaxRegressionFunction object for the function to operate upon. This * option should be preferred when the optimizer options are to be changed. * @param optimizer Instantiated optimizer with instantiated error function. * @return Objective value of the final point. */ double Train(OptimizerType& optimizer); /** * Train the softmax regression with the given training data. * @param data Input data with each column as one example. * @param labels Labels associated with the feature data. * @param numClasses Number of classes for classification. * @return Objective value of the final point. */ double Train(const arma::mat &data, const arma::Row& labels, const size_t numClasses); //! Sets the number of classes. size_t& NumClasses() { return numClasses; } //! Gets the number of classes. size_t NumClasses() const { return numClasses; } //! Sets the regularization parameter. double& Lambda() { return lambda; } //! Gets the regularization parameter. double Lambda() const { return lambda; } //! Gets the intercept term flag. We can't change this after training. bool FitIntercept() const { return fitIntercept; } //! Get the model parameters. arma::mat& Parameters() { return parameters; } //! Get the model parameters. const arma::mat& Parameters() const { return parameters; } //! Gets the features size of the training data size_t FeatureSize() const { return fitIntercept ? 
parameters.n_cols - 1 : parameters.n_cols; } /** * Serialize the SoftmaxRegression model. */ template void Serialize(Archive& ar, const unsigned int /* version */) { using mlpack::data::CreateNVP; ar & CreateNVP(parameters, "parameters"); ar & CreateNVP(numClasses, "numClasses"); ar & CreateNVP(lambda, "lambda"); ar & CreateNVP(fitIntercept, "fitIntercept"); } private: //! Parameters after optimization. arma::mat parameters; //! Number of classes. size_t numClasses; //! L2-regularization constant. double lambda; //! Intercept term flag. bool fitIntercept; }; } // namespace regression } // namespace mlpack // Include implementation. #include "softmax_regression_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/softmax_regression/softmax_regression_function.cpp000066400000000000000000000163751315013601400307510ustar00rootroot00000000000000/** * @file softmax_regression_function.cpp * @author Siddharth Agrawal * * Implementation of function to be optimized for softmax regression. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "softmax_regression_function.hpp" using namespace mlpack; using namespace mlpack::regression; SoftmaxRegressionFunction::SoftmaxRegressionFunction( const arma::mat& data, const arma::Row& labels, const size_t numClasses, const double lambda, const bool fitIntercept) : data(data), numClasses(numClasses), lambda(lambda), fitIntercept(fitIntercept) { // Initialize the parameters to suitable values. initialPoint = InitializeWeights(); // Calculate the label matrix. GetGroundTruthMatrix(labels, groundTruth); } /** * Initializes parameter weights to random values taken from a scaled standard * normal distribution. The weights cannot be initialized to zero, as that will * lead to each class output being the same. */ const arma::mat SoftmaxRegressionFunction::InitializeWeights() { return InitializeWeights(data.n_rows, numClasses, fitIntercept); } const arma::mat SoftmaxRegressionFunction::InitializeWeights( const size_t featureSize, const size_t numClasses, const bool fitIntercept) { arma::mat parameters; InitializeWeights(parameters, featureSize, numClasses, fitIntercept); return parameters; } void SoftmaxRegressionFunction::InitializeWeights( arma::mat &weights, const size_t featureSize, const size_t numClasses, const bool fitIntercept) { // Initialize values to 0.005 * r. 'r' is a matrix of random values taken from // a Gaussian distribution with mean zero and variance one. // If the fitIntercept flag is true, parameters.col(0) is the intercept. if (fitIntercept) weights.randn(numClasses, featureSize + 1); else weights.randn(numClasses, featureSize); weights *= 0.005; } /** * This is equivalent to applying the indicator function to the training * labels. The output is in the form of a matrix, which leads to simpler * calculations in the Evaluate() and Gradient() methods. */ void SoftmaxRegressionFunction::GetGroundTruthMatrix(const arma::Row& labels, arma::sp_mat& groundTruth) { // Calculate the ground truth matrix according to the labels passed. The // ground truth matrix is a matrix of dimensions 'numClasses * numExamples', // where each column contains a single entry of '1', marking the label // corresponding to that example. // Row pointers and column pointers corresponding to the entries. 
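  //
  // For example (illustrative only): labels = {0, 2, 1} with numClasses = 3
  // yields the 3 x 3 sparse ground truth matrix
  //     [ 1 0 0 ]
  //     [ 0 0 1 ]
  //     [ 0 1 0 ]
  // i.e. column i holds a single 1 in row labels(i).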
  arma::uvec rowPointers(labels.n_elem);
  arma::uvec colPointers(labels.n_elem + 1);

  // Row pointers are the labels of the examples, and column pointers are the
  // number of cumulative entries made up until that column.
  for (size_t i = 0; i < labels.n_elem; i++)
  {
    rowPointers(i) = labels(i);
    colPointers(i + 1) = i + 1;
  }

  // All entries are '1'.
  arma::vec values;
  values.ones(labels.n_elem);

  // Calculate the matrix.
  groundTruth = arma::sp_mat(rowPointers, colPointers, values, numClasses,
                             labels.n_elem);
}

/**
 * Evaluate the probabilities matrix.  If the fitIntercept flag is true,
 * it should consider the parameters.col(0) intercept term.
 */
void SoftmaxRegressionFunction::GetProbabilitiesMatrix(
    const arma::mat& parameters,
    arma::mat& probabilities) const
{
  arma::mat hypothesis;

  if (fitIntercept)
  {
    // In order to add the intercept term, we should compute the following
    // matrix:
    //     [1; data] = arma::join_cols(ones(1, data.n_cols), data)
    //     hypothesis = arma::exp(parameters * [1; data]).
    //
    // Since the cost of the join may be high due to the copy of the original
    // data, split the hypothesis computation into two components.
    hypothesis = arma::exp(arma::repmat(parameters.col(0), 1, data.n_cols) +
        parameters.cols(1, parameters.n_cols - 1) * data);
  }
  else
  {
    hypothesis = arma::exp(parameters * data);
  }

  probabilities = hypothesis / arma::repmat(arma::sum(hypothesis, 0),
                                            numClasses, 1);
}

/**
 * Evaluates the objective function given the parameters.
 */
double SoftmaxRegressionFunction::Evaluate(const arma::mat& parameters) const
{
  // The objective function is the negative log likelihood of the model
  // calculated over all the training examples.  Mathematically it is as
  // follows:
  //     log likelihood = sum(1{y_i = j} * log(probability(j))) / m
  // The sum is over all 'i's and 'j's, where 'i' points to a training example
  // and 'j' points to a particular class.  1{x} is an indicator function whose
  // value is 1 only when 'x' is satisfied, otherwise it is 0.
  // 'm' is the number of training examples.
  // The cost also takes into account the regularization to control the
  // parameter weights.

  // Calculate the class probabilities for each training example.  The
  // probabilities for each of the classes are given by:
  //     p_j = exp(theta_j' * x_i) / sum(exp(theta_k' * x_i))
  // The sum is calculated over all the classes.
  // x_i is the input vector for a particular training example.
  // theta_j is the parameter vector associated with a particular class.
  arma::mat probabilities;
  GetProbabilitiesMatrix(parameters, probabilities);

  // Calculate the log likelihood and regularization terms.
  double logLikelihood, weightDecay, cost;
  logLikelihood = arma::accu(groundTruth % arma::log(probabilities)) /
      data.n_cols;
  weightDecay = 0.5 * lambda * arma::accu(parameters % parameters);

  // The cost is the sum of the negative log likelihood and the regularization
  // terms.
  cost = -logLikelihood + weightDecay;

  return cost;
}

/**
 * Calculates and stores the gradient values given a set of parameters.
 */
void SoftmaxRegressionFunction::Gradient(const arma::mat& parameters,
                                         arma::mat& gradient) const
{
  // Calculate the class probabilities for each training example.  The
  // probabilities for each of the classes are given by:
  //     p_j = exp(theta_j' * x_i) / sum(exp(theta_k' * x_i))
  // The sum is calculated over all the classes.
  // x_i is the input vector for a particular training example.
  // theta_j is the parameter vector associated with a particular class.
  arma::mat probabilities;
  GetProbabilitiesMatrix(parameters, probabilities);

  // Calculate the parameter gradients.
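  // In matrix form this is (for the no-intercept case, matching the code
  // below; Y is the ground truth matrix, P the probabilities matrix, and m
  // the number of examples):
  //     gradient = (P - Y) * data' / m + lambda * parameters
  // which restates, per class j:
  //     grad(theta_j) = sum_i (p_j(x_i) - 1{y_i = j}) * x_i / m
  //                     + lambda * theta_j.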
  gradient.set_size(parameters.n_rows, parameters.n_cols);

  if (fitIntercept)
  {
    // Treating the intercept term parameters.col(0) separately to avoid
    // the cost of building the matrix [1; data].
    arma::mat inner = probabilities - groundTruth;

    gradient.col(0) =
        inner * arma::ones(data.n_cols, 1) / data.n_cols +
        lambda * parameters.col(0);
    gradient.cols(1, parameters.n_cols - 1) =
        inner * data.t() / data.n_cols +
        lambda * parameters.cols(1, parameters.n_cols - 1);
  }
  else
  {
    gradient = (probabilities - groundTruth) * data.t() / data.n_cols +
        lambda * parameters;
  }
}
mlpack-2.2.5/src/mlpack/methods/softmax_regression/softmax_regression_function.hpp000066400000000000000000000127201315013601400307440ustar00rootroot00000000000000/**
 * @file softmax_regression_function.hpp
 * @author Siddharth Agrawal
 *
 * The function to be optimized for softmax regression.  Any mlpack optimizer
 * can be used.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_SOFTMAX_REGRESSION_SOFTMAX_REGRESSION_FUNCTION_HPP
#define MLPACK_METHODS_SOFTMAX_REGRESSION_SOFTMAX_REGRESSION_FUNCTION_HPP

#include

namespace mlpack {
namespace regression {

class SoftmaxRegressionFunction
{
 public:
  /**
   * Construct the Softmax Regression objective function with the given
   * parameters.
   *
   * @param data Input training data, each column associated with one sample.
   * @param labels Labels associated with the feature data.
   * @param numClasses Number of classes for classification.
   * @param lambda L2-regularization constant.
   * @param fitIntercept Intercept term flag.
   */
  SoftmaxRegressionFunction(const arma::mat& data,
                            const arma::Row<size_t>& labels,
                            const size_t numClasses,
                            const double lambda = 0.0001,
                            const bool fitIntercept = false);

  //! Initializes the parameters of the model to suitable values.
  const arma::mat InitializeWeights();

  /**
   * Initialize Softmax Regression weights (trainable parameters) with the
   * given parameters.
   *
   * @param featureSize The number of features in the training set.
   * @param numClasses Number of classes for classification.
   * @param fitIntercept If true, an intercept is fitted.
   * @return Initialized model weights.
   */
  static const arma::mat InitializeWeights(const size_t featureSize,
                                           const size_t numClasses,
                                           const bool fitIntercept = false);

  /**
   * Initialize Softmax Regression weights (trainable parameters) with the
   * given parameters.
   *
   * @param weights This will be filled with the initialized model weights.
   * @param featureSize The number of features in the training set.
   * @param numClasses Number of classes for classification.
   * @param fitIntercept Intercept term flag.
   */
  static void InitializeWeights(arma::mat &weights,
                                const size_t featureSize,
                                const size_t numClasses,
                                const bool fitIntercept = false);

  /**
   * Constructs the ground truth label matrix with the passed labels.
   *
   * @param labels Labels associated with the training data.
   * @param groundTruth Reference to the arma::sp_mat which stores the computed
   *     matrix.
   */
  void GetGroundTruthMatrix(const arma::Row<size_t>& labels,
                            arma::sp_mat& groundTruth);

  /**
   * Evaluate the probabilities matrix with the passed parameters.
   * probabilities(i, j) =
   *     exp(\theta_i * data_j) / sum_k(exp(\theta_k * data_j)).
   * It represents the probability that data_j belongs to class i.
* * @param parameters Current values of the model parameters. * @param probabilities Pointer to arma::mat which stores the probabilities. */ void GetProbabilitiesMatrix(const arma::mat& parameters, arma::mat& probabilities) const; /** * Evaluates the objective function of the softmax regression model using the * given parameters. The cost function has terms for the log likelihood error * and the regularization cost. The objective function takes a low value when * the model generalizes well for the given training data, while having small * parameter values. * * @param parameters Current values of the model parameters. */ double Evaluate(const arma::mat& parameters) const; /** * Evaluates the gradient values of the objective function given the current * set of parameters. The function calculates the probabilities for each class * given the parameters, and computes the gradients based on the difference * from the ground truth. * * @param parameters Current values of the model parameters. * @param gradient Matrix where gradient values will be stored. */ void Gradient(const arma::mat& parameters, arma::mat& gradient) const; //! Return the initial point for the optimization. const arma::mat& GetInitialPoint() const { return initialPoint; } //! Gets the number of classes. size_t NumClasses() const { return numClasses; } //! Gets the features size of the training data size_t FeatureSize() const { return fitIntercept ? initialPoint.n_cols - 1 : initialPoint.n_cols; } //! Sets the regularization parameter. double& Lambda() { return lambda; } //! Gets the regularization parameter. double Lambda() const { return lambda; } //! Gets the intercept flag. bool FitIntercept() const { return fitIntercept; } private: //! Training data matrix. const arma::mat& data; //! Label matrix for the provided data. arma::sp_mat groundTruth; //! Initial parameter point. arma::mat initialPoint; //! Number of classes. size_t numClasses; //! L2-regularization constant. double lambda; //! Intercept term flag. bool fitIntercept; }; } // namespace regression } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/softmax_regression/softmax_regression_impl.hpp000066400000000000000000000135761315013601400300720ustar00rootroot00000000000000/** * @file softmax_regression_impl.hpp * @author Siddharth Agrawal * * Implementation of softmax regression. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_SOFTMAX_REGRESSION_SOFTMAX_REGRESSION_IMPL_HPP #define MLPACK_METHODS_SOFTMAX_REGRESSION_SOFTMAX_REGRESSION_IMPL_HPP // In case it hasn't been included yet. 
#include "softmax_regression.hpp" namespace mlpack { namespace regression { template class OptimizerType> SoftmaxRegression:: SoftmaxRegression(const size_t inputSize, const size_t numClasses, const bool fitIntercept) : numClasses(numClasses), lambda(0.0001), fitIntercept(fitIntercept) { SoftmaxRegressionFunction::InitializeWeights(parameters, inputSize, numClasses, fitIntercept); } template class OptimizerType> SoftmaxRegression::SoftmaxRegression(const arma::mat& data, const arma::Row& labels, const size_t numClasses, const double lambda, const bool fitIntercept) : numClasses(numClasses), lambda(lambda), fitIntercept(fitIntercept) { SoftmaxRegressionFunction regressor(data, labels, numClasses, lambda, fitIntercept); OptimizerType optimizer(regressor); parameters = regressor.GetInitialPoint(); Train(optimizer); } template class OptimizerType> SoftmaxRegression::SoftmaxRegression( OptimizerType& optimizer) : parameters(optimizer.Function().GetInitialPoint()), numClasses(optimizer.Function().NumClasses()), lambda(optimizer.Function().Lambda()), fitIntercept(optimizer.Function().FitIntercept()) { Train(optimizer); } template class OptimizerType> void SoftmaxRegression::Predict(const arma::mat& testData, arma::Row& predictions) const { Classify(testData, predictions); } template class OptimizerType> void SoftmaxRegression::Classify(const arma::mat& dataset, arma::Row& labels) const { if (dataset.n_rows != FeatureSize()) { std::ostringstream oss; oss << "SoftmaxRegression::Classify(): dataset has " << dataset.n_rows << " dimensions, but model has " << FeatureSize() << "dimensions"; throw std::invalid_argument(oss.str()); } // Calculate the probabilities for each test input. arma::mat hypothesis, probabilities; if (fitIntercept) { // In order to add the intercept term, we should compute following matrix: // [1; data] = arma::join_cols(ones(1, data.n_cols), data) // hypothesis = arma::exp(parameters * [1; data]). // // Since the cost of join maybe high due to the copy of original data, // split the hypothesis computation to two components. hypothesis = arma::exp( arma::repmat(parameters.col(0), 1, dataset.n_cols) + parameters.cols(1, parameters.n_cols - 1) * dataset); } else { hypothesis = arma::exp(parameters * dataset); } probabilities = hypothesis / arma::repmat(arma::sum(hypothesis, 0), numClasses, 1); // Prepare necessary data. labels.zeros(dataset.n_cols); double maxProbability = 0; // For each test input. for (size_t i = 0; i < dataset.n_cols; i++) { // For each class. for (size_t j = 0; j < numClasses; j++) { // If a higher class probability is encountered, change prediction. if (probabilities(j, i) > maxProbability) { maxProbability = probabilities(j, i); labels(i) = j; } } // Set maximum probability to zero for the next input. maxProbability = 0; } } template class OptimizerType> double SoftmaxRegression::ComputeAccuracy( const arma::mat& testData, const arma::Row& labels) const { arma::Row predictions; // Get predictions for the provided data. Classify(testData, predictions); // Increment count for every correctly predicted label. size_t count = 0; for (size_t i = 0; i < predictions.n_elem; i++) if (predictions(i) == labels(i)) count++; // Return percentage accuracy. return (count * 100.0) / predictions.n_elem; } template class OptimizerType> double SoftmaxRegression::Train( OptimizerType& optimizer) { // Train the model. 
Timer::Start("softmax_regression_optimization"); const double out = optimizer.Optimize(parameters); Timer::Stop("softmax_regression_optimization"); Log::Info << "SoftmaxRegression::SoftmaxRegression(): final objective of " << "trained model is " << out << "." << std::endl; return out; } template class OptimizerType> double SoftmaxRegression::Train(const arma::mat& data, const arma::Row& labels, const size_t numClasses) { SoftmaxRegressionFunction regressor(data, labels, numClasses, lambda, fitIntercept); OptimizerType optimizer(regressor); return Train(optimizer); } } // namespace regression } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/softmax_regression/softmax_regression_main.cpp000066400000000000000000000256401315013601400300430ustar00rootroot00000000000000/** * @file softmax_regression_main.cpp * * Main program for softmax regression. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include // Define parameters for the executable. PROGRAM_INFO("Softmax Regression", "This program performs softmax regression, " "a generalization of logistic regression to the multiclass case, and has " "support for L2 regularization. The program is able to train a model, load" " an existing model, and give predictions (and optionally their accuracy) " "for test data." "\n\n" "Training a softmax regression model is done by giving a file of training " "points with --training_file (-t) and their corresponding labels with " "--labels_file (-l). The number of classes can be manually specified with " "the --number_of_classes (-n) option, and the maximum number of iterations " "of the L-BFGS optimizer can be specified with the --max_iterations (-M) " "option. The L2 regularization constant can be specified with --lambda " "(-r), and if an intercept term is not desired in the model, the " "--no_intercept (-N) can be specified." "\n\n" "The trained model can be saved to a file with the --output_model_file (-m) " "option. If training is not desired, but only testing is, a model can be " "loaded with the --input_model_file (-i) option. At the current time, a loaded " "model cannot be trained further, so specifying both -i and -t is not " "allowed." "\n\n" "The program is also able to evaluate a model on test data. A test dataset" " can be specified with the --test_data (-T) option. Class predictions " "will be saved in the file specified with the --predictions_file (-p) " "option. If labels are specified for the test data, with the --test_labels" " (-L) option, then the program will print the accuracy of the predictions " "on the given test set and its corresponding labels."); // Required options. PARAM_STRING_IN("training_file", "A file containing the training set (the " "matrix of predictors, X).", "t", ""); PARAM_STRING_IN("labels_file", "A file containing labels (0 or 1) for the " "points in the training set (y). The labels must order as a row.", "l", ""); // Model loading/saving. PARAM_STRING_IN("input_model_file", "File containing existing model " "(parameters).", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained softmax regression " "model to.", "M"); // Testing. 
PARAM_STRING_IN("test_data", "File containing test dataset.", "T", ""); PARAM_STRING_OUT("predictions_file", "File to save predictions for test dataset" " into.", "p"); PARAM_STRING_IN("test_labels", "File containing test labels.", "L", ""); // Softmax configuration options. PARAM_INT_IN("max_iterations", "Maximum number of iterations before " "termination.", "n", 400); PARAM_INT_IN("number_of_classes", "Number of classes for classification; if " "unspecified (or 0), the number of classes found in the labels will be " "used.", "c", 0); PARAM_DOUBLE_IN("lambda", "L2-regularization constant", "r", 0.0001); PARAM_FLAG("no_intercept", "Do not add the intercept term to the model.", "N"); using namespace std; // Count the number of classes in the given labels (if numClasses == 0). size_t CalculateNumberOfClasses(const size_t numClasses, const arma::Row& trainLabels); // Test the accuracy of the model. template void TestClassifyAcc(const string& testFile, const string& predictionsFile, const string& testLabels, const size_t numClasses, const Model& model); // Build the softmax model given the parameters. template unique_ptr TrainSoftmax(const string& trainingFile, const string& labelsFile, const string& inputModelFile, const size_t maxIterations); int main(int argc, char** argv) { using namespace mlpack; CLI::ParseCommandLine(argc, argv); const string trainingFile = CLI::GetParam("training_file"); const string labelsFile = CLI::GetParam("labels_file"); const string inputModelFile = CLI::GetParam("input_model_file"); const string outputModelFile = CLI::GetParam("output_model_file"); const string testLabelsFile = CLI::GetParam("test_labels"); const int maxIterations = CLI::GetParam("max_iterations"); const string predictionsFile = CLI::GetParam("predictions_file"); // One of inputFile and modelFile must be specified. if (!CLI::HasParam("input_model_file") && !CLI::HasParam("training_file")) Log::Fatal << "One of --input_model_file or --training_file must be specified." << endl; if ((CLI::HasParam("training_file") || CLI::HasParam("labels_file")) && !(CLI::HasParam("training_file") && CLI::HasParam("labels_file"))) Log::Fatal << "--labels_file must be specified with --training_file!" << endl; if (maxIterations < 0) Log::Fatal << "Invalid value for maximum iterations (" << maxIterations << ")! Must be greater than or equal to 0." << endl; // Make sure we have an output file of some sort. if (!CLI::HasParam("output_model_file") && !CLI::HasParam("test_labels") && !CLI::HasParam("predictions_file")) Log::Warn << "None of --output_model_file, --test_labels, or " << "--predictions_file are set; no results from this program will be " << "saved." << endl; using SM = regression::SoftmaxRegression<>; unique_ptr sm = TrainSoftmax(trainingFile, labelsFile, inputModelFile, maxIterations); TestClassifyAcc(CLI::GetParam("test_data"), CLI::GetParam("predictions_file"), CLI::GetParam("test_labels"), sm->NumClasses(), *sm); if (CLI::HasParam("output_model_file")) data::Save(CLI::GetParam("output_model_file"), "softmax_regression_model", *sm, true); } size_t CalculateNumberOfClasses(const size_t numClasses, const arma::Row& trainLabels) { if (numClasses == 0) { const set unique_labels(begin(trainLabels), end(trainLabels)); return unique_labels.size(); } else { return numClasses; } } template void TestClassifyAcc(const string& testFile, const string& predictionsFile, const string& testLabelsFile, size_t numClasses, const Model& model) { using namespace mlpack; // If there is no test set, there is nothing to test on. 
if (testFile.empty() && predictionsFile.empty() && testLabelsFile.empty()) return; if (!testLabelsFile.empty() && testFile.empty()) { Log::Warn << "--test_labels specified, but --test_file is not specified." << " The parameter will be ignored." << endl; return; } if (!predictionsFile.empty() && testFile.empty()) { Log::Warn << "--predictions_file specified, but --test_file is not " << "specified. The parameter will be ignored." << endl; return; } // Get the test dataset, and get predictions. arma::mat testData; data::Load(testFile, testData, true); arma::Row predictLabels; model.Classify(testData, predictLabels); // Save predictions, if desired. if (!predictionsFile.empty()) data::Save(predictionsFile, predictLabels); // Calculate accuracy, if desired. if (!testLabelsFile.empty()) { arma::Mat tmpTestLabels; arma::Row testLabels; data::Load(testLabelsFile, tmpTestLabels, true); testLabels = tmpTestLabels.row(0); if (testData.n_cols != testLabels.n_elem) { Log::Fatal << "Test data in --test_data has " << testData.n_cols << " points, but labels in --test_labels have " << testLabels.n_elem << " labels!" << endl; } vector bingoLabels(numClasses, 0); vector labelSize(numClasses, 0); for (arma::uword i = 0; i != predictLabels.n_elem; ++i) { if (predictLabels(i) == testLabels(i)) { ++bingoLabels[testLabels(i)]; } ++labelSize[testLabels(i)]; } size_t totalBingo = 0; for (size_t i = 0; i != bingoLabels.size(); ++i) { Log::Info << "Accuracy for points with label " << i << " is " << (bingoLabels[i] / static_cast(labelSize[i])) << " (" << bingoLabels[i] << " of " << labelSize[i] << ")." << endl; totalBingo += bingoLabels[i]; } Log::Info << "Total accuracy for all points is " << (totalBingo) / static_cast(predictLabels.n_elem) << " (" << totalBingo << " of " << predictLabels.n_elem << ")." << endl; } } template unique_ptr TrainSoftmax(const string& trainingFile, const string& labelsFile, const string& inputModelFile, const size_t maxIterations) { using namespace mlpack; using SRF = regression::SoftmaxRegressionFunction; unique_ptr sm; if (!inputModelFile.empty()) { sm.reset(new Model(0, 0, false)); mlpack::data::Load(inputModelFile, "softmax_regression_model", *sm, true); } else { arma::mat trainData; arma::Row trainLabels; arma::Mat tmpTrainLabels; //load functions of mlpack do not works on windows, it will complain //"[FATAL] Unable to detect type of 'softmax_data.txt'; incorrect extension?" data::Load(trainingFile, trainData, true); data::Load(labelsFile, tmpTrainLabels, true); trainLabels = tmpTrainLabels.row(0); if (trainData.n_cols != trainLabels.n_elem) Log::Fatal << "Samples of input_data should same as the size of " << "input_label." << endl; const size_t numClasses = CalculateNumberOfClasses( (size_t) CLI::GetParam("number_of_classes"), trainLabels); const bool intercept = CLI::HasParam("no_intercept") ? false : true; SRF smFunction(trainData, trainLabels, numClasses, intercept, CLI::GetParam("lambda")); const size_t numBasis = 5; optimization::L_BFGS optimizer(smFunction, numBasis, maxIterations); sm.reset(new Model(optimizer)); } return sm; } mlpack-2.2.5/src/mlpack/methods/sparse_autoencoder/000077500000000000000000000000001315013601400223475ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/methods/sparse_autoencoder/CMakeLists.txt000066400000000000000000000011121315013601400251020ustar00rootroot00000000000000# Define the files we need to compile. # Anything not in this list will not be compiled into mlpack. 
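# Note: to compile an additional source file into this method, it only needs
# to be appended to the SOURCES list below; the foreach() loop then prefixes
# the directory name and exports the result to the parent scope via
# MLPACK_SRCS.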
set(SOURCES sparse_autoencoder.hpp sparse_autoencoder_impl.hpp sparse_autoencoder_function.hpp sparse_autoencoder_function.cpp maximal_inputs.hpp maximal_inputs.cpp ) # Add directory name to sources. set(DIR_SRCS) foreach(file ${SOURCES}) set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file}) endforeach() # Append sources (with directory name) to list of all mlpack sources (used at # the parent scope). set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE) mlpack-2.2.5/src/mlpack/methods/sparse_autoencoder/maximal_inputs.cpp000066400000000000000000000023111315013601400261020ustar00rootroot00000000000000/** * @file maximal_inputs.cpp * @author Tham Ngap Wei * * Implementation of MaximalInputs(). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "maximal_inputs.hpp" namespace mlpack { namespace nn { void MaximalInputs(const arma::mat& parameters, arma::mat& output) { arma::mat paramTemp(parameters.submat(0, 0, (parameters.n_rows - 1) / 2 - 1, parameters.n_cols - 2).t()); double const mean = arma::mean(arma::mean(paramTemp)); paramTemp -= mean; NormalizeColByMax(paramTemp, output); } void NormalizeColByMax(const arma::mat &input, arma::mat &output) { output.set_size(input.n_rows, input.n_cols); for (arma::uword i = 0; i != input.n_cols; ++i) { const double max = arma::max(arma::abs(input.col(i))); if (max != 0.0) { output.col(i) = input.col(i) / max; } else { output.col(i) = input.col(i); } } } } // namespace nn } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/sparse_autoencoder/maximal_inputs.hpp000066400000000000000000000061771315013601400261250ustar00rootroot00000000000000/** * @file maximal_inputs.hpp * @author Tham Ngap Wei * * A function to find the maximal inputs of an autoencoder. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_NN_MAXIMAL_INPUTS_HPP #define MLPACK_METHODS_NN_MAXIMAL_INPUTS_HPP #include namespace mlpack { namespace nn { /** * Given a parameters matrix from an autoencoder, maximize the hidden units of * the parameters, storing the maximal inputs in the given output matrix. * Details can be found on the 'Visualizing a Trained Autoencoder' page of the * Stanford UFLDL tutorial: * * http://deeplearning.stanford.edu/wiki/index.php/Main_Page * * This function is based on the implementation (display_network.m) from the * "Exercise: Sparse Autoencoder" page of the UFLDL tutorial: * * http://deeplearning.stanford.edu/wiki/index.php/Exercise:Sparse_Autoencoder * * Example usage of this function can be seen below. Note that this function * can work with the ColumnsToBlocks class in order to reshape the maximal * inputs for visualization, as in the UFLDL tutorial. The code below * demonstrates this. * * @code * arma::mat data; // Data matrix. * const size_t vSize = 64; // Size of visible layer, depends on the data. * const size_t hSize = 25; // Size of hidden layer, depends on requirements. * * const size_t numBasis = 5; // Parameter required for L-BFGS algorithm. * const size_t numIterations = 100; // Maximum number of iterations. 
* * // Use an instantiated optimizer for the training. * SparseAutoencoder encoder(data, vSize, hSize); * * arma::mat maximalInput; // Store the features learned by sparse autoencoder * mlpack::nn::MaximalInputs(encoder.Parameters(), maximalInput); * * arma::mat outputs; * const bool scale = true; * * ColumnsToBlocks ctb(5,5); * arma::mat output; * ctb.Transform(maximalInput, output); * // Save the output as PGM, for visualization. * output.save(fileName, arma::pgm_binary); * @endcode * * @pre Layout of parameters * * The layout of the parameters matrix should be same as following * @code * // vSize 1 * // | | | * // hSize| w1 |b1| * // |________|__| * // | | | * // hSize| w2' | | * // |________|__| * // 1| b2' | | * @endcode * * Also, the square root of vSize must be an integer (i.e. vSize must be a * perfect square). * * @param parameters The parameters of the autoencoder. * @param output Matrix to store the maximal inputs in. */ void MaximalInputs(const arma::mat& parameters, arma::mat& output); /** * Normalize each column of the input matrix by its maximum value, if that * maximum value is not zero. * * @param input The input data to normalize. * @param output A matrix to store the input data in after normalization. */ void NormalizeColByMax(const arma::mat& input, arma::mat& output); } // namespace nn } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/sparse_autoencoder/sparse_autoencoder.hpp000066400000000000000000000142561315013601400267550ustar00rootroot00000000000000/** * @file sparse_autoencoder.hpp * @author Siddharth Agrawal * * An implementation of sparse autoencoders. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_SPARSE_AUTOENCODER_SPARSE_AUTOENCODER_HPP #define MLPACK_METHODS_SPARSE_AUTOENCODER_SPARSE_AUTOENCODER_HPP #include #include #include "sparse_autoencoder_function.hpp" namespace mlpack { namespace nn { /** * A sparse autoencoder is a neural network whose aim to learn compressed * representations of the data, typically for dimensionality reduction, with a * constraint on the activity of the neurons in the network. Sparse autoencoders * can be stacked together to learn a hierarchy of features, which provide a * better representation of the data for classification. This is a method used * in the recently developed field of deep learning. More technical details * about the model can be found on the following webpage: * * http://deeplearning.stanford.edu/wiki/index.php/UFLDL_Tutorial * * An example of how to use the interface is shown below: * * @code * arma::mat data; // Data matrix. * const size_t vSize = 64; // Size of visible layer, depends on the data. * const size_t hSize = 25; // Size of hidden layer, depends on requirements. * * // Train the model using default options. * SparseAutoencoder encoder1(data, vSize, hSize); * * const size_t numBasis = 5; // Parameter required for L-BFGS algorithm. * const size_t numIterations = 100; // Maximum number of iterations. * * // Use an instantiated optimizer for the training. * SparseAutoencoderFunction saf(data, vSize, hSize); * L_BFGS optimizer(saf, numBasis, numIterations); * SparseAutoencoder encoder2(optimizer); * * arma::mat features1, features2; // Matrices for storing new representations. * * // Get new representations from the trained models. 
* encoder1.GetNewFeatures(data, features1); * encoder2.GetNewFeatures(data, features2); * @endcode * * This implementation allows the use of arbitrary mlpack optimizers via the * OptimizerType template parameter. * * @tparam OptimizerType The optimizer to use; by default this is L-BFGS. Any * mlpack optimizer can be used here. */ template< template class OptimizerType = mlpack::optimization::L_BFGS > class SparseAutoencoder { public: /** * Construct the sparse autoencoder model with the given training data. This * will train the model. The parameters 'lambda', 'beta' and 'rho' can be set * optionally. Changing these parameters will have an effect on regularization * and sparsity of the model. * * @param data Input data with each column as one example. * @param visibleSize Size of input vector expected at the visible layer. * @param hiddenSize Size of input vector expected at the hidden layer. * @param lambda L2-regularization parameter. * @param beta KL divergence parameter. * @param rho Sparsity parameter. */ SparseAutoencoder(const arma::mat& data, const size_t visibleSize, const size_t hiddenSize, const double lambda = 0.0001, const double beta = 3, const double rho = 0.01); /** * Construct the sparse autoencoder model with the given training data. This * will train the model. This overload takes an already instantiated optimizer * and uses it to train the model. The optimizer should hold an instantiated * SparseAutoencoderFunction object for the function to operate upon. This * option should be preferred when the optimizer options are to be changed. * * @param optimizer Instantiated optimizer with instantiated error function. */ SparseAutoencoder(OptimizerType& optimizer); /** * Transforms the provided data into the representation learned by the sparse * autoencoder. The function basically performs a feedforward computation * using the learned weights, and returns the hidden layer activations. * * @param data Matrix of the provided data. * @param features The hidden layer representation of the provided data. */ void GetNewFeatures(arma::mat& data, arma::mat& features); /** * Returns the elementwise sigmoid of the passed matrix, where the sigmoid * function of a real number 'x' is [1 / (1 + exp(-x))]. * * @param x Matrix of real values for which we require the sigmoid activation. */ void Sigmoid(const arma::mat& x, arma::mat& output) const { output = (1.0 / (1 + arma::exp(-x))); } //! Sets size of the visible layer. void VisibleSize(const size_t visible) { this->visibleSize = visible; } //! Gets size of the visible layer. size_t VisibleSize() const { return visibleSize; } //! Sets size of the hidden layer. void HiddenSize(const size_t hidden) { this->hiddenSize = hidden; } //! Gets the size of the hidden layer. size_t HiddenSize() const { return hiddenSize; } //! Sets the L2-regularization parameter. void Lambda(const double l) { this->lambda = l; } //! Gets the L2-regularization parameter. double Lambda() const { return lambda; } //! Sets the KL divergence parameter. void Beta(const double b) { this->beta = b; } //! Gets the KL divergence parameter. double Beta() const { return beta; } //! Sets the sparsity parameter. void Rho(const double r) { this->rho = r; } //! Gets the sparsity parameter. double Rho() const { return rho; } private: //! Parameters after optimization. arma::mat parameters; //! Size of the visible layer. size_t visibleSize; //! Size of the hidden layer. size_t hiddenSize; //! L2-regularization parameter. double lambda; //! KL divergence parameter. double beta; //! 
Sparsity parameter. double rho; }; } // namespace nn } // namespace mlpack // Include implementation. #include "sparse_autoencoder_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/sparse_autoencoder/sparse_autoencoder_function.cpp000066400000000000000000000213531315013601400306510ustar00rootroot00000000000000/** * @file sparse_autoencoder_function.cpp * @author Siddharth Agrawal * * Implementation of function to be optimized for sparse autoencoders. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "sparse_autoencoder_function.hpp" using namespace mlpack; using namespace mlpack::nn; using namespace std; SparseAutoencoderFunction::SparseAutoencoderFunction(const arma::mat& data, const size_t visibleSize, const size_t hiddenSize, const double lambda, const double beta, const double rho) : data(data), visibleSize(visibleSize), hiddenSize(hiddenSize), lambda(lambda), beta(beta), rho(rho) { // Initialize the parameters to suitable values. initialPoint = InitializeWeights(); } /** Initializes the parameter weights if the initial point is not passed to the * constructor. The weights w1, w2 are initialized to randomly in the range * [-r, r] where 'r' is decided using the sizes of the visible and hidden * layers. The biases b1, b2 are initialized to 0. */ const arma::mat SparseAutoencoderFunction::InitializeWeights() { // The module uses a matrix to store the parameters, its structure looks like: // vSize 1 // | | | // hSize| w1 |b1| // |________|__| // | | | // hSize| w2' | | // |________|__| // 1| b2' | | // // There are (hiddenSize + 1) empty cells in the matrix, but it is small // compared to the matrix size. The above structure allows for smooth matrix // operations without making the code too ugly. // Initialize w1 and w2 to random values in the range [0, 1], then set b1 and // b2 to 0. arma::mat parameters; parameters.randu(2 * hiddenSize + 1, visibleSize + 1); parameters.row(2 * hiddenSize).zeros(); parameters.col(visibleSize).zeros(); // Decide the parameter 'r' depending on the size of the visible and hidden // layers. The formula used is r = sqrt(6) / sqrt(vSize + hSize + 1). const double range = sqrt(6) / sqrt(visibleSize + hiddenSize + 1); //Shift range of w1 and w2 values from [0, 1] to [-r, r]. parameters.submat(0, 0, 2 * hiddenSize - 1, visibleSize - 1) = 2 * range * (parameters.submat(0, 0, 2 * hiddenSize - 1, visibleSize - 1) - 0.5); return parameters; } /** Evaluates the objective function given the parameters. */ double SparseAutoencoderFunction::Evaluate(const arma::mat& parameters) const { // The objective function is the average squared reconstruction error of the // network. w1 and b1 are the weights and biases associated with the hidden // layer, whereas w2 and b2 are associated with the output layer. // f(w1,w2,b1,b2) = sum((data - sigmoid(w2*sigmoid(w1data + b1) + b2))^2) / 2m // 'm' is the number of training examples. // The cost also takes into account the regularization and KL divergence terms // to control the parameter weights and sparsity of the model respectively. // Compute the limits for the parameters w1, w2, b1 and b2. 
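  // For reference, the pieces named above combine into the value this
  // function returns (restated from the comments; m = data.n_cols):
  //
  //   J(w1, w2, b1, b2) =
  //       (1 / 2m) * sum_i || x_i - sigmoid(w2 * sigmoid(w1 x_i + b1) + b2) ||^2
  //     + (lambda / 2) * (||w1||_F^2 + ||w2||_F^2)
  //     + beta * sum_j [ rho log(rho / rhoCap_j)
  //                      + (1 - rho) log((1 - rho) / (1 - rhoCap_j)) ]
  //
  // matching 'sumOfSquaresError', 'weightDecay' and 'klDivergence' below.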
const size_t l1 = hiddenSize; const size_t l2 = visibleSize; const size_t l3 = 2 * hiddenSize; // w1, w2, b1 and b2 are not extracted separately, 'parameters' is directly // used in their place to avoid copying data. The following representations // are used: // w1 <- parameters.submat(0, 0, l1-1, l2-1) // w2 <- parameters.submat(l1, 0, l3-1, l2-1).t() // b1 <- parameters.submat(0, l2, l1-1, l2) // b2 <- parameters.submat(l3, 0, l3, l2-1).t() arma::mat hiddenLayer, outputLayer; // Compute activations of the hidden and output layers. Sigmoid(parameters.submat(0, 0, l1 - 1, l2 - 1) * data + arma::repmat(parameters.submat(0, l2, l1 - 1, l2), 1, data.n_cols), hiddenLayer); Sigmoid(parameters.submat(l1, 0, l3 - 1, l2 - 1).t() * hiddenLayer + arma::repmat(parameters.submat(l3, 0, l3, l2 - 1).t(), 1, data.n_cols), outputLayer); arma::mat rhoCap, diff; // Average activations of the hidden layer. rhoCap = arma::sum(hiddenLayer, 1) / data.n_cols; // Difference between the reconstructed data and the original data. diff = outputLayer - data; double wL2SquaredNorm; // Calculate squared L2-norms of w1 and w2. wL2SquaredNorm = arma::accu(parameters.submat(0, 0, l3 - 1, l2 - 1) % parameters.submat(0, 0, l3 - 1, l2 - 1)); double sumOfSquaresError, weightDecay, klDivergence, cost; // Calculate the reconstruction error, the regularization cost and the KL // divergence cost terms. 'sumOfSquaresError' is the average squared l2-norm // of the reconstructed data difference. 'weightDecay' is the squared l2-norm // of the weights w1 and w2. 'klDivergence' is the cost of the hidden layer // activations not being low. It is given by the following formula: // KL = sum_over_hSize(rho*log(rho/rhoCaq) + (1-rho)*log((1-rho)/(1-rhoCap))) sumOfSquaresError = 0.5 * arma::accu(diff % diff) / data.n_cols; weightDecay = 0.5 * lambda * wL2SquaredNorm; klDivergence = beta * arma::accu(rho * arma::log(rho / rhoCap) + (1 - rho) * arma::log((1 - rho) / (1 - rhoCap))); // The cost is the sum of the terms calculated above. cost = sumOfSquaresError + weightDecay + klDivergence; return cost; } /** Calculates and stores the gradient values given a set of parameters. */ void SparseAutoencoderFunction::Gradient(const arma::mat& parameters, arma::mat& gradient) const { // Performs a feedforward pass of the neural network, and computes the // activations of the output layer as in the Evaluate() method. It uses the // Backpropagation algorithm to calculate the delta values at each layer, // except for the input layer. The delta values are then used with input layer // and hidden layer activations to get the parameter gradients. // Compute the limits for the parameters w1, w2, b1 and b2. const size_t l1 = hiddenSize; const size_t l2 = visibleSize; const size_t l3 = 2 * hiddenSize; // w1, w2, b1 and b2 are not extracted separately, 'parameters' is directly // used in their place to avoid copying data. The following representations // are used: // w1 <- parameters.submat(0, 0, l1-1, l2-1) // w2 <- parameters.submat(l1, 0, l3-1, l2-1).t() // b1 <- parameters.submat(0, l2, l1-1, l2) // b2 <- parameters.submat(l3, 0, l3, l2-1).t() arma::mat hiddenLayer, outputLayer; // Compute activations of the hidden and output layers. 
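  // Restated from the comments for clarity: with hiddenLayer and outputLayer
  // the activations computed just below, the backpropagated deltas assigned
  // later in this function are
  //
  //   delOut = (outputLayer - data) % outputLayer % (1 - outputLayer)
  //   delHid = (w2 * delOut + repmat(klDivGrad, 1, data.n_cols))
  //            % hiddenLayer % (1 - hiddenLayer)
  //
  // where klDivGrad = beta * (-(rho / rhoCap) + (1 - rho) / (1 - rhoCap)).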
Sigmoid(parameters.submat(0, 0, l1 - 1, l2 - 1) * data + arma::repmat(parameters.submat(0, l2, l1 - 1, l2), 1, data.n_cols), hiddenLayer); Sigmoid(parameters.submat(l1, 0, l3 - 1, l2 - 1).t() * hiddenLayer + arma::repmat(parameters.submat(l3, 0, l3, l2 - 1).t(), 1, data.n_cols), outputLayer); arma::mat rhoCap, diff; // Average activations of the hidden layer. rhoCap = arma::sum(hiddenLayer, 1) / data.n_cols; // Difference between the reconstructed data and the original data. diff = outputLayer - data; arma::mat klDivGrad, delOut, delHid; // The delta vector for the output layer is given by diff * f'(z), where z is // the preactivation and f is the activation function. The derivative of the // sigmoid function turns out to be f(z) * (1 - f(z)). For every other layer // in the neural network which comes before the output layer, the delta values // are given del_n = w_n' * del_(n+1) * f'(z_n). Since our cost function also // includes the KL divergence term, we adjust for that in the formula below. klDivGrad = beta * (-(rho / rhoCap) + (1 - rho) / (1 - rhoCap)); delOut = diff % outputLayer % (1 - outputLayer); delHid = (parameters.submat(l1, 0, l3 - 1, l2 - 1) * delOut + arma::repmat(klDivGrad, 1, data.n_cols)) % hiddenLayer % (1 - hiddenLayer); gradient.zeros(2 * hiddenSize + 1, visibleSize + 1); // Compute the gradient values using the activations and the delta values. The // formula also accounts for the regularization terms in the objective. // function. gradient.submat(0, 0, l1 - 1, l2 - 1) = delHid * data.t() / data.n_cols + lambda * parameters.submat(0, 0, l1 - 1, l2 - 1); gradient.submat(l1, 0, l3 - 1, l2 - 1) = (delOut * hiddenLayer.t() / data.n_cols + lambda * parameters.submat(l1, 0, l3 - 1, l2 - 1).t()).t(); gradient.submat(0, l2, l1 - 1, l2) = arma::sum(delHid, 1) / data.n_cols; gradient.submat(l3, 0, l3, l2 - 1) = (arma::sum(delOut, 1) / data.n_cols).t(); } mlpack-2.2.5/src/mlpack/methods/sparse_autoencoder/sparse_autoencoder_function.hpp000066400000000000000000000112521315013601400306530ustar00rootroot00000000000000/** * @file sparse_autoencoder_function.hpp * @author Siddharth Agrawal * * The function to be optimized for sparse autoencoders. Any mlpack optimizer * can be used. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_SPARSE_AUTOENCODER_SPARSE_AUTOENCODER_FUNCTION_HPP #define MLPACK_METHODS_SPARSE_AUTOENCODER_SPARSE_AUTOENCODER_FUNCTION_HPP #include namespace mlpack { namespace nn { /** * This is a class for the sparse autoencoder objective function. It can be used * to create learning models like self-taught learning, stacked autoencoders, * conditional random fields (CRFs), and so forth. */ class SparseAutoencoderFunction { public: /** * Construct the sparse autoencoder objective function with the given * parameters. * * @param data The data matrix. * @param visibleSize Size of input vector expected at the visible layer. * @param hiddenSize Size of input vector expected at the hidden layer. * @param lambda L2-regularization parameter. * @param beta KL divergence parameter. * @param rho Sparsity parameter. */ SparseAutoencoderFunction(const arma::mat& data, const size_t visibleSize, const size_t hiddenSize, const double lambda = 0.0001, const double beta = 3, const double rho = 0.01); //! 
Initializes the parameters of the model to suitable values. const arma::mat InitializeWeights(); /** * Evaluates the objective function of the sparse autoencoder model using the * given parameters. The cost function has terms for the reconstruction * error, regularization cost and the sparsity cost. The objective function * takes a low value when the model is able to reconstruct the data well * using weights which are low in value and when the average activations of * neurons in the hidden layers agrees well with the sparsity parameter 'rho'. * * @param parameters Current values of the model parameters. */ double Evaluate(const arma::mat& parameters) const; /** * Evaluates the gradient values of the objective function given the current * set of parameters. The function performs a feedforward pass and computes * the error in reconstructing the data points. It then uses the * backpropagation algorithm to compute the gradient values. * * @param parameters Current values of the model parameters. * @param gradient Matrix where gradient values will be stored. */ void Gradient(const arma::mat& parameters, arma::mat& gradient) const; /** * Returns the elementwise sigmoid of the passed matrix, where the sigmoid * function of a real number 'x' is [1 / (1 + exp(-x))]. * * @param x Matrix of real values for which we require the sigmoid activation. */ void Sigmoid(const arma::mat& x, arma::mat& output) const { output = (1.0 / (1 + arma::exp(-x))); } //! Return the initial point for the optimization. const arma::mat& GetInitialPoint() const { return initialPoint; } //! Sets size of the visible layer. void VisibleSize(const size_t visible) { this->visibleSize = visible; } //! Gets size of the visible layer. size_t VisibleSize() const { return visibleSize; } //! Sets size of the hidden layer. void HiddenSize(const size_t hidden) { this->hiddenSize = hidden; } //! Gets the size of the hidden layer. size_t HiddenSize() const { return hiddenSize; } //! Sets the L2-regularization parameter. void Lambda(const double l) { this->lambda = l; } //! Gets the L2-regularization parameter. double Lambda() const { return lambda; } //! Sets the KL divergence parameter. void Beta(const double b) { this->beta = b; } //! Gets the KL divergence parameter. double Beta() const { return beta; } //! Sets the sparsity parameter. void Rho(const double r) { this->rho = r; } //! Gets the sparsity parameter. double Rho() const { return rho; } private: //! The matrix of data points. const arma::mat& data; //! Initial parameter vector. arma::mat initialPoint; //! Size of the visible layer. size_t visibleSize; //! Size of the hidden layer. size_t hiddenSize; //! L2-regularization parameter. double lambda; //! KL divergence parameter. double beta; //! Sparsity parameter. double rho; }; } // namespace nn } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/sparse_autoencoder/sparse_autoencoder_impl.hpp000066400000000000000000000057121315013601400277730ustar00rootroot00000000000000/** * @file sparse_autoencoder_impl.hpp * @author Siddharth Agrawal * * Implementation of sparse autoencoders. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/
#ifndef MLPACK_METHODS_SPARSE_AUTOENCODER_SPARSE_AUTOENCODER_IMPL_HPP
#define MLPACK_METHODS_SPARSE_AUTOENCODER_SPARSE_AUTOENCODER_IMPL_HPP

// In case it hasn't been included yet.
#include "sparse_autoencoder.hpp"

namespace mlpack {
namespace nn {

template<template<typename> class OptimizerType>
SparseAutoencoder<OptimizerType>::SparseAutoencoder(const arma::mat& data,
                                                    const size_t visibleSize,
                                                    const size_t hiddenSize,
                                                    double lambda,
                                                    double beta,
                                                    double rho) :
    visibleSize(visibleSize),
    hiddenSize(hiddenSize),
    lambda(lambda),
    beta(beta),
    rho(rho)
{
  SparseAutoencoderFunction encoderFunction(data, visibleSize, hiddenSize,
      lambda, beta, rho);
  OptimizerType<SparseAutoencoderFunction> optimizer(encoderFunction);

  parameters = encoderFunction.GetInitialPoint();

  // Train the model.
  Timer::Start("sparse_autoencoder_optimization");
  const double out = optimizer.Optimize(parameters);
  Timer::Stop("sparse_autoencoder_optimization");

  Log::Info << "SparseAutoencoder::SparseAutoencoder(): final objective of "
      << "trained model is " << out << "." << std::endl;
}

template<template<typename> class OptimizerType>
SparseAutoencoder<OptimizerType>::SparseAutoencoder(
    OptimizerType<SparseAutoencoderFunction>& optimizer) :
    parameters(optimizer.Function().GetInitialPoint()),
    visibleSize(optimizer.Function().VisibleSize()),
    hiddenSize(optimizer.Function().HiddenSize()),
    lambda(optimizer.Function().Lambda()),
    beta(optimizer.Function().Beta()),
    rho(optimizer.Function().Rho())
{
  Timer::Start("sparse_autoencoder_optimization");
  const double out = optimizer.Optimize(parameters);
  Timer::Stop("sparse_autoencoder_optimization");

  Log::Info << "SparseAutoencoder::SparseAutoencoder(): final objective of "
      << "trained model is " << out << "." << std::endl;
}

template<template<typename> class OptimizerType>
void SparseAutoencoder<OptimizerType>::GetNewFeatures(arma::mat& data,
                                                      arma::mat& features)
{
  const size_t l1 = hiddenSize;
  const size_t l2 = visibleSize;

  Sigmoid(parameters.submat(0, 0, l1 - 1, l2 - 1) * data +
      arma::repmat(parameters.submat(0, l2, l1 - 1, l2), 1, data.n_cols),
      features);
}

} // namespace nn
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/sparse_coding/CMakeLists.txt
# Define the files we need to compile.
# Anything not in this list will not be compiled into the output library.
set(SOURCES
  data_dependent_random_initializer.hpp
  nothing_initializer.hpp
  random_initializer.hpp
  sparse_coding.hpp
  sparse_coding.cpp
  sparse_coding_impl.hpp
)

# Add directory name to sources.
set(DIR_SRCS)
foreach(file ${SOURCES})
  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
endforeach()

# Append sources (with directory name) to list of all mlpack sources (used at
# the parent scope).
set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)

add_cli_executable(sparse_coding)
mlpack-2.2.5/src/mlpack/methods/sparse_coding/data_dependent_random_initializer.hpp
/**
 * @file data_dependent_random_initializer.hpp
 * @author Nishant Mehta
 *
 * A sensible heuristic for initializing dictionaries for sparse coding.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/
#ifndef MLPACK_METHODS_SPARSE_CODING_DATA_DEPENDENT_RANDOM_INITIALIZER_HPP
#define MLPACK_METHODS_SPARSE_CODING_DATA_DEPENDENT_RANDOM_INITIALIZER_HPP

#include <mlpack/core.hpp>
#include <mlpack/core/math/random.hpp>

namespace mlpack {
namespace sparse_coding {

/**
 * A data-dependent random dictionary initializer for SparseCoding.  This
 * creates random dictionary atoms by adding three random observations from
 * the data together, and then normalizing the atom.
 */
class DataDependentRandomInitializer
{
 public:
  /**
   * Initialize the dictionary by adding together three random observations
   * from the data, and then normalizing the atom.  This implementation is
   * simple enough to be included with the definition.
   *
   * @param data Dataset to initialize the dictionary with.
   * @param atoms Number of atoms in dictionary.
   * @param dictionary Dictionary to initialize.
   */
  static void Initialize(const arma::mat& data,
                         const size_t atoms,
                         arma::mat& dictionary)
  {
    // Set the size of the dictionary.
    dictionary.set_size(data.n_rows, atoms);

    // Create each atom.
    for (size_t i = 0; i < atoms; ++i)
    {
      // Add three random observations together.
      dictionary.col(i) = (data.col(math::RandInt(data.n_cols)) +
          data.col(math::RandInt(data.n_cols)) +
          data.col(math::RandInt(data.n_cols)));

      // Now normalize the atom.
      dictionary.col(i) /= norm(dictionary.col(i), 2);
    }
  }
};

} // namespace sparse_coding
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/sparse_coding/nothing_initializer.hpp
/**
 * @file nothing_initializer.hpp
 * @author Ryan Curtin
 *
 * An initializer for SparseCoding which does precisely nothing.  It is useful
 * for when you have an already defined dictionary and you plan on setting it
 * with SparseCoding::Dictionary().
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_METHODS_SPARSE_CODING_NOTHING_INITIALIZER_HPP
#define MLPACK_METHODS_SPARSE_CODING_NOTHING_INITIALIZER_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace sparse_coding {

/**
 * A DictionaryInitializer for SparseCoding which does not initialize
 * anything; it is useful for when the dictionary is already known and will be
 * set with SparseCoding::Dictionary().
 */
class NothingInitializer
{
 public:
  /**
   * This function does not initialize the dictionary.  This will cause
   * problems for SparseCoding if the dictionary is not set manually before
   * running the method.
   */
  static void Initialize(const arma::mat& /* data */,
                         const size_t /* atoms */,
                         arma::mat& /* dictionary */)
  {
    // Do nothing!
  }
};

} // namespace sparse_coding
} // namespace mlpack

#endif
mlpack-2.2.5/src/mlpack/methods/sparse_coding/random_initializer.hpp
/**
 * @file random_initializer.hpp
 * @author Nishant Mehta
 *
 * A very simple random dictionary initializer for SparseCoding; it is
 * probably not a very good choice.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/ #ifndef MLPACK_METHODS_SPARSE_CODING_RANDOM_INITIALIZER_HPP #define MLPACK_METHODS_SPARSE_CODING_RANDOM_INITIALIZER_HPP #include namespace mlpack { namespace sparse_coding { /** * A DictionaryInitializer for use with the SparseCoding class. This provides a * random, normally distributed dictionary, such that each atom has a norm of 1. */ class RandomInitializer { public: /** * Initialize the dictionary randomly from a normal distribution, such that * each atom has a norm of 1. This is simple enough to be included with the * definition. * * @param data Dataset to use for initialization. * @param atoms Number of atoms (columns) in the dictionary. * @param dictionary Dictionary to initialize. */ static void Initialize(const arma::mat& data, const size_t atoms, arma::mat& dictionary) { // Create random dictionary. dictionary.randn(data.n_rows, atoms); // Normalize each atom. for (size_t j = 0; j < atoms; ++j) dictionary.col(j) /= norm(dictionary.col(j), 2); } }; } // namespace sparse_coding } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/sparse_coding/sparse_coding.cpp000066400000000000000000000204531315013601400246320ustar00rootroot00000000000000/** * @file sparse_coding.cpp * @author Nishant Mehta * * Implementation of Sparse Coding with Dictionary Learning using l1 (LASSO) or * l1+l2 (Elastic Net) regularization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "sparse_coding.hpp" #include #include namespace mlpack { namespace sparse_coding { SparseCoding::SparseCoding( const size_t atoms, const double lambda1, const double lambda2, const size_t maxIterations, const double objTolerance, const double newtonTolerance) : atoms(atoms), lambda1(lambda1), lambda2(lambda2), maxIterations(maxIterations), objTolerance(objTolerance), newtonTolerance(newtonTolerance) { // Nothing to do. } void SparseCoding::Encode(const arma::mat& data, arma::mat& codes) { // When using the Cholesky version of LARS, this is correct even if // lambda2 > 0. arma::mat matGram = trans(dictionary) * dictionary; codes.set_size(atoms, data.n_cols); for (size_t i = 0; i < data.n_cols; ++i) { // Report progress. if ((i % 100) == 0) Log::Debug << "Optimization at point " << i << "." << std::endl; bool useCholesky = true; regression::LARS lars(useCholesky, matGram, lambda1, lambda2); // Create an alias of the code (using the same memory), and then LARS will // place the result directly into that; then we will not need to have an // extra copy. arma::vec code = codes.unsafe_col(i); lars.Train(dictionary, data.unsafe_col(i), code, false); } } // Dictionary step for optimization. double SparseCoding::OptimizeDictionary(const arma::mat& data, const arma::mat& codes, const arma::uvec& adjacencies) { // Count the number of atomic neighbors for each point x^i. arma::uvec neighborCounts = arma::zeros(data.n_cols, 1); if (adjacencies.n_elem > 0) { // This gets the column index. Intentional integer division. size_t curPointInd = (size_t) (adjacencies(0) / atoms); size_t nextColIndex = (curPointInd + 1) * atoms; for (size_t l = 1; l < adjacencies.n_elem; ++l) { // If l no longer refers to an element in this column, advance the column // number accordingly. 
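      // For intuition: 'adjacencies' holds the linear indices of the nonzero
      // entries of the (atoms x n) codes matrix, unrolled column by column,
      // so index a belongs to point floor(a / atoms).  For example, with
      // atoms = 3, index 7 falls in column 7 / 3 = 2 (the third point).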
if (adjacencies(l) >= nextColIndex) { curPointInd = (size_t) (adjacencies(l) / atoms); nextColIndex = (curPointInd + 1) * atoms; } ++neighborCounts(curPointInd); } } // Handle the case of inactive atoms (atoms not used in the given coding). std::vector inactiveAtoms; for (size_t j = 0; j < atoms; ++j) { if (arma::accu(codes.row(j) != 0) == 0) inactiveAtoms.push_back(j); } const size_t nInactiveAtoms = inactiveAtoms.size(); const size_t nActiveAtoms = atoms - nInactiveAtoms; // Efficient construction of Z restricted to active atoms. arma::mat matActiveZ; if (nInactiveAtoms > 0) { math::RemoveRows(codes, inactiveAtoms, matActiveZ); } if (nInactiveAtoms > 0) { Log::Warn << "There are " << nInactiveAtoms << " inactive atoms. They will be re-initialized randomly.\n"; } Log::Debug << "Solving Dual via Newton's Method.\n"; // Solve using Newton's method in the dual - note that the final dot // multiplication with inv(A) seems to be unavoidable. Although more // expensive, the code written this way (we use solve()) should be more // numerically stable than just using inv(A) for everything. arma::vec dualVars = arma::zeros(nActiveAtoms); //vec dualVars = 1e-14 * ones(nActiveAtoms); // Method used by feature sign code - fails miserably here. Perhaps the // MATLAB optimizer fmincon does something clever? //vec dualVars = 10.0 * randu(nActiveAtoms, 1); //vec dualVars = diagvec(solve(dictionary, data * trans(codes)) // - codes * trans(codes)); //for (size_t i = 0; i < dualVars.n_elem; i++) // if (dualVars(i) < 0) // dualVars(i) = 0; bool converged = false; // If we have any inactive atoms, we must construct these differently. arma::mat codesXT; arma::mat codesZT; if (inactiveAtoms.empty()) { codesXT = codes * trans(data); codesZT = codes * trans(codes); } else { codesXT = matActiveZ * trans(data); codesZT = matActiveZ * trans(matActiveZ); } double normGradient = 0; double improvement = 0; for (size_t t = 1; (t != maxIterations) && !converged; ++t) { arma::mat A = codesZT + diagmat(dualVars); arma::mat matAInvZXT = solve(A, codesXT); arma::vec gradient = -arma::sum(arma::square(matAInvZXT), 1); gradient += 1; arma::mat hessian = -(-2 * (matAInvZXT * trans(matAInvZXT)) % inv(A)); arma::vec searchDirection = -solve(hessian, gradient); //printf("%e\n", norm(searchDirection, 2)); // Armijo line search. const double c = 1e-4; double alpha = 1.0; const double rho = 0.9; double sufficientDecrease = c * dot(gradient, searchDirection); // A maxIterations parameter for the Armijo line search may be a good idea, // but it doesn't seem to be causing any problems for now. while (true) { // Calculate objective. double sumDualVars = arma::sum(dualVars); double fOld = -(-trace(trans(codesXT) * matAInvZXT) - sumDualVars); double fNew = -(-trace(trans(codesXT) * solve(codesZT + diagmat(dualVars + alpha * searchDirection), codesXT)) - (sumDualVars + alpha * arma::sum(searchDirection))); if (fNew <= fOld + alpha * sufficientDecrease) { searchDirection = alpha * searchDirection; improvement = fOld - fNew; break; } alpha *= rho; } // Take step and print useful information. dualVars += searchDirection; normGradient = arma::norm(gradient, 2); Log::Debug << "Newton Method iteration " << t << ":" << std::endl; Log::Debug << " Gradient norm: " << std::scientific << normGradient << "." << std::endl; Log::Debug << " Improvement: " << std::scientific << improvement << ".\n"; if (normGradient < newtonTolerance) converged = true; } if (inactiveAtoms.empty()) { // Directly update dictionary. 
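    // Restated for clarity: the line below computes the closed-form update
    //
    //   D = ( (Z Z^T + diag(dualVars))^{-1} (Z X^T) )^T
    //
    // via solve(), avoiding an explicit inverse (as noted above).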
dictionary = trans(solve(codesZT + diagmat(dualVars), codesXT)); } else { arma::mat activeDictionary = trans(solve(codesZT + diagmat(dualVars), codesXT)); // Update all atoms. size_t currentInactiveIndex = 0; for (size_t i = 0; i < atoms; ++i) { if (inactiveAtoms[currentInactiveIndex] == i) { // This atom is inactive. Reinitialize it randomly. dictionary.col(i) = (data.col(math::RandInt(data.n_cols)) + data.col(math::RandInt(data.n_cols)) + data.col(math::RandInt(data.n_cols))); dictionary.col(i) /= arma::norm(dictionary.col(i), 2); // Increment inactive index counter. ++currentInactiveIndex; } else { // Update estimate. dictionary.col(i) = activeDictionary.col(i - currentInactiveIndex); } } } return normGradient; } // Project each atom of the dictionary back into the unit ball (if necessary). void SparseCoding::ProjectDictionary() { for (size_t j = 0; j < atoms; j++) { double atomNorm = arma::norm(dictionary.col(j), 2); if (atomNorm > 1) { Log::Info << "Norm of atom " << j << " exceeds 1 (" << std::scientific << atomNorm << "). Shrinking...\n"; dictionary.col(j) /= atomNorm; } } } // Compute the objective function. double SparseCoding::Objective(const arma::mat& data, const arma::mat& codes) const { double l11NormZ = arma::sum(arma::sum(arma::abs(codes))); double froNormResidual = arma::norm(data - (dictionary * codes), "fro"); if (lambda2 > 0) { double froNormZ = arma::norm(codes, "fro"); return 0.5 * (std::pow(froNormResidual, 2.0) + (lambda2 * std::pow(froNormZ, 2.0))) + (lambda1 * l11NormZ); } else // It can be simpler. { return 0.5 * std::pow(froNormResidual, 2.0) + lambda1 * l11NormZ; } } } // namespace sparse_coding } // namespace mlpack mlpack-2.2.5/src/mlpack/methods/sparse_coding/sparse_coding.hpp000066400000000000000000000253071315013601400246420ustar00rootroot00000000000000/** * @file sparse_coding.hpp * @author Nishant Mehta * * Definition of the SparseCoding class, which performs L1 (LASSO) or * L1+L2 (Elastic Net)-regularized sparse coding with dictionary learning * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_SPARSE_CODING_SPARSE_CODING_HPP #define MLPACK_METHODS_SPARSE_CODING_SPARSE_CODING_HPP #include #include // Include our three simple dictionary initializers. #include "nothing_initializer.hpp" #include "data_dependent_random_initializer.hpp" #include "random_initializer.hpp" namespace mlpack { namespace sparse_coding { /** * An implementation of Sparse Coding with Dictionary Learning that achieves * sparsity via an l1-norm regularizer on the codes (LASSO) or an (l1+l2)-norm * regularizer on the codes (the Elastic Net). * * Let d be the number of dimensions in the original space, m the number of * training points, and k the number of atoms in the dictionary (the dimension * of the learned feature space). The training data X is a d-by-m matrix where * each column is a point and each row is a dimension. The dictionary D is a * d-by-k matrix, and the sparse codes matrix Z is a k-by-m matrix. * This program seeks to minimize the objective: * * \f[ * \min_{D,Z} 0.5 ||X - D Z||_{F}^2\ + \lambda_1 \sum_{i=1}^m ||Z_i||_1 * + 0.5 \lambda_2 \sum_{i=1}^m ||Z_i||_2^2 * \f] * * subject to \f$ ||D_j||_2 <= 1 \f$ for \f$ 1 <= j <= k \f$ * where typically \f$ lambda_1 > 0 \f$ and \f$ lambda_2 = 0 \f$. 
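 *
 * For intuition (with \f$ \lambda_2 = 0 \f$), each column \f$ z_i \f$ of Z in
 * the sparse coding step solves a standard LASSO problem:
 *
 * \f[
 * \min_{z_i} 0.5 \| x_i - D z_i \|_2^2 + \lambda_1 \| z_i \|_1
 * \f]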
* * This problem is solved by an algorithm that alternates between a dictionary * learning step and a sparse coding step. The dictionary learning step updates * the dictionary D using a Newton method based on the Lagrange dual (see the * paper below for details). The sparse coding step involves solving a large * number of sparse linear regression problems; this can be done efficiently * using LARS, an algorithm that can solve the LASSO or the Elastic Net (papers * below). * * Here are those papers: * * @code * @incollection{lee2007efficient, * title = {Efficient sparse coding algorithms}, * author = {Honglak Lee and Alexis Battle and Rajat Raina and Andrew Y. Ng}, * booktitle = {Advances in Neural Information Processing Systems 19}, * editor = {B. Sch\"{o}lkopf and J. Platt and T. Hoffman}, * publisher = {MIT Press}, * address = {Cambridge, MA}, * pages = {801--808}, * year = {2007} * } * @endcode * * @code * @article{efron2004least, * title={Least angle regression}, * author={Efron, B. and Hastie, T. and Johnstone, I. and Tibshirani, R.}, * journal={The Annals of statistics}, * volume={32}, * number={2}, * pages={407--499}, * year={2004}, * publisher={Institute of Mathematical Statistics} * } * @endcode * * @code * @article{zou2005regularization, * title={Regularization and variable selection via the elastic net}, * author={Zou, H. and Hastie, T.}, * journal={Journal of the Royal Statistical Society Series B}, * volume={67}, * number={2}, * pages={301--320}, * year={2005}, * publisher={Royal Statistical Society} * } * @endcode * * Note that the implementation here does not use the feature-sign search * algorithm from Honglak Lee's paper, but instead the LARS algorithm suggested * in that paper. * * When Train() is called, the dictionary is initialized using the * DictionaryInitializationPolicy class. Possible choices include the * RandomInitializer, which provides an entirely random dictionary, the * DataDependentRandomInitializer, which provides a random dictionary based * loosely on characteristics of the dataset, and the NothingInitializer, which * does not initialize the dictionary -- instead, the user should set the * dictionary using the Dictionary() mutator method. * * Once a dictionary is trained with Train(), another matrix may be encoded with * the Encode() function. * * @tparam DictionaryInitializationPolicy The class to use to initialize the * dictionary; must have 'void Initialize(const arma::mat& data, arma::mat& * dictionary)' function. */ class SparseCoding { public: /** * Set the parameters to SparseCoding. lambda2 defaults to 0. This * constructor will train the model. If that is not desired, call the other * constructor that does not take a data matrix. This constructor will also * initialize the dictionary using the given DictionaryInitializer before * training. * * If you want to initialize the dictionary to a custom matrix, consider * either writing your own DictionaryInitializer class (with void * Initialize(const arma::mat& data, arma::mat& dictionary) function), or call * the constructor that does not take a data matrix, then call Dictionary() to * set the dictionary matrix to a matrix of your choosing, and then call * Train() with NothingInitializer (i.e. Train(data)). * * @param data Data matrix. * @param atoms Number of atoms in dictionary. * @param lambda1 Regularization parameter for l1-norm penalty. * @param lambda2 Regularization parameter for l2-norm penalty. * @param maxIterations Maximum number of iterations to run algorithm. 
If 0, * the algorithm will run until convergence (or forever). * @param objTolerance Tolerance for objective function. When an iteration of * the algorithm produces an improvement smaller than this, the algorithm * will terminate. * @param newtonTolerance Tolerance for the Newton's method dictionary * optimization step. */ template SparseCoding(const arma::mat& data, const size_t atoms, const double lambda1, const double lambda2 = 0, const size_t maxIterations = 0, const double objTolerance = 0.01, const double newtonTolerance = 1e-6, const DictionaryInitializer& initializer = DictionaryInitializer()); /** * Set the parameters to SparseCoding. lambda2 defaults to 0. This * constructor will not train the model, and a subsequent call to Train() will * be required before the model can encode points with Encode(). * * @param atoms Number of atoms in dictionary. * @param lambda1 Regularization parameter for l1-norm penalty. * @param lambda2 Regularization parameter for l2-norm penalty. * @param maxIterations Maximum number of iterations to run algorithm. If 0, * the algorithm will run until convergence (or forever). * @param objTolerance Tolerance for objective function. When an iteration of * the algorithm produces an improvement smaller than this, the algorithm * will terminate. * @param newtonTolerance Tolerance for the Newton's method dictionary * optimization step. */ SparseCoding(const size_t atoms, const double lambda1, const double lambda2 = 0, const size_t maxIterations = 0, const double objTolerance = 0.01, const double newtonTolerance = 1e-6); /** * Train the sparse coding model on the given dataset. */ template void Train(const arma::mat& data, const DictionaryInitializer& initializer = DictionaryInitializer()); /** * Sparse code each point in the given dataset via LARS, using the current * dictionary and store the encoded data in the codes matrix. * * @param data Input data matrix to be encoded. * @param codes Output codes matrix. */ void Encode(const arma::mat& data, arma::mat& codes); /** * Learn dictionary via Newton method based on Lagrange dual. * * @param data Data matrix. * @param codes Matrix of codes. * @param adjacencies Indices of entries (unrolled column by column) of * the coding matrix Z that are non-zero (the adjacency matrix for the * bipartite graph of points and atoms). * @return the norm of the gradient of the Lagrange dual with respect to * the dual variables */ double OptimizeDictionary(const arma::mat& data, const arma::mat& codes, const arma::uvec& adjacencies); /** * Project each atom of the dictionary back onto the unit ball, if necessary. */ void ProjectDictionary(); /** * Compute the objective function. */ double Objective(const arma::mat& data, const arma::mat& codes) const; //! Access the dictionary. const arma::mat& Dictionary() const { return dictionary; } //! Modify the dictionary. arma::mat& Dictionary() { return dictionary; } //! Access the number of atoms. size_t Atoms() const { return atoms; } //! Modify the number of atoms. size_t& Atoms() { return atoms; } //! Access the L1 regularization term. double Lambda1() const { return lambda1; } //! Modify the L1 regularization term. double& Lambda1() { return lambda1; } //! Access the L2 regularization term. double Lambda2() const { return lambda2; } //! Modify the L2 regularization term. double& Lambda2() { return lambda2; } //! Get the maximum number of iterations. size_t MaxIterations() const { return maxIterations; } //! Modify the maximum number of iterations. 
size_t& MaxIterations() { return maxIterations; } //! Get the objective tolerance. double ObjTolerance() const { return objTolerance; } //! Modify the objective tolerance. double& ObjTolerance() { return objTolerance; } //! Get the tolerance for Newton's method (dictionary optimization step). double NewtonTolerance() const { return newtonTolerance; } //! Modify the tolerance for Newton's method (dictionary optimization step). double& NewtonTolerance() { return newtonTolerance; } //! Serialize the sparse coding model. template void Serialize(Archive& ar, const unsigned int /* version */); private: //! Number of atoms. size_t atoms; //! Dictionary (columns are atoms). arma::mat dictionary; //! l1 regularization term. double lambda1; //! l2 regularization term. double lambda2; //! Maximum number of iterations during training. size_t maxIterations; //! Tolerance for main objective. double objTolerance; //! Tolerance for Newton's method (dictionary training). double newtonTolerance; }; } // namespace sparse_coding } // namespace mlpack // Include implementation. #include "sparse_coding_impl.hpp" #endif mlpack-2.2.5/src/mlpack/methods/sparse_coding/sparse_coding_impl.hpp000066400000000000000000000076501315013601400256640ustar00rootroot00000000000000/** * @file sparse_coding_impl.hpp * @author Nishant Mehta * * Implementation of Sparse Coding with Dictionary Learning using l1 (LASSO) or * l1+l2 (Elastic Net) regularization. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_METHODS_SPARSE_CODING_SPARSE_CODING_IMPL_HPP #define MLPACK_METHODS_SPARSE_CODING_SPARSE_CODING_IMPL_HPP // In case it hasn't already been included. #include "sparse_coding.hpp" namespace mlpack { namespace sparse_coding { template SparseCoding::SparseCoding( const arma::mat& data, const size_t atoms, const double lambda1, const double lambda2, const size_t maxIterations, const double objTolerance, const double newtonTolerance, const DictionaryInitializer& initializer) : atoms(atoms), lambda1(lambda1), lambda2(lambda2), maxIterations(maxIterations), objTolerance(objTolerance), newtonTolerance(newtonTolerance) { Train(data, initializer); } template void SparseCoding::Train( const arma::mat& data, const DictionaryInitializer& initializer) { // Now, train. Timer::Start("sparse_coding"); // Initialize the dictionary. initializer.Initialize(data, atoms, dictionary); double lastObjVal = DBL_MAX; // Take the initial coding step, which has to happen before entering the main // optimization loop. Log::Info << "Initial coding step." << std::endl; arma::mat codes(atoms, data.n_cols); Encode(data, codes); arma::uvec adjacencies = find(codes); Log::Info << " Sparsity level: " << 100.0 * ((double) (adjacencies.n_elem)) / ((double) (atoms * data.n_cols)) << "%." << std::endl; Log::Info << " Objective value: " << Objective(data, codes) << "." << std::endl; for (size_t t = 1; t != maxIterations; ++t) { // Print current iteration, and maximum number of iterations (if it isn't // 0). Log::Info << "Iteration " << t; if (maxIterations != 0) Log::Info << " of " << maxIterations; Log::Info << "." << std::endl; // First step: optimize the dictionary. Log::Info << "Performing dictionary step... 
" << std::endl; OptimizeDictionary(data, codes, adjacencies); Log::Info << " Objective value: " << Objective(data, codes) << "." << std::endl; // Second step: perform the coding. Log::Info << "Performing coding step..." << std::endl; Encode(data, codes); // Get the indices of all the nonzero elements in the codes. adjacencies = find(codes); Log::Info << " Sparsity level: " << 100.0 * ((double) (adjacencies.n_elem)) / ((double) (atoms * data.n_cols)) << "%." << std::endl; // Find the new objective value and improvement so we can check for // convergence. double curObjVal = Objective(data, codes); double improvement = lastObjVal - curObjVal; Log::Info << " Objective value: " << curObjVal << " (improvement " << std::scientific << improvement << ")." << std::endl; // Have we converged? if (improvement < objTolerance) { Log::Info << "Converged within tolerance " << objTolerance << ".\n"; break; } lastObjVal = curObjVal; } Timer::Stop("sparse_coding"); } template void SparseCoding::Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(atoms, "atoms"); ar & data::CreateNVP(dictionary, "dictionary"); ar & data::CreateNVP(lambda1, "lambda1"); ar & data::CreateNVP(lambda2, "lambda2"); ar & data::CreateNVP(maxIterations, "maxIterations"); ar & data::CreateNVP(objTolerance, "objTolerance"); ar & data::CreateNVP(newtonTolerance, "newtonTolerance"); } } // namespace sparse_coding } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/methods/sparse_coding/sparse_coding_main.cpp000066400000000000000000000243451315013601400256420ustar00rootroot00000000000000/** * @file sparse_coding_main.cpp * @author Nishant Mehta * * Executable for Sparse Coding. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "sparse_coding.hpp" PROGRAM_INFO("Sparse Coding", "An implementation of Sparse Coding with " "Dictionary Learning, which achieves sparsity via an l1-norm regularizer on" " the codes (LASSO) or an (l1+l2)-norm regularizer on the codes (the " "Elastic Net). Given a dense data matrix X with n points and d dimensions," " sparse coding seeks to find a dense dictionary matrix D with k atoms in " "d dimensions, and a sparse coding matrix Z with n points in k dimensions." "\n\n" "The original data matrix X can then be reconstructed as D * Z. Therefore," " this program finds a representation of each point in X as a sparse linear" " combination of atoms in the dictionary D." "\n\n" "The sparse coding is found with an algorithm which alternates between a " "dictionary step, which updates the dictionary D, and a sparse coding step," " which updates the sparse coding matrix." "\n\n" "Once a dictionary D is found, the sparse coding model may be used to " "encode other matrices, and saved for future usage." "\n\n" "To run this program, either an input matrix or an already-saved sparse " "coding model must be specified. An input matrix may be specified with the" " --training_file (-t) option, along with the number of atoms in the " "dictionary (--atoms, or -k). It is also possible to specify an initial " "dictionary for the optimization, with the --initial_dictionary (-i) " "option. An input model may be specified with the --input_model_file (-m) " "option. There are also other training options available." 
"\n\n" "As an example, to build a sparse coding model on the dataset in " "data.csv using 200 atoms and an l1-regularization parameter of 0.1, saving" " the model into model.xml, use " "\n\n" "$ sparse_coding -t data.csv -k 200 -l 0.1 -M model.xml" "\n\n" "Then, this model could be used to encode a new matrix, otherdata.csv, and " "save the output codes to codes.csv:" "\n\n" "$ sparse_coding -m model.xml -T otherdata.csv -c codes.csv"); // Train the model. PARAM_STRING_IN("training_file", "Filename of the training data (X).", "t", ""); PARAM_INT_IN("atoms", "Number of atoms in the dictionary.", "k", 0); PARAM_DOUBLE_IN("lambda1", "Sparse coding l1-norm regularization parameter.", "l", 0); PARAM_DOUBLE_IN("lambda2", "Sparse coding l2-norm regularization parameter.", "L", 0); PARAM_INT_IN("max_iterations", "Maximum number of iterations for sparse coding " "(0 indicates no limit).", "n", 0); PARAM_STRING_IN("initial_dictionary", "Filename for optional initial " "dictionary.", "i", ""); PARAM_FLAG("normalize", "If set, the input data matrix will be normalized " "before coding.", "N"); PARAM_INT_IN("seed", "Random seed. If 0, 'std::time(NULL)' is used.", "s", 0); PARAM_DOUBLE_IN("objective_tolerance", "Tolerance for convergence of the " "objective function.", "o", 0.01); PARAM_DOUBLE_IN("newton_tolerance", "Tolerance for convergence of Newton " "method.", "w", 1e-6); // Load/save a model. PARAM_STRING_IN("input_model_file", "File containing input sparse coding " "model.", "m", ""); PARAM_STRING_OUT("output_model_file", "File to save trained sparse coding " "model to.", "M"); PARAM_STRING_OUT("dictionary_file", "Filename to save the output dictionary " "to.", "d"); PARAM_STRING_OUT("codes_file", "Filename to save the output sparse codes to.", "c"); PARAM_STRING_OUT("test_file", "File containing data matrix to be encoded by " "trained model.", "T"); using namespace arma; using namespace std; using namespace mlpack; using namespace mlpack::math; using namespace mlpack::sparse_coding; int main(int argc, char* argv[]) { CLI::ParseCommandLine(argc, argv); if (CLI::GetParam("seed") != 0) RandomSeed((size_t) CLI::GetParam("seed")); else RandomSeed((size_t) time(NULL)); // Check for parameter validity. if (CLI::HasParam("input_model_file") && CLI::HasParam("initial_dictionary")) Log::Fatal << "Cannot specify both --input_model_file (-m) and " << "--initial_dictionary (-i)!" << endl; if (CLI::HasParam("training_file") && !CLI::HasParam("atoms")) Log::Fatal << "If --training_file is specified, the number of atoms in the " << "dictionary must be specified with --atoms (-k)!" << endl; if (!CLI::HasParam("training_file") && !CLI::HasParam("input_model_file")) Log::Fatal << "One of --training_file (-t) or --input_model_file (-m) must " << "be specified!" << endl; if (!CLI::HasParam("codes_file") && !CLI::HasParam("dictionary_file") && !CLI::HasParam("output_model_file")) Log::Warn << "Neither --codes_file (-c), --dictionary_file (-d), nor " << "--output_model_file (-M) are specified; no output will be saved." << endl; if (CLI::HasParam("codes_file") && !CLI::HasParam("test_file")) Log::Fatal << "--codes_file (-c) is specified, but no test matrix (" << "specified with --test_file or -T) is given to encode!" << endl; if (!CLI::HasParam("training_file")) { if (CLI::HasParam("atoms")) Log::Warn << "--atoms (-k) ignored because --training_file (-t) is not " << "specified." << endl; if (CLI::HasParam("lambda1")) Log::Warn << "--lambda1 (-l) ignored because --training_file (-t) is not " << "specified." 
<< endl; if (CLI::HasParam("lambda2")) Log::Warn << "--lambda2 (-L) ignored because --training_file (-t) is not " << "specified." << endl; if (CLI::HasParam("initial_dictionary")) Log::Warn << "--initial_dictionary (-i) ignored because --training_file " << "(-t) is not specified." << endl; if (CLI::HasParam("max_iterations")) Log::Warn << "--max_iterations (-n) ignored because --training_file (-t) " << "is not specified." << endl; if (CLI::HasParam("normalize")) Log::Warn << "--normalize (-N) ignored because --training_file (-t) is " << "not specified." << endl; if (CLI::HasParam("objective_tolerance")) Log::Warn << "--objective_tolerance (-o) ignored because --training_file " << "(-t) is not specified." << endl; if (CLI::HasParam("newton_tolerance")) Log::Warn << "--newton_tolerance (-w) ignored because --training_file " << "(-t) is not specified." << endl; } // Do we have an existing model? SparseCoding sc(0, 0.0); if (CLI::HasParam("input_model_file")) { data::Load(CLI::GetParam("input_model_file"), "sparse_coding_model", sc, true); } if (CLI::HasParam("training_file")) { mat matX; data::Load(CLI::GetParam("training_file"), matX, true); // Normalize each point if the user asked for it. if (CLI::HasParam("normalize")) { Log::Info << "Normalizing data before coding..." << endl; for (size_t i = 0; i < matX.n_cols; ++i) matX.col(i) /= norm(matX.col(i), 2); } sc.Lambda1() = CLI::GetParam("lambda1"); sc.Lambda2() = CLI::GetParam("lambda2"); sc.MaxIterations() = (size_t) CLI::GetParam("max_iterations"); sc.Atoms() = (size_t) CLI::GetParam("atoms"); sc.ObjTolerance() = CLI::GetParam("objective_tolerance"); sc.NewtonTolerance() = CLI::GetParam("newton_tolerance"); // Inform the user if we are overwriting their model. if (CLI::HasParam("input_model_file")) { Log::Info << "Using dictionary from existing model in '" << CLI::GetParam("input_model_file") << "' as initial " << "dictionary for training." << endl; sc.Train(matX); } else if (CLI::HasParam("initial_dictionary")) { // Load initial dictionary directly into sparse coding object. data::Load(CLI::GetParam("initial_dictionary"), sc.Dictionary(), true); // Validate size of initial dictionary. if (sc.Dictionary().n_cols != sc.Atoms()) { Log::Fatal << "The initial dictionary has " << sc.Dictionary().n_cols << " atoms, but the number of atoms was specified to be " << sc.Atoms() << "!" << endl; } if (sc.Dictionary().n_rows != matX.n_rows) { Log::Fatal << "The initial dictionary has " << sc.Dictionary().n_rows << " dimensions, but the data has " << matX.n_rows << " dimensions!" << endl; } // Run sparse coding. sc.Train(matX); } else { // Run sparse coding with the default initialization. sc.Train(matX); } } // Now, de we have any matrix to encode? if (CLI::HasParam("test_file")) { mat matY; data::Load(CLI::GetParam("test_file"), matY, true); if (matY.n_rows != sc.Dictionary().n_rows) Log::Fatal << "Model was trained with a dimensionality of " << sc.Dictionary().n_rows << ", but data in test file '" << CLI::GetParam("test_file") << " has a dimensionality of " << matY.n_rows << "!" << endl; // Normalize each point if the user asked for it. if (CLI::HasParam("normalize")) { Log::Info << "Normalizing test data before coding..." << endl; for (size_t i = 0; i < matY.n_cols; ++i) matY.col(i) /= norm(matY.col(i), 2); } mat codes; sc.Encode(matY, codes); if (CLI::HasParam("codes_file")) data::Save(CLI::GetParam("codes_file"), codes); } // Did the user want to save the dictionary? 
if (CLI::HasParam("dictionary_file")) data::Save(CLI::GetParam("dictionary_file"), sc.Dictionary()); // Did the user want to save the model? if (CLI::HasParam("output_model_file")) data::Save(CLI::GetParam("output_model_file"), "sparse_coding_model", sc, false); // Non-fatal on failure. } mlpack-2.2.5/src/mlpack/prereqs.hpp000066400000000000000000000063621315013601400172200ustar00rootroot00000000000000/** * @file prereqs.hpp * * The core includes that mlpack expects; standard C++ includes and Armadillo. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_PREREQS_HPP #define MLPACK_PREREQS_HPP // Defining _USE_MATH_DEFINES should set M_PI. #define _USE_MATH_DEFINES #include // First, check if Armadillo was included before, warning if so. #ifdef ARMA_INCLUDES #pragma message "Armadillo was included before mlpack; this can sometimes cause\ problems. It should only be necessary to include and not \ ." #endif // Next, standard includes. #include #include #include #include #include #include #include #include #include #include #include // Defining _USE_MATH_DEFINES should set M_PI. #define _USE_MATH_DEFINES #include // For tgamma(). #include // But if it's not defined, we'll do it. #ifndef M_PI #define M_PI 3.141592653589793238462643383279 #endif // Give ourselves a nice way to force functions to be inline if we need. #define force_inline #if defined(__GNUG__) && !defined(DEBUG) #undef force_inline #define force_inline __attribute__((always_inline)) #elif defined(_MSC_VER) && !defined(DEBUG) #undef force_inline #define force_inline __forceinline #endif // We'll need the necessary boost::serialization features, as well as what we // use with mlpack. In Boost 1.59 and newer, the BOOST_PFTO code is no longer // defined, but we still need to define it (as nothing) so that the mlpack // serialization shim compiles. #include #include #include // boost_backport.hpp handles the version and backporting of serialization (and // other) features. #include "mlpack/core/boost_backport/boost_backport.hpp" // Boost 1.59 and newer don't use BOOST_PFTO, but our shims do. We can resolve // any issue by setting BOOST_PFTO to nothing. #ifndef BOOST_PFTO #define BOOST_PFTO #endif #include #include // Now include Armadillo through the special mlpack extensions. #include #include // Ensure that the user isn't doing something stupid with their Armadillo // defines. #include // All code should have access to logging. #include #include #include // On Visual Studio, disable C4519 (default arguments for function templates) // since it's by default an error, which doesn't even make any sense because // it's part of the C++11 standard. #ifdef _MSC_VER #pragma warning(disable : 4519) #define ARMA_USE_CXX11 #endif // We need to be able to mark functions deprecated. #include #endif mlpack-2.2.5/src/mlpack/tests/000077500000000000000000000000001315013601400161615ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/tests/CMakeLists.txt000066400000000000000000000051461315013601400207270ustar00rootroot00000000000000# mlpack test executable. 
add_executable(mlpack_test
  adaboost_test.cpp akfn_test.cpp aknn_test.cpp arma_extend_test.cpp
  armadillo_svd_test.cpp aug_lagrangian_test.cpp binarize_test.cpp cf_test.cpp
  cli_test.cpp cosine_tree_test.cpp dbscan_test.cpp decision_stump_test.cpp
  decision_tree_test.cpp det_test.cpp distribution_test.cpp
  drusilla_select_test.cpp emst_test.cpp fastmks_test.cpp gmm_test.cpp
  gradient_descent_test.cpp hmm_test.cpp hoeffding_tree_test.cpp
  hyperplane_test.cpp imputation_test.cpp ind2sub_test.cpp kernel_test.cpp
  kernel_pca_test.cpp kernel_traits_test.cpp kfn_test.cpp kmeans_test.cpp
  knn_test.cpp krann_search_test.cpp lars_test.cpp lbfgs_test.cpp
  lin_alg_test.cpp linear_regression_test.cpp load_save_test.cpp
  local_coordinate_coding_test.cpp log_test.cpp logistic_regression_test.cpp
  lrsdp_test.cpp lsh_test.cpp math_test.cpp matrix_completion_test.cpp
  maximal_inputs_test.cpp mean_shift_test.cpp metric_test.cpp
  minibatch_sgd_test.cpp mlpack_test.cpp nbc_test.cpp nca_test.cpp
  nmf_test.cpp nystroem_method_test.cpp octree_test.cpp pca_test.cpp
  perceptron_test.cpp qdafn_test.cpp quic_svd_test.cpp radical_test.cpp
  randomized_svd_test.cpp range_search_test.cpp rectangle_tree_test.cpp
  regularized_svd_test.cpp sa_test.cpp sdp_primal_dual_test.cpp sgd_test.cpp
  serialization.hpp serialization.cpp serialization_test.cpp
  softmax_regression_test.cpp sort_policy_test.cpp sparse_autoencoder_test.cpp
  sparse_coding_test.cpp spill_tree_test.cpp split_data_test.cpp
  svd_batch_test.cpp svd_incremental_test.cpp termination_policy_test.cpp
  tree_test.cpp tree_traits_test.cpp union_find_test.cpp ub_tree_test.cpp
  vantage_point_tree_test.cpp prefixedoutstream_test.cpp timer_test.cpp
)

# Link dependencies of test executable.
target_link_libraries(mlpack_test
  mlpack
  ${BOOST_unit_test_framework_LIBRARY}
)

# Copy test data into right place.
add_custom_command(TARGET mlpack_test
  POST_BUILD
  COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_CURRENT_SOURCE_DIR}/data/
      ${PROJECT_BINARY_DIR}
)
add_custom_command(TARGET mlpack_test
  POST_BUILD
  COMMAND ${CMAKE_COMMAND} -E tar xjpf mnist_first250_training_4s_and_9s.tar.bz2
  WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
)

# For 'make test'.
add_test(NAME mlpack_test
  COMMAND mlpack_test "--log_level=test_suite" # Set UTF runtime param.
  WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/ # This is where test files are put.
)
mlpack-2.2.5/src/mlpack/tests/adaboost_test.cpp000066400000000000000000000710131315013601400215220ustar00rootroot00000000000000
/**
 * @file adaboost_test.cpp
 * @author Udit Saxena
 *
 * Tests for AdaBoost class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include
#include
#include
#include "test_tools.hpp"
#include "serialization.hpp"

using namespace arma;
using namespace mlpack;
using namespace mlpack::adaboost;
using namespace mlpack::decision_stump;
using namespace mlpack::perceptron;

BOOST_AUTO_TEST_SUITE(AdaBoostTest);

/**
 * This test case runs the AdaBoost.mh algorithm on the UCI Iris dataset.  It
 * checks whether the Hamming loss breaches the upper bound, which is provided
 * by ztAccumulator.
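 * (This is the Schapire & Singer bound for AdaBoost.MH: the training Hamming
 * loss never exceeds prod_t Z_t, the product of the per-round weight
 * normalizers, which is what ZtProduct() reports after training.)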
*/ BOOST_AUTO_TEST_CASE(HammingLossBoundIris) { arma::mat inputData; if (!data::Load("iris.csv", inputData)) BOOST_FAIL("Cannot load test dataset iris.csv!"); arma::Mat labels; if (!data::Load("iris_labels.txt", labels)) BOOST_FAIL("Cannot load labels for iris iris_labels.txt"); // Define your own weak learner, perceptron in this case. // Run the perceptron for perceptronIter iterations. int perceptronIter = 400; Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); // Define parameters for AdaBoost. size_t iterations = 100; double tolerance = 1e-10; AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double hammingLoss = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(hammingLoss, a.ZtProduct()); } /** * This test case runs the AdaBoost.mh algorithm on the UCI Iris dataset. It * checks if the error returned by running a single instance of the weak learner * is worse than running the boosted weak learner using adaboost. */ BOOST_AUTO_TEST_CASE(WeakLearnerErrorIris) { arma::mat inputData; if (!data::Load("iris.csv", inputData)) BOOST_FAIL("Cannot load test dataset iris.csv!"); arma::Mat labels; if (!data::Load("iris_labels.txt",labels)) BOOST_FAIL("Cannot load labels for iris iris_labels.txt"); // Define your own weak learner, perceptron in this case. // Run the perceptron for perceptronIter iterations. int perceptronIter = 400; arma::Row perceptronPrediction(labels.n_cols); Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); p.Classify(inputData, perceptronPrediction); size_t countWeakLearnerError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != perceptronPrediction(i)) countWeakLearnerError++; double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols; // Define parameters for AdaBoost. size_t iterations = 100; double tolerance = 1e-10; AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double error = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(error, weakLearnerErrorRate); } /** * This test case runs the AdaBoost.mh algorithm on the UCI Vertebral Column * dataset. It checks whether the hamming loss breaches the upperbound, which * is provided by ztAccumulator. */ BOOST_AUTO_TEST_CASE(HammingLossBoundVertebralColumn) { arma::mat inputData; if (!data::Load("vc2.csv", inputData)) BOOST_FAIL("Cannot load test dataset vc2.csv!"); arma::Mat labels; if (!data::Load("vc2_labels.txt",labels)) BOOST_FAIL("Cannot load labels for vc2_labels.txt"); // Define your own weak learner, perceptron in this case. // Run the perceptron for perceptronIter iterations. size_t perceptronIter = 800; Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); // Define parameters for AdaBoost. 
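// (Parenthetically: the error-counting loops in these tests all compute the
// same quantity; a hypothetical helper, not part of this file, could fold
// them into one call:
//
//   double HammingLoss(const arma::Row<size_t>& a, const arma::Row<size_t>& b)
//   {
//     return (double) arma::accu(a != b) / a.n_elem;
//   }
// )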
size_t iterations = 50; double tolerance = 1e-10; AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double hammingLoss = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(hammingLoss, a.ZtProduct()); } /** * This test case runs the AdaBoost.mh algorithm on the UCI Vertebral Column * dataset. It checks if the error returned by running a single instance of the * weak learner is worse than running the boosted weak learner using adaboost. */ BOOST_AUTO_TEST_CASE(WeakLearnerErrorVertebralColumn) { arma::mat inputData; if (!data::Load("vc2.csv", inputData)) BOOST_FAIL("Cannot load test dataset vc2.csv!"); arma::Mat labels; if (!data::Load("vc2_labels.txt",labels)) BOOST_FAIL("Cannot load labels for vc2_labels.txt"); // Define your own weak learner, perceptron in this case. // Run the perceptron for perceptronIter iterations. size_t perceptronIter = 800; Row perceptronPrediction(labels.n_cols); Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); p.Classify(inputData, perceptronPrediction); size_t countWeakLearnerError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != perceptronPrediction(i)) countWeakLearnerError++; double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols; // Define parameters for AdaBoost. size_t iterations = 50; double tolerance = 1e-10; AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double error = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(error, weakLearnerErrorRate); } /** * This test case runs the AdaBoost.mh algorithm on non-linearly separable * dataset. It checks whether the hamming loss breaches the upperbound, which * is provided by ztAccumulator. */ BOOST_AUTO_TEST_CASE(HammingLossBoundNonLinearSepData) { arma::mat inputData; if (!data::Load("train_nonlinsep.txt", inputData)) BOOST_FAIL("Cannot load test dataset train_nonlinsep.txt!"); arma::Mat labels; if (!data::Load("train_labels_nonlinsep.txt",labels)) BOOST_FAIL("Cannot load labels for train_labels_nonlinsep.txt"); // Define your own weak learner, perceptron in this case. // Run the perceptron for perceptronIter iterations. size_t perceptronIter = 800; Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); // Define parameters for AdaBoost. size_t iterations = 50; double tolerance = 1e-10; AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double hammingLoss = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(hammingLoss, a.ZtProduct()); } /** * This test case runs the AdaBoost.mh algorithm on a non-linearly separable * dataset. It checks if the error returned by running a single instance of the * weak learner is worse than running the boosted weak learner using AdaBoost. 
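 * (The boosted classifier's final decision is the weighted vote
 * H(x) = argmax_l sum_t alpha_t * [h_t(x) = l]; these tests verify
 * empirically that this vote does at least as well on the training data as
 * the single weak learner it was built from.)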
*/ BOOST_AUTO_TEST_CASE(WeakLearnerErrorNonLinearSepData) { arma::mat inputData; if (!data::Load("train_nonlinsep.txt", inputData)) BOOST_FAIL("Cannot load test dataset train_nonlinsep.txt!"); arma::Mat labels; if (!data::Load("train_labels_nonlinsep.txt",labels)) BOOST_FAIL("Cannot load labels for train_labels_nonlinsep.txt"); // Define your own weak learner, perceptron in this case. // Run the perceptron for perceptronIter iterations. size_t perceptronIter = 800; Row perceptronPrediction(labels.n_cols); Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); p.Classify(inputData, perceptronPrediction); size_t countWeakLearnerError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != perceptronPrediction(i)) countWeakLearnerError++; double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols; // Define parameters for AdaBoost. size_t iterations = 50; double tolerance = 1e-10; AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double error = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(error, weakLearnerErrorRate); } /** * This test case runs the AdaBoost.mh algorithm on the UCI Iris dataset. It * checks whether the Hamming loss breaches the upper bound, which is provided * by ztAccumulator. This uses decision stumps as the weak learner. */ BOOST_AUTO_TEST_CASE(HammingLossIris_DS) { arma::mat inputData; if (!data::Load("iris.csv", inputData)) BOOST_FAIL("Cannot load test dataset iris.csv!"); arma::Mat labels; if (!data::Load("iris_labels.txt",labels)) BOOST_FAIL("Cannot load labels for iris_labels.txt"); // Define your own weak learner, decision stumps in this case. const size_t numClasses = 3; const size_t inpBucketSize = 6; DecisionStump<> ds(inputData, labels.row(0), numClasses, inpBucketSize); // Define parameters for AdaBoost. size_t iterations = 50; double tolerance = 1e-10; AdaBoost> a(inputData, labels.row(0), ds, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double hammingLoss = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(hammingLoss, a.ZtProduct()); } /** * This test case runs the AdaBoost.mh algorithm on a non-linearly separable * dataset. It checks if the error returned by running a single instance of the * weak learner is worse than running the boosted weak learner using adaboost. * This is for the weak learner: decision stumps. */ BOOST_AUTO_TEST_CASE(WeakLearnerErrorIris_DS) { arma::mat inputData; if (!data::Load("iris.csv", inputData)) BOOST_FAIL("Cannot load test dataset iris.csv!"); arma::Mat labels; if (!data::Load("iris_labels.txt", labels)) BOOST_FAIL("Cannot load labels for iris_labels.txt"); // no need to map the labels here // Define your own weak learner, decision stumps in this case. const size_t numClasses = 3; const size_t inpBucketSize = 6; arma::Row dsPrediction(labels.n_cols); DecisionStump<> ds(inputData, labels.row(0), numClasses, inpBucketSize); ds.Classify(inputData, dsPrediction); size_t countWeakLearnerError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != dsPrediction(i)) countWeakLearnerError++; double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols; // Define parameters for AdaBoost. 
size_t iterations = 50; double tolerance = 1e-10; AdaBoost> a(inputData, labels.row(0), ds, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double error = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(error, weakLearnerErrorRate); } /** * This test case runs the AdaBoost.mh algorithm on the UCI Vertebral Column * dataset. It checks if the error returned by running a single instance of the * weak learner is worse than running the boosted weak learner using adaboost. * This is for the weak learner: decision stumps. */ BOOST_AUTO_TEST_CASE(HammingLossBoundVertebralColumn_DS) { arma::mat inputData; if (!data::Load("vc2.csv", inputData)) BOOST_FAIL("Cannot load test dataset vc2.csv!"); arma::Mat labels; if (!data::Load("vc2_labels.txt",labels)) BOOST_FAIL("Cannot load labels for vc2_labels.txt"); // Define your own weak learner, decision stumps in this case. const size_t numClasses = 3; const size_t inpBucketSize = 6; DecisionStump<> ds(inputData, labels.row(0), numClasses, inpBucketSize); // Define parameters for AdaBoost. size_t iterations = 50; double tolerance = 1e-10; AdaBoost> a(inputData, labels.row(0), ds, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double hammingLoss = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(hammingLoss, a.ZtProduct()); } /** * This test case runs the AdaBoost.mh algorithm on the UCI Vertebral Column * dataset. It checks if the error returned by running a single instance of the * weak learner is worse than running the boosted weak learner using adaboost. * This is for the weak learner: decision stumps. */ BOOST_AUTO_TEST_CASE(WeakLearnerErrorVertebralColumn_DS) { arma::mat inputData; if (!data::Load("vc2.csv", inputData)) BOOST_FAIL("Cannot load test dataset vc2.csv!"); arma::Mat labels; if (!data::Load("vc2_labels.txt", labels)) BOOST_FAIL("Cannot load labels for vc2_labels.txt"); // Define your own weak learner, decision stumps in this case. const size_t numClasses = 3; const size_t inpBucketSize = 6; arma::Row dsPrediction(labels.n_cols); DecisionStump<> ds(inputData, labels.row(0), numClasses, inpBucketSize); ds.Classify(inputData, dsPrediction); size_t countWeakLearnerError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != dsPrediction(i)) countWeakLearnerError++; double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols; // Define parameters for AdaBoost. size_t iterations = 50; double tolerance = 1e-10; AdaBoost> a(inputData, labels.row(0), ds, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double error = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(error, weakLearnerErrorRate); } /** * This test case runs the AdaBoost.mh algorithm on non-linearly separable * dataset. It checks whether the hamming loss breaches the upperbound, which * is provided by ztAccumulator. This is for the weak learner: decision stumps. 
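 * (A decision stump is a one-level decision tree that thresholds a single
 * input dimension; inpBucketSize, an mlpack DecisionStump parameter, sets the
 * minimum number of points per bin when the stump discretizes the chosen
 * dimension.)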
*/ BOOST_AUTO_TEST_CASE(HammingLossBoundNonLinearSepData_DS) { arma::mat inputData; if (!data::Load("train_nonlinsep.txt", inputData)) BOOST_FAIL("Cannot load test dataset train_nonlinsep.txt!"); arma::Mat labels; if (!data::Load("train_labels_nonlinsep.txt",labels)) BOOST_FAIL("Cannot load labels for train_labels_nonlinsep.txt"); // Define your own weak learner, decision stumps in this case. const size_t numClasses = 2; const size_t inpBucketSize = 6; DecisionStump<> ds(inputData, labels.row(0), numClasses, inpBucketSize); // Define parameters for Adaboost. size_t iterations = 50; double tolerance = 1e-10; AdaBoost > a(inputData, labels.row(0), ds, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double hammingLoss = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(hammingLoss, a.ZtProduct()); } /** * This test case runs the AdaBoost.mh algorithm on a non-linearly separable * dataset. It checks if the error returned by running a single instance of the * weak learner is worse than running the boosted weak learner using adaboost. * This for the weak learner: decision stumps. */ BOOST_AUTO_TEST_CASE(WeakLearnerErrorNonLinearSepData_DS) { arma::mat inputData; if (!data::Load("train_nonlinsep.txt", inputData)) BOOST_FAIL("Cannot load test dataset train_nonlinsep.txt!"); arma::Mat labels; if (!data::Load("train_labels_nonlinsep.txt",labels)) BOOST_FAIL("Cannot load labels for train_labels_nonlinsep.txt"); // Define your own weak learner, decision stumps in this case. const size_t numClasses = 2; const size_t inpBucketSize = 3; arma::Row dsPrediction(labels.n_cols); DecisionStump<> ds(inputData, labels.row(0), numClasses, inpBucketSize); ds.Classify(inputData, dsPrediction); size_t countWeakLearnerError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != dsPrediction(i)) countWeakLearnerError++; double weakLearnerErrorRate = (double) countWeakLearnerError / labels.n_cols; // Define parameters for AdaBoost. size_t iterations = 500; double tolerance = 1e-23; AdaBoost > a(inputData, labels.row(0), ds, iterations, tolerance); arma::Row predictedLabels; a.Classify(inputData, predictedLabels); size_t countError = 0; for (size_t i = 0; i < labels.n_cols; i++) if (labels(i) != predictedLabels(i)) countError++; double error = (double) countError / labels.n_cols; BOOST_REQUIRE_LE(error, weakLearnerErrorRate); } /** * This test case runs the AdaBoost.mh algorithm on the UCI Vertebral Column * dataset. It tests the Classify function and checks for a satisfactory error * rate. */ BOOST_AUTO_TEST_CASE(ClassifyTest_VERTEBRALCOL) { arma::mat inputData; if (!data::Load("vc2.csv", inputData)) BOOST_FAIL("Cannot load test dataset vc2.csv!"); arma::Mat labels; if (!data::Load("vc2_labels.txt",labels)) BOOST_FAIL("Cannot load labels for vc2_labels.txt"); // Define your own weak learner, perceptron in this case. // Run the perceptron for perceptronIter iterations. size_t perceptronIter = 1000; arma::mat testData; if (!data::Load("vc2_test.csv", testData)) BOOST_FAIL("Cannot load test dataset vc2_test.csv!"); arma::Mat trueTestLabels; if (!data::Load("vc2_test_labels.txt",trueTestLabels)) BOOST_FAIL("Cannot load labels for vc2_test_labels.txt"); Row perceptronPrediction(labels.n_cols); Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); p.Classify(inputData, perceptronPrediction); // Define parameters for AdaBoost. 
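// (The tolerance below is AdaBoost's early-stopping knob: training halts once
// the weighted error measure r_t changes by less than this between rounds, so
// a tiny value such as 1e-10 effectively lets all the iterations run.)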
size_t iterations = 100; double tolerance = 1e-10; AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance); arma::Row predictedLabels(testData.n_cols); a.Classify(testData, predictedLabels); size_t localError = 0; for (size_t i = 0; i < trueTestLabels.n_cols; i++) if (trueTestLabels(i) != predictedLabels(i)) localError++; double lError = (double) localError / trueTestLabels.n_cols; BOOST_REQUIRE_LE(lError, 0.30); } /** * This test case runs the AdaBoost.mh algorithm on a non linearly separable * dataset. It tests the Classify function and checks for a satisfactory error * rate. */ BOOST_AUTO_TEST_CASE(ClassifyTest_NONLINSEP) { arma::mat inputData; if (!data::Load("train_nonlinsep.txt", inputData)) BOOST_FAIL("Cannot load test dataset train_nonlinsep.txt!"); arma::Mat labels; if (!data::Load("train_labels_nonlinsep.txt", labels)) BOOST_FAIL("Cannot load labels for train_labels_nonlinsep.txt"); // Define your own weak learner; in this test decision stumps are used. const size_t numClasses = 2; const size_t inpBucketSize = 3; arma::mat testData; if (!data::Load("test_nonlinsep.txt", testData)) BOOST_FAIL("Cannot load test dataset test_nonlinsep.txt!"); arma::Mat trueTestLabels; if (!data::Load("test_labels_nonlinsep.txt", trueTestLabels)) BOOST_FAIL("Cannot load labels for test_labels_nonlinsep.txt"); arma::Row dsPrediction(labels.n_cols); DecisionStump<> ds(inputData, labels.row(0), numClasses, inpBucketSize); // Define parameters for AdaBoost. size_t iterations = 50; double tolerance = 1e-10; AdaBoost > a(inputData, labels.row(0), ds, iterations, tolerance); arma::Row predictedLabels(testData.n_cols); a.Classify(testData, predictedLabels); size_t localError = 0; for (size_t i = 0; i < trueTestLabels.n_cols; i++) if (trueTestLabels(i) != predictedLabels(i)) localError++; double lError = (double) localError / trueTestLabels.n_cols; BOOST_REQUIRE_LE(lError, 0.30); } /** * This test case runs the AdaBoost.mh algorithm on the UCI Iris Dataset. It * trains it on two thirds of the Iris dataset (iris_train.csv), and tests on * the remaining third of the dataset (iris_test.csv). It tests the Classify() * function and checks for a satisfactory error rate. */ BOOST_AUTO_TEST_CASE(ClassifyTest_IRIS) { arma::mat inputData; if (!data::Load("iris_train.csv", inputData)) BOOST_FAIL("Cannot load test dataset iris_train.csv!"); arma::Mat labels; if (!data::Load("iris_train_labels.csv", labels)) BOOST_FAIL("Cannot load labels for iris_train_labels.csv"); // Define your own weak learner, perceptron in this case. // Run the perceptron for perceptronIter iterations. size_t perceptronIter = 800; Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); // Define parameters for AdaBoost. 
size_t iterations = 50; double tolerance = 1e-10; AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance); arma::mat testData; if (!data::Load("iris_test.csv", testData)) BOOST_FAIL("Cannot load test dataset iris_test.csv!"); arma::Row predictedLabels(testData.n_cols); a.Classify(testData, predictedLabels); arma::Mat trueTestLabels; if (!data::Load("iris_test_labels.csv", trueTestLabels)) BOOST_FAIL("Cannot load test dataset iris_test_labels.csv!"); size_t localError = 0; for (size_t i = 0; i < trueTestLabels.n_cols; i++) if (trueTestLabels(i) != predictedLabels(i)) localError++; double lError = (double) localError / labels.n_cols; BOOST_REQUIRE_LE(lError, 0.30); } /** * Ensure that the Train() function works like it is supposed to, by building * AdaBoost on one dataset and then re-training on another dataset. */ BOOST_AUTO_TEST_CASE(TrainTest) { // First train on the iris dataset. arma::mat inputData; if (!data::Load("iris_train.csv", inputData)) BOOST_FAIL("Cannot load test dataset iris_train.csv!"); arma::Mat labels; if (!data::Load("iris_train_labels.csv", labels)) BOOST_FAIL("Cannot load labels for iris_train_labels.csv"); size_t perceptronIter = 800; Perceptron<> p(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); // Now train AdaBoost. size_t iterations = 50; double tolerance = 1e-10; AdaBoost<> a(inputData, labels.row(0), p, iterations, tolerance); // Now load another dataset... if (!data::Load("vc2.csv", inputData)) BOOST_FAIL("Cannot load test dataset vc2.csv!"); if (!data::Load("vc2_labels.txt", labels)) BOOST_FAIL("Cannot load labels for vc2_labels.txt"); Perceptron<> p2(inputData, labels.row(0), max(labels.row(0)) + 1, perceptronIter); a.Train(inputData, labels.row(0), p2, iterations, tolerance); // Load test set to see if it trained on vc2 correctly. arma::mat testData; if (!data::Load("vc2_test.csv", testData)) BOOST_FAIL("Cannot load test dataset vc2_test.csv!"); arma::Mat trueTestLabels; if (!data::Load("vc2_test_labels.txt", trueTestLabels)) BOOST_FAIL("Cannot load labels for vc2_test_labels.txt"); // Define parameters for AdaBoost. arma::Row predictedLabels(testData.n_cols); a.Classify(testData, predictedLabels); int localError = 0; for (size_t i = 0; i < trueTestLabels.n_cols; i++) if (trueTestLabels(i) != predictedLabels(i)) localError++; double lError = (double) localError / trueTestLabels.n_cols; BOOST_REQUIRE_LE(lError, 0.30); } BOOST_AUTO_TEST_CASE(PerceptronSerializationTest) { // Build an AdaBoost object. mat data = randu(10, 500); Row labels(500); for (size_t i = 0; i < 250; ++i) labels[i] = 0; for (size_t i = 250; i < 500; ++i) labels[i] = 1; Perceptron<> p(data, labels, 2, 800); AdaBoost<> ab(data, labels, p, 50, 1e-10); // Now create another dataset to train with. mat otherData = randu(5, 200); Row otherLabels(200); for (size_t i = 0; i < 100; ++i) otherLabels[i] = 1; for (size_t i = 100; i < 150; ++i) otherLabels[i] = 0; for (size_t i = 150; i < 200; ++i) otherLabels[i] = 2; Perceptron<> p2(otherData, otherLabels, 3, 500); AdaBoost<> abText(otherData, otherLabels, p2, 50, 1e-10); AdaBoost<> abXml, abBinary; SerializeObjectAll(ab, abXml, abText, abBinary); // Now check that the objects are the same. 
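// (SerializeObjectAll(), from serialization.hpp in this directory, writes the
// first object through XML, text, and binary boost::serialization archives
// and reads the results back into the other three objects; the checks below
// then require all four to agree.)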
BOOST_REQUIRE_CLOSE(ab.Tolerance(), abXml.Tolerance(), 1e-5); BOOST_REQUIRE_CLOSE(ab.Tolerance(), abText.Tolerance(), 1e-5); BOOST_REQUIRE_CLOSE(ab.Tolerance(), abBinary.Tolerance(), 1e-5); BOOST_REQUIRE_CLOSE(ab.ZtProduct(), abXml.ZtProduct(), 1e-5); BOOST_REQUIRE_CLOSE(ab.ZtProduct(), abText.ZtProduct(), 1e-5); BOOST_REQUIRE_CLOSE(ab.ZtProduct(), abBinary.ZtProduct(), 1e-5); BOOST_REQUIRE_EQUAL(ab.WeakLearners(), abXml.WeakLearners()); BOOST_REQUIRE_EQUAL(ab.WeakLearners(), abText.WeakLearners()); BOOST_REQUIRE_EQUAL(ab.WeakLearners(), abBinary.WeakLearners()); for (size_t i = 0; i < ab.WeakLearners(); ++i) { CheckMatrices(ab.WeakLearner(i).Weights(), abXml.WeakLearner(i).Weights(), abText.WeakLearner(i).Weights(), abBinary.WeakLearner(i).Weights()); CheckMatrices(ab.WeakLearner(i).Biases(), abXml.WeakLearner(i).Biases(), abText.WeakLearner(i).Biases(), abBinary.WeakLearner(i).Biases()); } } BOOST_AUTO_TEST_CASE(DecisionStumpSerializationTest) { // Build an AdaBoost object. mat data = randu(10, 500); Row labels(500); for (size_t i = 0; i < 250; ++i) labels[i] = 0; for (size_t i = 250; i < 500; ++i) labels[i] = 1; DecisionStump<> p(data, labels, 2, 800); AdaBoost> ab(data, labels, p, 50, 1e-10); // Now create another dataset to train with. mat otherData = randu(5, 200); Row otherLabels(200); for (size_t i = 0; i < 100; ++i) otherLabels[i] = 1; for (size_t i = 100; i < 150; ++i) otherLabels[i] = 0; for (size_t i = 150; i < 200; ++i) otherLabels[i] = 2; DecisionStump<> p2(otherData, otherLabels, 3, 500); AdaBoost> abText(otherData, otherLabels, p2, 50, 1e-10); AdaBoost> abXml, abBinary; SerializeObjectAll(ab, abXml, abText, abBinary); // Now check that the objects are the same. BOOST_REQUIRE_CLOSE(ab.Tolerance(), abXml.Tolerance(), 1e-5); BOOST_REQUIRE_CLOSE(ab.Tolerance(), abText.Tolerance(), 1e-5); BOOST_REQUIRE_CLOSE(ab.Tolerance(), abBinary.Tolerance(), 1e-5); BOOST_REQUIRE_CLOSE(ab.ZtProduct(), abXml.ZtProduct(), 1e-5); BOOST_REQUIRE_CLOSE(ab.ZtProduct(), abText.ZtProduct(), 1e-5); BOOST_REQUIRE_CLOSE(ab.ZtProduct(), abBinary.ZtProduct(), 1e-5); BOOST_REQUIRE_EQUAL(ab.WeakLearners(), abXml.WeakLearners()); BOOST_REQUIRE_EQUAL(ab.WeakLearners(), abText.WeakLearners()); BOOST_REQUIRE_EQUAL(ab.WeakLearners(), abBinary.WeakLearners()); for (size_t i = 0; i < ab.WeakLearners(); ++i) { BOOST_REQUIRE_EQUAL(ab.WeakLearner(i).SplitDimension(), abXml.WeakLearner(i).SplitDimension()); BOOST_REQUIRE_EQUAL(ab.WeakLearner(i).SplitDimension(), abText.WeakLearner(i).SplitDimension()); BOOST_REQUIRE_EQUAL(ab.WeakLearner(i).SplitDimension(), abBinary.WeakLearner(i).SplitDimension()); CheckMatrices(ab.WeakLearner(i).Split(), abXml.WeakLearner(i).Split(), abText.WeakLearner(i).Split(), abBinary.WeakLearner(i).Split()); CheckMatrices(ab.WeakLearner(i).BinLabels(), abXml.WeakLearner(i).BinLabels(), abText.WeakLearner(i).BinLabels(), abBinary.WeakLearner(i).BinLabels()); } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/akfn_test.cpp000066400000000000000000000167151315013601400206550ustar00rootroot00000000000000/** * @file akfn_test.cpp * * Tests for KFN (k-furthest-neighbors) with different values of epsilon. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::neighbor; using namespace mlpack::tree; using namespace mlpack::metric; using namespace mlpack::bound; BOOST_AUTO_TEST_SUITE(AKFNTest); /** * Test the dual-tree furthest-neighbors method with different values for * epsilon. This uses both a query and reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(ApproxVsExact1) { arma::mat dataset; if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KFN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(dataset, 15, neighborsExact, distancesExact); for (size_t c = 0; c < 4; c++) { KFN* akfn; double epsilon; switch (c) { case 0: // Use the dual-tree method with e=0.02. epsilon = 0.02; break; case 1: // Use the dual-tree method with e=0.05. epsilon = 0.05; break; case 2: // Use the dual-tree method with e=0.10. epsilon = 0.10; break; case 3: // Use the dual-tree method with e=0.20. epsilon = 0.20; break; } // Now perform the actual calculation. akfn = new KFN(dataset, DUAL_TREE_MODE, epsilon); arma::Mat neighborsApprox; arma::mat distancesApprox; akfn->Search(dataset, 15, neighborsApprox, distancesApprox); for (size_t i = 0; i < neighborsApprox.n_elem; i++) REQUIRE_RELATIVE_ERR(distancesApprox(i), distancesExact(i), epsilon); // Clean the memory. delete akfn; } } /** * Test the dual-tree furthest-neighbors method with the exact method. This * uses only a reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(ApproxVsExact2) { arma::mat dataset; if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KFN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(15, neighborsExact, distancesExact); KFN akfn(dataset, DUAL_TREE_MODE, 0.05); arma::Mat neighborsApprox; arma::mat distancesApprox; akfn.Search(15, neighborsApprox, distancesApprox); for (size_t i = 0; i < neighborsApprox.n_elem; i++) REQUIRE_RELATIVE_ERR(distancesApprox[i], distancesExact[i], 0.05); } /** * Test the single-tree furthest-neighbors method with the exact method. This * uses only a reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(SingleTreeVsExact) { arma::mat dataset; if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KFN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(15, neighborsExact, distancesExact); KFN akfn(dataset, SINGLE_TREE_MODE, 0.05); arma::Mat neighborsApprox; arma::mat distancesApprox; akfn.Search(15, neighborsApprox, distancesApprox); for (size_t i = 0; i < neighborsApprox.n_elem; i++) REQUIRE_RELATIVE_ERR(distancesApprox[i], distancesExact[i], 0.05); } /** * Test the cover tree single-tree furthest-neighbors method against the exact * method. This uses only a random reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(SingleCoverTreeTest) { arma::mat dataset; dataset.randu(75, 1000); // 75 dimensional, 1000 points. 
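// (REQUIRE_RELATIVE_ERR(obtained, expected, e), defined in test_tools.hpp,
// asserts |expected - obtained| <= e * |expected|, so every approximate
// furthest-neighbor distance must stay within the epsilon handed to KFN.)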
KFN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(dataset, 15, neighborsExact, distancesExact); StandardCoverTree, arma::mat> tree(dataset); NeighborSearch, arma::mat, StandardCoverTree> coverTreeSearch(std::move(tree), SINGLE_TREE_MODE, 0.05); arma::Mat neighborsCoverTree; arma::mat distancesCoverTree; coverTreeSearch.Search(dataset, 15, neighborsCoverTree, distancesCoverTree); for (size_t i = 0; i < neighborsCoverTree.n_elem; ++i) REQUIRE_RELATIVE_ERR(distancesCoverTree[i], distancesExact[i], 0.05); } /** * Test the cover tree dual-tree furthest neighbors method against the exact * method. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(DualCoverTreeTest) { arma::mat dataset; data::Load("test_data_3_1000.csv", dataset); KFN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(dataset, 15, neighborsExact, distancesExact); StandardCoverTree, arma::mat> referenceTree(dataset); NeighborSearch, arma::mat, StandardCoverTree> coverTreeSearch(std::move(referenceTree), DUAL_TREE_MODE, 0.05); arma::Mat neighborsCoverTree; arma::mat distancesCoverTree; coverTreeSearch.Search(dataset, 15, neighborsCoverTree, distancesCoverTree); for (size_t i = 0; i < neighborsCoverTree.n_elem; ++i) REQUIRE_RELATIVE_ERR(distancesCoverTree[i], distancesExact[i], 0.05); } /** * Test the ball tree single-tree furthest-neighbors method against the exact * method. This uses only a random reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(SingleBallTreeTest) { arma::mat dataset; dataset.randu(75, 1000); // 75 dimensional, 1000 points. KFN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(dataset, 15, neighborsExact, distancesExact); NeighborSearch ballTreeSearch(dataset, SINGLE_TREE_MODE, 0.05); arma::Mat neighborsBallTree; arma::mat distancesBallTree; ballTreeSearch.Search(dataset, 15, neighborsBallTree, distancesBallTree); for (size_t i = 0; i < neighborsBallTree.n_elem; ++i) REQUIRE_RELATIVE_ERR(distancesBallTree(i), distancesExact(i), 0.05); } /** * Test the ball tree dual-tree furthest neighbors method against the exact * method. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(DualBallTreeTest) { arma::mat dataset; data::Load("test_data_3_1000.csv", dataset); KFN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(15, neighborsExact, distancesExact); NeighborSearch ballTreeSearch(dataset, DUAL_TREE_MODE, 0.05); arma::Mat neighborsBallTree; arma::mat distancesBallTree; ballTreeSearch.Search(15, neighborsBallTree, distancesBallTree); for (size_t i = 0; i < neighborsBallTree.n_elem; ++i) REQUIRE_RELATIVE_ERR(distancesBallTree(i), distancesExact(i), 0.05); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/aknn_test.cpp000066400000000000000000000415761315013601400206700ustar00rootroot00000000000000/** * @file aknn_test.cpp * * Test file for KNN class with different values of epsilon. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include #include #include #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::neighbor; using namespace mlpack::tree; using namespace mlpack::metric; using namespace mlpack::bound; BOOST_AUTO_TEST_SUITE(AKNNTest); /** * Test the dual-tree nearest-neighbors method with different values for * epsilon. This uses both a query and reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(ApproxVsExact1) { arma::mat dataset; if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KNN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(dataset, 15, neighborsExact, distancesExact); for (size_t c = 0; c < 4; c++) { KNN* aknn; double epsilon; switch (c) { case 0: // Use the dual-tree method with e=0.02. epsilon = 0.02; break; case 1: // Use the dual-tree method with e=0.05. epsilon = 0.05; break; case 2: // Use the dual-tree method with e=0.10. epsilon = 0.10; break; case 3: // Use the dual-tree method with e=0.20. epsilon = 0.20; break; } // Now perform the actual calculation. aknn = new KNN(dataset, DUAL_TREE_MODE, epsilon); arma::Mat neighborsApprox; arma::mat distancesApprox; aknn->Search(dataset, 15, neighborsApprox, distancesApprox); for (size_t i = 0; i < neighborsApprox.n_elem; i++) REQUIRE_RELATIVE_ERR(distancesApprox(i), distancesExact(i), epsilon); // Clean the memory. delete aknn; } } /** * Test the dual-tree nearest-neighbors method with the exact method. This uses * only a reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(ApproxVsExact2) { arma::mat dataset; if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KNN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(15, neighborsExact, distancesExact); KNN aknn(dataset, DUAL_TREE_MODE, 0.05); arma::Mat neighborsApprox; arma::mat distancesApprox; aknn.Search(15, neighborsApprox, distancesApprox); for (size_t i = 0; i < neighborsApprox.n_elem; i++) REQUIRE_RELATIVE_ERR(distancesApprox(i), distancesExact(i), 0.05); } /** * Test the single-tree nearest-neighbors method with the exact method. This * uses only a reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(SingleTreeApproxVsExact) { arma::mat dataset; if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KNN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(15, neighborsExact, distancesExact); KNN aknn(dataset, SINGLE_TREE_MODE, 0.05); arma::Mat neighborsApprox; arma::mat distancesApprox; aknn.Search(15, neighborsApprox, distancesApprox); for (size_t i = 0; i < neighborsApprox.n_elem; i++) REQUIRE_RELATIVE_ERR(distancesApprox[i], distancesExact[i], 0.05); } /** * Test the cover tree single-tree nearest-neighbors method against the exact * method. This uses only a random reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(SingleCoverTreeTest) { arma::mat dataset; dataset.randu(75, 1000); // 75 dimensional, 1000 points. 
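// (A NeighborSearch object runs in one of three modes: NAIVE_MODE scans all
// pairs, SINGLE_TREE_MODE traverses the reference tree once per query point,
// and DUAL_TREE_MODE descends query and reference trees simultaneously.)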
KNN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(dataset, 15, neighborsExact, distancesExact); StandardCoverTree, arma::mat> tree(dataset); NeighborSearch, arma::mat, StandardCoverTree> coverTreeSearch(std::move(tree), SINGLE_TREE_MODE, 0.05); arma::Mat neighborsCoverTree; arma::mat distancesCoverTree; coverTreeSearch.Search(dataset, 15, neighborsCoverTree, distancesCoverTree); for (size_t i = 0; i < neighborsCoverTree.n_elem; ++i) REQUIRE_RELATIVE_ERR(distancesCoverTree[i], distancesExact[i], 0.05); } /** * Test the cover tree dual-tree nearest neighbors method against the exact * method. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(DualCoverTreeTest) { arma::mat dataset; data::Load("test_data_3_1000.csv", dataset); KNN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(dataset, 15, neighborsExact, distancesExact); NeighborSearch coverTreeSearch(dataset, DUAL_TREE_MODE, 0.05); arma::Mat neighborsCoverTree; arma::mat distancesCoverTree; coverTreeSearch.Search(dataset, 15, neighborsCoverTree, distancesCoverTree); for (size_t i = 0; i < neighborsCoverTree.n_elem; ++i) REQUIRE_RELATIVE_ERR(distancesCoverTree[i], distancesExact[i], 0.05); } /** * Test the ball tree single-tree nearest-neighbors method against the exact * method. This uses only a random reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(SingleBallTreeTest) { arma::mat dataset; dataset.randu(50, 300); // 50 dimensional, 300 points. KNN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(dataset, 15, neighborsExact, distancesExact); NeighborSearch ballTreeSearch(dataset, SINGLE_TREE_MODE, 0.05); arma::Mat neighborsBallTree; arma::mat distancesBallTree; ballTreeSearch.Search(dataset, 15, neighborsBallTree, distancesBallTree); for (size_t i = 0; i < neighborsBallTree.n_elem; ++i) REQUIRE_RELATIVE_ERR(distancesBallTree(i), distancesExact(i), 0.05); } /** * Test the ball tree dual-tree nearest neighbors method against the exact * method. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(DualBallTreeTest) { arma::mat dataset; data::Load("test_data_3_1000.csv", dataset); KNN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(15, neighborsExact, distancesExact); NeighborSearch ballTreeSearch(dataset, DUAL_TREE_MODE, 0.05); arma::Mat neighborsBallTree; arma::mat distancesBallTree; ballTreeSearch.Search(15, neighborsBallTree, distancesBallTree); for (size_t i = 0; i < neighborsBallTree.n_elem; ++i) REQUIRE_RELATIVE_ERR(distancesBallTree(i), distancesExact(i), 0.05); } /** * Test the spill tree hybrid sp-tree search (defeatist search on overlapping * nodes, and backtracking in non-overlapping nodes) against the naive method. * This uses only a random reference dataset. * * Errors are produced if the results are not according to relative error. */ BOOST_AUTO_TEST_CASE(SingleSpillTreeTest) { arma::mat dataset; dataset.randu(50, 300); // 50 dimensional, 300 points. 
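// (Spill trees allow sibling nodes to overlap; inside overlapping regions the
// search is defeatist, with no backtracking, which is why the tau bound
// computed below matters for the relative-error guarantee.)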
const size_t k = 3; KNN exact(dataset); arma::Mat neighborsExact; arma::mat distancesExact; exact.Search(dataset, k, neighborsExact, distancesExact); double maxDist = 0; for (size_t i = 0; i < neighborsExact.n_cols; ++i) if (distancesExact(k - 1, i) > maxDist) maxDist = distancesExact(k - 1, i); // If we are sure that tau is a valid upper bound of the kth nearest neighbor // of the query points, then we can be sure that we will satisfy the // requirements on the relative error. SPTree, arma::mat> referenceTree(dataset, maxDist * 1.01 /* tau parameter */); NeighborSearch spTreeSearch(std::move(referenceTree), SINGLE_TREE_MODE, 0.05); arma::Mat neighborsSPTree; arma::mat distancesSPTree; spTreeSearch.Search(dataset, k, neighborsSPTree, distancesSPTree); for (size_t i = 0; i < neighborsSPTree.n_elem; ++i) REQUIRE_RELATIVE_ERR(distancesSPTree(i), distancesExact(i), 0.05); } /** * Make sure sparse nearest neighbors works with kd trees. */ BOOST_AUTO_TEST_CASE(SparseKNNKDTreeTest) { // The dimensionality of these datasets must be high so that the probability // of a completely empty point is very low. In this case, with dimensionality // 70, the probability of all 70 dimensions being zero is 0.8^70 = 1.65e-7 in // the reference set and 0.9^70 = 6.27e-4 in the query set. arma::sp_mat queryDataset; queryDataset.sprandu(70, 200, 0.2); arma::sp_mat referenceDataset; referenceDataset.sprandu(70, 500, 0.1); arma::mat denseQuery(queryDataset); arma::mat denseReference(referenceDataset); typedef NeighborSearch SparseKNN; SparseKNN aknn(referenceDataset, DUAL_TREE_MODE, 0.05); arma::mat distancesSparse; arma::Mat neighborsSparse; aknn.Search(queryDataset, 10, neighborsSparse, distancesSparse); KNN exact(denseReference); arma::mat distancesExact; arma::Mat neighborsExact; exact.Search(denseQuery, 10, neighborsExact, distancesExact); for (size_t i = 0; i < neighborsExact.n_cols; ++i) for (size_t j = 0; j < neighborsExact.n_rows; ++j) REQUIRE_RELATIVE_ERR(distancesSparse(j, i), distancesExact(j, i), 0.05); } /** * Ensure that we can build an NSModel and get correct * results. */ BOOST_AUTO_TEST_CASE(KNNModelTest) { typedef NSModel KNNModel; arma::mat queryData = arma::randu(10, 50); arma::mat referenceData = arma::randu(10, 200); // Build all the possible models. 
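// (The boolean passed to each NSModel below toggles a random basis: when
// true, the data are projected onto a random orthogonal basis before the tree
// is built; the distances reported by Search() are unchanged by this.)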
  KNNModel models[26];
  models[0] = KNNModel(KNNModel::TreeTypes::KD_TREE, true);
  models[1] = KNNModel(KNNModel::TreeTypes::KD_TREE, false);
  models[2] = KNNModel(KNNModel::TreeTypes::COVER_TREE, true);
  models[3] = KNNModel(KNNModel::TreeTypes::COVER_TREE, false);
  models[4] = KNNModel(KNNModel::TreeTypes::R_TREE, true);
  models[5] = KNNModel(KNNModel::TreeTypes::R_TREE, false);
  models[6] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, true);
  models[7] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, false);
  models[8] = KNNModel(KNNModel::TreeTypes::X_TREE, true);
  models[9] = KNNModel(KNNModel::TreeTypes::X_TREE, false);
  models[10] = KNNModel(KNNModel::TreeTypes::BALL_TREE, true);
  models[11] = KNNModel(KNNModel::TreeTypes::BALL_TREE, false);
  models[12] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, true);
  models[13] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, false);
  models[14] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, true);
  models[15] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, false);
  models[16] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, true);
  models[17] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, false);
  models[18] = KNNModel(KNNModel::TreeTypes::VP_TREE, true);
  models[19] = KNNModel(KNNModel::TreeTypes::VP_TREE, false);
  models[20] = KNNModel(KNNModel::TreeTypes::RP_TREE, true);
  models[21] = KNNModel(KNNModel::TreeTypes::RP_TREE, false);
  models[22] = KNNModel(KNNModel::TreeTypes::MAX_RP_TREE, true);
  models[23] = KNNModel(KNNModel::TreeTypes::MAX_RP_TREE, false);
  models[24] = KNNModel(KNNModel::TreeTypes::UB_TREE, true);
  models[25] = KNNModel(KNNModel::TreeTypes::UB_TREE, false);

  for (size_t j = 0; j < 3; ++j)
  {
    // Get a baseline.
    KNN aknn(referenceData);
    arma::Mat<size_t> neighborsExact;
    arma::mat distancesExact;
    aknn.Search(queryData, 3, neighborsExact, distancesExact);

    for (size_t i = 0; i < 26; ++i)
    {
      // We only have std::move() constructors, so make a copy of our data.
      arma::mat referenceCopy(referenceData);
      arma::mat queryCopy(queryData);
      if (j == 0)
        models[i].BuildModel(std::move(referenceCopy), 20, DUAL_TREE_MODE,
            0.05);
      if (j == 1)
        models[i].BuildModel(std::move(referenceCopy), 20, SINGLE_TREE_MODE,
            0.05);
      if (j == 2)
        models[i].BuildModel(std::move(referenceCopy), 20, NAIVE_MODE);

      arma::Mat<size_t> neighborsApprox;
      arma::mat distancesApprox;

      models[i].Search(std::move(queryCopy), 3, neighborsApprox,
          distancesApprox);

      BOOST_REQUIRE_EQUAL(neighborsApprox.n_rows, neighborsExact.n_rows);
      BOOST_REQUIRE_EQUAL(neighborsApprox.n_cols, neighborsExact.n_cols);
      BOOST_REQUIRE_EQUAL(neighborsApprox.n_elem, neighborsExact.n_elem);
      BOOST_REQUIRE_EQUAL(distancesApprox.n_rows, distancesExact.n_rows);
      BOOST_REQUIRE_EQUAL(distancesApprox.n_cols, distancesExact.n_cols);
      BOOST_REQUIRE_EQUAL(distancesApprox.n_elem, distancesExact.n_elem);

      for (size_t k = 0; k < distancesApprox.n_elem; ++k)
        REQUIRE_RELATIVE_ERR(distancesApprox[k], distancesExact[k], 0.05);
    }
  }
}

/**
 * Ensure that we can build an NSModel<NearestNeighborSort> and get correct
 * results, in the case where the reference set is the same as the query set.
 */
BOOST_AUTO_TEST_CASE(KNNModelMonochromaticTest)
{
  typedef NSModel<NearestNeighborSort> KNNModel;

  arma::mat referenceData = arma::randu<arma::mat>(10, 200);

  // Build all the possible models.
  KNNModel models[26];
  models[0] = KNNModel(KNNModel::TreeTypes::KD_TREE, true);
  models[1] = KNNModel(KNNModel::TreeTypes::KD_TREE, false);
  models[2] = KNNModel(KNNModel::TreeTypes::COVER_TREE, true);
  models[3] = KNNModel(KNNModel::TreeTypes::COVER_TREE, false);
  models[4] = KNNModel(KNNModel::TreeTypes::R_TREE, true);
  models[5] = KNNModel(KNNModel::TreeTypes::R_TREE, false);
  models[6] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, true);
  models[7] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, false);
  models[8] = KNNModel(KNNModel::TreeTypes::X_TREE, true);
  models[9] = KNNModel(KNNModel::TreeTypes::X_TREE, false);
  models[10] = KNNModel(KNNModel::TreeTypes::BALL_TREE, true);
  models[11] = KNNModel(KNNModel::TreeTypes::BALL_TREE, false);
  models[12] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, true);
  models[13] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, false);
  models[14] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, true);
  models[15] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, false);
  models[16] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, true);
  models[17] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, false);
  models[18] = KNNModel(KNNModel::TreeTypes::VP_TREE, true);
  models[19] = KNNModel(KNNModel::TreeTypes::VP_TREE, false);
  models[20] = KNNModel(KNNModel::TreeTypes::RP_TREE, true);
  models[21] = KNNModel(KNNModel::TreeTypes::RP_TREE, false);
  models[22] = KNNModel(KNNModel::TreeTypes::MAX_RP_TREE, true);
  models[23] = KNNModel(KNNModel::TreeTypes::MAX_RP_TREE, false);
  models[24] = KNNModel(KNNModel::TreeTypes::UB_TREE, true);
  models[25] = KNNModel(KNNModel::TreeTypes::UB_TREE, false);

  for (size_t j = 0; j < 2; ++j)
  {
    // Get a baseline.
    KNN exact(referenceData);
    arma::Mat<size_t> neighborsExact;
    arma::mat distancesExact;
    exact.Search(3, neighborsExact, distancesExact);

    for (size_t i = 0; i < 26; ++i)
    {
      // We only have a std::move() constructor... so copy the data.
      arma::mat referenceCopy(referenceData);
      if (j == 0)
        models[i].BuildModel(std::move(referenceCopy), 20, DUAL_TREE_MODE,
            0.05);
      if (j == 1)
        models[i].BuildModel(std::move(referenceCopy), 20, SINGLE_TREE_MODE,
            0.05);

      arma::Mat<size_t> neighborsApprox;
      arma::mat distancesApprox;

      models[i].Search(3, neighborsApprox, distancesApprox);

      BOOST_REQUIRE_EQUAL(neighborsApprox.n_rows, neighborsExact.n_rows);
      BOOST_REQUIRE_EQUAL(neighborsApprox.n_cols, neighborsExact.n_cols);
      BOOST_REQUIRE_EQUAL(neighborsApprox.n_elem, neighborsExact.n_elem);
      BOOST_REQUIRE_EQUAL(distancesApprox.n_rows, distancesExact.n_rows);
      BOOST_REQUIRE_EQUAL(distancesApprox.n_cols, distancesExact.n_cols);
      BOOST_REQUIRE_EQUAL(distancesApprox.n_elem, distancesExact.n_elem);

      for (size_t k = 0; k < distancesApprox.n_elem; ++k)
        REQUIRE_RELATIVE_ERR(distancesApprox[k], distancesExact[k], 0.05);
    }
  }
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/arma_extend_test.cpp000066400000000000000000000150361315013601400222200ustar00rootroot00000000000000
/**
 * @file arma_extend_test.cpp
 * @author Ryan Curtin
 *
 * Test of the mlpack extensions to Armadillo.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace arma;

BOOST_AUTO_TEST_SUITE(ArmaExtendTest);
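// (Background sketch for the two reshape tests that follow:
// arma::inplace_reshape(X, r, c) reinterprets X's existing column-major
// buffer as an r x c matrix without copying, so the flat element order X[i]
// is unchanged -- which is exactly the "order should be preserved" property
// asserted below.)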
/**
 * Make sure we can reshape a matrix in-place without changing anything.
 */
BOOST_AUTO_TEST_CASE(InplaceReshapeColumnTest)
{
  arma::mat X;
  X.randu(1, 10);

  arma::mat oldX = X;
  arma::inplace_reshape(X, 2, 5);

  BOOST_REQUIRE_EQUAL(X.n_rows, 2);
  BOOST_REQUIRE_EQUAL(X.n_cols, 5);
  for (size_t i = 0; i < 10; ++i)
    BOOST_REQUIRE_CLOSE(X[i], oldX[i], 1e-5); // Order should be preserved.
}

/**
 * Make sure we can reshape a large matrix.
 */
BOOST_AUTO_TEST_CASE(InplaceReshapeMatrixTest)
{
  arma::mat X;
  X.randu(8, 10);

  arma::mat oldX = X;
  arma::inplace_reshape(X, 10, 8);

  BOOST_REQUIRE_EQUAL(X.n_rows, 10);
  BOOST_REQUIRE_EQUAL(X.n_cols, 8);
  for (size_t i = 0; i < 80; ++i)
    BOOST_REQUIRE_CLOSE(X[i], oldX[i], 1e-5); // Order should be preserved.
}

/**
 * Test const_row_col_iterator for basic functionality.
 */
BOOST_AUTO_TEST_CASE(ConstRowColIteratorTest)
{
  mat X;
  X.zeros(5, 5);
  for (size_t i = 0; i < 5; ++i)
    X.col(i) += i;
  for (size_t i = 0; i < 5; ++i)
    X.row(i) += 3 * i;

  // Make sure default constructor works okay.
  mat::const_row_col_iterator it;
  // Make sure ++ operator, operator* and comparison operators work fine.
  size_t count = 0;
  for (it = X.begin_row_col(); it != X.end_row_col(); it++)
  {
    // Check iterator value.
    BOOST_REQUIRE_EQUAL(*it, (count % 5) * 3 + (count / 5));

    // Check iterator position.
    BOOST_REQUIRE_EQUAL(it.row(), count % 5);
    BOOST_REQUIRE_EQUAL(it.col(), count / 5);

    count++;
  }
  BOOST_REQUIRE_EQUAL(count, 25);
  it = X.end_row_col();
  do
  {
    it--;
    count--;

    // Check iterator value.
    BOOST_REQUIRE_EQUAL(*it, (count % 5) * 3 + (count / 5));

    // Check iterator position.
    BOOST_REQUIRE_EQUAL(it.row(), count % 5);
    BOOST_REQUIRE_EQUAL(it.col(), count / 5);
  } while (it != X.begin_row_col());

  BOOST_REQUIRE_EQUAL(count, 0);
}

/**
 * Test row_col_iterator for basic functionality.
 */
BOOST_AUTO_TEST_CASE(RowColIteratorTest)
{
  mat X;
  X.zeros(5, 5);
  for (size_t i = 0; i < 5; ++i)
    X.col(i) += i;
  for (size_t i = 0; i < 5; ++i)
    X.row(i) += 3 * i;

  // Make sure default constructor works okay.
  mat::row_col_iterator it;
  // Make sure ++ operator, operator* and comparison operators work fine.
  size_t count = 0;
  for (it = X.begin_row_col(); it != X.end_row_col(); it++)
  {
    // Check iterator value.
    BOOST_REQUIRE_EQUAL(*it, (count % 5) * 3 + (count / 5));

    // Check iterator position.
    BOOST_REQUIRE_EQUAL(it.row(), count % 5);
    BOOST_REQUIRE_EQUAL(it.col(), count / 5);

    count++;
  }
  BOOST_REQUIRE_EQUAL(count, 25);
  it = X.end_row_col();
  do
  {
    it--;
    count--;

    // Check iterator value.
    BOOST_REQUIRE_EQUAL(*it, (count % 5) * 3 + (count / 5));

    // Check iterator position.
    BOOST_REQUIRE_EQUAL(it.row(), count % 5);
    BOOST_REQUIRE_EQUAL(it.col(), count / 5);
  } while (it != X.begin_row_col());

  BOOST_REQUIRE_EQUAL(count, 0);
}

/**
 * Operator-- test for mat::row_col_iterator and mat::const_row_col_iterator.
 */
BOOST_AUTO_TEST_CASE(MatRowColIteratorDecrementOperatorTest)
{
  mat test = ones(5, 5);

  mat::row_col_iterator it1 = test.begin_row_col();
  mat::row_col_iterator it2 = it1;

  // Check that postfix-- does not decrement the position when the position is
  // pointing to the beginning.
  it2--;
  BOOST_REQUIRE_EQUAL(it1.row(), it2.row());
  BOOST_REQUIRE_EQUAL(it1.col(), it2.col());

  // Check that prefix-- does not decrement the position when the position is
  // pointing to the beginning.
  --it2;
  BOOST_REQUIRE_EQUAL(it1.row(), it2.row());
  BOOST_REQUIRE_EQUAL(it1.col(), it2.col());
}
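/**
 * Illustrative sketch (not part of the original suite): row_col_iterator
 * walks the matrix in column-major order, which is why the checks above
 * expect element `count` at row (count % 5) and column (count / 5).
 */
BOOST_AUTO_TEST_CASE(RowColIteratorOrderSketch)
{
  mat X(2, 2);
  X(0, 0) = 0; X(1, 0) = 1; // The first column is visited first...
  X(0, 1) = 2; X(1, 1) = 3; // ...then the second column.

  size_t count = 0;
  for (mat::row_col_iterator it = X.begin_row_col(); it != X.end_row_col();
      ++it)
  {
    BOOST_REQUIRE_EQUAL(*it, count); // Visits 0, 1, 2, 3: columns first.
    ++count;
  }
  BOOST_REQUIRE_EQUAL(count, 4);
}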
// These tests don't work when the sparse iterators hold references and not
// pointers internally, because of the lack of a default constructor.
#if ARMA_VERSION_MAJOR > 4 || \
    (ARMA_VERSION_MAJOR == 4 && ARMA_VERSION_MINOR > 320)

/**
 * Test sparse const_row_col_iterator for basic functionality.
 */
BOOST_AUTO_TEST_CASE(ConstSpRowColIteratorTest)
{
  sp_mat X(5, 5);
  for (size_t i = 0; i < 5; ++i)
    X.col(i) += i;
  for (size_t i = 0; i < 5; ++i)
    X.row(i) += 3 * i;

  // Make sure default constructor works okay.
  sp_mat::const_row_col_iterator it;
  // Make sure ++ operator, operator* and comparison operators work fine.
  // The (0, 0) element is zero and not stored, so the count starts at 1.
  size_t count = 1;
  for (it = X.begin_row_col(); it != X.end_row_col(); it++)
  {
    // Check iterator value.
    BOOST_REQUIRE_EQUAL(*it, (count % 5) * 3 + (count / 5));

    // Check iterator position.
    BOOST_REQUIRE_EQUAL(it.row(), count % 5);
    BOOST_REQUIRE_EQUAL(it.col(), count / 5);

    count++;
  }
  BOOST_REQUIRE_EQUAL(count, 25);
  it = X.end_row_col();
  do
  {
    it--;
    count--;

    // Check iterator value.
    BOOST_REQUIRE_EQUAL(*it, (count % 5) * 3 + (count / 5));

    // Check iterator position.
    BOOST_REQUIRE_EQUAL(it.row(), count % 5);
    BOOST_REQUIRE_EQUAL(it.col(), count / 5);
  } while (it != X.begin_row_col());

  BOOST_REQUIRE_EQUAL(count, 1);
}

/**
 * Test sparse row_col_iterator for basic functionality.
 */
BOOST_AUTO_TEST_CASE(SpRowColIteratorTest)
{
  sp_mat X(5, 5);
  for (size_t i = 0; i < 5; ++i)
    X.col(i) += i;
  for (size_t i = 0; i < 5; ++i)
    X.row(i) += 3 * i;

  // Make sure default constructor works okay.
  sp_mat::row_col_iterator it;
  // Make sure ++ operator, operator* and comparison operators work fine.
  // The (0, 0) element is zero and not stored, so the count starts at 1.
  size_t count = 1;
  for (it = X.begin_row_col(); it != X.end_row_col(); it++)
  {
    // Check iterator value.
    BOOST_REQUIRE_EQUAL(*it, (count % 5) * 3 + (count / 5));

    // Check iterator position.
    BOOST_REQUIRE_EQUAL(it.row(), count % 5);
    BOOST_REQUIRE_EQUAL(it.col(), count / 5);

    count++;
  }
  BOOST_REQUIRE_EQUAL(count, 25);
  it = X.end_row_col();
  do
  {
    it--;
    count--;

    // Check iterator value.
    BOOST_REQUIRE_EQUAL(*it, (count % 5) * 3 + (count / 5));

    // Check iterator position.
    BOOST_REQUIRE_EQUAL(it.row(), count % 5);
    BOOST_REQUIRE_EQUAL(it.col(), count / 5);
  } while (it != X.begin_row_col());

  BOOST_REQUIRE_EQUAL(count, 1);
}

#endif

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/armadillo_svd_test.cpp000066400000000000000000000024761315013601400225530ustar00rootroot00000000000000
#include <mlpack/core.hpp>
#include <mlpack/methods/cf/svd_wrapper.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

BOOST_AUTO_TEST_SUITE(ArmadilloSVDTest);

using namespace std;
using namespace mlpack;
using namespace mlpack::cf;
using namespace arma;
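// (Reader's note, inferred from how the tests below use the API:
// SVDWrapper<>::Apply(V, W, sigma, H) factors V into W * sigma * H and
// returns a scalar reconstruction-error measure, while the rank-constrained
// overload Apply(V, rank, W, H) produces a rank-limited factorization.  The
// tests only assert that the returned error is below 0.01.)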
/**
 * Test armadillo SVD for normal factorization.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
BOOST_AUTO_TEST_CASE(ArmadilloSVDNormalFactorizationTest)
{
  mat test = randu<mat>(20, 20);

  SVDWrapper<> svd;
  arma::mat W, H, sigma;
  double result = svd.Apply(test, W, sigma, H);

  BOOST_REQUIRE_LT(result, 0.01);

  test = randu<mat>(50, 50);
  result = svd.Apply(test, W, sigma, H);

  BOOST_REQUIRE_LT(result, 0.01);
}

/**
 * Test armadillo SVD for low-rank matrix factorization.
 */
BOOST_AUTO_TEST_CASE(ArmadilloSVDLowRankFactorizationTest)
{
  mat W_t = randu<mat>(30, 3);
  mat H_t = randu<mat>(3, 40);

  // Create a low-rank matrix.
  mat test = W_t * H_t;

  SVDWrapper<> svd;
  arma::mat W, H;
  double result = svd.Apply(test, 3, W, H);

  BOOST_REQUIRE_LT(result, 0.01);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/aug_lagrangian_test.cpp000066400000000000000000000040271315013601400226660ustar00rootroot00000000000000
/**
 * @file aug_lagrangian_test.cpp
 * @author Ryan Curtin
 *
 * Test of the AugmentedLagrangian class using the test functions defined in
 * aug_lagrangian_test_functions.hpp.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/aug_lagrangian/aug_lagrangian.hpp>
#include <mlpack/core/optimizers/aug_lagrangian/aug_lagrangian_test_functions.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::optimization;

BOOST_AUTO_TEST_SUITE(AugLagrangianTest);

/**
 * Tests the Augmented Lagrangian optimizer using the
 * AugmentedLagrangianTestFunction class.
 */
BOOST_AUTO_TEST_CASE(AugLagrangianTestFunctionTest)
{
  // The choice of 10 memory slots is arbitrary.
  AugLagrangianTestFunction f;
  AugLagrangian<AugLagrangianTestFunction> aug(f);

  arma::vec coords = f.GetInitialPoint();

  if (!aug.Optimize(coords, 0))
    BOOST_FAIL("Optimization reported failure.");

  double finalValue = f.Evaluate(coords);

  BOOST_REQUIRE_CLOSE(finalValue, 70.0, 1e-5);
  BOOST_REQUIRE_CLOSE(coords[0], 1.0, 1e-5);
  BOOST_REQUIRE_CLOSE(coords[1], 4.0, 1e-5);
}

/**
 * Tests the Augmented Lagrangian optimizer using the Gockenbach function.
 */
BOOST_AUTO_TEST_CASE(GockenbachFunctionTest)
{
  GockenbachFunction f;
  AugLagrangian<GockenbachFunction> aug(f);

  arma::vec coords = f.GetInitialPoint();

  if (!aug.Optimize(coords, 0))
    BOOST_FAIL("Optimization reported failure.");

  double finalValue = f.Evaluate(coords);

  // Higher tolerance for smaller values.
  BOOST_REQUIRE_CLOSE(finalValue, 29.633926, 1e-5);
  BOOST_REQUIRE_CLOSE(coords[0], 0.12288178, 1e-3);
  BOOST_REQUIRE_CLOSE(coords[1], -1.10778185, 1e-5);
  BOOST_REQUIRE_CLOSE(coords[2], 0.015099932, 1e-3);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/binarize_test.cpp000066400000000000000000000042121315013601400215260ustar00rootroot00000000000000
/**
 * @file binarize_test.cpp
 * @author Keon Kim
 *
 * Test the Binarize method.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
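// (Semantics exercised below, as a sketch: Binarize(input, output, threshold,
// dimension) walks one row ("dimension") of the input and writes 1 where the
// value is strictly above the threshold and 0 otherwise, leaving the other
// rows untouched; the overload without a dimension applies the same rule to
// every element.  E.g., with threshold 5 the row {4, 5, 6} becomes {0, 0, 1}.)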
#include <mlpack/core.hpp>
#include <mlpack/core/data/binarize.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace arma;
using namespace mlpack::data;

BOOST_AUTO_TEST_SUITE(BinarizeTest);

BOOST_AUTO_TEST_CASE(BinerizeOneDimension)
{
  mat input;
  input << 1 << 2 << 3 << endr
        << 4 << 5 << 6 << endr // This row will be tested.
        << 7 << 8 << 9;

  mat output;
  const double threshold = 5.0;
  const size_t dimension = 1;
  Binarize(input, output, threshold, dimension);

  BOOST_REQUIRE_CLOSE(output(0, 0), 1, 1e-5); // 1
  BOOST_REQUIRE_CLOSE(output(0, 1), 2, 1e-5); // 2
  BOOST_REQUIRE_CLOSE(output(0, 2), 3, 1e-5); // 3
  BOOST_REQUIRE_SMALL(output(1, 0), 1e-5); // 4 target
  BOOST_REQUIRE_SMALL(output(1, 1), 1e-5); // 5 target
  BOOST_REQUIRE_CLOSE(output(1, 2), 1, 1e-5); // 6 target
  BOOST_REQUIRE_CLOSE(output(2, 0), 7, 1e-5); // 7
  BOOST_REQUIRE_CLOSE(output(2, 1), 8, 1e-5); // 8
  BOOST_REQUIRE_CLOSE(output(2, 2), 9, 1e-5); // 9
}

BOOST_AUTO_TEST_CASE(BinerizeAll)
{
  mat input;
  input << 1 << 2 << 3 << endr
        << 4 << 5 << 6 << endr // This row will be tested.
        << 7 << 8 << 9;

  mat output;
  const double threshold = 5.0;

  Binarize(input, output, threshold);

  BOOST_REQUIRE_SMALL(output(0, 0), 1e-5); // 1
  BOOST_REQUIRE_SMALL(output(0, 1), 1e-5); // 2
  BOOST_REQUIRE_SMALL(output(0, 2), 1e-5); // 3
  BOOST_REQUIRE_SMALL(output(1, 0), 1e-5); // 4
  BOOST_REQUIRE_SMALL(output(1, 1), 1e-5); // 5
  BOOST_REQUIRE_CLOSE(output(1, 2), 1.0, 1e-5); // 6
  BOOST_REQUIRE_CLOSE(output(2, 0), 1.0, 1e-5); // 7
  BOOST_REQUIRE_CLOSE(output(2, 1), 1.0, 1e-5); // 8
  BOOST_REQUIRE_CLOSE(output(2, 2), 1.0, 1e-5); // 9
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/cf_test.cpp000066400000000000000000000365501315013601400203250ustar00rootroot00000000000000
/**
 * @file cf_test.cpp
 * @author Mudit Raj Gupta
 *
 * Test file for CF class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/cf/cf.hpp>
#include <mlpack/methods/amf/amf.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"
#include "serialization.hpp"

BOOST_AUTO_TEST_SUITE(CFTest);

using namespace mlpack;
using namespace mlpack::cf;
using namespace std;

/**
 * Make sure that the correct number of recommendations are generated when no
 * query set is specified (the default case).
 */
BOOST_AUTO_TEST_CASE(CFGetRecommendationsAllUsersTest)
{
  // Dummy number of recommendations.
  size_t numRecs = 3;
  // GroupLens100k.csv dataset has 943 users.
  size_t numUsers = 943;

  // Matrix to save recommendations into.
  arma::Mat<size_t> recommendations;

  // Load GroupLens data.
  arma::mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Make data into sparse matrix.
  arma::sp_mat cleanedData;
  CF::CleanData(dataset, cleanedData);

  // Create a CF object.
  CF c(cleanedData);

  // Generate recommendations when query set is not specified.
  c.GetRecommendations(numRecs, recommendations);

  // Check if correct number of recommendations are generated.
  BOOST_REQUIRE_EQUAL(recommendations.n_rows, numRecs);

  // Check if recommendations are generated for all users.
  BOOST_REQUIRE_EQUAL(recommendations.n_cols, numUsers);
}
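// (A note on the setup shared by every test in this file: the GroupLens CSV
// is loaded as a 3 x n matrix of (user, item, rating) triples, and
// CF::CleanData() scatters those triples into an items x users sparse rating
// matrix -- which is why later tests can index it as CleanedData()(item,
// user).)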
/**
 * Make sure that the recommendations are generated for queried users only.
 */
BOOST_AUTO_TEST_CASE(CFGetRecommendationsQueriedUserTest)
{
  // Number of users that we will search for recommendations for.
  size_t numUsers = 10;

  // Default number of recommendations.
  size_t numRecsDefault = 5;

  // Create dummy query set.
  arma::Col<size_t> users = arma::zeros<arma::Col<size_t> >(numUsers, 1);
  for (size_t i = 0; i < numUsers; i++)
    users(i) = i;

  // Matrix to save recommendations into.
  arma::Mat<size_t> recommendations;

  // Load GroupLens data.
  arma::mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Make data into sparse matrix.
  arma::sp_mat cleanedData;
  CF::CleanData(dataset, cleanedData);

  CF c(cleanedData);

  // Generate recommendations when query set is specified.
  c.GetRecommendations(numRecsDefault, recommendations, users);

  // Check if correct number of recommendations are generated.
  BOOST_REQUIRE_EQUAL(recommendations.n_rows, numRecsDefault);

  // Check if recommendations are generated for the right number of users.
  BOOST_REQUIRE_EQUAL(recommendations.n_cols, numUsers);
}

/**
 * Make sure recommendations that are generated are reasonably accurate.
 */
BOOST_AUTO_TEST_CASE(RecommendationAccuracyTest)
{
  // Load the GroupLens dataset; then, we will remove some values from it.
  arma::mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Save the columns we've removed.
  arma::mat savedCols(3, 300);

  // Remove 300 5-star ratings.
  size_t currentCol = 0;
  for (size_t i = 0; i < dataset.n_cols; ++i)
  {
    if (currentCol == 300)
      break;

    if (dataset(2, i) > 4.5) // 5-star rating.
    {
      // Make sure we don't have this user yet.  This is a slow way to do this
      // but I don't particularly care here because it's in the tests.
      bool found = false;
      for (size_t j = 0; j < currentCol; ++j)
      {
        if (savedCols(0, j) == dataset(0, i))
        {
          found = true;
          break;
        }
      }

      // If this user doesn't already exist in savedCols, add them.  Otherwise
      // ignore this point.
      if (!found)
      {
        savedCols.col(currentCol) = dataset.col(i);
        dataset.shed_col(i);
        ++currentCol;
      }
    }
  }

  // Make data into sparse matrix.
  arma::sp_mat cleanedData;
  CF::CleanData(dataset, cleanedData);

  // Now create the CF object.
  CF c(cleanedData);

  // Obtain 150 recommendations for the users in savedCols, and make sure the
  // missing item shows up in most of them.  First, create the list of users,
  // which requires casting from doubles...
  arma::Col<size_t> users(300);
  for (size_t i = 0; i < 300; ++i)
    users(i) = (size_t) savedCols(0, i);
  arma::Mat<size_t> recommendations;
  size_t numRecs = 150;
  c.GetRecommendations(numRecs, recommendations, users);

  BOOST_REQUIRE_EQUAL(recommendations.n_rows, numRecs);
  BOOST_REQUIRE_EQUAL(recommendations.n_cols, 300);

  size_t failures = 0;
  for (size_t i = 0; i < 300; ++i)
  {
    size_t targetItem = (size_t) savedCols(1, i);
    bool found = false;
    // Make sure the target item shows up in the recommendations.
    for (size_t j = 0; j < numRecs; ++j)
    {
      const size_t user = users(i);
      const size_t item = recommendations(j, i);
      if (item == targetItem)
      {
        found = true;
      }
      else
      {
        // Make sure we aren't being recommended an item that the user already
        // rated.
        BOOST_REQUIRE_EQUAL((double) c.CleanedData()(item, user), 0.0);
      }
    }

    if (!found)
      ++failures;
  }

  // Make sure the right item showed up in at least 2/3 of the recommendations.
  // Random chance (that is, if we selected recommendations randomly) for this
  // GroupLens dataset would give somewhere around a 10% success rate (failures
  // would be closer to 270).
  BOOST_REQUIRE_LT(failures, 100);
}
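// (For the prediction test below, the reported statistic is
//   error = sqrt( sum_i (prediction_i - rating_i)^2 ) / n
// over the n = 300 held-out 5-star ratings, and the test only requires
// error < 0.5.)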
// Make sure that Predict() is returning reasonable results.
BOOST_AUTO_TEST_CASE(CFPredictTest)
{
  // Load the GroupLens dataset; then, we will remove some values from it.
  arma::mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Save the columns we've removed.
  arma::mat savedCols(3, 300);

  // Remove 300 5-star ratings.
  size_t currentCol = 0;
  for (size_t i = 0; i < dataset.n_cols; ++i)
  {
    if (currentCol == 300)
      break;

    if (dataset(2, i) > 4.5) // 5-star rating.
    {
      // Make sure we don't have this user yet.  This is a slow way to do this
      // but I don't particularly care here because it's in the tests.
      bool found = false;
      for (size_t j = 0; j < currentCol; ++j)
      {
        if (savedCols(0, j) == dataset(0, i))
        {
          found = true;
          break;
        }
      }

      // If this user doesn't already exist in savedCols, add them.  Otherwise
      // ignore this point.
      if (!found)
      {
        savedCols.col(currentCol) = dataset.col(i);
        dataset.shed_col(i);
        ++currentCol;
      }
    }
  }

  // Make data into sparse matrix.
  arma::sp_mat cleanedData;
  CF::CleanData(dataset, cleanedData);

  // Now create the CF object.
  CF c(cleanedData);

  // Now, for each removed rating, make sure the prediction is... reasonably
  // accurate.
  double totalError = 0.0;
  for (size_t i = 0; i < savedCols.n_cols; ++i)
  {
    const double prediction = c.Predict(savedCols(0, i), savedCols(1, i));

    const double error = std::pow(prediction - savedCols(2, i), 2.0);
    totalError += error;
  }
  totalError = std::sqrt(totalError) / savedCols.n_cols;

  // This normalized error measure should be small; require less than 0.5.
  BOOST_REQUIRE_LT(totalError, 0.5);
}

// Do the same thing as the previous test, but ensure that the ratings we
// predict with the batch Predict() are the same as the individual Predict()
// calls.
BOOST_AUTO_TEST_CASE(CFBatchPredictTest)
{
  // Load the GroupLens dataset; then, we will remove some values from it.
  arma::mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Save the columns we've removed.
  arma::mat savedCols(3, 300);

  // Remove 300 5-star ratings.
  size_t currentCol = 0;
  for (size_t i = 0; i < dataset.n_cols; ++i)
  {
    if (currentCol == 300)
      break;

    if (dataset(2, i) > 4.5) // 5-star rating.
    {
      // Make sure we don't have this user yet.  This is a slow way to do this
      // but I don't particularly care here because it's in the tests.
      bool found = false;
      for (size_t j = 0; j < currentCol; ++j)
      {
        if (savedCols(0, j) == dataset(0, i))
        {
          found = true;
          break;
        }
      }

      // If this user doesn't already exist in savedCols, add them.  Otherwise
      // ignore this point.
      if (!found)
      {
        savedCols.col(currentCol) = dataset.col(i);
        dataset.shed_col(i);
        ++currentCol;
      }
    }
  }

  // Make data into sparse matrix.
  arma::sp_mat cleanedData;
  CF::CleanData(dataset, cleanedData);

  // Now create the CF object.
  CF c(cleanedData);

  // Get predictions for all user/item pairs we held back.
  arma::Mat<size_t> combinations(2, savedCols.n_cols);
  for (size_t i = 0; i < savedCols.n_cols; ++i)
  {
    combinations(0, i) = size_t(savedCols(0, i));
    combinations(1, i) = size_t(savedCols(1, i));
  }
  arma::vec predictions;
  c.Predict(combinations, predictions);

  for (size_t i = 0; i < combinations.n_cols; ++i)
  {
    const double prediction = c.Predict(combinations(0, i), combinations(1, i));
    BOOST_REQUIRE_CLOSE(prediction, predictions[i], 1e-8);
  }
}

/**
 * Make sure we can train an already-trained model and it works okay.
 */
BOOST_AUTO_TEST_CASE(TrainTest)
{
  // Generate random data.
  arma::sp_mat randomData;
  randomData.sprandu(100, 100, 0.3);
  CF c(randomData);

  // Now retrain with data we know about.
  arma::mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Save the columns we've removed.
  arma::mat savedCols(3, 300);

  // Remove 300 5-star ratings.
  size_t currentCol = 0;
  for (size_t i = 0; i < dataset.n_cols; ++i)
  {
    if (currentCol == 300)
      break;

    if (dataset(2, i) > 4.5) // 5-star rating.
    {
      // Make sure we don't have this user yet.
      // This is a slow way to do this, but I don't particularly care here
      // because it's in the tests.
      bool found = false;
      for (size_t j = 0; j < currentCol; ++j)
      {
        if (savedCols(0, j) == dataset(0, i))
        {
          found = true;
          break;
        }
      }

      // If this user doesn't already exist in savedCols, add them.  Otherwise
      // ignore this point.
      if (!found)
      {
        savedCols.col(currentCol) = dataset.col(i);
        dataset.shed_col(i);
        ++currentCol;
      }
    }
  }

  // Make data into sparse matrix.
  arma::sp_mat cleanedData;
  CF::CleanData(dataset, cleanedData);

  // Now retrain.
  c.Train(dataset);

  // Get predictions for all user/item pairs we held back.
  arma::Mat<size_t> combinations(2, savedCols.n_cols);
  for (size_t i = 0; i < savedCols.n_cols; ++i)
  {
    combinations(0, i) = size_t(savedCols(0, i));
    combinations(1, i) = size_t(savedCols(1, i));
  }
  arma::vec predictions;
  c.Predict(combinations, predictions);

  for (size_t i = 0; i < combinations.n_cols; ++i)
  {
    const double prediction = c.Predict(combinations(0, i), combinations(1, i));
    BOOST_REQUIRE_CLOSE(prediction, predictions[i], 1e-8);
  }
}

/**
 * Make sure we can train a model after using the empty constructor.
 */
BOOST_AUTO_TEST_CASE(EmptyConstructorTrainTest)
{
  // Use default constructor.
  CF c;

  // Now retrain with data we know about.
  arma::mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Save the columns we've removed.
  arma::mat savedCols(3, 300);

  // Remove 300 5-star ratings.
  size_t currentCol = 0;
  for (size_t i = 0; i < dataset.n_cols; ++i)
  {
    if (currentCol == 300)
      break;

    if (dataset(2, i) > 4.5) // 5-star rating.
    {
      // Make sure we don't have this user yet.  This is a slow way to do this
      // but I don't particularly care here because it's in the tests.
      bool found = false;
      for (size_t j = 0; j < currentCol; ++j)
      {
        if (savedCols(0, j) == dataset(0, i))
        {
          found = true;
          break;
        }
      }

      // If this user doesn't already exist in savedCols, add them.  Otherwise
      // ignore this point.
      if (!found)
      {
        savedCols.col(currentCol) = dataset.col(i);
        dataset.shed_col(i);
        ++currentCol;
      }
    }
  }

  // Make data into sparse matrix.
  arma::sp_mat cleanedData;
  CF::CleanData(dataset, cleanedData);

  // Now retrain.
  c.Train(cleanedData);

  // Get predictions for all user/item pairs we held back.
  arma::Mat<size_t> combinations(2, savedCols.n_cols);
  for (size_t i = 0; i < savedCols.n_cols; ++i)
  {
    combinations(0, i) = size_t(savedCols(0, i));
    combinations(1, i) = size_t(savedCols(1, i));
  }
  arma::vec predictions;
  c.Predict(combinations, predictions);

  for (size_t i = 0; i < combinations.n_cols; ++i)
  {
    const double prediction = c.Predict(combinations(0, i), combinations(1, i));
    BOOST_REQUIRE_CLOSE(prediction, predictions[i], 1e-8);
  }
}

/**
 * Ensure we can load and save the CF model.
 */
BOOST_AUTO_TEST_CASE(SerializationTest)
{
  // Load a dataset to train on.
  arma::mat dataset;
  data::Load("GroupLens100k.csv", dataset);
  arma::sp_mat cleanedData;
  CF::CleanData(dataset, cleanedData);

  CF c(cleanedData);

  arma::sp_mat randomData;
  randomData.sprandu(100, 100, 0.3);

  CF cXml(randomData);
  CF cBinary;
  CF cText(cleanedData, amf::NMFALSFactorizer(), 5, 5);

  SerializeObjectAll(c, cXml, cText, cBinary);

  // Check the internals.
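  // (Note on the checks below: they walk Armadillo's compressed-sparse-column
  // storage directly.  col_ptrs[j] holds the index of the first nonzero of
  // column j, so it has n_cols + 1 meaningful entries; row_indices/values
  // hold one entry per nonzero plus a trailing sentinel element, which is why
  // the loops can safely use <=.)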
  BOOST_REQUIRE_EQUAL(c.NumUsersForSimilarity(), cXml.NumUsersForSimilarity());
  BOOST_REQUIRE_EQUAL(c.NumUsersForSimilarity(),
      cBinary.NumUsersForSimilarity());
  BOOST_REQUIRE_EQUAL(c.NumUsersForSimilarity(),
      cText.NumUsersForSimilarity());

  BOOST_REQUIRE_EQUAL(c.Rank(), cXml.Rank());
  BOOST_REQUIRE_EQUAL(c.Rank(), cBinary.Rank());
  BOOST_REQUIRE_EQUAL(c.Rank(), cText.Rank());

  CheckMatrices(c.W(), cXml.W(), cBinary.W(), cText.W());
  CheckMatrices(c.H(), cXml.H(), cBinary.H(), cText.H());

  BOOST_REQUIRE_EQUAL(c.CleanedData().n_rows, cXml.CleanedData().n_rows);
  BOOST_REQUIRE_EQUAL(c.CleanedData().n_rows, cBinary.CleanedData().n_rows);
  BOOST_REQUIRE_EQUAL(c.CleanedData().n_rows, cText.CleanedData().n_rows);

  BOOST_REQUIRE_EQUAL(c.CleanedData().n_cols, cXml.CleanedData().n_cols);
  BOOST_REQUIRE_EQUAL(c.CleanedData().n_cols, cBinary.CleanedData().n_cols);
  BOOST_REQUIRE_EQUAL(c.CleanedData().n_cols, cText.CleanedData().n_cols);

  BOOST_REQUIRE_EQUAL(c.CleanedData().n_nonzero, cXml.CleanedData().n_nonzero);
  BOOST_REQUIRE_EQUAL(c.CleanedData().n_nonzero,
      cBinary.CleanedData().n_nonzero);
  BOOST_REQUIRE_EQUAL(c.CleanedData().n_nonzero,
      cText.CleanedData().n_nonzero);

  for (size_t i = 0; i <= c.CleanedData().n_cols; ++i)
  {
    BOOST_REQUIRE_EQUAL(c.CleanedData().col_ptrs[i],
        cXml.CleanedData().col_ptrs[i]);
    BOOST_REQUIRE_EQUAL(c.CleanedData().col_ptrs[i],
        cBinary.CleanedData().col_ptrs[i]);
    BOOST_REQUIRE_EQUAL(c.CleanedData().col_ptrs[i],
        cText.CleanedData().col_ptrs[i]);
  }

  for (size_t i = 0; i <= c.CleanedData().n_nonzero; ++i)
  {
    BOOST_REQUIRE_EQUAL(c.CleanedData().row_indices[i],
        cXml.CleanedData().row_indices[i]);
    BOOST_REQUIRE_EQUAL(c.CleanedData().row_indices[i],
        cBinary.CleanedData().row_indices[i]);
    BOOST_REQUIRE_EQUAL(c.CleanedData().row_indices[i],
        cText.CleanedData().row_indices[i]);

    BOOST_REQUIRE_CLOSE(c.CleanedData().values[i],
        cXml.CleanedData().values[i], 1e-5);
    BOOST_REQUIRE_CLOSE(c.CleanedData().values[i],
        cBinary.CleanedData().values[i], 1e-5);
    BOOST_REQUIRE_CLOSE(c.CleanedData().values[i],
        cText.CleanedData().values[i], 1e-5);
  }
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/cli_test.cpp000066400000000000000000000103201315013601400204700ustar00rootroot00000000000000
/**
 * @file cli_test.cpp
 * @author Matthew Amidon, Ryan Curtin
 *
 * Test for the CLI input parameter system.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>

#define DEFAULT_INT 42

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::util;

BOOST_AUTO_TEST_SUITE(CLITest);

/**
 * Tests that CLI works as intended, namely that CLI::Add propagates
 * successfully.
 */
BOOST_AUTO_TEST_CASE(TestCLIAdd)
{
  // Check that CLI::HasParam returns false if no value has been specified on
  // the command line, ignoring any programmatic assignments.
  CLI::Add<bool>("global/bool", "True or False", "alias/bool");

  // CLI::HasParam should return false here.
  BOOST_REQUIRE(!CLI::HasParam("global/bool"));

  // Check the description of our variable.
  BOOST_REQUIRE_EQUAL(CLI::GetDescription("global/bool").compare(
      std::string("True or False")), 0);

  // Check that our aliasing works.
BOOST_REQUIRE_EQUAL(CLI::HasParam("global/bool"), CLI::HasParam("alias/bool")); BOOST_REQUIRE_EQUAL(CLI::GetDescription("global/bool").compare( CLI::GetDescription("alias/bool")), 0); BOOST_REQUIRE_EQUAL(CLI::GetParam("global/bool"), CLI::GetParam("alias/bool")); } /** * Tests that the various PARAM_* macros work properly. */ BOOST_AUTO_TEST_CASE(TestOption) { // This test will involve creating an option, and making sure CLI reflects // this. PARAM_IN(int, "test_parent/test", "test desc", "", DEFAULT_INT, false); BOOST_REQUIRE_EQUAL(CLI::GetDescription("test_parent/test"), "test desc"); BOOST_REQUIRE_EQUAL(CLI::GetParam("test_parent/test"), DEFAULT_INT); } /** * Ensure that a Boolean option which we define is set correctly. */ BOOST_AUTO_TEST_CASE(TestBooleanOption) { PARAM_FLAG("flag_test", "flag test description", ""); BOOST_REQUIRE_EQUAL(CLI::HasParam("flag_test"), false); BOOST_REQUIRE_EQUAL(CLI::GetDescription("flag_test"), "flag test description"); // Now check that CLI reflects that it is false by default. BOOST_REQUIRE_EQUAL(CLI::GetParam("flag_test"), false); // Now, if we specify this flag, it should be true. int argc = 2; char* argv[2]; argv[0] = strcpy(new char[strlen("programname") + 1], "programname"); argv[1] = strcpy(new char[strlen("--flag_test") + 1], "--flag_test"); CLI::ParseCommandLine(argc, argv); BOOST_REQUIRE_EQUAL(CLI::GetParam("flag_test"), true); BOOST_REQUIRE_EQUAL(CLI::HasParam("flag_test"), true); delete[] argv[0]; delete[] argv[1]; } /** * Test that a vector option works correctly. */ BOOST_AUTO_TEST_CASE(TestVectorOption) { PARAM_VECTOR_IN(size_t, "test_vec", "test description", "t"); int argc = 5; const char* argv[5]; argv[0] = "./test"; argv[1] = "--test_vec"; argv[2] = "1"; argv[3] = "2"; argv[4] = "4"; Log::Fatal.ignoreInput = true; CLI::ParseCommandLine(argc, const_cast(argv)); Log::Fatal.ignoreInput = false; BOOST_REQUIRE(CLI::HasParam("test_vec")); std::vector v = CLI::GetParam>("test_vec"); BOOST_REQUIRE_EQUAL(v.size(), 3); BOOST_REQUIRE_EQUAL(v[0], 1); BOOST_REQUIRE_EQUAL(v[1], 2); BOOST_REQUIRE_EQUAL(v[2], 4); } /** * Test that we can use a vector option by specifying it many times. */ BOOST_AUTO_TEST_CASE(TestVectorOption2) { PARAM_VECTOR_IN(size_t, "test2_vec", "test description", "T"); int argc = 7; const char* argv[7]; argv[0] = "./test"; argv[1] = "--test2_vec"; argv[2] = "1"; argv[3] = "--test2_vec"; argv[4] = "2"; argv[5] = "--test2_vec"; argv[6] = "4"; Log::Fatal.ignoreInput = true; CLI::ParseCommandLine(argc, const_cast(argv)); Log::Fatal.ignoreInput = false; BOOST_REQUIRE(CLI::HasParam("test_vec")); std::vector v = CLI::GetParam>("test_vec"); BOOST_REQUIRE_EQUAL(v.size(), 3); BOOST_REQUIRE_EQUAL(v[0], 1); BOOST_REQUIRE_EQUAL(v[1], 2); BOOST_REQUIRE_EQUAL(v[2], 4); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/cosine_tree_test.cpp000066400000000000000000000162061315013601400222300ustar00rootroot00000000000000/** * @file cosine_tree_test.cpp * @author Siddharth Agrawal * * Test file for CosineTree class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" BOOST_AUTO_TEST_SUITE(CosineTreeTest); using namespace mlpack; using namespace mlpack::tree; /** * Constructs a cosine tree with epsilon = 1. 
/**
 * Constructs a cosine tree with epsilon = 1.  Checks that the root node is
 * not split further, as it shouldn't be.
 */
BOOST_AUTO_TEST_CASE(CosineTreeNoSplit)
{
  // Initialize constants required for the test.
  const size_t numRows = 10;
  const size_t numCols = 15;
  const double epsilon = 1;
  const double delta = 0.1;

  // Make a random dataset.
  arma::mat data = arma::randu<arma::mat>(numRows, numCols);

  // Make a cosine tree, with the generated dataset and the defined constants.
  // Note that the value of epsilon is one.
  CosineTree ctree(data, epsilon, delta);
  arma::mat basis;
  ctree.GetFinalBasis(basis);

  // Since epsilon is one, there should be no splitting and the only vector in
  // the basis should come from the root node.
  BOOST_REQUIRE_EQUAL(basis.n_cols, 1);
}

/**
 * Checks CosineTree::CosineNodeSplit() by doing a depth-first search on a
 * random dataset and checking if it satisfies the split condition.
 */
BOOST_AUTO_TEST_CASE(CosineNodeCosineSplit)
{
  // Initialize constants required for the test.
  const size_t numRows = 500;
  const size_t numCols = 1000;
  // Calculation accuracy.
  const double precision = 1e-15;

  // Make a random dataset and the root object.
  arma::mat data = arma::randu<arma::mat>(numRows, numCols);
  CosineTree root(data);

  // Stack for depth-first search of the tree.
  std::vector<CosineTree*> nodeStack;
  nodeStack.push_back(&root);

  // While the stack is not empty.
  while (nodeStack.size())
  {
    // Pop a node from the stack and split it.
    CosineTree *currentNode, *currentLeft, *currentRight;
    currentNode = nodeStack.back();
    currentNode->CosineNodeSplit();
    nodeStack.pop_back();

    // Obtain pointers to the children of the node.
    currentLeft = currentNode->Left();
    currentRight = currentNode->Right();

    // If children exist.
    if (currentLeft && currentRight)
    {
      // Push the child nodes on to the stack.
      nodeStack.push_back(currentLeft);
      nodeStack.push_back(currentRight);

      // Obtain the split point of the popped node.
      arma::vec splitPoint = data.col(currentNode->SplitPointIndex());

      // Column indices of the child nodes.
      std::vector<size_t> leftIndices, rightIndices;
      leftIndices = currentLeft->VectorIndices();
      rightIndices = currentRight->VectorIndices();

      // The columns in the popped node should be split into the left and
      // right nodes.
      BOOST_REQUIRE_EQUAL(currentNode->NumColumns(),
          leftIndices.size() + rightIndices.size());

      // Calculate the cosine values for each of the columns in the node.
      arma::vec cosines;
      cosines.zeros(currentNode->NumColumns());

      size_t i, j, k;
      for (i = 0; i < leftIndices.size(); i++)
        cosines(i) = arma::norm_dot(data.col(leftIndices[i]), splitPoint);
      for (j = 0, k = i; j < rightIndices.size(); j++, k++)
        cosines(k) = arma::norm_dot(data.col(rightIndices[j]), splitPoint);

      // Check if the columns assigned to the children agree with the
      // splitting condition.  Due to floating-point error, the cosineMax
      // calculated by CosineNodeSplit() may differ from the cosineMax below,
      // so we have to handle minor differences.
      double cosineMax = arma::max(cosines % (cosines < 1.0 + precision));
      double cosineMin = arma::min(cosines);
      // If max(cosines) is close to 1.0, cosineMax and cosineMax2 may differ
      // significantly.
      double cosineMax2 = arma::max(cosines % (cosines < 1.0 - precision));

      if (std::fabs(cosineMax - cosineMax2) < precision)
      {
        // Check with some precision.
        for (i = 0; i < leftIndices.size(); i++)
          BOOST_REQUIRE_LT(cosineMax - cosines(i),
              cosines(i) - cosineMin + precision);

        for (j = 0, k = i; j < rightIndices.size(); j++, k++)
          BOOST_REQUIRE_GT(cosineMax - cosines(k),
              cosines(k) - cosineMin - precision);
      }
      else
      {
        size_t numMax1Errors = 0;
        size_t numMax2Errors = 0;

        // Find errors for cosineMax.
        for (i = 0; i < leftIndices.size(); i++)
          if (cosineMax - cosines(i) >= cosines(i) - cosineMin + precision)
            numMax1Errors++;

        for (j = 0, k = i; j < rightIndices.size(); j++, k++)
          if (cosineMax - cosines(k) <= cosines(k) - cosineMin - precision)
            numMax1Errors++;

        // Find errors for cosineMax2.
        for (i = 0; i < leftIndices.size(); i++)
          if (cosineMax2 - cosines(i) >= cosines(i) - cosineMin + precision)
            numMax2Errors++;

        for (j = 0, k = i; j < rightIndices.size(); j++, k++)
          if (cosineMax2 - cosines(k) <= cosines(k) - cosineMin - precision)
            numMax2Errors++;

        // One of the maximum cosine values should be correct.
        BOOST_REQUIRE_EQUAL(std::min(numMax1Errors, numMax2Errors), 0);
      }
    }
  }
}

/**
 * Checks CosineTree::ModifiedGramSchmidt() by creating a random basis for the
 * vector subspace and checking that all of the vectors are orthogonal to each
 * other.
 */
BOOST_AUTO_TEST_CASE(CosineTreeModifiedGramSchmidt)
{
  // Initialize constants required for the test.
  const size_t numRows = 100;
  const size_t numCols = 50;
  const double epsilon = 1;
  const double delta = 0.1;

  // Make a random dataset.
  arma::mat data = arma::randu<arma::mat>(numRows, numCols);

  // Declare a queue and a dummy CosineTree object.
  CosineNodeQueue basisQueue;
  CosineTree dummyTree(data, epsilon, delta);

  for (size_t i = 0; i < numCols; i++)
  {
    // Make a new CosineNode object.
    CosineTree* basisNode;
    basisNode = new CosineTree(data);

    // Use the columns of the dataset as random centroids.
    arma::vec centroid = data.col(i);
    arma::vec newBasisVector;

    // Obtain the orthonormalized version of the centroid.
    dummyTree.ModifiedGramSchmidt(basisQueue, centroid, newBasisVector);

    // Check that the obtained vector is orthogonal to the basis vectors.
    CosineNodeQueue::const_iterator j = basisQueue.begin();
    CosineTree* currentNode;
    for (; j != basisQueue.end(); j++)
    {
      currentNode = *j;
      BOOST_REQUIRE_SMALL(arma::dot(currentNode->BasisVector(),
          newBasisVector), 1e-5);
    }

    // Add the obtained vector to the basis.
    basisNode->BasisVector(newBasisVector);
    basisNode->L2Error(arma::randu());
    basisQueue.push(basisNode);
  }

  // Deallocate memory given to the objects.
for(size_t i = 0; i < numCols; i++) { CosineTree* currentNode; currentNode = basisQueue.top(); basisQueue.pop(); delete currentNode; } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/data/000077500000000000000000000000001315013601400170725ustar00rootroot00000000000000mlpack-2.2.5/src/mlpack/tests/data/GroupLens100k.csv000066400000000000000000035666231315013601400221460ustar00rootroot00000000000000195,241,3 185,301,3 21,376,1 243,50,2 165,345,1 297,473,4 114,264,2 252,464,5 304,450,3 5,85,3 61,256,2 285,1013,5 199,221,5 209,39,3 223,28,3 302,784,3 121,386,5 193,273,2 290,1041,4 233,1183,2 118,391,4 166,485,4 298,143,4 290,117,2 307,0,4 94,545,2 37,94,5 101,767,2 62,276,4 159,233,5 49,245,3 300,97,4 224,192,4 289,87,4 96,193,3 156,273,4 180,1080,1 277,602,5 275,795,1 6,31,4 9,15,4 283,303,4 200,978,2 275,563,3 286,326,5 245,200,5 241,1136,5 248,240,5 98,3,5 177,331,3 250,99,4 80,431,2 259,321,4 24,180,5 58,195,5 71,678,2 86,383,4 289,142,5 41,422,5 291,514,4 114,19,3 19,287,1 200,218,4 12,525,3 245,918,4 137,25,5 166,231,1 59,426,5 56,303,5 222,273,4 188,511,4 242,14,3 91,1048,1 245,415,3 193,164,4 240,689,2 177,247,4 253,1443,3 292,4,3 126,228,5 224,236,5 298,228,3 224,479,5 275,53,3 290,143,5 221,365,4 266,517,5 41,402,3 10,110,4 94,624,4 7,337,4 161,24,4 86,1015,4 278,153,5 144,274,2 118,1152,5 61,497,4 61,381,3 27,208,4 134,22,4 31,293,3 89,381,5 285,207,4 292,684,3 215,143,4 165,327,5 249,495,4 270,131,5 159,173,5 264,117,4 197,497,3 41,95,5 167,150,5 109,306,4 57,143,4 89,647,4 270,345,4 61,20,3 278,831,3 236,513,4 93,788,4 127,484,3 297,316,4 43,194,5 263,199,5 193,384,2 71,194,5 221,749,5 249,263,3 40,264,3 223,244,3 81,134,3 261,1146,4 292,470,3 215,657,3 249,139,3 58,22,5 285,378,5 243,814,4 6,478,4 173,367,1 86,273,4 193,1210,2 81,1133,2 12,835,2 12,271,4 243,755,2 304,426,5 94,786,2 42,13,2 298,954,4 56,418,3 83,404,3 268,503,4 298,110,3 193,465,4 159,134,4 98,267,3 9,485,4 258,116,4 84,426,3 302,918,4 212,272,5 120,513,3 89,97,5 48,558,2 41,793,3 154,322,2 67,116,4 171,176,4 18,3,4 267,230,4 4,1,3 304,116,2 43,293,4 42,136,4 278,1335,1 79,465,5 253,163,4 297,280,3 278,1239,1 65,297,4 17,442,3 267,1034,2 98,78,4 12,97,4 25,257,3 6,454,4 221,754,4 199,672,5 118,327,4 212,171,5 275,321,3 93,1216,3 129,378,4 37,327,4 159,718,3 292,1266,3 25,929,2 129,215,4 91,1078,3 255,451,4 0,60,4 71,47,4 55,754,3 12,359,4 14,404,2 91,76,3 206,475,2 291,173,5 231,482,5 250,747,2 223,25,3 180,219,4 258,254,4 304,470,4 51,279,3 160,201,5 147,407,5 124,234,2 96,227,5 57,1097,4 82,233,4 89,346,4 271,177,5 193,180,3 124,477,4 109,687,1 298,13,4 150,9,5 268,126,4 5,13,5 53,105,3 302,68,5 15,943,1 300,789,4 275,1090,3 304,213,2 193,1027,2 90,322,2 86,553,4 293,108,4 285,170,4 199,317,5 228,327,1 177,567,4 302,841,2 61,64,4 206,590,3 91,171,4 300,400,4 35,338,5 69,745,3 62,241,3 27,200,3 278,67,4 249,6,4 13,97,3 298,1017,3 193,53,3 302,814,3 118,236,5 294,217,5 267,929,2 267,1,2 65,257,4 232,201,5 82,622,4 213,333,3 191,475,2 99,343,4 267,144,1 300,55,4 306,88,5 233,140,3 82,575,4 180,263,2 296,132,4 37,152,5 6,381,4 263,812,4 180,871,1 200,145,1 84,506,4 268,366,3 58,467,3 285,142,4 192,95,1 112,594,5 291,10,5 129,1013,3 274,97,4 188,519,5 218,81,1 217,208,5 122,426,3 118,221,5 157,176,4 221,117,4 301,321,2 278,500,3 300,78,5 180,2,2 200,694,1 12,197,3 0,188,3 144,236,5 22,384,4 200,766,4 295,704,5 41,545,3 32,871,3 300,553,3 15,63,5 94,134,3 153,356,4 76,483,5 295,507,5 301,302,2 243,672,3 221,76,4 12,214,5 15,704,5 269,451,4 144,14,2 186,63,5 199,303,5 169,748,5 100,828,3 183,217,3 
127,203,4 180,1294,1 183,152,3 0,32,4 0,159,4 183,320,5 53,594,3 93,342,4 127,507,4 22,322,2 300,226,3 300,190,3 111,902,1 81,182,3 221,723,3 217,429,3 307,1196,4 302,133,5 132,750,3 214,211,2 68,255,5 253,661,4 275,1,4 103,983,1 62,1066,3 266,409,4 12,55,5 239,878,3 285,236,2 293,270,5 89,1085,4 17,25,4 91,228,3 307,648,4 143,88,3 190,301,4 58,950,3 199,95,5 15,196,5 60,677,3 270,198,4 270,708,3 141,168,5 274,596,3 221,150,3 86,39,3 206,257,4 271,1392,2 176,332,4 206,1114,2 298,576,3 270,377,4 304,424,4 48,958,2 93,1223,3 129,1016,3 9,174,3 202,320,3 190,285,4 42,322,3 20,557,5 196,95,5 12,343,2 193,65,3 233,205,4 307,401,4 307,639,4 268,521,5 93,264,4 267,61,3 271,11,5 120,290,3 295,19,5 133,285,3 179,461,5 233,611,3 103,116,2 37,757,1 268,844,1 6,162,4 233,1450,3 274,404,2 51,249,3 101,822,3 12,185,4 177,730,4 235,70,3 255,780,5 262,175,5 243,185,3 278,1180,4 42,814,4 82,77,2 150,196,5 253,435,2 108,630,3 296,715,3 248,187,4 143,698,4 300,603,4 63,391,3 91,500,2 221,96,4 267,435,3 292,134,5 212,172,5 159,459,2 12,497,4 58,714,5 4,16,4 124,162,5 173,314,5 113,504,3 212,514,4 22,195,2 127,14,4 238,55,4 180,278,1 290,79,4 249,237,4 200,648,3 59,59,5 180,324,2 118,406,3 286,0,5 215,227,3 215,530,4 202,470,4 91,586,3 12,891,3 212,175,4 285,287,5 116,1046,2 98,110,1 10,557,3 64,46,2 294,193,4 268,216,2 84,258,2 249,595,5 136,143,5 200,959,2 256,136,4 110,327,4 90,479,4 214,210,4 180,937,1 188,1059,5 0,19,4 302,403,4 298,304,3 186,209,4 221,277,2 213,567,4 292,769,3 284,190,4 302,251,3 95,155,4 71,1109,3 114,1066,4 6,429,3 115,349,3 72,479,4 268,245,5 262,418,5 69,430,3 220,474,4 71,181,5 24,356,4 289,49,5 188,525,4 298,302,3 263,293,3 199,364,5 186,134,4 183,186,4 62,288,2 12,228,4 297,485,3 234,184,4 61,711,4 245,93,2 53,741,5 62,761,3 10,731,3 91,167,4 7,549,3 306,173,4 302,199,4 255,848,2 71,53,3 163,405,2 116,149,4 223,76,4 192,868,3 93,183,2 280,337,2 129,108,3 127,370,1 93,719,1 181,844,3 128,872,1 253,228,4 63,380,4 150,175,2 44,24,4 192,878,3 275,921,4 275,56,3 233,186,4 180,305,1 20,369,1 292,248,3 263,720,5 9,610,5 196,345,3 275,141,3 307,426,4 220,942,4 130,125,4 267,823,2 108,7,3 197,57,3 229,679,4 180,740,1 191,1060,4 233,447,3 89,899,4 192,940,4 127,602,5 125,904,2 243,264,4 89,288,3 156,24,3 304,70,3 118,381,5 20,221,2 230,180,4 279,507,3 287,131,3 278,1496,2 300,32,4 71,698,3 89,258,2 307,54,3 58,741,3 93,743,4 129,641,4 25,1014,3 55,120,5 81,507,2 61,11,4 275,39,3 180,1014,1 151,300,3 177,844,4 216,596,4 78,302,4 137,483,4 307,80,5 74,283,2 268,197,4 306,93,3 221,780,3 120,739,3 268,21,1 12,863,4 229,741,5 268,506,4 238,1098,5 244,1027,5 55,545,3 294,960,5 270,1027,2 221,811,2 68,239,3 9,6,4 21,375,3 293,930,3 81,716,1 278,398,4 268,233,1 5,97,5 242,1038,4 297,180,4 281,324,1 77,322,1 117,199,5 282,1113,5 170,291,4 69,216,4 9,99,5 244,180,4 106,332,3 245,560,1 12,900,1 275,69,4 243,16,2 188,55,5 225,241,5 61,1015,4 275,416,4 213,477,4 305,234,4 221,25,3 279,630,5 59,429,5 55,70,4 41,273,5 0,201,5 12,808,4 172,288,4 14,748,1 184,22,4 279,539,3 243,380,4 149,292,4 6,496,4 177,316,4 177,741,3 94,1216,3 233,1461,3 96,221,5 108,126,2 116,267,5 268,704,2 129,1245,3 263,654,4 206,12,3 41,587,5 245,408,2 86,366,4 100,303,3 255,126,4 91,793,3 180,761,2 212,234,1 91,738,2 291,660,5 245,664,4 273,844,5 187,691,5 17,85,4 4,438,1 235,631,3 192,406,4 143,708,4 89,1197,5 47,608,4 4,224,2 21,127,5 310,431,4 7,21,5 275,187,4 221,172,5 71,865,4 298,133,4 0,170,5 307,294,3 164,215,4 221,48,3 180,120,4 199,10,5 233,625,4 243,706,4 89,24,5 207,215,5 262,95,4 133,322,4 278,585,4 1,291,4 287,592,2 
48,301,4 285,152,5 204,303,3 21,79,4 233,317,4 222,327,3 14,24,3 267,146,4 93,1219,3 273,404,4 6,491,5 267,216,2 15,54,5 163,619,3 289,160,4 91,514,4 238,1069,5 55,448,5 247,233,4 233,9,3 279,1048,2 307,186,5 275,63,5 191,947,3 121,508,4 84,587,3 261,930,2 200,271,3 180,869,2 294,738,4 262,567,4 294,38,4 200,1099,4 92,819,3 158,1027,5 157,664,2 292,422,3 81,596,3 275,180,5 12,822,5 216,1,3 82,659,4 188,19,5 221,795,4 145,1021,5 266,120,3 125,293,3 180,1059,1 124,79,4 42,119,4 12,779,1 252,258,2 41,43,3 76,517,4 290,685,5 267,20,3 261,27,3 233,80,3 28,244,3 235,56,5 157,728,3 155,660,4 231,51,5 167,865,5 36,287,4 140,244,3 234,229,4 101,69,3 76,171,3 89,505,5 185,565,5 43,659,5 117,773,5 6,660,5 48,1002,2 61,67,1 41,1027,4 177,432,4 84,50,2 76,473,5 57,1098,2 55,1046,4 196,687,1 285,98,4 89,257,3 180,1287,1 294,189,4 223,68,4 271,316,4 220,1009,3 65,876,1 206,317,5 233,486,3 6,647,5 86,81,5 194,1051,1 43,448,5 305,286,4 193,171,3 93,61,3 166,658,4 107,99,4 229,303,5 180,926,1 53,301,4 89,21,4 180,695,2 285,356,4 13,268,4 310,178,2 91,120,5 20,439,1 243,549,1 180,404,4 64,805,4 36,539,2 43,442,5 243,182,4 0,264,4 269,24,5 298,386,2 93,571,3 285,745,4 238,271,5 215,54,5 253,120,3 61,664,2 177,384,4 193,22,4 267,954,3 187,142,5 275,293,4 157,1097,4 206,844,3 160,47,1 304,653,4 46,323,3 63,735,4 190,750,3 6,377,5 58,91,5 68,267,5 9,460,3 20,128,4 57,8,4 193,151,3 6,199,5 112,125,5 172,327,5 94,232,4 15,193,5 58,322,4 310,653,3 291,588,4 42,202,4 78,49,4 234,69,5 124,189,5 283,321,3 302,160,5 253,377,3 254,1033,1 103,300,2 89,922,5 5,462,4 278,121,1 285,297,4 221,447,3 296,56,5 41,624,3 129,1216,4 253,356,3 108,474,1 229,1443,2 243,309,3 5,300,2 35,747,4 255,442,3 101,514,1 103,284,4 20,446,5 110,300,4 17,407,5 24,221,4 109,943,3 269,97,5 67,236,5 82,214,4 5,257,2 88,215,5 127,316,4 304,511,4 183,411,2 285,174,5 278,1427,3 255,85,5 220,47,5 139,331,3 189,976,2 10,226,3 200,202,5 149,180,5 125,244,3 19,207,2 143,741,4 180,929,1 108,565,4 84,1064,3 212,132,3 221,378,1 222,10,3 214,420,4 217,207,3 173,936,5 274,185,3 67,741,1 267,582,4 159,461,4 194,272,4 223,177,4 4,109,1 98,1015,5 1,250,5 291,8,4 71,567,4 84,227,3 82,280,5 91,830,2 6,542,3 86,400,2 286,925,4 0,154,2 233,631,2 221,52,5 23,63,5 6,553,3 81,55,3 160,317,3 195,392,4 55,90,4 81,476,3 6,471,2 255,760,4 225,55,4 278,740,5 307,1285,3 15,7,5 179,201,3 202,92,4 144,55,5 287,304,4 83,741,3 43,643,3 16,12,3 312,116,4 147,0,4 196,346,4 20,163,5 278,981,3 238,490,5 184,286,5 296,88,4 302,67,4 185,249,1 72,205,3 103,755,2 93,215,3 238,193,5 196,510,5 279,0,4 0,116,3 223,582,1 302,396,1 59,161,4 197,257,4 238,512,5 5,68,3 232,374,4 84,641,4 109,37,3 183,521,3 98,872,1 12,417,2 200,517,4 12,857,1 213,130,3 295,227,4 221,86,3 278,724,4 216,181,2 84,432,3 238,233,3 12,71,4 193,76,3 207,662,5 108,177,3 229,171,4 58,484,2 312,477,3 69,1132,3 61,181,5 197,233,3 64,124,4 173,659,5 89,11,5 129,1247,3 99,353,2 282,431,5 274,417,3 310,97,5 194,750,4 129,104,4 268,251,1 285,72,5 6,622,3 55,221,5 209,203,5 238,8,5 95,86,4 296,72,2 248,238,3 93,859,2 83,120,4 274,264,4 134,1045,3 290,1177,4 124,381,1 69,398,4 310,8,4 300,522,4 151,684,5 243,171,4 274,1090,2 52,280,4 197,117,2 243,789,4 25,124,4 150,12,3 123,495,1 23,190,5 270,64,3 306,633,3 293,1244,3 233,240,2 24,500,3 292,136,3 200,431,3 74,239,1 12,180,5 206,67,2 1,49,5 312,565,4 143,124,4 187,442,4 275,323,4 144,973,1 71,233,4 82,384,4 180,618,3 108,401,4 206,106,3 184,215,4 13,212,5 148,318,2 56,78,5 229,962,5 175,874,4 252,96,4 283,268,4 105,525,4 120,179,3 61,85,2 290,417,4 83,1032,4 292,379,2 206,57,3 
193,186,4 108,96,3 282,844,4 296,274,5 180,333,1 77,254,4 10,424,4 307,58,4 192,1077,4 296,233,3 86,584,4 249,203,2 7,49,5 185,147,4 311,691,4 90,682,3 4,453,1 290,375,3 174,126,5 144,736,2 6,643,5 275,418,5 82,209,5 101,523,3 152,173,1 61,301,3 48,994,3 267,297,3 206,553,2 312,615,5 285,43,3 278,167,5 275,473,5 61,58,4 253,218,1 82,96,4 62,99,5 15,177,5 296,232,2 89,944,5 84,24,2 41,97,4 302,392,4 273,49,5 103,298,3 93,791,4 183,97,4 292,707,3 247,588,4 17,949,3 216,26,1 199,891,4 200,147,1 295,221,5 6,661,3 195,380,4 68,426,3 71,195,4 255,471,4 127,181,4 150,746,3 6,170,3 285,84,5 171,219,4 307,515,4 189,973,2 81,755,1 307,435,4 58,234,1 63,1062,3 144,755,2 219,297,4 20,323,4 284,268,4 206,64,3 197,657,3 219,332,3 209,69,4 180,13,1 157,127,2 142,681,3 74,236,2 198,220,4 222,1149,2 296,24,4 275,77,4 298,846,4 292,324,2 300,137,2 0,46,4 163,280,4 95,672,4 290,1015,4 6,450,5 232,176,4 5,516,4 201,282,3 213,116,4 183,601,4 276,256,3 193,211,1 94,67,4 24,256,4 5,22,4 37,572,1 312,435,4 21,240,3 261,616,3 129,568,3 65,180,5 20,947,1 180,1331,1 261,173,3 205,301,5 221,21,5 75,60,4 150,702,4 313,27,5 12,146,3 43,257,4 302,417,4 15,88,2 269,557,5 247,116,5 124,317,5 137,522,5 267,385,2 290,14,5 233,146,3 238,95,5 14,330,3 93,154,2 135,88,4 222,422,3 81,193,4 144,354,3 279,844,3 178,338,1 177,198,4 306,948,4 9,487,5 115,330,3 22,257,5 307,173,4 184,113,4 187,236,3 117,653,5 245,720,4 233,97,4 193,238,3 93,23,4 121,377,4 311,99,4 261,63,5 153,241,3 222,762,3 98,402,4 82,42,4 129,306,4 173,401,5 255,486,5 58,176,4 160,167,1 243,52,3 249,195,4 42,39,3 284,149,5 41,952,2 96,669,5 121,509,4 60,322,3 221,105,2 3,263,3 303,258,1 36,402,5 48,67,1 302,1097,4 164,371,5 175,323,5 2,334,1 55,868,3 43,14,4 189,116,4 28,188,4 93,173,4 129,948,3 116,180,5 302,778,1 18,434,5 193,190,4 157,23,4 55,446,4 261,222,3 180,1333,1 213,136,4 91,746,4 187,95,5 57,172,5 243,153,5 133,878,4 297,624,4 253,229,4 229,137,3 15,208,5 150,834,5 180,1326,1 144,1247,3 199,587,5 247,256,3 296,431,4 311,132,5 150,11,5 109,567,3 304,482,5 140,257,5 43,239,4 185,262,3 213,212,4 232,207,4 103,286,2 311,152,2 0,221,4 205,322,1 229,418,4 55,449,3 93,650,5 204,315,4 13,173,5 267,789,2 275,1080,3 82,928,3 267,579,3 221,1040,3 278,88,4 4,423,1 111,330,4 295,428,5 17,201,3 12,867,5 86,209,5 9,284,5 180,327,3 22,462,4 252,745,3 233,227,3 298,1046,2 65,0,3 215,173,5 289,207,3 78,1160,2 263,447,2 3,302,5 143,830,3 137,516,4 63,432,2 4,0,4 275,356,5 61,432,5 238,474,5 292,165,3 129,233,5 263,69,4 207,196,5 23,762,5 278,1161,3 2,244,1 100,595,3 161,1018,4 222,907,1 98,245,3 238,429,3 159,159,5 171,579,4 302,1159,2 53,675,5 43,506,3 209,96,5 163,929,4 298,239,2 27,216,3 304,78,3 17,728,3 81,342,1 108,1011,4 206,24,4 91,1208,1 108,0,4 14,221,3 57,708,5 302,692,4 151,110,5 193,159,2 91,240,3 76,90,3 243,661,3 176,320,2 130,220,3 196,301,3 226,49,4 84,281,3 294,71,4 180,0,3 276,254,4 278,95,4 0,252,5 17,181,4 275,567,4 86,176,5 176,68,1 212,12,4 124,133,5 127,738,4 290,427,5 24,207,4 287,271,5 206,1349,2 270,55,3 4,362,3 273,747,5 69,418,5 310,558,2 150,918,5 198,267,5 200,208,3 98,273,1 10,739,4 58,76,4 183,276,3 221,87,4 37,160,5 58,417,2 103,299,3 297,1345,3 179,1118,3 6,673,2 120,13,5 267,1040,1 251,276,4 302,410,4 209,526,5 233,647,3 311,572,5 307,214,3 233,1396,4 74,545,3 116,14,5 245,238,3 63,515,5 84,186,5 238,80,3 58,53,4 255,219,3 215,195,5 202,281,1 12,194,3 143,152,5 99,267,3 209,273,5 93,470,4 12,806,1 124,656,3 64,1141,4 0,112,5 75,174,4 293,507,4 262,1450,4 293,929,3 120,116,1 84,12,3 302,425,3 211,179,1 5,491,5 180,239,1 278,745,5 
302,1108,4 183,190,4 309,115,5 312,21,3 313,1149,4 12,120,5 42,4,4 57,213,2 214,163,3 61,287,2 279,126,5 160,897,3 10,722,5 93,217,3 34,242,2 310,565,4 47,679,3 84,603,4 287,526,3 183,513,5 150,928,3 89,689,4 10,37,3 103,1015,1 105,581,4 180,1009,1 36,116,4 275,844,4 21,257,5 69,81,4 4,97,3 307,94,4 59,207,5 269,777,5 242,207,4 91,539,2 80,279,4 292,411,1 199,477,5 12,307,3 55,183,4 115,249,4 294,171,4 62,1006,5 294,234,4 103,1009,1 155,640,5 268,1164,1 159,429,5 236,190,4 286,251,1 289,131,3 44,108,5 223,677,3 144,763,2 276,1010,3 64,99,3 271,1100,5 115,254,3 183,85,5 284,150,5 221,147,2 71,27,4 270,186,5 93,210,5 245,424,5 114,7,5 175,326,3 12,395,3 128,330,2 256,1259,2 94,0,5 146,903,5 150,57,4 183,659,3 310,385,3 104,267,4 157,509,3 33,311,4 71,426,5 262,415,5 93,1047,4 199,290,3 44,117,4 278,143,4 144,21,5 70,88,5 181,68,5 192,626,4 213,301,4 150,484,5 101,321,3 233,570,2 248,929,2 194,327,4 108,257,5 221,551,2 281,287,4 116,757,2 22,380,4 111,326,1 302,144,1 251,299,4 150,371,5 281,326,5 303,236,5 289,567,3 63,159,4 27,78,4 167,1277,3 264,470,4 17,112,5 82,81,5 89,498,5 233,1185,4 86,195,5 25,684,3 149,128,4 160,97,4 69,209,4 50,181,3 221,1056,4 91,175,5 203,215,4 163,684,5 56,681,3 183,206,4 59,402,3 91,179,5 42,203,4 221,1041,4 196,299,4 91,789,3 293,281,3 200,746,2 200,214,2 192,409,3 270,704,4 213,692,3 72,656,5 89,186,4 314,272,3 47,308,3 254,471,1 269,670,4 65,6,3 5,477,4 100,221,3 206,1045,4 143,181,3 84,82,4 101,624,3 157,769,5 296,587,4 89,506,5 270,481,5 129,900,1 177,275,3 89,244,3 180,1093,1 310,142,3 266,16,4 200,50,2 193,646,4 58,386,3 0,226,4 115,750,3 169,291,5 109,577,3 59,1020,5 286,346,4 196,54,3 37,678,5 194,1013,4 278,226,4 83,747,4 30,885,2 315,97,5 24,24,5 167,273,4 102,23,4 298,587,4 193,477,3 286,293,5 233,581,4 278,1047,1 86,8,4 180,407,1 278,1150,2 48,46,5 295,854,5 43,94,4 91,215,3 134,38,3 12,65,3 261,385,3 6,675,3 115,941,3 317,473,4 140,825,2 268,12,4 221,1043,4 81,454,4 278,253,3 41,684,4 144,1244,5 183,160,2 48,624,3 176,242,1 312,98,4 31,289,3 307,847,4 144,447,5 129,541,3 129,805,3 164,287,2 248,254,3 48,580,3 194,299,3 117,474,5 129,315,4 103,292,3 200,1228,3 141,81,4 118,717,5 302,93,3 98,49,5 305,13,5 91,708,2 226,294,5 2,336,1 93,819,1 58,1106,4 29,538,3 261,820,3 5,507,3 310,715,4 267,363,3 261,552,4 213,274,3 15,55,5 261,292,2 292,131,4 61,131,5 93,345,4 12,58,4 239,312,5 101,160,2 82,300,2 290,6,5 311,27,4 30,483,5 290,69,4 55,171,5 108,587,4 109,1245,2 58,428,4 245,1217,3 64,195,5 23,366,2 91,114,3 307,740,4 300,659,4 213,1128,4 157,240,4 268,673,2 307,492,3 31,150,3 223,190,4 214,422,5 31,1011,4 153,288,2 200,508,3 84,297,4 179,67,5 183,35,3 187,217,5 304,10,1 143,507,4 72,93,1 193,204,3 176,202,4 275,272,4 197,6,4 107,289,4 188,196,5 72,55,4 171,461,3 119,545,2 100,470,3 4,101,3 25,234,2 267,1248,2 275,772,3 12,149,5 6,400,4 127,481,4 103,6,3 292,38,3 255,24,5 89,820,3 274,68,3 21,509,5 311,493,5 206,191,3 263,503,5 136,686,4 184,739,4 306,686,1 41,175,3 144,471,3 188,633,3 261,120,3 250,147,2 258,771,4 238,57,5 311,920,5 91,14,3 80,741,2 310,418,3 101,447,3 248,745,5 94,526,4 18,654,3 78,99,5 188,750,4 252,509,5 200,918,3 0,16,3 213,41,5 6,80,5 233,131,4 58,147,3 12,353,2 5,468,5 81,13,4 108,626,5 304,49,5 194,153,3 276,278,4 222,7,2 91,80,3 200,68,2 93,57,5 216,143,4 243,147,2 312,199,3 180,873,1 115,1215,3 302,432,4 116,150,4 220,326,4 45,306,3 90,27,4 150,316,5 63,175,4 89,552,2 115,270,4 290,1138,3 61,110,3 195,250,3 302,119,2 48,546,5 306,1021,4 302,175,5 285,153,4 290,500,4 234,86,4 253,378,1 275,156,5 134,1207,3 56,242,3 275,1156,2 
6,575,5 249,403,4 317,767,2 233,807,2 288,281,3 86,1078,2 49,822,3 24,257,5 17,495,5 192,789,3 262,509,4 208,905,2 206,715,3 313,534,4 249,337,4 261,567,3 94,171,4 93,469,4 58,582,5 276,281,4 302,1285,4 270,713,3 268,234,3 147,139,1 222,976,2 209,356,5 184,198,4 173,79,1 234,479,4 275,938,3 98,353,2 307,162,4 302,737,2 223,872,2 297,251,4 43,207,4 314,12,4 214,196,4 268,8,4 41,194,5 292,78,3 245,67,5 100,404,4 91,664,3 248,87,4 59,524,5 12,330,3 270,749,4 91,730,4 253,187,3 310,202,5 262,196,4 200,659,3 278,78,3 137,495,4 208,250,5 216,6,4 260,339,5 175,257,4 302,1036,3 80,168,4 61,113,4 71,529,4 275,363,3 87,749,2 48,6,4 262,116,3 8,297,5 91,527,4 248,707,4 261,753,3 195,654,5 206,1435,3 255,770,2 275,225,4 133,312,5 310,848,3 180,1382,1 202,147,3 246,735,5 312,744,3 310,82,5 250,1013,5 226,410,4 58,549,5 200,205,2 57,99,5 248,722,4 285,1315,5 10,724,3 6,227,4 91,845,3 159,55,5 102,126,4 10,109,3 86,1,4 44,762,2 292,604,3 290,731,4 253,574,3 48,333,4 221,1283,4 160,161,2 267,0,3 58,214,5 176,208,4 150,1297,4 298,234,1 28,331,4 29,434,5 296,181,3 314,184,4 22,171,4 261,46,2 320,495,4 190,753,3 105,777,4 6,150,4 177,677,3 83,11,5 93,167,5 263,32,3 238,528,5 89,656,5 260,874,5 189,301,5 111,288,5 143,105,3 198,257,4 223,19,1 84,500,3 300,201,5 144,742,1 293,126,5 129,205,3 102,120,3 151,411,2 266,839,4 285,230,3 199,23,2 4,210,4 159,116,4 5,356,4 157,71,3 296,735,4 249,243,4 56,759,2 57,267,5 22,1005,3 300,1227,4 306,264,3 275,1094,1 222,410,1 91,23,3 136,299,5 163,116,5 275,37,3 212,293,3 285,33,5 231,196,4 149,220,4 20,102,1 129,730,3 221,440,2 0,89,4 188,1004,4 48,37,1 310,4,3 35,306,4 127,227,3 150,88,5 247,474,5 94,1228,2 212,608,4 202,180,5 307,862,3 268,46,4 197,99,1 296,306,4 304,188,5 265,675,3 196,228,3 73,271,5 126,293,4 193,3,4 176,55,5 44,472,3 56,27,4 238,186,5 267,93,2 237,251,3 200,1009,3 130,1280,4 269,96,4 158,126,5 229,201,4 91,218,4 317,355,4 122,530,3 266,402,4 231,629,3 4,381,5 15,154,3 179,761,4 177,281,3 318,312,5 179,736,3 269,735,5 268,657,2 292,495,5 268,792,4 53,684,3 20,97,5 302,208,5 12,765,4 313,94,5 150,386,5 229,377,5 200,402,3 94,1205,4 269,369,5 255,715,5 79,581,3 302,434,5 311,120,3 150,1005,1 61,257,5 188,1114,4 76,194,5 98,741,5 290,1027,3 292,747,2 180,1341,1 205,899,1 82,337,4 261,178,4 252,215,4 222,595,3 107,49,4 93,346,5 292,778,1 100,280,2 266,979,3 200,1244,4 313,1262,2 270,110,4 313,275,1 17,386,4 206,3,4 312,95,5 20,298,1 214,143,4 278,1375,4 233,1014,2 295,247,5 269,82,4 209,160,5 200,78,4 4,375,2 183,180,4 103,410,1 274,448,3 184,268,5 275,549,4 278,1181,3 215,68,5 20,456,1 15,470,3 146,291,5 290,249,4 27,94,3 28,538,2 290,470,4 6,579,3 180,15,1 296,217,3 307,558,4 86,210,5 96,88,5 20,595,3 58,709,3 237,755,3 177,208,4 185,469,5 298,614,4 9,503,5 109,681,4 108,100,1 156,249,1 266,385,3 180,326,3 206,86,4 46,994,3 147,113,5 93,8,5 59,221,4 243,408,4 275,245,4 89,905,2 233,19,4 105,106,4 215,696,4 293,1198,2 322,256,2 139,267,4 219,302,4 66,63,5 169,298,3 229,141,4 298,640,4 6,580,5 274,500,3 43,249,5 290,213,4 10,740,5 58,285,3 173,394,1 193,233,3 56,203,4 313,416,4 200,196,4 183,154,3 193,791,4 158,1036,2 185,982,3 180,978,2 67,6,3 285,720,3 315,305,4 279,780,4 12,13,4 210,126,4 186,214,3 70,133,3 305,241,5 63,683,4 302,276,3 197,134,5 231,90,5 97,46,4 52,23,3 298,970,2 253,1115,3 6,105,4 11,299,4 238,9,5 237,110,4 129,266,5 89,661,5 62,19,3 39,267,4 180,220,1 297,151,3 103,326,2 41,184,4 180,994,1 257,287,1 290,577,4 147,69,5 304,186,4 183,70,4 93,555,3 157,1010,4 6,527,5 173,236,4 157,189,5 200,852,4 275,42,1 277,310,4 228,346,1 100,251,3 
62,1027,3 274,519,4 274,172,3 61,1072,4 229,233,4 108,974,3 72,356,5 82,117,3 3,360,5 129,244,1 63,777,5 14,472,1 243,88,5 6,642,4 218,346,1 294,703,5 292,287,3 124,996,2 278,486,3 75,581,3 271,47,4 268,284,5 243,379,4 270,219,3 320,286,3 305,863,3 223,331,3 56,1046,4 144,590,4 84,276,2 115,6,2 51,94,4 208,687,1 144,259,4 207,201,4 159,186,5 140,273,5 259,989,5 176,298,4 81,230,2 222,968,5 106,270,2 25,24,3 296,1015,3 243,166,3 14,677,1 285,708,4 81,410,3 166,363,3 98,180,5 55,195,2 292,345,3 6,649,3 89,424,4 227,474,3 81,918,3 42,150,4 9,288,4 196,514,5 56,755,3 245,81,2 61,23,4 322,222,4 12,319,1 267,62,1 17,862,3 270,409,2 306,508,3 53,297,4 294,46,5 193,236,3 193,81,2 310,384,5 286,256,4 289,81,4 261,95,4 278,490,5 289,392,3 144,392,5 304,60,4 268,155,5 275,179,5 322,297,4 295,257,5 17,964,4 71,527,4 223,948,3 124,238,5 243,651,5 134,430,2 137,210,4 58,603,3 220,1058,4 12,450,1 41,68,4 9,339,4 218,881,3 59,603,4 124,151,1 62,49,4 254,447,3 310,171,5 6,581,5 6,126,5 188,202,3 58,469,3 312,147,2 233,160,3 5,142,2 304,959,1 225,146,3 203,339,5 12,492,5 185,280,4 5,274,4 268,81,2 68,299,3 258,958,4 4,61,4 180,1163,3 134,448,3 221,1206,2 4,230,2 285,257,4 103,248,3 302,64,4 294,72,4 200,685,2 12,288,2 183,99,5 261,785,3 233,613,3 0,63,5 324,484,3 311,640,5 206,809,2 261,508,3 238,477,5 141,180,5 295,241,4 290,570,2 12,487,3 293,675,3 68,173,5 194,264,4 120,508,5 278,508,3 48,16,2 6,195,5 279,471,2 220,779,3 174,95,3 179,430,4 310,1221,3 43,119,4 317,256,5 58,587,2 319,116,4 255,938,5 309,23,4 235,264,2 82,138,3 279,127,3 42,51,4 17,493,3 302,86,3 90,426,4 317,630,4 274,257,3 96,481,5 173,159,5 267,469,3 187,768,2 93,88,3 6,43,5 157,84,4 255,764,4 220,68,4 195,66,5 231,174,5 158,684,4 98,181,4 174,70,4 253,623,2 325,21,4 302,290,3 269,52,4 180,1000,1 253,417,3 55,234,1 10,189,3 161,180,4 116,828,3 267,51,3 319,176,5 5,293,2 209,379,4 150,968,5 41,683,4 61,364,2 206,120,3 58,69,3 25,454,3 233,704,5 269,465,5 96,483,3 10,659,3 4,376,1 55,796,4 304,922,5 172,285,5 66,1094,4 212,11,5 267,683,3 35,882,5 99,320,1 268,728,2 130,99,5 307,297,5 13,708,5 283,304,4 190,751,3 221,28,3 200,420,2 206,863,3 302,1314,3 51,1085,4 304,528,5 222,317,4 21,78,4 136,545,5 291,327,3 248,10,5 268,615,4 196,293,4 41,602,4 25,1015,3 6,559,3 192,434,4 6,558,5 298,185,3 114,126,5 58,432,5 216,21,5 278,708,4 256,344,4 278,788,4 278,918,3 62,221,3 177,72,5 89,1193,4 110,312,4 12,847,5 93,624,4 58,495,4 178,904,4 302,301,4 298,515,4 9,504,4 61,463,4 55,68,4 91,288,3 307,377,3 12,143,4 180,1347,1 14,931,1 243,154,3 233,232,2 14,126,2 109,1178,2 180,301,2 235,312,4 309,535,4 36,54,3 233,616,3 302,368,1 74,408,3 196,517,1 313,691,5 186,522,3 150,401,3 267,263,3 223,214,4 291,194,5 15,190,5 98,596,4 233,481,4 302,322,1 232,98,3 65,248,4 279,203,3 300,173,5 91,1141,4 98,409,5 220,1249,2 96,97,4 312,672,4 57,108,4 269,780,5 12,475,2 188,0,5 66,146,3 233,49,4 39,879,3 293,221,4 292,628,3 6,240,4 86,774,2 313,1288,2 130,749,5 295,47,5 80,2,4 150,185,4 56,925,3 233,133,5 52,173,5 279,543,4 122,134,5 108,796,3 95,478,4 235,285,5 200,312,5 173,470,5 129,930,2 150,14,4 89,528,5 58,11,5 2,342,3 309,844,5 223,657,1 3,356,4 24,614,5 10,516,2 297,90,2 58,169,4 146,304,4 313,1517,4 255,412,4 233,617,3 245,7,3 254,677,2 91,105,3 271,126,5 103,268,5 275,405,2 275,33,2 96,49,5 149,120,2 13,529,5 22,169,4 12,96,4 164,324,4 243,6,4 94,415,4 27,97,5 258,268,3 81,595,3 27,172,3 93,454,3 275,383,3 297,7,5 150,209,4 76,237,5 199,240,4 200,404,4 192,331,3 37,138,2 290,225,5 112,325,5 312,190,5 206,530,4 213,150,5 43,122,4 17,153,4 296,627,4 
278,115,1 6,27,5 114,91,4 307,580,4 61,137,1 80,823,3 292,1160,2 12,780,3 12,337,1 40,27,4 279,553,1 286,248,5 116,49,5 177,105,2 200,116,2 255,1056,2 220,203,4 317,658,4 261,10,4 153,487,4 185,384,4 302,1094,2 301,322,2 197,178,4 98,167,5 228,312,2 125,261,4 71,225,4 108,30,4 33,241,5 172,322,5 155,275,3 121,214,4 275,582,3 223,527,3 207,87,5 294,482,5 278,64,1 42,63,5 88,196,5 307,434,4 314,304,5 41,1040,4 163,298,4 6,152,5 92,411,2 124,1179,3 69,49,4 176,959,3 74,475,1 61,400,3 129,365,5 311,227,3 157,413,4 278,41,4 209,57,4 42,65,4 150,489,5 292,664,2 292,35,1 101,404,2 275,290,3 20,838,1 193,662,4 37,431,1 91,452,1 310,179,4 197,213,4 81,660,4 266,237,4 290,465,5 150,691,3 59,46,4 91,78,4 96,114,5 313,1217,4 318,337,2 4,406,3 14,684,4 98,203,4 122,191,5 46,339,5 221,134,5 223,148,1 57,283,4 319,293,4 267,134,4 82,639,2 105,691,3 286,10,5 304,185,4 180,1319,1 48,48,2 5,220,4 84,646,4 127,735,5 278,826,1 270,629,2 302,747,2 248,123,5 279,692,3 206,826,3 59,615,3 20,183,4 285,627,4 144,182,5 310,27,5 24,227,4 75,91,4 245,405,3 200,291,3 234,646,4 285,132,4 47,173,5 143,684,3 4,23,4 84,271,4 285,6,4 63,92,2 150,428,5 190,300,4 286,55,5 95,152,4 124,614,3 149,99,2 92,14,5 83,527,5 317,49,2 12,166,4 212,470,3 177,233,4 127,417,4 194,495,4 12,569,5 275,842,4 53,267,5 304,346,3 13,473,4 17,57,4 262,920,3 288,848,4 193,320,3 10,745,4 297,841,4 55,214,5 12,843,1 37,464,5 307,164,3 213,651,4 101,299,3 6,419,5 60,327,5 306,99,3 20,589,1 310,67,1 94,1229,1 302,181,5 144,12,5 49,252,5 193,529,4 144,0,3 221,156,4 6,187,5 108,99,4 89,630,5 6,77,3 180,1323,1 200,331,2 12,684,5 81,72,4 266,422,3 193,1205,1 268,105,1 98,894,3 234,1148,4 199,664,4 311,187,3 144,49,5 233,70,3 212,47,5 243,215,4 315,587,1 84,174,4 123,49,3 136,236,4 12,566,1 150,161,5 186,115,5 192,553,3 48,740,4 290,53,4 315,291,4 270,513,4 193,403,3 267,720,3 276,1196,4 300,605,3 88,1047,3 252,49,4 101,731,3 310,661,4 200,942,3 245,815,4 171,487,3 279,37,3 42,1056,2 310,660,3 58,286,5 267,82,4 314,650,3 144,298,4 247,173,3 326,190,4 267,671,2 296,285,5 294,150,4 12,876,2 69,583,3 144,459,1 274,175,4 47,258,4 234,418,5 82,412,1 146,257,4 91,520,4 245,727,1 42,283,5 206,202,3 233,484,3 200,586,4 285,688,5 68,11,5 236,493,4 84,132,4 275,84,3 310,365,5 319,398,3 113,174,5 41,120,4 6,679,4 153,301,4 105,659,4 312,70,4 89,525,5 93,185,4 223,42,3 43,229,2 228,314,1 150,479,5 310,504,4 319,201,4 112,328,3 254,858,3 192,826,2 275,788,3 258,749,4 203,171,3 77,411,4 84,97,4 278,392,1 221,322,3 287,126,5 41,605,3 24,728,4 118,212,5 115,184,3 122,12,3 314,656,4 141,242,1 12,479,3 200,325,2 42,630,2 194,386,4 94,173,5 129,331,4 232,481,4 43,529,5 291,85,4 175,293,2 156,404,3 206,786,3 238,203,3 250,143,5 268,922,4 177,147,4 137,120,4 29,81,4 301,244,2 33,689,4 291,275,5 270,10,4 68,174,3 41,455,3 310,567,5 182,240,4 268,410,1 287,195,5 267,41,4 307,633,4 307,165,3 56,830,1 206,409,3 270,210,5 15,143,5 89,602,5 208,407,4 298,237,4 278,1227,4 127,139,4 306,172,5 166,391,1 21,790,1 290,158,4 193,704,2 9,488,4 94,127,3 9,656,4 58,854,4 123,10,5 6,132,5 255,691,5 84,628,3 270,1265,2 275,1415,3 154,987,2 317,475,4 306,257,5 27,6,5 235,728,5 37,671,3 6,92,5 254,216,2 183,728,3 153,174,5 310,402,4 115,300,3 93,228,3 220,507,4 94,635,1 43,55,2 304,202,4 206,507,4 129,160,4 97,162,3 327,8,4 177,217,3 292,292,4 161,741,4 127,78,4 306,1410,4 268,513,4 194,185,3 326,532,4 188,90,3 205,1393,1 94,142,4 30,681,2 93,156,5 72,587,2 255,818,4 290,365,3 221,152,4 206,97,4 221,297,4 285,150,5 115,261,3 6,173,5 147,494,4 310,494,4 177,254,4 180,596,3 122,846,4 290,76,4 
236,527,5 139,300,3 289,221,4 176,78,4 64,201,4 310,180,4 124,795,3 76,167,4 57,959,4 116,404,5 247,126,5 4,422,4 253,285,1 288,6,4 240,293,3 212,689,3 98,507,4 274,522,4 167,283,2 27,379,4 143,30,3 197,650,4 180,1092,1 220,267,5 266,738,4 128,302,3 300,495,5 93,32,3 317,63,4 297,476,4 289,475,3 15,941,4 129,814,3 180,303,1 177,124,4 41,505,3 319,283,4 137,150,4 196,848,3 214,156,4 93,1118,4 292,723,3 78,245,5 278,1491,4 188,29,4 232,805,4 197,23,2 221,171,5 275,300,4 69,416,3 304,14,1 200,369,1 56,408,4 12,313,1 205,244,1 124,172,5 127,142,5 91,762,3 64,55,3 235,505,5 261,76,2 89,957,4 143,90,2 62,840,1 322,116,3 196,175,5 276,272,5 175,287,3 37,837,2 98,545,4 325,185,4 58,662,4 58,701,5 25,14,4 6,181,4 111,353,3 108,153,2 120,404,2 292,166,3 296,197,3 275,10,5 221,209,4 286,91,4 61,442,3 105,702,4 275,1217,4 229,209,5 245,183,4 21,510,4 164,257,5 160,173,2 108,88,4 304,86,1 194,180,5 6,192,5 325,479,4 76,124,3 84,57,4 185,587,4 255,279,5 83,528,5 73,287,3 101,431,3 193,769,4 266,113,5 0,91,3 15,503,5 210,299,2 89,30,4 233,656,4 59,1019,4 91,946,4 157,0,4 86,999,3 275,103,1 0,227,5 41,142,4 42,25,5 298,1321,3 129,199,5 306,70,5 146,338,5 310,228,5 295,285,5 216,81,5 79,885,4 313,8,4 63,526,4 248,78,5 20,297,5 67,117,2 214,150,5 304,237,3 307,416,3 101,117,3 188,119,1 111,749,4 129,621,3 187,473,4 55,584,3 55,229,5 19,10,2 19,175,2 221,24,3 48,147,1 306,430,4 143,312,5 22,403,4 143,960,3 159,2,3 21,226,4 78,507,3 17,646,4 150,480,3 311,479,5 255,28,4 157,567,4 310,140,4 302,178,5 24,477,5 194,406,2 151,146,3 144,1000,4 150,259,1 193,575,2 270,623,2 161,120,4 312,64,2 5,531,3 21,432,3 12,914,5 326,460,3 199,401,4 270,21,5 268,477,4 314,430,2 177,120,5 209,501,3 75,134,5 317,647,5 278,1290,4 74,120,4 89,617,5 43,173,5 292,728,2 216,194,5 223,707,2 245,120,4 283,905,3 300,171,5 243,30,4 94,394,3 302,329,3 197,639,3 255,801,3 45,689,5 304,208,5 82,363,1 223,1207,1 294,66,4 115,247,3 200,36,2 154,747,2 317,507,4 273,287,4 262,332,2 144,171,5 187,190,3 118,312,5 269,305,5 261,90,3 130,844,4 249,259,4 32,306,3 36,182,4 5,210,5 84,516,5 307,163,4 41,745,3 101,1024,2 310,69,4 180,1321,1 16,507,3 173,395,1 124,149,1 180,1363,1 234,510,5 0,265,1 294,726,5 55,193,5 82,1034,4 99,354,4 105,827,2 269,326,5 180,679,1 114,227,4 285,770,2 233,150,3 15,91,4 129,409,5 270,120,2 319,1156,4 188,461,5 312,30,4 48,237,4 59,78,4 12,225,4 0,120,4 149,245,5 12,547,3 178,750,1 221,425,1 6,613,5 156,1131,3 192,367,1 129,992,5 165,321,5 61,3,4 252,182,5 260,116,4 268,1019,4 268,135,4 321,196,5 6,646,5 111,747,3 169,244,5 270,822,3 293,287,5 150,521,5 310,212,4 25,256,3 290,626,4 25,6,3 220,467,3 317,203,5 86,995,3 278,87,1 278,561,3 206,13,4 278,162,5 229,237,1 93,234,4 292,930,1 120,85,5 197,179,3 291,652,4 91,780,3 290,571,3 47,689,4 101,263,2 0,113,5 179,78,3 254,878,3 249,1,4 118,715,5 100,281,3 243,219,2 66,0,3 290,98,4 58,237,5 310,72,4 176,918,4 0,131,4 143,777,4 0,73,1 267,67,4 231,704,5 48,757,1 101,312,3 278,1092,4 278,1492,1 21,172,5 121,714,5 144,314,5 118,1100,5 260,258,4 0,133,4 93,44,5 329,10,4 290,740,5 5,179,4 187,87,4 298,920,3 252,202,4 214,193,4 290,272,3 302,866,3 5,476,1 306,1109,4 129,875,4 94,482,3 73,325,4 12,304,4 3,259,4 260,293,4 158,258,4 136,54,5 173,698,5 285,157,3 86,1182,3 269,229,3 90,171,4 295,271,5 124,482,4 61,1117,3 327,199,4 295,509,5 233,499,3 236,99,5 149,12,4 300,609,3 150,24,4 270,7,4 86,302,3 292,1219,2 112,293,4 310,517,3 180,122,2 327,904,3 109,300,2 287,741,3 110,886,3 193,195,3 238,604,4 108,4,3 290,823,4 15,167,4 13,356,2 21,686,1 206,745,4 311,1298,4 267,249,4 67,410,1 
194,886,4 270,49,5 73,8,4 307,801,3 143,65,4 194,13,4 17,198,3 12,917,3 173,40,1 108,158,4 226,292,5 232,356,5 263,474,5 204,677,1 274,1065,3 55,67,3 77,1159,5 129,681,4 126,379,5 129,567,5 57,1099,2 48,472,3 12,272,3 202,335,3 329,135,5 108,194,5 185,405,1 292,147,1 279,1027,5 142,330,5 182,95,3 59,698,4 177,130,4 296,215,4 58,1116,4 275,428,5 178,257,5 86,385,2 197,1168,4 118,53,4 296,19,4 0,97,4 267,204,5 278,173,4 63,186,5 118,1261,3 74,1016,5 26,741,3 306,20,4 36,684,3 81,14,3 243,237,5 270,273,3 173,1013,3 209,134,5 261,257,4 319,67,5 84,659,4 310,347,4 81,207,3 0,185,4 144,367,3 275,400,3 22,212,3 63,514,5 62,236,3 292,226,2 321,31,5 73,284,3 296,201,3 81,215,4 279,144,3 199,226,5 289,20,3 42,819,2 94,572,1 180,19,1 177,925,4 80,475,2 193,409,3 324,401,2 275,346,4 206,132,4 86,134,5 330,6,4 314,7,3 105,434,3 285,82,5 86,156,3 86,162,4 285,654,3 231,7,2 253,379,4 95,90,5 231,0,4 314,97,4 42,552,4 304,678,3 60,689,2 43,664,1 91,1015,2 167,254,1 275,269,4 327,567,3 221,1052,3 92,221,4 329,234,5 81,503,4 1,313,1 88,731,5 37,215,5 307,84,4 23,152,4 234,1463,4 0,220,5 221,714,2 221,68,5 42,113,5 330,485,3 222,321,4 200,451,1 157,270,4 31,248,4 313,89,2 312,244,3 101,575,2 210,525,4 267,424,4 331,769,3 37,507,2 279,974,4 9,462,4 91,385,3 267,373,2 68,257,4 209,95,4 212,143,5 253,49,5 57,271,5 326,209,3 290,384,4 290,323,1 245,595,3 10,713,4 328,99,4 85,257,5 6,620,5 245,79,2 307,480,4 53,819,3 176,650,3 9,654,5 82,630,2 144,992,3 254,184,4 17,606,3 225,179,4 233,615,2 273,24,5 292,155,4 82,475,3 294,172,5 285,1038,5 41,47,5 207,203,3 231,274,2 266,93,3 270,241,4 124,96,3 322,332,4 304,55,1 144,249,5 37,1029,5 201,514,1 180,974,2 331,565,4 107,12,3 193,519,5 143,61,2 193,1182,2 147,171,5 143,1146,4 268,960,5 289,70,5 248,596,2 64,675,5 300,394,1 266,545,3 206,753,4 200,776,1 313,1094,3 209,630,5 21,455,1 58,930,2 91,714,4 49,474,5 187,158,3 302,699,3 196,287,3 243,675,4 43,87,2 163,596,4 10,229,4 5,296,3 185,924,5 189,146,4 183,1136,5 84,268,3 184,126,5 43,256,4 292,483,5 149,0,4 59,178,4 74,146,3 268,639,5 137,492,4 298,270,3 91,927,3 298,23,3 291,182,5 4,393,2 61,558,3 197,548,3 287,1038,2 151,271,5 41,998,4 63,332,3 98,681,2 58,120,4 134,232,3 6,21,5 23,426,5 143,746,5 260,321,4 200,474,4 132,257,5 109,244,3 4,383,3 138,267,4 111,321,4 233,595,2 300,183,4 290,1470,3 284,215,3 84,52,3 274,182,3 295,274,4 270,196,4 28,747,2 220,171,5 322,8,4 110,339,4 94,175,3 206,169,4 135,275,5 123,615,4 184,527,4 166,403,3 285,340,5 83,321,3 150,528,5 263,400,5 288,0,3 143,63,5 55,28,3 22,527,4 327,741,4 124,784,3 199,71,4 248,22,4 129,55,5 139,318,4 48,101,2 157,482,5 221,57,3 193,212,2 176,88,5 6,267,3 58,548,4 144,410,2 264,6,2 247,281,2 238,46,2 318,878,5 41,101,5 300,1034,4 325,68,2 179,66,1 279,98,2 144,681,3 213,78,4 258,209,4 56,863,3 260,596,4 135,297,4 292,704,5 193,469,3 74,495,5 201,171,3 22,182,3 37,402,1 51,1008,5 94,719,2 64,96,5 206,289,2 200,1,2 189,750,4 161,684,3 220,249,5 91,133,4 48,694,3 101,390,2 5,499,4 151,24,3 144,277,4 327,270,3 115,749,4 89,236,4 220,317,5 127,282,5 93,466,4 220,1217,3 280,331,4 293,538,4 299,947,4 325,152,4 61,27,3 158,248,4 75,810,4 73,236,4 80,410,2 279,226,3 223,21,5 63,76,3 193,755,1 14,19,3 42,327,4 243,99,4 326,804,4 20,927,3 82,253,2 13,21,3 317,609,5 91,755,3 221,1077,2 61,156,3 12,839,3 270,299,2 58,12,5 207,513,4 288,814,3 278,248,3 325,49,5 72,11,5 27,233,4 5,94,2 89,353,3 95,518,4 6,626,3 253,648,1 327,518,5 246,750,3 44,471,3 322,126,5 267,565,3 290,815,3 58,404,3 199,408,2 331,974,3 238,611,5 21,398,4 266,146,3 234,318,4 86,69,5 215,142,2 
267,120,2 238,316,5 268,921,5 206,467,4 269,147,4 183,558,3 303,270,4 330,478,2 156,282,4 238,182,5 260,338,5 300,57,4 144,338,3 9,320,4 47,307,5 320,630,4 31,590,3 124,1035,2 0,83,4 20,741,3 21,185,5 291,323,3 71,128,4 255,641,4 91,1094,2 72,474,4 289,273,4 82,542,2 55,596,3 82,215,4 214,21,3 100,368,2 327,520,4 306,174,4 200,22,4 196,569,4 25,285,3 89,488,5 97,516,5 56,249,3 162,287,3 0,30,3 103,323,1 332,893,3 310,21,4 236,210,4 43,602,4 21,95,5 212,545,4 256,257,3 326,299,2 278,1016,3 52,844,3 84,96,2 42,285,4 180,6,4 296,573,1 200,650,4 319,98,4 93,179,5 234,84,4 304,130,3 233,228,4 327,590,3 327,753,4 257,322,4 2,322,2 15,69,4 285,424,2 326,701,2 199,264,5 206,130,3 291,9,5 213,178,5 154,320,4 105,212,4 199,585,4 304,215,5 278,1112,3 177,983,2 330,132,3 57,44,5 166,1305,5 150,190,3 325,167,3 296,442,2 190,287,3 80,470,3 283,257,4 4,266,4 149,324,1 256,58,5 144,442,3 270,190,5 175,296,3 157,37,4 151,715,5 231,637,5 108,929,3 242,659,4 56,743,5 144,1056,1 234,274,5 180,123,1 144,181,5 248,475,3 43,10,3 193,565,4 108,217,4 48,9,3 268,209,1 86,232,4 313,790,4 291,131,4 6,299,4 290,459,5 291,175,5 289,1027,3 121,426,3 16,150,4 58,46,5 28,688,2 273,410,3 189,339,1 212,49,5 13,110,3 320,130,4 220,1313,3 194,99,5 235,186,3 91,618,4 302,575,3 41,209,5 245,422,3 180,822,2 196,230,3 180,368,3 129,171,5 275,1130,3 251,741,4 220,1066,3 291,487,5 176,123,3 41,784,4 0,69,3 12,177,4 75,275,5 268,71,2 2,330,4 289,428,4 158,814,3 247,473,2 213,1064,5 29,180,4 7,181,5 237,117,3 248,175,4 263,1068,5 97,654,3 122,274,4 180,687,1 6,161,5 118,268,3 180,456,1 137,482,5 55,62,3 290,121,3 325,467,3 91,174,4 292,653,5 161,1046,5 302,548,3 324,503,3 266,653,5 129,545,4 215,576,1 300,52,1 90,422,5 300,383,5 290,671,3 17,195,3 194,1083,4 221,938,3 326,273,2 253,576,1 331,692,5 266,54,4 15,442,5 157,78,4 304,13,4 86,66,4 312,174,4 42,497,5 233,1034,3 89,10,4 229,195,5 0,59,5 261,184,3 220,1406,3 278,381,4 210,677,3 286,1015,5 166,602,4 118,153,5 125,877,5 59,473,5 295,426,5 299,242,4 193,970,3 82,185,4 206,1241,5 310,1115,3 180,405,1 129,549,5 244,221,4 167,234,2 255,755,4 0,176,5 58,9,4 222,257,1 242,224,3 147,1148,5 9,47,4 177,548,4 294,3,4 98,123,2 333,116,3 262,522,5 229,401,5 151,131,5 188,44,3 129,230,3 333,281,4 90,192,3 243,96,2 82,865,3 221,216,3 9,202,4 172,299,4 268,167,4 291,99,5 59,507,4 196,430,3 312,264,4 233,505,4 233,958,2 153,483,4 13,55,5 200,1210,3 180,358,1 51,747,4 307,578,3 211,514,4 12,41,4 267,98,3 118,244,4 43,201,4 125,883,5 158,110,4 89,300,4 319,41,4 300,24,3 113,268,4 8,690,5 314,16,1 136,194,5 182,561,3 296,300,4 333,602,5 17,953,3 151,96,5 183,497,5 324,429,5 38,314,4 230,126,3 301,308,2 62,149,4 200,374,3 199,102,2 12,93,3 296,21,4 200,843,2 13,92,3 239,342,3 183,715,3 215,11,5 37,121,1 256,275,5 255,777,4 199,228,5 147,176,2 248,21,5 183,46,4 275,57,4 267,431,3 223,257,3 144,24,2 297,260,4 243,742,5 288,409,2 58,131,5 300,1111,4 55,1089,3 326,191,5 284,287,5 132,327,3 70,345,4 292,1131,3 12,907,1 0,26,2 270,171,5 285,268,5 48,925,1 289,152,3 225,269,4 103,121,3 310,232,4 59,177,5 199,190,5 127,275,4 156,747,2 302,459,4 4,444,3 267,539,1 289,217,2 180,1345,1 188,275,3 89,658,4 320,133,3 278,107,4 196,769,3 216,565,4 192,681,1 33,309,4 292,156,5 296,299,3 23,741,4 258,404,3 302,1006,5 325,281,2 9,217,4 333,634,2 271,7,4 75,1128,5 12,299,1 193,430,4 255,290,5 147,184,1 275,317,5 226,125,4 310,552,3 197,426,4 12,179,5 285,99,3 270,450,3 58,317,5 327,654,4 24,173,5 89,970,4 156,149,5 105,68,4 172,321,4 275,1134,4 275,75,4 48,545,1 114,233,5 306,21,3 81,217,3 115,1081,3 79,49,3 58,380,5 
235,142,4 55,173,5 81,412,1 81,68,4 143,726,3 6,525,5 48,530,3 0,259,1 242,128,2 312,487,5 206,272,4 333,221,4 82,94,4 161,229,2 325,495,5 235,685,3 16,8,3 91,1214,2 81,146,3 200,241,4 222,236,5 167,294,4 185,976,3 245,355,2 61,134,4 319,455,3 47,602,4 208,268,2 235,1327,4 91,672,4 70,284,3 4,166,2 66,239,5 187,553,2 325,53,3 233,461,4 30,301,4 227,885,1 171,602,3 313,1138,5 296,651,3 263,658,5 117,173,5 215,285,4 289,1012,2 255,277,5 199,819,3 48,311,3 117,432,5 292,194,3 12,28,2 41,404,4 292,565,3 124,157,4 314,229,4 295,82,5 187,203,4 200,3,4 252,746,3 314,530,5 209,133,5 118,1169,3 150,508,4 80,272,4 323,747,5 42,14,5 297,431,4 249,126,4 285,1264,5 202,293,2 266,225,3 193,734,4 302,98,4 192,194,1 56,587,4 91,671,3 206,268,4 324,153,3 279,85,4 196,448,5 38,351,5 196,509,5 116,0,4 131,921,5 270,179,5 221,432,4 102,116,4 200,25,4 269,386,5 103,99,4 94,95,4 129,203,5 289,238,2 313,832,4 312,968,4 294,721,4 268,411,3 48,0,2 331,227,5 300,10,4 124,433,4 335,65,3 0,144,2 326,229,4 261,291,4 312,204,5 320,522,3 247,184,3 37,383,5 223,777,1 216,1221,1 5,474,5 330,46,5 37,422,5 0,173,5 307,59,3 206,641,3 214,1038,5 55,238,4 108,1010,3 9,123,5 319,209,5 268,179,3 289,379,3 310,204,5 128,269,3 108,280,2 234,897,3 334,327,3 12,507,3 200,557,2 275,800,3 80,117,2 287,199,4 262,96,4 292,86,4 135,116,4 317,659,3 294,404,5 200,479,4 231,707,4 196,565,4 312,179,5 108,229,5 167,595,4 200,979,3 221,553,2 114,10,4 333,223,2 118,696,5 197,384,3 90,506,4 61,280,3 238,97,5 323,1032,4 200,822,3 321,49,5 106,304,4 63,1,3 27,49,4 245,201,3 167,1196,5 33,258,2 285,464,5 183,520,4 105,285,4 197,1116,3 290,52,5 24,476,4 0,158,3 180,1392,1 168,300,4 59,171,4 177,426,5 148,326,2 279,95,4 204,983,1 91,430,4 243,368,4 307,290,3 234,683,4 217,193,3 306,312,5 17,68,3 22,214,2 183,131,5 243,236,5 210,180,1 235,695,2 144,671,3 234,647,4 115,1015,2 177,357,1 10,560,2 328,511,4 182,404,4 307,466,4 206,575,3 197,248,2 99,749,4 290,167,5 114,761,4 150,168,5 304,402,2 337,493,3 291,524,5 233,670,3 233,583,3 278,274,3 233,637,4 109,78,4 105,272,3 127,110,3 297,150,3 41,844,5 127,746,3 189,716,3 0,81,5 98,420,3 312,207,3 12,44,3 304,301,4 93,184,5 270,203,4 127,82,5 266,49,5 141,188,4 0,55,4 17,213,4 187,233,4 234,99,4 302,407,4 99,265,2 177,301,4 41,780,4 17,487,3 183,13,4 292,520,3 292,848,2 197,155,3 233,965,4 180,1350,1 193,152,3 0,271,3 264,278,2 158,322,4 331,228,5 333,228,2 125,257,4 199,224,4 62,245,3 270,133,3 178,315,5 307,958,3 269,69,5 180,1197,1 20,444,3 325,674,4 267,822,2 108,844,4 338,131,5 243,94,4 61,701,2 320,614,5 253,140,3 294,422,4 270,240,3 6,518,4 333,51,4 135,13,5 191,1159,4 258,175,4 243,508,5 237,814,2 72,126,5 248,454,4 319,290,4 12,819,4 9,282,4 320,206,3 200,990,4 101,558,3 189,741,3 310,98,5 308,332,3 61,684,2 115,186,5 294,965,5 233,71,3 254,983,1 160,581,1 86,549,4 58,558,5 139,321,3 223,300,3 89,485,5 13,791,5 193,215,3 221,500,2 89,310,4 327,42,3 6,632,5 150,227,5 296,222,5 206,528,4 129,929,3 313,742,1 180,925,1 12,508,5 231,522,4 200,86,3 222,469,4 17,601,3 81,494,3 143,402,3 185,321,5 249,173,3 320,193,3 27,11,4 27,894,4 150,404,3 206,1101,3 200,163,3 5,508,4 41,379,4 220,894,2 327,9,4 269,158,4 268,339,5 215,248,3 200,1423,3 84,85,4 94,842,4 305,274,4 255,234,3 84,691,3 10,311,4 304,209,3 180,320,2 150,6,4 295,960,5 118,594,3 313,928,3 278,362,5 187,356,4 213,871,2 233,208,4 4,425,3 0,79,4 245,577,2 293,978,3 313,72,4 311,97,4 207,661,4 42,381,5 253,595,4 2,293,2 43,152,4 24,741,4 93,78,4 261,405,3 34,1024,3 147,500,4 69,422,5 82,264,5 4,221,4 307,1027,2 108,61,3 48,172,3 313,467,4 333,1162,4 
268,204,3 37,317,3 101,221,3 328,296,4 304,1410,3 235,288,4 312,130,4 331,283,5 120,120,2 59,182,5 338,1029,1 295,543,4 10,719,1 262,271,5 302,202,5 287,181,4 290,16,4 307,627,3 12,754,3 63,230,3 276,23,4 129,571,3 292,385,2 278,367,1 188,252,4 295,31,4 304,168,5 302,261,5 94,210,3 206,1097,4 109,1247,3 311,407,4 278,1412,5 14,300,4 115,483,4 197,50,3 12,1,3 331,231,5 43,54,4 61,715,4 147,528,5 302,420,4 275,55,5 310,483,4 57,474,5 84,487,4 329,583,3 180,1066,1 300,514,3 12,829,1 126,267,1 36,55,5 313,923,5 200,209,2 197,510,4 93,741,3 208,257,2 304,609,3 66,404,5 293,119,2 245,97,4 193,161,3 306,392,3 94,975,2 267,251,3 215,297,5 4,452,1 222,844,4 292,123,4 223,1118,3 298,175,4 129,70,5 129,49,5 53,312,4 61,472,4 311,494,4 124,21,5 317,356,4 203,747,1 181,292,3 48,568,3 68,55,5 63,958,4 324,178,5 285,271,5 115,879,3 214,88,4 45,332,5 245,293,2 212,24,4 89,212,5 109,187,4 211,510,4 56,1058,3 56,824,1 296,281,3 275,175,5 105,44,3 150,65,4 275,65,3 268,75,3 153,285,4 209,218,3 305,318,4 323,470,5 264,471,3 84,388,3 53,324,3 17,497,4 270,344,3 122,21,4 86,1188,5 216,809,3 197,147,3 115,256,3 130,273,3 296,691,3 265,873,2 108,795,3 188,479,5 21,293,1 233,470,3 327,678,2 55,78,4 177,977,2 215,225,3 37,443,1 218,178,5 42,943,2 278,1483,3 235,506,3 295,1008,3 270,489,4 205,902,2 20,294,3 317,46,2 58,229,4 150,174,5 262,85,4 307,192,3 151,124,5 122,164,5 168,173,4 293,9,3 196,650,5 262,891,3 62,108,4 205,361,1 51,497,5 315,212,5 71,88,3 188,704,4 79,86,4 197,745,4 84,55,4 193,55,5 109,81,4 98,740,3 6,194,5 322,545,2 20,981,1 333,92,4 11,81,4 42,234,3 227,287,4 108,89,3 12,63,5 177,287,5 180,886,1 122,605,3 81,63,5 137,284,4 86,1181,3 200,303,2 69,201,4 177,654,4 326,557,4 314,653,5 250,54,3 41,69,3 310,481,4 128,271,4 306,192,3 9,3,4 337,210,4 94,513,2 341,1046,2 341,791,3 200,212,4 31,275,4 256,288,4 13,174,5 298,173,4 5,133,5 319,432,4 304,256,2 27,152,3 307,608,4 286,217,5 61,420,5 268,171,3 118,627,4 278,1141,1 223,1441,3 307,527,3 150,434,4 327,215,3 294,492,5 61,95,4 58,1108,3 254,257,4 101,194,4 127,659,2 7,78,4 196,1418,2 216,577,5 312,203,4 161,297,4 29,288,2 259,318,2 56,293,4 333,85,4 307,53,2 209,254,4 212,446,4 188,1020,5 219,305,4 103,1240,1 338,581,4 27,183,4 50,147,3 243,156,4 233,490,4 274,587,3 185,52,1 98,1051,1 268,130,5 310,719,3 269,1118,5 285,1034,3 310,93,3 210,256,5 238,670,5 200,97,4 42,402,4 314,215,4 52,923,3 307,451,2 337,612,3 89,356,5 302,326,1 246,270,2 143,302,4 101,1029,1 89,738,5 71,526,4 285,247,5 200,31,3 326,496,4 140,124,5 166,674,1 261,216,3 150,812,4 12,858,1 275,206,4 245,1072,4 297,97,4 22,87,3 93,699,2 129,771,4 4,402,3 296,175,4 177,249,4 127,416,4 269,280,5 62,250,4 41,356,5 99,287,2 333,99,5 161,221,4 183,1019,4 12,624,2 71,78,4 212,7,3 81,12,2 313,734,5 58,487,3 13,312,2 235,199,3 324,239,1 285,163,3 267,767,3 82,76,4 312,229,3 20,217,4 324,655,4 282,82,4 222,322,2 129,417,5 27,281,4 42,6,4 292,558,2 285,431,3 175,271,5 236,498,2 331,450,5 302,272,3 285,12,2 326,168,2 261,49,2 311,630,5 101,733,2 15,654,5 22,89,2 248,181,5 17,208,4 292,215,4 307,606,3 163,688,5 305,1008,4 326,654,4 279,755,4 105,96,5 108,146,4 155,57,4 132,259,1 22,510,5 111,688,4 115,312,5 270,12,4 312,135,5 239,897,5 51,404,4 279,201,3 261,1277,4 274,251,2 186,731,3 12,427,5 267,945,3 233,282,3 15,150,5 335,107,3 234,434,5 215,273,3 245,214,2 12,912,1 20,438,1 93,98,3 81,274,2 338,54,3 58,1115,3 216,684,5 294,735,5 169,327,3 150,825,1 12,211,5 222,0,4 245,195,3 153,136,3 157,143,4 10,119,2 17,629,3 196,180,5 234,432,4 330,68,5 243,277,3 216,539,1 311,133,5 298,167,4 233,1171,3 
223,631,2 326,473,3 183,779,4 61,1106,1 64,69,1 100,927,2 209,464,4 143,236,4 319,249,4 310,691,4 158,327,3 127,76,3 166,47,1 290,557,4 55,142,3 37,391,5 292,263,3 114,68,1 275,249,4 279,224,4 294,587,4 25,320,3 301,327,3 144,108,4 200,379,1 56,251,2 279,99,3 309,257,3 25,268,4 307,3,5 268,173,1 261,70,4 220,683,4 262,520,3 255,275,3 0,228,4 265,507,4 58,126,5 324,504,4 326,132,4 281,268,4 150,299,4 103,282,4 290,1016,4 275,769,4 333,1107,4 223,878,3 63,1132,4 57,41,4 105,583,4 158,257,4 267,247,3 317,285,3 5,524,5 326,430,3 76,22,4 94,14,4 254,451,3 143,327,3 101,306,4 268,1013,3 183,171,4 305,305,5 48,731,3 180,1346,1 292,513,4 329,120,4 124,1073,3 290,146,4 268,213,3 12,167,4 304,75,1 312,434,5 306,228,5 313,53,4 268,528,5 282,185,5 157,7,5 91,86,3 84,841,3 19,117,4 192,392,4 166,221,4 200,1186,3 124,345,1 143,879,5 233,627,2 290,573,1 223,976,2 151,779,5 70,461,5 150,754,3 134,228,2 91,930,1 94,32,3 129,124,5 268,404,1 296,276,3 61,526,4 220,16,4 10,742,2 229,49,5 158,929,4 173,106,5 96,6,5 83,288,5 62,947,3 124,142,5 159,125,3 315,482,4 31,116,3 326,92,4 12,855,5 215,201,4 91,1211,3 0,139,1 262,182,4 4,172,4 84,371,4 193,518,4 108,549,5 200,197,4 339,171,4 48,116,1 6,641,3 238,285,1 197,567,3 236,22,4 238,134,5 4,240,1 71,381,4 296,479,4 248,825,1 24,126,3 93,226,3 194,590,4 91,84,3 84,708,5 307,501,5 310,116,4 246,250,4 234,791,4 328,325,3 337,78,4 243,427,4 186,69,4 252,482,5 193,61,2 69,70,3 202,331,5 48,71,2 307,672,4 245,425,3 279,230,3 179,432,5 109,1249,3 326,810,4 338,46,4 193,131,3 0,224,2 35,318,2 341,745,4 259,1104,5 39,753,4 174,30,4 61,826,2 137,99,5 251,8,5 58,420,5 109,539,3 0,234,5 333,268,3 300,94,5 62,5,3 268,804,2 150,356,5 267,403,4 198,472,4 21,779,1 27,440,2 298,209,4 316,325,3 253,383,1 177,244,3 296,193,3 89,965,5 10,733,3 324,513,4 248,410,3 17,963,3 310,117,3 333,292,3 293,482,4 296,85,5 292,646,5 293,875,3 285,141,4 307,568,3 221,163,4 48,720,2 302,1089,1 72,473,5 92,844,4 84,1100,4 222,215,5 41,1042,2 233,211,2 15,287,3 12,318,4 134,293,4 167,410,1 71,203,4 143,522,5 302,397,1 127,214,3 319,10,4 266,683,4 59,489,4 188,693,4 115,904,2 248,239,4 109,299,3 200,1062,3 179,120,5 86,1071,3 5,208,4 62,300,5 178,894,5 147,97,3 12,311,1 14,277,1 175,304,5 101,65,3 292,250,4 41,203,5 327,522,5 205,332,4 278,66,4 157,41,3 69,150,3 270,660,4 36,221,3 278,1094,1 249,199,5 102,143,4 49,1083,5 127,1140,4 335,576,1 274,190,4 94,172,5 86,650,4 20,677,2 144,1216,2 12,859,1 311,675,3 199,430,5 101,66,1 324,505,5 220,1072,4 1,296,4 304,732,3 274,968,2 10,214,3 340,875,4 230,125,5 268,473,4 12,539,3 101,808,3 253,239,1 233,485,3 255,931,3 248,57,5 304,946,4 261,14,3 324,186,3 183,835,4 10,427,4 39,257,3 312,739,2 275,1313,3 100,1050,2 235,698,4 206,133,4 214,81,3 124,944,5 119,281,4 292,460,2 159,92,5 297,417,4 325,443,4 245,848,1 277,300,2 165,287,3 327,3,3 69,264,4 297,464,4 342,185,4 204,312,3 200,460,4 275,1477,3 90,263,4 249,293,1 67,404,3 245,98,3 9,703,3 96,434,4 98,117,2 101,301,3 69,151,4 40,30,3 177,178,2 5,18,4 88,245,5 253,256,3 93,401,4 41,403,5 129,565,4 12,613,4 285,641,3 290,409,5 213,120,4 245,283,1 129,412,3 319,1209,4 59,809,4 140,743,5 287,96,4 144,749,4 188,495,5 129,54,5 327,430,2 176,1038,3 200,280,2 300,455,3 135,55,4 73,14,4 168,428,3 0,119,1 99,301,4 302,715,2 215,497,3 5,475,1 328,97,4 229,510,2 112,320,3 63,99,4 12,875,2 268,770,1 5,153,3 326,961,3 178,344,1 59,151,4 221,249,2 82,251,4 329,50,5 124,289,4 180,285,1 326,450,4 160,13,4 17,81,3 23,371,4 199,285,4 72,201,2 21,28,1 95,7,5 342,1106,3 296,11,5 278,1410,3 109,201,2 93,256,4 71,175,2 101,88,4 
118,683,4 59,150,5 294,403,4 307,446,4 311,1202,5 342,54,3 283,258,2 275,562,3 279,735,2 310,309,4 17,738,3 86,208,5 12,89,3 57,1096,5 223,242,2 278,779,4 55,567,4 329,214,5 6,91,5 178,314,5 63,238,3 296,698,4 20,423,1 187,791,2 90,194,5 292,193,4 93,726,5 273,147,2 56,281,5 275,779,3 215,650,5 150,240,3 61,7,5 196,67,2 58,384,4 118,274,5 117,323,4 303,297,5 25,8,4 311,846,3 307,964,4 269,706,5 296,30,3 220,99,5 115,759,3 118,192,4 176,299,2 160,653,3 302,234,4 116,173,4 326,215,3 326,1097,4 22,515,4 180,1050,2 47,660,5 75,530,4 188,128,3 0,124,3 311,143,1 300,409,4 305,475,3 37,615,3 222,297,5 144,1291,1 327,527,5 173,457,4 302,30,3 22,82,4 5,174,4 172,937,3 312,238,3 37,779,4 183,88,4 43,154,3 243,12,4 12,262,5 343,478,4 39,339,2 140,221,4 143,285,4 323,596,4 221,699,3 95,483,5 89,198,5 0,214,3 269,378,5 250,256,3 245,108,5 129,89,4 325,317,5 8,520,4 220,31,4 19,185,3 36,78,4 278,870,4 162,55,4 83,283,3 200,675,2 45,1061,5 71,81,3 116,175,5 268,607,4 147,213,5 293,1066,4 120,173,3 19,171,3 58,723,5 107,124,3 48,52,4 293,677,2 239,300,5 298,601,3 245,801,1 12,787,1 302,1507,1 206,1282,4 254,270,4 194,476,2 311,556,5 143,301,3 101,398,2 296,514,5 105,164,5 290,420,4 144,551,5 88,935,5 84,70,4 281,270,3 338,855,5 134,226,3 150,90,2 220,466,4 285,195,4 115,194,4 93,737,2 143,171,4 213,207,5 233,518,5 243,595,4 221,738,4 73,125,3 44,126,5 343,305,5 115,886,3 180,1361,1 143,460,4 188,1098,5 52,227,3 1,289,3 298,738,3 312,138,3 273,274,5 320,520,2 133,538,4 268,485,3 93,654,4 261,1219,4 180,1264,1 108,3,2 11,95,4 108,41,1 89,306,5 76,497,5 313,619,3 47,209,3 304,1100,4 197,356,5 221,292,3 206,185,4 157,579,4 254,550,1 86,1046,3 300,8,3 278,1497,4 298,342,3 338,287,3 12,781,3 209,721,4 199,527,4 192,692,4 296,677,3 127,215,5 310,37,3 168,878,5 173,81,1 12,439,1 94,377,4 320,223,3 179,82,5 149,126,5 331,232,4 101,82,3 262,677,2 127,96,3 238,287,2 274,201,3 310,470,4 266,144,4 252,209,4 249,63,5 283,338,3 326,848,2 10,89,2 221,92,2 298,25,4 275,747,3 273,495,5 251,128,4 243,1224,2 74,819,3 193,51,4 327,626,3 200,954,3 252,197,5 220,38,4 333,316,3 270,413,4 157,524,5 63,704,5 293,23,4 27,479,5 268,958,5 298,269,4 150,654,4 176,86,4 268,14,2 278,739,3 331,672,5 268,482,4 90,681,2 245,16,2 289,417,3 8,486,5 216,796,4 233,13,3 291,1049,4 64,1128,4 221,230,2 298,31,3 278,684,3 14,619,4 67,177,5 292,209,3 42,930,1 343,277,3 55,367,3 338,29,3 143,517,3 124,733,3 11,734,5 268,483,3 89,178,5 184,236,4 242,274,3 268,1090,2 10,428,5 12,87,4 119,24,5 197,401,3 164,303,3 137,97,5 93,560,3 292,187,3 38,257,4 158,236,3 343,38,3 68,1016,5 229,672,3 159,123,4 43,227,5 297,1141,4 344,1159,3 93,132,4 120,121,2 324,108,2 159,1018,5 204,332,4 342,43,3 320,1027,2 101,985,1 267,122,3 18,152,4 124,510,5 331,1187,5 89,131,5 15,656,5 315,49,1 271,10,4 84,379,4 278,1117,3 268,760,2 74,695,4 248,468,4 310,670,3 57,221,4 253,98,3 307,631,3 124,1271,1 48,39,1 82,1100,2 15,293,4 93,213,5 294,623,5 151,865,5 127,226,2 118,234,5 121,1267,2 275,560,2 250,108,4 6,89,3 183,274,5 261,627,2 278,12,3 180,763,1 20,55,5 297,659,3 97,320,3 144,948,4 163,457,4 231,63,4 183,125,3 268,208,4 25,99,5 56,1092,3 116,337,3 296,96,5 275,968,4 118,1262,3 344,721,3 317,71,4 245,409,1 157,808,3 177,650,4 253,624,3 20,105,2 224,135,5 40,485,4 233,190,4 77,288,4 89,8,4 312,414,2 179,715,1 343,461,2 267,809,2 194,226,3 71,602,4 30,134,4 302,1266,3 63,730,3 61,88,5 150,661,4 188,1371,4 212,78,5 218,12,1 344,707,3 243,711,3 219,287,5 0,5,5 238,922,5 289,201,4 193,522,5 199,830,4 345,212,3 266,213,4 99,339,3 41,520,2 213,44,4 263,319,4 144,1101,1 9,21,5 
298,70,3 312,607,4 208,241,4 220,91,4 292,645,3 183,1011,3 69,259,2 89,29,5 143,1168,4 0,103,1 20,287,3 5,522,5 247,180,4 167,408,4 233,877,2 43,237,4 295,1072,5 295,95,5 205,287,5 75,99,5 326,49,3 307,810,4 337,167,3 124,237,3 298,1073,3 84,202,5 76,430,5 17,366,4 292,571,2 285,227,3 245,567,4 173,901,3 267,162,2 290,554,1 150,477,5 268,62,1 10,96,4 82,747,2 82,124,5 144,716,3 55,425,4 338,434,4 34,241,2 17,461,3 193,707,3 13,513,4 344,650,4 278,414,3 11,470,5 125,331,2 15,21,5 115,757,1 219,324,1 150,327,3 279,10,5 9,154,4 72,1148,4 179,212,5 12,830,3 180,1290,1 91,131,3 344,201,3 268,481,3 58,240,4 321,507,4 17,24,3 342,134,5 61,855,4 143,527,4 23,661,5 107,281,3 94,517,4 275,382,2 186,426,5 12,314,5 331,97,5 11,171,4 346,21,5 200,7,3 89,854,5 192,1131,3 98,202,4 121,707,5 14,741,2 221,1238,2 56,55,3 331,594,4 5,497,4 338,57,3 267,153,4 101,201,4 212,473,2 72,195,4 282,69,4 121,211,5 200,453,2 297,651,3 6,9,4 313,28,5 129,1276,4 200,274,4 303,680,2 129,747,4 117,175,5 181,236,3 12,793,4 241,933,5 68,1133,5 76,152,5 150,195,4 278,201,4 232,957,5 283,681,3 180,300,2 285,418,5 326,13,4 255,194,5 330,1099,2 101,185,4 118,337,1 233,315,4 294,377,4 13,99,5 183,1005,3 215,720,4 129,147,4 129,228,4 157,99,5 221,971,2 121,791,3 58,13,5 30,704,5 253,500,3 296,474,5 192,327,3 291,27,4 0,48,3 241,1151,5 266,558,3 81,704,3 291,1038,4 13,454,4 307,510,5 235,169,5 333,3,3 129,1214,2 144,202,5 155,204,3 339,434,4 93,384,2 93,108,4 167,987,2 312,150,1 95,644,5 307,108,3 93,392,3 20,994,2 4,233,2 316,349,5 101,61,3 117,155,5 275,785,3 115,258,4 80,92,3 91,594,3 249,110,4 343,214,3 319,147,4 78,123,5 93,312,4 0,205,4 127,965,4 268,663,5 317,794,2 15,939,2 53,275,5 290,1108,4 297,171,4 233,291,4 105,14,3 113,1103,5 298,136,4 300,770,2 72,6,4 331,43,3 307,1018,4 186,27,4 93,782,2 14,136,4 285,55,2 221,755,4 17,698,5 67,244,3 133,747,5 333,1206,2 242,222,4 321,478,5 333,480,5 242,12,4 267,15,3 89,240,5 266,483,5 232,47,5 76,3,3 183,91,3 147,595,5 58,663,4 109,733,2 284,627,2 243,100,5 313,365,4 302,653,5 185,332,3 91,784,3 150,485,5 5,187,3 292,124,2 193,50,4 290,551,3 86,789,4 298,49,4 55,0,4 276,8,4 173,822,4 91,1046,1 176,181,5 40,750,4 0,75,4 112,261,2 270,656,4 322,6,2 302,372,2 137,237,5 324,97,4 105,63,4 221,154,4 344,366,4 272,327,3 143,1038,4 156,126,5 210,309,3 55,30,4 167,1015,5 302,128,5 75,257,3 222,248,2 59,27,5 320,506,3 140,931,3 72,285,4 225,479,4 89,712,4 271,171,4 18,312,2 144,285,3 341,763,1 223,321,2 327,1125,3 267,551,2 178,353,4 307,525,3 266,692,4 344,401,4 5,212,4 11,142,5 209,159,4 289,545,2 292,299,2 57,247,4 302,180,5 297,497,5 346,500,4 235,171,3 101,120,3 289,403,3 91,122,2 150,273,5 5,431,4 255,1288,4 42,215,5 188,631,5 262,513,3 21,116,4 249,43,4 268,187,2 277,97,4 154,293,3 139,333,2 17,189,4 238,197,5 103,341,3 250,257,3 71,63,5 304,337,3 71,565,4 338,225,2 0,71,4 193,510,4 315,548,5 200,149,4 205,1126,4 47,186,5 278,417,3 93,152,5 216,52,1 93,764,3 249,484,4 78,287,3 229,392,3 127,63,5 310,366,3 75,517,3 61,152,4 5,514,4 214,10,2 144,568,4 212,714,5 93,1198,3 9,293,3 343,180,3 52,99,5 19,677,4 206,293,3 122,284,5 255,1027,4 173,93,2 4,153,3 307,487,4 221,435,4 199,6,4 64,120,4 6,484,5 294,842,4 62,110,3 6,510,5 197,10,4 294,1502,2 266,27,4 90,98,2 150,320,4 12,301,5 292,1097,2 41,130,2 327,1134,1 13,518,5 233,141,2 229,153,4 151,97,2 163,312,5 54,143,5 317,1013,2 2,331,1 289,817,3 124,174,2 242,92,2 20,669,3 267,227,4 6,653,5 81,177,4 317,523,3 88,380,4 300,122,4 192,672,4 0,184,4 322,78,4 20,218,5 196,327,4 183,14,3 312,481,5 108,822,3 151,166,5 296,628,3 166,1146,4 
263,523,3 279,570,3 221,576,1 20,590,3 209,500,4 279,229,3 85,285,3 319,173,4 143,49,5 255,96,4 64,426,5 197,428,4 183,216,3 150,708,5 17,529,4 42,723,4 85,318,3 241,304,5 96,27,5 113,194,4 187,68,4 300,229,4 84,240,3 128,312,3 105,76,4 260,747,3 187,6,5 12,207,5 341,287,5 298,285,4 310,203,5 124,812,1 275,462,4 12,420,2 140,471,5 221,549,3 190,895,3 143,515,2 215,1046,3 150,212,5 143,844,4 3,355,3 95,63,5 159,78,4 48,368,1 109,331,3 208,350,2 177,1003,4 343,96,3 10,202,4 240,306,4 238,311,2 275,718,3 17,190,4 140,534,5 17,970,4 161,41,3 341,590,3 277,524,5 101,216,2 15,446,5 342,81,5 108,356,2 300,731,4 302,201,5 249,377,4 233,506,4 216,67,3 86,522,5 94,25,3 244,93,2 94,288,2 333,1007,4 200,895,3 125,322,3 149,474,5 58,870,2 226,8,3 168,602,5 292,552,3 200,189,4 57,7,4 302,839,2 327,1105,2 57,849,5 298,100,2 338,572,3 234,197,3 57,346,3 290,454,5 177,274,5 98,1118,4 295,291,5 342,568,3 17,512,4 311,237,3 61,293,1 96,427,4 214,63,4 342,46,4 333,222,5 348,410,4 310,432,3 235,133,4 26,595,2 63,210,4 143,712,4 109,21,4 108,664,5 0,95,5 266,596,3 88,126,5 58,257,3 238,1244,5 221,95,5 7,293,3 94,136,3 279,565,4 310,468,5 10,520,2 221,1225,4 183,644,3 16,0,4 222,68,5 63,428,4 261,251,3 86,153,4 285,95,4 206,52,1 333,7,4 78,936,2 275,146,4 313,327,4 126,285,1 93,202,5 347,627,4 347,369,4 205,895,4 84,507,2 311,638,5 58,1008,4 343,1282,2 187,210,4 342,370,2 183,971,3 307,745,4 61,297,4 243,1016,4 134,53,3 13,150,5 326,468,4 147,70,5 5,155,3 51,256,3 129,57,2 221,412,3 75,11,3 43,316,4 292,171,5 94,31,1 129,46,3 11,96,5 37,98,5 12,408,3 197,187,5 327,143,4 11,203,5 240,749,5 215,627,4 200,356,4 193,581,1 71,44,5 307,735,3 127,426,5 345,581,3 177,545,3 127,421,4 43,81,4 279,418,3 12,909,2 311,1020,3 84,638,3 221,312,4 106,320,2 4,435,5 101,417,3 308,325,5 197,96,3 338,96,4 279,315,5 302,324,1 188,59,3 278,1208,4 292,67,3 221,233,2 183,854,4 291,656,5 322,1016,3 302,449,3 275,1209,2 299,321,4 58,971,4 285,703,2 248,12,4 233,520,3 82,293,3 7,456,1 278,1499,5 199,217,5 149,267,5 256,304,4 7,384,1 27,99,5 118,85,4 154,327,2 245,367,1 272,344,3 173,116,5 229,124,5 231,920,4 197,322,2 13,12,4 275,447,4 94,741,4 102,125,5 93,100,2 233,1462,5 266,186,5 243,323,4 278,576,1 237,475,3 157,227,5 285,814,3 194,297,4 189,281,3 91,41,4 268,210,4 187,627,5 342,711,4 342,374,2 320,482,5 44,120,4 278,396,4 22,482,4 321,191,5 12,317,3 174,55,2 90,330,5 158,1001,3 184,195,4 243,178,5 347,404,4 295,949,4 42,370,4 15,229,5 261,274,4 101,685,3 213,407,4 276,150,3 2,327,5 118,525,2 48,167,5 62,287,3 319,117,1 91,824,4 71,67,3 270,132,4 193,316,4 12,897,1 278,1494,4 307,655,3 252,567,4 266,1400,4 243,225,1 294,400,3 267,140,3 343,118,5 32,327,4 343,618,4 346,1034,3 234,21,4 312,113,4 89,612,4 71,11,5 319,172,5 270,434,4 93,643,5 193,647,4 129,275,4 339,0,5 48,55,5 306,142,3 262,485,4 0,212,2 307,825,3 150,463,4 344,87,4 264,327,4 239,299,3 81,190,4 24,497,4 150,99,3 221,39,1 196,384,2 292,654,3 279,403,3 209,403,5 233,212,3 317,247,3 103,543,3 335,618,3 307,474,4 22,227,4 233,849,2 93,231,3 0,232,2 143,303,4 294,1039,2 129,227,4 274,629,3 62,249,5 19,193,3 353,461,3 307,321,2 144,184,4 109,1205,3 302,409,4 316,299,4 179,630,5 342,75,4 344,47,5 300,88,2 180,1311,1 101,232,3 63,221,4 302,232,4 40,312,3 228,259,1 143,854,4 58,568,4 190,312,5 304,211,3 13,407,5 278,215,3 315,734,4 192,287,1 215,230,2 200,536,3 347,239,3 21,682,1 103,822,1 340,293,3 168,171,5 212,508,4 100,839,3 89,312,5 158,830,2 64,190,4 113,203,3 214,204,3 183,258,3 120,124,2 278,991,4 93,1073,2 58,228,3 72,478,5 58,6,4 253,471,3 28,357,2 144,552,3 229,198,3 
233,730,2 51,115,4 217,32,4 202,116,4 98,345,4 145,306,3 58,99,5 297,143,4 342,654,5 296,97,5 118,865,3 23,128,3 180,823,1 333,249,3 31,249,4 180,1383,1 91,47,4 196,747,3 115,839,1 250,120,4 1,311,3 157,67,3 274,303,3 144,173,5 278,1486,1 233,7,5 192,233,3 338,269,2 63,7,4 353,132,3 214,238,3 183,530,4 232,173,5 221,181,4 6,167,5 89,434,5 139,288,4 314,222,5 292,780,2 239,287,5 221,828,3 310,377,5 200,270,4 160,55,3 180,881,1 310,950,3 6,268,3 42,240,4 58,207,5 322,245,4 223,324,1 317,196,5 177,1011,4 213,530,4 48,312,3 94,736,3 344,173,4 332,65,5 173,254,5 20,568,3 327,401,3 279,322,2 307,232,3 313,622,5 291,509,4 275,683,4 236,1191,5 6,284,5 57,339,4 61,1059,1 150,223,5 95,99,5 353,241,5 78,268,5 90,130,2 298,644,4 343,282,4 61,1135,3 151,219,5 91,1040,3 341,722,3 267,948,2 233,500,4 177,134,2 266,469,4 158,870,4 235,78,4 253,182,4 237,180,3 328,128,3 292,281,2 134,175,4 203,257,2 293,546,3 150,493,4 310,257,4 243,1177,3 269,595,5 82,891,2 295,1141,5 300,482,4 129,221,4 101,172,3 298,1226,1 15,227,5 333,460,3 342,186,4 255,1118,3 20,216,3 275,21,5 314,233,3 193,29,3 243,53,2 86,628,4 48,212,3 254,248,5 324,864,3 177,209,5 278,166,3 10,46,4 310,81,5 291,404,3 57,1105,4 56,865,3 161,173,4 309,831,1 344,472,2 353,256,3 163,321,4 116,420,5 329,446,4 338,474,5 4,41,5 81,10,4 208,897,3 213,284,5 59,750,2 219,300,4 213,1038,4 132,314,4 12,763,2 337,203,3 177,192,4 10,203,3 185,590,4 192,116,4 263,515,5 247,99,4 177,283,4 116,167,5 223,568,3 222,247,1 161,49,5 128,338,2 6,431,4 76,180,3 278,661,2 286,38,5 156,272,5 330,189,3 176,0,3 285,233,3 9,655,5 88,116,5 326,142,4 115,805,4 206,312,4 86,518,4 27,173,5 180,1127,1 296,55,5 245,99,4 187,172,5 118,485,4 282,587,4 187,209,4 290,769,4 108,277,3 10,401,4 247,927,3 257,312,5 255,575,3 186,791,5 290,174,2 188,651,5 312,162,2 180,1370,1 238,478,5 118,545,4 143,520,4 304,297,4 342,822,3 325,731,5 94,674,2 278,407,5 275,455,2 344,150,5 183,603,4 276,128,4 196,719,2 252,293,4 302,8,5 295,653,5 31,245,4 278,529,3 48,589,1 94,473,4 317,1031,3 317,193,5 285,682,5 190,306,3 47,49,4 297,194,4 170,244,3 338,81,4 80,925,3 206,41,4 268,152,3 302,1070,2 6,53,3 338,197,5 10,434,4 22,420,5 187,510,2 136,326,4 75,689,2 37,719,5 220,1034,3 58,927,4 12,813,5 317,314,5 341,606,3 199,120,5 268,653,4 6,88,5 135,846,4 253,242,2 182,482,5 83,264,5 150,192,4 69,750,4 313,937,3 290,1238,2 233,191,3 37,66,4 155,11,3 55,227,3 353,462,4 310,177,5 121,213,2 313,590,5 0,257,5 229,208,1 41,141,4 327,749,4 129,356,5 266,463,5 266,323,3 189,299,4 275,852,5 192,464,3 58,125,5 346,415,3 108,68,4 295,947,1 27,142,4 350,339,1 58,759,2 317,502,4 180,334,1 47,653,5 93,692,4 214,449,2 81,517,4 154,324,2 302,317,5 22,27,3 0,80,5 302,415,3 319,471,3 56,409,3 304,191,2 292,143,4 12,386,3 326,214,4 326,885,2 298,366,4 8,285,5 110,325,3 245,450,2 259,1024,5 292,238,3 304,150,4 115,1252,2 123,165,5 160,212,2 197,14,3 312,131,5 267,716,1 112,99,4 41,280,3 233,708,4 127,608,4 292,1100,3 155,63,3 193,807,2 24,454,4 281,257,5 326,81,2 63,55,5 200,275,5 313,125,2 324,176,5 135,746,4 180,1317,1 279,264,4 270,282,4 261,404,2 5,204,3 307,282,3 5,132,4 129,157,5 84,288,3 238,529,5 121,552,3 6,529,5 333,58,5 17,13,5 94,131,3 312,669,3 278,411,3 183,461,4 101,684,3 270,731,4 9,63,4 163,124,5 275,229,4 215,366,3 48,627,4 140,49,4 113,190,3 81,126,2 206,1377,3 54,55,4 88,220,1 333,473,3 159,20,1 310,325,2 285,190,4 22,176,4 31,99,3 331,155,4 235,477,3 250,49,5 6,488,3 58,133,5 42,116,4 98,257,5 294,152,5 0,77,1 5,69,3 12,893,1 278,752,2 10,734,3 91,777,4 256,150,4 22,704,4 319,410,3 287,201,5 233,920,4 
357,468,4 12,340,2 342,274,5 325,645,2 255,171,3 89,271,5 293,117,3 320,29,4 338,132,4 204,288,4 235,306,4 243,746,4 302,193,5 5,480,5 235,203,3 89,268,5 180,318,3 192,267,3 158,450,5 311,674,5 233,78,3 213,6,5 302,479,4 6,656,4 304,805,3 17,88,3 180,675,3 179,420,5 118,271,5 159,431,3 226,243,3 188,1097,4 290,1252,3 53,326,5 94,416,3 317,238,4 302,505,4 339,404,5 61,305,4 278,701,4 180,741,4 196,186,5 9,701,3 295,278,4 268,178,4 4,421,4 57,662,2 342,19,5 269,440,5 311,1123,4 309,747,3 235,734,5 12,451,3 290,469,3 91,280,3 294,411,2 345,244,4 45,126,5 61,99,4 342,11,5 102,299,3 173,368,1 84,565,3 216,553,3 30,503,5 81,475,3 90,264,5 47,201,4 129,2,5 82,21,5 58,187,4 235,660,3 151,254,5 351,54,1 261,789,3 325,502,3 144,199,4 215,171,4 232,56,5 313,1519,3 130,312,5 306,426,3 359,333,4 291,123,4 151,1027,5 313,267,5 159,174,4 276,116,4 317,48,3 353,693,5 293,249,5 263,1269,2 275,248,4 221,1088,1 217,503,3 55,622,3 12,24,1 12,631,3 291,150,5 129,373,4 23,507,4 317,383,3 92,274,4 6,141,3 143,292,4 307,1045,4 77,268,3 275,227,4 141,894,4 233,293,3 233,1122,3 86,1189,4 285,789,1 317,933,4 214,473,4 67,1046,1 6,422,5 109,203,3 345,61,3 71,180,1 302,159,4 199,356,5 261,844,4 234,473,5 325,525,5 124,948,3 6,155,5 346,78,5 12,415,3 126,226,4 12,501,5 307,489,4 221,671,1 250,131,5 108,741,5 96,654,5 221,93,3 302,233,5 294,1458,5 324,142,1 185,741,3 67,1027,4 14,695,2 255,661,2 27,270,4 48,128,2 126,299,5 101,325,3 143,899,4 313,203,5 292,152,4 78,285,5 314,301,5 180,1201,1 22,187,3 82,322,4 58,47,5 293,6,4 256,306,4 48,2,3 55,97,4 42,224,2 279,124,2 345,3,4 291,428,5 150,1064,3 177,848,3 144,280,4 326,89,3 129,182,5 212,10,4 41,450,2 267,1058,3 278,726,3 17,193,3 311,56,5 68,108,3 111,305,5 307,169,3 324,473,5 313,784,3 326,948,4 93,828,2 221,418,2 41,24,3 295,684,4 188,273,4 279,380,3 143,21,5 108,244,3 118,657,5 171,696,3 17,514,5 221,363,1 86,392,4 243,207,5 362,390,2 331,147,5 271,473,5 94,522,4 101,182,4 293,298,3 26,924,3 163,983,4 120,8,5 339,203,4 114,529,5 333,71,3 267,30,4 59,513,4 144,636,3 89,5,4 343,507,4 97,69,3 278,999,4 278,389,3 156,406,4 333,288,3 306,526,5 326,236,4 307,115,4 210,686,2 188,172,5 270,199,5 341,1006,4 327,187,5 143,203,2 42,297,4 267,825,1 148,268,5 91,251,4 338,213,3 17,285,5 168,180,5 188,280,2 253,237,3 249,1013,4 129,945,4 171,656,3 9,614,4 215,94,3 94,23,3 359,115,3 300,691,3 344,120,3 163,244,5 322,743,5 263,557,5 196,339,2 236,27,4 195,305,4 178,304,4 341,131,5 55,81,4 345,186,3 117,546,5 331,1149,3 249,741,3 192,870,3 292,446,4 347,927,5 338,11,5 291,342,2 208,348,2 22,98,4 48,257,2 233,489,4 200,446,5 42,500,4 289,549,3 275,289,4 345,321,3 291,251,3 362,404,4 354,299,4 350,878,5 210,262,3 331,695,3 117,184,5 307,482,3 99,688,3 240,879,5 252,565,4 291,918,5 333,206,4 222,254,4 312,173,4 243,1097,5 298,500,3 327,283,3 17,70,4 231,470,3 310,173,5 6,607,4 200,707,4 344,284,5 183,1135,4 129,48,4 231,21,3 45,287,2 212,150,5 209,215,4 74,474,5 89,322,3 13,6,5 0,211,4 271,771,2 331,1015,5 116,596,4 183,286,4 78,812,5 9,199,5 118,143,4 157,276,4 58,569,4 321,184,5 128,994,2 41,691,4 297,495,5 317,480,4 292,133,5 59,1125,4 284,537,5 344,918,2 71,69,4 341,24,2 93,30,4 222,716,1 338,189,4 129,345,4 217,264,3 361,332,5 333,449,1 175,750,1 144,1211,2 188,814,3 59,484,4 292,256,2 305,99,4 307,318,4 129,52,3 319,500,3 320,1193,5 154,318,3 337,309,3 213,324,3 48,417,3 56,1027,3 235,184,5 137,616,4 231,172,4 330,99,4 296,156,2 94,87,4 267,332,4 57,155,5 298,377,3 293,326,3 285,1090,4 233,600,3 206,155,2 268,491,4 356,283,4 254,52,3 91,716,3 12,160,5 64,196,5 16,236,2 166,697,4 
312,608,3 41,98,5 64,209,4 302,365,3 232,97,5 353,85,5 267,1089,2 6,225,5 80,6,4 322,885,3 118,86,5 298,417,4 173,455,1 7,88,4 78,899,4 231,214,3 270,337,1 355,312,5 176,270,2 180,457,3 5,150,3 362,1494,5 116,750,5 305,743,4 89,493,5 116,367,3 180,1385,1 232,417,4 43,541,3 12,302,4 126,242,5 91,560,3 235,863,2 233,659,4 362,1167,2 176,149,4 116,120,4 300,513,3 193,0,4 275,495,4 220,1209,3 362,672,2 217,515,5 84,413,4 59,497,5 187,232,3 143,959,2 183,87,3 82,411,1 193,630,2 10,750,2 177,1050,3 317,1022,2 327,131,5 141,27,4 292,656,4 12,436,1 245,230,1 345,726,1 322,198,4 193,509,4 98,122,3 279,482,4 313,1275,4 304,942,2 105,922,4 173,382,1 61,954,4 9,222,5 233,525,3 91,659,4 360,272,3 0,142,1 331,257,5 285,558,4 307,804,4 325,447,3 322,21,5 364,300,5 22,314,3 255,782,4 2,333,3 194,98,3 235,222,5 37,388,5 94,504,3 350,872,3 270,82,4 296,249,1 90,350,4 118,471,4 41,1050,4 269,702,4 310,525,5 93,561,3 351,745,4 349,173,5 232,285,3 249,0,4 153,805,4 254,568,1 228,874,1 331,1012,3 57,236,4 319,975,2 306,168,5 109,793,3 243,817,2 329,595,5 342,473,5 58,24,4 63,172,5 279,933,2 282,626,4 313,119,3 335,12,3 311,586,3 221,410,3 180,932,1 269,740,5 58,64,4 173,62,4 312,43,3 263,207,5 58,381,4 300,510,4 340,876,3 59,1123,4 302,848,3 0,150,4 12,785,3 55,93,4 58,174,4 57,245,5 233,94,3 364,14,3 163,147,5 193,201,3 150,424,4 82,1042,3 115,179,5 48,346,3 232,643,5 101,270,2 143,284,4 70,474,5 144,1076,3 298,741,4 262,78,4 275,630,3 307,967,4 0,50,4 268,434,3 94,735,4 310,707,5 243,89,4 312,317,4 257,309,5 122,484,5 333,182,4 129,742,2 268,63,4 327,450,4 42,299,5 129,11,4 89,184,5 11,131,5 188,215,5 206,134,2 314,54,5 4,138,3 194,468,3 87,301,3 152,677,2 147,1011,4 269,154,5 217,272,4 144,673,4 222,368,1 221,3,3 127,784,2 191,126,4 227,650,4 353,190,4 327,81,4 343,715,3 37,449,1 268,136,4 247,54,4 309,303,5 12,611,4 129,334,3 61,773,1 134,76,4 293,263,2 88,812,5 193,432,3 93,38,3 317,395,1 150,222,5 180,1119,1 311,233,5 176,174,5 296,49,5 312,161,3 274,88,3 292,201,3 329,464,5 322,99,4 134,565,3 228,357,1 287,899,5 294,1220,5 161,150,3 86,54,4 127,377,5 267,266,3 281,301,5 360,221,2 279,285,4 333,275,4 360,386,3 86,384,5 343,174,5 311,480,5 129,1046,5 356,221,5 189,117,3 307,191,5 105,7,4 356,927,4 319,677,3 216,116,4 215,763,2 229,632,4 262,81,4 232,494,4 313,671,5 203,309,1 278,239,4 187,194,3 176,178,5 180,329,1 183,457,3 85,871,3 78,312,2 52,180,4 124,203,5 325,527,3 275,746,4 237,299,4 38,332,4 200,174,2 275,745,4 12,333,1 116,11,5 300,650,5 279,789,4 2,349,3 344,955,4 173,392,4 180,865,1 289,157,5 359,237,4 246,256,4 161,116,4 42,335,4 257,310,4 347,818,4 190,327,3 214,482,4 183,664,2 113,156,2 183,51,4 48,820,1 232,211,5 344,172,5 54,1088,1 292,98,3 12,352,4 98,195,4 48,324,3 366,1011,4 122,126,5 193,382,1 140,824,4 322,149,4 278,1179,2 329,574,4 108,1243,3 176,317,4 84,791,4 247,248,4 297,236,5 275,138,4 22,256,3 51,656,5 200,1193,4 329,7,5 267,402,4 69,175,4 279,52,5 233,151,4 12,758,2 324,180,4 267,268,4 153,201,3 221,688,4 353,268,4 41,221,4 98,231,4 95,169,5 110,1023,3 86,299,3 327,348,2 12,189,4 346,226,4 116,257,4 228,285,4 328,590,2 344,507,4 290,395,4 319,1187,4 93,33,1 71,514,4 217,694,3 17,11,5 345,1109,1 217,761,4 177,57,5 333,301,5 302,48,2 144,1207,4 41,924,4 328,78,4 285,10,5 32,342,4 325,78,4 338,182,4 307,520,3 263,1224,3 129,671,5 113,182,5 275,228,3 10,382,2 255,201,3 272,310,4 115,660,4 12,136,5 331,81,5 5,478,5 90,688,5 78,136,4 307,264,3 9,650,4 294,1400,5 17,180,3 48,298,2 342,422,5 91,422,3 243,761,3 362,287,4 42,422,4 200,1424,3 83,30,4 58,1046,2 233,156,2 299,327,3 
344,1081,2 190,268,3 267,113,5 146,268,4 70,221,3 75,58,4 231,461,4 101,247,3 204,325,4 199,24,4 310,430,4 196,194,5 355,936,2 302,1015,3 100,814,3 118,1258,3 263,55,5 209,446,5 275,62,3 307,177,4 63,180,4 302,1013,3 270,283,3 200,512,3 91,627,4 53,332,5 300,186,4 80,409,4 30,497,4 20,233,5 310,211,3 263,791,5 131,136,4 306,462,5 310,274,4 21,402,5 42,317,5 200,60,2 185,769,2 124,474,1 338,72,3 144,119,2 50,131,4 129,83,4 346,143,5 109,714,2 99,904,3 310,469,3 188,247,4 364,1016,4 43,226,4 200,1097,2 294,24,5 192,275,4 124,1092,1 9,710,4 275,271,5 42,1052,3 341,426,4 13,210,4 7,189,4 279,143,2 332,738,5 10,720,3 93,379,3 7,685,3 114,461,4 263,558,5 298,296,3 298,1020,3 267,482,5 94,201,4 23,24,4 158,545,4 173,311,5 367,378,4 292,942,2 185,549,4 249,581,4 184,637,4 233,565,2 220,256,4 186,658,5 312,186,4 115,198,4 108,8,3 173,411,1 6,207,5 370,96,5 295,254,2 279,81,2 270,274,4 109,790,2 58,925,1 216,575,1 144,664,5 333,203,4 41,567,4 199,142,5 88,386,5 310,587,4 234,268,4 286,155,5 343,203,4 42,288,4 98,10,5 144,158,4 199,81,5 268,315,4 12,516,5 183,207,4 326,143,4 217,516,3 5,486,5 278,791,3 267,160,3 84,123,5 74,471,4 17,482,4 233,290,3 195,237,4 317,749,4 1,280,3 294,101,4 275,80,4 193,1408,2 290,1077,4 144,895,2 159,761,3 289,215,4 173,1253,1 326,434,4 144,470,4 82,464,4 276,590,4 213,55,5 344,1314,3 329,43,5 327,1276,3 5,130,5 333,692,3 155,191,4 90,478,4 129,21,5 74,270,5 327,264,5 290,378,3 221,814,2 346,167,5 327,509,5 289,422,5 11,156,5 150,113,5 293,602,5 243,231,4 129,62,4 258,761,2 57,424,5 244,111,4 183,1231,3 121,726,4 143,128,4 304,356,5 15,95,5 0,174,5 6,617,4 15,545,4 79,44,4 172,293,5 103,1016,1 160,522,3 178,1315,3 11,70,4 58,140,4 338,635,4 320,484,4 200,203,4 58,515,4 55,117,4 190,331,2 64,317,5 248,992,3 144,228,3 261,335,3 234,51,4 115,603,3 48,475,1 285,324,1 220,587,3 196,343,4 197,22,4 206,27,4 344,250,5 144,759,2 315,1083,4 76,178,5 82,574,4 327,54,4 177,1034,4 345,32,5 88,25,3 333,1314,4 262,68,5 52,198,5 311,497,5 212,0,2 269,172,5 84,689,2 84,403,3 183,948,3 329,822,3 229,1049,3 183,254,3 311,131,5 321,178,5 31,117,3 183,1009,4 17,179,4 54,88,5 372,587,3 273,545,3 362,96,2 176,196,4 86,795,4 23,476,5 136,260,5 252,236,4 297,199,3 343,596,2 304,637,5 84,498,4 43,167,5 32,257,4 292,7,3 364,108,2 89,41,4 98,962,3 175,249,4 233,413,4 307,514,3 313,982,4 84,1167,3 136,49,5 129,1015,4 116,1056,2 224,63,4 108,116,5 84,198,5 14,923,3 178,268,3 153,332,3 169,332,4 48,201,3 61,182,4 342,1131,4 42,865,4 94,470,5 115,293,2 94,1,2 222,119,2 150,418,3 290,1097,4 302,78,5 14,288,3 307,442,3 42,315,5 333,76,3 25,863,2 129,778,4 137,513,5 233,236,3 296,78,3 334,323,1 333,619,2 275,596,3 68,688,3 296,863,3 152,63,5 183,698,5 57,567,4 177,479,3 347,287,5 61,172,5 306,427,4 221,945,2 116,239,3 9,704,4 93,1031,2 325,184,5 58,738,4 306,188,4 338,430,4 317,181,4 320,493,4 216,184,3 71,434,5 248,430,5 298,1140,4 12,887,2 150,971,4 234,704,5 159,3,4 11,14,5 61,77,2 313,41,5 88,150,5 229,581,4 119,8,4 315,613,2 330,453,3 39,270,2 344,171,4 54,1015,1 91,1212,2 72,27,3 372,693,5 372,706,4 335,404,3 324,483,5 7,300,4 140,236,4 140,1257,4 344,215,5 86,87,5 208,1104,2 6,586,4 289,97,4 15,691,4 174,175,3 184,196,5 327,330,4 243,19,4 180,1047,2 245,839,4 268,292,3 245,384,1 220,788,4 129,149,5 108,175,5 173,331,5 93,27,4 84,1097,4 47,521,2 236,97,4 292,22,4 312,493,3 177,69,4 292,321,2 5,527,4 235,14,5 37,392,5 275,736,4 142,327,4 275,853,4 89,692,3 17,959,4 250,221,4 6,171,4 43,105,2 84,381,4 183,12,3 285,19,4 55,218,5 344,402,3 44,1060,2 72,155,4 253,124,3 183,201,3 359,308,2 213,235,5 
17,178,4 56,1093,2 362,615,3 199,28,4 147,227,4 62,322,1 269,65,4 5,27,2 290,833,3 313,142,5 81,519,3 345,217,3 102,221,3 188,656,5 153,181,5 12,383,2 290,1082,3 147,472,5 270,1090,4 263,218,5 153,49,5 369,55,2 26,297,4 267,394,2 81,479,4 93,117,3 341,1367,5 298,888,3 176,244,3 43,184,4 370,174,1 185,236,2 101,175,3 42,237,2 325,133,3 242,712,3 348,104,2 81,24,2 256,49,5 13,69,1 345,16,1 5,466,4 226,275,4 156,297,4 289,1335,3 188,846,4 85,888,5 248,332,4 41,210,4 151,789,5 327,1247,3 192,484,5 58,708,5 292,227,3 345,976,3 206,182,2 238,1019,3 300,76,3 247,182,5 287,11,4 302,450,5 254,435,4 209,201,5 362,101,4 298,478,4 91,833,1 322,326,4 373,1046,3 248,627,3 221,400,2 121,69,5 121,510,5 22,31,3 9,695,4 143,297,3 195,662,5 151,965,5 156,739,2 17,609,4 106,257,4 124,998,4 333,474,4 206,170,3 84,479,4 110,268,5 11,190,5 303,321,4 341,543,1 200,481,4 332,747,4 166,1125,5 302,1223,2 221,61,4 5,135,5 222,275,4 339,1132,5 69,545,2 243,763,5 354,309,4 229,184,4 294,209,4 307,97,3 209,48,3 129,1227,3 248,455,3 214,229,3 206,1117,3 135,312,2 275,116,4 215,823,3 268,49,3 292,465,3 274,141,2 135,1141,4 111,346,1 291,1013,3 76,175,4 313,940,3 180,828,1 199,32,4 290,550,2 300,151,3 290,236,4 333,715,3 215,217,4 357,481,2 12,242,3 99,288,3 118,11,3 267,557,3 93,272,4 199,204,4 332,97,4 255,160,5 264,408,3 267,107,3 319,2,4 89,177,5 346,608,4 285,404,3 135,222,4 343,536,4 243,21,4 349,0,4 298,126,5 220,128,5 327,402,3 180,20,1 253,173,5 157,565,3 261,418,3 242,1465,3 229,6,3 300,410,1 103,346,2 347,0,4 207,301,1 150,674,2 104,257,5 71,209,4 321,88,3 279,179,4 9,366,4 155,136,4 371,158,5 362,315,3 177,865,4 180,111,1 345,231,3 113,506,3 13,13,3 270,247,4 353,846,3 262,327,4 257,892,1 99,690,4 258,11,5 89,603,5 129,366,4 326,292,3 56,172,5 238,492,5 317,304,2 12,775,2 338,31,5 252,187,4 310,198,4 310,225,4 282,41,5 124,385,3 360,25,3 282,23,4 333,209,3 180,1389,1 335,201,1 253,221,4 275,543,3 88,82,4 267,264,3 310,747,4 269,16,2 343,195,4 57,479,3 327,30,4 233,220,2 59,632,4 1,12,4 320,179,4 130,0,4 306,90,4 263,708,5 180,1281,1 101,402,3 196,807,3 307,486,4 373,125,3 5,116,2 366,249,5 298,256,2 24,479,4 86,207,5 276,747,3 331,654,5 15,734,3 235,418,5 10,735,4 0,106,4 5,31,4 71,123,4 213,951,3 304,51,2 344,296,4 268,901,5 335,863,1 313,401,4 32,312,5 344,1016,2 345,391,3 122,49,3 160,285,2 329,203,5 233,780,2 180,147,2 261,237,4 232,491,5 199,293,4 212,134,5 129,595,4 345,166,2 141,361,3 324,134,5 82,27,4 89,520,4 12,529,5 313,764,3 360,274,4 333,435,3 313,146,4 362,905,2 91,182,4 246,271,4 229,50,4 43,754,3 97,208,2 325,673,3 302,95,5 91,317,2 11,195,5 93,63,5 326,287,4 287,316,4 372,723,5 336,105,2 178,300,4 266,6,5 240,331,3 86,181,4 310,678,4 57,19,1 275,76,3 193,449,1 235,50,5 43,8,5 353,479,4 302,61,2 133,299,3 91,691,4 337,603,4 223,85,3 340,879,5 193,217,4 93,1205,3 331,299,5 179,110,5 320,708,4 107,180,3 198,241,5 45,312,5 84,854,3 187,503,3 173,332,4 152,21,2 118,187,4 44,475,3 286,345,5 13,497,5 188,20,2 362,188,5 366,245,4 267,81,3 13,180,5 199,569,4 90,528,4 194,257,4 193,558,2 300,280,4 59,271,4 277,346,4 180,1369,1 343,476,3 43,208,5 37,224,5 17,275,5 90,81,5 335,394,2 304,155,4 101,809,2 180,1271,1 155,316,4 364,257,4 31,121,2 5,14,3 235,755,1 233,964,3 231,497,4 129,624,5 290,40,4 343,24,4 221,231,4 12,906,1 377,553,3 213,326,5 278,761,3 362,1006,5 296,134,4 12,231,3 12,860,3 86,78,5 194,60,3 157,10,4 12,47,5 188,120,2 343,662,5 13,921,4 180,839,1 180,1258,1 93,49,5 205,903,1 88,706,5 61,1127,2 287,339,5 328,514,4 353,881,4 290,100,4 152,126,3 284,167,4 302,152,5 12,504,3 245,674,4 92,475,4 
267,128,2 324,1410,4 225,6,4 296,174,4 343,450,4 232,68,5 86,683,5 69,471,3 180,1377,1 259,299,3 199,44,3 245,719,1 91,526,3 329,49,5 81,102,2 298,495,3 27,217,3 63,82,3 261,1053,2 58,101,2 293,324,3 293,470,4 343,57,3 275,45,3 20,973,3 42,992,3 71,643,4 272,901,5 53,1015,4 275,264,4 327,161,4 89,812,4 160,126,3 304,244,1 68,8,4 272,899,3 94,13,5 176,288,2 333,921,4 63,419,3 118,561,4 22,418,3 153,479,5 270,24,3 275,230,3 59,670,4 278,463,4 41,11,4 319,575,3 278,225,4 377,62,3 346,464,3 14,507,2 327,369,3 203,291,5 377,366,3 294,484,4 254,762,5 66,120,4 327,442,4 56,236,4 19,404,3 242,27,4 93,1209,3 327,370,4 187,147,4 307,203,4 343,567,5 129,684,3 205,257,4 118,110,5 346,207,2 150,513,4 12,20,3 372,597,3 209,185,4 143,273,3 57,812,5 75,473,5 293,146,4 183,76,3 136,221,5 310,526,4 258,234,2 42,596,3 91,195,4 253,161,3 94,82,5 103,474,4 213,247,4 127,789,4 292,54,4 194,1012,3 10,134,4 177,177,4 269,155,5 268,1479,1 150,233,4 173,1000,1 150,427,5 275,163,4 129,332,5 331,287,5 188,142,5 42,846,5 187,12,4 171,484,3 261,43,2 134,801,2 303,274,4 307,392,4 341,1070,4 75,332,3 344,987,2 302,256,4 292,678,2 1,279,3 367,49,4 343,706,4 91,551,3 39,302,4 287,156,4 232,116,3 89,477,5 111,324,1 44,275,5 2,340,1 158,876,3 58,500,1 116,357,4 206,149,3 317,400,3 21,998,4 266,383,3 123,156,2 5,134,5 68,47,5 262,1443,3 81,1000,1 56,6,4 12,868,3 285,88,4 150,1296,1 261,269,3 217,409,3 371,218,5 212,203,5 76,275,2 6,7,5 298,581,2 144,751,4 204,257,3 105,0,4 313,137,5 345,236,4 285,1502,3 338,100,3 292,30,2 179,68,4 346,316,1 236,152,3 294,153,5 215,203,4 290,1045,4 333,1171,3 328,257,3 145,261,4 143,193,5 91,530,4 312,486,3 314,45,4 343,105,2 269,1013,4 229,968,4 89,1100,4 275,233,5 324,167,3 14,753,5 323,297,5 72,47,2 231,314,5 327,97,4 127,587,5 42,314,4 26,280,3 188,99,4 295,14,3 341,517,3 331,331,4 338,692,5 296,447,3 302,259,3 180,323,1 243,286,3 150,505,4 250,182,5 158,224,4 341,155,4 193,116,3 84,1166,3 243,121,4 250,236,5 179,215,5 362,70,3 232,268,5 296,237,5 222,1299,1 184,317,4 307,86,4 261,144,1 296,234,2 208,303,2 294,1445,4 223,211,1 278,738,1 360,651,4 41,81,4 292,26,3 233,602,4 349,227,4 279,158,4 173,48,4 295,454,1 373,684,4 255,11,5 105,222,4 180,268,1 313,692,3 86,1073,3 365,852,5 74,107,4 84,497,4 269,285,5 127,422,4 298,484,4 377,201,3 289,472,1 232,96,5 292,404,1 91,294,2 298,714,4 129,677,4 183,257,3 182,224,1 279,6,4 373,230,2 253,258,2 148,304,4 290,1212,3 180,275,2 250,264,3 275,1012,3 66,1092,5 268,918,4 275,1171,4 40,169,4 275,1252,1 94,877,1 297,472,3 288,472,1 5,285,2 326,1066,4 233,603,5 344,196,4 196,778,2 119,256,2 292,615,3 173,195,5 177,681,3 136,171,5 127,299,5 252,126,5 87,320,1 221,448,4 246,110,5 59,175,4 91,312,5 255,384,5 342,24,2 313,214,4 114,171,4 262,209,3 304,248,3 243,61,2 266,567,4 86,809,3 329,180,5 133,257,4 12,60,4 345,132,5 107,120,3 367,636,2 304,659,4 297,285,4 255,48,4 285,276,4 285,106,1 326,31,4 200,127,2 287,14,4 307,609,4 333,386,4 203,314,4 256,164,4 346,434,5 180,826,2 12,783,1 25,507,3 117,510,5 238,113,3 338,588,5 170,326,4 377,7,4 13,508,5 61,32,1 63,283,4 268,176,5 199,150,3 369,264,5 326,477,4 9,508,4 107,274,5 310,364,4 198,323,1 362,54,5 179,55,5 59,193,4 13,120,3 17,135,5 269,221,5 267,16,3 322,202,5 333,501,3 353,715,3 335,570,1 143,32,5 225,168,5 300,167,4 98,684,3 180,1288,1 196,270,2 311,356,5 53,1011,2 199,37,3 68,1142,5 4,39,4 180,765,1 302,79,4 109,574,3 298,207,4 274,422,4 209,199,5 326,1096,4 98,6,4 63,239,1 100,1033,2 17,777,2 300,158,3 89,165,4 212,1,4 250,519,5 294,98,4 278,174,5 63,635,4 302,230,4 183,195,4 196,91,1 314,155,5 
22,661,3 290,55,5 4,89,3 145,1293,4 253,89,1 325,558,3 289,90,2 93,471,3 188,483,5 291,478,4 193,424,2 325,632,4 15,760,2 303,287,3 220,720,5 159,208,4 79,57,4 177,75,3 61,146,3 157,549,3 343,312,3 290,364,3 111,301,4 206,187,3 12,456,1 233,0,3 259,271,3 62,12,4 324,639,3 193,123,4 70,55,5 29,677,2 290,3,4 377,1266,3 176,259,2 20,291,3 325,131,4 243,507,4 127,601,4 275,778,2 262,244,4 322,254,4 91,985,2 275,281,4 185,1276,4 263,24,4 160,273,2 85,299,3 302,90,5 9,134,5 300,406,2 59,501,4 376,442,4 141,349,4 373,590,4 109,731,3 351,181,5 108,1012,3 327,349,3 180,282,3 353,630,4 333,1072,4 180,1372,1 53,120,4 292,11,4 278,1488,3 137,110,4 297,501,5 317,65,4 66,150,4 300,159,2 46,320,4 303,681,3 108,203,4 188,504,5 15,182,5 268,644,4 296,115,4 372,138,3 273,814,3 122,510,5 12,39,2 60,341,2 75,420,3 283,312,3 338,805,4 4,152,5 279,1046,3 198,321,2 342,142,4 302,925,2 231,275,5 0,217,3 150,416,3 269,240,5 268,271,3 338,247,4 45,327,4 197,1013,2 150,504,5 183,477,4 233,273,3 347,741,4 373,128,5 220,52,4 307,583,4 267,745,3 262,126,4 82,794,3 167,6,1 283,343,4 374,43,3 90,434,4 310,57,3 203,190,4 108,199,2 200,69,3 311,240,3 214,182,5 127,172,5 12,350,1 377,51,5 344,948,3 11,753,4 53,405,2 127,236,4 222,1013,4 196,32,2 269,799,5 372,153,5 23,274,5 213,1400,4 15,26,2 324,520,4 242,220,5 199,577,5 342,701,4 344,478,4 326,644,4 140,749,1 12,72,3 298,346,4 180,921,1 12,466,5 275,226,4 188,482,5 134,227,4 255,1039,3 83,150,4 206,244,3 188,95,5 134,641,4 83,522,4 268,212,5 306,153,5 116,287,3 42,647,5 358,249,4 372,1,4 326,172,4 325,81,3 189,293,3 71,519,5 377,355,4 90,337,4 362,1266,2 127,590,4 251,267,5 213,511,5 302,248,4 312,134,5 238,167,4 360,147,1 93,446,4 108,930,2 297,310,3 328,268,4 65,116,3 290,789,4 174,628,3 346,422,4 290,154,3 6,202,5 200,284,4 373,355,3 302,325,2 130,274,2 185,1252,4 346,1087,1 183,587,5 233,327,2 292,146,2 275,1238,1 100,117,3 268,496,3 255,981,3 304,601,3 93,62,3 326,649,4 42,117,4 346,267,4 193,516,3 280,988,2 231,80,5 41,87,5 17,525,4 54,677,3 157,181,5 287,12,5 101,688,3 322,248,3 293,543,4 267,231,3 93,645,5 156,2,3 278,146,4 89,474,3 275,1072,3 300,683,3 190,342,3 200,323,5 212,447,4 144,830,1 64,134,4 290,153,4 378,68,4 344,558,1 61,178,4 330,276,4 42,53,3 243,239,3 278,468,4 0,208,4 312,416,2 150,1073,2 328,123,5 331,353,5 302,158,3 248,471,3 5,512,4 0,258,1 307,143,3 173,576,1 326,187,5 331,270,4 58,659,4 129,958,4 93,143,3 279,116,5 150,46,3 248,168,5 183,33,2 343,314,5 338,138,3 73,1083,3 346,68,5 292,760,2 307,233,3 267,420,3 327,184,4 364,907,3 199,14,4 4,93,3 317,508,5 233,422,4 117,815,3 353,296,4 129,404,4 278,3,4 197,216,4 341,236,4 194,1227,1 47,987,2 6,280,3 63,317,4 252,646,3 98,55,5 12,442,4 150,774,2 90,509,3 342,210,5 278,91,4 56,247,5 93,427,5 41,27,5 342,791,5 143,208,2 289,242,3 99,315,5 373,976,1 248,683,4 144,345,5 378,704,4 183,69,4 93,656,5 177,519,5 302,1087,2 20,773,2 327,1041,3 275,275,4 76,49,4 236,182,5 4,388,1 233,53,2 21,501,4 225,282,2 198,677,1 147,356,5 325,611,2 58,561,4 93,205,4 108,519,5 328,275,4 320,49,4 173,654,5 143,72,3 55,185,3 86,567,5 268,386,3 200,339,5 164,418,4 190,269,3 189,627,4 4,410,1 168,307,3 302,287,4 68,150,5 144,1039,1 278,489,3 0,107,5 252,172,5 76,264,3 282,215,4 223,1084,1 335,116,3 377,1283,2 173,117,2 295,97,5 129,209,5 0,261,3 88,948,3 57,203,4 262,1472,5 91,272,4 66,742,4 310,626,4 233,726,3 350,1315,4 89,432,3 193,1219,3 221,818,2 310,509,4 58,1064,5 194,420,4 144,43,5 37,1013,5 185,70,5 81,108,1 263,435,3 360,948,4 199,172,5 278,404,3 6,504,3 120,234,1 300,379,4 379,175,3 331,6,4 315,70,1 6,265,4 
292,1,3 17,791,5 165,257,4 372,848,3 176,194,4 253,14,3 327,99,5 343,110,4 222,117,2 188,606,4 182,249,2 275,1109,3 193,508,3 266,1072,5 312,181,4 9,497,5 275,68,4 59,728,4 263,202,2 115,530,2 61,120,4 344,312,4 125,321,3 89,422,5 226,116,2 270,712,4 24,268,4 360,1040,2 84,970,3 345,779,2 337,55,3 362,432,4 216,1302,2 158,244,5 140,747,3 48,121,2 364,812,5 372,24,4 300,66,2 148,311,1 20,772,3 356,741,4 372,80,2 81,280,3 89,95,4 326,197,4 267,1156,1 373,172,3 82,782,4 317,654,4 215,692,3 380,581,5 332,315,5 20,324,4 278,194,4 278,23,5 348,369,2 126,747,5 55,94,4 37,70,5 275,878,3 373,475,2 247,197,5 320,7,4 343,267,3 150,601,4 193,283,3 374,582,2 379,196,3 380,49,5 102,526,5 267,152,5 231,650,3 87,880,5 10,267,5 248,67,5 66,275,4 176,275,5 233,95,2 134,32,3 344,844,3 331,180,5 345,560,3 10,738,3 238,653,5 275,431,5 213,293,3 329,602,5 362,181,1 336,630,4 243,410,4 181,171,5 42,392,4 63,446,4 115,420,3 256,287,3 129,3,2 53,236,4 311,602,5 197,628,4 278,731,3 319,430,5 0,11,5 58,567,5 256,220,3 328,854,4 57,1103,2 42,485,4 338,41,4 343,44,5 91,470,4 199,312,5 346,878,3 17,731,3 91,595,2 357,1528,3 86,230,3 12,117,4 304,153,4 211,190,3 40,745,3 191,300,4 343,420,2 359,510,5 364,123,4 9,163,4 233,846,4 270,581,3 108,95,5 75,149,5 58,1112,4 380,211,5 4,108,5 278,385,3 63,897,2 40,434,3 307,842,3 84,660,4 338,515,4 94,509,4 37,403,5 346,734,2 124,269,4 344,294,4 81,1100,4 10,721,3 238,78,3 2,317,4 55,178,3 58,194,5 118,929,3 294,1134,4 346,322,1 290,45,4 346,215,3 336,256,3 86,678,3 380,280,2 214,450,3 176,947,2 291,225,4 313,1053,1 255,293,3 89,85,5 266,621,3 314,210,4 61,208,4 124,484,5 344,316,4 311,95,5 200,67,2 252,174,2 151,203,4 270,223,4 312,1065,2 98,362,4 325,143,5 25,342,3 269,552,1 205,872,3 378,201,5 69,526,4 180,977,1 93,155,5 85,1174,5 67,287,4 59,70,3 150,392,2 21,434,5 197,171,4 95,473,4 304,238,3 300,181,5 31,865,3 278,113,5 9,190,5 278,206,5 320,142,3 9,495,5 129,133,5 25,870,2 12,538,1 48,593,3 55,691,4 317,126,5 158,880,1 129,939,3 347,146,5 242,581,5 14,17,1 84,513,5 134,378,2 285,511,2 275,1273,1 101,238,3 292,870,1 42,160,4 289,621,3 175,99,5 225,13,5 360,237,4 37,382,2 328,923,3 377,64,3 25,314,3 4,229,3 268,141,1 42,274,4 296,6,4 150,734,5 123,78,3 22,404,4 372,1134,3 326,1140,3 187,97,5 68,590,3 344,281,3 193,416,2 310,186,4 203,285,3 214,7,2 17,969,3 84,282,3 95,172,3 276,301,4 326,24,2 300,7,4 32,894,3 117,22,5 279,541,3 279,1478,3 200,332,2 6,569,3 302,163,4 68,747,2 72,506,3 295,9,2 208,320,4 93,419,4 178,309,4 187,37,3 285,1112,3 353,207,4 118,384,5 187,76,4 183,123,5 267,780,3 124,27,4 59,488,5 219,342,3 176,195,3 128,306,2 338,549,2 377,288,5 278,172,5 313,104,4 294,67,4 144,104,2 245,540,3 233,116,2 57,181,4 107,236,3 15,163,5 138,302,5 315,264,3 336,229,5 115,331,3 91,375,3 151,240,4 167,747,2 254,840,1 264,180,2 235,221,4 325,658,4 216,257,1 311,208,3 0,13,5 286,590,5 293,688,3 313,254,5 188,1402,4 106,299,1 266,249,5 343,189,5 150,64,4 342,366,4 9,384,4 69,382,2 108,130,1 372,398,3 377,1477,3 124,63,5 377,273,3 40,97,4 53,146,5 212,513,5 253,842,2 333,244,2 223,517,1 30,492,5 9,518,5 124,24,1 91,87,3 193,25,3 12,264,4 278,577,4 300,215,4 1,302,4 325,450,2 187,204,3 337,510,4 327,481,3 268,505,5 124,406,2 84,274,3 91,180,4 14,285,2 373,161,2 147,168,5 53,256,4 275,1227,1 267,678,4 384,1366,5 55,180,5 270,14,3 307,183,4 63,6,4 329,37,4 262,885,2 243,179,4 0,96,3 155,210,4 61,154,1 270,169,5 249,479,5 294,418,4 313,868,4 345,71,3 4,387,2 346,86,3 165,342,4 193,365,2 89,196,5 192,173,4 53,126,4 200,264,3 127,55,3 341,973,2 11,275,4 275,120,4 48,150,5 376,353,4 
58,734,5 218,302,4 275,237,5 58,124,3 384,208,4 372,289,5 312,72,5 307,204,3 181,863,4 0,43,5 235,419,4 320,473,4 379,529,5 287,886,5 231,55,5 89,202,5 7,171,5 307,1005,4 302,209,4 138,457,4 144,1089,2 89,961,2 42,320,3 65,470,5 276,285,5 292,81,4 200,461,1 266,448,3 266,174,5 278,100,3 362,335,4 339,417,5 58,447,4 63,215,4 354,881,4 207,429,4 55,95,5 83,236,4 372,229,4 73,99,4 290,1243,4 127,293,4 185,553,1 329,212,5 292,207,3 91,31,3 17,56,4 118,450,5 98,236,5 167,257,4 42,49,4 118,273,4 150,836,4 58,135,3 229,152,5 22,503,4 130,13,5 94,116,4 84,7,4 378,636,2 24,134,3 0,52,3 313,1220,3 180,739,2 252,526,5 171,477,3 248,99,5 343,86,4 307,182,4 329,1015,3 129,352,1 231,312,3 377,195,4 48,51,2 312,447,3 41,264,3 312,99,5 93,805,4 6,566,1 96,167,4 234,291,3 57,239,4 324,324,1 83,63,5 59,185,4 291,474,5 93,257,5 315,18,5 93,482,5 42,0,5 217,653,4 101,745,2 366,759,4 223,386,4 6,636,4 356,293,4 295,276,5 291,510,5 78,369,2 183,692,3 381,126,3 69,227,5 37,217,3 196,320,3 6,623,4 372,150,4 377,214,4 306,449,2 349,529,4 270,51,4 12,853,1 187,418,5 177,21,5 103,24,3 319,277,3 344,450,5 63,581,4 166,1308,1 231,180,4 88,736,1 327,233,4 294,736,5 232,653,4 234,345,4 213,220,5 373,110,2 278,1132,2 377,541,4 248,197,5 20,819,3 124,1182,2 5,124,3 136,182,5 193,184,4 331,1217,5 346,84,5 0,162,4 285,49,4 180,148,1 20,843,4 298,317,4 17,194,3 231,193,4 278,555,3 56,974,3 124,939,2 193,526,4 162,63,4 256,236,2 21,120,3 63,228,4 172,259,4 264,14,3 209,175,4 290,173,5 280,537,4 78,300,3 243,190,5 247,209,3 341,174,5 235,110,4 296,1295,4 158,677,5 254,564,1 285,308,5 317,87,4 76,173,5 108,251,5 243,1044,5 263,3,4 127,189,4 157,162,4 82,608,4 22,379,5 213,312,4 109,782,3 307,218,3 158,285,1 112,326,5 177,82,4 253,213,1 29,1006,5 108,321,2 72,922,3 290,566,5 59,479,4 295,237,4 355,688,5 361,257,4 15,68,5 317,500,4 226,285,3 270,53,3 183,552,3 255,1206,3 209,72,5 320,214,3 220,622,3 310,135,5 129,253,2 292,1146,4 114,465,5 206,186,5 345,931,2 167,122,3 326,237,4 30,320,4 181,762,3 223,723,3 351,215,4 104,271,4 89,176,5 310,160,4 212,734,5 59,659,4 7,510,5 125,288,3 246,49,5 216,173,3 33,298,5 108,222,4 58,658,3 176,402,5 310,417,4 180,255,1 98,368,4 2,299,2 136,234,5 19,0,3 286,110,3 223,750,3 297,650,5 286,239,2 55,72,4 344,865,3 109,231,3 82,719,4 386,1165,3 42,46,1 362,182,4 5,483,5 346,287,5 223,147,3 298,958,2 279,498,4 255,727,4 377,190,5 261,21,4 89,706,5 362,801,2 336,1132,4 6,81,3 27,227,5 150,1196,5 63,37,3 24,150,4 261,97,4 180,124,3 96,96,5 384,605,4 61,234,4 270,191,5 19,68,1 80,273,3 270,327,2 260,1024,5 362,133,2 69,210,3 200,405,1 91,188,4 91,190,4 76,209,3 94,203,5 371,634,5 238,496,4 88,715,3 12,513,5 373,124,5 331,96,5 338,477,5 248,120,3 151,161,5 221,6,5 359,13,5 373,1027,1 84,1152,4 144,927,3 105,85,3 297,992,4 150,488,5 101,357,3 67,49,5 373,291,4 55,745,4 150,230,1 8,5,5 112,975,5 294,214,5 346,299,5 12,751,1 302,581,4 193,57,4 167,24,5 310,187,4 329,274,5 359,133,5 10,743,4 157,824,4 214,209,4 310,401,4 141,88,3 329,254,4 279,419,3 386,55,5 259,325,5 89,610,5 262,317,5 95,477,2 302,475,3 57,192,3 0,209,4 180,367,1 58,691,3 57,1068,2 177,1282,3 326,126,4 252,281,4 200,672,3 200,1426,2 144,923,2 231,95,5 76,68,3 245,468,3 341,190,5 17,184,3 173,28,2 342,950,1 317,11,4 180,1384,1 258,179,5 327,482,5 218,545,4 53,272,4 12,856,3 295,484,5 177,88,4 363,689,4 111,677,3 291,483,5 9,155,4 264,741,5 353,507,3 307,821,4 372,240,5 275,940,3 327,147,3 5,457,1 177,285,3 81,309,4 302,182,5 338,230,2 358,750,4 199,173,5 335,207,2 377,150,3 212,683,4 380,646,4 353,267,4 35,881,5 269,217,5 61,117,2 
197,183,3 362,236,2 93,221,3 150,735,4 5,198,4 377,792,3 331,234,3 293,825,1 362,269,2 149,49,5 294,497,5 365,447,5 35,681,1 310,514,4 347,974,4 117,435,5 91,189,4 279,448,3 290,71,4 12,614,4 309,256,5 4,226,4 357,642,3 302,66,5 344,220,5 275,365,3 384,220,5 173,65,5 200,192,3 55,50,3 71,264,4 341,55,5 275,839,3 360,497,4 248,171,3 292,157,2 84,299,3 20,120,1 329,484,5 327,230,2 91,128,4 368,947,2 298,1038,4 55,390,3 321,317,4 372,419,4 315,288,2 176,46,3 275,750,4 47,265,3 384,523,5 108,471,2 68,332,3 223,222,3 48,100,3 327,217,4 55,382,2 263,152,5 9,702,5 295,845,2 214,237,2 91,30,4 269,12,4 304,556,4 58,168,4 364,286,4 386,217,3 74,136,4 177,366,4 91,10,4 207,380,3 22,228,3 57,274,5 14,147,3 17,185,4 0,183,4 86,95,5 118,741,5 12,719,4 93,1043,4 294,225,4 168,259,1 386,179,4 338,672,5 325,565,4 356,6,3 114,510,5 150,502,3 322,331,3 109,778,3 290,27,4 359,194,3 177,98,4 157,175,4 200,595,4 21,175,5 194,830,2 5,182,4 285,737,5 183,230,3 198,987,1 262,293,2 292,424,4 12,886,5 42,595,3 0,156,4 180,9,2 365,671,5 12,682,1 229,606,3 265,271,4 322,244,2 193,214,3 167,471,3 345,545,4 98,675,4 20,757,1 302,185,4 309,13,5 267,138,2 269,294,5 262,133,5 387,312,5 206,1225,2 266,551,3 89,99,5 10,8,5 353,174,5 42,48,4 91,580,4 320,134,4 264,124,4 79,268,3 59,492,5 78,5,4 71,508,4 372,426,4 61,231,3 326,232,3 245,1100,5 235,595,4 268,138,1 177,203,4 386,513,3 221,779,3 36,23,4 48,142,3 234,134,4 384,3,2 88,693,5 267,208,4 12,268,2 58,201,4 377,271,4 224,142,2 385,454,3 118,208,4 271,21,5 285,353,4 189,268,4 373,121,2 177,464,3 37,93,5 373,580,4 177,243,1 28,1017,4 358,830,3 91,97,5 243,167,5 75,63,5 343,1006,4 386,198,4 192,32,3 291,627,3 279,575,3 237,1189,3 289,163,4 379,131,4 82,767,4 247,514,5 177,182,4 177,341,4 372,88,5 121,190,5 173,861,1 296,272,4 377,1179,3 298,1019,4 183,510,4 300,30,3 177,1168,4 275,1043,3 320,275,3 194,1088,4 0,200,3 238,1191,1 148,307,2 233,1148,3 330,652,3 343,243,3 133,338,2 127,321,2 29,254,4 177,565,4 82,931,4 199,684,4 309,273,3 388,954,4 64,660,4 329,207,5 362,162,3 207,210,5 197,747,2 178,271,5 272,689,4 84,491,4 120,125,3 13,812,2 263,671,3 339,264,5 279,689,2 313,1219,5 66,826,3 193,470,3 17,462,4 307,44,4 88,92,2 266,574,3 221,124,5 267,977,2 209,24,4 301,306,4 206,539,3 14,863,4 124,115,4 61,224,3 259,681,4 298,99,3 350,312,5 355,271,5 296,142,5 287,299,5 248,173,4 384,366,4 372,624,4 386,413,4 380,190,5 308,323,3 44,14,4 55,55,5 54,253,2 317,104,1 94,553,3 40,68,4 378,519,5 183,765,3 200,543,2 242,386,4 292,400,1 5,498,4 320,123,3 171,182,5 75,1157,4 196,689,3 15,417,5 298,215,5 98,1066,4 339,185,4 63,1138,1 180,833,3 245,0,4 79,193,3 127,237,4 200,683,3 370,23,4 94,402,1 173,1052,5 84,448,4 386,691,1 233,209,3 20,859,2 71,503,4 12,123,5 2,344,3 343,287,4 253,966,3 325,398,4 245,587,4 271,497,4 234,192,5 236,82,4 180,1016,1 280,876,4 98,99,5 140,224,3 295,171,5 88,120,5 258,312,5 5,196,5 127,150,3 346,587,3 6,176,4 333,169,3 233,927,2 101,442,3 6,470,4 140,404,3 30,513,5 270,257,3 253,609,2 235,236,4 51,286,5 213,49,3 314,503,3 60,1126,4 180,1056,2 311,918,3 86,38,3 62,1007,3 84,107,2 255,180,4 278,570,4 25,116,3 289,431,5 235,1012,2 386,519,4 118,108,5 345,808,3 338,522,5 221,528,2 229,7,5 328,337,2 183,646,5 291,854,5 59,206,3 245,894,5 230,251,4 143,761,3 253,754,3 4,396,2 302,472,4 150,202,3 167,116,5 322,150,4 89,994,4 353,656,4 371,199,5 338,66,3 283,327,4 348,846,4 176,507,4 288,23,4 261,194,2 317,418,5 0,149,5 379,176,3 22,548,3 64,172,3 55,434,3 209,402,4 146,339,4 338,27,4 11,752,5 1,307,3 263,282,5 344,314,5 192,110,1 300,6,4 372,67,5 93,37,2 
356,832,4 25,236,3 315,196,4 292,163,4 216,225,1 144,412,3 370,176,4 285,476,3 73,149,3 177,194,4 320,526,3 336,741,5 89,189,5 55,188,4 324,402,2 335,844,1 12,801,2 63,201,4 180,1086,1 295,280,2 386,57,4 292,719,1 182,1216,3 203,1193,4 328,299,4 123,225,4 42,515,5 180,845,3 307,754,3 203,481,4 195,110,4 199,742,3 93,941,4 382,318,2 48,1077,1 267,356,4 388,175,4 342,117,2 232,204,4 327,692,2 16,244,2 177,7,4 366,16,5 6,449,4 117,852,5 27,221,5 223,703,3 289,754,4 311,130,5 388,108,3 81,528,4 300,685,4 157,148,3 269,746,5 261,236,3 189,221,4 270,509,4 206,327,2 86,228,4 292,844,2 93,0,4 10,184,4 372,595,3 333,1009,5 384,78,3 296,256,3 329,96,5 377,161,4 221,367,1 325,209,3 48,405,2 233,86,3 232,567,5 214,214,3 108,392,4 78,324,5 270,865,4 96,465,3 168,132,4 343,147,2 252,741,4 14,243,2 329,57,5 266,176,5 341,256,2 113,495,4 86,317,4 24,188,5 112,321,3 4,443,2 93,587,4 231,1127,2 94,110,4 64,525,4 157,61,5 200,324,5 89,1196,4 23,177,5 310,964,3 279,558,3 72,99,4 109,383,2 177,256,5 200,526,3 209,483,4 73,136,3 270,1100,4 379,1448,4 359,302,3 322,281,3 380,494,4 214,221,4 327,660,5 144,591,3 292,75,3 261,624,3 212,677,4 266,23,5 158,747,3 129,239,4 317,159,3 362,425,2 101,796,2 222,224,3 338,153,4 331,977,4 157,228,3 17,214,3 133,258,2 12,889,1 235,147,4 245,718,4 10,651,4 106,339,5 311,481,5 223,845,4 105,284,4 312,577,3 93,536,4 128,268,4 313,405,3 335,238,3 191,251,1 327,182,5 223,286,3 124,72,5 177,499,4 70,922,5 129,1273,2 185,256,4 292,444,4 311,268,5 267,997,1 167,256,5 202,325,4 346,327,4 154,285,4 42,945,4 373,453,4 163,925,2 290,178,5 319,1010,3 312,404,3 81,413,4 180,818,3 200,1038,3 129,540,3 236,478,5 8,478,4 294,1114,5 278,431,3 255,865,4 304,177,4 377,874,3 268,984,3 351,3,3 49,285,2 310,844,4 59,97,4 83,6,4 307,513,4 94,654,4 164,68,3 173,287,3 342,482,5 295,150,2 275,140,4 300,426,4 180,927,3 292,182,4 4,401,1 377,3,3 278,1230,4 243,952,4 333,315,4 278,0,3 243,163,3 293,247,5 113,181,3 177,270,4 210,285,4 386,941,4 277,21,5 109,805,3 35,884,5 134,442,4 384,261,4 90,180,5 356,290,4 253,503,3 122,254,1 326,293,3 319,715,1 307,581,3 263,380,4 341,573,1 360,285,5 267,6,4 76,404,3 386,31,5 0,182,5 48,230,3 379,586,4 247,185,5 143,453,3 220,127,3 21,221,4 200,76,2 334,244,4 389,989,4 135,18,4 275,681,3 137,149,3 79,422,3 307,432,5 217,11,5 381,545,2 268,69,1 325,451,3 287,176,3 127,47,4 84,44,3 232,57,3 13,171,5 385,120,3 12,152,4 378,731,5 344,902,3 12,342,1 321,193,5 242,126,4 275,576,2 313,398,3 108,865,4 247,152,3 199,472,4 108,90,4 48,115,4 22,1004,3 197,240,3 150,321,2 300,771,3 296,101,1 143,479,4 151,190,5 185,43,5 118,146,4 195,579,2 108,422,4 269,550,4 343,4,3 245,160,3 132,321,2 326,475,2 269,287,5 242,1367,2 7,684,4 243,257,5 353,899,4 266,770,3 42,530,4 140,470,4 175,12,4 268,855,5 98,321,3 349,152,3 134,602,4 120,97,5 296,172,4 84,654,3 55,760,3 373,426,3 213,38,4 20,564,3 339,525,5 302,375,2 276,404,3 278,208,5 127,64,4 192,361,3 324,94,2 324,163,1 388,1118,3 29,285,5 279,567,2 345,133,5 58,610,3 63,216,2 8,339,4 223,892,3 238,123,5 362,211,1 228,348,4 95,482,5 71,355,4 275,668,1 317,574,2 209,237,3 177,454,3 221,1437,4 337,407,5 12,300,1 231,203,4 40,99,4 200,577,2 330,867,4 320,204,5 335,1046,4 384,434,3 144,4,3 268,194,3 166,135,4 12,596,3 41,938,4 346,1282,1 372,113,5 302,390,1 129,280,4 64,327,4 310,131,4 5,194,4 386,199,5 270,401,4 189,290,3 14,273,4 285,228,1 346,471,5 321,215,3 274,299,4 346,279,4 330,63,4 295,13,4 129,281,5 93,945,3 376,747,4 5,487,5 10,349,4 337,483,5 6,480,5 178,749,1 313,281,5 381,530,4 25,596,2 233,135,4 289,192,4 279,94,5 377,140,3 
290,495,5 180,265,1 278,264,5 209,49,5 248,160,3 292,478,4 188,617,2 94,711,2 302,124,2 42,251,4 302,218,5 377,385,3 313,273,3 150,82,5 87,300,4 324,184,5 233,400,2 292,463,3 333,310,4 267,243,4 107,20,3 373,199,5 173,247,5 180,681,4 261,948,4 7,143,5 326,582,2 4,99,5 69,297,5 180,1067,1 6,384,5 278,819,4 349,339,4 301,878,2 392,1218,4 373,734,5 22,426,5 386,446,4 233,31,3 285,1181,2 180,928,1 12,153,5 86,238,4 56,843,2 268,435,3 250,611,5 118,173,4 348,743,2 127,567,4 21,549,5 22,738,2 15,653,5 360,708,5 388,248,3 188,224,4 134,184,4 150,410,4 24,519,3 388,612,5 335,998,2 235,545,4 263,215,5 6,666,5 349,656,5 159,483,5 379,185,3 373,929,2 302,544,2 6,209,4 356,234,4 37,0,5 310,945,4 386,558,3 278,809,2 256,99,5 81,471,3 294,161,4 221,143,5 59,217,4 311,484,4 156,136,5 302,23,3 326,152,4 6,497,5 9,98,5 298,16,1 270,293,2 69,1064,4 331,290,4 321,91,4 327,627,3 43,147,4 187,325,3 346,172,2 306,182,3 367,395,2 372,842,3 311,482,5 275,448,2 19,741,4 144,683,5 309,739,4 158,102,1 91,567,3 379,240,2 310,1092,5 12,600,4 59,419,4 10,99,4 372,93,2 4,142,3 313,723,2 388,159,4 9,193,4 268,97,4 312,656,4 166,132,5 202,270,3 49,8,4 37,256,1 372,356,4 285,951,2 94,659,5 379,312,4 256,530,5 378,615,2 0,247,4 53,596,2 266,21,4 130,18,4 326,202,3 179,155,5 59,162,4 366,773,4 212,96,5 372,519,4 151,698,5 210,875,2 298,461,5 360,401,3 199,230,4 192,1,3 340,947,3 173,27,5 377,72,3 312,96,4 37,144,1 270,317,5 279,61,3 270,43,4 371,182,5 108,355,4 391,177,5 117,183,5 0,207,5 341,239,3 319,973,3 9,587,4 6,668,1 249,91,5 177,507,3 375,236,3 194,66,2 188,274,5 267,90,3 121,174,5 0,127,4 187,78,5 275,417,4 261,928,3 43,236,3 300,518,4 180,989,1 27,293,3 275,339,5 185,116,5 12,807,2 310,760,3 363,1047,5 296,1216,1 341,94,4 356,257,4 177,595,3 268,530,5 86,6,4 127,0,4 365,446,5 392,1033,2 186,746,4 6,231,3 12,704,5 63,150,3 238,515,5 6,657,3 235,152,2 58,973,3 378,553,4 359,236,4 285,1139,3 292,154,2 101,662,3 76,522,5 379,171,3 306,401,2 193,160,4 188,731,2 292,641,3 342,302,4 5,465,4 95,0,5 205,888,2 223,276,3 294,163,5 144,272,5 2,298,3 270,516,3 327,725,4 121,186,4 367,550,4 388,203,4 291,287,3 295,749,5 295,483,4 150,171,5 229,738,5 377,196,3 57,653,5 248,136,4 373,8,1 124,235,1 267,761,2 319,1089,3 275,435,4 91,213,4 157,49,4 61,696,4 197,317,4 294,449,4 21,947,1 333,960,4 50,63,4 373,1010,4 351,95,4 221,782,2 112,318,2 114,641,5 74,951,5 313,596,4 4,369,1 119,923,4 6,182,4 295,297,1 372,277,5 177,116,4 50,678,3 286,460,5 221,279,3 301,269,2 296,704,2 347,293,4 193,743,3 53,747,5 342,227,5 179,402,3 373,947,2 94,384,4 299,260,3 282,708,5 290,155,5 180,936,3 338,10,4 93,67,4 108,391,3 200,98,3 323,1093,5 285,1193,4 41,417,5 285,595,3 104,306,2 41,366,2 58,130,4 311,488,5 326,709,4 136,471,4 322,466,5 331,448,4 98,330,3 278,89,3 278,227,4 345,414,2 112,507,4 196,88,5 198,293,1 163,330,5 286,300,3 249,70,5 93,575,2 151,844,3 65,596,3 336,370,4 98,311,2 74,596,3 392,470,4 197,192,4 369,41,3 82,406,1 343,683,3 221,356,4 59,81,3 302,293,4 51,530,5 87,314,4 192,257,3 177,97,5 199,207,5 386,1006,5 150,731,4 275,449,1 182,87,3 386,223,5 8,526,3 198,110,3 326,1055,2 59,604,3 324,613,4 6,100,5 362,81,3 304,481,2 304,187,2 1,306,3 262,142,5 124,135,5 341,57,5 289,497,4 302,14,3 298,935,4 235,182,2 333,1523,4 315,63,4 225,8,5 194,778,2 248,97,5 285,1410,2 307,1046,3 221,246,1 233,11,1 268,400,3 134,503,4 292,473,5 304,510,4 300,214,5 298,66,2 233,169,5 199,372,4 59,60,4 223,391,4 6,503,5 207,427,4 159,31,5 101,564,2 293,741,4 4,175,3 41,1043,4 377,154,4 321,520,5 338,513,3 55,230,3 116,927,3 302,231,4 294,601,5 320,854,3 
294,97,5 69,587,5 6,135,5 362,554,1 112,241,2 359,734,5 302,275,4 101,46,2 313,154,5 63,160,3 275,648,4 388,27,4 145,346,3 205,1429,1 332,78,3 294,156,5 117,654,5 275,3,4 159,108,2 100,1056,2 249,968,5 291,1009,4 227,312,5 11,683,5 194,324,2 392,14,3 181,422,5 345,127,2 55,737,3 338,478,5 193,450,2 373,181,5 262,124,4 15,159,4 75,196,5 333,1132,4 51,14,5 116,1015,5 58,472,3 90,617,3 233,434,3 127,57,3 278,49,3 140,545,4 374,355,4 99,1235,3 221,7,1 343,194,5 333,199,4 313,1502,3 379,517,3 199,583,4 310,490,4 218,131,5 362,1,4 341,188,5 209,734,4 91,158,4 327,290,4 177,24,3 103,411,3 373,879,5 282,658,5 101,830,2 58,608,2 116,741,4 96,431,4 185,326,3 333,607,4 129,283,2 12,99,5 143,1285,4 101,97,4 5,192,3 180,1360,1 162,97,4 206,831,3 292,70,4 115,1088,2 269,715,4 249,183,1 166,168,1 120,136,5 392,1284,3 12,70,4 273,254,2 144,301,4 392,317,3 302,395,4 302,121,4 58,44,5 294,811,4 163,299,5 42,965,4 377,507,4 90,326,4 134,563,1 212,356,5 181,120,3 228,749,2 252,297,3 63,63,4 42,209,5 43,1057,4 355,747,4 294,1132,4 161,543,4 29,687,3 244,410,3 124,497,5 302,721,2 253,428,4 173,1032,1 98,244,3 110,271,3 255,1,5 20,594,3 331,217,5 129,1207,4 344,1246,2 183,581,4 342,332,3 304,203,2 112,323,2 282,237,5 88,761,3 79,698,3 293,345,3 213,855,4 379,426,4 378,7,5 285,54,4 346,805,3 222,320,1 319,1040,3 381,275,3 278,1229,3 234,522,5 71,422,5 300,469,4 189,404,4 261,761,2 253,34,2 74,1149,4 2,323,2 342,726,4 266,432,5 140,287,3 150,48,3 327,21,5 222,24,1 9,694,3 82,121,1 346,238,5 248,38,4 304,99,3 292,479,5 226,115,4 278,289,4 180,1128,1 392,125,4 69,397,2 341,656,5 249,323,2 197,635,3 339,180,4 258,107,4 291,117,3 386,30,3 84,317,4 342,190,5 234,236,4 295,209,3 312,649,4 215,6,5 138,126,5 296,624,3 304,173,3 42,791,1 311,583,5 287,275,4 109,76,4 129,93,5 47,422,4 261,142,3 199,195,4 15,98,5 74,99,5 58,822,5 268,443,3 94,150,4 379,1443,1 180,1133,2 18,309,4 84,211,2 331,727,4 332,179,1 0,241,5 181,99,3 234,173,4 252,116,5 284,312,5 24,237,4 32,291,4 69,1034,3 327,1014,3 56,1070,3 129,507,4 261,777,4 89,691,4 292,90,2 149,92,4 262,204,5 163,117,5 127,685,4 275,419,4 61,474,4 145,312,4 200,19,2 183,404,2 266,641,4 275,403,4 221,431,3 21,392,4 233,944,3 300,53,3 6,434,5 302,567,4 64,238,5 327,1108,3 291,653,5 393,66,5 290,47,5 392,401,3 200,855,3 364,893,1 278,65,2 279,91,3 388,123,4 362,227,3 312,202,5 95,434,3 384,339,4 20,686,2 304,384,1 307,27,3 42,404,4 335,741,3 209,43,3 63,237,4 224,171,5 291,172,5 56,410,4 220,271,5 168,126,4 384,473,5 195,24,4 310,714,2 243,1106,2 202,0,3 306,745,4 206,519,4 320,513,4 338,174,5 84,210,5 150,199,3 130,285,5 203,1021,5 384,194,1 91,929,2 15,478,5 377,1043,3 9,446,4 4,440,1 150,413,5 221,526,4 59,87,4 9,417,4 359,196,5 315,189,5 377,122,3 59,142,3 393,41,4 392,1205,3 377,178,2 93,281,3 17,659,5 129,832,4 386,422,3 91,237,5 223,281,4 171,581,4 252,805,4 177,745,3 243,293,4 369,115,3 6,635,4 176,1066,4 342,162,5 292,32,2 307,704,5 322,179,5 316,327,4 222,285,1 278,432,4 390,526,3 177,453,4 121,736,4 295,21,4 327,553,3 176,877,1 190,85,5 279,156,3 263,167,5 98,68,4 313,217,4 351,6,3 319,451,3 293,354,4 127,212,3 380,377,4 6,430,4 148,895,4 392,632,2 124,197,3 76,1027,1 386,1018,4 127,424,5 275,726,3 335,450,2 322,288,2 74,124,3 323,753,5 48,289,2 257,750,5 107,514,5 93,920,5 207,523,4 209,242,2 275,231,3 200,442,3 188,293,5 331,248,3 108,290,3 273,477,5 94,63,5 391,332,4 182,719,4 292,618,1 380,770,2 384,660,4 158,1024,2 391,301,5 129,1048,3 384,98,2 0,147,2 371,671,5 58,824,4 221,238,5 366,440,3 41,1039,3 140,150,2 48,1008,3 290,382,2 114,762,2 373,222,5 172,241,5 
279,101,5 59,510,4 353,496,4 319,187,4 324,509,4 144,6,5 362,590,4 4,68,1 268,174,5 267,181,4 93,365,3 115,285,3 50,495,4 262,94,5 177,863,2 362,657,3 94,861,1 89,366,4 329,548,5 6,482,4 209,64,4 119,404,4 221,721,3 129,65,5 290,587,4 275,671,3 313,4,4 42,62,3 69,127,4 215,3,5 340,1526,4 386,1096,3 180,885,1 384,1410,3 392,538,3 342,723,4 384,223,2 84,463,5 12,362,3 229,1191,4 314,284,5 268,6,3 233,178,3 148,309,2 291,110,4 293,747,3 185,287,1 325,203,3 118,23,4 162,233,3 298,508,4 116,306,5 49,124,2 279,447,3 341,654,4 93,730,3 143,1015,3 6,460,4 307,69,4 312,842,3 256,115,3 156,0,5 0,111,1 261,146,3 41,735,5 227,649,3 276,14,4 103,281,3 17,236,3 231,190,4 143,95,5 294,996,3 278,651,4 362,113,5 164,180,5 66,471,4 274,94,3 108,93,4 59,672,4 36,160,5 377,158,3 294,6,5 180,261,2 186,85,4 144,38,4 180,975,1 126,448,4 329,569,4 173,844,5 48,712,3 278,104,4 377,446,4 306,80,5 382,504,4 69,47,4 373,423,1 333,1240,2 379,153,3 319,553,4 362,365,2 245,235,4 262,143,4 91,160,2 302,1011,4 274,661,3 12,390,3 42,431,3 343,124,3 218,113,5 290,1187,4 245,931,1 20,117,1 388,110,3 57,282,1 275,301,5 189,747,3 300,150,2 312,830,3 372,178,3 327,569,3 6,180,3 103,247,2 209,187,3 129,668,4 202,618,3 93,389,5 86,1186,2 171,429,3 372,227,4 93,99,5 221,669,3 6,6,5 187,484,3 117,473,5 104,285,4 2,347,4 367,435,3 113,653,3 234,212,4 376,287,5 295,503,5 384,128,3 2,350,3 256,461,4 397,513,4 108,247,2 118,249,2 328,302,4 345,30,4 287,49,4 55,814,4 17,954,4 193,174,3 203,296,5 10,290,4 101,297,3 270,57,3 53,257,4 186,174,2 307,632,4 289,117,4 221,767,2 292,1056,2 42,16,3 173,267,5 292,180,3 59,20,3 93,81,4 307,11,5 29,27,4 327,915,2 388,428,4 159,117,3 306,100,3 62,1010,1 94,727,3 292,68,3 91,214,4 40,422,2 292,503,4 378,451,3 167,287,1 200,846,2 293,78,4 185,224,4 253,198,4 377,147,4 300,581,2 311,495,5 126,287,5 10,215,3 257,314,3 310,591,5 91,248,3 160,214,2 180,831,1 17,187,3 42,97,5 311,966,3 327,549,3 279,49,3 53,259,4 324,304,2 267,369,2 388,844,4 140,863,3 300,65,4 180,242,1 386,772,4 392,931,3 37,210,1 291,78,5 233,90,5 317,136,4 79,207,5 118,543,2 307,4,4 359,587,3 150,78,4 386,197,4 270,424,2 359,301,4 377,545,2 144,695,3 356,124,5 392,939,2 84,88,4 389,988,5 108,1013,4 0,192,4 127,117,5 144,894,3 379,855,3 297,68,4 386,1114,3 388,606,3 205,749,3 268,92,3 14,8,4 263,381,4 134,182,4 89,78,4 341,107,4 312,153,2 339,204,4 200,35,1 302,738,5 6,483,5 353,1006,4 84,701,2 384,152,4 144,712,4 326,195,4 245,144,1 24,49,5 367,218,2 86,86,4 6,549,4 221,195,5 275,1239,4 194,45,3 28,677,3 312,155,3 338,297,2 392,127,3 12,905,3 278,119,1 150,182,3 285,167,4 359,27,4 274,495,3 331,870,3 311,442,4 75,324,2 245,546,4 396,6,5 146,750,2 321,750,2 7,567,4 273,242,2 338,187,4 386,200,5 41,182,4 205,677,1 275,402,4 174,182,4 200,186,3 292,138,3 22,746,3 17,46,3 306,152,5 379,61,1 193,285,1 58,716,2 310,192,5 94,403,5 287,68,5 49,122,4 116,587,3 314,126,5 258,356,5 166,732,2 78,6,5 98,264,3 173,949,3 255,551,3 17,427,3 302,734,4 65,279,4 345,741,4 53,826,3 43,251,2 183,68,3 164,650,5 210,356,2 335,1056,4 122,734,2 273,207,4 384,422,2 12,229,3 325,240,3 350,983,5 324,85,3 249,194,2 187,55,4 63,878,3 144,347,4 125,301,4 189,362,2 260,358,5 285,28,2 283,886,4 345,293,3 82,62,4 17,691,3 302,741,4 305,12,4 372,746,4 361,311,5 359,282,4 393,1209,3 173,1027,4 279,4,4 59,522,4 278,997,5 322,771,3 381,24,2 398,812,3 101,207,4 285,992,2 180,250,1 58,596,2 322,285,3 389,123,4 292,582,3 200,1152,2 91,960,4 108,1038,2 120,427,5 294,81,4 72,179,4 100,120,4 290,65,4 292,208,3 346,818,1 71,202,3 381,170,3 179,27,3 397,226,2 330,704,2 338,631,4 
397,180,4 245,28,1 172,1264,3 302,183,5 198,116,3 373,930,3 338,630,5 243,203,4 44,99,5 314,602,5 232,81,4 263,182,5 193,513,3 302,236,5 315,356,4 90,229,4 362,416,1 116,108,4 288,1015,5 278,62,3 12,664,2 59,131,4 275,594,2 12,344,4 360,182,4 332,87,5 196,61,2 174,668,1 338,1300,3 335,281,3 20,874,4 308,937,4 274,116,3 242,285,4 143,192,4 114,31,5 249,222,4 185,987,4 346,828,4 377,548,3 307,24,4 264,476,3 330,57,3 129,741,5 298,332,4 53,740,5 20,440,3 129,38,4 314,99,5 242,267,4 392,680,3 275,1088,2 335,654,3 237,219,3 295,922,5 83,147,4 17,967,3 177,876,2 157,285,4 292,484,3 386,207,3 23,293,3 47,510,5 180,1373,1 86,24,4 381,473,5 140,280,4 177,186,4 387,55,3 89,13,5 86,704,4 171,1133,2 12,260,1 206,567,4 269,1147,5 251,274,5 370,236,5 317,157,5 148,300,3 147,508,5 223,1043,3 325,199,2 86,63,5 395,840,4 388,714,3 47,305,4 200,635,2 386,182,4 324,192,4 89,902,4 155,123,3 273,475,4 325,562,3 21,109,1 151,66,5 284,901,4 17,192,5 188,14,2 143,180,4 279,180,3 13,602,4 326,658,4 261,446,3 221,948,3 197,409,1 290,374,1 124,62,3 338,738,3 248,244,2 363,320,2 311,658,5 325,232,4 261,780,3 6,153,5 276,120,2 85,880,2 12,815,1 291,82,5 230,845,4 298,997,2 359,78,4 87,260,5 181,470,4 185,30,4 342,683,3 180,1387,1 307,312,3 230,150,1 346,64,2 63,8,4 380,128,4 275,306,4 180,1283,1 213,256,3 310,238,3 108,558,3 82,451,3 371,147,5 368,270,5 269,713,4 93,684,4 397,503,3 310,0,4 84,233,4 333,1410,1 93,169,5 191,339,4 55,432,4 312,514,5 71,126,5 400,485,4 254,258,3 93,649,5 395,290,4 165,299,5 397,68,5 23,285,5 334,259,3 233,1049,3 320,212,4 384,766,1 371,1082,3 74,1000,1 274,221,4 71,176,4 270,355,4 187,716,4 227,86,1 216,839,1 124,999,3 93,457,4 325,140,3 61,507,4 372,203,5 367,287,3 312,66,1 180,24,5 356,117,5 123,95,4 275,202,4 221,745,5 35,268,3 337,944,4 322,299,2 282,209,5 7,55,5 304,529,5 139,257,3 10,662,4 254,832,4 62,677,2 193,43,4 388,655,5 90,321,4 274,101,3 351,172,1 370,178,3 329,71,5 197,691,2 364,6,2 0,263,2 63,1140,5 86,62,4 63,16,3 178,302,1 129,495,5 268,275,5 173,20,1 302,117,2 236,422,4 248,107,3 384,1352,4 356,325,5 298,481,4 278,213,3 216,280,2 180,747,1 318,301,4 180,1008,1 312,526,4 247,10,5 108,819,3 268,524,4 381,289,4 377,1027,2 209,184,4 127,722,3 362,471,1 392,1047,3 75,959,3 101,509,4 303,892,3 359,193,3 200,239,3 253,950,4 338,239,4 233,72,2 81,224,3 382,463,4 278,318,4 88,844,2 13,8,4 285,1229,1 180,259,1 208,300,3 329,46,5 263,761,3 266,577,3 91,95,4 302,573,1 86,565,5 392,87,3 193,1057,2 166,125,3 89,1124,4 68,149,5 355,327,4 91,392,3 145,687,1 362,1484,4 268,1070,2 118,198,5 17,168,5 170,1021,3 324,339,3 75,287,2 113,519,3 386,565,3 290,1214,1 395,147,4 147,115,5 10,596,2 372,57,4 368,987,3 291,55,5 127,279,1 196,749,5 215,14,3 143,590,3 373,823,4 293,410,3 388,1529,2 300,160,3 140,475,3 298,221,2 169,880,3 84,326,3 40,517,3 392,682,4 350,537,4 307,176,5 143,470,4 331,927,5 100,108,2 59,491,5 243,1038,4 250,11,4 124,691,3 10,732,4 6,165,3 327,1040,3 233,264,3 180,1048,1 43,4,4 93,683,4 362,61,2 255,845,4 204,268,3 384,303,3 327,695,3 72,88,5 353,179,3 392,251,3 392,762,5 93,1088,2 158,219,5 338,64,4 129,728,4 273,741,4 300,153,4 292,150,4 127,493,4 101,596,3 338,24,4 89,470,4 104,332,3 184,27,5 338,180,4 221,287,4 197,174,3 62,299,4 153,639,5 255,369,3 13,474,3 89,1108,3 20,833,1 392,889,1 37,117,5 377,1036,2 24,7,4 177,738,4 5,482,5 275,167,5 392,650,4 0,218,1 61,317,5 223,1162,2 84,339,3 291,422,5 303,309,3 326,409,2 372,505,4 398,218,3 260,987,3 17,169,5 157,402,4 243,195,5 71,120,3 302,791,5 36,21,5 397,480,3 326,199,4 24,968,3 221,8,5 27,677,2 386,143,3 68,99,5 
341,1159,3 140,1012,1 267,473,5 307,692,3 292,91,4 81,656,4 81,457,1 141,287,3 263,843,1 116,97,4 232,275,5 158,470,4 226,1009,3 243,1073,4 233,76,3 24,168,5 295,55,5 106,268,5 12,500,5 94,682,4 215,107,4 278,1031,3 14,306,1 379,1167,3 180,824,1 114,442,4 362,568,2 353,284,5 94,365,4 0,231,3 144,405,3 346,81,5 311,274,5 372,728,4 244,472,2 60,330,2 396,1018,3 6,184,5 115,346,2 121,469,3 360,96,4 378,401,3 243,180,4 317,161,5 289,473,3 199,559,4 44,236,4 224,417,5 59,612,4 54,684,1 270,140,4 386,563,1 258,199,4 118,1201,4 278,1051,4 311,7,5 91,662,4 157,802,3 91,101,2 326,516,2 127,13,5 66,6,5 15,602,5 86,96,5 388,549,3 392,61,4 57,63,5 307,159,4 254,405,1 120,248,1 199,390,4 12,218,1 213,306,3 398,590,3 270,743,4 265,123,4 353,701,3 335,275,4 315,282,5 86,520,3 14,457,5 245,469,4 91,277,3 378,442,4 300,709,3 228,747,3 42,950,3 350,1104,4 202,23,4 144,326,5 267,734,3 5,525,3 293,104,3 281,337,3 378,191,4 13,201,3 93,623,2 23,257,4 390,187,3 250,171,5 266,474,5 45,150,4 291,284,4 275,208,4 221,141,2 26,120,4 401,256,4 300,755,4 166,1309,3 6,561,5 369,656,3 319,158,4 205,293,2 325,66,2 372,209,5 200,56,4 386,843,5 58,489,4 248,478,5 313,1084,1 279,1312,5 353,1100,3 11,27,5 377,221,3 59,179,4 266,942,4 353,60,5 71,479,5 255,76,3 377,232,2 6,190,5 243,885,5 302,299,1 56,150,3 333,215,3 292,442,4 387,815,4 41,408,3 17,392,3 215,92,4 249,947,3 180,877,1 209,256,5 268,474,5 398,283,2 166,72,2 335,48,4 377,464,3 262,214,4 331,203,4 270,410,1 365,184,5 267,168,5 222,21,5 155,179,5 238,7,5 6,682,4 62,951,3 344,442,5 1,256,4 163,618,4 71,99,5 300,272,1 266,203,4 275,314,4 313,762,5 55,194,5 176,335,2 310,182,5 266,478,4 116,142,1 45,180,4 42,704,4 63,366,4 398,1313,3 353,275,3 163,180,5 295,273,4 43,716,3 300,848,4 94,89,2 0,235,4 196,126,5 231,233,3 335,398,3 304,189,3 61,212,4 388,506,5 386,500,4 80,411,1 206,7,3 337,662,5 221,664,1 298,915,3 23,317,5 6,398,4 91,711,3 28,97,4 302,1221,3 379,228,3 263,744,5 334,268,4 243,474,4 347,275,3 216,362,1 6,138,3 342,305,4 383,271,5 377,395,4 136,248,4 91,45,4 115,321,2 100,23,4 320,735,4 76,51,5 261,317,5 311,614,4 392,274,4 267,245,5 362,1098,2 199,1,4 61,844,3 233,299,3 392,1094,2 76,143,3 276,627,4 239,288,4 47,169,4 196,577,3 337,132,4 112,276,3 346,11,3 314,179,4 392,141,4 378,1031,2 135,41,3 32,332,4 81,478,4 186,274,5 110,895,2 386,1011,4 326,513,4 372,486,4 294,240,5 378,400,3 93,409,4 377,1219,3 342,457,5 345,75,4 9,159,4 374,683,4 157,243,4 234,189,4 20,858,2 388,37,2 344,332,3 25,291,3 212,199,5 4,416,3 57,949,1 398,7,3 394,150,3 292,976,2 187,731,3 221,267,4 2,329,2 24,12,4 307,308,1 213,21,3 41,78,5 261,431,3 254,404,4 253,572,2 93,95,3 41,427,3 298,1118,4 108,67,3 300,422,1 56,1010,3 262,176,4 227,203,3 341,203,4 289,449,2 310,484,1 46,301,5 377,283,3 300,163,3 71,580,4 212,474,4 191,1404,5 338,22,5 143,31,4 41,233,4 254,412,2 223,661,5 331,716,3 314,731,3 235,65,2 300,1090,3 373,158,4 124,382,2 20,664,3 261,490,3 327,331,3 378,89,2 6,226,3 338,125,4 196,285,1 333,513,4 378,163,4 108,195,4 319,741,4 347,472,3 151,50,4 327,227,3 5,458,2 17,508,4 373,760,3 311,169,5 307,160,3 1,315,5 3,293,5 59,494,3 367,95,3 211,285,4 47,426,4 89,613,4 270,310,3 91,719,3 268,182,3 253,185,3 91,108,3 28,258,4 278,383,4 364,12,3 307,198,4 94,673,2 24,196,3 268,170,5 268,526,5 253,97,4 252,94,4 319,1080,4 12,715,4 101,166,2 109,27,4 398,1227,3 63,70,3 90,63,4 121,422,4 162,96,4 300,87,4 183,21,3 0,251,2 360,110,3 292,63,5 279,1014,3 377,450,4 392,1468,3 330,304,5 267,324,3 205,259,3 300,179,3 62,411,3 48,656,5 17,628,3 150,659,4 108,182,5 76,777,2 319,89,4 95,524,2 
159,122,4 20,716,1 353,69,3 312,422,4 196,257,4 270,738,4 226,149,3 125,242,5 249,476,3 100,755,3 14,475,4 333,6,5 5,237,5 173,1031,3 377,971,4 255,8,4 386,380,4 6,588,5 253,102,2 62,978,3 36,596,5 94,141,4 392,11,5 62,105,2 200,155,4 145,301,4 91,200,3 12,316,5 369,704,3 313,63,5 84,424,4 159,925,2 380,651,5 198,275,4 278,79,4 143,1283,3 380,723,3 179,693,5 298,114,3 372,27,3 39,242,2 5,80,4 6,500,5 398,458,4 94,184,3 392,952,4 377,1133,4 155,345,3 294,52,1 327,735,3 20,984,2 365,859,2 148,688,2 212,627,5 352,285,5 372,385,3 143,215,4 214,69,3 60,242,2 180,1386,1 362,958,1 330,810,4 61,175,5 377,301,5 94,698,2 395,750,3 233,75,2 86,514,4 127,135,5 384,672,2 326,381,3 307,746,3 278,1238,1 319,868,4 402,110,4 404,55,4 306,510,5 353,154,2 100,283,4 140,116,4 388,784,3 193,587,4 279,1458,4 378,390,4 302,1225,4 197,653,5 200,562,1 69,541,2 341,1047,1 388,495,4 146,689,4 238,557,5 341,3,4 379,708,4 330,932,3 183,90,3 61,814,3 87,689,4 361,335,2 250,248,5 324,491,4 314,268,5 302,1020,4 342,918,5 21,647,4 309,250,5 212,280,4 326,238,3 221,868,3 268,4,2 213,237,4 302,54,4 12,616,3 384,1535,5 167,1046,2 19,596,3 296,268,4 392,455,3 45,1023,5 366,664,5 327,545,3 307,497,5 115,259,2 143,92,1 362,346,3 6,206,4 220,117,1 263,209,5 266,87,4 328,271,5 84,1136,4 325,442,5 221,201,4 366,435,4 223,619,3 377,303,4 333,226,1 344,1047,2 380,693,4 331,341,4 398,72,3 395,259,3 255,225,5 215,432,3 356,818,4 116,771,4 344,92,4 17,952,3 140,327,4 157,730,2 327,1262,3 343,11,5 168,603,4 388,791,4 76,88,5 9,175,4 302,843,3 312,227,3 223,654,4 217,788,3 243,116,2 12,545,3 364,274,4 213,11,5 298,751,3 229,185,4 99,341,3 353,381,5 325,444,4 314,47,4 154,323,2 258,97,4 243,216,5 118,104,2 174,482,5 372,116,4 273,276,4 311,179,4 400,116,3 393,567,5 94,621,4 344,1015,3 404,591,1 290,714,5 143,190,4 233,498,4 351,180,4 329,79,2 392,1046,3 15,692,4 367,566,3 310,42,4 47,194,5 243,722,3 43,473,4 144,684,4 313,287,5 267,381,3 307,1210,3 304,133,5 386,577,2 343,432,4 320,496,5 96,428,4 289,684,3 63,662,3 298,284,5 38,899,3 129,321,4 396,107,4 345,684,3 14,470,4 69,88,4 63,155,4 101,49,4 341,583,4 268,641,3 233,167,3 130,301,5 343,8,5 151,1013,2 392,595,4 13,274,4 111,299,4 279,537,5 56,475,3 48,507,3 362,143,4 399,327,3 404,1581,1 338,379,3 48,736,1 326,280,3 302,42,3 82,662,5 302,138,3 69,168,4 80,1027,1 391,257,2 382,473,5 200,588,3 52,249,2 381,182,3 312,500,5 307,78,4 47,432,3 5,207,4 248,633,5 404,170,1 252,447,2 167,322,3 12,435,2 307,588,4 213,194,4 58,117,5 269,216,5 0,199,3 353,961,4 91,558,3 404,579,1 226,6,5 233,492,3 322,327,3 98,925,3 206,143,3 293,349,4 199,1059,3 259,537,1 173,13,5 297,70,5 278,653,5 404,1408,1 291,233,5 12,289,4 296,266,3 391,1142,4 380,215,5 206,58,4 236,198,4 273,82,5 238,1202,5 386,81,4 233,288,4 404,952,3 272,318,4 200,175,4 298,94,3 337,485,3 285,288,5 173,422,2 144,216,3 65,14,3 344,355,3 180,1342,1 386,332,3 174,8,4 295,474,4 61,179,4 342,7,5 12,280,3 261,190,4 233,1456,3 308,302,2 209,210,5 229,442,4 302,530,4 319,122,4 306,1139,2 209,678,3 302,704,5 150,159,4 238,151,3 384,336,4 228,257,2 296,194,1 0,179,3 372,101,5 150,63,5 312,198,4 207,522,4 193,97,4 291,206,5 230,120,4 372,450,5 377,287,3 327,514,5 124,119,1 36,362,3 284,275,4 279,194,3 377,526,4 270,418,3 175,239,4 338,606,5 364,320,5 41,279,4 55,37,2 177,133,3 100,974,2 268,808,1 101,183,2 58,430,4 343,247,4 298,701,4 398,485,3 390,263,1 229,431,4 22,12,4 266,823,4 285,576,2 392,430,2 388,489,3 48,513,4 285,553,4 312,564,1 125,318,2 263,442,5 325,943,2 58,526,5 156,1282,2 344,282,4 194,303,4 25,475,3 42,90,3 150,286,4 221,943,3 
320,198,4 397,11,3 380,402,3 275,742,1 307,1403,4 366,99,5 12,446,2 95,1231,5 311,603,5 386,167,5 40,173,4 163,322,4 310,424,2 173,292,5 158,871,1 94,444,4 144,682,3 395,1024,4 10,259,1 75,1006,4 391,491,4 289,226,2 63,211,3 212,192,4 310,231,3 307,805,4 392,811,3 281,306,3 42,152,5 47,131,5 255,50,4 362,587,2 362,155,3 344,283,4 307,462,4 289,738,3 183,136,5 404,993,1 143,409,3 10,751,4 404,386,1 37,81,5 392,348,3 8,506,4 199,767,4 367,16,5 194,770,2 208,49,5 336,134,5 341,31,5 386,447,3 382,320,5 302,244,3 255,973,3 344,1010,3 129,357,4 345,568,3 88,738,2 74,1150,2 315,8,4 250,14,4 249,128,4 285,403,5 338,227,4 134,324,4 292,553,1 314,708,4 193,11,5 48,1075,2 360,13,4 319,53,4 143,299,3 203,44,5 131,274,3 311,156,1 232,312,5 233,69,3 192,1257,3 243,134,4 230,747,4 343,227,4 398,923,5 108,171,5 17,422,5 48,461,2 248,201,4 398,1542,3 176,99,5 58,639,5 326,493,4 229,290,4 393,173,5 58,94,2 15,563,1 207,434,5 386,696,1 187,208,2 398,371,3 302,777,4 245,401,3 302,363,2 383,354,4 292,434,4 152,320,3 378,175,5 91,93,3 262,152,3 367,319,5 347,405,4 93,233,5 82,1027,4 40,288,2 144,726,2 82,105,4 269,274,5 280,937,2 377,275,4 392,277,4 84,212,4 67,281,1 61,248,2 56,1094,2 363,288,3 298,422,3 405,47,5 199,839,4 342,87,4 302,557,4 94,421,2 124,193,5 360,184,5 373,362,3 333,152,4 295,123,5 291,221,3 114,542,2 289,21,5 193,194,3 333,514,4 391,164,5 159,462,4 41,1044,2 149,627,4 324,151,4 105,21,4 29,293,4 256,1461,5 248,234,4 231,201,4 215,1160,4 231,271,4 404,1431,1 233,204,3 400,637,4 377,65,3 333,284,4 319,891,3 322,507,4 295,962,5 302,728,3 59,215,4 378,734,4 304,304,3 278,1034,3 404,189,2 373,951,2 372,126,2 359,241,4 114,81,4 177,258,1 401,110,4 341,1127,5 221,930,1 233,241,4 311,150,2 296,244,3 278,1270,4 58,845,4 159,160,3 7,6,3 90,160,3 221,11,5 379,58,4 233,501,4 323,282,3 353,903,5 12,693,4 91,819,1 370,54,4 378,229,4 275,316,4 69,120,3 137,115,2 392,777,3 144,1053,1 127,208,4 404,520,4 58,505,5 93,101,3 102,49,5 12,623,5 5,422,3 233,1197,3 275,830,3 404,1176,1 147,506,5 206,240,3 143,18,4 379,731,4 405,478,4 74,289,4 70,275,4 391,22,5 42,94,4 17,484,5 134,474,4 116,410,3 11,380,4 249,747,2 42,955,1 17,63,5 279,585,4 317,484,5 404,1502,1 114,474,5 83,221,4 170,301,4 200,1226,1 393,87,3 175,1011,4 121,569,3 391,292,4 58,707,4 98,11,5 346,221,4 57,1018,4 84,501,4 17,98,5 206,193,4 304,80,3 15,50,4 12,884,1 333,683,4 312,193,4 289,831,3 294,569,3 120,293,4 303,342,3 286,247,5 223,1052,3 116,627,5 358,294,3 404,523,1 388,377,5 326,805,4 224,491,4 159,1222,4 232,1193,5 298,134,4 345,517,4 311,204,5 347,471,4 404,569,1 325,507,3 199,928,4 398,195,5 6,392,4 392,1091,3 12,269,4 213,215,4 237,120,4 177,509,4 22,208,5 243,289,3 200,466,2 16,124,1 200,301,4 199,801,4 384,248,2 388,835,4 390,55,5 398,401,3 150,86,4 247,293,3 312,63,4 4,78,3 268,755,1 330,701,3 392,264,4 108,293,4 144,2,3 261,121,2 278,207,5 275,426,5 19,930,1 398,1231,3 143,955,4 102,470,4 10,394,2 365,435,5 275,13,4 392,135,5 297,196,4 134,743,4 274,27,4 235,531,2 118,1085,4 48,422,2 215,367,2 249,356,4 253,175,3 249,22,4 345,209,4 310,328,4 129,362,3 388,503,4 156,1301,5 254,747,1 146,344,4 114,88,5 231,460,5 322,712,4 91,747,3 327,244,4 275,144,3 346,317,3 116,55,5 248,747,3 6,541,4 124,0,4 404,720,1 36,194,5 6,598,1 325,678,3 233,522,4 186,195,4 312,472,3 307,610,4 245,368,3 99,989,3 262,98,3 286,267,4 400,198,3 353,46,4 353,148,5 377,53,4 335,737,1 278,1119,3 313,577,4 360,654,3 191,301,5 378,479,5 352,270,2 224,1442,4 378,97,5 54,404,1 185,938,5 170,343,3 293,1006,4 401,254,4 91,462,4 234,196,5 84,93,3 37,251,5 173,275,5 94,587,3 
90,483,4 373,194,3 320,190,3 353,212,5 93,87,3 298,949,2 129,32,5 374,760,3 307,180,4 292,413,4 344,659,5 47,171,5 18,691,3 22,70,3 108,440,2 0,249,4 91,976,2 214,257,3 173,948,5 147,162,4 19,94,3 378,134,4 343,677,2 80,123,3 369,208,5 243,71,4 388,582,2 396,193,3 233,195,3 213,31,4 84,156,3 94,767,1 324,457,3 382,640,4 41,833,1 59,658,4 94,160,3 187,391,5 99,332,3 243,208,4 306,514,4 248,23,4 108,408,2 45,326,4 307,465,5 283,285,4 236,512,5 275,1179,2 347,244,4 147,1038,2 103,545,1 391,178,5 48,541,2 377,300,3 140,879,1 362,745,4 282,406,3 285,76,3 39,332,4 278,249,3 103,1225,3 64,47,5 313,121,1 6,513,2 5,485,4 55,200,4 238,700,5 57,545,2 138,245,4 341,482,4 6,403,5 312,68,5 298,180,3 292,15,2 214,184,4 342,653,5 398,530,3 346,290,5 137,518,5 248,270,4 173,196,5 226,18,4 285,0,4 405,663,2 191,256,4 373,215,5 6,629,5 22,190,3 82,0,4 392,30,4 360,11,4 12,865,3 103,750,4 308,1392,2 333,321,3 9,434,5 255,863,4 0,84,3 333,339,3 129,285,5 380,633,3 234,187,4 294,317,5 9,705,4 278,120,4 343,844,3 369,11,4 192,552,4 275,570,2 346,840,3 116,885,5 404,209,5 89,16,4 266,684,3 267,257,2 324,106,2 265,312,4 94,567,4 63,214,5 372,484,4 273,318,5 302,997,3 297,212,3 353,735,5 58,139,1 288,684,4 353,78,2 343,273,2 307,741,4 144,37,3 289,522,3 187,631,5 325,204,4 377,941,3 359,662,4 372,19,2 312,664,4 219,285,5 285,635,3 206,1,3 226,105,3 238,1055,5 86,182,4 285,56,5 24,426,4 243,553,3 91,172,3 291,299,4 331,878,4 333,181,3 294,1027,5 341,99,5 57,60,5 386,287,3 345,683,4 380,913,1 392,373,3 404,464,1 268,1064,5 235,613,5 388,97,4 354,335,4 353,317,3 279,691,3 391,275,4 233,928,1 254,454,2 314,507,4 335,153,5 326,920,4 42,174,2 346,285,3 291,193,4 209,181,5 319,96,5 338,769,4 12,195,4 285,194,4 164,331,4 243,630,4 4,417,3 86,1088,3 55,215,4 86,72,3 394,171,5 345,672,3 220,230,4 279,209,2 163,332,5 372,1229,3 353,952,3 275,470,4 150,683,3 386,671,2 353,704,4 379,21,4 382,424,4 302,200,5 193,197,3 151,150,4 341,522,4 187,215,5 101,163,3 392,685,4 138,507,4 90,325,3 307,237,5 233,20,3 267,182,4 342,129,3 225,27,4 183,1194,3 405,822,3 150,583,3 173,1229,1 108,317,4 56,263,2 300,596,3 397,198,4 233,738,3 275,1470,2 373,814,4 317,236,5 162,285,3 205,881,1 400,134,1 362,1018,5 278,116,5 335,691,3 302,577,2 91,1206,3 61,663,4 0,90,5 231,746,3 197,196,4 404,691,5 300,95,5 215,71,2 295,203,5 287,63,5 89,963,5 275,805,4 362,369,3 194,1416,3 84,567,3 344,245,4 386,460,5 21,117,4 93,545,3 48,110,2 362,345,4 220,1011,4 71,95,5 91,52,3 257,872,5 124,576,2 378,521,5 147,6,5 220,264,3 311,592,5 48,94,2 180,1356,1 229,525,3 15,317,5 233,84,2 183,641,4 123,549,4 80,281,5 329,1027,4 369,237,4 84,442,4 247,323,4 401,479,5 264,409,4 176,269,1 391,1013,3 61,1130,3 398,741,4 255,187,5 273,712,5 388,491,5 335,66,4 404,582,1 64,581,3 69,196,4 86,408,3 391,318,5 230,404,4 362,144,1 61,1073,4 312,133,5 388,99,5 382,474,2 159,23,5 57,227,5 173,570,1 300,152,3 221,1266,3 93,673,3 157,635,4 377,87,4 331,933,2 94,2,1 82,116,5 198,284,4 380,12,4 80,594,4 406,432,4 55,293,4 392,835,4 159,692,5 333,844,2 84,527,4 187,1212,2 310,14,5 117,527,4 17,18,3 404,185,5 359,250,5 242,510,5 117,217,5 408,64,4 290,1272,2 96,78,5 329,691,5 200,669,4 51,844,5 321,11,4 202,6,3 313,236,5 127,964,3 378,416,5 63,475,1 298,181,3 81,526,3 327,422,4 11,401,5 311,704,5 249,1160,4 248,54,5 144,762,4 401,763,3 180,1375,1 48,122,1 398,32,3 320,197,4 4,428,3 268,119,1 27,567,4 314,237,5 194,650,5 84,1073,3 118,181,4 4,384,4 91,550,2 78,739,4 273,293,3 225,23,4 90,173,5 221,91,3 404,77,2 344,723,5 157,81,5 403,686,3 180,102,1 406,204,4 59,196,4 371,440,4 17,965,2 
379,651,3 85,326,4 15,160,5 120,630,4 233,1019,4 213,482,4 275,808,2 295,116,3 397,422,5 296,208,4 278,223,4 351,233,4 371,873,4 358,272,4 262,185,4 388,394,2 69,138,3 307,454,4 268,43,3 278,951,3 129,175,5 388,478,4 344,470,3 388,193,4 14,6,1 129,27,4 353,928,4 237,457,4 312,520,4 307,615,2 387,4,4 115,894,2 231,97,4 275,209,4 279,236,3 91,134,4 188,267,4 329,199,5 91,66,3 334,677,3 193,280,2 327,288,4 293,259,4 405,961,4 174,214,5 404,10,4 199,182,5 199,7,4 298,777,4 281,293,4 84,159,3 404,1577,1 177,362,3 81,219,2 331,1243,4 268,237,5 37,78,3 371,546,5 400,608,3 118,549,4 129,173,5 398,153,3 291,143,5 373,126,4 101,567,2 327,479,3 183,409,3 245,288,2 42,401,4 268,1100,4 64,210,4 114,309,3 129,1206,1 129,442,5 6,395,4 193,288,1 36,10,4 307,1020,4 221,203,5 86,32,3 333,68,1 184,85,5 216,397,1 12,270,1 214,431,5 344,48,3 7,688,4 222,741,3 39,344,4 313,1027,3 233,192,4 353,698,3 326,1011,2 5,58,5 129,225,5 392,526,3 344,99,5 255,405,3 373,278,4 384,1070,4 183,1395,4 379,922,3 290,217,4 6,204,5 300,0,4 34,258,4 362,195,4 270,1132,3 83,290,3 212,179,5 229,275,5 275,67,4 300,565,3 26,369,4 267,72,3 111,311,5 84,657,3 211,178,1 89,148,3 300,172,4 290,1375,3 314,11,5 388,413,4 196,189,3 182,158,4 298,72,2 278,939,5 193,525,4 98,254,3 307,1072,3 294,1169,5 180,1051,2 101,100,4 183,1397,5 10,299,3 15,299,5 268,54,4 392,1031,3 1,314,1 312,673,2 384,289,3 404,786,3 322,14,3 290,187,3 394,314,5 203,258,2 344,288,3 222,281,4 172,291,5 41,431,3 325,654,5 341,580,3 376,99,3 183,285,4 129,392,5 6,78,4 377,865,2 82,180,4 310,233,4 129,98,5 386,692,5 116,194,5 275,126,5 344,32,4 404,413,1 397,49,5 192,683,4 17,658,4 310,920,4 94,264,3 335,1073,5 296,271,5 340,291,5 289,104,2 294,945,2 378,49,4 177,272,3 408,1294,1 118,82,4 344,301,5 408,1241,2 327,1135,4 27,144,3 302,1010,2 307,663,5 9,274,4 180,880,1 404,171,5 221,839,3 342,249,5 404,429,1 191,275,2 386,392,2 53,870,5 408,49,5 223,134,1 98,2,3 290,793,4 268,885,3 105,87,3 327,555,3 58,427,5 314,791,5 345,830,3 177,180,5 179,1130,5 343,470,3 267,476,3 382,662,5 264,256,4 307,43,4 15,75,5 229,182,3 9,370,4 213,22,5 200,530,2 89,511,4 238,649,5 405,238,3 86,410,4 351,656,4 235,1101,4 344,731,4 338,346,4 103,743,1 408,222,4 376,750,3 143,1141,5 209,661,2 333,662,5 56,99,5 369,174,3 311,434,4 309,1021,5 209,691,4 338,1138,3 18,381,3 339,401,4 386,45,3 268,1132,1 0,9,3 360,1073,3 253,74,1 317,62,3 278,390,5 66,121,3 404,772,1 177,54,4 391,284,3 150,120,5 21,691,4 353,731,2 349,323,4 390,25,5 384,662,4 184,284,5 221,1178,1 245,402,4 120,56,5 212,602,5 150,653,4 140,294,5 372,487,3 325,435,3 385,49,4 67,470,3 138,285,4 333,1050,4 412,221,4 71,356,4 206,173,4 285,880,5 292,842,3 0,253,1 391,533,4 48,400,2 408,47,2 193,738,3 200,773,1 173,123,5 255,6,4 63,565,3 377,28,3 55,1035,2 55,201,4 233,1457,4 406,195,4 197,94,3 4,371,3 183,63,4 5,123,5 392,714,1 405,155,5 290,394,3 65,474,2 267,1094,2 266,2,4 252,82,4 360,87,4 377,410,3 6,130,5 275,692,4 346,256,4 304,128,3 223,1038,5 386,487,3 275,546,4 351,182,5 253,96,5 333,426,4 221,239,2 245,259,5 61,226,1 84,69,4 10,709,2 379,68,4 79,198,2 108,671,2 279,321,4 404,177,3 43,433,4 161,627,4 362,472,4 398,654,3 398,958,3 353,57,3 226,1007,4 373,123,3 297,27,4 94,47,4 292,254,3 93,245,4 242,434,4 43,117,3 221,394,1 406,49,4 0,128,5 388,215,2 252,86,5 289,203,4 398,526,3 250,297,5 6,672,3 243,68,4 90,1049,3 214,215,4 98,870,2 317,434,5 402,120,5 12,404,2 267,66,3 404,646,1 223,719,4 396,987,1 310,777,4 157,272,3 372,698,4 174,495,5 112,285,4 186,521,3 200,220,3 285,1046,1 17,274,5 233,557,4 20,976,2 307,577,2 180,1378,1 
249,677,2 377,174,4 9,233,4 126,449,5 17,130,4 101,674,3 180,224,3 335,231,3 392,1445,5 235,704,4 15,181,5 294,182,1 404,788,1 13,241,4 145,299,3 302,590,4 406,248,2 400,518,4 295,814,3 199,214,4 378,283,4 290,88,3 235,57,2 133,891,2 383,688,4 129,355,4 223,1211,2 377,575,3 157,592,4 242,812,4 43,90,2 84,707,4 114,11,5 250,595,3 390,179,5 163,822,4 285,52,2 243,366,1 25,254,3 243,723,4 64,735,4 285,87,4 6,120,5 134,78,3 388,52,2 405,317,5 53,410,5 292,214,4 199,111,3 307,126,4 375,268,5 100,49,4 333,524,5 366,333,4 372,377,5 307,63,4 379,752,4 384,81,1 315,1038,5 290,754,2 398,587,5 57,683,4 331,408,3 267,205,3 195,285,5 388,152,3 356,1094,3 368,180,5 302,650,5 93,404,3 209,418,4 199,1032,2 408,287,1 220,69,3 404,1117,1 255,3,5 252,704,5 120,191,4 74,290,1 285,1285,5 151,312,4 405,495,4 387,146,4 356,333,4 390,133,4 302,41,5 325,87,2 114,469,2 240,291,4 398,1278,3 229,10,4 202,150,4 217,4,3 346,229,4 405,604,5 70,288,2 319,551,4 408,199,2 12,471,5 48,587,4 331,21,5 373,236,5 305,115,5 212,1214,1 392,293,4 180,1329,1 302,1177,2 140,871,1 177,95,4 58,1109,4 183,115,4 270,517,4 275,213,5 255,119,1 371,326,5 254,287,4 294,379,4 65,20,1 48,342,2 58,418,2 362,350,2 326,651,4 404,425,1 150,226,5 193,231,2 372,841,3 342,198,5 292,602,5 150,791,4 319,76,3 347,410,4 270,529,4 384,284,5 338,97,4 193,225,3 151,285,5 222,755,3 159,603,4 4,420,1 330,58,5 353,123,5 405,84,2 391,872,3 136,14,4 293,299,4 278,51,3 380,1438,3 379,432,3 55,768,4 278,234,3 268,422,4 91,183,3 47,655,4 58,1027,1 307,174,5 12,609,2 345,731,3 108,551,2 81,275,4 57,489,4 58,728,4 232,380,4 243,1046,2 294,654,5 404,1337,1 327,75,3 408,98,3 101,587,4 310,527,4 90,481,3 101,444,2 99,751,4 366,550,3 48,773,2 21,385,3 66,870,3 373,539,3 275,40,3 89,327,3 179,203,3 333,159,4 152,55,5 58,527,4 267,452,1 86,402,3 9,167,4 180,1353,1 69,188,4 384,171,2 188,509,5 205,313,1 115,64,2 6,658,5 375,288,3 264,257,4 135,99,5 331,545,4 392,545,2 321,652,4 200,49,4 381,474,3 63,745,5 6,447,3 384,502,3 37,225,1 346,684,3 373,769,5 289,53,3 211,196,5 404,1553,4 323,331,3 4,143,3 328,146,3 415,124,5 304,267,3 400,498,3 174,659,3 15,30,5 312,648,3 275,469,3 215,27,4 300,228,3 193,187,4 233,430,3 3,287,4 328,80,2 269,470,5 43,190,4 311,51,5 302,696,3 248,215,4 249,327,3 231,317,5 197,175,4 279,662,4 377,733,3 407,688,3 405,1125,3 373,6,1 241,360,5 398,417,3 192,814,3 89,1192,4 248,85,4 401,15,3 404,655,1 200,88,3 182,264,2 165,893,4 266,473,5 48,171,1 398,228,2 93,75,4 94,370,2 58,475,2 292,418,3 240,349,2 331,4,5 404,1560,1 409,299,3 177,215,4 357,895,4 377,419,4 404,439,1 279,1477,4 302,171,5 188,380,3 342,777,5 21,225,4 346,245,4 9,496,4 333,190,4 17,496,4 82,109,4 406,210,4 150,658,5 369,510,4 300,1027,5 344,0,3 243,949,1 415,723,4 392,226,4 5,55,4 58,446,5 372,14,4 397,519,5 310,199,4 278,200,5 4,242,1 198,404,2 331,409,4 282,287,2 275,287,4 327,635,3 91,639,5 233,321,2 250,865,2 398,147,4 386,1068,2 41,731,5 93,582,3 166,380,5 278,1046,4 397,492,5 386,673,2 268,635,3 98,287,4 26,245,4 22,97,5 249,812,5 55,587,4 76,482,4 27,478,4 12,846,4 405,428,4 76,356,3 329,844,5 377,738,4 125,287,4 349,615,4 192,28,3 404,721,1 252,486,4 229,175,4 329,152,5 242,172,3 270,527,3 63,632,5 96,429,5 159,446,4 296,747,2 313,1056,2 253,447,3 386,80,3 124,173,5 405,487,4 222,251,1 144,679,3 157,136,5 215,152,4 378,293,3 390,196,5 404,539,1 338,285,5 55,848,2 371,560,5 386,650,2 398,174,3 48,1081,3 366,55,5 345,290,5 118,828,5 250,417,4 89,500,5 346,461,2 9,474,4 0,240,4 342,707,4 144,229,5 293,124,3 336,234,3 215,1009,3 6,563,3 377,9,3 98,826,3 12,651,5 307,485,4 
302,1047,4 221,848,4 294,85,5 194,920,3 129,596,4 388,941,3 136,50,1 344,25,3 221,275,5 93,615,4 333,21,4 94,100,1 356,863,5 94,790,3 338,521,5 307,920,4 55,69,4 399,299,4 378,522,4 384,457,3 221,81,4 290,1156,3 403,332,2 302,43,4 5,520,4 362,306,5 187,201,2 333,504,4 62,407,4 290,105,4 289,42,3 12,293,2 180,1150,1 362,92,4 56,280,4 129,799,4 415,249,4 220,495,3 263,682,2 344,675,4 94,656,5 298,149,5 408,196,3 0,129,3 227,426,4 33,244,4 114,470,2 52,257,4 412,270,4 151,79,5 392,741,4 329,171,5 18,200,3 278,803,4 398,731,2 344,63,5 91,474,5 362,664,2 410,171,5 127,209,4 400,272,2 415,873,1 143,392,4 254,824,1 359,221,2 192,749,4 6,259,1 20,667,1 22,517,5 283,345,4 98,401,4 293,1012,2 97,937,3 40,152,4 183,949,4 58,546,3 326,1074,4 91,290,4 124,647,4 43,587,4 285,1117,1 183,236,4 178,345,3 13,844,3 374,442,4 176,333,3 53,1087,3 400,321,2 262,525,5 11,199,1 278,183,5 326,894,3 256,129,2 404,996,1 28,269,4 193,945,3 275,155,5 404,231,4 159,227,2 359,495,3 93,948,5 400,110,4 373,384,4 200,91,3 84,1020,3 344,386,4 129,127,4 84,631,3 94,549,4 404,1099,1 29,537,4 326,417,3 222,142,4 405,650,3 310,774,3 6,674,5 206,1196,4 346,181,5 48,10,3 91,727,3 342,37,3 292,750,3 319,251,2 330,502,4 343,171,4 202,49,5 75,120,2 255,565,5 323,126,4 129,183,4 237,545,3 405,644,5 392,385,4 388,1202,5 4,184,3 294,1187,3 338,495,5 42,190,5 405,13,4 377,229,3 75,602,3 118,567,4 315,529,2 98,106,3 12,587,4 338,1038,4 405,462,5 185,545,4 270,184,3 73,300,3 373,86,5 55,391,4 276,180,3 306,238,3 193,442,3 404,441,1 267,384,3 290,355,4 221,398,4 406,230,3 343,190,5 184,938,3 269,568,4 252,242,2 406,784,3 265,282,3 302,1238,1 377,499,4 165,322,5 180,979,1 386,181,5 215,420,5 199,147,4 220,160,3 307,90,4 269,508,3 57,380,4 378,1218,2 242,530,4 405,57,4 362,86,3 297,506,4 61,124,4 143,104,2 285,160,2 226,92,5 173,1311,4 157,372,2 375,274,5 268,511,5 144,750,4 338,771,4 341,723,1 278,237,4 31,1015,1 248,356,4 384,1009,3 248,41,5 275,124,4 158,1189,5 416,2,4 267,698,3 416,780,3 62,327,2 6,272,3 193,202,3 81,139,3 279,722,5 293,474,5 194,212,4 193,221,1 15,155,4 392,341,5 287,236,4 282,454,4 322,209,4 293,894,4 72,287,3 404,71,3 311,527,5 370,745,4 255,826,3 86,731,4 4,392,2 71,160,5 101,553,2 397,171,5 289,161,3 5,469,3 93,69,4 136,249,5 342,641,4 111,753,4 203,301,5 91,575,2 376,153,5 333,442,3 359,165,5 91,147,2 373,115,1 196,288,4 6,211,1 298,510,4 411,317,5 300,672,4 124,97,5 129,194,5 403,268,4 405,468,4 405,418,1 348,454,2 6,125,3 234,6,4 313,958,3 380,1532,4 150,1043,2 42,750,2 299,871,5 262,431,2 109,326,3 404,134,5 74,189,5 343,244,3 404,699,1 295,844,5 313,23,1 302,207,5 292,626,2 101,98,2 91,42,3 409,311,2 320,192,3 26,977,2 279,3,3 109,366,3 13,203,5 341,273,2 390,356,5 415,347,3 374,1072,2 404,1433,1 177,27,5 408,496,3 65,741,5 285,24,3 238,854,5 74,150,5 397,143,5 9,268,4 82,684,4 79,233,3 393,217,4 26,474,2 346,131,5 331,830,3 275,639,4 79,214,5 144,215,5 396,1017,4 392,168,3 337,293,1 80,150,2 48,174,5 324,431,5 12,275,5 94,626,4 223,322,3 58,185,5 320,55,4 6,618,3 317,185,5 108,236,4 253,891,3 105,1114,4 306,407,5 377,595,5 342,155,4 262,63,5 75,22,5 279,281,3 213,267,2 302,1117,3 137,613,4 48,184,5 213,12,3 304,257,4 43,163,4 288,404,2 216,230,5 198,285,5 270,950,2 17,0,5 416,596,3 396,177,5 127,85,5 199,558,4 260,341,3 153,461,3 405,435,4 23,55,4 61,741,2 263,432,5 274,207,3 404,95,3 327,613,4 2,326,4 400,203,5 71,171,1 84,1148,3 310,210,3 310,85,5 229,95,2 200,55,5 12,519,4 279,39,5 76,99,3 129,566,2 170,312,4 221,1053,1 408,658,5 314,167,4 188,473,5 13,14,4 384,628,2 275,170,4 321,0,2 267,760,1 188,78,3 
416,1094,3 22,142,3 404,1564,1 215,1100,4 338,641,5 48,54,4 90,503,3 415,355,5 98,65,3 59,637,5 404,733,2 35,267,2 215,281,5 17,96,4 20,670,5 304,271,3 232,256,4 180,949,1 306,448,4 209,186,5 199,1227,4 0,254,2 143,179,4 279,53,2 324,474,4 255,741,5 377,251,4 124,1051,2 268,151,4 404,1468,1 396,220,4 13,41,4 101,162,2 129,533,5 302,140,3 291,319,5 270,316,3 411,217,3 101,577,2 212,131,5 415,577,4 324,174,5 405,185,3 248,918,5 61,659,4 408,1175,4 59,498,3 200,8,3 300,240,3 207,55,2 199,716,4 405,607,4 381,97,3 398,382,2 268,1027,2 404,963,1 197,78,3 306,654,4 129,68,5 310,88,5 117,21,5 47,27,2 193,635,2 58,1107,3 373,1149,1 13,175,1 312,627,4 179,731,3 398,70,3 185,99,4 48,1067,3 290,142,3 341,254,4 22,132,4 372,268,5 404,1249,1 267,568,3 404,213,4 338,134,5 292,865,3 358,116,4 233,134,4 373,757,1 242,654,4 384,488,5 345,1089,2 404,449,1 15,232,5 353,413,4 398,743,3 325,402,3 304,777,4 386,168,5 344,878,2 302,225,4 350,358,4 59,12,4 307,424,4 270,1138,3 241,290,3 309,274,5 247,178,3 93,442,4 233,450,3 377,730,3 298,723,3 5,203,3 304,1455,4 360,339,3 343,285,3 247,483,2 39,346,2 142,346,5 167,281,5 295,120,5 295,481,5 315,233,1 81,184,3 404,126,5 267,180,4 343,78,4 386,379,2 388,1297,5 292,466,4 266,366,4 307,263,2 43,384,3 302,25,4 233,843,2 111,302,4 335,1011,5 85,878,2 398,985,3 415,68,4 82,683,4 331,865,2 285,267,4 166,1306,2 129,269,5 93,565,2 233,480,5 43,213,5 103,234,2 386,1117,3 362,766,2 89,1205,2 362,65,4 408,198,4 364,24,4 342,979,5 20,325,5 279,721,3 44,951,4 200,330,4 4,412,3 275,731,4 271,356,5 333,887,2 82,254,5 233,201,3 124,410,3 405,587,4 144,769,1 398,61,3 337,461,4 63,0,4 362,264,3 98,761,2 297,273,3 101,93,2 289,195,4 353,56,5 372,401,4 14,936,4 392,354,3 114,186,5 93,216,4 298,150,4 307,508,4 10,193,4 58,171,5 277,268,5 59,199,4 377,132,5 80,317,5 25,978,3 397,658,3 285,328,4 12,658,3 260,409,5 373,1406,2 279,1167,5 20,695,2 129,297,5 84,126,5 238,602,5 389,99,5 344,131,5 325,664,1 296,422,3 69,481,4 342,195,4 195,93,3 312,55,2 295,283,4 219,681,4 6,562,2 360,1118,3 384,446,3 271,482,5 143,64,4 337,193,3 416,550,3 268,654,4 300,104,3 307,476,4 311,542,5 400,172,3 182,202,3 384,198,3 290,400,4 333,557,4 362,560,2 183,57,4 103,287,2 302,1257,2 83,1039,3 200,26,3 326,63,2 390,14,4 188,30,3 162,317,4 405,500,5 373,931,1 362,1015,4 400,526,4 360,683,4 372,403,4 408,5,4 294,503,4 84,267,4 275,267,4 141,54,2 205,989,1 278,410,3 290,1205,3 192,300,4 254,217,3 267,210,4 327,204,4 2,306,3 393,226,4 227,654,4 4,88,5 101,392,3 405,639,3 279,467,4 144,303,2 322,171,5 233,197,3 278,1218,3 69,184,4 121,735,4 275,741,4 335,1058,3 243,364,2 412,305,4 12,172,2 56,224,3 335,823,3 255,721,3 384,55,5 162,215,3 159,327,3 384,47,5 275,929,2 150,163,5 116,116,5 411,27,4 263,41,5 372,104,3 28,268,4 384,215,2 419,330,3 109,657,3 177,490,4 391,537,2 225,97,5 144,68,5 253,233,4 129,216,3 274,635,3 382,602,5 91,219,1 7,182,5 319,99,4 297,201,3 9,492,4 109,758,3 129,974,5 299,256,4 192,844,4 404,718,1 94,497,3 415,271,5 181,282,2 373,1045,5 221,159,1 109,211,1 377,654,4 400,25,3 70,150,1 287,229,2 349,175,4 311,502,5 338,468,5 234,746,2 322,247,3 298,918,3 380,96,4 421,108,2 108,848,2 404,89,4 288,470,4 345,878,5 313,376,3 6,547,5 144,78,5 398,126,2 57,256,5 408,515,4 404,1229,1 98,627,4 216,664,4 410,650,4 208,1136,4 405,316,4 345,157,2 197,81,3 312,97,4 118,116,5 161,253,3 372,210,4 180,149,1 338,660,5 291,534,3 196,232,4 302,429,4 311,381,4 370,662,5 379,179,2 94,431,3 318,266,4 362,545,3 301,257,3 282,49,5 200,288,2 311,473,5 129,146,4 116,213,5 108,157,1 267,567,3 267,731,3 140,1058,1 
275,168,5 386,1007,4 329,87,5 66,832,4 199,227,5 278,488,2 333,325,1 300,863,4 245,1221,3 41,195,5 278,24,5 233,873,1 304,654,4 381,1533,4 377,282,4 229,180,4 94,356,4 398,21,3 415,1187,3 247,95,4 96,173,4 392,689,4 319,32,4 5,186,4 0,102,1 390,96,4 245,150,5 320,215,4 392,1138,3 268,613,3 302,448,4 263,344,4 327,878,3 58,865,3 194,635,2 279,941,5 313,779,4 84,153,4 100,121,1 121,463,5 193,82,3 404,205,1 307,504,3 314,339,4 372,473,3 125,681,1 249,292,4 313,771,1 12,734,3 200,596,2 89,190,5 392,788,1 289,519,3 384,167,3 200,824,1 322,214,5 48,930,2 292,1045,1 194,822,4 292,420,3 412,123,5 290,1209,4 82,721,4 416,146,4 233,871,2 124,86,5 306,402,3 187,126,4 404,1530,1 347,833,4 343,1171,4 372,703,2 150,698,4 15,27,5 411,201,3 247,289,3 397,14,5 409,881,3 388,492,5 289,1090,2 311,655,5 319,3,3 273,116,4 187,929,4 82,691,4 415,209,5 326,257,1 74,272,5 346,870,4 238,304,4 128,257,2 93,11,4 341,219,1 213,287,3 320,483,5 86,67,3 199,1410,3 415,42,1 415,1034,3 336,228,3 4,399,1 173,39,4 188,512,4 329,746,3 365,217,3 68,128,3 267,714,1 66,122,4 177,14,5 392,418,4 192,688,2 377,431,4 404,694,1 108,1027,4 380,511,4 387,183,4 307,1420,4 150,450,5 408,58,5 371,76,5 275,407,5 416,82,5 327,78,4 326,752,4 188,638,4 295,23,2 116,312,5 58,70,3 284,454,4 91,123,4 295,178,4 91,222,5 10,572,3 393,772,4 297,184,3 71,483,4 43,377,3 91,517,5 93,968,4 320,613,3 143,196,4 337,133,5 78,12,3 344,300,4 373,146,3 270,63,5 10,276,5 297,502,4 278,540,3 416,422,4 43,95,4 312,163,3 223,293,4 5,517,3 333,420,4 346,6,4 149,146,4 315,520,5 320,88,3 167,99,4 289,435,2 345,173,5 0,117,3 307,106,4 91,677,2 278,1109,3 416,119,2 411,174,4 263,11,5 112,245,5 90,615,4 320,646,3 88,256,5 248,221,4 397,356,4 262,161,5 199,209,5 416,577,3 98,327,4 180,1368,1 384,407,5 89,516,3 196,160,4 233,548,3 317,413,4 212,233,4 176,21,4 263,605,5 392,499,4 48,517,4 101,143,3 82,422,4 362,216,2 254,975,1 88,282,4 396,333,3 415,1097,3 221,70,4 201,483,4 12,482,5 400,209,4 287,177,5 157,126,5 84,522,4 384,252,3 150,481,4 59,137,2 404,91,1 203,244,3 20,244,1 306,422,5 221,14,3 63,661,4 326,196,4 212,691,4 90,233,5 186,190,5 256,948,3 404,1175,3 392,575,3 404,638,1 416,1043,3 289,624,4 269,871,5 48,203,1 307,132,3 345,82,4 144,469,5 114,968,1 213,460,4 183,583,3 311,587,5 392,567,4 327,210,4 381,507,3 222,110,4 342,163,3 145,345,4 61,285,3 28,342,3 222,408,3 341,557,5 393,160,4 180,1039,1 348,822,4 188,595,3 144,573,2 392,364,3 310,124,4 65,287,4 267,927,1 243,954,4 188,134,4 296,595,3 329,426,5 93,240,4 313,394,2 128,267,1 313,67,4 394,631,5 270,94,4 313,70,5 250,426,4 404,365,3 6,625,5 101,865,2 144,99,5 373,28,3 281,304,4 88,276,4 392,362,3 81,69,4 404,469,1 193,143,4 256,300,3 293,750,4 213,131,5 196,78,5 81,1032,1 275,93,2 424,270,5 182,226,4 268,1167,2 57,68,1 55,992,3 238,133,5 307,167,4 392,361,3 253,139,4 293,925,3 275,73,3 193,226,1 275,565,4 418,1450,4 376,322,2 283,323,3 395,677,3 304,82,3 199,673,4 124,780,3 56,270,3 22,221,4 21,237,5 278,235,5 183,567,2 43,432,4 371,573,4 378,654,5 209,522,4 292,631,3 63,68,4 408,200,1 48,384,1 282,124,5 414,1523,5 326,80,4 64,434,4 89,181,3 396,324,3 221,451,1 360,465,4 163,747,5 150,432,3 106,301,4 324,834,5 12,422,5 157,221,3 186,581,1 41,171,5 372,648,4 343,659,3 233,1454,2 115,1256,1 209,275,5 327,355,3 238,209,4 371,324,4 392,704,4 182,272,4 170,267,4 296,23,4 222,596,4 48,877,2 311,512,5 183,804,3 82,104,2 198,947,1 294,173,4 313,818,4 392,1313,3 386,844,4 100,236,5 124,995,3 421,670,4 247,167,4 310,522,5 216,117,4 404,1554,1 405,212,2 424,423,2 136,116,5 333,708,4 378,210,5 335,721,3 310,207,4 
278,533,1 144,297,1 363,287,4 226,474,4 312,233,4 184,222,4 371,321,3 325,264,4 386,983,1 386,435,4 268,626,1 398,127,3 44,0,5 124,863,3 177,539,3 17,223,5 333,657,3 180,276,1 415,760,4 315,179,4 275,90,5 167,221,5 400,865,3 424,747,3 263,450,4 405,522,3 249,627,4 109,194,2 311,729,3 399,258,3 221,1058,1 185,337,3 91,287,3 408,519,2 343,212,4 140,677,4 215,233,4 319,412,3 200,318,2 379,117,2 26,514,4 291,658,5 305,268,5 61,422,3 48,107,2 416,709,4 111,315,5 90,288,4 341,951,3 348,474,4 422,743,4 410,181,3 193,24,2 173,161,5 180,947,1 34,876,2 311,1115,3 398,435,2 233,1453,3 421,558,3 86,490,5 20,929,1 321,312,5 238,507,5 268,413,3 389,739,4 424,190,3 81,1027,2 159,474,5 269,683,4 412,296,5 421,921,4 213,133,4 400,283,3 278,852,1 312,1209,4 124,274,5 290,48,4 364,1047,3 408,213,4 243,150,5 275,378,3 272,320,4 410,208,4 404,698,2 46,291,4 204,242,2 267,824,3 404,574,5 86,185,5 221,384,4 392,780,4 44,20,3 17,125,5 20,99,5 91,163,4 362,349,1 392,782,3 334,346,5 42,683,4 232,190,4 93,60,5 83,878,4 386,431,4 306,23,4 12,481,5 274,745,4 404,421,1 341,516,5 183,71,3 118,1196,4 140,251,4 197,508,4 89,149,3 7,686,1 193,6,3 267,188,4 380,500,4 377,1060,2 221,251,2 111,983,3 302,762,4 326,602,4 331,1027,4 398,945,3 404,1352,1 405,11,4 25,409,2 200,24,3 246,0,4 375,761,4 337,482,4 384,173,2 316,270,3 392,1440,3 349,210,2 421,183,4 0,53,3 406,216,4 326,233,5 399,894,4 129,587,4 277,172,5 390,202,4 249,239,4 298,552,3 90,312,4 26,99,5 392,365,4 416,41,4 233,731,2 415,276,5 405,700,5 238,689,1 89,130,5 48,293,1 317,14,5 327,293,3 252,63,5 0,266,4 231,514,2 401,6,4 310,1231,4 6,481,3 101,747,3 285,573,4 0,23,3 99,322,3 347,14,4 233,692,2 341,296,3 294,87,4 171,177,3 200,641,4 327,281,3 414,153,4 197,195,3 98,412,3 314,185,4 63,71,4 343,283,3 221,894,4 10,108,3 172,879,4 405,413,2 55,121,2 183,251,2 404,1226,3 397,416,3 143,175,4 286,27,5 221,809,2 108,948,3 406,427,3 238,99,3 131,123,4 93,1222,4 82,467,4 292,55,4 362,1009,4 229,181,2 55,431,5 248,185,4 20,318,2 333,741,2 41,193,5 41,495,5 186,432,4 23,215,4 6,644,4 124,731,4 193,622,1 416,158,4 62,267,3 300,98,4 89,1047,4 353,151,3 276,24,4 17,450,3 275,385,3 222,293,4 263,1102,5 171,1171,3 307,728,3 134,652,4 9,494,4 91,654,4 416,11,4 298,197,4 372,317,5 325,669,3 233,233,4 275,1128,4 23,99,5 305,149,5 386,316,4 101,826,2 93,1225,4 296,203,3 192,126,5 61,447,2 124,492,4 173,574,1 312,160,4 335,1053,1 275,664,3 377,418,4 338,401,3 267,1476,2 91,275,5 41,214,5 378,495,5 397,283,2 371,580,5 15,283,1 406,615,3 150,317,5 324,187,2 180,1366,2 245,82,4 293,245,4 38,344,3 24,238,4 302,366,4 272,302,4 381,1228,5 247,853,5 424,226,4 84,1168,4 22,218,1 346,323,1 311,189,5 342,434,4 21,553,1 267,432,4 81,670,1 61,180,4 149,272,4 384,479,5 353,606,3 6,189,5 200,384,2 382,85,5 344,480,3 428,72,3 302,37,1 296,1006,4 89,311,4 362,4,1 177,325,4 225,149,4 15,173,5 294,746,4 27,645,4 248,685,5 300,332,4 362,183,3 270,383,3 21,406,3 328,180,4 205,894,5 29,530,5 307,88,5 357,131,5 404,450,5 4,79,2 382,136,5 14,928,1 91,1032,2 421,918,5 378,650,4 200,240,2 398,1273,1 63,94,4 327,321,3 392,1182,3 71,179,4 348,236,2 346,567,4 267,104,2 415,225,4 5,470,2 2,271,2 234,519,4 315,21,4 330,181,4 119,244,3 406,193,4 278,58,4 293,1015,4 238,431,5 326,407,2 359,156,4 12,267,4 233,432,2 22,237,5 398,322,1 377,698,4 333,395,4 416,66,4 249,1136,5 275,4,3 144,41,5 427,306,4 313,219,4 326,263,2 68,320,4 236,501,4 279,464,3 300,418,3 214,442,4 25,321,3 91,100,2 373,27,5 144,50,3 167,14,5 86,381,3 23,474,4 220,939,4 223,317,5 310,61,3 411,426,4 344,50,5 175,947,4 93,192,5 327,84,1 350,291,4 
411,91,3 155,196,5 398,93,5 205,314,5 333,576,2 416,657,2 199,579,2 293,321,1 176,171,5 331,143,5 247,193,4 196,1227,4 93,1003,3 209,750,4 415,1146,4 317,795,3 109,232,4 129,254,4 338,508,4 378,854,4 377,193,4 312,28,2 93,333,3 114,495,1 294,388,4 373,449,4 191,1136,4 177,1299,3 52,747,2 344,738,4 186,1118,3 55,714,1 200,320,3 200,550,1 373,691,5 356,404,5 61,19,4 298,12,4 344,268,5 9,194,4 129,167,3 386,853,5 289,132,3 149,234,4 148,320,2 86,191,3 199,275,5 216,37,3 392,1,4 9,520,4 378,215,4 392,214,4 327,503,3 377,695,3 211,426,4 317,814,3 310,314,5 428,63,4 344,404,4 56,221,5 5,247,3 119,285,5 310,1045,3 292,365,2 275,685,3 267,483,4 221,470,3 392,791,1 415,233,5 388,661,3 45,6,4 279,386,4 42,180,4 229,143,3 94,231,4 307,196,3 182,221,4 12,401,4 38,305,3 392,109,2 335,1016,5 41,741,4 200,1162,1 75,384,2 393,449,3 115,343,5 58,81,5 127,948,4 58,1117,2 298,1072,4 415,55,5 426,937,5 238,194,5 37,915,5 301,298,2 17,161,4 302,435,4 192,154,4 278,840,4 192,442,4 58,17,4 400,136,3 392,1164,3 223,548,3 401,126,5 91,65,3 83,384,4 6,263,4 346,95,4 262,494,5 355,299,3 127,49,4 316,330,4 415,133,4 122,486,3 392,778,3 176,337,3 397,69,4 256,223,4 6,680,1 31,244,2 388,216,3 317,1520,3 15,605,4 384,155,4 94,746,5 290,1218,4 338,844,4 202,247,5 109,67,2 55,450,3 63,57,3 69,431,3 75,771,3 57,922,5 362,684,4 389,180,4 353,427,4 0,85,5 48,38,2 379,526,4 260,891,5 101,180,2 129,172,3 180,1173,1 362,151,5 386,942,4 197,181,4 408,517,1 203,0,2 20,327,3 338,1247,3 185,716,3 428,85,5 364,149,5 405,194,5 59,160,4 378,299,3 379,63,3 416,551,2 71,478,4 411,435,4 268,762,1 380,472,5 390,473,5 48,997,2 347,312,5 4,432,5 328,654,4 356,286,4 426,681,5 199,49,5 337,173,4 86,253,4 193,615,3 108,1156,4 93,424,5 307,71,4 294,132,4 333,167,5 114,92,3 157,182,3 255,226,4 22,545,3 275,587,4 12,419,4 42,567,4 396,1297,3 417,361,1 278,232,5 426,340,5 378,53,2 398,167,3 57,49,4 398,113,4 397,660,3 192,258,2 69,108,3 380,82,4 183,173,3 268,664,1 362,504,3 43,215,1 6,621,4 105,312,4 365,163,5 17,69,4 253,698,3 200,122,2 6,160,3 422,236,4 43,356,4 422,314,4 13,115,5 6,427,5 246,150,4 308,241,4 254,97,5 353,658,4 6,287,4 84,208,4 313,1135,5 103,826,2 115,263,3 372,565,4 91,92,4 373,120,4 91,962,5 343,175,5 386,1537,3 82,93,4 285,777,5 382,512,5 315,462,5 72,1072,4 53,49,5 416,195,5 298,293,2 326,380,4 180,1335,1 9,12,3 27,422,2 346,698,1 156,92,3 115,581,3 183,446,3 286,275,4 310,49,5 360,202,5 176,197,4 17,478,4 362,187,4 270,236,4 380,497,5 286,116,5 367,217,2 345,142,3 177,430,4 20,687,1 34,679,4 84,1008,2 384,113,5 48,69,2 308,305,2 0,195,5 275,195,4 245,194,3 118,253,2 196,173,5 392,419,3 384,1128,5 313,763,3 398,577,2 91,88,5 288,925,3 93,447,5 294,41,3 248,1102,5 296,741,3 304,288,4 127,1062,2 392,553,4 325,563,3 313,947,3 245,584,1 286,290,5 58,108,4 364,288,3 400,461,4 279,1400,5 269,401,5 362,939,2 199,651,2 12,804,4 330,241,4 221,199,3 361,267,2 337,791,4 344,14,4 163,275,3 285,709,4 94,6,5 21,877,1 386,151,1 200,1168,4 144,211,2 289,143,3 372,30,3 416,134,3 415,303,5 289,698,3 37,139,5 294,158,4 274,929,3 63,450,2 366,200,5 329,276,4 327,222,4 15,133,4 144,264,5 232,30,3 243,558,4 319,738,4 177,293,2 346,293,1 249,473,5 180,1343,1 362,386,1 282,94,5 55,167,2 415,416,3 97,115,5 245,229,3 20,472,3 198,474,5 292,1216,1 375,13,4 360,99,5 292,1010,3 129,797,1 42,10,5 261,708,5 288,146,3 94,68,5 406,754,3 327,530,4 329,392,4 292,346,2 408,679,1 285,289,3 416,207,3 393,231,4 143,272,4 392,727,3 75,285,5 304,206,5 55,43,4 21,454,5 416,1022,4 17,12,5 233,275,3 263,601,4 378,132,4 180,1391,1 251,739,3 295,239,1 140,1141,1 
333,299,3 59,557,4 109,750,3 345,1017,3 271,520,5 398,229,3 274,117,3 319,691,4 208,765,4 243,925,2 416,630,3 344,1013,3 416,746,3 406,465,3 343,303,3 221,408,3 10,240,4 317,139,4 357,356,4 384,1134,1 39,293,4 416,234,2 6,71,5 63,95,4 377,257,4 317,210,5 101,839,2 275,815,2 41,976,2 22,69,2 19,120,3 344,76,3 20,241,3 388,477,5 249,134,5 392,82,4 253,418,4 74,823,1 428,1221,3 419,18,3 275,205,5 233,6,2 78,261,5 307,384,4 103,1010,3 307,123,4 199,146,5 255,924,5 221,1418,1 428,494,3 384,193,3 206,321,3 62,507,4 343,302,4 48,385,4 150,604,4 415,737,2 320,662,2 31,234,3 127,264,5 329,192,5 268,345,2 152,215,2 315,317,5 324,356,4 0,38,4 19,251,4 94,650,5 271,200,3 348,411,1 406,641,2 334,270,4 124,356,3 255,124,5 183,10,3 306,473,5 75,199,5 404,1187,3 261,754,3 29,300,4 93,960,4 105,47,3 12,413,5 342,96,4 312,497,5 9,182,5 233,420,1 32,750,4 6,388,4 188,478,5 304,920,5 404,1206,1 58,97,5 353,602,5 69,337,2 2,353,3 362,1227,2 310,386,4 267,664,2 404,229,2 56,596,3 342,474,5 353,663,5 93,722,3 296,94,3 58,199,5 243,1047,4 56,198,5 10,523,4 103,149,5 57,1101,1 91,402,4 405,661,3 12,771,1 416,63,5 122,706,5 386,226,4 421,853,4 404,61,1 97,744,3 205,1312,1 331,325,5 252,495,5 308,299,3 284,681,4 400,653,3 384,461,2 382,13,5 233,14,3 345,784,3 319,128,4 122,703,3 105,193,5 212,120,5 377,938,4 404,770,1 416,402,4 247,677,3 279,1059,3 403,326,2 404,1246,1 58,38,4 12,811,2 271,468,5 168,704,5 377,722,3 313,1088,1 43,192,3 14,888,3 403,747,4 159,563,3 415,531,3 103,311,3 302,383,3 330,80,5 107,9,5 415,219,4 313,1516,4 1,300,4 270,0,3 74,409,5 404,187,1 343,510,4 63,11,5 296,127,4 313,6,4 134,11,4 93,222,5 22,511,5 353,343,5 315,13,4 275,332,4 397,990,2 325,553,3 155,21,3 261,331,3 359,180,4 407,682,3 294,413,4 150,1100,4 400,197,4 233,185,3 401,125,4 337,605,3 311,135,5 342,213,4 0,163,3 404,577,1 329,70,5 342,131,5 81,481,4 248,217,3 317,325,4 423,1345,4 322,763,3 342,497,5 330,179,5 296,107,4 310,133,5 140,119,4 392,221,4 274,143,4 220,153,3 248,590,5 248,8,5 180,1376,1 428,510,5 337,305,4 57,482,5 273,87,4 15,601,5 101,634,2 161,596,4 86,7,5 279,68,4 392,377,4 345,93,3 398,816,4 273,110,4 93,714,4 100,122,2 91,721,3 210,454,3 193,98,3 27,88,4 289,825,2 326,420,2 359,12,3 338,524,5 353,528,4 250,475,2 384,292,3 55,746,4 12,563,1 431,221,4 255,1060,4 176,167,4 91,143,4 160,1265,2 21,357,5 110,332,4 17,774,3 420,473,4 434,779,2 285,1201,3 7,209,4 199,575,4 57,149,4 13,516,4 306,196,4 248,211,4 6,333,5 423,24,4 428,627,3 58,366,4 266,226,3 229,356,5 72,80,5 276,871,3 150,1016,2 7,257,5 268,504,3 90,317,5 295,44,5 400,602,4 243,160,4 125,751,3 338,193,4 221,301,3 175,749,4 227,326,1 307,99,5 118,615,2 89,517,2 400,384,3 386,430,3 327,156,5 193,126,5 245,116,3 21,207,5 256,1007,5 374,565,4 93,495,3 189,1312,2 353,198,4 209,952,3 5,510,5 280,257,2 173,414,3 4,218,3 404,1562,1 144,265,3 415,48,4 326,88,4 114,1007,5 397,215,5 377,417,3 384,484,4 177,227,5 167,818,4 108,761,3 353,86,3 415,865,4 424,38,4 373,264,5 338,1112,4 338,75,3 40,0,4 12,518,5 304,549,3 398,773,3 417,312,3 405,744,4 298,142,3 91,1013,3 25,332,3 343,236,3 333,791,4 286,200,5 377,47,5 90,133,4 415,31,2 302,550,2 137,184,4 362,127,5 428,44,3 392,195,4 386,1013,3 255,778,4 103,146,3 302,217,4 1,312,5 84,529,3 263,237,5 240,312,4 342,268,4 394,173,5 199,659,3 259,361,5 51,426,5 234,64,2 158,755,4 388,517,4 319,6,4 391,285,2 17,317,5 317,311,4 314,186,4 84,416,3 124,68,4 307,128,5 390,923,2 283,299,3 285,639,5 392,1090,2 238,491,3 296,182,4 222,845,2 179,422,4 188,133,5 57,197,3 129,287,5 94,401,3 1,278,4 344,24,3 424,184,2 37,418,5 27,449,1 
396,893,1 144,269,5 215,404,3 428,1015,4 296,230,3 161,709,4 57,954,4 150,1263,4 37,1033,1 275,558,4 78,149,3 360,516,5 292,53,3 397,203,4 5,283,2 415,403,3 158,410,3 327,431,1 420,442,5 196,225,4 108,156,4 203,335,1 22,268,5 283,331,3 390,1100,4 349,588,5 428,734,4 397,30,3 310,492,4 5,303,4 415,720,3 261,580,3 405,611,5 180,8,4 415,894,4 266,272,4 345,120,4 95,49,5 93,1027,2 225,1116,3 285,154,4 397,227,5 373,279,3 15,8,5 176,432,4 262,422,5 410,1469,3 57,317,3 431,294,3 93,174,4 233,746,3 58,217,5 401,233,4 416,587,3 12,874,1 298,521,3 212,173,5 420,155,5 267,97,4 262,268,4 266,239,4 193,93,3 87,903,5 405,1046,3 365,442,5 212,567,4 344,534,3 408,201,3 310,167,4 242,195,4 3,49,5 81,221,3 423,260,3 7,126,5 326,587,4 415,769,4 86,237,3 296,470,3 404,349,1 0,229,4 391,97,5 239,335,3 268,253,1 282,1486,2 392,232,3 432,301,5 404,1045,2 427,689,5 362,401,2 199,596,4 390,194,2 42,691,5 329,132,5 404,431,3 326,734,2 197,64,2 129,110,5 342,47,3 370,173,4 392,327,5 372,96,3 279,734,2 101,384,3 103,244,2 337,581,5 82,976,3 14,590,2 170,305,3 275,134,5 233,286,3 242,316,5 397,500,3 144,982,1 300,356,5 7,187,5 392,70,3 57,122,4 396,55,5 17,608,4 314,745,3 343,207,5 425,479,5 434,239,3 392,93,4 78,250,5 82,567,4 152,322,2 342,61,2 320,514,5 348,8,4 397,275,4 58,520,5 71,86,4 416,435,3 12,376,1 431,619,4 398,549,3 338,152,4 302,1187,4 404,774,1 297,177,5 358,23,3 285,765,3 249,320,5 199,541,3 188,193,5 335,215,5 324,492,4 353,173,4 261,587,4 231,13,4 144,630,4 138,306,4 93,549,1 158,116,5 273,0,4 324,312,2 435,237,3 20,447,4 261,139,2 434,298,4 193,503,2 10,21,4 416,741,2 137,356,4 94,177,5 243,1053,3 35,309,4 339,422,4 250,294,4 248,299,4 233,781,3 325,171,4 243,254,2 342,381,3 199,122,4 153,88,5 103,353,3 424,317,2 392,48,4 94,180,4 273,149,5 346,1058,3 270,309,3 233,235,3 346,297,5 88,13,4 194,1417,4 405,481,5 429,9,4 335,1217,3 358,269,4 183,381,5 290,618,3 395,273,4 9,131,5 275,550,3 221,661,3 307,146,3 372,205,4 73,128,3 63,198,4 61,540,3 216,826,2 341,91,4 86,575,3 214,185,4 321,488,3 415,293,4 400,293,1 289,171,5 233,1047,3 114,180,4 242,68,3 362,154,2 81,356,4 188,173,5 93,1152,4 416,683,3 425,602,5 0,35,2 182,484,5 22,188,5 319,848,4 12,339,2 421,324,2 404,649,1 183,249,4 253,620,3 263,229,4 91,153,4 254,443,3 20,293,3 346,1027,2 167,280,2 275,185,5 343,1049,3 405,674,4 401,150,5 386,115,3 344,331,1 333,630,4 75,1155,3 397,653,4 156,292,5 17,522,4 279,779,4 122,479,3 386,417,3 180,1164,1 400,49,1 10,454,3 173,203,4 192,1405,4 392,1220,3 136,256,5 275,117,3 360,738,4 268,1437,3 428,1118,3 341,128,5 377,320,3 202,249,4 249,88,4 346,54,5 398,172,3 268,446,3 404,1315,1 369,658,4 380,303,5 332,275,4 151,21,5 401,478,5 399,688,3 115,902,2 269,1209,5 176,474,4 12,184,3 322,55,5 392,736,2 307,460,4 127,97,4 222,825,1 431,741,4 150,632,5 220,1133,4 188,637,5 127,1038,4 117,163,5 283,339,4 323,257,4 17,134,3 382,312,2 397,190,4 255,401,4 220,684,3 416,1035,3 300,268,5 206,432,3 116,221,5 183,56,5 34,321,3 157,215,3 433,1059,3 6,494,5 215,1034,1 84,508,4 353,434,4 392,671,3 13,22,5 180,1359,1 324,434,3 53,474,5 298,545,3 300,299,4 117,31,5 262,482,5 337,99,4 408,994,4 434,379,3 124,721,3 293,244,3 89,286,4 344,549,3 188,8,3 17,240,3 255,1085,5 93,580,4 397,63,4 91,202,4 99,347,3 346,391,2 408,269,2 343,558,3 368,113,5 199,362,3 416,1134,4 388,403,5 100,1092,1 94,1218,1 379,427,3 9,419,4 282,675,3 62,923,3 251,99,5 329,762,5 129,755,4 33,331,5 218,934,3 436,164,4 385,545,2 6,519,5 313,681,5 435,160,4 182,330,3 0,22,4 431,110,4 161,942,4 210,302,3 300,275,1 425,492,4 1,298,4 285,731,5 297,202,3 361,287,4 
371,447,4 417,300,2 253,611,3 425,503,4 392,402,3 267,167,4 343,182,5 404,1274,1 89,704,5 91,225,3 317,656,5 63,508,3 404,360,2 91,327,3 200,183,3 187,65,3 185,117,2 427,339,4 338,960,3 333,267,4 428,370,2 404,550,1 360,178,4 61,327,3 404,1112,1 369,492,5 341,286,3 306,113,5 302,166,3 91,61,3 333,312,4 408,1020,4 13,167,4 415,474,2 302,684,1 124,495,5 418,704,5 12,216,1 127,98,4 257,242,3 336,449,2 266,779,4 312,848,3 193,209,3 330,214,3 58,698,4 341,974,2 51,234,2 373,474,1 323,299,5 285,791,3 29,230,2 157,115,5 366,325,4 196,315,4 93,134,4 326,178,2 373,575,3 268,426,5 236,602,5 427,315,4 221,0,4 333,131,3 197,630,3 298,654,3 360,761,2 359,126,5 255,833,3 92,234,4 404,1108,1 312,440,3 13,595,3 233,1297,3 0,223,5 262,264,4 198,686,1 118,994,4 420,123,4 47,285,3 51,92,4 250,1027,3 319,368,4 279,714,2 296,128,4 228,301,5 379,629,2 384,499,4 398,767,3 51,274,4 7,650,5 386,1142,5 55,257,4 9,711,4 124,925,3 274,100,4 415,426,5 42,271,5 220,172,4 403,891,2 83,193,5 180,423,1 298,709,4 84,191,4 416,978,3 48,345,4 416,1208,3 205,890,2 70,64,5 102,95,4 384,127,5 373,233,4 353,628,3 437,475,5 372,731,3 436,1120,5 304,6,4 384,964,4 69,761,3 290,768,1 187,160,3 298,661,4 193,258,2 278,464,5 270,548,4 261,357,3 12,650,5 221,718,1 221,657,3 173,66,1 179,172,5 404,784,1 415,177,5 12,23,1 279,233,3 333,1005,3 166,289,3 27,222,5 199,431,5 266,392,3 89,147,2 140,409,4 9,185,4 188,15,3 415,767,3 138,221,3 374,38,3 344,290,3 388,735,5 298,172,5 388,777,4 71,549,4 377,99,4 177,494,4 124,82,4 254,322,2 243,455,3 436,707,4 291,167,5 415,326,4 153,142,3 311,113,5 404,580,3 118,454,4 200,1219,2 6,578,4 214,448,4 59,506,4 342,146,4 14,0,1 161,1011,4 184,278,4 285,545,1 369,21,4 70,49,3 94,238,3 377,1438,3 428,708,4 196,209,5 9,198,4 176,688,3 353,152,3 392,188,4 268,432,3 428,490,3 111,306,4 58,49,5 372,1005,2 287,356,5 118,267,5 215,356,4 422,271,5 158,120,3 89,601,5 108,120,5 398,1134,2 242,631,5 12,672,3 93,651,4 360,233,4 362,228,3 212,228,4 292,738,2 117,192,5 110,241,4 194,234,3 291,264,4 12,286,1 223,722,2 199,691,3 13,476,4 298,135,4 278,72,4 344,715,3 206,579,3 234,514,4 404,29,1 311,172,3 414,242,1 421,257,4 317,864,2 232,192,4 377,978,3 436,1403,2 1,297,3 119,251,3 436,189,3 300,238,2 268,842,3 317,284,4 180,1027,2 43,203,4 389,712,4 345,1221,4 291,222,5 195,691,5 398,1178,2 362,8,3 196,879,3 290,539,3 43,469,3 262,647,5 426,679,5 233,1262,3 101,285,3 9,709,4 26,507,3 173,761,5 345,52,1 223,580,1 311,264,1 59,63,4 183,605,5 404,758,1 173,1040,5 94,392,5 369,607,4 379,184,4 343,221,4 235,297,4 430,285,4 271,287,4 406,173,5 112,1251,4 362,473,5 139,324,3 330,513,3 350,989,5 264,150,2 408,21,2 21,171,4 178,330,2 253,654,4 183,1396,3 359,962,5 233,483,5 10,174,3 220,823,3 55,89,2 70,134,4 101,628,3 416,98,4 200,259,4 243,583,5 173,12,3 412,254,3 254,859,2 346,214,4 359,190,4 344,747,2 436,418,5 397,478,4 294,641,4 428,940,3 81,1125,4 93,201,2 13,654,5 238,174,5 57,557,5 41,565,5 270,288,4 366,447,4 353,320,2 292,297,4 284,513,3 199,134,4 222,974,1 125,326,3 270,641,5 344,12,4 223,50,4 206,160,4 290,1504,4 360,530,5 212,273,5 74,1046,3 286,97,4 377,8,5 120,514,4 108,6,4 333,651,5 346,55,5 398,1400,3 0,72,3 303,741,3 344,97,5 433,814,4 157,432,3 320,461,4 48,737,3 150,152,3 6,222,5 386,70,2 408,663,4 346,124,5 117,16,3 441,400,2 285,126,4 199,1048,3 398,678,3 41,62,4 171,513,3 388,516,4 268,160,1 266,544,2 329,417,5 405,395,3 436,13,5 406,968,4 103,324,1 389,303,5 313,872,4 405,502,3 397,647,5 158,288,2 311,515,3 311,194,5 270,604,4 94,495,4 324,516,4 313,417,5 9,557,4 404,1057,1 177,322,3 147,77,1 421,125,4 
129,1135,4 369,630,4 335,123,1 192,99,5 175,49,5 404,1316,1 300,200,4 125,312,5 10,712,5 278,718,4 428,632,3 427,874,4 310,940,4 362,168,5 184,14,3 416,131,4 242,193,4 398,294,4 180,619,2 6,258,3 280,321,4 415,1516,2 315,131,4 62,115,5 221,160,4 292,429,3 58,141,1 95,22,5 327,502,3 392,49,5 243,737,4 180,145,1 238,835,5 403,293,4 379,495,4 311,1038,5 134,257,4 400,434,5 323,876,1 61,1011,3 431,236,5 129,618,4 81,150,2 61,163,5 434,430,3 342,123,4 221,1,3 353,161,3 366,287,5 362,678,4 57,194,4 426,299,4 404,1518,2 144,548,5 209,151,5 404,1044,3 360,523,4 233,633,4 290,293,5 233,482,5 199,257,4 422,309,3 397,1118,4 193,192,4 408,164,4 255,1230,3 0,66,3 275,203,5 59,504,4 155,317,4 435,72,4 392,1168,5 193,70,4 404,958,1 338,162,4 159,136,4 221,1028,1 384,324,4 209,863,3 435,410,4 223,299,4 229,384,1 53,117,4 90,747,2 296,196,3 408,7,3 300,384,3 229,483,5 7,175,5 300,203,5 406,654,4 55,734,2 21,229,4 285,65,4 201,172,2 122,961,3 310,194,4 377,120,4 275,100,4 278,471,3 94,225,4 177,477,5 55,24,4 356,865,5 200,692,4 296,292,3 91,507,5 129,270,5 353,510,4 187,180,3 300,230,2 404,1216,3 400,519,3 388,401,3 226,120,2 390,529,5 172,677,3 341,1165,1 93,602,4 275,222,5 71,134,4 243,76,4 268,498,4 93,420,4 134,938,4 4,427,5 129,688,2 266,646,5 31,258,2 37,27,4 278,247,4 328,249,3 108,387,5 329,472,4 275,1034,3 163,120,5 358,454,4 195,7,5 220,3,3 13,49,5 196,361,4 93,287,3 17,793,3 434,1033,2 12,26,3 317,156,5 384,384,1 12,911,2 373,203,4 93,51,5 182,449,3 404,448,1 91,581,5 434,461,5 229,96,5 157,171,4 200,1064,3 22,0,5 293,822,3 405,641,3 37,21,5 118,510,5 197,202,3 267,133,5 270,741,3 98,693,1 435,173,3 207,401,4 393,1078,3 310,722,4 21,996,1 439,689,4 371,648,3 335,409,3 116,1011,4 391,320,5 324,541,2 408,479,5 65,280,4 343,245,4 436,654,4 238,704,4 333,172,4 441,384,3 30,123,4 356,983,3 214,181,3 114,1072,5 89,1199,4 101,4,3 177,761,3 69,95,4 390,46,4 324,553,1 127,504,4 425,1115,4 59,415,4 187,239,1 12,200,1 217,153,4 392,379,2 415,545,3 392,1178,4 324,126,5 388,501,4 338,301,4 313,93,4 302,461,3 392,24,2 388,1450,5 43,273,4 377,1045,3 298,432,5 48,582,4 326,454,2 359,206,4 352,299,3 384,704,3 410,404,4 382,18,4 118,99,5 129,880,4 58,624,3 434,442,3 143,814,1 339,173,4 101,529,3 250,180,4 339,503,1 441,30,3 180,257,3 168,233,4 36,175,4 166,1304,1 189,332,4 198,258,1 159,22,5 243,41,5 98,408,2 398,1,3 93,823,4 341,812,5 408,338,2 157,471,3 302,1198,3 22,526,4 199,770,4 331,294,3 143,784,4 23,108,3 267,545,4 327,1138,1 405,198,5 248,1015,3 187,184,4 0,64,4 212,777,5 384,41,1 233,1399,3 199,87,4 404,168,1 441,181,4 78,289,3 404,526,5 116,1094,3 386,743,3 284,236,4 352,315,5 267,471,1 231,473,5 12,384,3 307,659,3 71,116,4 59,233,4 129,242,2 261,814,2 143,189,5 392,1052,3 405,526,4 209,196,5 209,299,4 212,590,4 238,88,4 196,529,3 235,548,4 380,659,2 388,497,5 425,607,4 17,150,3 328,244,3 415,106,5 333,305,4 197,526,4 422,322,3 180,458,1 12,851,1 245,627,1 434,692,3 238,487,5 341,927,3 277,305,5 255,549,5 392,590,5 380,1399,3 332,872,3 12,237,3 444,596,1 428,430,5 428,96,4 177,618,3 11,49,4 282,208,4 372,513,4 78,1007,4 347,1027,4 14,932,1 424,182,3 62,305,3 247,234,3 393,180,4 25,239,3 150,483,4 6,554,4 221,80,1 21,1000,1 346,384,4 197,227,3 342,498,5 9,482,5 111,285,4 43,20,2 129,121,3 0,189,5 57,215,3 140,0,3 343,1164,1 327,510,4 405,180,5 316,244,4 392,302,4 293,901,4 444,194,2 59,55,4 225,208,3 333,869,3 400,195,5 378,450,4 181,221,3 206,996,1 396,323,2 5,188,3 240,886,4 215,149,5 125,1174,5 221,299,5 267,925,2 255,367,1 435,131,1 342,68,5 292,279,2 377,470,3 243,865,5 377,1231,3 434,755,3 
425,632,4 337,602,5 426,301,4 406,156,2 213,19,4 415,1502,4 73,120,4 307,519,4 120,507,4 13,491,4 80,543,2 24,113,5 307,57,3 342,236,4 345,454,3 177,70,4 319,402,4 378,503,5 341,1072,1 249,68,5 224,244,2 243,51,4 4,258,1 132,303,3 373,256,3 371,11,4 435,453,4 193,940,2 384,525,3 395,105,4 393,120,4 194,270,4 47,180,5 62,224,2 428,1112,3 428,943,3 328,11,4 421,1016,4 124,383,3 411,3,3 325,478,5 241,1356,5 311,155,3 275,575,3 292,205,4 243,684,2 350,749,5 329,1043,5 417,303,4 391,1141,5 256,404,3 88,211,3 386,443,4 21,152,5 415,818,3 360,196,5 241,236,4 75,97,5 192,209,4 136,265,5 9,55,5 405,78,3 6,603,3 428,634,3 416,724,4 63,174,5 429,526,4 183,66,3 341,178,5 94,1017,3 385,6,3 220,23,5 267,100,2 108,214,3 14,458,5 424,824,2 386,188,5 267,1001,1 223,1044,2 124,93,5 392,395,1 329,587,5 431,49,5 229,139,3 398,762,2 416,194,5 279,628,4 232,922,4 114,951,5 281,878,2 278,1311,3 262,660,5 441,53,3 275,1219,4 118,309,5 72,317,4 1,18,3 96,191,1 446,1033,2 444,409,1 267,237,3 269,281,3 226,24,4 68,146,3 410,195,4 63,402,4 420,914,4 101,501,3 152,509,3 187,163,4 86,160,5 242,15,3 319,894,4 362,36,2 196,549,3 405,434,5 109,10,4 395,929,3 437,1027,2 12,782,3 313,1031,4 372,195,5 42,270,3 157,434,5 356,712,5 333,69,3 150,701,3 421,272,5 312,603,4 4,456,1 372,650,4 310,641,4 428,692,4 84,621,3 276,92,4 362,760,3 312,154,2 293,342,4 19,567,4 377,285,5 215,199,5 206,874,2 327,689,3 9,602,5 280,681,3 89,179,4 310,40,3 392,658,4 10,725,3 384,170,3 386,47,4 428,317,5 338,158,3 273,117,4 333,707,4 408,478,5 267,121,2 325,385,5 177,15,4 305,24,3 394,364,5 347,475,4 268,193,5 404,1028,1 17,151,3 278,433,4 268,142,3 270,429,5 212,840,4 293,1160,3 275,422,5 150,50,4 342,193,5 55,1091,3 89,482,5 405,208,1 157,529,4 180,1374,1 380,99,4 345,240,4 274,626,3 143,164,4 379,180,3 386,135,3 255,684,5 392,122,4 91,247,4 255,774,5 421,925,2 325,670,3 270,264,5 377,49,4 307,965,3 436,1261,3 327,568,4 206,99,2 300,1,2 434,745,4 221,567,5 55,168,4 319,684,4 333,1403,4 441,97,4 20,815,1 6,495,5 217,3,3 392,393,5 206,204,4 278,637,4 253,442,3 177,695,4 398,567,2 24,431,2 373,551,4 199,1138,3 159,6,3 232,498,3 386,440,1 63,538,1 185,225,5 188,233,5 295,308,1 63,61,2 434,114,4 404,1247,1 390,203,3 424,37,3 188,175,4 378,195,4 420,193,4 85,287,3 105,195,5 296,200,4 193,426,4 448,970,4 434,651,4 73,323,3 376,218,3 56,455,3 25,149,3 89,82,5 392,66,3 392,755,4 25,126,5 353,482,4 404,370,1 446,120,5 384,217,2 434,140,2 392,240,4 300,193,4 358,312,5 295,267,4 298,85,4 327,578,3 365,572,5 58,273,1 312,470,4 291,407,4 93,54,4 428,1038,5 249,687,2 69,205,3 202,303,3 269,1470,4 404,592,1 345,264,4 333,565,3 362,808,4 165,747,2 248,27,4 180,12,2 291,124,2 132,293,3 328,482,4 295,133,5 327,198,4 262,236,2 134,264,3 390,496,3 41,117,4 298,1131,1 434,1027,2 101,95,3 21,153,4 278,1500,1 175,235,4 108,294,4 10,39,3 324,196,4 319,404,4 347,99,4 233,402,1 20,290,3 61,2,3 380,76,2 312,1049,4 448,380,4 388,1051,2 327,163,3 200,61,1 157,515,5 159,229,2 91,844,3 302,540,3 320,606,4 233,653,5 80,97,5 319,21,5 245,54,4 206,152,5 201,285,1 103,345,3 232,422,4 406,116,3 407,747,5 396,260,1 27,664,3 394,120,3 135,514,5 200,173,3 81,404,3 143,296,4 304,174,4 404,1434,1 88,516,5 393,507,4 449,469,5 255,1209,5 98,747,4 404,794,2 345,966,2 19,143,2 37,549,2 382,8,5 261,392,2 342,404,4 307,617,4 209,87,4 263,174,5 63,69,5 292,52,3 122,131,3 0,99,5 215,363,2 311,610,5 299,99,3 416,31,2 214,433,5 12,348,1 326,424,3 449,782,3 15,481,5 24,632,4 39,749,3 253,257,4 129,626,5 337,515,5 428,426,5 436,172,4 326,95,2 278,720,5 420,199,3 176,469,5 270,470,3 90,514,5 
193,478,3 264,299,5 190,271,4 20,257,4 46,267,4 84,450,4 291,126,5 434,216,4 405,442,4 353,1240,4 114,8,5 404,672,5 311,182,5 378,685,4 266,66,3 58,202,4 193,659,3 446,217,4 377,895,4 150,706,4 415,478,5 142,324,5 428,108,3 444,822,1 68,507,4 428,196,4 275,976,2 292,327,2 346,72,2 341,949,2 302,754,2 42,172,5 388,28,2 143,220,3 84,478,4 91,21,3 157,116,3 180,1394,1 319,770,3 124,207,3 379,134,3 320,99,4 346,409,5 356,236,5 312,658,4 113,481,4 386,132,2 326,422,3 229,484,5 41,71,3 300,870,4 180,951,1 143,596,4 405,98,5 436,754,3 419,282,5 84,285,4 89,651,4 266,53,3 341,121,4 262,613,3 48,1069,3 197,32,3 384,180,1 275,773,2 384,285,3 46,322,2 304,404,3 140,321,4 449,1146,4 245,789,3 49,324,1 171,482,3 411,134,4 157,501,4 71,229,1 20,978,2 340,287,4 159,543,4 156,146,5 400,341,1 180,878,2 389,330,2 372,493,4 397,233,4 144,476,2 342,120,2 63,213,3 298,69,3 233,207,4 434,233,4 404,1166,1 335,152,5 381,1141,3 141,357,2 362,217,2 234,210,5 404,395,1 362,80,4 59,605,4 42,470,3 436,85,4 296,1013,3 398,771,4 310,575,3 329,285,5 304,200,3 56,931,3 207,207,4 41,478,4 319,275,2 233,427,4 213,186,4 415,398,4 84,236,3 312,525,4 386,560,3 200,1007,3 188,1116,5 49,507,5 302,1302,3 58,563,2 41,215,5 422,257,5 201,603,5 41,1048,3 177,195,4 436,664,2 436,238,4 341,142,5 244,49,4 233,962,3 406,199,4 55,553,4 5,505,4 195,427,4 262,162,5 353,133,4 94,590,5 310,321,4 325,97,5 291,704,4 317,137,4 428,1135,4 139,879,4 133,750,5 310,70,4 341,12,3 230,475,3 325,968,4 375,97,5 345,404,3 77,297,3 310,419,1 220,475,2 355,891,1 384,179,4 25,293,3 372,657,4 416,225,3 292,803,1 416,635,3 12,332,3 434,385,4 129,142,5 373,404,4 131,153,4 331,94,5 0,225,3 69,190,3 150,162,4 344,324,1 347,1119,3 180,287,4 275,408,3 324,179,4 238,953,5 199,55,4 324,524,5 233,426,4 150,355,2 434,1216,3 28,873,4 392,1214,3 415,1406,2 93,16,2 431,410,3 404,1420,1 55,727,3 89,420,4 94,228,3 249,55,4 41,94,5 397,161,5 192,55,1 37,132,2 330,10,2 292,225,1 28,301,4 94,78,4 193,567,2 292,1041,3 101,635,3 404,1177,1 173,767,1 57,1102,5 221,217,5 425,613,4 326,410,3 185,299,5 338,181,5 20,147,1 270,220,3 444,627,1 331,264,4 71,50,4 362,95,5 5,268,4 261,364,4 21,193,5 358,0,4 445,331,3 326,633,5 58,208,5 268,715,4 57,310,4 4,409,1 212,237,5 344,310,5 302,764,3 5,86,4 404,1556,1 102,68,3 262,194,5 117,420,4 278,1246,2 200,70,3 173,215,5 326,218,4 144,194,5 355,332,5 30,78,2 216,390,4 233,1125,4 313,1177,2 84,417,3 307,190,4 129,384,5 255,236,4 416,562,2 125,750,4 300,630,1 424,322,2 80,755,1 444,918,1 150,1112,4 341,1299,1 398,388,3 374,299,4 180,283,2 200,558,2 127,283,3 378,529,5 434,251,2 404,578,1 436,196,5 416,46,3 275,7,4 449,99,4 113,99,5 307,483,3 44,1059,3 192,146,2 234,651,4 292,709,3 387,116,5 9,126,5 259,312,5 197,153,4 4,368,1 448,290,2 182,53,2 267,54,4 261,199,3 292,1016,3 94,1089,1 160,186,3 404,385,3 17,431,4 231,492,4 245,2,2 341,1008,1 180,983,1 0,242,1 428,966,4 105,683,4 160,507,2 373,149,4 279,475,5 101,929,2 180,1332,1 255,472,5 223,961,2 156,1257,5 46,305,4 143,530,5 21,194,4 221,126,5 395,327,4 58,100,5 326,264,2 155,10,2 64,6,1 319,160,4 12,873,5 58,32,3 193,481,3 262,527,4 113,639,2 129,716,3 143,706,3 118,39,4 341,761,2 435,132,3 289,209,5 416,28,2 233,1077,2 216,120,1 438,124,3 403,312,5 93,540,3 61,513,3 43,411,1 177,213,1 61,215,4 12,820,3 222,299,3 416,543,3 180,872,1 269,175,4 424,474,5 220,2,4 58,426,5 108,161,2 81,7,4 223,161,4 149,409,4 177,257,4 108,545,3 290,383,4 290,23,5 221,372,3 9,132,5 268,997,5 107,13,5 377,448,3 404,509,1 89,903,3 88,267,5 158,309,5 449,57,3 428,743,4 314,30,3 99,312,5 392,54,4 196,312,4 
313,794,4 386,201,3 84,419,4 12,681,1 289,587,4 243,1208,3 17,922,5 129,43,4 404,466,4 329,21,5 449,141,5 314,193,4 5,317,4 177,596,4 406,492,3 324,285,4 40,208,4 180,307,1 213,345,3 341,133,4 338,257,3 404,764,1 430,747,4 378,1,3 429,655,4 62,125,3 270,69,5 405,39,3 328,287,2 424,171,5 275,696,2 294,1038,4 56,320,4 349,285,5 412,627,4 94,42,2 23,8,5 394,891,3 149,277,2 427,331,4 392,26,4 449,800,4 114,430,4 325,529,5 269,236,1 390,285,4 441,553,2 325,1230,3 160,190,2 18,257,4 63,384,4 14,410,2 325,526,5 248,167,4 12,448,4 444,203,3 444,1590,4 164,90,4 366,451,4 341,771,1 327,638,2 114,49,5 298,275,4 449,215,5 325,196,1 157,185,3 353,650,3 270,881,3 326,12,2 449,784,3 400,99,4 157,238,3 425,210,4 449,161,5 404,1561,1 292,648,4 267,193,4 333,475,3 238,662,5 417,268,5 225,257,5 383,342,3 446,69,3 404,767,3 102,233,3 388,135,4 394,254,3 449,143,5 192,306,4 312,585,2 215,167,4 302,247,2 267,39,3 307,153,4 268,740,5 180,336,1 405,409,4 144,300,4 391,165,5 415,974,2 55,6,5 364,741,3 33,328,5 310,1034,4 20,395,2 17,513,5 388,755,2 81,293,4 362,740,3 292,425,1 121,381,3 406,221,4 144,257,4 205,689,2 150,460,4 86,1183,3 449,461,4 59,416,4 424,82,2 86,701,3 81,288,1 116,24,4 341,507,3 143,749,4 275,561,3 381,136,2 348,1116,3 61,461,2 362,791,4 42,284,4 298,953,3 430,537,4 338,1243,4 366,218,4 267,197,4 182,256,2 424,249,4 197,217,3 298,51,4 242,82,4 191,276,3 63,229,5 277,293,4 290,728,4 386,293,2 444,407,3 12,827,1 378,198,4 424,312,1 162,346,4 296,325,2 185,298,3 424,55,5 362,214,3 5,275,2 84,215,3 14,743,4 428,1295,2 406,237,5 384,28,1 353,196,4 248,82,5 404,1174,1 279,430,4 408,964,2 401,234,3 372,81,1 203,303,3 362,460,3 183,8,5 12,786,3 173,55,5 118,740,4 17,486,4 431,273,3 206,627,3 306,194,3 279,152,5 293,824,3 113,209,3 58,581,4 415,404,5 302,157,3 166,567,3 448,136,5 244,239,1 285,823,1 285,420,1 154,291,3 449,383,3 415,841,4 85,288,3 12,449,3 134,942,3 300,221,4 233,587,3 101,78,2 415,567,4 279,976,3 293,357,2 58,763,4 200,412,3 373,0,4 42,408,3 449,738,4 9,97,4 199,124,5 151,482,5 10,93,3 317,707,4 17,233,3 266,167,4 157,409,3 296,434,3 12,838,1 104,751,3 329,173,5 279,273,5 184,514,4 63,153,4 266,171,5 394,923,4 262,167,5 255,475,4 310,1478,3 59,76,4 344,70,3 127,281,3 285,274,4 343,471,3 183,514,5 6,479,4 150,431,5 41,229,5 428,422,4 122,481,4 421,275,5 304,245,3 450,288,1 20,740,3 108,57,4 91,27,3 5,533,4 0,153,5 351,567,5 137,317,5 392,37,4 6,306,5 213,245,2 229,227,2 449,1268,4 261,110,4 0,213,4 42,627,3 203,1280,2 4,434,4 10,729,3 193,691,2 436,639,1 183,142,3 408,186,3 384,921,4 222,1196,3 180,260,1 321,271,4 418,172,5 223,195,4 43,228,3 290,243,2 94,327,5 377,240,4 69,510,5 270,515,4 220,257,1 298,542,5 416,206,4 378,1205,2 350,753,5 41,409,3 311,3,3 302,357,2 243,300,2 310,750,3 251,6,4 241,305,5 441,13,1 213,268,3 342,960,4 82,470,3 294,434,5 218,567,1 94,240,3 324,46,3 434,3,4 21,410,1 45,908,5 322,1049,5 58,203,5 405,1009,4 223,726,4 105,698,4 199,754,5 375,273,3 379,0,4 73,123,3 73,332,4 451,24,2 312,945,3 338,191,5 381,8,4 233,283,3 221,590,4 386,209,4 312,549,4 228,311,3 112,677,2 304,422,4 441,38,3 89,142,5 233,193,5 255,723,4 93,767,3 143,1284,3 235,194,2 2,263,2 416,10,5 92,476,5 410,167,5 404,1179,1 424,428,4 374,217,3 428,132,3 404,510,2 213,508,4 206,264,3 388,1073,2 270,116,3 214,356,4 17,704,3 343,895,4 432,172,4 449,178,5 26,294,3 436,583,3 326,588,3 313,326,4 118,976,3 230,923,5 449,37,4 428,320,3 94,190,5 257,747,5 278,1006,4 337,211,4 405,187,4 176,293,4 234,1118,3 113,95,3 180,267,1 398,1219,2 221,832,2 268,215,1 300,251,3 369,256,5 416,540,2 441,228,3 
115,136,2 41,412,1 310,784,3 45,537,3 360,155,4 58,512,4 275,568,4 147,712,3 27,69,4 324,233,3 61,228,3 177,240,5 408,683,4 279,72,3 37,423,3 338,602,5 318,879,4 358,404,3 405,49,5 404,1003,1 22,709,4 183,422,4 113,185,3 61,269,2 415,91,3 415,832,3 89,317,5 436,417,3 405,825,3 378,447,4 339,70,5 416,97,5 173,271,5 61,227,3 446,68,4 12,595,3 84,162,3 157,183,3 58,182,5 316,267,3 398,41,2 206,985,3 278,1010,3 441,451,3 114,177,5 235,442,4 89,201,3 346,826,1 304,97,4 448,285,4 140,299,5 96,31,5 406,1027,3 89,878,3 101,443,1 173,1138,2 197,182,5 125,259,1 229,925,3 140,105,5 415,940,3 341,22,5 206,153,2 193,191,5 370,97,5 302,819,3 422,1133,4 388,158,2 331,126,5 37,87,5 296,136,5 408,490,2 327,469,4 121,45,5 200,38,1 386,51,5 59,528,4 9,0,4 253,210,3 380,29,4 216,1227,2 321,590,3 220,460,4 366,257,4 428,709,4 27,435,5 387,309,5 109,747,3 330,946,5 86,117,4 434,674,3 296,686,2 84,214,4 206,1146,4 279,134,4 329,65,5 92,814,4 428,272,4 107,136,5 415,40,3 279,131,4 6,175,3 300,198,4 360,933,3 118,844,4 392,746,4 391,1159,2 406,477,4 307,73,4 374,76,4 451,522,2 57,407,5 393,216,5 12,603,5 378,238,4 317,228,1 61,167,5 409,285,4 290,180,5 329,418,5 253,131,4 386,1,4 428,1424,3 412,259,1 424,758,2 233,926,4 233,672,4 332,314,5 81,198,4 415,50,5 12,588,3 243,144,3 344,845,4 378,82,4 453,492,2 4,213,3 324,525,3 193,1092,3 365,6,2 12,480,3 408,380,2 289,448,1 160,275,5 337,131,2 290,91,4 415,505,5 304,855,5 415,283,4 269,739,5 452,1031,1 129,267,4 450,268,2 166,203,4 388,274,5 310,43,3 167,618,3 252,88,4 15,356,5 263,653,5 9,651,3 453,1062,4 413,99,5 224,21,5 388,70,4 157,147,4 94,285,5 434,627,5 6,417,4 124,282,5 99,291,2 84,983,2 412,300,3 245,231,3 346,474,4 408,936,2 322,242,1 5,523,3 413,263,3 206,865,3 206,977,3 12,476,4 452,420,4 428,143,4 428,498,4 133,14,5 238,206,5 117,133,5 215,8,4 384,793,2 127,762,4 207,366,2 294,114,5 275,577,4 248,248,4 298,58,5 58,731,3 150,188,5 377,404,3 397,1449,5 96,660,5 322,874,3 449,658,5 379,473,4 188,126,4 116,256,5 197,380,3 434,272,5 408,653,3 127,613,3 13,715,5 94,715,3 408,153,5 449,251,3 337,169,5 200,654,4 381,480,5 173,137,1 84,316,3 81,480,5 294,289,4 406,160,2 221,158,3 193,779,2 270,422,4 2,348,3 416,424,4 74,741,1 93,695,4 319,0,3 13,282,4 449,1159,5 313,1164,2 267,799,1 228,339,4 196,258,1 267,37,1 41,76,5 129,40,3 411,172,5 221,280,3 247,404,4 263,178,5 434,759,1 306,213,5 117,216,3 354,306,4 444,122,1 393,558,4 82,34,1 405,941,4 372,1078,4 428,454,3 199,419,5 404,529,1 19,97,3 23,357,3 300,736,2 365,55,5 197,558,3 15,628,4 267,232,3 431,545,3 406,67,4 40,180,4 268,395,4 245,929,2 428,161,4 404,651,1 0,160,4 55,163,4 391,128,4 180,471,1 6,540,2 213,323,5 153,707,4 326,312,4 292,1225,3 44,107,4 177,457,3 302,451,2 118,716,3 360,512,5 297,595,3 69,68,4 150,419,5 291,134,4 415,70,4 450,322,4 392,724,2 449,565,4 196,664,4 180,984,1 377,920,4 184,446,4 129,229,3 373,1041,5 346,958,5 236,184,4 81,239,1 386,68,3 21,167,5 144,932,1 103,712,3 266,293,3 221,260,1 261,922,4 42,214,5 274,450,3 302,151,4 143,159,2 390,237,5 451,1108,2 329,76,4 196,322,3 408,180,4 441,558,2 209,178,3 452,124,3 183,516,4 380,134,5 446,822,3 453,181,3 15,194,5 446,543,4 232,55,5 344,311,3 404,1549,3 300,830,4 450,881,1 4,363,1 324,99,4 344,971,4 453,417,3 327,473,4 233,471,2 449,495,5 406,81,3 373,229,5 157,203,4 285,948,4 242,777,4 304,526,5 37,233,5 392,99,1 81,1163,2 12,515,5 324,506,3 386,82,4 285,422,4 275,802,2 311,967,5 416,1038,3 144,1001,1 86,200,2 331,119,4 160,134,2 449,678,1 340,886,5 55,76,3 342,920,4 364,947,1 324,98,5 167,741,5 63,231,2 184,274,4 292,652,5 215,651,4 
292,636,3 268,175,2 0,61,3 274,945,3 392,839,4 336,1015,4 252,78,5 197,173,5 449,386,5 233,847,3 173,346,4 95,1153,5 278,615,3 155,47,4 436,1112,4 393,551,3 294,394,4 326,225,3 317,516,3 392,147,4 331,52,3 392,40,4 43,146,4 404,521,1 153,526,4 311,178,5 25,12,3 84,510,4 233,1458,3 435,657,5 124,567,5 428,218,4 388,3,4 452,97,4 270,271,3 233,750,2 237,257,3 370,78,5 302,758,1 262,187,5 415,116,5 353,51,5 21,203,5 197,1093,1 451,151,2 343,250,5 434,159,5 454,134,5 194,54,4 232,514,5 173,596,3 449,907,1 64,428,4 197,500,4 416,627,3 359,268,4 260,1236,3 415,1091,3 346,24,2 195,1117,4 38,299,3 404,1423,1 437,14,4 443,49,5 48,99,4 276,273,4 124,87,5 343,485,4 333,627,4 89,44,3 279,654,3 88,951,2 345,185,3 194,131,5 398,273,3 159,217,4 192,259,1 343,254,4 16,743,3 310,264,5 159,602,4 174,131,3 412,6,3 42,55,5 395,985,4 119,147,3 378,3,5 189,894,3 98,977,3 428,466,4 192,351,1 268,497,4 434,187,4 173,121,1 12,108,4 12,430,1 115,729,4 444,11,2 313,780,3 317,696,5 57,12,3 353,135,5 157,234,1 86,627,4 293,748,3 25,741,3 377,11,5 333,134,4 278,39,4 365,183,4 405,1266,3 254,440,2 449,264,5 231,918,3 245,175,4 360,434,5 411,633,5 448,1008,4 449,1225,4 63,310,2 215,927,3 264,590,5 29,6,4 428,409,4 63,3,3 402,257,4 444,762,2 268,596,1 416,613,3 300,178,3 233,650,4 103,330,3 405,731,4 205,308,2 166,234,3 69,624,3 3,353,5 329,160,4 94,442,3 180,840,1 416,126,4 453,632,2 388,698,5 157,153,4 404,216,1 156,267,5 101,362,2 326,1130,4 378,22,4 275,875,3 199,139,4 453,180,3 307,225,3 78,1016,3 98,405,3 291,189,5 159,0,4 295,651,4 88,814,4 298,174,5 91,1011,4 360,653,4 302,1186,4 415,958,5 226,1016,4 63,51,3 434,553,3 295,88,5 302,228,3 227,271,5 318,332,4 93,160,3 320,13,3 447,1293,1 451,494,4 266,79,4 91,801,2 336,120,5 101,719,2 278,272,2 449,182,4 309,221,3 153,478,4 82,404,5 42,731,4 267,96,4 91,409,3 42,76,3 406,475,2 233,1455,4 163,236,2 329,526,3 372,708,5 270,643,3 129,294,3 192,927,2 159,49,4 438,13,5 306,185,5 431,1046,5 415,331,4 144,738,2 428,1223,2 74,410,5 404,87,3 444,1244,1 362,90,4 435,432,5 392,830,1 172,878,5 381,638,3 335,1097,3 415,63,5 25,1007,3 379,482,4 435,1205,3 302,808,2 245,742,1 72,517,5 388,1517,2 47,70,3 333,318,3 405,163,5 266,227,5 17,526,4 313,738,5 384,941,2 245,264,4 129,1272,2 329,94,5 4,208,5 296,69,5 86,119,2 177,894,3 331,683,5 320,729,3 436,450,5 319,61,4 446,147,4 400,69,4 250,287,4 109,1187,4 313,408,4 93,1146,4 435,502,4 346,240,3 451,133,3 395,0,4 129,201,5 422,268,3 275,580,4 221,221,4 279,287,5 243,565,4 294,370,4 422,14,4 167,258,2 204,327,3 182,293,3 166,948,1 451,735,3 192,326,1 348,287,3 390,747,3 397,57,4 139,871,3 439,885,5 373,368,1 319,37,4 243,39,2 307,685,4 223,685,4 396,326,2 270,311,2 10,50,4 343,534,3 108,1022,2 198,988,1 42,949,3 180,146,1 257,689,4 449,1036,2 59,477,3 84,1130,4 61,762,1 361,293,3 325,524,5 175,269,4 80,221,2 94,559,1 20,326,3 415,173,5 396,182,4 128,287,1 353,210,2 408,202,5 118,281,5 223,1440,3 300,742,2 349,603,5 324,132,3 120,274,4 395,976,3 210,227,3 51,741,4 312,65,1 447,359,4 268,567,2 325,602,4 12,877,1 285,90,4 253,560,3 391,7,5 91,411,2 391,510,5 416,544,1 400,64,4 384,402,3 213,184,5 415,300,5 86,3,5 310,603,3 209,126,5 302,1209,1 386,457,1 353,739,4 89,32,4 143,941,4 421,378,2 25,925,2 424,280,2 129,67,5 70,153,3 406,745,4 422,347,3 373,716,3 118,1260,4 344,475,3 42,878,4 238,735,5 144,430,5 275,123,5 67,124,1 108,213,1 346,126,5 386,38,3 124,1059,4 392,53,4 376,163,4 264,933,3 221,69,3 257,288,2 114,76,2 404,317,5 298,55,4 404,669,1 220,402,4 210,68,3 312,402,3 397,64,3 384,522,4 298,169,5 231,269,3 129,720,3 91,454,2 
325,47,3 436,695,3 394,63,5 223,10,3 199,392,4 55,371,3 353,583,5 452,247,4 26,324,2 286,339,5 114,268,3 378,156,4 333,448,3 362,553,1 105,955,3 193,179,3 71,37,3 193,950,3 180,277,2 48,412,1 356,976,5 407,257,3 91,208,5 347,1060,5 441,40,4 193,63,5 60,346,5 47,227,3 398,474,5 57,88,3 42,154,4 454,628,3 373,309,5 114,21,3 17,960,3 408,86,3 388,141,3 10,190,4 25,1008,2 386,202,4 235,68,5 129,563,4 243,738,3 415,265,3 192,193,4 118,1159,5 289,741,2 306,81,4 188,819,1 390,126,5 317,791,2 341,814,4 372,99,3 325,85,2 311,203,4 313,87,5 307,283,4 268,1153,3 373,167,1 166,718,1 80,146,4 93,91,4 81,512,4 421,199,5 302,267,5 321,513,4 212,99,5 147,587,4 269,180,4 229,69,4 84,94,4 252,332,2 250,251,3 15,495,5 372,558,3 441,577,2 4,390,4 300,175,4 343,294,3 48,267,3 254,233,5 150,728,4 22,49,4 159,1072,4 272,304,4 6,652,4 373,543,1 57,119,2 404,27,4 270,484,4 300,442,4 393,227,5 312,515,4 261,1047,2 325,614,4 337,434,4 159,1015,4 298,606,4 255,1089,2 144,619,3 390,506,4 59,198,5 327,6,4 50,484,1 61,13,4 300,551,3 90,96,5 391,297,1 183,741,3 379,113,3 221,119,2 404,436,1 388,150,4 221,762,3 13,284,5 362,153,4 297,98,3 92,124,1 379,464,4 286,207,4 300,119,2 140,289,1 334,287,4 373,232,3 221,402,3 434,172,5 273,470,4 432,49,5 296,117,3 449,193,5 376,1104,3 292,923,2 435,741,5 21,870,3 221,807,3 36,947,4 61,161,4 388,1285,5 447,261,4 456,181,4 234,78,4 446,233,4 453,160,4 455,174,3 5,99,5 341,159,3 396,116,3 180,689,3 333,407,4 338,0,5 424,258,1 205,312,5 378,81,4 449,901,4 416,761,3 404,191,5 95,95,4 386,54,3 61,920,2 144,795,3 257,271,5 323,8,5 338,195,4 357,58,4 124,49,5 416,1046,4 425,428,5 286,747,4 404,1585,1 302,184,5 24,274,4 23,116,4 59,1122,4 415,733,3 191,257,5 405,646,5 285,213,1 282,193,4 11,317,5 413,293,2 153,134,5 90,356,5 439,1037,5 342,99,5 266,726,4 327,622,3 432,324,2 427,342,2 59,442,4 353,497,4 342,187,4 319,155,5 409,747,2 302,824,3 63,124,2 262,21,5 173,475,4 452,245,5 124,800,3 300,21,4 415,245,4 87,885,5 450,293,5 434,184,4 286,249,3 392,312,4 183,163,3 404,1317,1 275,548,3 419,301,4 443,750,4 416,22,3 233,217,2 393,116,5 425,485,3 33,285,5 365,853,5 113,178,5 232,248,5 353,478,4 377,109,3 91,924,3 400,12,2 147,992,4 72,172,5 54,256,3 216,175,4 93,809,3 206,190,4 122,142,5 441,128,4 98,471,3 428,126,4 75,326,3 200,99,4 435,225,4 286,297,4 156,507,5 342,51,5 436,651,4 392,35,3 205,339,3 97,172,1 424,209,3 129,432,3 449,1262,4 84,249,3 365,218,5 378,269,3 379,448,3 396,656,5 360,172,5 346,720,5 304,236,2 180,985,2 446,99,5 397,151,4 159,968,1 213,339,3 326,343,4 292,746,2 393,96,4 448,281,3 41,865,4 263,670,4 438,12,3 221,214,4 247,113,5 81,1161,1 387,325,5 61,54,5 389,327,4 306,144,4 54,272,5 223,14,4 14,814,1 88,212,4 249,95,2 129,889,4 373,650,4 253,1132,3 317,141,4 185,595,4 95,78,4 310,450,3 449,63,4 451,434,3 98,272,5 307,491,3 89,513,3 64,215,4 392,804,2 449,1035,2 366,378,4 384,256,3 149,457,4 253,678,2 329,72,5 379,630,4 273,163,5 408,324,4 237,297,5 267,1064,4 96,356,5 220,75,4 312,217,2 9,143,4 449,941,5 108,379,5 267,526,4 353,130,3 311,519,5 144,237,4 252,97,5 353,691,2 457,1100,4 269,257,3 428,1027,3 380,132,5 11,281,5 12,677,3 391,871,4 51,474,4 197,659,4 261,735,3 341,1162,3 441,571,3 313,110,4 278,830,5 287,317,4 255,120,5 400,472,1 302,947,2 279,1180,2 98,337,4 193,94,3 346,97,5 95,197,5 404,940,1 163,1024,4 91,726,4 380,484,4 40,285,4 302,285,5 292,21,3 310,317,5 57,193,3 337,479,5 116,297,5 406,162,3 15,811,2 91,724,3 273,180,5 128,326,3 415,229,4 307,674,4 449,466,4 446,67,5 389,288,3 238,339,5 219,267,4 338,410,2 408,167,5 275,153,4 266,221,4 12,209,3 
450,299,4 429,128,5 436,1152,5 255,747,4 302,506,5 400,161,5 436,117,2 415,53,5 294,190,5 124,1248,3 180,328,1 218,214,5 312,225,4 382,482,5 405,237,2 221,247,4 181,122,4 183,253,2 294,93,4 255,830,4 324,344,3 159,863,1 127,53,2 436,498,5 326,418,4 115,992,2 266,479,4 377,664,2 12,523,4 47,424,3 404,564,2 306,461,4 257,331,5 180,762,1 4,378,3 415,286,4 129,683,5 183,941,3 45,293,2 377,327,3 377,432,4 406,417,4 42,236,4 245,1231,1 397,392,5 449,1211,4 354,871,4 396,321,1 398,272,3 342,728,3 457,143,4 455,714,3 107,470,2 404,227,1 93,22,5 295,268,5 150,545,2 101,1238,2 313,245,5 294,141,4 344,234,3 69,192,4 62,1136,5 324,482,5 43,244,4 267,553,3 143,194,5 307,691,3 362,824,4 333,839,4 422,897,4 435,844,5 27,799,4 81,518,4 20,451,4 450,871,2 405,590,3 405,712,4 12,10,1 345,175,4 150,1108,4 405,489,3 310,479,4 75,88,4 96,201,5 0,187,3 69,185,4 411,479,4 311,712,5 398,1458,3 91,1,3 320,19,3 295,434,5 362,1073,2 267,1078,3 327,202,5 42,70,4 289,204,3 416,768,1 266,1470,2 344,1046,4 56,297,3 392,20,3 456,703,4 304,182,4 36,264,4 209,209,5 454,292,4 48,178,5 84,207,5 192,721,3 220,108,2 58,565,4 6,552,3 434,695,3 183,496,4 214,178,4 153,413,4 425,429,3 363,268,4 249,116,3 456,635,4 392,69,3 242,1280,5 344,271,5 428,25,3 221,110,3 43,175,5 223,402,4 343,300,4 449,96,4 400,660,3 300,718,4 441,37,3 393,978,5 12,841,2 41,275,1 453,486,4 421,476,4 449,761,3 378,51,4 453,483,3 378,176,4 17,920,5 346,150,3 311,608,3 406,948,3 147,203,3 326,8,5 219,318,4 316,322,2 404,540,1 34,331,4 48,218,1 100,1046,2 158,411,3 380,174,5 302,1046,2 384,86,3 55,401,5 333,1136,4 266,449,2 227,689,5 197,279,3 285,1503,4 57,31,5 344,581,5 207,185,4 5,479,4 423,8,5 245,100,2 400,224,1 298,277,3 225,249,4 0,101,2 346,222,4 425,181,2 143,654,5 369,603,4 395,321,4 456,442,4 58,510,5 81,264,4 197,454,3 127,401,1 398,399,3 404,513,1 371,285,5 449,142,5 329,69,4 335,157,3 298,392,2 182,269,3 446,878,3 406,61,3 6,514,3 82,390,2 0,68,3 321,181,5 319,49,4 384,188,5 443,905,4 350,285,5 359,422,4 342,545,1 20,930,2 129,553,4 260,303,3 88,149,5 93,7,5 449,582,4 347,923,4 366,199,4 380,48,2 404,622,1 400,500,2 217,99,4 377,171,4 233,287,3 401,47,5 331,53,4 344,125,5 157,123,4 428,463,3 392,1042,3 441,635,5 386,267,3 378,151,5 415,253,2 124,478,4 81,173,5 378,602,5 285,471,3 293,405,2 449,479,4 118,981,4 404,215,2 63,156,4 312,413,3 358,245,3 362,402,3 61,46,4 309,180,4 91,366,3 275,627,4 405,510,5 89,154,5 269,252,5 180,924,2 176,58,4 120,180,5 214,353,4 293,1131,4 425,525,4 175,302,3 404,86,1 392,222,4 406,181,4 327,221,3 461,681,5 268,99,5 449,845,3 326,1099,4 382,196,5 338,264,3 390,299,2 89,546,3 212,317,5 319,183,5 353,604,3 151,156,5 441,28,3 377,635,3 109,789,4 406,146,4 326,194,4 362,100,1 129,330,3 456,774,3 285,392,4 415,606,5 28,293,4 187,863,2 386,1198,5 270,37,2 381,716,3 29,1012,3 333,511,4 221,269,2 386,23,5 458,933,3 91,954,4 275,261,4 95,175,4 6,490,5 275,651,4 13,17,3 253,135,4 451,190,5 209,419,4 406,264,3 227,97,3 81,201,4 449,560,4 234,493,4 266,215,4 452,696,4 408,132,4 101,101,3 258,316,5 434,575,3 62,1008,4 304,1103,4 376,312,5 298,1213,2 6,117,2 422,299,3 270,30,4 172,267,4 278,1107,1 397,175,4 62,254,4 37,525,1 58,226,3 316,350,3 91,72,3 391,631,5 424,1313,3 387,689,5 38,346,4 366,566,4 416,199,4 435,1521,2 408,603,4 456,191,5 6,536,3 91,475,2 42,301,4 416,130,4 93,430,4 373,209,4 246,27,5 300,1011,4 298,474,4 72,270,2 9,697,4 426,318,3 448,1193,4 391,846,4 392,322,2 193,234,2 433,405,3 220,32,4 15,6,5 415,935,5 453,86,4 242,136,3 276,284,4 362,1051,3 383,346,4 294,647,4 206,180,3 267,301,5 115,913,2 380,458,4 
449,486,4 243,627,4 435,156,5 183,994,3 415,21,5 325,233,3 454,754,3 456,707,4 302,482,5 99,1237,2 396,478,4 267,581,5 302,939,2 129,217,5 6,52,5 10,11,2 296,221,4 58,706,3 457,647,4 101,349,3 323,306,5 274,225,3 84,178,4 55,63,5 193,69,3 349,192,4 118,321,4 222,331,4 455,942,4 453,1298,2 233,588,3 428,228,2 377,1106,3 459,9,3 214,226,5 128,244,2 424,321,3 31,256,4 82,1164,2 144,121,1 59,841,4 396,704,5 22,356,3 115,298,3 454,281,3 206,78,4 317,68,5 296,46,2 12,257,4 14,299,4 437,49,5 376,55,4 411,192,4 335,1229,2 400,484,4 335,925,1 101,576,3 116,1013,3 290,283,4 9,473,4 109,549,3 269,76,2 388,704,5 89,1019,5 86,89,2 12,378,1 289,514,3 285,166,5 90,525,4 383,747,4 312,541,3 424,272,4 392,201,3 74,117,3 42,50,1 207,69,3 119,124,4 294,495,5 331,173,5 185,94,3 199,738,4 369,152,2 379,299,3 19,86,5 393,422,5 451,640,3 136,596,5 240,301,3 177,38,2 203,873,3 130,284,5 58,172,5 159,411,3 43,160,4 27,257,5 435,545,3 275,197,5 274,434,3 109,422,4 455,293,1 456,365,4 405,23,3 449,1,4 233,433,3 22,108,3 20,319,3 0,169,5 435,65,5 384,1020,5 221,824,3 390,221,2 461,894,4 267,287,4 20,705,2 206,526,4 290,287,5 59,523,4 437,8,4 296,68,3 460,49,3 446,251,3 91,81,2 27,442,4 416,1549,3 203,302,5 200,657,3 102,251,2 320,189,4 21,650,4 278,406,4 12,546,1 197,197,4 436,422,5 403,327,4 451,60,1 408,526,4 4,380,1 449,236,5 243,855,5 307,481,5 364,271,4 71,6,1 177,259,1 221,1217,1 312,390,3 127,195,5 321,187,3 404,1065,1 311,371,3 420,237,5 312,22,4 167,8,1 214,175,5 167,929,3 58,755,2 373,465,5 243,958,4 384,485,2 424,447,2 199,418,4 17,662,4 58,63,5 287,345,5 176,22,5 6,98,5 144,687,4 400,153,1 188,88,5 366,233,4 118,754,1 278,155,4 323,321,4 449,38,4 245,830,1 78,1021,5 173,722,5 254,321,2 108,66,5 198,507,4 208,332,2 313,1472,4 189,538,2 150,208,4 108,172,5 91,499,4 434,342,5 262,495,5 89,608,5 456,499,5 233,1063,4 328,281,3 144,731,4 3,270,4 89,150,2 193,938,3 263,239,4 230,288,4 315,506,3 415,1011,4 82,596,2 377,958,3 93,6,4 297,510,4 91,55,5 169,287,3 434,433,2 345,570,3 188,197,4 310,583,3 275,733,1 221,317,5 416,249,4 334,332,4 458,595,3 94,189,4 12,756,3 116,178,5 408,172,3 244,20,3 246,268,4 436,726,3 229,140,4 212,198,5 115,871,3 406,178,3 298,605,4 12,233,5 330,474,3 459,326,4 17,403,4 430,878,3 345,146,4 392,469,4 94,596,3 58,669,4 388,509,3 160,308,2 434,409,5 415,212,5 312,661,3 21,664,1 275,324,3 404,422,5 69,174,3 320,44,4 362,43,3 209,925,2 458,193,3 47,646,4 250,1011,4 384,1096,5 223,365,3 89,514,5 334,257,1 326,1018,3 267,1475,2 124,473,3 397,662,2 193,99,4 63,299,3 296,52,3 449,777,3 416,76,3 0,37,3 198,0,1 248,249,4 307,212,4 315,190,5 302,1,3 346,272,5 14,925,1 133,315,4 268,1016,5 123,97,4 378,178,5 398,968,3 138,1175,4 456,242,2 221,232,2 315,275,2 95,184,5 333,944,4 136,120,5 333,530,5 338,55,5 0,8,5 214,22,3 404,1109,1 335,40,3 453,478,4 158,917,4 450,359,3 6,564,4 404,462,1 143,172,5 268,501,3 254,244,1 345,131,4 386,183,3 193,1044,2 398,92,3 49,287,4 237,124,3 321,149,4 353,791,4 384,454,4 404,226,1 206,1332,3 311,511,3 89,215,5 58,1004,5 249,49,5 248,297,4 213,526,4 428,1544,2 36,67,5 179,728,5 404,196,4 249,686,1 373,119,3 42,210,4 391,267,5 456,526,5 363,874,3 404,392,4 449,814,3 143,212,4 245,757,1 345,549,4 72,58,5 343,72,3 386,1536,4 362,332,1 267,283,3 466,1016,2 270,106,1 232,508,4 148,261,1 42,277,3 129,342,4 303,293,4 437,320,5 8,275,4 396,301,5 72,134,5 329,117,5 456,1029,2 255,97,5 448,223,4 425,526,3 104,323,4 467,925,2 12,88,4 83,244,4 17,761,3 327,447,3 278,473,5 180,136,2 144,258,3 101,666,3 212,213,5 105,1027,3 449,707,4 415,965,5 307,22,5 89,964,5 206,14,4 81,96,4 
346,596,3 61,709,3 90,210,2 118,51,3 221,182,4 354,357,4 98,814,2 379,572,1 159,404,3 272,314,4 115,192,4 298,288,3 129,304,4 436,165,4 421,0,3 451,26,5 63,404,3 327,594,3 93,524,5 69,404,3 456,163,4 379,528,3 422,354,3 430,322,3 61,8,4 89,616,4 338,268,5 120,791,3 457,190,5 266,565,3 180,293,2 144,770,2 178,892,2 89,492,5 222,619,2 314,460,4 362,257,3 10,749,5 292,548,3 76,132,2 449,399,3 9,81,4 108,1059,4 465,307,1 338,88,5 279,76,3 11,169,4 91,207,4 343,356,5 425,198,5 232,500,3 392,315,5 415,1477,2 101,326,2 425,650,4 89,51,5 438,241,5 327,176,3 392,98,3 404,1390,1 404,1167,1 255,582,5 89,126,4 16,116,3 359,9,5 434,728,2 271,78,5 94,531,4 73,353,3 322,237,4 436,4,2 362,556,1 388,525,3 436,213,4 9,509,5 326,671,2 248,113,5 311,1166,4 6,464,4 436,49,5 421,99,4 158,272,5 156,475,1 63,167,5 232,602,4 27,10,4 444,290,2 446,299,4 223,285,3 400,247,3 173,157,2 82,63,5 344,380,4 58,844,5 354,241,4 392,160,4 377,767,4 275,97,5 403,341,3 258,178,4 168,210,5 300,606,4 31,507,4 404,948,5 380,88,5 173,432,5 366,768,3 321,63,5 78,581,5 434,215,3 311,70,4 185,242,2 374,524,4 429,252,1 411,507,4 398,227,2 372,1112,1 93,1117,4 1,276,4 377,723,3 5,535,4 267,390,3 467,460,4 157,19,4 378,202,4 3,299,5 206,840,3 431,249,1 185,1015,5 193,490,3 80,0,4 147,431,5 415,201,4 37,111,5 194,46,5 342,462,4 377,230,3 398,363,4 425,847,4 455,447,3 298,203,4 199,57,4 256,323,5 398,66,3 74,412,2 48,1065,2 313,1228,2 436,657,4 388,964,5 12,22,5 10,167,3 114,236,2 344,195,5 36,88,4 331,105,4 275,257,5 421,357,2 293,1078,2 81,237,3 429,236,5 296,446,4 17,402,3 86,404,4 200,222,4 346,171,5 54,596,2 307,30,3 372,214,4 319,94,3 436,282,1 325,88,4 311,652,5 327,332,3 329,66,4 144,11,5 94,814,3 322,203,3 157,429,5 88,268,5 175,346,4 12,434,5 143,67,2 138,295,4 181,256,3 6,583,4 81,587,5 93,317,5 64,377,5 300,473,4 312,767,3 344,1220,3 307,494,3 291,19,2 292,25,3 459,256,2 48,217,2 436,484,4 245,249,4 401,244,1 342,357,1 415,167,5 441,590,3 348,19,5 333,110,3 242,13,3 266,28,3 457,317,4 108,1221,4 101,229,2 415,84,3 193,484,3 268,927,1 0,245,5 405,21,3 180,470,2 268,424,5 200,772,4 410,450,4 327,382,3 332,299,4 441,21,2 196,187,3 42,87,5 456,719,3 53,828,2 36,549,4 209,410,3 10,369,3 150,626,2 211,381,5 346,232,5 379,182,4 329,369,4 283,300,5 467,641,3 291,213,3 267,9,4 298,472,3 373,843,4 58,82,4 252,272,3 116,742,1 214,269,3 222,928,3 451,505,3 90,519,4 366,52,4 300,41,4 405,126,4 245,37,2 380,606,4 267,709,3 382,301,4 412,325,3 346,67,5 458,1059,1 338,44,5 436,64,4 338,178,5 17,844,3 355,325,4 173,1073,4 285,767,3 263,203,5 254,828,1 233,418,4 298,287,3 183,656,4 238,526,5 458,215,3 267,46,1 372,194,4 256,1471,2 326,146,2 16,149,5 25,412,2 1,281,4 140,755,3 270,494,5 416,430,4 203,241,5 264,236,5 143,23,4 362,443,4 21,186,5 245,443,4 12,549,4 379,613,3 193,641,2 93,153,5 41,0,5 37,199,5 276,236,4 296,270,2 342,174,5 261,973,3 329,1083,5 223,1057,3 429,546,2 214,379,3 12,537,1 37,68,5 333,173,4 449,494,4 298,344,4 232,70,5 261,1034,3 221,449,3 56,110,4 405,186,2 384,110,2 372,948,4 311,185,3 323,871,5 64,955,4 249,500,5 434,464,2 307,647,4 471,400,4 215,121,5 416,138,3 434,928,2 346,270,1 372,413,3 238,317,1 405,1007,4 159,843,3 338,636,4 144,553,3 327,272,3 347,830,4 415,500,5 333,1015,3 279,157,2 373,253,3 386,402,3 268,706,2 404,1579,1 257,332,2 455,661,4 86,131,5 112,267,4 452,355,2 342,10,5 150,135,4 4,98,3 150,1285,5 149,150,4 88,874,3 298,184,3 220,567,4 218,268,5 406,431,4 221,635,4 188,130,4 238,90,4 464,201,4 42,282,2 325,612,5 457,386,4 377,294,3 159,639,3 10,69,4 471,10,5 251,13,4 124,363,3 424,323,3 199,98,5 
72,649,3 449,381,3 348,119,3 144,149,5 12,668,1 404,1267,1 115,1012,3 276,843,4 173,721,4 362,567,2 105,209,4 12,474,3 406,143,3 419,136,4 306,209,2 69,180,4 275,450,3 404,998,1 345,91,4 12,888,3 5,20,3 458,133,3 342,168,5 86,790,2 129,248,5 17,5,5 233,744,4 180,930,1 455,209,3 75,768,1 378,126,5 378,516,4 405,68,4 345,301,3 431,117,4 375,196,4 150,461,4 449,517,4 218,381,5 279,1065,4 405,824,4 116,976,3 436,500,4 460,301,3 386,113,5 143,523,5 248,1038,5 404,307,1 14,408,3 288,150,2 63,846,3 157,796,3 333,270,3 297,470,4 462,1131,1 112,322,4 382,236,4 93,10,5 338,460,5 255,88,5 398,412,2 378,527,5 401,510,5 222,294,3 89,888,3 88,12,2 41,386,3 271,210,5 345,393,4 175,110,4 449,109,4 449,1202,3 158,594,5 173,845,5 86,215,5 269,182,5 84,189,4 281,688,2 373,290,3 341,110,3 285,380,5 338,446,4 313,945,5 471,192,5 243,470,1 223,27,4 357,917,1 233,22,4 268,147,1 59,592,5 36,26,4 392,316,4 116,32,4 199,187,4 231,208,3 129,1045,4 109,314,4 109,172,1 398,745,5 320,1330,4 180,1362,1 356,0,5 405,182,5 321,233,4 289,1078,2 429,233,4 158,23,5 427,749,5 285,1279,5 193,402,2 144,241,5 456,57,4 368,167,3 453,143,4 416,1027,3 93,738,2 41,370,4 389,514,4 10,273,3 285,124,4 199,678,4 397,166,3 221,406,2 434,596,3 325,169,2 118,590,4 409,312,5 220,63,5 441,833,2 114,595,1 434,203,3 17,377,3 448,211,5 192,245,3 388,46,4 428,207,4 310,201,4 471,876,3 377,400,4 129,1038,4 98,27,3 144,819,2 271,745,3 61,249,5 451,275,1 380,630,4 372,420,4 95,186,5 349,203,4 424,176,3 25,0,3 451,200,1 398,40,2 89,161,5 29,241,5 421,269,3 285,185,5 379,30,1 63,80,4 180,990,1 40,473,5 364,761,4 232,583,4 279,117,2 91,455,2 41,1046,4 451,88,5 153,473,5 41,426,4 120,123,5 290,1072,5 392,841,4 143,1137,4 101,777,3 222,475,3 285,46,4 347,411,2 187,567,4 345,21,5 97,628,5 161,1010,4 84,384,3 458,357,2 91,166,3 107,303,3 326,131,5 308,330,5 22,94,4 42,741,5 449,709,3 233,1202,4 107,717,4 342,65,3 304,962,4 313,761,4 318,339,3 344,1052,3 436,709,4 224,1202,5 400,70,2 471,257,5 311,632,5 193,30,3 449,659,4 63,208,5 64,64,3 58,962,5 446,8,2 10,422,5 255,37,4 84,194,3 470,94,4 406,1117,4 382,426,5 213,751,2 144,208,4 388,474,5 462,454,3 404,1383,1 176,153,4 455,273,3 173,387,1 223,279,4 243,409,4 222,1290,3 233,21,4 392,496,4 314,381,4 449,151,5 405,6,4 413,677,1 333,212,4 157,172,5 396,287,4 456,384,4 177,122,4 473,7,5 471,167,5 416,421,3 415,548,4 136,180,5 229,70,5 294,719,4 215,474,5 432,681,2 215,187,5 279,698,4 233,1199,3 23,126,5 280,287,4 44,822,4 12,50,3 333,499,3 470,139,5 270,192,5 431,870,2 302,155,5 435,786,5 326,0,4 306,285,3 289,138,2 93,273,4 235,658,3 404,52,2 294,385,4 148,325,3 449,303,4 350,750,4 410,769,4 334,312,3 415,9,3 323,287,5 150,482,5 333,346,3 268,468,4 94,414,3 335,789,2 94,1230,1 130,123,5 174,99,2 471,448,5 335,400,1 108,178,4 406,381,3 308,1024,5 465,353,2 135,285,5 293,121,3 377,173,4 341,726,3 449,519,5 409,288,1 6,498,4 168,212,5 137,12,4 404,752,1 449,603,4 397,992,3 326,505,3 221,622,2 404,692,2 302,871,3 446,205,4 65,23,3 435,214,4 333,428,4 292,581,4 118,596,4 270,129,1 104,287,4 404,1529,1 41,289,3 428,635,3 344,567,4 330,222,4 206,659,4 254,199,3 86,320,2 173,741,4 193,153,3 12,328,2 144,406,2 200,52,3 336,221,5 333,238,3 464,113,4 0,21,4 362,289,3 434,1038,4 222,1008,1 118,49,5 366,405,4 9,215,4 307,596,3 188,422,5 275,152,4 302,320,3 151,1053,1 454,737,3 307,711,4 455,186,4 97,522,5 415,105,3 4,20,3 0,20,1 406,509,4 452,1156,2 313,277,5 402,545,3 226,321,3 109,1230,2 43,945,3 390,677,2 290,684,5 124,747,3 393,382,2 296,16,3 149,323,4 62,928,3 12,803,2 449,641,4 473,601,3 214,228,3 74,300,4 369,293,1 
166,1307,1 177,1,4 176,420,3 287,21,5 144,1287,4 268,1147,4 458,356,4 273,70,4 176,342,3 373,134,4 455,1247,3 268,1478,2 158,831,3 459,743,3 82,1,4 372,526,4 441,183,2 193,455,1 12,3,5 86,229,5 435,97,4 411,525,4 6,616,5 137,434,5 144,1046,3 404,461,2 326,87,2 406,55,5 41,14,4 84,653,4 416,448,3 20,550,3 6,218,1 167,124,4 89,302,4 109,95,4 275,379,3 405,514,2 428,1284,3 436,662,5 263,191,4 143,19,4 294,237,4 144,672,4 333,130,4 385,1015,4 345,571,5 388,567,3 279,1132,3 454,723,3 221,539,3 23,248,4 278,593,1 443,915,3 214,522,4 192,186,4 317,1119,3 199,0,5 446,759,4 243,450,4 143,469,2 58,50,5 294,419,4 22,407,5 373,70,5 316,312,4 366,560,4 197,186,4 187,1262,3 266,32,5 200,447,3 197,978,5 150,97,4 341,662,4 406,415,3 98,63,5 404,1118,3 406,370,2 127,621,4 177,196,2 404,76,1 471,825,3 298,263,2 456,155,5 107,221,2 393,560,4 94,519,4 290,824,4 302,952,3 449,493,3 221,88,5 362,427,5 396,94,4 20,122,4 183,735,3 323,254,4 74,224,2 388,603,4 129,131,5 275,196,5 63,312,4 26,49,3 455,978,3 339,587,5 134,172,4 124,210,3 402,924,4 289,14,4 165,750,4 416,691,4 422,332,3 20,766,1 58,918,4 351,272,2 362,136,5 287,179,5 404,945,2 405,632,5 234,193,5 339,485,4 144,557,2 108,230,3 180,1338,1 416,1013,4 10,432,4 425,427,2 116,747,3 311,88,5 415,257,5 409,537,3 304,95,3 94,126,4 382,638,4 451,120,5 294,964,4 382,134,5 235,503,3 372,154,4 84,149,3 405,691,3 224,565,4 464,215,3 304,627,4 264,1196,2 159,168,4 307,234,3 105,273,3 468,512,5 245,209,3 304,11,5 341,122,5 232,505,5 299,455,4 25,299,4 199,514,5 310,184,2 10,748,5 4,426,3 328,196,4 104,269,5 328,704,3 0,178,3 297,182,3 459,274,3 402,147,5 424,677,1 20,452,2 359,1196,3 463,288,4 55,150,4 459,19,4 377,48,3 323,122,4 109,68,4 319,91,5 333,429,4 365,558,5 330,1140,3 238,483,5 58,434,5 196,231,4 329,251,4 404,174,1 333,581,5 441,229,3 311,862,5 229,620,2 434,586,3 89,631,5 58,611,3 415,927,3 449,210,5 312,623,4 9,210,5 341,247,3 222,94,5 248,1011,3 382,271,3 468,854,4 89,473,5 465,180,4 127,192,3 287,222,3 197,172,4 346,402,5 279,545,4 384,615,4 124,727,3 202,474,3 48,90,5 346,819,2 318,306,4 9,530,5 421,447,4 310,305,4 405,29,4 379,160,2 243,31,2 378,8,4 405,734,3 415,287,5 326,958,5 200,187,4 471,1010,4 254,830,4 274,293,4 436,478,5 212,501,5 270,238,3 325,135,4 338,674,4 62,257,3 221,771,2 91,121,3 242,6,3 144,451,3 344,116,4 302,221,3 473,409,2 274,49,4 20,262,1 36,126,4 298,18,1 449,846,4 37,680,5 424,689,1 103,1011,4 302,677,1 353,6,4 98,341,1 453,704,3 125,271,3 378,186,5 310,215,5 384,602,5 404,668,1 294,728,4 58,403,3 268,516,4 223,555,1 456,284,5 475,714,4 424,52,4 221,768,2 90,567,2 326,507,2 61,187,3 397,3,2 140,281,5 388,63,4 418,487,5 342,426,5 124,55,1 180,234,1 379,133,3 311,275,4 342,173,5 384,1523,5 456,757,2 12,95,4 398,683,3 388,711,3 343,6,4 406,312,4 394,337,4 428,746,3 417,343,1 372,659,4 377,1522,2 306,82,5 233,284,4 388,49,5 457,529,4 434,170,5 59,617,3 86,788,3 327,619,3 401,221,4 390,509,5 266,943,3 407,299,3 471,232,4 402,180,4 270,755,2 6,215,4 245,24,3 138,739,2 388,557,4 189,353,4 256,268,3 408,428,5 401,31,3 200,434,4 451,185,1 406,172,5 411,116,4 91,152,4 338,402,3 415,651,4 325,207,3 201,257,4 12,837,1 473,479,5 245,419,3 121,672,3 275,590,3 220,384,4 74,234,4 68,122,4 217,172,3 185,78,5 444,120,1 298,691,4 179,777,2 41,468,4 12,558,1 232,482,5 78,305,5 444,245,1 93,380,4 379,413,2 449,558,3 416,549,3 137,186,5 140,334,1 183,220,5 87,307,4 473,67,3 404,141,1 404,558,5 68,234,3 384,80,3 232,827,4 130,275,5 293,1087,1 115,345,4 475,791,4 477,142,5 428,281,3 267,194,4 372,777,5 327,747,3 410,185,5 406,68,4 302,719,2 
357,381,2 285,410,2 377,777,3 215,220,4 473,735,3 327,325,4 401,95,5 264,322,3 278,1484,4 200,75,4 398,545,2 21,52,3 326,873,3 472,272,5 158,587,2 175,245,5 175,342,2 290,8,5 17,653,4 243,91,4 420,656,4 177,201,5 338,90,5 267,240,3 453,285,3 117,179,5 277,881,3 41,678,2 312,683,4 39,268,1 201,422,3 457,177,4 473,1013,3 449,1445,4 118,812,4 462,249,4 278,228,4 422,1010,3 465,187,3 473,527,5 258,184,4 48,1079,4 199,494,3 25,315,3 320,60,5 462,949,3 156,254,3 4,450,1 248,282,5 261,6,4 452,72,4 48,560,2 290,419,4 321,512,4 441,872,2 9,356,5 302,197,4 55,780,4 292,289,2 290,411,3 150,938,4 462,224,3 302,323,3 449,203,4 215,24,3 404,653,2 23,299,4 21,514,5 362,430,2 22,628,4 465,264,3 103,1114,4 312,7,3 446,179,5 114,6,5 61,865,2 393,287,4 129,657,5 90,494,4 367,88,4 144,312,4 375,662,3 183,209,4 5,199,3 100,110,2 90,613,4 463,677,3 346,117,4 304,903,4 454,55,5 6,555,3 377,558,4 278,366,3 243,85,4 390,88,3 341,208,5 390,199,5 265,267,4 457,318,4 275,85,3 471,930,2 319,163,4 310,322,3 9,161,4 323,878,4 285,596,3 275,395,4 395,594,3 266,127,5 317,404,2 453,50,2 460,258,2 42,384,5 398,139,4 69,495,4 449,903,5 25,128,4 24,140,4 315,198,3 320,86,3 415,1520,3 405,24,1 473,175,5 427,312,5 449,1091,3 9,160,4 307,208,4 388,483,5 197,958,3 435,1060,3 456,824,5 434,380,4 12,236,5 225,404,4 423,293,5 455,707,4 326,404,2 185,305,4 325,173,4 333,876,3 453,314,4 454,19,3 157,658,5 238,442,5 385,280,3 434,381,3 376,357,3 307,407,5 360,780,2 266,188,4 416,175,5 132,307,4 267,653,5 372,285,3 59,365,4 396,895,4 388,409,3 174,63,5 268,392,1 445,268,4 471,688,4 453,309,4 10,658,5 115,878,2 473,517,4 148,337,2 188,43,4 341,1013,1 12,769,4 43,142,4 292,490,4 331,181,5 302,222,4 322,474,3 290,447,5 117,287,5 428,414,3 406,1187,2 437,268,4 36,91,4 373,1013,1 44,224,4 404,792,1 290,760,3 194,557,3 177,299,5 226,123,4 91,116,4 453,173,4 314,177,4 415,722,4 176,160,3 211,526,5 410,87,3 397,162,3 417,330,3 113,88,5 174,660,4 4,429,5 362,630,1 396,337,4 449,280,4 80,99,3 363,987,2 456,545,2 118,293,1 360,65,4 55,482,4 43,0,4 98,91,4 268,487,4 404,1478,1 424,221,5 444,49,2 405,41,5 332,293,3 358,407,5 344,147,3 415,559,3 467,38,3 58,55,5 344,1095,3 434,183,5 345,109,2 10,706,5 101,200,2 406,1,4 57,495,2 41,728,3 269,583,5 245,40,2 15,947,3 448,292,4 350,287,3 449,525,4 200,378,3 173,237,5 397,434,5 261,384,2 446,90,4 392,539,3 370,233,5 89,720,3 435,536,4 387,677,4 362,1511,1 307,185,4 306,379,3 279,1296,4 195,69,3 89,192,4 17,64,5 434,312,5 82,579,4 221,228,3 453,163,3 450,242,4 255,224,4 362,442,4 86,37,5 98,750,4 253,1469,2 294,180,4 392,570,3 462,23,3 353,957,4 313,594,3 193,375,2 477,121,2 72,268,4 0,186,4 454,333,3 59,1120,3 394,236,4 426,262,5 1,110,4 415,479,5 71,317,5 188,498,4 221,62,3 275,824,3 31,221,3 81,110,4 346,69,2 111,300,3 129,449,2 451,68,5 307,47,4 200,287,4 272,315,4 462,346,1 100,180,4 386,462,4 434,615,2 270,195,4 454,530,3 393,71,4 64,392,4 455,21,4 270,86,3 391,98,5 101,337,2 377,90,3 268,536,5 473,734,4 462,241,2 420,88,5 417,301,2 158,259,2 258,146,4 300,823,3 269,49,5 377,175,4 42,289,4 350,325,5 373,99,5 373,23,3 345,272,4 234,190,4 17,78,4 259,747,4 248,174,4 278,531,1 94,97,4 465,127,2 353,418,4 278,273,3 255,0,5 356,1046,4 146,312,4 353,637,4 423,13,4 197,692,3 390,11,5 207,96,4 446,287,4 469,474,4 436,161,4 245,392,3 14,307,5 392,1406,3 159,181,5 127,171,3 252,327,4 161,507,5 369,321,3 12,562,1 456,44,5 396,173,5 221,474,4 231,75,3 420,465,4 386,472,4 475,1036,1 249,330,3 304,581,4 462,592,1 71,146,5 84,528,3 434,81,5 411,486,3 151,233,4 58,204,3 353,512,5 310,22,3 300,72,4 122,8,5 453,874,1 
295,190,5 271,513,5 394,312,3 12,477,4 436,682,2 178,320,1 253,93,3 214,14,3 434,51,5 69,149,3 459,296,3 473,660,4 353,706,4 275,950,3 98,828,4 177,218,4 479,189,5 164,499,3 433,368,4 266,107,4 359,22,5 404,188,1 118,709,4 12,871,3 37,741,5 20,16,4 150,51,5 440,24,3 452,565,3 376,267,3 326,558,2 180,680,1 392,844,4 55,402,4 5,493,4 473,51,4 177,175,4 444,885,3 10,227,3 182,228,3 346,830,1 170,261,4 283,305,4 83,97,4 254,99,3 6,96,5 64,364,3 416,483,4 69,402,4 471,976,3 249,270,4 218,615,5 449,138,5 416,1118,3 157,797,4 91,703,3 398,596,3 248,52,4 262,299,3 115,1225,2 383,988,4 98,274,1 266,64,4 94,891,3 369,513,4 231,149,3 151,782,4 304,199,3 14,302,3 372,714,2 41,1041,3 341,132,4 22,174,5 225,24,4 384,135,3 74,1027,4 378,99,5 215,1217,3 428,152,4 43,590,4 147,68,5 63,31,1 30,339,3 342,136,4 157,569,3 457,495,3 93,628,4 478,679,3 326,54,4 220,469,3 402,6,5 150,68,4 425,605,5 229,417,5 302,89,4 180,812,2 268,820,1 449,199,3 275,1078,2 94,446,2 48,1027,2 144,634,4 331,1314,2 150,475,3 6,134,5 451,501,2 416,764,3 269,25,5 326,9,4 263,366,4 436,190,4 94,139,3 418,305,5 231,288,4 312,473,5 199,840,3 353,515,5 372,1077,3 91,825,2 156,1050,4 356,743,5 428,31,4 351,88,5 99,878,4 473,1027,1 424,287,5 444,149,2 96,188,4 48,927,2 428,568,2 292,577,2 405,588,5 428,1013,3 404,573,1 415,928,4 101,992,2 117,1078,4 453,1453,2 421,49,4 109,54,3 275,1082,3 379,735,4 457,173,3 462,20,1 129,554,4 431,404,4 253,497,4 408,275,4 42,1053,3 456,78,5 436,97,5 393,38,4 435,714,4 21,84,5 328,891,2 452,230,2 449,366,3 325,182,5 192,365,4 471,939,4 397,181,4 63,142,4 380,655,4 58,200,4 364,314,4 384,184,5 456,678,4 144,1286,2 380,78,3 200,233,5 167,120,4 391,198,5 415,216,4 416,168,3 327,1111,4 379,415,2 390,63,5 41,442,3 1,257,3 416,357,2 341,14,3 101,670,3 342,558,3 291,152,4 439,1072,4 199,279,4 411,287,4 388,400,3 333,968,4 298,812,4 343,763,1 214,133,4 365,97,5 123,208,3 55,422,5 453,944,3 177,1314,4 428,805,2 114,120,3 263,773,2 343,814,2 86,166,4 274,209,4 53,271,5 279,30,4 477,317,5 391,199,3 463,268,5 172,330,4 449,552,2 379,1403,2 456,948,3 350,326,5 464,49,4 221,23,3 192,72,3 429,743,3 278,808,3 291,6,3 428,793,3 451,608,4 400,279,2 282,434,5 0,134,4 478,227,4 456,52,4 200,961,4 248,272,4 204,299,3 384,1285,3 380,672,3 285,393,5 179,657,5 298,727,2 255,619,3 306,229,5 428,567,3 83,14,4 451,466,3 424,490,2 117,674,5 203,299,3 455,567,2 320,528,4 42,495,5 300,236,4 343,971,4 394,209,5 402,122,3 319,565,3 140,1243,3 362,698,2 377,279,2 404,399,1 379,57,2 233,629,2 415,495,5 415,1088,2 405,786,3 483,234,2 294,1472,4 289,229,4 329,356,4 342,116,2 143,846,4 253,101,3 126,749,1 453,506,3 238,161,5 245,137,1 325,704,3 93,509,5 483,549,4 439,1503,4 408,488,5 200,949,3 239,271,5 415,929,3 267,226,4 20,562,2 273,845,2 59,96,3 58,8,4 245,651,5 267,479,5 300,93,4 171,611,3 485,845,2 12,215,3 404,301,4 455,962,4 186,734,4 377,194,3 404,777,1 188,195,5 267,406,1 467,1007,4 317,204,3 47,482,5 298,345,3 400,143,5 441,545,3 139,293,3 289,682,2 235,1400,3 86,99,5 356,279,5 421,180,4 192,411,3 415,1006,5 197,402,4 483,69,5 388,629,3 467,208,5 93,418,3 406,398,3 40,195,3 291,23,4 124,209,5 386,3,3 460,681,1 229,500,3 249,843,4 397,1125,4 82,65,4 450,261,1 386,356,5 137,517,4 59,660,4 444,1186,3 235,522,2 449,522,5 390,204,5 331,1010,3 273,761,5 331,297,4 289,433,4 454,342,4 275,1005,3 388,496,4 278,823,4 384,658,4 173,0,3 333,1312,4 279,214,3 405,1020,5 14,250,2 456,236,4 322,120,3 275,652,5 464,197,2 296,507,4 340,357,1 292,415,4 319,762,4 51,863,3 199,545,3 170,339,3 261,98,3 176,805,4 386,602,4 150,447,2 14,935,5 12,799,1 
245,210,4 325,607,4 416,927,3 312,184,5 307,207,4 176,653,4 253,1262,1 404,624,3 367,55,4 77,300,5 410,708,5 446,132,4 406,1229,2 483,194,5 359,743,4 23,54,5 326,745,3 199,825,4 5,164,5 473,179,5 59,180,4 10,258,3 329,698,5 243,580,4 466,49,4 266,363,2 48,144,1 193,627,3 183,116,2 443,1482,2 345,430,5 373,150,3 101,55,3 268,356,5 122,503,5 424,240,2 447,268,5 405,92,4 88,6,5 404,670,2 416,420,4 6,191,4 294,228,4 358,6,5 412,180,5 45,124,4 405,429,4 182,215,4 464,108,3 424,203,4 124,293,4 298,166,3 127,190,4 372,161,3 182,211,4 407,318,5 10,413,3 91,233,4 473,133,4 273,1062,4 233,181,3 345,615,1 22,596,3 452,6,5 261,422,4 454,213,3 252,155,3 53,339,4 101,649,3 344,169,5 158,1277,3 333,229,4 109,401,4 452,455,3 416,391,3 140,234,1 361,322,2 386,232,3 209,153,4 101,181,3 398,406,3 464,473,3 59,120,4 325,37,3 404,30,1 355,314,4 366,217,4 434,1108,3 286,199,4 441,78,3 76,200,4 163,257,5 451,804,4 183,401,3 255,233,5 192,273,3 267,401,1 416,100,3 478,930,2 15,466,5 415,384,5 94,182,5 53,6,4 428,192,4 263,221,5 479,174,3 6,446,5 449,684,4 252,731,4 266,175,5 457,47,4 398,186,3 369,434,3 57,175,4 257,257,2 252,522,4 373,201,3 275,1244,3 278,711,5 415,175,4 267,99,3 187,482,5 248,741,3 293,471,3 199,450,4 338,232,1 410,303,3 68,762,3 155,356,4 185,105,2 17,59,4 450,321,4 404,663,1 278,989,1 295,520,4 416,747,4 338,256,4 248,582,4 325,176,3 84,318,4 4,134,4 329,237,5 335,32,3 253,399,3 404,652,1 479,182,4 205,360,1 404,451,5 29,257,5 441,43,2 409,897,3 93,1090,3 377,422,4 183,165,3 294,216,4 252,181,3 384,478,5 473,316,4 253,132,5 466,149,4 311,487,5 184,689,4 81,513,4 21,995,1 129,745,5 408,320,2 371,22,5 76,510,2 61,229,2 353,1016,3 307,122,3 436,212,4 236,704,3 341,27,2 408,96,5 156,49,4 351,99,4 98,618,4 445,747,2 91,28,3 199,1418,5 196,398,2 94,174,5 195,65,3 386,272,4 116,121,2 142,271,4 371,184,5 93,385,4 275,49,5 311,427,3 258,1134,5 150,465,5 183,630,4 483,299,4 386,214,2 124,78,5 315,169,4 22,201,3 468,498,5 213,707,4 144,558,2 19,497,3 295,256,5 177,273,4 353,172,3 404,1239,1 486,1043,3 362,652,3 458,977,2 356,269,5 279,231,3 129,471,4 253,312,5 428,23,3 347,224,3 59,143,4 411,723,4 285,284,1 193,196,4 258,474,5 360,524,4 278,659,4 427,300,4 457,924,3 353,386,4 404,621,1 449,477,5 303,312,5 416,654,4 457,516,4 384,1127,3 233,47,2 262,99,5 386,222,5 94,450,3 462,409,1 91,355,3 416,637,4 144,799,2 197,1141,5 446,173,5 212,684,3 421,14,3 297,863,3 486,55,4 390,30,2 268,50,2 402,865,4 193,134,3 384,150,2 302,11,4 261,659,4 221,79,2 142,332,5 285,250,5 279,387,2 406,683,3 344,10,4 415,337,3 157,119,1 458,321,4 428,610,4 415,203,5 300,587,5 64,49,5 377,325,3 109,1217,3 74,219,1 343,508,4 428,55,4 485,470,5 158,293,4 333,345,5 386,1239,5 400,201,4 304,747,3 372,160,4 226,287,2 193,632,3 456,726,4 275,425,3 392,47,2 12,522,4 302,215,5 290,284,4 392,171,5 404,39,2 144,379,3 6,611,5 286,844,5 436,88,2 184,180,4 278,233,2 334,304,4 447,291,4 416,80,5 362,565,3 333,276,3 25,150,3 372,131,3 398,808,3 74,755,2 61,272,4 101,184,3 9,696,3 183,126,5 41,228,4 235,209,2 84,392,4 20,566,2 426,1295,5 248,1046,3 84,9,4 124,658,4 54,116,3 313,377,5 68,281,3 467,68,4 157,167,5 194,126,5 6,90,3 151,214,5 285,657,5 380,492,4 71,442,3 463,481,5 338,483,5 473,483,5 53,24,4 454,254,2 391,179,5 384,151,3 221,678,2 377,225,3 360,812,4 285,551,3 415,996,3 288,253,1 335,121,5 58,275,5 233,979,2 276,275,4 456,369,3 84,415,3 386,734,2 372,1086,1 326,1,2 393,131,4 372,47,5 278,1194,1 151,409,4 290,580,5 206,992,3 465,348,2 392,1177,3 455,709,3 6,587,4 428,127,3 44,761,4 392,80,2 233,529,4 397,153,2 399,320,4 150,524,4 
4,448,2 331,7,5 428,133,5 415,327,5 267,26,4 37,83,5 416,6,3 93,224,3 119,14,4 4,225,3 423,988,2 481,320,3 410,257,4 434,62,2 378,683,4 94,179,3 342,1066,3 322,478,4 450,682,1 186,662,3 75,215,4 91,1039,3 221,208,4 376,6,4 12,823,3 235,1038,2 436,130,5 96,0,4 454,507,4 143,241,4 380,1406,3 453,377,3 470,595,1 328,197,4 86,779,4 386,548,5 353,286,3 457,320,3 302,516,5 19,587,4 425,504,4 27,163,4 112,327,5 0,67,4 331,292,4 342,78,4 455,446,3 250,21,5 57,691,2 234,68,4 344,1225,3 298,258,3 331,11,5 304,689,4 129,468,5 405,274,3 95,173,5 212,155,5 211,198,5 353,153,4 316,263,4 176,11,5 434,224,3 416,1214,2 415,52,2 151,370,4 94,90,5 453,152,3 313,760,4 406,87,3 429,435,4 152,293,2 415,1040,3 168,497,3 486,279,5 178,301,4 20,871,2 268,196,5 101,671,1 93,400,4 319,585,3 249,234,2 6,659,5 408,708,4 221,650,4 486,454,2 303,285,1 275,107,3 93,450,4 237,925,3 269,545,4 64,659,5 326,483,3 278,395,3 321,195,4 415,122,4 270,117,3 221,366,2 362,221,5 461,357,1 470,476,5 425,477,4 372,632,4 4,400,5 388,94,3 187,225,3 279,160,4 331,256,4 181,190,4 326,21,4 353,954,3 105,11,4 54,180,4 463,357,3 34,260,3 12,462,5 206,223,3 302,94,5 20,679,1 199,1218,3 480,317,1 12,326,3 421,292,3 405,768,1 428,435,4 37,444,2 457,846,5 41,172,5 221,207,3 1,294,4 86,61,5 26,285,3 327,197,3 114,182,5 338,432,4 338,382,1 353,274,4 430,987,2 446,590,4 118,1051,4 182,76,3 430,321,4 61,430,2 325,484,5 270,478,4 345,156,3 438,256,4 398,97,4 90,611,4 449,176,4 129,768,3 233,469,2 165,983,5 82,716,4 275,472,4 388,1146,4 12,671,1 462,310,4 390,507,2 292,741,2 390,59,5 278,152,5 332,254,3 404,1138,1 413,309,4 323,247,5 486,596,4 78,18,5 220,22,4 450,263,3 384,1011,3 378,88,4 292,366,2 449,741,4 279,745,4 326,150,4 333,761,3 262,1125,5 486,469,5 404,946,1 6,293,1 219,299,5 10,55,4 61,251,3 388,79,3 486,473,4 364,341,2 163,844,3 144,987,1 22,293,1 310,482,4 269,927,4 93,646,5 360,58,4 172,320,4 275,409,4 17,475,3 60,303,4 71,133,5 428,366,3 150,285,5 449,1043,4 290,4,5 134,97,5 393,654,5 4,404,3 263,701,4 338,1016,5 353,99,5 102,1088,1 279,161,3 43,97,2 464,476,4 307,142,4 13,11,5 292,312,4 0,145,4 455,230,2 278,15,4 313,1209,4 422,976,1 416,1017,3 333,332,4 466,761,3 172,268,4 310,275,4 404,654,5 459,128,3 89,708,5 434,840,2 470,98,2 278,397,4 17,734,4 434,122,2 372,1146,4 378,11,5 345,28,4 449,508,4 114,3,4 400,156,3 471,117,4 302,799,3 398,264,3 229,514,5 415,365,4 324,173,2 68,474,3 267,779,3 342,22,5 312,191,3 86,574,3 473,99,5 338,450,3 485,14,3 468,483,5 177,317,5 82,845,3 91,1072,5 214,479,5 129,53,5 12,98,4 111,878,4 10,715,3 453,606,2 204,285,2 294,659,5 129,1150,3 415,442,5 310,621,3 57,123,5 377,844,3 462,99,4 400,527,5 460,320,3 417,314,2 450,318,2 74,122,3 91,293,3 431,122,3 342,237,4 37,69,5 223,1220,3 471,755,4 380,525,4 140,814,4 306,214,4 333,94,3 453,745,2 148,322,2 473,473,5 41,82,4 228,244,3 127,486,5 391,296,4 84,257,4 457,590,3 74,322,2 9,49,5 12,784,3 324,210,3 173,952,5 462,9,1 20,200,5 150,136,5 472,812,3 408,161,4 456,11,5 411,69,4 392,1073,3 456,3,4 268,153,3 96,918,5 129,1266,4 320,632,5 268,527,4 279,55,5 404,1565,1 111,268,3 57,10,5 225,173,4 195,256,2 292,808,2 351,194,4 271,192,4 339,945,5 449,88,5 255,1108,4 486,75,4 450,1391,1 242,0,4 392,390,3 243,65,4 401,24,4 467,543,3 275,1030,2 348,9,5 6,230,3 327,552,3 388,7,4 467,404,2 170,886,4 64,184,4 177,755,3 177,845,3 491,1020,3 451,184,5 415,1138,3 457,409,1 434,244,2 414,327,5 407,285,3 397,157,3 84,505,4 462,2,2 20,546,2 333,60,3 404,48,1 460,1005,5 177,763,3 380,656,4 483,553,4 424,446,3 27,669,4 345,172,3 19,865,1 243,199,5 285,1104,5 180,1084,1 
458,180,4 268,1134,2 361,301,5 400,236,3 100,844,3 176,692,4 71,228,1 405,525,5 463,299,4 151,1040,5 197,870,1 14,454,1 83,110,4 345,258,2 127,327,2 400,552,5 267,398,3 457,329,3 221,281,4 221,120,3 292,110,2 10,210,3 404,647,1 449,237,5 0,175,5 422,326,2 477,150,5 209,1011,4 95,41,1 143,256,4 267,119,2 64,293,4 448,121,1 372,365,4 311,69,5 88,186,5 478,27,4 428,825,3 77,1046,1 455,199,4 333,88,4 392,417,3 310,767,2 17,415,5 428,169,5 397,125,4 462,102,1 17,3,3 434,299,2 95,6,5 445,882,3 144,594,3 444,503,3 295,356,5 41,384,5 108,210,5 357,583,4 454,596,3 140,120,4 441,25,3 404,771,1 421,216,3 275,941,4 397,133,3 196,332,2 91,678,4 450,330,5 386,208,5 150,381,4 57,644,5 485,822,4 108,943,3 317,237,3 211,422,4 269,59,5 178,916,3 298,1005,4 347,408,4 234,654,4 83,1027,3 17,44,5 425,190,4 327,379,3 252,55,3 121,192,4 193,177,3 403,242,3 415,1494,3 390,327,3 22,13,4 89,874,1 58,231,3 15,215,5 109,848,3 144,88,4 226,293,3 432,689,2 129,500,5 492,221,3 300,142,4 378,199,4 221,545,3 428,88,4 449,28,3 467,741,3 307,472,3 485,1013,3 212,251,3 379,1044,3 290,1058,4 273,923,3 94,435,5 6,200,2 434,49,5 280,332,3 404,1209,1 307,81,4 406,659,3 297,482,5 300,549,3 312,24,2 194,58,3 187,628,4 455,2,4 443,299,4 23,237,5 215,181,4 436,432,3 53,23,1 127,941,5 424,378,2 124,322,3 402,126,4 287,543,5 483,24,3 64,167,4 398,173,3 406,3,4 234,1020,5 469,1096,3 150,85,5 59,194,4 42,188,5 388,151,4 349,132,5 335,167,5 295,54,5 298,190,4 450,259,5 13,919,4 0,165,5 319,626,4 451,596,5 404,1588,1 127,837,5 302,481,5 157,545,3 378,404,3 386,63,3 278,9,4 342,76,3 151,119,2 188,1399,3 188,171,5 297,704,4 386,1127,4 59,384,4 59,664,4 335,1036,1 310,464,4 347,106,4 436,465,2 279,924,4 248,1072,4 233,182,4 486,938,3 458,845,4 467,506,5 406,868,3 292,2,2 434,225,4 452,411,2 415,258,2 428,819,3 418,285,4 58,780,4 467,293,3 392,408,4 329,863,4 353,19,5 402,239,1 42,24,5 436,968,4 84,704,5 377,116,3 68,1141,4 317,212,4 253,1442,4 331,814,4 67,257,5 345,88,4 122,196,5 405,452,2 491,317,5 450,874,2 342,143,4 100,0,3 0,137,1 101,174,4 489,0,3 435,101,4 276,472,2 261,168,3 212,30,4 342,154,1 489,332,3 457,938,4 351,691,3 406,858,3 213,318,3 415,422,4 477,683,4 263,268,5 488,1292,5 159,12,4 36,664,3 248,209,3 423,682,3 97,167,2 212,285,3 55,719,3 312,836,4 91,230,3 383,877,4 265,284,4 327,240,5 267,383,3 14,305,5 212,174,4 393,153,3 51,470,4 444,474,5 192,268,4 464,480,4 114,356,5 434,256,4 240,342,2 73,312,5 373,55,5 433,545,5 401,590,4 199,281,4 63,96,3 243,110,4 408,134,5 186,96,3 118,95,5 402,409,2 94,390,2 456,227,5 125,332,2 275,1220,3 485,220,4 428,1442,2 482,236,3 129,887,3 225,507,4 270,1410,2 292,239,2 405,480,3 471,249,5 335,120,4 307,431,4 61,55,5 0,246,1 53,470,4 405,714,4 17,286,4 452,567,3 326,7,4 298,894,2 91,199,3 451,1533,1 307,253,2 180,14,3 432,747,4 470,587,1 220,334,4 307,317,4 99,299,4 194,263,3 93,641,4 150,117,3 150,630,3 189,124,3 392,807,4 59,127,3 93,189,5 112,302,5 372,479,3 321,607,3 393,549,4 397,484,5 331,10,5 329,147,4 116,236,4 386,95,4 0,88,5 268,87,1 8,614,4 473,160,4 144,1072,5 489,8,4 471,1209,3 344,42,3 127,872,1 454,646,4 109,32,4 425,417,3 471,624,4 421,234,2 91,197,5 157,95,4 449,14,3 462,929,1 457,151,5 449,836,4 326,3,4 198,891,1 292,257,3 235,131,4 278,128,1 357,862,5 225,526,4 131,55,5 270,391,3 477,672,3 118,830,2 327,10,3 403,258,5 267,70,3 10,576,3 451,13,3 427,288,4 492,283,4 193,89,3 410,264,5 312,327,4 452,41,5 93,942,3 0,1,3 335,93,3 124,257,5 174,192,4 336,514,5 392,996,1 158,1094,5 229,133,4 486,1187,3 300,143,4 209,628,3 245,411,1 297,526,5 436,1133,4 343,271,5 193,193,4 
75,323,4 452,153,3 195,107,4 223,1400,1 220,1089,3 324,1,1 444,86,3 435,89,3 428,639,3 129,1033,2 457,49,2 379,769,3 483,140,4 269,712,5 241,0,4 252,587,5 310,299,4 481,987,4 342,86,4 12,738,4 274,825,2 304,162,3 188,240,3 300,75,4 405,633,4 292,88,5 275,1046,3 416,177,3 287,426,5 342,659,3 467,30,3 393,1032,3 233,215,3 388,173,4 144,1131,3 404,211,1 312,524,5 180,1348,1 302,1269,1 275,719,2 154,989,3 458,824,3 396,512,5 159,99,5 402,470,5 121,428,3 145,257,4 313,500,4 243,536,5 457,332,1 285,416,3 42,290,3 394,341,4 456,473,5 399,303,4 483,95,5 375,513,4 457,422,2 285,474,4 384,159,4 449,64,3 209,171,5 221,78,5 353,862,3 415,707,4 253,63,4 266,230,4 292,61,1 209,22,5 405,736,3 311,0,5 366,918,5 42,81,4 405,755,3 372,391,4 488,341,3 312,479,5 300,1073,2 270,659,5 13,126,2 324,0,2 307,29,4 337,653,5 449,48,5 449,175,4 161,10,4 486,292,5 406,268,3 406,201,4 404,551,1 151,70,5 304,87,2 84,480,4 445,293,1 221,377,1 388,171,5 245,474,4 173,220,4 477,49,3 473,920,3 11,391,4 437,236,5 5,21,3 212,180,4 33,1023,5 241,474,3 206,136,3 43,199,4 70,63,4 353,506,3 434,549,3 263,1073,4 143,532,4 478,167,5 140,243,5 324,271,3 200,170,3 262,315,5 495,141,2 343,954,4 467,50,3 393,553,4 454,195,4 249,287,4 392,86,4 397,94,5 75,41,3 439,282,5 373,147,4 379,70,4 217,202,4 24,478,5 331,981,3 12,82,2 175,150,4 193,502,4 466,263,2 454,160,4 405,475,4 493,63,5 285,882,5 99,327,4 12,312,4 63,264,4 329,90,4 325,490,4 278,420,3 334,901,5 157,274,5 451,172,4 192,37,3 150,527,5 200,1421,2 386,318,1 359,44,4 373,844,2 70,256,5 393,575,2 416,726,5 471,231,4 267,368,1 392,1119,3 360,89,2 270,217,3 255,117,5 456,470,4 278,412,4 395,287,3 317,380,1 49,318,5 17,318,4 384,864,4 470,93,5 377,86,4 177,236,4 313,793,4 449,433,3 432,656,5 463,327,3 76,96,2 194,796,3 454,299,4 451,791,5 421,409,5 127,131,3 180,1388,1 372,647,4 455,79,2 325,719,2 455,160,3 58,215,4 419,318,4 177,471,4 338,116,3 434,135,4 456,47,5 183,527,5 123,171,3 482,143,2 191,283,5 72,254,2 89,116,3 167,125,5 456,979,4 144,509,4 94,81,3 18,318,4 57,432,5 20,321,3 270,475,1 194,752,3 256,675,4 429,8,3 302,807,2 270,11,4 266,788,5 4,442,4 36,81,1 312,822,3 404,779,3 384,23,3 9,156,5 14,927,1 494,173,5 453,81,4 304,167,4 327,738,3 325,396,3 431,245,4 449,728,4 300,526,4 5,418,4 197,689,3 197,24,2 451,487,4 416,440,3 206,290,3 327,664,2 378,460,4 317,23,4 89,174,3 177,339,1 448,1317,2 322,740,3 449,1183,1 157,117,5 494,166,4 250,110,3 144,1289,1 223,275,3 383,327,4 485,688,2 323,537,4 233,173,3 436,403,5 12,461,5 433,120,4 292,502,4 476,814,5 332,434,4 394,97,5 453,244,3 492,761,4 480,209,4 454,172,4 373,818,3 292,296,4 57,500,2 108,762,2 5,49,4 344,1116,4 191,49,4 453,171,2 489,283,3 55,182,5 37,96,5 279,97,5 446,6,5 312,495,5 342,323,5 478,173,5 453,172,2 310,728,4 109,904,3 22,413,3 327,269,2 415,104,2 197,214,4 93,24,3 485,146,2 5,519,4 221,215,4 14,13,4 339,214,5 459,285,4 400,587,2 456,630,4 42,312,5 352,327,2 431,150,4 444,844,2 293,346,5 428,1034,3 456,698,4 302,574,4 492,68,5 9,301,4 386,49,5 6,595,5 311,433,3 22,123,5 255,231,3 404,468,1 283,261,4 466,108,5 425,753,1 61,604,3 485,475,3 58,122,3 360,474,4 475,203,4 492,750,5 6,610,3 105,210,4 150,495,4 338,649,4 386,968,3 344,695,3 318,268,3 130,250,5 404,623,4 486,657,4 57,513,5 279,93,2 150,151,3 362,751,5 61,743,3 459,288,4 279,89,4 496,233,2 233,1446,3 463,708,5 455,442,4 404,430,3 325,500,3 10,385,3 268,722,1 415,125,5 19,422,2 101,318,4 294,90,5 338,208,5 199,1033,3 59,209,4 56,244,4 472,1128,4 472,256,4 485,619,2 362,468,2 392,779,4 486,120,4 478,324,1 290,7,4 422,303,4 405,27,3 432,59,5 12,767,4 
290,237,5 273,273,4 14,268,5 467,70,5 428,1202,4 83,293,3 434,253,3 494,70,5 41,227,4 109,63,4 79,302,4 483,754,4 93,259,2 404,1110,1 361,327,2 456,870,1 267,1073,3 223,96,5 311,68,4 455,0,2 395,328,2 415,275,3 231,132,4 473,315,5 47,479,4 255,768,5 331,158,5 435,126,5 61,240,1 103,125,4 255,567,5 116,171,5 377,1134,2 398,90,4 173,238,4 405,830,2 302,12,4 356,9,5 338,85,4 317,604,4 200,386,2 314,55,5 58,283,2 456,596,3 356,594,4 467,282,4 188,104,2 471,180,5 255,55,3 372,1187,3 180,844,3 39,336,4 483,52,1 304,1512,2 292,954,2 243,280,3 439,511,3 367,180,4 217,293,2 99,1234,4 5,168,4 370,41,3 300,404,4 21,450,4 157,744,4 296,602,5 496,839,3 243,832,3 330,693,4 471,1090,4 346,0,4 487,479,3 377,61,4 330,481,2 235,202,4 395,590,3 387,844,4 497,474,3 310,320,3 492,60,4 453,1002,2 469,293,3 296,264,3 193,1411,2 79,99,5 213,426,5 94,198,5 421,589,2 55,157,3 176,120,2 415,154,5 455,478,5 393,78,5 400,317,4 441,158,4 255,1227,1 206,11,3 5,245,3 267,157,2 449,69,4 369,97,4 234,326,3 206,561,2 435,446,1 255,1113,4 164,14,5 475,231,3 100,923,4 441,341,2 177,1015,4 254,671,2 221,175,4 494,138,2 428,549,3 415,196,5 434,1418,2 31,287,4 477,281,3 343,384,2 103,9,2 93,355,4 254,117,1 386,218,2 378,157,1 277,244,3 293,331,3 183,356,5 439,514,4 434,72,3 315,172,1 236,356,4 235,210,3 56,124,3 390,459,4 360,736,4 376,315,4 236,189,4 124,1245,2 86,47,4 313,279,3 486,500,4 406,98,4 494,673,3 486,251,1 473,648,4 143,186,4 314,24,5 434,402,4 307,707,4 96,134,5 248,202,5 144,878,5 109,93,4 469,823,4 345,6,2 408,473,5 449,727,3 267,32,3 43,134,5 99,1236,3 456,435,4 401,1047,2 428,402,4 42,755,3 193,356,4 295,286,4 462,863,3 135,646,5 394,327,4 270,506,2 42,938,3 115,332,2 103,844,3 483,650,5 393,454,4 331,762,5 17,519,4 243,552,5 384,673,3 266,514,5 25,275,4 268,203,2 59,509,5 267,578,1 414,55,5 404,49,5 396,503,5 428,480,3 200,1427,4 71,524,4 43,131,4 415,1090,3 90,987,2 404,968,3 312,485,3 129,298,3 335,62,2 446,182,5 58,58,5 495,63,3 433,274,3 103,839,1 496,1184,1 487,567,3 416,381,2 346,186,5 62,474,4 451,419,3 478,247,4 245,852,5 212,221,3 436,392,3 57,299,4 404,184,4 86,231,3 487,509,4 377,234,4 20,877,2 269,144,3 197,167,4 285,175,4 382,80,4 206,825,2 471,399,5 411,205,2 6,681,2 292,855,3 74,844,3 342,82,4 435,391,4 132,271,5 337,300,4 454,1173,3 436,1210,4 434,189,4 289,402,2 307,247,4 435,894,4 496,226,2 286,49,5 443,99,5 405,595,3 362,1034,2 313,1151,4 451,970,4 91,741,3 236,178,4 51,21,5 383,270,4 455,817,3 63,49,5 15,142,5 310,176,5 93,76,3 478,527,4 327,822,3 285,180,3 486,671,4 420,516,2 452,187,4 302,1156,2 404,178,1 229,569,4 129,261,3 185,828,4 93,368,1 496,362,2 488,288,2 431,762,5 392,416,3 453,87,4 21,839,4 250,63,5 424,362,1 428,704,4 353,136,3 229,608,3 278,371,4 267,123,4 117,918,5 416,507,3 451,268,5 405,221,3 295,197,5 91,90,3 415,54,2 180,988,1 294,187,3 180,1336,1 21,931,1 461,288,5 58,605,4 200,226,4 263,193,5 151,236,5 6,528,2 418,222,4 233,46,2 486,275,3 362,759,1 235,431,5 238,674,5 144,299,3 12,605,4 486,931,3 390,287,3 441,285,2 63,161,3 82,731,4 89,301,5 189,280,3 403,288,1 132,345,3 422,1264,4 61,553,1 22,131,4 193,228,1 39,304,4 311,205,5 339,198,5 468,609,4 356,244,4 333,344,2 89,605,5 4,256,5 419,752,5 235,482,5 329,131,5 327,398,2 12,772,1 415,553,3 298,310,4 127,339,4 464,299,3 17,167,3 434,89,4 454,510,5 94,1090,3 415,746,5 341,605,5 412,256,4 416,714,2 465,320,2 449,173,5 285,110,5 435,745,5 151,1300,5 498,510,5 498,209,3 404,1591,1 275,83,2 436,482,5 416,241,3 81,167,5 416,870,2 415,401,5 177,81,5 125,314,4 300,155,4 310,548,2 86,201,5 199,68,5 490,474,4 266,454,3 61,69,3 
83,273,4 129,26,4 275,263,3 490,44,5 275,749,4 310,126,4 369,855,3 473,704,3 300,90,3 25,749,4 333,309,3 22,274,5 449,1046,4 271,97,4 353,420,2 193,545,3 6,142,3 12,199,3 492,251,4 74,925,3 480,1088,3 243,945,4 310,62,3 129,422,5 267,221,4 434,541,1 345,140,4 497,175,2 434,185,4 116,677,4 291,249,3 471,119,5 392,726,3 441,167,4 6,677,3 377,743,3 86,198,5 266,63,5 17,152,4 94,30,4 452,32,4 431,314,5 436,736,1 486,116,5 306,226,5 128,285,5 406,878,3 387,110,3 84,431,4 15,281,5 108,734,5 346,156,5 214,495,5 279,175,3 129,251,5 455,736,3 93,702,3 20,299,3 206,22,4 63,21,4 386,60,3 415,10,4 89,702,3 206,178,4 150,1038,4 42,1046,3 41,581,3 163,292,4 285,1374,5 199,168,5 403,342,1 476,279,4 378,700,4 300,239,4 10,257,5 60,257,4 84,526,4 15,501,4 455,459,3 144,928,2 26,243,3 444,828,1 406,243,3 426,873,5 38,312,4 428,139,1 455,932,3 366,175,5 318,267,4 333,1013,2 14,12,1 485,873,3 44,933,2 1,241,5 426,244,5 324,136,5 361,263,1 296,113,5 58,160,3 415,296,4 327,27,5 471,150,3 84,283,3 425,131,4 359,0,3 295,292,5 454,322,3 405,62,3 58,21,4 20,673,2 116,788,4 195,201,3 278,69,1 84,56,5 58,1020,4 373,762,3 74,476,4 118,1033,3 373,143,5 492,430,5 27,858,3 311,462,5 492,120,5 275,239,4 320,210,4 370,209,4 82,70,3 466,267,5 375,245,3 270,525,5 359,581,4 393,678,3 200,1010,3 392,1243,3 271,209,5 175,474,5 462,284,4 310,99,1 391,168,4 280,307,1 324,603,4 494,152,5 243,720,5 312,321,3 471,194,5 455,217,4 168,482,3 459,531,3 290,705,3 311,186,5 126,221,5 233,429,4 379,27,4 449,722,3 473,487,3 342,743,4 172,873,4 15,94,5 58,98,4 431,409,4 450,456,2 387,768,3 180,454,1 375,197,5 441,779,3 434,673,2 333,677,3 386,789,1 210,8,3 183,377,4 297,193,5 63,767,2 401,863,3 392,2,3 327,698,4 363,293,5 52,120,4 183,182,4 101,664,1 473,190,5 10,744,5 233,152,3 164,175,4 416,427,3 15,403,5 461,331,5 183,43,4 29,258,4 94,169,5 19,180,4 415,288,3 455,67,4 473,135,4 424,297,4 91,754,3 372,264,4 406,184,5 378,735,4 433,219,5 341,281,1 365,412,4 307,509,3 338,68,4 494,88,3 129,268,4 120,478,5 317,288,3 283,346,5 445,879,2 124,194,5 379,212,2 407,287,4 485,320,3 454,581,2 312,769,4 143,195,4 311,173,5 209,93,4 347,987,3 275,401,3 385,514,5 394,595,2 434,799,4 384,895,5 188,98,5 229,78,5 255,402,4 395,1398,3 344,55,5 422,275,5 342,151,4 84,446,3 398,83,2 374,938,3 392,236,4 404,995,1 394,256,5 295,99,5 345,747,4 478,134,4 279,28,3 338,98,4 435,1118,4 290,30,4 198,115,5 59,173,4 404,65,5 127,120,4 205,268,4 379,94,4 499,110,4 450,983,4 491,491,4 122,434,5 205,307,2 423,274,5 495,525,3 425,489,4 134,287,3 37,401,5 330,31,4 471,762,4 483,264,5 153,918,4 143,214,4 368,49,5 88,110,4 492,81,5 456,240,3 298,87,3 496,565,3 231,530,4 424,256,3 411,407,4 164,221,5 338,87,4 398,404,3 93,727,2 12,662,5 275,930,2 173,268,5 271,49,4 275,78,4 82,507,2 408,675,2 419,274,5 179,185,4 406,628,3 358,285,5 473,47,4 311,849,5 42,110,4 338,435,4 319,225,4 180,362,1 396,331,2 382,267,5 475,71,4 11,132,4 384,450,1 94,1100,2 300,264,4 386,225,3 390,7,3 471,746,5 449,476,4 456,721,4 313,392,4 345,214,3 261,432,4 450,681,4 113,55,3 183,175,4 82,526,4 12,349,2 347,933,4 192,309,4 404,1098,1 378,284,5 290,289,4 429,272,4 348,595,2 458,78,3 89,606,5 81,1162,2 501,891,2 372,135,4 431,257,4 235,475,3 416,216,4 194,981,2 487,747,4 449,418,5 191,120,2 14,251,2 108,322,3 96,203,5 333,654,4 108,264,5 449,688,3 171,605,3 36,272,3 283,306,4 422,325,4 10,653,3 221,734,5 117,558,4 84,187,2 378,61,2 436,516,4 395,828,3 375,110,4 180,973,4 390,603,4 362,158,1 213,180,3 177,321,3 84,221,2 12,341,4 83,275,4 451,553,3 213,649,5 397,731,4 181,202,3 21,166,3 47,1064,2 248,88,5 
427,907,4 456,455,2 300,202,4 496,464,3 451,170,4 252,21,5 372,402,3 463,332,4 96,430,3 420,181,5 180,1037,1 479,202,4 327,1482,4 335,104,4 90,497,3 433,124,5 335,731,3 428,14,5 423,688,1 369,284,3 419,250,5 499,780,3 406,447,4 445,325,2 486,738,2 366,178,5 222,865,4 388,513,5 15,78,5 144,201,4 32,244,3 453,604,2 404,1017,1 449,581,4 338,233,4 345,116,4 59,7,3 288,20,1 240,334,3 274,514,3 42,1022,3 296,7,5 465,330,5 395,618,3 263,675,3 63,567,4 473,256,3 197,209,4 502,180,5 415,1036,2 380,149,4 274,153,2 342,785,4 89,836,5 84,422,4 17,210,5 10,56,2 473,76,5 458,281,3 278,145,1 93,175,4 449,269,4 501,300,1 392,68,4 88,1073,5 405,430,3 282,174,4 221,40,3 188,282,5 346,76,5 405,2,3 79,212,3 63,602,3 473,75,4 377,618,3 417,257,5 94,595,2 197,100,5 193,497,3 471,681,4 63,10,4 492,99,5 471,317,5 331,272,5 391,1011,4 283,747,3 293,339,4 141,407,4 449,6,4 302,28,2 439,322,1 410,719,3 426,988,5 457,716,1 324,1002,3 395,1027,3 212,431,4 302,1272,2 487,433,4 187,497,5 309,1385,1 249,1198,3 140,258,1 12,828,3 405,426,4 502,1315,1 252,330,3 91,239,2 150,170,5 398,721,2 428,200,3 174,233,5 390,68,4 471,608,5 466,287,4 291,57,5 498,663,3 495,131,3 285,89,4 418,513,4 253,547,2 434,366,3 495,1073,2 345,11,5 353,1193,4 172,304,5 2,320,5 386,134,5 304,478,3 353,188,3 377,923,3 338,316,4 121,660,4 187,27,3 56,832,4 250,684,4 344,287,3 202,287,5 42,268,5 312,177,5 50,82,5 157,208,5 486,171,4 404,1266,1 134,55,4 488,291,4 449,154,4 160,264,2 341,731,3 221,728,4 339,87,5 359,404,3 343,136,5 449,11,4 415,81,5 359,173,3 406,315,4 247,195,2 449,773,4 362,741,2 492,878,4 313,410,4 432,193,5 403,257,4 326,1169,4 429,287,4 76,55,4 293,475,3 14,926,4 437,120,5 199,176,4 478,54,4 166,224,3 294,229,4 42,407,5 449,382,2 179,379,5 414,179,5 294,157,4 65,740,4 382,503,4 345,950,2 494,477,4 391,169,5 445,310,2 213,317,4 333,27,3 415,530,5 423,242,4 365,444,5 225,407,5 424,6,3 169,325,5 468,135,4 448,170,4 486,217,2 25,303,4 10,316,4 323,274,4 91,70,5 91,11,5 243,659,4 279,1034,4 304,1014,1 457,12,4 452,232,2 436,132,5 0,29,3 428,290,4 425,509,4 449,487,4 344,27,3 6,627,3 493,182,5 393,49,5 17,485,3 342,527,3 333,49,5 422,689,4 464,495,3 473,264,5 488,320,3 494,657,3 499,1011,4 304,152,3 14,254,5 503,162,4 176,54,3 287,344,5 183,442,3 192,401,3 20,674,5 492,422,2 3,327,3 221,72,4 499,158,2 173,1034,4 93,568,1 367,200,5 483,342,2 271,22,5 452,96,3 122,99,4 292,932,2 415,338,5 84,169,4 428,1078,2 4,24,3 260,300,4 269,721,4 397,62,2 398,156,3 377,791,4 84,99,3 335,761,5 328,283,3 455,430,4 274,195,3 494,412,5 267,540,3 377,379,3 0,62,2 300,28,4 461,677,3 480,65,3 430,753,3 434,942,3 405,513,1 346,234,2 322,185,4 94,448,3 499,664,3 129,247,3 449,144,3 425,524,4 220,217,4 502,68,4 142,312,5 124,215,3 434,39,3 471,684,3 4,441,1 7,232,4 17,60,4 439,750,3 214,190,4 15,940,1 47,1063,4 293,362,1 230,254,3 354,263,4 63,309,4 497,190,4 310,214,4 150,184,4 124,368,3 279,216,3 39,285,2 453,518,2 398,317,5 221,227,5 193,624,3 118,688,4 330,123,4 338,99,5 101,167,3 404,1546,2 173,450,5 395,299,3 415,20,3 250,471,3 416,200,4 404,206,1 497,191,5 166,725,1 335,795,3 477,275,5 435,738,4 449,400,3 69,213,3 360,332,2 434,10,5 371,1272,4 404,1399,1 206,297,3 177,789,3 6,97,4 345,49,5 118,761,4 473,552,2 4,185,5 267,737,2 233,1120,5 298,189,5 9,693,5 93,721,2 200,81,4 84,27,4 477,865,1 267,163,2 81,8,4 338,701,4 408,748,3 346,163,3 178,689,1 478,285,1 434,214,2 455,497,4 127,498,5 93,474,5 482,180,4 404,556,1 129,891,3 424,251,2 313,696,3 482,899,3 392,684,3 267,228,2 226,136,5 281,261,4 368,242,3 302,52,3 428,194,4 268,238,2 462,305,4 449,300,4 
498,311,4 504,192,3 349,171,5 436,483,4 344,701,4 386,97,4 469,543,3 245,237,5 465,171,4 408,8,4 488,1237,4 370,264,5 282,201,5 416,183,4 61,654,3 232,260,5 140,6,5 425,604,4 462,273,3 496,30,3 101,408,2 255,30,5 436,215,5 262,173,5 314,427,4 12,685,5 233,4,3 278,282,3 176,357,2 13,318,1 109,400,3 143,434,4 404,75,3 283,899,4 499,835,5 176,222,4 12,416,2 104,747,2 398,1034,3 486,42,3 415,82,5 353,508,5 144,938,4 449,467,4 453,481,3 445,287,2 233,1167,2 233,854,3 420,515,5 192,561,3 415,632,4 209,55,5 306,400,1 456,454,4 159,1011,5 416,763,3 47,518,3 465,565,3 496,539,2 425,317,5 64,254,3 300,117,4 125,345,3 165,293,3 2,259,4 312,76,3 144,275,1 398,1136,4 386,292,4 351,171,5 416,791,4 498,346,4 462,987,2 293,751,3 452,577,3 263,183,5 505,469,4 169,983,5 321,155,4 143,325,4 449,156,3 378,703,3 455,11,3 156,289,4 492,885,2 209,179,4 31,239,2 421,221,4 458,294,3 416,474,4 278,180,3 3,257,5 64,650,4 415,141,4 435,95,4 91,91,4 494,796,4 302,457,3 302,380,4 393,228,3 492,10,3 58,2,4 48,81,1 84,640,4 481,288,3 404,648,1 86,21,4 127,70,4 13,920,5 5,292,3 220,385,3 494,126,4 293,306,3 449,770,3 455,1477,4 341,222,4 385,221,4 372,155,2 404,238,3 180,982,2 364,1419,2 398,1245,1 243,779,4 215,411,2 415,738,5 330,213,3 278,469,3 436,228,3 109,225,3 89,1096,4 495,182,2 473,78,5 109,55,1 177,280,3 408,263,1 404,1345,1 266,1089,3 234,302,4 398,650,3 404,811,1 454,1,4 69,482,5 499,1046,3 451,497,4 471,379,5 487,32,2 413,299,4 10,404,3 326,84,2 326,1102,4 353,1064,3 404,790,1 291,1141,4 458,2,2 269,530,4 209,194,4 452,10,5 117,6,5 300,664,2 89,484,5 456,639,4 82,203,5 393,207,5 4,431,4 499,293,3 294,42,4 144,485,3 505,1045,4 29,1,3 102,203,3 307,498,3 454,464,3 494,139,5 316,321,3 386,216,3 127,221,3 404,760,1 15,3,5 451,95,2 215,312,5 471,26,4 496,241,1 281,889,4 232,639,2 302,846,4 61,874,4 451,482,5 127,196,4 380,516,4 318,305,4 386,221,4 473,609,3 173,11,5 462,1382,2 447,302,4 213,478,4 184,1019,4 415,76,4 487,197,4 199,225,4 503,545,4 455,1018,4 226,323,4 416,418,4 157,88,5 327,1312,4 389,276,2 456,741,4 457,208,4 443,677,3 292,1207,3 268,156,3 487,404,3 174,146,3 3,209,3 197,299,2 415,316,5 6,198,5 354,359,4 48,626,2 504,139,4 408,11,4 386,546,4 434,658,4 290,468,5 429,1346,5 428,569,4 17,950,3 408,59,5 21,861,1 447,883,4 442,270,4 453,195,2 356,104,4 307,87,4 473,645,4 270,136,4 10,691,4 492,95,4 428,279,2 434,173,5 447,895,5 290,1477,2 398,1230,3 0,248,4 486,231,4 360,1102,4 364,994,4 6,228,3 167,274,3 36,173,5 327,687,1 408,209,4 338,482,5 403,330,3 380,994,4 127,651,3 43,379,4 93,1013,4 336,470,5 345,943,3 89,660,5 289,482,5 180,923,3 436,472,5 406,217,4 468,640,4 91,53,3 473,505,5 93,178,5 223,469,4 496,574,3 296,301,4 58,614,4 151,68,5 84,1172,4 428,1009,3 263,287,5 406,226,2 418,256,4 62,107,2 396,482,5 158,242,4 498,180,3 112,6,3 99,677,3 196,585,3 307,236,3 312,419,5 498,513,5 452,116,4 473,507,3 450,302,2 380,1017,4 317,1043,4 504,988,1 158,332,5 150,69,4 469,267,2 353,659,3 447,267,3 362,10,5 351,49,5 344,110,4 453,706,3 467,497,5 80,725,4 494,229,5 269,355,3 377,683,3 467,3,5 455,659,5 58,54,5 384,144,1 380,19,5 473,321,4 304,630,3 65,126,4 380,317,5 396,614,5 233,745,2 185,933,3 404,190,4 404,66,5 6,22,3 338,149,4 137,181,4 356,146,5 475,764,4 57,184,2 232,191,5 397,952,3 423,288,5 428,548,4 180,1001,1 373,69,4 486,236,4 55,199,4 456,117,4 5,502,3 150,180,5 451,215,3 319,1,4 229,239,1 378,92,3 24,654,4 92,865,2 25,1013,3 144,1290,3 41,53,4 176,49,5 252,1015,3 270,442,3 310,120,4 382,356,5 424,301,5 441,568,2 261,234,2 446,134,5 233,1268,3 217,167,4 410,21,4 430,306,3 238,180,3 456,126,5 
453,322,2 455,176,4 248,11,5 6,582,2 477,353,3 406,404,3 377,222,4 463,193,5 180,476,1 285,923,4 117,222,5 143,280,3 113,155,4 497,126,4 415,13,4 483,227,5 421,323,5 496,626,3 378,567,5 478,87,4 372,400,4 504,590,4 341,300,5 267,596,2 157,683,3 249,332,4 428,503,3 129,245,4 188,203,5 403,1237,3 457,791,4 392,320,3 465,231,4 503,65,4 116,762,5 386,745,1 282,167,5 15,769,3 405,920,4 302,116,3 295,124,5 377,156,3 193,480,3 298,299,4 300,402,4 462,1163,1 449,649,4 234,511,5 485,689,2 398,495,3 17,429,4 487,57,3 211,267,5 20,668,1 505,478,4 340,1024,5 404,725,1 400,482,4 456,663,4 343,695,3 499,243,3 486,139,3 89,69,5 384,603,4 449,287,3 364,268,4 416,1138,3 129,764,4 483,462,4 338,356,5 6,174,5 391,345,4 101,651,2 148,285,5 51,120,4 469,918,3 42,1034,4 496,779,2 436,275,5 458,1046,3 233,155,2 290,366,4 12,854,4 378,1021,3 176,152,4 268,777,3 496,259,4 414,640,3 404,1,1 324,270,3 359,123,5 267,3,4 200,1192,4 238,184,4 344,142,5 144,235,1 140,973,4 496,942,4 252,493,5 261,420,4 373,288,1 21,104,1 505,409,2 337,606,4 124,450,4 310,175,4 434,134,3 457,345,4 464,524,3 58,356,5 479,171,3 122,461,4 467,115,4 213,222,3 436,87,3 424,514,3 173,363,1 504,98,4 450,748,3 506,342,5 97,513,5 0,268,5 415,1335,1 384,428,4 270,178,4 270,392,4 389,274,5 405,460,3 326,153,4 292,6,3 467,468,4 416,482,5 357,510,2 415,1399,4 312,93,3 342,520,5 48,381,2 449,94,3 253,541,3 401,123,4 150,229,3 498,237,2 253,195,4 416,727,3 462,281,3 449,0,4 86,870,4 315,922,5 335,3,4 59,356,4 278,127,5 17,401,3 377,805,4 416,777,4 238,473,5 416,999,4 93,191,4 416,1206,3 307,156,5 278,1434,3 266,384,3 428,624,3 496,251,3 476,780,4 422,346,3 84,1009,2 220,297,4 202,99,1 454,503,4 275,353,4 249,526,4 235,590,4 103,256,4 43,99,5 182,54,4 7,517,4 31,474,5 290,822,3 127,267,3 446,257,5 4,193,4 398,451,3 404,553,1 371,261,4 380,565,2 333,1169,4 249,312,5 229,97,5 317,160,3 312,117,4 278,98,3 17,164,4 467,410,3 307,640,4 325,7,4 255,105,4 384,1158,4 388,518,4 449,153,3 1,282,5 496,507,3 297,293,3 388,131,5 503,178,1 390,317,4 503,806,4 416,560,3 129,234,4 462,126,5 267,127,3 298,1505,4 5,464,1 377,6,4 129,410,5 275,1300,4 487,153,3 87,310,5 206,275,2 91,673,4 489,992,1 4,228,2 485,286,4 455,483,4 326,69,4 496,656,3 329,62,3 79,153,3 63,474,5 388,466,3 398,1183,3 119,257,5 180,104,1 94,167,4 261,209,3 94,27,4 216,28,2 420,6,3 333,590,4 0,31,5 451,206,4 297,57,4 387,300,4 190,299,4 513,95,5 398,187,4 377,209,4 55,549,4 279,219,5 183,380,4 98,314,4 504,712,3 220,6,4 376,337,3 286,236,5 296,429,1 425,612,3 329,97,5 391,812,3 275,414,3 93,110,4 424,750,2 393,100,4 93,527,5 250,32,3 233,179,3 382,99,4 82,831,3 40,201,2 333,427,4 377,384,4 141,332,5 94,490,4 168,494,3 415,92,4 456,218,4 335,1040,2 362,67,2 270,522,4 424,876,3 311,631,3 329,98,4 492,924,3 492,155,1 471,70,2 313,422,4 489,256,3 275,822,3 428,210,5 467,22,4 22,960,5 65,404,3 60,326,2 48,158,2 502,658,5 416,1231,2 233,1050,2 502,899,5 392,1467,4 73,350,3 104,689,3 231,116,3 285,702,2 144,155,5 502,193,4 302,10,4 354,270,3 291,522,4 235,495,3 263,1354,4 58,482,5 473,292,4 451,587,3 21,430,4 415,256,3 62,332,4 270,97,5 507,237,4 1,275,4 101,244,3 434,375,2 300,180,5 75,1156,1 435,503,4 502,163,3 510,345,4 449,1019,4 89,88,5 337,512,5 503,71,4 513,14,4 5,460,4 497,53,2 245,440,3 370,422,5 156,99,5 215,279,2 314,172,4 456,56,4 497,186,4 206,482,5 76,267,5 89,299,3 103,257,3 22,587,4 454,69,3 59,493,4 400,403,2 223,283,3 492,410,1 398,96,4 221,185,5 152,49,1 488,309,4 503,677,4 485,993,3 232,46,5 449,711,3 95,193,2 487,473,2 416,773,4 412,301,2 243,214,4 336,126,3 496,273,3 482,461,3 69,23,4 
416,1040,3 252,11,5 86,865,4 449,226,3 403,738,4 505,280,3 503,1262,4 190,890,3 476,708,5 458,826,3 266,731,4 395,99,2 200,750,3 415,195,5 424,155,5 449,922,5 311,492,5 315,184,2 200,454,3 404,731,5 199,983,3 404,1438,1 41,755,5 84,204,4 164,269,4 483,559,4 371,128,4 359,327,3 108,424,2 346,431,4 444,208,4 166,317,5 436,68,2 253,434,3 466,256,4 306,55,4 82,68,4 278,131,3 261,450,4 479,614,4 329,93,4 324,433,5 456,65,4 329,27,5 250,1015,3 384,185,1 310,561,3 233,886,3 265,24,3 410,226,3 471,757,1 415,514,5 424,31,3 392,928,3 82,232,4 37,587,5 91,408,3 255,784,4 434,85,4 503,580,4 82,14,4 320,47,4 10,507,4 434,4,2 471,567,5 262,201,4 213,63,5 268,47,5 437,180,4 6,186,4 280,303,5 302,602,5 505,228,4 275,525,4 485,881,2 415,691,5 401,181,5 221,1015,3 222,973,2 61,49,5 75,317,3 412,235,4 341,25,2 384,1102,3 193,561,2 52,63,5 414,753,4 360,513,5 236,407,5 292,683,3 403,347,3 57,1100,5 300,131,4 207,193,5 323,293,5 10,78,4 362,78,2 108,78,5 487,179,2 275,191,5 488,880,2 209,516,4 498,481,2 176,91,4 118,331,4 427,330,4 12,795,3 310,782,3 398,430,2 468,152,4 233,734,3 435,641,4 229,24,3 437,117,4 467,248,3 382,653,5 319,70,3 75,6,4 384,1159,2 499,970,5 269,89,5 306,184,3 120,164,4 345,1134,4 353,201,3 424,67,4 320,63,3 383,315,5 502,126,5 12,826,3 263,18,5 248,587,3 313,587,5 405,922,3 173,714,3 192,81,2 307,275,4 478,1012,1 190,899,4 454,278,3 505,493,5 279,768,3 209,567,4 473,743,3 93,186,4 404,394,3 212,142,5 496,381,4 499,468,4 345,299,5 213,126,4 374,287,4 380,960,3 331,930,2 63,81,3 37,126,2 388,126,5 422,750,3 390,773,2 109,894,2 214,270,4 335,574,3 453,403,3 10,221,3 17,90,3 378,630,5 342,613,5 270,370,5 90,131,3 405,171,5 505,464,4 504,244,4 189,470,5 148,301,4 445,887,1 362,650,3 441,41,4 193,422,3 404,559,1 183,944,4 112,423,1 214,312,5 467,12,4 59,734,5 436,746,4 270,76,4 59,434,4 377,731,4 416,1445,3 273,865,4 392,153,2 449,1221,3 330,1193,3 460,268,3 177,37,3 168,683,5 335,1436,2 69,7,4 33,323,5 505,559,3 150,1202,5 193,735,2 144,362,4 351,128,5 514,343,2 274,70,3 453,233,3 30,267,3 449,1151,5 360,638,4 486,254,2 30,31,5 408,152,4 395,24,3 446,747,1 477,187,4 292,587,3 296,153,5 468,604,4 377,67,2 394,230,4 342,41,4 300,738,2 300,520,3 405,174,5 469,949,3 325,434,3 362,292,4 129,691,5 492,209,5 58,769,4 444,478,3 495,317,4 181,110,4 63,510,4 424,674,3 275,342,4 243,731,1 226,1027,2 304,970,4 336,519,5 465,545,4 320,152,4 386,276,4 235,224,3 9,605,5 267,225,4 415,749,5 302,27,3 428,672,3 58,209,4 185,1398,2 392,716,3 473,1008,4 415,681,3 307,1410,4 450,1392,2 469,545,4 405,631,4 144,766,2 416,292,4 485,256,3 161,143,3 449,100,5 416,678,2 178,306,3 115,303,2 341,422,4 384,174,4 42,96,5 421,681,2 313,995,4 4,182,4 486,709,4 473,116,4 101,209,3 369,606,5 345,745,3 449,394,3 144,1027,5 57,309,4 477,657,3 436,123,5 65,247,4 388,166,3 12,678,4 112,947,3 22,529,4 449,1049,4 135,136,5 449,93,4 467,257,4 391,248,1 386,160,1 458,410,2 488,456,3 360,169,5 497,473,4 478,639,4 511,264,4 381,149,2 234,82,4 404,97,4 205,747,4 487,467,5 346,239,5 405,134,5 329,450,5 387,681,4 275,61,2 208,285,2 58,674,5 209,326,4 292,24,3 19,93,2 65,507,4 268,581,4 61,511,4 400,534,2 289,495,4 415,789,4 44,819,4 89,638,5 238,511,5 346,157,3 449,271,5 295,258,1 229,950,5 355,346,4 388,89,3 278,400,5 274,61,3 473,706,5 4,439,1 81,283,4 450,269,4 514,361,4 326,327,2 0,140,3 17,518,4 499,115,4 292,249,3 406,501,2 195,339,3 68,41,5 295,662,5 248,545,3 338,409,2 297,429,5 222,929,2 294,160,4 169,322,3 397,87,4 359,482,5 512,116,5 384,424,3 290,654,4 232,13,4 83,0,2 373,251,3 496,569,3 177,23,3 197,221,3 118,55,4 108,567,5 
243,520,4 434,791,4 486,11,5 474,285,2 255,78,5 202,814,4 405,147,3 199,27,5 307,18,3 412,283,4 223,312,5 390,60,5 248,422,4 159,272,5 326,474,4 415,80,5 503,369,3 455,1266,4 4,28,4 449,628,4 175,951,2 349,184,5 453,257,4 72,31,4 455,56,4 176,267,3 505,384,4 343,290,3 499,57,3 23,179,5 94,707,2 496,187,3 12,528,4 270,126,5 457,11,5 455,422,3 359,514,4 338,677,2 108,180,5 17,283,3 399,748,4 477,47,4 310,443,2 261,1094,2 312,427,3 416,69,4 93,762,3 441,349,2 345,635,3 42,230,4 44,755,2 233,590,3 42,195,4 326,189,4 77,256,4 435,167,3 104,263,2 415,240,5 462,248,2 41,42,2 522,113,5 155,650,4 377,500,4 193,355,2 156,933,2 402,273,3 266,163,3 153,287,3 428,123,4 96,131,5 499,236,3 56,10,3 307,230,3 290,32,4 416,707,2 245,566,5 446,150,3 487,291,3 294,59,5 338,126,5 414,478,4 249,987,4 416,55,5 197,0,4 429,292,3 504,747,1 511,22,4 89,135,5 143,1064,4 47,293,3 311,520,5 275,690,4 292,461,4 434,152,3 434,61,3 279,506,3 320,479,4 444,843,2 378,648,4 386,1133,1 221,520,5 94,69,4 456,228,4 446,230,2 492,651,5 150,715,2 397,93,2 6,546,3 473,171,5 113,654,3 252,201,5 503,1436,2 514,312,4 517,123,3 101,547,2 496,257,4 324,185,4 157,38,5 67,457,1 308,318,4 503,413,5 424,582,3 494,378,5 404,373,1 379,664,2 386,146,2 206,457,3 109,214,3 342,529,5 115,510,4 414,194,5 84,193,4 507,194,3 476,14,4 359,325,3 267,323,4 279,724,3 406,518,4 474,99,5 101,230,2 338,160,3 94,462,5 437,285,2 416,19,2 93,267,4 22,99,5 312,257,3 405,275,4 91,217,4 494,55,5 312,662,5 112,123,3 85,269,5 415,330,4 12,883,2 391,133,5 362,116,5 248,740,4 497,521,3 302,257,4 436,210,4 117,78,5 233,15,2 333,641,5 345,37,3 279,1465,5 193,120,2 415,844,4 494,450,4 300,80,3 504,497,5 436,517,2 185,753,2 260,686,5 307,117,3 63,226,3 454,320,2 212,503,5 498,660,3 300,446,4 25,245,4 503,193,3 386,469,3 278,931,3 102,404,3 295,78,4 6,448,3 450,331,4 166,95,5 20,412,2 392,801,3 454,203,4 30,174,5 384,1065,4 285,146,5 58,641,5 258,120,3 427,895,4 404,1220,1 467,284,4 327,61,3 353,888,5 445,302,2 447,315,1 55,398,4 370,626,4 95,194,5 392,375,4 200,771,5 373,1214,1 307,557,4 382,1062,5 115,267,5 327,225,3 177,684,4 223,332,3 269,4,5 56,63,5 373,1048,1 166,477,5 223,325,4 192,780,3 88,239,4 408,482,4 326,326,3 346,755,2 415,1073,5 508,318,2 325,656,5 91,363,3 494,390,3 5,409,4 471,738,5 460,312,4 121,179,5 453,641,2 517,1046,4 493,236,4 498,214,4 471,254,5 377,495,3 386,90,4 415,224,1 360,429,5 428,165,5 372,422,2 95,317,5 306,180,5 12,879,3 386,150,3 47,268,1 274,98,3 233,184,3 67,285,5 499,29,4 472,546,3 472,9,3 176,10,4 325,63,4 365,560,5 147,49,5 48,1017,2 89,971,4 485,817,3 478,421,3 16,136,4 63,435,5 78,905,5 515,198,3 449,224,4 406,634,3 380,846,4 478,160,3 144,686,2 320,509,5 504,186,1 200,955,4 435,124,4 255,683,5 90,134,4 357,557,4 278,93,3 311,175,4 325,510,4 300,501,4 450,1295,3 17,948,3 93,89,3 330,734,4 71,211,5 471,364,4 420,10,2 268,95,1 405,640,5 313,124,5 302,136,4 362,530,4 517,290,3 446,116,4 144,22,4 373,185,5 332,512,4 444,294,1 382,339,5 102,300,4 345,179,5 408,190,5 74,677,3 493,0,3 325,650,4 166,434,5 270,285,4 314,175,4 415,282,5 319,549,5 306,203,3 326,1006,4 166,1303,4 234,602,3 278,623,4 424,21,3 220,357,3 392,1257,3 456,281,4 144,876,2 464,178,3 471,87,2 509,288,2 324,49,5 180,1358,1 304,326,3 360,501,4 17,199,3 213,153,3 12,315,5 386,46,4 456,257,5 486,135,5 58,110,4 144,788,4 377,14,4 304,63,5 43,248,4 347,125,5 456,239,3 358,297,5 392,89,2 293,618,3 465,1312,3 320,8,4 173,1281,5 131,174,3 404,728,4 4,377,1 158,273,3 278,443,3 478,70,1 434,153,4 24,473,4 373,160,5 233,1132,3 448,545,2 429,301,4 360,59,4 327,69,4 302,238,3 
478,747,3 449,401,4 433,755,2 379,462,4 475,780,4 427,878,4 473,233,5 329,567,5 268,76,1 91,590,4 350,299,5 504,467,4 378,232,3 200,1267,4 424,528,4 21,230,2 434,283,2 405,70,3 374,4,4 326,11,3 436,580,1 465,349,4 408,483,4 201,203,3 404,1252,1 362,69,2 393,83,4 493,198,4 485,5,4 338,529,5 382,530,3 359,747,2 231,245,4 276,146,4 415,346,4 469,92,4 21,366,1 391,879,4 270,504,4 245,11,5 388,996,3 513,428,4 279,238,3 305,320,3 523,150,1 471,1468,4 500,24,3 14,49,5 117,131,4 494,1443,2 124,789,4 313,814,5 12,154,2 202,992,3 404,1289,2 375,426,4 290,139,4 243,940,4 277,285,5 209,434,4 494,199,5 324,27,3 449,735,5 42,478,4 285,223,5 480,312,4 226,320,3 524,0,4 469,220,4 249,675,5 1,0,4 220,155,5 62,14,3 127,132,5 425,1203,4 63,678,3 388,473,5 442,322,2 520,80,1 177,1100,4 470,417,3 51,116,4 4,391,2 180,876,2 64,257,3 378,190,5 270,256,4 169,347,3 177,1156,3 523,204,5 55,233,4 223,735,3 192,93,3 215,57,4 270,1045,4 523,88,5 513,745,5 157,292,4 121,68,2 127,654,3 478,163,4 453,10,1 415,1425,5 6,418,3 485,122,3 228,310,5 415,267,4 291,481,5 469,292,4 487,654,3 70,174,4 108,28,3 444,479,3 159,530,5 177,94,5 523,659,5 446,641,4 382,222,3 180,335,2 194,1415,2 457,741,4 307,14,3 291,155,5 129,981,1 375,222,4 482,249,3 177,590,5 212,654,4 434,84,4 404,1587,1 39,244,3 435,199,3 392,256,4 346,225,4 177,237,4 231,301,5 523,284,3 129,531,5 183,511,4 58,712,5 12,645,4 183,526,4 478,384,2 453,256,4 229,264,5 100,279,3 427,987,1 25,627,3 453,196,4 15,239,4 404,572,3 261,180,3 12,688,2 122,97,4 61,0,2 462,283,3 150,780,3 177,234,1 404,1165,1 12,346,5 250,1097,3 333,82,4 324,767,3 270,662,4 415,328,3 502,172,5 63,430,4 192,71,2 21,448,1 103,301,5 335,387,1 45,244,3 428,484,3 338,79,3 377,692,4 30,489,4 270,613,4 264,297,5 380,282,5 336,14,5 466,239,3 347,110,5 63,747,1 302,386,5 496,384,3 162,432,1 469,245,2 91,144,2 474,346,4 404,1589,1 116,143,4 353,752,5 364,287,5 9,220,4 180,590,4 456,1011,4 372,654,5 328,168,4 275,796,3 166,239,1 101,565,2 373,619,3 416,122,2 436,656,5 194,1406,2 386,96,2 101,90,3 505,1607,2 326,210,3 397,479,5 56,762,5 456,768,2 485,2,2 503,754,4 216,32,4 58,442,5 416,272,3 58,646,5 483,777,5 353,254,2 440,8,4 454,303,3 103,312,4 292,37,1 9,431,4 384,481,3 373,973,4 449,706,5 245,207,4 90,175,5 517,24,5 261,958,2 491,482,2 513,327,3 392,875,3 124,434,4 439,970,5 431,149,5 269,334,3 398,11,3 377,247,3 302,539,1 473,284,5 404,1147,1 356,596,4 478,435,4 172,689,5 180,474,2 457,1260,4 151,503,4 373,923,5 444,116,1 297,133,5 43,80,4 345,365,2 364,293,1 261,39,4 12,553,2 342,88,3 523,614,2 393,127,3 10,68,3 115,305,3 89,1194,5 448,1005,4 435,287,4 83,224,4 499,552,2 326,1016,2 284,301,5 4,389,5 455,49,4 353,651,4 141,123,4 519,314,4 507,68,3 336,448,4 94,192,3 412,8,4 302,2,3 479,173,5 59,1049,3 66,24,4 326,463,4 121,581,5 258,287,3 518,1616,5 495,155,3 292,164,3 221,30,5 89,429,3 266,228,4 434,422,2 494,234,5 275,72,3 115,115,3 503,117,3 513,188,5 291,606,4 449,59,3 411,6,5 507,143,3 373,88,2 436,217,2 492,179,4 25,125,4 160,434,2 404,524,1 278,435,4 147,88,5 353,1196,3 9,115,4 320,174,3 436,745,4 434,750,4 359,307,4 494,215,4 66,545,3 353,514,3 278,596,5 462,1011,2 233,601,4 345,801,4 397,734,4 22,651,4 434,1400,4 455,368,3 445,327,3 42,139,4 448,116,3 298,483,4 494,67,5 266,925,2 429,263,2 93,808,2 200,10,4 452,171,5 404,775,1 392,1227,3 221,387,2 387,741,5 494,43,3 428,164,5 315,167,3 59,607,5 362,71,1 377,386,4 452,383,2 385,832,3 467,250,4 327,316,4 5,503,3 343,404,2 71,581,4 418,180,4 452,426,3 503,401,4 482,248,2 449,612,4 205,261,1 292,745,3 93,65,2 378,78,5 454,777,4 216,678,5 
434,160,3 393,209,4 279,1220,5 449,464,4 285,208,4 458,273,4 437,865,5 503,214,4 487,484,3 441,81,3 522,381,5 416,205,2 256,69,4 478,229,4 347,120,5 71,14,5 507,734,4 245,94,3 114,32,4 138,459,3 455,672,3 449,746,4 296,513,3 441,942,4 471,140,4 326,320,3 524,122,3 471,500,3 378,558,3 522,152,4 275,691,4 465,1,1 173,695,4 93,585,1 467,320,3 229,27,5 22,282,4 486,142,3 302,524,5 313,1040,4 180,933,3 510,291,5 381,55,5 180,1161,1 326,217,3 254,684,3 415,366,5 247,6,2 527,484,2 377,650,4 479,482,3 503,198,4 392,479,4 37,1032,5 392,558,3 298,460,3 283,271,5 514,689,2 233,3,4 267,396,2 326,284,4 48,1071,1 449,492,4 13,95,4 492,545,5 416,264,3 491,126,5 483,925,4 180,1025,1 18,287,3 84,196,5 416,153,4 192,299,4 253,7,5 193,404,2 452,99,5 496,99,4 425,493,3 304,120,3 377,10,3 297,22,4 310,1041,3 313,366,4 204,314,4 290,416,4 428,215,4 275,20,3 232,377,4 96,525,3 405,214,3 93,55,5 55,209,5 17,473,4 307,590,3 319,420,4 31,247,4 196,677,2 244,132,2 400,1015,3 200,237,3 253,77,3 294,117,3 497,196,5 494,171,5 527,238,5 269,278,5 398,558,3 310,749,5 415,218,4 485,285,2 295,522,4 279,234,5 428,30,3 206,59,3 15,468,3 366,558,4 432,58,5 370,49,4 82,321,3 398,709,2 492,297,3 297,317,5 262,689,5 177,89,3 111,320,3 362,536,1 5,320,3 428,257,4 285,143,3 91,99,5 498,741,4 428,391,3 496,81,4 503,196,4 122,482,4 317,93,4 312,403,4 471,410,4 125,327,5 503,416,3 129,81,5 344,1007,3 93,507,5 468,529,5 503,718,3 503,280,4 392,1184,3 434,174,4 517,918,5 473,497,4 503,933,4 503,74,4 496,863,3 479,233,4 285,215,4 493,190,4 310,723,4 205,1433,1 507,171,5 313,10,5 434,199,5 421,52,4 503,142,4 346,981,1 523,649,2 473,195,5 441,684,2 196,683,4 471,1094,4 482,448,3 424,840,1 221,237,5 449,122,2 471,134,4 262,86,4 478,1243,3 240,287,5 444,247,1 338,185,4 455,1007,4 17,8,5 245,745,4 264,244,4 485,716,2 94,970,3 300,194,5 531,57,4 9,333,4 12,612,4 47,495,5 25,180,4 243,1078,2 329,553,3 457,287,3 188,131,5 159,670,5 488,270,4 179,257,5 457,955,5 353,65,2 252,299,4 428,136,5 290,545,3 256,244,4 341,7,4 435,324,3 506,337,5 471,577,5 0,210,3 529,526,4 503,1441,3 193,68,4 415,745,5 63,568,3 286,8,5 310,195,5 395,741,4 449,9,4 275,557,4 80,274,4 405,203,5 451,202,3 279,764,4 43,158,3 530,989,5 263,513,5 523,1267,3 6,573,5 462,148,2 278,70,3 62,1011,3 505,71,3 502,136,5 61,1008,4 388,601,4 85,327,2 369,134,4 231,602,4 471,216,5 477,495,5 386,767,1 6,615,4 58,415,3 398,553,3 100,256,4 320,82,4 505,1218,2 113,854,3 144,116,5 91,1010,3 84,29,3 424,16,4 38,936,5 41,719,4 348,543,4 101,500,2 275,461,4 269,726,5 206,87,2 150,781,4 115,323,2 455,272,3 487,490,4 80,1046,3 175,24,3 122,318,4 212,510,4 202,149,5 229,236,5 499,222,4 91,142,3 185,594,3 373,105,3 20,239,4 416,117,4 64,513,4 458,335,2 82,738,5 265,285,4 155,177,5 255,184,5 346,691,4 235,215,5 249,339,4 352,271,5 475,1187,2 523,604,1 495,632,3 434,1073,2 94,422,5 117,52,5 523,181,5 400,429,2 343,69,3 199,106,3 398,225,3 458,120,5 397,124,3 72,170,5 504,53,3 517,283,4 36,824,2 129,260,4 478,121,1 189,897,2 136,173,5 350,244,3 226,272,3 381,331,3 228,271,3 108,215,3 436,780,4 451,1426,5 478,654,4 513,46,4 449,513,5 209,289,4 408,477,4 436,167,3 408,1064,2 342,0,5 415,345,4 341,130,5 396,587,4 457,283,4 326,181,4 267,728,3 300,227,3 501,682,3 238,64,5 466,99,5 424,285,1 360,128,4 459,1114,3 531,602,5 362,1213,1 372,431,5 93,785,3 313,1310,5 233,68,4 449,1040,4 127,158,4 4,100,5 523,602,3 337,442,5 371,1108,4 476,87,5 455,984,3 528,324,3 463,704,5 418,88,3 250,428,4 387,297,5 197,762,3 404,643,3 353,192,3 482,312,2 455,85,2 496,764,3 159,247,5 344,1022,2 499,132,3 424,444,3 18,886,4 
235,672,4 300,126,4 293,409,4 73,327,4 377,403,4 143,68,5 160,180,2 473,209,5 104,270,2 416,15,3 444,830,1 302,517,4 9,691,4 473,755,1 453,99,4 118,328,3 302,419,4 173,209,4 326,68,2 183,1013,2 446,4,3 93,931,2 454,528,3 500,306,4 505,49,5 453,142,4 391,58,4 393,155,4 144,737,3 4,207,4 523,1043,4 345,575,3 483,878,4 400,631,4 400,281,3 5,521,5 415,93,2 494,28,2 482,270,3 43,24,2 302,24,4 273,97,5 435,275,4 507,187,4 183,1166,5 14,332,1 473,495,4 170,257,4 359,49,4 156,1243,3 372,142,3 147,208,5 436,131,5 41,933,4 415,914,5 392,814,4 473,675,3 492,249,4 496,801,2 428,183,4 233,136,3 69,299,4 479,318,3 255,814,5 370,63,4 370,654,4 456,53,4 404,956,1 93,92,4 404,415,2 57,651,5 494,108,5 495,96,1 329,728,5 346,355,5 391,649,5 405,218,3 325,482,5 270,234,3 487,604,3 12,356,3 91,420,4 86,227,5 17,971,3 2,267,3 255,740,4 435,793,4 91,159,4 494,221,5 111,338,4 314,326,4 475,237,3 416,197,4 332,152,4 295,179,5 193,819,1 221,505,2 503,403,4 421,326,3 398,581,3 86,20,3 202,282,5 298,1004,5 310,647,4 85,887,4 297,0,5 274,407,3 378,446,4 100,741,4 495,379,2 296,236,4 278,290,3 516,283,2 142,322,3 76,249,3 499,738,2 285,120,3 292,938,2 345,55,5 59,172,4 124,704,5 454,46,2 510,312,5 244,1032,5 505,778,2 508,49,5 22,855,4 192,507,4 103,288,4 526,151,2 338,275,4 0,39,3 449,12,3 463,325,4 526,955,4 367,312,5 453,508,2 270,519,5 338,156,4 159,409,4 523,197,4 6,655,3 473,60,3 338,427,5 467,136,4 320,1125,3 331,263,3 386,971,2 331,455,4 436,207,5 484,288,3 36,565,4 531,94,5 6,557,4 490,18,4 12,190,3 488,271,5 499,163,4 436,793,4 435,158,4 499,1162,1 310,422,5 341,1166,1 51,203,4 428,1216,2 518,351,5 177,126,5 42,132,4 531,69,4 91,650,4 41,57,5 108,410,4 434,48,4 295,302,4 405,1201,3 84,711,3 428,163,4 424,54,4 90,602,5 12,908,5 496,151,2 176,175,4 326,297,3 275,221,4 455,68,4 506,825,5 302,543,4 199,391,5 357,317,5 127,386,2 513,401,4 488,262,2 388,779,3 494,641,4 160,185,4 344,222,5 458,0,4 416,366,2 350,1023,4 362,215,3 25,830,2 108,808,4 279,323,5 298,961,4 338,708,5 93,992,4 449,450,4 183,707,4 196,894,3 438,404,4 405,473,5 475,943,2 41,124,4 327,11,5 397,493,3 496,587,4 198,1325,3 429,220,5 104,312,5 209,68,4 392,760,4 475,733,4 204,1024,1 384,7,5 294,630,5 222,590,3 267,178,4 407,314,5 465,333,3 415,24,4 106,267,4 404,418,4 359,247,4 74,113,4 392,420,2 384,495,2 532,948,4 101,37,2 17,93,3 507,567,4 496,229,2 183,653,4 109,230,1 61,435,3 506,1033,5 520,1015,3 222,748,4 312,166,3 379,565,3 392,238,4 101,331,3 234,700,4 349,264,2 344,533,4 453,650,4 103,289,4 51,587,4 415,784,3 446,677,3 207,995,3 388,207,5 531,368,3 137,132,4 353,660,4 311,513,3 501,293,3 404,1586,1 372,188,5 501,878,3 495,153,2 472,275,4 451,683,4 397,48,3 34,880,2 452,267,4 118,275,2 508,287,5 188,646,4 380,138,3 293,412,3 428,691,3 302,53,3 471,551,5 290,230,3 311,653,5 338,639,5 245,678,2 499,385,3 434,117,2 492,174,4 297,78,5 124,779,2 449,417,4 94,204,3 471,394,3 505,1015,4 317,281,4 464,27,3 243,104,2 379,698,3 128,301,4 485,596,3 424,1463,2 25,23,3 416,1181,3 388,609,5 108,402,5 274,134,3 386,194,4 90,181,4 194,325,3 60,747,2 326,707,4 532,476,4 499,639,4 5,46,3 345,180,5 229,567,3 255,87,5 317,377,4 17,706,3 268,184,5 424,678,3 405,197,2 388,611,4 366,182,5 372,63,4 183,737,3 329,292,3 384,142,3 335,1495,1 386,649,2 197,55,5 313,283,3 425,523,4 292,659,2 267,88,4 261,734,4 523,65,3 496,100,4 232,195,5 253,1182,4 496,86,3 531,215,5 373,927,1 43,447,2 42,85,4 337,707,5 312,492,3 302,549,3 221,805,4 486,264,5 421,249,5 330,957,5 473,528,5 502,53,2 475,584,1 428,590,3 285,13,4 425,834,3 459,300,3 369,194,4 256,1159,4 495,726,5 
229,94,5 302,381,3 362,321,2 278,648,3 319,65,4 455,1167,4 485,275,4 373,225,5 326,427,4 0,269,5 445,753,3 290,402,4 477,603,3 392,493,4 303,894,3 452,16,4 34,677,3 193,692,4 465,332,4 384,133,5 454,691,3 523,94,3 238,420,5 497,257,2 500,323,4 213,99,4 344,69,5 478,751,3 0,132,4 472,507,2 105,215,5 404,768,1 455,639,4 84,511,3 89,25,4 12,320,2 452,275,5 520,178,4 502,401,3 143,243,3 483,93,4 380,98,5 415,709,4 513,174,4 471,474,5 41,174,2 290,214,4 274,131,3 313,77,4 416,254,3 233,949,2 425,1063,4 266,654,4 144,760,4 326,1068,4 416,38,3 532,1176,1 531,332,4 424,326,4 503,402,3 458,221,4 415,143,5 416,561,4 143,143,4 473,180,5 158,71,3 186,650,5 63,190,4 436,57,4 327,558,3 59,432,4 333,124,3 408,614,5 449,610,5 192,236,4 404,210,1 326,685,4 404,1306,1 522,661,4 206,755,2 434,26,1 444,1276,2 504,65,4 531,398,3 310,503,4 120,1265,4 364,150,4 415,392,4 377,1047,2 428,92,4 313,1266,3 486,44,5 290,562,3 372,498,4 494,216,5 115,190,4 304,179,4 319,81,3 337,82,2 61,90,4 278,1265,1 415,157,3 453,78,4 452,287,4 189,14,4 150,548,4 483,596,3 486,86,5 436,380,5 404,787,1 180,357,2 477,16,2 96,182,5 82,248,2 182,175,3 17,479,4 386,510,3 93,740,4 176,468,4 495,1132,3 428,761,4 346,116,5 12,400,1 404,403,4 384,482,4 406,454,3 389,125,5 406,1011,3 231,513,4 86,721,4 382,88,3 523,142,3 221,270,4 193,1010,3 373,68,5 69,82,4 312,167,3 496,93,3 353,271,3 453,1268,3 304,169,4 196,55,1 503,1507,3 495,920,5 267,980,1 95,180,5 144,976,3 372,65,4 452,67,4 182,1158,3 333,196,4 520,155,4 307,67,4 504,704,3 2,287,2 500,150,4 31,312,4 307,240,4 306,659,3 302,356,5 82,410,2 115,202,5 458,7,5 72,271,4 494,391,5 449,257,4 106,312,2 509,329,2 302,454,3 188,1064,5 22,432,5 292,432,3 371,558,4 14,117,1 268,714,4 221,256,4 159,454,4 286,681,4 415,467,5 390,658,4 517,933,3 43,23,3 532,525,2 462,992,2 428,530,5 377,715,3 456,394,2 466,1141,5 209,481,5 12,552,2 285,1238,3 145,327,3 76,214,2 214,195,4 496,1554,2 472,292,4 57,740,2 524,280,3 300,195,4 416,484,3 182,209,3 235,461,4 275,330,4 59,404,4 7,228,5 119,120,4 25,301,5 427,876,5 532,49,5 463,115,4 496,548,4 262,542,5 436,96,3 486,187,4 101,770,2 462,123,5 468,193,5 292,159,4 477,80,4 275,127,4 486,548,4 406,207,4 415,782,3 434,53,4 423,126,4 502,280,3 57,170,5 275,119,2 504,421,3 311,1125,4 42,734,4 450,291,3 57,171,5 52,567,4 117,55,5 406,229,4 482,276,3 326,221,2 215,789,3 436,41,3 485,1201,4 198,92,4 93,1217,4 446,285,2 101,52,2 177,404,3 496,76,3 486,540,3 84,411,3 378,574,2 289,1284,3 372,827,3 201,480,1 23,68,5 406,0,4 17,638,4 263,216,3 59,834,4 235,684,2 256,180,5 43,677,3 503,528,4 105,279,2 6,139,5 513,301,5 150,656,5 376,97,5 333,745,3 253,27,4 63,217,1 42,216,2 478,132,2 362,57,3 286,180,3 415,930,3 435,380,4 478,404,4 505,76,3 289,925,3 350,321,5 379,669,1 23,485,3 477,368,3 447,899,3 456,113,5 398,1138,4 52,117,4 295,60,3 485,885,3 150,274,5 240,681,2 417,898,5 15,10,5 370,116,3 416,664,2 42,1047,3 266,549,4 302,615,4 7,227,5 434,353,3 347,290,4 243,745,3 496,98,3 378,167,4 520,68,3 498,10,3 150,602,5 261,54,3 343,279,3 495,37,2 187,4,4 298,507,4 486,160,5 193,1040,2 7,194,5 431,1015,3 199,981,2 395,332,4 453,685,2 494,628,3 269,256,4 314,270,3 307,468,5 453,49,4 475,1073,4 415,683,5 84,26,4 346,459,3 294,704,4 259,287,3 278,218,2 132,312,3 373,982,2 12,526,5 68,885,4 250,312,5 229,203,4 282,48,4 404,666,1 307,429,4 200,734,3 338,507,4 59,58,5 389,318,5 455,221,2 151,719,5 333,656,4 523,1559,4 396,21,4 63,181,4 122,285,5 221,472,1 182,225,3 392,629,4 52,545,4 397,27,5 255,209,4 311,213,3 408,165,4 128,310,3 214,202,3 42,346,3 101,28,1 329,134,3 473,480,4 
252,7,4 531,131,5 449,904,5 108,63,2 313,626,4 286,425,3 123,27,3 473,243,4 455,110,3 492,78,5 307,714,5 345,368,3 425,491,5 486,678,2 339,49,4 245,23,4 379,80,3 465,314,5 313,1470,4 255,14,5 342,176,4 303,299,5 451,233,3 268,1396,4 416,269,2 6,676,3 486,734,4 432,332,2 523,1153,1 499,286,3 485,717,3 114,921,3 470,0,4 449,1478,3 12,842,5 494,132,3 157,193,5 384,168,5 248,466,4 381,179,5 121,723,4 323,259,5 381,196,4 496,55,4 238,49,5 416,942,3 268,211,4 158,318,1 336,635,4 499,1013,2 140,312,5 379,339,3 446,483,5 534,0,3 220,406,2 418,133,5 523,233,4 452,233,3 454,256,4 206,523,4 144,409,4 243,172,4 458,146,3 173,322,1 316,747,5 532,287,2 428,184,4 278,763,3 344,293,3 304,194,3 519,282,4 90,97,5 157,398,3 404,1107,1 535,173,5 459,321,3 266,404,3 6,99,5 250,812,3 400,761,2 22,81,3 381,13,3 96,196,3 512,471,4 486,819,3 522,185,3 233,380,3 453,55,3 218,1013,3 302,269,4 485,124,3 268,782,1 117,134,5 58,431,4 430,299,4 220,49,4 523,941,4 255,21,5 285,736,4 118,1259,5 252,99,4 177,96,5 477,738,4 150,769,4 386,112,4 214,49,5 281,339,3 388,450,2 404,713,1 433,1050,3 468,133,5 526,587,4 168,330,5 416,460,3 513,747,2 527,57,5 315,729,4 193,761,3 393,30,3 333,285,4 415,12,5 324,1231,1 404,1570,1 1,304,3 180,1276,2 425,615,4 180,919,1 212,184,5 429,285,4 458,863,4 475,747,2 24,142,3 188,500,4 404,970,1 433,1094,5 42,2,2 479,346,3 313,800,3 59,581,4 151,274,4 441,709,5 404,1199,1 21,201,5 238,960,5 292,28,1 506,116,3 491,123,4 15,946,4 267,233,4 499,826,2 464,587,4 523,478,4 445,687,2 454,1136,3 454,726,3 6,556,4 329,209,5 494,1109,4 14,14,4 93,508,5 271,95,5 86,143,4 150,237,5 129,97,5 534,285,2 345,183,1 250,150,5 108,76,4 453,735,3 58,422,5 415,66,4 436,479,4 434,293,4 475,327,4 496,70,4 307,200,5 534,518,3 129,938,4 392,409,4 275,958,4 384,486,4 483,233,4 532,102,3 285,587,5 118,21,4 457,85,5 462,1116,1 343,128,4 438,281,3 449,501,5 397,273,3 534,514,3 501,889,2 429,11,4 91,619,3 388,104,3 238,498,5 234,180,3 98,124,4 428,11,5 278,977,1 503,287,5 468,9,5 412,244,2 176,199,4 362,6,3 252,342,4 197,275,3 437,279,5 526,27,3 270,702,3 380,1116,4 15,497,5 471,719,5 454,30,4 383,312,5 200,630,2 453,271,5 388,383,2 486,68,4 455,167,4 392,464,4 122,656,4 415,878,3 338,196,5 296,257,5 456,156,5 261,1012,2 292,1228,1 129,928,4 339,519,5 450,1024,3 338,226,2 473,695,3 488,342,5 133,327,4 532,274,4 525,6,4 74,426,4 307,522,4 323,748,3 393,540,3 428,1016,3 266,809,3 339,172,5 74,303,2 115,887,2 261,581,4 88,723,4 386,175,3 40,317,4 386,454,4 526,614,4 357,528,3 144,53,5 499,0,4 157,651,4 392,355,3 478,210,4 503,275,3 484,346,2 388,233,4 312,482,5 404,1548,1 486,1216,3 9,479,5 275,16,4 58,507,5 183,410,3 312,1090,2 449,791,4 109,229,3 403,677,4 471,1247,4 55,231,4 333,370,2 140,117,5 523,422,4 15,199,5 268,658,4 268,461,3 69,160,3 380,897,5 89,426,5 212,404,3 357,267,3 99,285,3 180,1279,1 151,160,5 486,80,3 159,201,4 56,23,3 115,899,4 472,1141,5 129,158,4 518,1279,5 435,399,3 505,474,1 53,290,1 531,745,5 390,275,3 233,142,3 183,400,3 273,590,4 300,46,4 300,234,2 426,687,5 290,390,1 217,711,3 496,1041,3 455,580,3 242,422,3 455,577,2 532,327,4 248,63,5 261,784,3 523,186,5 245,1051,1 532,594,2 118,915,1 426,291,2 425,672,4 69,312,4 516,299,5 531,768,2 255,53,5 109,585,3 93,403,4 397,158,3 113,317,3 478,615,4 99,907,1 526,962,4 302,657,5 342,228,4 17,165,4 120,627,3 298,1049,4 406,28,3 63,178,5 467,1167,2 127,1047,2 278,188,5 94,505,3 456,958,4 197,120,3 410,526,4 319,287,4 477,402,2 536,27,3 311,523,5 496,227,3 404,225,2 255,1040,4 253,767,3 513,268,4 215,214,5 62,740,3 326,714,4 534,788,2 478,488,5 324,654,4 
327,171,4 498,462,5 5,481,4 5,528,4 214,299,3 278,108,5 20,435,4 449,130,4 209,820,3 89,650,5 534,835,5 12,882,3 373,740,3 200,587,4 390,333,5 0,238,4 84,152,3 278,1086,2 532,244,3 503,76,4 338,422,3 256,197,3 319,78,4 511,0,4 103,747,2 465,881,5 481,297,4 89,305,4 415,451,3 346,403,4 245,215,3 304,285,4 489,122,2 144,256,5 335,55,4 489,763,1 377,1406,3 322,677,2 233,473,4 310,478,5 302,339,5 2,354,3 20,975,1 499,142,3 331,1089,5 307,521,3 91,553,2 37,187,2 341,366,5 340,258,3 26,147,3 102,249,4 415,247,5 377,1210,3 452,297,4 96,95,5 193,49,3 12,94,5 278,1401,1 369,182,4 302,951,3 64,62,2 81,98,4 81,739,2 101,193,3 532,581,3 333,904,1 194,412,3 213,179,5 359,305,4 304,791,4 310,356,5 405,771,4 249,178,4 57,1104,2 221,709,4 536,525,3 108,69,4 504,194,3 188,854,3 496,160,5 534,164,4 503,692,4 477,1520,3 345,171,5 302,507,4 496,53,3 6,26,4 419,287,3 20,300,4 434,156,4 496,500,2 455,461,3 268,517,4 499,814,3 372,384,3 388,482,5 406,290,4 532,251,4 180,981,1 89,169,5 117,512,5 532,650,4 446,110,3 526,126,5 434,337,2 405,507,4 471,287,5 535,273,4 513,608,4 488,330,5 404,626,1 122,478,4 275,157,3 177,750,4 144,565,5 453,530,2 35,357,5 14,301,4 108,1073,4 215,81,4 4,398,3 436,216,3 70,196,5 326,501,3 37,104,3 533,1046,4 294,124,5 420,913,3 507,222,4 284,197,5 426,1264,5 434,127,3 387,894,4 434,37,2 6,608,3 384,870,1 78,689,4 404,701,1 415,447,3 404,642,1 426,302,5 517,13,3 457,281,2 341,123,4 523,482,4 199,178,4 58,51,4 531,347,4 183,81,3 503,418,3 503,1134,4 536,45,3 312,429,5 143,822,3 271,41,4 238,189,1 144,449,3 277,257,3 86,501,5 312,408,2 458,251,4 341,409,3 407,346,3 421,514,4 505,87,4 453,325,4 496,216,4 397,699,2 278,1304,4 259,881,5 326,581,4 510,270,5 118,299,5 292,301,4 328,273,3 448,275,5 416,88,5 290,1302,3 298,528,4 27,322,3 483,254,3 534,1092,4 449,672,3 415,916,4 404,398,1 467,131,5 432,1597,1 304,15,3 424,97,4 398,586,3 451,516,2 82,190,4 490,656,5 485,1513,4 386,249,4 17,780,3 341,173,2 405,205,1 82,120,4 143,86,5 91,63,4 407,538,1 465,175,4 378,526,3 453,630,2 506,322,5 200,422,4 441,1066,3 183,19,4 326,1072,2 307,1125,3 187,763,4 232,526,5 140,126,2 468,1557,5 273,281,5 253,1468,3 497,1425,3 438,92,4 290,762,4 17,424,3 50,602,3 6,76,5 129,30,4 292,823,3 261,472,2 345,287,2 222,923,1 489,125,2 193,8,4 391,324,4 453,426,4 453,734,2 25,1011,4 274,0,4 33,293,1 406,1043,3 468,172,4 221,722,3 449,55,4 492,527,5 199,88,5 502,181,3 331,596,5 384,1427,4 212,23,5 213,171,3 49,323,5 270,601,3 369,424,3 449,939,2 505,229,4 532,449,5 216,545,2 444,146,2 333,509,4 377,181,4 275,762,3 91,799,3 87,299,3 372,3,4 341,195,3 150,377,4 91,448,3 17,131,5 4,455,1 473,71,3 213,521,4 179,152,1 454,125,5 200,581,5 436,420,4 152,264,4 488,680,3 427,299,5 312,417,3 312,839,2 487,227,4 317,868,3 503,1414,3 483,121,2 346,251,2 182,180,2 372,207,4 519,1050,3 58,1049,2 223,743,1 442,257,5 48,79,1 13,749,3 251,0,5 219,304,4 41,160,4 102,254,5 449,1191,5 255,232,4 356,819,4 428,46,4 405,178,5 167,870,3 465,747,2 71,117,3 456,379,4 526,203,5 452,155,5 24,194,4 392,88,3 384,191,5 434,1132,2 253,264,3 434,671,1 362,858,4 57,462,3 536,601,3 206,209,3 179,732,5 21,215,4 372,109,3 255,173,4 503,27,4 458,21,5 494,52,1 485,145,2 436,747,4 449,157,3 484,306,3 200,788,3 15,509,4 454,583,4 42,316,2 453,567,4 43,428,4 176,627,2 456,427,5 502,312,5 386,768,1 109,324,3 449,202,4 22,779,1 126,61,5 127,217,3 496,561,2 132,748,4 12,91,3 342,507,5 499,171,2 6,404,3 463,257,5 347,322,5 267,327,1 390,461,4 58,193,3 310,650,4 93,96,4 229,426,5 496,167,5 296,116,4 344,587,3 331,321,4 449,192,5 533,454,5 497,488,3 207,738,4 
373,142,2 274,415,3 404,568,1 109,801,3 10,23,3 388,130,3 398,632,3 103,507,2 503,971,3 408,513,5 415,971,4 473,10,5 313,992,5 327,588,4 268,1005,3 245,76,2 496,654,4 233,130,3 327,648,3 108,451,2 424,473,4 404,40,1 404,69,3 429,97,5 532,723,4 455,99,3 94,93,5 72,288,2 404,1181,1 496,104,2 233,841,4 48,691,1 325,229,3 343,458,4 486,214,4 488,301,5 446,280,3 55,1034,4 63,182,5 499,528,4 84,1169,3 10,579,5 400,327,4 1,13,4 304,222,4 353,422,4 345,958,2 98,844,3 536,681,1 180,759,1 513,178,4 449,611,4 444,63,2 12,291,5 151,14,5 347,545,3 504,622,3 373,871,5 507,149,5 245,412,4 505,509,5 320,529,4 233,162,3 362,894,3 167,293,4 4,167,3 346,467,2 61,714,2 393,55,5 487,661,4 488,907,5 405,210,5 19,762,1 475,32,4 215,401,2 441,481,3 526,645,5 229,0,5 522,153,4 95,215,4 221,226,3 485,269,2 499,521,4 536,556,3 120,594,2 251,409,5 200,123,3 270,863,3 405,528,2 392,869,3 63,202,4 268,52,1 136,242,4 526,63,3 450,936,4 290,231,4 290,127,4 232,222,4 368,171,5 513,82,5 405,651,2 171,771,1 11,194,4 450,298,1 416,71,4 253,422,5 270,88,3 486,72,3 297,256,4 81,210,4 505,1013,3 362,447,5 101,234,3 86,1040,4 362,28,1 487,332,4 439,270,5 540,755,4 404,84,4 536,503,3 465,681,1 527,357,2 523,409,2 496,78,4 99,689,4 248,0,4 503,1003,4 485,330,2 285,706,5 441,1182,3 292,481,4 81,461,4 0,193,4 6,390,3 108,238,4 406,344,4 458,992,3 392,403,3 221,213,4 162,271,4 120,426,4 253,677,3 480,41,3 290,235,4 453,489,2 362,602,4 342,942,4 159,473,4 532,672,3 381,6,2 537,163,3 373,549,5 89,18,3 380,681,2 510,298,2 449,1053,2 292,731,3 485,335,2 404,547,1 300,500,3 536,1064,1 58,175,5 326,650,4 342,179,5 452,58,2 428,263,3 446,482,5 59,94,4 421,1198,3 511,197,5 278,394,4 532,321,4 420,173,5 220,239,4 386,1090,1 487,199,2 514,287,4 359,169,5 371,446,5 199,194,5 14,279,3 503,442,3 279,1040,5 393,221,4 324,190,3 261,68,4 416,285,5 491,96,3 449,133,3 108,738,4 63,268,5 267,1090,2 496,256,4 304,214,2 206,565,4 531,654,5 422,327,1 302,150,5 298,196,3 372,162,4 508,679,1 235,150,2 81,80,3 408,495,5 325,43,1 391,343,4 485,120,3 507,12,4 393,384,5 523,701,4 534,170,3 9,526,4 454,221,3 360,52,2 333,1040,3 420,301,4 93,182,5 92,0,5 505,519,5 393,143,5 266,407,5 480,595,4 180,359,1 129,664,3 204,874,2 338,133,5 473,198,5 434,1043,4 356,470,5 93,40,3 523,192,4 534,505,5 388,410,4 245,571,3 464,189,4 239,244,4 456,19,5 372,143,3 144,824,4 63,194,5 457,233,4 392,1013,3 326,171,4 400,706,2 83,865,4 472,267,5 434,709,4 503,95,4 199,53,4 199,97,5 255,116,5 292,181,5 276,741,4 476,254,5 275,1406,1 27,199,2 477,287,5 536,149,3 179,789,1 536,202,4 536,176,3 94,178,3 454,276,4 58,488,4 513,343,3 11,201,4 362,706,3 218,854,5 522,515,5 428,178,3 536,506,4 125,310,4 292,14,3 498,212,3 523,178,5 457,951,2 456,528,4 44,49,5 344,364,2 369,113,3 390,70,3 235,274,3 494,161,3 144,378,3 465,26,3 457,181,4 52,95,4 350,879,2 405,234,4 41,218,1 512,545,4 297,132,3 88,136,1 454,1264,3 124,40,2 97,321,3 89,17,3 425,656,5 534,661,3 233,545,1 245,1410,2 456,63,5 520,152,4 502,152,2 219,293,4 441,402,4 444,1527,2 424,561,1 434,254,3 55,375,3 424,596,1 498,426,5 392,55,2 188,23,4 531,411,2 268,273,1 353,603,4 74,407,4 193,413,3 116,545,3 405,410,4 444,880,1 153,481,4 141,462,3 449,98,4 466,247,3 184,275,4 7,683,4 379,511,3 468,922,5 298,274,4 300,27,4 541,205,2 357,212,5 499,318,4 93,391,3 290,78,5 529,534,4 184,110,4 100,254,4 98,357,2 375,180,4 254,280,1 295,198,5 319,567,4 497,10,3 449,606,5 415,472,2 520,99,3 531,691,5 20,260,1 129,78,5 216,78,5 495,1400,3 534,284,4 66,23,4 140,1282,3 124,108,3 58,148,4 354,285,5 453,214,4 536,311,3 500,126,5 307,511,5 8,689,1 
193,229,1 86,780,5 495,1443,1 248,227,4 143,475,2 523,927,4 465,549,3 249,474,4 435,1177,3 163,369,5 406,228,3 177,297,2 143,257,4 415,11,5 267,248,4 398,746,5 202,275,4 486,193,5 513,149,3 338,202,4 268,80,3 278,138,3 338,207,4 194,151,3 388,434,4 362,848,2 267,1036,2 93,124,1 359,204,5 12,418,3 94,648,4 6,55,5 57,731,3 397,714,2 471,373,2 404,1557,1 400,723,4 311,670,5 177,91,3 233,609,4 342,301,4 307,70,4 432,293,3 338,734,4 404,72,5 494,162,5 345,292,3 392,273,4 64,777,4 536,205,1 59,735,5 391,487,4 389,741,4 496,173,4 405,3,2 331,369,2 20,747,1 82,1046,2 497,301,3 379,60,4 129,406,2 253,209,5 428,1032,1 254,446,3 525,268,5 61,728,3 157,128,5 292,251,2 524,828,2 56,248,5 404,38,1 233,139,2 513,557,4 511,317,5 94,281,4 457,292,5 454,24,3 57,511,3 540,653,3 193,181,3 457,316,5 444,958,5 275,98,4 214,287,2 440,14,3 206,236,4 494,615,4 249,269,4 511,10,5 4,49,4 404,516,3 114,95,3 278,454,5 235,55,5 251,275,5 321,22,5 477,349,1 23,175,5 35,288,2 241,330,5 343,486,5 522,241,5 94,355,4 488,681,4 310,11,4 134,233,4 372,185,5 317,375,3 81,27,3 338,28,3 441,155,4 416,1209,2 294,737,4 451,203,3 523,126,5 458,596,3 536,569,2 520,229,3 353,13,4 194,499,4 48,12,3 221,844,3 254,981,2 398,213,4 324,960,4 385,116,5 473,1122,4 177,248,3 456,95,5 94,62,3 377,276,4 411,55,5 435,171,3 221,131,2 504,270,4 350,342,3 471,96,3 317,627,4 541,395,4 59,152,3 386,155,5 404,1574,1 398,974,2 517,762,1 392,422,3 91,503,3 486,293,4 296,297,5 163,865,5 362,297,5 183,24,4 29,303,4 111,322,3 536,474,4 186,213,4 446,84,4 310,7,4 377,630,4 536,133,5 35,332,4 526,1108,3 140,278,1 429,116,3 196,38,2 441,627,4 531,1118,5 493,106,4 497,227,2 153,190,4 268,1010,4 324,22,5 531,344,4 275,602,5 270,569,3 294,526,4 544,553,3 307,356,4 398,431,3 180,1131,1 362,570,1 308,333,4 118,10,5 505,462,3 127,244,2 462,220,5 536,1084,4 381,503,3 496,1015,4 252,684,2 232,196,5 401,528,4 478,143,4 458,1039,2 531,95,5 360,149,2 404,1158,1 43,70,3 454,244,3 390,293,2 534,7,4 428,601,5 449,126,5 415,587,5 311,683,5 222,681,4 462,13,1 360,503,4 108,70,4 173,110,5 298,431,3 446,446,3 292,1247,2 40,174,5 61,237,5 505,175,5 51,236,4 58,1118,4 206,4,3 520,287,3 71,663,3 236,8,4 291,297,4 378,397,1 150,30,3 275,99,5 392,472,3 513,3,4 292,545,1 93,82,4 253,448,5 57,174,5 435,820,4 485,219,3 61,173,4 205,690,1 300,372,4 493,193,4 307,197,3 192,540,1 544,131,4 43,230,2 473,617,4 355,315,4 456,231,4 143,332,3 428,222,4 505,96,4 398,1089,2 536,142,1 232,7,3 388,172,3 505,401,4 515,190,4 503,233,3 302,1509,3 288,221,2 449,427,4 499,215,4 404,1273,1 343,201,4 151,784,5 478,53,3 325,71,2 352,325,2 358,471,4 150,560,3 233,1043,2 307,447,3 531,337,3 503,132,5 343,646,4 258,14,3 481,256,4 404,719,1 452,415,2 494,565,4 200,437,1 526,206,4 453,836,2 59,228,4 94,778,3 367,216,5 14,929,2 392,137,3 388,68,5 243,457,3 278,189,3 273,99,5 504,650,3 338,150,4 253,226,4 278,149,3 473,41,4 344,381,4 538,381,5 530,904,4 379,434,3 377,468,5 150,516,2 434,20,4 289,483,3 533,925,4 127,81,5 346,469,5 535,1,4 185,120,2 42,777,5 492,526,5 409,885,2 398,383,2 398,233,3 310,190,4 544,221,4 206,695,3 17,587,4 398,194,2 292,459,3 480,523,5 400,1010,3 449,377,5 473,1171,4 532,273,4 44,595,3 453,95,4 299,875,5 196,434,5 455,522,4 520,71,3 248,788,5 129,236,5 531,300,4 492,1277,5 464,63,5 83,542,5 415,353,4 286,3,4 89,284,5 294,420,4 275,915,4 150,1069,4 245,992,3 5,509,4 465,268,2 346,16,4 58,672,5 398,411,2 186,64,5 335,89,5 457,306,4 283,538,2 457,1047,4 131,522,4 300,132,4 197,207,3 313,160,5 232,90,3 434,350,2 189,822,2 150,565,3 151,353,3 17,434,4 270,134,4 406,289,3 290,1076,4 
362,270,4 478,254,2 239,872,2 4,407,5 513,0,5 531,234,3 497,1102,4 12,78,3 436,1074,4 37,1034,5 68,244,1 441,11,4 370,196,4 210,456,4 12,673,3 103,315,4 289,166,2 269,6,4 533,116,5 220,401,2 456,133,5 403,299,4 473,590,3 43,68,4 200,1223,2 513,10,4 541,647,4 485,1588,3 404,587,2 292,132,3 80,149,3 192,0,4 243,1131,4 186,196,4 93,749,4 311,485,5 205,1021,1 532,474,1 298,513,5 307,1455,4 546,327,4 200,128,4 391,269,4 218,70,1 495,745,3 320,498,3 336,124,4 542,209,3 302,327,3 415,325,5 366,144,3 238,426,5 5,492,5 177,728,4 200,1069,5 268,1187,1 81,282,2 312,654,4 532,121,1 499,420,4 496,267,4 177,875,2 405,167,3 373,962,5 107,126,4 292,650,3 9,273,4 227,937,1 392,94,4 31,1022,3 197,297,1 405,467,1 71,8,5 14,865,4 428,427,4 436,673,3 245,253,1 397,84,4 6,61,3 255,793,4 457,236,4 289,628,3 377,216,3 11,241,5 278,528,3 338,194,3 333,424,4 58,134,5 486,226,3 526,98,3 429,163,3 253,70,3 54,117,5 408,854,4 475,62,3 270,440,3 36,146,3 206,318,3 398,1392,3 455,448,3 57,188,3 216,55,5 424,324,3 415,155,5 422,695,3 270,650,4 454,96,5 72,63,5 265,274,5 420,175,5 434,110,3 370,21,5 242,1147,3 20,405,1 519,268,5 212,941,4 345,155,4 415,915,3 401,695,4 245,80,5 183,482,5 536,843,4 243,161,4 415,626,5 404,50,1 532,302,4 404,1424,1 61,471,2 473,182,5 453,132,4 507,238,2 459,145,4 532,846,3 23,728,5 80,120,4 262,57,4 540,27,4 97,87,3 532,217,2 346,30,5 415,86,5 323,149,4 232,203,5 541,317,4 226,1006,4 398,142,5 129,312,5 233,194,2 279,770,3 252,14,4 456,716,3 400,315,5 392,621,4 478,522,4 183,284,5 310,30,4 135,317,5 487,242,3 58,740,4 333,422,5 425,434,3 478,49,4 177,653,3 17,212,5 494,580,5 221,395,1 415,719,4 427,322,3 233,132,3 220,731,4 526,209,4 400,196,4 441,1097,4 329,659,5 503,24,4 56,317,5 150,153,4 408,317,4 175,324,3 448,126,5 454,427,4 263,122,4 455,285,3 22,417,4 415,984,3 415,811,4 547,272,5 534,181,3 306,418,4 434,469,2 267,1053,1 353,31,3 244,755,3 12,848,1 458,277,4 404,1073,3 216,372,2 479,509,4 473,236,4 547,689,3 177,279,4 536,317,4 415,97,5 346,762,5 531,937,3 485,1092,4 373,464,5 456,14,4 325,432,2 103,180,5 536,873,3 485,1078,2 200,545,2 304,316,4 267,71,3 453,684,3 319,1214,1 324,653,4 278,123,3 317,762,3 229,81,5 144,355,4 296,159,1 267,1230,2 120,312,5 341,285,4 506,681,5 116,172,5 449,293,4 232,128,3 262,221,4 531,689,4 434,1203,3 291,208,5 177,264,5 306,134,4 68,272,3 333,487,5 545,412,4 268,930,1 397,490,5 507,501,4 467,181,5 262,497,5 471,742,4 12,338,3 380,101,2 360,672,4 357,126,1 374,184,5 464,394,1 6,28,3 49,275,2 129,973,4 61,1133,2 213,474,5 31,270,3 69,392,4 532,160,4 143,287,2 35,874,3 150,130,5 400,271,3 237,844,3 428,447,3 136,475,1 179,683,5 486,94,4 465,356,4 7,434,5 485,284,5 523,526,5 200,504,3 424,1596,3 372,141,3 547,635,4 464,583,3 337,174,4 44,410,3 25,13,3 41,201,5 157,984,4 327,539,3 451,160,5 261,130,5 273,68,5 522,392,5 408,88,5 415,69,5 58,461,5 444,747,1 351,384,4 503,419,3 58,402,5 434,401,3 491,510,5 392,442,3 536,686,1 398,94,3 449,468,4 462,149,2 326,918,5 263,872,3 494,500,3 373,1133,4 156,288,4 102,256,3 222,925,4 523,466,4 405,173,4 467,97,5 477,22,2 187,156,3 392,363,2 398,37,2 108,372,5 379,305,4 44,12,5 453,227,3 416,578,2 338,179,5 458,122,3 221,940,3 306,528,4 291,179,5 431,843,4 473,170,4 531,762,5 263,184,5 140,297,5 434,79,2 403,331,4 532,81,4 404,795,3 531,531,3 407,357,4 384,143,3 55,116,5 298,473,5 475,41,4 300,745,3 466,123,5 536,662,3 180,1096,1 436,653,5 326,864,5 397,496,3 109,40,4 209,180,5 248,236,5 511,1458,4 398,759,1 534,180,4 547,275,3 146,936,3 388,22,4 544,398,4 221,548,4 502,87,4 521,99,5 329,196,5 457,693,4 200,94,3 
408,608,3 377,402,4 326,675,3 175,740,3 526,284,5 390,543,4 311,509,5 253,587,3 24,264,4 6,551,4 295,527,5 392,116,4 409,327,3 432,11,5 338,1109,4 404,1578,1 286,120,4 495,41,5 221,654,4 392,478,4 311,413,3 183,96,2 134,229,3 541,213,3 531,491,4 455,132,3 12,584,4 25,545,2 469,951,3 507,72,3 398,565,4 520,240,4 405,518,4 128,677,1 267,380,3 416,684,1 521,95,3 6,599,4 410,194,3 212,256,4 275,799,3 173,738,5 499,174,5 496,1406,3 455,60,4 534,38,4 486,195,5 242,214,3 483,421,3 386,805,1 449,207,5 99,873,1 393,432,4 180,290,3 310,731,4 384,657,2 505,602,5 226,933,2 287,233,4 497,31,4 183,214,4 504,565,3 216,540,3 193,528,4 442,326,4 344,277,3 498,749,5 12,711,4 220,78,4 478,117,3 449,289,4 159,769,4 532,404,3 517,546,3 9,332,4 453,249,4 453,565,4 96,204,2 489,14,1 434,195,4 84,133,5 278,1498,4 457,8,5 428,76,3 64,27,4 503,294,4 342,1072,4 69,167,4 520,201,3 404,1111,2 497,537,1 486,259,2 509,324,1 412,274,5 328,136,5 302,49,5 416,324,2 326,771,3 199,324,5 516,1015,1 534,211,4 338,4,3 388,953,4 398,419,3 136,259,3 497,11,4 377,631,5 383,288,5 524,410,3 131,11,4 533,236,4 550,683,5 537,962,4 520,160,2 180,412,2 173,99,5 337,215,4 48,419,4 497,514,4 372,408,2 94,274,3 523,167,3 292,809,1 274,161,3 294,416,5 493,125,4 416,385,3 310,217,4 276,924,4 115,309,4 327,231,3 58,10,5 436,90,3 550,577,5 377,719,2 416,67,3 451,457,1 310,227,5 526,633,5 320,462,3 12,109,3 471,2,5 434,636,4 302,249,4 428,365,3 83,24,3 23,420,5 188,135,4 193,836,4 465,650,3 487,519,4 493,49,5 338,648,5 467,434,4 359,164,4 129,476,4 491,184,3 17,503,5 437,844,4 496,88,4 486,585,2 483,257,5 520,231,3 458,219,3 296,282,4 111,1105,4 494,403,4 8,293,4 496,196,3 6,445,2 425,173,3 68,245,5 69,182,4 20,280,2 520,247,3 514,291,3 252,590,3 471,367,3 331,332,5 394,377,5 307,1153,2 118,24,5 83,316,3 6,379,4 339,416,5 55,190,4 290,945,4 289,464,3 550,755,1 542,134,5 386,592,3 505,366,3 478,1015,3 2,319,5 517,1016,3 19,933,4 543,325,3 343,925,2 487,185,4 266,143,5 275,270,4 360,164,5 486,201,5 302,649,5 536,528,3 396,176,5 449,1029,1 472,6,2 541,745,4 434,473,3 524,299,4 302,1072,4 89,173,5 307,841,3 59,417,3 312,484,3 457,520,4 344,240,4 466,339,3 392,1039,3 150,637,5 470,968,2 425,609,4 346,275,3 42,171,4 505,677,3 531,311,2 159,200,5 306,199,3 449,631,5 294,512,4 534,488,4 494,61,3 21,987,1 140,260,1 523,741,3 518,334,5 290,11,5 32,293,3 89,316,4 503,70,5 362,120,2 89,895,3 345,1,5 496,385,2 233,1204,1 338,473,4 545,927,4 444,312,2 300,110,1 331,1046,3 503,715,4 542,981,3 495,1472,3 415,263,3 398,392,4 118,225,3 480,506,4 542,82,4 523,505,4 298,810,4 193,117,3 292,941,4 282,624,3 449,662,4 386,101,3 497,217,3 312,143,4 473,345,5 314,1064,4 392,1223,3 499,82,4 451,258,2 322,762,4 205,358,1 457,752,4 100,840,2 290,474,5 428,804,3 330,197,4 221,390,3 261,484,4 404,518,2 425,97,4 362,87,2 42,558,1 392,842,3 95,237,4 547,321,4 542,94,3 335,1010,2 262,203,4 404,709,4 63,236,4 529,155,4 449,877,2 505,294,4 453,209,4 451,96,4 108,121,2 488,354,5 214,635,2 458,254,4 225,506,2 311,854,5 415,315,3 194,507,3 522,82,5 446,844,3 550,404,3 494,830,1 449,131,5 353,8,3 314,275,4 483,0,5 534,922,4 326,643,3 335,124,3 302,1141,4 362,199,3 392,1238,3 285,762,2 289,68,4 398,47,3 541,507,3 479,297,2 438,287,3 434,833,5 188,96,4 502,384,1 210,1126,1 406,171,4 338,736,3 1,286,3 534,453,3 91,1156,2 248,308,3 520,237,3 415,248,3 338,21,5 331,823,3 43,312,4 47,527,5 200,590,3 177,577,4 300,558,4 406,27,4 326,208,4 91,6,4 95,189,4 300,1134,3 404,706,1 269,446,4 13,237,5 313,805,4 307,194,5 37,355,2 420,116,5 223,688,3 89,761,3 9,32,4 183,746,3 434,1230,2 262,645,5 
160,21,2 270,233,5 9,244,4 503,203,3 269,793,4 278,1360,3 452,238,3 15,508,2 362,76,2 275,297,5 458,322,3 363,261,3 459,301,4 47,182,5 140,332,5 185,567,4 209,482,5 450,994,1 452,650,4 378,152,4 238,1203,4 333,1197,3 344,708,4 534,952,5 379,167,4 473,516,4 67,474,5 216,719,3 526,189,4 436,178,4 292,290,2 275,973,2 547,263,4 156,268,4 457,7,4 552,504,5 180,289,2 525,314,5 105,1241,4 101,203,4 398,152,2 498,413,3 294,104,4 544,209,5 432,321,2 405,1219,3 285,211,1 377,199,3 515,522,3 542,78,4 510,293,4 215,942,5 177,468,3 507,217,2 93,48,4 137,486,3 87,1190,5 219,263,3 369,175,4 86,110,4 312,448,3 369,63,4 373,175,4 493,747,1 429,99,5 302,927,3 193,27,5 11,167,4 15,108,4 543,311,2 346,332,5 278,171,2 310,208,2 238,267,2 98,344,3 233,159,2 505,221,4 329,150,4 285,216,3 531,681,4 428,456,1 292,13,3 388,80,3 551,12,3 536,1110,3 386,736,3 313,941,3 507,435,4 384,810,4 328,184,3 108,1034,2 540,209,5 503,49,3 63,587,4 236,484,4 404,1183,1 19,209,4 486,3,4 455,53,3 84,192,3 505,69,4 507,208,5 101,210,3 267,10,4 483,201,5 150,844,4 421,557,4 313,684,4 497,651,5 416,221,3 388,692,4 268,631,4 550,551,3 342,230,5 457,662,4 550,12,1 462,688,2 108,448,5 452,173,4 279,199,5 9,403,4 505,1278,4 544,100,4 112,115,3 14,242,1 388,1443,3 453,202,2 177,1047,2 438,474,3 446,962,5 196,21,5 200,380,3 379,422,3 406,731,4 428,754,3 362,192,3 497,170,3 202,221,4 464,1077,2 6,257,4 74,293,3 266,181,5 449,750,5 536,1067,3 453,653,2 181,125,5 531,372,3 319,1521,3 415,864,3 471,558,5 489,1127,4 342,62,4 449,1038,5 473,492,4 449,966,5 98,200,3 355,309,3 398,1191,3 477,218,2 539,6,4 313,698,5 406,90,4 71,627,4 127,814,3 441,799,3 209,207,5 84,98,5 452,720,4 454,317,3 302,1012,1 453,1106,4 534,606,5 233,1444,4 424,229,4 434,472,3 174,418,5 307,654,4 324,501,4 532,524,3 537,201,4 449,1602,3 436,511,4 279,1206,4 297,117,4 446,434,4 292,830,3 59,505,5 540,398,3 406,117,3 481,300,4 505,654,4 84,13,4 453,247,3 268,65,1 386,116,3 506,891,5 4,415,1 341,67,3 55,87,1 221,467,2 455,215,4 206,247,3 200,190,4 21,183,5 496,297,3 327,185,4 327,343,4 342,930,3 488,333,4 485,298,1 372,89,4 137,193,5 145,300,2 406,399,1 496,721,3 212,477,5 362,178,4 404,1264,2 346,49,5 91,203,4 6,592,5 533,1027,5 127,1034,3 420,55,5 325,95,3 471,632,4 502,513,3 298,46,4 448,169,4 458,831,3 544,72,4 455,91,4 326,549,2 531,514,5 496,154,3 503,1135,5 505,607,4 380,258,2 58,180,5 371,451,4 320,185,4 391,113,4 7,173,5 243,654,5 534,173,4 128,881,2 337,513,5 388,392,2 553,27,4 408,1536,4 143,116,4 527,750,4 249,236,2 439,318,2 456,167,5 216,549,1 226,459,2 491,85,3 124,398,3 371,99,3 229,649,4 436,683,3 243,245,5 398,381,3 404,1238,1 520,290,1 477,78,4 23,7,5 304,1072,1 496,691,3 381,257,2 58,173,5 449,471,4 183,955,3 345,357,4 312,478,5 532,356,3 89,479,5 356,116,5 520,426,3 552,510,5 297,96,4 127,496,3 293,239,3 127,25,4 279,218,2 69,94,4 124,913,1 131,126,4 347,6,4 411,113,4 505,645,4 329,469,5 31,404,4 124,584,4 415,387,2 494,1418,1 463,180,3 535,142,5 268,643,5 441,209,3 364,1136,5 546,315,5 379,285,5 436,671,1 317,565,4 89,1198,5 9,173,4 84,658,4 384,272,2 496,747,4 346,1015,3 424,852,4 455,1219,3 549,258,2 420,212,3 82,870,2 452,470,4 397,81,5 379,216,2 454,693,4 555,326,5 100,596,3 248,124,3 386,245,3 56,125,3 103,332,2 425,606,4 180,1113,1 342,276,4 424,942,4 498,206,5 292,176,4 275,150,5 373,742,1 199,410,3 233,462,4 531,330,4 119,116,3 68,180,5 12,67,3 532,930,2 499,1110,4 84,181,4 517,716,5 420,174,2 160,49,2 441,280,3 494,68,3 499,1615,4 345,76,4 202,256,3 298,537,3 397,384,3 47,522,5 310,519,5 94,209,5 425,600,3 346,27,4 300,425,4 183,65,4 536,1072,3 
404,1521,1 393,6,5 9,128,4 335,236,5 123,153,5 491,63,4 188,237,5 196,229,4 99,879,1 86,171,5 504,78,3 6,177,4 428,580,2 534,184,4 268,181,4 388,530,4 95,97,5 110,343,2 390,131,4 310,193,4 275,801,3 514,328,2 483,317,5 428,54,4 304,734,4 531,209,5 444,870,2 350,288,5 367,568,3 89,567,5 263,285,2 505,208,4 359,935,4 371,331,4 268,68,1 464,854,4 475,434,3 415,575,5 502,606,5 386,264,4 466,107,4 248,482,5 233,210,3 445,269,4 408,174,4 523,283,3 424,228,3 434,356,4 510,332,4 245,117,1 518,348,5 452,474,5 459,310,5 27,448,2 425,835,3 558,523,3 293,330,4 12,708,4 453,130,3 393,1370,2 384,126,4 458,110,3 536,1138,2 444,404,4 377,110,3 416,180,3 353,708,5 185,302,3 434,960,1 455,918,4 549,404,4 53,0,4 500,1277,3 250,116,4 233,392,2 374,182,5 369,322,2 78,267,5 330,413,4 150,581,5 333,311,2 494,678,3 534,202,3 456,217,4 86,1176,1 185,331,4 84,190,4 550,689,5 471,754,4 444,543,2 236,177,4 522,548,4 473,194,5 129,180,5 404,955,2 404,554,1 453,433,3 94,1132,3 372,190,4 15,236,5 477,391,2 312,49,5 200,1072,2 514,892,1 540,419,4 398,825,2 388,604,5 503,566,2 532,545,3 183,398,3 327,560,3 249,99,5 200,728,2 404,1103,1 481,294,3 415,356,5 428,80,3 268,240,1 380,1400,4 74,0,4 261,99,3 451,613,3 451,493,5 444,324,1 531,204,5 356,1033,2 550,364,5 335,382,1 552,181,3 292,635,4 5,494,4 485,747,2 269,249,2 432,753,3 61,1090,3 447,749,5 325,789,1 62,404,4 495,419,3 7,81,5 392,414,4 312,628,3 233,101,2 404,182,1 270,698,4 446,404,2 552,522,4 434,785,4 221,337,1 115,257,4 486,539,2 127,731,4 523,489,3 185,539,4 124,512,4 459,148,4 449,473,5 275,87,3 159,507,5 248,567,4 503,185,3 372,381,4 93,807,2 337,0,3 550,863,5 436,1089,1 517,457,3 112,49,5 338,662,5 493,506,4 526,512,4 263,210,5 37,394,3 369,483,4 261,558,3 129,893,4 89,527,5 187,461,4 494,1182,4 143,728,4 93,203,4 233,515,3 298,250,5 78,318,4 534,69,4 552,21,5 93,715,3 14,309,4 505,731,4 20,988,3 333,404,3 473,197,3 270,268,4 488,877,2 331,327,5 187,552,4 449,764,3 114,55,5 292,496,4 242,1464,3 69,1145,3 384,505,2 416,323,1 262,249,2 250,117,3 454,897,3 454,173,4 298,745,4 456,844,4 220,58,2 12,169,5 74,195,4 471,657,5 91,424,4 81,423,1 338,613,3 503,615,4 415,57,5 434,71,4 289,1046,4 471,1118,5 523,198,4 528,330,4 312,392,4 415,794,2 486,731,5 436,143,2 231,131,5 452,49,5 57,1088,1 23,581,4 93,142,4 233,514,5 268,427,5 294,133,5 292,203,3 499,274,1 559,6,3 180,117,2 455,100,3 201,241,3 180,748,1 505,147,3 457,356,3 333,1311,4 502,240,5 56,747,4 494,469,5 464,282,3 539,99,5 443,911,4 298,497,4 451,161,3 69,131,4 121,268,5 22,6,4 525,282,3 380,227,4 319,1090,4 275,143,5 20,759,1 342,929,1 423,322,5 416,64,4 41,470,4 86,448,3 57,69,4 117,550,5 415,461,5 91,77,3 384,960,4 451,59,1 37,246,5 127,691,4 463,1597,3 177,10,5 485,871,5 453,484,4 98,120,3 448,104,1 370,179,4 124,1184,3 215,317,5 115,296,3 338,510,5 78,115,5 203,261,4 25,404,2 268,41,5 547,247,4 59,159,4 560,50,3 214,201,4 536,69,4 233,527,4 210,519,4 496,719,2 4,161,1 331,745,5 499,119,3 550,285,4 32,681,4 61,282,4 23,10,5 428,577,3 553,7,4 353,1062,3 392,551,2 422,8,5 405,201,3 268,314,4 81,408,1 101,449,1 496,12,2 258,780,3 278,842,4 243,63,5 326,163,3 465,901,5 550,457,2 177,762,4 278,1120,4 37,312,5 393,745,2 377,284,4 449,481,5 502,746,3 550,30,4 559,122,2 302,618,3 510,354,2 424,243,1 42,279,3 535,229,5 302,167,5 446,1131,3 518,326,4 265,13,4 55,861,3 499,275,5 529,475,4 559,107,1 268,217,2 266,379,2 430,244,4 321,655,5 177,353,4 504,1284,3 507,87,3 325,32,2 507,108,3 59,211,5 10,237,3 457,56,1 14,256,4 532,344,3 360,189,5 536,587,1 540,780,5 525,300,2 510,357,1 377,152,4 17,581,5 0,255,4 
267,1045,3 6,415,5 404,140,2 434,150,3 326,71,2 113,175,5 536,696,2 551,150,3 396,126,5 488,990,3 266,123,5 360,273,3 200,30,1 373,96,5 4,94,4 67,408,3 333,230,2 151,273,5 415,142,5 156,116,5 449,1260,4 372,164,5 326,339,4 449,613,4 531,467,5 372,496,3 455,201,3 108,412,3 57,208,5 428,167,5 359,283,3 535,203,4 229,215,4 285,804,3 453,193,3 503,131,5 373,63,5 500,1533,4 379,653,4 310,520,4 532,117,4 23,726,3 462,596,2 482,404,3 378,194,3 453,654,3 109,339,3 400,320,2 500,297,4 404,527,1 276,618,4 373,4,4 511,312,3 129,226,3 499,55,5 486,287,4 307,698,4 100,116,4 526,652,4 498,182,4 81,825,3 475,174,4 293,297,5 337,489,5 262,878,2 444,594,2 200,120,2 504,257,1 534,507,5 61,221,5 199,548,4 392,193,4 67,110,3 115,1253,2 56,1095,3 342,256,3 499,2,4 248,182,4 536,290,2 560,163,2 234,195,3 433,832,4 250,280,4 359,844,3 113,179,3 320,6,4 292,940,2 451,968,2 544,709,3 386,75,3 415,139,4 368,165,4 533,590,5 125,343,4 449,699,1 317,173,4 296,82,4 544,227,5 243,173,3 21,1002,1 350,677,4 188,655,4 480,434,5 348,14,4 89,355,4 428,150,5 541,7,3 307,95,4 531,684,5 503,415,4 150,197,4 478,839,1 513,44,4 531,585,4 396,155,5 400,167,1 384,460,4 539,49,5 144,16,3 453,96,4 531,590,5 130,241,5 386,122,3 496,249,3 188,515,1 455,281,3 14,322,1 344,654,4 552,1450,4 24,967,4 454,227,4 540,659,5 81,455,1 495,10,4 520,658,4 157,249,4 404,555,1 428,381,3 532,68,4 341,381,3 399,306,3 180,455,1 343,7,5 471,48,5 373,1276,3 404,1538,1 499,315,3 479,516,4 462,740,1 84,290,3 22,385,4 242,508,4 248,316,5 6,570,3 369,660,5 263,13,4 58,761,4 52,627,5 74,110,4 331,69,2 386,19,4 93,731,3 185,1045,3 544,24,2 560,22,5 473,193,5 503,371,4 415,774,4 24,185,4 319,678,4 294,21,4 327,600,4 547,1050,4 338,159,5 502,96,4 223,348,4 326,959,5 307,96,1 393,3,4 478,116,3 362,624,4 76,208,4 315,182,1 322,291,4 390,503,5 434,289,3 84,309,3 91,1027,2 302,585,2 59,167,5 279,196,2 467,7,4 197,5,2 433,843,3 500,843,4 248,116,4 445,689,2 482,285,3 384,630,3 93,863,2 324,207,3 536,2,2 553,581,3 261,293,2 488,871,2 379,37,2 53,929,1 245,1138,2 540,698,4 62,223,4 505,661,5 10,289,3 553,228,3 513,306,4 298,1102,4 532,179,3 478,287,3 266,1184,2 415,525,5 416,156,4 378,426,5 294,402,4 75,76,2 268,444,3 21,210,3 503,116,4 278,59,4 311,210,4 220,221,3 424,589,3 206,280,3 421,369,2 409,268,5 12,577,3 453,691,5 433,762,5 499,661,2 508,750,3 304,479,5 243,317,5 197,410,1 9,177,5 473,1049,4 540,422,3 94,447,3 385,684,4 248,470,4 379,138,1 113,678,2 255,150,5 353,88,4 338,129,4 526,55,4 245,172,5 392,84,3 507,114,3 478,462,4 505,1109,1 199,865,4 471,1052,4 326,10,4 317,55,3 222,992,4 473,632,4 416,322,3 471,454,4 555,495,5 180,1254,1 40,215,3 404,1443,2 449,91,4 362,53,3 478,181,4 225,223,4 27,194,4 342,1210,4 432,1004,5 9,10,4 493,99,5 233,1203,3 485,108,3 535,440,2 12,403,5 91,181,4 345,495,5 317,479,4 184,513,5 397,475,3 264,239,3 292,402,3 39,878,2 542,198,4 273,1151,4 529,1299,2 550,949,2 176,567,3 496,239,4 193,224,3 342,738,3 129,411,4 327,644,4 350,340,4 503,383,2 342,85,5 550,259,5 478,1038,4 94,71,2 373,317,2 456,402,4 525,874,3 534,136,4 504,55,1 416,473,4 550,575,2 435,346,4 551,251,2 210,422,5 376,507,4 285,817,2 193,85,3 532,25,3 319,587,3 541,398,2 143,653,4 151,793,5 93,52,4 464,285,4 300,38,3 157,122,3 446,203,4 410,567,4 537,190,5 245,91,1 462,595,3 497,918,4 544,523,4 455,59,4 536,458,3 467,581,3 478,420,4 91,410,4 6,506,5 428,1109,2 531,283,5 527,747,3 317,131,4 401,49,4 182,1089,2 359,356,5 550,222,4 535,69,2 124,371,1 248,289,2 428,504,4 550,291,3 524,120,4 450,241,1 83,1046,2 486,194,4 115,518,5 212,478,4 538,660,5 532,410,2 550,580,5 
364,270,4 167,677,1 273,210,5 91,1213,2 156,684,3 547,117,5 519,288,4 6,645,5 550,180,2 478,248,2 415,415,4 9,181,5 86,180,5 84,609,3 344,1011,3 425,331,4 212,117,4 425,193,4 526,123,4 290,564,2 342,1193,4 550,21,5 469,18,4 488,329,4 552,433,3 252,688,5 480,513,4 408,206,3 523,184,4 498,175,4 428,519,3 292,462,4 411,95,5 500,248,3 520,422,3 536,57,4 32,677,4 12,0,3 302,1231,3 434,412,2 479,190,4 241,110,4 233,969,4 399,322,4 93,922,5 225,175,4 434,41,3 537,236,4 77,326,1 496,782,3 505,52,4 82,545,4 91,641,3 434,108,4 428,701,5 39,895,4 248,477,4 74,684,4 455,237,4 287,174,1 560,142,1 471,0,5 143,270,2 192,353,3 536,660,4 8,482,5 81,417,4 537,495,5 562,1034,4 17,477,5 473,494,4 553,695,3 59,514,5 325,779,2 255,738,5 408,155,2 468,704,5 503,93,4 541,264,4 285,818,3 121,659,3 518,908,5 193,1206,1 233,607,3 496,1156,2 384,383,1 536,1049,2 200,179,3 526,200,3 143,279,1 565,176,4 562,691,5 193,21,5 555,191,5 487,209,4 125,689,3 451,647,4 319,23,3 34,357,1 373,520,4 23,40,5 549,891,2 342,356,5 477,380,5 473,995,3 144,822,3 445,333,3 163,410,2 454,514,4 193,731,3 532,658,4 159,1196,4 471,98,3 341,356,3 57,115,5 532,844,4 405,132,5 275,460,4 341,1102,3 532,3,3 233,318,3 158,95,4 495,140,3 114,272,4 547,886,4 523,41,3 120,126,5 480,649,3 377,143,4 307,604,4 304,69,4 333,337,1 245,1027,3 114,176,5 542,693,4 255,287,5 496,2,4 514,242,3 194,808,3 377,20,3 526,202,4 531,241,4 533,1033,3 338,1525,4 536,653,3 465,94,2 406,711,2 353,280,1 129,1088,2 120,249,2 490,272,5 294,136,4 381,176,4 5,202,3 183,528,4 307,516,4 483,68,5 386,662,4 6,664,4 531,240,5 406,402,4 398,117,3 377,1229,2 70,356,5 388,453,2 519,299,4 341,6,4 466,1015,4 108,176,4 57,1047,1 515,627,4 59,490,4 108,11,4 491,186,5 393,801,1 517,684,5 192,762,3 400,82,4 446,247,5 408,1392,1 384,649,5 463,1225,4 529,213,2 91,229,3 449,927,3 337,207,3 278,683,3 215,155,5 200,407,4 388,526,3 346,1038,5 404,1178,1 344,415,4 560,6,5 362,163,2 27,55,5 556,261,2 379,750,3 376,270,4 14,281,3 325,433,5 304,940,2 379,162,2 495,86,5 327,322,3 456,1139,2 502,560,5 462,1013,2 556,165,4 541,495,4 61,43,3 401,1059,3 243,753,4 359,303,4 520,227,4 390,281,4 17,460,4 544,88,3 221,10,5 455,692,3 388,630,5 450,307,1 140,822,3 109,195,4 416,379,3 551,224,3 502,381,4 51,12,5 146,269,3 456,104,3 148,336,2 404,1230,1 449,588,3 218,257,5 48,945,2 473,602,5 252,484,5 341,297,3 416,244,4 480,215,5 33,291,5 65,49,5 372,70,5 47,184,4 275,1193,3 497,181,4 540,762,3 231,749,3 453,495,4 415,244,2 452,213,3 544,228,3 167,596,3 12,653,5 434,712,5 358,545,3 532,426,4 531,720,4 344,297,5 151,48,5 373,182,4 405,172,2 311,607,5 536,487,4 345,374,1 416,384,5 275,914,4 541,865,2 74,404,4 538,241,5 523,160,4 454,56,4 30,874,4 294,201,5 428,82,4 398,300,4 48,41,4 377,735,4 380,704,5 412,249,3 478,174,4 560,804,3 123,143,4 213,704,4 384,506,3 545,16,4 416,339,3 502,487,5 483,55,5 439,936,5 222,124,3 235,126,5 307,213,2 353,165,4 144,761,3 159,281,4 535,726,3 522,934,5 547,457,3 86,656,4 436,1006,5 40,194,4 71,219,3 453,69,4 384,899,4 381,99,4 540,0,4 456,650,5 98,474,5 496,549,4 186,215,5 100,1131,3 542,178,4 541,68,4 17,41,3 144,411,4 317,1011,4 473,215,4 21,93,3 262,314,4 200,424,3 73,257,4 405,420,4 292,210,4 455,720,4 392,568,4 290,565,4 279,696,5 14,675,4 397,238,3 471,37,4 392,239,2 188,245,4 55,793,3 495,38,5 424,197,4 276,1128,3 529,691,4 557,115,5 562,232,4 522,2,4 541,55,5 200,281,2 555,169,4 9,133,5 55,10,4 238,752,5 451,78,4 338,78,4 173,1220,5 129,419,5 279,283,3 456,482,5 424,175,3 360,268,4 455,173,4 225,712,5 111,886,5 482,198,3 519,1027,1 422,147,3 206,110,3 496,37,3 
531,1469,5 544,471,5 184,422,5 531,561,5 233,123,4 550,131,5 525,269,3 462,14,4 499,720,1 450,689,4 395,870,2 192,475,2 200,463,1 273,684,5 523,183,1 137,14,4 109,764,3 338,3,4 536,225,2 331,368,4 560,1100,3 558,901,4 342,167,4 563,1398,2 199,357,5 473,520,5 51,150,5 56,819,3 406,196,4 503,257,5 416,1246,3 275,714,3 29,171,4 462,243,4 285,929,2 233,1168,4 0,219,3 523,553,4 531,420,5 404,1041,1 404,264,2 425,654,4 268,57,2 428,545,3 559,1159,3 449,590,4 434,1102,4 399,287,4 404,1584,1 543,1279,3 121,955,4 235,175,2 15,641,5 359,285,5 446,925,3 380,213,2 534,432,5 565,394,1 527,422,1 324,497,4 547,310,3 529,327,4 312,632,5 355,291,3 493,288,1 362,577,4 98,21,5 187,650,4 473,169,4 377,1180,2 507,213,3 333,304,2 188,312,2 263,855,3 536,955,4 496,126,5 564,51,5 208,0,5 320,862,3 424,186,3 82,583,4 9,136,4 353,150,3 453,317,5 280,325,1 103,627,4 58,167,5 127,689,3 393,37,4 478,57,4 457,460,4 551,335,3 75,136,5 333,639,4 562,400,4 550,560,5 560,150,2 535,264,5 416,390,2 405,116,4 238,299,1 117,319,5 550,317,5 6,285,4 428,762,4 250,977,2 393,312,5 572,193,4 98,299,4 505,662,4 496,406,2 449,384,4 449,281,5 68,235,4 506,146,5 274,256,3 120,99,4 307,55,5 307,645,5 522,49,5 551,321,3 498,135,4 327,43,3 540,559,3 507,81,3 536,241,3 194,197,3 452,423,1 405,239,4 548,865,4 566,1018,5 53,294,3 484,537,3 495,180,5 531,154,4 455,742,2 362,830,1 405,356,4 242,1196,4 143,318,3 455,169,5 331,236,5 310,467,4 351,11,4 61,82,5 193,7,3 208,284,5 499,27,3 48,639,1 443,514,4 389,301,5 117,54,5 532,126,5 200,802,2 268,55,5 560,203,3 509,285,3 566,635,4 454,379,3 6,612,4 175,327,4 568,110,3 561,442,5 290,997,1 28,299,3 415,236,3 427,242,4 143,0,4 424,1595,2 0,92,5 488,686,3 88,701,5 415,1299,3 91,119,2 537,210,4 424,565,2 434,326,3 55,172,4 392,146,5 458,249,5 477,426,4 302,1088,1 487,418,3 449,304,4 416,0,4 535,602,4 83,94,4 321,602,5 275,754,3 199,400,2 235,8,5 450,335,4 501,337,4 535,68,5 434,583,3 307,370,3 415,933,2 342,296,5 336,379,4 478,499,4 183,507,4 188,526,5 320,59,4 12,221,3 86,280,4 478,631,5 436,11,5 544,225,3 194,1412,2 396,344,4 434,714,3 69,207,4 435,760,4 103,12,3 224,426,5 418,616,4 345,469,3 542,512,4 55,110,2 186,427,4 556,293,3 137,512,5 453,473,4 560,342,4 356,122,4 456,171,5 266,168,5 405,211,2 94,24,3 449,274,4 415,241,4 298,128,4 63,424,4 338,234,3 472,115,5 220,107,3 475,193,5 451,98,3 447,320,4 427,271,5 275,1041,1 392,998,4 415,1050,3 495,135,1 262,526,5 478,126,5 536,735,3 536,469,2 453,0,3 377,256,4 6,46,5 42,659,4 122,522,3 150,760,3 495,134,2 405,433,5 10,741,3 423,171,3 536,257,4 41,923,3 328,173,4 566,251,1 254,681,5 440,287,2 462,361,1 497,511,5 513,422,5 310,500,5 526,142,2 93,21,4 117,395,5 483,292,5 541,692,4 307,23,4 535,49,5 88,320,4 479,165,5 462,1604,2 455,504,4 275,288,2 238,131,5 311,184,5 312,94,3 553,285,4 5,500,5 373,664,4 473,202,5 338,527,5 183,650,3 551,300,4 108,1227,3 456,30,4 485,299,4 434,824,3 361,747,1 233,1452,2 500,281,4 449,872,3 486,67,5 343,120,3 534,1148,4 504,150,3 532,18,3 5,258,1 338,414,3 575,8,3 89,508,5 263,201,5 185,97,5 290,568,3 384,1006,3 180,1330,1 566,481,5 416,173,3 542,1261,2 206,54,3 398,68,3 566,614,4 531,499,5 532,97,4 353,115,5 473,134,5 205,1023,1 129,1094,3 398,185,4 302,1109,1 478,120,4 532,434,4 511,96,5 433,1151,5 17,176,3 473,707,4 118,167,5 322,932,3 304,792,5 341,196,4 59,11,4 307,944,4 43,449,2 353,530,4 528,872,4 294,94,4 386,193,3 313,21,4 428,47,3 172,301,5 307,493,5 549,254,3 536,680,1 455,41,4 312,0,4 458,299,4 522,1021,4 452,78,3 532,46,1 4,451,1 59,165,4 560,583,3 244,1046,3 273,275,4 193,1065,3 470,7,5 532,53,4 378,384,2 
465,182,3 250,281,4 396,339,2 490,189,4 17,197,3 342,702,4 255,65,4 305,755,3 404,67,1 201,0,3 124,104,3 503,81,4 498,650,4 400,193,4 498,86,4 494,506,4 221,1138,3 25,457,3 139,301,4 324,182,3 285,389,1 523,290,4 534,734,5 279,747,2 404,1208,3 459,13,5 492,326,5 509,293,3 327,911,3 173,763,4 48,89,1 505,226,4 545,671,3 89,542,3 351,81,3 191,107,4 233,1449,3 243,155,4 62,675,3 415,826,4 207,309,4 345,711,3 267,1221,2 416,217,3 497,590,4 428,160,3 356,150,5 405,558,3 471,560,5 333,296,5 416,247,4 391,254,3 464,96,2 494,1134,5 473,172,5 535,132,4 404,426,5 576,470,3 245,558,3 473,462,5 405,233,4 572,142,2 455,719,3 539,108,4 260,124,5 317,57,4 346,1011,4 335,110,3 572,191,4 553,545,3 189,988,3 531,537,4 339,142,5 129,99,3 469,294,3 415,297,4 189,312,5 91,264,4 414,479,5 0,7,1 216,182,3 270,356,5 572,126,4 541,366,4 497,1082,3 535,583,5 48,697,2 252,678,3 532,239,1 74,824,1 197,97,4 127,418,3 465,116,5 415,584,1 552,477,4 123,473,3 72,381,4 524,236,4 55,77,3 327,300,2 89,953,4 285,460,2 448,272,4 37,258,3 275,390,2 89,649,5 109,1248,3 28,285,5 406,288,3 495,221,3 268,301,3 173,416,4 234,191,4 71,193,4 333,162,4 505,549,4 405,704,4 1,290,3 534,478,4 261,287,3 312,442,5 177,818,2 310,355,4 538,18,5 433,146,3 473,460,5 566,612,4 462,234,2 542,230,3 512,264,5 562,366,4 325,653,1 58,513,5 289,124,3 466,6,5 495,1062,3 456,143,5 471,430,5 213,165,4 534,501,5 453,11,3 505,653,4 298,57,3 452,585,2 243,267,5 541,1217,3 83,299,4 421,126,4 42,78,4 234,430,2 187,99,4 61,194,5 552,481,4 477,160,3 268,133,4 256,128,4 535,216,3 517,369,4 526,49,4 448,701,5 503,62,3 338,941,4 499,88,4 188,12,4 182,355,3 98,470,4 43,120,4 496,624,3 278,402,1 353,215,3 278,889,3 0,204,3 409,346,1 408,1378,3 292,590,3 428,767,3 578,0,4 321,301,5 513,317,4 298,482,5 362,263,3 42,784,3 108,163,5 418,13,5 573,299,4 532,257,4 48,95,1 200,206,3 464,11,4 165,285,1 416,72,3 471,392,3 187,96,5 520,175,4 497,22,4 408,525,3 473,652,4 518,335,5 392,172,5 499,69,4 311,207,5 489,123,4 307,567,5 505,55,4 150,203,4 486,37,2 401,272,4 424,669,3 400,152,2 550,273,2 392,824,4 449,194,4 310,638,4 188,602,5 289,434,3 21,174,4 384,443,1 392,587,4 406,99,5 446,146,4 170,314,4 486,297,5 473,683,4 180,99,3 453,57,4 398,470,3 384,738,1 458,119,2 69,575,2 449,298,2 404,1421,1 560,581,4 541,720,2 451,490,4 484,340,4 341,193,3 341,143,5 522,791,4 542,174,3 58,60,4 441,664,2 20,853,5 471,176,4 135,203,4 483,577,3 333,936,3 334,322,4 416,854,2 455,607,4 408,381,4 478,303,4 295,293,1 116,209,4 550,236,4 317,841,2 453,602,4 402,844,4 343,321,2 335,99,3 193,72,3 310,518,3 420,86,4 372,172,5 425,487,5 22,202,4 559,202,4 151,363,4 59,608,3 377,571,3 163,99,5 541,27,4 536,515,3 576,228,4 327,754,3 456,743,3 233,872,3 366,671,4 520,473,3 94,199,2 398,57,3 236,237,4 129,214,5 276,49,3 482,611,3 157,91,4 0,233,4 91,449,2 416,213,5 115,357,2 15,233,5 391,327,3 339,661,2 449,152,5 425,203,3 499,251,2 28,479,4 180,766,1 478,100,4 56,108,4 400,8,3 86,1048,3 453,27,4 130,292,3 378,658,5 527,192,4 451,1088,1 285,210,4 516,282,4 532,294,4 510,894,4 398,179,3 49,1007,5 532,878,3 223,468,1 566,301,4 546,300,3 233,177,5 346,507,3 84,228,3 63,968,3 266,234,3 243,2,5 377,1399,3 341,698,4 537,194,4 296,12,3 449,380,2 285,178,5 454,401,4 140,291,1 495,98,3 530,322,5 180,281,4 518,1591,5 503,427,3 506,244,5 284,99,4 544,446,3 495,508,3 496,66,3 502,404,3 405,19,3 12,12,5 289,227,4 115,251,2 530,288,3 362,316,5 513,203,5 441,317,4 173,552,5 471,228,5 400,450,2 531,1495,2 160,131,1 486,1034,4 255,35,3 108,124,5 550,581,5 206,513,4 86,782,4 424,306,4 94,88,3 298,254,2 404,420,1 
537,955,3 568,221,3 2,257,2 513,242,2 326,1100,4 554,167,4 404,1517,2 575,279,5 243,1056,4 346,257,4 279,730,3 533,929,4 338,497,4 424,143,4 561,55,1 326,272,2 179,659,5 214,98,4 193,623,2 83,202,3 58,233,5 496,1051,2 83,596,3 505,553,3 275,322,3 405,273,3 560,468,4 539,244,3 155,186,5 405,745,3 93,79,2 313,1013,3 572,660,4 221,283,3 404,396,4 292,549,1 283,344,4 186,659,5 480,237,4 298,731,4 552,426,5 547,254,4 473,418,4 278,80,4 428,131,3 544,809,4 156,312,5 290,974,2 526,180,4 457,179,4 290,163,4 458,224,3 428,534,2 561,635,2 499,1323,2 533,23,5 0,104,2 228,299,2 298,651,3 373,457,5 210,490,3 250,741,5 523,706,4 378,173,5 221,568,2 485,1016,3 200,520,2 544,53,4 83,116,4 150,608,4 356,1276,5 520,89,2 337,473,4 138,296,5 454,190,5 336,249,3 193,996,3 242,513,4 312,87,2 276,1282,2 0,146,3 423,426,4 12,789,2 507,442,4 325,422,3 440,750,4 452,142,2 61,97,4 56,870,3 365,216,5 458,686,3 353,97,3 360,55,4 114,22,5 196,244,4 576,283,4 468,126,4 250,274,4 471,654,5 483,923,5 124,180,5 94,76,4 557,935,5 238,497,4 293,1080,3 12,620,4 306,394,3 435,178,3 531,1045,4 199,67,5 150,214,3 70,247,3 556,7,5 331,122,4 565,32,2 275,357,3 108,985,2 529,321,4 215,10,5 492,745,4 275,394,2 566,6,4 13,427,4 477,97,5 51,814,4 57,639,5 485,257,5 449,230,3 298,633,2 563,332,3 82,24,2 398,42,3 129,249,3 380,15,4 94,1187,2 499,497,4 560,500,3 94,704,5 494,1118,4 456,229,4 333,12,3 252,894,4 341,1169,3 233,24,3 71,227,1 341,325,1 405,71,3 505,1072,4 485,126,5 555,519,5 420,332,4 508,309,1 23,172,5 533,594,4 377,755,3 446,0,3 466,275,5 136,0,3 536,326,2 542,84,2 536,446,3 333,126,4 550,90,1 353,168,3 225,181,1 504,401,5 423,332,5 150,25,3 20,558,1 497,1421,3 400,171,3 290,54,4 397,228,3 290,1216,3 136,891,3 342,57,4 404,1441,1 86,126,4 522,1008,5 279,728,2 187,467,4 471,472,4 516,872,3 369,106,4 451,512,4 465,301,5 478,180,5 85,682,5 559,117,3 293,268,5 333,482,5 384,507,2 441,176,4 157,543,2 367,671,2 415,688,4 487,258,1 12,870,2 545,117,5 520,172,4 518,242,1 84,142,4 416,182,4 275,633,4 82,110,3 471,747,5 487,844,3 199,759,4 141,175,5 532,282,3 368,918,5 279,173,3 547,146,5 285,968,5 167,324,1 404,503,2 0,98,3 27,226,4 76,126,2 571,299,4 263,25,4 180,976,1 391,171,5 453,660,4 389,282,4 473,131,4 93,685,4 194,142,5 558,187,5 392,63,4 503,1438,4 574,214,3 48,432,5 400,364,4 221,1010,4 535,270,3 500,405,3 300,108,5 242,735,4 536,150,2 71,240,4 502,232,5 4,394,2 482,537,2 434,384,5 492,234,2 583,113,4 188,629,4 406,180,3 559,318,4 497,97,4 279,314,5 542,14,3 534,21,3 107,747,3 112,236,3 553,86,4 429,1239,3 503,236,3 550,315,5 384,255,4 545,285,2 492,342,3 217,7,3 378,204,5 310,620,4 101,435,2 393,97,5 563,749,3 331,678,5 454,23,3 575,824,4 86,1180,3 456,142,5 496,138,3 15,530,5 397,524,3 377,202,4 348,120,2 277,314,4 261,485,5 310,415,4 246,299,2 71,654,5 103,110,1 287,879,1 60,299,5 397,501,3 333,220,5 300,567,4 298,248,3 235,691,4 415,402,5 63,195,4 507,0,5 5,529,4 380,478,5 199,757,3 275,432,4 200,1134,5 345,99,3 0,0,5 319,240,4 404,650,5 542,1193,4 317,25,5 86,434,5 40,413,4 404,903,1 532,233,2 377,125,4 17,97,5 41,236,4 91,4,4 319,807,4 406,564,3 405,1064,2 14,545,2 393,264,4 513,681,4 525,345,3 488,259,3 147,150,4 275,240,4 453,117,4 356,272,5 386,126,4 58,650,5 424,339,4 449,864,4 434,210,4 436,133,5 415,193,5 556,256,2 221,174,3 451,293,2 15,384,5 183,693,5 404,605,3 209,656,4 180,306,1 531,567,5 497,1006,3 245,197,4 415,250,5 342,56,5 428,2,2 550,565,5 242,457,4 360,82,3 523,236,3 587,30,3 564,178,5 537,404,3 150,131,5 496,791,3 449,483,3 499,583,1 586,994,3 505,489,3 360,22,5 144,514,5 484,293,1 468,653,4 
268,1039,1 76,640,5 568,0,4 407,311,3 428,156,4 473,70,5 487,69,3 386,228,2 505,27,4 12,800,3 248,134,5 392,1062,4 495,624,4 322,872,3 268,522,5 372,213,4 449,1032,3 497,163,3 487,27,4 335,997,1 93,796,2 93,390,3 437,99,4 452,384,3 13,209,5 377,1041,3 545,97,5 552,130,5 301,300,4 379,181,3 568,2,1 478,189,4 313,1518,4 245,234,3 445,301,4 176,134,5 457,468,4 92,282,4 362,649,2 492,264,5 99,348,3 285,81,3 84,954,4 159,932,3 19,173,4 346,180,5 566,1130,4 58,409,3 534,282,4 502,653,5 552,603,5 523,865,2 556,507,4 304,134,3 353,87,2 456,548,4 379,955,4 566,1203,5 271,204,5 567,605,5 386,7,4 275,23,4 503,50,4 12,550,1 404,1469,2 392,999,3 448,409,3 327,96,3 183,638,3 398,116,2 567,834,4 477,217,3 573,314,3 198,99,3 568,282,4 449,198,5 192,22,4 576,995,3 528,322,4 304,250,5 289,175,4 467,474,4 503,395,2 267,183,4 345,182,4 23,288,3 11,590,5 513,407,5 473,6,5 501,357,4 362,379,4 90,126,5 535,20,3 192,402,3 63,143,3 585,627,3 491,185,3 213,170,4 555,478,5 72,178,5 307,274,4 279,408,3 270,545,2 48,556,3 513,1013,2 310,132,3 117,640,5 496,809,3 435,1467,5 59,636,4 404,1477,1 467,611,4 300,196,5 183,202,3 526,323,3 156,285,5 492,762,4 559,755,2 496,228,2 89,989,3 302,214,5 377,685,4 416,554,1 163,514,4 401,285,5 466,297,4 483,587,5 200,230,2 275,387,2 362,422,3 575,434,4 180,116,2 584,1487,4 446,64,3 118,411,4 90,209,5 540,464,4 221,731,4 454,1033,2 428,355,3 200,465,4 463,300,4 287,189,1 434,751,3 46,682,3 586,747,1 319,799,4 377,728,4 561,461,5 58,609,4 558,72,4 415,1515,5 290,940,4 535,510,5 12,794,2 312,631,4 488,872,3 63,422,4 531,1414,2 215,568,3 532,429,5 144,293,4 180,875,1 404,183,1 415,590,5 523,540,1 559,863,3 398,1243,3 392,242,4 384,17,5 507,215,5 526,192,3 493,321,2 150,473,5 415,8,5 56,283,3 43,736,1 518,262,5 521,522,5 463,293,4 415,468,4 467,292,5 27,608,3 494,741,5 587,720,5 249,257,4 333,67,3 200,229,3 505,81,5 545,287,4 233,431,4 353,257,4 137,11,5 415,285,5 550,120,5 471,361,5 12,602,4 282,20,3 368,889,3 549,322,5 535,581,2 487,8,4 485,935,3 356,475,3 401,514,5 386,427,4 298,914,4 325,180,4 502,713,4 320,173,3 477,144,1 231,268,3 520,181,3 393,232,3 544,21,3 517,6,3 531,733,3 377,98,4 415,917,4 398,753,3 205,748,2 457,31,4 401,9,2 210,229,3 206,461,3 415,0,5 293,236,4 199,62,4 341,124,2 535,30,3 391,514,5 444,234,1 488,688,5 101,1239,2 455,409,4 108,721,3 424,231,3 416,517,5 388,738,2 499,534,3 576,203,4 516,110,3 472,24,4 492,97,4 263,644,4 200,1420,3 317,69,5 428,237,5 427,311,4 404,1192,1 548,49,5 268,190,5 390,57,4 356,219,5 534,41,3 560,52,3 346,370,1 535,386,3 290,0,5 214,1062,5 200,1005,2 342,949,3 373,977,2 319,569,4 503,166,3 519,273,3 330,233,4 37,1035,4 344,225,3 522,726,4 388,641,4 451,110,3 550,160,5 459,712,4 324,81,3 125,339,5 333,150,4 517,99,4 282,293,4 71,76,4 188,206,5 29,891,4 277,312,5 478,878,4 335,1050,2 461,270,1 372,225,3 439,920,5 499,207,4 109,257,4 193,75,2 453,120,4 559,259,1 310,538,4 550,49,2 262,698,4 71,590,5 560,108,1 225,108,4 400,0,2 499,254,3 400,275,4 14,247,1 564,211,5 384,442,3 559,422,4 471,354,3 338,565,3 526,212,4 226,13,4 331,251,5 555,47,5 438,1047,4 449,310,4 503,654,4 565,1231,2 398,281,3 566,220,5 496,380,3 288,124,2 5,484,5 532,377,4 424,126,4 140,830,2 525,146,4 513,731,5 5,136,5 560,488,4 568,24,4 197,190,4 362,230,1 585,664,3 444,907,1 404,1559,1 40,187,4 378,63,5 250,146,3 536,136,4 398,76,2 9,461,3 532,64,4 63,120,2 428,403,4 343,1141,5 177,422,4 370,392,2 507,100,5 560,1008,4 275,1010,3 454,222,4 378,171,4 449,432,3 537,97,5 253,180,5 17,282,5 528,291,4 313,865,4 560,14,3 94,187,3 503,1420,4 294,120,4 563,826,3 506,826,5 
311,624,3 586,326,3 228,268,4 282,408,4 536,81,2 234,495,4 131,483,4 196,227,4 495,16,3 58,974,4 2,338,3 136,679,5 48,1082,2 465,384,4 452,962,4 6,63,5 129,564,3 496,97,4 281,299,3 496,407,4 484,244,3 144,133,4 87,318,3 550,134,5 279,388,5 384,210,3 371,332,5 531,65,5 408,1096,2 193,12,4 293,120,5 536,416,2 505,181,5 478,0,5 534,1123,4 362,656,5 310,398,4 416,143,3 452,68,4 143,7,4 581,947,1 388,209,2 297,513,4 539,740,3 361,321,3 529,49,4 479,641,4 56,180,5 475,208,4 565,7,4 344,463,3 292,543,3 378,143,5 218,878,4 252,219,4 233,1460,2 91,369,1 379,505,3 266,173,5 195,286,3 453,315,4 390,510,5 177,55,4 532,478,4 560,210,4 38,269,4 535,182,3 398,940,3 312,522,5 550,615,5 94,152,5 502,245,5 497,627,4 76,245,5 566,479,4 300,454,5 393,281,3 506,299,5 176,216,3 326,300,3 542,116,3 205,682,1 551,1276,3 552,645,4 209,300,4 301,679,2 434,121,3 179,200,2 479,164,5 531,299,5 215,256,3 404,1205,1 541,1058,4 253,21,4 404,801,1 140,925,4 508,299,3 253,224,3 419,474,4 268,409,4 342,470,4 415,558,3 583,257,4 338,49,4 193,548,3 186,167,5 302,720,4 392,750,2 11,707,3 358,120,4 341,546,5 590,786,3 523,496,2 546,353,4 437,254,4 115,1213,3 536,236,3 459,675,4 48,49,1 136,209,5 493,173,5 68,97,5 209,3,4 405,698,4 550,14,5 52,1086,4 471,116,3 436,606,5 565,1192,5 503,562,3 584,1346,2 94,649,4 454,41,2 388,502,3 499,178,4 560,18,3 578,844,4 409,753,3 270,508,4 565,422,2 304,1511,3 61,1131,2 177,8,2 12,636,2 532,189,2 377,180,4 414,268,4 538,49,3 385,23,4 503,150,4 193,466,5 415,126,5 503,209,4 416,26,3 436,659,4 586,880,2 362,517,4 436,154,3 313,378,3 464,190,4 464,198,3 91,194,5 587,364,5 547,1404,3 433,236,5 419,407,4 536,459,2 196,264,5 25,117,3 485,249,1 262,270,1 254,563,1 397,229,3 415,431,2 536,614,3 227,749,3 536,268,3 526,155,3 384,235,2 560,1523,4 312,332,4 454,316,3 542,206,5 435,38,3 487,198,4 266,664,4 30,318,4 531,828,3 247,175,5 262,135,4 560,517,4 21,635,3 6,602,4 237,470,4 19,819,2 298,487,4 313,844,5 397,795,3 176,962,4 408,321,2 275,1140,3 551,13,4 331,281,5 313,195,3 58,483,4 478,357,1 12,483,5 560,1138,1 89,19,4 270,87,4 587,203,5 94,541,2 245,980,1 404,1470,1 415,1216,4 12,137,1 295,236,5 474,315,5 29,173,5 404,938,5 532,8,4 451,142,3 404,1062,5 560,677,2 408,1523,4 91,236,4 500,180,4 163,716,3 400,63,3 233,306,2 14,234,1 156,339,5 552,97,5 536,206,4 532,1281,3 434,147,3 424,1088,2 373,1512,2 429,627,3 560,467,1 531,1209,4 518,323,1 472,136,4 293,290,2 590,1016,3 310,76,5 506,688,5 403,269,4 70,180,3 523,524,3 169,258,3 580,49,4 326,268,3 143,60,3 333,264,3 21,23,5 348,470,3 410,565,4 485,765,4 302,1410,2 441,52,3 391,288,5 22,223,5 586,874,1 534,237,4 188,499,5 378,575,4 458,327,3 307,215,3 545,345,5 207,516,3 253,414,3 538,130,4 458,229,4 10,356,5 531,484,5 404,536,1 586,348,3 398,734,3 404,519,2 483,49,5 180,1058,1 497,1141,4 523,930,3 456,131,5 372,549,3 61,650,4 227,136,1 12,116,3 291,234,3 428,626,2 61,402,4 455,1323,4 149,287,4 416,615,2 560,123,3 160,256,3 213,461,4 439,1190,5 573,320,1 335,289,3 544,647,3 82,405,2 551,716,3 342,152,5 405,427,5 362,677,1 130,126,4 103,293,3 398,230,3 17,972,3 206,848,3 396,877,1 269,580,5 243,400,3 536,456,1 499,659,2 483,509,4 377,619,3 502,19,5 267,543,3 559,12,3 522,628,5 176,172,4 454,461,3 93,664,3 527,309,3 560,221,3 450,880,4 574,321,3 554,86,4 591,417,4 144,281,5 68,306,2 392,796,3 269,859,5 552,306,4 457,192,4 53,545,3 424,830,3 486,1243,2 515,309,4 256,581,5 373,482,3 267,202,5 421,1186,4 579,824,4 76,14,2 550,958,5 302,740,4 263,1117,4 270,284,4 307,175,4 333,954,1 275,398,2 315,82,4 434,761,4 531,152,4 12,310,3 127,923,3 520,720,4 
59,403,3 591,123,5 87,312,3 462,12,3 74,12,5 499,283,3 400,1288,2 536,6,4 499,844,4 302,779,5 411,153,3 455,12,4 293,49,5 494,213,5 404,1552,1 86,569,3 591,49,5 69,229,4 585,147,3 298,236,2 91,736,4 549,992,4 542,175,4 458,741,4 10,726,3 221,167,4 404,1516,1 56,404,4 302,63,5 565,82,4 315,677,1 486,793,5 95,444,4 449,1139,2 337,134,5 478,297,3 536,240,3 83,684,3 12,11,5 335,274,4 293,927,3 254,664,3 91,270,2 455,1217,3 53,180,5 591,203,5 275,158,3 7,272,3 456,43,4 459,590,2 12,517,4 473,502,4 200,678,3 581,245,4 402,287,4 313,120,4 372,95,4 270,276,4 502,175,5 550,306,4 449,314,4 153,873,3 428,440,3 101,186,3 143,3,4 84,1102,3 388,422,5 480,647,5 21,683,3 369,432,3 285,395,4 404,198,1 404,1558,1 350,887,4 245,400,1 328,330,3 560,529,4 438,299,4 217,55,3 448,557,4 292,677,2 287,120,2 398,805,3 48,70,3 188,486,5 177,86,4 471,929,5 424,146,3 296,10,4 496,144,4 550,741,5 373,47,5 404,1031,1 362,773,4 464,317,4 51,110,4 455,181,3 329,104,4 144,602,5 544,635,3 47,208,5 428,185,4 386,581,3 499,1325,4 9,204,5 180,1196,1 550,237,5 444,292,3 327,194,3 585,38,4 446,980,2 416,395,2 177,199,3 424,206,2 404,414,2 185,55,3 341,865,1 22,150,3 434,317,5 490,339,4 188,6,3 554,479,4 196,372,1 343,150,5 327,1438,3 177,225,4 578,82,5 475,709,5 473,482,5 502,203,3 566,482,5 9,662,3 478,738,1 266,52,4 325,153,2 254,4,2 410,654,4 290,828,2 245,1043,1 187,63,5 448,752,5 541,186,4 393,171,4 531,167,5 41,626,2 377,968,4 205,1430,1 80,282,4 14,180,5 298,964,4 328,116,3 35,287,4 344,771,4 9,466,4 492,317,5 434,259,3 32,259,4 100,146,4 410,160,2 269,78,4 408,432,4 322,143,4 117,170,5 548,251,3 91,1017,4 5,366,2 153,173,5 89,442,4 405,959,2 560,505,3 398,726,4 428,685,2 24,115,4 404,644,1 267,173,5 496,38,3 560,496,4 544,473,3 15,11,5 429,123,5 576,661,4 408,169,4 566,0,3 393,194,5 404,460,3 406,356,4 591,296,5 12,850,5 434,94,3 253,1027,2 235,434,4 404,806,1 455,431,4 560,1219,2 6,156,5 27,285,3 589,284,5 404,81,4 386,237,5 5,63,4 415,1047,3 415,823,2 529,177,5 462,49,4 502,777,5 163,1015,3 505,746,2 232,274,5 279,139,4 526,867,4 395,404,3 496,770,4 514,314,4 573,257,5 542,590,4 505,660,5 396,456,1 306,256,5 420,163,4 473,297,3 531,817,2 268,159,2 502,44,5 584,581,3 565,479,4 406,971,3 58,497,5 360,769,3 242,161,4 405,497,5 408,203,5 453,75,1 37,417,5 403,301,4 446,16,3 541,474,3 297,587,4 455,184,4 434,316,2 447,337,1 289,140,4 496,402,3 434,435,4 536,186,4 373,462,1 508,259,2 325,316,3 48,1070,3 23,236,4 91,948,3 576,116,4 578,527,4 302,226,3 84,494,3 104,339,3 536,312,4 404,778,1 110,302,3 267,58,5 415,863,3 538,482,5 550,184,5 545,435,5 404,1145,2 434,434,3 189,244,4 400,120,3 434,519,4 266,158,4 96,174,5 93,483,5 223,364,3 550,354,4 226,747,1 427,333,4 591,78,4 542,237,3 372,552,4 323,322,4 413,271,5 252,189,5 270,160,2 398,1073,4 505,53,4 507,180,3 287,267,4 404,402,5 493,923,4 157,52,1 454,64,3 585,755,1 268,317,4 47,190,5 479,264,3 415,627,4 59,72,4 347,925,3 487,298,3 592,632,5 485,304,3 270,528,4 12,232,4 268,678,1 452,400,3 487,86,5 193,158,3 123,167,5 363,318,3 532,95,4 157,220,2 499,41,5 433,147,3 457,407,5 108,155,5 75,546,2 6,594,2 456,475,2 560,0,2 155,82,3 473,257,4 93,208,5 439,242,1 307,170,4 157,110,4 216,49,1 97,24,5 325,564,3 93,199,4 362,978,1 75,1047,2 86,49,5 454,312,4 353,1136,4 591,968,4 455,90,2 585,558,5 471,195,4 94,197,5 539,741,4 448,1366,4 523,30,4 275,720,3 84,1120,3 405,812,4 536,471,2 238,588,3 81,2,2 407,301,5 510,879,5 51,18,5 464,179,3 390,190,3 362,249,1 193,133,2 415,306,1 269,558,5 169,987,3 450,686,2 363,947,4 281,342,4 342,240,3 390,10,3 541,245,3 395,281,4 338,203,3 
404,508,1 267,259,3 582,82,4 242,467,3 428,153,3 205,1394,1 568,125,5 327,160,4 20,243,4 173,277,5 59,29,5 567,268,4 520,270,3 233,416,3 449,789,2 547,1072,4 105,24,4 90,428,4 516,368,5 262,479,3 536,466,3 213,134,3 392,904,3 472,326,3 522,165,4 302,672,4 456,933,3 478,691,3 278,157,3 496,454,4 317,190,5 521,172,4 25,747,1 386,196,2 540,273,4 568,923,3 93,505,5 312,537,2 261,419,3 471,416,4 505,529,5 310,448,3 449,370,3 458,865,5 182,374,2 539,149,3 300,2,2 58,210,5 575,180,4 372,224,4 117,563,1 217,46,4 523,606,3 42,8,4 123,173,3 183,174,3 434,228,2 377,701,4 536,921,3 166,520,5 58,225,4 486,767,3 523,289,2 159,292,5 118,878,5 242,317,4 307,201,4 595,677,3 456,372,2 10,727,3 118,458,4 293,681,3 471,122,4 436,286,2 404,21,5 456,37,3 434,796,3 569,302,5 400,492,4 199,500,4 459,6,3 82,195,5 524,411,2 58,746,4 591,19,4 327,271,5 544,143,3 444,22,3 598,681,4 473,91,4 233,476,1 79,259,1 114,173,5 235,173,3 467,180,3 155,771,3 384,121,3 457,1069,4 567,193,3 232,142,4 540,809,3 586,878,1 114,672,3 592,391,3 473,965,4 465,323,1 362,804,4 523,723,3 335,185,4 261,71,3 104,326,4 477,517,4 431,180,5 447,303,3 540,254,3 118,300,4 91,221,4 212,186,5 357,665,3 200,333,4 416,57,3 436,496,5 496,411,1 55,407,4 450,687,1 243,715,3 444,220,1 494,503,4 400,146,2 471,3,3 94,140,4 549,299,4 428,84,4 356,273,4 464,193,4 233,187,2 364,221,4 534,167,5 17,804,4 12,660,5 233,691,3 524,331,4 301,987,2 180,18,1 535,569,3 275,355,3 506,270,5 248,147,3 195,115,3 471,79,3 213,91,4 113,645,4 196,678,1 576,683,4 377,449,3 451,484,2 129,10,5 502,133,5 433,8,1 290,37,3 449,356,5 547,656,5 80,41,4 575,69,5 317,283,3 18,210,4 431,507,5 302,978,4 436,300,3 422,688,4 380,1531,2 545,457,1 290,21,5 278,203,3 536,89,1 473,97,5 470,101,5 173,138,3 552,495,3 42,1055,3 292,282,2 17,708,5 360,212,5 180,128,2 524,1010,3 36,117,2 462,812,4 550,202,5 432,94,3 544,253,4 135,236,4 452,392,3 478,215,3 294,430,5 494,649,5 137,661,4 192,741,4 158,125,5 307,565,4 242,460,3 200,643,3 147,495,3 532,707,2 592,78,4 349,209,4 106,322,1 176,63,4 307,161,4 436,1038,2 302,708,5 360,203,4 93,548,5 96,190,5 304,180,4 245,287,5 588,312,5 434,1227,2 520,72,3 194,92,3 487,99,2 300,1015,4 372,392,4 144,691,2 496,383,2 369,209,3 328,193,3 550,745,5 488,538,4 534,479,4 502,429,5 404,1403,1 400,13,3 269,1008,5 494,388,5 325,836,4 534,520,5 557,123,4 534,628,4 181,478,5 547,234,3 544,264,4 522,574,4 593,236,3 347,755,4 183,738,3 473,640,4 327,707,2 206,126,5 448,458,4 325,227,4 504,87,4 255,202,4 393,418,5 560,61,3 221,1335,2 540,238,4 434,450,4 405,1117,3 58,435,5 346,543,4 416,1156,4 540,418,5 565,70,2 513,335,1 567,922,3 57,1011,4 400,654,3 497,143,1 253,1032,3 91,170,4 256,474,5 298,227,3 537,691,3 9,510,4 51,1010,4 576,94,5 596,327,4 292,69,3 473,847,4 452,590,3 457,595,4 532,379,4 372,733,3 542,809,3 277,751,5 576,187,3 53,287,4 343,297,4 416,745,5 471,950,1 55,819,3 94,207,4 530,456,1 282,207,5 5,173,4 129,117,4 577,293,3 424,1433,4 270,275,3 58,528,4 84,78,3 487,520,3 550,339,4 496,24,4 378,123,5 532,495,5 585,202,3 505,580,2 392,254,4 419,115,4 542,384,3 553,97,5 179,366,1 535,678,4 544,218,2 82,273,4 588,267,1 151,548,4 71,173,5 389,0,5 503,182,3 533,239,5 497,1130,3 172,305,5 221,101,2 503,53,4 95,181,4 94,120,4 367,773,4 377,318,3 47,97,5 292,195,4 466,454,3 90,49,5 392,1209,3 600,323,4 212,984,3 161,207,3 362,709,5 503,67,5 62,545,2 547,476,1 4,171,5 534,422,5 601,677,4 390,962,5 498,201,4 496,807,2 527,478,4 157,693,5 404,4,4 471,745,5 531,678,5 310,753,3 297,529,5 574,505,2 236,196,4 236,488,4 471,369,4 503,584,2 346,470,4 393,596,2 536,1007,2 
479,7,5 379,6,3 313,561,4 547,759,3 551,828,3 377,726,4 487,175,4 37,589,1 183,522,4 415,432,4 482,364,2 69,230,3 428,149,5 231,185,4 499,38,4 174,11,4 166,7,5 542,215,4 180,17,1 200,46,4 267,179,3 520,825,2 599,91,3 589,220,4 180,223,1 168,242,3 161,0,4 17,235,3 255,404,4 591,432,5 188,123,5 260,242,5 352,339,4 469,287,4 560,238,3 537,68,5 302,105,2 326,231,4 341,817,4 404,1483,1 223,422,4 456,672,4 388,78,4 382,479,5 451,873,2 292,198,5 576,14,3 287,288,3 384,1142,4 415,979,4 565,88,4 21,228,2 222,863,3 431,1048,2 523,212,4 183,392,4 449,1171,5 75,1005,3 293,545,4 7,240,4 451,242,5 91,404,2 408,426,5 604,126,5 565,241,5 602,20,3 75,59,4 58,78,5 513,658,3 536,101,1 124,175,5 94,273,4 14,878,3 262,731,5 91,283,2 220,28,3 151,116,4 252,95,5 263,1069,4 213,173,4 578,178,3 547,618,3 46,326,4 522,210,4 517,1027,3 428,247,5 91,576,3 163,290,5 369,173,3 591,322,1 399,285,4 388,178,4 494,200,2 180,110,3 91,79,2 108,331,3 536,204,5 602,229,4 467,602,5 532,86,4 576,185,4 575,380,3 524,1011,3 285,338,5 39,269,3 88,65,3 61,1017,3 532,1027,2 579,6,3 196,353,2 591,335,1 475,210,5 292,47,5 560,512,3 129,1243,4 384,510,4 544,519,4 317,863,2 509,357,1 347,367,3 467,18,4 290,81,4 61,96,2 118,22,3 331,0,4 398,52,4 386,531,3 405,124,3 560,63,3 298,1506,3 442,643,3 162,300,3 333,234,3 452,2,4 441,55,5 173,842,2 536,460,3 220,229,3 199,889,4 264,292,4 499,124,3 278,40,2 377,475,3 313,720,5 567,58,1 415,677,2 275,188,4 448,178,4 484,287,3 56,709,3 529,69,4 415,1013,3 560,615,3 370,522,4 80,404,3 86,426,4 189,271,5 537,215,4 0,196,5 432,434,4 150,146,2 333,497,4 296,918,1 449,1400,4 36,230,2 436,115,3 322,10,5 495,479,3 377,628,5 4,413,3 473,417,3 93,1221,3 527,392,2 483,14,5 89,461,5 235,422,5 424,575,3 431,321,3 267,1313,2 341,534,3 594,1263,2 307,731,4 221,374,1 415,1539,4 212,392,3 587,206,2 555,324,2 362,31,2 435,20,3 560,1118,3 28,878,3 517,122,2 321,47,4 214,126,4 285,39,4 28,660,5 58,476,3 206,37,3 300,120,4 538,184,4 302,1425,2 448,14,4 160,132,2 243,1167,4 551,117,3 404,1030,1 397,518,4 209,955,3 560,1023,3 408,1592,4 192,738,4 488,1242,4 197,355,3 534,58,3 94,77,3 415,1159,4 5,504,4 333,484,3 585,280,3 91,473,4 550,61,5 319,457,4 450,1393,1 135,115,5 467,691,4 478,225,3 286,312,4 532,195,4 345,577,2 428,731,4 2,341,4 180,830,1 428,61,3 262,514,5 81,431,4 5,506,4 392,552,3 380,933,2 486,182,5 445,291,5 498,317,5 499,303,2 567,953,2 495,228,2 547,221,5 551,759,3 446,1027,3 189,272,4 278,11,2 82,1015,4 48,418,4 391,490,5 238,503,4 416,49,3 118,824,3 151,401,5 391,1257,1 115,287,3 532,220,3 527,656,5 568,49,5 523,897,4 455,695,3 221,579,3 233,641,3 497,49,4 515,180,4 81,325,2 560,162,3 560,153,4 274,430,3 591,323,4 220,543,4 94,639,3 1,292,4 0,172,5 93,398,4 502,415,2 180,269,4 310,1118,4 505,771,1 440,116,4 504,6,3 605,122,3 320,381,3 591,201,5 319,545,4 436,731,4 471,789,3 561,113,1 500,236,4 596,823,3 206,608,4 329,215,5 81,429,5 48,261,5 384,656,4 404,93,5 496,1418,2 315,706,4 213,607,4 473,379,4 206,238,3 199,526,4 320,131,5 534,208,5 194,1414,1 350,322,5 416,757,2 560,670,3 262,131,5 266,61,3 599,228,3 542,85,4 599,1109,3 487,510,4 522,706,5 410,37,4 200,922,3 455,507,4 338,526,4 550,1266,4 456,173,5 413,345,5 289,96,3 532,201,4 398,154,2 405,572,3 523,75,4 398,394,3 61,958,4 12,432,4 107,293,4 302,714,4 273,124,4 420,673,5 504,175,4 516,268,3 127,470,4 451,659,4 41,500,5 523,641,4 62,590,3 536,769,3 412,507,4 508,878,1 222,332,4 453,967,2 558,203,3 599,126,5 221,94,4 489,236,1 436,10,1 499,1007,4 380,528,5 561,190,5 12,6,2 290,567,4 221,364,4 46,268,4 121,10,1 466,92,4 46,257,4 356,471,3 523,177,3 
565,153,3 150,846,5 84,457,3 308,303,3 421,200,4 492,171,5 64,734,4 6,214,4 402,283,1 576,392,4 486,98,4 522,651,2 513,749,4 106,285,2 578,1073,3 415,274,5 560,457,4 298,301,4 193,658,4 88,99,5 574,180,2 438,236,5 428,224,2 532,221,5 572,426,4 298,297,4 473,922,4 457,126,5 268,199,4 576,37,2 255,807,4 590,1098,5 496,394,4 6,571,3 591,1622,4 504,198,4 250,404,3 520,32,4 484,312,4 553,68,5 404,1575,1 317,891,3 180,546,1 330,237,4 242,85,5 434,696,4 12,791,5 536,10,3 558,513,4 0,74,4 582,662,4 567,184,4 392,738,3 543,331,3 292,49,5 494,549,3 537,116,3 404,1436,1 62,281,1 523,645,5 140,292,2 453,835,2 406,524,4 504,613,3 542,189,5 591,1314,2 124,339,1 513,366,5 497,339,2 536,94,1 341,192,5 502,1193,5 391,287,4 550,342,4 486,778,2 275,43,3 56,825,2 304,198,4 388,762,1 400,355,4 448,243,4 536,647,4 388,39,3 534,1395,4 513,391,4 263,1008,4 15,426,5 329,176,4 353,955,4 531,402,4 263,655,4 275,626,3 67,24,4 17,65,3 581,249,3 197,116,1 308,1295,2 249,324,4 183,50,4 157,743,4 373,117,5 215,407,3 591,741,4 517,235,3 285,1287,4 158,1048,4 307,91,4 535,198,3 11,215,5 173,661,5 246,221,3 492,190,4 591,987,1 517,221,5 574,506,2 350,300,3 221,575,3 197,142,3 115,301,3 150,491,3 585,218,3 494,464,5 434,22,4 522,513,4 557,507,5 524,684,4 415,312,5 396,272,4 470,150,2 20,242,2 226,220,4 528,268,3 300,192,3 84,509,4 313,66,4 12,790,5 408,173,4 404,738,2 553,227,5 475,287,4 420,233,5 196,171,5 254,146,4 189,590,4 388,699,2 267,657,3 362,0,2 108,596,2 536,1104,1 333,174,4 507,172,4 96,407,5 592,210,4 291,747,3 41,401,5 318,749,3 602,173,3 576,719,4 558,293,1 58,1100,5 93,669,3 607,1171,5 45,49,4 117,97,5 422,627,4 436,253,3 591,287,5 225,190,4 101,126,2 329,738,5 317,254,4 343,274,4 450,1025,1 279,155,4 253,150,2 536,12,4 327,126,5 203,169,5 505,32,3 551,470,3 436,565,3 455,178,5 4,69,4 479,236,2 507,150,5 399,305,3 15,207,5 578,167,4 554,1053,3 377,691,4 449,353,4 144,562,3 541,237,4 284,345,4 522,207,5 377,516,3 404,585,4 290,324,4 473,282,3 398,263,3 513,1034,3 522,268,5 390,479,4 275,571,3 551,741,4 6,236,5 343,87,3 91,1207,4 518,907,5 446,469,4 522,284,5 534,3,3 377,1008,3 199,567,5 473,177,4 393,448,3 416,1182,4 377,634,2 408,876,2 19,356,1 160,472,1 448,472,3 406,482,4 314,182,3 200,180,2 6,212,3 353,1510,4 471,566,4 600,1072,2 225,96,3 292,247,3 404,199,2 315,186,2 401,475,3 456,233,5 319,16,5 471,476,5 415,563,4 428,217,3 342,199,2 236,179,4 537,55,4 576,217,3 200,666,2 32,347,4 313,65,5 553,541,3 455,958,4 243,467,1 540,142,4 554,545,3 499,698,3 378,621,5 449,53,4 12,440,1 536,284,4 255,146,4 59,628,3 214,227,5 242,731,4 20,873,2 500,128,4 291,97,5 478,265,3 600,180,5 320,512,4 384,850,5 455,193,3 82,476,2 386,22,2 424,177,3 484,320,3 606,486,4 428,282,3 28,78,4 550,240,4 431,99,3 353,649,3 383,332,4 75,627,2 568,150,5 88,879,5 213,155,5 523,209,3 406,992,4 532,461,2 559,477,4 317,84,3 483,142,4 605,417,5 448,1371,4 587,431,4 403,21,5 396,170,5 394,272,2 579,147,4 547,297,4 122,288,1 143,315,5 12,913,2 513,41,5 98,404,4 328,6,3 585,186,4 591,1133,5 560,64,3 494,87,4 212,126,5 497,58,4 405,380,3 159,184,5 84,663,4 523,171,3 264,687,2 494,83,3 310,71,4 486,366,3 523,288,4 200,664,2 69,449,1 129,448,4 492,686,1 478,194,4 353,1118,4 600,833,1 135,274,4 382,314,5 493,8,2 217,1072,5 61,289,3 302,469,4 229,279,4 247,68,1 465,897,1 403,753,3 588,287,5 271,182,4 335,87,2 232,193,4 524,275,5 454,264,4 532,24,4 5,515,4 478,256,4 452,508,4 576,109,4 469,812,3 119,236,3 427,328,3 153,650,4 436,69,3 91,822,4 505,791,2 451,186,3 298,208,3 602,172,4 150,210,5 443,270,3 220,762,4 531,301,5 351,16,2 481,310,4 312,819,2 
5,12,2 391,247,4 310,293,4 154,331,2 292,356,4 567,1049,4 391,172,4 454,3,3 292,482,5 584,112,3 435,286,4 588,339,1 275,238,4 607,489,4 337,649,5 193,401,3 326,99,4 404,1077,1 483,251,3 452,366,2 380,692,4 576,1290,3 604,526,4 256,165,4 398,927,2 236,658,4 282,819,4 496,808,3 550,990,2 360,236,4 146,318,4 302,422,4 129,4,4 402,1046,2 531,116,5 292,355,3 494,49,5 473,614,4 200,513,3 485,695,3 115,327,3 118,822,3 108,54,2 471,923,2 560,1073,3 506,301,5 325,608,3 61,169,3 591,1008,3 103,272,3 194,677,3 478,534,3 459,292,4 525,753,2 547,322,4 517,1078,1 215,402,3 313,293,5 527,81,4 378,728,4 193,182,3 302,534,1 311,164,5 587,226,3 602,10,5 316,354,4 12,493,4 346,98,3 302,412,2 184,8,4 404,363,1 0,267,5 167,684,3 253,20,3 536,59,3 174,507,1 369,922,4 566,317,2 421,741,2 120,936,4 578,88,3 326,22,4 206,683,3 480,366,3 115,654,4 150,511,5 372,180,5 536,771,3 213,248,3 553,66,3 360,257,3 483,624,4 359,99,5 541,287,2 469,136,3 404,390,1 198,6,4 349,135,5 278,553,1 605,14,5 552,198,4 458,14,4 6,589,2 448,1403,5 409,257,2 584,462,5 86,509,5 588,288,3 436,777,3 279,312,3 373,831,1 34,327,3 494,281,5 12,832,2 261,124,3 303,277,4 114,97,3 127,24,3 439,170,5 477,40,3 580,918,5 436,94,4 513,57,4 546,310,2 441,225,3 94,1220,4 377,299,4 568,247,4 441,809,2 428,142,3 587,70,4 302,99,5 513,169,3 544,67,4 560,1119,4 73,6,4 495,6,4 349,257,3 166,1224,3 467,356,5 553,81,4 375,706,4 518,298,5 58,146,5 599,1227,2 591,407,5 375,10,4 483,134,4 436,650,4 456,506,4 2,302,3 268,150,5 428,273,3 42,685,3 528,983,4 534,602,4 592,209,2 58,649,5 416,998,3 538,366,3 404,375,5 324,113,5 285,534,5 0,33,2 163,325,3 454,446,4 526,708,5 536,936,3 61,52,2 312,738,3 560,317,3 26,117,3 12,705,1 269,582,5 503,622,3 405,451,2 269,738,4 192,1167,4 93,131,4 373,695,3 203,317,5 600,418,4 291,527,5 434,163,2 605,240,3 565,650,4 234,174,4 163,933,5 120,581,2 467,427,4 436,1160,4 492,114,4 117,602,4 279,125,3 373,392,4 523,473,4 7,340,2 586,346,3 404,785,1 67,274,5 379,663,3 296,54,4 436,729,3 508,686,1 118,1165,5 354,259,4 248,1010,5 552,264,5 248,461,5 536,351,1 262,510,5 535,162,5 369,677,4 371,78,5 469,284,3 27,759,3 183,181,4 157,180,3 504,264,4 151,1052,5 381,506,4 431,120,4 344,746,3 446,88,5 453,194,4 137,55,5 362,589,3 384,49,1 521,78,3 99,309,3 382,516,5 436,46,4 108,627,2 487,202,4 592,4,4 587,1046,3 386,78,4 57,268,4 404,514,1 180,1371,1 605,185,5 565,234,3 415,1225,3 68,49,5 327,404,4 221,641,3 312,182,5 607,1114,4 118,536,5 483,247,4 433,346,1 499,146,3 550,1517,4 357,323,4 12,878,2 547,648,4 58,219,2 197,54,3 233,496,4 496,451,2 520,742,1 135,302,4 319,256,4 594,1066,4 384,207,3 591,454,4 360,97,5 89,487,5 452,317,4 449,557,3 333,1524,4 20,924,2 497,60,4 311,165,5 248,404,3 567,474,4 434,825,2 95,485,3 507,179,5 587,143,3 86,203,5 565,6,4 175,221,5 17,94,4 296,212,3 180,103,1 496,678,3 522,288,4 453,276,2 560,41,3 572,210,5 513,209,5 353,92,4 394,342,5 421,759,3 532,285,4 503,256,5 606,528,4 487,221,4 380,1118,4 461,320,5 559,24,3 76,174,4 587,39,4 428,364,2 196,1,3 607,660,3 143,646,4 453,418,4 61,195,4 200,482,3 216,778,1 220,272,5 59,184,4 542,193,3 200,698,3 497,524,4 192,904,4 497,1285,3 183,1116,2 506,404,5 604,530,4 320,653,4 324,114,3 193,654,5 600,698,3 502,155,1 343,843,1 71,475,4 553,8,4 551,863,3 536,1008,2 180,92,1 444,173,4 197,215,4 392,185,3 48,402,3 268,110,1 59,527,4 247,49,5 609,314,4 326,116,3 296,267,4 292,293,2 283,749,3 392,820,3 415,198,5 74,1058,1 398,49,3 58,43,4 20,983,1 454,57,3 229,446,1 454,1135,3 331,404,4 143,1100,4 449,240,4 500,116,4 275,1027,3 487,522,3 193,719,2 565,754,2 342,333,5 
398,1177,3 416,814,4 606,106,4 487,7,3 292,197,4 320,656,4 221,558,3 610,299,5 587,782,4 269,243,3 362,697,2 544,230,4 492,175,5 6,79,4 129,402,5 363,260,2 140,146,4 404,131,5 118,131,5 221,1073,3 527,49,5 280,293,3 217,182,5 503,1109,2 501,332,4 252,124,3 48,97,4 542,52,3 454,938,4 593,743,3 487,21,4 6,50,2 565,76,4 415,202,3 550,192,5 420,95,4 269,163,5 22,160,2 312,719,2 555,131,5 221,391,4 341,477,3 345,324,1 242,693,4 468,494,5 505,249,2 616,636,3 387,568,5 351,227,3 439,581,3 103,455,3 5,309,2 81,819,3 444,309,1 482,274,4 482,11,2 22,471,2 292,1310,3 296,287,3 525,885,3 416,471,2 517,150,3 552,88,5 329,125,5 520,1021,4 57,473,4 373,1050,4 415,120,5 505,610,5 36,545,3 531,403,5 532,567,5 12,192,5 272,339,3 483,450,4 454,0,4 143,134,5 89,683,3 329,865,5 220,95,5 499,516,4 605,146,5 237,285,5 109,872,2 17,156,3 542,128,4 331,469,5 71,1,3 233,320,2 268,161,3 502,247,4 381,49,1 404,1248,1 475,185,5 459,220,4 268,659,1 591,1186,4 592,654,3 558,434,2 160,482,3 505,192,4 489,272,1 187,280,3 50,202,4 591,200,5 436,581,5 292,232,2 253,416,3 566,605,4 536,230,3 392,950,3 514,270,4 221,27,5 534,171,3 503,37,4 582,208,4 541,62,3 448,117,1 517,594,3 233,267,2 556,342,4 232,622,3 544,679,2 117,412,4 564,639,4 530,331,4 581,749,5 19,150,3 298,1222,3 404,561,1 187,176,4 416,668,2 183,120,2 607,149,3 458,472,4 375,267,3 295,250,5 526,21,5 311,504,5 550,754,4 427,304,3 35,872,3 593,221,4 436,735,5 550,126,5 404,214,5 81,229,2 506,309,4 617,203,3 206,711,4 67,120,1 352,331,5 324,268,4 531,265,4 392,577,4 520,249,3 345,548,4 10,718,3 420,184,4 585,1406,3 536,200,3 561,719,4 393,0,4 613,125,4 51,761,3 177,534,3 380,94,4 330,285,4 428,678,4 275,1169,4 150,49,5 492,149,5 326,526,4 150,498,5 212,283,5 62,827,1 312,152,3 503,498,4 348,324,3 576,379,3 498,176,3 415,49,5 594,1133,5 84,484,5 585,173,4 608,537,1 377,160,4 273,876,3 262,1019,3 388,180,4 89,463,5 496,1091,3 263,87,3 454,169,3 109,1,3 599,226,4 617,99,4 163,341,2 531,749,5 492,1125,2 335,287,3 333,323,4 614,734,3 192,198,5 605,472,4 549,253,1 180,125,2 327,179,4 343,13,5 456,185,5 398,62,3 609,10,4 617,780,3 238,488,5 372,755,3 591,183,5 596,299,5 586,260,3 467,256,4 507,210,3 614,516,5 302,55,5 435,552,3 338,520,4 532,228,4 249,152,2 404,62,3 94,70,5 453,269,4 410,731,4 523,977,3 25,221,3 360,656,5 436,150,5 526,1332,3 550,42,2 599,553,4 333,97,4 282,392,4 150,80,5 565,726,4 76,454,3 402,747,5 534,337,3 517,545,4 424,271,4 576,442,4 315,650,5 157,251,3 408,97,5 617,184,5 536,178,4 245,218,5 495,227,1 598,281,5 380,216,2 454,287,2 98,455,3 313,201,5 572,477,4 58,392,2 616,6,3 567,524,3 615,268,4 607,64,5 467,825,3 398,337,1 536,404,2 94,538,4 451,454,1 94,585,2 550,422,1 43,870,3 91,979,3 197,160,3 263,181,5 602,171,5 408,171,5 226,1046,2 617,366,3 534,504,4 488,263,4 127,201,2 614,169,4 217,38,2 469,846,3 236,524,4 592,171,4 188,491,3 436,1147,4 177,1032,2 373,641,1 607,1152,3 150,487,4 22,184,4 559,287,4 565,195,4 272,271,4 415,497,4 317,526,5 373,1093,4 565,484,3 199,551,4 453,190,4 206,741,4 199,755,3 553,844,3 253,203,4 421,123,3 162,268,3 497,547,2 56,545,4 362,184,5 523,711,4 270,193,5 535,48,3 598,872,5 588,994,1 12,899,5 163,741,5 377,21,5 582,264,4 255,4,5 144,815,5 381,510,4 550,691,4 572,68,4 276,110,4 345,1024,3 386,227,5 279,750,3 233,487,4 364,99,5 619,70,5 279,1472,3 590,190,5 367,10,4 523,191,4 371,627,4 505,85,3 223,220,2 12,564,1 338,184,4 333,287,3 553,410,3 220,565,3 119,0,4 592,1034,3 617,92,3 485,1133,3 534,96,4 613,286,3 565,81,4 2,328,4 536,238,2 195,381,4 334,306,5 588,326,3 156,409,4 20,442,4 576,39,4 585,927,3 473,191,4 
486,461,2 496,430,4 536,605,3 609,750,4 486,401,4 113,196,4 507,505,5 568,286,4 196,10,1 478,260,1 455,324,3 541,381,3 505,341,3 24,567,4 398,63,3 513,714,4 498,1301,5 329,30,5 463,259,2 392,925,4 157,449,3 483,664,4 382,284,5 291,491,4 444,602,3 322,618,3 584,1511,5 279,1111,4 449,1401,2 292,505,5 252,80,4 534,1165,4 449,206,4 522,793,4 192,312,4 384,491,2 567,490,2 93,159,4 478,152,4 144,933,1 591,805,4 610,349,4 557,18,5 346,96,4 415,160,4 177,312,5 487,186,3 434,830,2 429,513,4 550,943,2 280,330,3 114,846,4 361,349,5 372,174,3 451,623,2 200,457,4 127,650,5 398,976,3 449,549,4 2,316,2 93,596,2 599,1,3 177,1313,3 282,55,5 362,558,3 565,87,3 86,79,4 487,879,3 587,259,2 333,271,4 59,601,4 331,226,5 415,374,1 287,687,1 71,404,3 436,182,3 34,257,2 591,254,4 199,1090,4 384,1130,3 144,1086,1 496,1209,4 443,312,4 531,591,3 503,96,4 12,176,5 362,225,1 582,11,5 189,684,3 502,120,3 113,484,3 570,495,3 558,3,4 42,249,2 471,217,4 405,692,3 485,283,2 251,474,5 194,876,3 6,678,5 324,189,4 449,636,4 290,122,4 617,181,4 285,335,5 566,516,5 384,237,5 451,530,4 3,328,5 550,954,3 63,189,4 526,473,3 529,482,3 120,545,1 300,81,5 535,233,4 552,484,3 567,473,5 424,356,5 317,300,4 356,23,4 513,379,4 462,111,1 189,275,4 617,27,4 405,12,2 536,557,4 331,840,4 387,265,5 549,845,2 502,497,5 378,203,5 1,293,1 561,479,4 607,8,4 260,287,4 579,357,4 540,624,4 618,244,4 370,526,5 424,1418,3 129,451,4 185,879,3 428,832,3 329,650,5 497,648,3 350,688,4 327,154,4 431,677,4 451,653,2 327,148,2 483,150,4 226,249,2 277,287,5 578,97,4 271,603,4 499,195,4 416,673,2 129,1078,3 452,654,3 519,677,2 616,859,1 205,299,1 302,333,3 124,190,5 434,131,3 508,257,4 428,57,4 238,149,5 434,167,5 456,410,3 471,203,5 392,475,3 58,57,4 499,234,5 406,558,3 536,209,3 505,403,5 553,741,3 464,133,4 55,322,3 556,253,4 209,422,5 526,513,5 0,143,4 560,639,5 460,120,2 451,7,4 473,220,4 531,894,3 535,723,4 283,267,5 267,505,4 613,24,1 12,441,1 415,936,2 591,846,5 231,418,4 12,844,3 504,331,4 579,299,3 362,574,1 216,209,4 96,152,5 0,270,2 428,507,4 450,336,2 149,318,4 115,19,3 61,128,3 451,167,4 485,281,2 377,574,3 486,977,1 325,231,2 119,826,2 209,98,4 532,691,4 485,761,4 450,882,1 402,684,4 449,413,3 612,257,5 292,221,3 536,135,4 499,713,2 326,545,2 477,92,4 338,426,5 499,257,4 542,769,4 536,187,4 267,1272,2 537,96,5 398,737,4 372,747,4 558,314,5 621,1,4 14,291,5 620,403,3 576,364,5 434,454,3 497,557,4 275,738,2 234,521,5 173,146,4 382,202,5 302,169,5 404,1568,1 542,402,4 221,325,4 449,339,4 150,503,4 36,49,5 600,622,1 487,259,2 262,193,5 200,178,5 422,312,4 398,109,2 534,356,2 478,175,4 215,422,4 587,98,5 533,455,5 509,686,2 22,154,3 511,190,4 221,619,3 541,88,4 537,195,4 481,875,3 514,321,3 180,406,2 23,918,3 80,110,3 21,545,3 536,190,4 5,185,4 43,404,3 314,174,5 93,229,2 188,165,4 560,140,2 240,345,3 556,270,4 574,172,5 600,175,2 84,377,4 296,0,3 620,566,3 302,1510,3 358,747,3 84,495,4 327,1477,3 57,126,4 447,339,4 267,465,3 456,121,2 495,658,3 273,279,1 520,1058,1 505,273,4 344,49,5 585,925,4 12,687,1 221,65,4 618,116,5 23,356,5 47,478,4 523,517,3 392,721,2 143,317,5 342,630,4 425,1078,3 266,178,5 473,120,4 422,301,5 475,889,1 485,459,4 485,6,5 98,237,4 441,683,3 483,226,5 275,30,4 425,184,5 415,762,5 186,212,4 427,343,3 316,298,4 118,347,3 267,726,2 136,384,5 115,46,3 296,473,4 384,958,3 344,735,3 108,173,5 513,238,5 402,116,4 58,594,3 69,98,4 550,24,1 219,994,3 243,771,4 544,567,3 497,150,4 431,107,3 4,436,1 621,844,3 430,357,2 487,229,3 379,381,3 517,923,3 5,511,4 90,388,2 424,337,1 406,96,4 456,55,4 217,54,4 464,174,5 187,53,4 93,141,3 550,470,5 
275,770,2 91,156,4 428,264,4 421,256,4 377,226,3 473,86,4 180,1244,1 531,328,4 458,1015,4 12,182,4 302,77,2 404,1567,1 347,117,4 523,190,4 465,872,2 444,27,4 531,37,3 129,426,5 416,325,4 539,124,3 113,482,4 333,327,3 294,95,1 449,505,5 505,950,3 591,156,5 74,24,5 307,418,4 384,345,3 496,577,4 534,57,5 108,450,5 535,228,4 129,469,2 294,108,4 180,1053,2 220,694,4 400,480,3 605,1109,2 601,303,4 34,878,4 345,53,4 452,156,4 449,312,5 434,894,3 392,270,3 539,0,3 486,780,3 313,795,2 614,1020,5 204,267,2 495,527,4 614,885,2 196,525,5 441,16,4 273,8,5 81,865,3 312,460,3 607,68,4 341,237,4 342,580,4 619,443,3 34,936,4 116,422,4 550,272,4 10,179,2 270,78,4 378,250,5 475,789,4 314,512,5 377,484,4 223,238,4 69,553,3 392,282,3 533,289,4 526,630,4 496,404,3 404,715,1 200,671,2 49,1009,5 421,306,4 317,403,3 478,96,3 422,753,4 435,707,3 565,206,5 428,158,3 471,945,2 581,257,4 584,862,5 471,417,3 298,646,4 159,14,2 290,120,2 573,271,4 75,357,2 503,83,3 540,221,4 296,190,3 400,356,4 446,30,4 55,385,3 536,761,3 333,236,4 585,27,3 572,479,4 350,310,4 449,1220,5 16,285,3 115,252,3 311,1515,4 207,370,5 307,293,3 398,79,3 534,177,4 307,1251,3 109,287,4 131,150,3 536,401,1 522,180,5 5,190,4 143,14,4 317,268,5 357,581,5 523,214,2 108,939,3 550,718,1 279,12,5 338,1152,4 434,731,4 487,1024,2 292,1297,3 434,432,5 449,282,3 61,567,3 184,204,3 449,691,4 416,962,4 58,136,5 536,420,2 120,741,5 550,1066,2 400,68,3 503,698,4 486,410,3 505,299,3 596,1151,4 335,69,5 93,431,4 311,518,5 605,619,4 621,796,2 311,431,5 199,471,4 282,150,4 498,1100,5 296,1072,3 618,38,2 421,287,3 398,824,2 127,1220,3 560,217,3 603,443,2 192,24,4 58,287,5 621,105,2 268,601,4 479,113,4 523,23,3 135,282,4 25,108,3 101,81,2 373,1032,4 445,285,3 4,150,3 150,1097,1 616,395,1 578,327,3 177,152,4 328,322,2 489,297,3 473,174,4 449,79,3 620,541,2 214,217,3 333,286,3 614,86,4 458,18,3 499,42,3 560,204,3 379,424,4 173,746,5 51,125,5 547,280,4 285,760,4 278,1058,4 478,146,3 63,519,5 542,370,5 591,124,2 437,219,4 378,510,4 605,95,5 94,143,5 377,69,4 560,345,5 200,1044,2 486,78,5 230,0,3 475,215,4 517,695,5 616,564,4 303,327,3 416,166,3 254,833,4 451,1402,1 600,173,4 129,327,4 22,229,4 109,293,3 428,208,4 592,233,2 621,755,3 151,631,4 283,937,3 592,195,5 473,275,5 254,558,4 292,229,2 539,248,3 416,172,5 348,457,4 180,1149,1 225,68,4 590,381,4 345,1230,3 585,567,3 89,530,4 591,353,4 384,1120,4 96,662,5 416,90,2 606,85,4 533,1198,5 93,97,4 511,526,5 556,126,4 258,153,5 403,878,3 254,120,2 426,936,5 497,932,3 245,131,4 520,205,5 498,7,5 335,858,2 99,346,4 560,660,4 81,180,4 416,14,5 428,501,3 617,95,3 623,470,4 388,184,5 456,110,3 71,97,5 475,711,3 384,304,4 326,91,4 446,149,4 531,10,5 567,164,4 326,64,2 566,256,3 233,403,4 84,475,3 12,739,1 233,110,3 377,142,4 71,24,5 590,321,2 595,122,2 233,608,3 120,356,5 158,1012,4 355,330,3 333,905,5 323,874,3 127,167,4 188,316,4 607,275,2 589,753,3 302,194,4 550,1086,1 474,312,2 495,76,2 221,1086,1 173,139,4 386,319,4 150,496,5 441,66,3 457,303,4 208,13,3 495,773,5 454,196,5 256,1128,5 591,1007,4 187,876,2 611,925,2 14,689,4 531,194,5 602,930,2 267,940,2 438,894,3 56,1072,3 183,643,4 89,810,4 475,342,4 386,92,5 591,346,4 86,823,3 438,146,4 83,545,3 416,383,4 525,675,5 441,671,3 599,582,3 565,116,4 494,356,5 233,63,4 534,812,5 484,325,2 129,61,4 366,323,5 221,1177,2 517,12,4 497,483,4 415,711,4 566,918,4 584,729,3 398,1218,3 326,855,4 454,626,3 485,180,4 536,214,3 31,8,3 271,422,4 436,386,2 213,174,5 602,156,1 397,196,5 127,728,2 503,55,3 379,1064,4 541,410,4 623,277,4 560,392,2 94,967,5 81,522,5 514,306,4 233,645,3 568,301,4 
505,182,5 275,654,4 313,215,3 188,208,1 456,356,5 464,655,3 59,326,4 425,207,4 596,241,4 451,418,4 543,285,4 53,254,3 455,954,4 591,250,5 604,600,5 150,740,2 302,442,4 12,732,5 177,743,3 488,269,4 432,267,3 343,209,4 335,762,3 593,285,3 536,329,2 592,608,3 587,142,5 619,1042,4 129,1230,4 89,309,3 10,236,4 503,727,3 209,71,3 480,658,5 16,293,4 614,1191,4 126,270,5 542,515,4 469,49,5 135,474,4 48,1066,3 532,483,3 324,547,3 605,209,3 585,175,3 41,418,5 415,761,3 550,747,4 428,187,4 605,762,5 1,309,4 416,263,2 594,288,4 434,716,3 467,661,4 367,144,2 372,208,4 294,426,4 86,322,3 480,779,1 482,19,2 409,314,4 404,382,1 386,658,4 488,265,5 499,987,3 470,500,3 278,1214,2 278,1024,2 424,304,3 605,123,3 223,1151,3 456,371,4 557,846,4 561,417,5 559,0,4 600,256,2 400,683,4 59,649,4 592,244,3 560,536,4 479,126,3 578,654,3 455,1327,4 393,664,2 553,950,3 410,49,5 393,779,2 591,317,5 428,116,4 545,859,4 235,317,5 404,387,4 536,720,2 199,42,3 480,504,5 475,1117,3 592,401,4 492,126,3 455,94,4 90,482,4 278,801,4 310,172,5 429,221,4 619,929,2 487,171,3 568,675,4 533,149,3 615,299,4 69,203,3 591,1376,3 362,1072,4 297,602,5 343,215,4 292,48,3 536,1018,1 163,297,3 103,2,3 216,567,4 503,722,4 278,868,1 289,317,4 451,635,5 187,120,4 550,1,2 279,65,5 206,1224,3 541,318,3 41,282,3 436,287,2 200,41,4 618,808,1 488,882,2 364,339,5 199,422,5 449,65,4 488,1024,5 492,274,1 168,479,4 591,186,5 436,82,4 429,55,4 302,263,3 589,149,5 626,25,3 234,691,4 16,322,1 292,1015,2 398,621,4 333,57,4 517,117,5 535,188,5 482,106,3 471,377,4 201,317,1 274,419,2 495,377,1 345,641,3 59,204,4 557,13,4 12,731,5 576,844,4 435,580,4 277,537,4 196,321,3 199,140,4 523,1455,3 620,832,3 553,431,4 82,3,2 383,750,4 243,49,5 285,311,4 162,878,2 12,426,5 536,193,3 497,202,5 587,257,4 576,931,3 349,49,5 219,288,4 584,1265,3 558,190,5 581,825,3 215,788,5 539,590,3 440,258,3 278,1205,5 404,1398,1 591,146,4 193,1410,1 507,175,4 482,431,3 532,527,4 233,523,3 526,86,3 270,526,5 398,422,3 18,209,3 416,95,3 532,275,1 392,825,3 81,120,4 143,1196,4 535,560,3 300,50,4 220,281,4 372,1132,3 474,258,5 55,66,2 20,987,1 622,49,5 452,356,5 627,269,5 91,3,4 420,3,3 496,723,5 522,8,4 238,633,4 560,91,3 287,514,4 566,1019,3 576,726,5 397,123,5 550,549,5 380,175,4 320,431,5 404,797,1 464,602,4 50,478,3 287,215,4 553,273,3 622,274,5 94,508,4 620,262,1 419,99,5 616,293,1 143,587,4 600,275,4 197,922,3 390,645,4 42,275,4 434,227,4 535,213,2 579,328,3 621,283,1 536,171,3 65,293,4 353,1084,3 434,11,5 540,595,4 290,368,3 624,747,3 144,311,3 275,1055,4 449,221,3 531,544,2 353,24,2 488,300,3 10,285,5 430,326,3 532,150,3 464,21,3 508,602,4 314,522,4 499,1225,4 500,292,4 129,435,3 7,258,1 346,175,3 449,646,4 346,654,5 504,202,4 206,283,3 473,381,3 449,191,4 267,52,3 498,538,1 86,476,3 446,156,4 439,85,5 507,218,1 103,275,4 578,110,4 503,57,3 290,415,4 564,729,5 61,386,2 377,131,4 177,782,4 535,861,3 17,124,3 64,237,3 532,192,4 435,720,3 373,94,4 397,736,2 485,12,4 621,992,4 499,324,3 478,209,4 22,78,4 124,1114,3 428,300,3 462,287,1 523,612,4 534,590,4 506,315,5 345,332,4 614,258,1 531,1336,3 462,256,4 535,226,5 341,1314,1 496,999,2 424,1015,3 432,339,3 496,644,3 279,366,5 91,395,3 86,727,4 455,602,5 448,8,4 84,434,4 58,580,5 193,990,2 588,677,4 449,548,3 473,1517,3 151,486,5 424,345,5 459,302,3 559,257,5 613,116,3 436,50,1 515,901,5 626,698,1 343,123,5 471,714,4 448,1194,5 310,793,4 278,293,2 408,212,4 584,1523,3 560,596,3 540,180,5 607,161,3 342,89,4 402,1011,1 428,670,3 195,284,5 11,158,4 591,424,5 416,799,2 574,482,3 573,689,3 531,830,2 150,429,4 416,824,4 532,194,4 552,150,5 
147,520,1 444,1251,1 331,233,5 179,1045,2 303,110,3 424,200,3 56,294,5 607,418,4 193,943,2 547,257,4 23,199,5 486,225,3 587,72,3 531,120,4 523,28,3 55,522,4 550,697,4 478,178,1 6,323,1 296,248,3 294,203,4 394,180,5 456,530,5 304,685,3 437,470,4 231,165,4 311,662,5 232,431,3 346,162,4 599,1406,2 258,96,4 523,285,5 343,88,5 386,731,1 401,454,3 535,401,4 393,363,3 429,1006,3 434,1127,2 537,237,5 248,256,3 536,508,4 1,308,1 620,146,3 500,474,5 476,24,5 194,739,3 114,656,3 552,491,3 267,629,4 233,516,3 39,315,3 193,86,4 565,706,4 232,233,4 178,332,5 228,287,4 456,99,5 458,99,1 384,497,3 392,78,4 516,596,4 565,771,4 297,126,5 152,257,5 12,327,3 486,747,4 566,297,4 253,553,3 416,144,3 307,927,4 275,420,4 214,194,5 620,106,4 450,287,5 513,657,4 427,346,4 206,1027,3 105,160,3 628,283,4 101,891,2 523,276,3 6,428,5 471,233,4 522,65,4 279,1181,3 436,769,3 312,483,5 582,267,5 558,686,3 285,475,4 140,283,5 330,681,5 455,98,3 183,78,3 566,190,3 59,264,5 304,183,3 404,1407,1 568,116,3 550,316,5 607,488,5 378,0,4 496,186,5 406,520,3 428,249,2 93,28,2 384,190,2 388,167,5 192,894,1 600,195,3 454,1027,2 327,116,4 496,152,4 536,874,1 243,356,5 485,878,3 400,508,4 494,495,5 296,184,5 55,945,4 5,273,4 492,94,5 594,1058,4 150,8,4 176,1217,4 68,264,4 298,282,3 81,833,1 620,747,4 58,0,2 392,28,4 520,143,3 536,652,4 378,270,3 451,923,5 349,478,5 56,104,3 505,174,5 103,254,1 51,284,5 420,447,3 456,7,5 270,520,5 53,327,4 326,630,3 468,482,5 527,237,3 366,562,4 380,930,4 464,86,4 498,656,5 428,386,4 267,454,3 532,281,4 49,267,4 590,739,4 607,608,5 78,92,2 620,134,5 84,344,4 478,482,4 536,748,2 223,328,3 345,249,3 24,836,4 58,745,5 536,697,3 379,520,2 275,52,4 538,152,5 386,951,5 188,161,3 569,244,1 552,473,5 600,183,3 449,135,5 401,257,4 626,178,5 285,171,4 552,176,4 525,407,5 536,274,4 55,777,4 384,1153,5 600,495,4 387,332,5 554,248,4 428,760,2 486,127,5 190,749,4 415,695,3 447,1601,4 150,462,5 307,678,4 416,641,5 478,99,3 180,1337,1 108,530,4 584,211,5 544,87,3 588,258,5 550,650,4 317,1062,3 561,500,5 456,264,5 579,1027,3 144,341,4 532,13,3 449,609,4 98,0,4 435,1134,4 422,545,2 333,184,4 587,422,3 5,177,4 58,648,4 393,183,3 482,108,5 467,272,2 317,738,5 388,479,5 109,1209,3 486,619,3 391,303,4 289,264,4 452,1016,3 287,434,4 609,482,5 307,287,4 384,674,5 24,130,4 300,42,5 620,540,4 318,688,3 632,27,4 517,272,5 604,136,5 495,132,5 144,651,5 185,202,5 436,243,3 37,404,5 364,894,4 513,356,4 369,496,3 103,404,3 18,267,2 410,180,5 289,731,4 10,85,4 151,691,5 69,567,3 591,194,4 452,209,4 296,3,1 180,817,1 290,797,4 381,1380,3 552,480,3 405,189,5 505,233,5 540,375,3 377,408,2 343,315,4 607,96,3 5,472,2 505,57,4 300,141,3 30,327,2 604,179,4 626,1003,4 451,509,4 536,234,1 515,49,5 482,269,3 594,824,2 124,366,4 266,30,4 451,429,3 607,264,3 454,1196,4 319,452,3 499,728,4 600,410,2 275,392,4 429,49,4 502,434,3 621,729,4 471,587,3 306,208,5 388,1113,2 564,169,5 419,507,3 84,300,4 337,426,4 536,612,3 541,190,5 535,500,3 5,407,4 540,195,4 144,1214,2 531,249,3 144,571,5 342,526,5 441,272,4 408,282,4 326,78,3 327,28,3 617,1,2 392,565,3 432,506,4 63,1139,1 292,491,5 488,987,3 127,180,4 591,263,2 63,187,4 20,144,1 554,235,5 560,1528,3 589,863,1 620,450,1 525,918,3 344,377,4 177,505,3 276,123,3 275,657,4 118,209,5 542,734,4 173,8,5 547,30,5 129,353,5 453,692,2 554,488,5 275,1243,3 353,488,4 507,316,4 25,1010,3 505,537,3 565,14,3 604,581,4 404,51,1 291,843,5 274,95,3 614,288,2 90,135,4 528,320,4 581,221,4 124,519,5 108,81,5 326,201,4 353,557,4 624,738,3 607,788,3 448,268,5 503,1029,3 607,694,5 243,81,3 93,172,4 296,150,3 496,740,4 
617,236,4 333,114,5 431,23,1 532,9,2 86,88,4 531,878,3 449,34,2 115,389,4 349,514,5 477,762,5 591,196,5 379,1112,4 525,677,1 492,49,5 183,92,4 176,287,5 617,130,4 620,553,4 5,537,2 613,99,5 307,427,5 180,1301,1 621,754,4 497,1069,3 307,824,4 91,779,3 200,1068,2 497,602,4 513,256,4 106,901,5 162,299,3 406,24,3 27,175,5 425,98,4 248,479,5 17,612,5 377,795,2 591,270,4 516,274,5 538,955,5 460,254,2 503,179,4 505,795,3 319,230,2 598,470,4 540,256,5 43,210,4 618,402,5 478,281,5 591,1024,1 590,203,4 631,187,4 311,834,5 249,180,4 451,244,2 592,64,3 520,1011,3 471,1035,4 540,214,4 290,84,2 388,524,4 267,203,3 279,66,4 379,711,2 441,694,5 377,702,4 267,11,4 449,506,5 600,163,4 334,747,2 6,596,3 587,24,4 75,512,5 220,468,3 196,37,3 313,98,4 405,88,4 319,367,3 534,657,4 349,213,3 112,272,4 462,115,5 485,1271,3 449,489,5 181,180,5 565,511,4 384,199,3 307,134,5 206,32,2 494,226,5 255,656,5 238,227,2 552,1020,2 576,11,4 345,97,2 599,37,3 297,185,4 503,578,4 590,285,4 183,282,5 349,488,4 519,24,4 544,216,5 377,806,3 370,356,5 621,478,4 311,612,5 604,283,2 377,27,4 599,525,4 269,120,4 157,54,4 523,280,2 578,407,3 453,739,2 173,1261,5 364,234,2 623,863,3 473,72,3 285,427,5 221,507,3 415,657,5 64,87,4 63,134,4 409,339,2 520,391,3 567,5,3 490,899,5 536,91,3 456,51,4 278,921,3 194,1192,4 206,413,2 22,431,4 180,1339,1 478,78,4 440,682,2 203,320,1 462,543,4 600,249,4 550,446,5 424,567,3 531,469,5 321,527,5 415,6,4 125,299,4 604,14,5 91,124,4 452,741,3 253,0,3 497,120,2 406,39,1 619,267,4 404,232,1 477,411,4 384,249,3 311,1019,5 556,871,5 582,238,2 72,152,3 536,348,1 103,533,2 545,55,5 532,0,4 34,875,2 622,522,4 453,450,4 293,99,4 576,384,5 499,929,3 302,474,4 537,239,2 471,71,5 621,11,5 589,243,3 150,380,5 14,290,3 534,203,5 307,422,5 169,686,3 619,564,4 550,383,1 103,236,3 457,472,4 561,196,4 108,87,4 353,250,5 315,171,1 52,6,3 255,1050,4 0,118,5 477,236,5 249,750,2 398,1479,3 405,542,4 559,280,3 473,98,4 302,761,4 88,236,4 454,404,3 521,317,4 617,495,4 526,58,5 584,706,5 483,249,4 270,658,3 404,1223,1 434,943,2 585,402,4 591,875,1 304,201,3 457,708,4 55,175,5 415,194,5 620,180,5 249,366,4 362,571,2 292,894,3 616,131,1 477,215,5 607,192,4 151,132,5 580,846,3 278,665,2 0,25,3 601,124,4 614,643,4 553,575,4 542,201,4 607,1112,3 405,202,4 526,18,3 485,863,3 341,872,3 108,117,3 233,428,4 21,162,1 335,283,4 478,208,4 477,356,5 525,242,1 256,380,5 317,214,2 503,97,5 591,262,1 465,454,3 342,514,4 495,531,5 180,321,1 571,1009,2 492,753,3 194,372,3 300,1282,4 384,1157,5 144,1024,4 390,545,3 114,12,5 453,106,3 43,89,2 456,424,4 30,298,4 534,629,2 486,1445,3 457,286,4 302,959,4 544,68,4 527,193,5 12,753,4 440,0,5 351,193,3 378,673,3 550,1206,1 401,1100,4 285,526,4 405,63,4 559,234,2 310,1220,4 404,1358,1 449,509,4 404,583,1 337,212,5 464,528,3 509,880,2 591,41,5 567,285,3 178,361,1 59,640,5 59,410,3 405,944,3 619,464,4 459,252,3 457,426,4 415,87,3 312,151,3 415,28,2 483,96,5 140,285,4 266,825,3 591,410,2 107,251,3 477,468,3 467,274,4 395,1214,2 477,27,3 302,1214,1 405,606,4 494,146,5 354,680,4 220,185,4 172,983,4 572,204,3 63,339,4 599,434,5 536,192,4 327,728,4 533,627,5 398,25,2 585,234,3 379,749,4 392,1054,4 270,125,3 505,80,1 243,196,4 413,257,5 506,327,5 290,1228,2 266,205,5 619,767,5 455,227,3 138,149,4 550,807,3 497,270,2 507,527,5 311,536,5 499,380,4 206,174,1 458,332,3 291,263,3 392,1439,3 41,65,4 413,259,3 331,894,5 524,254,1 398,155,3 531,864,2 533,470,5 560,12,3 5,426,4 440,404,3 560,435,4 446,175,4 84,1171,4 341,788,3 554,6,4 631,287,3 84,461,4 632,1131,2 537,214,5 360,211,5 147,221,4 565,94,2 464,256,4 
494,670,2 17,707,3 566,272,5 61,158,3 600,108,4 630,271,4 559,479,3 77,812,2 633,457,4 298,22,4 1,305,4 180,408,2 362,505,2 428,779,3 290,734,4 521,132,3 333,314,4 302,551,2 603,182,3 248,805,5 591,459,3 536,356,4 319,759,3 564,854,5 187,469,5 139,285,5 560,1068,4 416,796,3 298,24,3 536,132,4 531,258,3 600,38,1 536,609,4 552,647,4 377,364,2 140,329,1 444,275,3 144,273,3 232,202,3 553,117,4 321,156,5 534,150,4 493,14,5 406,483,4 605,94,4 185,1335,3 323,826,4 617,272,4 175,873,4 63,193,5 416,403,3 411,171,5 534,281,3 633,475,3 462,6,4 98,594,4 487,207,4 380,431,5 10,543,4 307,147,3 307,7,5 616,52,1 496,1046,3 621,282,4 584,165,4 82,1059,3 273,14,5 550,1313,2 229,692,2 129,1027,4 454,280,3 535,384,4 552,489,4 584,189,4 554,284,5 473,488,4 536,84,2 180,241,1 551,454,3 623,49,5 310,497,4 617,777,3 323,13,5 485,886,5 32,878,3 453,356,3 237,300,3 80,590,5 9,31,4 620,363,3 436,180,4 626,280,3 599,549,4 47,526,4 457,123,4 335,764,4 566,495,5 616,97,2 386,12,4 421,333,4 545,568,4 614,178,4 398,507,3 517,865,5 510,871,5 350,327,4 150,515,5 421,92,4 560,303,3 38,268,4 118,930,1 215,41,5 495,431,4 463,747,4 559,812,4 624,171,4 278,501,5 595,257,3 494,167,5 289,1059,3 551,248,3 328,312,4 183,595,4 58,490,4 523,179,4 226,1010,4 547,269,5 213,602,4 535,175,3 594,1093,3 585,185,2 592,215,5 565,704,4 620,32,4 248,293,3 568,251,3 449,163,4 454,300,2 140,247,3 551,619,3 200,749,3 558,264,4 605,196,3 457,424,3 473,1044,4 591,302,5 560,31,4 287,1357,5 544,387,3 275,646,4 596,987,1 4,414,1 384,718,2 558,126,4 406,122,3 587,346,5 436,462,5 93,280,3 438,284,5 412,299,4 576,622,5 633,236,5 495,245,4 65,120,3 454,38,2 364,124,3 421,457,3 5,434,4 270,197,4 638,87,3 623,345,3 84,1005,3 2,180,4 415,35,2 313,627,5 108,232,4 127,704,3 617,696,3 324,318,3 223,924,3 633,272,3 63,209,3 537,126,5 22,90,4 541,1097,4 550,78,5 584,44,5 455,489,4 12,90,2 436,233,4 296,173,5 523,529,4 534,10,4 626,209,3 109,312,5 398,385,3 245,1227,1 478,731,4 602,99,4 150,522,5 587,1038,4 278,1131,1 122,275,4 605,10,5 291,0,4 317,317,5 94,514,5 55,153,2 541,57,4 304,750,3 180,274,3 514,309,3 384,944,5 491,922,5 393,762,3 245,632,3 314,153,5 566,610,4 587,14,5 429,514,4 513,68,4 589,275,4 113,356,4 176,291,3 351,301,4 105,711,3 536,21,2 89,272,3 300,464,4 552,98,5 456,154,4 51,257,5 127,1191,2 527,173,5 22,144,3 533,1058,4 576,560,4 275,251,3 416,1227,2 132,299,3 477,254,4 175,320,4 542,663,4 297,434,5 535,698,3 229,238,4 587,622,3 97,427,5 71,96,4 84,515,4 477,136,4 86,234,3 306,630,3 531,1425,3 404,194,5 312,185,3 584,115,3 532,602,4 311,490,5 360,420,3 405,623,5 535,647,3 560,381,4 489,257,2 496,745,5 623,270,3 594,927,3 441,160,3 553,85,4 540,768,1 536,170,3 58,86,4 101,259,2 547,990,1 598,1151,4 629,293,4 585,225,4 302,501,4 544,443,3 451,481,5 268,646,4 411,275,5 344,954,4 415,659,5 638,241,4 173,14,5 297,950,4 498,325,3 404,233,5 94,185,5 441,549,2 278,185,5 406,435,3 483,143,4 392,27,4 245,96,3 344,238,4 536,497,3 536,964,2 70,13,5 576,794,3 620,875,2 362,522,3 168,299,5 543,688,2 183,1147,3 247,63,5 633,404,4 465,88,3 209,166,4 341,46,5 223,747,3 238,433,5 162,257,4 483,238,4 306,27,3 286,545,4 298,1055,4 552,204,4 626,143,2 295,514,5 543,324,1 388,24,3 404,218,5 469,124,4 560,192,3 434,198,5 629,933,3 488,339,4 324,235,3 542,442,4 136,404,5 416,68,3 550,41,5 161,104,2 345,203,4 631,621,4 428,275,5 534,477,5 494,190,3 401,41,4 590,55,4 536,305,3 238,220,5 621,10,4 619,293,5 268,955,3 360,693,4 406,158,3 592,69,5 494,574,3 529,318,3 313,256,5 488,293,3 607,885,1 404,660,3 449,134,3 22,249,4 485,294,3 220,1184,3 456,508,4 220,1016,4 
236,168,5 43,654,3 17,284,5 270,460,5 343,173,5 544,160,4 586,302,4 473,14,5 588,894,5 341,1007,3 452,167,4 200,215,4 6,133,4 546,346,4 471,549,5 448,982,2 307,662,5 295,143,4 380,150,5 457,429,5 157,186,5 267,12,3 397,709,2 424,78,4 124,454,5 355,285,3 311,190,5 495,560,5 447,332,2 290,209,5 1,24,4 6,584,4 560,409,1 591,1059,2 443,250,5 626,1193,4 629,844,3 532,180,5 416,12,2 498,68,5 560,734,3 373,76,5 50,172,5 613,410,3 558,1100,4 641,69,2 474,380,4 293,234,3 392,25,3 483,8,1 544,664,3 119,741,4 379,314,4 471,583,1 398,780,2 300,63,5 380,14,2 6,193,5 589,1016,4 605,227,5 177,142,4 576,1031,3 513,708,3 608,286,5 620,422,4 161,357,3 404,548,1 372,1109,4 503,1132,3 305,110,4 467,158,3 386,98,5 63,651,2 585,275,3 148,324,2 619,100,2 325,480,1 551,122,3 212,257,4 253,1090,3 520,131,3 396,134,5 619,120,5 509,677,4 2,337,2 481,126,4 636,99,4 189,543,4 618,327,1 415,311,3 623,261,4 424,635,4 307,403,3 17,648,3 500,507,4 542,830,2 486,844,4 197,69,3 626,402,2 420,671,3 559,1162,3 58,236,3 559,249,4 499,128,4 504,377,5 329,500,5 639,10,4 511,301,4 373,567,5 547,602,5 486,2,5 213,195,4 199,116,5 322,257,4 261,0,3 636,689,5 197,131,4 545,894,3 116,405,3 619,378,4 147,174,4 416,394,4 334,354,3 194,432,3 577,379,3 417,287,5 197,654,4 576,594,4 496,185,4 86,413,3 473,1019,3 294,207,5 342,460,2 342,264,2 565,1043,3 386,120,2 542,646,3 115,284,4 129,588,4 621,29,4 332,185,4 547,306,4 599,1230,2 587,1040,2 480,691,4 631,163,4 633,301,5 415,820,4 415,363,2 310,196,4 140,322,4 193,237,5 343,197,5 408,27,2 532,596,3 578,99,4 486,180,4 531,1501,1 262,621,4 452,222,4 641,1038,5 384,501,3 193,90,3 26,8,4 326,481,4 457,1010,3 434,66,4 61,61,3 27,428,5 294,628,5 115,324,3 255,1046,4 12,240,3 84,812,4 89,1136,2 157,201,5 584,638,4 267,1227,1 373,246,1 552,85,3 611,877,2 523,99,5 335,1187,3 406,69,4 455,657,3 629,716,3 275,764,3 471,750,5 434,391,3 287,519,5 518,287,4 509,258,2 68,741,3 388,1040,3 208,180,4 482,98,3 388,57,4 444,345,5 205,1061,3 571,1136,3 559,135,3 408,57,4 81,227,3 609,215,4 31,741,3 467,14,4 253,388,3 565,391,4 76,832,1 141,345,5 12,638,3 345,317,5 415,777,3 71,169,3 600,178,5 505,225,4 390,58,5 553,226,3 14,284,4 446,1045,3 545,929,5 599,175,5 539,239,3 439,1590,5 535,469,5 404,522,2 587,332,5 311,153,4 456,754,4 541,345,3 233,648,3 508,331,2 213,1072,5 502,478,4 456,461,5 330,490,3 532,318,3 129,815,5 591,456,1 386,229,3 551,865,3 12,393,2 373,122,2 641,950,3 586,271,5 616,605,3 285,432,5 372,216,3 159,762,4 517,119,3 82,370,3 386,191,5 532,411,1 598,119,3 449,734,4 297,21,4 22,155,3 441,99,2 206,734,4 287,510,4 71,460,3 200,171,5 505,416,4 627,329,5 641,37,4 622,814,2 617,1070,1 84,209,3 507,120,2 81,287,3 623,747,3 101,227,4 373,173,5 333,935,3 489,595,1 625,287,3 487,482,3 324,442,4 292,657,1 143,474,1 459,272,4 312,132,5 305,285,4 558,237,1 501,538,3 22,173,4 617,723,3 463,297,4 516,49,5 193,448,1 285,94,5 467,422,4 311,486,5 180,819,1 576,280,3 415,1015,5 213,23,3 524,24,5 416,387,3 575,258,2 561,426,4 497,984,1 434,403,2 513,134,4 377,741,4 72,153,5 491,522,4 483,398,4 304,90,2 550,714,1 84,464,4 446,741,3 617,11,4 82,173,5 159,239,4 329,431,4 441,507,3 307,855,4 293,248,5 612,302,4 333,73,2 326,356,4 216,146,3 348,1127,3 499,60,4 344,244,2 616,487,4 611,1062,5 370,185,5 29,872,1 372,602,4 596,935,3 428,661,3 542,95,4 125,325,2 534,120,4 192,279,4 551,14,3 534,420,4 529,171,4 453,611,3 377,280,3 503,451,2 572,491,4 465,86,3 75,342,3 434,182,5 416,943,4 447,304,4 445,886,4 269,693,5 84,68,4 428,68,5 573,330,1 457,168,5 388,480,5 2,301,2 337,195,2 56,7,4 633,976,3 540,81,3 488,258,2 
494,1,2 555,506,5 586,687,3 275,457,4 278,658,5 428,1417,3 297,865,3 468,655,5 333,605,5 535,194,4 576,224,4 591,290,3 496,939,2 313,409,5 206,196,4 533,150,4 642,418,4 599,26,3 463,878,4 20,261,4 475,201,4 388,406,1 209,152,5 591,55,5 103,129,1 428,274,4 386,507,4 626,466,5 483,822,4 467,213,5 292,80,4 233,862,5 529,356,5 289,270,3 445,300,3 269,442,3 453,355,1 245,740,5 597,322,4 605,965,5 289,228,3 173,150,3 560,79,2 238,658,3 13,426,5 499,245,5 415,1219,3 144,327,5 53,337,3 538,154,4 615,354,4 465,23,4 294,558,4 206,1048,3 560,707,3 140,249,4 86,296,3 435,217,4 641,920,5 261,94,3 622,641,3 101,187,2 462,150,4 620,196,4 377,590,4 396,474,4 311,836,4 617,943,2 213,167,3 485,236,4 0,157,3 406,704,4 6,439,1 210,461,4 499,96,4 243,526,5 531,471,5 505,355,3 266,719,3 531,176,4 0,36,2 483,567,3 477,41,5 541,178,4 560,644,3 572,175,3 490,284,5 374,301,5 641,745,3 454,124,3 550,779,5 614,659,4 372,69,4 637,193,3 59,428,5 193,14,4 322,292,4 109,450,4 327,285,5 536,490,4 63,545,3 553,78,5 509,287,3 449,244,4 108,789,2 144,353,4 634,872,3 467,855,4 279,79,3 278,379,4 458,290,4 537,167,3 233,674,4 432,173,5 467,159,3 568,339,4 20,992,4 620,71,2 22,133,4 639,52,4 279,1047,4 168,257,5 605,286,4 454,175,3 624,95,5 396,22,5 341,655,5 643,99,4 236,126,5 319,299,4 617,195,4 473,82,3 531,356,5 629,475,5 352,257,5 559,1170,3 307,402,4 605,1276,3 13,31,5 194,383,2 206,179,3 620,3,4 434,245,5 594,741,2 522,188,5 311,530,5 486,418,3 373,1247,3 285,1037,5 643,747,4 10,548,4 391,245,5 428,1010,4 90,30,5 445,358,3 634,293,3 550,333,4 405,403,5 276,136,3 626,955,2 12,759,1 129,863,2 290,426,4 296,454,4 206,281,4 75,55,5 627,167,4 450,262,2 560,201,3 176,526,4 591,265,1 503,384,4 260,320,3 630,285,3 370,451,2 456,172,5 333,11,5 143,422,5 591,257,5 66,104,4 109,1221,2 452,157,2 496,172,5 629,116,5 229,90,3 537,316,4 512,180,5 313,35,2 626,96,2 435,218,5 197,199,4 456,85,3 137,602,4 43,422,4 566,131,3 333,885,4 489,514,3 607,728,4 621,239,3 614,293,3 528,339,1 275,122,4 209,731,4 20,288,3 82,754,5 471,540,5 578,287,4 327,214,3 487,384,4 150,110,4 377,272,4 405,86,3 449,841,4 296,108,4 550,353,3 416,297,3 300,692,5 255,322,5 480,3,3 542,109,2 143,250,4 498,878,3 108,385,1 101,372,2 505,891,1 632,175,3 404,584,1 496,113,4 360,85,4 197,432,2 214,236,4 480,97,4 405,51,5 598,1094,4 565,478,4 563,120,4 456,553,4 302,1217,4 542,1618,3 285,1074,5 327,683,5 576,229,3 441,275,4 429,293,2 623,741,4 624,257,4 453,519,4 533,369,4 541,203,3 427,537,4 451,187,4 628,57,4 231,31,4 95,55,5 505,139,3 451,173,4 537,567,3 438,272,2 416,708,3 488,679,5 509,1024,3 9,512,4 302,378,4 172,936,4 324,627,3 199,234,2 319,768,3 550,55,5 82,780,4 536,100,2 617,76,3 505,37,3 635,221,5 480,1038,4 12,849,4 368,751,4 525,311,2 252,221,4 192,1073,3 404,481,3 605,257,4 451,285,4 384,1461,4 408,29,4 494,704,4 572,631,4 384,88,4 362,97,3 43,88,5 292,285,3 13,221,4 369,49,4 482,0,4 410,27,4 459,257,3 588,306,1 136,88,5 644,747,1 520,116,4 31,306,2 316,287,4 606,527,4 266,390,3 551,1314,3 12,746,4 550,845,3 267,49,5 424,357,4 249,557,4 93,588,5 27,116,4 521,127,4 441,202,3 434,88,4 641,719,5 598,933,3 372,179,3 386,495,3 84,167,4 503,4,4 386,683,3 314,769,3 248,190,4 496,1,1 404,346,4 341,1527,3 539,125,3 637,182,4 418,477,5 486,173,5 465,567,3 298,93,1 232,94,5 343,371,4 464,637,3 550,1034,2 42,311,4 641,727,4 325,181,2 639,940,5 434,587,4 233,180,3 639,172,5 400,814,3 642,670,4 165,242,3 282,1078,4 57,1005,2 343,1013,4 406,384,4 200,401,2 261,51,3 362,450,2 535,541,1 585,577,3 21,108,4 477,110,3 526,542,4 292,271,4 647,95,5 212,520,4 183,117,2 547,503,5 
542,380,4 12,193,5 641,1468,4 609,526,4 458,263,4 444,894,2 53,150,2 193,639,1 384,31,5 549,537,5 396,196,5 378,382,2 579,293,4 13,287,4 552,478,5 324,1229,3 477,71,1 81,510,3 150,133,4 547,344,1 392,107,2 541,478,4 624,150,3 424,218,2 457,404,4 18,324,4 549,309,5 594,762,3 344,432,4 526,316,4 621,518,3 455,70,3 173,30,4 533,321,4 456,583,4 561,726,5 110,257,4 234,337,1 144,649,4 631,70,4 597,348,4 392,108,3 426,680,5 59,545,4 632,525,4 196,28,3 378,87,4 415,747,4 467,221,4 89,496,5 540,451,3 641,131,3 245,10,4 252,1467,3 591,1318,1 398,287,3 94,202,3 323,49,5 550,290,4 471,1227,4 126,342,5 541,789,3 350,747,4 632,142,4 605,755,3 30,1020,3 619,677,3 61,482,4 193,863,2 639,37,4 456,447,4 229,98,3 455,171,5 144,404,3 12,845,2 469,283,4 541,72,3 333,227,5 307,677,3 629,277,4 532,741,4 194,1029,2 391,323,1 444,323,1 579,122,4 428,289,3 496,780,3 532,20,3 400,234,1 634,299,3 0,180,5 490,1280,3 446,157,3 629,1039,4 193,240,2 177,315,4 621,195,3 71,520,4 434,23,4 537,212,3 297,204,5 492,257,5 384,652,4 59,274,4 384,275,3 107,318,5 532,193,4 20,199,5 550,657,5 245,227,3 591,951,4 412,269,4 584,637,4 145,270,3 488,285,4 420,11,5 531,150,5 513,404,2 502,167,5 90,186,5 523,678,2 619,1034,4 496,79,3 194,312,5 585,469,4 461,312,5 510,907,4 531,913,5 626,174,1 436,98,4 450,988,1 499,297,4 454,86,3 496,287,2 5,271,4 644,68,4 324,15,1 73,293,4 319,409,4 48,216,3 428,651,4 526,506,5 392,71,4 425,133,4 129,288,5 170,905,3 473,208,5 84,630,4 540,1034,3 243,392,3 560,169,4 267,742,1 84,179,4 206,508,4 641,104,5 280,309,4 360,658,5 647,472,3 448,747,2 58,653,4 560,301,4 334,299,5 534,130,4 496,108,4 329,167,3 525,935,5 444,1600,1 356,545,5 101,217,3 307,238,3 594,128,3 297,392,4 122,241,5 441,185,4 638,487,4 285,314,5 416,287,3 560,1152,3 235,865,3 291,461,3 503,536,3 621,119,1 428,182,4 397,1019,3 455,231,2 355,1293,4 12,429,5 345,1227,4 400,477,2 391,126,5 605,63,5 48,1020,5 449,713,4 623,271,5 127,69,3 473,226,4 572,182,3 384,472,3 188,190,5 311,477,5 499,741,3 633,1007,2 535,62,4 576,807,3 6,233,5 428,94,3 505,257,4 56,41,5 478,81,4 523,669,4 631,24,1 647,287,4 489,276,3 505,877,3 329,992,4 629,126,2 632,287,2 472,274,5 193,154,3 487,82,4 233,524,4 231,126,3 513,650,4 455,805,3 565,24,2 550,824,5 174,171,5 392,227,3 499,43,1 243,168,5 5,404,1 228,343,5 587,288,2 183,1159,5 480,426,4 623,951,3 591,237,5 380,13,5 542,184,4 637,471,3 302,108,4 493,426,5 641,1032,3 524,117,3 211,527,5 270,493,4 633,918,2 386,324,2 403,900,2 82,160,4 590,274,4 43,259,4 602,180,5 608,877,1 5,165,4 61,404,3 106,287,3 534,43,4 197,1243,2 35,1025,5 436,446,4 266,217,4 587,698,4 253,1049,3 404,57,1 435,567,5 483,256,5 426,358,5 12,762,1 336,830,1 275,46,4 453,648,2 159,191,5 638,305,4 17,133,5 456,88,5 233,530,3 20,560,1 6,402,4 541,208,4 299,287,4 458,545,1 599,227,3 89,1133,3 533,324,4 384,130,4 143,1225,4 17,968,3 530,311,5 129,23,5 641,1414,4 594,596,2 310,1040,3 633,285,5 602,747,5 591,272,5 323,288,5 550,267,4 549,303,3 248,55,5 388,190,5 9,528,3 576,654,4 392,720,2 55,96,3 531,525,5 555,63,5 505,46,4 599,225,4 144,978,3 620,558,5 384,524,4 411,63,4 319,234,3 584,223,2 488,988,3 633,123,3 404,148,1 279,78,4 544,688,4 533,741,5 12,354,3 268,364,2 430,688,3 449,3,3 531,418,5 15,0,5 92,150,1 620,292,3 616,133,3 145,318,4 404,74,2 296,229,2 442,285,5 587,315,5 386,714,5 98,116,5 220,177,4 373,1209,4 220,180,4 81,20,1 633,220,1 390,490,3 225,651,3 646,553,4 478,188,2 643,299,5 205,342,1 594,367,1 206,69,3 605,759,3 587,651,2 506,1088,5 469,99,4 186,68,4 465,126,3 584,1017,2 25,120,3 541,683,4 150,964,5 421,97,5 493,1196,3 398,550,1 
604,173,3 326,203,4 620,230,4 245,575,1 541,817,4 535,138,4 562,253,3 222,283,2 312,494,2 497,482,3 641,767,4 379,151,2 498,515,4 270,647,4 333,317,4 492,272,4 620,66,4 373,553,2 402,596,2 415,661,4 620,207,4 334,341,2 108,97,4 150,631,4 616,216,1 345,275,1 279,677,2 649,629,5 392,269,5 591,688,2 513,13,3 536,181,4 295,1159,4 250,44,5 183,844,3 188,82,4 307,366,4 624,427,5 552,640,4 447,1175,2 84,750,3 589,125,5 421,550,2 449,301,5 587,277,5 643,822,4 477,742,1 261,442,3 621,520,5 625,323,4 20,980,2 377,527,5 200,221,3 434,175,5 276,507,4 611,8,3 159,167,4 486,299,5 617,281,3 586,872,3 302,800,1 592,65,5 175,926,3 641,209,5 536,258,1 457,191,4 341,481,5 318,258,2 327,751,2 143,69,4 150,951,3 641,134,3 497,274,3 197,264,3 197,1244,4 329,256,5 310,529,3 551,1619,3 603,233,5 275,273,3 565,156,5 62,301,3 451,431,2 295,281,4 605,745,5 458,147,5 275,200,5 193,482,4 105,274,4 494,287,4 499,1017,3 75,1018,3 492,55,4 524,594,2 623,885,4 428,90,3 575,236,4 507,1134,3 1,272,4 153,944,3 362,681,1 617,391,3 230,470,5 103,310,1 638,57,3 93,53,4 238,427,5 263,477,5 649,516,3 478,323,1 117,209,5 454,88,3 456,469,5 565,185,3 12,285,3 647,297,2 435,215,4 367,560,2 591,0,4 518,877,5 283,689,3 486,683,5 621,104,3 12,866,5 520,10,4 531,688,4 150,13,5 560,44,3 416,23,3 453,453,3 404,355,5 598,927,4 300,701,4 307,31,5 386,514,5 500,455,3 270,844,1 542,736,3 555,287,4 17,237,5 642,153,4 405,638,4 449,120,3 215,200,3 560,13,3 444,741,1 600,289,3 624,691,3 636,828,2 200,117,1 405,711,3 188,484,4 560,1109,2 576,1530,4 473,962,5 255,1045,4 71,791,3 333,901,4 590,933,3 420,99,4 647,673,3 134,259,3 412,876,3 393,390,4 285,138,3 304,314,5 221,825,2 270,189,4 374,769,3 12,412,1 513,728,4 560,714,3 43,195,4 312,891,4 392,39,1 542,27,4 534,468,3 558,209,4 279,526,5 637,49,4 404,809,1 343,707,4 157,741,4 632,777,2 10,97,2 56,471,1 317,402,2 486,110,3 362,171,5 93,517,5 631,201,4 531,600,3 600,139,1 377,786,3 536,398,2 496,650,4 614,639,3 141,513,5 143,261,3 551,6,3 562,49,5 197,184,3 357,1020,5 313,404,4 2,321,3 213,168,4 550,526,5 531,657,5 646,747,4 313,55,1 58,1114,3 449,429,4 229,627,3 436,211,3 395,221,5 642,228,3 538,196,5 550,97,5 591,261,5 621,193,4 178,339,4 617,78,5 567,1202,5 480,196,3 534,513,5 487,10,1 649,513,3 473,788,4 650,291,2 588,270,3 553,251,4 0,135,3 480,152,5 603,184,2 591,271,5 544,214,3 486,955,4 520,379,3 647,303,5 143,465,2 496,525,3 547,314,3 307,10,5 4,370,1 177,49,5 587,495,3 377,233,4 591,60,4 626,299,4 263,1473,2 345,596,3 180,843,1 344,78,4 80,236,4 586,311,2 421,664,5 209,513,5 542,70,4 471,65,5 542,747,3 550,1078,1 536,552,2 591,31,5 578,1046,3 393,225,2 91,1210,3 500,312,3 617,209,3 560,97,4 6,577,3 362,1100,3 209,14,4 93,190,5 135,743,5 12,768,3 404,602,3 456,746,4 398,236,3 253,621,4 61,356,4 500,256,4 392,823,3 396,434,4 242,707,3 550,911,3 0,256,4 69,410,3 310,508,3 333,552,1 456,654,5 641,442,2 209,391,3 307,20,3 633,149,3 497,201,3 17,626,3 533,686,5 392,272,3 307,116,3 502,639,1 333,302,4 566,180,1 393,539,4 187,21,5 252,481,5 127,769,3 591,329,3 617,49,5 405,519,4 567,1004,1 627,301,5 542,164,4 638,169,4 206,747,3 522,7,5 412,14,4 397,86,4 177,318,1 233,289,3 571,1170,3 531,928,3 353,58,5 405,516,2 372,274,5 598,987,4 72,212,4 415,537,4 179,11,2 594,290,3 559,1007,3 478,240,3 456,452,2 416,401,4 323,281,5 611,923,5 442,677,2 278,759,3 311,1450,4 234,317,5 304,663,2 353,189,4 98,366,4 290,939,3 548,287,4 469,117,4 448,60,5 471,770,4 471,383,3 302,691,4 316,878,3 255,355,3 193,173,4 631,27,3 465,343,5 505,929,1 473,512,5 607,57,2 471,227,5 490,235,4 605,32,4 495,553,2 565,855,5 
436,136,5 478,196,4 234,85,4 495,88,5 638,486,5 495,417,3 108,583,2 296,242,1 531,1225,4 143,47,5 233,612,4 502,204,4 42,587,4 488,314,5 263,602,5 636,830,1 212,462,5 568,474,3 471,89,5 590,87,3 302,97,5 576,167,5 605,287,4 415,1131,2 275,280,3 221,928,1 550,233,4 569,301,4 15,945,5 451,780,3 75,513,4 294,68,5 455,451,2 255,450,4 415,23,5 628,8,4 109,683,4 513,509,3 384,134,3 593,13,4 206,630,2 620,575,2 233,388,3 629,870,2 499,707,5 541,654,4 626,161,3 394,96,5 404,1221,1 416,185,5 434,166,3 400,650,4 299,299,4 607,1118,5 122,513,5 504,552,4 566,477,5 522,71,4 620,300,4 294,227,4 372,1227,2 532,22,3 502,317,5 626,580,3 592,500,2 263,745,3 109,420,4 133,325,5 325,218,2 25,49,4 454,70,3 270,167,2 278,557,4 295,1283,4 480,189,5 618,61,1 638,461,5 544,207,3 461,309,5 540,14,3 448,58,5 434,923,3 471,72,4 268,172,1 285,209,5 462,269,3 396,422,5 193,379,1 638,658,3 547,326,3 446,596,3 641,96,4 499,87,4 362,173,4 453,496,3 478,69,4 331,104,2 478,184,4 523,96,5 621,692,4 278,165,4 525,0,5 558,526,4 136,747,4 463,285,3 341,207,4 550,221,5 233,87,3 644,178,5 96,81,4 245,177,5 333,483,5 213,7,4 533,824,4 344,43,3 553,173,5 5,193,4 600,739,4 345,362,3 432,473,3 642,1100,3 435,761,4 294,212,5 478,397,1 109,688,3 326,270,3 542,185,3 424,288,1 596,297,5 68,293,2 408,460,3 461,314,4 449,1424,4 184,24,4 547,6,5 233,312,4 533,985,5 518,747,2 12,31,4 641,1022,3 206,184,4 273,299,5 209,392,3 302,1144,2 560,134,4 451,478,5 544,403,4 641,132,5 373,217,4 441,634,4 428,1,3 641,49,5 225,812,4 643,293,4 94,289,3 143,326,3 159,150,4 317,192,3 395,454,2 294,195,5 416,512,5 12,203,5 585,228,3 380,120,2 129,894,5 193,80,2 591,333,3 617,377,4 173,68,5 340,681,3 372,416,3 653,3,4 51,526,5 594,1008,4 75,1154,2 413,300,3 499,638,4 621,167,4 169,321,5 536,300,2 513,630,4 642,545,3 454,39,3 526,516,5 159,152,3 503,317,5 600,481,4 393,415,5 631,418,4 101,71,3 111,936,4 320,211,3 550,173,4 544,54,3 542,87,4 160,99,4 392,346,4 75,171,5 519,241,5 314,644,4 642,1148,3 394,239,1 604,753,3 486,822,1 469,507,5 487,63,5 475,366,3 367,183,5 535,1117,2 327,314,4 89,904,4 80,618,3 183,1060,3 330,220,4 492,58,5 457,244,2 243,762,4 436,752,4 20,6,5 494,478,4 177,154,4 536,743,3 359,63,5 62,285,4 633,595,3 393,738,4 550,239,3 264,326,3 638,708,3 93,799,3 278,1241,1 117,799,4 620,116,5 362,205,2 652,163,3 528,1037,4 540,224,4 617,704,3 12,4,1 526,478,4 276,292,4 587,287,4 285,131,5 534,268,4 315,68,3 212,181,4 266,179,5 362,596,4 292,158,3 591,167,5 307,48,3 499,61,3 513,108,3 547,11,5 289,251,3 63,433,4 145,330,5 298,1225,2 347,1059,3 473,1015,3 638,552,3 367,291,4 641,150,3 619,624,3 20,4,2 190,338,3 496,509,3 542,186,4 487,132,4 65,236,4 347,122,5 275,8,5 279,754,2 564,9,5 216,561,3 64,14,5 121,1043,5 643,256,5 520,257,4 565,97,4 603,412,3 647,585,3 652,572,1 503,1521,3 42,780,3 579,280,2 304,41,4 400,96,4 5,513,5 587,1507,3 70,281,3 541,229,4 605,21,5 629,24,2 455,921,4 544,70,5 558,527,4 325,392,4 636,594,3 379,11,5 496,94,4 538,480,4 560,240,2 590,71,3 405,664,3 534,970,2 618,240,5 177,183,5 649,842,2 129,341,3 93,218,4 124,426,4 492,6,3 377,925,1 292,64,3 652,401,1 464,477,4 404,566,2 566,616,4 605,6,4 384,744,4 143,499,4 591,318,4 86,545,3 89,862,4 319,384,4 124,299,5 373,97,5 584,1157,4 377,293,2 117,959,5 421,435,3 532,96,2 531,352,2 538,526,4 560,92,4 494,183,5 532,312,5 635,595,5 387,258,3 558,426,4 473,434,5 591,178,5 554,325,4 633,822,3 158,194,3 319,120,5 118,985,3 541,99,4 362,170,5 443,274,4 109,375,2 622,203,5 552,602,5 636,279,2 103,344,4 58,617,4 388,615,4 245,797,2 560,958,3 621,366,4 652,443,1 550,182,4 591,344,4 
327,7,3 457,1337,3 436,280,1 35,877,5 406,422,4 408,632,4 554,270,3 590,920,4 12,331,3 486,132,4 414,322,2 496,107,3 390,420,2 568,236,4 464,318,3 447,886,2 449,685,4 421,1006,4 600,583,4 586,342,4 600,417,2 180,325,1 200,566,3 591,27,4 93,584,3 12,736,4 278,750,4 150,1298,4 229,419,5 532,209,5 464,474,3 462,116,3 532,1032,4 617,548,2 522,203,5 499,180,3 477,167,4 186,735,4 536,548,2 412,268,4 143,342,2 559,974,3 629,221,4 13,627,5 587,450,5 531,196,5 649,746,3 513,734,4 599,264,3 550,11,4 543,257,3 285,748,3 637,409,4 333,175,3 506,351,1 621,71,3 492,263,3 192,342,1 628,527,5 404,535,1 467,1013,3 473,641,4 342,2,4 591,168,5 449,774,4 572,1011,2 284,204,4 254,263,2 22,203,3 587,427,4 234,1192,4 312,120,4 362,55,5 652,49,5 91,1045,3 345,167,4 89,285,5 624,601,3 411,339,4 626,156,4 6,435,5 653,422,4 427,349,4 342,282,4 641,374,1 398,925,2 592,596,2 150,660,4 179,97,5 9,663,4 415,184,4 487,110,4 566,175,5 416,96,4 313,782,3 331,209,5 300,149,4 278,424,4 21,522,5 408,630,3 628,1108,4 491,82,4 333,854,3 641,257,3 0,236,2 633,978,3 404,56,1 360,177,5 268,191,4 506,878,5 548,224,3 415,395,2 652,1043,1 534,434,5 641,71,4 608,900,1 276,6,2 353,49,4 531,925,3 408,617,4 641,467,3 465,61,3 438,245,4 404,378,1 626,648,4 404,1473,1 221,224,1 101,385,2 617,142,4 108,71,5 311,514,5 302,870,1 180,918,1 14,934,3 212,482,5 451,169,4 392,3,4 47,135,4 129,64,4 559,1332,3 607,477,3 352,244,4 136,1116,2 499,404,4 326,25,3 647,187,5 209,234,3 306,0,5 434,683,4 324,649,3 129,762,5 535,317,5 523,468,4 310,699,3 449,311,4 654,273,3 332,167,4 560,257,2 103,306,2 591,651,4 325,215,2 630,345,4 554,146,4 591,532,4 530,357,1 266,155,5 292,202,3 545,716,5 610,302,3 300,762,4 449,204,4 435,594,5 107,254,2 213,88,4 343,0,3 631,57,3 591,136,5 59,49,5 326,402,3 365,233,1 333,633,4 59,798,4 268,402,1 619,562,5 566,503,4 29,314,4 492,180,5 435,722,3 455,671,1 581,287,3 140,125,5 620,1011,5 366,332,4 536,1196,3 144,122,4 393,62,4 270,579,2 536,1068,2 156,514,5 556,1069,2 460,346,4 362,510,4 595,287,4 254,342,2 270,281,2 21,20,4 454,677,3 17,735,4 135,524,5 416,922,3 275,1117,4 65,294,3 5,176,4 531,1220,5 98,762,5 416,61,3 384,663,3 113,658,4 621,65,3 326,200,5 246,258,3 298,170,4 405,918,2 647,104,3 383,299,4 233,223,4 61,198,4 621,363,1 600,20,3 621,372,1 453,1189,3 446,152,4 238,510,5 255,281,3 550,1375,1 404,711,1 487,495,4 591,832,4 646,325,3 560,272,5 331,650,5 233,504,4 180,882,1 322,318,2 415,208,5 578,48,3 177,808,4 356,110,5 495,32,4 304,170,5 524,110,4 544,992,2 469,247,3 270,61,2 616,88,4 455,474,5 599,402,3 275,522,4 199,495,5 617,149,2 399,293,3 652,271,4 560,227,3 489,285,2 68,236,3 591,58,4 384,186,4 504,422,4 448,99,5 404,1224,1 550,826,5 591,63,5 494,769,3 193,579,4 585,81,2 586,299,4 551,221,4 129,526,5 534,638,4 233,174,2 199,47,2 8,401,4 555,1064,4 467,172,5 560,460,3 434,120,3 599,209,4 310,701,3 224,603,5 270,3,5 502,37,3 487,182,4 434,545,4 504,236,3 429,164,4 428,117,3 649,551,4 605,420,4 654,125,2 626,548,3 313,1015,4 377,1530,4 650,305,5 88,201,3 502,557,5 553,222,3 535,53,2 98,24,3 192,293,1 546,257,4 641,500,2 594,14,4 220,345,5 584,1534,4 243,221,2 641,965,5 153,641,3 494,194,5 153,60,4 647,153,5 591,431,1 127,477,5 167,290,4 346,244,5 43,98,4 492,116,5 532,90,2 600,1614,4 626,719,2 584,206,5 487,210,4 158,254,3 573,244,5 329,194,3 538,57,3 526,530,3 637,126,2 482,196,3 173,339,5 649,1473,3 364,472,4 436,153,4 629,11,4 620,150,5 523,1203,3 652,565,5 233,1038,3 320,603,5 379,501,1 504,132,5 517,116,5 104,301,5 590,234,3 502,185,5 243,316,5 124,200,3 118,684,4 386,618,1 609,1557,3 344,233,4 285,173,4 
10,730,4 496,71,3 221,577,3 474,314,4 404,1582,1 379,662,4 503,322,4 206,470,3 620,299,3 1,9,2 341,134,3 510,321,3 606,846,4 607,1123,4 395,716,3 574,193,4 457,13,5 40,515,5 405,130,2 639,384,5 448,309,3 189,309,4 388,1097,4 654,692,3 435,185,3 80,288,3 654,1402,3 180,1354,1 642,188,4 160,495,3 454,99,4 501,892,2 600,126,4 297,142,5 637,160,4 377,281,4 12,402,2 653,257,4 592,99,5 453,72,3 377,1062,4 552,110,4 535,82,5 378,562,2 517,470,3 6,526,5 485,594,2 591,822,1 59,210,4 449,80,4 238,173,4 544,738,4 302,1152,3 378,841,4 107,283,3 444,1009,1 404,576,3 541,587,4 653,317,5 479,602,4 154,287,3 338,198,5 398,1085,3 21,1001,1 292,1047,3 653,249,1 428,474,4 345,10,4 48,269,2 551,273,3 216,299,4 427,337,4 626,703,4 591,781,2 482,229,5 78,332,2 296,346,3 424,451,2 620,248,5 639,203,5 42,1051,1 13,123,5 108,927,3 368,267,5 473,274,3 547,116,4 560,218,1 636,362,2 654,197,4 619,147,3 494,842,3 523,180,3 535,131,4 544,134,4 428,483,5 415,418,4 310,100,4 310,683,4 526,175,2 57,11,5 482,449,4 638,165,3 565,404,5 124,750,5 120,82,4 591,819,3 647,193,5 408,481,4 496,184,3 554,268,5 473,498,5 311,206,5 582,197,4 461,287,5 127,482,5 455,345,5 290,234,2 454,517,4 456,136,5 402,369,3 494,1117,5 486,205,4 245,203,3 404,1052,5 467,55,5 233,176,3 454,627,4 500,287,4 565,212,5 307,530,4 631,95,5 494,144,4 488,307,4 619,280,5 453,872,2 550,478,3 82,69,4 547,531,4 628,186,5 447,901,4 654,669,3 307,64,3 250,247,4 212,191,5 361,688,5 485,110,4 529,55,3 631,158,3 129,464,5 263,1474,2 267,150,3 531,147,5 406,647,3 643,49,4 15,57,4 118,916,4 94,206,5 72,151,3 353,134,3 471,184,5 401,474,3 199,160,4 578,182,4 490,257,4 641,595,5 406,446,3 342,448,5 215,236,5 495,49,5 465,305,5 533,819,3 292,714,3 432,302,4 51,462,5 591,235,3 217,658,4 500,123,4 445,326,2 529,162,3 486,1018,5 642,55,5 311,525,5 560,889,1 327,448,3 58,656,4 559,596,2 566,505,5 318,750,3 585,209,4 480,677,3 487,257,4 306,738,2 373,448,4 98,590,4 449,1285,3 217,268,4 278,662,3 89,152,5 504,160,3 268,2,3 428,21,5 623,136,4 384,253,1 356,830,3 449,524,3 591,618,1 269,225,4 101,1227,1 592,70,4 566,496,5 220,214,4 541,22,5 451,442,5 486,27,4 613,8,4 527,180,5 180,1151,2 115,348,2 621,251,1 505,402,4 129,530,5 93,1072,5 415,684,3 314,208,5 551,1151,3 424,217,3 459,293,2 654,522,3 536,648,3 373,196,5 532,132,5 6,619,4 365,670,5 654,735,3 591,681,4 592,723,3 503,1040,3 605,529,4 137,116,4 594,368,3 554,116,4 259,890,5 22,18,4 586,681,3 250,249,3 215,99,5 200,126,5 267,234,3 503,542,4 558,198,5 345,90,1 100,6,3 46,287,2 213,515,5 360,706,4 499,1008,4 150,87,5 323,292,4 436,55,4 41,202,4 337,51,5 416,939,2 615,688,4 649,308,3 10,38,3 524,251,3 638,1019,4 494,621,2 510,947,3 233,784,3 58,198,4 424,332,3 600,587,3 151,526,4 617,160,4 609,567,4 620,1117,3 592,181,2 57,510,5 637,553,3 203,1295,5 315,672,2 619,681,2 638,1193,5 492,741,3 456,738,4 379,59,4 575,55,3 607,672,4 505,1243,2 654,52,2 109,1246,2 180,755,2 279,154,5 566,490,3 602,428,5 436,641,1 496,720,3 177,61,4 496,18,4 478,944,5 534,128,5 439,327,3 212,55,5 509,875,2 586,1623,2 415,72,3 356,274,5 535,51,3 623,326,4 282,203,4 616,99,4 0,130,1 435,648,5 357,638,4 12,16,1 386,189,5 311,174,3 341,755,3 177,229,4 404,54,1 462,1032,2 13,819,3 278,150,4 476,723,4 624,165,3 620,768,3 534,115,3 452,237,4 216,221,5 275,452,1 416,624,4 520,624,3 639,731,4 5,11,4 428,318,3 408,1193,5 362,777,4 98,331,3 269,741,2 380,343,3 467,199,4 654,65,2 275,366,3 43,630,1 638,526,4 200,225,3 560,316,3 654,280,2 183,49,4 607,331,4 441,974,3 647,68,1 518,1061,5 503,194,4 592,124,4 312,192,4 614,68,4 638,516,2 124,411,3 473,237,4 
233,39,2 631,236,3 15,203,5 319,232,4 535,448,4 654,51,3 653,557,3 609,401,5 40,429,5 235,195,1 569,325,1 616,241,3 359,270,2 624,208,3 567,987,1 406,179,4 587,219,5 644,674,4 233,605,5 531,50,5 532,82,2 528,293,4 654,223,3 575,274,3 550,974,5 428,628,3 611,321,3 94,181,2 654,917,2 546,311,4 449,76,4 633,459,3 615,749,5 600,1078,3 380,486,5 592,583,3 55,1056,3 159,194,4 649,196,4 58,522,4 393,401,4 621,779,4 129,314,4 312,176,4 115,747,2 373,184,5 150,952,5 346,929,2 88,274,5 137,221,4 642,142,4 542,577,3 99,1233,1 307,601,4 424,144,3 639,238,5 449,920,4 326,1069,4 483,229,5 300,417,3 589,99,5 654,343,4 626,739,1 654,13,3 526,207,4 550,155,5 294,96,5 456,552,5 501,677,3 404,1467,1 613,280,3 595,293,4 602,379,4 380,1097,4 502,525,3 648,0,5 408,480,3 451,63,4 59,704,4 93,80,4 657,275,4 548,677,3 315,481,3 338,63,5 379,99,4 271,174,5 434,8,4 552,173,4 376,681,3 6,502,4 534,726,4 550,874,4 314,202,3 620,16,4 565,217,4 6,443,5 222,256,4 416,363,3 532,755,4 400,658,3 654,934,3 501,341,4 350,331,5 295,687,1 179,195,5 428,844,4 206,201,3 235,428,1 455,195,4 606,381,3 532,57,4 405,844,3 554,301,3 333,142,2 180,1021,1 591,305,5 436,237,5 444,99,2 503,211,4 398,541,3 335,41,5 647,410,2 373,985,3 322,287,3 243,628,4 447,285,2 566,645,5 372,471,3 523,479,4 553,42,3 221,711,3 415,650,4 140,24,5 449,1047,3 614,198,5 415,180,5 613,287,2 436,184,5 584,970,3 437,244,5 584,1322,3 529,59,5 476,35,4 370,434,3 254,894,2 638,209,3 547,22,5 84,327,3 279,392,4 415,675,5 552,217,4 398,1540,3 553,13,4 566,640,5 306,1027,4 652,256,3 91,173,5 140,984,4 654,178,4 302,823,3 341,407,5 655,688,2 655,326,2 373,565,3 578,215,5 620,62,1 188,149,4 386,557,4 398,214,2 9,529,4 497,287,3 404,432,4 642,483,5 615,314,4 302,1022,2 90,191,4 619,500,4 550,79,1 108,110,4 243,696,4 62,1009,3 591,214,5 15,226,5 91,115,3 294,64,5 439,749,5 649,49,5 591,1608,1 625,681,3 553,317,5 605,264,4 392,1538,2 91,382,1 532,71,2 498,691,4 297,167,5 400,734,5 502,481,5 449,964,4 449,25,5 509,332,3 120,117,2 581,92,5 561,476,4 638,197,2 620,99,5 270,404,2 541,209,3 180,757,1 560,731,3 591,249,4 523,780,1 503,217,4 652,225,3 275,678,3 552,1199,3 478,172,5 646,603,4 55,225,4 279,293,2 244,596,4 84,605,4 586,258,4 636,331,4 611,14,4 525,249,2 619,239,5 347,24,4 12,127,1 594,281,4 278,168,5 652,575,1 299,408,4 461,21,5 25,312,5 302,473,5 200,317,5 377,929,2 467,1050,2 449,650,5 108,226,5 621,131,4 535,214,4 246,1021,4 617,167,5 42,481,4 504,76,3 434,226,4 536,734,3 514,681,4 28,181,4 327,636,3 659,81,2 491,68,3 520,267,5 446,558,3 199,192,4 531,481,5 641,1010,3 342,67,1 558,522,4 200,217,4 326,149,4 624,168,5 451,403,4 565,95,3 89,646,5 523,683,4 291,1072,5 617,8,3 560,7,3 278,678,4 617,240,4 533,146,5 461,299,5 150,468,1 240,688,3 108,394,3 159,921,5 345,143,4 503,105,3 536,457,3 462,106,3 654,97,4 163,251,4 302,69,4 178,287,5 473,281,4 609,488,4 634,268,5 444,1597,1 503,385,3 594,993,4 114,239,5 657,7,5 581,123,4 476,288,5 618,331,4 267,575,1 391,57,4 377,693,3 621,721,3 99,269,3 124,429,4 527,522,4 658,65,4 85,241,4 446,95,5 465,209,4 654,725,2 550,716,3 494,53,5 486,920,5 416,432,4 393,237,5 654,944,2 450,300,4 495,172,5 622,691,3 151,1299,4 335,1047,4 338,173,4 17,385,2 554,356,4 609,96,3 353,185,4 531,449,2 304,178,1 389,328,3 68,6,5 585,248,2 560,228,3 637,171,4 584,1120,4 483,561,3 320,518,4 435,46,4 289,210,3 340,1279,2 362,200,2 647,132,4 623,241,4 117,200,5 275,410,4 59,68,4 306,587,4 57,254,4 542,225,4 268,416,2 522,475,3 652,152,2 487,237,1 576,240,5 175,236,3 652,1209,2 492,47,4 476,777,4 55,41,4 329,68,5 275,767,3 307,664,4 523,1453,3 
71,646,1 638,961,1 587,214,5 436,89,3 55,52,3 392,143,3 605,409,3 628,161,5 451,426,4 240,269,3 550,117,5 494,923,3 377,297,3 233,479,4 528,244,3 238,744,5 560,230,2 89,233,4 57,24,4 6,590,3 281,267,4 620,382,2 605,422,5 145,339,4 374,572,4 292,76,2 553,208,4 654,184,4 362,639,2 626,723,2 214,516,5 558,317,5 633,596,4 335,784,1 226,1066,4 444,267,1 333,78,4 453,477,2 225,235,3 486,91,4 260,244,4 412,689,4 642,819,3 58,754,4 278,1011,5 633,128,4 439,312,4 621,431,5 341,432,5 639,1072,5 477,1100,4 637,180,5 276,471,1 622,482,5 591,483,4 647,1049,4 649,520,3 221,43,3 335,1445,1 217,641,3 592,1027,3 505,72,4 132,901,3 547,301,4 603,200,3 531,163,5 263,115,4 585,160,5 550,590,5 587,750,3 628,268,3 93,163,3 384,520,3 314,285,5 496,182,4 647,1243,3 388,922,5 275,192,4 253,322,3 419,477,3 57,212,5 598,236,5 654,346,3 187,117,3 428,499,1 614,706,3 649,216,3 238,1331,3 649,6,4 150,407,5 103,749,5 158,6,5 329,204,3 451,464,5 505,146,3 425,1450,4 275,182,5 576,10,2 25,590,3 296,751,4 504,95,4 537,182,4 159,513,4 659,391,2 221,327,5 628,331,4 542,585,3 392,770,3 391,256,5 225,88,5 657,256,4 470,421,5 649,736,2 653,820,3 200,185,3 536,52,2 275,724,2 647,553,4 537,30,3 84,581,4 353,80,3 613,545,1 498,126,4 491,285,4 628,203,5 362,283,2 536,110,3 581,180,4 262,442,5 584,69,5 645,689,3 200,272,2 576,422,4 380,886,3 217,590,3 360,78,4 5,301,4 436,481,5 346,549,5 64,178,3 591,507,5 550,171,2 94,484,5 182,229,5 402,128,4 479,503,4 154,326,2 323,258,5 540,476,4 220,285,4 188,1401,4 327,327,4 333,133,5 180,262,1 598,110,5 542,518,4 641,1180,2 433,818,3 243,70,4 323,474,5 654,1157,3 607,237,5 565,326,3 540,7,5 643,242,4 89,193,5 522,166,4 624,187,4 377,179,3 298,729,4 118,254,3 398,209,3 196,306,3 457,528,3 51,472,4 253,392,3 213,297,3 517,590,3 489,150,1 603,669,5 5,308,2 498,271,5 29,49,3 513,274,5 647,796,3 520,226,3 132,242,3 184,115,4 471,55,5 357,7,5 261,1013,5 644,1017,3 592,691,3 84,151,5 523,493,4 12,470,1 642,116,3 10,602,4 298,0,3 523,830,3 576,202,3 61,581,4 295,110,3 623,474,4 531,251,4 94,208,4 6,383,3 279,1,3 451,210,2 20,991,2 333,41,4 93,929,2 233,316,2 17,413,4 402,863,4 654,204,3 473,210,5 71,186,4 628,462,4 487,499,4 6,486,3 269,420,5 659,166,2 326,110,4 53,306,4 57,496,2 60,332,3 326,681,3 591,704,5 422,1237,3 317,76,3 396,640,5 311,489,5 488,326,5 662,587,4 604,1039,2 652,435,1 638,1120,2 457,277,2 373,1009,5 523,450,3 633,1198,1 311,606,5 0,108,5 293,1133,3 89,125,2 144,315,5 647,1071,2 550,504,5 561,65,1 652,621,3 323,0,5 39,271,2 600,670,4 562,411,2 614,126,5 536,175,2 327,181,2 517,125,4 531,500,5 585,650,3 564,1017,5 200,232,4 496,272,4 414,321,4 597,349,4 531,291,4 396,99,5 497,185,4 248,3,4 641,1177,3 587,722,2 644,196,5 294,739,4 83,257,4 168,203,3 89,518,5 353,285,4 526,201,3 17,752,4 416,50,3 12,298,3 459,249,2 483,848,3 629,257,3 388,415,4 471,664,4 652,1013,2 129,577,5 464,510,4 471,6,5 15,320,3 69,428,3 641,551,4 647,97,4 320,169,4 645,303,3 642,202,4 190,330,4 188,377,4 456,190,5 654,549,2 81,677,1 387,627,4 335,14,4 370,430,5 420,81,4 249,992,5 492,454,5 626,171,3 540,117,4 400,10,2 151,65,5 554,168,5 485,1368,3 649,398,3 302,21,5 338,73,4 384,432,4 331,755,2 536,581,3 270,486,4 576,337,3 624,212,4 0,181,4 654,302,4 533,273,3 46,873,3 513,530,3 492,482,5 42,968,5 639,719,3 649,707,3 659,95,3 587,281,5 226,128,5 457,97,3 266,140,4 463,292,5 300,162,3 654,804,2 12,666,1 340,329,5 566,305,3 560,80,2 605,548,4 652,82,5 371,175,3 503,526,4 654,57,3 533,618,4 359,171,4 107,123,4 362,746,5 393,495,5 218,935,4 290,923,4 649,479,5 652,1139,1 285,887,5 616,589,1 465,55,4 233,844,3 
290,89,5 622,628,3 297,945,3 362,549,4 591,1038,4 478,212,4 424,23,2 95,88,5 497,135,3 634,747,2 607,336,4 93,734,5 513,482,4 428,297,5 473,696,4 405,486,3 486,411,1 642,596,2 592,82,5 197,236,2 310,63,5 144,87,5 523,1128,2 292,401,2 658,506,5 654,161,3 541,194,3 504,509,3 641,52,2 448,935,5 654,99,3 471,67,5 270,172,4 642,927,4 504,659,3 372,745,4 456,208,5 525,342,3 587,831,1 526,651,4 17,746,3 496,23,4 289,198,3 372,1118,5 160,639,2 434,162,3 488,260,2 596,292,5 503,659,4 416,256,3 457,650,3 654,1650,4 392,353,4 534,209,5 654,50,2 456,567,4 542,409,3 441,366,2 591,173,5 600,185,4 102,210,3 415,318,5 629,242,2 499,995,1 278,300,4 38,747,5 314,120,2 436,264,3 628,885,3 369,479,4 406,709,4 514,322,3 463,175,4 418,614,5 505,197,2 591,918,5 397,429,4 325,615,5 437,20,2 378,130,5 659,844,3 384,182,3 646,587,4 513,987,2 63,530,3 604,132,5 522,162,5 544,0,5 478,182,5 645,681,3 629,70,3 362,708,4 22,210,4 93,653,5 150,194,3 397,1,3 428,565,3 372,165,5 494,150,5 71,470,4 639,662,5 658,645,4 641,64,4 615,271,5 155,805,3 592,157,3 457,300,1 235,749,5 654,726,2 591,1264,1 199,482,5 516,0,3 193,370,3 372,238,3 503,222,5 485,321,2 223,581,4 0,70,3 652,156,5 89,55,5 499,463,4 563,344,4 12,491,5 197,194,3 456,21,5 591,520,5 649,750,2 400,629,4 9,318,3 503,1443,3 98,239,4 617,476,2 535,661,5 659,509,3 591,545,4 523,515,4 544,632,3 212,454,4 450,679,1 48,56,4 289,929,3 170,304,2 422,470,3 290,973,1 0,222,5 113,614,2 238,171,4 492,64,4 471,1089,5 536,918,4 400,24,4 531,353,4 400,1008,4 140,675,5 226,1142,4 453,659,3 449,933,3 22,101,3 398,659,3 641,258,5 524,124,3 596,14,5 172,686,1 290,218,4 590,1110,4 84,22,4 632,78,5 267,573,2 43,63,5 436,945,3 513,712,3 584,1008,5 513,69,5 535,595,3 559,245,5 91,71,3 386,192,5 42,27,4 10,14,5 457,581,1 492,11,3 311,639,2 460,284,4 344,65,3 547,653,5 654,743,2 17,548,4 587,96,2 585,495,3 607,3,3 537,380,3 449,1443,4 235,180,4 249,115,4 571,475,4 266,208,5 568,457,2 452,1229,2 581,6,5 292,530,4 118,339,4 293,147,3 663,178,4 310,795,3 644,174,5 567,434,2 541,126,5 471,484,3 605,454,2 521,429,5 525,747,1 372,626,4 535,134,5 173,545,3 456,1167,5 405,450,2 517,863,3 405,4,4 214,27,4 632,233,4 530,326,3 494,451,2 617,172,3 536,572,2 42,300,5 398,398,3 71,379,1 71,684,4 94,66,2 560,641,3 98,1078,3 644,511,5 362,1011,4 626,684,3 235,933,4 22,130,4 404,841,5 540,650,5 393,163,4 663,88,5 221,933,2 522,193,5 536,43,3 539,322,3 157,582,3 342,3,5 388,64,4 662,180,4 266,678,4 624,545,2 652,210,1 20,437,1 105,494,4 386,91,4 487,184,4 346,289,3 536,872,2 550,1168,4 648,1015,4 230,596,3 2,351,2 591,1046,1 212,755,2 428,1047,2 292,1040,2 537,709,3 644,522,5 408,285,5 84,731,3 590,579,2 243,1108,4 428,95,4 398,574,1 535,1038,5 527,614,4 456,49,5 576,57,4 494,446,4 621,1302,2 576,544,3 17,433,3 658,600,3 234,482,5 489,149,5 400,160,2 12,637,3 506,897,5 500,1006,4 600,70,1 617,761,3 141,185,4 108,541,3 637,143,5 456,222,5 312,214,4 576,865,5 329,99,4 654,1096,3 523,435,4 537,734,3 373,247,1 206,195,4 475,385,2 522,933,4 253,81,4 638,1162,1 524,249,3 367,182,5 655,244,1 89,153,5 468,214,4 516,221,4 61,379,5 544,412,4 471,20,3 478,294,1 160,203,2 588,293,5 491,211,3 296,418,3 642,442,4 60,270,1 654,508,3 649,664,2 48,357,1 662,1046,4 418,603,5 344,761,5 400,514,4 89,386,5 628,879,4 15,582,4 656,689,4 628,194,4 311,595,5 641,1078,5 156,596,3 614,526,4 327,203,3 200,257,2 628,206,4 404,1090,1 323,327,4 404,82,1 279,495,5 408,522,4 654,958,3 275,81,4 429,327,4 523,300,4 658,606,5 462,476,2 495,163,3 115,10,5 50,209,4 476,552,5 352,904,4 654,684,2 664,761,4 505,260,3 664,286,4 310,659,4 386,205,4 
48,323,4 553,691,4 333,864,2 604,826,3 342,49,5 386,1017,3 662,244,4 449,70,3 55,654,4 302,250,4 449,97,4 319,203,5 551,49,4 528,325,4 398,203,3 12,894,1 592,76,4 594,929,2 550,420,4 415,136,3 290,410,4 267,621,3 206,434,4 492,170,5 663,72,2 274,431,4 424,116,3 180,299,3 252,152,3 592,450,3 578,602,5 96,433,4 396,1000,1 581,759,3 76,30,3 467,143,5 617,958,4 632,409,2 444,54,1 496,221,3 302,84,3 294,356,4 636,1373,1 513,679,1 587,271,5 434,844,3 290,1011,4 173,245,5 499,44,4 587,109,3 103,590,4 499,530,3 505,54,4 428,113,5 597,312,5 488,242,4 101,611,4 138,99,5 236,133,5 381,121,3 513,469,3 275,1097,4 593,125,3 157,575,4 200,681,3 124,171,5 166,529,5 642,711,3 539,309,4 331,844,3 290,281,4 296,528,3 647,251,4 240,309,4 652,309,4 613,1008,3 254,55,5 386,195,2 517,1010,4 428,232,3 473,214,5 88,234,5 378,136,5 554,119,4 378,435,3 622,87,4 200,468,4 513,64,3 169,303,4 307,613,3 397,193,5 115,478,4 523,76,3 428,630,4 372,567,4 549,124,4 238,44,5 566,155,5 439,1104,5 619,98,3 344,638,4 468,610,5 316,353,4 639,565,4 465,95,5 94,64,4 436,1035,5 483,430,4 485,1374,3 408,707,4 424,688,2 362,21,3 200,302,2 652,731,2 572,184,3 343,366,5 583,171,4 573,301,4 12,656,4 492,21,5 11,160,5 492,237,3 442,686,3 10,429,3 499,474,5 196,402,3 607,960,4 325,172,5 641,239,3 656,299,2 540,431,4 647,202,1 659,251,2 590,513,4 176,321,2 64,422,5 499,134,5 121,468,5 342,175,5 263,99,5 298,791,4 638,50,2 362,690,3 325,228,3 294,560,5 4,395,5 531,304,3 492,208,5 409,315,4 599,49,4 466,116,2 614,987,1 333,855,4 591,312,5 620,880,2 200,1127,4 193,431,4 392,290,4 374,232,4 540,61,4 294,264,4 453,434,2 451,520,3 58,183,4 535,96,3 618,225,5 617,1467,3 602,922,4 560,284,4 71,55,5 456,58,5 659,925,2 95,126,5 175,297,4 341,845,2 652,526,2 117,174,5 372,678,2 540,120,3 9,272,4 330,1295,5 196,240,3 467,1011,4 386,257,4 147,131,4 664,526,3 457,288,2 550,401,4 541,149,2 649,428,4 351,155,4 594,925,1 39,309,3 174,132,4 424,822,3 502,839,1 654,1369,3 377,1144,3 621,14,4 12,178,2 456,818,2 406,200,4 57,650,4 491,290,4 233,27,4 600,132,4 638,211,4 415,368,2 451,489,4 649,1118,3 357,511,5 531,8,5 545,346,5 576,1208,4 665,55,4 489,6,3 654,475,2 120,643,4 59,229,4 238,164,5 591,341,2 36,840,3 576,3,4 57,134,4 500,117,3 55,394,3 645,345,2 158,1022,2 604,236,3 500,409,4 58,478,5 554,287,3 607,300,1 497,209,2 130,268,5 449,232,3 643,120,5 665,172,4 514,257,4 592,8,3 641,622,4 55,264,4 647,32,1 373,422,3 391,208,5 652,179,5 379,264,3 404,560,1 408,1072,4 608,313,1 404,47,1 233,196,5 560,1229,3 404,693,1 404,1071,1 659,183,3 72,747,2 497,55,3 584,1343,3 604,8,4 428,741,4 68,878,1 243,948,4 547,120,5 54,78,5 485,92,4 665,1046,3 551,116,3 568,120,3 654,272,4 631,171,5 356,303,5 304,769,3 591,252,1 605,30,4 523,434,4 660,182,4 388,660,4 478,273,4 194,1413,2 69,172,4 285,482,5 400,173,4 649,444,4 378,237,5 604,97,5 533,822,4 616,435,3 124,121,1 578,237,3 565,460,4 268,1443,1 644,657,4 404,1036,3 654,1231,3 523,43,4 312,194,5 654,1604,3 454,274,4 660,131,5 647,177,4 64,214,5 483,167,4 476,731,4 586,325,3 410,1196,4 397,116,4 279,401,4 441,287,4 497,221,3 459,247,4 387,507,3 662,410,3 621,164,5 262,49,5 204,321,3 637,513,2 300,796,4 581,236,3 386,320,3 594,590,4 157,237,5 524,595,4 115,1254,2 652,180,4 449,140,3 662,657,4 537,10,4 661,514,4 321,302,3 486,470,3 397,495,5 247,21,2 201,515,4 649,94,3 590,366,3 134,1216,2 69,27,4 487,428,4 81,411,1 150,215,4 566,132,4 449,955,4 245,430,3 404,943,3 626,398,3 343,202,4 663,432,3 623,346,4 221,1059,2 275,758,1 180,235,1 278,209,4 292,392,3 600,475,1 550,37,1 638,970,4 281,332,3 523,844,5 647,674,2 352,330,4 
556,891,3 362,407,5 404,203,5 485,235,3 151,20,3 456,622,3 388,731,4 544,180,5 98,1131,4 108,14,4 633,274,3 262,587,3 453,430,3 623,814,3 573,1312,4 462,146,3 536,484,3 660,130,3 405,7,4 654,507,3 125,336,5 239,352,1 278,1485,1 605,659,5 535,495,5 525,292,5 559,133,5 600,948,2 89,9,5 636,110,3 90,142,4 337,1123,4 586,886,2 620,754,3 536,643,5 613,409,3 12,635,2 330,268,5 531,509,5 354,323,4 206,159,2 74,55,5 298,27,4 658,271,4 200,510,3 633,1047,3 129,933,4 629,411,1 404,200,1 513,791,4 495,1458,4 585,155,4 526,95,4 373,221,4 286,107,4 404,1437,1 247,282,1 605,482,5 482,1151,4 261,401,4 659,472,2 223,675,3 609,142,5 84,229,3 338,155,5 453,413,2 457,120,1 601,870,3 424,179,4 241,1010,3 282,432,4 415,43,4 607,505,4 534,204,3 578,302,3 372,280,3 541,500,4 492,195,4 209,314,5 599,232,2 395,545,4 81,0,4 659,96,3 342,1139,3 659,361,2 10,661,3 504,163,4 5,70,4 233,497,5 377,244,3 364,284,4 643,325,5 586,287,4 647,506,1 132,244,3 659,1180,1 666,233,2 665,121,2 6,630,4 391,309,4 57,150,3 641,21,4 25,110,3 457,178,4 333,735,3 529,190,5 535,168,5 44,287,3 654,532,2 633,470,4 523,379,2 297,603,5 564,69,5 613,475,3 387,772,3 552,483,5 539,595,4 657,0,4 666,284,5 576,24,4 8,370,5 206,232,3 36,95,4 526,428,5 426,989,5 51,740,4 345,519,5 333,711,3 267,1302,1 73,689,4 578,427,4 623,257,4 206,285,2 639,951,4 103,269,4 485,272,3 496,117,4 478,587,1 544,195,4 177,1037,2 98,974,3 548,1046,3 438,239,3 190,339,4 600,471,1 458,471,5 465,994,5 158,325,3 618,1313,3 302,81,4 517,299,3 476,281,4 591,630,3 393,249,4 640,58,4 663,228,3 59,481,4 377,976,3 649,242,2 109,42,3 458,173,4 233,635,3 591,282,4 598,594,5 665,222,3 626,214,1 61,462,4 213,92,4 560,171,2 384,319,3 486,16,3 12,85,1 638,69,3 289,719,3 465,293,3 654,26,3 195,1240,3 526,466,3 374,602,4 199,117,4 392,774,4 520,228,2 285,738,3 285,136,4 289,649,2 108,507,4 410,3,4 379,461,4 513,177,4 660,479,5 266,142,4 253,464,3 143,477,4 614,193,5 628,116,5 658,568,2 617,432,2 223,355,4 201,190,2 654,516,4 653,545,4 398,548,4 641,293,5 392,754,3 21,200,4 473,970,4 245,410,3 192,68,5 348,124,4 541,46,5 342,472,3 449,490,3 644,199,5 666,222,5 327,259,2 473,11,5 499,475,2 157,366,4 572,161,4 428,803,3 556,298,4 457,182,4 259,1242,5 483,654,5 193,208,3 490,6,3 534,526,3 384,206,4 412,470,4 667,344,2 663,186,5 565,126,5 620,416,3 653,221,5 85,337,1 636,984,2 560,178,4 499,513,5 89,140,5 22,54,4 397,431,3 494,227,5 660,96,4 328,10,3 663,658,5 555,602,5 654,627,3 649,65,3 589,1128,3 403,309,4 496,1302,2 415,470,5 367,97,3 369,602,5 384,429,5 32,322,4 489,472,2 494,63,5 352,345,4 672,293,4 89,133,5 473,251,4 54,21,5 362,659,4 513,227,5 292,185,2 449,184,5 499,567,1 362,194,4 408,342,3 619,287,4 654,282,3 434,213,4 566,647,4 415,238,5 424,309,3 263,155,2 453,254,4 342,656,5 649,203,4 471,767,5 449,447,4 578,268,3 199,131,5 249,221,4 647,496,4 449,168,5 359,653,5 416,581,3 619,682,3 642,86,5 495,171,5 458,6,5 183,39,4 652,86,4 383,328,3 608,303,5 153,196,5 255,1423,3 642,793,3 594,1258,3 135,123,5 518,1590,5 270,346,3 647,747,3 566,59,5 183,663,3 665,1169,4 661,290,2 666,300,1 89,609,5 307,738,4 654,35,2 221,180,4 398,453,3 566,134,3 653,65,4 456,10,4 663,117,3 17,196,4 492,270,1 446,366,3 452,1015,4 458,1189,4 630,288,4 255,282,3 10,645,3 573,909,1 670,946,3 536,22,4 404,552,1 379,418,3 618,287,3 647,190,5 115,1141,4 568,124,3 346,594,2 61,127,2 109,565,4 249,94,5 552,434,4 661,49,3 84,512,4 499,24,3 312,734,3 604,317,5 534,169,4 269,323,2 69,403,4 668,504,3 502,124,3 483,312,5 647,106,4 200,268,3 77,287,4 408,284,4 342,235,5 170,345,4 592,116,4 478,355,3 405,637,4 378,187,4 
644,80,4 536,267,4 285,1501,2 631,78,5 547,13,1 331,121,5 660,117,4 654,504,3 536,426,4 647,563,1 404,1029,1 581,1214,4 98,172,4 667,230,2 216,180,1 525,1083,5 502,386,4 98,209,5 203,300,4 536,169,3 662,955,4 428,52,1 631,96,4 623,284,5 456,26,4 617,596,4 542,935,4 400,506,4 675,264,5 532,11,4 584,1484,3 642,151,4 406,478,4 449,1440,3 124,400,4 216,471,3 185,1082,1 298,477,4 652,1227,2 509,257,4 471,547,1 642,215,4 362,1008,2 646,630,4 267,1072,4 662,6,4 639,473,4 61,722,2 41,134,4 640,513,4 561,88,1 659,461,2 520,828,2 647,684,5 415,716,2 658,601,4 623,107,3 585,238,3 362,1156,2 653,68,4 484,318,3 338,487,5 647,117,4 444,202,3 290,1219,5 327,117,3 631,67,1 418,99,5 594,254,3 498,181,2 238,45,4 486,272,5 415,1053,3 238,418,3 662,116,4 144,1022,1 657,136,3 454,470,4 599,585,2 424,170,3 520,150,3 598,814,3 266,773,3 428,122,4 673,126,5 449,1152,5 449,327,4 232,734,5 508,301,5 600,190,4 665,202,4 295,303,3 43,66,3 58,96,5 493,221,5 392,840,3 560,99,4 594,761,4 76,251,1 592,806,4 379,707,3 392,23,3 513,72,4 175,116,4 232,99,4 547,442,4 373,321,4 503,161,4 478,282,4 302,1091,1 17,481,5 636,470,2 221,692,4 456,180,4 573,344,2 550,475,5 89,446,5 0,45,4 453,938,2 628,316,4 558,215,5 449,420,4 295,288,3 93,943,1 459,0,2 396,191,5 556,287,1 199,384,5 6,416,3 594,236,3 270,476,3 588,537,5 388,470,4 392,627,4 129,66,4 605,143,4 569,287,2 392,476,3 326,86,3 469,457,4 467,177,5 534,603,4 591,962,5 313,105,2 550,229,5 89,63,4 302,401,4 605,27,4 536,847,3 202,257,3 633,340,2 652,501,2 654,706,3 43,162,4 494,97,5 278,590,2 636,514,4 408,1359,2 312,743,3 211,245,5 502,296,5 628,196,5 268,660,4 447,343,4 415,30,5 424,572,3 591,338,3 199,747,3 379,257,4 177,475,3 591,10,5 550,430,4 343,1081,2 416,558,4 359,530,4 584,1192,5 269,212,5 322,875,2 463,602,5 470,49,3 304,281,3 108,16,4 220,1216,4 182,379,4 193,293,4 2,270,3 665,181,4 500,6,4 371,636,4 523,503,5 494,683,5 582,275,4 292,272,4 614,21,4 457,703,2 331,449,5 404,557,1 404,1393,1 503,140,3 429,180,4 187,565,5 605,1517,4 588,891,4 58,105,4 115,220,4 114,123,5 267,172,4 665,637,3 591,215,4 315,514,4 80,24,5 471,253,4 586,748,2 349,182,3 64,72,4 396,198,5 348,117,2 52,256,4 649,181,3 307,961,4 621,0,3 518,329,5 552,422,3 270,602,4 637,514,4 627,937,5 386,317,3 551,411,2 534,482,5 505,65,4 545,221,4 487,97,4 436,175,2 530,287,1 456,82,5 600,286,1 90,203,4 446,26,3 398,160,3 161,402,3 143,123,4 629,249,1 44,596,3 663,477,5 619,353,5 17,410,3 434,1214,3 607,214,3 69,263,4 450,332,5 654,279,2 349,178,5 452,974,2 268,1072,3 619,794,4 296,581,4 637,229,5 575,513,5 331,171,5 629,190,3 398,443,1 649,139,2 646,116,3 487,264,4 394,14,3 68,297,4 267,94,4 626,181,4 651,322,3 436,602,5 659,796,2 641,825,5 436,954,4 435,312,5 503,309,4 536,264,3 649,484,3 152,324,2 473,1112,3 193,520,4 513,27,5 94,1227,3 641,995,2 392,117,4 431,2,3 283,753,3 435,25,3 327,684,4 541,409,4 525,281,3 327,64,4 459,126,4 84,781,2 659,545,2 550,692,5 503,207,4 51,317,5 576,559,3 59,433,5 602,182,4 659,16,1 404,515,1 274,120,3 644,433,4 645,682,3 300,209,4 404,848,1 624,264,3 620,943,5 45,99,4 22,7,4 659,62,2 449,181,5 112,254,5 556,149,3 665,182,5 330,1016,2 233,214,3 621,1551,2 233,165,5 416,848,1 676,285,1 333,432,5 270,176,3 333,1425,4 424,549,4 592,814,3 131,1153,3 636,409,2 292,233,5 649,497,4 58,502,4 428,657,3 267,55,4 153,323,2 105,738,3 390,236,4 320,31,3 188,662,3 98,104,2 415,186,5 454,474,4 391,481,5 476,1040,5 456,196,5 372,1065,4 449,162,4 434,116,3 599,10,5 618,650,5 251,285,5 642,281,3 667,306,4 434,357,4 302,582,1 456,76,4 649,201,3 218,3,4 599,650,4 483,678,2 290,932,4 
607,198,1 536,180,2 189,120,3 534,920,4 421,286,3 449,82,4 499,249,4 613,254,5 14,924,2 605,1038,4 600,156,3 495,1613,3 637,88,4 540,234,1 523,1165,5 576,1270,3 386,624,2 636,832,1 596,224,4 416,1010,3 616,646,3 681,923,5 327,76,4 627,873,5 608,907,1 520,474,3 654,949,3 622,233,4 434,393,4 605,213,4 473,68,5 357,1523,5 591,99,5 566,169,3 290,422,4 605,431,5 199,90,4 233,488,3 592,219,3 534,172,5 233,503,4 20,184,5 612,470,3 664,342,3 203,145,3 455,210,4 244,893,1 617,189,4 234,194,4 147,473,5 606,179,4 292,1285,4 91,683,3 291,664,3 471,833,3 621,567,4 586,324,5 157,230,2 69,500,4 404,773,1 662,10,5 342,192,4 544,16,3 431,116,4 378,356,5 331,120,5 285,568,4 275,734,4 378,218,3 649,434,4 576,98,3 353,247,4 585,154,3 413,301,5 404,96,2 658,257,4 605,927,4 421,259,3 613,278,3 477,237,3 547,332,4 601,49,5 101,595,2 548,514,5 193,275,3 499,273,3 362,49,5 37,451,5 335,709,4 405,503,4 621,541,2 91,273,4 607,567,5 57,248,4 665,3,5 248,95,4 654,610,3 422,339,4 666,434,3 652,1619,2 406,182,4 478,478,4 502,209,5 590,93,3 495,1138,2 605,708,5 495,824,3 379,1038,3 663,1089,1 617,131,4 542,470,3 526,92,4 653,277,3 93,500,4 436,27,3 245,1088,1 505,538,4 180,411,2 343,95,4 384,234,5 80,595,3 12,735,4 267,801,3 663,6,3 105,13,4 628,10,2 392,1180,3 654,209,3 206,520,4 473,1420,4 534,132,5 356,684,3 42,173,4 456,401,4 658,95,4 584,922,5 542,43,3 129,397,3 586,891,3 290,392,3 229,222,5 466,741,2 532,684,4 576,43,3 468,63,5 12,864,5 150,264,5 518,265,5 392,104,3 578,172,5 404,852,1 639,194,4 333,154,2 134,320,4 586,341,1 452,363,3 245,747,1 124,392,4 96,602,4 628,990,1 662,602,4 514,749,2 429,461,3 503,68,4 66,1051,3 659,312,4 341,411,3 654,575,2 649,126,2 302,149,5 58,473,5 638,691,3 605,131,5 543,748,4 380,302,3 550,978,4 467,156,4 446,1047,2 292,128,3 551,410,3 436,179,4 478,327,4 295,185,3 485,254,3 591,330,3 576,86,5 416,398,3 344,124,3 458,968,3 58,671,5 42,11,5 649,673,4 501,679,3 286,475,1 27,321,2 541,69,4 84,418,5 333,493,4 12,655,5 532,185,3 512,404,3 550,2,5 654,1318,3 416,62,3 681,734,4 659,431,4 410,72,4 502,215,5 623,923,4 668,536,3 648,274,2 607,657,3 665,491,4 615,298,3 428,0,3 620,40,4 609,293,1 613,121,3 636,543,3 456,539,3 668,461,5 626,585,3 652,408,2 496,247,4 85,268,4 177,123,4 573,288,4 598,534,4 12,410,2 478,187,2 17,426,5 402,476,4 681,622,3 143,404,4 623,533,3 287,172,3 434,1224,3 183,164,4 318,331,4 568,275,4 406,254,4 72,0,2 513,18,4 560,519,4 266,185,5 410,57,3 493,478,3 302,22,5 473,698,4 604,482,5 663,731,3 294,496,5 471,342,5 609,426,5 632,182,4 404,22,5 545,348,4 462,951,1 373,618,3 536,64,3 115,354,2 89,653,5 527,177,4 477,339,5 144,283,4 304,1484,3 55,152,4 641,79,5 550,454,1 152,180,1 349,194,5 286,709,4 406,448,2 297,152,3 193,1090,3 639,577,3 587,171,5 264,116,5 576,650,5 441,239,2 624,247,4 406,656,4 654,251,2 527,483,3 307,320,3 663,513,5 164,259,3 626,457,3 658,418,5 495,157,2 536,316,3 48,500,3 652,1011,4 536,524,3 619,894,3 547,1024,4 312,502,5 183,234,2 195,1006,4 536,511,3 276,470,3 421,451,3 390,771,2 616,184,5 58,312,5 331,76,4 639,356,5 617,70,4 488,874,2 551,290,2 456,303,4 565,482,4 660,526,4 231,169,5 485,276,3 681,214,4 188,213,1 640,82,4 536,958,3 663,465,4 109,384,3 22,478,5 620,392,3 313,150,4 302,71,3 377,714,4 69,224,3 465,325,3 48,1078,1 586,937,2 681,264,3 505,379,4 654,1207,3 12,567,3 108,185,3 325,179,1 647,428,4 681,324,4 604,461,5 586,901,2 82,297,4 435,784,2 372,430,5 590,237,5 91,226,1 521,178,5 310,191,3 540,49,5 655,749,2 143,54,4 81,538,3 187,293,2 473,69,4 587,317,4 173,400,1 441,448,2 277,922,5 665,205,4 294,381,5 494,1045,5 475,82,3 
375,320,3 187,741,5 233,922,4 198,322,3 563,311,3 594,677,1 345,738,3 384,427,3 379,743,3 0,168,5 590,85,5 513,213,5 302,500,4 158,363,1 660,312,4 626,731,3 678,519,4 290,455,3 298,80,4 297,483,4 302,596,1 96,194,5 520,745,4 654,1006,3 307,152,5 9,692,4 310,1049,3 473,59,3 296,745,3 345,54,5 404,1569,1 480,49,4 372,168,5 126,900,5 211,644,3 233,276,3 350,309,5 488,325,4 289,173,5 124,185,3 341,377,4 343,126,5 607,97,5 486,190,4 299,263,1 492,692,4 450,879,1 513,257,4 629,476,4 464,257,5 121,134,4 120,281,1 410,228,3 639,1015,3 451,779,1 372,94,5 134,323,3 585,199,4 542,565,4 129,1209,2 522,24,4 662,273,3 380,639,5 659,825,3 473,603,4 659,97,4 86,1187,2 488,751,5 656,1008,4 653,256,4 135,8,5 566,267,4 307,602,5 582,174,5 558,186,3 300,734,2 532,332,4 238,1097,5 652,736,1 600,57,1 664,176,3 300,741,4 662,470,3 531,25,3 431,256,5 477,230,1 428,384,3 251,289,3 392,1167,3 624,194,4 42,490,4 535,55,3 404,696,1 580,99,5 626,116,3 667,257,2 536,306,3 256,1021,2 659,248,2 544,728,3 581,409,3 289,120,4 386,186,4 586,337,4 294,789,3 256,312,5 665,178,5 520,234,3 37,201,2 144,351,4 292,16,2 93,249,4 243,234,1 262,179,4 434,889,1 596,712,2 353,307,4 157,454,4 17,22,4 658,854,2 456,199,5 604,190,5 379,628,2 270,380,3 254,929,1 387,287,5 57,654,5 641,1028,3 597,749,5 658,160,3 420,49,5 647,121,1 263,524,5 61,520,5 388,65,3 497,159,5 558,162,4 267,635,3 446,1141,5 160,427,3 254,221,3 536,467,2 619,299,3 388,299,3 62,324,2 536,201,3 659,100,3 566,603,4 659,1177,1 624,221,4 670,249,5 681,67,5 523,80,1 649,483,5 617,191,5 540,475,5 631,469,4 659,6,3 420,218,3 147,180,5 533,755,4 386,1128,4 333,19,4 449,186,5 220,26,4 255,678,3 10,716,2 504,495,5 287,133,2 612,63,5 304,195,4 591,80,4 206,356,5 451,602,4 326,657,2 604,339,4 475,998,2 558,518,5 93,195,4 456,755,2 444,272,2 639,11,5 672,287,4 538,21,3 275,1266,4 631,745,3 405,523,4 471,182,5 342,64,5 205,345,5 159,460,5 683,63,4 162,356,4 668,168,3 605,67,5 587,933,4 626,85,3 300,96,4 62,479,3 639,200,4 587,530,3 384,918,4 346,99,3 343,1136,3 76,24,2 633,292,3 492,403,4 304,164,4 465,78,3 160,271,5 453,210,2 545,184,4 435,659,4 349,172,4 478,257,5 4,397,2 491,656,3 300,339,4 681,351,1 484,302,4 541,530,4 633,99,4 261,410,2 487,775,4 487,889,1 665,830,2 553,938,4 325,0,3 517,1334,3 436,587,3 541,191,5 406,176,4 415,85,1 502,728,3 215,215,4 642,88,3 405,142,1 168,320,3 129,537,5 285,183,3 379,169,4 596,49,5 94,101,4 266,46,5 576,131,4 590,515,3 536,488,3 654,1350,3 325,440,2 155,509,4 228,895,4 599,760,4 565,383,3 536,726,2 310,734,4 636,1343,4 312,747,3 416,121,2 664,683,3 527,587,2 197,580,3 359,24,4 567,503,3 449,660,3 458,163,4 278,175,3 619,992,5 491,530,4 114,136,5 177,678,4 150,96,5 465,272,4 275,1108,3 384,416,2 177,143,4 642,69,3 684,268,3 664,1039,4 165,346,5 578,55,3 400,476,1 5,167,4 404,807,1 108,201,5 531,914,4 599,61,4 93,301,4 341,530,3 567,658,3 664,345,2 69,134,4 106,258,2 505,971,3 340,871,4 20,572,2 605,171,5 435,654,5 641,141,4 499,14,2 373,557,1 551,925,2 93,1011,4 504,525,5 661,590,4 652,387,2 540,233,5 494,156,5 537,257,3 434,67,4 333,886,5 607,143,4 661,285,3 537,186,5 221,157,3 49,543,4 6,633,5 672,268,4 502,484,4 550,4,4 449,470,4 541,174,3 183,184,4 348,1027,2 505,730,4 599,88,5 681,91,5 346,688,4 531,363,3 654,1045,3 410,116,2 331,405,3 654,1168,3 111,271,5 180,1172,1 514,327,2 499,392,3 599,539,3 417,894,4 394,195,4 503,478,4 629,256,3 199,508,4 461,10,5 486,159,4 213,507,4 605,224,1 542,854,4 550,331,4 473,600,5 528,885,4 379,8,3 404,782,2 473,288,3 48,1073,2 535,282,3 590,47,4 628,508,5 532,99,5 173,870,1 675,519,4 206,654,4 
609,330,3 63,100,2 379,193,4 373,65,3 93,1210,5 283,309,3 587,596,4 638,511,2 488,324,5 591,460,4 459,457,2 353,532,5 545,99,3 620,171,5 416,826,2 124,375,3 199,461,4 313,469,3 682,1482,3 192,176,4 509,299,5 268,761,1 576,1032,4 219,257,3 55,472,2 616,766,3 428,67,3 275,454,4 379,269,3 523,51,4 681,350,4 15,97,5 620,221,4 652,394,1 442,339,5 333,180,4 428,81,4 690,602,5 341,691,1 93,417,3 683,146,2 594,507,5 653,745,3 560,723,3 431,863,2 333,674,4 633,13,3 523,54,2 658,76,4 542,3,4 502,49,5 435,627,5 536,298,2 362,94,3 337,196,5 681,1302,2 625,293,3 584,1020,3 560,683,3 652,1205,3 664,475,4 459,306,4 654,1394,3 659,401,3 298,530,3 457,588,4 576,828,3 683,37,3 459,1250,3 81,6,3 268,1109,2 549,49,5 653,1034,4 162,27,3 667,553,3 10,124,4 518,324,1 486,954,5 371,287,5 560,215,3 298,273,3 620,256,5 51,24,5 482,194,3 609,287,3 538,284,4 293,150,5 647,27,5 84,120,2 649,580,2 623,1046,3 551,987,3 48,24,2 565,672,4 536,717,4 498,356,5 429,275,1 633,234,3 498,270,3 540,141,5 623,236,4 17,613,4 138,301,3 330,303,5 197,152,4 377,478,4 436,701,1 660,177,4 646,489,4 628,6,2 359,522,3 351,652,3 275,327,4 351,143,5 642,222,4 621,143,5 604,878,3 270,204,5 486,685,4 344,286,4 638,97,4 617,654,4 313,1468,4 326,844,3 624,182,3 530,301,5 368,315,5 523,64,4 585,232,4 344,68,4 631,479,5 6,640,5 432,918,5 471,576,3 385,981,3 540,87,3 458,677,4 576,55,3 458,300,2 114,653,5 497,447,4 616,518,3 520,120,2 295,220,5 59,674,4 625,327,1 501,894,4 621,81,3 654,1417,4 82,408,4 594,1311,3 1,310,5 535,171,5 12,223,4 665,201,5 560,239,1 462,125,4 450,285,1 4,203,4 617,1057,3 362,226,4 374,1216,4 406,131,4 643,306,4 99,1232,3 682,247,4 579,470,3 390,99,4 606,120,2 270,68,4 653,1284,4 600,841,1 681,1117,3 616,183,1 457,125,4 514,301,3 623,880,3 630,312,4 457,749,5 10,737,3 313,741,4 495,142,3 525,341,2 591,256,4 290,66,4 357,1148,3 40,96,3 444,332,2 649,636,3 41,461,2 640,123,4 673,404,4 617,317,5 654,133,4 532,210,4 199,923,5 173,622,3 631,130,4 550,156,4 566,167,5 137,601,4 405,504,4 547,459,4 492,875,1 406,142,4 233,417,3 84,503,4 17,177,3 536,989,2 278,762,3 255,318,2 613,404,2 326,97,4 359,932,3 199,442,5 352,314,4 455,762,4 298,212,5 628,1118,5 258,297,4 349,270,3 72,174,5 622,65,4 660,442,4 623,1016,3 658,143,4 208,270,2 279,608,4 91,435,4 532,1040,2 590,115,4 478,171,4 83,86,5 307,612,4 671,126,4 523,567,4 691,55,3 572,274,4 458,1050,3 503,941,4 624,583,3 587,216,4 600,11,3 472,255,4 278,624,3 576,938,5 579,404,2 313,495,4 274,624,2 473,616,3 436,160,2 591,319,5 312,682,3 268,628,2 534,51,4 541,236,4 12,366,3 12,32,5 560,242,1 623,248,3 649,506,4 377,4,3 550,312,4 170,353,3 434,576,3 425,510,4 545,6,5 659,709,3 589,743,4 457,979,5 341,964,4 452,92,2 425,653,5 133,314,3 569,323,2 144,172,5 649,76,3 369,180,4 329,0,5 159,824,2 345,126,5 552,616,4 678,173,3 502,279,1 127,464,4 598,755,5 183,450,4 653,312,5 5,302,3 56,495,4 235,190,4 293,110,4 114,478,5 456,1036,2 310,622,2 660,435,4 420,143,5 531,1206,2 665,791,4 120,299,3 377,85,4 507,22,4 665,300,4 584,509,5 147,55,5 20,977,1 544,256,5 423,309,3 233,495,4 639,335,3 206,91,2 681,779,3 670,384,5 638,663,2 349,167,5 487,691,4 353,581,4 662,543,4 313,746,1 689,383,3 12,683,5 629,1046,4 505,202,4 193,473,4 586,686,1 536,954,4 621,55,5 441,173,4 551,146,3 689,363,3 536,529,4 520,49,4 262,481,4 235,206,3 549,747,4 389,844,2 61,596,2 672,343,5 177,723,4 327,1216,3 160,1116,3 69,596,3 180,322,2 566,298,4 654,1044,3 473,604,3 553,677,3 663,167,4 147,548,3 267,158,2 386,445,2 245,403,3 47,193,4 255,1207,3 428,22,4 233,435,3 404,1231,1 404,678,1 587,117,3 659,23,3 504,171,3 
267,478,4 642,207,5 391,311,4 665,173,3 572,712,4 688,272,3 689,217,5 664,64,4 465,230,1 638,386,3 183,706,4 86,194,5 405,609,1 460,318,3 591,366,4 59,226,4 363,989,4 313,654,4 6,539,3 415,430,4 526,237,5 662,46,4 517,1039,3 270,497,5 496,417,3 344,3,4 362,89,5 200,555,4 267,558,2 585,741,3 685,27,4 623,627,4 449,238,5 452,870,1 280,300,3 652,97,2 10,331,5 664,327,4 526,59,4 633,257,4 626,272,4 451,422,5 614,528,5 471,11,5 536,922,3 654,31,4 557,252,5 513,185,4 605,149,4 91,50,4 295,479,5 526,961,3 591,701,4 273,627,4 531,507,4 339,427,1 398,505,3 506,297,5 438,6,4 314,641,5 434,151,4 177,1257,4 170,302,4 664,587,4 395,8,4 177,624,3 263,636,4 451,47,5 152,356,5 301,747,1 617,215,3 495,52,3 589,474,4 654,524,2 454,126,5 39,875,3 298,201,4 553,10,4 681,654,5 615,677,2 526,654,3 347,242,3 456,411,2 450,258,4 457,587,5 479,95,4 384,92,3 540,876,1 536,146,2 209,762,2 654,961,5 666,650,5 591,750,3 294,215,5 0,40,2 507,473,5 392,635,3 673,293,4 212,105,4 477,1269,1 21,567,4 644,54,3 127,683,4 585,143,4 328,878,2 453,613,3 440,293,4 360,206,4 531,329,4 65,762,4 377,52,3 0,161,4 388,237,5 180,1325,1 449,496,5 692,180,3 69,173,5 200,285,2 105,243,4 545,456,1 667,293,3 550,175,4 647,183,5 659,845,2 10,714,3 30,135,5 362,172,5 310,281,5 275,50,3 667,402,4 551,124,3 183,212,5 550,179,5 585,357,4 624,527,3 638,167,1 345,119,3 620,721,4 150,506,5 499,618,3 685,97,5 562,236,5 652,684,3 333,80,4 591,143,5 681,508,2 333,461,4 64,97,4 48,239,3 192,217,4 652,6,2 534,55,3 424,230,3 685,473,5 613,471,3 370,180,3 408,492,4 278,202,2 100,925,3 641,43,3 668,897,1 221,618,4 428,526,5 515,249,4 435,94,4 623,918,4 653,127,5 118,404,4 453,68,4 642,261,3 480,429,4 109,271,4 396,610,5 607,75,4 320,285,4 405,236,1 187,161,4 428,920,2 617,558,3 37,154,5 681,1044,3 253,68,5 462,223,3 45,304,5 649,628,3 425,426,5 628,565,5 473,13,5 684,285,1 665,1097,4 471,120,5 649,195,4 590,465,3 589,254,1 405,219,3 172,298,4 626,213,3 88,49,5 151,7,5 558,1140,2 79,530,4 651,258,2 157,225,3 670,203,5 210,1024,3 536,208,4 654,781,3 472,320,2 565,528,4 410,317,4 646,221,4 306,61,3 494,443,3 585,32,5 278,975,3 529,182,4 585,848,3 665,257,4 642,248,3 454,8,4 221,218,4 565,96,3 101,327,2 560,233,3 435,973,5 664,6,4 444,8,2 664,1282,3 488,1611,5 605,82,5 456,416,4 604,254,2 380,513,5 220,575,3 180,5,1 168,498,3 366,301,5 564,29,5 532,106,3 638,201,2 17,488,4 5,471,1 292,199,4 388,98,5 302,170,4 486,332,3 338,448,3 290,123,5 681,163,3 180,1083,2 471,40,4 537,172,3 618,824,2 311,171,4 44,256,5 607,606,5 672,302,5 296,749,5 687,287,5 20,636,4 377,41,4 434,257,4 223,192,4 585,719,4 94,959,2 428,707,3 633,6,4 384,57,4 24,403,3 0,109,1 12,649,2 343,863,3 341,92,4 415,332,4 523,196,4 404,641,1 660,726,4 456,451,3 1,268,4 28,263,3 402,105,2 617,719,3 159,284,4 449,263,3 599,561,3 647,215,4 333,115,4 404,36,1 43,297,2 10,366,3 267,69,3 665,198,5 324,57,3 428,236,3 405,746,2 605,88,5 618,312,5 399,300,4 662,285,3 345,239,1 647,522,3 449,965,4 603,4,2 628,49,5 13,301,5 536,454,1 278,136,4 628,708,3 206,410,3 568,8,5 660,424,4 397,210,4 683,552,4 242,21,3 20,816,3 94,94,3 689,24,3 275,159,4 583,81,3 580,812,5 536,1100,3 550,75,4 372,726,4 553,57,4 206,316,4 304,844,3 6,72,3 298,47,4 406,95,3 177,76,4 654,158,3 616,606,4 193,541,3 101,272,3 649,226,2 231,654,4 158,875,2 502,1316,4 696,293,4 438,590,4 279,485,5 431,470,3 617,65,4 261,426,4 607,692,3 607,131,2 523,519,3 550,916,3 531,0,5 115,1019,3 607,701,1 642,227,4 386,106,3 547,38,5 594,267,4 429,317,5 243,1187,4 406,728,4 494,1181,3 499,1440,2 454,422,5 541,108,4 359,274,4 207,85,2 526,208,4 513,131,4 
659,1,2 188,49,5 676,236,4 377,1057,3 275,793,2 340,894,4 81,273,3 285,315,5 310,87,4 485,627,3 405,91,4 565,79,3 641,1027,4 666,123,5 521,653,4 255,974,3 592,274,3 233,258,2 101,654,3 197,368,1 532,370,3 496,357,4 531,558,5 494,434,5 605,202,5 600,97,3 692,498,4 561,4,4 682,894,2 312,498,3 342,535,4 642,52,4 93,327,3 268,630,4 688,878,2 657,432,4 200,227,3 485,507,4 581,120,3 666,315,4 647,239,2 636,293,3 579,545,1 600,184,4 567,161,2 55,968,3 270,923,3 591,13,5 560,894,1 436,206,4 39,258,2 614,159,3 12,21,4 359,1133,3 647,877,3 676,404,4 373,191,5 507,152,3 647,173,5 197,72,3 654,1015,3 525,322,2 475,167,5 609,11,5 649,218,3 63,383,2 530,894,2 556,57,4 532,235,4 345,66,3 576,446,3 681,1106,2 346,385,1 614,631,5 668,22,4 89,207,3 659,430,4 560,810,3 526,650,5 369,171,4 585,1046,3 206,721,3 298,486,5 684,301,3 79,513,3 534,165,4 338,656,4 279,415,5 408,366,3 696,712,5 367,669,3 173,762,1 1,254,4 42,746,4 292,403,4 697,1062,2 217,174,3 619,245,4 522,943,4 669,481,5 591,3,4 152,181,5 58,464,2 100,369,2 307,229,4 358,267,4 134,54,4 191,234,3 312,431,5 663,78,4 609,55,3 653,1008,3 150,747,2 642,287,4 221,423,1 689,209,3 302,268,5 532,19,5 9,196,5 625,242,1 473,613,4 683,47,4 483,203,5 663,196,4 304,47,5 507,184,5 625,285,5 681,332,4 647,78,5 541,771,4 616,568,1 378,515,4 364,276,4 327,548,4 185,290,4 452,52,3 63,90,4 444,244,2 628,134,5 510,287,4 355,306,4 408,185,5 642,149,5 502,320,2 373,171,3 668,256,3 495,190,5 604,251,4 405,52,4 492,194,3 319,1058,4 428,51,4 267,1097,3 436,1598,5 473,217,4 617,384,4 94,1125,4 473,415,4 373,978,3 91,242,1 293,312,5 93,316,5 684,990,1 255,362,3 404,842,2 532,381,1 424,894,4 27,27,4 625,987,1 489,454,4 188,862,4 523,321,4 486,96,5 658,126,5 617,422,5 427,325,3 269,534,5 685,424,5 290,779,5 57,97,4 405,122,4 226,273,4 84,821,3 267,560,3 57,198,4 585,239,3 89,1204,3 591,853,5 591,172,5 621,1206,2 618,719,4 58,678,4 567,614,5 496,1227,2 599,809,3 505,214,5 184,159,1 404,446,4 304,142,3 322,22,5 487,327,4 197,726,4 465,884,2 311,944,5 462,472,4 654,1028,1 129,63,5 536,424,3 390,497,4 622,503,3 691,1022,2 692,272,3 659,995,1 243,79,3 224,285,4 176,185,4 278,60,4 294,808,4 267,727,2 346,182,3 535,434,3 425,196,4 483,404,4 268,529,3 607,165,3 408,208,5 233,153,3 626,1133,1 628,659,5 560,678,3 449,526,5 5,184,5 373,1012,2 343,280,3 643,596,4 503,3,4 19,147,5 188,117,1 353,472,3 538,169,5 637,429,5 484,329,3 414,747,5 464,422,3 542,479,4 353,310,5 585,30,4 353,302,5 325,483,5 583,39,4 647,675,2 664,23,3 576,62,4 542,528,4 585,26,3 342,133,5 369,198,4 504,190,3 654,1584,4 124,507,1 180,1390,1 468,305,4 390,181,4 634,12,2 469,1066,4 697,567,2 586,338,3 93,779,3 589,14,3 681,1088,2 423,537,5 559,410,3 345,390,2 689,789,3 466,248,3 641,569,1 558,196,4 249,247,2 618,514,1 473,290,4 567,637,3 478,317,5 671,49,3 233,65,3 388,490,5 641,62,3 483,72,4 173,125,5 440,341,4 261,1134,3 558,510,2 253,230,3 609,6,2 693,1049,3 266,152,5 542,162,4 654,526,3 329,101,4 360,610,4 587,173,3 644,285,4 605,124,4 623,686,2 93,678,4 395,244,3 181,49,5 360,48,3 491,771,1 317,384,4 537,41,1 585,590,3 449,730,3 469,0,3 302,142,4 689,79,3 173,177,5 71,1146,5 516,126,4 499,267,5 6,665,4 317,617,3 326,151,3 467,110,4 434,161,1 0,65,4 344,254,4 238,461,5 12,38,3 310,65,4 657,95,4 551,1013,4 681,596,1 621,854,3 653,677,4 261,152,3 663,662,4 480,282,5 544,548,4 592,182,4 605,160,4 425,134,3 268,254,1 535,707,3 679,19,4 75,92,4 560,51,4 206,474,2 294,380,5 345,540,3 233,764,3 275,768,1 20,258,2 676,147,4 426,257,4 605,992,5 400,510,2 137,508,4 233,649,3 477,1047,4 387,8,3 231,99,5 482,179,2 682,267,4 
89,495,4 633,507,4 541,356,5 197,430,3 495,355,2 407,312,4 416,1415,2 378,433,3 553,172,3 621,49,5 290,116,5 641,461,4 294,464,4 485,934,4 221,1187,3 344,484,4 433,6,1 641,526,4 175,285,2 150,92,5 222,1015,5 704,1042,5 681,356,3 84,709,2 84,81,3 654,697,4 542,517,3 523,415,4 362,203,2 536,280,1 692,131,4 229,120,4 255,537,5 652,473,4 550,942,5 591,11,5 25,115,2 397,601,4 679,1011,3 109,1089,2 596,263,4 704,577,3 523,704,3 83,627,3 377,93,3 566,208,4 128,902,2 501,686,4 513,292,3 523,949,4 114,175,5 415,1261,5 243,432,5 298,90,4 689,275,3 654,1378,3 148,345,4 267,190,4 386,428,3 540,173,4 58,179,4 649,237,4 275,474,5 436,274,5 654,207,3 526,6,5 531,250,4 649,448,3 110,314,5 196,182,5 512,126,4 90,600,4 604,474,3 520,21,4 499,97,4 333,628,4 144,1050,2 527,297,4 534,60,3 278,1036,1 404,665,1 498,156,3 542,703,3 561,415,5 658,178,1 610,895,3 71,4,4 477,64,4 659,196,3 140,1013,3 441,1217,2 654,1133,3 322,49,5 202,322,3 639,749,5 486,238,5 8,241,4 623,341,3 235,327,5 669,650,4 700,284,5 222,1050,3 428,417,3 623,332,4 298,501,4 256,285,5 298,401,3 217,647,4 692,130,3 564,637,4 405,404,3 670,232,4 619,34,3 537,565,3 442,11,5 591,304,4 91,362,3 670,580,2 583,312,5 652,195,2 641,1075,2 692,479,4 491,284,4 434,194,5 159,482,5 492,59,2 693,431,4 90,327,4 698,747,2 506,689,4 638,196,3 336,878,3 622,434,5 213,581,3 652,229,3 698,99,4 693,190,5 239,268,5 143,513,5 697,511,4 278,479,3 631,183,5 457,142,4 455,142,3 335,203,5 494,1468,5 531,1187,4 494,228,3 297,210,5 559,653,5 657,457,3 517,146,4 544,418,3 654,908,3 618,349,3 278,341,4 520,430,4 415,753,5 300,11,4 58,788,4 61,568,1 631,281,4 101,67,2 620,397,2 652,447,4 691,284,3 348,99,4 605,173,5 518,258,1 370,182,5 544,181,3 387,306,4 485,123,5 654,1007,3 342,97,5 456,1038,5 0,76,4 110,353,4 703,204,5 416,257,4 523,583,1 605,152,3 199,61,5 658,653,4 594,1060,3 506,749,5 670,22,4 404,192,4 668,325,1 486,684,3 545,163,4 148,873,3 617,575,4 607,132,4 124,686,3 471,27,5 678,287,4 581,2,3 554,409,4 416,356,5 300,249,4 346,201,4 552,514,5 392,67,4 71,465,4 649,54,4 300,392,3 269,581,3 278,1184,1 435,91,3 183,714,4 372,434,4 403,689,5 337,214,3 346,202,5 646,327,3 620,560,4 649,691,3 464,31,3 321,257,4 456,116,4 58,184,5 456,202,4 434,14,3 706,13,3 608,257,3 691,691,3 498,274,3 428,209,4 159,588,3 460,157,2 233,1099,2 622,658,5 408,1092,2 642,110,4 588,285,3 344,619,2 194,385,2 462,1162,4 513,180,4 497,211,3 416,945,4 388,282,5 11,415,3 475,1035,2 652,1022,3 486,549,3 249,457,5 209,442,4 665,1265,5 647,4,4 542,110,4 536,87,2 267,830,3 43,495,4 434,30,5 500,124,3 223,686,2 699,180,5 193,87,3 404,7,4 681,93,3 480,99,4 144,446,5 562,209,4 199,409,3 594,150,5 703,208,3 428,404,3 617,96,5 591,533,5 456,69,4 629,171,3 22,213,3 465,327,4 642,23,4 48,996,1 63,272,2 670,187,2 684,318,2 567,508,4 410,1474,3 177,6,4 654,1606,3 462,1376,4 609,590,3 517,819,2 619,0,5 696,330,3 654,76,3 654,499,2 587,163,5 93,950,3 449,822,3 140,618,4 290,596,3 449,273,4 449,379,5 473,663,4 314,203,5 663,468,3 290,201,4 84,131,5 292,173,5 268,791,4 71,68,4 591,306,4 15,683,5 626,938,3 387,558,5 547,233,4 352,357,1 248,420,5 221,715,2 489,925,2 532,1015,3 241,282,4 243,67,5 550,727,2 503,447,5 377,236,4 685,186,5 658,88,4 499,610,5 546,332,4 221,815,1 221,190,2 4,215,1 684,339,2 703,353,4 298,969,4 478,280,3 429,247,3 404,1102,2 109,87,4 456,194,5 404,1069,1 112,8,3 654,1537,3 641,87,5 691,286,3 495,203,3 665,528,5 654,791,3 641,138,1 451,61,2 101,48,2 528,318,4 654,283,2 654,618,3 234,1175,5 186,178,5 681,442,3 404,683,3 405,662,5 647,470,4 604,324,2 396,854,4 108,94,4 454,63,4 471,77,1 
157,1015,3 686,244,3 300,757,3 262,0,5 428,201,4 530,687,1 17,527,4 681,939,2 471,927,4 626,1135,4 298,76,3 694,299,1 656,150,4 398,232,3 181,149,3 245,180,5 692,288,3 372,570,1 456,230,4 436,842,4 592,192,4 39,320,4 84,135,4 218,37,1 542,238,2 392,203,4 59,683,4 549,287,5 233,442,3 27,287,5 387,595,4 544,383,3 541,272,3 653,824,3 581,49,5 4,419,3 654,732,3 166,82,5 652,966,2 496,398,4 327,691,4 674,305,5 404,1218,1 532,21,4 561,401,5 649,649,2 404,783,1 523,402,4 495,226,1 657,771,3 319,273,4 660,675,4 404,1043,4 179,960,5 89,453,2 696,297,4 290,99,5 290,68,5 377,124,2 214,173,4 542,169,4 11,214,4 665,698,3 405,38,4 644,58,5 127,392,4 631,149,2 482,228,3 150,627,5 11,3,5 478,726,5 626,82,3 456,791,4 605,41,3 58,615,5 585,396,3 360,167,4 665,131,4 71,221,1 473,273,3 632,171,3 335,779,3 140,870,3 682,510,5 567,477,4 89,220,4 620,249,4 364,267,5 588,748,3 367,233,3 522,508,4 62,24,4 180,1344,1 254,324,1 704,57,2 372,509,3 432,325,2 638,304,1 444,6,1 115,322,3 629,831,2 479,88,4 489,1385,4 302,424,4 586,349,3 415,462,4 12,817,3 249,753,4 649,1064,4 633,747,3 312,504,5 268,495,5 435,558,4 681,1073,4 42,293,5 98,312,5 200,195,4 607,184,5 404,479,4 392,695,4 434,104,3 642,366,4 193,506,4 404,11,5 439,349,5 384,1068,4 34,332,4 696,300,5 344,1280,4 707,321,3 533,272,5 654,935,3 300,461,2 662,331,4 568,472,4 404,789,1 536,167,4 649,670,3 620,179,4 664,155,5 443,8,5 456,93,3 670,747,3 506,314,5 63,287,4 287,135,5 55,442,4 183,64,4 550,684,1 681,292,4 216,402,5 526,3,2 397,173,5 436,698,4 533,293,5 388,153,3 415,471,4 493,293,4 621,471,3 665,244,3 560,701,3 405,1193,4 599,509,5 183,0,4 653,49,5 654,1084,2 283,750,3 396,345,4 245,171,5 293,1010,2 141,424,4 600,224,1 292,517,5 486,95,5 397,72,3 444,117,2 335,1182,1 641,363,5 55,372,4 446,283,4 618,404,3 523,1151,3 582,6,5 206,68,4 123,172,2 654,480,2 167,224,5 306,69,4 180,288,4 654,1648,3 324,199,2 422,124,2 706,662,4 633,20,2 19,377,3 523,835,2 660,94,5 464,704,4 568,507,3 609,78,3 628,339,2 302,254,4 312,210,5 81,473,3 547,1013,4 473,207,3 606,44,4 591,1513,5 622,647,5 307,209,4 382,344,2 697,82,5 617,595,4 94,464,3 566,483,4 449,315,4 449,165,5 683,160,3 626,68,3 286,99,5 408,191,4 98,146,5 536,63,3 620,240,4 201,268,4 641,147,5 494,142,1 40,167,5 601,987,4 504,587,5 654,671,2 294,236,4 449,124,4 578,285,4 616,174,4 616,572,4 685,179,5 565,191,5 608,287,2 404,100,1 89,217,5 70,173,2 641,67,3 108,228,5 598,1314,4 345,422,4 451,153,5 617,238,3 532,695,3 449,534,3 649,285,3 532,203,4 329,968,5 345,841,1 636,407,5 681,49,5 129,28,3 93,227,4 542,7,4 475,79,3 197,433,3 636,254,3 497,513,4 456,404,5 221,376,1 623,740,4 581,267,4 392,1434,3 86,152,5 669,944,4 55,500,3 505,91,3 434,565,4 497,886,3 654,212,4 681,540,3 647,398,4 115,677,3 110,301,5 13,264,3 456,731,4 307,495,3 471,568,4 642,180,3 594,928,2 58,370,4 505,223,1 649,174,4 504,203,3 621,297,4 681,121,3 386,294,3 13,922,5 144,342,5 708,218,4 144,897,1 620,267,4 631,185,5 703,380,3 115,514,4 658,658,3 327,1106,3 706,255,4 663,80,5 685,171,4 232,632,5 180,459,1 706,715,2 158,475,5 659,624,3 693,195,5 261,282,3 473,106,3 649,142,5 491,478,3 408,660,5 451,630,4 37,400,3 265,320,3 534,152,4 392,809,4 524,14,4 477,68,3 434,238,4 662,272,4 91,257,4 532,290,3 89,132,5 523,929,3 627,304,5 617,3,2 552,134,4 398,543,2 523,71,4 22,143,3 550,594,2 434,2,3 147,226,4 505,390,2 333,878,3 351,55,5 608,124,4 549,120,5 275,183,4 53,99,5 203,315,4 572,478,4 503,650,4 473,211,4 166,287,3 17,703,3 692,1521,3 400,565,5 406,247,4 591,404,4 12,572,3 529,581,4 584,60,4 233,415,4 82,224,3 540,1184,2 360,793,3 371,442,4 
455,225,2 664,297,3 425,49,4 567,854,1 505,240,2 324,646,5 256,274,4 693,228,4 551,285,4 415,1440,3 127,366,4 568,825,3 633,410,4 7,226,4 42,167,4 704,826,4 520,55,4 345,384,5 647,174,3 200,316,3 312,299,4 587,93,2 391,704,5 324,31,3 120,155,4 587,418,5 494,789,3 704,399,4 73,275,4 621,46,3 266,154,3 576,406,4 692,603,3 473,167,3 618,49,4 589,13,5 193,782,2 419,602,4 664,14,4 275,545,3 633,125,3 431,814,3 576,0,5 531,635,5 568,978,3 268,507,4 665,428,5 388,505,4 206,181,3 652,508,4 249,1425,5 614,302,5 384,45,5 670,627,3 275,312,5 453,88,1 296,983,1 91,582,3 425,704,5 600,46,3 377,1034,3 209,233,4 670,52,3 486,741,5 91,635,3 477,159,2 406,714,4 708,563,1 617,122,2 299,880,5 623,875,3 361,244,4 59,6,5 681,1219,4 664,171,4 478,107,4 641,768,5 129,1278,4 513,135,4 351,38,5 234,461,3 377,409,3 544,819,3 58,197,5 41,63,5 644,86,4 391,270,1 456,64,5 15,475,3 536,242,1 618,1015,4 532,190,4 710,420,4 617,672,3 631,482,5 534,220,3 43,101,2 649,967,4 416,105,2 147,417,3 415,736,3 660,51,4 602,228,4 158,275,5 496,730,3 378,653,5 665,473,5 428,699,3 659,526,3 319,231,4 560,577,3 43,6,5 663,449,3 122,22,4 637,237,4 654,1016,3 664,507,2 292,637,4 405,150,2 377,317,5 397,46,3 638,723,3 453,1034,3 124,116,3 473,510,5 404,447,4 108,80,2 157,1066,4 706,525,1 654,727,2 408,605,4 453,236,4 524,150,5 307,202,5 513,472,3 453,878,4 467,654,5 404,1270,2 632,321,3 631,654,3 505,67,4 400,142,4 662,1072,3 116,595,3 203,321,3 635,739,4 587,207,3 634,300,3 238,68,1 448,1141,4 182,201,4 653,293,3 232,497,5 649,3,3 654,96,3 708,63,5 536,134,5 560,211,3 708,768,3 706,152,3 199,581,4 173,720,2 502,274,5 683,217,1 620,745,4 654,866,4 312,141,3 498,294,2 500,1066,5 234,49,5 654,116,2 681,96,4 654,148,4 666,317,5 561,1038,4 462,1196,4 388,1035,2 629,69,2 441,26,2 600,171,4 620,173,3 344,130,4 86,925,4 305,282,3 71,401,4 592,68,5 247,249,3 27,184,5 580,180,3 57,461,4 657,97,4 587,124,3 90,749,5 647,67,1 621,45,4 626,1073,3 626,257,4 709,626,4 625,947,1 479,212,5 182,227,4 641,194,3 624,190,3 360,450,3 607,422,4 523,577,5 585,378,4 416,392,4 631,80,5 497,182,4 575,256,4 12,760,4 591,117,3 336,180,2 428,337,3 601,126,5 607,752,5 508,753,1 591,1096,4 129,293,5 337,524,4 707,470,4 415,26,4 38,293,4 58,1047,4 278,30,3 692,471,3 597,291,4 487,195,3 664,110,4 492,356,5 333,743,3 324,1148,4 229,490,3 457,695,3 166,236,4 392,289,3 453,482,3 604,299,2 115,180,4 465,257,4 231,249,4 527,401,4 642,234,4 44,1058,2 706,675,4 457,525,5 633,312,5 531,334,3 477,299,3 317,178,4 498,604,1 494,422,5 441,116,3 434,68,4 706,865,2 48,170,4 693,418,4 441,32,3 59,203,4 248,172,5 704,297,5 638,136,3 489,92,4 358,117,3 451,946,5 188,177,5 264,755,4 252,432,3 693,299,4 69,141,3 428,180,5 84,1135,3 652,215,3 505,172,4 644,402,3 536,1450,3 103,870,2 660,356,4 617,355,2 115,1038,4 343,927,2 539,1047,4 692,426,4 205,336,2 81,366,4 398,575,3 177,734,5 495,432,4 560,1448,5 591,120,4 471,372,4 398,450,3 99,325,3 200,345,4 617,370,3 587,780,2 167,1011,5 317,320,4 267,524,4 614,517,4 247,0,3 353,240,3 69,407,4 560,46,4 568,15,3 253,402,3 300,575,4 681,272,4 613,716,4 614,461,4 704,621,4 331,741,5 140,1046,4 478,176,4 665,1044,4 605,1279,2 267,239,2 569,689,3 300,450,4 434,158,5 560,173,4 683,87,4 471,575,5 377,78,4 589,129,1 372,379,4 507,167,4 449,1207,3 617,214,4 591,1280,3 434,28,3 220,1266,3 492,70,5 518,331,3 455,134,4 541,98,5 647,194,5 59,214,4 55,226,3 591,921,3 698,0,3 416,894,3 392,167,4 462,267,4 473,44,5 494,401,3 404,142,5 685,503,5 617,1031,2 12,669,3 528,270,4 462,471,3 578,432,3 642,99,5 233,12,3 58,211,4 200,144,3 317,721,4 263,6,5 229,265,4 
412,331,3 685,356,5 489,223,2 621,704,3 233,300,3 585,253,4 520,754,3 654,270,3 599,1187,3 233,222,3 703,315,4 486,229,5 503,630,4 435,1047,2 449,392,4 380,131,5 379,569,3 372,82,5 711,746,3 487,611,4 487,514,4 662,923,3 449,844,4 611,236,3 18,201,4 193,418,2 641,789,4 682,263,2 560,613,3 616,428,3 193,1043,2 560,404,2 473,496,5 189,325,4 397,494,4 658,201,4 342,178,5 703,603,5 197,68,4 654,1066,2 526,497,4 654,571,2 565,958,4 576,207,4 421,285,5 607,317,4 641,698,5 188,933,2 620,95,5 558,501,4 505,238,3 568,14,4 655,346,4 513,214,4 432,272,3 398,563,3 652,238,5 225,202,5 600,409,4 455,420,3 536,433,3 458,331,3 21,180,5 319,88,4 428,280,3 486,402,4 100,287,4 632,384,4 672,346,4 533,0,5 216,171,1 296,167,5 547,1277,4 523,491,3 647,384,5 426,331,5 647,289,3 465,312,5 329,14,5 563,256,4 101,94,4 693,683,4 551,247,4 90,1125,1 647,826,3 151,172,5 392,654,3 693,192,4 637,240,3 393,256,4 587,130,5 664,306,3 668,520,4 455,844,3 560,187,4 37,408,5 499,558,4 654,1196,3 157,865,2 581,471,4 188,164,5 647,721,3 343,284,5 404,1035,1 504,0,3 679,275,5 335,1078,1 505,194,4 660,1044,3 428,89,4 209,754,3 649,312,4 641,14,5 185,355,5 600,90,5 599,529,4 326,895,5 536,320,3 392,83,3 346,158,4 464,135,4 302,745,4 532,686,2 540,1035,2 513,312,5 467,69,3 625,288,1 591,191,5 699,143,4 464,171,3 416,1039,2 584,1318,2 342,1007,4 326,85,4 523,442,4 406,314,4 566,292,5 200,184,5 531,553,4 388,284,5 452,76,3 17,188,5 665,655,4 247,179,3 188,150,5 547,716,4 649,366,2 652,747,5 692,142,4 663,209,4 84,1173,3 353,732,3 504,293,3 645,318,3 652,404,3 576,672,3 192,152,4 177,156,5 12,452,2 313,774,3 682,285,2 485,332,2 478,269,4 675,750,4 404,1414,1 504,209,4 327,848,3 343,70,3 641,248,5 384,172,4 388,519,3 548,471,3 483,172,5 245,671,4 654,1251,3 243,69,4 642,76,3 127,219,1 17,630,5 576,209,3 275,41,4 56,287,4 532,293,4 694,306,4 505,424,4 665,517,4 392,131,2 607,195,5 189,147,4 560,129,4 386,773,3 315,632,4 93,709,3 714,227,3 180,108,1 285,780,4 700,315,5 377,481,4 587,84,5 549,300,2 624,178,4 425,22,4 59,8,5 64,475,3 99,268,4 626,183,4 498,901,5 370,68,5 310,403,3 91,381,4 586,268,3 570,123,4 176,257,3 12,814,4 491,130,3 641,624,3 560,11,5 665,185,2 253,1059,3 455,1009,5 621,165,5 601,747,3 576,948,2 712,309,4 310,754,4 560,664,3 536,478,4 233,510,5 667,285,4 628,264,4 194,275,4 287,173,4 473,181,5 111,750,4 666,1100,3 148,1294,3 663,168,5 607,968,5 712,538,3 150,198,3 270,130,4 714,201,5 585,671,2 499,395,3 428,495,4 572,177,4 697,496,3 428,1217,3 476,755,4 386,433,5 162,304,2 591,293,3 638,706,5 391,249,3 496,121,1 180,1334,1 532,503,4 506,747,5 398,627,3 587,71,4 436,1226,3 63,649,3 386,230,3 449,608,5 333,1131,2 503,410,4 180,682,1 398,823,2 496,27,3 134,202,4 618,257,5 550,69,4 652,21,5 681,657,4 483,50,4 533,762,4 191,24,4 435,584,3 328,294,4 353,59,5 411,650,4 647,3,1 639,3,4 70,428,4 689,84,1 455,96,4 660,530,4 15,124,3 653,968,5 436,285,2 706,639,2 706,301,4 560,131,2 22,216,2 326,446,4 444,761,1 715,317,5 612,27,3 362,68,3 378,309,4 568,747,2 229,283,1 621,736,5 526,99,5 537,274,4 642,507,4 143,281,4 382,434,4 505,714,2 658,316,4 505,801,4 626,433,4 235,99,3 93,812,5 658,180,3 639,185,5 189,507,3 647,686,1 568,224,3 360,217,3 712,1655,2 605,55,5 633,339,4 681,1427,3 148,1295,3 23,78,4 111,293,3 405,135,4 502,18,5 424,0,2 449,1090,4 654,165,3 166,511,5 346,404,4 292,191,5 607,49,1 13,190,4 560,318,2 153,210,4 594,323,3 199,621,3 12,409,1 535,745,5 698,320,3 157,407,5 428,3,4 406,560,4 56,239,2 404,1404,1 667,353,4 384,1251,5 715,482,5 565,1004,5 415,424,4 706,477,4 10,28,3 508,288,2 12,774,4 483,391,4 621,557,2 
593,987,2 496,544,3 436,738,3 585,435,2 681,68,4 477,195,3 20,563,3 518,1292,5 498,299,4 531,306,4 523,603,4 600,442,4 592,469,2 681,190,3 678,180,5 550,543,4 664,426,5 323,275,5 654,1474,3 696,272,5 37,410,3 658,734,3 326,567,2 710,63,4 313,331,5 585,236,4 42,247,4 101,87,3 542,96,3 24,494,4 659,20,3 587,398,3 446,814,3 88,48,4 378,392,4 710,683,3 636,716,3 262,97,4 10,723,3 144,65,4 665,24,3 660,407,5 407,323,5 655,285,1 471,1109,5 304,432,2 346,942,4 9,68,4 560,446,3 669,614,3 602,1239,5 494,89,4 564,712,5 639,167,5 523,750,4 341,250,5 166,654,4 658,401,3 130,8,5 615,1312,4 223,659,4 660,215,5 671,280,3 403,306,4 5,78,3 560,190,3 680,897,4 287,650,4 659,201,2 663,734,4 63,558,3 94,21,4 544,78,4 523,128,5 177,459,2 388,201,5 415,394,2 97,501,2 452,551,2 594,921,4 436,1097,3 290,1078,2 632,275,3 570,63,4 151,152,4 526,176,5 448,741,3 302,282,3 632,316,3 544,377,3 525,49,5 25,455,1 4,437,1 506,333,5 693,194,4 714,738,2 587,153,4 86,801,4 86,46,3 586,1482,4 379,203,2 522,115,5 624,432,3 28,267,5 300,958,4 707,267,3 457,895,5 471,68,5 275,26,3 372,23,4 417,345,2 662,984,3 591,247,4 641,234,2 449,1219,5 683,237,3 621,576,2 101,521,3 522,865,5 560,222,4 89,835,5 329,282,5 632,650,3 245,549,3 476,1050,5 266,229,4 660,236,4 590,99,5 605,207,3 650,241,5 647,442,2 268,195,1 677,110,4 513,199,2 108,544,2 369,422,4 179,317,5 584,969,3 592,161,5 342,746,4 0,198,4 138,675,4 505,659,3 639,63,5 76,131,3 292,1017,3 657,723,3 559,1013,4 473,691,4 662,740,4 398,1539,3 711,730,5 659,1077,2 369,612,2 243,240,4 392,257,4 431,6,2 542,23,3 659,160,1 627,299,5 444,339,5 523,78,4 398,55,3 180,1094,1 242,723,3 523,318,4 652,377,3 496,193,3 545,321,4 62,712,3 560,1102,4 395,236,4 646,704,4 486,650,5 715,90,5 654,392,2 307,76,3 373,228,5 559,357,3 587,558,5 345,210,4 654,957,3 657,510,4 275,374,1 408,194,4 609,698,2 513,258,4 366,669,4 524,287,4 672,310,4 325,160,3 416,497,4 659,70,2 93,200,4 304,143,2 167,279,4 382,483,4 649,222,3 392,1181,3 670,1221,3 65,285,1 345,742,2 715,274,5 647,861,1 248,274,4 453,258,4 180,23,1 238,920,5 599,449,4 579,180,5 670,1238,3 188,653,3 409,310,3 697,49,5 411,80,2 291,198,5 342,384,3 155,514,3 647,635,4 464,168,4 371,445,4 685,203,4 590,46,3 654,320,3 600,377,2 710,651,4 200,85,4 652,1,1 275,232,3 574,426,4 289,1034,4 248,257,5 416,830,2 647,225,4 659,49,4 15,944,4 170,689,3 16,470,2 275,551,3 659,929,2 312,418,3 600,1134,2 492,596,4 129,37,4 626,176,5 592,158,4 652,100,3 544,182,4 647,102,1 473,513,4 494,659,3 698,1059,3 698,323,4 647,433,5 48,1080,3 626,545,3 547,291,4 638,637,4 682,747,3 385,322,4 641,411,2 550,695,2 583,228,3 200,136,4 300,185,4 621,551,2 649,268,4 11,173,5 392,560,3 378,55,5 616,643,4 312,660,4 534,685,5 312,101,3 441,6,4 559,88,5 275,194,5 714,409,4 143,962,4 293,878,4 103,14,5 455,558,3 600,1046,1 307,21,4 333,898,4 523,70,3 384,556,2 663,672,3 547,927,3 420,602,4 445,305,3 520,27,3 229,131,5 377,1,2 618,299,5 12,654,5 311,478,5 485,1128,4 523,648,4 478,1006,4 534,656,5 455,8,3 108,237,2 502,602,3 660,167,5 269,868,1 380,58,3 491,241,5 494,218,4 666,185,4 681,398,4 384,176,4 617,675,2 659,568,2 460,303,4 531,28,3 607,274,5 531,96,5 275,708,4 670,257,5 197,651,3 669,227,5 330,1100,4 551,256,3 416,201,4 333,123,5 405,574,1 638,65,3 401,134,4 692,654,3 428,248,4 587,809,4 388,177,4 12,890,1 312,501,3 485,474,4 68,288,4 660,427,4 497,593,2 659,602,4 294,69,5 654,1102,3 718,672,3 73,12,4 93,71,3 620,272,4 505,748,4 404,80,3 298,726,4 457,194,4 42,7,4 665,466,4 592,658,5 504,227,2 390,257,3 343,693,5 534,654,4 588,299,5 532,791,3 470,767,3 560,429,3 222,618,2 
329,707,3 561,683,4 144,1272,5 129,391,4 653,755,4 478,510,5 450,876,4 536,3,2 559,182,5 275,66,3 710,719,3 82,318,1 471,50,5 623,287,4 213,68,2 378,177,5 296,199,3 449,1270,2 662,924,3 654,290,3 576,530,4 307,195,3 502,9,5 682,312,2 343,708,5 100,124,4 525,331,2 0,56,5 681,50,5 657,181,5 477,958,4 711,391,5 144,453,1 453,288,3 597,897,4 449,78,4 213,190,4 428,536,4 647,923,1 81,945,2 641,70,5 415,833,3 704,195,4 637,99,3 660,422,4 701,345,1 589,281,2 415,280,5 693,8,5 6,550,1 621,379,4 206,241,4 685,184,5 513,300,4 523,572,4 183,475,2 641,1412,3 692,38,3 233,509,4 500,1009,4 663,677,2 64,401,4 449,1115,3 341,318,4 175,1007,4 560,430,2 621,684,2 63,418,2 435,366,4 720,86,3 708,226,2 444,1096,1 267,76,2 561,805,1 342,428,4 523,708,5 525,878,3 59,392,4 480,162,4 37,1030,5 454,68,4 620,419,4 62,812,5 338,1266,3 322,543,4 39,327,3 496,567,3 106,263,3 269,573,3 449,277,5 658,565,3 0,49,5 428,392,3 658,669,2 275,354,3 268,447,2 706,486,2 604,520,5 623,285,5 492,259,1 654,285,3 639,381,4 658,491,3 661,5,5 310,55,5 550,143,5 618,330,4 392,870,3 673,117,3 268,132,3 159,210,4 307,49,5 638,198,3 197,381,4 626,91,4 158,300,2 538,44,4 404,858,1 637,117,3 408,1448,5 144,180,5 668,339,4 607,15,2 713,281,4 377,917,3 453,656,3 371,695,4 551,242,3 538,356,4 536,413,4 207,65,4 222,184,2 560,120,3 544,742,3 560,567,3 698,455,1 668,522,4 647,1046,2 173,370,5 495,1285,2 234,1450,4 307,1134,4 509,242,3 497,442,3 447,270,4 439,1193,5 106,311,4 502,426,5 497,606,3 685,204,5 10,122,3 378,565,4 636,147,3 698,759,3 651,698,5 353,428,3 428,431,4 542,213,3 327,180,4 157,226,2 451,317,5 698,982,3 452,121,3 233,618,2 696,149,5 689,66,4 502,450,4 266,1335,1 641,958,5 449,968,4 494,90,2 576,264,5 708,194,5 592,865,5 173,243,4 675,180,5 643,257,4 398,146,5 558,321,4 397,473,4 576,581,4 325,238,3 82,392,5 487,1038,4 696,332,3 338,80,5 525,749,4 625,331,3 241,267,5 681,181,4 551,239,2 333,507,3 278,1320,4 716,286,5 372,1038,4 64,68,3 718,658,4 25,273,3 10,41,3 711,1177,4 611,117,3 531,505,5 681,79,1 683,519,4 272,346,4 424,99,4 536,287,2 453,1202,2 659,1034,2 21,16,4 663,222,4 711,384,5 304,171,4 428,725,2 591,420,5 560,743,3 127,489,5 381,134,3 486,41,3 714,230,3 677,180,3 410,434,3 278,516,4 266,99,5 449,276,3 283,332,3 462,257,5 327,983,3 434,1410,1 647,392,4 663,804,5 495,97,4 373,819,4 406,202,4 161,236,4 592,780,3 232,11,2 229,293,5 708,560,3 535,70,5 532,283,1 216,232,4 624,213,4 715,234,2 473,526,5 717,755,5 449,1247,4 69,142,5 473,229,3 711,365,5 187,287,4 456,631,5 681,1304,3 695,1175,4 698,15,3 683,185,4 576,99,4 175,128,3 366,6,5 654,182,4 689,173,4 670,684,5 98,930,2 658,674,4 216,635,2 360,275,4 683,120,3 636,410,1 536,781,3 497,171,3 406,222,4 344,80,4 537,143,4 538,126,3 720,878,4 502,131,5 91,267,4 634,874,2 158,627,3 654,1225,3 559,110,3 421,128,4 654,1107,3 12,885,5 706,165,3 197,121,1 404,959,1 416,43,2 536,654,3 342,9,4 368,899,4 556,268,3 343,3,4 714,180,4 284,221,4 482,115,3 452,257,4 715,142,5 143,750,4 715,117,2 333,202,4 471,270,5 435,747,3 711,96,5 317,209,4 12,230,3 1,283,4 580,282,2 274,471,3 264,627,4 552,614,5 57,602,5 636,49,4 404,1406,1 704,426,2 604,292,3 290,128,5 641,1472,4 101,225,2 659,403,2 250,209,4 58,583,4 51,106,4 685,78,4 455,52,4 372,231,3 115,1133,4 587,132,5 721,545,3 245,66,2 270,181,3 664,70,4 710,94,4 454,16,3 697,167,3 654,1193,5 633,818,2 172,323,5 189,596,2 706,153,3 465,143,5 243,125,4 657,509,3 658,836,3 6,624,3 687,681,5 248,247,5 653,110,4 482,379,3 307,1044,4 653,316,4 486,215,4 456,558,4 200,1130,5 6,203,5 50,183,3 641,540,5 634,261,5 331,627,4 715,195,5 715,158,4 
310,185,3 405,193,5 550,553,5 550,216,1 504,987,3 457,684,3 641,249,5 716,747,3 649,662,4 658,519,3 536,198,4 715,193,5 654,922,3 415,712,4 420,652,3 144,163,4 697,528,5 626,283,2 633,281,4 681,762,4 502,614,5 454,933,3 595,312,5 693,609,4 665,4,2 58,429,5 401,470,4 221,450,3 393,229,3 503,392,3 665,660,4 55,232,1 362,434,3 531,124,5 659,124,3 655,315,3 538,288,4 12,156,3 449,190,5 654,24,3 654,1127,3 289,108,3 653,167,4 434,672,3 50,654,3 585,272,5 185,683,4 711,71,4 654,401,2 86,577,3 238,170,5 320,1049,3 636,474,1 12,895,5 486,403,4 681,1015,2 714,215,4 196,878,4 697,94,3 416,141,3 616,135,3 706,282,4 658,384,5 424,61,4 616,496,3 353,380,5 537,422,4 486,577,3 614,948,3 404,735,5 307,941,3 485,13,5 40,57,3 377,94,4 406,187,3 600,762,5 675,270,3 425,647,3 720,983,3 456,782,3 108,233,4 485,136,4 101,268,2 420,182,5 457,466,4 617,968,3 696,301,5 183,339,5 681,741,3 398,754,2 534,70,4 706,1280,4 486,48,4 490,695,3 633,273,3 294,83,2 360,165,4 392,152,3 637,227,3 404,463,1 290,363,3 503,64,4 633,923,4 333,22,4 500,146,3 255,987,4 513,646,3 720,686,3 177,1046,2 200,58,4 523,303,4 459,1379,3 481,314,3 692,52,4 487,134,4 20,378,3 436,811,3 654,19,3 353,527,5 302,490,4 392,923,4 343,10,3 96,22,5 325,366,3 710,203,3 587,727,3 591,321,1 446,259,2 711,733,4 479,208,4 621,156,4 664,233,3 6,401,5 566,602,5 377,774,3 711,1118,4 253,385,2 551,281,3 592,240,5 716,24,5 398,67,3 564,969,4 689,97,5 698,543,4 665,78,3 61,401,3 177,483,4 151,1135,5 387,299,4 621,808,2 685,63,5 344,299,3 569,300,3 400,274,4 266,126,5 647,217,3 566,429,4 40,356,4 647,55,1 313,157,3 524,126,3 669,649,2 436,495,4 532,14,4 493,356,5 377,596,3 292,179,5 621,1038,5 43,221,4 306,162,3 621,141,3 629,410,4 590,126,4 447,306,2 524,474,3 81,70,4 302,297,4 306,735,3 425,482,5 406,225,3 150,132,5 582,257,4 397,202,4 343,296,4 304,268,4 63,234,4 536,291,2 692,22,4 385,839,5 496,596,3 621,407,5 539,819,3 235,63,5 221,746,2 544,490,3 428,20,2 216,585,2 636,1225,2 660,500,4 69,87,4 317,865,4 63,184,4 681,378,4 594,329,4 565,239,3 663,512,4 716,323,3 186,422,4 415,685,5 193,673,2 193,156,4 364,475,4 372,576,1 654,171,4 302,126,5 697,477,4 566,674,4 649,193,4 93,543,3 590,661,3 723,304,3 151,870,3 238,651,5 323,330,4 726,122,3 214,55,5 415,475,5 675,353,4 424,749,2 373,402,2 108,754,5 520,1243,3 564,165,4 649,209,3 177,63,5 378,115,4 146,303,5 632,251,3 720,288,3 279,7,5 531,227,5 404,237,5 704,559,2 708,28,3 637,448,2 655,874,2 497,805,3 300,801,2 477,654,3 68,181,4 42,565,3 494,415,5 665,301,5 197,163,3 683,933,3 654,874,3 714,32,3 605,561,4 616,666,2 59,135,4 471,312,5 621,495,4 356,472,3 693,670,3 649,968,3 452,185,4 536,740,2 12,305,3 641,28,5 675,747,4 710,268,5 94,664,2 664,215,4 647,1091,1 190,315,5 197,107,3 594,843,4 101,257,4 714,283,4 576,84,3 428,481,3 673,281,5 708,21,5 142,1037,3 390,167,4 187,143,3 486,726,3 492,409,4 556,326,3 453,391,2 6,544,2 59,744,5 623,327,4 465,517,4 347,973,4 620,623,5 387,257,5 536,518,3 720,332,3 364,136,3 279,228,3 563,343,4 616,773,1 724,1196,3 393,108,4 683,370,2 59,69,4 592,279,3 700,296,4 505,760,2 507,221,3 513,485,3 537,142,3 37,77,5 6,651,3 95,82,3 487,317,4 600,63,4 304,707,3 534,97,2 706,530,5 600,124,1 86,71,3 180,475,4 502,442,5 345,63,4 238,179,5 163,404,5 632,565,3 150,81,3 15,272,5 496,576,2 523,512,4 415,839,4 397,755,3 487,95,3 12,788,5 717,840,4 639,125,4 681,545,3 75,191,5 313,317,5 681,156,4 249,321,3 659,131,3 356,454,5 664,368,4 532,149,3 607,327,4 459,275,5 532,238,3 279,1216,5 572,143,4 715,152,4 333,739,3 394,153,5 568,235,4 628,181,5 290,984,3 478,271,4 278,1490,5 705,244,3 
290,93,2 93,1010,4 406,192,3 641,596,4 108,404,5 232,587,5 544,392,4 278,545,3 215,168,3 665,514,5 679,407,5 559,136,4 455,426,4 718,290,3 576,68,4 616,630,2 707,1050,4 657,30,3 177,171,4 642,68,3 591,1084,3 275,173,5 587,738,4 654,128,3 692,24,4 588,257,2 453,527,4 206,469,3 362,815,1 515,168,5 500,921,4 86,422,3 706,82,3 708,822,3 578,64,3 457,596,3 710,342,3 566,581,3 275,345,4 638,268,3 423,287,1 552,185,3 23,11,5 453,299,4 434,719,2 706,1175,2 726,677,3 294,185,5 681,69,4 642,172,4 302,123,4 107,404,3 448,336,4 690,0,5 222,242,3 473,485,4 444,143,3 560,424,4 2,332,2 496,65,3 550,1216,1 496,224,3 654,959,3 567,461,4 636,272,3 673,928,3 392,244,3 714,755,2 565,11,4 621,448,2 706,426,4 621,211,3 416,233,4 17,222,5 346,191,4 451,155,4 619,673,3 140,596,4 405,0,4 5,172,5 659,433,3 647,372,3 704,264,5 180,717,1 61,1076,3 726,440,2 652,764,1 416,94,5 12,278,5 681,187,4 498,482,5 662,283,4 544,745,4 268,648,2 378,745,3 710,1073,3 675,312,4 726,116,3 653,180,3 649,647,3 696,682,1 726,180,3 343,515,5 471,28,5 654,1254,3 191,812,4 82,863,4 84,656,4 471,62,4 662,404,3 147,167,5 270,846,4 624,944,3 638,190,3 89,481,5 139,872,2 544,96,3 323,410,5 488,312,4 5,267,3 654,1343,3 553,863,4 681,650,4 719,871,3 563,471,4 567,300,1 431,0,2 428,140,3 415,27,5 697,209,5 449,454,4 520,24,2 161,78,4 631,356,4 574,49,2 717,974,2 140,345,1 559,99,5 212,163,5 587,394,4 477,25,5 711,212,3 644,955,4 406,249,4 435,264,3 679,247,4 728,327,3 333,8,4 536,46,4 715,706,4 693,198,5 622,182,3 692,332,3 48,427,5 693,49,5 726,104,1 298,512,4 681,221,4 531,770,3 434,976,2 295,126,5 198,312,4 6,229,3 618,117,5 327,1517,3 658,698,3 550,327,5 465,345,3 386,741,2 173,904,3 674,530,5 398,178,3 297,422,5 649,658,3 717,221,4 475,84,2 523,639,1 649,232,2 641,390,4 379,1115,4 135,126,5 692,431,4 647,23,3 726,719,2 144,893,1 600,150,3 585,237,2 392,376,3 553,219,3 233,1368,3 291,498,5 658,761,3 662,279,3 726,1118,3 520,190,4 29,682,3 649,194,4 141,6,4 498,131,4 636,149,1 591,180,3 522,532,4 715,835,4 612,126,4 654,655,3 710,482,5 51,918,5 140,929,4 605,747,3 501,299,2 608,318,1 642,154,2 539,293,4 477,450,5 404,1106,1 599,240,5 617,954,2 556,886,3 566,356,2 726,67,4 338,1134,2 710,228,3 415,706,4 614,274,4 500,839,4 725,354,3 55,228,3 485,1085,3 302,955,4 589,8,3 158,404,5 703,303,2 681,580,2 715,601,5 449,196,5 560,179,4 536,580,3 415,814,4 621,402,4 638,273,1 594,410,3 654,174,3 442,342,5 638,795,1 714,174,3 80,185,5 497,424,2 163,281,5 478,186,4 541,624,3 698,677,3 693,199,4 706,139,2 654,329,2 58,190,4 283,686,3 113,473,5 114,316,5 665,409,2 654,515,2 479,461,4 566,810,4 646,249,3 675,687,1 641,558,5 642,419,4 337,487,5 621,142,4 645,257,3 567,602,5 82,234,1 314,22,5 658,804,5 653,55,4 723,341,3 255,677,5 278,129,1 578,675,3 282,865,3 463,983,2 452,11,5 326,98,4 499,568,4 565,392,2 683,624,3 726,124,4 599,187,4 173,69,5 668,507,3 233,172,3 643,1609,3 344,8,4 633,286,3 391,614,5 300,677,2 449,85,4 242,150,3 101,199,3 556,321,3 647,478,4 501,750,3 587,141,5 592,845,2 428,78,4 20,852,5 471,391,4 599,540,1 327,722,3 654,1641,4 338,938,4 436,203,5 523,110,5 607,212,4 449,431,4 91,427,4 290,92,4 674,508,5 637,264,5 578,1445,2 86,576,4 273,257,5 408,178,5 503,484,4 53,251,3 436,317,4 592,475,2 678,326,4 698,748,3 560,276,3 428,418,4 664,96,2 556,333,4 721,123,4 14,930,1 330,301,5 180,304,2 554,761,4 526,424,4 644,202,4 692,68,3 685,233,4 109,160,5 512,249,3 129,474,3 629,254,5 728,688,4 302,280,3 657,69,3 654,37,2 711,464,4 246,63,5 12,443,4 390,212,4 590,486,4 610,332,4 620,332,4 727,507,4 211,268,3 456,628,4 255,459,4 312,451,3 56,677,3 
541,12,4 649,678,3 652,281,3 115,895,2 396,687,1 626,120,3 589,514,3 707,534,2 617,965,4 415,989,2 613,1133,2 396,988,1 647,366,3 418,404,3 425,132,5 42,472,3 562,117,4 449,286,4 656,0,3 587,601,3 560,709,4 692,1231,2 607,654,5 659,152,4 452,23,4 12,796,5 486,746,4 652,475,2 457,207,4 681,123,2 682,349,2 404,376,1 405,227,3 544,257,3 200,636,3 96,99,2 467,194,5 404,1571,1 180,291,1 663,171,5 428,177,4 687,338,5 550,271,5 678,120,2 12,560,1 268,121,1 91,734,3 652,457,2 292,264,3 566,492,4 151,283,5 267,256,4 580,8,5 715,159,2 299,832,4 485,274,4 659,356,2 255,242,4 285,69,5 561,97,4 396,133,5 532,30,3 245,1187,3 657,717,3 715,498,4 614,305,4 706,466,4 701,293,1 192,579,4 566,173,1 262,601,4 523,132,5 454,746,4 229,450,4 435,440,3 285,945,3 404,1217,5 271,653,5 424,315,4 681,1436,2 726,366,3 293,251,4 654,186,5 587,1427,5 591,337,2 580,220,2 455,78,3 690,293,4 233,472,5 454,11,3 478,214,3 332,268,2 636,1101,3 471,239,4 107,739,3 726,216,3 642,93,4 326,310,3 545,689,2 256,59,5 22,193,4 703,210,5 388,395,3 108,635,5 602,384,4 626,460,3 350,311,5 681,28,2 34,747,4 726,932,1 636,743,4 456,930,2 302,844,4 702,292,4 313,52,1 683,110,4 270,72,2 657,942,3 436,613,5 428,231,4 100,978,2 353,517,3 410,207,4 547,312,5 673,24,4 547,590,3 710,1465,4 636,404,1 566,474,4 715,415,3 542,37,3 693,120,5 127,741,3 416,1085,4 215,78,4 219,331,3 351,91,3 477,221,2 120,134,5 560,355,1 633,49,4 660,309,2 86,1178,3 609,270,1 312,330,3 505,577,3 144,330,3 48,76,1 513,282,4 298,1035,2 307,505,4 536,197,2 342,1038,5 628,63,5 720,402,4 344,292,4 713,251,3 708,27,5 605,190,5 429,257,4 715,469,4 615,259,3 158,249,3 112,288,2 631,403,5 183,491,4 688,150,3 390,660,5 587,416,5 657,514,5 279,574,2 710,698,5 278,409,5 664,132,3 268,1427,5 586,904,3 591,482,5 306,167,5 622,297,2 478,601,4 186,22,4 664,193,3 473,49,5 331,293,5 628,146,5 646,133,4 304,0,5 428,467,3 649,779,2 716,292,5 715,502,3 578,708,5 7,176,4 663,480,5 559,428,3 342,126,5 467,21,5 268,508,4 482,273,4 584,170,3 647,577,4 706,185,3 618,256,3 647,264,4 654,59,3 72,267,3 570,173,4 393,410,4 88,180,4 528,309,4 569,242,1 489,474,4 683,236,5 683,93,3 513,341,1 143,650,4 711,398,5 534,446,5 711,541,4 482,479,3 710,136,5 566,835,3 242,279,1 726,584,2 352,332,4 737,187,3 550,89,1 647,116,2 617,90,4 694,881,4 518,338,3 467,24,5 600,819,1 487,658,3 536,82,4 55,143,5 693,274,4 40,275,2 456,189,5 658,660,5 407,269,5 313,64,4 373,281,5 492,187,5 649,738,2 169,257,3 388,1196,3 642,48,3 718,125,2 698,111,3 40,1038,3 688,470,4 471,100,5 20,982,2 698,234,3 10,24,3 472,318,3 77,236,5 642,1073,2 586,339,5 664,126,4 452,93,4 617,43,4 605,238,4 233,55,3 576,216,5 382,513,5 737,650,4 168,682,3 566,520,3 310,580,3 540,1073,1 675,343,5 465,21,5 670,577,3 338,143,3 647,614,4 618,54,1 80,268,3 587,314,4 726,207,4 558,68,5 449,806,4 100,277,2 462,743,3 720,316,4 47,288,1 385,272,3 620,70,3 678,356,5 486,0,5 503,69,3 560,9,3 639,248,4 473,684,3 378,185,5 689,203,3 598,974,5 706,495,3 670,575,5 234,970,4 654,227,3 168,133,5 478,471,1 455,1056,3 617,116,5 497,155,5 541,79,3 269,199,5 467,149,5 654,609,4 455,267,5 641,1284,4 585,249,3 13,761,3 75,5,5 332,173,5 647,567,5 545,249,4 378,62,2 659,79,1 718,65,3 304,299,3 341,97,3 353,142,4 342,66,3 415,1057,5 234,152,4 221,677,3 513,526,4 478,197,5 658,1118,4 338,204,5 542,473,5 593,180,3 466,272,4 663,63,4 467,190,4 503,960,4 409,872,4 421,395,4 89,659,4 685,587,4 479,704,4 22,95,4 76,0,5 712,1430,3 591,220,5 631,422,4 654,1628,3 488,357,5 665,863,3 47,55,3 735,180,2 485,1170,3 719,257,4 261,277,3 550,330,5 503,87,3 568,1013,3 206,225,2 180,884,1 
733,81,4 275,381,4 175,288,3 143,356,4 587,845,4 269,664,4 727,281,4 653,283,4 546,331,3 338,1403,5 639,683,4 310,654,4 146,749,5 687,677,5 83,743,4 86,844,4 424,685,3 504,176,3 513,155,4 228,936,2 194,357,2 542,312,3 523,117,4 707,279,4 404,769,1 451,72,3 606,484,3 81,472,2 636,545,1 649,1125,4 670,181,4 373,411,4 434,447,3 369,135,4 41,37,3 436,401,2 536,5,2 549,327,5 498,886,5 523,381,3 415,210,5 200,64,4 373,234,3 706,1396,1 711,727,4 177,434,4 234,0,4 654,628,3 535,471,3 532,1046,3 720,680,3 700,303,4 658,142,5 469,149,5 424,208,2 657,317,4 681,747,3 652,415,1 183,654,3 525,327,2 307,587,5 660,708,4 280,689,5 542,301,4 560,370,1 605,627,4 449,50,4 326,662,4 98,650,5 392,948,3 108,160,3 345,1231,1 140,99,4 542,530,4 704,49,4 558,152,3 704,171,3 704,626,3 275,1480,2 531,345,5 654,716,1 638,154,3 222,534,3 617,691,4 617,275,3 238,631,5 694,337,2 649,221,4 405,610,3 183,590,3 125,680,5 441,216,3 446,468,4 391,344,4 372,201,3 193,28,2 587,482,4 344,302,4 465,181,4 209,683,3 634,885,4 78,257,5 456,587,5 50,135,4 232,317,5 312,27,3 189,695,3 573,309,4 641,728,3 42,417,4 589,286,4 654,149,3 726,691,4 729,297,4 235,96,5 687,331,5 547,1088,2 504,201,3 485,879,5 171,424,1 742,320,2 487,180,4 73,339,5 290,1208,1 278,925,4 398,78,3 532,355,4 77,879,5 375,327,3 697,176,1 99,257,4 538,302,5 124,7,4 621,596,5 243,25,5 40,237,5 692,182,2 109,325,4 576,227,3 221,570,2 29,251,3 99,899,4 624,646,4 685,169,5 455,120,2 681,355,3 93,285,4 706,169,5 200,159,5 471,1046,4 681,572,4 424,194,4 716,267,5 453,525,4 172,333,4 206,292,2 649,197,4 273,596,3 715,162,4 451,862,5 392,202,4 692,161,3 652,76,3 382,133,5 552,135,4 560,174,4 649,233,4 404,850,1 576,139,4 652,63,4 614,210,5 633,324,1 629,594,5 588,271,5 587,385,2 641,1218,4 605,156,4 300,67,4 540,1052,3 12,761,5 317,1047,4 150,0,5 715,156,3 377,754,3 444,236,2 576,7,4 565,53,3 270,746,3 527,209,5 285,197,4 520,1013,3 577,322,3 610,343,5 415,793,5 63,30,4 109,691,4 704,232,3 513,434,3 553,221,4 641,217,3 150,641,3 659,558,2 702,293,2 292,186,3 603,199,1 603,163,4 638,312,1 536,86,3 697,484,4 408,607,4 592,3,4 592,139,4 720,304,3 696,1058,2 82,827,3 674,304,4 527,184,4 585,760,3 621,88,5 532,196,5 654,777,2 275,28,3 41,467,4 536,31,3 526,168,4 670,404,3 540,1077,4 665,30,3 507,153,5 408,22,4 710,24,4 715,627,3 720,631,4 671,863,3 233,43,3 641,0,5 676,149,3 535,97,4 6,667,4 477,409,3 654,738,4 213,1016,4 453,1104,3 652,448,3 307,150,4 490,128,4 696,122,5 665,190,4 741,257,5 404,1440,1 6,634,3 453,741,3 289,190,3 180,931,1 706,1627,5 270,955,4 327,228,3 415,954,4 497,52,4 539,1013,4 333,256,4 144,677,2 736,427,4 707,0,5 452,1169,3 654,41,3 678,168,3 292,120,3 333,709,3 388,590,3 707,937,3 128,747,2 477,567,5 414,431,4 536,403,3 681,290,1 312,695,3 600,21,4 578,81,3 233,171,3 532,236,2 455,788,3 550,23,5 58,735,5 654,1461,3 438,292,3 682,320,5 650,267,2 530,747,4 275,218,4 686,335,2 279,70,4 253,415,4 296,210,4 89,489,5 526,465,2 118,1243,3 501,349,3 513,430,4 528,875,3 641,55,4 421,293,3 342,6,5 137,136,5 540,940,4 643,288,1 629,294,4 456,409,4 141,258,3 568,267,3 496,203,3 563,830,3 458,257,3 236,473,5 710,487,4 394,457,3 466,326,4 404,437,1 483,745,4 275,121,3 425,167,3 711,180,5 278,709,4 499,478,5 654,1531,2 212,68,3 715,662,5 302,110,3 714,424,4 341,2,2 346,741,5 587,872,3 665,208,4 566,191,4 30,261,5 660,221,3 670,297,4 658,392,3 408,606,5 68,1143,5 715,470,2 425,630,3 353,854,4 706,485,3 353,245,4 289,116,3 402,471,4 157,324,4 704,384,4 520,167,4 604,281,4 534,630,5 535,81,4 312,201,5 337,285,4 494,10,5 732,9,3 244,299,4 733,203,4 183,631,5 499,774,1 
283,302,5 302,394,2 744,491,5 41,925,3 296,534,3 434,181,4 158,947,2 434,205,5 0,191,4 267,207,4 267,222,3 636,534,2 542,581,3 264,747,5 735,295,4 551,1047,3 234,922,4 654,1130,5 57,1083,4 83,410,2 663,126,5 733,98,4 25,147,3 653,467,4 290,1304,3 520,6,3 0,177,5 477,842,5 712,1433,3 129,1087,2 401,11,4 359,1148,4 353,108,3 486,346,2 652,516,1 89,58,5 61,1128,5 603,47,5 710,1151,1 720,689,3 378,513,3 203,287,3 393,27,4 717,925,2 404,658,4 493,64,5 675,947,1 362,209,4 637,233,4 649,97,4 707,325,4 654,521,3 689,238,2 707,870,1 455,233,3 585,159,4 404,1146,2 168,49,5 42,68,4 536,941,3 658,635,3 267,859,1 585,180,4 404,744,1 622,282,4 61,948,4 144,245,4 744,189,5 462,247,3 644,90,3 379,442,4 614,525,4 532,746,5 663,267,3 503,489,4 199,201,5 737,366,3 693,47,4 617,762,2 129,1274,5 206,1434,2 388,1203,4 319,121,3 249,11,5 654,805,3 522,633,5 441,143,4 547,339,1 600,672,1 538,123,4 269,1072,5 421,150,4 120,716,5 122,63,3 386,409,3 684,333,1 637,549,5 682,899,1 292,788,2 591,149,5 20,655,5 327,426,3 415,171,5 505,1135,3 91,192,4 704,150,3 503,52,4 373,322,3 565,162,5 459,241,4 619,974,3 223,1380,3 654,879,2 449,605,5 644,517,5 564,651,5 290,721,4 649,182,4 490,123,5 532,1172,4 654,1489,2 320,356,4 658,941,3 187,677,3 586,689,3 0,4,3 644,167,4 605,1189,3 536,881,4 243,0,4 685,133,5 715,96,4 659,163,2 428,960,3 342,287,2 596,1533,1 310,629,5 711,789,4 342,55,5 636,1232,5 624,254,2 105,27,4 485,247,4 617,194,3 715,483,4 709,115,4 312,62,4 739,318,3 58,457,4 734,0,4 82,55,1 313,1073,3 528,269,4 350,988,4 392,21,4 252,1403,3 683,721,2 6,574,3 652,213,3 94,57,3 384,1109,2 231,1148,5 692,10,4 270,461,4 234,178,5 411,181,4 587,432,5 453,76,4 537,384,3 714,72,4 550,1418,1 542,55,5 560,361,2 704,7,3 697,750,3 436,706,3 291,47,5 6,671,1 501,257,2 644,626,2 279,763,4 559,488,3 561,217,4 428,1070,2 589,124,3 64,1043,3 462,1243,1 489,292,2 618,567,5 640,191,4 647,176,5 532,422,5 519,285,5 523,1112,3 471,403,3 297,207,5 506,332,4 221,179,3 269,1073,5 706,1170,3 662,271,5 715,380,4 434,1267,5 293,826,1 42,698,4 605,70,5 457,133,5 660,237,4 12,86,5 623,146,4 609,274,4 254,299,3 454,275,4 392,976,4 696,1244,1 619,739,5 502,69,4 452,8,3 696,753,3 66,234,3 12,630,3 664,747,4 492,683,4 471,1057,4 166,734,4 278,32,4 535,187,3 665,501,3 477,615,4 688,596,4 514,899,4 91,8,4 633,755,3 711,784,5 711,81,5 657,128,3 304,683,3 550,10,5 341,326,4 82,258,2 458,1114,3 663,91,4 131,520,4 585,467,3 589,126,4 233,1284,3 456,472,4 114,979,4 221,293,3 633,761,3 42,549,3 499,708,4 59,377,4 540,1441,1 237,293,3 362,207,4 612,125,5 733,418,4 91,574,2 694,300,3 588,242,3 81,1077,3 623,1027,3 93,927,3 129,1,4 682,305,3 503,1083,4 709,299,3 643,292,4 496,10,3 479,660,4 647,406,4 150,289,1 489,180,4 183,222,4 662,242,3 526,181,5 495,95,4 468,581,5 309,49,5 143,97,4 746,227,4 698,494,3 159,275,5 6,433,4 438,1327,4 196,331,2 58,320,4 472,8,5 654,752,3 4,386,3 188,174,5 392,1050,3 620,762,4 102,180,4 543,322,2 654,1534,3 58,773,2 722,27,3 496,372,4 607,1182,1 397,167,3 606,497,4 657,44,5 605,24,5 84,706,4 738,358,5 605,924,4 473,85,4 709,199,4 242,476,4 591,897,2 535,61,4 174,194,3 654,1599,3 187,927,3 631,214,4 652,505,2 604,1225,4 525,543,1 710,706,5 693,698,4 177,228,4 154,330,3 315,530,5 9,59,3 505,422,5 428,69,4 739,747,3 229,587,5 502,752,1 200,472,3 144,110,3 502,503,4 654,1559,2 668,81,4 698,257,5 373,1000,1 178,299,4 502,701,2 748,1,4 653,21,5 331,229,5 143,306,1 9,707,4 376,193,5 616,144,1 0,86,5 747,153,3 708,815,2 486,1015,5 458,116,5 566,468,4 674,1652,5 631,180,5 320,181,3 602,1482,5 536,1044,3 675,168,5 696,276,5 660,199,3 
520,7,3 275,71,4 377,30,4 629,894,4 545,976,5 647,237,3 511,55,5 547,343,1 618,251,3 300,545,4 744,214,3 404,44,1 717,299,5 489,1066,2 456,234,3 723,310,1 617,10,4 645,346,2 706,994,4 22,161,3 377,673,3 690,55,4 233,1451,4 320,282,3 415,780,4 120,410,1 434,624,2 654,699,3 485,1081,2 373,1,4 492,182,5 416,218,3 715,968,4 576,190,4 693,21,5 535,442,3 221,55,5 615,347,3 734,257,4 384,651,5 62,595,2 487,513,2 576,193,4 300,411,4 404,350,1 415,1468,3 733,97,4 746,24,3 681,696,4 714,216,2 649,545,1 487,723,3 717,110,4 620,108,4 748,55,2 372,945,5 446,628,3 747,527,3 388,55,5 607,135,3 647,69,2 679,273,3 620,52,4 325,418,3 659,227,3 461,258,3 716,321,5 586,690,4 213,172,4 279,527,3 94,215,5 12,667,1 498,49,3 310,498,4 533,285,3 265,318,2 600,153,5 398,30,3 703,460,3 68,233,5 23,91,5 653,12,1 91,94,3 643,290,4 449,1434,4 665,1010,4 706,798,4 715,186,3 478,1607,2 200,435,3 338,130,5 664,116,4 327,596,3 434,155,4 645,1236,3 748,124,5 293,1011,4 726,745,4 654,1643,1 485,297,3 681,736,3 452,495,4 294,221,4 584,1622,4 349,434,5 599,21,5 681,326,3 647,996,1 652,233,3 112,287,3 278,1489,4 439,69,4 434,630,2 478,23,3 523,404,2 404,793,5 498,193,4 652,450,2 745,95,4 653,962,4 214,179,3 536,41,3 654,432,2 377,212,5 412,327,3 531,450,4 245,95,3 653,267,1 489,136,3 505,66,3 745,116,4 717,590,4 737,195,4 84,64,3 343,475,3 88,730,3 150,442,5 665,635,4 693,215,4 285,300,5 652,789,2 392,1027,3 356,759,3 268,207,2 572,9,4 730,196,5 416,231,3 708,632,3 514,1398,4 698,223,3 656,921,4 585,201,4 507,228,2 715,1038,5 545,233,4 641,1479,1 232,292,4 707,750,4 695,522,5 610,750,4 503,631,3 454,269,4 428,927,2 576,70,5 513,384,3 232,370,5 27,331,2 691,1039,2 689,63,5 278,1487,4 324,1522,4 605,134,5 331,95,5 81,174,4 494,943,5 375,300,3 275,234,4 503,89,3 487,167,4 84,245,4 576,146,4 324,204,4 700,99,5 439,903,5 715,190,5 404,316,4 733,165,3 552,190,4 628,983,3 681,411,1 548,99,4 543,270,3 560,1511,5 732,252,3 718,49,2 163,221,4 692,567,4 640,302,3 450,1264,4 170,303,3 438,120,2 428,653,4 206,545,3 385,404,4 0,237,4 734,740,2 652,519,3 662,927,3 605,325,4 748,662,4 449,760,4 405,10,4 605,193,4 346,75,5 489,245,2 716,327,4 736,185,5 416,79,4 626,3,2 649,741,3 513,581,4 551,933,3 642,500,4 710,195,5 314,323,3 338,484,5 404,562,1 105,646,3 449,66,3 704,120,5 550,215,5 662,22,4 594,293,2 243,1135,3 663,211,4 585,28,5 422,298,3 560,979,3 233,941,3 392,929,3 388,71,3 377,253,1 233,955,3 513,392,3 656,285,4 547,283,3 133,300,2 615,332,2 641,584,5 748,365,4 544,741,4 698,299,3 696,595,4 298,151,4 545,293,1 607,24,4 654,497,3 279,1050,4 556,324,3 654,69,2 720,258,3 197,130,3 711,811,4 715,672,4 1,273,3 749,329,2 456,471,4 663,327,3 662,267,3 654,902,3 319,144,4 664,844,4 551,24,3 742,287,2 563,299,4 638,512,4 652,695,1 655,343,4 524,1013,3 48,180,1 285,741,5 482,317,3 342,80,5 408,429,4 562,152,4 31,110,3 631,401,3 681,316,4 424,1594,2 435,37,3 693,503,3 690,169,5 693,196,5 521,11,5 720,190,3 698,478,3 65,824,3 183,169,5 632,109,3 641,232,4 531,1038,4 688,124,3 685,326,5 326,6,3 93,209,4 706,285,5 393,185,5 738,68,5 623,272,4 720,81,4 12,691,4 513,402,3 715,85,5 681,204,3 696,241,5 55,577,3 668,516,3 626,225,1 206,81,3 693,1262,3 681,74,4 522,0,5 193,870,2 450,327,5 383,301,5 537,293,3 250,299,4 298,247,5 746,107,4 479,1006,4 631,143,4 737,317,5 200,606,4 690,7,2 682,298,3 360,741,1 652,87,3 576,548,5 654,63,4 720,748,3 0,155,4 742,272,3 619,224,3 293,742,2 663,478,5 534,961,4 12,575,3 681,553,3 726,627,3 647,172,5 649,178,2 653,287,3 628,14,5 502,738,1 675,116,4 531,795,5 406,495,5 654,905,2 587,1052,3 478,264,4 633,931,3 
616,1018,4 455,473,5 654,739,3 292,567,4 737,150,4 392,77,2 221,1065,1 185,37,5 623,1047,4 453,21,4 37,716,1 488,886,2 746,207,5 495,55,5 84,86,4 737,204,5 714,69,3 12,509,5 536,391,2 580,223,4 502,284,4 378,305,3 532,662,5 737,179,5 748,48,4 564,638,5 698,324,5 473,506,4 333,90,4 444,1011,1 487,356,4 726,258,4 584,212,5 660,602,3 550,848,5 560,558,1 585,1041,4 501,680,1 478,95,4 596,294,3 641,88,2 542,156,3 547,1015,4 604,259,4 471,889,4 449,478,4 563,596,4 255,386,4 273,219,4 6,201,3 740,94,2 587,229,1 307,791,3 392,943,4 578,210,3 267,1187,3 478,379,3 700,254,3 486,789,3 708,124,4 747,747,4 454,6,4 521,191,5 553,422,4 544,198,4 116,6,3 692,402,2 529,63,5 703,151,2 573,268,5 249,1072,5 120,471,3 631,734,4 471,390,2 61,143,3 268,817,3 672,749,5 388,614,4 578,325,3 214,167,5 512,117,4 478,110,4 440,120,4 589,12,4 549,1619,4 415,96,5 654,238,2 726,830,3 250,59,4 691,755,2 344,460,3 666,167,3 560,431,5 483,691,5 720,134,3 421,325,3 5,501,4 181,595,5 404,440,1 136,865,3 404,60,1 718,280,3 452,48,3 607,320,2 457,254,2 602,270,2 654,91,3 573,1061,5 59,224,3 499,71,4 405,1202,2 193,1409,2 505,95,4 647,142,4 542,97,4 369,268,5 746,22,5 134,451,2 7,95,3 197,819,1 659,215,2 737,182,5 302,40,5 362,853,1 536,425,1 451,658,4 109,396,3 144,596,4 103,329,1 679,844,4 639,55,5 488,873,2 449,704,4 583,448,2 12,571,2 715,495,5 392,64,2 592,660,2 659,315,4 330,177,3 451,417,4 456,947,1 535,152,4 516,537,4 392,85,2 449,820,2 72,659,4 713,180,5 748,0,4 698,275,3 457,643,4 4,446,3 587,68,2 206,215,5 279,102,3 596,762,4 324,131,3 658,156,4 101,545,3 536,468,3 654,85,4 663,777,3 534,608,4 428,206,4 473,835,3 536,444,3 36,384,4 726,32,3 744,6,4 658,171,3 737,198,4 653,115,4 665,186,5 290,755,3 558,181,4 689,648,4 268,7,2 380,777,4 715,95,2 641,564,4 642,409,4 641,578,4 641,443,1 285,723,3 576,727,3 184,320,5 302,828,2 341,1069,3 6,522,4 536,1334,3 659,66,1 662,24,4 57,180,3 507,422,5 533,120,4 542,271,3 616,287,1 725,844,3 647,66,4 737,95,5 217,41,4 600,200,5 84,173,4 428,65,2 140,256,3 668,116,1 42,185,3 304,381,5 652,54,3 723,936,3 412,935,4 479,478,4 2,343,4 716,149,4 464,180,3 727,116,4 532,565,4 653,27,5 591,1013,4 721,117,4 338,718,3 536,704,3 605,312,5 656,627,3 748,141,4 344,1073,3 243,157,3 647,405,3 269,318,5 656,474,4 691,99,4 502,132,5 682,301,5 439,987,1 652,135,1 623,322,2 627,844,5 238,513,1 514,747,2 732,146,1 579,288,5 571,288,3 397,126,4 523,203,3 233,221,3 675,1526,1 527,504,4 689,162,3 278,238,4 186,7,5 473,518,4 623,1094,2 404,1547,1 536,1133,3 649,280,2 649,553,2 449,621,5 698,988,4 617,123,1 302,173,5 732,675,4 624,482,5 200,1191,3 384,1036,1 592,321,2 431,293,4 592,567,4 344,257,4 540,731,3 681,1090,3 536,140,3 4,227,5 129,289,3 267,293,3 623,677,3 377,1046,2 553,237,3 654,353,2 717,239,1 750,567,3 103,123,2 331,322,5 386,677,3 716,293,3 536,92,3 112,1250,5 629,1022,4 746,222,5 453,693,2 246,749,4 706,123,4 73,538,3 711,782,3 617,624,4 311,136,3 167,312,5 308,257,5 748,398,3 451,179,4 233,1220,4 505,460,2 729,331,3 683,238,4 639,317,5 387,236,5 706,44,4 665,590,2 404,438,1 726,23,3 25,814,2 300,502,3 222,116,5 706,704,4 93,120,2 6,140,5 706,1108,5 77,870,3 748,251,3 653,215,4 715,339,3 504,434,3 537,99,4 211,734,4 391,302,4 477,6,1 715,22,4 542,1440,3 237,537,4 527,167,4 488,315,5 696,124,3 710,1118,4 527,202,4 746,431,5 713,254,2 415,41,3 571,276,1 451,517,5 560,750,3 466,245,5 624,479,4 598,507,3 128,241,4 726,72,4 704,63,5 701,227,5 616,562,1 749,285,4 16,918,4 416,803,3 397,603,5 668,901,2 428,558,3 58,899,4 553,819,2 710,738,3 177,179,3 215,97,5 372,183,4 711,497,3 585,540,3 638,413,3 
642,549,3 531,226,4 647,180,5 522,69,5 652,571,2 592,618,3 39,299,3 327,134,3 377,508,4 297,522,4 505,502,4 295,627,5 403,682,4 23,287,3 755,62,3 737,177,4 560,970,3 425,490,4 631,704,5 626,238,3 709,21,3 173,380,5 333,505,3 496,324,2 398,202,4 32,270,4 665,522,4 302,733,1 653,78,5 664,392,3 292,779,3 748,30,5 428,583,4 268,67,3 614,177,5 605,728,4 484,327,2 261,954,2 648,1243,3 535,212,5 726,432,5 544,550,4 675,143,4 372,495,5 670,719,3 388,823,3 372,149,4 644,182,4 243,870,3 253,615,1 585,10,3 654,43,2 699,95,4 536,23,1 756,398,3 681,722,1 270,473,3 702,6,4 704,0,5 628,422,5 710,462,5 746,47,5 449,89,4 620,400,1 614,174,5 343,741,3 524,123,3 600,152,4 468,510,5 51,99,4 454,708,3 259,257,3 706,734,4 406,208,5 266,197,5 478,509,4 647,678,3 755,366,4 499,117,3 668,126,5 548,126,5 593,49,3 512,221,5 479,257,3 402,290,4 177,78,4 750,300,5 387,99,3 634,275,3 486,422,4 671,1027,4 292,1420,2 372,98,5 416,450,4 140,290,5 626,190,4 273,199,4 681,861,1 531,134,3 530,244,4 267,1117,3 559,239,3 436,427,5 626,728,1 617,190,4 659,809,3 41,238,5 565,574,1 233,8,3 435,64,4 61,244,2 659,754,2 498,689,4 398,288,4 567,131,2 398,1216,4 469,282,5 550,116,5 482,581,3 590,171,3 497,88,5 566,658,4 285,450,5 584,1004,4 657,150,5 610,285,5 180,832,1 710,657,4 544,229,5 398,226,2 465,267,2 481,268,4 691,320,3 369,30,3 576,64,5 578,208,4 336,227,5 391,271,5 638,470,2 698,824,3 487,731,4 453,198,3 621,117,1 663,69,3 10,689,4 717,688,4 620,127,4 231,47,5 129,454,4 503,446,4 535,473,5 732,293,2 638,1194,2 6,669,5 654,646,3 312,614,4 602,11,5 756,195,4 206,1271,3 733,606,5 475,1270,2 455,356,4 240,267,4 255,627,5 346,659,2 654,233,3 59,754,4 454,381,3 560,213,3 82,992,2 94,196,4 587,131,5 668,526,3 486,377,5 698,306,3 379,356,4 605,175,5 451,211,2 526,627,3 565,160,4 681,672,3 471,1073,5 716,99,4 681,1221,3 63,152,3 635,0,3 497,446,3 494,185,5 665,603,3 91,757,1 649,179,3 325,662,1 416,545,3 345,233,2 617,196,3 498,520,4 740,782,3 555,11,5 710,475,4 587,82,5 575,318,3 502,739,5 566,123,4 689,193,4 630,300,4 516,257,5 93,435,5 23,97,5 696,988,2 216,61,2 542,160,4 480,85,5 550,709,5 176,59,4 681,8,3 441,230,3 756,70,4 639,918,5 59,285,5 341,486,5 452,163,3 755,602,5 380,93,3 98,119,2 431,248,5 617,81,4 654,536,3 279,727,3 628,291,4 3,10,4 668,299,4 592,173,4 415,199,5 197,63,4 670,1072,3 384,1498,5 245,840,1 523,964,4 585,451,3 61,698,4 525,689,3 56,472,3 638,583,2 499,210,3 297,87,5 306,163,4 623,123,4 400,507,3 706,162,2 676,322,4 692,1144,2 715,104,2 750,177,5 547,465,5 586,265,1 494,230,3 653,203,4 434,861,1 352,326,2 503,101,3 494,767,3 333,247,4 199,454,3 646,290,3 588,322,2 140,716,4 654,528,4 450,267,2 61,126,4 467,237,3 326,301,3 550,234,1 663,461,4 550,187,5 434,747,4 726,131,2 697,490,2 398,163,2 584,735,4 737,915,3 63,97,4 678,415,3 746,1049,3 504,49,3 706,370,3 726,418,2 652,1187,1 179,738,3 626,948,2 536,624,3 509,680,1 325,175,2 555,512,4 157,283,5 681,1654,2 483,110,4 566,8,4 373,933,3 723,876,1 285,933,3 736,88,4 633,12,4 513,889,1 671,930,1 405,479,4 618,306,2 275,0,5 649,1246,1 313,24,3 726,176,4 6,604,4 456,175,5 692,630,3 293,534,4 489,49,5 458,259,2 715,404,4 167,404,4 317,195,3 714,203,4 404,673,1 617,418,4 659,119,1 143,631,4 681,446,2 621,249,4 283,288,3 755,526,3 717,272,3 536,663,3 89,339,4 409,689,4 500,740,5 631,97,4 275,1045,3 196,1221,3 306,482,5 590,392,4 513,651,4 15,179,5 500,341,4 550,54,5 458,1038,3 609,476,2 716,330,3 626,553,2 683,203,4 526,13,2 471,894,4 647,504,4 459,514,5 628,201,4 270,1119,2 199,404,3 151,120,5 22,540,4 654,293,3 709,263,2 473,658,5 681,760,4 325,126,1 535,150,3 
444,817,1 456,184,5 368,357,3 726,1216,3 218,630,5 652,549,3 621,232,4 530,285,5 594,823,3 556,179,5 290,10,4 711,745,4 313,730,4 706,150,4 623,259,2 495,1059,1 473,25,4 746,485,5 587,587,4 180,863,2 642,518,4 362,160,4 21,999,3 647,402,4 707,1046,2 279,202,4 21,565,3 619,403,4 652,741,3 746,1374,4 744,424,4 343,117,3 647,1375,2 655,302,4 202,741,3 194,450,5 93,432,4 550,281,5 536,309,3 638,274,4 605,427,3 410,55,4 654,1220,3 498,250,5 698,318,3 57,772,4 507,229,2 506,256,5 377,662,3 300,270,4 392,500,3 638,299,3 226,239,1 671,475,5 710,339,5 667,28,3 98,281,3 552,660,5 740,398,2 457,473,4 545,144,4 629,929,3 503,1276,4 677,6,4 315,88,1 698,976,2 531,43,5 560,588,3 664,248,5 373,974,4 451,614,3 596,234,4 663,233,3 633,863,3 310,943,4 127,403,3 532,204,5 649,162,3 626,596,3 697,602,4 726,1230,3 446,10,4 180,1318,1 689,1206,3 663,46,4 604,175,4 520,654,4 145,1292,5 567,212,4 702,116,4 499,411,1 667,332,3 485,149,3 619,117,4 561,229,1 186,461,5 6,451,5 617,426,5 621,216,4 457,202,5 384,1007,4 649,153,3 416,167,4 601,180,5 432,245,4 599,268,4 681,82,3 746,782,1 536,80,3 710,691,3 639,750,4 562,180,4 610,298,1 659,1182,1 665,683,4 373,636,4 4,249,3 58,401,4 634,0,4 62,472,2 536,336,3 659,1073,1 415,215,5 714,173,4 617,175,4 331,55,5 304,712,4 10,503,3 520,134,4 96,132,1 304,197,4 744,519,3 315,43,4 657,55,5 713,120,4 649,81,3 344,497,4 739,288,4 560,656,4 735,531,4 495,185,4 492,404,2 664,194,3 392,392,3 525,474,5 101,72,3 531,924,4 750,247,5 587,637,4 473,43,3 32,338,3 400,314,4 586,270,4 751,285,1 748,187,3 658,268,4 652,203,4 386,21,5 644,227,3 290,245,5 650,285,4 614,630,4 524,927,3 631,233,3 540,587,4 248,402,4 138,287,4 560,182,5 714,149,4 709,750,3 270,814,3 9,169,4 697,658,3 689,793,3 647,124,2 255,181,4 757,346,3 373,1217,2 58,178,5 6,605,3 456,179,5 631,509,5 665,1131,3 12,162,3 404,90,2 653,245,1 572,173,4 606,474,4 21,143,5 390,489,4 48,84,3 291,490,4 658,176,5 485,1119,3 748,1336,3 657,526,5 362,247,5 654,292,4 670,240,5 665,529,3 654,1139,3 598,318,2 609,116,4 435,110,4 654,166,4 603,447,5 567,493,4 748,734,5 377,779,2 654,1601,3 449,708,3 6,385,4 439,197,4 591,478,4 506,221,5 141,293,3 553,65,3 144,825,2 398,454,4 333,536,4 406,256,4 503,39,4 398,331,3 633,1141,3 715,132,5 343,85,4 654,345,4 416,4,4 131,250,4 574,303,2 206,124,4 193,1407,1 467,120,4 536,669,2 626,404,3 57,366,5 435,789,3 740,68,4 415,842,3 87,879,3 220,383,3 536,1112,3 748,153,5 388,382,2 715,143,2 556,95,5 311,426,5 372,21,5 746,115,4 456,256,3 349,88,4 726,568,2 83,272,4 429,41,3 398,384,3 714,1010,4 343,12,3 654,653,3 675,1653,1 594,180,5 523,557,4 404,1296,1 748,405,4 222,1087,4 59,512,5 278,1026,4 674,1100,4 77,92,4 118,825,4 526,282,4 632,120,3 404,172,5 42,366,4 81,545,3 589,285,5 741,108,1 732,1008,2 653,172,5 271,237,5 747,21,4 313,1011,4 456,657,4 129,320,5 681,203,3 593,318,3 681,1231,2 736,221,3 473,647,4 726,431,2 229,497,5 392,368,3 514,686,3 362,324,1 331,37,2 358,322,3 649,62,2 180,1116,2 553,99,3 748,150,5 585,1272,4 715,442,4 91,48,3 706,524,3 266,55,5 477,0,4 675,961,4 392,134,1 717,1047,2 740,434,4 158,24,5 310,1216,3 560,955,4 406,392,2 392,4,3 594,879,3 373,239,1 677,923,2 327,229,3 147,233,3 757,134,5 310,392,4 629,321,3 731,299,4 626,75,3 587,958,5 343,493,4 306,177,3 475,647,4 290,782,2 629,95,4 730,377,1 402,221,5 665,426,4 641,574,3 645,299,3 422,328,3 531,266,3 723,265,1 378,160,2 594,819,2 534,1135,4 513,25,3 626,10,4 103,1027,2 159,1141,5 636,288,2 450,1021,4 607,486,4 736,46,3 751,271,4 62,283,3 101,218,2 558,392,2 591,192,5 435,181,5 434,1060,3 150,32,5 522,305,5 623,1015,3 
750,304,2 5,307,3 737,174,4 426,303,4 197,479,4 6,237,5 669,14,4 697,306,4 485,128,4 548,117,4 720,301,3 693,653,4 503,244,4 68,196,5 700,314,5 554,120,3 534,1044,4 633,105,3 682,186,5 607,659,5 616,647,3 503,464,3 620,390,3 434,568,3 285,385,3 57,404,2 457,116,4 448,247,4 660,47,4 48,395,4 275,1481,4 642,731,3 193,736,4 471,225,5 730,167,1 166,512,4 659,200,3 415,65,5 352,269,2 587,474,2 657,843,3 706,268,4 4,144,1 160,485,1 591,843,4 711,430,3 523,46,2 622,180,5 523,208,4 641,7,5 536,494,2 536,509,3 544,574,3 109,183,1 404,419,5 452,1272,2 158,1257,1 607,60,5 654,474,3 91,201,3 536,1244,3 504,124,3 504,384,4 631,16,3 233,711,2 639,84,5 295,312,5 566,652,5 499,203,3 560,442,4 654,1287,3 465,291,4 293,1253,3 516,104,1 124,143,5 434,171,5 642,194,5 681,941,2 618,1230,2 88,1118,3 550,746,3 693,97,5 27,446,3 12,163,3 757,297,4 553,264,4 713,596,3 537,27,3 58,196,5 628,300,3 714,57,4 428,492,4 486,87,4 624,133,4 139,244,3 12,634,1 607,299,1 94,236,2 467,371,2 666,312,3 510,681,4 718,222,5 756,160,3 598,887,5 458,107,1 351,85,4 93,193,4 658,191,4 82,355,4 641,81,5 531,267,4 457,57,5 430,301,3 396,171,5 210,63,3 36,404,4 302,92,5 681,2,3 380,0,5 206,8,4 531,897,4 737,264,4 542,760,2 398,306,3 103,925,1 89,529,3 643,1619,4 447,300,1 462,925,1 487,49,4 587,657,5 494,160,4 519,301,3 719,303,4 436,142,5 711,583,4 681,801,2 697,274,4 233,98,5 537,81,4 756,99,3 691,411,4 726,948,3 532,42,4 502,661,3 406,257,4 127,954,5 695,688,1 565,229,2 63,140,4 467,461,4 757,516,3 243,1118,5 704,299,5 404,1498,1 415,499,5 261,89,4 586,315,4 710,282,4 659,482,4 345,402,3 617,21,4 707,327,3 263,92,5 653,123,4 300,144,3 663,635,3 384,36,4 478,687,1 180,1101,1 605,215,5 206,460,3 670,326,1 505,209,5 556,49,4 654,1060,2 278,844,1 591,824,1 757,126,5 513,168,5 461,271,5 302,245,5 706,164,3 535,209,5 721,299,3 662,286,5 585,585,2 231,422,4 749,300,4 748,1088,3 561,117,3 695,311,4 297,195,4 549,687,3 702,457,3 616,443,4 715,587,4 715,180,4 757,257,4 665,256,3 278,1169,1 243,1094,2 550,43,4 127,1052,3 346,23,3 610,267,5 642,254,4 353,519,3 746,302,5 755,98,3 689,522,4 91,0,4 296,31,4 618,878,4 50,143,5 236,186,3 659,541,2 200,201,3 654,318,3 748,72,4 617,683,3 302,652,4 602,293,4 71,49,2 410,227,3 619,422,5 520,384,3 283,902,4 494,430,5 607,173,3 613,293,4 223,53,3 693,516,4 692,299,2 205,1175,1 664,356,4 322,1011,4 704,931,5 255,81,5 606,221,3 647,70,3 559,269,4 578,380,3 292,178,4 641,626,3 592,422,4 618,126,4 654,771,3 586,269,4 157,28,3 89,698,4 497,630,3 654,18,2 523,431,1 328,332,4 307,614,3 72,432,4 458,878,4 200,96,2 12,345,4 756,6,4 233,549,2 626,630,3 740,93,3 216,1033,3 341,420,3 662,320,5 639,341,5 591,751,4 714,155,4 295,1006,4 640,483,5 478,88,4 711,48,3 477,974,4 591,222,5 483,180,5 57,209,4 740,1028,1 675,314,4 397,12,3 708,264,4 652,422,2 502,25,2 397,398,4 641,416,3 605,748,4 757,271,4 732,1128,4 292,46,3 81,303,3 479,510,4 591,899,4 319,832,1 449,499,4 565,201,4 590,299,3 698,233,3 740,1089,1 641,1139,4 404,954,1 761,301,5 609,312,4 488,287,4 649,607,4 568,280,3 270,201,4 373,466,4 55,88,4 477,133,2 99,885,3 454,240,4 156,119,1 649,500,3 328,285,4 492,238,5 497,136,3 371,55,4 542,251,3 730,509,1 628,86,5 641,973,3 486,81,5 707,846,3 449,395,2 647,446,5 591,852,5 454,216,4 120,743,3 478,750,4 757,596,2 290,203,4 750,315,4 478,650,5 465,1175,5 354,327,4 743,126,5 91,54,3 64,86,5 311,176,3 625,263,1 344,236,4 592,14,4 404,1194,1 313,789,4 737,21,3 698,270,3 472,13,4 711,795,4 177,167,4 621,384,5 384,726,1 94,195,4 462,1016,2 302,131,5 393,72,3 12,198,5 471,1001,4 532,299,4 373,467,4 416,464,4 720,257,3 
279,75,2 523,471,3 753,675,3 710,1220,4 399,331,2 663,707,4 473,206,4 449,201,4 248,1068,5 536,49,4 748,553,3 397,131,5 102,486,4 12,66,1 221,144,2 626,38,4 181,177,5 730,191,5 375,153,4 707,537,2 415,98,4 649,672,3 585,227,3 703,179,4 120,1193,4 304,286,3 757,234,5 64,654,4 397,402,4 533,2,4 434,80,3 267,654,4 436,86,3 653,171,4 734,8,4 503,237,3 746,649,4 338,6,4 735,254,1 682,307,3 513,172,5 475,691,3 535,1139,1 477,1220,2 252,658,5 752,672,1 115,538,2 23,70,5 538,235,3 517,411,1 295,49,5 285,16,4 704,274,5 505,431,4 756,568,3 523,836,2 675,171,5 617,496,2 681,409,3 552,1123,4 166,237,4 467,57,4 247,88,5 259,332,4 392,72,4 654,309,3 707,933,4 21,1,2 302,281,3 692,522,4 560,57,3 408,497,4 536,235,3 523,1123,3 547,99,5 587,403,3 762,27,3 188,989,3 441,293,2 598,947,4 310,486,4 673,287,3 497,674,4 616,474,1 617,461,2 408,274,4 312,635,4 278,796,4 621,214,3 710,344,4 528,681,4 494,384,3 494,504,5 539,249,4 535,418,3 424,187,3 50,171,5 345,215,3 665,237,4 361,1024,2 565,510,4 342,581,3 11,97,5 665,741,3 463,320,4 341,293,3 658,647,3 392,619,4 656,345,4 174,10,5 289,418,4 488,677,4 757,586,4 689,78,4 78,901,3 733,590,4 93,214,4 665,338,4 552,7,3 654,325,2 597,307,4 2,325,2 647,429,5 454,291,3 523,123,5 763,85,3 536,293,1 673,116,5 275,1073,3 551,747,4 389,1295,2 637,635,3 766,1067,4 392,41,4 544,81,4 392,1336,3 382,1004,3 680,258,2 737,126,4 523,93,2 275,581,3 428,434,4 499,447,3 99,293,4 550,626,3 638,426,4 147,134,5 654,611,3 675,244,4 413,324,3 566,606,4 453,161,3 547,281,4 665,180,2 736,174,5 762,587,4 61,12,4 535,1114,5 91,244,4 499,963,4 468,489,5 364,325,2 536,627,2 704,173,5 720,49,5 46,288,4 560,945,3 586,352,2 647,1002,4 522,581,4 619,7,3 654,319,5 536,974,3 0,105,4 711,567,5 647,409,2 740,53,3 535,189,5 497,292,4 653,173,5 503,730,3 152,171,1 7,186,4 742,14,3 59,1124,4 537,11,4 312,567,4 617,1065,3 245,184,5 756,650,4 531,823,4 633,741,4 373,845,2 542,179,4 716,979,4 331,832,5 681,808,2 319,194,5 540,584,2 526,90,2 607,171,1 220,761,4 483,221,5 667,96,2 621,30,3 261,171,2 375,287,3 567,771,1 560,607,3 714,425,5 456,13,4 517,475,4 292,509,3 753,936,4 591,7,5 12,447,1 523,611,3 127,65,3 526,527,3 726,1656,3 456,457,3 731,872,5 502,318,3 766,485,4 180,627,3 479,1387,4 15,301,5 619,90,2 233,835,4 627,360,5 644,208,5 654,1435,2 499,508,4 457,285,4 496,69,4 550,194,5 482,289,3 726,1046,2 394,88,5 396,222,4 727,870,2 768,268,5 647,224,1 86,708,3 20,436,1 633,120,5 565,214,3 187,590,5 424,199,4 416,639,5 552,237,5 683,401,3 229,503,3 576,565,4 719,994,4 627,172,3 447,326,2 471,671,4 698,1128,3 319,945,5 449,529,3 415,806,4 236,497,4 664,1224,2 527,55,3 310,493,4 485,1196,4 93,1109,4 274,168,3 546,339,4 605,90,5 22,654,3 243,608,3 550,571,1 690,97,4 420,128,5 270,510,5 649,754,3 454,97,4 576,27,5 314,519,4 638,659,2 74,830,3 621,63,5 436,513,4 513,6,5 743,602,5 322,761,4 232,520,5 535,402,3 716,300,4 620,297,4 560,156,4 749,330,4 16,125,4 353,486,3 208,15,4 229,621,3 581,476,4 90,734,4 56,257,5 703,428,4 670,985,2 428,287,3 323,291,3 583,108,4 523,894,4 560,94,2 144,1045,4 58,1110,5 100,819,3 647,661,3 292,370,2 406,7,5 330,1198,1 494,264,5 172,994,5 536,384,2 430,689,3 12,81,2 586,309,3 81,132,4 453,168,4 388,426,5 708,199,4 654,978,3 275,1139,2 609,152,5 211,85,4 714,251,1 415,1052,4 343,268,4 473,461,4 743,236,4 547,747,3 153,514,4 87,897,4 617,484,3 726,152,4 486,771,3 415,848,3 604,275,4 550,314,5 757,809,3 605,199,5 187,1040,3 0,166,2 495,484,3 471,539,3 449,426,5 343,94,4 118,1015,5 661,92,5 714,731,3 149,275,5 93,434,4 639,789,4 462,318,1 461,322,2 730,182,1 755,549,2 718,659,5 
649,509,3 416,215,3 701,312,5 658,22,5 500,844,3 317,731,5 300,1012,3 698,150,3 544,95,5 542,478,4 708,186,5 748,22,3 681,126,5 700,332,3 489,301,4 25,839,2 641,1062,3 338,548,4 660,513,3 451,87,2 456,238,5 617,124,3 639,13,4 596,282,5 663,648,4 265,244,1 679,120,3 15,157,4 619,14,5 226,126,4 692,427,3 279,1098,5 520,183,4 348,125,2 703,49,5 604,830,1 628,171,5 503,195,4 463,301,5 17,516,2 553,124,3 754,318,3 278,1027,4 407,1295,4 497,434,3 652,312,4 266,410,3 626,57,5 757,430,3 404,366,1 223,91,1 384,519,3 251,148,5 752,78,4 730,356,5 770,761,2 643,327,4 82,299,3 449,480,5 710,1013,4 385,824,4 317,539,4 434,249,4 118,251,3 642,176,4 293,1088,2 652,196,3 456,449,4 513,429,4 48,1035,2 658,195,4 658,152,4 649,514,4 311,505,4 199,94,5 737,209,5 658,793,3 476,368,4 526,461,3 275,889,3 5,152,4 668,173,3 453,14,2 100,1008,2 449,935,5 144,891,2 617,1184,2 415,321,3 372,704,4 767,172,5 714,11,4 297,283,4 380,275,3 618,280,4 174,49,5 757,891,2 398,71,4 681,807,4 229,404,4 565,287,3 327,955,4 502,451,1 665,287,3 715,81,5 694,318,5 195,410,4 88,708,3 481,747,4 668,426,4 716,289,3 118,136,5 221,392,4 415,791,4 600,839,2 0,114,5 772,168,5 626,509,4 235,426,5 107,930,2 605,182,5 723,689,1 553,150,4 576,117,3 626,581,3 188,660,4 12,880,2 623,180,4 63,227,4 128,905,5 748,878,4 654,903,5 757,6,5 733,197,1 550,94,5 761,115,1 41,522,5 285,740,4 435,1057,4 513,47,4 620,583,5 524,761,4 653,168,5 183,395,3 523,483,4 748,143,5 507,131,5 486,1439,4 386,26,1 345,2,3 307,133,5 713,476,2 233,769,4 577,1263,3 623,590,3 654,123,3 762,87,4 424,1,2 494,522,5 756,225,3 665,176,3 715,490,4 89,1004,2 620,122,4 339,583,3 685,98,5 505,293,4 584,583,3 522,476,3 476,730,4 287,631,4 466,0,4 544,378,4 665,505,5 560,789,1 607,426,4 134,326,4 290,244,2 454,95,4 496,715,4 452,253,2 618,187,4 408,732,4 328,198,4 157,282,5 664,314,4 486,565,4 496,558,4 338,567,3 373,10,4 707,863,3 412,302,5 269,250,5 655,345,3 58,621,4 647,928,4 746,172,3 663,76,3 233,356,4 550,226,5 434,218,5 576,738,3 733,173,4 737,190,4 465,326,3 764,284,5 451,65,4 233,606,4 133,301,2 659,495,3 642,422,4 654,49,4 659,824,2 518,263,2 6,67,4 55,384,4 542,741,3 439,299,3 425,177,4 12,222,5 666,526,4 359,660,5 449,72,3 748,402,4 550,1046,4 540,526,3 546,344,5 404,563,1 715,0,5 232,503,5 317,13,4 210,204,5 711,109,5 726,154,3 489,116,1 557,19,5 647,519,4 560,514,3 362,945,4 274,433,3 175,507,3 663,483,5 301,293,1 248,30,4 327,342,3 681,14,4 771,311,4 494,100,5 491,136,4 377,219,2 123,194,4 285,339,4 714,238,4 581,454,1 709,264,4 69,288,3 112,244,3 748,10,5 83,285,5 415,618,4 765,450,2 668,309,4 488,303,3 639,209,5 553,217,4 755,116,4 475,654,4 755,177,5 101,540,2 703,177,5 530,750,4 486,30,5 199,322,3 757,176,5 706,902,3 664,184,4 705,627,4 719,905,4 339,273,4 587,644,5 575,14,4 434,6,4 605,297,4 524,288,3 730,486,4 197,26,2 599,171,4 62,286,3 470,931,5 620,384,5 747,47,4 505,207,4 585,466,4 585,789,3 572,684,3 392,50,4 243,709,3 377,392,3 29,320,4 668,117,2 97,434,5 292,685,3 278,976,4 267,651,4 757,1141,5 451,135,4 698,1142,3 428,641,4 599,514,5 724,263,1 463,15,4 763,222,3 319,175,4 289,30,4 714,249,2 560,606,5 294,793,4 384,528,4 531,569,4 654,546,4 449,1027,4 312,230,2 434,560,2 697,655,1 658,497,3 605,68,4 196,553,4 729,49,4 378,394,2 671,755,2 25,249,3 164,168,5 300,70,4 307,16,4 48,238,2 254,334,4 351,175,5 404,401,3 523,582,4 746,179,5 535,706,5 618,272,4 61,1134,2 585,152,2 711,775,4 609,704,3 368,345,4 653,108,3 58,95,5 71,971,4 635,257,5 698,412,3 637,120,4 144,61,2 400,256,2 186,133,3 607,309,1 732,1141,4 178,1126,1 499,58,4 621,948,3 153,257,3 145,271,5 
434,567,2 706,11,3 513,171,4 716,116,4 175,318,3 743,507,5 660,195,3 720,357,1 5,88,4 675,481,4 398,469,4 486,1,3 698,284,4 715,292,4 215,21,5 384,11,3 766,477,4 663,151,3 278,777,4 732,287,2 136,410,5 226,12,5 757,247,4 664,537,4 773,1090,1 459,1141,4 461,236,5 274,541,3 749,269,4 388,648,4 456,147,4 698,990,3 585,675,3 63,126,5 506,312,5 540,101,4 163,306,5 770,689,4 535,739,4 723,908,1 560,745,3 639,301,5 30,270,4 748,621,3 393,796,3 396,750,3 670,924,3 706,714,3 451,196,5 540,499,4 773,1418,1 682,357,2 520,392,3 372,258,5 750,249,3 641,1035,4 710,762,1 712,1126,3 759,64,2 762,199,4 721,121,3 750,471,2 571,120,2 642,162,4 692,195,2 487,413,2 324,1202,5 462,309,3 539,257,4 428,465,2 659,264,2 481,49,4 144,30,5 59,1059,4 453,133,3 503,218,3 760,277,4 455,22,4 405,133,5 555,480,5 449,111,2 37,399,1 710,777,4 720,327,5 756,770,2 654,1465,3 745,127,3 404,565,1 278,670,2 654,693,3 764,236,3 716,234,4 762,0,4 637,185,5 410,209,5 517,8,3 686,748,4 275,217,4 532,822,4 697,254,3 704,553,2 497,78,3 58,368,2 465,404,3 692,422,3 639,1243,3 513,1159,4 717,283,4 681,40,3 487,206,3 547,236,4 659,138,2 573,899,4 20,49,3 750,417,5 757,268,4 746,530,4 298,54,2 692,672,4 711,731,5 693,640,4 663,602,5 760,288,2 536,58,3 585,75,5 452,21,5 17,701,3 754,293,3 718,391,4 320,530,4 78,675,3 534,59,5 610,312,3 681,1439,2 590,450,3 683,180,4 632,116,3 344,190,5 86,495,5 706,711,3 39,241,4 647,229,5 776,285,2 536,1069,3 654,287,3 756,677,2 494,187,4 746,278,4 737,94,4 507,69,4 566,510,2 218,663,5 25,248,2 211,126,2 526,854,2 9,601,5 434,982,2 428,49,5 714,398,2 304,190,3 258,180,4 275,27,4 560,643,3 600,86,4 401,18,4 654,1638,4 629,271,5 397,588,3 115,302,3 665,495,4 404,1228,1 310,38,4 679,1088,2 270,503,3 439,735,5 559,11,5 572,282,4 473,287,3 711,419,3 653,430,4 712,1175,3 268,58,4 652,577,1 621,432,4 541,863,3 744,518,5 659,385,2 326,130,4 388,498,4 659,55,1 312,156,3 390,481,4 665,429,4 502,13,3 331,221,4 550,734,5 12,159,4 531,945,5 756,1,3 444,207,2 681,303,1 451,21,5 553,293,3 758,1015,5 513,9,4 729,747,4 472,302,4 654,830,2 738,78,4 214,691,3 665,10,4 517,9,3 746,660,5 221,50,3 143,1009,3 695,882,4 641,623,3 84,922,4 604,120,1 646,426,4 550,67,2 689,87,4 542,1554,3 455,45,3 763,244,4 649,231,3 434,639,4 775,421,2 404,217,5 388,93,2 166,672,4 604,117,3 270,80,3 434,423,1 665,167,4 619,415,4 523,510,5 542,1098,4 730,392,5 605,476,4 464,0,4 535,179,4 302,1229,1 703,485,4 617,745,2 665,923,2 479,63,3 373,155,2 770,202,1 668,346,3 346,927,3 544,540,4 657,167,3 536,637,3 380,524,5 624,587,4 103,824,1 550,181,5 723,298,1 640,433,4 5,6,2 436,411,3 641,402,4 550,185,5 502,285,3 601,147,4 641,484,5 144,858,3 608,293,2 621,66,1 776,167,5 708,163,3 646,741,4 553,20,1 628,190,3 398,1169,3 404,1064,1 544,150,4 727,24,4 468,519,4 759,182,2 428,189,5 660,505,3 167,251,1 652,176,3 79,236,4 574,97,4 553,131,4 674,1254,1 110,304,2 91,431,3 575,322,3 748,120,3 532,108,2 554,747,4 449,502,4 454,1159,4 21,160,4 599,398,4 93,552,3 732,846,3 757,287,4 626,195,5 101,12,3 755,403,3 779,69,2 586,936,4 716,290,4 408,265,1 772,71,3 531,401,5 415,400,2 675,681,1 756,259,3 765,486,3 711,3,4 443,747,1 693,447,3 69,506,4 537,78,4 454,147,3 571,12,4 715,506,5 715,1046,3 605,247,5 744,176,3 559,846,4 331,312,5 698,108,3 428,6,2 132,749,4 221,1439,3 0,10,2 726,342,3 645,312,5 360,726,3 491,133,3 298,59,5 436,704,4 469,470,5 703,1295,4 144,727,2 773,194,3 89,56,5 509,312,5 249,190,5 570,963,4 649,98,4 698,276,3 534,86,5 718,741,4 708,91,4 657,1078,2 603,55,2 174,185,4 390,602,5 715,46,3 275,942,4 362,282,2 750,486,5 520,762,4 436,218,3 
541,774,2 339,65,5 449,715,4 585,426,3 503,505,4 760,146,4 285,371,4 708,228,2 553,110,4 692,571,2 726,540,4 663,51,5 659,210,4 333,168,4 467,376,2 725,818,3 647,322,5 428,602,4 651,300,1 591,260,1 536,9,4 727,321,4 720,1391,3 6,509,5 553,215,3 579,251,5 0,244,2 6,593,3 676,306,5 471,1034,4 737,225,3 108,731,3 378,371,4 405,726,3 183,1007,4 329,558,3 541,14,2 591,242,1 536,738,1 232,215,5 356,410,3 614,318,4 668,113,5 628,332,4 765,1443,2 654,215,4 652,201,3 644,639,4 663,769,4 346,14,2 605,426,4 663,424,3 743,962,5 67,1088,1 604,287,5 93,735,5 6,639,3 335,545,3 620,121,2 560,130,4 388,419,3 129,931,3 760,293,3 41,226,4 71,232,4 157,160,2 127,273,4 206,366,3 294,384,4 150,486,5 313,400,3 436,434,3 659,662,2 451,201,3 449,81,3 232,186,4 292,526,4 726,889,1 278,169,3 29,312,5 115,288,4 200,199,5 80,283,3 279,741,4 641,119,3 665,330,4 504,153,1 639,78,5 496,163,4 500,368,4 536,122,2 743,427,4 644,187,4 423,680,3 560,90,4 12,438,1 109,807,2 772,26,1 6,386,3 179,371,5 406,234,4 70,176,2 293,322,3 526,656,4 628,80,3 140,14,5 713,870,3 29,256,4 649,443,2 268,401,2 764,14,2 191,514,4 473,944,4 94,132,3 773,738,2 231,164,4 398,339,2 59,707,4 151,277,4 278,287,3 760,923,4 584,339,2 386,473,5 248,155,5 654,473,3 707,357,2 710,1517,3 613,457,4 585,50,4 377,761,3 716,221,4 612,0,4 586,285,4 778,283,3 762,959,4 654,23,3 683,69,4 408,1557,5 494,154,3 664,474,3 473,477,4 376,271,5 318,345,3 578,65,4 751,747,4 15,422,5 459,846,3 173,285,5 765,52,4 711,621,4 219,268,5 386,579,5 602,448,4 591,300,1 726,825,2 693,81,5 405,95,5 124,208,4 359,186,4 648,14,4 406,71,4 664,281,4 676,125,1 757,223,4 471,121,3 326,128,4 275,201,4 751,299,3 675,49,5 587,0,4 300,1051,1 586,877,2 536,970,4 575,136,3 587,392,4 736,179,4 653,70,3 404,815,1 773,919,2 763,117,3 561,230,1 36,171,4 765,228,3 660,116,4 759,180,3 114,228,3 5,191,4 398,131,3 452,958,4 427,267,4 755,397,3 775,768,3 455,418,4 302,812,4 714,192,5 222,308,4 706,237,4 763,139,3 171,123,4 559,495,3 405,288,3 710,229,3 302,1134,2 733,661,3 681,555,2 476,89,4 302,669,2 186,208,4 654,1649,4 496,95,4 449,125,5 218,222,5 579,99,3 664,832,3 675,0,5 327,152,2 384,346,3 469,234,3 746,131,4 200,514,5 523,481,5 58,509,4 618,301,4 532,595,2 223,156,4 642,0,5 453,677,2 325,514,5 573,318,5 326,30,2 473,524,4 371,4,4 6,501,5 721,6,4 523,477,3 94,404,3 710,1284,3 535,404,2 710,587,4 617,728,3 744,284,1 449,372,3 664,190,3 638,777,5 726,808,4 773,510,3 614,522,5 233,1169,1 730,215,5 777,93,2 109,332,4 762,366,3 471,238,5 626,317,5 756,27,3 405,367,2 91,983,2 670,590,3 563,288,4 342,25,3 344,565,3 623,122,3 58,704,4 48,181,3 462,19,5 706,491,2 392,280,4 298,819,3 757,107,5 678,63,4 718,8,4 415,251,4 662,315,4 434,124,3 654,894,3 57,168,4 600,927,1 416,289,4 233,477,3 271,203,4 266,497,5 558,54,4 711,176,2 91,742,2 572,512,4 384,317,2 67,595,2 760,1196,3 72,186,5 98,289,4 381,133,3 623,1119,4 561,0,2 408,944,3 715,356,5 654,288,3 505,434,5 406,655,4 255,122,2 615,872,3 605,418,4 682,1279,3 115,901,2 663,208,4 742,223,5 598,1276,4 721,236,4 109,392,3 781,1215,2 223,320,2 654,520,3 627,339,5 663,179,4 642,22,5 664,30,3 519,897,5 467,245,5 735,49,3 757,618,4 153,237,5 585,317,3 116,155,4 313,716,3 763,741,3 516,760,5 404,1227,1 483,97,4 587,730,2 78,256,3 757,57,4 642,65,3 462,120,3 703,288,3 101,855,2 486,54,5 737,62,3 220,390,3 311,656,5 566,174,5 624,201,3 491,204,4 285,3,5 296,545,3 785,275,1 536,706,4 415,411,2 404,781,1 194,59,3 746,43,2 476,236,4 376,167,5 710,740,4 704,281,5 617,67,3 681,65,3 197,185,5 746,834,3 703,153,3 416,189,5 753,283,3 522,300,4 327,777,3 307,484,3 
429,673,4 494,731,4 714,81,4 689,745,2 763,819,3 499,280,3 715,660,3 654,160,2 587,577,5 637,503,2 179,746,4 645,891,2 398,672,3 746,182,5 662,257,3 294,99,5 243,143,1 388,834,5 96,82,1 639,549,4 212,457,4 632,664,3 681,86,5 472,123,4 679,8,4 465,173,5 394,471,3 505,61,3 600,122,1 492,549,4 505,68,5 631,55,3 757,1084,5 17,956,3 620,175,3 607,698,5 773,575,1 274,1218,2 456,215,5 290,1066,4 629,125,4 757,55,5 392,337,2 91,185,4 404,138,3 772,63,4 714,788,4 290,635,4 292,467,2 394,214,5 150,469,3 516,334,3 12,872,1 285,883,5 61,675,3 547,184,5 370,442,4 311,193,4 58,186,5 75,222,2 692,507,2 91,1073,3 746,291,4 407,327,2 324,209,2 242,24,3 473,115,5 541,733,3 550,355,4 707,763,4 464,142,4 715,24,4 232,49,3 720,156,3 3,326,5 457,272,4 471,42,4 601,894,3 552,512,4 670,176,4 120,275,3 194,133,5 91,788,5 770,87,4 641,98,2 444,1007,1 93,1224,3 175,6,5 310,237,4 545,768,4 649,90,4 777,77,1 266,741,3 416,77,2 458,747,4 12,484,1 326,791,4 505,698,4 708,81,4 757,49,4 703,190,3 753,8,4 434,186,4 626,807,2 249,942,4 733,142,5 255,217,3 681,571,4 722,173,4 714,925,4 449,279,4 578,257,5 641,814,4 498,96,4 670,355,3 398,692,3 4,408,2 591,1022,1 746,442,5 180,115,1 553,731,4 400,150,1 248,6,5 720,356,5 785,404,4 327,356,4 408,1098,4 404,207,5 587,484,5 404,433,3 638,647,3 697,704,4 565,581,5 446,122,3 623,110,3 177,0,4 715,501,3 151,410,4 94,388,4 304,708,5 591,264,4 747,70,3 400,747,3 502,165,5 697,189,5 157,120,4 542,918,2 681,322,2 220,41,5 539,1225,4 326,47,4 346,187,5 772,958,4 200,1135,1 710,508,4 625,747,2 14,327,3 733,209,3 586,303,4 51,301,4 91,770,1 583,164,1 746,653,5 765,356,4 639,1053,1 285,1073,4 0,34,1 266,81,4 172,318,4 595,12,2 662,124,3 452,79,2 698,23,3 471,63,5 611,603,4 748,63,4 332,99,4 553,120,4 663,221,3 10,526,4 649,163,4 653,264,5 617,500,4 686,677,4 681,833,3 465,907,4 724,244,4 492,646,4 404,179,3 533,977,4 703,513,4 591,844,4 24,462,4 716,301,5 726,565,3 558,232,3 486,256,4 680,538,4 294,742,4 654,1280,3 685,1183,1 455,187,4 739,241,4 642,167,5 458,522,4 496,474,4 526,199,3 435,82,5 86,513,4 670,553,4 670,28,3 783,268,5 747,495,4 531,794,2 414,184,4 757,864,4 767,281,4 520,96,3 504,21,5 581,762,2 263,136,3 408,646,5 353,605,5 540,552,4 325,186,1 641,1530,3 183,691,4 221,417,2 333,568,2 326,107,3 290,549,4 404,79,1 408,527,4 676,0,4 587,120,5 503,408,4 718,6,2 434,217,3 663,202,4 708,3,3 626,567,2 346,1046,1 726,561,2 275,789,3 720,698,3 242,124,3 756,194,4 681,327,3 719,305,4 649,71,2 238,241,5 446,143,5 388,204,4 652,81,4 710,693,5 494,203,4 532,865,2 675,507,1 770,82,5 723,299,3 327,193,3 343,750,4 330,174,4 521,509,5 710,431,4 769,476,4 392,643,3 256,120,3 91,782,3 681,180,5 494,175,5 714,297,4 715,416,3 407,293,5 503,1209,3 376,172,5 160,283,3 776,152,1 41,466,3 723,1175,1 560,630,3 652,222,3 312,401,3 415,64,5 494,225,4 479,256,4 344,741,4 183,484,4 709,49,4 532,366,2 175,880,3 444,126,2 703,495,5 550,410,1 625,329,3 779,432,1 706,377,3 496,1239,5 561,72,4 560,47,4 787,567,3 513,173,5 397,587,4 614,186,5 673,49,4 641,391,4 560,216,3 721,411,2 629,297,5 471,185,5 278,780,3 229,434,4 587,274,3 654,220,3 787,722,3 707,270,1 75,128,3 639,346,3 79,204,5 221,385,2 565,202,4 436,522,3 210,704,4 290,261,4 175,149,4 114,278,3 765,482,3 172,302,5 55,203,5 654,1061,3 416,108,2 58,189,5 531,841,4 200,430,1 765,81,3 708,738,3 542,168,4 617,202,3 151,142,5 455,413,3 649,187,3 451,193,4 757,152,5 769,300,4 670,1216,4 208,126,5 613,755,4 737,968,4 433,470,2 607,498,4 748,624,3 471,779,4 469,6,3 626,182,5 654,731,3 775,671,3 317,187,3 649,228,2 711,948,4 497,753,2 461,291,5 706,1256,2 
787,22,3 756,156,3 697,1148,3 327,571,3 144,689,4 750,734,4 416,1090,3 456,728,4 757,286,5 451,461,4 755,121,1 497,380,3 701,258,3 614,22,5 693,1204,3 467,94,4 516,1046,2 534,13,3 654,603,4 681,271,5 631,203,4 352,749,4 679,97,4 498,306,4 585,404,5 770,236,5 415,132,2 378,182,4 582,356,5 412,49,5 585,799,3 730,204,1 654,1195,3 331,451,4 307,60,3 531,71,3 531,618,5 386,527,4 261,545,2 96,168,5 647,151,5 621,549,4 772,238,4 765,76,2 467,171,4 566,187,5 772,228,3 233,411,2 48,286,4 148,327,2 342,317,5 434,392,2 457,653,5 402,514,4 206,1330,3 781,249,4 90,21,5 538,285,4 745,183,4 627,287,5 502,422,5 675,12,1 478,603,3 491,653,4 289,48,3 452,409,4 477,39,1 30,810,4 12,204,2 591,588,5 253,171,5 296,41,3 678,72,4 652,104,3 649,402,3 746,86,5 263,22,5 592,25,4 267,199,4 356,125,5 719,344,2 478,263,3 144,328,4 508,300,2 249,417,5 38,306,2 681,1027,3 746,466,4 560,718,1 233,225,2 535,95,4 473,88,5 647,1175,1 696,627,4 681,805,3 725,273,4 738,99,5 499,1384,4 620,124,4 659,514,2 138,1232,5 548,410,3 714,446,3 485,19,3 748,520,4 788,1011,4 333,254,3 624,189,3 742,99,5 670,509,3 404,383,3 716,285,3 200,580,3 726,1410,2 709,196,4 278,229,4 785,176,4 387,322,4 464,153,2 665,95,3 707,689,4 298,193,3 392,110,3 748,221,3 185,476,4 0,136,5 714,82,4 210,198,5 705,332,1 84,477,4 233,604,3 620,538,1 408,1368,4 781,1599,3 544,562,3 715,237,4 405,157,2 740,4,3 706,1106,3 756,247,4 434,540,4 578,513,3 497,448,3 319,571,3 398,238,3 641,394,5 709,98,4 645,309,3 605,116,4 108,678,3 547,881,4 513,30,4 558,384,4 408,302,4 333,163,3 538,238,3 757,302,4 642,232,4 671,1060,4 560,522,4 392,404,4 503,1049,4 177,155,2 306,98,4 121,1118,3 668,275,2 771,897,3 748,943,4 785,632,4 10,525,3 681,519,4 639,150,4 499,49,3 789,379,4 714,158,3 455,179,4 789,1073,3 527,108,4 465,10,3 617,403,5 696,123,5 503,398,4 658,646,3 310,966,3 163,750,4 434,82,4 322,221,3 786,309,5 541,207,4 538,639,2 30,1019,3 127,87,4 580,1374,5 692,683,3 711,1090,3 297,503,3 629,287,4 770,257,5 419,749,4 642,481,4 494,94,3 560,96,3 243,8,5 233,805,2 746,1193,5 206,55,4 5,526,4 746,210,5 531,23,5 127,432,4 689,89,1 22,693,4 221,225,3 351,97,5 781,1022,3 406,285,4 765,89,1 694,312,2 526,670,5 326,731,1 536,261,5 621,183,5 416,808,3 669,173,4 785,229,4 498,300,4 649,451,2 302,4,2 605,506,4 170,268,4 654,1191,4 187,152,5 715,94,4 769,874,4 5,518,5 781,321,4 121,1167,4 748,836,5 750,1010,4 693,490,3 772,90,4 397,204,5 576,664,4 545,446,3 700,749,5 638,370,1 93,1057,4 654,1210,4 782,287,3 5,490,4 765,97,3 653,116,4 732,292,4 454,94,4 539,280,3 405,498,5 453,658,2 313,545,4 392,543,3 658,312,5 550,197,5 449,291,5 523,210,5 454,306,4 390,21,4 772,638,4 679,168,5 144,134,5 267,626,3 91,57,4 296,26,1 659,889,1 536,1266,3 91,281,4 748,158,4 377,467,5 285,1118,3 89,1195,4 654,201,2 592,172,5 507,95,2 313,14,5 747,7,4 419,512,5 473,178,5 704,1,3 681,464,3 647,501,5 740,30,3 144,1008,2 776,356,5 331,117,5 647,48,2 579,454,4 494,432,4 602,61,2 707,872,5 499,7,4 492,200,5 4,385,2 681,1047,3 599,176,5 767,277,2 660,497,5 754,874,1 408,503,2 312,384,4 573,332,3 782,333,3 197,3,3 475,737,3 384,653,5 145,244,5 689,225,3 784,194,4 473,435,3 483,274,3 609,734,3 547,580,4 560,197,3 644,69,4 664,865,3 255,1041,5 620,23,4 714,626,3 540,167,4 693,1202,4 362,1013,1 453,292,4 466,918,2 540,258,1 483,23,1 534,627,4 522,662,5 150,472,4 696,221,4 503,391,5 641,124,4 652,491,4 441,10,4 654,517,2 344,203,4 89,675,2 621,213,4 657,428,4 756,49,4 624,249,4 715,413,4 421,474,4 471,172,5 233,10,2 550,149,3 73,301,4 534,557,5 304,407,5 52,150,4 654,683,3 633,984,4 542,549,2 342,8,5 723,345,1 
726,202,5 501,258,3 264,110,2 304,1073,2 536,723,3 327,432,2 750,735,5 777,6,4 398,696,2 541,788,3 641,730,5 746,499,4 702,146,3 706,777,3 781,937,3 660,68,4 746,1659,2 108,316,2 373,67,1 662,95,5 750,479,4 629,619,4 600,106,4 653,80,2 115,271,3 513,293,3 183,427,4 713,236,3 775,233,5 748,144,4 371,435,5 216,95,4 615,346,4 346,685,5 132,315,4 151,738,5 652,479,4 649,522,3 466,474,4 714,91,3 710,64,4 439,309,3 312,518,5 620,824,3 652,522,4 315,285,5 416,1287,1 80,287,3 591,121,4 670,53,3 383,326,4 788,126,5 649,171,4 263,788,4 659,242,2 762,285,4 127,160,5 769,507,5 636,1050,2 290,819,4 93,31,5 327,743,4 730,506,3 406,213,4 66,411,1 608,889,1 507,190,5 750,418,4 599,448,4 68,1015,3 243,108,4 702,256,5 710,403,3 16,6,4 278,727,4 302,240,4 497,553,3 748,549,4 641,721,3 416,573,2 427,1023,4 645,314,4 304,88,3 397,509,4 787,479,3 552,99,5 726,800,2 706,693,4 398,290,3 560,549,1 378,413,5 658,447,4 667,209,5 94,691,4 151,422,5 751,312,3 20,799,1 428,479,4 17,736,3 269,814,4 399,689,3 298,6,3 759,722,2 629,117,4 396,852,4 377,134,2 654,799,2 633,839,2 221,584,3 115,49,3 453,54,2 748,104,1 233,1329,3 591,1016,4 2,318,2 594,8,4 24,182,4 719,314,4 444,751,1 487,492,3 773,526,1 621,293,3 86,471,4 253,125,3 22,525,3 573,346,3 710,1116,4 164,317,5 659,194,4 607,318,4 547,641,4 647,815,1 61,80,4 566,478,5 200,684,3 12,49,5 683,400,3 450,328,4 257,285,5 793,241,5 757,191,4 419,13,5 560,379,2 279,225,3 432,322,1 180,874,3 415,591,3 749,293,4 527,76,3 647,65,5 362,37,3 628,21,5 21,687,1 148,267,4 436,691,4 506,268,2 441,233,4 732,1172,2 240,299,4 504,306,4 592,49,4 783,306,4 503,290,4 449,421,3 573,331,3 694,259,4 275,224,3 57,1062,1 663,285,4 496,602,3 542,117,3 775,484,2 634,254,4 447,344,5 773,88,2 296,110,3 68,301,4 438,300,3 710,214,3 748,237,3 720,392,5 42,120,4 662,761,4 278,1479,3 406,1089,2 328,650,4 591,656,4 302,1508,1 626,796,4 592,356,5 663,503,4 529,486,4 108,366,3 473,1133,3 757,94,3 746,495,5 692,418,2 392,471,3 21,711,4 388,730,3 497,692,3 496,848,2 726,311,3 663,426,4 70,743,4 144,393,1 757,196,3 710,743,4 498,519,3 536,98,2 401,203,5 558,193,3 89,1038,5 665,661,3 462,1066,2 748,85,4 606,510,5 706,1006,4 668,323,3 694,1023,5 341,843,3 541,1060,2 649,215,4 173,385,1 144,730,3 473,474,4 496,422,3 67,762,1 766,206,5 697,193,4 626,561,2 322,293,3 600,426,4 651,394,3 498,483,4 610,268,4 566,1450,3 737,404,2 419,85,5 48,319,5 135,14,4 12,405,1 719,1061,5 532,229,4 400,126,1 612,508,4 564,82,5 767,339,2 696,267,5 455,1058,4 238,41,5 487,163,3 520,95,4 394,251,3 444,590,2 747,173,5 569,304,5 441,745,3 757,80,5 462,92,4 628,198,5 675,299,4 550,707,1 502,731,3 238,481,3 165,312,5 371,874,4 12,472,4 535,565,5 499,201,4 252,0,5 404,63,5 623,269,3 452,180,5 793,886,4 436,81,3 158,828,4 451,134,3 279,470,3 94,1046,3 412,257,4 619,927,5 728,332,4 296,89,4 654,416,2 379,581,4 689,715,1 550,203,4 710,1117,4 757,426,4 55,21,5 531,76,5 531,675,5 61,70,4 304,527,4 607,332,4 568,18,5 746,315,4 438,99,3 537,180,3 779,661,5 455,863,4 146,300,5 12,347,2 378,162,4 755,641,2 737,356,4 757,154,1 500,590,4 156,117,2 681,251,3 647,549,4 675,256,5 675,287,1 541,426,5 614,236,4 587,565,2 180,1046,2 629,99,3 696,0,5 723,244,2 513,63,4 342,158,2 720,654,2 343,561,2 648,290,5 696,817,4 458,567,3 513,424,5 245,808,2 326,185,2 605,194,5 577,354,1 436,841,4 520,731,3 786,898,3 763,142,5 443,244,4 450,304,3 5,316,3 777,1272,3 353,736,4 471,81,5 734,743,3 540,754,5 460,241,3 206,203,3 703,130,5 344,273,3 780,482,5 736,11,4 224,605,5 653,53,3 523,1552,3 532,296,4 652,818,3 233,1445,3 757,171,4 659,21,4 532,133,4 533,14,4 
547,1243,4 541,417,4 455,505,4 654,189,3 93,41,4 730,94,3 665,12,4 444,180,2 194,233,5 770,27,5 710,715,5 686,339,4 449,868,4 785,470,4 740,69,4 84,230,2 456,8,5 457,22,4 706,210,3 540,1083,4 773,293,1 590,356,5 698,932,3 787,753,4 471,1033,3 537,233,3 626,809,3 746,647,5 338,123,4 41,293,4 401,12,3 415,1019,5 704,240,4 778,256,4 795,1073,1 461,99,4 773,228,2 643,260,4 689,662,4 310,53,4 594,108,2 58,276,4 542,719,2 596,249,4 542,655,4 9,581,4 766,97,5 451,180,4 726,203,3 436,76,4 406,227,4 469,256,4 726,194,4 451,123,5 772,558,2 631,274,3 255,470,5 209,104,3 647,558,2 760,213,1 483,135,5 720,69,3 601,256,4 415,872,5 413,342,2 290,199,4 405,560,3 12,896,1 754,688,3 647,518,4 59,614,5 692,487,4 755,1118,4 654,344,3 532,175,1 84,844,3 773,402,2 762,237,4 794,171,3 129,807,5 236,655,4 793,117,2 641,844,5 618,173,4 659,214,3 416,446,3 554,149,4 621,678,3 52,24,4 787,184,4 755,233,3 312,46,3 568,285,5 177,1196,4 223,69,2 591,258,2 789,153,4 223,714,1 603,636,4 678,131,4 495,108,3 331,78,5 127,391,3 156,275,4 143,473,4 693,173,5 152,78,5 392,6,4 639,133,5 794,116,4 647,509,5 129,235,5 755,752,2 666,136,3 566,193,3 621,85,4 546,314,4 586,885,2 516,822,2 758,331,4 536,220,3 753,618,4 671,224,2 618,182,5 370,193,3 587,420,5 599,683,4 416,222,5 640,864,5 757,888,3 231,581,5 693,130,5 270,301,5 449,475,4 362,186,2 544,372,3 720,293,3 779,490,4 221,1044,3 693,225,3 792,275,3 720,214,4 708,635,3 621,27,3 647,230,2 517,543,3 770,241,4 505,745,5 637,226,2 433,974,5 591,762,5 404,714,1 757,323,5 708,173,5 531,294,5 560,692,3 707,49,5 781,682,1 576,150,4 680,287,1 726,400,2 735,245,4 416,1538,2 120,196,4 416,99,3 659,176,2 622,78,5 781,248,2 275,589,2 573,269,3 717,1164,3 710,712,3 408,403,2 427,1312,4 307,381,4 663,131,4 452,692,5 740,650,4 636,925,2 726,200,4 773,672,2 200,63,3 613,0,5 659,897,4 745,195,4 757,825,3 772,36,3 292,426,4 100,411,2 75,269,3 485,590,4 520,162,3 345,668,1 696,750,5 505,180,5 532,12,3 773,53,1 780,99,5 434,561,5 689,93,4 641,1508,2 315,434,2 473,791,4 494,385,3 14,110,4 588,681,4 157,470,4 569,747,3 422,923,4 379,120,3 620,471,3 792,1066,4 779,201,4 720,299,5 151,780,5 641,1502,2 275,1207,3 6,3,5 363,285,5 664,596,3 668,748,3 730,461,5 500,545,4 317,3,2 740,76,3 248,408,4 398,4,3 710,948,4 781,983,2 794,366,3 217,175,5 644,473,5 421,274,5 541,450,3 781,328,3 193,88,3 505,93,3 795,400,3 795,183,1 520,98,3 221,421,2 12,831,4 532,70,4 598,254,5 737,194,4 746,843,4 773,171,3 486,355,4 636,324,1 416,293,4 747,227,3 431,299,4 628,683,5 760,545,5 704,37,5 789,156,2 496,162,2 748,292,4 490,99,5 760,147,5 720,714,2 446,208,4 449,442,4 402,234,5 294,167,5 118,409,1 675,131,5 373,1216,2 780,473,5 783,677,4 654,384,3 598,1013,4 606,482,4 664,142,4 545,589,4 493,97,4 692,117,2 746,704,5 544,120,5 734,332,4 641,1013,5 704,27,4 363,301,4 388,185,2 320,85,4 757,63,5 671,24,5 745,230,2 506,677,5 633,1283,3 499,233,3 697,229,3 378,709,4 14,933,4 6,543,3 317,711,4 789,1062,5 536,432,4 594,474,5 535,135,4 715,130,5 654,535,3 538,237,3 424,384,2 748,1046,3 344,322,3 42,624,4 436,708,5 86,943,5 433,476,5 343,168,5 245,68,3 613,870,2 503,627,4 621,89,4 678,7,2 620,809,3 492,322,4 550,761,5 101,430,3 371,200,2 773,643,4 486,281,4 641,366,5 788,474,5 406,426,4 748,198,5 392,793,4 504,173,4 12,36,1 624,172,3 523,237,4 659,116,3 536,289,2 665,854,4 748,253,2 702,274,4 17,220,5 403,688,2 505,1088,1 398,120,3 607,479,3 605,815,2 781,1537,3 708,384,4 542,1523,4 405,217,3 603,183,3 642,467,4 726,548,3 652,126,5 574,126,2 42,538,3 624,356,3 415,317,5 746,473,5 765,520,4 695,304,4 776,99,1 324,181,3 746,151,3 
654,80,3 559,276,3 295,255,5 550,1138,4 128,303,3 475,66,4 372,388,3 683,1282,3 562,861,1 473,355,5 652,755,1 5,488,5 741,320,3 697,512,2 494,157,3 550,1442,5 532,513,3 405,477,4 695,343,5 306,227,5 311,22,4 372,49,5 757,606,5 734,275,4 166,98,4 733,750,4 591,521,5 763,70,5 659,185,3 765,201,3 180,1349,1 84,530,4 302,568,3 59,444,5 746,938,3 86,409,4 642,208,5 547,282,3 585,840,3 449,831,2 654,305,3 307,121,4 558,256,3 455,1221,2 715,1202,2 723,679,1 473,22,4 541,185,4 53,345,4 24,603,4 649,494,3 422,244,4 692,1310,1 795,417,4 591,233,5 653,472,2 750,87,4 373,741,5 505,160,4 199,595,4 706,292,4 360,704,5 300,419,3 536,465,4 494,180,5 556,299,4 653,659,5 649,495,4 663,148,3 740,1073,2 424,537,2 507,510,4 539,110,4 556,197,5 607,43,4 450,358,2 654,1068,1 757,683,4 773,1027,2 205,1431,1 561,180,3 362,281,2 534,195,4 406,214,3 553,818,3 469,12,4 22,712,4 428,190,5 191,124,3 641,450,5 341,211,5 424,120,4 757,23,4 390,173,5 605,47,4 416,259,3 741,116,2 713,471,2 378,635,3 41,283,3 654,707,3 703,653,5 170,309,4 329,384,5 720,880,3 180,980,1 534,481,4 702,257,4 19,273,4 177,332,3 685,650,5 797,93,3 416,412,3 682,302,3 215,734,5 478,1443,1 441,218,3 404,674,1 475,25,4 591,924,3 797,0,4 171,462,4 517,279,4 644,27,4 732,278,2 221,741,5 748,577,3 608,1011,1 366,799,4 642,199,3 524,1046,2 795,228,3 397,185,4 659,207,4 404,920,1 534,8,5 526,646,5 615,321,4 357,1265,4 703,661,3 785,403,4 787,228,3 787,50,4 749,305,4 663,174,4 275,927,3 345,96,4 616,1072,3 654,1367,5 600,503,4 278,27,2 455,379,3 195,152,5 776,0,4 613,840,2 560,194,3 327,132,5 405,446,4 748,171,5 607,99,4 142,293,3 636,332,3 229,370,4 632,176,3 681,1409,3 706,729,3 744,173,3 535,712,4 773,562,1 535,497,5 478,454,4 93,3,4 795,678,4 275,184,4 17,110,3 393,385,3 654,86,3 609,94,2 732,18,5 349,131,5 636,0,4 541,121,3 94,96,4 757,688,1 706,161,5 714,474,4 453,422,4 176,215,4 93,691,4 797,997,3 449,225,4 325,193,4 29,160,4 221,401,4 642,520,4 9,700,4 795,86,5 763,320,1 565,1436,2 585,384,3 757,751,3 550,923,5 707,20,1 435,709,4 756,742,2 664,78,3 109,365,3 124,481,1 730,193,3 746,155,3 644,432,4 197,126,5 287,1064,4 180,1364,1 401,236,4 647,253,3 404,41,1 532,227,4 682,315,4 167,299,5 670,1214,3 233,278,3 453,184,2 457,513,5 485,331,3 28,325,2 550,773,5 639,1227,4 644,962,4 642,417,4 670,558,4 683,691,4 624,153,3 362,234,5 47,356,5 461,329,3 715,391,2 781,1389,3 712,306,3 542,1013,4 183,6,3 233,96,2 654,323,3 302,789,4 313,432,3 54,49,4 746,1202,5 681,258,3 148,299,3 723,1616,1 790,288,4 249,174,5 759,215,2 659,144,2 787,234,3 765,647,3 159,236,3 727,677,4 654,248,3 163,471,5 587,723,2 675,317,5 801,134,4 649,433,4 649,626,2 506,318,3 628,257,4 523,549,3 732,6,3 69,100,3 664,318,4 641,541,5 307,284,5 179,938,4 641,142,5 726,789,2 467,41,4 662,356,5 341,275,3 734,689,4 592,630,3 495,267,4 194,581,4 208,812,5 562,780,4 485,116,3 629,275,1 794,1035,2 513,228,3 24,97,5 381,356,4 48,431,5 289,650,3 757,390,3 291,95,4 708,95,5 450,937,4 92,120,3 641,137,4 550,447,4 221,475,3 607,447,5 770,221,2 502,431,5 523,320,3 147,7,4 295,18,5 757,162,5 605,150,5 388,670,5 662,1323,3 754,689,5 797,987,3 745,565,4 559,317,4 787,691,3 646,254,4 663,44,4 732,120,3 398,449,2 342,196,4 663,683,4 434,330,5 297,192,5 713,288,3 456,6,4 503,356,4 307,98,4 786,360,3 757,372,4 274,469,3 694,994,4 695,314,5 794,635,3 222,819,4 270,498,3 784,208,3 797,1118,3 581,14,3 505,675,1 366,183,5 235,132,5 349,653,5 278,947,3 715,185,3 291,630,5 797,163,4 622,226,4 302,1051,2 496,1176,1 693,616,4 678,68,4 730,503,3 621,93,2 441,208,4 449,938,4 81,196,4 799,741,4 766,186,4 663,479,5 
560,655,4 498,11,5 803,10,4 410,229,3 681,239,4 21,383,3 505,754,4 750,1660,1 649,1030,3 594,1022,1 75,69,4 151,154,5 423,49,3 457,545,3 537,209,3 479,862,4 704,372,3 747,677,2 293,180,5 327,797,2 12,757,1 746,2,2 687,325,5 643,126,4 730,152,3 71,552,5 124,268,1 431,275,4 674,649,5 492,287,4 1,236,4 639,567,4 434,99,3 726,270,4 704,567,5 69,221,4 353,864,3 428,91,4 500,684,3 654,1098,3 710,39,4 252,191,1 755,91,3 83,465,4 654,749,2 667,310,4 173,124,5 692,649,3 279,57,4 797,273,5 302,874,4 547,2,1 578,332,4 726,764,2 802,285,5 91,46,4 286,951,4 681,686,2 233,624,3 188,60,3 17,268,5 664,180,4 531,268,4 486,257,5 185,257,1 795,1041,4 801,293,4 274,449,3 696,688,4 100,150,3 394,49,5 753,475,4 344,219,3 377,42,3 415,158,1 592,722,4 647,171,5 117,671,4 118,353,5 144,218,5 200,1422,3 179,784,4 636,322,1 720,988,3 795,158,3 5,256,2 654,164,3 653,275,1 787,69,4 592,760,2 532,65,4 669,231,3 215,55,5 535,479,5 475,238,4 335,25,5 556,749,4 404,230,3 637,678,3 466,9,4 307,1514,4 453,259,1 550,738,4 579,686,3 193,487,3 129,184,5 436,29,4 795,539,2 632,404,4 609,418,5 500,23,3 17,207,4 674,346,4 772,187,3 91,973,2 779,198,5 756,3,5 247,155,5 504,30,4 485,1301,3 150,177,5 536,677,1 622,221,4 471,1,5 679,814,3 474,901,5 587,777,3 550,71,5 513,193,4 581,410,1 523,31,4 627,332,5 763,30,4 535,492,4 732,297,2 277,514,5 483,81,4 779,97,1 180,258,1 711,841,3 536,19,3 232,510,5 708,229,2 293,323,4 442,308,5 121,1044,4 779,466,3 785,448,2 82,30,5 641,173,5 380,741,4 665,516,4 739,321,3 6,512,4 718,777,3 787,204,4 642,204,5 732,247,3 386,918,5 757,285,5 359,320,3 429,18,5 404,727,4 737,55,4 300,248,3 665,81,3 591,311,2 567,655,3 518,312,5 585,172,3 726,746,2 621,832,4 628,466,5 55,731,4 614,854,4 765,509,3 758,299,5 547,325,4 295,296,4 765,134,4 795,526,3 753,741,3 326,43,3 449,607,4 795,153,3 142,314,4 373,227,5 105,190,5 607,27,4 774,347,3 494,471,5 801,285,2 654,256,3 377,746,3 688,6,5 252,1038,4 609,27,4 243,1117,4 803,545,3 755,553,1 637,402,3 757,127,4 750,651,4 406,88,4 803,30,4 791,110,3 536,1102,4 424,668,3 228,750,3 734,812,4 716,110,4 693,177,4 773,126,4 503,724,3 792,297,4 641,1310,3 404,28,4 150,613,4 591,677,2 193,281,3 250,470,3 565,30,3 636,337,4 370,0,4 729,108,4 140,695,4 384,282,2 774,285,4 617,69,3 91,178,5 561,160,3 688,249,5 662,1275,3 294,78,4 599,183,3 750,84,3 618,21,5 592,659,5 757,68,5 683,741,4 659,1410,2 464,55,4 397,497,5 499,470,4 453,955,2 748,14,5 53,116,5 762,209,3 545,52,5 641,375,3 33,989,5 495,420,3 486,24,1 649,403,3 644,3,4 613,146,5 751,270,5 343,97,4 525,287,4 534,151,4 289,0,5 302,186,5 713,684,4 523,446,5 635,8,3 730,418,4 603,126,4 534,691,4 84,231,3 641,215,3 734,627,3 158,455,3 512,322,5 559,254,4 649,1148,4 708,1,4 377,581,5 298,153,4 781,877,3 633,314,5 599,95,5 388,196,5 454,392,3 591,190,5 254,826,2 714,86,4 797,150,3 42,580,3 249,122,3 674,873,4 769,0,5 496,551,3 776,215,4 773,27,3 312,142,3 436,697,2 85,303,3 723,303,4 486,285,2 762,10,4 103,846,2 726,538,2 193,650,3 534,1473,4 785,85,4 433,110,5 734,24,4 473,87,4 740,279,3 452,627,3 405,283,1 711,78,4 333,171,3 131,285,3 670,230,3 453,98,3 743,627,2 681,569,2 600,142,3 139,303,4 776,689,4 765,133,5 745,545,3 773,184,2 757,633,5 704,190,1 150,970,5 607,1008,4 456,1209,4 795,269,4 711,659,4 744,7,4 757,1500,3 536,321,1 473,192,4 737,207,4 681,422,5 441,575,2 373,49,3 240,894,2 791,594,3 746,493,5 775,441,2 444,329,2 465,67,3 799,126,4 499,216,4 717,819,2 715,610,5 222,338,4 179,221,5 556,11,5 398,1209,2 491,527,5 592,160,5 767,64,4 653,282,5 708,446,2 662,1047,4 674,257,3 757,655,5 604,99,5 317,1049,4 271,199,5 
34,326,3 304,477,3 737,171,4 580,268,3 629,974,4 646,992,4 681,166,2 650,326,4 72,81,2 619,945,4 405,195,2 620,86,5 696,282,5 451,85,4 665,602,4 620,893,1 628,522,3 633,716,4 762,374,2 765,365,3 200,971,3 290,211,4 462,750,4 496,201,4 160,315,5 675,113,5 792,120,3 638,110,2 550,1134,5 526,672,4 794,99,5 642,98,4 715,1049,4 698,220,4 267,404,2 647,16,2 395,124,3 486,76,3 658,648,3 781,1657,2 514,268,2 317,513,2 607,356,5 404,1020,1 773,870,1 353,484,4 326,94,3 660,271,4 637,509,3 751,309,1 662,49,5 803,203,4 532,124,5 531,146,4 180,743,2 61,209,4 748,735,3 714,734,4 157,187,4 496,796,3 640,267,4 415,754,4 187,179,5 607,7,2 654,631,3 719,268,3 617,0,4 616,312,1 192,160,3 704,254,5 501,242,3 58,503,5 623,1088,2 341,495,4 647,168,5 483,293,4 616,668,1 547,244,4 752,897,4 507,356,5 302,664,4 726,53,3 676,844,3 753,818,3 771,878,4 453,960,1 180,1251,1 298,238,3 275,386,3 780,287,2 354,1391,4 428,62,2 599,160,4 428,1208,3 353,269,5 746,326,4 558,507,3 487,189,5 91,1022,2 319,824,4 233,198,5 707,312,5 605,938,4 795,163,3 706,207,5 491,481,3 338,326,4 616,479,4 566,46,4 192,754,4 342,522,5 726,293,4 583,422,4 803,81,5 275,253,2 75,1152,2 684,324,3 751,339,4 113,167,3 415,734,5 687,335,2 670,596,4 522,429,4 649,611,4 405,114,4 499,281,4 540,90,5 782,345,5 428,339,5 654,386,3 298,511,4 572,49,4 396,180,4 5,126,5 778,470,4 327,88,5 797,718,1 620,418,4 456,160,4 6,654,5 97,151,3 275,677,3 646,256,2 520,239,3 486,651,5 720,876,3 330,267,5 617,201,2 551,1050,3 390,457,4 671,123,3 683,0,4 176,301,4 795,192,3 605,646,3 577,342,2 709,1038,4 405,46,4 763,526,4 711,691,5 502,267,5 773,51,3 658,194,4 424,233,3 462,300,5 235,116,3 95,143,4 494,558,4 544,28,3 701,287,1 623,904,4 697,420,2 705,99,1 684,326,2 619,97,4 692,513,4 742,296,5 473,659,5 644,487,4 586,331,4 789,173,4 378,207,4 657,116,4 560,154,2 203,267,3 323,677,3 723,309,5 693,180,5 523,49,4 705,49,5 641,462,3 503,98,3 473,565,5 428,683,4 43,196,4 591,1198,5 798,478,5 94,706,3 591,1072,5 781,345,2 649,95,4 346,976,5 584,9,3 642,31,4 690,49,4 618,81,5 594,357,2 678,418,3 803,1075,3 533,404,3 561,285,4 591,327,1 598,273,5 84,172,3 300,161,3 533,545,4 587,41,5 706,630,4 468,285,5 89,1203,4 429,136,3 523,548,4 326,708,4 718,120,1 587,183,4 587,164,2 795,430,4 715,728,2 504,70,4 448,284,5 545,750,3 612,193,5 744,526,3 130,812,3 114,281,4 804,318,2 772,91,4 746,590,2 505,41,3 527,172,5 775,167,5 294,154,4 542,69,4 175,261,4 720,526,5 451,162,4 543,342,2 573,326,3 591,1225,4 391,243,3 789,37,2 689,280,3 224,704,5 338,237,5 0,126,5 408,299,3 266,80,4 382,192,4 302,384,4 607,304,3 622,193,5 624,402,3 542,683,4 620,1027,4 804,714,4 464,173,3 249,932,3 353,463,4 723,879,3 406,513,4 669,479,5 311,1191,3 762,212,4 170,285,3 193,628,3 766,494,4 707,762,4 750,777,3 254,116,2 357,1005,5 349,22,5 659,773,3 55,595,4 797,137,3 538,68,5 214,76,3 773,104,1 773,519,3 689,11,4 485,318,3 91,7,5 748,194,5 590,516,4 794,501,3 756,692,4 706,528,4 803,317,5 193,539,1 200,182,4 345,218,2 144,1288,1 324,479,4 503,141,3 681,37,3 521,133,5 14,219,4 386,678,5 563,322,3 587,683,4 757,315,5 795,398,4 377,567,4 267,91,4 464,403,2 12,397,2 267,577,2 789,283,4 654,593,3 215,63,5 121,1266,4 9,184,5 507,51,4 496,207,3 745,55,3 118,180,4 742,267,4 746,501,5 746,1027,1 415,1,4 781,245,3 787,638,3 353,170,4 606,29,4 537,152,4 803,587,4 423,434,3 801,322,5 750,855,2 757,386,2 697,221,4 428,173,4 406,134,3 341,191,4 436,110,3 307,151,5 646,71,4 550,76,3 446,297,4 710,131,5 124,69,3 15,660,4 689,375,3 531,314,3 491,191,3 502,11,3 235,254,3 709,309,3 795,1118,4 144,929,2 75,1070,3 752,514,5 
749,270,4 142,325,5 446,497,4 342,560,3 507,1152,4 765,227,3 803,398,4 434,742,3 726,423,1 763,76,4 592,285,5 344,714,4 805,323,2 670,430,2 762,257,3 552,519,5 795,490,4 789,411,4 56,470,4 665,299,3 253,117,4 714,52,1 103,331,2 785,428,4 656,108,1 657,602,4 607,460,4 390,172,4 290,669,5 462,844,3 801,451,4 386,61,2 387,314,3 658,75,4 647,497,3 509,456,2 134,469,4 449,630,4 715,210,5 492,889,3 483,698,4 762,460,4 436,173,5 416,54,5 345,656,4 547,330,4 435,22,4 221,155,4 789,1182,2 803,197,5 706,190,5 58,31,4 513,746,4 454,1170,3 494,394,1 362,551,4 485,974,3 57,152,5 362,918,5 654,465,3 623,124,3 280,288,3 607,474,3 795,516,2 795,794,3 424,167,5 300,561,3 451,728,1 496,651,5 604,872,3 653,251,2 674,310,3 541,40,4 523,185,3 681,548,3 553,594,3 156,110,3 688,221,5 484,747,2 41,368,4 763,99,4 434,44,5 55,299,4 158,66,1 206,525,4 500,12,4 533,1326,2 524,321,2 590,1027,3 380,417,3 653,99,1 220,150,1 522,383,3 697,433,4 598,762,5 610,261,4 804,16,4 561,142,5 12,69,3 803,432,4 415,7,5 4,449,1 618,384,5 600,162,4 666,130,5 732,241,4 662,618,4 584,1448,5 711,698,5 659,1019,4 653,738,4 457,146,2 723,907,1 90,656,4 129,178,4 268,162,2 505,417,4 686,263,3 661,812,3 767,1015,2 496,232,2 804,386,3 59,478,5 711,415,3 768,14,3 531,410,3 698,928,3 108,209,5 255,283,4 536,177,4 750,658,5 775,495,3 129,826,4 58,590,4 124,745,3 386,9,4 377,268,4 641,861,4 659,67,4 454,13,3 710,731,4 797,1223,2 592,579,1 803,230,4 320,274,4 520,401,3 743,481,3 605,174,4 550,323,3 657,951,2 405,276,3 723,287,4 6,637,4 503,1117,3 243,117,2 458,15,2 378,209,4 278,455,3 797,195,3 686,318,4 339,178,1 748,186,3 474,69,4 644,429,5 427,1279,3 457,180,2 714,88,3 88,0,5 757,826,3 24,22,4 681,149,4 591,743,3 733,171,4 795,741,3 486,175,5 715,865,3 342,27,5 497,463,4 756,206,2 732,1170,3 264,99,5 531,534,5 393,577,2 754,303,4 746,496,5 253,342,2 58,683,3 660,27,5 536,613,3 654,1170,3 698,267,4 737,68,5 601,8,4 726,186,5 675,351,1 266,578,3 747,143,4 485,9,4 623,149,4 591,343,4 513,196,4 757,1097,5 242,115,4 397,65,4 23,401,4 789,138,2 795,870,1 626,650,4 307,273,3 698,251,4 792,823,3 308,988,3 786,268,3 298,98,3 689,711,4 292,231,2 757,764,2 89,731,5 91,657,3 720,381,4 785,186,4 538,486,3 532,1146,3 215,395,3 787,70,3 120,236,5 805,404,3 726,283,3 221,1034,2 567,485,4 740,227,2 419,300,3 629,1054,3 373,224,3 773,178,5 379,548,3 150,150,5 415,153,4 594,951,5 356,1027,5 762,150,4 534,1038,4 485,244,3 600,155,4 793,474,5 774,257,4 393,67,5 654,47,4 586,894,4 220,173,4 81,78,3 806,398,4 773,237,5 710,400,3 275,172,5 326,522,4 744,202,3 193,515,3 703,654,3 488,747,4 122,431,5 658,181,4 420,126,4 653,384,4 346,155,5 670,32,5 770,992,4 806,418,5 197,706,2 322,178,4 592,277,3 221,362,2 253,664,2 649,848,2 454,251,3 289,526,4 220,93,3 734,146,1 710,49,4 302,385,4 738,968,1 279,383,4 560,344,4 757,411,5 787,503,4 805,628,3 797,115,3 469,359,2 449,166,5 747,195,3 705,0,4 2,324,1 638,509,3 144,1209,1 278,197,3 377,185,3 706,381,3 580,284,5 654,1008,2 560,45,4 560,1014,2 776,55,5 765,587,3 160,209,2 341,498,5 591,97,5 694,345,5 173,49,4 750,1006,4 781,287,4 654,214,2 692,356,5 180,594,2 590,380,4 531,51,4 568,404,3 176,128,3 275,623,2 326,159,4 498,197,5 7,180,4 772,55,2 567,611,3 806,116,4 517,474,4 716,273,4 599,28,2 626,672,2 737,495,4 726,355,3 698,243,3 267,155,3 791,123,4 108,833,3 455,180,3 610,872,3 406,231,3 487,268,3 754,268,5 176,339,4 726,68,4 703,301,4 640,495,2 286,1066,2 647,214,2 767,755,3 290,1276,4 756,88,4 738,285,2 681,976,3 723,871,1 21,289,5 400,301,3 556,297,5 379,97,4 82,861,4 797,479,3 642,789,4 536,522,3 217,185,3 405,181,4 
416,451,2 466,268,4 681,99,3 610,886,2 560,167,4 590,791,4 740,784,3 547,924,2 15,171,5 213,495,4 542,133,5 542,15,3 745,231,3 706,722,3 576,76,3 726,1087,2 796,306,2 371,298,4 145,314,5 108,747,3 800,889,2 458,248,2 424,519,3 384,52,1 789,122,3 550,65,2 431,627,5 703,734,4 804,320,3 312,464,3 505,581,3 747,257,5 708,26,3 458,814,4 662,30,4 748,68,5 404,42,1 144,175,5 560,954,3 36,826,3 434,398,3 304,284,5 779,207,3 639,230,5 449,692,3 415,294,5 495,1228,1 781,293,3 44,281,4 473,301,5 325,384,3 711,72,5 795,931,4 209,791,3 654,264,3 434,572,1 652,721,1 471,209,5 747,134,4 371,594,4 697,488,3 578,747,3 775,126,5 641,863,3 795,495,5 366,412,4 498,312,5 197,530,5 577,1097,2 324,528,4 735,247,4 206,301,4 746,951,2 392,384,4 748,476,3 781,309,4 474,49,5 808,677,2 521,22,5 607,548,4 805,704,4 488,891,3 658,738,4 534,653,5 667,322,4 542,101,4 451,1382,1 803,84,4 58,791,4 748,193,5 680,303,3 536,434,3 560,366,3 604,596,3 124,71,4 513,461,4 233,29,4 804,594,3 710,110,2 607,1038,5 395,470,4 696,335,3 300,430,4 398,14,5 404,70,1 742,310,5 657,177,5 795,764,3 755,99,5 710,734,5 568,1283,2 568,755,3 188,3,5 655,268,3 91,1089,3 341,380,5 535,407,5 689,158,3 231,68,3 81,285,4 759,603,4 806,384,4 608,474,2 781,307,4 706,581,5 289,322,3 539,507,4 488,1279,3 706,1529,3 326,337,1 439,1264,5 132,342,2 311,9,5 253,28,2 183,495,5 578,244,2 753,254,3 808,288,1 137,44,5 132,268,4 362,448,3 803,48,2 560,275,4 560,202,4 478,179,4 377,105,2 717,273,3 777,264,4 523,495,2 710,9,5 649,653,3 123,6,4 643,236,4 744,285,1 329,184,4 585,126,4 354,287,5 200,395,3 151,777,3 99,271,4 550,731,4 787,707,2 324,104,3 6,144,1 478,1141,5 103,49,5 364,845,3 631,587,2 747,513,4 522,209,5 290,20,2 524,1314,4 550,72,2 532,870,2 275,880,3 660,172,4 557,1067,2 462,869,2 726,281,4 515,659,5 524,404,4 446,97,4 603,557,4 555,186,5 803,191,4 794,3,4 621,155,5 205,359,1 732,1657,3 483,6,4 676,1239,5 531,317,5 94,176,3 388,673,2 670,4,2 710,285,4 626,95,3 378,172,5 534,720,3 814,195,4 158,987,3 665,196,4 784,21,4 708,938,4 359,209,4 670,3,5 467,99,5 492,299,4 320,610,4 143,53,2 306,171,5 726,1217,4 275,478,5 658,663,4 271,650,4 803,549,4 649,415,3 633,1008,2 710,247,5 536,345,3 592,965,5 649,714,3 526,69,4 495,9,5 6,600,5 762,21,4 50,133,2 652,380,2 353,237,4 654,206,3 616,218,4 762,274,5 711,401,4 508,344,1 292,588,4 86,1184,4 294,88,5 605,98,4 89,7,5 507,97,3 449,430,5 641,89,4 616,200,1 647,201,5 297,264,4 620,1012,2 500,404,4 338,190,5 523,401,2 738,131,4 801,446,2 386,240,1 678,290,4 17,203,3 605,530,5 669,97,2 806,464,4 496,419,3 388,1049,4 219,339,4 591,874,4 803,1221,3 641,312,5 649,570,3 674,311,2 715,526,5 726,71,3 765,422,3 654,1500,3 624,256,4 653,590,5 747,318,3 720,323,3 806,470,4 795,48,3 544,30,4 312,519,5 816,747,4 5,424,3 81,821,2 652,562,1 676,456,1 416,782,3 640,88,4 663,11,5 781,677,3 785,199,5 531,3,5 714,975,1 552,637,3 707,254,5 403,244,3 638,749,2 748,950,4 520,30,3 505,477,4 693,1454,3 772,233,2 386,855,5 389,285,4 803,212,3 453,841,2 552,189,5 250,23,3 88,474,5 751,287,5 806,249,4 748,471,4 755,160,3 623,6,4 661,318,3 748,77,3 327,191,4 498,496,2 275,2,3 87,353,5 449,1520,3 599,180,4 306,1064,3 676,470,4 439,328,5 804,754,3 720,1295,3 804,746,3 266,482,5 83,485,5 805,176,3 435,762,4 101,287,2 568,684,4 536,179,4 398,778,4 59,88,5 632,127,3 626,577,3 108,390,2 180,1327,1 709,602,4 475,87,4 560,1,3 770,153,2 707,747,4 750,49,5 692,659,3 4,234,4 654,151,3 665,269,3 574,293,1 715,836,4 233,866,4 804,175,4 647,413,1 711,738,4 267,561,4 697,204,4 233,129,1 12,585,3 762,1267,5 704,230,3 381,22,5 373,11,4 286,651,4 419,483,5 
820,741,4 633,124,4 196,81,5 405,14,4 769,254,4 306,579,4 707,303,4 670,95,5 408,204,3 473,63,5 129,943,4 538,961,4 737,379,3 517,1113,2 757,1110,4 532,93,4 367,99,4 212,596,5 103,270,1 651,244,4 794,234,3 500,110,3 772,383,2 575,247,4 641,754,3 209,450,3 707,147,4 761,236,3 357,854,3 187,75,4 710,730,4 529,814,4 654,301,4 773,207,2 262,132,5 542,575,4 324,481,4 319,182,4 538,132,4 757,996,4 590,209,3 551,249,3 762,741,4 757,301,5 757,479,5 814,943,3 212,581,4 560,1034,3 466,23,4 451,495,5 536,542,5 421,560,3 665,488,4 679,14,3 767,844,2 795,575,3 687,753,5 42,130,3 556,528,5 491,99,4 267,267,5 220,226,3 789,231,4 27,572,4 624,516,3 817,301,5 380,508,5 711,1042,3 586,354,3 654,918,2 804,474,5 773,671,1 626,54,4 542,203,4 787,600,4 455,185,4 513,96,5 794,406,3 672,327,4 74,321,1 458,988,5 647,824,4 6,505,5 792,293,5 746,90,5 537,21,5 813,564,3 587,746,4 254,442,1 740,37,2 804,575,4 746,134,5 760,677,2 805,181,5 772,1020,5 494,416,3 523,106,3 654,274,4 377,1100,3 180,1160,1 703,135,4 649,525,4 434,566,3 289,27,5 5,0,4 49,14,2 528,327,4 494,131,4 638,177,5 797,931,4 327,72,4 692,508,3 4,168,5 516,332,3 594,747,2 183,300,3 617,506,4 605,746,4 109,467,3 715,424,5 235,273,1 811,287,4 15,70,5 681,78,4 90,55,1 791,1131,3 390,482,3 560,195,4 319,731,3 188,516,4 89,321,4 335,150,1 423,739,5 399,257,5 814,385,2 785,230,2 708,292,4 797,20,5 733,481,2 158,1091,2 604,21,4 741,180,3 626,635,4 243,1040,4 334,321,4 129,1244,3 551,825,2 720,754,4 795,715,3 632,4,3 297,214,5 752,95,1 525,507,4 698,1614,3 220,808,3 278,514,3 804,526,3 499,99,4 291,1,4 473,21,4 304,430,4 750,99,4 377,708,4 621,794,2 657,31,3 547,677,4 632,1018,4 565,203,3 398,267,3 687,258,5 747,172,4 382,237,5 215,465,4 775,435,4 302,143,5 654,1222,3 120,346,3 748,467,3 544,568,3 456,81,5 626,26,3 622,290,3 654,88,4 737,297,3 614,427,5 56,1000,1 797,585,2 341,261,2 215,237,5 229,495,5 58,239,2 729,299,3 617,1047,3 499,1159,5 822,93,2 544,98,4 550,87,4 805,406,3 670,683,3 567,241,4 708,859,3 658,355,3 366,636,3 663,195,4 670,143,4 621,394,2 767,116,4 654,769,2 566,204,3 675,844,5 554,180,5 714,317,5 473,708,5 488,332,4 565,317,4 416,81,4 804,221,4 629,173,3 621,168,5 353,477,5 331,124,5 795,487,2 302,227,4 750,55,4 516,404,4 747,709,3 384,132,1 681,293,3 415,182,5 765,98,3 787,569,3 575,49,4 756,1089,2 542,88,4 58,643,4 745,549,4 267,87,2 569,257,3 157,232,3 547,1012,3 641,417,5 304,748,2 531,707,4 520,108,5 701,299,3 709,141,3 787,402,3 100,410,2 703,1453,3 305,1513,4 767,256,4 0,15,5 804,472,4 625,312,5 221,108,3 233,491,3 641,233,1 746,204,5 654,1420,3 787,53,4 789,448,2 177,657,5 6,432,5 528,287,4 221,184,4 591,326,4 82,37,5 804,95,4 225,508,4 507,195,3 786,291,3 803,364,4 321,653,5 534,283,4 664,213,4 62,0,3 377,58,4 10,433,4 403,65,4 662,126,5 619,819,4 319,750,4 723,343,1 675,258,4 302,1219,2 709,312,4 275,391,3 487,1049,4 658,628,4 43,552,3 664,814,4 560,21,3 683,385,3 797,196,2 654,920,3 529,659,3 670,741,5 787,187,4 41,150,4 805,2,2 693,422,5 535,120,4 748,731,4 649,630,3 719,748,3 805,191,4 121,189,4 797,1088,3 6,648,5 681,281,4 451,508,4 199,70,4 534,941,4 659,32,2 619,124,2 771,325,4 673,0,4 789,375,2 428,171,5 746,152,4 455,581,5 793,49,5 509,322,4 298,152,3 617,941,2 794,185,3 652,384,4 434,820,2 553,404,4 312,587,4 681,475,1 670,160,5 711,214,3 659,679,2 115,606,2 449,601,4 279,21,5 177,719,3 775,163,3 726,541,2 803,653,3 233,519,4 685,175,3 253,431,2 341,460,3 41,317,5 434,221,3 779,132,5 616,182,4 808,271,5 767,69,4 763,1151,3 223,698,4 737,41,2 768,545,4 803,80,4 659,105,2 333,81,4 802,270,2 175,92,5 424,117,1 1,299,4 
549,876,4 397,522,4 530,337,1 173,779,1 607,938,4 707,755,2 787,193,4 785,179,4 96,185,3 697,219,3 157,9,4 529,87,4 498,473,4 404,1260,1 300,217,4 605,249,4 699,49,5 795,167,5 765,653,4 654,954,3 803,608,3 353,41,2 795,201,4 453,97,1 91,626,3 19,495,5 242,190,5 150,651,5 209,707,5 180,284,2 715,489,4 235,49,3 531,497,4 756,216,3 261,55,4 421,671,3 422,822,3 555,293,2 86,12,3 610,354,1 558,225,5 755,175,4 687,328,5 824,110,3 714,171,4 628,380,4 803,150,3 518,350,5 242,736,3 551,931,3 806,238,4 215,654,5 816,123,4 542,714,3 542,708,3 626,422,3 600,388,2 763,596,4 618,345,3 293,116,4 553,595,3 570,461,4 507,199,4 344,267,4 291,63,5 772,432,3 366,563,2 772,237,4 725,256,3 654,296,4 346,194,4 291,247,4 87,285,5 206,142,4 488,257,5 710,1045,3 359,1038,5 708,469,3 689,738,3 795,25,2 177,1010,3 822,210,5 607,63,4 534,192,4 775,20,3 448,1010,4 465,97,3 566,182,4 770,90,4 759,277,4 726,41,5 349,482,5 649,154,2 163,470,5 737,78,3 486,271,5 803,174,4 129,328,4 762,195,4 595,180,4 693,202,4 715,520,3 670,863,3 806,635,4 388,117,2 520,966,3 748,215,4 711,721,3 769,117,4 624,515,3 720,731,4 804,446,4 607,55,5 609,49,4 215,66,3 512,49,5 770,948,5 424,397,1 638,96,1 285,461,5 338,545,4 24,928,4 745,156,4 24,429,4 292,212,3 748,780,4 61,473,4 652,127,3 801,326,2 471,131,5 768,933,4 533,148,2 534,193,5 61,651,4 304,1017,5 732,13,5 726,130,2 590,614,4 814,172,5 433,742,1 428,1088,2 342,275,5 746,738,3 681,10,4 344,304,4 652,929,4 419,269,3 540,417,5 654,187,3 267,38,3 731,881,5 710,723,5 499,8,4 183,1296,2 757,526,5 708,636,3 592,142,4 794,674,3 263,200,5 565,181,4 641,253,4 751,895,3 463,247,5 698,249,4 660,646,4 683,207,3 69,537,2 826,328,3 269,122,5 789,28,2 536,233,3 112,257,5 747,85,4 36,929,3 633,762,3 641,722,4 654,1377,3 392,356,2 283,301,4 707,596,2 773,1181,1 714,1044,2 653,299,5 670,11,5 450,883,1 415,377,5 748,147,3 726,94,4 353,493,4 177,232,4 499,284,3 822,80,4 715,480,4 415,190,5 535,735,5 795,173,5 636,273,5 824,422,5 801,443,4 183,219,3 560,66,1 750,271,4 676,1244,4 523,516,4 805,653,5 644,72,3 641,152,3 594,245,4 773,30,1 627,291,5 643,322,4 647,0,5 253,628,2 592,154,5 64,0,3 781,263,4 472,301,4 223,327,4 434,1150,1 585,22,2 94,256,5 553,274,4 794,513,4 415,728,5 428,1227,3 720,317,4 624,207,3 733,698,4 748,388,3 654,1052,1 14,236,3 711,135,1 55,86,4 478,30,4 496,0,4 652,194,5 210,274,2 628,728,4 465,186,3 476,845,4 805,237,4 143,1011,4 267,229,3 566,233,3 795,635,2 193,132,3 377,217,3 744,645,4 750,130,5 674,936,1 59,616,4 12,263,4 792,1141,5 803,253,4 659,63,3 553,70,4 0,78,4 42,257,5 81,1062,3 400,891,1 780,321,2 709,22,5 200,314,3 662,329,4 22,61,3 550,1620,1 779,356,5 610,333,5 269,942,5 765,493,3 343,712,3 654,1166,3 685,520,5 188,9,5 795,4,4 278,210,4 564,1395,5 806,1443,3 521,542,4 710,231,3 615,330,4 647,499,5 81,1127,1 655,321,1 800,894,5 127,72,3 803,1487,3 456,840,4 804,41,2 654,404,2 804,89,2 777,238,4 233,551,2 634,1024,2 220,183,4 333,1503,3 735,514,5 649,524,3 513,115,4 649,529,4 434,263,3 794,796,3 279,87,3 659,190,4 84,923,1 370,503,4 790,298,2 828,277,1 638,726,2 781,1278,3 706,85,4 584,58,4 568,12,3 193,142,3 720,21,5 748,133,4 722,136,3 758,677,2 342,605,5 372,389,3 654,630,4 601,120,4 730,0,2 405,190,5 363,324,4 265,236,3 746,633,5 6,473,5 726,1272,3 681,52,2 221,236,4 620,1028,2 787,738,2 587,677,2 275,870,2 500,951,4 803,384,4 744,182,3 599,778,2 772,190,4 188,1400,4 337,188,4 13,80,5 641,82,5 467,723,4 806,1049,5 691,126,3 714,264,5 789,789,2 591,465,5 200,191,4 69,14,3 579,249,5 497,1160,3 782,875,4 741,507,4 468,473,5 803,663,3 664,104,2 416,1015,4 495,606,3 
595,681,4 677,49,4 580,126,5 99,880,1 540,401,3 431,283,4 803,662,5 75,516,5 591,187,5 536,237,4 649,372,1 402,275,4 544,172,5 751,346,4 485,150,2 489,846,3 441,63,5 471,322,4 781,247,4 369,320,2 654,886,3 773,1304,3 698,117,4 642,11,5 828,188,4 520,171,3 404,661,1 752,358,4 803,673,4 665,150,2 52,49,4 806,98,5 641,167,5 757,427,4 527,68,3 6,355,4 379,958,2 711,661,5 379,683,3 781,686,2 397,24,4 17,491,4 536,708,4 629,299,4 601,456,3 386,788,4 15,447,5 647,93,5 806,841,4 810,300,5 681,20,4 680,285,5 756,57,3 803,404,4 486,940,3 526,182,5 678,55,4 738,95,5 708,450,1 542,146,4 455,1546,4 751,325,1 434,63,5 709,334,1 721,150,5 392,549,3 665,728,4 449,1052,3 559,320,3 806,270,3 681,418,3 177,292,4 624,392,4 746,1141,4 185,297,3 307,433,4 269,92,5 761,1661,1 576,225,4 654,973,2 429,151,4 522,721,3 795,1048,4 514,285,2 803,193,4 341,874,1 365,200,5 324,198,5 428,654,3 740,87,4 376,233,5 612,606,4 681,237,3 258,64,3 663,191,4 457,95,4 757,741,4 814,482,5 829,384,4 262,257,3 118,8,4 797,992,3 585,68,4 824,19,2 684,872,2 618,664,5 345,707,3 220,209,5 804,760,3 605,840,3 741,1011,4 379,221,3 614,731,4 254,272,2 660,426,4 789,108,3 303,762,4 763,8,4 703,490,5 200,1169,4 144,1282,1 693,96,5 374,175,4 496,180,5 177,791,5 12,834,3 794,79,3 270,624,3 623,304,4 325,225,5 384,731,3 702,1046,3 662,875,3 536,987,1 544,549,3 523,545,4 825,335,4 233,285,3 730,1268,3 641,576,4 803,1078,4 654,1321,2 629,279,2 750,69,4 404,517,1 665,332,3 816,117,3 453,723,3 621,1078,2 757,789,4 825,312,5 611,299,4 628,318,4 313,541,4 711,98,4 558,397,3 456,214,4 536,745,3 830,876,2 756,548,5 404,452,3 728,682,2 805,482,4 37,143,5 566,211,2 91,565,4 388,656,5 364,275,2 342,461,4 777,192,4 436,185,3 597,257,5 658,6,3 310,96,4 301,357,3 344,279,3 673,124,5 84,482,5 373,14,3 317,99,5 711,219,5 740,721,3 652,454,3 451,6,5 692,580,3 321,126,4 109,641,2 392,458,4 141,314,3 43,143,4 702,49,5 267,1412,2 235,97,5 712,339,3 765,645,4 659,1418,1 524,596,3 378,226,4 781,1240,2 520,94,3 392,1034,3 44,1000,3 560,27,2 818,1536,5 710,908,4 377,721,3 579,1013,3 311,498,4 752,181,3 785,237,4 682,55,5 576,54,3 747,473,4 472,128,4 486,49,4 129,155,3 772,59,5 694,269,4 214,97,5 538,609,4 649,734,3 746,27,4 777,1034,1 709,133,5 654,650,4 681,6,4 626,226,3 716,270,2 371,97,5 789,228,3 586,749,3 42,728,4 677,281,3 451,182,4 706,1112,2 748,294,3 473,413,4 666,503,3 652,227,4 616,237,3 649,211,3 473,519,5 68,123,4 131,284,4 453,384,3 302,7,5 681,30,3 654,424,3 564,381,5 523,6,2 415,692,3 732,236,3 520,12,2 667,282,5 777,615,4 406,273,3 4,430,3 7,687,1 711,624,3 585,76,3 456,1027,3 449,647,5 572,656,4 744,514,4 803,450,2 566,184,5 827,1645,4 654,1631,3 734,331,3 653,143,5 807,332,4 536,656,3 326,285,2 449,714,3 787,97,5 249,97,5 625,922,5 93,468,4 84,734,3 631,431,3 270,696,4 814,181,3 544,731,4 450,878,4 751,689,4 429,100,2 520,158,3 542,650,3 415,214,5 662,362,2 596,126,4 542,210,4 508,325,4 755,54,5 706,115,5 693,131,5 486,431,3 519,239,1 560,704,3 275,1015,3 91,67,3 536,115,3 720,195,5 544,175,4 647,819,2 449,214,5 386,32,3 592,97,5 295,300,5 300,684,3 284,356,5 746,78,4 329,1034,4 587,361,3 84,595,3 393,95,5 343,275,4 654,448,3 560,651,5 486,143,5 473,204,5 550,345,4 108,526,3 449,899,5 620,870,3 601,342,2 665,31,4 795,366,5 298,168,4 628,306,5 527,1617,1 449,253,3 58,424,4 772,52,3 232,477,5 751,904,2 731,689,5 806,527,4 605,236,4 505,879,1 150,605,5 366,49,5 805,88,5 449,209,3 826,346,3 756,116,4 693,691,4 804,580,2 553,214,5 566,202,4 542,10,3 558,195,5 775,485,4 144,346,3 531,217,5 542,635,3 415,1134,2 797,383,2 587,553,3 536,687,1 587,470,5 
720,263,1 290,83,3 822,683,4 648,1282,2 803,185,4 93,367,2 345,1038,2 633,121,3 335,474,4 660,614,4 307,1064,5 798,44,4 196,176,5 100,825,3 84,1170,3 386,185,2 725,1013,1 36,1026,3 554,194,4 659,747,3 94,138,4 248,208,5 118,276,4 455,196,4 638,152,3 55,61,5 750,61,4 668,301,4 777,229,2 622,162,3 776,204,4 747,653,4 455,654,3 561,132,2 769,918,5 824,1046,3 803,472,4 683,709,5 547,750,4 822,1216,1 642,398,3 513,1046,3 697,283,1 398,615,1 440,6,4 198,538,1 373,1100,4 487,244,3 63,88,3 492,191,3 770,240,1 517,846,5 813,655,3 275,24,4 6,213,5 605,923,5 436,696,4 740,240,4 473,247,4 638,715,1 681,63,5 691,203,5 560,30,2 328,38,2 505,28,2 654,247,2 416,766,1 745,37,2 377,0,4 654,0,2 794,218,3 735,256,3 654,25,3 792,843,4 822,501,5 697,190,2 655,271,3 693,30,4 591,288,4 830,95,5 793,237,5 660,78,5 797,838,4 594,345,4 62,320,3 797,1281,3 261,416,2 91,933,2 346,90,1 487,68,4 644,210,4 803,120,4 123,116,3 803,184,4 633,136,3 769,24,5 274,623,3 428,86,3 803,140,3 390,233,4 391,293,4 396,497,4 477,76,1 182,430,2 567,318,2 695,233,4 577,677,3 89,526,5 428,377,3 789,377,3 694,310,4 551,590,3 708,217,4 600,221,4 813,99,4 59,95,4 827,886,4 576,110,4 795,745,3 795,854,3 605,1198,3 693,156,4 180,1320,1 150,723,4 708,55,5 657,180,3 394,0,5 212,212,5 769,545,4 6,670,5 535,545,2 617,789,3 685,207,5 344,737,3 757,13,5 587,27,5 520,404,2 706,132,2 52,283,2 636,935,4 486,1010,3 649,575,1 494,233,5 806,192,4 458,171,5 665,509,4 755,422,3 502,46,5 828,1066,4 245,431,3 707,411,1 748,825,3 654,482,4 744,221,2 797,1248,4 804,421,4 598,844,5 762,196,4 377,1424,2 520,747,3 783,1037,3 649,370,2 719,301,5 6,567,5 814,182,5 478,422,2 804,469,5 711,231,3 822,738,4 0,260,1 681,698,3 586,988,2 585,184,2 803,444,4 34,325,3 289,167,3 760,180,5 525,750,2 681,280,3 755,929,3 652,151,2 720,679,3 757,270,4 344,885,3 665,27,3 358,180,5 652,673,3 391,1006,5 342,475,2 372,417,5 22,233,2 456,317,5 544,173,4 580,274,3 12,770,3 654,1552,4 278,20,3 675,894,1 12,57,4 392,963,2 643,321,5 698,297,4 789,55,4 158,1013,4 767,49,4 824,368,3 746,460,5 664,404,3 388,925,3 681,778,3 789,400,4 740,81,3 834,0,3 624,180,4 832,339,5 832,543,1 604,294,4 688,110,3 475,172,5 678,203,3 810,291,3 236,57,4 373,283,1 343,234,3 637,61,3 600,130,4 665,524,4 145,335,5 180,273,4 726,201,4 93,1045,2 803,127,5 724,257,4 238,517,3 714,825,2 78,282,4 143,115,4 668,353,1 522,1194,5 415,314,3 421,298,1 182,176,5 658,135,5 726,37,1 206,422,4 781,242,3 127,173,3 757,281,3 266,201,5 221,67,4 585,91,3 372,11,5 761,474,5 555,133,5 750,180,5 608,407,5 765,381,3 769,299,5 681,189,4 767,0,5 748,227,5 748,500,4 407,270,3 547,294,5 648,322,3 536,479,4 652,214,2 550,27,4 787,283,3 353,810,5 89,522,4 9,498,4 211,630,5 451,383,2 733,221,1 822,30,5 658,210,3 803,209,5 317,306,3 84,220,2 425,473,4 694,287,4 405,152,3 494,78,5 529,97,4 746,491,4 672,241,4 88,735,3 539,454,4 233,1062,3 278,635,5 544,210,3 748,249,3 300,1229,1 631,762,3 467,125,3 12,686,1 243,707,4 293,830,3 789,1043,4 665,233,3 600,1027,2 835,164,4 773,536,2 498,192,4 232,22,5 434,1046,3 434,1184,1 365,52,5 428,429,4 160,68,4 706,220,4 681,709,3 17,174,4 373,22,3 100,716,3 504,72,4 592,117,4 446,221,3 658,172,4 795,3,5 652,299,4 576,578,4 1,99,5 582,54,4 713,49,5 84,51,3 765,173,3 654,1073,3 496,757,2 773,292,1 791,545,3 698,12,4 483,426,5 696,294,3 476,721,5 764,126,5 86,157,3 618,143,5 747,142,3 644,10,4 444,839,1 822,209,4 815,308,5 752,483,5 649,526,3 666,879,3 558,549,4 206,1011,3 693,644,4 769,180,3 681,618,3 834,324,5 697,152,2 716,750,4 549,251,1 726,62,2 12,601,4 206,534,3 623,357,3 560,180,3 449,656,4 
523,366,5 404,1508,1 628,321,3 560,1148,4 532,147,3 604,495,5 347,476,3 427,270,2 806,426,4 384,404,2 455,1008,5 711,385,3 81,234,1 698,224,3 319,171,4 795,1089,4 422,747,3 124,387,2 488,352,4 692,941,2 782,332,4 467,95,5 158,1047,3 535,167,5 693,392,3 446,27,4 200,714,4 706,498,4 434,190,4 565,464,2 93,180,4 434,577,5 751,1104,3 274,21,3 425,199,2 248,241,5 317,7,4 495,392,1 692,692,3 221,77,1 135,268,5 710,143,2 108,190,4 534,380,3 614,12,4 732,145,3 618,293,1 485,1093,2 590,731,3 713,6,4 486,99,5 243,1052,2 587,654,3 408,485,3 392,190,3 765,229,3 587,401,5 789,51,4 362,327,3 683,727,2 715,247,4 388,58,5 220,404,3 493,527,3 528,285,4 369,493,3 775,442,3 769,294,4 279,659,5 542,302,4 495,483,3 460,8,5 654,395,2 380,442,5 720,193,5 750,733,1 543,309,2 76,153,5 554,247,4 520,153,2 151,279,5 73,306,4 431,92,2 657,653,4 658,605,5 825,1238,4 685,181,5 176,6,4 592,384,4 522,1040,4 565,99,5 89,210,5 824,543,3 781,689,4 666,481,4 377,57,4 706,308,2 795,116,5 5,459,2 639,368,3 427,258,4 647,110,5 832,545,2 560,450,2 715,422,4 737,135,4 692,526,3 746,996,3 715,175,3 444,743,2 405,419,4 822,190,5 632,288,3 406,587,4 313,819,5 629,982,3 757,976,2 224,214,5 544,192,3 617,420,3 696,106,5 536,978,2 654,1646,3 553,281,3 771,320,5 41,49,5 482,150,2 444,1010,1 77,293,3 715,110,4 285,761,2 187,518,4 386,771,4 200,80,1 492,474,3 523,747,2 392,921,4 814,356,5 400,43,4 643,1024,4 436,520,4 441,175,5 499,256,3 705,117,3 386,128,5 405,120,5 829,412,1 806,575,4 457,284,4 425,195,4 220,484,2 625,357,1 346,292,5 779,96,5 566,846,4 261,67,2 804,442,5 664,762,4 449,87,5 449,116,4 592,163,4 805,232,2 748,1227,4 765,126,5 420,186,4 720,874,3 835,267,3 658,49,3 804,167,5 263,46,5 290,800,3 653,1019,4 681,653,4 536,920,3 837,297,3 647,186,3 458,24,2 749,299,3 600,1295,1 536,13,4 706,791,4 470,403,2 492,327,4 795,175,5 688,108,5 476,48,5 115,306,3 505,641,4 254,871,4 401,0,5 781,688,3 233,652,3 398,402,3 709,317,4 487,357,3 205,271,5 454,548,4 736,126,5 304,1,2 838,236,3 238,63,1 789,770,4 456,213,5 715,444,3 497,174,5 822,6,5 607,1100,4 654,1004,4 834,142,5 658,315,4 96,495,2 584,528,3 804,663,5 362,66,1 436,1090,3 566,486,4 789,565,3 704,110,4 127,868,3 786,303,4 760,1276,1 723,360,1 304,596,2 307,656,4 205,309,5 63,270,3 402,8,3 839,478,4 785,698,4 81,474,1 550,350,3 758,257,4 531,180,5 193,392,2 706,1017,3 520,1,3 550,146,4 772,174,4 0,44,5 665,113,4 209,229,3 787,301,4 451,51,3 763,495,5 711,786,3 422,343,4 624,379,3 660,160,4 706,951,3 708,780,3 659,249,4 732,220,4 270,237,4 698,596,3 118,285,5 748,594,4 528,257,4 473,384,4 478,628,3 391,196,5 229,204,3 379,108,2 431,844,4 384,382,1 386,67,4 455,365,2 471,171,5 805,253,3 746,966,3 679,99,3 427,285,3 9,275,4 452,368,2 433,927,5 331,982,2 640,426,4 594,297,4 540,416,4 369,133,4 674,343,4 673,762,5 4,120,4 797,562,2 654,194,3 773,833,1 415,181,4 696,128,5 773,185,3 795,727,3 450,990,2 795,1268,5 759,818,1 235,504,3 15,14,5 313,938,4 770,706,4 756,21,4 774,263,4 664,619,3 483,828,2 114,47,5 839,151,4 605,661,4 629,408,3 317,967,3 653,735,5 523,1064,1 151,774,4 544,390,2 660,69,4 535,485,4 795,1000,2 709,1100,4 560,55,5 198,8,5 42,410,3 762,878,3 566,176,4 415,1263,4 628,326,3 636,117,1 405,654,3 532,987,2 6,469,3 714,89,5 525,741,3 807,293,5 296,214,2 449,654,4 748,172,5 564,511,3 541,432,3 718,531,3 449,268,5 795,750,5 382,181,5 386,641,4 654,173,3 824,293,4 767,1013,2 631,275,2 629,49,3 726,90,4 746,418,5 37,409,3 378,193,5 342,222,5 636,285,5 756,587,3 649,96,3 626,552,3 452,131,3 839,844,5 832,160,1 762,163,4 647,126,3 436,377,4 773,513,2 789,171,4 775,510,5 
822,1117,3 832,155,4 737,38,3 540,842,4 665,204,3 815,257,3 715,48,4 82,565,4 734,292,3 689,430,2 620,270,5 42,353,4 834,192,4 217,465,4 505,45,3 298,97,4 91,249,4 182,61,2 804,88,4 357,178,4 42,925,2 425,431,3 642,968,4 825,430,5 592,55,5 6,565,4 621,276,4 652,10,2 532,173,4 780,126,5 620,27,4 766,99,5 235,193,3 769,252,5 531,229,5 781,1236,3 737,392,3 797,686,4 698,1283,3 384,218,1 600,240,4 604,527,5 765,446,3 114,283,2 756,1209,2 513,182,3 300,49,5 453,203,4 523,617,3 405,508,3 739,287,4 814,215,3 798,49,4 436,506,5 744,1125,2 740,97,5 304,159,4 839,422,5 494,402,5 755,1008,4 787,1247,3 642,714,5 818,861,2 683,733,3 550,163,4 365,199,5 682,587,4 682,886,4 507,268,4 637,97,3 587,879,1 185,819,2 534,187,3 721,146,3 415,410,3 362,233,3 641,203,4 649,133,5 667,256,3 757,511,5 331,194,5 353,312,3 129,1018,4 515,285,5 745,522,3 618,186,5 479,271,4 84,134,5 391,188,4 699,78,3 649,475,2 473,469,3 446,55,5 398,719,3 742,299,4 641,1048,3 129,88,4 781,1476,3 446,155,5 817,315,4 116,95,5 323,300,5 763,10,4 199,233,4 710,314,4 548,619,3 560,920,3 621,82,5 323,326,4 692,482,3 814,523,4 317,1203,2 456,769,4 506,893,5 604,300,3 806,404,4 822,96,5 579,248,5 497,261,2 659,61,2 233,272,3 454,52,1 614,208,5 0,47,5 197,823,2 41,272,3 39,357,3 48,92,5 457,843,4 832,672,4 470,877,4 739,937,1 759,184,2 715,191,3 801,440,3 815,342,4 100,845,3 747,182,4 708,214,3 620,409,4 604,285,4 554,20,4 681,581,1 517,275,5 536,322,1 746,98,5 789,183,3 813,184,3 424,361,3 279,927,5 649,160,3 325,497,5 720,989,5 12,629,2 786,347,4 636,148,2 800,306,4 255,1149,5 462,507,4 832,482,4 503,297,4 704,404,4 778,410,3 806,526,5 530,689,5 695,309,4 804,199,5 397,968,4 710,98,3 765,519,4 450,324,3 475,299,5 416,443,4 826,257,3 666,460,4 697,1298,2 621,552,3 268,830,2 802,242,1 732,283,2 550,750,4 437,863,3 525,330,3 642,662,4 496,174,4 494,1207,4 801,299,4 832,120,1 486,571,1 731,285,5 617,784,3 767,274,4 21,429,4 639,201,5 449,179,4 762,657,3 473,627,4 654,701,2 756,684,3 746,407,5 787,300,2 573,302,3 598,259,1 657,8,4 829,738,4 368,750,4 325,390,4 679,116,4 544,577,4 804,568,1 665,944,4 451,44,4 710,965,5 526,495,4 293,596,3 386,204,5 475,3,4 726,233,2 148,677,2 405,128,5 377,1310,4 745,225,4 830,244,2 822,32,3 561,78,4 653,925,4 652,193,3 754,258,3 729,124,4 775,647,3 831,180,3 649,287,3 392,938,4 641,48,4 675,878,3 403,322,3 748,61,3 693,209,4 794,718,2 806,1033,5 707,845,2 434,410,3 834,707,5 266,613,5 5,241,4 551,287,2 608,242,1 600,131,5 397,234,2 677,24,2 750,482,5 806,150,4 434,0,5 471,464,3 590,507,4 585,229,2 523,392,3 623,268,4 733,110,3 659,379,2 829,68,5 113,97,4 706,629,3 345,152,3 270,46,3 544,750,3 809,300,5 839,516,4 338,167,4 737,256,3 631,155,3 279,1113,4 118,505,5 224,193,5 221,626,3 533,20,4 253,583,3 785,317,5 544,93,3 254,759,1 397,520,5 144,641,3 791,20,3 658,603,4 835,11,5 255,525,3 824,985,5 86,178,4 48,7,3 377,416,3 838,122,3 804,247,4 806,497,4 770,274,5 647,198,4 489,740,4 843,21,4 757,384,4 715,955,4 405,31,5 804,182,5 398,379,3 693,519,5 649,1109,4 200,567,3 641,408,5 692,281,4 133,0,5 748,842,3 654,442,4 696,747,5 127,178,3 787,236,4 588,300,2 73,244,3 238,136,5 741,249,3 179,468,5 586,350,2 649,1117,3 799,117,3 402,927,3 312,545,4 400,581,4 150,961,1 607,461,4 742,297,4 161,146,4 772,217,2 842,185,2 531,138,5 658,480,5 607,955,3 633,627,4 671,14,3 561,140,4 197,49,5 386,384,3 708,402,3 531,315,4 692,317,4 428,755,2 180,1393,1 252,1024,3 765,678,3 712,688,3 486,626,4 839,646,5 143,233,4 562,402,4 169,875,3 710,88,5 42,57,3 397,590,3 665,184,4 177,65,4 411,22,4 519,99,4 616,603,2 707,146,4 773,422,1 
526,152,5 715,674,2 654,115,2 732,124,2 330,505,2 804,826,4 495,416,1 71,683,4 698,180,3 579,322,2 752,482,5 434,383,3 308,689,3 12,805,5 253,870,2 607,1203,2 46,321,2 732,1066,5 639,67,4 681,357,3 710,276,5 791,1010,3 706,1380,3 822,208,4 663,159,3 828,267,4 578,876,1 386,210,4 457,188,4 762,95,2 668,347,1 704,225,3 746,28,1 803,86,4 453,321,2 547,228,5 768,823,2 816,0,4 534,611,4 685,197,5 610,345,5 58,132,3 486,124,5 803,163,4 606,473,4 404,101,1 362,674,3 93,1187,3 177,482,4 12,674,5 837,172,5 806,621,3 827,170,3 448,762,2 794,163,3 801,558,2 774,309,3 751,269,4 292,316,4 576,424,2 697,329,4 91,465,4 783,302,4 550,231,5 531,99,5 638,58,3 756,167,4 723,270,2 415,131,4 654,64,2 289,256,4 635,312,5 792,457,3 833,268,5 540,1314,1 702,863,2 737,152,4 454,233,4 195,12,2 685,513,5 797,747,5 737,746,4 300,156,2 43,635,4 285,727,3 489,918,4 665,470,4 757,304,4 600,182,4 221,194,4 279,166,4 652,510,4 285,203,3 233,328,2 180,1114,1 763,755,3 188,913,2 486,691,5 716,470,4 300,362,4 704,143,3 835,895,3 56,474,2 560,502,4 781,342,2 743,27,3 786,310,4 698,454,3 777,581,1 270,207,4 803,214,5 803,32,4 475,1179,3 620,778,3 845,1073,3 796,49,5 830,590,4 313,69,1 398,275,3 760,0,1 787,233,3 765,418,3 378,338,3 697,506,4 827,18,5 779,507,3 293,122,4 187,150,3 16,99,4 563,49,4 659,206,4 806,419,3 649,450,2 654,132,4 180,1322,1 787,446,3 618,28,1 594,234,3 670,567,5 173,715,5 804,107,3 755,134,2 842,451,2 832,366,3 108,53,3 791,1334,4 740,814,3 763,322,3 360,54,2 693,631,4 392,10,3 641,1094,2 762,236,3 825,37,3 343,121,1 605,221,3 686,747,3 592,738,5 464,614,3 204,241,4 591,292,5 185,1032,3 536,332,2 762,175,4 436,194,2 702,258,1 547,904,4 662,677,2 832,128,3 750,379,3 386,10,3 566,172,4 552,377,3 185,886,4 80,275,4 824,282,2 200,95,4 180,1085,1 278,513,4 585,819,4 245,222,5 795,1227,4 681,365,4 681,567,3 805,342,3 598,747,4 667,270,4 532,256,4 376,199,5 803,201,4 287,293,2 834,392,5 459,284,4 813,199,4 703,479,5 837,595,5 750,435,4 696,681,2 732,819,2 750,404,3 748,279,4 178,690,3 592,1118,5 787,596,3 787,72,3 772,664,2 362,150,4 615,327,3 708,88,3 620,545,3 335,627,3 398,542,3 292,162,4 615,328,3 845,93,4 641,364,4 6,185,4 270,209,4 297,275,2 605,497,4 486,317,3 736,426,3 711,420,4 415,252,3 685,527,5 421,136,5 570,603,3 726,992,4 449,1162,3 307,506,3 523,605,4 641,945,2 465,54,4 829,81,3 6,24,3 377,206,4 94,227,4 255,795,5 160,486,3 832,186,5 412,13,5 438,306,3 756,202,5 749,268,4 687,1233,5 698,221,3 378,711,3 311,588,5 604,123,3 641,377,3 760,260,1 155,85,4 703,169,3 473,325,3 714,454,3 789,1131,2 535,83,4 327,661,3 654,331,3 404,942,1 665,958,4 845,626,4 804,237,5 755,417,3 559,1404,4 473,529,5 566,1021,5 673,299,3 805,120,4 738,215,4 794,576,3 505,247,2 629,928,4 803,204,4 457,743,4 773,239,1 173,1085,5 803,134,3 689,741,3 766,55,4 663,226,3 787,520,4 665,422,3 692,63,3 777,261,4 520,16,1 567,492,3 543,749,3 729,14,4 392,1248,4 534,135,5 150,1268,5 115,339,3 789,737,3 157,822,2 473,12,5 605,99,5 707,1116,4 467,6,3 728,878,3 670,769,2 683,780,3 599,55,5 398,54,2 525,299,2 779,886,4 517,124,5 388,427,3 786,332,3 845,56,2 633,0,3 652,762,1 780,323,4 765,187,4 710,184,4 681,3,3 649,198,4 715,204,5 535,388,5 214,233,4 570,46,3 802,747,1 117,843,5 129,41,4 748,510,4 631,160,3 523,653,5 66,124,4 638,178,1 127,714,4 728,689,2 0,24,4 842,297,2 448,1072,5 345,1257,4 806,142,4 623,293,3 737,185,4 757,252,5 675,915,5 406,602,4 439,311,5 795,214,5 318,260,3 623,239,2 229,213,4 787,482,5 756,264,3 258,483,4 689,215,4 770,288,4 839,731,3 534,236,4 159,496,4 290,233,4 471,1238,5 404,625,1 773,299,2 578,185,3 269,87,5 
180,827,1 483,70,2 647,122,4 241,739,5 605,651,3 746,638,5 262,356,5 777,237,3 625,267,4 302,387,2 657,517,4 449,793,5 86,272,3 325,509,5 719,332,4 711,58,2 442,357,1 683,251,4 530,889,1 757,836,4 422,590,5 781,901,2 654,95,3 824,275,1 449,950,4 605,1046,2 197,228,3 307,364,3 108,287,5 806,583,4 681,517,4 234,206,4 665,69,4 636,123,3 667,81,4 638,236,1 657,49,4 515,211,4 744,99,5 614,791,4 726,120,4 655,337,3 734,268,3 63,185,4 220,1010,4 555,126,5 538,962,4 331,72,4 200,143,4 779,422,5 669,656,5 836,475,3 565,402,3 344,13,4 59,130,4 765,433,5 777,779,3 326,332,2 84,525,4 467,191,4 669,478,5 191,6,4 22,234,1 605,1009,3 745,180,5 812,293,1 471,495,4 333,237,4 379,478,4 565,762,4 641,40,3 370,76,5 624,639,3 726,247,5 470,464,5 605,1150,3 89,64,4 333,300,2 620,734,4 373,37,4 500,1010,4 639,777,4 98,779,5 711,574,3 741,221,2 94,494,4 550,287,4 405,169,3 266,216,4 833,743,4 327,808,4 275,404,3 760,49,5 449,567,4 441,1187,3 692,6,4 255,545,4 841,361,3 531,495,5 685,479,5 441,61,2 743,1133,3 533,234,4 333,18,4 825,240,4 473,149,5 478,565,3 726,209,3 652,1027,2 665,133,5 388,583,4 536,195,3 576,28,3 757,158,3 416,178,4 794,7,5 681,338,2 700,256,4 292,1332,4 771,306,4 596,677,1 343,272,4 333,1047,4 805,521,3 225,274,3 267,59,5 566,1251,3 738,1428,5 605,120,4 693,152,4 757,379,4 494,448,5 749,687,1 714,275,3 660,165,5 579,221,3 317,132,4 717,470,5 646,21,5 451,370,3 789,940,3 61,68,4 193,218,2 644,446,3 681,419,3 392,411,3 292,3,4 434,24,5 718,22,3 337,268,4 542,508,3 541,10,2 541,96,4 4,373,3 306,195,3 129,1012,4 845,376,2 705,24,4 665,317,5 404,950,1 726,196,3 772,1474,4 307,140,3 749,357,3 456,691,4 733,55,1 513,233,3 711,201,4 587,353,5 500,120,4 791,843,4 654,1505,3 781,1388,3 81,495,4 306,71,3 643,332,3 461,865,5 172,331,4 647,192,4 404,707,1 795,202,3 290,412,4 10,207,4 748,602,5 663,153,5 183,723,4 294,450,4 828,191,5 836,844,4 497,167,4 587,61,2 757,3,4 678,709,4 772,51,3 498,6,4 392,1048,4 550,201,4 542,11,5 795,321,3 699,179,3 486,365,3 649,140,4 657,407,5 536,126,5 789,582,2 748,632,4 457,19,4 534,460,3 664,1008,4 326,366,4 534,777,2 641,464,4 846,289,4 795,418,5 784,422,2 505,664,2 298,752,5 373,16,2 672,267,1 200,885,1 25,282,3 714,954,4 577,257,1 718,281,4 682,331,3 791,23,3 566,133,5 333,282,4 773,467,2 795,277,4 789,264,4 489,291,3 160,14,2 175,404,2 233,218,2 326,335,2 486,269,5 15,151,4 738,525,5 771,677,4 654,1169,3 520,1012,1 471,545,4 714,116,3 198,13,4 433,273,5 292,1208,2 781,1642,2 835,179,5 769,243,4 715,503,5 295,314,5 632,938,4 641,1125,1 842,664,3 629,30,2 795,28,3 612,513,4 753,743,3 483,314,3 449,27,4 609,275,4 388,345,4 58,227,4 715,567,4 843,402,3 775,438,1 787,780,3 746,162,4 127,404,4 604,222,5 338,172,5 526,10,4 398,171,3 681,124,4 682,689,4 93,476,2 623,275,5 59,208,5 404,500,3 160,314,5 369,442,5 664,1131,2 591,935,4 421,4,3 267,2,1 759,201,3 585,187,2 269,474,5 49,326,3 834,659,4 698,22,4 325,377,4 689,395,2 131,49,3 762,628,5 839,134,5 847,163,5 502,24,4 353,305,5 619,968,4 647,839,1 609,271,4 124,110,3 285,201,4 732,286,3 832,440,1 324,494,3 804,32,5 456,161,5 248,251,2 765,130,3 714,26,3 662,350,2 124,394,3 702,409,4 505,392,3 127,684,3 675,901,4 837,731,4 331,8,4 647,927,4 641,376,3 17,962,5 720,174,5 594,303,3 549,293,3 709,181,4 473,140,4 757,248,4 647,755,2 664,124,4 129,425,4 323,457,4 693,467,4 651,281,4 294,70,5 436,477,5 822,47,5 806,0,4 359,198,5 546,312,5 307,658,3 117,557,5 515,193,4 307,1168,5 0,250,4 395,120,5 642,161,3 832,159,5 845,381,3 693,479,4 762,626,3 590,201,3 312,124,3 604,131,5 726,379,3 266,91,4 772,267,4 693,658,4 532,526,4 
769,288,5 660,190,4 669,167,3 130,136,1 566,11,4 712,312,3 393,178,5 649,704,4 621,221,5 779,509,4 553,0,3 714,379,3 21,209,3 691,237,4 789,608,2 847,293,5 382,656,5 547,276,3 621,1180,4 697,143,2 91,209,4 673,684,3 150,955,4 797,256,4 804,215,2 539,299,3 653,1015,4 662,472,3 718,356,4 740,233,4 842,237,3 806,945,3 453,123,4 797,364,3 794,380,2 804,239,3 718,288,2 757,684,5 560,402,3 716,474,5 839,477,3 559,1264,3 781,986,3 373,38,4 585,214,4 842,97,3 289,179,1 513,153,4 310,649,3 721,290,4 836,219,4 607,693,3 206,95,3 331,30,4 101,410,2 827,56,3 550,204,5 9,628,4 505,327,4 763,175,4 703,606,4 523,637,2 665,247,3 733,603,4 47,528,4 715,3,2 821,1090,1 428,100,4 576,435,4 814,152,4 143,1012,1 124,755,4 824,6,5 791,475,1 192,121,1 84,520,3 586,307,3 222,983,3 748,293,2 473,583,5 730,661,3 487,303,4 710,221,3 600,257,5 787,434,3 183,487,5 386,88,5 814,527,5 845,59,4 118,88,4 829,232,3 532,470,4 682,61,4 486,248,1 567,186,3 540,70,5 614,207,4 550,808,5 380,293,5 795,272,2 324,133,4 785,69,4 564,212,4 762,194,4 392,16,1 773,520,2 343,477,4 89,96,5 455,31,4 795,780,4 532,186,4 665,1109,3 710,401,4 292,66,3 681,11,5 454,117,4 710,7,5 665,263,3 712,346,4 59,22,4 639,65,4 698,13,3 362,88,4 846,184,2 467,1015,3 647,221,5 441,153,4 631,182,4 748,167,5 837,747,3 618,549,5 829,99,5 680,257,1 805,171,3 82,242,3 404,528,1 108,654,3 384,939,3 659,175,3 822,143,5 773,648,3 404,860,1 415,135,5 464,650,3 781,1314,3 575,814,3 762,110,2 785,215,4 550,760,1 790,326,5 353,495,3 746,587,5 0,194,5 715,569,3 809,878,5 275,430,3 698,307,4 640,284,5 806,264,5 108,364,4 667,357,3 789,785,3 765,317,5 845,1517,2 726,183,3 499,237,4 760,257,4 723,881,1 845,89,2 17,80,3 628,285,4 566,639,4 767,287,4 536,651,3 624,404,3 199,242,3 607,10,5 829,510,5 563,291,4 726,153,3 748,377,5 525,306,2 559,1072,3 757,296,4 415,284,2 748,174,3 230,49,4 757,715,2 744,187,3 747,192,3 730,96,5 101,333,2 779,21,4 757,120,2 560,426,4 61,81,4 449,632,5 787,326,3 675,172,5 795,519,3 827,970,4 344,517,4 746,579,5 659,348,3 559,652,4 832,830,1 633,239,3 714,754,2 587,78,4 415,156,4 233,478,5 638,215,3 563,180,4 806,1090,3 803,395,3 555,492,5 718,236,2 497,921,5 791,290,2 681,233,3 803,992,2 552,22,5 150,169,5 681,577,3 770,3,1 822,474,5 587,1057,2 635,120,5 610,335,5 541,731,3 726,274,3 187,672,4 665,175,4 652,232,3 327,605,3 69,754,3 745,398,3 562,677,2 361,331,5 357,637,3 804,659,3 833,281,4 662,1244,4 505,9,2 477,466,5 773,740,1 539,221,4 587,1043,4 647,1109,3 220,258,4 641,403,3 806,176,4 542,113,4 584,18,3 405,9,3 830,180,5 755,137,2 765,210,4 427,321,4 239,750,3 762,506,4 654,661,2 333,336,4 847,1125,5 43,479,4 745,430,5 715,126,5 486,832,4 108,430,3 744,479,3 534,15,4 268,662,4 692,214,4 411,185,5 637,203,5 737,750,3 638,86,3 486,1409,5 456,581,5 750,590,1 591,241,5 297,418,5 759,161,3 300,218,4 101,475,3 451,460,4 84,588,3 833,292,3 193,71,3 832,204,4 548,236,4 177,747,4 827,920,4 797,402,4 561,503,4 434,70,3 224,509,5 820,179,5 400,311,3 757,190,5 804,385,3 517,828,3 834,156,4 730,7,2 748,181,3 698,120,3 665,518,4 618,575,4 789,0,3 312,146,4 539,292,4 794,199,3 760,287,4 560,493,4 768,257,3 794,432,4 778,221,4 772,1187,2 173,154,4 392,709,4 404,1580,1 215,301,5 681,172,4 693,613,4 28,11,5 444,1198,1 471,108,4 842,614,3 804,39,3 436,435,4 647,727,2 600,203,2 820,96,5 379,553,2 88,87,4 576,30,4 215,49,4 395,839,3 847,480,3 825,171,5 9,477,5 832,51,3 147,662,5 226,284,4 698,210,1 833,257,4 503,210,4 552,518,5 215,183,4 795,484,4 592,746,4 546,337,2 787,182,5 832,203,1 345,202,4 755,408,2 483,215,4 659,738,2 351,78,4 654,284,4 615,322,4 
707,1060,3 215,65,2 90,500,2 782,334,3 406,238,4 756,240,3 646,402,4 714,68,4 756,201,4 459,223,4 833,99,4 483,236,3 777,194,4 621,100,5 584,712,4 665,435,3 93,657,3 41,490,3 91,234,3 696,281,4 146,897,5 331,326,5 723,537,2 503,1045,4 550,409,5 503,293,2 452,683,3 499,761,4 392,622,3 787,68,4 89,384,4 851,684,3 787,450,4 728,353,5 681,723,4 587,230,4 48,371,4 814,622,3 814,602,3 750,41,5 850,1008,2 757,577,4 781,990,2 647,482,5 200,854,4 55,1027,4 140,275,1 151,739,4 848,927,5 526,169,3 527,82,5 827,268,4 471,596,5 786,328,4 654,1374,3 416,372,3 483,28,3 672,271,5 341,1056,2 797,689,4 659,346,3 499,779,3 646,212,3 366,97,5 404,37,5 256,56,5 789,366,4 659,770,2 396,679,1 720,687,3 101,395,2 834,627,3 788,92,4 654,124,2 803,227,4 795,97,5 206,222,3 652,120,4 89,401,5 566,31,5 275,992,3 384,483,4 434,27,3 787,797,2 505,186,5 467,366,4 642,823,3 22,215,4 693,603,4 492,90,3 333,55,4 636,872,1 687,337,5 668,489,5 181,47,3 829,287,1 850,26,4 679,284,5 804,1231,3 633,921,4 648,146,4 839,464,4 424,123,2 377,622,3 523,194,2 120,297,2 670,146,1 405,151,2 312,503,5 275,517,4 338,403,4 839,165,5 762,282,4 836,224,3 25,251,3 177,606,3 167,743,5 620,1046,3 772,44,4 300,3,4 12,196,4 804,146,5 302,318,5 341,215,5 803,1177,3 814,130,2 118,69,3 594,1164,1 193,172,5 424,975,1 820,274,5 290,2,3 837,180,5 42,221,4 692,97,4 449,86,5 757,99,5 95,182,4 404,287,5 836,273,4 391,259,1 698,819,2 418,78,4 471,93,5 86,534,4 76,95,3 793,513,5 59,655,4 605,102,3 441,203,3 637,430,4 417,332,5 622,126,4 787,698,3 55,236,5 693,1034,4 658,495,5 550,602,5 781,871,2 527,293,3 638,56,3 715,47,5 534,44,3 679,194,4 221,153,3 483,149,4 846,825,3 832,126,5 565,69,4 641,230,3 769,281,5 52,155,4 559,126,5 795,356,4 757,220,3 267,289,3 617,814,4 797,831,4 754,342,3 317,39,4 726,683,4 644,482,5 304,165,4 150,193,4 322,294,3 654,1405,3 795,120,5 804,147,2 544,587,4 781,351,1 19,142,3 406,126,3 556,252,3 604,293,4 285,381,5 660,750,4 561,3,1 587,741,4 830,332,4 729,116,3 812,288,4 698,716,1 829,224,3 401,275,5 772,768,1 350,881,5 108,167,3 838,259,2 832,853,4 638,957,4 795,416,4 734,125,3 369,51,4 706,96,4 497,663,5 670,116,3 757,1158,5 748,116,4 750,290,3 498,190,5 342,52,5 766,140,4 681,16,3 212,169,5 732,15,3 93,463,5 642,1011,4 423,150,2 587,366,5 488,318,3 797,366,3 843,194,3 702,470,4 653,274,5 766,171,5 496,1029,1 737,232,3 491,1146,1 688,14,5 838,256,3 311,1297,5 166,240,5 190,344,4 647,431,5 853,185,3 550,762,5 245,569,1 424,173,3 755,398,2 333,606,3 636,290,4 847,68,2 599,230,3 599,229,4 655,300,3 682,311,3 17,528,5 837,317,5 757,25,4 615,285,5 626,61,4 270,492,4 485,1046,2 585,203,3 108,563,3 454,297,4 386,100,4 193,464,3 837,71,4 86,807,3 662,332,5 252,426,5 644,215,4 790,327,4 536,729,3 505,575,4 667,299,4 825,49,5 839,1064,5 838,243,3 693,175,5 286,814,3 659,271,4 847,31,5 804,136,5 814,450,3 787,539,3 845,565,5 536,1009,2 68,116,4 379,207,2 649,1059,3 849,203,5 486,1424,4 756,1187,3 814,404,4 805,196,4 805,203,5 572,181,4 698,146,2 391,492,4 654,1272,2 658,366,3 649,1038,3 457,189,4 404,1583,1 366,4,4 450,1279,1 501,269,2 285,1050,4 762,126,4 483,78,5 673,322,3 654,257,2 744,97,5 647,39,4 71,190,5 703,258,2 489,409,4 499,386,2 746,477,4 665,503,4 853,978,4 789,1118,4 124,20,3 4,364,1 835,879,4 12,454,3 451,855,4 408,326,2 64,510,4 366,163,4 307,518,4 360,227,4 396,285,4 404,854,1 748,131,4 405,85,4 842,443,2 232,132,5 469,180,4 636,292,3 605,490,4 748,658,5 36,832,4 233,950,1 714,654,4 792,149,4 377,274,5 706,99,5 745,78,5 157,448,2 54,120,3 485,1010,4 834,653,5 544,233,3 647,929,3 817,911,3 714,590,4 781,1393,4 221,139,1 
298,395,4 605,137,3 773,21,2 626,160,2 828,317,5 647,412,2 757,482,5 458,288,4 551,814,3 850,1244,4 322,92,4 654,530,4 404,746,1 710,256,3 290,771,4 838,824,4 458,244,3 804,31,4 206,116,3 737,70,3 681,200,4 386,297,3 619,408,4 669,510,4 229,210,5 636,1010,1 457,1066,5 311,508,5 12,306,2 12,226,5 652,138,2 621,1073,2 628,689,2 783,327,3 853,474,4 496,290,3 20,443,3 292,244,3 813,558,3 568,320,4 404,1193,1 382,131,5 803,202,4 467,169,4 781,1243,3 600,68,3 848,142,5 285,954,5 857,330,3 846,287,4 847,528,5 455,545,4 638,614,5 748,433,4 832,378,2 750,380,1 377,254,4 572,506,5 795,545,4 502,0,5 91,306,4 850,986,1 48,160,1 398,101,3 765,558,4 748,615,3 688,297,4 392,121,1 590,654,4 785,81,4 731,937,1 806,449,4 200,653,3 836,534,1 654,1212,2 88,221,5 793,220,4 591,880,1 542,729,3 693,238,4 541,584,2 116,1164,3 804,11,4 653,217,2 483,228,5 759,124,4 654,812,3 503,138,3 143,126,4 129,1279,4 202,1048,2 189,545,3 451,57,3 746,500,5 339,179,3 659,221,2 683,117,4 853,498,4 696,472,5 592,370,3 715,471,3 431,247,4 319,146,4 750,754,4 392,279,4 404,443,3 708,128,2 805,143,5 685,126,5 196,3,3 439,241,5 785,519,4 781,325,5 812,334,2 520,185,4 716,105,4 606,212,4 825,91,4 180,1198,1 221,476,2 505,210,4 55,420,4 605,684,3 86,1027,4 852,1024,4 387,590,4 434,678,3 327,226,3 620,120,3 737,229,4 787,711,3 91,1041,3 292,274,3 697,504,2 662,68,4 244,595,4 721,306,4 617,68,4 121,1112,5 542,191,4 581,368,1 681,55,4 453,278,4 639,232,4 709,186,5 143,146,3 377,391,3 532,407,4 850,692,5 726,506,2 767,110,3 166,830,3 751,288,1 824,987,3 845,257,3 696,927,3 199,93,4 111,890,3 781,1643,2 641,365,4 377,707,4 733,82,4 724,299,4 800,258,3 653,237,4 803,661,4 842,447,4 707,865,5 673,303,3 757,481,5 632,332,3 765,481,3 658,175,4 108,575,3 435,216,4 300,231,4 824,840,4 559,844,3 850,10,5 30,152,4 803,156,4 801,444,3 538,1210,3 843,89,3 689,222,4 849,207,5 747,178,4 825,181,4 223,379,4 720,287,3 750,167,5 757,763,1 694,304,3 814,495,5 854,282,3 779,299,3 293,253,3 188,180,3 839,1265,5 421,218,4 747,187,4 751,321,1 616,674,4 639,21,4 596,322,3 781,749,4 108,714,2 829,193,4 839,251,4 804,174,5 745,49,5 532,76,4 483,384,4 806,704,4 649,78,3 591,280,4 704,285,3 145,344,4 572,257,4 422,285,4 416,1089,3 523,1092,4 276,257,4 464,6,5 399,268,4 591,1128,5 30,302,3 7,402,4 302,476,3 834,130,5 816,357,4 746,3,4 832,652,4 853,86,4 423,114,1 233,357,1 823,677,3 775,275,4 699,167,3 302,229,3 785,160,4 6,179,5 654,370,3 94,780,2 380,177,4 214,432,3 746,301,5 787,81,3 845,204,5 693,355,4 795,281,4 781,1608,1 704,1227,2 638,356,3 469,275,5 805,156,3 614,735,5 292,149,3 530,258,1 565,164,5 789,747,1 641,224,4 631,90,3 628,97,5 784,1049,3 619,950,3 716,245,5 664,292,4 532,475,2 827,324,2 486,57,5 590,210,4 638,11,3 605,844,4 276,761,3 652,745,5 834,49,4 377,950,3 377,960,3 786,306,4 757,685,3 183,812,4 159,60,4 634,326,5 746,692,5 157,561,4 851,472,3 705,330,5 803,94,2 737,602,5 708,567,4 614,422,5 765,51,4 851,407,5 842,635,4 623,249,4 752,509,4 626,696,5 826,267,4 292,315,3 756,143,4 665,631,4 58,686,1 795,730,3 116,409,3 714,287,4 188,951,5 676,221,4 733,581,2 710,192,4 344,123,5 706,64,4 746,1203,4 121,402,4 832,153,5 591,331,3 229,99,4 678,240,3 826,312,3 631,232,3 797,355,3 244,209,3 641,382,5 848,171,5 709,222,4 774,269,2 492,123,3 488,331,5 839,427,4 757,433,3 579,257,5 681,153,5 398,56,4 375,356,4 703,171,2 796,327,2 681,253,2 475,207,5 605,434,4 726,679,3 640,241,5 844,310,4 787,124,3 325,3,1 688,747,5 852,301,4 12,506,1 602,418,2 462,242,1 794,770,3 787,743,4 157,174,4 773,202,2 200,1400,2 620,939,3 442,312,4 505,567,5 577,249,2 737,384,5 
471,809,5 714,110,3 307,419,4 838,280,3 850,271,5 464,7,4 647,454,3 706,955,5 499,300,2 253,166,3 840,891,3 662,133,5 795,1039,3 505,769,3 858,110,4 806,494,4 861,49,5 790,49,5 311,609,5 756,1034,2 845,35,2 237,404,4 359,55,4 654,745,3 196,181,3 0,152,3 683,595,3 825,160,3 435,42,2 338,135,5 585,294,3 681,217,3 855,878,3 732,223,4 545,180,5 557,14,3 529,203,4 473,942,4 726,108,2 812,537,3 772,1186,3 723,325,4 647,167,5 485,0,4 762,68,4 737,173,5 824,297,5 390,470,2 25,677,2 819,537,3 416,78,3 291,49,4 708,451,3 862,753,3 731,323,2 748,244,4 562,69,4 783,291,4 652,653,2 279,227,3 41,659,3 705,6,3 178,913,5 331,355,3 38,271,2 814,101,3 605,92,4 307,181,5 356,406,3 641,725,2 547,897,1 326,506,4 698,404,3 531,780,5 843,422,3 417,749,2 789,229,4 535,403,4 621,395,1 771,331,4 618,16,1 851,322,3 706,601,4 657,487,4 48,1074,2 755,2,1 665,254,4 689,236,4 652,596,4 436,628,3 765,94,3 372,432,3 792,108,4 767,331,4 200,211,4 421,446,4 714,231,4 834,609,5 271,207,4 428,846,3 542,662,4 688,120,5 660,193,5 551,404,3 689,233,4 642,210,4 775,568,3 534,78,3 570,193,3 542,396,3 632,1045,4 785,239,1 850,254,3 411,210,4 693,236,4 845,135,3 606,211,3 652,192,4 832,613,2 380,461,4 835,506,4 850,543,4 553,683,4 721,677,3 279,110,4 822,232,4 339,14,5 390,147,3 665,615,3 498,152,4 362,185,3 404,59,1 454,659,4 649,68,2 772,170,5 581,474,5 746,928,3 4,188,5 845,1410,4 701,1126,2 475,55,4 689,232,3 267,200,3 392,183,4 103,249,3 631,0,3 706,646,5 493,85,3 82,819,2 638,650,4 781,531,2 388,587,5 793,556,4 435,1226,2 586,322,4 81,168,4 279,384,5 506,690,5 451,126,5 566,473,5 404,726,1 390,478,4 801,1024,3 354,328,3 398,400,3 642,1138,3 117,257,5 317,264,4 101,264,3 845,402,3 243,1149,4 803,674,3 853,125,3 389,12,2 536,473,5 794,741,2 774,268,4 157,809,4 547,357,2 797,464,4 765,674,3 367,776,2 300,403,3 497,82,3 436,431,3 829,21,5 685,173,4 58,632,3 486,587,5 444,643,3 837,496,5 11,227,4 446,1325,4 505,496,5 13,172,4 829,176,4 507,233,4 584,1557,5 832,1213,4 377,1052,3 458,285,4 642,403,4 787,9,4 581,292,5 617,525,5 307,566,4 636,933,1 432,292,3 804,602,4 675,63,5 711,59,1 572,346,4 454,590,4 713,14,3 653,96,3 605,11,2 298,748,1 397,428,4 724,99,5 690,477,4 605,654,4 416,809,3 215,762,4 787,52,1 236,175,3 748,840,3 845,735,4 803,0,5 715,281,3 849,489,5 198,407,5 388,944,4 604,356,5 396,209,4 402,99,5 664,108,4 732,321,2 229,167,4 434,320,3 344,64,4 777,208,4 390,627,4 497,123,3 789,571,3 842,144,3 478,147,2 302,270,2 765,673,3 832,21,3 449,1281,3 488,267,2 12,227,4 176,641,4 377,434,4 215,672,4 834,1152,4 781,1378,3 859,3,4 560,672,3 270,480,3 622,120,4 550,176,5 755,1273,2 406,755,2 84,49,5 434,925,3 781,514,3 552,1008,4 411,149,4 744,180,2 742,878,4 797,221,3 115,314,3 435,98,3 824,863,3 566,22,4 748,230,4 486,70,3 471,385,5 267,177,4 715,525,5 829,401,4 359,296,4 568,257,5 847,151,5 745,23,4 641,291,2 200,45,4 378,140,4 48,724,2 711,25,2 357,64,4 743,237,4 495,146,3 507,90,4 740,274,4 775,21,5 822,273,4 693,377,3 591,251,3 565,683,4 746,1066,2 541,70,3 21,88,5 662,259,2 805,23,3 499,471,3 641,65,5 483,173,5 786,258,4 498,116,3 626,69,4 658,577,3 773,257,1 715,27,5 666,191,5 647,144,4 77,24,3 662,287,4 327,317,5 829,94,3 624,167,3 495,468,3 584,17,2 531,721,3 523,497,5 763,1220,4 553,12,2 825,683,3 842,194,4 765,950,3 653,734,4 844,902,4 831,333,2 58,661,3 388,87,3 853,408,2 607,417,1 598,275,2 533,476,3 682,132,5 605,470,4 60,268,3 772,227,3 471,173,5 127,604,3 292,468,4 449,167,5 406,157,2 681,771,4 639,179,5 850,337,3 706,899,4 611,274,5 487,706,2 654,763,1 681,120,4 637,175,3 726,264,4 220,54,4 594,545,4 550,91,5 
607,741,4 836,18,4 528,321,4 226,248,2 664,8,4 605,23,5 794,239,2 466,1225,4 285,430,5 681,46,1 777,120,3 587,160,4 444,1080,1 392,1015,5 642,738,3 626,801,2 750,268,5 372,212,4 339,501,2 404,544,1 560,479,4 773,134,3 852,330,2 787,217,4 615,302,4 726,49,4 824,49,4 592,105,2 245,723,4 852,244,3 20,993,2 855,677,3 862,263,3 435,326,5 404,776,1 391,49,5 718,117,2 845,1285,4 641,870,3 282,411,5 827,752,4 708,126,5 591,938,3 724,357,3 765,492,4 840,330,5 842,182,5 93,503,5 552,55,4 654,336,2 659,98,2 285,277,5 296,227,2 301,288,3 560,214,3 845,602,5 665,855,5 855,257,4 754,301,4 795,325,4 658,130,4 467,96,5 756,192,4 846,404,3 781,1668,2 806,611,5 312,624,4 695,312,3 624,22,4 523,413,4 617,22,5 773,848,1 150,221,5 758,297,4 177,303,4 797,141,3 838,275,3 485,326,3 714,178,4 825,1218,4 605,128,3 814,464,5 424,342,3 343,25,3 307,670,4 717,741,5 513,12,3 747,175,5 380,133,5 80,1058,3 576,467,3 471,741,5 654,854,3 806,21,5 850,563,3 620,577,5 649,501,3 765,271,4 773,435,2 840,747,4 803,567,4 379,650,3 654,1146,3 633,990,3 726,377,3 853,121,3 856,293,3 307,68,2 710,419,5 436,236,4 726,385,2 281,318,4 433,150,5 652,940,1 849,201,4 449,629,3 845,673,4 143,13,4 863,70,3 764,24,4 681,565,3 64,530,4 248,326,4 660,683,3 624,497,4 196,1419,1 756,128,3 544,1227,3 345,1216,4 803,521,3 624,143,4 822,658,4 837,168,4 84,237,2 803,242,3 709,191,5 44,6,3 862,300,4 471,442,4 717,545,4 750,385,3 654,310,3 693,583,4 446,1015,3 310,644,5 659,37,2 166,614,5 517,120,5 759,49,3 618,52,2 797,661,3 762,417,4 721,457,4 649,635,3 762,97,4 683,201,4 795,392,4 668,11,5 689,119,1 624,654,3 600,209,4 83,822,3 652,574,1 416,795,4 787,75,3 446,545,2 805,149,4 226,404,2 297,819,4 863,407,5 681,383,2 704,283,3 750,747,2 542,93,3 182,93,3 238,646,5 710,142,5 863,52,4 864,1027,1 647,367,2 832,156,2 752,498,3 757,528,4 550,778,4 428,276,4 777,737,1 806,1482,4 505,683,5 296,155,4 845,584,2 605,306,4 764,241,5 840,287,3 479,293,1 342,153,5 647,443,3 543,330,3 357,173,1 846,237,2 861,406,3 671,814,4 797,269,4 711,622,4 187,317,5 682,681,1 386,525,4 356,686,3 804,357,3 15,86,4 747,356,3 785,233,3 837,310,4 402,117,5 794,183,4 278,549,4 434,780,3 692,605,4 846,1171,1 325,848,1 787,1134,2 810,303,5 534,701,1 847,583,3 853,1013,3 124,1269,3 713,299,5 756,422,3 774,271,4 863,641,3 331,239,4 825,187,4 777,203,4 806,1137,5 824,831,3 845,671,4 536,337,1 696,1159,1 285,746,4 797,1516,4 702,236,5 660,208,4 742,221,4 138,241,3 566,1020,4 751,310,3 291,196,5 647,14,1 362,3,5 474,126,4 278,863,5 662,741,4 539,474,4 369,192,4 233,99,4 828,249,3 124,48,3 597,342,2 734,326,3 668,257,2 297,131,5 416,500,3 488,537,4 715,411,2 647,8,1 523,418,1 457,282,5 789,190,3 536,211,3 726,231,3 16,110,3 293,267,4 816,146,3 267,422,2 769,324,4 766,175,3 292,527,4 253,141,3 434,368,1 534,510,3 803,183,5 254,545,3 620,269,4 418,49,5 784,300,4 514,1429,3 617,97,5 275,442,4 772,99,4 405,417,5 833,761,4 832,448,2 644,426,5 696,299,5 336,49,5 604,152,4 57,312,5 649,643,3 644,133,5 415,698,5 774,330,4 320,650,3 756,23,4 215,355,3 534,212,5 20,974,3 287,527,4 285,21,4 473,130,4 513,210,3 850,759,4 711,403,3 233,124,3 541,314,4 333,930,1 730,507,1 785,182,4 49,123,1 757,1024,3 740,24,3 771,303,4 726,774,4 274,447,3 307,193,5 681,385,2 794,1109,3 867,203,2 150,167,5 102,117,3 675,257,2 716,281,5 197,822,2 41,1045,3 785,142,4 642,91,4 292,662,3 577,323,1 436,449,3 157,510,5 801,55,3 776,156,3 717,878,2 0,100,2 853,272,4 834,608,4 783,285,3 211,317,5 93,416,3 633,740,3 829,632,4 180,221,4 344,1100,4 457,249,1 621,401,3 121,82,5 477,446,4 426,327,4 842,634,2 732,458,4 184,285,4 
710,1159,5 300,317,5 457,297,5 831,325,4 797,305,3 435,339,5 707,361,1 433,282,3 111,244,4 667,751,4 641,469,4 850,526,5 587,1468,3 838,474,5 740,201,3 327,519,5 311,672,5 378,501,5 307,652,5 665,581,4 531,832,4 559,180,4 572,21,4 79,85,5 866,659,4 437,814,5 775,660,5 685,134,5 584,739,4 486,1073,1 770,872,3 795,426,4 541,120,2 850,171,5 531,190,5 851,117,4 183,133,5 578,193,5 473,509,4 837,479,4 804,160,1 654,581,2 711,780,4 657,1100,4 781,1510,2 827,1267,2 84,356,4 649,180,4 312,488,4 649,0,3 408,529,4 546,302,3 795,55,5 715,704,5 434,107,1 814,30,4 313,71,2 599,575,3 716,357,2 506,120,5 755,324,3 109,1181,2 846,762,1 618,294,4 832,127,3 547,12,1 715,426,5 267,587,3 866,195,3 200,172,3 654,881,3 664,214,2 783,270,3 456,708,5 774,342,4 626,50,5 785,380,3 757,440,3 708,293,3 681,658,1 502,706,5 333,216,2 494,420,1 681,990,2 844,876,2 69,62,3 481,880,3 552,196,5 698,1067,3 663,70,4 681,719,4 789,90,3 195,761,3 789,385,2 863,49,5 797,1048,3 747,49,5 511,272,5 853,280,3 626,11,4 553,1041,3 710,215,4 592,1011,3 845,518,4 657,168,5 785,131,5 470,224,5 654,511,3 824,507,4 416,667,2 600,1539,2 397,172,4 824,1014,2 866,479,5 550,801,4 702,221,4 534,318,5 649,227,4 469,8,5 726,657,5 696,180,4 779,171,5 711,1468,4 845,650,3 858,24,4 860,169,5 333,153,4 830,0,4 804,222,5 624,602,4 631,683,5 628,3,3 693,227,4 384,1021,3 809,325,5 93,68,3 333,170,4 43,446,4 394,215,3 220,120,2 547,155,5 866,22,5 678,195,4 829,226,3 404,364,1 390,47,4 248,1,3 797,28,4 547,594,4 864,927,1 692,187,2 652,96,3 245,1038,4 704,549,2 453,113,3 653,0,4 781,1037,4 201,194,4 842,151,2 404,201,4 773,356,2 845,600,5 266,202,5 318,681,3 644,659,3 288,741,4 730,944,4 178,901,1 867,1030,1 609,209,3 299,1093,5 571,123,5 785,190,4 433,627,1 789,707,3 710,567,3 789,863,4 275,999,2 224,602,5 720,324,3 362,119,1 266,238,4 333,1225,4 275,782,1 245,173,3 587,50,4 824,740,4 118,258,4 706,901,5 597,311,5 769,287,4 433,286,5 696,24,3 842,158,2 505,167,5 764,285,5 216,384,2 150,207,4 845,647,5 609,182,4 377,76,4 496,194,4 787,982,3 529,194,3 715,487,4 766,505,5 473,525,5 220,1436,3 658,523,4 703,656,4 9,58,4 847,614,5 454,288,3 642,267,4 804,153,5 832,927,2 522,1013,5 329,201,5 642,482,4 372,940,4 268,180,2 417,326,1 116,264,4 499,68,4 388,41,4 853,924,2 808,327,5 354,305,4 406,97,5 787,120,4 395,596,4 795,614,4 561,434,4 803,402,3 337,522,3 700,49,5 109,11,4 544,545,3 797,939,1 275,819,3 238,204,3 638,82,4 649,199,4 342,233,1 767,825,1 587,442,3 64,355,5 785,14,3 681,575,4 647,930,2 706,497,3 126,227,5 856,320,4 689,167,3 394,738,3 831,470,4 716,404,3 772,13,5 756,332,4 749,880,2 863,272,5 590,427,4 617,282,3 17,955,5 839,97,5 795,380,3 781,314,4 560,70,2 157,231,3 623,294,3 839,497,5 685,180,4 591,193,4 536,463,4 781,1257,2 861,209,4 503,772,3 743,49,3 531,1015,4 574,320,3 404,857,1 678,69,4 268,450,1 556,531,5 180,331,2 785,464,4 839,662,4 264,106,1 654,1197,3 822,403,4 319,239,3 536,212,4 794,830,2 497,1072,3 707,282,1 845,192,5 836,627,3 842,402,2 716,339,4 840,269,4 869,516,2 804,1169,5 803,208,3 492,0,3 765,430,3 404,722,1 534,63,5 628,11,5 304,485,5 842,448,3 47,523,3 591,596,2 292,96,4 275,446,4 570,656,4 298,165,4 180,1379,1 827,1072,4 814,95,5 364,0,4 504,603,5 862,1293,4 633,476,3 819,288,2 824,1290,2 206,254,3 842,226,3 781,1385,3 681,958,4 842,204,4 795,602,4 471,32,5 596,110,3 737,510,4 670,30,2 654,844,2 738,55,4 773,253,1 233,1020,4 668,171,3 721,12,2 765,208,3 235,654,3 596,23,3 756,178,4 859,288,3 714,1046,3 771,312,5 659,167,5 526,210,4 787,369,2 838,1663,1 371,43,4 748,754,4 576,306,3 641,764,3 658,203,4 449,1302,4 439,271,5 
842,150,2 823,285,2 685,11,5 184,700,3 307,130,4 587,172,5 323,269,5 816,14,3 232,120,4 782,327,4 255,553,4 233,444,2 540,675,3 578,581,4 585,120,5 839,233,5 108,221,4 857,285,4 488,750,5 777,495,1 706,223,4 850,404,5 827,300,2 599,194,4 822,228,3 829,695,2 261,417,3 498,529,4 267,258,3 526,285,2 86,26,4 681,88,4 548,180,4 392,138,4 633,1334,2 44,150,2 681,1089,2 196,183,1 494,143,4 300,322,4 867,397,1 770,541,4 270,39,1 711,954,2 488,1612,4 37,500,5 845,422,4 298,164,4 129,1275,4 849,55,1 103,126,3 496,96,4 748,182,5 312,308,4 697,282,2 828,338,2 63,750,2 48,327,2 817,1104,1 461,135,4 847,479,5 144,889,2 541,522,4 760,221,4 275,6,5 114,740,3 870,244,3 415,508,5 755,420,4 795,973,3 372,193,4 436,291,5 654,295,4 531,309,4 410,221,3 760,1013,1 756,204,4 280,341,1 21,227,4 847,206,5 765,503,3 664,1060,4 1,126,5 824,471,5 845,482,5 5,297,3 127,450,4 393,215,3 714,742,2 10,53,3 652,379,3 748,175,4 270,590,4 494,201,4 726,158,2 292,284,5 471,561,5 452,47,4 587,940,5 606,310,4 471,259,4 853,78,4 795,216,4 61,707,3 824,923,2 861,504,4 341,319,5 804,422,1 531,482,5 302,772,4 568,1196,4 837,222,3 356,321,3 561,143,5 829,650,4 444,688,1 845,795,1 720,401,4 289,595,4 839,736,4 829,182,4 830,1062,4 497,228,2 806,678,4 630,306,4 278,43,1 22,152,4 782,344,4 388,300,4 58,289,3 654,327,2 467,179,5 416,163,3 861,126,5 683,394,2 707,273,4 822,567,3 785,198,4 384,1016,3 849,565,5 673,314,3 214,87,3 459,278,2 105,565,4 810,293,4 693,488,4 682,910,3 174,272,2 850,239,4 595,322,4 268,463,3 540,37,3 709,0,4 268,120,1 708,180,4 647,545,4 586,312,5 78,339,4 377,12,3 88,235,5 765,1202,3 607,206,5 220,650,4 869,951,3 331,1041,4 853,254,1 853,290,2 264,281,5 553,377,4 654,1070,2 797,575,3 404,1386,2 772,203,3 863,167,4 772,46,4 195,172,2 845,1109,3 846,433,3 851,256,4 763,587,5 874,771,5 782,259,4 804,90,5 866,210,3 843,178,3 499,299,4 532,47,4 17,602,3 23,131,3 591,729,4 746,508,5 502,743,2 415,417,4 559,1020,4 566,646,5 822,332,3 663,716,1 654,281,3 659,99,3 140,254,4 706,1310,3 774,306,4 348,275,5 136,1027,5 604,142,1 5,534,2 31,297,5 641,95,5 660,188,4 726,402,4 342,188,4 660,214,3 820,180,4 647,21,4 726,155,4 804,581,3 253,61,3 48,918,5 604,125,5 709,653,4 302,832,2 103,271,4 496,166,2 861,356,3 491,192,4 837,1114,4 129,738,5 698,110,3 591,131,5 552,152,5 479,99,4 788,1160,3 414,530,5 681,256,2 730,55,2 324,522,3 462,136,2 767,251,3 693,527,3 723,357,1 663,587,3 709,268,3 649,186,2 90,650,5 845,1054,3 693,229,4 773,3,2 793,23,5 415,552,4 564,461,4 693,495,4 796,780,5 710,190,5 20,259,2 343,310,4 682,914,2 830,272,3 708,144,3 520,297,3 806,126,3 845,442,4 605,195,4 519,310,3 538,371,2 795,814,4 159,1133,4 875,293,4 475,293,3 333,895,5 704,117,4 428,482,5 732,274,3 452,195,4 752,461,4 600,287,1 679,285,4 654,402,2 442,293,5 715,477,4 125,349,2 757,201,5 341,187,3 581,150,4 686,287,4 322,992,4 820,173,5 726,865,3 871,331,3 159,149,4 828,1017,2 233,44,4 471,214,4 263,185,5 626,204,5 279,71,4 679,514,4 713,762,4 199,741,4 466,1011,3 842,412,2 31,267,5 689,1041,4 828,511,4 575,474,1 653,194,4 756,683,4 765,230,2 757,553,3 150,301,3 578,203,3 533,104,4 275,147,3 441,54,3 830,339,4 206,21,3 843,70,3 803,677,4 787,203,3 563,929,3 642,49,4 425,652,4 302,1033,1 845,554,2 790,285,3 245,1134,1 565,218,1 557,285,4 863,707,3 667,123,3 842,415,2 520,741,3 346,368,4 659,28,2 658,57,4 734,99,2 449,331,4 717,981,4 573,261,5 473,312,4 619,768,4 804,273,2 384,216,2 654,180,3 874,168,5 794,755,3 746,281,2 278,21,1 844,749,3 623,247,4 535,163,4 683,150,3 278,1177,4 803,258,4 865,318,4 834,215,4 261,366,4 833,274,3 852,747,2 406,507,4 
737,143,5 678,183,4 200,92,5 693,448,4 278,256,5 607,282,4 662,709,3 451,131,2 626,175,5 5,131,5 691,1053,3 259,349,4 526,196,4 765,506,3 681,254,3 693,428,4 496,1076,4 644,672,3 61,1129,4 333,178,4 853,317,5 715,650,5 867,737,2 774,299,4 233,115,2 428,202,5 726,782,3 649,508,3 428,728,2 275,256,4 284,193,4 766,656,4 1,284,5 827,1195,2 396,389,3 811,1392,3 642,469,4 861,287,5 297,478,5 795,248,1 832,207,3 477,366,4 867,561,2 706,1250,4 822,1106,3 827,873,3 505,89,2 737,402,3 776,116,5 870,514,4 665,431,3 172,326,5 542,28,2 630,337,2 629,6,4 443,257,3 469,285,4 709,203,4 708,160,5 307,131,3 392,495,5 678,483,4 863,207,4 188,1314,3 867,205,5 559,131,3 652,861,2 651,537,4 212,507,4 386,1077,1 103,894,2 408,215,4 863,1445,3 296,91,3 770,70,5 859,282,4 691,844,3 544,201,4 789,210,4 456,90,4 641,164,4 804,82,4 289,88,3 392,205,3 836,595,3 793,846,5 863,565,4 652,221,3 400,180,3 101,116,3 822,21,5 853,286,3 788,285,1 824,741,4 629,596,4 4,445,4 623,267,4 832,190,4 417,327,1 396,194,3 409,904,4 780,63,4 802,689,4 781,1382,3 797,51,3 485,234,2 740,47,4 537,222,4 797,70,3 799,863,4 649,429,4 457,508,4 607,605,5 390,8,5 803,104,3 765,191,4 756,37,3 536,1474,2 229,134,2 5,461,5 12,747,4 401,627,3 462,252,5 710,172,3 262,464,4 706,418,3 797,117,4 654,604,3 817,285,4 473,57,4 492,461,2 233,377,4 424,81,3 822,90,3 453,491,3 679,24,4 750,193,5 747,249,5 697,967,1 700,271,5 707,283,5 617,470,3 802,886,5 147,189,2 415,1427,3 379,355,2 757,731,4 765,167,5 342,567,1 817,327,4 91,256,2 803,738,4 641,553,4 72,682,2 820,117,3 744,530,3 263,97,5 711,242,4 653,209,5 93,958,5 279,583,4 810,747,3 254,293,2 633,282,2 212,217,4 536,96,2 698,1374,3 428,162,4 869,480,4 861,214,4 876,381,3 22,170,5 567,1285,4 647,677,3 752,49,4 852,325,2 842,264,3 607,507,4 452,183,4 642,513,3 867,1479,1 0,167,5 681,454,4 706,7,5 799,456,2 611,0,4 803,124,4 647,178,4 541,958,3 319,26,3 636,740,1 708,726,2 860,293,3 380,854,3 795,290,4 522,507,3 762,468,4 487,497,3 745,577,4 762,161,4 362,718,3 746,108,5 726,1,4 86,691,5 298,516,4 785,282,4 830,271,5 710,216,4 843,626,3 798,498,4 210,595,3 740,287,4 856,897,5 773,233,2 737,201,4 58,14,5 845,32,5 494,172,5 488,947,2 499,767,2 617,150,3 691,248,3 292,692,4 471,357,5 806,500,3 12,370,3 813,673,3 665,199,5 98,115,2 449,2,4 294,734,5 842,216,4 94,50,4 576,398,4 720,172,5 839,495,5 665,283,3 795,312,4 302,478,5 746,595,5 750,431,4 404,1034,1 605,734,5 416,124,5 492,167,5 842,602,2 95,264,5 127,130,5 641,12,4 872,291,5 326,256,2 609,0,4 746,300,1 564,164,4 850,67,3 69,228,3 730,477,4 386,1109,2 711,377,4 791,1014,5 495,742,2 859,714,4 585,808,3 492,181,5 386,568,2 157,1046,4 575,124,4 377,172,5 842,654,3 467,0,5 267,78,3 785,587,5 507,317,4 781,1024,2 325,448,3 547,342,4 122,181,4 822,373,1 778,242,4 435,203,5 793,750,3 746,264,4 820,63,5 295,0,5 592,275,1 16,474,4 826,330,3 730,131,3 591,1183,5 660,299,3 392,180,4 495,287,2 850,110,3 861,192,4 781,1662,2 814,198,4 496,392,4 783,311,3 627,241,5 293,1013,2 786,346,4 816,280,4 116,10,5 842,207,3 832,150,4 341,653,4 536,969,3 382,179,5 591,468,4 842,287,4 592,156,3 863,692,4 206,10,3 661,284,5 175,918,2 839,168,5 377,316,5 505,429,4 722,0,3 752,198,5 306,504,3 654,859,3 707,236,5 167,0,5 58,465,4 658,3,3 304,862,4 388,476,4 805,922,3 839,627,4 827,922,3 290,630,5 468,64,4 233,646,3 144,54,3 730,479,4 640,304,5 560,951,3 529,99,4 715,491,3 534,214,4 125,352,5 693,518,4 167,762,2 608,258,1 726,177,4 683,434,3 607,1261,5 531,225,4 542,197,4 391,180,5 143,247,4 797,154,3 451,471,5 757,507,4 861,167,4 756,127,3 771,326,4 392,57,3 866,134,5 861,1010,5 
536,25,3 715,609,4 825,183,3 740,214,4 790,268,4 752,193,4 803,924,4 676,747,4 794,264,3 849,7,5 628,330,3 485,263,3 652,509,2 532,823,1 117,171,5 15,99,5 748,464,4 636,921,1 709,719,3 697,464,3 747,698,3 90,204,5 863,226,4 547,345,4 261,64,4 615,345,3 42,121,2 544,203,4 449,716,4 795,806,2 676,244,5 290,157,2 775,134,4 851,150,4 298,1257,2 586,677,2 522,94,4 757,604,3 845,216,4 879,385,3 652,356,4 621,99,5 560,425,1 822,70,3 465,287,4 12,322,3 310,569,4 195,1021,4 324,529,4 568,299,3 795,314,5 416,161,3 850,679,3 877,282,3 129,187,4 756,325,3 649,660,3 624,285,4 739,299,4 48,208,5 91,49,5 6,8,5 850,362,4 664,684,2 795,217,3 652,385,1 568,283,4 560,483,4 253,440,3 420,878,4 827,346,1 352,300,3 514,288,1 708,549,3 814,249,1 289,691,5 683,364,4 250,6,3 706,487,4 607,125,1 194,747,2 527,540,3 832,49,2 576,142,3 129,0,5 193,210,4 183,55,3 853,31,4 729,814,3 177,221,4 384,511,5 628,327,3 839,190,4 713,404,5 621,865,2 112,256,5 605,536,2 434,475,3 789,1281,5 378,256,4 388,500,5 791,6,4 869,641,4 781,296,3 689,210,3 231,3,4 392,372,4 879,823,4 50,49,5 660,143,5 294,226,4 472,241,3 876,30,4 832,202,5 762,96,3 531,106,5 709,88,4 206,157,3 827,729,3 647,562,5 746,81,4 172,258,3 840,352,1 449,556,5 642,128,5 111,345,5 804,471,2 436,1205,4 12,381,1 377,565,3 710,57,4 794,1051,3 523,131,4 17,381,3 573,882,4 497,655,3 746,49,5 868,411,5 879,306,4 701,228,4 845,432,4 654,255,3 832,327,2 270,312,4 745,229,1 847,203,5 551,514,3 641,422,3 544,404,4 235,87,2 594,543,3 82,239,1 867,567,1 28,311,4 654,772,3 814,214,5 415,624,5 626,520,2 478,545,2 706,472,4 663,1100,3 669,658,5 806,357,3 526,510,5 647,378,1 806,629,4 477,326,3 16,242,1 850,741,5 641,948,1 847,317,5 536,51,3 870,345,3 877,581,4 803,583,4 359,478,4 791,281,3 622,210,3 839,65,3 209,1027,3 785,171,5 335,2,1 55,440,4 814,124,5 654,202,3 736,155,5 759,681,3 361,312,4 641,842,3 449,217,4 605,167,5 313,996,1 639,427,5 423,1083,5 845,1043,4 748,626,2 785,6,5 647,1227,3 531,1091,2 830,265,3 789,46,2 843,552,4 734,284,4 434,664,3 832,656,4 663,318,4 550,1010,5 828,12,4 594,292,4 388,846,4 838,320,1 300,480,4 869,565,2 256,920,5 311,611,5 480,172,4 812,341,1 393,21,5 681,1034,3 641,93,2 313,149,4 446,292,4 255,731,5 550,517,4 825,565,3 790,318,2 550,1058,3 726,482,4 449,222,3 576,53,4 291,116,4 642,275,5 398,500,2 333,1073,2 773,649,1 829,612,4 653,331,4 584,633,4 434,571,2 629,987,2 850,1131,3 480,478,4 822,272,3 861,973,2 845,230,2 264,49,2 822,731,5 488,889,5 685,466,5 290,976,2 652,93,2 840,285,5 879,298,4 820,125,5 621,205,1 480,206,3 534,82,4 803,1187,2 236,173,4 366,442,4 327,57,4 449,498,5 879,397,3 833,312,5 681,184,4 664,925,3 827,902,4 400,132,4 261,495,4 150,3,5 565,272,5 333,706,4 275,79,3 278,28,2 61,951,3 702,234,1 654,467,3 199,244,3 86,450,4 814,131,5 786,689,5 842,577,3 863,53,4 829,125,5 536,489,4 683,224,3 523,507,5 476,19,4 497,149,3 235,369,3 473,429,3 344,736,3 803,221,5 697,432,4 400,483,3 870,173,5 154,325,2 406,44,4 740,474,3 879,231,4 398,237,1 270,50,4 371,677,4 36,229,4 486,65,5 799,303,3 164,327,3 621,40,3 391,312,5 693,126,5 839,116,3 505,199,4 416,245,4 756,565,3 814,734,5 824,1116,3 781,1404,2 821,332,4 268,231,1 631,173,5 720,178,5 449,172,5 641,165,5 795,716,3 410,8,4 251,223,4 58,471,3 475,72,4 869,520,3 222,258,3 709,329,3 751,293,3 454,7,4 706,215,3 684,336,2 444,78,4 307,155,4 473,314,5 714,127,3 836,282,5 832,478,2 541,171,4 814,184,3 803,770,3 311,739,4 456,392,3 694,241,5 550,25,4 677,286,3 748,933,3 682,339,4 588,872,5 880,53,4 689,50,3 850,1313,1 850,237,5 781,989,3 180,330,1 447,318,5 343,131,4 654,211,3 
61,270,1 485,261,1 688,762,4 304,196,2 706,949,2 587,67,5 386,6,5 737,208,4 714,684,3 634,322,3 199,1072,3 428,469,5 832,511,4 652,162,4 843,153,3 704,78,5 500,220,3 285,116,2 647,199,2 621,422,3 860,1226,4 600,55,3 416,419,4 831,24,2 408,427,4 705,287,3 746,950,2 362,142,2 536,1128,1 797,818,3 820,1083,5 446,54,4 637,81,2 845,609,4 737,417,3 487,0,3 712,897,3 211,862,2 58,654,5 62,78,3 497,485,2 642,64,4 696,978,5 720,329,3 183,472,4 637,173,5 494,3,3 746,1002,1 862,269,3 124,201,5 196,537,3 847,195,5 585,654,4 452,422,4 566,178,5 375,602,4 473,285,5 592,698,4 449,160,5 861,404,2 880,132,4 867,580,2 520,76,3 373,627,3 455,3,3 159,954,4 534,491,4 842,101,2 161,6,3 763,190,3 379,560,2 665,519,3 618,362,2 214,131,5 825,293,4 698,128,4 404,1072,1 853,116,3 832,714,2 199,78,5 449,1118,4 183,402,3 617,233,4 850,191,4 879,300,4 804,419,4 764,247,2 803,656,4 290,290,5 869,53,2 845,621,4 670,26,3 456,96,5 230,312,3 231,689,4 804,855,4 663,152,4 785,708,2 150,662,4 550,186,5 649,208,3 723,271,5 488,1264,2 243,152,4 544,967,5 690,523,5 494,379,3 566,197,5 91,155,4 880,422,4 486,1275,2 631,507,2 835,428,4 787,161,3 40,55,4 693,176,5 94,98,4 604,677,1 91,303,4 863,1530,3 797,809,3 851,545,4 748,430,5 864,6,5 150,55,4 118,124,5 780,222,4 654,1399,3 693,22,3 740,272,3 803,362,4 641,77,3 639,169,5 371,217,5 41,96,3 373,788,4 851,126,4 757,410,4 871,257,4 180,410,3 415,273,4 797,86,3 620,539,3 535,379,4 878,299,3 267,451,1 846,1049,3 710,450,5 657,729,3 513,201,4 267,187,4 500,1080,3 773,401,2 144,357,4 209,831,3 621,404,4 737,469,4 804,647,4 829,186,2 177,163,3 756,287,4 187,198,4 404,31,1 843,227,3 183,271,4 802,753,2 12,110,5 869,510,3 870,26,2 232,613,4 254,596,4 256,112,4 685,47,5 68,287,5 692,192,4 507,1066,4 718,317,5 616,446,4 654,590,3 781,322,3 531,930,3 188,417,3 523,1125,1 868,845,2 766,162,4 617,186,5 413,689,4 199,608,3 757,483,5 293,537,5 772,587,1 493,285,4 82,590,4 195,844,4 566,99,1 377,166,4 285,256,3 868,309,4 415,656,5 544,202,4 592,365,4 748,81,5 81,337,1 467,698,3 485,844,4 716,49,4 270,706,4 649,204,4 563,312,4 197,517,3 144,545,3 248,180,3 850,825,4 814,134,2 863,69,4 607,847,4 255,43,4 696,285,4 692,198,3 762,4,4 623,13,5 822,470,3 720,456,3 861,473,5 757,28,3 263,234,5 723,988,1 89,643,5 302,587,5 620,0,3 697,293,4 845,519,5 434,95,5 397,182,4 748,479,5 783,269,3 706,154,3 654,134,4 842,78,2 871,322,2 89,1202,5 359,1141,4 664,698,4 293,272,3 263,741,2 43,426,3 746,429,4 845,54,5 603,99,5 481,345,3 579,870,4 832,210,3 720,68,4 42,123,4 836,14,3 187,264,5 626,1134,3 536,272,3 93,371,4 658,48,3 496,450,2 10,717,5 879,790,2 590,922,4 549,923,4 278,1204,3 266,97,5 628,110,5 623,596,3 480,203,4 835,874,1 200,16,3 715,214,5 882,791,4 708,209,4 307,660,4 866,78,4 205,681,3 205,331,3 795,541,3 199,1216,4 772,508,4 342,207,4 861,186,4 279,749,5 482,120,2 565,227,2 618,322,3 518,679,5 298,71,3 59,133,4 746,557,4 473,650,5 854,197,4 197,404,2 765,377,4 842,218,2 813,589,2 48,1076,4 710,254,4 746,426,5 652,943,2 238,207,3 777,173,4 518,339,5 396,49,5 773,653,2 848,14,5 238,237,5 681,1010,4 89,197,5 794,180,4 702,844,4 652,290,4 238,1114,2 620,142,2 787,578,3 775,473,5 842,587,2 393,657,3 877,641,3 14,274,4 20,634,4 664,99,3 752,214,5 869,465,4 157,173,5 706,532,5 5,496,4 404,1304,1 12,750,5 773,1109,1 580,252,5 567,529,3 756,473,3 13,506,4 532,202,4 420,171,5 773,372,2 467,46,5 416,181,4 688,327,5 795,218,4 300,283,4 843,689,3 246,6,4 850,49,5 811,325,4 760,687,2 345,160,3 840,271,4 581,256,3 863,525,4 499,282,2 534,194,4 494,120,5 652,327,4 649,21,3 833,6,4 866,955,4 879,650,5 715,647,4 
814,674,2 10,698,4 770,312,3 392,691,3 822,123,4 441,978,3 805,285,3 441,194,4 831,244,3 93,526,5 660,567,4 513,24,4 647,184,5 214,203,3 453,116,3 814,470,2 831,680,2 664,146,4 503,968,4 711,505,3 842,514,3 6,548,4 311,151,2 56,929,2 592,10,4 828,152,4 647,356,2 880,549,3 845,463,2 268,774,1 536,131,3 726,100,2 587,161,5 558,11,3 188,132,5 787,194,3 861,213,3 441,768,1 877,426,5 773,558,1 270,123,4 504,1038,4 683,116,4 696,120,4 683,81,5 664,182,4 200,357,1 581,299,3 536,895,3 710,167,4 879,819,3 0,122,4 798,257,5 654,723,3 880,670,3 451,194,4 313,411,3 429,299,3 853,215,3 673,150,2 706,868,1 521,491,4 695,306,5 163,406,2 668,878,2 628,650,5 294,49,5 879,294,5 664,293,2 654,267,3 748,384,3 542,520,4 871,814,4 874,333,4 555,177,5 192,78,4 692,650,3 845,1049,4 795,185,3 331,659,3 499,209,3 20,357,3 710,161,5 483,471,4 294,82,5 388,1006,4 882,6,5 659,143,3 454,43,3 573,212,4 829,402,4 183,215,4 44,925,3 290,171,5 767,353,3 553,236,3 619,146,3 333,520,4 9,191,4 869,170,4 795,432,2 185,329,4 879,768,3 670,200,3 431,108,2 584,82,3 573,315,4 523,272,3 804,660,4 392,1179,4 623,830,3 710,761,3 749,748,3 827,959,5 473,96,5 752,21,4 452,549,3 302,743,3 494,85,5 542,701,2 783,320,3 591,654,5 577,271,2 455,124,4 560,115,4 13,522,4 756,172,4 850,152,3 803,55,3 863,10,5 844,1591,3 882,143,4 740,392,2 879,832,4 391,462,3 845,523,3 773,175,4 676,267,5 293,300,4 377,605,5 853,236,3 789,773,4 127,318,5 853,14,3 606,210,5 738,326,5 525,275,4 462,1059,2 450,873,4 446,1314,4 726,433,5 659,639,1 362,932,2 592,590,4 591,148,4 809,293,5 552,366,4 12,238,4 803,61,4 91,992,4 827,312,3 781,532,2 415,147,5 298,206,3 750,707,4 41,558,2 654,728,2 681,466,3 726,26,4 679,272,3 449,228,4 862,881,4 185,688,4 832,549,2 583,160,4 867,0,4 665,105,2 879,79,2 737,649,3 876,236,4 882,67,4 58,479,5 21,731,4 677,146,4 200,894,3 547,256,5 768,747,2 587,180,5 450,306,4 834,672,4 483,27,5 681,71,3 806,251,4 820,99,2 513,383,3 57,110,4 338,502,4 326,717,4 244,257,4 104,268,4 576,81,4 665,268,5 763,1011,4 501,263,3 325,504,3 17,391,3 881,192,5 56,193,4 434,884,3 396,7,4 652,80,1 773,67,3 803,67,3 327,301,4 505,541,3 84,312,4 659,289,4 677,0,5 795,209,3 714,160,5 757,256,5 846,107,2 486,595,5 642,230,2 756,1272,2 532,317,5 756,449,2 462,282,5 729,741,3 715,63,5 404,730,3 751,257,3 834,161,5 125,265,5 880,662,5 456,61,3 781,295,3 879,203,5 621,1077,3 626,287,3 67,8,4 790,287,3 215,90,4 228,897,5 464,131,4 750,433,4 845,416,4 663,325,2 642,638,4 767,299,5 814,201,4 619,139,4 832,662,3 775,759,3 806,1614,4 872,257,3 860,713,4 118,120,4 214,271,3 346,203,4 654,232,3 803,195,4 663,161,4 391,322,3 839,505,5 876,269,4 163,824,4 200,156,4 80,455,1 445,267,2 93,1208,2 746,674,2 746,525,5 434,446,3 591,291,1 6,614,4 605,2,5 778,180,5 605,384,4 307,852,5 662,843,2 832,487,5 233,206,2 869,133,4 89,1201,5 415,239,1 205,1428,1 681,1230,2 822,236,4 101,840,2 732,295,2 456,193,5 523,448,3 81,99,5 778,14,4 173,708,4 829,204,5 781,679,1 675,344,2 126,257,5 614,637,5 642,27,4 883,115,4 797,492,3 490,285,4 478,293,3 600,1083,5 748,540,3 881,615,4 508,306,2 6,487,4 462,879,4 753,594,2 789,659,3 423,14,4 863,774,1 781,1006,3 163,677,4 860,948,4 781,1651,1 560,474,3 362,46,5 705,755,4 480,69,5 879,840,3 748,357,3 255,1032,4 628,503,4 59,516,4 774,344,5 80,474,5 205,241,3 605,178,5 536,47,4 806,750,3 392,730,3 25,840,2 175,267,5 478,1027,1 37,242,3 824,173,5 785,88,4 494,795,4 668,49,5 746,14,4 832,92,4 335,958,3 822,221,3 708,281,5 638,746,3 769,150,5 372,171,5 551,409,3 665,612,5 820,404,4 880,1117,3 84,696,3 144,924,4 773,711,1 730,203,4 148,271,3 809,332,5 
816,128,4 804,196,5 496,188,4 94,419,4 665,110,3 641,1065,3 257,327,3 704,317,5 475,578,2 702,8,2 404,859,1 663,630,4 750,256,4 737,176,4 325,134,3 12,490,4 58,450,5 795,447,4 822,239,3 833,291,5 882,128,5 352,342,2 775,705,3 456,192,5 434,306,5 756,180,3 643,987,4 787,428,3 536,97,3 186,240,3 200,133,4 748,1273,2 751,300,4 744,123,5 221,245,4 404,542,1 554,24,4 706,718,3 706,57,3 814,88,4 441,227,5 641,1029,4 825,1090,3 550,1038,4 392,995,3 697,653,1 880,179,5 267,357,3 685,55,5 486,712,4 864,824,1 754,747,4 208,8,3 710,675,5 713,1015,5 654,269,4 882,726,3 845,7,4 726,728,2 837,23,4 416,210,4 747,68,4 832,158,2 446,232,4 704,227,3 329,224,4 653,248,5 882,82,3 834,457,4 595,275,3 566,135,5 536,386,4 456,287,4 536,565,2 858,248,5 747,691,3 651,287,2 827,245,2 720,302,3 659,365,1 666,271,5 762,509,4 638,701,2 877,691,4 842,473,3 471,415,3 773,229,2 342,407,5 860,288,5 804,536,5 311,605,5 769,677,2 710,239,1 810,689,5 777,196,4 642,203,3 203,879,2 862,1061,4 63,110,4 814,98,4 769,6,5 188,502,3 647,476,3 369,99,4 140,1022,4 814,93,3 832,97,3 398,65,3 91,37,3 653,335,3 333,195,4 829,789,1 536,14,3 627,325,5 649,156,3 499,93,2 619,705,3 837,275,4 850,303,3 846,657,3 649,53,2 847,418,5 882,748,3 860,51,5 331,116,4 183,486,4 509,244,3 864,928,2 449,703,3 720,470,5 847,184,3 455,126,5 641,831,3 619,144,5 769,330,3 473,65,4 797,230,2 737,925,3 804,87,2 69,472,3 243,113,4 349,479,5 582,529,4 829,98,3 404,32,1 405,527,4 206,63,5 832,110,2 825,54,5 752,210,4 706,1007,3 715,196,5 424,539,2 617,117,3 740,400,3 727,99,5 57,1069,4 876,948,3 845,515,4 824,594,3 456,203,5 756,30,4 791,470,4 847,422,4 356,931,4 491,55,5 359,520,5 810,257,5 789,848,4 263,446,5 885,3,3 561,126,5 742,288,3 714,545,4 827,301,4 607,864,4 707,1279,1 807,270,3 880,48,5 591,685,5 249,27,4 592,96,4 658,214,4 654,324,2 673,251,2 441,987,1 588,688,4 806,70,5 214,225,4 623,256,3 707,125,4 127,500,3 770,196,1 667,271,5 536,116,2 610,346,4 115,639,3 805,13,3 842,401,2 127,431,2 253,167,1 662,454,2 863,715,2 803,9,4 732,136,5 682,753,3 861,197,5 692,707,3 879,53,3 600,1062,3 559,121,3 746,96,5 775,847,2 63,624,3 536,693,4 616,614,3 103,285,1 836,288,5 452,475,3 880,52,2 486,590,2 803,931,3 876,270,4 789,281,4 692,126,4 726,1075,2 598,871,2 329,20,5 604,13,5 128,1175,4 787,57,4 867,90,3 845,52,3 143,8,5 880,203,4 255,590,5 861,525,4 469,257,4 795,563,1 845,215,4 790,299,5 540,525,4 804,951,5 755,565,4 716,825,2 662,0,4 428,1117,4 307,470,3 693,522,4 714,39,1 877,510,4 811,357,3 534,29,4 580,275,3 861,215,5 112,221,3 658,1202,4 473,938,4 710,280,3 880,558,2 82,750,3 769,357,3 235,198,4 457,27,3 845,85,5 167,472,2 804,381,4 289,824,3 180,302,1 832,639,3 609,581,4 177,30,4 641,421,3 626,46,2 559,277,1 850,1015,5 806,383,4 681,232,2 298,312,3 867,88,4 831,327,3 578,87,4 662,1085,3 221,469,3 756,228,3 827,9,3 734,474,4 561,131,4 353,18,5 530,299,4 424,911,2 809,312,5 148,257,3 273,317,5 665,504,4 298,243,2 27,228,2 453,609,3 862,351,1 404,853,1 694,990,5 661,1379,2 819,747,1 832,52,1 726,1228,2 605,507,4 883,69,4 471,23,5 144,820,3 879,49,5 805,185,4 519,689,5 524,471,2 877,658,4 456,247,4 806,1062,4 91,545,2 663,21,2 794,553,3 221,716,1 803,413,4 587,383,1 21,3,5 319,1046,4 362,1214,1 295,6,5 823,990,3 433,224,4 843,1473,4 5,473,5 616,178,4 711,101,4 752,321,3 136,288,3 267,217,2 388,81,4 797,1269,3 504,691,3 533,330,4 853,1076,3 863,133,5 748,87,4 534,707,5 353,208,3 801,686,3 861,180,5 542,1198,2 454,236,3 494,231,5 639,69,4 820,14,5 469,1133,4 292,151,4 882,310,4 681,95,4 592,199,5 842,192,3 710,1445,2 760,126,3 242,110,4 199,377,5 858,275,4 
457,186,5 709,503,4 566,194,3 502,212,5 710,218,2 513,49,5 691,327,4 882,49,4 715,101,2 456,400,3 593,482,3 850,30,4 739,285,5 193,178,4 422,321,3 86,187,4 248,143,4 342,554,1 307,199,5 865,320,3 489,126,5 408,13,5 787,199,4 787,548,4 781,267,3 379,728,3 423,0,1 93,1044,4 86,800,3 315,191,1 298,434,3 850,771,3 212,54,5 746,480,5 696,875,3 586,879,3 748,109,2 536,712,3 741,12,4 863,218,4 777,160,3 636,274,3 449,415,5 663,275,5 291,114,4 746,922,5 746,427,3 342,509,5 145,326,3 845,658,5 585,565,3 804,549,3 639,353,4 846,224,1 839,704,4 795,232,4 773,76,1 404,1061,1 734,123,5 681,57,3 513,586,4 703,496,3 716,845,4 344,743,4 795,1,5 578,168,4 485,994,4 596,0,3 837,248,4 803,638,4 444,301,1 42,168,5 768,830,1 503,525,3 560,49,3 743,22,4 424,354,3 748,483,5 665,708,4 708,627,3 434,789,4 487,526,3 806,504,3 70,513,4 290,772,3 623,23,3 732,128,2 560,285,4 835,215,4 879,180,5 49,546,4 206,315,5 658,12,4 879,782,1 346,136,2 110,320,3 41,85,3 392,297,4 716,1136,5 215,746,4 792,814,3 849,172,5 712,361,1 885,174,4 879,507,4 384,46,4 423,881,3 879,720,1 481,287,3 633,126,5 803,1284,2 342,234,4 748,1050,3 654,1256,3 853,288,2 683,375,3 55,279,4 870,1196,3 757,1051,5 6,609,5 804,342,5 293,285,5 639,337,5 377,13,5 485,923,3 422,281,4 579,747,2 881,408,4 667,537,5 824,285,4 756,209,4 338,144,3 397,55,4 446,581,4 221,454,3 547,202,5 432,357,2 870,1118,3 757,182,5 882,134,4 629,251,2 279,618,4 837,1004,4 674,1006,4 631,167,4 735,1387,5 206,691,3 864,596,1 750,2,3 678,530,4 600,237,2 832,27,3 590,24,4 806,632,4 522,411,3 720,580,2 391,662,4 804,454,4 331,839,4 832,517,3 200,57,4 283,269,3 242,282,3 772,791,4 639,303,4 594,324,3 508,342,3 279,217,4 221,355,4 200,267,4 515,430,3 299,293,3 654,729,2 704,21,5 501,260,2 649,1034,2 617,608,4 539,219,3 229,161,4 485,1378,3 862,901,5 789,721,3 762,960,5 504,1408,3 778,299,3 524,256,4 302,1406,1 610,751,5 25,474,3 591,1047,3 267,717,4 757,121,4 748,256,3 302,72,3 536,1083,3 879,754,3 528,748,4 188,741,3 836,236,3 296,27,4 757,488,5 494,119,5 863,891,3 746,70,5 879,1015,4 166,492,4 726,430,4 337,497,4 278,1071,4 859,302,3 789,110,3 765,207,5 748,522,4 537,3,3 845,131,5 327,688,5 206,870,5 550,467,5 513,647,3 876,514,5 865,305,4 665,136,4 669,948,2 789,357,2 324,1017,3 795,872,3 76,172,5 511,257,3 882,406,3 290,773,3 144,154,2 43,108,3 689,683,4 715,964,2 845,185,5 850,108,4 822,195,5 413,747,3 372,644,5 710,1114,4 867,447,2 845,82,4 428,213,3 446,865,2 17,142,4 794,172,4 883,274,4 536,923,3 845,91,4 494,93,3 876,273,4 781,947,2 822,139,3 842,120,3 641,201,3 853,404,4 882,866,5 845,1167,4 744,506,1 795,809,3 331,430,5 404,46,5 812,258,2 763,13,4 757,419,3 737,228,3 269,85,4 647,422,4 197,446,4 449,416,4 513,48,2 665,221,3 531,185,4 869,47,4 567,223,4 620,79,4 649,401,3 619,622,4 300,428,4 379,227,3 416,722,5 715,945,2 884,1310,2 377,1437,3 863,21,5 742,337,1 91,407,4 397,691,4 467,215,5 827,60,5 842,96,3 285,820,4 353,304,4 623,14,4 150,124,4 707,595,4 642,565,3 839,461,3 498,485,3 586,242,3 69,819,1 740,289,3 542,21,3 654,788,3 741,14,4 747,516,3 882,71,4 664,431,4 444,978,2 556,267,5 540,541,1 644,267,4 642,355,4 647,830,1 290,1247,4 616,669,1 853,1196,3 652,257,3 870,176,5 81,86,3 792,1186,2 55,409,4 58,150,5 863,721,2 832,674,4 436,78,4 471,160,5 737,88,5 746,735,5 839,496,4 698,879,3 99,894,2 129,195,5 803,701,2 848,287,5 647,483,5 636,865,3 533,474,4 859,210,3 883,8,5 74,459,5 867,159,4 434,229,3 486,47,2 703,346,4 715,80,4 664,422,4 710,844,4 869,662,3 745,143,5 382,478,4 553,30,4 654,1264,3 777,233,3 582,512,5 847,208,5 675,21,5 853,267,3 621,203,3 654,54,2 
825,819,3 427,885,4 773,757,1 878,236,4 659,238,2 617,581,4 397,195,4 795,196,3 435,1027,4 302,563,1 642,504,4 274,163,4 839,944,3 421,866,3 199,950,5 296,497,3 665,132,3 884,209,5 698,977,4 820,131,5 845,707,3 726,635,3 534,495,5 882,515,4 681,66,4 671,268,3 883,268,5 726,1041,2 504,567,4 672,314,5 881,514,5 787,270,3 91,451,2 398,8,3 655,269,3 428,410,3 249,233,3 880,600,5 654,785,2 726,927,3 706,134,2 871,105,3 681,216,4 326,183,3 826,271,4 94,1115,4 449,482,3 691,293,3 689,495,4 823,320,2 787,55,3 804,141,4 83,476,4 592,254,5 763,695,3 565,169,5 884,404,4 730,319,1 797,61,4 870,894,3 129,94,5 681,1083,2 888,71,3 565,418,2 750,784,4 720,110,4 563,1033,3 101,750,3 829,49,5 200,210,3 851,596,3 24,172,4 242,284,5 869,41,2 137,208,4 641,941,4 787,431,1 518,327,2 746,215,2 850,9,3 591,884,2 614,677,1 464,99,3 285,1100,5 716,596,4 888,658,4 408,484,2 861,229,3 221,574,3 297,209,5 302,594,2 398,240,4 754,326,2 590,195,4 785,417,4 487,181,3 616,169,1 842,1410,3 396,317,4 623,472,3 709,267,4 663,190,3 781,257,4 647,111,2 576,172,5 338,166,4 671,274,5 644,663,4 837,384,4 748,570,3 0,190,5 812,876,1 278,363,4 503,239,1 668,0,5 646,230,4 832,443,3 180,121,2 125,677,3 514,904,2 675,1482,4 599,384,3 846,472,2 390,75,3 850,289,4 697,510,2 803,527,4 535,140,4 803,1073,1 463,1024,2 496,471,3 384,1013,2 229,21,5 791,99,4 773,1,1 7,10,3 589,236,3 536,95,3 101,293,2 862,332,5 47,214,4 888,133,4 646,87,4 183,639,4 885,818,4 859,513,5 108,281,3 534,498,4 600,507,4 797,443,2 621,673,2 760,290,3 837,178,5 600,590,3 864,244,3 771,287,2 385,126,5 551,1046,3 641,1046,3 422,677,3 888,159,4 851,288,2 306,63,4 877,948,3 333,72,3 832,918,2 377,706,3 287,268,5 591,47,5 381,481,5 200,1266,3 58,88,5 329,962,5 765,211,5 202,889,2 770,708,5 767,627,3 844,312,4 675,167,5 862,314,5 14,124,5 698,1012,3 853,381,4 544,193,3 880,789,3 786,879,3 434,927,3 861,146,5 560,427,4 621,69,3 773,514,2 415,1188,5 835,495,4 697,624,3 487,525,4 124,152,2 786,341,2 658,68,3 473,606,4 888,653,3 531,430,5 746,346,5 551,293,4 504,189,4 785,68,4 458,675,3 312,237,4 603,442,3 867,1034,1 773,576,2 408,878,1 346,176,5 746,408,1 620,685,5 408,713,3 698,1010,4 536,229,2 748,132,4 703,631,3 424,878,2 435,505,5 58,68,5 787,822,3 243,213,5 591,92,4 550,70,4 232,88,3 565,190,4 386,474,3 781,1225,2 451,529,3 711,432,3 580,514,4 693,520,3 21,126,5 384,1494,3 806,379,4 362,64,4 822,86,5 608,876,5 830,99,4 781,1104,3 609,282,3 873,305,4 885,780,4 620,182,4 531,519,5 781,285,2 748,495,5 544,889,2 781,988,3 832,1015,1 658,133,4 473,656,5 269,267,5 888,64,4 869,567,4 878,281,4 761,874,5 449,504,5 706,535,3 861,1092,5 637,684,4 847,150,4 428,87,3 739,293,4 867,264,3 805,484,5 730,587,3 398,1395,4 827,1152,3 763,172,3 665,318,4 289,134,4 213,959,2 333,257,4 576,63,5 176,323,4 401,8,4 412,146,2 683,273,2 845,701,4 678,142,2 614,215,4 631,49,5 541,870,2 693,1220,3 750,94,5 662,871,3 560,473,5 360,175,4 719,1175,5 488,749,5 836,12,4 256,60,5 822,432,4 295,180,5 828,236,3 591,177,5 847,233,4 861,490,3 404,445,1 884,427,4 876,285,2 485,1597,5 866,317,5 772,216,3 435,1052,4 888,57,3 715,472,4 476,293,4 846,925,1 773,55,2 22,521,4 344,117,3 734,105,3 883,321,3 581,1032,2 674,890,2 434,21,4 765,1125,4 845,397,1 214,551,3 243,1466,5 738,1430,5 885,67,3 377,380,4 458,650,3 747,221,4 456,182,5 801,671,3 804,166,3 649,419,3 880,221,5 178,1233,1 882,238,3 653,238,4 877,171,4 891,133,5 43,132,4 710,169,5 847,1117,5 327,719,3 579,14,3 233,237,3 866,95,5 297,332,5 496,11,4 118,81,2 788,292,4 708,404,3 150,43,4 379,514,4 526,602,4 757,545,3 804,173,3 788,8,5 767,590,4 856,299,3 
642,97,3 693,491,4 482,256,2 711,1052,4 605,968,5 827,285,4 871,596,4 262,519,3 576,6,2 649,315,3 842,22,2 457,735,4 270,47,4 849,299,5 757,546,5 382,477,5 396,108,4 566,245,4 762,208,4 517,105,5 660,432,5 795,486,5 392,462,4 531,81,5 312,513,4 255,594,4 652,424,2 861,63,5 629,309,3 849,209,5 803,678,4 709,495,4 412,285,5 338,169,5 449,527,5 880,1123,4 832,695,3 565,121,2 291,485,4 879,227,3 325,52,1 118,287,4 556,336,5 662,1008,3 488,338,3 266,714,4 742,302,5 867,72,1 885,146,5 435,71,5 523,215,5 153,221,2 250,534,3 889,117,2 786,244,3 333,243,3 599,678,2 560,1017,3 850,587,4 891,272,4 797,952,2 832,627,4 885,234,3 490,236,3 456,50,5 177,245,4 591,1142,5 400,370,3 675,8,2 127,380,3 698,1027,2 869,254,2 617,734,3 233,662,4 326,683,4 888,167,4 353,734,3 533,242,3 642,113,4 79,482,5 267,285,5 434,1290,1 223,979,1 521,10,4 888,761,3 822,52,5 738,660,2 888,245,4 668,193,3 791,1196,4 880,440,2 772,530,5 892,55,5 842,624,2 824,274,3 691,762,3 756,231,3 824,121,1 797,65,3 451,236,2 746,72,4 702,627,4 439,339,2 289,404,2 624,253,3 884,0,5 649,136,3 837,95,4 840,753,4 782,263,4 886,930,3 591,543,4 612,631,3 885,232,3 825,287,3 862,1023,3 652,365,2 614,508,4 726,173,4 842,673,2 488,323,3 780,204,5 755,0,4 654,652,3 853,504,4 847,754,5 313,131,4 858,420,5 881,203,5 649,379,2 525,276,2 268,134,4 880,12,4 847,171,5 536,75,3 850,306,4 663,527,5 129,452,3 662,122,3 541,234,3 647,618,3 853,222,4 797,162,3 882,223,4 890,545,3 893,14,3 302,553,2 757,293,5 320,196,5 867,357,2 346,173,4 112,126,4 866,173,5 797,172,5 885,159,1 391,299,2 398,98,3 191,99,5 633,330,4 772,402,2 770,1128,5 820,212,5 449,569,4 86,93,4 89,1044,2 750,192,5 173,1310,3 740,672,4 654,48,1 592,470,3 804,85,4 757,122,1 853,85,3 746,1455,3 607,268,3 471,795,4 436,169,5 523,814,3 313,721,1 629,239,3 867,108,3 659,490,4 782,268,4 144,635,4 747,236,4 804,100,2 411,23,3 462,886,5 616,645,4 723,681,1 652,69,2 452,203,4 554,339,4 453,192,2 892,146,3 499,76,3 804,211,3 641,1145,1 791,830,2 809,330,4 629,734,2 748,86,4 891,780,4 888,818,2 828,212,4 765,638,3 803,1027,3 867,182,5 285,930,4 845,179,5 845,316,3 879,1290,3 663,49,5 665,285,5 574,95,5 6,572,5 491,220,3 21,925,1 893,887,4 353,82,4 882,664,4 879,395,2 797,794,3 434,570,2 893,404,3 859,293,2 550,6,5 804,770,5 637,194,4 577,221,4 660,70,4 214,86,5 795,197,4 753,358,3 869,602,5 68,171,5 64,63,5 404,197,2 382,57,4 715,264,5 565,109,1 853,1085,3 880,88,4 846,371,5 751,305,5 94,380,4 560,208,4 748,741,4 600,404,1 760,234,3 624,237,4 242,156,5 223,552,4 881,172,5 715,631,4 736,95,2 880,1045,3 839,47,3 374,772,3 424,521,3 416,251,3 853,602,4 467,11,4 879,71,3 853,927,3 647,226,3 26,287,3 777,97,4 457,631,4 173,723,5 730,68,5 578,731,4 881,139,3 10,371,4 540,626,4 845,754,3 438,275,5 853,21,2 890,116,3 789,97,5 311,13,5 787,692,2 446,409,2 801,442,4 185,158,5 199,448,5 444,1142,4 560,199,4 780,203,4 411,683,4 863,965,4 795,270,5 876,59,5 6,378,4 101,54,3 393,157,3 644,512,5 845,769,5 846,1159,4 685,450,4 847,898,3 880,293,3 529,332,3 814,595,5 845,227,5 866,257,3 889,97,4 779,525,5 176,180,4 542,232,4 454,122,3 626,712,2 805,1070,4 0,3,3 404,525,1 864,168,5 714,375,2 853,249,4 312,81,3 254,323,5 691,167,2 881,70,5 628,654,5 882,560,3 560,538,1 752,133,4 59,500,3 467,117,3 536,645,2 505,1406,2 536,381,3 0,262,1 213,97,4 670,256,5 856,13,4 891,150,4 311,506,5 795,8,3 870,1384,3 867,172,4 803,187,4 783,753,3 89,269,4 565,154,2 41,70,4 388,210,4 392,1043,4 455,402,2 877,654,3 144,226,4 820,356,5 740,495,5 886,1011,1 523,822,4 862,302,1 405,506,4 550,460,3 587,62,5 869,385,4 795,1100,5 659,404,2 
839,528,4 576,78,4 654,208,3 704,848,3 483,448,4 748,807,3 473,489,5 386,288,1 558,514,4 604,68,5 558,143,5 787,0,3 467,470,3 803,718,3 401,116,3 766,221,5 750,848,2 295,482,5 544,425,3 866,251,2 695,1061,4 665,543,4 221,1219,4 755,730,3 480,321,4 846,117,3 505,872,4 843,49,5 813,447,3 550,469,5 410,448,3 287,213,2 454,1085,3 686,987,3 893,332,4 678,267,4 886,838,4 893,245,4 804,150,5 870,746,3 893,296,4 621,208,5 652,657,2 542,390,3 717,814,4 434,1015,4 333,949,3 870,1344,3 193,77,1 647,12,3 491,44,3 851,819,4 711,767,5 585,84,3 48,1016,3 869,215,4 326,1217,4 641,999,3 726,1138,3 636,23,2 663,193,4 654,268,3 523,692,5 824,119,3 891,848,2 757,212,5 654,1622,4 563,1015,2 789,825,1 784,11,4 850,272,5 762,156,4 862,325,5 845,238,4 76,27,5 762,69,5 675,327,5 535,1062,5 858,1013,4 726,819,2 845,1017,4 275,259,3 535,178,2 645,892,3 654,45,4 646,830,3 267,24,3 360,69,4 755,434,3 707,24,3 885,1302,1 465,1606,5 641,1135,4 654,1277,2 482,742,1 605,404,4 144,900,1 880,182,4 591,546,4 410,275,3 654,519,3 794,96,2 834,525,3 750,1077,3 880,160,3 78,0,4 767,150,2 449,1114,4 44,110,4 633,1046,3 25,368,2 180,1162,2 404,416,2 869,769,4 880,513,4 727,545,2 352,259,1 875,47,5 416,16,4 856,324,1 697,257,3 260,325,4 523,498,4 760,294,4 695,244,4 680,689,4 292,454,2 879,347,4 662,321,4 885,231,3 452,514,4 631,590,4 289,808,4 888,487,2 819,314,3 338,514,5 767,120,4 112,324,4 175,875,3 785,98,4 300,293,4 0,202,4 449,195,5 62,1137,2 486,288,2 861,477,4 599,569,4 895,1100,2 176,95,3 842,541,2 895,402,1 377,39,3 312,171,4 863,12,4 681,945,4 597,690,2 698,830,2 659,384,3 384,418,2 560,1130,4 689,7,4 388,508,4 863,385,3 720,215,5 872,299,4 362,314,3 456,195,5 641,66,4 392,343,3 67,712,2 4,405,1 450,326,4 487,172,4 292,955,3 459,312,4 456,150,5 252,654,4 485,1321,3 879,565,3 12,530,3 863,162,4 692,1134,3 505,398,5 823,293,3 209,653,5 879,930,3 311,659,4 400,603,4 486,149,5 787,644,3 880,134,4 795,195,5 373,1047,3 635,99,5 857,291,3 553,21,4 62,747,4 737,213,4 600,98,3 51,1084,4 863,221,4 654,654,3 837,142,5 847,161,2 877,793,4 32,328,4 691,24,4 811,293,5 756,155,3 360,215,5 553,229,5 421,669,2 636,120,4 866,523,5 863,172,5 454,454,3 108,208,1 712,344,3 757,473,5 869,548,2 617,442,4 781,265,1 746,479,5 806,484,5 186,82,5 752,194,1 297,525,5 335,366,3 851,929,3 289,234,3 760,325,1 710,81,3 824,869,3 464,427,3 42,69,4 886,1014,5 589,115,5 843,317,4 327,938,4 406,674,3 876,58,5 629,755,4 328,301,5 660,971,3 446,226,2 292,217,2 247,171,4 9,653,5 638,1464,2 882,68,2 880,178,5 730,647,4 888,1588,5 641,943,5 832,426,3 685,96,2 845,45,4 312,57,3 654,286,3 591,1009,5 772,1,3 837,301,4 639,495,4 355,311,3 845,1132,2 726,0,3 638,115,3 787,227,3 853,486,4 726,205,3 892,425,4 401,482,5 285,115,5 690,630,4 310,209,5 895,107,3 600,258,1 857,180,2 701,686,1 639,46,4 278,861,5 423,99,5 660,229,4 604,116,2 844,271,3 591,889,1 851,677,3 681,1187,3 845,1100,3 803,971,3 364,308,1 605,1054,4 789,172,3 881,185,5 869,1005,2 652,656,4 881,214,5 409,353,3 590,78,4 192,120,3 56,116,4 633,221,3 307,72,3 551,283,3 843,68,5 789,41,5 877,285,4 509,321,3 594,947,3 659,1109,2 763,27,4 754,310,4 247,97,5 798,653,5 630,333,2 845,503,5 559,267,4 781,1282,2 649,567,3 867,451,2 381,333,5 882,52,5 448,59,5 711,754,4 841,267,5 834,214,4 861,21,5 765,495,5 591,88,4 605,54,4 436,414,4 385,596,3 804,6,5 795,172,5 763,865,4 654,1633,2 554,128,4 542,366,4 847,124,5 462,474,3 302,127,4 465,183,4 449,132,5 888,195,5 665,865,2 893,133,4 860,85,5 90,510,5 89,211,4 891,86,5 540,510,4 158,1220,5 878,0,4 504,101,1 665,88,4 827,82,3 702,595,3 478,525,4 453,1125,2 644,238,3 
662,681,3 733,422,4 727,146,4 94,194,5 233,831,2 697,481,2 605,962,5 767,965,4 661,984,4 681,248,3 581,1013,4 888,885,3 617,126,5 536,447,3 478,94,4 845,399,1 449,619,4 503,439,3 709,333,2 805,161,3 801,483,3 885,107,5 888,72,3 193,386,2 881,104,3 384,97,4 449,309,4 803,62,4 296,209,4 895,280,2 659,229,3 845,1181,2 845,836,5 428,27,3 665,143,3 371,673,5 248,406,3 757,230,3 8,200,5 803,825,3 652,293,2 681,273,4 895,66,2 726,28,3 629,686,3 716,974,2 795,231,3 885,272,2 492,172,4 602,215,4 882,881,4 885,418,3 852,872,3 17,21,5 364,590,4 697,198,2 180,883,1 243,248,4 665,1020,5 868,311,2 532,119,1 677,1114,3 895,309,4 50,180,5 845,738,4 505,322,3 739,268,4 664,11,4 550,795,4 480,483,4 748,196,4 877,125,3 744,257,5 726,434,3 869,486,4 499,820,2 752,503,3 387,679,5 870,258,3 797,282,5 891,440,3 449,617,4 535,422,4 331,306,5 763,14,4 813,442,3 896,0,5 641,1208,3 553,755,3 327,914,3 593,356,4 877,21,2 278,372,4 705,293,4 681,947,2 750,484,4 398,180,3 404,1310,1 656,117,1 776,520,5 795,927,2 778,70,4 74,1151,1 213,356,5 847,194,3 762,504,4 847,98,3 723,677,2 641,121,2 681,24,4 404,160,1 846,179,2 846,182,4 369,221,3 850,312,4 852,878,4 418,274,5 842,510,3 605,172,5 733,164,3 457,602,4 658,97,4 824,1015,3 662,1050,3 710,240,4 893,268,3 559,475,2 654,1295,3 843,108,2 313,507,3 830,249,5 292,419,4 517,287,3 362,428,5 819,263,3 398,377,3 560,674,3 382,204,4 814,403,4 503,938,4 536,485,3 861,432,4 657,6,4 270,689,4 512,251,5 838,219,3 704,719,5 468,198,4 726,238,4 392,596,3 889,236,3 292,570,2 678,222,5 449,299,4 621,481,3 787,491,3 641,355,4 757,272,4 663,301,4 346,199,4 832,596,1 825,626,4 863,602,4 130,743,4 652,52,2 828,639,3 827,212,2 716,677,3 344,86,5 6,545,4 689,553,3 290,1089,2 422,306,3 786,323,2 327,656,4 829,96,4 539,627,3 74,1047,4 12,201,5 806,677,3 706,211,4 861,428,5 506,249,5 889,180,4 822,159,4 619,122,3 157,216,5 692,377,2 420,708,4 693,510,5 270,479,4 889,88,4 631,384,4 546,288,3 683,167,4 617,108,2 13,472,5 397,71,3 704,622,5 822,163,3 863,122,4 587,81,5 695,905,3 221,536,4 795,0,2 845,171,4 879,596,3 726,925,3 576,160,5 697,587,4 540,403,4 852,291,4 745,448,1 704,88,2 889,339,4 749,682,1 787,473,3 789,287,4 866,1607,2 896,596,5 450,329,3 706,605,4 886,992,5 373,571,2 838,128,4 832,433,3 880,140,3 462,818,1 817,321,2 703,174,3 625,257,4 839,96,3 795,182,5 523,240,5 17,209,5 140,408,5 620,675,3 353,601,3 534,698,4 709,873,3 378,192,4 740,422,3 757,32,4 726,245,4 789,230,4 763,431,5 564,380,2 697,647,4 248,178,5 867,231,1 55,49,5 880,280,3 674,317,5 536,271,4 888,654,4 889,162,3 682,21,4 585,450,4 891,225,3 806,72,3 605,259,3 746,267,5 845,237,5 397,201,3 794,385,3 232,126,5 640,49,3 392,686,3 813,674,3 654,1213,2 792,287,4 824,257,4 825,231,3 534,424,5 879,1283,4 148,244,3 657,85,4 886,368,5 769,92,5 504,470,4 830,149,3 851,840,4 879,878,3 748,231,4 321,345,3 727,284,4 553,1011,3 683,48,4 862,241,4 495,149,2 861,120,5 863,214,4 507,207,5 746,507,5 233,556,1 638,581,3 616,1020,4 757,779,5 449,113,5 46,300,4 879,450,2 676,677,4 882,708,5 441,67,3 888,1102,2 844,1433,4 333,224,3 748,95,5 730,195,5 845,605,4 386,429,3 6,442,5 765,174,3 829,522,4 700,310,5 429,527,4 298,92,2 69,683,3 455,1016,4 270,791,4 193,656,4 536,1004,3 804,540,3 270,762,3 896,471,5 373,526,4 788,49,5 652,178,4 730,426,5 772,175,4 560,69,4 560,85,4 862,360,5 684,298,2 886,0,5 12,749,5 378,643,5 84,1069,4 398,143,3 696,287,2 880,111,2 631,81,4 888,727,3 553,203,5 372,79,3 197,461,3 726,166,2 675,470,3 386,247,4 832,1230,4 877,211,3 622,209,5 780,173,5 12,816,1 170,287,2 757,656,5 457,128,4 373,755,3 164,173,4 
312,653,5 647,274,5 900,274,3 779,186,5 726,368,2 654,374,2 544,97,5 822,641,4 488,901,4 180,846,1 893,301,4 378,95,5 824,514,4 845,672,4 544,422,4 494,664,1 42,541,3 571,318,4 759,1036,5 879,1000,2 626,99,5 750,225,3 416,171,3 504,126,1 779,510,5 270,1,1 292,174,2 628,474,4 542,791,4 493,120,4 12,505,5 487,288,1 842,671,3 885,117,1 756,431,3 208,1085,4 585,82,2 508,267,2 814,1077,2 802,687,1 773,773,1 368,178,4 654,1255,3 888,380,4 307,184,4 211,86,5 633,543,3 787,808,3 544,77,2 552,558,3 576,1516,3 167,457,1 895,524,5 6,650,5 828,854,4 22,366,4 888,1266,3 647,182,5 839,494,3 398,387,2 492,368,2 870,240,3 681,880,3 387,301,5 378,330,4 659,253,1 416,595,3 839,284,4 895,211,2 304,728,3 764,521,5 745,0,4 591,357,1 313,282,4 64,24,4 681,97,4 502,276,4 891,1218,2 715,600,4 706,310,4 483,273,4 384,854,5 773,691,1 271,233,4 584,164,4 665,162,3 486,221,4 891,660,5 866,497,4 616,858,3 722,168,4 749,287,4 826,689,3 428,432,3 598,287,4 572,179,4 664,256,3 597,346,3 675,889,1 415,37,3 573,339,1 748,745,5 715,392,3 560,86,3 748,321,4 404,228,1 646,120,4 653,214,4 409,322,3 804,4,4 490,283,3 370,448,3 778,20,5 626,470,3 282,90,5 342,409,3 200,467,4 772,1239,3 712,341,3 235,178,1 748,236,3 544,116,4 20,768,1 41,940,4 12,68,4 847,94,5 717,1046,3 891,431,4 869,703,3 628,192,5 115,602,3 745,221,3 863,244,4 895,135,5 874,922,5 708,507,4 697,171,5 812,987,3 877,401,4 720,76,5 492,150,3 852,300,1 898,207,3 900,508,4 740,477,5 726,78,4 781,680,3 825,10,4 141,146,1 220,1097,4 832,384,3 895,751,1 310,1296,4 504,741,4 290,172,5 449,510,5 591,332,5 315,173,1 547,78,5 384,58,2 665,606,4 746,1158,2 778,194,5 839,78,4 621,1407,1 714,1214,1 364,315,4 797,804,4 748,509,4 656,281,3 824,280,3 397,177,5 502,1474,5 763,742,1 647,430,5 877,70,4 704,825,4 456,225,3 140,406,2 845,1238,2 773,527,4 701,226,4 670,225,3 379,214,3 641,140,4 601,0,4 797,279,2 689,580,2 408,434,3 765,567,2 832,557,4 762,143,3 285,987,3 550,596,4 676,13,1 874,513,5 124,708,3 639,345,4 12,901,3 494,1244,5 660,7,5 456,377,4 847,164,5 626,229,4 823,291,3 832,184,5 654,237,3 726,126,4 427,293,4 560,510,4 881,558,3 718,292,3 338,654,4 367,126,4 578,1109,1 287,196,5 880,629,4 583,430,3 776,1078,2 885,172,5 830,173,5 633,287,3 843,215,5 681,150,5 839,530,5 17,71,3 747,152,4 770,0,5 781,1250,3 21,61,4 362,734,3 649,213,3 89,1135,3 888,401,3 698,339,4 730,704,5 505,854,4 839,506,4 879,801,3 532,933,3 518,349,5 275,500,4 698,190,3 704,98,3 647,1027,2 716,279,4 710,495,5 428,472,3 503,516,4 617,961,1 654,898,2 866,294,4 752,179,2 63,131,4 803,510,4 714,696,2 881,14,5 628,131,5 726,4,3 262,259,2 893,844,3 822,691,4 830,257,2 845,577,3 286,234,4 825,801,4 896,213,5 492,23,4 729,409,1 786,749,5 43,193,5 804,724,3 711,728,5 404,167,1 384,426,4 405,671,2 505,517,4 278,99,4 851,514,5 576,626,5 663,55,4 452,3,4 842,449,2 74,507,4 187,193,3 428,71,2 247,120,2 829,312,5 61,703,2 591,345,4 820,105,2 890,49,4 618,565,4 297,198,4 787,1276,3 390,214,4 877,529,5 415,613,5 663,701,4 880,150,2 594,221,3 880,117,4 763,12,2 624,1019,3 895,685,3 144,105,4 622,215,4 567,478,5 898,693,5 261,649,4 188,432,5 737,160,4 745,201,5 469,1083,3 716,306,5 902,1131,3 532,181,3 603,217,3 636,1243,1 12,689,3 847,639,1 720,180,5 579,347,3 669,610,5 781,297,4 825,227,3 757,19,4 594,863,4 585,53,3 492,153,4 654,250,3 879,1266,4 144,221,5 845,231,3 834,196,5 898,208,5 803,23,5 821,925,2 885,207,3 211,316,5 893,751,3 393,770,4 276,220,4 783,897,4 542,422,3 620,366,3 853,146,3 888,80,4 807,244,4 698,126,3 173,1090,3 551,279,3 564,514,5 867,100,4 692,78,4 805,132,5 879,117,3 596,117,3 605,677,3 
605,205,4 773,825,2 229,160,5 587,116,4 696,832,3 775,634,4 388,76,2 850,131,4 895,848,2 893,254,3 538,274,4 804,98,2 12,557,1 895,743,3 109,53,4 497,9,5 879,1517,2 647,1028,2 176,1109,3 795,221,5 803,180,5 760,470,3 441,409,4 804,258,1 560,237,4 471,1013,4 879,3,4 885,356,4 491,526,5 451,814,2 881,1014,3 401,509,5 506,49,5 820,762,3 378,380,5 903,8,4 885,2,3 302,1181,2 396,267,4 532,142,4 17,58,4 832,664,3 263,674,4 818,302,4 794,216,1 888,1073,3 544,565,4 234,81,2 455,1197,4 377,238,3 886,110,5 888,575,3 519,309,4 536,538,1 757,108,3 586,876,2 785,731,4 290,248,4 545,312,2 616,344,1 618,807,3 400,470,4 532,125,4 356,455,3 814,500,3 586,329,3 759,171,3 822,1134,3 882,29,4 333,752,4 885,6,5 513,215,5 176,677,3 706,193,4 773,925,1 453,316,4 731,304,2 836,19,4 42,545,4 705,470,4 757,185,5 389,257,5 882,193,3 111,309,4 658,791,4 666,215,4 499,406,3 621,110,4 867,402,2 906,1056,3 845,228,3 765,728,3 462,1605,2 863,123,5 486,545,3 654,1267,3 795,402,4 302,865,2 730,1502,5 576,734,5 542,12,3 710,828,2 567,640,5 703,602,5 838,949,4 839,637,3 870,336,3 879,577,3 869,237,4 665,25,3 681,392,4 652,637,1 790,321,4 904,332,3 773,199,2 867,1027,3 779,495,4 552,610,5 763,288,5 455,460,4 587,1239,5 432,275,5 696,49,5 379,237,3 338,221,4 486,258,2 845,1539,3 794,2,2 5,169,4 5,222,4 873,320,3 62,258,3 906,762,5 756,154,2 756,100,4 832,1070,3 903,288,5 523,1040,2 498,662,5 825,100,5 453,65,4 710,51,5 811,747,5 405,131,5 566,296,3 654,1266,2 762,72,3 434,819,1 344,780,3 473,433,4 797,1282,4 895,457,1 748,23,2 658,489,4 569,288,1 275,574,2 529,195,5 861,275,5 895,47,4 232,434,5 196,946,2 756,657,2 542,173,4 800,325,4 560,1477,3 886,278,5 658,87,2 896,477,3 714,404,3 233,203,2 726,195,4 649,661,3 853,534,3 880,87,3 825,431,3 566,108,2 791,128,4 386,180,4 504,1062,3 552,274,5 853,332,3 773,226,5 478,65,3 316,677,2 886,199,1 302,20,2 233,242,1 225,512,3 863,1134,3 641,559,4 849,171,5 725,1058,5 688,294,1 681,134,4 845,722,2 825,52,5 797,861,3 829,180,5 757,1006,5 591,171,5 880,641,4 760,475,2 893,1015,3 845,747,3 832,402,1 750,1100,1 321,504,4 867,650,5 262,198,5 536,148,3 279,549,2 794,152,3 386,476,1 436,144,1 490,318,1 610,300,4 467,296,4 728,287,2 870,780,4 408,504,5 500,410,4 187,173,5 814,712,4 377,88,4 626,67,4 243,233,3 869,51,2 405,605,3 658,96,5 607,602,5 665,479,4 797,78,4 773,160,2 458,872,4 200,1055,2 579,0,3 891,72,3 552,479,5 863,348,4 793,0,4 467,152,5 839,512,5 550,569,4 714,78,5 895,683,4 880,281,4 505,185,4 534,222,5 906,618,2 629,236,5 757,863,4 542,514,4 654,643,3 895,712,2 803,142,3 794,580,4 822,285,5 756,569,3 758,0,5 292,428,4 705,741,2 180,409,1 595,148,3 638,548,2 560,779,1 736,172,4 237,1257,1 835,658,5 107,6,5 173,131,2 889,171,5 467,317,5 737,87,3 404,754,2 254,6,2 795,473,2 874,170,5 647,218,4 791,695,3 901,257,3 820,434,4 671,279,2 642,41,4 734,285,5 628,299,4 847,1100,5 280,270,5 499,734,4 895,317,4 660,218,2 698,97,4 505,7,5 81,124,3 854,59,3 895,383,2 832,551,3 558,179,4 311,647,5 267,185,3 587,1073,5 343,21,3 762,55,5 829,201,5 647,779,1 850,931,3 633,321,3 644,771,3 221,254,3 566,678,4 532,131,5 803,614,5 638,527,4 737,473,4 426,285,4 372,196,3 536,379,2 795,37,5 740,402,5 858,761,5 850,257,4 532,87,4 627,337,5 893,637,3 200,732,3 416,170,3 531,78,5 820,317,5 636,224,3 896,126,5 660,537,3 293,280,3 624,513,3 292,234,3 879,242,2 3,323,5 765,661,3 594,335,2 333,274,4 496,589,2 749,878,4 890,590,4 292,99,4 840,322,3 485,677,1 814,392,4 413,339,4 681,245,5 620,394,4 616,268,1 513,190,5 897,287,4 186,185,4 876,639,2 902,99,5 900,1048,3 502,225,5 583,24,3 733,317,5 787,1272,3 
387,870,2 654,300,2 871,931,4 691,1011,1 764,970,4 17,51,5 845,539,2 894,596,2 17,715,5 757,184,4 506,596,5 108,10,4 662,182,4 396,747,2 689,120,3 750,537,4 270,538,1 401,115,3 565,735,4 269,602,5 455,1080,4 552,1125,4 895,324,1 634,236,3 602,175,2 600,7,3 617,64,3 84,63,5 654,78,5 781,337,2 704,224,4 472,1006,4 514,346,3 867,175,4 523,227,3 906,99,5 905,99,4 681,25,3 539,19,4 275,530,4 644,64,4 867,201,3 740,280,2 496,789,2 825,173,5 220,577,4 600,229,4 523,1100,4 654,1099,3 834,280,4 416,731,4 221,653,3 10,747,1 861,10,4 888,1133,4 499,244,2 453,467,3 587,1179,2 668,132,4 441,149,4 879,181,5 788,293,3 895,788,2 620,239,4 870,274,3 898,215,5 806,373,3 74,987,2 696,454,4 428,671,2 311,142,4 768,1027,3 452,353,4 889,442,4 654,170,2 641,472,1 164,126,4 852,322,3 373,761,5 715,519,4 757,451,3 313,709,3 415,937,3 429,147,2 706,961,2 43,755,3 291,149,4 48,454,1 327,1020,3 86,803,3 558,194,3 20,627,3 369,524,4 777,179,4 192,285,4 410,0,4 751,326,5 654,355,3 725,309,4 261,368,2 863,97,5 869,16,4 245,226,4 492,173,3 751,1293,3 567,511,1 660,171,5 746,481,5 93,567,3 415,1076,1 789,16,2 844,268,4 905,120,4 862,1037,1 792,249,4 881,814,2 787,470,3 97,987,1 143,173,5 804,92,5 600,454,4 408,99,5 560,478,4 434,654,2 457,237,4 878,150,3 803,557,3 871,0,3 550,595,5 773,430,4 777,55,3 396,990,1 180,107,1 781,1587,3 103,267,3 536,606,4 888,381,2 177,50,4 752,22,2 867,473,4 633,115,3 902,317,5 642,678,3 199,229,5 703,343,4 620,160,3 639,312,5 757,958,3 638,380,2 406,843,2 180,1010,1 813,218,4 891,126,5 436,558,3 448,250,3 641,678,2 6,69,1 804,179,3 326,179,4 585,1248,3 262,27,3 658,173,4 392,332,4 164,186,3 806,139,3 231,588,3 895,158,2 839,746,4 754,880,1 801,6,5 710,392,4 893,287,3 250,844,4 395,116,4 610,323,3 112,291,3 652,55,5 274,180,4 547,524,5 485,741,2 842,830,4 478,203,4 869,50,2 318,300,4 747,55,4 659,82,3 896,226,3 525,332,3 884,865,3 345,187,4 732,301,4 806,100,4 861,183,2 886,1135,5 641,731,4 757,566,4 728,747,4 839,883,5 710,422,3 797,824,3 903,96,4 683,177,4 863,293,4 520,11,5 823,303,3 523,1420,5 371,163,4 881,470,4 835,287,1 496,431,3 750,944,3 888,493,3 664,471,3 536,957,2 802,310,5 652,3,3 660,748,2 238,208,5 834,178,5 869,558,2 780,179,4 874,132,4 667,251,2 670,78,2 773,232,2 406,185,4 886,472,4 862,257,5 607,85,5 846,257,5 874,267,4 756,120,2 455,1133,4 839,209,3 177,780,4 853,590,2 534,519,4 834,236,4 497,771,1 647,103,1 880,97,5 741,49,4 498,885,4 645,331,3 880,96,3 803,623,2 536,962,3 59,483,5 570,356,4 536,639,3 631,10,4 633,594,4 507,209,4 335,780,3 325,194,4 895,185,4 455,1239,3 689,545,4 773,97,4 538,602,4 468,506,5 511,285,5 302,431,3 830,330,4 848,297,5 888,55,5 653,136,4 307,471,2 848,587,5 832,646,4 757,1018,4 715,98,5 673,221,3 879,251,2 349,97,4 814,187,3 882,1044,5 451,1012,1 899,457,2 804,94,3 649,602,4 693,87,4 454,290,3 576,404,3 449,434,4 199,8,4 467,426,5 665,481,4 486,178,3 558,586,4 897,318,5 187,509,3 200,1173,5 607,261,3 333,688,3 560,1169,3 150,370,4 30,305,3 415,322,3 819,300,2 229,430,3 405,581,4 452,401,3 526,1100,4 80,927,4 333,185,3 249,54,5 884,431,4 881,7,5 278,1243,3 458,1037,4 405,484,3 326,173,4 898,684,3 386,582,4 748,575,3 755,110,4 795,683,5 726,410,3 765,216,4 617,163,3 822,422,5 700,688,3 298,96,4 520,88,3 536,1444,3 863,950,3 534,197,4 606,706,4 261,85,3 659,150,5 825,587,4 267,549,2 267,939,2 386,434,3 899,136,3 537,95,4 757,420,4 710,746,4 258,38,4 797,320,3 591,1066,5 839,152,3 483,418,4 702,24,3 471,174,5 331,826,4 885,1323,2 362,392,4 378,427,4 584,51,3 781,259,2 867,614,4 772,392,2 759,203,4 658,184,4 12,273,3 822,158,3 378,528,4 556,196,5 
394,99,4 663,530,2 398,222,3 848,26,5 523,817,3 891,603,5 380,1114,4 907,55,4 830,327,3 734,297,4 533,747,4 888,210,4 360,185,3 779,656,3 344,136,4 587,549,3 310,725,3 683,281,4 773,78,2 81,111,1 827,51,3 394,185,5 885,16,4 867,178,4 825,402,4 308,285,4 810,322,5 795,1036,2 899,129,1 441,435,3 377,927,2 652,199,4 158,865,5 757,413,4 902,993,3 436,252,1 883,178,5 701,537,4 743,656,5 453,135,3 672,300,3 94,418,4 716,889,1 644,615,3 820,233,5 612,1314,4 643,180,4 455,207,4 576,662,5 880,923,3 773,794,1 693,672,4 654,1040,3 463,263,4 828,13,2 255,321,4 795,719,4 353,167,5 746,286,4 681,630,3 471,78,5 333,528,5 416,227,3 568,293,2 861,478,4 849,968,5 803,132,3 825,55,5 609,317,5 839,212,4 404,1075,2 376,257,4 662,654,4 866,194,5 12,510,5 179,190,4 803,558,3 844,1462,1 732,281,3 557,282,3 494,178,5 718,78,4 799,256,4 685,526,3 553,288,4 726,1302,2 428,7,3 804,933,1 537,317,5 623,244,3 620,454,4 654,708,3 875,177,4 333,814,3 38,338,3 180,1170,1 492,117,4 625,322,1 888,567,3 310,941,5 879,279,2 232,167,5 378,418,4 782,886,5 787,518,4 915,466,3 797,238,4 781,538,3 822,183,3 803,418,3 910,356,4 662,179,4 420,217,4 837,110,4 812,357,3 485,1608,3 331,254,4 658,1296,2 449,747,4 616,173,1 846,167,4 649,426,4 641,155,1 710,113,5 61,170,4 633,474,5 770,402,4 895,81,3 820,0,5 911,237,4 880,80,3 504,494,3 55,113,4 455,432,4 882,19,4 459,116,3 911,1040,4 775,195,3 869,1040,2 565,386,4 830,346,3 805,432,4 17,418,3 505,270,4 503,716,4 886,209,5 370,184,3 704,214,2 781,992,3 538,268,5 300,127,5 715,545,1 710,76,3 356,149,4 846,409,1 884,595,4 746,630,5 907,413,3 895,143,4 299,686,2 879,346,5 389,8,5 623,0,4 478,7,5 62,2,2 406,215,4 628,54,4 799,1046,3 874,581,5 846,175,3 311,418,3 824,716,4 862,293,4 293,275,4 698,20,3 803,322,4 889,323,4 17,713,4 748,159,3 893,220,4 867,121,3 467,528,3 397,1040,3 411,356,4 869,91,4 604,404,3 101,1051,2 893,1141,4 861,174,5 879,1001,3 797,1543,3 547,627,2 839,202,5 824,13,3 531,632,5 635,812,5 455,1019,4 531,479,5 654,7,3 433,475,4 433,287,5 542,236,4 758,120,5 827,1596,3 319,237,4 881,116,4 344,1008,2 89,170,2 838,284,5 895,384,4 706,516,3 820,69,4 388,819,3 757,568,3 663,155,4 893,327,4 787,37,3 907,155,3 662,918,3 862,1606,2 471,374,5 681,470,3 805,194,3 501,287,5 788,180,4 322,267,4 629,495,3 471,759,5 649,422,3 902,181,5 486,627,4 807,301,5 581,234,3 279,669,2 550,356,5 200,47,3 814,417,4 391,339,5 913,723,3 465,81,3 167,117,4 832,182,5 722,288,2 324,194,2 547,309,3 740,14,4 302,1045,3 838,318,1 881,85,5 151,723,5 822,54,4 886,1539,5 787,221,3 513,175,4 607,130,4 408,356,5 333,149,4 452,507,4 333,845,3 740,565,4 607,482,4 629,639,1 392,878,3 451,69,5 275,507,5 71,525,4 893,248,3 683,99,4 843,624,3 737,237,4 841,268,5 344,4,3 649,293,3 647,88,4 880,290,3 861,110,5 882,15,4 176,182,4 824,925,4 22,194,4 794,149,3 502,233,5 505,232,4 822,154,3 278,1087,4 268,520,4 874,473,5 768,404,2 605,805,5 605,233,4 415,89,4 333,219,3 756,61,3 750,745,4 794,181,4 720,937,3 724,14,4 751,304,4 850,122,4 884,185,4 233,621,2 450,1394,1 191,1170,2 756,121,1 294,469,3 641,738,5 564,1621,4 717,117,4 851,180,4 404,745,1 547,741,5 692,728,4 423,507,3 869,788,4 559,482,5 845,1266,3 453,626,2 846,38,2 896,185,5 10,317,5 877,274,4 560,675,3 870,301,5 649,185,4 665,209,2 646,69,3 913,110,1 637,434,3 276,277,1 692,221,2 58,264,4 721,755,3 591,1069,5 910,587,4 10,448,3 387,0,5 884,69,5 863,1118,3 647,565,4 838,291,3 98,1047,4 550,280,5 503,126,5 670,67,3 846,233,2 183,505,4 173,279,5 845,21,4 620,93,2 717,716,4 681,889,2 45,299,3 915,1427,3 168,442,4 255,228,3 547,225,5 704,398,5 151,595,2 147,190,1 
453,464,3 801,759,3 715,513,5 787,517,3 803,377,4 868,117,1 886,180,5 704,194,2 492,233,5 889,842,3 797,258,5 875,237,4 794,726,3 863,175,5 845,63,4 658,1137,4 576,297,4 585,929,2 524,290,2 895,132,2 136,117,5 477,590,3 560,732,3 876,954,4 876,215,4 714,156,4 180,597,1 658,1171,4 756,384,3 296,464,3 657,211,3 896,392,4 498,424,3 845,481,5 193,366,3 513,151,4 560,1069,4 755,209,4 440,312,4 397,413,3 174,87,4 467,951,3 89,241,4 439,748,3 586,293,3 19,632,4 676,686,4 781,291,4 697,175,4 879,830,4 846,447,4 691,325,3 825,945,3 384,60,2 779,274,4 715,479,5 364,351,1 391,190,5 787,132,5 377,381,4 869,1133,4 748,801,3 617,923,4 497,203,2 773,417,2 763,98,4 536,963,3 757,202,5 727,741,4 888,216,4 12,151,5 488,348,4 756,67,4 616,439,4 496,363,3 807,324,1 904,257,3 869,503,5 478,200,4 327,63,4 642,32,3 905,320,4 646,567,4 642,515,4 732,1022,1 621,120,1 605,87,4 869,271,4 456,627,4 415,802,3 832,185,1 451,491,4 592,545,3 781,323,2 25,297,3 550,167,5 197,290,2 877,735,5 626,122,3 456,565,4 891,75,4 291,602,5 795,1040,5 737,180,4 392,537,3 883,1213,1 693,964,4 376,894,3 803,1046,3 100,818,1 31,6,4 681,379,4 804,734,4 171,22,3 422,126,4 658,468,4 559,210,4 12,661,5 772,1366,5 895,397,2 519,293,3 863,6,5 839,237,5 843,150,4 861,198,5 233,465,4 888,478,4 559,616,3 891,70,3 681,551,3 895,221,4 592,762,3 457,24,1 492,256,5 797,390,3 833,342,4 728,900,1 741,236,4 323,249,4 473,490,4 662,288,1 895,272,5 436,582,1 231,899,5 527,30,5 448,287,3 802,244,4 451,575,2 536,99,4 372,176,3 638,134,4 749,304,4 233,163,3 344,261,5 652,88,5 827,305,3 891,577,4 617,135,3 338,822,3 415,450,5 428,971,4 100,224,3 37,287,5 870,946,2 143,11,4 545,897,4 704,147,5 221,540,2 704,28,5 444,459,2 479,11,5 422,878,3 641,1178,3 822,179,4 532,473,3 891,707,4 773,946,2 794,237,3 827,1004,3 362,72,2 786,270,1 732,148,4 863,719,3 197,30,3 745,88,4 806,230,4 503,450,1 585,778,3 663,150,4 882,528,5 845,730,3 750,755,2 901,317,5 803,471,3 757,124,2 220,149,5 555,603,5 293,257,3 824,490,4 53,239,4 192,23,2 642,184,5 849,227,5 801,395,2 846,478,3 830,325,4 880,654,4 910,167,4 735,1277,1 560,231,3 560,509,3 803,405,3 824,175,5 726,754,2 678,720,3 394,422,5 666,195,5 762,15,5 718,377,4 874,175,4 864,121,3 906,289,4 858,845,5 849,14,5 37,325,5 157,685,5 685,429,4 859,158,3 879,927,2 846,366,3 142,306,4 878,762,5 638,51,3 27,229,4 576,741,4 747,88,5 785,870,1 523,172,4 867,150,5 681,275,3 895,97,5 711,968,4 313,933,4 434,1239,4 533,332,5 523,97,3 526,171,5 631,54,2 915,270,3 915,231,3 907,191,2 879,7,4 797,1182,1 398,844,3 659,306,3 900,286,3 892,263,3 834,353,3 306,221,4 659,1482,3 743,0,4 772,353,2 866,209,5 539,472,3 863,216,4 870,882,3 726,256,2 310,197,3 658,61,4 659,134,4 843,430,4 893,272,3 839,193,3 581,747,3 542,943,3 456,1118,4 701,342,2 814,6,4 210,1329,3 795,279,4 422,749,5 832,324,4 797,404,5 124,89,5 880,49,3 649,14,3 827,223,3 12,61,5 723,335,1 193,96,3 74,78,5 415,689,5 575,275,3 565,1,5 450,680,1 140,1027,4 870,236,3 458,684,3 659,40,1 716,280,4 726,443,2 809,303,4 797,227,3 876,274,4 639,760,5 701,682,1 879,234,3 404,567,4 24,0,5 823,267,4 465,49,5 898,274,4 829,819,1 69,450,4 84,629,3 868,180,3 534,528,3 726,1243,3 767,404,4 621,48,3 488,302,4 197,247,3 711,737,4 270,691,4 180,1328,1 614,936,2 535,143,4 861,412,4 792,695,3 513,110,5 647,24,2 807,749,5 906,723,5 379,189,5 680,293,5 91,450,3 341,288,2 795,596,5 866,10,3 915,99,5 12,52,1 874,500,4 592,132,4 839,98,5 738,96,5 213,481,4 297,173,5 722,190,3 832,450,1 653,741,4 292,435,3 917,487,3 756,545,3 885,471,3 839,508,3 662,147,4 890,277,4 63,86,4 805,510,5 252,233,4 485,279,2 
697,479,2 763,355,4 814,435,3 839,654,5 822,91,5 726,509,4 562,475,3 610,306,4 532,488,4 832,1596,5 647,22,3 536,672,3 604,470,3 268,656,4 9,708,4 803,1055,4 775,191,5 773,204,4 906,684,5 626,1,3 311,221,3 659,721,1 822,215,5 616,634,4 706,448,2 803,947,1 536,1005,2 428,513,3 789,435,4 463,321,3 863,355,4 881,293,4 590,284,5 891,176,4 824,306,4 451,525,4 568,272,3 415,237,4 896,927,5 434,290,4 805,474,4 635,274,3 829,434,5 428,664,2 746,29,5 221,116,5 867,208,4 795,180,5 317,215,4 833,150,4 898,230,1 341,10,5 918,1100,5 636,595,2 605,186,4 534,46,5 844,1393,4 885,160,5 839,296,5 845,674,2 455,615,3 456,468,4 415,1220,5 621,510,4 410,484,4 689,1027,4 732,618,3 918,786,3 920,23,3 915,365,3 737,80,4 391,346,4 781,1604,2 599,770,3 452,716,2 523,1049,2 542,691,4 585,549,4 396,312,4 69,595,3 860,274,5 845,288,4 692,491,3 768,283,3 883,165,3 754,263,2 909,181,4 446,289,4 863,131,5 502,735,4 5,339,2 895,671,2 531,1188,5 382,495,5 885,317,5 531,1167,4 432,268,5 605,293,2 199,719,4 846,473,4 882,751,4 907,172,3 550,459,3 595,894,3 325,174,1 531,271,5 270,187,2 757,675,2 785,723,4 338,120,3 902,408,4 715,49,5 398,768,3 196,325,3 765,135,3 890,279,3 75,155,3 895,525,4 664,410,4 566,480,5 605,237,4 638,922,4 336,391,5 845,268,5 839,169,4 295,10,5 710,287,1 772,546,4 852,886,2 859,1060,3 304,175,4 663,714,3 912,98,4 772,1096,4 560,170,5 909,1011,4 898,153,5 499,1165,4 669,605,4 825,228,4 806,403,3 71,492,5 822,196,5 888,497,4 748,156,3 853,126,4 112,873,5 733,131,3 891,648,5 895,355,3 839,179,5 526,179,5 518,1611,5 716,124,4 748,447,2 725,1027,2 845,377,4 849,662,2 845,479,5 604,948,5 915,1045,2 757,1021,5 706,302,3 541,179,3 715,87,4 544,626,3 654,249,3 695,346,1 535,488,4 602,746,3 750,269,4 750,88,3 831,875,3 797,462,3 837,186,3 5,8,4 581,268,4 55,160,4 513,94,4 602,750,4 661,1380,5 861,23,4 633,117,4 833,346,4 377,457,4 193,176,3 870,49,5 659,429,4 105,161,5 652,517,2 881,475,3 789,1445,4 194,430,3 787,257,4 814,968,5 89,222,4 734,6,3 881,691,4 553,272,3 649,471,3 337,179,4 331,681,4 292,192,3 453,201,3 902,745,2 869,380,3 90,68,5 874,172,5 885,696,1 808,285,4 711,139,4 838,865,2 900,77,4 816,327,4 574,110,1 915,469,3 870,689,3 880,433,2 466,1010,2 799,596,4 561,457,2 425,645,3 789,558,3 511,182,5 17,215,4 449,726,4 704,684,5 536,273,2 804,24,4 586,267,4 425,1019,4 647,203,5 804,395,4 691,299,4 853,219,4 550,158,4 882,255,5 641,385,5 324,736,4 765,525,2 157,55,5 867,166,1 766,27,4 781,343,3 261,65,3 786,349,1 53,244,4 298,383,3 681,155,5 449,925,4 310,75,4 15,198,5 434,648,3 708,216,5 845,30,4 895,767,2 915,159,3 552,131,4 906,339,2 693,659,3 536,510,5 505,741,5 757,495,3 781,534,3 186,434,4 889,152,3 900,21,5 10,175,3 633,8,5 298,602,3 663,626,1 462,303,3 559,283,3 896,150,5 576,236,4 853,190,4 652,1443,3 437,0,4 453,221,3 531,419,4 676,979,2 644,211,4 654,780,1 381,1016,4 560,482,4 292,160,2 626,32,1 847,427,5 221,98,3 763,173,5 757,1045,4 361,299,5 830,748,2 906,590,5 886,1034,5 478,299,2 544,327,4 200,655,4 304,32,3 687,303,5 797,820,5 540,402,3 895,122,3 915,450,3 599,3,4 279,379,2 93,176,5 803,3,4 822,185,4 825,230,3 428,1019,4 750,741,3 795,808,4 856,18,4 444,1013,1 313,1252,4 693,160,4 842,141,2 746,948,5 215,275,4 182,143,3 698,272,3 737,661,4 827,189,3 833,116,4 421,116,2 249,150,4 748,685,4 708,362,3 534,506,5 617,924,2 221,1438,3 743,275,4 846,201,4 738,186,4 786,330,3 682,310,3 806,61,3 704,683,3 292,216,3 560,2,3 536,749,3 896,418,4 822,67,3 863,63,5 606,274,4 425,204,4 63,325,3 602,179,4 615,312,5 378,46,5 243,844,3 762,172,4 716,236,5 842,251,3 853,269,4 314,300,2 377,117,4 298,3,3 
531,6,5 886,767,4 765,178,4 896,472,3 789,495,3 909,99,4 804,116,3 593,241,4 416,155,3 665,493,4 710,15,5 859,300,2 454,192,4 686,894,4 918,256,4 528,879,4 729,275,3 626,848,4 649,271,4 869,180,4 755,225,3 177,330,4 879,236,4 911,203,2 73,267,3 907,54,3 628,293,3 858,1094,2 449,1283,3 829,240,4 492,337,4 824,1027,3 853,618,2 682,343,3 242,9,4 313,845,3 882,1120,3 505,481,5 888,272,4 879,727,4 915,251,2 200,181,4 541,762,4 914,303,3 221,257,5 329,404,5 782,270,5 652,972,2 815,258,2 769,741,4 844,1021,2 183,316,3 888,128,5 670,221,1 632,497,2 757,541,2 560,172,4 206,762,3 406,795,2 746,671,4 880,76,2 636,150,5 825,767,3 125,989,4 803,1040,3 907,287,4 641,101,5 285,733,2 589,5,5 411,968,3 750,1,4 501,270,5 397,483,4 513,143,3 907,693,4 605,332,5 449,748,4 523,69,4 886,632,5 921,160,3 889,515,2 847,587,3 311,177,5 795,184,4 675,269,4 449,283,4 863,421,3 842,431,2 893,243,4 496,552,2 885,1072,4 637,228,1 453,401,3 772,259,2 711,654,5 748,403,5 876,731,4 893,280,3 822,281,3 904,244,3 330,633,3 647,925,3 434,495,4 853,6,4 198,812,3 257,293,4 353,1465,5 789,214,2 605,107,1 891,53,3 881,472,3 662,507,4 341,168,5 555,317,5 900,7,3 917,130,3 869,469,3 503,306,4 200,505,4 505,43,4 922,409,3 353,432,3 502,292,4 797,1033,2 122,184,4 652,402,2 885,482,4 744,150,2 30,518,4 895,190,4 64,110,4 723,348,2 807,261,5 452,225,3 536,477,4 619,858,4 310,379,4 404,941,1 804,171,4 542,1,3 913,87,2 664,185,4 499,830,3 426,288,5 275,70,4 910,434,5 43,49,5 6,70,5 402,256,2 377,159,2 406,503,3 827,957,5 173,987,1 828,274,4 861,977,3 424,264,3 869,130,4 789,1047,4 704,93,4 726,1033,2 497,184,4 757,75,3 233,748,3 406,929,2 275,1144,2 802,987,1 839,7,5 94,714,1 809,320,5 473,615,4 245,132,3 12,861,3 642,1064,4 312,480,4 797,157,2 377,542,4 805,11,5 900,987,4 889,1064,3 397,446,2 711,584,4 591,442,5 576,171,4 765,176,3 314,302,4 255,985,5 912,57,5 215,507,4 93,171,4 533,1053,5 867,422,2 860,735,4 789,976,1 879,362,4 901,294,2 750,489,4 882,0,3 726,156,3 249,90,5 832,383,3 379,150,4 197,229,3 830,297,5 880,384,4 804,384,1 921,142,4 732,984,3 262,661,4 888,6,3 17,47,4 484,310,3 927,7,5 649,565,3 839,404,4 901,326,3 681,21,5 536,304,4 325,520,2 756,342,3 623,299,4 797,1538,2 822,171,5 332,519,4 893,739,4 652,27,4 566,270,4 891,229,4 777,10,5 373,202,3 702,117,5 863,510,4 592,24,3 763,273,3 623,762,3 619,163,5 746,1097,4 404,570,5 449,317,5 911,171,3 186,299,4 342,202,5 449,967,4 696,322,4 850,973,2 816,839,2 781,1190,3 307,409,4 747,185,5 860,1147,3 856,474,5 520,323,2 556,291,4 653,113,5 745,126,2 913,196,4 290,95,4 626,196,5 310,416,3 869,498,4 446,293,4 681,432,3 748,210,5 372,855,3 912,356,5 681,684,3 619,62,5 893,284,4 464,280,2 614,356,5 838,285,4 704,221,5 891,88,5 886,83,4 312,221,3 874,650,5 794,825,3 745,684,3 882,1018,5 862,304,4 302,678,2 885,97,4 455,696,4 649,577,3 774,326,5 786,937,3 300,401,2 591,59,4 879,545,3 839,473,5 704,94,4 863,1209,2 902,202,4 457,459,4 271,132,1 392,1038,3 756,938,4 804,44,4 726,569,2 455,394,2 466,257,2 124,242,2 6,601,3 774,311,3 187,683,3 518,902,5 926,221,5 560,454,3 196,310,4 279,141,4 317,120,1 670,233,4 863,482,5 188,497,5 751,345,4 921,209,3 681,54,4 302,1336,1 342,214,5 714,23,3 658,174,5 915,228,3 436,424,4 380,158,3 777,267,2 895,596,4 858,1060,4 448,13,3 750,416,2 393,404,3 920,1059,2 789,551,2 866,67,4 720,1038,5 544,79,3 560,267,3 175,292,5 486,391,4 891,62,4 881,525,4 787,317,5 888,461,5 830,49,5 842,428,4 846,644,3 513,1038,5 333,28,2 394,1027,2 831,293,4 654,192,3 416,89,3 781,1416,2 789,187,4 747,450,1 843,257,4 415,14,4 842,799,4 590,12,4 567,134,4 921,370,3 866,649,5 
487,233,4 732,126,3 629,192,3 284,257,2 320,490,3 166,553,1 553,769,1 619,759,3 832,729,4 652,116,4 926,137,4 449,500,4 266,1027,3 830,299,3 804,411,3 837,274,5 453,47,4 845,450,4 880,37,3 693,186,4 536,788,2 567,660,4 310,698,4 89,504,5 692,312,5 91,227,4 756,750,3 856,347,1 209,221,4 707,8,1 486,155,4 681,656,4 723,300,4 178,681,5 449,306,5 803,225,4 685,316,5 726,525,4 711,172,5 923,95,4 874,3,3 850,681,1 587,569,4 551,872,3 2,346,5 624,211,3 698,321,3 922,332,5 654,765,3 384,181,5 795,825,2 129,346,4 278,1029,4 248,602,5 463,306,5 264,675,2 681,230,1 698,115,4 877,415,5 592,743,3 885,635,3 720,305,3 760,863,4 854,1020,3 704,142,3 888,781,2 891,172,5 359,519,4 805,94,5 904,327,3 720,259,3 503,632,3 865,299,1 422,292,4 536,646,4 898,750,4 654,1192,3 654,27,3 928,88,5 895,545,2 444,1050,1 649,184,3 804,54,5 885,128,5 822,94,4 787,408,3 746,55,5 108,24,4 845,134,4 806,450,5 923,116,2 455,823,3 5,260,3 846,143,4 507,178,4 922,826,3 918,242,3 841,269,5 895,561,2 91,701,3 496,628,2 879,232,4 847,489,5 846,484,3 536,276,2 884,238,3 899,588,5 641,650,4 204,747,4 757,602,5 827,339,5 837,205,4 441,221,3 896,201,2 408,1391,1 675,192,5 639,188,5 859,152,4 851,6,3 795,1284,4 789,775,3 912,300,1 520,225,4 697,485,4 927,97,5 918,689,3 90,417,2 843,325,3 245,615,5 601,236,4 168,537,4 539,8,5 921,214,3 404,380,1 742,291,3 869,519,5 730,433,1 853,199,5 861,602,5 552,44,4 657,99,4 428,98,3 574,962,1 373,683,5 533,1051,4 895,684,3 853,124,3 884,64,2 176,306,4 797,1424,4 803,199,3 747,199,3 285,1078,3 832,503,4 895,451,3 711,559,3 930,49,3 915,780,3 880,477,4 822,152,4 874,95,4 390,49,4 816,362,3 750,1445,2 885,61,3 354,1174,5 6,462,4 866,227,5 641,1,4 821,407,5 737,696,2 838,695,2 923,173,5 863,275,5 700,302,4 873,339,3 785,203,4 487,750,3 311,96,5 291,168,5 571,8,5 506,537,4 706,952,4 641,800,3 649,482,5 902,707,4 427,302,3 886,691,5 757,233,4 845,527,5 188,485,5 757,532,4 129,470,2 802,337,2 654,641,3 681,152,3 150,276,4 839,164,5 396,664,3 542,699,2 871,110,4 329,317,5 903,693,3 86,48,5 888,1418,2 886,98,5 98,309,3 758,116,5 97,210,4 789,65,3 644,242,1 560,3,3 745,683,4 886,825,1 617,698,3 825,390,4 860,546,4 780,257,2 882,63,4 912,91,4 916,24,4 118,923,4 926,500,4 885,237,3 189,327,3 24,407,5 923,1399,4 789,95,3 173,647,5 21,249,5 800,293,5 825,342,5 750,299,2 620,500,3 523,1073,2 594,13,5 732,545,1 710,212,5 710,68,3 863,622,3 857,288,3 452,452,2 869,553,2 654,213,3 373,287,4 401,747,3 853,536,3 11,327,4 746,650,5 618,826,3 496,3,3 659,471,2 243,1027,3 898,659,4 573,343,5 726,87,5 891,190,5 888,301,4 345,232,4 294,51,5 454,448,4 266,292,4 893,846,4 789,780,4 842,958,2 837,270,4 737,175,5 843,777,4 121,238,4 881,236,5 895,140,3 863,55,5 884,94,4 284,299,4 659,89,2 706,173,2 93,626,3 915,1334,4 678,27,5 674,899,4 636,6,1 861,200,3 892,116,4 845,49,5 24,418,4 462,24,3 654,938,3 353,609,4 581,596,3 926,293,5 844,899,3 710,134,4 899,279,2 480,251,4 710,1162,4 720,106,4 849,173,5 781,1255,2 656,8,4 775,674,3 649,214,2 454,268,4 700,689,4 534,962,5 555,171,5 861,918,4 839,55,5 895,226,4 902,203,3 646,1046,4 390,124,3 416,1056,2 678,153,4 821,1239,3 693,1027,3 594,257,4 888,185,5 706,659,5 636,24,4 503,417,3 819,342,4 544,76,3 229,116,5 56,844,4 485,740,3 714,469,4 292,1134,3 617,53,3 710,168,5 914,287,2 749,244,3 693,647,5 915,243,4 682,905,4 341,292,4 647,13,2 895,199,4 817,886,4 888,385,3 651,293,2 523,465,4 918,258,4 672,257,2 890,933,3 931,134,5 737,454,4 819,315,3 436,720,2 127,495,5 424,545,3 634,116,2 22,55,4 710,312,4 650,293,1 115,342,2 638,791,2 879,691,3 46,306,4 931,6,4 750,736,4 908,285,4 
931,434,4 907,97,5 492,1015,4 795,210,3 775,815,2 693,497,5 171,641,4 799,124,3 415,446,4 842,95,3 882,714,5 806,656,4 888,174,4 804,545,2 357,44,3 931,284,4 770,293,4 885,171,5 681,761,3 835,418,2 498,256,5 888,677,3 561,152,4 607,69,4 920,7,3 609,7,4 320,18,4 681,4,3 278,1493,1 895,1671,2 751,350,3 758,741,5 526,184,5 861,736,4 101,228,3 619,1035,4 708,202,4 415,150,3 879,999,3 658,166,3 876,299,3 773,404,1 345,67,3 591,466,5 763,69,4 565,385,1 825,434,4 785,707,4 325,549,5 659,133,4 550,68,4 629,324,3 702,992,4 912,259,1 921,1034,3 222,70,5 915,522,3 920,0,3 505,94,5 822,187,5 708,172,4 111,312,5 895,366,4 789,180,4 895,548,2 449,712,3 454,120,4 290,741,3 434,264,3 886,1277,2 879,64,4 628,22,5 931,104,2 921,88,5 197,180,4 526,491,3 845,1147,3 416,321,3 540,404,3 772,381,3 915,169,4 496,89,4 834,1672,3 392,784,3 600,140,4 845,506,3 847,529,5 628,173,5 869,208,4 678,152,2 879,68,4 867,120,2 205,1174,1 931,1203,5 789,761,5 486,782,4 756,745,3 63,78,4 810,891,4 642,218,5 832,635,3 337,496,3 761,172,5 28,1018,4 565,442,4 625,271,5 441,858,3 867,187,3 805,27,3 631,131,5 115,248,2 715,386,4 586,917,3 109,237,3 12,37,3 853,603,4 832,71,2 217,602,4 420,196,3 462,1283,4 888,519,4 315,274,5 670,801,3 794,1554,3 310,230,4 921,71,4 660,1034,3 801,656,4 444,432,2 756,808,4 782,257,4 200,440,1 398,467,3 507,231,3 845,485,5 609,8,3 665,646,5 594,2,4 12,210,4 787,613,4 773,52,4 715,179,3 863,793,3 255,408,4 870,750,4 42,207,5 706,296,3 803,980,3 660,139,3 127,701,3 754,270,1 379,271,4 231,507,1 912,1111,1 867,95,2 605,683,3 444,1046,1 521,520,5 396,185,5 832,1153,4 803,983,4 795,111,4 880,195,3 681,257,3 746,886,5 879,1275,3 711,195,4 862,348,1 771,309,4 588,878,4 789,40,3 773,683,1 907,700,4 751,344,1 861,819,4 794,575,2 521,47,4 300,408,4 65,283,3 845,1472,5 781,899,3 832,143,4 647,636,2 622,185,3 895,75,3 846,731,4 845,70,4 662,747,2 718,239,1 794,203,3 454,212,4 779,78,4 415,231,5 845,392,3 143,222,4 471,659,5 886,709,5 765,395,2 663,158,3 261,91,3 893,331,3 757,461,4 652,1134,2 157,517,4 304,473,5 869,780,3 895,413,3 880,102,1 710,541,1 917,1064,4 880,126,4 806,414,3 702,1196,3 902,253,2 756,10,4 915,1134,3 320,492,4 805,154,3 879,1029,2 396,651,3 704,209,5 443,305,5 177,510,5 845,551,4 327,1400,2 255,840,2 467,49,5 621,279,3 233,628,4 867,224,1 641,782,4 305,257,2 869,590,2 144,589,1 724,180,4 842,98,2 495,960,2 806,738,4 885,812,4 180,507,3 215,0,4 465,16,5 642,480,4 918,18,4 748,184,4 748,88,4 838,325,4 405,490,4 781,334,2 462,236,4 706,219,2 553,3,2 553,530,4 795,320,2 520,86,3 449,187,3 931,503,4 551,1277,3 863,475,2 496,61,4 845,718,2 327,158,3 871,236,4 760,425,1 706,1173,5 773,518,5 200,365,2 762,60,5 888,514,5 623,454,3 225,285,4 886,234,3 732,404,2 552,180,4 550,635,5 755,122,2 615,288,4 832,932,4 143,125,4 715,613,4 880,942,4 703,610,3 918,252,3 852,270,3 895,422,3 852,327,3 473,187,5 81,49,5 785,65,4 505,516,2 845,71,4 0,54,5 715,791,4 415,352,2 927,186,5 912,7,2 344,627,3 540,65,4 144,309,4 803,239,4 604,268,4 513,510,3 827,894,2 574,78,5 70,285,4 271,193,5 907,203,4 914,309,3 745,171,5 896,193,5 876,401,3 896,567,5 490,126,3 534,516,4 895,435,3 877,81,3 536,128,3 893,581,4 327,68,4 789,939,3 915,865,3 566,88,5 748,627,4 715,492,5 836,275,1 863,139,3 723,875,1 885,66,4 744,204,2 757,1015,4 867,426,4 624,407,4 803,928,3 180,1186,1 833,149,5 246,120,4 692,1247,3 923,5,4 681,267,5 707,925,3 118,298,4 10,120,3 629,549,3 504,312,5 885,823,4 797,721,3 867,500,3 360,27,3 286,167,5 666,85,5 586,333,3 715,621,3 261,131,3 446,12,5 886,205,5 738,300,5 58,524,3 654,8,3 593,14,4 895,131,3 880,738,4 
333,306,3 888,1078,2 933,49,5 755,472,3 891,499,5 880,400,1 591,746,4 869,461,4 842,678,4 898,229,4 137,0,4 933,422,3 862,331,4 227,285,5 177,683,5 648,253,4 847,153,5 845,745,3 781,1381,3 814,126,3 888,398,3 564,970,5 9,481,4 60,750,3 791,12,4 697,1020,1 870,334,3 654,203,3 673,741,5 67,275,5 720,64,1 787,226,3 605,584,4 805,1073,3 333,128,4 827,269,5 863,215,4 461,260,2 932,1016,3 879,578,3 703,660,4 912,11,4 910,184,5 917,1199,4 654,1478,2 888,770,2 311,222,5 787,84,1 456,1028,3 693,662,4 803,211,3 806,20,4 829,229,3 359,68,3 918,321,3 867,209,5 885,26,2 153,199,5 824,412,3 535,567,4 853,2,1 518,1294,5 652,167,3 706,1100,4 71,187,4 885,181,4 457,498,4 326,536,4 457,1225,2 758,256,4 524,268,5 586,263,4 664,659,4 902,180,4 58,181,5 662,191,4 61,1027,1 379,844,4 6,585,3 806,819,3 891,101,3 795,401,5 795,131,4 910,173,4 436,220,5 934,475,4 601,537,4 812,900,1 642,672,4 6,95,5 362,548,4 369,301,5 663,214,4 536,836,3 853,175,3 797,559,3 692,854,2 888,1071,3 37,293,5 193,517,4 654,1513,2 895,214,5 710,317,5 689,1040,3 193,1106,3 892,293,3 129,256,4 43,519,5 415,1010,4 538,131,5 818,247,5 829,431,3 753,275,5 58,1111,3 726,844,3 842,20,2 583,229,4 706,948,3 665,168,4 738,171,4 708,727,4 757,237,5 888,746,4 621,1202,3 449,661,4 842,175,4 104,342,2 129,354,4 818,267,4 729,257,5 890,280,5 642,152,4 899,492,2 681,1078,3 496,236,3 746,391,3 584,918,2 704,402,4 477,11,5 323,689,4 896,1253,2 884,417,4 853,487,4 841,885,4 931,1064,5 532,215,4 459,297,2 662,88,4 888,182,3 732,1114,3 886,81,4 845,101,2 846,566,3 937,863,4 932,316,4 898,1,3 861,656,5 290,183,4 603,440,2 895,461,3 921,755,2 342,402,4 441,152,3 811,260,1 771,299,4 601,357,4 773,447,2 773,1217,3 899,473,4 658,430,4 842,446,2 93,66,3 454,143,3 752,434,4 902,275,5 34,299,5 820,426,5 574,181,3 867,523,3 522,1046,5 706,1067,4 804,500,5 588,321,3 877,136,3 894,283,3 737,205,3 327,234,3 814,192,4 373,293,2 923,522,5 886,422,2 419,172,3 434,57,3 745,7,4 221,149,3 458,258,4 832,669,1 706,810,4 720,872,3 654,1528,2 896,203,4 918,357,3 908,581,5 398,301,4 61,189,5 934,283,4 405,659,3 475,65,3 891,96,5 338,342,3 20,985,1 908,291,4 649,392,3 159,212,4 760,454,2 845,779,4 873,747,3 499,182,4 912,526,5 434,143,4 255,227,3 723,1061,1 659,226,2 503,1146,4 386,1010,3 517,627,5 791,590,2 270,653,5 918,886,3 832,319,4 726,397,2 877,180,3 715,171,4 773,22,3 867,272,3 845,678,3 441,446,3 746,289,3 294,0,4 824,11,5 500,596,3 460,257,4 384,1448,4 890,470,5 78,221,4 496,209,4 338,199,5 233,988,2 852,257,3 898,217,4 629,931,2 845,1247,4 663,284,5 888,172,5 814,199,5 853,284,4 869,395,3 444,288,1 457,734,2 681,298,4 473,468,4 692,173,4 774,328,3 757,704,5 861,182,5 106,324,3 928,208,3 746,1224,3 393,88,5 814,49,5 531,135,5 483,1,4 873,288,4 884,90,3 795,190,4 342,197,4 816,323,2 351,430,2 748,619,4 787,210,4 710,424,4 862,897,1 326,245,4 633,271,5 485,20,3 877,14,4 250,124,4 708,10,5 505,201,5 284,269,4 275,1212,1 770,78,1 706,1641,5 495,841,2 706,92,5 926,23,3 302,1038,5 900,622,4 744,426,4 939,192,3 641,236,5 403,749,3 386,562,2 772,257,5 654,1632,3 882,27,3 118,203,4 896,142,5 863,381,3 881,221,5 915,213,3 473,199,3 803,182,4 915,174,4 496,724,3 86,475,2 371,291,5 789,1090,1 746,312,5 803,755,3 627,1024,5 631,142,5 738,196,1 377,948,3 845,1117,5 550,83,1 918,346,3 887,868,4 899,182,3 904,281,3 523,569,4 523,522,4 641,570,3 867,416,1 787,660,5 615,342,4 496,48,3 879,401,3 222,312,5 396,297,4 473,1010,4 931,175,5 392,448,2 312,14,2 881,987,5 781,897,3 6,576,2 880,1539,1 893,82,4 748,525,5 832,395,3 268,734,2 592,232,2 888,460,3 818,1159,4 706,1210,4 885,170,4 
58,602,5 453,479,4 550,450,1 607,1220,2 939,567,3 591,481,4 896,55,2 830,30,4 221,769,3 623,121,3 869,67,3 640,208,4 620,87,2 934,99,3 714,273,3 692,280,3 871,747,3 695,426,5 891,143,5 115,291,4 939,13,3 859,304,4 770,68,5 921,381,4 862,1021,2 330,21,4 801,233,5 898,171,4 624,234,3 924,4,4 487,135,4 770,143,1 933,572,2 709,173,4 567,55,4 594,274,4 312,140,4 941,116,4 862,872,2 888,154,3 285,329,5 770,407,5 773,240,4 465,894,3 696,117,3 122,13,5 893,862,5 663,407,5 720,171,5 879,863,3 939,204,3 547,180,4 586,357,3 443,327,5 932,27,4 83,317,5 233,1074,3 896,496,3 255,49,4 398,221,3 845,966,3 864,675,2 939,271,4 193,678,2 639,460,4 711,387,3 845,726,4 405,179,5 910,495,3 867,918,4 816,545,4 829,479,5 765,392,3 621,161,3 846,403,3 278,31,3 607,21,4 863,1227,3 911,131,5 609,126,5 885,788,3 936,274,4 151,87,5 867,116,2 556,681,2 0,41,5 609,184,5 881,32,2 832,91,2 705,180,4 888,23,4 834,312,5 404,43,1 747,325,3 345,176,4 824,116,5 780,214,3 859,299,4 829,1,3 393,251,3 827,508,2 252,489,5 715,224,3 605,110,4 0,138,3 789,24,2 486,801,4 478,508,4 300,121,2 845,468,2 941,199,4 797,96,1 370,30,5 845,189,5 939,654,4 528,342,3 172,325,5 805,215,4 600,863,1 442,259,1 839,196,5 304,274,2 824,1243,5 788,0,3 591,471,1 766,482,5 912,1239,2 895,24,3 320,126,3 830,315,3 721,475,4 889,624,3 681,659,2 729,326,2 932,186,4 257,876,3 270,469,3 428,64,3 214,158,3 765,385,3 757,525,4 592,287,4 891,117,4 804,654,3 858,293,3 709,298,3 344,1006,5 432,285,5 830,6,5 124,1203,3 834,526,4 536,1165,2 931,54,3 863,98,3 937,121,1 392,728,4 910,380,5 791,146,4 902,287,4 649,418,4 773,200,2 177,627,4 842,190,3 847,88,5 895,480,4 72,893,1 879,39,2 866,97,5 829,611,4 769,302,4 436,100,3 443,306,3 160,928,1 234,428,4 468,160,3 832,942,4 670,55,1 896,476,3 16,627,1 555,339,5 864,267,4 746,110,4 882,211,5 765,88,4 842,674,5 864,90,3 752,293,5 558,519,5 193,78,3 526,426,4 902,190,5 744,124,5 388,301,5 772,178,5 654,4,2 652,525,3 550,293,4 746,132,5 687,358,5 304,317,3 449,95,4 566,497,4 434,43,2 454,146,4 91,384,4 898,639,1 892,147,3 652,224,1 863,86,5 936,8,5 541,626,3 661,1651,3 845,1310,2 923,126,3 804,630,5 583,248,4 904,299,4 912,480,3 378,1112,4 658,567,4 485,303,3 127,257,2 845,218,4 778,108,3 496,449,2 824,292,3 625,265,1 636,300,1 941,603,4 451,662,2 184,177,4 880,116,5 565,180,2 825,509,4 939,314,4 641,741,5 124,204,5 910,97,2 800,287,5 140,1281,3 680,1175,4 586,688,1 73,507,4 143,55,4 320,115,3 901,49,5 879,366,4 884,194,4 850,342,2 300,204,4 649,79,2 794,41,3 616,566,2 700,327,4 697,95,4 644,185,4 734,282,2 853,1676,3 290,97,5 868,239,4 641,779,5 182,120,3 543,287,2 708,858,3 884,55,3 806,209,4 441,432,4 918,320,2 825,677,4 378,285,4 653,152,4 706,293,2 827,1055,1 587,1218,2 434,686,2 781,1379,2 843,6,3 377,97,5 757,504,5 825,569,4 653,150,4 473,466,4 932,63,5 400,285,2 406,203,3 324,382,1 895,1017,3 863,233,4 909,844,4 889,448,1 885,217,3 870,546,3 732,116,2 408,60,4 880,93,2 842,580,3 642,168,4 879,1052,3 891,94,4 709,509,4 876,78,4 536,713,3 550,362,4 941,422,5 885,75,4 40,95,4 876,747,4 932,446,2 862,1678,3 738,175,1 886,90,5 495,494,3 670,778,3 654,1472,3 458,695,4 473,173,5 498,526,5 914,299,3 535,719,4 869,394,3 221,90,2 849,120,5 888,186,4 621,209,3 338,1257,3 94,397,1 502,129,5 803,138,3 471,431,5 654,1128,3 772,230,2 895,19,1 845,1,5 620,431,4 586,679,1 451,841,2 183,312,4 891,595,3 552,496,4 845,587,4 817,750,5 582,707,5 427,314,5 733,192,4 907,656,4 845,653,5 794,213,4 909,236,4 396,64,2 180,595,4 398,378,3 520,750,3 502,462,1 801,52,4 797,622,3 893,1130,4 893,510,4 876,565,4 915,80,5 797,739,2 619,312,5 
48,464,3 702,507,3 804,142,3 870,21,5 314,163,4 759,738,4 789,227,3 939,65,4 453,282,3 867,80,4 108,571,3 932,155,4 861,202,4 891,109,3 746,614,5 560,541,1 416,1229,2 415,1118,5 889,257,3 884,172,4 677,274,2 647,160,3 814,583,3 567,233,3 384,1455,4 893,1559,4 604,69,3 473,95,4 795,734,2 893,256,3 615,287,4 797,602,3 672,326,4 918,595,3 880,933,3 916,470,4 838,110,4 922,236,4 764,136,5 814,173,4 353,220,4 933,1424,1 925,285,4 931,1265,4 636,8,1 180,677,2 200,9,3 898,281,5 226,740,3 777,567,3 302,374,2 767,309,4 822,181,4 689,434,5 365,772,3 319,184,4 746,39,2 221,66,4 923,194,5 881,659,3 814,180,5 942,57,4 451,99,5 502,693,5 885,370,1 883,164,3 526,190,5 793,268,5 737,63,4 824,242,4 708,227,3 423,285,4 560,602,4 477,10,4 284,63,3 416,230,4 413,885,4 685,172,5 523,714,4 931,678,2 794,1198,3 871,287,5 923,272,3 882,276,4 863,160,4 869,493,3 744,203,3 462,301,5 906,316,5 681,942,3 781,1533,2 750,916,2 404,1576,1 295,113,5 498,587,4 869,527,4 540,583,3 621,580,4 879,721,3 750,709,3 479,442,4 590,305,5 675,294,1 853,128,3 775,194,3 832,976,2 550,1027,4 859,346,4 834,498,5 845,1178,2 692,120,2 539,596,4 621,483,3 926,227,5 654,169,3 898,249,2 550,225,5 331,40,5 692,156,4 861,483,4 455,951,4 367,4,3 654,1110,3 560,1043,2 115,245,5 629,215,5 836,274,4 591,175,5 881,224,5 750,196,3 918,461,3 902,22,5 839,143,3 249,8,2 668,150,5 353,221,3 895,1220,2 649,621,3 275,396,1 103,245,3 877,495,5 920,931,3 760,987,1 591,223,5 891,236,4 866,249,4 863,968,4 681,682,2 767,814,3 30,1021,5 743,339,3 829,55,2 304,325,2 436,70,3 900,928,4 255,380,5 787,620,3 535,435,3 845,386,3 184,844,4 832,510,4 789,144,2 58,523,3 852,299,5 781,872,4 939,872,3 369,136,4 612,271,5 297,49,5 373,226,4 382,659,4 473,736,4 275,384,4 885,392,3 26,929,2 879,78,4 415,549,4 507,46,3 633,472,2 902,186,5 763,120,5 91,43,3 626,327,4 908,115,5 540,658,5 825,229,4 626,357,3 378,6,5 397,230,2 449,388,4 867,97,4 934,470,4 617,51,3 449,49,5 326,180,4 647,233,5 424,11,5 867,661,2 824,1162,3 839,514,5 814,140,4 906,619,4 726,454,3 932,97,5 836,279,2 494,1078,5 300,232,4 795,691,5 560,1009,3 772,957,4 444,299,1 359,222,5 921,201,5 663,663,4 722,188,3 657,41,4 942,110,4 888,1069,3 715,417,4 86,764,3 906,273,5 863,1,4 707,472,1 931,1304,2 811,285,2 842,581,2 794,476,3 888,754,3 206,194,3 609,171,4 523,13,5 938,930,2 889,661,3 5,530,4 621,252,3 343,545,3 894,180,5 839,256,3 696,594,4 822,89,4 496,6,3 400,427,4 144,117,3 750,703,2 891,731,4 879,43,4 682,879,3 180,1033,1 683,157,3 882,548,4 869,178,4 777,366,5 746,233,5 866,116,3 828,221,4 822,1266,4 143,6,2 740,117,1 261,608,3 129,325,5 459,1066,4 653,142,5 405,121,3 275,216,4 57,136,5 884,500,3 730,612,2 781,687,2 853,3,2 592,281,5 787,119,2 392,1075,3 814,256,3 888,222,4 787,280,4 461,325,4 660,164,5 393,357,3 789,157,2 547,55,5 217,22,4 603,6,4 849,583,4 885,0,4 471,404,5 670,264,3 895,357,1 585,580,2 700,288,4 664,201,3 539,339,4 285,41,4 61,454,3 617,217,3 591,22,5 221,166,3 847,117,2 842,550,3 910,484,3 863,746,3 654,299,3 238,530,5 882,167,5 302,0,5 845,493,5 445,244,4 787,230,3 607,316,5 310,704,3 343,430,3 891,10,3 572,156,4 847,1020,5 682,269,3 895,174,2 895,602,4 832,6,3 604,237,1 942,10,4 822,12,5 871,362,4 681,178,4 37,636,2 898,185,4 915,45,4 795,754,4 935,1257,2 649,477,4 865,241,3 881,242,4 794,464,3 920,312,5 698,369,3 457,203,4 623,24,4 795,1216,3 907,132,5 664,49,4 576,312,4 591,381,4 852,258,3 398,95,3 845,553,4 535,777,4 916,49,3 160,69,3 714,134,2 312,1469,1 467,173,5 893,712,4 888,88,4 679,268,4 233,526,3 730,135,4 754,288,1 689,1184,1 664,472,4 733,190,4 161,178,3 819,287,5 
885,800,3 509,291,4 775,563,3 918,1135,2 666,97,4 805,1513,3 927,495,5 520,753,3 654,457,3 886,71,4 740,203,4 667,288,2 746,605,5 840,306,5 726,759,1 642,131,5 600,435,4 783,312,5 772,197,4 937,312,5 730,520,1 445,306,3 338,395,4 628,172,5 755,273,3 41,654,3 274,379,3 781,875,2 931,613,4 748,413,4 412,326,3 170,269,4 563,1024,2 535,78,4 490,512,5 356,120,5 540,1046,2 654,10,2 193,203,4 306,160,3 900,11,5 753,117,2 335,0,3 747,494,3 794,208,5 893,689,4 488,884,4 126,229,5 444,256,2 928,30,2 94,7,5 835,610,5 504,131,5 513,654,4 649,217,3 653,195,5 801,262,1 917,639,3 850,300,3 654,466,3 540,139,5 869,37,3 42,256,4 233,105,4 762,461,5 833,254,3 620,6,4 921,248,3 473,923,4 607,203,4 621,587,4 789,708,3 503,186,3 367,558,3 863,779,2 882,1590,3 343,1047,3 9,413,4 879,745,4 532,225,4 12,902,3 499,59,5 517,128,5 587,205,4 327,902,3 553,244,3 758,936,4 471,366,5 787,222,4 338,217,3 642,673,3 312,236,2 885,387,1 861,1116,4 209,162,3 576,101,4 665,1070,3 542,159,3 869,1111,2 928,516,5 880,471,4 714,226,3 906,824,3 804,105,5 377,95,4 909,244,2 12,910,2 834,542,5 487,190,3 867,149,5 748,1614,4 892,0,5 813,664,4 535,7,5 243,57,3 268,603,3 861,116,5 659,182,2 76,518,5 222,1283,1 870,212,3 298,514,4 867,194,2 629,322,4 320,703,3 895,514,3 803,747,4 844,267,3 719,285,5 737,202,3 662,1016,2 868,283,1 726,198,4 12,568,2 893,343,4 119,126,4 867,177,5 388,1167,3 931,496,5 549,0,3 933,176,3 791,0,4 561,134,5 638,212,5 89,510,5 463,298,4 305,18,5 748,222,4 842,562,2 845,492,5 750,301,4 599,231,3 718,57,3 866,288,5 447,301,5 853,54,4 149,149,3 922,471,4 813,55,3 407,326,5 931,1183,3 434,720,4 450,334,4 259,333,5 502,502,3 861,981,4 896,171,4 311,529,5 394,285,4 781,1301,3 915,823,3 893,354,3 853,482,4 930,236,3 81,240,3 892,404,5 343,761,3 457,518,4 881,234,3 737,422,4 628,136,5 917,427,5 579,146,3 654,979,2 909,287,3 850,290,4 362,222,5 789,207,3 710,1169,3 918,269,4 773,117,1 720,326,2 861,178,5 89,274,5 310,95,5 889,426,5 532,168,4 895,151,3 868,49,4 806,7,4 96,68,5 850,684,4 941,260,4 614,426,5 693,1268,5 825,448,4 918,1046,3 757,628,4 186,706,5 282,1008,3 886,831,2 617,182,4 834,734,5 756,221,4 536,288,1 708,440,4 847,632,3 902,1007,3 884,232,3 449,346,4 12,287,1 623,126,4 482,282,5 842,94,2 906,281,4 304,474,4 885,70,4 898,24,3 896,950,3 617,158,3 298,479,4 692,215,4 666,237,3 200,1425,2 607,181,4 209,654,5 807,312,5 408,1540,4 824,545,5 346,925,1 920,281,2 823,321,4 937,239,2 912,264,4 707,149,4 921,215,3 929,120,4 376,688,3 275,585,3 913,386,3 880,677,2 814,171,5 880,177,3 510,299,4 920,719,4 499,531,4 494,510,4 531,979,4 917,203,1 663,366,3 942,569,1 660,174,2 789,247,4 157,650,5 434,526,4 832,431,4 464,47,3 935,817,4 867,418,3 380,306,2 346,417,4 416,202,4 523,520,4 781,251,3 757,143,4 209,968,4 931,966,4 10,400,3 715,21,5 436,12,4 318,357,3 644,318,3 914,1037,2 939,288,3 540,945,5 507,709,4 839,271,4 759,927,1 781,1665,2 814,664,2 912,236,4 885,745,3 787,619,3 315,161,3 507,523,5 879,221,4 405,628,3 285,231,4 652,306,4 732,219,2 895,871,3 449,229,4 767,470,3 560,193,4 456,370,4 445,878,3 845,229,3 895,173,5 845,715,3 942,231,4 620,1092,4 618,180,4 863,1216,3 486,379,2 504,122,3 886,1472,1 912,97,4 706,1119,4 338,638,4 415,541,1 753,327,3 707,712,4 576,567,3 581,546,4 787,163,3 701,229,4 785,422,5 591,115,4 699,27,3 850,120,4 822,0,4 903,627,3 664,630,2 398,418,3 712,343,5 815,354,2 921,55,1 618,684,3 800,299,5 895,181,4 803,226,4 536,752,2 654,1185,3 806,683,5 531,303,5 900,195,4 862,681,3 889,134,5 292,172,5 671,108,4 888,1005,4 932,576,1 61,465,3 845,425,1 787,656,4 802,241,5 660,180,5 862,871,2 
708,191,4 665,174,4 874,70,2 681,107,3 898,264,4 787,1517,3 452,84,3 756,270,3 436,1062,5 416,342,2 895,216,2 888,695,3 453,631,3 721,470,4 900,167,4 428,256,4 833,287,5 790,180,5 310,495,5 618,194,5 895,879,4 682,471,3 797,422,3 789,410,3 12,157,1 176,747,3 6,340,3 880,482,4 785,124,4 645,348,2 879,139,4 83,3,3 786,1433,1 896,176,5 535,227,5 941,426,5 269,218,5 845,414,2 900,497,4 59,236,4 919,346,4 101,946,3 478,479,5 869,446,4 698,1186,4 780,190,4 918,297,3 822,110,4 333,245,4 229,173,5 285,40,2 681,541,2 391,588,4 853,357,2 617,419,3 863,136,4 757,151,5 290,591,3 58,218,5 740,426,5 566,186,5 888,239,3 850,1027,3 484,300,2 891,283,5 763,293,3 356,1047,2 424,189,3 755,52,3 531,1427,4 662,150,3 486,247,1 758,327,5 297,120,4 747,179,4 849,741,5 435,434,4 931,228,4 404,3,4 592,152,5 57,120,2 797,485,4 765,366,2 46,302,4 620,64,3 746,482,5 346,281,5 804,768,2 449,371,4 930,677,3 773,167,1 921,394,4 726,259,1 653,380,3 738,602,4 929,147,1 893,263,3 454,567,4 853,822,2 408,1069,4 896,408,4 481,244,4 804,224,1 736,186,5 619,173,5 177,193,4 893,1314,3 617,1038,4 307,478,5 898,434,3 622,285,2 540,253,3 775,437,2 932,522,4 934,845,4 832,404,3 653,1164,1 912,233,4 646,14,4 941,486,4 895,96,4 415,845,3 850,891,2 835,133,3 839,142,4 896,602,5 746,624,3 532,548,4 239,241,5 424,216,1 311,660,5 886,870,5 377,182,4 915,81,4 876,82,3 880,128,4 806,172,3 624,483,4 367,6,4 838,1084,5 893,345,4 888,951,3 654,460,2 862,1236,4 885,4,3 654,239,3 821,271,3 221,99,5 693,1019,4 717,404,5 788,1007,4 274,472,3 710,271,5 839,631,3 25,270,3 101,383,2 862,689,4 726,232,4 144,270,4 418,190,4 25,290,3 668,221,3 888,38,2 576,1043,4 555,322,2 636,243,1 798,689,3 732,1113,3 536,546,1 787,192,4 845,720,4 199,379,5 173,10,5 354,1232,4 591,91,5 794,567,3 906,827,5 851,404,3 792,455,3 646,1013,3 326,323,3 681,550,2 858,954,5 463,256,4 935,284,4 384,214,2 896,68,5 910,422,4 127,632,4 747,424,4 235,27,4 71,653,4 537,136,3 882,1040,3 845,478,4 473,185,4 641,471,5 434,178,5 888,211,2 646,236,3 3,358,5 906,124,4 759,840,3 626,21,5 434,297,4 596,325,1 144,227,4 869,1041,2 867,218,2 278,192,2 567,78,4 561,384,2 863,183,4 821,409,1 386,28,1 781,1015,3 928,418,4 763,279,4 550,127,4 757,954,2 886,409,4 692,228,2 118,27,5 649,151,3 716,125,5 886,257,1 928,133,4 315,461,3 638,173,4 78,514,5 806,120,4 480,197,4 302,292,4 629,152,3 355,321,3 906,471,5 789,234,1 888,474,4 464,527,3 789,416,2 534,426,4 491,513,3 918,936,4 652,228,3 267,142,2 931,708,4 708,746,2 644,172,4 659,473,2 660,173,5 837,299,2 641,426,3 591,86,4 617,769,2 664,68,5 499,120,3 820,508,5 620,878,4 885,21,4 720,173,5 631,98,5 221,53,4 906,300,4 101,446,4 763,275,3 594,287,3 473,196,5 805,81,4 822,427,5 668,328,1 869,89,4 294,143,4 624,209,3 908,274,5 864,844,1 221,187,3 536,698,4 573,345,4 895,797,2 710,120,1 751,315,3 587,1060,5 732,289,4 839,610,4 320,510,4 885,95,3 487,477,3 765,182,4 344,384,3 531,312,5 84,280,3 782,871,4 689,584,2 786,897,3 93,672,3 787,11,5 779,496,2 664,273,3 567,29,4 10,82,5 331,1156,4 822,216,3 805,152,4 353,723,2 507,203,3 652,507,3 775,49,5 861,1109,5 373,152,5 910,1202,4 789,273,3 740,264,5 354,688,4 98,171,5 449,180,4 832,301,3 709,179,4 740,193,4 473,610,4 636,1032,3 882,171,4 752,526,4 912,464,2 714,6,3 750,577,4 896,587,4 937,124,3 12,369,1 663,654,3 888,1169,2 740,1151,3 843,209,4 302,402,5 757,750,4 568,256,4 907,184,4 592,731,3 94,430,3 13,194,5 918,49,3 750,86,5 885,731,3 643,329,4 884,948,4 725,248,1 906,85,5 542,60,4 697,203,2 822,238,4 89,155,4 505,215,4 328,1010,3 781,1137,2 930,475,3 559,457,3 715,478,4 789,121,2 938,105,3 371,1089,5 
398,664,3 452,214,3 803,410,3 767,475,4 791,8,3 486,808,2 499,116,4 641,627,3 853,302,3 902,844,1 773,410,1 941,322,3 726,206,5 832,572,1 723,878,1 408,114,2 698,308,3 550,507,4 710,958,5 793,247,4 780,402,4 757,801,3 882,8,4 681,231,3 792,596,3 889,182,3 839,565,5 392,95,4 165,687,3 916,739,5 765,601,4 415,264,5 547,695,4 614,25,4 157,977,3 750,78,4 642,272,3 561,173,5 91,731,3 251,846,4 640,1193,3 879,1058,4 730,169,5 566,222,4 435,422,4 189,99,4 720,719,5 867,454,5 854,530,3 896,870,3 803,83,3 270,214,4 933,24,4 692,143,4 492,357,4 879,11,5 918,201,3 885,149,4 734,244,3 654,635,3 536,174,4 709,171,4 18,7,5 667,881,3 550,33,4 730,126,4 44,277,3 904,149,4 384,197,3 607,516,4 805,227,4 818,339,5 767,247,3 608,893,1 663,13,4 869,442,3 373,78,4 772,0,3 642,71,4 915,1112,4 685,191,5 486,688,1 417,299,3 471,402,5 641,34,2 908,530,4 832,197,4 294,950,5 879,91,4 605,280,4 920,1046,1 664,409,3 804,268,5 853,828,2 82,247,3 884,355,3 48,53,2 626,430,4 863,237,5 441,187,3 366,267,4 850,915,3 795,186,5 706,602,3 795,820,4 114,683,3 688,49,5 708,116,4 846,947,1 656,272,3 53,633,1 268,938,2 362,769,4 559,300,3 118,193,5 940,146,4 920,71,4 794,402,3 868,1133,1 827,1671,2 895,227,5 902,90,5 652,801,2 806,553,4 636,180,4 785,221,4 626,227,4 431,14,4 238,482,5 703,21,2 158,298,3 626,179,5 895,764,4 384,54,2 937,987,3 605,292,5 523,558,3 639,173,5 888,63,5 805,1,3 707,684,3 712,326,2 270,202,4 902,520,5 692,95,4 889,100,2 694,681,1 234,523,5 437,283,2 843,510,3 653,407,5 666,78,3 345,78,5 335,256,4 609,186,4 572,215,4 646,202,3 756,175,5 712,285,3 641,440,1 861,104,3 486,230,1 931,524,5 917,418,3 541,201,3 869,68,4 498,207,4 853,133,4 801,566,4 523,475,3 13,429,5 404,1018,1 895,160,3 129,1141,4 12,309,4 726,1436,2 643,297,4 856,686,1 825,575,4 835,656,5 893,128,4 94,233,2 502,12,3 904,6,4 934,126,4 789,208,1 751,903,4 842,27,3 837,99,4 529,254,4 933,196,5 789,185,3 315,715,5 849,479,5 941,614,3 917,922,4 48,819,1 804,104,2 503,175,3 939,95,5 923,1035,2 891,471,3 772,220,2 874,691,2 641,216,2 931,1396,4 12,171,5 522,406,4 36,209,4 129,372,4 569,878,2 773,1016,3 855,749,5 676,1048,3 158,129,1 797,288,3 842,134,5 10,27,5 923,299,2 930,1151,4 554,317,4 692,297,3 755,70,3 623,927,3 757,747,1 827,381,3 862,312,5 831,872,2 927,332,3 307,653,5 849,493,3 891,4,4 585,568,3 850,351,1 922,147,4 896,454,3 91,110,3 696,819,3 838,531,3 821,431,3 345,163,3 880,442,5 638,450,4 845,167,5 910,237,2 737,433,4 888,454,4 565,522,4 915,52,4 628,415,4 928,514,5 885,1073,2 649,549,3 715,1019,5 536,1399,2 494,224,4 453,971,2 822,761,4 12,721,3 694,894,1 446,14,1 578,434,5 912,788,4 698,1032,4 681,1,3 591,68,5 855,309,3 647,294,4 298,480,3 540,78,5 803,205,3 824,983,5 876,537,4 699,221,3 654,1021,3 605,567,4 451,82,3 609,749,4 531,1311,4 872,293,4 820,78,5 663,1108,4 406,94,3 789,930,2 711,209,5 893,146,3 858,367,3 477,201,4 756,90,4 499,726,2 748,209,4 830,269,4 331,470,4 863,43,4 732,533,3 746,417,5 900,70,4 677,14,3 911,153,4 804,741,3 795,658,3 275,471,3 463,254,4 893,325,3 667,268,5 939,193,5 903,65,4 888,402,3 405,520,3 907,180,3 931,178,5 528,326,4 889,84,1 369,426,5 917,136,5 932,469,4 521,199,4 696,269,5 859,244,3 622,524,4 882,269,4 319,624,4 443,285,2 654,458,2 89,267,4 896,221,4 834,22,4 284,318,3 638,310,3 503,173,4 822,149,4 313,786,2 885,185,4 702,763,2 764,13,5 6,364,4 853,615,4 845,785,4 559,927,3 180,1381,1 632,920,3 129,551,5 620,1227,3 842,76,2 915,505,3 915,1205,2 41,968,5 825,372,3 689,779,4 626,6,5 893,282,3 173,383,1 637,167,4 932,62,2 912,178,3 880,14,3 48,3,2 893,335,3 540,377,5 806,510,5 391,509,4 659,317,3 
900,227,5 765,71,2 428,199,3 111,332,4 61,510,4 777,450,1 740,789,3 918,203,4 927,1006,5 522,873,4 843,55,4 926,379,5 827,197,4 795,154,5 180,332,3 795,274,4 373,272,2 583,226,4 215,415,3 701,878,1 896,450,4 706,1400,3 521,134,5 765,181,4 115,1255,1 421,247,3 796,987,1 874,1421,3 429,120,2 504,68,3 570,1038,3 936,284,4 867,448,3 379,497,4 685,196,5 490,115,5 136,684,5 895,430,3 797,1022,3 496,82,2 643,456,4 929,7,3 487,417,3 384,953,4 895,1283,2 494,520,5 631,719,3 93,93,2 726,61,3 86,432,3 109,721,3 772,654,3 600,117,1 37,405,2 435,80,3 898,495,5 620,94,4 573,99,5 934,716,4 641,930,4 388,133,5 384,204,2 703,317,5 942,1066,2 665,117,3 638,654,3 497,108,3 587,7,5 535,9,4 255,63,5 874,503,5 867,239,5 773,448,1 919,330,3 689,3,3 689,450,4 882,146,2 714,432,2 866,510,5 831,257,3 6,178,5 775,183,4 710,196,4 537,187,4 923,525,3 631,226,3 933,296,5 915,16,4 763,1027,4 763,219,3 772,1017,3 762,1064,5 762,173,4 789,216,4 373,3,2 144,1040,5 471,365,4 845,190,5 285,400,1 863,1046,3 308,878,4 729,6,4 544,449,2 859,1601,3 124,1270,2 723,326,4 550,823,1 157,301,4 830,116,3 733,212,5 879,1422,3 664,239,5 907,321,2 816,244,2 566,49,1 471,229,5 893,478,5 101,10,3 907,68,3 450,333,3 746,97,5 789,116,5 328,185,3 43,156,4 513,207,4 668,914,3 877,530,2 829,225,5 845,608,5 773,174,3 822,174,4 867,435,3 880,476,4 392,140,2 803,259,2 12,274,3 687,348,5 896,483,3 853,1060,1 455,257,4 935,339,4 758,274,4 794,1100,4 904,320,4 941,583,4 669,416,4 108,62,3 840,299,4 229,55,3 942,807,4 752,241,4 903,777,3 856,258,4 538,495,3 478,192,3 795,948,4 866,215,3 920,470,2 888,93,4 895,215,5 795,395,2 860,381,5 680,327,3 840,357,1 757,889,3 882,249,3 720,242,3 828,99,4 664,686,2 279,172,3 592,1220,3 591,1355,4 931,569,4 723,346,4 831,322,3 733,274,4 471,1078,4 721,507,4 93,402,3 818,181,4 797,224,4 882,588,5 497,662,4 494,182,5 78,275,3 716,830,3 531,366,5 773,121,1 869,549,3 227,811,5 261,57,3 789,1038,3 837,404,4 679,317,5 844,1398,3 941,346,5 834,356,5 670,87,4 528,263,2 279,672,4 786,1023,2 901,133,3 864,23,4 639,1066,4 653,1282,1 939,171,4 917,288,2 652,718,3 560,1038,3 834,611,4 915,654,3 726,69,5 587,560,3 789,14,5 864,100,1 906,146,5 763,226,4 881,931,4 88,300,5 647,290,3 918,56,5 408,1511,5 537,198,5 333,136,2 396,181,5 253,495,4 698,274,3 584,169,5 665,45,4 890,530,4 355,293,1 882,78,4 746,178,5 589,545,1 803,236,4 787,63,5 79,78,4 726,1221,1 773,366,2 781,1012,3 297,426,5 715,496,3 726,1248,3 505,524,4 880,754,4 706,735,4 881,495,5 652,1015,3 565,68,4 871,309,4 511,49,5 920,287,3 647,757,2 735,292,4 863,249,3 504,97,4 921,226,4 560,771,4 449,626,3 858,927,3 732,106,4 737,1046,3 587,384,3 434,90,4 433,973,5 405,432,3 4,238,4 757,183,5 871,117,4 795,1406,3 594,825,1 840,270,4 753,116,4 307,1139,4 626,549,1 874,180,4 726,357,2 726,566,2 591,54,4 921,587,4 486,398,5 804,1016,3 891,11,5 879,345,5 101,233,3 868,755,1 896,194,5 751,538,4 850,330,3 829,626,3 895,0,4 895,478,3 909,292,4 822,418,4 605,248,3 827,1621,1 799,275,3 647,1029,2 902,316,4 297,545,3 850,1276,2 861,140,4 859,715,2 833,126,5 565,120,3 427,893,4 624,215,4 793,149,4 860,82,5 521,529,4 646,356,5 404,204,3 845,498,4 513,1073,4 115,299,3 893,557,5 814,432,3 733,741,4 693,525,5 421,184,4 897,299,2 525,293,3 487,14,4 926,14,5 487,196,2 750,1139,2 132,305,4 909,22,4 408,1019,5 816,23,4 863,0,5 63,479,3 888,152,5 620,624,4 9,517,4 710,133,5 693,581,4 895,133,5 118,297,4 504,226,2 75,1153,5 877,7,3 887,268,5 789,741,4 762,517,4 292,10,3 640,337,3 317,530,4 917,210,2 111,327,4 338,505,4 685,434,5 659,567,3 108,1244,2 933,150,3 668,354,2 726,79,4 626,194,4 
455,381,1 933,201,5 845,529,5 789,64,4 880,448,3 853,134,4 388,66,2 323,284,4 689,1089,3 520,567,3 116,131,4 183,1120,4 748,79,1 84,269,3 638,518,4 634,878,3 452,98,3 648,23,4 853,243,3 755,180,4 918,1314,2 386,515,3 632,327,4 869,218,2 728,321,4 850,1046,3 698,522,2 434,202,4 377,46,4 282,99,4 233,693,3 471,221,5 892,470,4 535,93,4 586,887,3 523,660,3 540,62,3 931,378,2 888,69,3 780,186,5 867,155,3 746,30,4 304,85,4 632,957,3 637,95,4 906,739,5 48,61,2 795,88,5 63,654,4 839,605,4 773,227,4 918,167,1 917,274,4 710,659,5 928,187,4 757,352,4 744,509,3 182,273,5 889,270,3 659,327,3 37,939,1 920,180,5 893,318,4 762,38,4 897,323,4 563,244,4 706,87,3 915,38,4 473,317,5 654,1515,3 921,62,3 324,468,4 776,41,5 534,388,4 292,512,5 938,257,4 845,564,2 578,519,4 893,1312,3 906,180,4 895,503,3 710,85,5 877,1064,1 906,286,4 150,384,3 922,455,4 364,236,3 471,124,5 710,201,4 63,257,3 772,426,3 654,427,3 477,67,1 313,807,4 850,695,3 63,27,4 475,745,3 541,419,3 708,52,3 81,169,4 853,92,5 636,846,3 863,217,4 803,210,4 434,918,5 863,46,5 451,513,3 803,767,3 824,115,3 920,86,2 591,70,4 920,727,3 520,519,3 891,480,5 797,484,5 681,626,4 643,116,4 895,707,2 164,155,3 889,478,5 889,514,5 765,704,4 623,318,3 830,314,3 707,124,4 888,1010,3 505,944,4 710,300,4 654,235,3 12,776,1 518,332,3 879,21,4 828,197,4 824,924,4 63,717,4 888,66,2 715,202,4 736,57,4 882,706,3 850,270,5 876,169,5 714,0,5 910,382,3 797,755,3 513,78,4 62,275,4 870,196,3 757,435,3 795,257,4 473,293,3 75,954,4 302,404,4 903,420,5 746,529,5 531,276,5 522,450,5 534,99,5 806,1088,4 720,7,4 681,162,3 290,574,2 497,479,5 781,353,2 638,47,4 720,517,2 492,805,3 200,639,4 938,1053,4 209,78,4 434,126,4 657,194,3 933,194,4 880,227,3 720,734,4 420,330,2 862,878,2 939,180,3 641,621,4 488,268,3 109,938,4 822,124,4 912,143,5 723,307,1 850,143,5 422,619,4 863,181,3 803,38,2 879,825,3 130,286,4 660,134,5 621,212,5 781,1661,4 475,731,3 781,1295,3 787,6,4 644,557,4 917,581,4 84,518,4 794,709,3 935,105,3 6,628,3 533,92,1 876,268,4 795,627,4 405,207,2 933,410,3 289,180,5 626,791,4 795,587,5 845,109,3 649,117,4 404,645,2 860,583,5 800,332,5 915,64,3 888,496,4 623,897,1 424,1109,1 634,14,3 632,97,4 795,96,3 683,1027,4 108,825,3 863,587,3 681,474,3 532,442,3 453,312,5 89,480,5 398,38,2 885,940,2 855,314,5 888,126,4 787,281,4 918,1136,4 16,220,2 658,692,4 843,250,4 791,1046,3 913,370,4 287,326,1 915,707,4 909,116,4 895,186,5 560,95,1 47,322,3 470,419,1 920,691,4 779,659,3 869,648,4 292,1227,1 697,172,5 917,644,4 681,108,3 307,500,4 647,664,2 605,208,4 649,481,3 877,49,4 836,761,2 734,241,5 631,194,5 475,392,4 681,450,3 839,442,5 930,302,4 294,1296,4 816,927,3 540,621,3 755,859,1 843,299,3 882,565,3 197,200,3 942,355,4 882,454,4 795,271,4 310,68,5 737,428,3 781,243,4 884,207,3 895,11,3 797,97,1 698,242,2 592,236,4 290,843,5 384,873,3 118,472,3 416,779,4 528,308,3 196,567,4 833,49,5 748,684,4 709,233,4 718,68,5 789,363,2 558,173,4 567,482,5 206,210,5 918,326,4 847,134,4 304,427,3 896,65,3 495,651,5 14,120,3 253,268,2 895,1405,3 446,736,4 901,992,3 795,708,3 485,300,4 735,992,4 561,228,1 365,287,4 282,210,4 806,28,4 773,427,1 895,232,2 327,22,3 902,976,1 842,140,4 917,432,2 681,823,1 772,566,2 473,467,4 497,495,3 628,209,5 754,871,1 724,320,2 270,147,3 647,228,4 866,49,5 869,9,4 885,239,3 682,608,3 639,826,3 495,173,4 298,117,2 885,79,3 636,244,3 829,14,4 847,746,5 719,312,3 531,587,5 228,302,1 885,186,4 416,505,4 404,435,1 692,11,4 863,209,4 797,1539,4 58,41,5 69,654,4 183,28,3 585,194,4 317,481,5 150,685,3 805,495,5 803,513,4 48,716,2 827,954,3 830,287,1 393,140,3 765,603,4 
320,172,4 642,28,2 786,903,3 741,6,3 931,176,4 542,465,4 746,389,4 867,620,2 781,1527,2 888,819,2 67,126,4 781,1299,2 841,343,1 173,214,5 863,709,2 4,424,2 561,590,4 912,81,3 873,275,4 713,249,5 707,299,4 931,616,4 905,695,4 777,41,5 797,767,4 681,848,2 150,804,4 867,228,3 822,565,4 144,239,5 726,147,2 918,1151,4 547,977,2 267,116,4 732,739,3 184,46,4 637,221,4 711,68,3 485,1225,4 863,30,4 509,298,3 665,146,3 915,13,5 262,134,5 221,587,4 915,178,3 805,175,5 898,427,4 456,120,4 912,126,4 845,611,5 180,712,2 881,426,5 404,35,2 390,175,3 788,507,4 681,185,4 642,630,3 311,812,5 654,1159,3 714,41,5 692,233,2 617,63,4 888,193,5 652,427,1 254,327,2 633,677,2 270,648,3 888,171,4 581,404,3 826,749,3 413,287,5 720,298,3 835,169,5 863,495,5 785,132,5 353,895,4 803,22,4 540,426,4 242,698,4 129,349,4 893,1225,4 801,329,2 534,639,3 685,213,5 910,658,3 434,209,4 636,282,2 890,1196,5 708,171,5 41,131,5 932,230,1 683,172,3 863,143,5 707,148,3 846,443,3 707,475,3 258,171,4 663,482,4 323,762,5 853,256,3 910,142,5 750,238,4 794,117,2 398,224,3 822,76,4 912,0,2 795,49,5 879,89,3 896,227,4 576,61,3 867,469,1 456,221,5 547,16,3 473,110,4 849,180,5 726,86,4 864,918,5 591,301,5 653,7,5 932,432,1 806,434,3 349,180,4 71,641,4 714,247,4 836,285,4 698,1092,3 895,231,3 344,469,4 875,18,5 243,170,5 882,655,5 839,87,4 825,126,5 568,297,3 751,258,5 803,565,4 647,453,3 178,915,5 786,325,4 725,247,2 343,528,5 221,541,2 787,645,3 425,662,4 550,110,5 897,682,3 879,172,3 450,325,4 748,1027,4 668,663,4 607,326,2 668,95,2 702,180,5 564,706,5 803,509,5 939,7,5 899,428,2 877,273,3 566,513,5 644,88,4 900,545,4 804,382,2 846,6,3 189,6,4 923,282,4 76,526,4 867,135,5 879,809,3 880,10,4 576,239,3 880,173,5 902,222,5 902,55,5 585,95,4 505,70,5 798,318,4 544,265,2 756,470,4 29,750,3 863,93,4 772,144,3 803,88,4 845,448,3 644,31,5 399,747,2 913,154,5 183,503,4 34,263,2 741,474,4 118,474,4 900,143,5 620,779,4 696,590,4 526,639,4 565,209,4 584,85,5 939,152,2 346,105,2 654,1140,3 880,133,5 891,647,4 494,431,5 644,513,5 454,434,4 787,629,2 904,716,1 748,931,3 620,172,4 587,841,3 666,22,3 617,431,5 870,510,2 89,219,4 707,346,3 285,928,4 586,875,2 483,87,4 931,651,3 484,285,2 654,1634,3 680,309,3 660,191,4 845,1028,1 644,468,5 294,510,5 906,820,5 797,704,4 719,897,4 931,565,4 114,771,4 647,249,4 642,658,5 626,522,4 909,281,3 781,1159,2 885,221,4 628,222,5 17,136,5 715,722,4 895,428,5 720,198,4 651,256,2 627,1295,5 932,181,4 797,1410,1 510,259,4 483,230,2 904,325,3 933,182,2 502,57,4 591,324,2 617,1224,2 492,133,3 654,577,2 12,325,3 772,12,4 68,333,3 773,500,1 640,1038,4 724,285,5 654,177,4 896,199,5 933,268,2 373,525,4 929,650,3 850,181,5 740,133,5 565,10,3 345,181,5 884,418,4 384,1,3 199,171,5 830,686,2 565,630,4 353,654,3 504,418,3 654,1013,3 547,54,5 882,461,5 863,90,5 488,244,3 867,411,5 430,293,5 267,23,2 822,400,4 755,257,3 795,721,3 642,239,5 634,327,3 646,172,5 922,761,4 390,1162,2 932,398,3 895,195,3 419,1346,3 757,300,3 748,641,2 644,708,3 785,101,4 103,339,3 755,741,3 504,258,3 344,90,4 416,3,3 708,807,4 941,538,3 748,228,3 823,288,2 729,272,2 803,754,3 662,692,4 647,725,3 891,202,5 765,7,5 652,185,5 459,99,5 578,68,2 881,928,1 538,318,5 666,268,5 621,124,3 8,49,5 693,51,4 404,64,1 902,466,3 609,351,1 912,429,2 715,658,4 653,153,3 630,1526,2 930,244,4 692,158,4 855,748,3 706,1162,4 772,182,4 786,301,3 673,1196,3 747,180,4 58,29,5 801,259,4 666,961,2 845,1187,2 642,120,4 393,650,4 215,545,2 912,116,1 893,690,3 55,166,3 726,248,2 917,1100,4 293,839,3 392,450,3 662,123,3 720,144,4 787,54,4 867,49,5 434,1013,2 576,469,5 895,941,4 
669,198,4 526,186,5 883,735,3 658,493,4 654,1498,3 898,462,4 513,418,4 550,385,1 783,271,4 889,434,5 786,1432,3 523,169,4 853,292,5 492,203,5 393,203,5 306,432,5 642,384,3 839,630,4 10,728,4 654,1069,4 795,509,3 424,897,3 879,194,4 455,404,1 781,934,2 479,49,4 805,460,4 391,512,5 748,99,3 765,21,3 720,196,4 313,364,3 360,193,4 697,422,2 892,723,3 915,473,4 543,337,2 775,655,5 526,660,5 795,548,3 84,1017,4 674,462,5 533,147,4 12,745,3 941,312,3 745,182,4 116,183,3 775,176,4 681,384,3 825,396,3 325,610,3 561,193,5 587,1410,1 711,397,4 428,842,1 302,400,3 551,126,4 842,269,4 858,1131,3 755,120,3 388,484,5 797,207,3 313,826,4 685,208,5 605,147,3 940,123,5 803,422,3 292,142,4 647,411,1 673,256,4 346,209,4 772,209,2 938,688,5 641,400,4 723,306,3 760,1156,5 882,196,4 698,282,4 118,6,5 404,186,5 732,123,5 302,122,4 369,389,1 513,208,3 576,409,3 935,280,4 846,1085,4 824,256,4 91,1215,4 449,335,3 879,367,1 869,217,4 710,703,4 832,1011,4 560,1058,1 127,415,3 14,248,1 337,142,2 653,97,5 895,298,1 822,192,5 888,1217,4 805,121,3 1,288,3 709,332,3 93,194,3 654,1120,3 710,994,4 393,671,3 896,7,3 925,321,2 94,214,4 848,426,4 478,914,4 245,446,3 570,44,4 935,951,4 883,581,5 874,477,4 921,228,4 531,490,5 90,1191,4 787,173,2 61,133,4 698,761,3 560,378,2 337,516,5 289,233,3 457,761,3 523,581,3 529,175,3 331,747,4 746,988,3 532,1047,3 641,27,5 882,311,3 473,654,5 179,52,5 870,55,5 343,49,5 921,450,4 534,949,3 310,746,3 649,120,3 531,202,5 180,742,1 767,743,3 895,134,3 362,238,3 853,921,5 560,654,3 885,42,2 388,14,2 457,483,5 405,491,4 536,549,2 662,186,5 398,544,2 524,99,4 912,309,3 757,97,5 939,55,5 499,285,1 910,312,2 879,1295,3 494,195,3 921,190,3 513,271,4 757,345,2 536,854,3 789,471,2 822,458,4 27,4,3 270,1116,3 629,97,5 177,160,5 885,505,4 640,510,5 797,706,2 578,6,3 869,243,3 843,180,5 513,167,4 804,27,3 560,848,2 792,590,4 579,596,1 757,170,5 657,734,3 899,120,2 654,17,3 888,728,3 619,27,4 761,514,5 869,22,4 306,471,3 797,768,2 918,6,3 639,90,4 91,728,4 912,474,4 921,289,4 523,38,5 479,526,4 84,315,3 740,741,4 899,1027,2 732,125,2 941,268,2 832,32,2 779,182,2 846,70,4 83,281,4 879,268,4 723,285,1 513,21,4 328,49,4 333,208,3 550,99,4 726,549,4 763,281,4 755,401,4 720,238,4 268,638,4 653,637,4 675,301,5 794,704,4 830,316,4 920,172,5 218,905,4 592,50,3 839,581,5 720,936,3 688,149,4 748,811,3 882,404,3 44,741,4 486,558,3 887,791,5 803,366,3 235,134,2 312,650,3 820,273,5 648,297,4 877,99,2 681,242,1 921,150,5 711,485,4 730,610,3 870,895,3 746,523,5 434,267,5 629,68,3 620,576,3 888,201,3 242,630,4 867,566,1 681,147,3 347,116,4 372,366,3 842,589,3 670,21,4 607,194,1 881,194,5 781,258,1 711,87,4 879,275,4 805,587,4 84,427,5 513,87,4 121,56,2 665,120,3 704,192,3 822,63,5 918,14,5 428,195,4 221,411,1 157,106,3 915,592,4 664,482,4 384,1069,5 588,332,5 918,474,3 522,13,5 417,1312,2 623,312,5 752,285,3 617,192,4 713,290,3 142,285,2 756,55,4 186,172,5 915,830,1 560,659,3 0,239,3 837,6,5 531,426,5 923,835,3 880,217,4 845,240,4 942,185,5 939,354,1 814,133,4 148,339,4 9,616,5 12,916,4 673,596,3 882,691,3 869,209,4 720,260,3 678,62,3 392,258,4 874,320,3 918,200,4 863,11,5 698,929,2 553,201,4 222,1051,1 681,174,3 846,217,3 920,131,3 853,224,1 654,734,3 773,428,1 725,534,3 88,85,5 770,188,5 639,692,5 616,447,3 233,938,2 302,1334,3 849,81,5 797,379,3 416,184,3 842,208,3 654,422,3 333,189,4 862,287,4 304,463,3 880,303,3 133,293,4 888,450,3 647,1270,4 787,186,4 451,500,3 822,746,4 197,938,3 514,299,5 728,361,4 749,302,4 626,734,4 554,504,4 825,331,3 645,677,3 494,181,5 21,404,1 660,248,3 507,513,5 863,101,4 895,738,2 
293,985,3 656,293,5 245,256,4 795,565,4 706,1478,5 291,180,4 654,1067,3 869,791,3 879,594,1 785,126,4 434,426,3 637,449,1 922,1027,4 292,167,4 298,1067,3 915,943,2 907,146,2 151,762,5 42,99,4 891,186,5 658,196,5 846,103,3 449,234,3 41,594,1 150,1046,2 304,12,3 891,177,5 22,678,3 861,479,5 846,755,1 642,434,5 647,152,4 585,171,4 475,93,2 233,723,4 167,930,3 600,95,2 915,1267,3 915,380,3 726,290,4 840,343,3 861,1198,2 536,640,4 649,238,3 880,332,5 563,116,4 882,173,4 605,595,4 709,417,3 61,450,3 522,522,3 453,471,3 718,422,3 862,748,2 781,1390,4 917,657,3 550,414,4 428,785,2 888,878,3 193,167,5 710,558,3 789,88,4 942,214,5 773,209,1 617,287,3 918,507,5 803,133,4 598,475,4 654,1,3 850,156,4 835,237,4 272,312,3 845,517,4 803,479,5 729,300,1 406,152,4 654,510,3 646,70,4 628,68,5 513,80,4 854,44,3 706,489,2 587,97,1 529,173,4 182,648,4 754,886,3 378,576,4 898,227,3 619,833,2 27,95,5 933,460,4 475,244,4 101,549,2 558,659,1 832,107,2 693,605,4 669,134,3 434,608,4 388,708,4 223,554,3 886,182,1 898,134,4 893,936,4 763,945,4 874,526,4 890,14,4 647,809,4 457,0,4 942,442,2 880,99,4 918,285,4 787,290,4 535,486,4 711,366,4 396,479,5 58,928,2 623,99,5 473,470,3 661,9,4 917,27,4 832,1016,4 83,78,4 931,670,3 912,41,3 918,288,3 736,195,3 654,314,4 266,157,4 806,234,1 714,657,4 759,287,4 886,7,4 891,384,3 935,297,4 726,160,4 933,227,4 884,96,5 918,818,3 697,605,2 650,284,4 638,161,3 546,318,4 621,201,4 631,94,5 839,488,3 275,1482,3 558,86,4 312,741,3 689,721,3 893,876,3 591,894,3 109,62,3 659,1049,4 286,940,3 684,323,3 896,925,4 756,1239,3 692,976,3 834,179,5 647,207,5 787,379,3 692,663,2 505,565,4 500,121,4 487,293,4 832,205,4 259,269,5 494,95,4 843,1038,4 885,229,2 917,486,4 290,716,3 536,342,2 638,785,3 797,1296,3 715,256,5 223,590,3 652,212,2 256,13,5 863,418,4 757,7,5 889,203,4 462,748,3 805,49,5 188,1055,3 867,474,4 931,482,5 485,305,1 885,473,4 373,1196,4 931,156,4 681,768,2 757,211,4 384,420,2 886,933,4 712,301,4 663,136,3 804,1090,2 835,257,4 895,167,4 405,215,3 662,107,2 624,677,3 495,276,5 789,221,3 621,206,5 101,448,4 424,4,1 846,734,4 398,889,2 775,440,2 818,320,4 820,1059,5 941,510,4 757,199,5 757,1038,5 333,192,4 451,81,3 934,256,2 415,163,5 775,436,1 706,695,4 560,71,2 269,184,5 883,299,1 536,1024,1 747,188,4 885,287,4 420,207,2 523,234,1 451,22,2 161,825,3 98,325,3 908,299,5 658,318,3 842,203,3 404,1333,1 459,1136,3 542,1173,3 926,455,2 326,177,4 48,366,3 360,442,3 193,264,4 906,812,5 845,195,4 487,741,4 909,331,2 706,746,3 124,662,3 900,475,5 647,179,1 713,150,3 896,484,3 549,180,5 682,299,3 226,99,5 756,232,3 61,171,5 921,211,2 313,1093,1 830,11,5 773,985,1 699,530,4 513,184,3 178,352,1 918,741,4 180,592,1 553,596,4 654,155,2 880,229,4 933,314,4 688,117,4 803,467,4 654,210,3 749,327,4 907,199,2 726,1015,3 814,431,5 10,51,3 609,504,4 862,690,3 935,110,4 473,747,3 839,366,4 42,754,3 521,204,4 717,750,5 752,346,2 662,322,2 707,507,4 456,484,4 918,324,4 605,155,4 777,185,4 896,735,3 206,684,3 630,309,4 755,244,3 624,3,4 818,314,5 670,471,5 744,63,5 150,610,4 614,527,4 0,6,4 810,288,2 746,462,3 846,256,3 594,814,3 746,208,3 388,945,3 278,712,3 343,18,4 450,885,4 720,454,5 708,577,4 918,1085,4 895,571,2 757,24,4 737,192,5 591,339,5 797,960,1 770,215,5 782,749,4 880,523,4 390,321,3 428,529,4 624,99,3 278,461,3 226,1067,4 662,306,4 648,281,4 383,878,4 61,738,2 752,70,5 605,716,3 453,366,4 647,82,4 847,602,5 882,918,4 937,1032,2 920,81,3 585,1206,2 638,646,3 633,928,3 801,217,3 209,401,5 795,388,4 746,944,4 579,342,5 541,167,4 353,12,3 451,606,5 781,1141,3 928,473,4 932,733,2 550,672,4 730,14,4 
885,228,3 709,94,3 485,15,3 906,117,4 497,530,3 93,422,4 117,187,5 750,587,5 536,184,4 602,449,3 464,356,4 145,268,4 845,201,5 681,222,1 709,1018,4 658,442,5 658,641,2 853,918,4 254,742,1 408,1158,2 96,171,4 649,37,3 832,194,5 751,357,4 785,120,2 649,20,2 885,240,4 746,188,4 665,477,4 526,153,3 636,124,3 655,325,1 853,287,5 415,117,2 862,899,3 879,122,4 22,1003,3 398,657,3 681,210,4 832,641,3 791,150,3 313,116,4 406,394,1 861,430,5 665,804,4 93,938,4 95,199,5 704,654,3 405,22,4 664,420,4 69,677,3 792,256,4 368,334,2 457,1108,4 302,872,3 404,417,5 115,144,2 750,141,4 392,891,3 453,311,3 41,1047,1 941,303,5 509,260,2 489,99,3 576,95,4 654,275,4 200,11,4 404,1539,2 498,165,5 805,16,4 652,428,3 798,483,3 706,1140,3 488,753,5 847,641,5 880,355,3 555,208,5 765,484,3 652,1266,1 847,178,5 455,97,3 862,1233,3 884,27,4 434,400,3 801,257,5 428,586,3 765,195,3 918,317,5 421,772,3 708,566,2 756,349,3 863,500,3 912,184,4 550,39,1 531,1161,2 573,895,2 939,146,4 642,126,5 931,208,5 801,423,2 747,649,1 740,120,2 618,749,3 822,97,5 39,1037,1 654,895,4 590,44,5 896,741,3 879,271,5 616,671,3 803,233,4 647,28,2 902,117,4 659,293,3 665,47,4 665,434,4 449,189,4 746,20,2 879,470,4 652,778,1 942,209,4 626,173,3 57,1007,1 321,32,4 592,130,4 798,172,5 505,1018,5 618,553,3 932,218,1 740,780,4 797,708,5 333,865,3 654,750,3 806,596,4 889,192,4 765,612,3 485,1136,5 487,753,4 654,795,2 687,340,5 446,256,3 879,696,2 885,450,3 657,476,3 453,750,4 933,238,4 404,734,5 795,622,3 387,97,5 660,196,4 373,275,4 913,735,3 797,203,4 213,187,5 910,6,4 9,229,4 404,202,1 759,119,1 587,365,5 479,196,3 28,179,4 544,176,3 924,677,3 200,691,3 777,149,3 245,940,1 93,207,4 938,475,5 779,484,4 300,281,4 935,235,5 916,1013,2 756,968,3 502,78,5 428,233,4 302,1096,3 824,247,4 827,701,2 414,203,4 741,0,4 831,321,3 662,149,5 704,226,4 620,67,4 390,650,5 670,49,5 531,494,4 900,567,5 874,11,5 850,865,3 704,842,2 648,49,4 919,299,3 234,1104,2 22,58,4 896,678,5 924,217,4 663,517,4 711,60,3 428,509,4 664,195,4 228,885,1 543,327,3 531,104,3 886,415,2 221,636,2 329,236,4 895,14,3 333,141,3 579,124,3 28,305,4 783,309,4 631,844,4 599,430,3 542,507,4 473,514,5 750,97,5 885,650,5 789,225,3 555,987,1 611,116,4 302,724,1 920,251,4 536,123,4 591,342,3 12,496,5 397,132,3 775,217,4 942,684,4 607,110,1 654,362,3 845,714,4 939,609,1 899,507,3 577,287,3 869,184,4 663,528,4 731,244,4 917,173,3 735,1088,1 895,69,4 384,655,5 705,324,1 597,268,3 652,226,3 492,24,4 649,560,3 864,188,4 906,496,5 853,845,3 560,548,2 629,741,5 892,121,2 932,213,3 302,91,4 703,647,5 888,136,4 773,1078,1 462,270,1 525,120,2 799,299,4 797,948,3 654,143,3 536,278,2 323,339,5 653,94,4 365,674,4 785,8,5 842,224,2 897,747,4 746,10,5 222,328,2 221,595,3 672,291,4 930,361,3 692,47,5 871,863,3 640,512,5 847,475,3 917,44,4 918,476,4 882,567,3 673,1619,4 653,10,4 465,898,5 359,878,3 888,596,3 486,840,2 28,656,4 485,925,2 822,134,4 275,539,1 906,755,4 772,11,3 675,302,4 64,1040,3 163,328,4 587,814,4 652,796,2 866,474,5 862,750,4 436,25,2 398,551,1 483,182,4 730,139,2 839,63,4 767,24,4 434,767,3 839,749,4 942,11,5 934,741,5 886,86,5 855,322,2 4,65,1 415,420,5 313,37,5 585,230,3 748,157,3 681,590,3 852,677,4 880,483,4 692,176,3 479,1120,4 879,1196,3 822,473,5 932,214,3 193,185,5 522,953,5 570,113,4 616,356,4 754,287,1 657,272,4 662,814,4 623,823,2 712,310,3 829,309,4 462,763,2 937,476,1 504,500,2 614,179,4 487,143,3 673,826,4 797,719,5 9,469,4 703,271,5 230,299,4 926,6,3 746,415,5 888,429,4 832,4,1 668,407,5 889,133,5 910,88,4 527,426,4 877,65,3 714,167,4 839,706,5 668,646,5 863,177,4 188,1019,4 
720,1264,3 248,49,4 486,52,2 868,241,2 638,195,3 765,632,4 675,538,4 880,475,2 621,98,4 621,158,3 857,268,4 551,180,3 478,221,4 922,173,5 447,257,4 473,504,5 12,144,2 58,59,5 770,136,4 915,27,4 435,238,3 803,143,4 660,706,5 773,214,3 473,670,3 853,814,2 659,484,3 53,249,4 715,841,3 86,65,5 71,1147,4 415,897,4 918,417,4 559,471,2 668,245,4 896,21,5 701,221,5 902,1072,3 665,506,3 585,410,2 650,115,2 794,654,3 881,596,4 896,81,5 885,548,3 795,285,2 225,369,3 847,637,5 660,317,5 815,312,5 893,514,4 502,488,4 875,186,4 479,78,4 510,886,5 891,173,5 592,121,1 523,125,4 832,653,5 726,634,2 412,288,4 918,267,3 682,316,4 302,203,4 891,844,4 157,384,3 619,923,3 654,241,4 697,661,2 520,107,3 404,780,5 647,560,2 850,1088,3 533,6,4 747,63,4 922,180,5 342,216,3 895,94,4 416,410,2 513,1114,4 881,121,2 939,1400,1 918,4,4 659,2,1 310,221,4 773,230,1 173,406,1 718,581,3 918,1113,3 262,483,4 732,323,4 765,61,3 434,1068,4 484,268,4 476,793,4 209,1117,4 454,123,4 539,180,4 295,284,5 359,284,5 342,156,4 772,923,1 3,361,5 867,227,5 594,743,3 651,983,2 845,293,3 496,718,3 157,207,5 667,354,2 496,683,3 553,526,4 341,762,3 663,56,4 367,572,3 892,236,4 912,654,4 819,270,2 783,750,4 815,287,4 297,844,3 850,352,3 852,329,1 797,87,4 594,239,3 737,526,5 384,175,2 434,635,4 261,630,4 715,605,5 206,514,5 889,141,3 449,101,4 682,322,3 829,0,4 936,267,1 795,569,2 897,750,3 746,13,3 719,267,4 129,299,5 726,412,2 715,473,5 114,78,4 480,497,5 69,342,4 115,1257,2 857,306,3 398,23,4 631,450,4 918,581,5 775,193,4 895,237,3 658,251,4 846,475,4 681,238,3 528,331,4 503,574,3 605,618,4 279,865,3 584,1154,5 866,180,5 501,342,5 377,43,3 732,0,2 275,64,4 448,543,3 675,173,5 487,677,2 657,466,4 896,131,5 895,270,1 346,94,4 239,348,1 910,98,3 300,865,4 892,258,3 789,272,5 803,27,4 794,927,1 795,442,2 261,475,3 560,209,3 379,49,4 834,632,5 832,825,2 930,120,2 575,209,4 296,123,4 325,226,3 863,62,3 746,116,2 212,116,4 82,1040,4 36,6,4 541,195,4 732,149,2 520,650,3 620,49,5 879,326,3 333,70,3 835,653,5 659,94,2 822,227,3 623,979,4 20,979,2 486,965,5 726,807,2 921,405,4 416,992,3 278,94,3 880,285,2 874,511,5 633,739,2 689,654,4 576,692,1 252,120,5 434,55,5 867,200,2 492,108,4 285,168,3 486,63,5 654,580,2 633,675,4 412,459,3 781,346,1 853,663,4 850,146,4 845,662,4 912,587,3 879,1022,2 921,448,4 498,429,3 879,70,4 607,217,4 933,222,5 150,435,3 177,214,5 517,0,4 543,312,5 787,561,3 654,81,2 772,363,4 542,317,3 765,172,4 595,294,4 400,277,4 589,1013,3 937,762,4 935,1085,3 901,332,3 838,409,1 599,664,5 896,160,5 900,95,5 918,1047,3 922,830,4 699,317,4 648,470,5 649,167,4 537,711,3 560,161,3 305,302,3 846,10,3 372,134,1 455,484,4 435,347,4 664,720,3 871,279,3 184,49,4 796,126,4 663,691,3 617,659,3 10,736,4 774,301,3 626,678,3 360,282,4 624,21,3 794,95,2 654,185,3 292,0,2 935,312,4 880,624,5 678,747,4 896,198,4 874,1072,5 746,484,5 877,706,2 677,236,3 536,689,2 591,430,2 891,422,5 488,345,5 331,225,5 65,281,3 726,840,3 565,172,3 893,169,4 677,741,4 863,228,4 108,1015,5 704,698,5 609,161,5 756,78,4 775,707,5 404,391,5 839,167,5 565,155,4 923,1010,3 681,1216,3 646,78,4 789,68,1 750,90,4 681,270,4 462,0,1 920,14,4 302,332,4 900,12,1 93,90,5 398,77,3 747,514,4 762,46,3 255,95,5 863,99,5 632,317,4 893,309,3 536,189,4 788,283,3 711,70,5 888,316,4 66,755,3 454,619,3 535,303,3 901,249,4 773,392,1 882,212,2 846,120,3 928,22,3 526,128,2 471,256,4 918,220,4 340,334,4 843,23,5 726,227,4 215,26,3 865,304,2 566,491,4 275,1035,2 881,81,5 628,356,4 183,261,5 456,387,2 536,120,1 384,714,3 275,54,4 899,199,2 750,27,5 839,1017,3 629,0,4 594,272,3 701,448,3 475,318,1 
898,50,1 853,169,4 12,852,1 895,342,1 499,369,3 565,878,2 932,545,2 302,595,4 405,557,3 912,201,4 879,602,5 531,451,5 896,192,3 920,78,4 495,704,2 188,317,5 406,194,4 326,55,2 654,339,3 795,738,5 843,1098,2 885,230,2 449,402,4 789,824,3 762,624,4 293,92,4 886,37,5 15,590,4 40,151,4 830,63,5 891,213,2 787,549,3 449,220,4 487,872,3 726,282,2 873,149,4 748,24,4 940,116,5 715,198,4 909,251,2 931,1511,5 670,158,5 765,964,3 660,417,4 454,49,5 893,380,3 446,227,4 687,897,5 359,82,4 760,292,4 242,366,3 404,1005,1 278,384,4 165,314,3 937,332,4 883,380,5 933,99,4 755,88,4 654,868,2 653,534,3 864,301,5 880,431,3 617,730,2 177,409,4 532,845,2 41,825,3 789,62,2 544,509,3 838,1008,3 896,181,4 832,208,5 591,513,5 802,338,3 621,134,4 693,509,5 791,596,3 869,1,2 642,495,4 814,541,4 665,432,3 180,105,2 652,175,3 289,992,4 538,203,4 485,949,4 627,7,2 829,70,4 924,681,4 406,746,3 55,558,4 644,21,4 795,433,4 268,16,2 895,630,2 434,1551,3 922,244,3 176,126,5 551,1094,3 795,187,2 870,172,5 620,312,5 654,127,3 400,750,1 467,81,5 662,1118,3 912,174,5 889,654,3 773,6,2 91,198,3 471,149,3 775,186,4 878,14,4 866,275,1 929,762,3 591,356,4 520,342,3 585,21,3 302,460,4 886,417,4 636,930,1 915,221,3 587,221,3 895,200,3 473,186,5 910,500,3 576,707,3 793,419,4 938,408,4 885,656,5 264,287,4 852,687,3 118,1264,3 617,457,3 209,97,5 853,462,3 563,117,4 526,68,4 617,61,2 197,474,4 715,481,5 703,527,3 933,404,5 817,689,3 404,1486,1 536,182,3 751,881,4 924,55,3 688,409,1 780,267,2 285,412,3 654,254,3 845,1068,4 503,329,4 12,565,5 279,391,5 644,317,5 634,681,2 177,11,5 378,150,4 452,825,1 197,342,3 765,215,3 827,13,4 772,474,3 804,46,5 845,491,3 110,310,4 879,244,2 710,41,3 900,565,5 814,190,5 935,926,4 840,872,4 845,683,5 748,1087,2 421,566,3 692,55,4 611,258,3 617,402,4 699,422,4 665,384,3 848,173,5 590,865,3 523,703,4 636,739,2 662,14,4 762,474,4 703,492,4 449,22,5 654,22,3 343,587,5 881,227,5 859,889,2 302,1039,1 695,747,1 89,315,5 535,147,4 15,731,5 893,897,4 666,68,3 267,95,5 497,134,5 498,623,2 880,402,3 827,380,3 654,899,3 681,789,3 691,1046,2 892,1217,3 915,90,4 473,254,4 693,88,4 478,430,4 878,1283,3 795,237,3 419,189,5 615,338,3 789,67,3 918,3,1 127,68,4 681,695,4 797,1163,3 889,214,4 248,852,4 607,189,4 653,2,3 922,688,3 10,426,4 863,627,4 682,306,3 541,281,3 544,167,4 899,833,1 647,1,4 42,420,3 617,274,3 173,950,1 926,366,5 670,565,4 567,519,2 665,134,4 797,193,4 523,1045,3 542,182,4 297,124,3 607,442,5 942,236,4 824,129,2 755,170,4 898,94,5 715,614,3 795,44,3 902,1100,4 5,467,3 711,167,2 833,474,5 676,288,1 439,360,5 373,53,4 781,338,3 866,167,4 27,447,4 159,10,4 788,1016,3 589,739,4 94,249,4 917,134,1 853,23,4 748,865,3 803,194,5 12,261,4 713,2,5 547,285,1 405,723,3 914,269,3 561,184,5 523,460,3 189,257,3 868,14,1 716,299,5 386,952,2 882,48,3 850,827,2 185,11,1 885,1207,3 642,447,3 405,443,3 75,263,3 344,247,5 12,392,3 726,940,2 653,69,4 523,3,4 775,144,2 915,720,4 714,63,5 928,0,3 646,72,5 372,232,3 900,678,4 789,245,4 384,356,4 787,962,4 806,312,5 631,526,4 789,48,3 94,1226,2 180,1355,1 666,209,3 915,280,3 415,790,2 681,228,4 393,23,5 851,259,3 748,166,2 695,301,5 867,197,5 642,155,5 12,126,5 345,158,4 926,273,1 341,151,4 403,271,4 617,24,2 186,136,5 665,513,4 239,357,2 282,290,2 773,43,1 885,53,3 6,591,5 733,96,4 405,601,3 523,258,3 731,874,1 507,628,4 372,87,4 877,59,4 795,1302,2 693,142,4 297,548,4 484,345,4 636,675,3 845,1044,3 473,283,4 853,839,2 210,527,4 824,384,5 787,176,3 715,69,4 882,330,3 750,82,5 63,173,5 795,610,4 497,516,4 792,281,4 689,1272,3 849,293,5 654,1647,2 765,434,3 671,1022,2 267,218,3 
642,143,4 899,470,2 822,317,5 705,147,4 587,552,4 886,364,5 442,332,5 25,283,3 654,46,3 789,6,4 720,27,5 561,317,3 462,15,4 879,184,5 600,356,4 839,429,5 683,762,2 742,307,2 469,318,3 386,285,2 846,410,1 353,709,4 12,473,4 753,476,5 641,992,4 763,131,5 723,747,1 652,983,4 682,304,4 310,370,5 448,639,5 504,723,4 206,392,4 805,178,5 755,172,3 449,733,2 61,203,3 513,1599,4 789,677,3 781,1398,2 863,465,4 526,85,4 536,483,4 661,274,4 935,249,5 693,68,5 592,317,5 534,954,3 803,159,4 942,470,5 726,420,5 163,8,4 55,677,4 915,50,2 895,117,2 840,677,4 724,8,4 668,190,3 789,248,3 835,88,4 4,365,3 9,698,4 711,461,3 933,616,4 866,174,5 434,823,1 886,1078,1 706,136,5 837,454,4 900,747,4 310,50,4 671,254,2 344,199,4 473,212,4 787,725,4 930,309,3 289,65,4 427,301,5 467,962,5 730,605,3 404,1054,3 714,38,3 802,302,4 787,99,5 795,99,3 689,401,3 543,258,1 806,417,4 882,558,3 143,283,3 880,264,5 705,681,2 839,428,3 148,302,4 716,261,4 772,709,3 850,980,1 617,180,5 760,14,5 649,431,4 664,565,2 649,159,3 689,762,4 479,184,2 763,217,4 926,420,4 540,264,5 451,384,4 835,792,2 696,121,4 884,401,3 747,497,4 776,817,5 856,546,3 895,88,5 915,1108,3 647,1040,3 941,192,5 255,221,4 45,331,4 472,245,5 654,380,3 507,377,5 825,180,5 839,299,3 825,194,5 620,567,5 660,273,4 937,475,4 845,132,4 40,179,5 933,228,4 605,500,4 662,236,4 268,88,2 885,383,3 805,55,5 512,684,4 882,59,5 773,449,2 879,548,4 659,808,2 406,387,2 924,184,4 147,237,4 939,169,4 801,412,4 392,321,4 606,493,5 415,71,2 618,175,5 94,471,5 869,482,5 937,8,3 160,196,3 892,409,4 696,1011,1 767,15,3 367,52,2 681,426,4 853,237,5 711,233,2 782,880,4 144,435,5 449,670,3 632,55,2 536,641,4 108,225,5 503,1089,4 664,237,4 605,918,2 536,418,2 697,426,1 618,561,3 452,731,3 757,539,3 496,168,4 609,69,4 405,192,4 487,238,4 889,1038,4 795,698,4 753,14,5 835,191,5 398,793,3 926,78,3 529,1225,4 534,275,3 402,150,4 880,825,1 6,38,5 523,942,3 544,231,3 624,422,4 861,55,3 797,587,4 898,68,3 499,150,3 71,1050,4 91,245,4 471,139,3 591,188,5 880,167,3 605,824,5 803,731,4 781,1526,2 520,22,3 658,1167,4 534,565,3 883,257,5 594,1141,5 638,738,3 405,564,3 649,731,3 895,471,2 669,484,5 662,826,2 795,1056,2 696,6,5 611,1059,4 652,711,3 436,605,4 847,731,5 757,343,3 266,68,4 919,331,3 866,269,5 61,236,3 531,203,5 631,738,3 786,304,3 829,210,4 915,529,4 120,24,5 888,169,4 278,151,5 917,1170,4 693,484,4 915,142,3 307,41,4 652,184,2 119,743,4 591,267,5 664,133,4 483,210,4 922,545,4 814,434,4 832,615,5 12,321,3 862,345,5 536,215,3 209,204,4 832,29,4 929,244,3 572,275,3 478,21,4 888,179,4 255,264,4 850,594,3 866,149,5 520,221,4 285,96,4 781,1095,2 757,226,4 907,647,4 678,167,5 279,594,3 886,23,5 621,182,4 933,553,4 550,66,5 861,741,5 879,800,3 850,1024,2 809,287,3 566,386,4 453,529,2 58,619,4 532,747,3 596,180,4 39,342,1 921,171,5 853,293,2 592,300,4 778,1027,4 392,1530,4 896,621,3 427,753,4 716,887,5 697,524,1 585,79,2 665,195,3 932,549,1 6,124,4 326,1128,2 877,19,2 51,332,4 836,24,3 579,270,5 149,123,2 311,637,5 372,68,4 91,38,3 46,303,3 617,6,4 675,268,2 664,156,3 867,1097,5 623,235,3 494,497,3 847,678,3 795,793,4 713,283,3 681,561,2 411,213,3 550,654,5 578,381,3 614,134,4 900,55,1 708,233,5 261,726,3 676,507,5 745,402,4 542,61,3 574,175,4 693,469,4 287,97,5 553,327,4 889,523,4 791,275,3 859,845,2 566,1011,3 887,68,4 703,678,2 536,1162,1 840,332,4 681,800,3 60,309,4 843,167,4 723,749,2 746,303,4 157,647,5 547,1010,2 714,117,2 734,116,3 794,428,3 425,608,3 129,1057,5 697,427,1 63,47,5 931,1572,4 852,287,4 233,701,2 830,322,2 893,222,4 550,530,5 536,60,4 808,306,5 17,515,5 373,273,4 863,769,3 
886,186,4 834,184,4 863,540,2 681,1066,3 635,117,5 662,1010,3 688,595,3 465,225,4 888,675,2 359,954,5 272,303,3 649,108,3 600,742,1 297,203,4 338,292,5 654,311,2 452,779,3 327,346,5 886,696,1 377,54,4 676,299,5 345,195,3 912,173,5 511,324,2 404,379,2 515,213,3 642,446,4 861,59,5 762,233,3 710,484,4 450,677,5 870,312,5 692,527,1 711,626,4 669,7,4 6,662,5 668,521,4 384,670,3 932,109,1 697,1114,2 901,514,5 712,268,4 561,172,5 446,277,3 386,108,4 902,97,5 483,152,5 404,1550,1 662,312,5 883,639,1 911,482,5 486,745,4 862,267,5 601,116,5 688,259,3 143,275,3 882,321,5 514,293,3 605,180,5 475,46,3 665,662,4 794,134,3 803,551,4 692,257,4 757,1000,5 862,747,3 785,27,5 638,482,5 713,0,3 830,306,2 42,126,4 616,452,1 566,630,3 654,1062,3 644,339,4 258,292,4 845,95,4 808,312,4 886,717,1 902,176,4 772,854,2 804,330,4 809,901,5 845,487,5 212,63,5 918,57,5 876,225,3 668,530,3 434,32,3 502,173,5 832,451,1 870,749,3 803,1169,3 463,294,5 867,158,2 906,280,5 617,7,3 930,743,4 720,333,1 827,903,3 899,324,1 930,274,5 293,1027,3 148,344,4 787,930,2 710,920,5 547,304,1 386,549,2 392,1269,3 200,478,4 479,653,4 117,507,4 689,8,3 900,948,3 473,126,5 536,527,3 488,350,5 12,77,1 275,11,5 682,271,4 274,198,4 652,173,5 360,284,4 481,327,4 522,257,5 762,24,4 503,215,4 845,574,2 862,1433,2 193,495,4 681,627,4 624,173,4 451,1056,1 660,656,4 732,1225,3 646,404,4 804,468,4 931,53,4 275,302,4 888,10,5 746,392,2 828,169,4 434,180,5 621,780,3 678,0,3 921,402,3 803,1227,3 880,70,4 395,822,2 197,422,3 320,434,5 784,167,4 99,891,2 12,690,4 446,11,5 456,283,3 441,226,3 658,473,2 696,327,5 357,113,5 273,471,3 386,512,5 757,330,4 670,1596,1 453,146,3 653,475,3 843,143,3 693,117,4 906,332,5 913,1354,1 532,1173,3 223,467,4 888,978,3 454,180,4 839,511,5 772,183,2 513,948,3 639,268,5 706,63,3 625,332,1 942,594,2 620,587,3 842,132,3 795,110,4 698,136,4 882,383,3 833,24,3 560,523,4 888,28,3 584,1474,3 692,575,2 115,297,3 751,1126,3 905,739,4 497,672,3 120,735,5 531,233,5 666,27,5 678,726,4 889,422,5 261,254,3 846,215,3 882,136,5 428,38,3 748,657,4 556,288,4 605,49,5 147,173,5 936,514,5 859,899,3 786,876,2 876,54,4 698,219,2 737,230,3 868,268,4 879,1150,3 514,341,3 867,824,1 921,264,5 787,443,3 290,799,2 499,410,2 377,1073,3 606,886,3 275,38,3 337,274,5 408,179,5 479,248,1 431,287,5 880,201,4 217,203,3 794,431,3 762,606,4 343,82,4 789,430,3 626,519,5 17,241,5 487,214,5 92,275,2 623,116,3 795,150,5 729,874,2 775,431,1 678,293,1 825,678,2 921,70,4 449,42,4 654,644,3 842,55,3 465,172,3 439,922,5 94,398,4 842,565,3 889,636,3 670,1108,2 803,691,5 869,153,4 235,509,3 869,987,2 475,89,3 748,97,5 898,470,4 920,189,2 931,588,5 915,726,4 689,69,2 323,124,5 121,356,3 697,174,3 467,134,5 808,301,5 746,602,5 895,45,2 536,496,4 856,115,5 310,90,3 541,41,3 270,471,2 503,728,5 917,189,5 458,618,4 434,684,2 188,136,4 942,61,3 794,24,5 794,119,3 705,272,3 526,557,4 933,387,3 461,654,5 665,194,3 255,1470,3 659,1134,2 302,635,3 278,449,4 607,302,4 526,213,4 795,483,5 109,363,3 911,495,4 449,171,4 928,270,2 404,372,2 915,133,5 939,94,5 600,134,4 905,627,5 911,142,5 93,635,4 335,584,3 715,419,4 856,987,2 665,152,4 888,189,3 659,402,3 124,1169,1 837,234,2 879,253,2 899,293,4 773,807,1 764,221,2 882,1221,5 654,901,2 935,13,4 244,150,3 607,22,5 485,1175,3 27,30,4 652,808,3 842,435,2 708,213,1 889,488,4 746,874,3 931,646,5 773,306,1 659,469,2 268,185,2 896,565,2 654,117,2 495,482,4 503,204,3 839,663,3 531,338,5 772,60,5 176,173,4 876,701,4 744,206,2 544,174,4 924,322,4 742,268,4 503,475,5 788,136,2 17,87,3 526,191,4 842,270,5 664,187,4 615,348,4 926,40,4 531,738,5 
853,68,4 621,173,4 822,530,4 888,16,4 911,422,5 806,968,4 711,400,3 806,160,4 869,696,4 173,98,3 792,292,4 151,293,4 618,95,5 505,528,3 777,156,3 102,293,4 785,236,5 856,282,5 882,225,3 392,287,3 727,303,4 17,855,5 539,279,3 279,2,2 360,602,5 874,752,3 100,545,4 935,248,5 406,738,3 832,239,4 708,430,5 918,1011,4 900,34,4 344,973,3 632,76,3 879,733,3 441,267,4 837,11,4 850,880,3 765,671,3 63,233,4 906,197,5 626,27,3 785,695,3 895,473,3 683,66,3 6,131,5 393,68,5 292,184,5 288,120,3 803,431,3 698,49,3 446,24,4 814,64,5 602,221,4 631,692,2 803,264,4 787,269,2 118,750,3 803,172,4 746,85,5 884,945,3 790,8,5 273,628,5 917,178,2 893,743,3 392,397,4 617,98,3 869,470,4 757,363,4 155,156,4 497,8,2 930,115,4 654,565,3 888,96,3 923,201,4 599,372,3 896,72,3 748,8,3 217,662,3 591,1257,1 485,830,3 879,87,3 689,72,2 939,173,4 622,201,1 710,188,5 735,285,4 708,1217,4 320,602,5 127,467,1 436,120,3 877,316,4 473,928,3 540,109,4 765,401,3 101,784,2 889,736,3 836,1046,1 720,322,3 879,404,4 869,640,4 699,168,3 93,1008,4 534,24,4 70,51,4 127,228,2 711,65,5 869,191,5 416,267,4 663,610,5 333,1201,4 756,174,3 895,1077,3 692,696,4 654,657,3 906,712,5 683,1300,3 936,296,4 486,430,3 845,178,5 804,771,3 4,454,4 410,7,3 866,206,5 886,759,5 614,167,5 473,581,5 824,234,3 408,474,4 826,300,4 647,196,3 458,104,4 746,38,4 422,338,2 880,105,4 436,510,5 433,14,3 670,16,4 206,72,3 756,275,4 842,274,3 571,13,4 585,217,3 763,88,4 814,649,2 910,403,3 896,96,5 715,121,2 88,14,5 432,204,3 750,178,4 715,174,2 512,840,4 931,237,3 939,318,2 183,237,4 503,184,5 692,648,2 718,619,4 298,1299,2 346,122,3 869,301,4 845,142,5 782,330,3 266,232,4 677,514,4 750,214,4 673,538,1 133,268,3 214,180,4 10,7,4 623,306,3 814,630,4 700,274,5 879,364,2 750,116,4 787,78,4 862,338,3 586,913,4 762,729,5 653,290,4 852,298,4 922,287,5 639,183,5 827,59,4 795,315,5 879,767,2 906,728,5 718,96,3 886,251,4 128,989,2 664,125,4 786,351,2 134,553,3 658,240,3 787,362,2 816,288,2 434,548,3 84,514,5 628,143,5 542,462,3 837,407,4 233,610,5 536,947,1 846,455,1 789,584,2 726,38,2 442,174,2 911,481,5 886,925,5 814,162,4 255,244,4 910,203,4 591,1015,4 81,894,1 681,1177,1 706,199,2 794,120,3 362,175,4 626,1477,3 699,72,3 693,214,3 757,1142,5 536,507,4 896,182,5 882,226,3 453,251,2 197,525,4 325,177,5 913,450,2 787,435,3 621,1406,1 879,718,3 907,487,4 723,1233,1 867,179,4 681,677,1 262,322,1 822,7,5 929,282,4 450,987,1 222,404,1 845,22,4 820,475,4 502,182,5 665,272,3 912,529,2 770,303,5 172,300,5 787,96,3 906,1118,5 372,1529,2 200,769,3 907,478,4 942,1010,2 570,483,4 915,153,4 693,450,4 535,377,5 919,681,3 629,356,3 619,1065,5 541,94,3 710,115,5 777,199,5 811,677,4 641,135,3 884,195,3 536,638,2 888,511,5 397,704,5 706,3,3 620,1034,4 312,510,4 428,24,4 177,282,5 726,439,1 859,284,5 576,195,5 582,424,5 526,167,5 388,520,3 662,275,3 503,628,4 771,258,2 832,648,3 710,179,4 9,588,5 496,21,5 711,1220,4 266,1144,3 862,876,1 805,1015,1 434,1,4 923,0,5 692,590,3 530,891,3 766,504,4 930,514,5 506,251,5 880,275,5 801,260,3 918,1108,3 688,404,5 660,169,4 498,428,4 317,691,4 776,526,4 363,267,3 665,115,4 853,7,5 471,425,4 710,237,4 698,457,4 428,1075,2 746,84,3 670,180,5 628,244,3 863,92,3 287,210,5 757,117,2 267,36,3 803,162,3 404,139,3 888,54,4 858,292,4 377,291,3 298,784,2 893,752,5 346,179,5 473,356,5 706,920,4 654,1160,3 898,526,4 631,678,4 681,865,2 861,78,5 570,190,4 342,201,4 920,650,3 549,312,5 932,933,1 359,650,4 229,232,1 774,689,3 850,263,2 518,1237,5 918,325,3 941,30,5 384,503,4 455,579,4 9,237,4 694,267,5 710,65,4 395,471,5 681,183,4 942,548,1 879,624,4 659,230,2 428,468,4 
715,217,3 499,442,4 877,196,4 84,203,4 354,681,4 526,233,5 199,175,5 716,332,4 761,954,5 553,1045,4 933,1036,1 863,470,5 814,433,3 926,1088,5 787,67,3 846,210,4 547,232,5 185,54,4 920,421,3 899,123,4 770,180,4 795,11,5 76,120,2 893,106,3 776,201,5 108,143,4 587,754,3 746,1630,3 825,553,4 367,447,3 654,941,4 683,65,4 849,70,5 888,247,4 617,469,3 605,203,4 214,207,4 765,233,4 551,236,4 710,565,2 550,1303,1 278,804,3 313,172,1 921,567,3 939,548,2 642,473,5 455,746,4 439,56,5 5,463,2 126,49,4 586,332,4 623,292,4 704,624,5 853,743,2 715,661,3 371,325,4 536,327,2 709,184,4 689,1117,1 605,256,5 560,761,3 666,356,5 598,1277,5 658,233,4 654,5,4 804,208,4 689,239,1 904,474,3 750,143,4 628,195,4 388,181,5 359,256,4 911,601,5 896,384,3 939,473,3 765,746,5 665,404,2 467,583,4 499,30,4 621,720,4 923,276,3 662,317,4 842,1479,2 600,317,4 523,229,3 763,672,4 773,411,3 197,70,3 624,203,3 899,479,4 619,322,5 923,227,4 672,344,4 756,514,5 793,126,5 369,649,5 900,1619,5 258,293,3 118,457,5 707,1151,5 654,911,3 904,312,4 664,153,3 880,505,4 418,211,1 895,274,4 893,750,3 681,558,4 536,285,3 520,289,3 642,178,4 902,280,4 48,300,3 706,304,5 239,339,4 745,61,3 330,466,3 6,426,5 633,514,4 12,423,1 428,505,4 640,63,4 748,1138,3 890,716,4 697,130,4 885,802,2 880,522,4 285,188,3 708,545,4 636,684,3 536,71,1 452,749,4 837,167,5 776,203,5 536,139,2 898,409,1 917,169,4 665,1012,3 729,872,2 711,93,4 898,187,2 428,110,2 307,549,4 567,198,3 428,166,3 654,30,3 910,202,4 653,688,3 830,747,2 660,257,4 72,196,5 874,480,5 446,199,3 505,835,4 688,474,4 901,175,5 457,301,5 604,214,3 558,69,3 449,344,2 639,54,5 401,94,5 335,293,4 855,687,2 933,628,4 932,51,3 715,135,5 42,101,4 895,301,2 922,2,4 437,280,4 353,184,3 45,92,4 880,120,5 629,122,4 921,93,3 436,202,1 791,543,4 78,9,5 874,171,4 787,502,4 902,648,4 902,411,2 846,160,2 442,747,4 908,1120,5 373,54,2 922,116,4 683,216,2 460,574,2 832,67,4 587,1090,4 525,99,5 654,1112,3 542,211,4 12,183,1 698,8,2 879,86,4 795,482,5 863,657,2 797,490,4 486,384,4 886,1032,4 386,520,3 908,508,5 150,417,3 731,321,3 918,470,3 384,233,1 262,195,4 83,69,5 270,96,5 757,478,5 832,95,5 600,172,5 708,671,2 794,767,3 654,1355,3 605,201,4 859,285,4 900,577,3 869,384,3 523,514,4 659,656,2 302,235,4 933,87,4 140,1279,1 715,659,4 626,288,2 939,3,2 642,628,3 713,1027,4 912,655,3 665,126,5 264,283,4 326,304,5 193,124,2 93,237,5 681,940,4 676,150,4 587,238,5 248,194,4 715,173,5 888,177,5 893,44,4 537,171,4 844,345,3 879,1477,3 803,63,5 61,216,2 728,312,3 290,63,5 641,50,5 918,740,3 531,917,4 393,939,3 795,723,2 718,455,1 683,264,4 696,126,5 806,623,3 681,404,2 789,925,2 896,70,5 716,684,4 58,514,4 71,517,4 275,178,5 449,740,3 220,245,5 285,454,1 806,62,5 599,181,4 915,297,3 388,134,2 120,256,5 644,190,5 621,247,4 715,427,3 633,14,4 912,182,4 502,495,5 888,651,5 760,274,4 862,305,5 939,316,4 750,398,3 797,376,3 254,839,1 63,462,4 937,594,2 372,527,3 557,743,4 870,257,5 473,221,4 693,656,4 592,948,2 878,126,5 482,49,5 575,293,3 939,750,3 673,750,3 644,171,4 662,695,3 633,932,3 667,366,5 799,750,4 838,49,5 543,293,2 185,293,3 880,174,2 649,492,4 453,470,3 697,24,2 661,12,4 669,483,5 271,173,5 36,577,3 551,99,4 323,285,5 182,448,2 694,902,4 710,380,5 910,101,3 215,495,5 746,268,4 933,207,5 126,689,1 505,709,5 516,236,1 720,83,3 290,1488,2 267,60,4 737,259,2 681,182,3 886,698,1 882,341,4 668,268,3 865,301,2 734,276,3 663,1097,3 654,656,3 493,844,4 206,207,4 209,0,5 867,55,3 452,272,4 591,409,5 918,143,4 12,173,4 926,559,2 932,178,5 529,442,4 696,1088,3 726,65,3 631,63,5 298,641,4 806,95,3 862,1242,4 302,782,2 662,172,3 
711,229,3 841,753,1 888,184,4 896,434,3 396,57,5 806,967,4 886,1050,4 202,236,3 654,1379,4 834,271,4 882,44,5 670,209,5 147,968,4 708,78,3 628,233,4 651,285,3 822,659,5 892,1214,3 109,1054,2 541,951,4 275,406,2 342,12,5 815,348,4 911,317,4 845,504,5 870,434,3 607,285,4 239,306,4 716,249,1 296,247,3 822,217,4 559,248,5 587,106,5 498,914,4 245,154,1 726,392,3 452,796,1 502,822,2 590,1040,2 795,774,2 606,434,3 636,1027,3 412,306,2 586,314,4 725,116,1 911,196,5 845,1034,4 773,549,2 568,6,4 898,384,3 233,685,3 444,273,2 214,171,4 760,357,3 895,1216,2 942,180,4 652,247,3 763,94,5 188,156,4 763,590,3 696,283,5 787,426,2 757,967,5 124,167,5 933,708,3 89,191,4 689,293,3 655,895,5 450,260,2 891,500,3 922,280,4 821,1109,4 832,55,4 641,317,2 286,221,5 556,874,4 502,268,5 670,678,3 915,426,4 499,482,4 806,173,5 212,830,4 246,339,3 937,104,1 930,1021,1 749,303,4 478,272,4 17,99,5 888,1194,3 155,654,3 621,7,4 313,1052,5 808,257,3 882,398,5 234,65,2 795,300,1 681,209,4 638,660,4 770,767,4 670,175,2 749,875,2 329,375,4 279,111,3 807,285,4 166,1199,4 845,426,4 494,184,5 434,75,3 888,153,4 891,99,5 373,442,5 621,664,2 575,254,3 863,1015,4 814,615,1 771,322,4 900,233,4 405,87,2 711,37,4 450,877,1 707,126,3 893,306,3 336,24,3 942,411,2 188,7,5 558,660,3 917,736,3 915,1,3 544,430,3 404,58,1 536,173,3 113,152,3 877,69,3 803,179,4 500,978,3 748,583,3 737,407,5 365,636,5 638,381,2 665,1153,3 449,117,3 370,126,4 590,215,4 541,392,3 436,93,4 644,959,4 854,85,2 841,750,4 557,274,4 873,190,4 915,316,4 693,629,3 864,110,1 118,123,4 795,177,3 931,233,3 662,55,5 296,180,4 747,327,4 745,280,3 659,265,2 91,117,2 392,248,3 1,303,4 886,762,5 654,1106,4 84,920,3 758,221,5 504,583,4 6,30,4 25,292,3 531,351,3 449,385,4 471,1138,5 697,142,3 624,282,3 640,29,4 707,277,4 938,120,5 806,415,3 706,647,4 149,122,4 921,172,5 895,85,1 480,579,4 909,288,3 280,322,3 657,470,4 914,327,2 885,662,4 723,682,1 814,68,4 853,596,2 723,332,4 302,96,5 553,14,4 804,1053,3 848,632,5 531,86,5 323,309,4 669,174,2 834,96,5 737,199,3 633,284,4 710,1167,4 184,702,4 879,123,5 721,99,4 576,173,5 893,326,4 373,277,2 118,63,4 884,382,2 681,110,3 923,120,4 183,1100,4 709,418,4 863,37,3 715,221,4 795,450,5 877,115,2 636,12,1 772,1070,2 822,197,4 580,221,3 341,148,5 839,131,4 408,288,1 497,133,3 199,811,4 795,515,4 698,14,1 787,590,3 897,241,4 685,10,4 239,747,3 847,24,5 926,779,1 416,48,3 0,148,2 290,194,4 813,217,3 882,133,5 824,180,4 586,257,4 69,90,3 71,57,4 180,870,2 631,195,3 624,293,3 75,181,4 617,567,4 935,180,4 267,474,4 416,179,5 941,968,4 804,287,1 42,844,5 931,379,4 717,830,3 363,686,1 248,407,5 750,366,4 884,188,5 748,140,4 446,78,3 313,418,4 895,731,4 772,264,2 627,257,5 674,285,4 726,267,4 451,944,4 659,173,4 221,733,2 830,82,4 935,332,3 804,93,1 893,747,3 769,14,5 732,285,4 789,49,4 247,805,3 827,509,3 654,1645,3 689,237,5 459,320,3 631,1182,2 513,305,4 772,120,2 734,675,3 14,282,4 706,346,5 645,907,3 267,50,3 942,183,5 797,780,2 871,762,3 12,618,3 692,96,5 416,712,2 633,1048,2 93,745,4 497,233,4 746,662,5 660,63,4 486,66,3 665,639,4 35,260,5 895,720,4 812,897,1 680,989,4 406,490,4 799,291,5 740,691,1 891,290,4 311,49,5 202,878,4 800,353,4 415,310,3 291,281,4 654,873,4 513,11,5 785,70,5 850,618,4 61,206,3 664,417,4 59,653,4 895,229,4 853,510,4 346,248,5 866,182,3 718,63,5 763,116,5 341,87,1 245,650,4 869,194,4 895,225,3 161,121,2 665,498,4 932,450,1 252,698,4 941,130,5 891,171,5 591,14,5 471,251,4 648,180,4 789,297,5 17,417,3 471,677,4 833,293,3 238,11,5 806,525,5 400,677,3 665,309,5 714,157,2 933,120,3 882,505,5 933,770,3 723,327,4 742,275,5 
551,322,2 683,755,4 659,0,3 926,229,5 623,844,3 915,21,4 34,265,3 326,167,4 869,27,4 553,178,3 536,427,4 619,1502,4 662,8,2 456,955,4 887,285,5 355,257,5 861,227,5 274,228,3 477,945,2 824,244,5 683,7,5 879,677,3 816,326,4 691,207,4 120,49,5 670,685,3 647,61,5 784,182,5 720,581,3 887,761,5 400,489,3 7,335,3 933,209,4 462,865,3 726,226,4 663,320,3 322,846,3 769,325,4 692,520,5 200,565,3 882,371,3 773,203,3 563,297,3 832,248,1 706,617,3 329,988,5 738,317,4 846,152,4 585,182,4 654,1083,3 697,0,4 434,38,3 500,99,4 656,110,5 577,297,4 742,301,5 710,264,2 876,175,5 647,6,3 769,327,3 641,172,5 797,1065,2 536,38,2 822,82,3 905,306,3 787,553,3 729,99,5 129,927,4 192,181,4 669,968,2 918,192,2 931,497,5 386,238,1 740,238,2 415,178,2 93,356,5 665,212,4 824,249,5 434,419,4 795,553,2 321,506,4 931,514,4 879,814,4 706,526,5 871,924,4 477,282,4 912,272,3 498,172,5 665,97,4 12,773,1 485,514,5 635,9,5 756,664,3 579,865,4 915,3,4 888,434,4 756,256,4 847,64,2 730,282,4 668,322,1 387,671,4 268,201,2 89,208,5 926,66,4 933,674,4 847,132,4 923,741,3 459,136,5 676,242,3 895,541,3 591,57,5 682,270,3 471,66,4 942,654,4 819,312,5 881,1059,3 783,332,4 65,534,4 538,305,4 393,201,5 942,95,4 806,132,5 20,618,2 435,839,3 142,287,5 781,690,3 644,257,3 708,401,3 923,172,5 897,285,2 647,1032,2 681,85,2 43,624,3 937,1011,5 820,160,4 863,180,5 920,1033,3 934,120,4 345,87,4 832,1117,3 644,207,5 175,457,4 390,186,4 597,689,3 738,464,1 726,545,2 408,704,2 178,537,4 732,741,3 56,987,4 707,257,5 268,1102,5 653,268,4 885,1434,3 660,303,2 473,1285,2 22,153,3 681,819,3 891,402,3 388,602,5 941,499,5 909,507,4 523,310,4 654,513,5 681,737,3 867,213,3 649,619,2 931,1450,5 499,15,4 842,1117,2 536,516,4 942,372,3 604,618,4 879,420,2 559,150,3 652,1182,1 886,317,5 879,688,4 920,754,4 876,69,5 910,834,3 152,567,4 931,160,3 871,976,3 652,160,4 706,380,3 884,236,5 887,136,4 421,301,3 137,13,3 769,1011,5 462,1008,3 726,664,3 762,221,5 847,500,3 877,480,5 941,478,4 496,230,3 649,1214,3 729,236,3 397,95,4 541,180,4 716,298,4 362,1066,3 580,474,4 883,474,4 416,742,2 610,679,4 531,21,5 909,413,4 673,312,5 898,933,3 629,534,4 795,280,4 775,865,3 891,87,4 658,69,4 712,881,3 882,54,4 702,321,3 706,316,3 658,314,3 675,749,4 267,175,5 56,322,3 689,207,5 844,908,4 912,209,2 250,78,5 915,134,4 456,675,3 879,20,2 915,49,5 727,115,4 811,244,2 910,239,1 642,158,3 285,249,4 404,25,3 678,321,3 649,390,2 404,1040,5 658,707,3 773,446,1 331,677,4 530,897,5 454,549,4 829,264,5 933,178,2 753,236,3 523,81,4 842,68,3 273,1050,4 24,120,4 451,178,5 918,245,3 521,479,5 612,434,5 881,415,4 733,486,4 473,478,5 711,1502,4 822,172,5 714,27,5 392,124,4 316,259,4 596,150,4 918,716,3 696,974,1 12,48,4 882,1114,4 525,272,2 888,225,2 882,64,4 552,505,4 485,407,3 84,1112,2 607,734,4 893,310,4 209,173,5 605,474,4 317,274,4 814,613,3 706,637,4 496,1045,3 893,1378,4 743,8,3 917,63,4 789,50,3 859,310,4 871,349,3 692,190,2 768,234,3 232,69,5 748,739,3 659,741,2 610,287,3 233,640,4 750,381,3 931,488,4 683,482,5 58,1073,4 876,581,2 803,122,4 550,208,5 312,231,3 882,88,5 941,171,5 885,1230,3 937,299,3 286,63,5 711,416,4 206,596,3 668,481,4 794,587,5 388,1120,4 804,805,4 405,512,5 150,426,5 794,256,3 746,1133,5 200,1397,4 341,1011,4 659,237,3 852,331,3 882,769,4 214,53,4 552,527,3 100,865,4 550,1252,2 302,992,2 302,513,5 810,306,4 931,1049,4 683,410,3 649,132,4 712,749,3 313,948,4 736,473,5 703,172,4 691,410,4 861,596,3 158,590,4 757,90,4 730,481,3 932,208,2 587,379,3 805,520,3 560,469,3 331,768,3 566,656,5 888,70,3 268,716,1 930,311,4 893,470,4 845,68,5 621,1059,3 717,684,4 804,194,3 
473,422,5 457,844,3 471,420,5 96,172,3 41,110,1 488,298,2 407,271,4 857,689,3 435,924,4 837,127,4 888,149,5 434,471,2 893,274,4 493,180,4 702,122,4 434,500,3 333,760,2 325,519,5 804,567,3 25,750,4 895,496,3 531,548,5 378,93,5 313,755,3 925,299,3 888,733,3 576,767,3 556,245,5 736,10,3 480,662,4 822,68,5 503,363,2 43,327,4 715,431,5 600,324,4 495,558,5 13,653,4 869,78,4 199,469,4 720,990,3 658,478,5 175,339,5 552,80,3 892,410,3 730,65,4 572,178,4 737,658,4 401,227,3 663,52,3 715,207,5 876,202,4 820,55,5 832,176,5 882,85,3 287,198,4 908,681,3 720,677,3 881,683,3 720,422,5 901,293,2 890,24,5 834,293,3 669,602,5 452,119,1 877,516,4 917,637,4 7,509,4 223,990,1 378,180,4 566,526,3 875,275,4 931,427,4 652,580,1 300,68,5 797,1468,3 572,285,3 710,735,5 408,537,3 495,720,5 55,224,2 678,49,5 861,172,5 190,314,5 908,164,5 313,1216,2 689,201,2 697,498,3 647,204,3 867,232,2 115,55,5 888,326,3 424,91,5 718,284,4 730,206,4 523,707,4 285,273,2 552,497,4 912,301,4 428,595,3 89,1191,5 290,222,5 478,136,4 896,204,3 340,298,5 675,686,1 404,208,3 344,37,2 636,245,2 294,460,5 129,418,5 269,233,5 654,410,3 513,23,3 725,322,3 486,719,4 720,659,5 762,1100,3 431,826,3 924,298,3 424,1415,3 747,233,4 913,691,3 415,301,5 711,587,4 917,212,5 290,26,3 284,182,4 750,557,3 386,174,5 649,505,3 664,97,4 424,293,2 221,16,2 0,42,4 715,184,5 827,274,3 404,855,1 177,992,5 821,24,3 591,1021,5 534,85,4 667,49,5 665,193,3 942,126,5 864,293,4 925,287,3 120,11,5 706,881,4 895,410,2 798,173,5 850,124,4 502,222,5 624,750,4 879,434,4 879,1012,3 696,814,3 333,186,4 842,184,3 869,185,4 566,177,4 40,193,3 665,6,4 230,865,3 642,196,4 585,287,4 861,461,4 917,605,4 154,305,5 659,745,4 212,626,4 290,784,4 695,1125,3 707,545,3 865,886,3 297,8,4 473,486,4 285,122,5 901,479,5 652,166,2 262,433,4 787,1111,3 853,48,4 772,153,5 890,1027,3 888,270,3 228,315,1 714,221,3 709,134,5 324,175,3 801,298,4 378,1074,3 917,15,4 268,64,4 496,393,3 932,79,2 404,240,1 626,664,3 697,256,3 877,98,4 451,480,5 633,108,4 717,749,3 926,124,4 432,456,1 792,128,4 803,1209,2 879,99,5 719,261,4 42,3,4 921,49,5 936,223,4 568,324,1 702,299,4 495,650,2 804,718,4 775,478,4 302,996,2 715,398,3 187,454,4 891,587,5 307,545,3 723,892,3 750,110,3 458,454,2 721,627,4 863,190,4 532,214,4 483,21,5 809,268,5 556,175,4 719,309,4 523,795,3 739,301,5 735,126,4 718,87,3 640,133,5 690,499,3 209,301,5 926,738,3 686,312,5 937,872,3 600,167,5 869,948,3 76,155,4 884,450,2 377,530,4 828,123,4 748,356,4 654,862,3 302,82,5 80,115,3 91,706,4 420,97,5 425,142,3 522,731,4 901,180,3 324,615,4 697,403,1 490,22,2 61,166,2 586,288,3 267,712,4 757,487,3 300,116,5 473,608,4 248,222,4 153,186,5 707,110,4 708,208,3 362,11,5 619,242,3 814,251,2 896,132,4 89,655,5 940,180,5 715,1015,3 805,167,4 53,822,2 825,1221,3 485,684,3 888,545,4 746,844,2 654,741,3 931,174,4 496,28,4 270,8,4 893,1380,3 704,61,5 213,252,5 842,52,2 846,98,2 698,287,3 884,277,3 763,0,4 238,632,5 888,94,4 415,741,4 168,198,4 748,98,5 715,215,5 267,272,3 127,185,5 830,1011,4 647,120,5 596,687,4 901,303,3 267,209,3 358,929,4 889,229,3 827,270,2 898,282,4 129,403,5 845,1003,3 789,762,3 275,901,4 378,184,5 676,741,4 747,131,3 746,68,5 932,474,2 892,171,5 289,384,4 882,901,4 421,858,3 869,526,5 757,1022,4 746,11,4 834,422,4 829,209,5 86,221,4 870,344,3 496,366,4 658,158,4 621,97,5 523,202,4 516,24,2 879,110,4 560,78,3 864,431,1 670,454,4 532,935,4 654,11,3 698,618,2 923,257,3 494,81,5 180,1295,1 429,234,2 172,328,4 550,1043,3 7,221,5 935,546,5 7,430,2 890,458,5 905,9,4 362,736,1 710,353,3 893,899,3 536,282,4 735,677,1 882,228,4 406,183,4 942,1073,4 
594,409,4 674,1627,5 4,366,3 518,878,5 654,126,5 852,326,3 665,215,3 805,180,2 623,320,4 665,142,2 805,209,5 498,142,3 748,163,3 752,184,3 323,338,3 733,602,4 724,332,5 797,1238,4 804,0,4 534,644,4 664,0,4 58,433,4 428,146,2 842,442,4 853,273,3 462,18,5 302,120,3 414,135,5 723,258,2 58,1092,5 502,581,5 643,249,4 824,405,2 13,918,4 681,195,5 536,270,2 832,442,3 159,457,5 652,1206,1 478,356,4 12,915,4 541,11,4 923,704,5 728,299,4 477,194,4 867,407,5 296,143,3 908,288,3 803,167,5 394,126,5 222,872,3 362,23,3 591,195,5 500,293,3 746,518,5 681,712,3 795,858,2 411,938,4 892,143,4 757,429,5 696,1046,3 777,280,2 689,55,4 344,237,5 665,962,3 682,268,3 673,288,2 890,284,5 638,525,4 795,306,4 870,907,3 895,992,4 720,244,3 915,294,2 300,657,3 408,922,5 701,747,2 888,469,4 353,746,2 12,234,2 796,686,2 663,82,4 631,608,3 347,236,4 294,545,4 845,183,5 652,155,4 668,131,4 755,229,3 437,147,5 505,78,5 757,886,5 652,142,3 893,534,4 637,384,5 935,740,4 791,925,3 806,380,2 629,120,4 601,879,4 475,400,3 654,1425,2 591,116,5 746,92,4 664,238,3 930,268,3 539,332,4 726,404,3 662,234,2 701,257,5 527,257,4 920,273,4 892,245,3 895,639,2 245,417,3 893,922,5 644,46,4 619,137,5 687,306,4 496,439,1 781,246,1 57,0,5 542,659,3 388,523,5 724,321,4 333,1019,4 832,261,2 845,41,5 128,299,3 220,11,5 804,747,2 307,272,2 935,590,4 91,684,3 560,472,3 418,27,3 311,212,5 473,322,2 846,260,1 647,448,3 559,263,3 384,1117,3 637,522,4 567,518,3 896,32,5 794,394,2 884,87,4 850,227,4 879,155,4 151,282,4 457,432,4 879,10,4 863,230,3 56,476,4 454,229,3 905,474,3 84,404,2 681,289,1 327,982,3 893,1009,4 921,256,4 693,835,4 47,1062,3 565,85,4 233,836,3 755,234,3 732,1162,2 932,582,3 454,81,5 167,545,3 591,284,5 144,448,3 499,581,4 453,462,2 129,122,4 617,120,4 893,110,3 129,290,4 183,159,3 605,467,4 710,654,4 726,277,2 679,149,5 626,186,5 933,1448,5 287,175,4 829,97,5 740,698,4 895,1133,3 912,431,3 449,775,4 757,37,3 544,433,3 158,872,2 585,183,2 746,524,5 773,195,3 921,470,3 234,169,4 803,1410,3 779,163,4 777,53,2 792,507,4 761,273,4 891,49,5 814,646,5 500,1013,4 144,627,2 900,139,4 740,173,5 416,39,3 558,204,5 837,285,4 118,448,5 612,575,3 536,512,4 899,287,2 478,473,5 544,943,4 638,603,4 746,189,4 879,287,4 62,13,4 850,976,3 881,661,3 839,6,4 905,6,3 863,66,4 586,1264,4 567,178,2 884,150,4 513,88,4 676,108,1 941,877,4 356,925,4 716,287,1 884,385,2 850,345,5 689,166,2 241,293,4 384,557,2 845,497,4 894,12,5 726,684,3 652,530,5 209,167,5 564,189,5 732,743,4 933,435,3 910,189,5 732,1084,4 874,182,5 698,9,4 895,86,4 434,193,4 144,741,4 847,199,2 940,992,4 449,518,4 659,218,1 349,186,5 480,87,4 405,701,3 454,249,3 453,284,2 845,1065,3 697,486,2 881,167,5 58,192,4 918,284,5 659,455,1 843,259,1 839,120,2 305,1250,5 623,476,3 756,152,3 734,747,3 532,1290,1 857,99,3 404,98,5 427,351,4 268,51,4 615,878,4 663,181,4 130,296,4 498,524,4 765,132,3 394,865,3 313,1062,5 599,510,5 827,1465,4 649,300,2 327,312,4 920,226,3 834,195,5 560,801,1 825,175,5 743,301,5 703,268,4 629,212,2 43,86,5 605,210,5 616,655,4 58,518,4 551,755,2 343,407,5 750,10,1 619,236,4 888,120,4 373,828,2 507,182,5 870,16,3 681,832,1 715,485,5 364,1010,3 600,293,1 882,208,3 926,68,4 797,377,4 726,510,4 904,123,4 748,290,4 685,264,4 621,585,3 795,878,4 864,471,1 621,403,3 576,471,4 159,128,4 933,392,2 866,185,5 506,287,5 624,1015,2 285,279,4 549,595,2 801,301,4 870,875,3 275,427,4 643,870,4 388,968,4 716,297,3 798,288,3 929,150,2 917,442,3 755,196,2 298,11,5 12,825,5 654,639,2 882,463,5 664,150,3 787,330,4 624,521,3 720,405,1 198,268,5 654,159,3 696,256,5 692,741,3 897,269,4 329,402,5 
604,370,5 695,310,5 294,401,5 806,1132,3 253,450,2 191,254,2 362,750,1 451,503,2 859,201,4 895,290,3 360,318,5 929,287,1 296,356,4 450,301,3 116,930,3 217,513,4 859,315,3 0,164,5 441,217,3 654,784,2 576,47,5 859,332,3 793,274,4 585,2,5 660,209,5 568,99,5 428,226,2 832,380,4 876,257,4 342,149,4 534,706,4 714,107,4 756,312,3 645,750,2 909,136,3 806,256,4 931,171,5 757,897,3 504,143,3 845,569,4 717,14,5 456,146,5 704,182,2 680,681,1 347,684,4 534,264,3 691,865,4 882,702,3 757,153,5 430,268,3 221,445,3 651,95,4 302,868,2 289,94,4 936,241,3 428,154,2 731,288,3 932,195,4 903,327,2 915,211,5 473,607,4 720,683,4 94,510,4 920,891,3 590,708,4 415,79,2 869,383,3 715,518,3 578,201,5 746,506,3 740,172,2 496,1414,2 566,126,5 392,120,4 328,7,2 706,247,4 647,451,3 852,263,3 888,11,5 698,123,4 825,500,3 855,312,5 707,117,5 673,244,4 455,546,3 902,332,4 766,182,4 535,426,5 302,146,4 373,778,3 720,50,4 803,283,4 642,237,3 531,239,3 708,6,3 942,50,1 86,120,5 681,194,4 772,184,4 297,356,5 737,1,3 609,507,3 939,854,5 931,153,5 428,203,4 863,788,4 898,482,4 44,283,4 74,288,1 880,374,1 639,390,3 457,512,4 681,1092,3 41,1049,3 799,293,3 715,201,4 654,261,5 881,20,2 499,1310,1 882,3,4 907,150,3 153,495,3 748,134,4 713,470,4 689,196,4 216,16,3 552,513,3 846,94,4 262,22,3 935,242,2 867,226,1 710,180,4 935,1013,3 900,160,5 665,161,4 882,872,3 451,198,5 69,558,3 932,71,3 932,1245,1 846,197,4 486,348,3 713,409,3 648,249,3 652,153,3 902,185,5 706,317,5 81,639,3 846,427,3 915,222,4 716,865,1 654,760,2 428,380,3 926,287,5 658,602,5 824,288,1 787,143,4 809,242,4 324,770,1 372,528,4 200,602,4 759,65,2 443,271,5 302,288,2 396,288,3 307,769,4 882,345,4 942,225,4 714,87,3 310,46,2 698,481,2 22,88,5 444,95,4 797,691,4 297,186,5 413,323,4 293,880,3 432,136,5 757,337,4 275,315,4 912,268,5 926,239,3 186,51,4 921,182,3 882,18,2 874,460,4 629,10,5 893,85,4 915,392,2 876,196,4 373,280,3 478,156,5 900,256,4 654,181,4 473,955,4 692,191,2 502,948,3 614,99,3 534,274,4 614,96,4 782,306,5 296,195,4 794,166,3 642,844,3 773,544,1 233,963,4 912,88,5 446,275,4 576,545,3 724,18,5 271,55,5 834,483,4 136,95,5 914,312,4 314,3,4 820,559,3 748,1022,3 547,761,4 222,154,5 663,844,2 918,686,1 757,513,5 939,707,3 641,1238,4 125,315,4 150,173,5 278,970,4 781,1513,2 749,872,3 837,7,4 350,894,3 825,225,4 706,457,3 654,722,3 757,627,4 691,210,4 473,649,4 933,173,5 752,180,3 849,96,5 742,325,3 499,294,4 748,152,4 108,150,5 731,936,4 748,967,3 787,636,2 895,1422,2 893,58,5 415,476,4 268,280,1 685,177,5 935,300,3 441,88,4 362,149,5 896,409,3 933,513,5 333,43,4 845,267,4 673,14,4 392,394,3 436,442,4 711,1036,4 902,1069,4 150,628,4 628,308,3 839,95,2 710,78,4 647,71,4 748,417,5 321,1018,4 435,166,3 273,872,3 309,293,1 829,171,5 378,434,5 614,190,5 424,318,1 473,420,3 144,307,2 428,99,5 304,48,3 583,449,2 192,825,2 566,607,4 920,844,4 748,526,4 0,115,3 27,218,5 737,516,3 242,236,2 903,738,4 895,823,1 233,118,3 879,143,5 617,650,5 513,179,3 621,531,3 885,517,4 17,509,4 553,132,4 862,339,3 616,530,2 749,321,2 192,1406,3 891,181,5 150,944,5 93,1220,3 384,527,4 173,64,5 759,450,5 695,8,5 425,99,4 487,485,4 614,191,5 888,1187,2 767,314,3 930,257,3 327,645,3 49,99,2 503,440,4 545,757,4 838,276,2 842,549,3 499,551,1 917,791,3 796,180,5 821,110,4 638,285,4 231,293,2 534,470,4 720,303,3 742,743,5 255,10,5 732,712,4 869,238,3 942,199,4 703,134,5 757,653,4 544,27,4 488,321,5 232,522,4 730,142,5 629,238,4 270,505,4 554,110,4 663,493,5 226,822,2 718,213,2 895,156,4 520,245,4 289,401,4 756,205,4 728,293,2 940,257,4 262,30,4 814,711,3 864,1010,1 911,193,4 371,871,4 932,172,3 
250,0,4 247,78,3 939,88,4 12,174,4 932,317,4 898,192,3 738,54,1 723,1126,3 668,215,3 133,507,3 728,750,3 869,1266,2 867,183,3 920,124,3 675,545,3 307,210,4 183,370,5 854,178,3 842,497,2 726,117,4 723,342,1 641,587,5 601,293,5 93,50,3 743,155,4 736,63,4 896,659,4 895,575,2 471,230,5 535,587,3 425,518,4 706,1167,3 563,271,3 893,903,4 662,762,5 494,1541,4 804,121,5 590,63,5 237,256,4 941,215,4 325,587,3 312,426,5 392,1224,3 490,653,5 735,253,1 751,324,2 569,285,4 233,81,3 118,30,5 693,134,5 402,146,5 617,894,3 787,545,3 767,741,3 312,88,5 869,714,3 505,495,5 778,327,4 457,75,4 803,497,5 596,476,5 918,216,4 891,175,5 939,426,5 378,523,4 789,383,2 665,187,5 890,281,5 726,168,5 325,567,4 885,474,5 803,400,2 818,318,4 560,545,1 795,1521,3 442,268,3 704,68,3 548,0,5 560,152,3 497,316,3 839,491,5 405,281,3 220,846,4 756,97,4 286,245,4 416,160,3 711,364,3 629,464,1 832,68,2 296,484,3 682,300,2 416,162,4 757,1526,3 483,950,1 654,130,2 12,164,3 414,257,4 889,526,4 681,707,3 921,221,4 733,172,3 494,66,3 900,173,5 936,125,4 463,519,5 740,273,4 920,559,2 879,187,4 931,225,3 268,22,5 620,691,4 629,818,3 895,197,4 803,171,4 882,552,4 15,938,4 278,1000,4 408,656,3 718,86,2 777,27,4 588,687,4 494,79,3 446,200,2 756,150,4 231,49,4 806,476,4 607,82,5 508,891,1 560,422,2 626,590,3 789,107,3 270,565,4 86,251,3 550,96,5 642,87,2 824,592,3 536,514,4 665,526,4 362,588,3 842,24,2 706,879,2 736,153,4 167,275,1 325,8,1 891,24,4 631,355,4 876,110,3 756,741,4 867,188,5 215,314,5 263,149,5 479,151,4 928,173,3 766,723,4 676,6,4 893,12,4 882,460,5 867,921,5 686,299,4 845,451,3 633,24,4 750,0,3 544,166,3 590,955,4 665,179,4 754,299,4 797,945,2 455,549,2 84,140,3 461,538,3 108,52,4 449,865,4 867,446,2 898,72,4 697,99,2 245,49,5 536,108,1 711,494,4 253,137,1 488,322,5 471,1214,4 708,422,3 55,237,5 459,123,4 627,689,5 846,242,1 605,515,4 750,215,4 173,239,1 722,321,2 856,274,5 480,392,3 549,221,4 942,187,4 435,49,4 772,5,3 550,586,4 769,257,5 934,236,5 158,290,4 760,204,4 238,504,5 485,8,5 614,212,5 497,178,4 653,596,4 761,269,4 17,957,5 891,428,4 398,539,2 536,221,2 617,153,3 880,527,5 641,1077,5 787,683,5 470,81,5 362,311,3 893,13,4 915,47,5 13,239,5 746,261,5 690,603,5 560,1228,1 588,338,5 23,654,5 212,6,4 692,448,2 404,1555,1 560,511,4 893,908,3 888,720,3 883,99,5 405,465,4 520,549,3 188,791,5 668,149,3 806,490,5 777,131,2 93,410,3 704,545,3 488,896,2 895,517,3 886,1115,5 912,221,3 217,152,4 746,422,5 221,1078,1 910,726,2 37,1027,5 38,300,3 537,526,3 845,90,4 342,495,5 550,719,2 494,1156,4 732,695,3 763,404,4 859,948,3 706,167,3 405,280,3 585,117,4 826,688,3 392,626,4 585,158,4 540,277,2 738,750,3 586,330,3 806,198,5 536,185,4 485,323,4 533,409,5 915,701,3 824,590,4 275,553,2 937,280,2 906,408,4 898,256,4 726,422,3 772,461,5 434,192,3 886,471,4 706,5,3 421,236,4 12,748,3 786,299,4 795,68,5 91,321,3 765,413,4 891,670,5 768,1311,2 798,747,2 428,581,3 559,23,2 698,684,3 770,891,5 118,826,3 895,54,3 806,604,3 414,482,5 806,1410,1 715,731,5 845,1208,1 451,624,3 905,14,3 762,229,3 785,865,3 626,198,5 882,282,4 344,569,2 870,120,4 898,745,4 690,317,5 759,364,5 605,958,5 766,241,4 73,314,5 757,686,3 387,217,5 386,398,3 545,257,4 849,167,5 893,884,2 923,285,3 652,630,2 221,49,4 880,3,3 551,976,3 708,61,3 755,78,4 846,108,5 665,483,4 42,276,1 295,210,4 502,305,5 837,227,4 888,78,3 810,285,5 886,68,4 714,195,4 727,123,3 450,305,2 544,402,5 732,284,4 536,615,2 737,168,5 715,10,4 504,182,3 900,404,4 804,417,2 805,239,2 879,1051,1 459,311,4 221,100,4 889,653,5 101,434,3 252,81,3 499,1047,3 614,386,3 871,1027,3 391,516,5 873,123,4 
78,310,4 404,736,1 605,281,4 716,326,3 703,207,3 456,830,2 797,219,3 797,89,3 306,131,4 788,110,3 704,1034,4 814,493,5 754,936,4 915,234,3 658,659,3 428,788,4 268,169,2 755,146,4 534,517,5 821,357,3 846,81,4 877,139,2 713,110,3 405,923,4 730,212,5 942,793,3 815,325,4 108,809,3 654,468,3 874,332,5 312,126,5 9,501,4 206,2,2 929,454,1 692,287,2 793,180,4 109,337,1 180,684,2 486,228,3 288,14,3 937,180,5 862,1677,1 142,321,4 883,508,4 662,474,4 394,162,5 921,417,4 421,332,4 859,380,3 169,677,4 721,321,3 326,46,4 896,54,3 623,543,4 200,115,1 578,288,2 822,150,4 478,669,3 917,920,4 288,476,2 663,653,5 615,325,3 795,227,5 879,1269,3 847,473,5 639,213,5 596,1015,4 923,631,4 290,759,2 505,448,2 882,115,5 513,777,4 750,401,3 921,173,5 733,477,4 813,16,3 930,249,2 801,562,3 662,99,4 58,958,4 343,1019,5 756,894,4 785,185,4 902,930,2 940,6,4 456,144,3 664,55,5 15,285,2 851,839,3 273,236,4 692,505,2 931,182,4 455,264,3 200,701,1 55,297,4 806,1412,2 584,19,4 378,142,4 734,303,4 69,1144,3 833,245,4 433,117,5 191,1264,3 0,197,5 692,470,3 867,22,5 456,27,5 772,430,1 384,532,4 708,231,5 809,299,5 502,483,4 294,208,5 775,27,5 919,327,2 690,184,5 680,1393,5 888,1013,2 573,287,4 862,751,4 867,657,3 748,660,5 886,476,1 785,201,4 590,167,3 895,63,4 166,605,4 748,434,4 412,514,5 520,683,3 812,889,4 692,194,4 855,321,4 762,701,3 526,11,4 773,225,2 889,173,5 589,1060,2 636,267,2 270,495,5 612,477,5 132,538,1 238,303,1 845,65,4 726,175,4 542,1158,5 541,93,3 869,672,5 642,67,3 688,12,1 623,689,4 716,129,2 885,789,4 878,221,4 614,301,4 926,299,5 796,947,1 300,14,4 519,870,1 919,1611,4 499,918,3 140,983,4 751,620,1 884,317,5 681,718,2 451,49,5 638,510,4 556,251,3 609,483,3 752,426,5 770,242,3 888,203,4 664,285,4 110,285,4 700,312,4 838,507,3 720,55,3 757,530,5 888,3,3 885,1045,2 809,285,4 398,150,2 746,87,2 587,761,4 923,922,5 880,545,4 642,404,3 915,181,3 545,199,5 180,150,2 233,1100,3 842,653,2 658,134,3 714,204,5 534,791,4 737,226,4 175,284,5 870,95,5 845,691,3 931,143,3 634,873,3 757,150,5 681,924,3 248,317,5 604,301,4 638,172,1 749,747,3 633,590,4 882,1020,5 879,417,4 133,677,4 405,72,2 428,615,3 748,1091,3 444,1128,4 151,32,5 747,646,3 726,257,2 895,1073,2 743,478,5 763,844,4 715,240,3 10,195,5 932,452,1 886,175,5 878,596,2 853,49,4 908,165,5 681,249,4 539,256,4 373,712,1 925,302,3 918,877,2 714,80,4 802,268,5 839,302,5 523,854,4 398,175,3 861,99,5 456,286,4 117,316,5 863,47,5 631,548,3 939,682,3 681,1134,2 694,353,4 765,204,5 562,254,5 748,704,4 690,671,1 268,475,1 845,541,3 907,356,3 332,482,4 560,1266,3 708,412,2 715,150,5 621,95,5 867,401,1 302,16,4 307,292,4 710,311,5 614,513,5 764,274,4 863,561,4 609,50,5 268,1410,3 822,156,5 185,1212,3 28,302,4 770,7,5 496,293,4 889,167,5 894,49,5 711,651,3 842,392,2 206,537,2 895,595,2 898,238,3 692,519,2 628,99,5 885,581,1 709,281,2 183,791,4 396,692,4 614,324,2 654,271,3 315,57,3 706,484,4 744,201,3 658,211,4 486,23,4 792,822,3 525,324,3 496,825,3 732,921,3 775,946,2 779,602,2 709,197,4 782,300,4 692,98,3 636,256,2 623,315,4 855,325,2 636,321,3 409,688,2 875,528,4 605,843,4 651,306,4 915,189,4 698,1060,3 449,32,5 889,659,2 319,1051,2 888,206,3 845,1450,4 134,580,4 734,180,4 654,1100,2 803,473,4 863,42,3 744,229,2 650,682,3 243,650,4 863,522,4 380,123,5 863,431,2 911,96,4 820,704,5 843,88,3 726,81,3 784,0,4 398,779,1 536,346,4 750,371,3 781,681,4 859,312,4 495,27,2 453,134,2 477,152,3 710,1052,4 779,285,4 699,173,4 753,458,4 550,432,5 591,122,4 781,1384,4 604,209,3 144,97,5 268,230,1 516,310,3 315,426,5 448,267,2 296,146,3 942,72,3 775,354,3 446,534,4 555,318,3 827,282,3 
526,356,5 322,97,4 599,186,5 921,97,5 668,186,5 867,207,3 655,902,2 130,247,3 796,285,2 586,889,1 693,602,4 404,1566,1 805,155,4 880,434,3 937,839,2 774,343,5 720,14,4 539,14,3 915,526,4 69,256,4 279,67,3 652,244,4 775,180,4 773,201,5 861,830,3 706,765,3 503,558,5 502,198,4 98,677,2 911,184,3 936,292,4 453,237,3 200,791,4 372,738,3 898,709,3 412,332,2 654,291,2 780,86,4 871,1060,4 213,113,4 893,323,3 765,184,4 622,14,4 767,312,5 740,6,3 804,544,1 902,104,3 765,186,4 592,734,4 618,325,2 794,167,5 716,342,4 463,331,4 424,183,4 869,645,4 925,324,1 188,198,5 647,435,5 918,918,2 82,78,5 689,185,4 931,193,5 653,120,4 29,163,4 780,326,4 842,568,1 726,187,3 933,487,5 726,6,2 706,278,3 677,275,5 593,18,3 693,482,5 605,1015,3 933,647,3 456,24,4 925,314,4 839,82,5 880,394,3 863,208,3 876,380,4 906,595,4 449,701,4 471,565,4 814,992,2 398,319,3 829,196,4 296,366,2 789,583,4 900,228,4 415,941,4 221,37,2 327,585,1 566,247,4 670,202,3 576,464,4 931,113,5 817,299,2 587,467,3 779,338,4 267,79,3 4,433,5 24,132,3 193,66,1 61,21,4 642,127,3 377,369,2 895,673,2 547,49,5 345,471,4 735,252,5 887,534,4 867,46,2 388,186,5 839,504,5 478,830,2 726,470,3 221,357,2 346,120,3 493,513,2 822,769,4 317,897,4 806,193,4 773,364,2 649,135,4 869,504,4 626,232,2 523,11,3 726,595,4 576,408,5 255,664,4 746,214,5 757,1243,3 909,124,3 597,894,2 889,227,4 513,709,5 503,927,4 937,1013,4 729,245,4 888,21,3 663,581,1 750,346,4 377,82,4 845,413,4 439,323,5 456,178,4 576,178,2 891,728,4 715,403,4 891,264,4 700,299,3 56,14,4 750,95,4 896,49,5 670,848,3 795,784,5 745,203,5 903,450,4 188,524,5 765,576,3 641,1052,3 564,508,4 17,511,5 199,930,3 405,1196,3 915,245,5 505,606,4 769,747,5 726,469,5 806,78,5 31,454,2 921,948,5 446,117,4 880,525,5 880,124,5 880,404,4 592,404,3 726,1034,2 560,495,3 766,299,4 710,426,5 12,764,2 171,427,4 915,212,4 703,186,4 222,287,3 498,97,4 893,120,3 804,553,1 694,322,2 496,52,3 746,474,5 815,293,5 757,1011,4 93,1134,4 757,664,2 909,23,3 931,610,5 478,407,5 397,70,5 711,723,3 654,312,4 938,1189,5 621,374,2 858,1314,4 610,881,4 874,299,3 641,404,3 246,69,5 658,422,4 892,411,3 937,369,5 863,1302,2 278,945,3 795,1047,2 806,93,2 393,714,4 678,248,3 647,825,3 535,754,4 915,193,4 797,27,4 22,422,3 842,249,4 593,244,3 696,455,3 920,410,2 651,299,4 649,632,4 804,475,1 9,429,3 661,275,3 82,727,4 804,524,4 566,672,3 200,21,2 633,1066,4 772,151,5 784,287,3 533,759,2 762,508,5 428,528,4 888,510,4 673,677,3 658,177,5 534,301,3 818,380,4 929,268,4 901,305,4 876,339,3 644,655,4 880,26,3 378,233,5 37,1015,5 736,168,4 335,367,1 471,471,5 550,392,5 918,111,3 462,124,4 546,301,5 633,689,3 706,1021,3 302,46,5 595,299,4 810,314,4 773,57,1 587,201,1 517,288,4 941,606,5 503,201,3 924,947,2 696,741,3 436,1005,3 215,80,4 579,285,4 866,650,5 711,715,5 393,172,5 436,51,3 807,299,4 495,189,5 822,151,5 898,78,5 926,1228,3 763,283,4 772,208,5 56,49,5 721,332,5 483,116,4 487,177,4 893,251,3 506,1236,5 234,462,4 715,116,4 746,428,4 891,495,5 706,781,3 748,985,3 907,483,4 723,288,1 436,171,4 42,273,5 868,293,3 867,58,4 853,11,5 560,543,2 233,30,4 757,579,4 600,420,1 933,663,4 932,650,3 888,116,4 10,57,3 803,21,5 642,402,3 774,899,3 709,342,3 730,526,5 726,327,4 63,590,4 642,324,2 537,641,3 885,938,4 932,745,4 898,167,4 392,482,4 704,1543,4 382,315,5 607,1062,5 526,134,2 169,293,3 820,116,3 815,330,5 726,70,3 446,234,2 926,1092,4 942,194,4 733,312,4 743,300,3 879,1209,4 821,234,3 692,581,2 647,471,3 434,270,4 795,775,4 851,49,5 670,1490,1 888,299,3 803,1015,4 753,281,4 323,872,5 536,196,4 929,844,3 757,217,4 716,824,2 639,168,5 665,236,3 835,1064,4 
467,1069,5 822,720,4 915,471,3 1,271,5 919,269,3 740,65,3 483,175,4 707,1022,3 662,409,3 86,173,5 478,168,5 373,95,4 692,186,3 939,309,3 10,300,4 388,198,5 859,689,4 783,314,4 526,115,4 931,506,5 221,197,4 523,274,3 188,254,2 285,15,3 649,657,3 772,431,4 856,257,5 786,318,3 870,306,3 829,28,1 918,457,2 416,514,4 327,120,4 785,356,5 869,275,4 862,323,5 626,467,2 636,236,2 733,495,5 608,0,1 550,124,4 4,62,1 478,755,1 750,393,4 695,519,5 873,196,4 882,486,5 896,388,3 629,272,5 505,217,3 302,3,4 471,422,5 744,27,2 757,549,4 485,296,4 431,281,5 912,24,3 907,110,3 781,747,4 845,848,3 738,194,5 505,603,4 483,414,3 903,299,4 654,199,4 885,238,3 867,131,4 617,1211,2 640,968,4 665,656,4 89,749,4 846,1203,3 386,203,2 710,754,3 696,245,5 765,402,3 780,317,3 794,88,4 937,150,4 787,198,5 763,55,4 665,91,3 757,480,5 750,153,3 846,92,1 870,1023,3 531,1227,3 862,298,2 921,745,4 295,275,5 90,342,4 647,634,2 872,749,3 814,85,5 22,115,5 333,495,3 632,146,4 834,182,4 444,297,2 898,253,2 795,65,5 494,630,2 902,182,4 863,85,4 787,140,3 333,207,5 898,82,4 891,945,3 93,364,3 675,221,4 845,3,5 881,179,4 495,770,2 769,297,4 421,217,4 790,753,4 453,110,1 649,493,3 870,1136,3 449,1134,4 653,146,3 386,99,5 12,131,4 866,21,5 681,186,5 893,342,2 37,34,5 869,331,2 887,179,4 654,659,2 781,1011,2 539,273,4 545,49,5 649,55,3 705,257,4 757,169,5 106,321,1 861,976,4 353,970,3 550,309,4 799,288,4 353,195,3 180,1024,1 602,312,5 654,952,3 60,293,2 302,558,4 918,243,2 762,54,4 714,470,4 485,287,4 750,708,4 654,1134,3 215,209,4 654,55,3 746,187,5 455,1106,4 915,508,4 359,95,3 386,171,4 892,239,4 57,173,4 485,267,3 737,188,4 757,140,4 278,686,4 784,151,4 842,51,2 847,81,5 539,285,4 4,180,5 496,664,2 4,411,3 870,201,4 471,175,5 626,941,2 404,969,1 275,333,4 641,53,4 900,180,4 245,1219,3 261,337,4 1,277,3 473,3,5 877,164,4 832,117,2 863,390,4 524,13,3 931,1204,5 590,434,4 555,267,4 858,534,5 905,543,4 404,1307,1 842,627,2 867,684,1 762,152,4 616,55,1 790,300,3 766,169,5 313,476,3 599,173,4 523,1047,4 763,236,4 825,421,2 753,291,3 327,910,3 843,96,3 292,876,2 710,474,5 923,420,4 826,301,4 388,485,4 715,505,4 726,54,3 910,204,3 451,660,4 726,976,2 781,126,4 792,247,4 471,226,5 750,81,4 458,408,2 853,734,3 708,49,5 423,275,2 425,967,3 748,214,4 487,88,4 157,209,4 939,342,2 325,426,4 585,233,3 935,252,5 862,902,3 714,1,3 302,614,4 275,244,3 404,919,1 477,392,4 899,283,2 927,357,5 773,221,3 261,595,4 624,191,2 488,337,3 879,848,3 259,880,4 658,1266,3 268,1426,2 790,244,4 884,153,3 708,1058,5 17,31,2 870,1021,3 143,99,5 652,495,2 275,566,3 787,117,3 853,460,3 386,190,4 895,470,3 882,174,5 641,410,5 453,485,3 6,68,5 221,691,4 862,341,1 664,134,4 818,345,5 454,238,3 343,627,4 843,183,3 14,747,3 302,286,4 451,101,2 545,378,4 454,199,5 806,226,4 678,214,3 59,495,4 304,504,3 894,300,4 873,13,4 938,992,4 404,1590,1 278,29,2 671,283,4 884,587,4 15,201,5 462,110,2 842,70,2 795,760,3 740,180,4 789,66,3 704,81,5 697,180,3 404,922,2 654,316,3 576,214,5 68,78,4 662,977,4 794,173,4 649,925,3 478,484,3 926,104,1 567,177,4 879,792,4 789,715,4 832,427,2 660,279,3 847,844,5 853,143,3 761,245,1 424,853,4 647,1257,2 902,247,2 617,72,3 663,496,3 797,274,4 885,199,3 902,0,3 698,827,3 649,416,3 82,755,4 315,581,5 591,234,3 567,99,4 763,3,3 455,401,2 448,474,5 829,500,3 654,1023,3 652,779,2 168,481,3 935,92,5 918,975,2 577,244,3 486,802,2 770,96,1 292,95,3 932,131,3 905,675,5 898,478,4 654,1406,2 757,652,3 591,479,4 746,175,4 396,242,1 931,212,3 930,689,4 709,345,4 21,49,5 847,356,5 377,322,3 537,482,5 541,779,3 895,38,2 605,941,4 278,179,2 787,678,2 706,1544,2 
933,160,4 850,78,4 535,630,2 741,99,5 885,557,3 746,7,5 888,251,3 737,178,3 829,160,4 922,927,4 379,96,3 763,124,4 711,431,4 609,754,5 681,362,2 867,997,2 941,361,3 398,234,4 444,280,1 12,818,1 915,48,3 708,426,4 921,549,3 589,273,3 720,987,3 390,193,4 672,322,2 505,1,4 503,281,4 850,914,5 606,461,4 30,1018,5 646,495,4 795,879,3 424,442,2 900,110,3 405,96,5 101,759,1 930,219,3 848,567,4 545,815,3 868,12,3 879,583,3 895,567,2 850,454,3 750,432,3 600,147,3 793,285,3 495,21,4 446,1008,4 756,230,2 756,257,5 942,26,4 795,413,3 452,789,4 615,747,3 604,11,4 629,14,3 860,300,4 183,367,1 664,87,3 623,1066,4 846,172,5 707,120,3 668,189,3 802,320,4 893,978,3 737,172,5 747,168,4 681,584,4 654,463,3 258,1073,3 416,778,2 757,289,5 797,81,4 726,1223,3 311,613,4 933,388,3 795,408,3 615,894,3 319,1290,3 877,174,2 307,628,4 99,886,2 795,1298,2 84,41,3 315,99,4 772,427,4 881,418,5 558,565,5 499,739,3 61,71,3 292,814,2 893,474,3 378,96,3 221,146,4 879,28,2 787,1406,3 918,99,5 795,193,4 737,731,3 911,610,3 565,133,5 735,322,1 69,78,4 764,49,2 731,331,5 641,415,5 781,880,3 896,658,5 151,392,5 716,1281,4 621,226,3 381,495,3 718,219,5 688,299,5 624,90,4 520,832,2 544,678,2 911,653,3 717,254,4 726,230,3 720,285,5 842,660,3 425,481,5 920,814,5 668,289,2 895,208,3 713,116,5 879,391,3 382,487,4 639,41,5 233,88,3 757,721,3 665,171,3 806,194,3 559,92,3 781,298,3 335,870,2 314,136,5 386,177,3 797,13,2 6,216,4 236,482,5 643,258,4 335,1248,3 605,826,3 652,289,3 503,224,4 654,1148,3 906,627,5 910,637,4 906,201,5 292,228,2 457,630,4 233,51,4 874,495,4 706,731,4 415,149,5 765,213,2 803,455,3 864,239,2 804,1013,4 526,430,3 824,124,5 560,39,2 71,707,4 456,153,5 673,120,4 549,270,5 747,482,4 797,601,3 683,14,5 115,915,2 268,431,4 717,288,3 436,416,5 568,761,3 592,203,4 885,1094,2 896,428,5 931,575,2 789,215,5 667,287,4 521,207,5 647,568,3 502,223,3 772,97,4 342,1266,4 405,162,3 789,142,3 915,86,3 750,493,4 449,484,5 757,249,4 758,755,4 270,728,4 93,586,4 221,422,4 600,233,1 877,434,4 183,240,3 746,221,2 898,1015,3 275,843,4 63,1064,1 934,923,4 401,274,5 750,142,5 931,656,5 935,49,4 703,184,4 406,568,3 377,192,4 942,404,4 762,971,3 893,322,2 683,214,5 885,226,3 746,984,2 452,940,2 577,245,2 499,754,3 649,229,4 702,0,4 885,156,4 756,1015,3 876,736,1 912,962,4 560,959,4 685,968,5 520,248,4 797,94,5 696,1066,5 804,336,2 697,8,3 863,577,3 748,257,4 180,686,1 328,247,3 329,693,5 550,283,4 662,11,5 898,746,1 706,702,4 388,727,3 879,54,3 76,198,5 859,1040,2 532,120,4 497,173,3 817,270,4 914,332,3 884,820,3 408,44,4 372,631,3 719,315,4 446,174,3 330,159,5 804,557,5 760,1271,1 444,326,2 591,245,5 644,213,4 492,677,3 565,1064,5 870,269,5 495,93,1 816,596,2 710,415,3 6,317,5 710,249,2 453,941,2 696,324,4 748,71,3 644,707,3 905,275,5 540,473,5 658,42,4 565,410,4 839,482,5 221,264,3 726,596,3 748,401,4 804,49,4 762,167,5 748,1262,2 591,244,1 769,987,3 681,557,1 757,447,4 765,49,4 619,99,1 617,89,1 463,291,5 789,549,4 641,931,5 931,224,2 932,404,3 523,384,3 832,922,5 101,3,2 499,1194,4 707,268,3 781,250,3 834,420,4 252,131,5 331,567,4 607,305,4 910,271,4 62,268,3 783,322,4 714,3,4 17,7,5 863,595,4 902,514,4 914,267,5 621,933,2 581,931,2 159,156,5 806,233,3 313,539,3 917,513,2 560,229,3 814,56,5 869,21,4 705,409,4 726,210,4 568,13,4 714,256,4 893,1657,4 912,155,3 626,946,3 486,196,3 649,430,3 651,332,4 415,874,2 896,237,4 863,1411,1 846,1136,5 591,221,1 843,596,3 532,659,5 682,321,2 553,94,4 803,321,5 747,193,4 807,287,3 344,461,5 912,78,4 902,819,4 737,221,4 248,91,5 789,1243,1 910,483,3 285,80,3 393,576,2 801,673,2 740,581,3 3,357,2 
665,545,4 664,755,3 331,410,4 935,136,4 743,187,3 898,203,4 762,356,4 850,70,4 803,442,5 307,178,4 807,311,3 846,1011,1 703,434,4 888,1112,5 921,158,3 544,379,3 780,180,5 746,583,5 795,1035,4 415,490,4 942,422,3 416,650,4 882,402,5 773,568,2 709,431,5 492,185,5 769,49,3 325,499,3 618,26,4 803,24,4 424,671,2 295,8,4 708,540,3 866,175,3 502,545,4 932,384,3 127,505,4 839,162,4 918,755,3 381,58,5 769,13,5 386,1186,4 609,97,5 907,426,5 723,312,5 928,317,4 893,1047,4 917,381,4 799,221,4 428,175,3 880,529,5 193,514,4 654,120,3 801,447,3 806,609,3 850,405,2 579,49,5 747,174,5 386,178,5 910,398,5 754,321,3 644,173,4 387,327,4 845,948,2 803,90,4 452,574,2 638,1192,4 372,458,4 885,65,3 730,13,3 755,182,4 898,237,2 915,32,2 781,1126,2 568,123,5 238,506,5 697,708,4 889,210,2 238,275,5 522,66,4 891,819,3 879,41,5 900,394,3 782,947,3 278,430,4 621,194,5 401,709,2 772,789,3 750,173,4 714,563,2 812,679,2 896,615,5 888,1072,5 726,1214,2 670,549,3 560,234,3 814,135,5 436,474,3 373,692,5 532,401,4 278,460,3 472,19,3 474,257,1 548,120,4 566,489,4 496,140,3 711,394,4 795,499,4 871,327,4 108,227,5 378,662,3 162,315,5 591,1274,3 691,522,3 486,97,5 914,306,3 451,14,4 711,422,3 424,300,4 663,185,5 468,602,5 428,1100,5 505,30,4 681,87,4 587,325,4 345,95,5 112,298,5 638,203,3 348,290,3 804,624,3 744,95,4 288,454,4 942,942,5 436,715,5 909,256,3 843,120,3 456,134,5 879,175,5 795,152,5 535,190,4 806,626,4 415,102,3 320,478,4 832,66,3 658,85,5 638,579,2 890,932,3 531,981,3 249,153,4 845,75,4 437,256,4 763,254,4 726,778,2 641,139,3 496,768,3 233,464,2 506,750,5 681,780,2 304,461,5 415,217,3 307,708,3 745,567,4 428,225,3 285,347,4 938,221,5 17,632,5 803,190,4 550,769,2 911,13,5 794,188,3 877,224,3 715,604,3 804,117,3 799,24,4 922,272,5 523,238,2 576,201,4 787,728,4 140,248,2 710,171,5 670,183,3 533,716,5 755,1073,4 932,20,1 938,1022,4 150,422,4 757,133,5 768,120,4 853,457,3 443,268,4 806,140,3 649,718,3 199,203,5 440,337,4 369,522,3 834,209,5 647,20,3 707,318,5 910,81,2 746,356,5 493,357,3 626,76,2 672,299,3 667,136,3 773,272,1 776,508,4 804,201,2 550,639,4 838,454,4 589,247,4 893,255,3 906,41,4 746,1204,3 680,1104,3 926,392,5 830,712,5 91,707,4 41,140,3 455,479,4 794,551,2 649,87,3 600,482,4 853,95,3 803,126,3 576,316,5 248,68,5 267,1207,2 658,89,2 297,355,3 923,210,3 532,134,3 757,115,5 923,284,4 449,272,3 127,457,4 649,808,3 668,207,2 861,415,3 594,844,3 765,473,5 716,268,5 617,678,1 623,978,4 623,618,3 547,949,4 787,754,3 781,301,3 377,1220,3 681,94,5 386,717,4 915,220,4 20,989,2 612,49,5 526,173,4 478,202,3 869,657,4 129,801,5 494,210,5 707,844,5 654,909,3 84,185,3 881,49,5 267,764,2 903,277,5 623,545,3 869,855,3 539,12,4 652,658,1 785,654,4 370,72,5 692,160,3 200,923,3 746,492,5 647,564,3 531,11,5 877,203,2 296,945,2 756,327,3 238,132,3 416,229,3 701,349,1 639,314,5 499,327,3 888,817,4 806,134,5 665,478,4 837,21,4 526,178,3 771,321,4 853,282,3 788,741,3 487,96,4 269,565,5 119,507,2 879,315,5 842,859,3 708,182,5 830,602,5 754,879,4 252,167,3 449,75,3 93,636,3 279,404,2 847,970,5 408,210,4 444,271,3 416,187,4 83,99,4 726,519,4 891,1223,4 386,242,1 590,69,4 762,1128,4 814,390,2 553,525,4 585,1089,3 737,234,2 869,430,3 591,275,5 295,12,3 692,506,4 404,1411,1 487,321,3 795,297,5 918,563,2 631,132,4 626,627,4 193,166,2 804,521,5 682,131,5 602,987,4 649,527,3 804,163,3 14,296,3 50,691,3 879,1663,4 642,684,3 852,293,2 449,491,5 481,242,2 523,193,4 591,69,4 901,299,4 122,287,3 899,185,2 746,694,2 845,809,3 51,281,4 757,570,4 599,1238,2 0,123,5 879,1,3 757,649,5 408,1345,3 45,747,5 689,1209,3 710,922,5 439,882,5 748,608,4 
801,195,3 326,227,4 874,49,5 671,873,4 441,120,2 828,514,4 742,293,2 933,431,5 901,482,4 453,7,5 787,1477,3 852,1279,4 386,27,5 785,194,4 817,874,1 467,88,4 803,525,4 665,653,5 918,1277,4 729,180,2 523,699,5 827,44,4 532,553,1 863,113,5 740,179,4 755,90,3 457,55,5 921,183,3 91,997,2 730,480,3 582,518,5 193,1111,3 932,3,3 565,741,3 654,913,3 338,186,5 891,320,5 737,227,5 814,153,5 670,88,5 221,248,1 874,653,4 844,689,5 681,361,2 885,1047,4 941,513,4 478,608,5 392,723,3 882,646,5 428,1219,3 398,81,3 747,186,4 118,315,4 473,136,5 931,130,4 397,609,4 317,808,4 757,172,5 654,245,3 748,57,3 747,526,5 406,66,1 895,695,1 756,197,4 777,7,1 560,177,4 846,238,5 652,285,4 911,151,4 718,70,3 753,242,1 475,267,4 799,222,5 505,656,5 581,840,2 588,337,3 302,45,3 359,257,4 781,907,3 312,630,2 341,248,3 924,331,4 772,232,1 36,830,2 870,812,3 715,510,5 458,49,4 927,877,5 746,317,5 93,46,5 448,638,5 814,94,3 835,317,5 560,155,4 716,325,3 373,596,4 850,341,2 586,300,3 644,495,3 773,193,3 926,448,4 353,64,4 147,132,5 754,244,4 406,133,5 933,383,4 929,404,3 692,87,3 757,227,3 715,68,5 804,558,3 931,647,5 668,180,5 726,88,5 747,136,3 698,303,4 879,947,4 668,513,3 895,21,5 41,180,5 896,135,5 497,422,3 882,175,4 842,187,2 781,1536,3 313,40,5 560,232,1 391,1225,4 591,21,5 522,254,5 746,186,5 740,91,3 879,411,3 637,116,4 714,95,4 786,936,3 885,588,3 907,433,4 886,420,5 500,107,4 803,175,4 542,1072,3 794,151,4 12,798,4 695,123,5 542,22,4 275,247,4 726,396,2 797,109,4 790,258,3 824,618,4 629,124,3 886,945,4 752,356,4 449,749,3 652,218,1 0,94,4 278,190,3 641,185,5 926,236,4 784,885,3 24,133,4 757,240,3 2,335,1 845,25,4 329,421,4 806,565,4 773,525,4 879,292,4 796,747,1 864,327,3 663,172,4 806,180,5 258,270,3 448,273,2 915,8,5 745,160,3 290,49,5 705,8,3 483,225,4 803,1139,3 9,179,5 235,525,3 212,153,5 503,355,4 906,695,5 805,257,3 542,176,4 811,880,4 870,126,5 870,574,5 762,381,5 674,895,5 709,885,3 746,958,5 895,568,2 187,355,4 604,78,5 480,7,3 794,549,3 221,737,3 161,473,3 353,675,5 911,55,2 773,678,5 761,814,1 554,257,3 740,479,5 321,8,4 906,247,5 550,279,3 765,68,4 129,1219,5 863,1100,4 740,659,3 757,27,4 869,193,3 889,402,1 534,257,5 888,170,4 882,354,5 898,283,3 348,105,1 233,494,4 851,680,4 888,648,2 527,844,3 672,11,4 312,209,4 656,268,5 547,14,2 390,704,5 550,63,5 449,1310,4 329,442,4 649,192,3 712,269,2 757,123,5 532,241,4 863,61,4 405,562,1 905,272,4 795,160,5 596,288,5 297,741,3 531,1299,3 11,87,5 537,161,3 931,97,5 879,719,2 496,175,4 596,293,4 797,450,2 183,317,5 592,416,5 681,822,2 814,422,5 851,273,3 671,320,4 773,658,3 591,1010,4 501,262,1 772,939,2 789,150,4 657,474,4 613,13,3 428,227,2 408,115,4 853,22,4 456,12,3 795,585,3 94,565,2 407,333,2 882,7,4 446,715,2 748,67,4 497,180,2 58,23,4 579,618,3 853,323,3 397,602,4 121,85,5 885,622,1 621,505,3 777,143,4 398,559,3 347,150,3 405,149,4 434,404,4 711,171,5 501,312,4 641,406,5 581,124,3 893,1461,3 198,750,3 732,282,3 667,327,4 758,404,4 726,239,3 496,122,3 446,222,5 44,844,4 757,383,5 10,87,3 737,46,3 619,675,3 71,0,4 778,257,5 804,228,2 188,510,4 587,209,4 266,156,5 770,14,5 869,95,4 781,991,2 672,527,5 864,120,1 567,302,4 224,481,5 267,143,4 891,228,3 866,656,5 863,201,5 795,746,4 498,897,4 800,300,5 795,179,2 773,509,2 362,147,3 658,256,2 737,567,3 457,526,2 221,456,1 805,951,2 200,28,3 533,124,3 803,830,3 367,446,1 863,28,4 893,1250,4 315,126,2 869,5,4 803,70,4 307,46,4 782,299,4 885,1169,3 415,923,5 880,399,2 639,925,3 531,1135,2 264,814,3 804,708,4 542,356,4 436,199,4 895,769,5 783,257,5 781,1143,3 933,1,4 935,8,4 804,322,5 803,281,4 726,167,5 
762,940,3 917,961,4 188,658,4 107,280,4 436,188,2 863,105,3 711,793,4 912,172,5 781,357,4 756,229,4 768,110,5 881,1443,4 920,1031,5 750,237,3 765,1020,2 920,132,5 896,464,5 200,200,4 681,394,3 642,664,3 456,1046,2 307,1146,4 753,921,3 540,110,1 153,184,5 911,167,5 874,257,4 392,281,4 723,994,1 935,115,4 626,654,4 869,958,4 642,245,5 733,478,4 804,189,5 142,257,3 641,116,4 941,302,4 111,257,3 273,743,5 752,656,5 805,221,4 748,270,5 751,301,5 876,332,4 750,309,3 933,144,3 823,287,3 886,1412,4 405,284,5 815,686,2 918,675,4 896,839,3 765,27,5 270,99,5 711,66,3 6,264,5 716,147,3 763,274,4 746,332,4 50,704,1 380,119,1 846,300,5 12,766,1 585,216,5 795,292,5 495,167,3 853,99,5 931,777,4 732,12,3 757,424,5 6,274,4 401,167,5 268,1073,1 889,213,4 307,1073,3 814,157,2 729,321,1 803,198,5 641,794,4 674,302,5 188,587,4 849,27,5 912,740,4 708,317,5 536,183,3 814,202,4 504,96,4 847,213,5 879,98,3 278,1287,4 649,927,2 796,989,2 446,273,1 932,240,2 745,81,4 454,661,4 428,771,3 785,110,5 378,708,5 926,157,2 129,95,5 691,193,4 892,848,3 739,327,3 344,53,3 524,675,2 801,303,3 912,215,4 640,22,5 341,88,3 607,506,3 633,322,4 806,698,4 867,89,3 723,257,4 503,381,4 900,522,4 763,1283,3 441,1169,4 891,614,5 902,528,4 325,446,4 726,89,3 628,287,4 886,968,5 392,303,4 385,117,3 842,666,2 933,948,3 353,164,4 795,21,4 825,98,3 515,203,4 804,865,1 888,150,3 449,1248,3 639,539,3 795,229,5 931,273,5 824,826,4 920,933,3 797,142,5 900,249,3 496,215,3 922,333,5 127,461,4 737,167,3 748,634,1 789,404,3 649,234,3 533,24,5 747,708,4 835,55,4 502,1008,2 740,16,2 388,663,4 522,178,3 329,283,5 863,316,4 722,257,4 765,264,3 864,116,2 449,1106,4 853,263,1 942,123,3 458,97,5 910,968,5 669,190,4 755,1059,4 846,95,4 757,354,4 331,172,5 502,220,5 696,342,4 837,288,5 726,163,5 386,550,2 915,185,3 755,590,4 832,655,4 628,272,2 384,487,5 867,186,4 617,708,2 651,124,2 0,216,3 693,162,4 325,89,1 233,147,3 654,929,2 59,1121,5 346,1010,3 751,306,5 327,750,3 466,292,4 520,678,3 757,38,2 56,194,3 715,133,5 756,549,3 307,179,5 850,747,3 757,30,3 745,63,4 711,49,4 867,409,3 600,177,4 616,422,1 340,907,3 326,777,3 290,96,4 576,402,4 11,237,5 797,995,3 885,178,2 373,178,1 566,95,4 472,1142,4 571,221,2 779,658,4 605,117,4 487,152,2 475,779,3 575,207,3 679,241,4 636,281,3 732,257,3 795,105,2 937,716,2 105,99,3 847,516,5 428,357,3 863,285,5 525,301,5 716,23,2 893,257,4 838,741,3 449,259,2 405,805,4 462,538,1 511,1237,4 598,277,3 416,662,3 57,133,5 267,126,4 313,1144,4 672,285,4 616,233,3 898,49,5 884,70,4 725,897,2 531,7,5 748,299,4 789,401,2 454,941,4 654,962,3 293,256,3 652,678,2 803,823,3 441,180,4 829,448,2 338,1239,5 91,557,3 881,171,5 285,1059,5 795,466,3 496,150,3 787,131,5 797,399,3 469,304,4 795,830,2 832,121,2 785,175,4 601,258,4 850,329,3 525,259,1 569,270,4 933,662,5 606,136,4 787,581,4 232,527,5 898,657,2 839,670,3 845,58,4 495,154,1 888,234,3 933,604,4 158,124,5 779,384,4 405,129,3 773,177,4 466,126,5 418,173,5 621,624,3 652,292,3 917,494,3 640,656,4 591,8,5 502,189,5 891,569,3 708,96,5 710,432,4 900,442,3 931,217,3 384,496,5 137,473,5 584,854,3 161,54,3 726,249,5 786,257,5 82,126,4 462,590,4 900,418,5 649,116,4 593,220,4 935,474,5 94,854,3 486,567,4 491,649,2 489,1382,1 882,692,4 806,549,5 238,462,5 726,218,3 750,312,2 859,271,3 654,222,3 936,300,1 191,268,3 435,746,5 795,431,2 870,793,3 704,141,2 881,117,4 939,627,4 862,261,3 654,219,2 775,281,3 933,55,5 389,753,4 893,1500,4 827,326,4 704,596,4 270,177,3 623,1288,3 850,840,3 556,164,5 302,1227,2 386,530,3 646,1262,3 416,404,3 780,288,3 717,120,4 404,424,2 278,1069,3 789,249,5 874,63,5 
392,559,3 845,403,4 697,478,2 158,292,4 209,120,4 803,414,3 591,679,1 649,134,4 143,57,3 647,712,2 879,769,4 935,267,4 503,833,2 653,86,4 594,110,4 341,113,5 620,1,3 478,31,3 654,740,3 759,872,4 654,217,3 276,99,4 456,209,5 762,431,5 144,1032,1 373,1321,3 763,63,5 845,731,4 242,25,3 633,280,4 436,152,5 587,541,3 475,450,3 746,461,5 497,189,4 814,418,3 794,230,4 94,738,3 339,94,5 612,317,5 710,379,3 888,231,3 144,225,1 765,629,3 795,249,5 870,182,3 671,514,5 167,272,4 822,99,5 896,67,5 899,617,4 809,337,4 192,714,3 505,704,5 129,819,5 825,90,4 701,687,1 505,173,5 362,301,5 937,14,2 693,317,5 803,6,4 939,299,5 535,79,2 906,97,5 709,655,5 805,788,4 773,93,2 449,1196,3 777,78,3 738,497,4 329,422,5 788,247,3 561,49,5 172,261,4 492,88,4 825,0,4 889,1148,5 198,242,1 824,686,5 424,232,2 486,1313,1 665,63,4 121,126,5 255,976,4 554,318,5 233,512,5 746,528,5 757,1073,1 428,998,2 415,281,5 313,0,5 624,175,4 820,120,3 61,1132,4 591,567,5 551,120,4 523,738,2 881,565,4 898,156,4 845,28,2 648,120,2 405,708,5 436,138,3 1,287,3 847,649,4 284,184,3 841,312,4 730,484,4 567,652,4 533,1214,3 108,174,1 620,782,3 327,497,5 760,122,3 746,1496,4 794,318,4 895,404,2 168,605,5 801,378,4 725,832,5 455,288,4 94,61,4 736,191,5 727,1354,4 722,171,4 202,476,4 605,590,3 485,288,3 459,1010,4 869,809,3 803,719,3 879,127,3 58,284,4 769,812,5 866,430,4 290,746,4 373,1092,2 345,514,5 882,747,5 275,108,4 451,274,4 715,120,5 845,126,5 406,707,3 760,457,1 188,530,3 681,1224,4 498,164,5 771,314,5 338,503,5 935,1096,5 163,825,4 560,87,2 942,93,4 918,1276,4 714,207,3 920,95,4 830,353,4 727,236,4 592,976,3 801,680,4 599,719,3 935,1006,5 893,305,4 654,1173,3 484,241,5 838,92,4 765,604,3 658,704,5 44,180,4 532,392,4 880,322,2 670,809,2 711,375,3 729,268,5 931,675,4 278,448,3 653,117,2 233,660,5 938,325,5 756,95,4 143,234,1 748,649,3 451,264,3 377,224,3 660,755,3 43,418,4 795,552,4 660,572,3 900,88,3 777,248,3 621,433,4 532,549,4 670,837,3 915,55,5 554,49,5 455,208,3 852,876,2 611,146,4 726,551,2 820,27,5 654,649,3 221,173,5 408,170,4 863,567,4 89,1200,5 534,708,5 491,198,3 495,1156,1 242,1114,3 732,1046,2 607,478,5 845,400,5 496,945,4 770,273,4 553,734,3 386,566,2 812,309,4 576,1053,3 627,331,5 94,48,3 610,352,3 496,432,3 428,306,3 416,209,3 893,298,3 233,366,4 278,1223,3 773,28,1 870,548,3 536,731,3 586,306,4 462,275,3 176,326,3 669,1098,3 649,1134,2 837,55,5 662,209,3 928,184,5 842,484,2 449,587,4 746,161,5 278,1,4 243,275,5 404,181,1 617,692,3 804,122,4 43,522,4 652,1477,2 763,21,4 616,854,3 542,196,4 829,203,3 243,527,3 939,8,3 654,6,3 449,545,4 773,187,3 673,254,4 189,326,2 647,166,4 12,778,3 610,310,4 184,257,4 302,483,5 390,176,4 718,889,1 772,1068,4 772,215,4 232,646,5 585,392,3 483,565,4 902,695,3 406,153,5 587,154,5 591,1070,4 482,509,3 908,743,3 110,306,2 707,741,1 778,925,4 925,288,3 470,392,5 693,526,5 825,232,4 783,325,5 6,414,2 536,1193,3 938,817,3 454,715,3 890,312,5 617,549,3 900,134,4 626,401,3 193,434,4 132,285,2 531,1198,3 67,180,5 540,72,4 457,959,1 876,327,2 654,218,2 74,116,4 494,490,5 290,124,4 692,129,1 861,927,4 746,284,5 928,31,3 748,135,5 915,577,1 298,596,3 781,1251,3 866,63,5 536,313,1 757,535,2 290,150,5 560,461,3 832,217,4 424,1221,2 649,28,2 584,542,3 246,57,4 755,158,4 180,472,2 681,297,4 298,855,3 428,442,4 922,762,4 879,179,5 681,582,2 765,180,4 585,176,3 868,410,4 649,99,4 785,415,4 773,181,4 942,280,4 931,458,4 71,197,5 828,508,5 890,285,5 103,221,3 525,247,4 811,288,1 802,305,4 289,565,3 829,202,4 839,948,4 444,545,2 563,126,4 212,196,5 797,48,4 772,186,5 915,216,4 262,321,3 834,185,4 604,407,5 
98,275,2 450,303,3 587,728,3 380,95,5 839,639,3 618,297,5 423,989,5 769,333,5 935,1343,5 777,81,3 0,57,4 796,1253,2 755,288,4 61,545,4 850,1093,1 644,434,4 775,4,4 206,427,4 261,566,1 609,132,4 832,444,4 885,714,1 12,809,5 789,663,3 576,549,3 737,635,3 446,507,3 822,477,4 639,690,4 765,431,3 151,366,3 378,525,4 398,525,3 503,180,3 923,81,4 835,602,5 625,680,1 879,116,4 647,89,3 641,397,2 346,283,3 275,424,4 714,461,4 789,1024,1 310,134,4 803,1290,3 559,404,4 628,276,5 828,69,4 378,174,5 850,366,2 880,194,4 552,524,4 404,356,5 795,38,3 895,26,1 362,454,5 839,133,3 869,76,3 779,317,5 775,513,5 920,394,3 737,146,3 871,741,4 882,285,3 806,81,4 861,270,5 343,68,2 702,275,3 600,478,4 497,508,3 307,636,3 933,528,5 566,432,4 765,214,3 428,815,2 495,194,4 933,96,4 14,257,3 372,152,5 249,182,4 932,507,3 587,968,5 915,731,3 499,6,5 920,368,1 216,760,4 703,68,3 792,1364,2 673,865,5 806,569,4 935,1278,3 300,386,3 888,49,4 250,467,2 266,1109,3 587,267,5 200,231,2 779,473,3 231,177,5 164,325,5 853,245,3 918,84,2 17,523,4 681,218,2 295,243,1 649,388,3 378,473,5 199,287,5 927,876,5 877,473,5 424,180,4 594,49,5 278,268,4 663,97,4 392,584,2 405,69,3 756,182,4 893,885,3 915,84,2 839,525,4 342,63,5 726,182,3 834,132,5 304,59,3 833,404,4 642,185,4 544,163,4 628,91,4 930,346,4 388,488,4 106,326,3 698,472,3 833,314,5 253,456,2 96,1125,3 743,306,4 801,184,3 321,482,5 712,314,4 644,97,4 665,759,3 91,400,3 746,841,5 336,226,5 462,1114,4 554,327,4 893,918,4 468,167,4 737,253,2 523,21,3 173,780,4 863,576,3 715,72,4 772,180,5 760,1162,2 747,322,4 693,143,4 844,750,2 801,325,5 861,171,5 659,121,1 12,437,1 495,495,1 861,181,5 621,418,4 507,450,3 746,181,5 598,1356,2 681,201,4 806,210,4 411,173,5 452,226,3 415,292,5 660,630,3 786,680,3 893,854,4 870,91,3 734,318,4 234,343,5 609,202,4 797,161,3 726,99,2 816,6,4 315,987,1 853,132,3 928,181,4 861,142,5 310,442,3 750,558,4 473,653,5 841,1104,2 744,78,3 801,330,4 479,484,4 147,188,4 804,55,4 621,1015,3 880,50,5 160,193,1 341,149,3 366,184,5 805,256,4 757,251,3 539,268,4 649,587,3 493,328,3 331,224,3 898,196,4 859,273,3 143,1027,3 94,72,4 787,1125,5 127,222,5 659,785,1 621,28,4 850,1033,1 549,236,3 564,170,5 193,714,3 879,741,4 862,909,2 57,202,5 586,261,4 404,467,3 560,942,3 638,1004,2 534,206,4 922,337,4 591,753,3 206,516,3 471,190,5 717,743,3 895,49,5 686,320,4 548,747,4 836,293,4 296,526,5 267,1177,1 891,392,4 770,461,3 348,618,4 647,745,4 4,162,5 773,553,1 480,180,5 828,461,4 500,221,4 616,442,4 177,470,4 933,167,4 436,181,2 221,192,4 845,1106,4 806,545,4 148,312,5 814,416,5 612,296,5 900,520,2 849,647,5 748,126,4 565,132,4 473,1199,4 845,387,3 859,55,4 773,385,2 594,716,2 747,207,4 652,641,1 863,135,4 483,470,4 314,432,4 787,684,3 505,761,3 17,171,3 805,356,3 497,13,4 834,287,2 726,179,3 520,207,3 785,279,3 832,474,3 781,1614,3 832,430,2 405,196,4 757,351,4 702,287,4 785,500,4 665,203,3 449,391,4 797,950,3 633,1083,2 785,1043,4 861,433,5 633,116,4 532,37,2 797,1138,3 915,929,2 379,88,5 157,21,5 197,473,5 756,96,4 937,283,2 373,471,2 748,654,5 723,947,1 275,727,2 428,1108,2 504,357,3 701,351,1 868,125,2 933,66,4 895,1247,2 638,193,4 726,773,3 832,670,5 842,378,2 404,697,1 901,297,2 560,614,4 484,344,1 522,196,5 746,25,3 857,333,4 591,95,5 579,150,2 818,299,5 533,128,4 540,28,2 871,1046,4 820,1196,5 762,99,5 681,227,4 697,430,1 766,523,5 307,6,4 757,290,4 874,184,4 806,153,2 200,589,1 758,983,2 762,391,4 803,1049,3 505,131,4 850,128,4 560,185,3 377,66,2 829,967,4 915,958,4 109,779,3 828,280,3 424,156,2 341,487,5 915,0,4 273,1059,4 129,388,3 685,517,5 379,479,4 778,6,3 
58,67,2 467,507,4 706,318,5 550,508,4 837,86,4 842,176,3 454,27,4 12,208,3 715,1123,3 298,240,3 591,474,5 505,522,5 898,473,3 824,321,5 397,836,4 255,91,1 652,61,3 530,310,4 502,355,4 778,508,2 804,101,4 937,814,3 647,37,5 290,70,4 889,635,3 732,514,5 832,521,2 59,132,4 880,553,1 287,257,4 870,1387,4 740,21,5 706,743,3 449,77,2 895,1521,2 829,587,5 346,147,3 404,1545,1 487,588,3 326,254,3 659,39,2 742,300,4 353,180,4 547,658,4 127,552,3 302,1043,3 469,128,3 659,657,1 404,302,1 615,257,4 373,238,4 795,731,5 892,1244,2 290,938,4 412,24,3 278,55,4 715,691,5 681,225,3 120,171,5 474,353,2 648,116,5 540,392,3 867,127,5 817,345,4 644,179,4 248,237,5 331,366,4 47,242,3 933,854,4 888,190,4 846,199,3 449,596,4 276,283,4 727,288,3 220,55,5 428,1300,4 795,607,3 750,160,2 12,186,5 711,194,3 624,518,2 810,299,5 508,337,3 287,204,5 805,1017,4 876,920,4 612,175,5 912,602,4 938,889,2 880,20,3 508,265,1 652,727,2 428,379,3 902,95,2 464,134,3 794,142,3 772,171,5 906,933,4 893,244,4 806,1083,4 831,285,3 825,67,3 814,1038,5 587,23,2 641,495,4 428,636,3 128,322,1 653,1047,3 740,82,4 814,175,4 652,701,3 56,1015,4 576,48,4 867,93,1 910,209,3 835,268,5 275,214,4 458,234,1 772,95,2 499,214,1 503,845,4 454,182,4 746,167,4 502,196,5 868,595,3 885,236,4 531,119,2 644,366,3 505,193,5 532,510,4 758,219,5 832,683,3 720,80,2 94,551,1 898,366,4 497,656,3 757,509,3 760,839,4 877,150,1 669,82,3 386,41,4 645,1021,4 805,97,4 53,404,4 888,1230,3 223,221,4 158,271,5 723,937,3 829,497,5 488,748,4 698,1335,3 641,398,3 193,660,5 825,419,3 903,735,4 721,110,3 891,142,2 619,180,4 853,194,3 934,147,4 150,257,5 726,750,3 842,81,3 918,747,1 415,414,4 655,301,3 915,960,3 401,136,4 704,251,1 941,495,5 932,72,4 210,442,1 803,575,4 900,464,4 681,247,3 0,141,2 206,384,3 904,1010,3 267,378,1 622,227,3 278,758,4 482,221,3 235,272,1 719,346,3 942,1187,3 478,489,4 879,1414,2 797,160,3 621,807,3 27,671,3 12,142,1 761,748,1 310,965,4 923,12,3 158,879,1 893,1072,4 536,692,4 108,256,5 803,69,4 302,804,4 635,282,3 855,689,4 795,1011,3 647,209,4 884,659,5 818,69,4 475,398,3 922,124,4 550,1135,5 505,176,5 915,53,3 295,631,5 289,101,3 895,30,3 918,239,3 449,212,4 801,677,4 734,288,1 880,811,2 294,448,4 759,630,3 829,483,5 814,76,4 398,425,3 194,1314,4 688,256,5 565,230,1 933,962,5 584,59,4 659,203,3 520,565,3 880,89,3 305,1027,2 823,686,2 803,357,3 489,117,2 768,1010,3 662,120,4 429,1374,4 786,307,3 787,404,4 502,97,5 916,275,5 591,94,4 828,1192,4 863,71,4 523,22,5 787,635,3 880,213,4 886,95,4 821,587,2 814,86,5 544,171,5 780,244,2 922,294,5 881,88,5 832,151,2 405,669,3 332,126,4 886,46,5 390,185,5 693,1125,5 814,517,3 869,44,5 616,412,1 692,938,4 839,513,5 195,268,3 832,588,5 679,202,3 888,565,3 250,594,3 587,224,5 681,38,4 594,120,2 711,943,4 42,143,4 774,749,5 915,76,3 698,245,4 842,500,2 183,275,4 904,124,3 845,430,5 850,298,4 222,288,1 647,62,4 720,331,4 9,69,4 594,471,3 341,1093,3 268,186,4 405,660,5 888,92,3 942,116,4 654,502,3 804,134,4 0,215,5 129,1156,3 888,1066,3 756,678,4 406,175,4 837,113,4 726,770,3 561,392,2 121,512,4 196,271,4 834,317,5 698,1162,5 892,76,4 740,55,4 732,129,2 668,613,4 795,77,3 825,257,4 180,716,1 786,305,3 664,418,4 304,227,2 757,178,5 177,587,4 649,1626,3 654,994,3 869,1207,2 883,1008,2 879,67,5 534,487,5 888,748,2 127,530,4 796,268,3 734,92,2 926,409,1 532,653,3 326,244,1 476,738,4 274,392,3 364,814,3 860,530,4 343,529,4 795,1075,2 91,367,1 233,1010,3 262,434,4 737,163,5 828,732,2 590,181,3 29,402,2 804,1032,3 490,293,2 766,614,4 935,507,3 850,121,2 715,548,4 806,253,4 877,370,3 278,378,3 585,49,4 885,825,1 682,324,2 
542,233,4 896,233,5 837,133,3 890,236,5 692,175,2 776,14,4 866,654,4 324,510,4 787,1302,3 523,226,2 206,846,3 441,469,4 550,195,5 775,617,3 561,581,4 803,746,3 748,238,4 486,26,5 187,10,5 837,273,4 845,208,4 869,65,4 449,488,4 821,70,4 563,684,3 503,157,3 795,944,5 390,714,2 849,518,4 586,318,3 364,740,2 681,262,1 523,221,2 502,99,5 794,57,4 537,654,3 832,514,3 847,609,5 726,113,5 665,153,3 158,741,2 921,236,4 832,666,1 898,175,4 845,366,4 629,863,4 932,0,3 804,234,2 559,108,3 683,49,4 861,69,4 869,812,4 845,184,5 585,122,3 827,901,4 847,494,2 573,241,5 923,407,3 494,635,3 342,285,4 89,478,5 804,351,5 886,253,4 275,95,5 290,403,4 388,385,3 607,156,1 863,971,2 918,10,4 392,79,3 926,762,4 941,94,5 824,362,4 882,322,5 756,269,3 628,692,5 463,269,4 542,59,5 895,839,2 874,510,5 391,494,3 839,51,3 320,1100,3 344,85,4 426,267,5 621,450,4 644,60,5 845,607,4 614,270,2 697,482,3 601,260,3 498,54,4 428,424,3 493,142,5 723,987,1 772,454,4 634,306,4 621,21,4 803,669,4 711,62,4 460,293,3 879,1094,3 641,770,3 936,299,4 362,256,2 614,47,5 803,1488,3 853,187,4 815,331,4 913,642,4 716,471,4 814,581,1 764,41,5 895,7,5 499,1134,3 836,257,4 748,1040,4 455,543,3 720,96,4 591,886,5 824,454,4 938,933,3 526,1210,3 885,424,4 830,27,3 629,110,5 238,115,5 917,630,4 785,495,5 7,54,5 404,397,1 802,682,1 592,237,4 918,327,2 845,524,4 834,426,4 773,1015,3 937,471,4 823,322,2 505,579,3 562,256,5 71,214,4 435,469,4 858,1280,3 756,52,3 143,454,3 495,442,2 565,377,4 532,288,2 846,1006,4 844,309,4 406,401,2 536,965,2 652,207,3 785,321,3 480,198,5 435,277,2 188,404,2 806,472,3 867,180,5 386,0,4 845,226,4 542,777,4 829,95,3 893,11,5 918,1283,3 814,239,2 638,730,2 803,635,3 311,1171,5 863,404,5 644,59,5 638,739,4 664,1314,4 846,150,4 881,204,5 14,822,2 386,529,4 654,1105,2 495,505,3 860,13,4 550,1438,5 444,901,4 193,227,1 449,618,3 885,379,3 670,378,3 505,190,4 581,918,5 290,401,4 565,575,2 918,708,3 792,927,3 772,918,5 584,1148,4 869,872,2 416,449,2 97,209,4 663,656,5 115,1219,2 711,995,4 716,454,2 424,325,1 741,545,1 520,1239,3 788,248,3 889,738,2 454,14,2 76,191,3 662,2,4 676,90,5 626,386,2 393,741,5 879,985,3 654,390,2 718,215,4 419,123,5 665,1473,3 923,503,5 888,143,4 832,99,4 748,208,4 870,271,2 262,233,4 463,514,5 822,449,1 886,293,5 499,408,4 753,1015,4 762,1005,2 698,1327,4 681,552,3 888,663,2 397,99,3 714,587,4 449,213,1 912,749,4 199,428,5 918,217,4 473,522,5 58,491,4 658,719,3 803,119,3 877,212,3 275,974,3 544,719,3 832,187,4 847,264,4 829,473,5 531,117,4 880,464,3 628,657,4 339,496,5 696,309,3 853,117,2 847,495,2 755,117,2 629,180,3 782,342,5 32,879,3 453,370,3 298,639,3 644,95,3 919,349,4 789,861,1 185,1041,5 534,143,3 614,69,4 325,473,5 70,301,3 641,1223,4 653,461,4 419,126,5 789,104,2 404,53,2 12,503,5 854,164,4 454,76,4 296,293,3 303,242,3 789,484,3 726,157,2 499,133,5 888,1261,3 789,293,2 233,463,4 639,181,5 808,747,3 856,23,1 918,371,3 882,126,5 804,412,2 541,346,3 796,299,2 555,706,3 829,194,3 188,1403,5 874,97,5 757,194,5 428,807,3 729,247,3 891,1453,3 895,222,4 919,345,4 389,474,1 795,492,3 918,627,3 633,289,3 850,354,4 599,182,5 789,213,3 795,85,5 886,27,5 91,405,2 879,257,4 183,386,4 485,302,4 845,641,5 844,301,3 693,664,4 4,403,2 834,370,5 794,174,5 846,498,4 312,325,4 58,566,4 851,1051,4 715,484,5 388,653,5 728,271,4 12,646,5 715,723,4 697,201,3 388,370,4 754,330,3 393,28,3 311,617,5 714,10,4 531,1216,4 144,1215,2 909,249,1 621,79,3 791,120,4 681,191,3 268,131,5 296,147,3 818,326,4 177,146,4 698,184,4 624,120,3 377,215,4 869,326,4 763,110,4 404,400,1 862,320,4 659,390,2 624,475,2 449,695,4 861,475,4 
718,297,2 879,374,1 107,0,4 870,314,3 660,88,5 932,402,3 879,216,4 444,287,2 804,1100,5 600,120,2 392,484,2 692,684,4 709,339,4 456,174,5 532,590,4 804,1109,5 419,854,5 522,96,4 881,379,5 829,678,3 604,337,2 824,927,3 197,681,3 70,167,5 729,256,5 115,294,3 916,149,5 350,306,4 658,450,5 378,196,5 744,193,4 780,194,4 446,82,5 658,182,4 896,264,3 703,1298,3 702,747,3 907,446,3 398,217,4 895,1182,2 681,685,4 471,293,4 300,635,3 129,76,5 896,239,4 576,1218,3 752,522,4 715,619,3 733,14,4 779,293,3 787,214,3 641,20,5 348,284,5 814,116,3 239,894,5 654,567,3 935,627,1 165,686,1 934,863,5 703,630,3 278,565,4 883,381,5 845,177,4 848,120,5 585,283,3 627,293,4 747,82,3 845,750,5 503,790,3 787,285,5 291,418,4 275,918,4 556,345,2 779,185,4 834,24,5 473,503,5 814,484,4 397,207,5 623,92,5 180,545,2 869,1117,3 473,203,4 752,173,4 915,569,3 932,470,3 746,403,5 839,658,5 726,721,2 653,567,4 379,210,3 842,173,4 471,650,4 331,146,4 697,197,2 496,950,2 727,14,4 233,924,2 867,479,4 384,209,1 637,127,3 180,1007,1 523,484,2 773,400,2 888,261,4 842,738,2 834,68,5 794,930,2 792,147,4 159,54,4 884,78,4 787,432,2 935,281,2 665,643,3 781,1254,2 744,11,5 694,323,2 852,357,1 773,196,1 641,287,1 522,637,4 177,99,4 762,49,4 838,712,2 213,275,3 200,548,3 801,572,4 353,450,3 869,327,3 576,495,5 338,433,4 324,92,4 683,715,2 832,434,2 888,855,4 600,49,5 93,464,5 607,504,5 850,878,4 803,987,4 63,274,4 869,400,3 275,149,4 567,630,5 871,894,5 254,331,2 668,267,3 454,552,3 920,274,1 580,1096,4 138,236,3 822,605,4 804,216,2 492,1087,2 726,273,5 494,567,1 388,0,4 888,635,4 496,199,3 832,167,5 845,483,5 769,928,4 829,175,3 920,1278,2 803,446,3 440,537,3 270,180,5 751,1023,3 787,430,2 449,219,4 141,41,4 746,12,3 429,747,3 858,286,5 486,273,4 460,326,4 787,181,2 714,233,4 715,608,3 879,789,3 805,654,3 729,327,2 689,746,3 46,285,3 825,623,4 832,288,1 536,520,2 452,1144,2 451,474,2 915,1004,4 665,510,4 438,289,4 785,202,4 921,216,3 380,312,2 542,248,2 221,47,5 633,275,5 649,176,2 813,52,4 496,401,4 745,1,3 850,982,2 762,482,4 101,152,2 13,185,4 494,54,2 792,150,5 353,169,4 84,8,4 245,826,1 906,7,3 636,459,2 832,237,2 585,807,3 836,150,5 6,51,4 377,70,4 853,8,5 912,746,3 503,594,4 850,249,5 937,747,2 845,558,5 787,325,4 757,241,3 866,587,3 893,1079,4 698,885,3 610,339,5 199,929,3 706,80,2 223,124,3 877,508,4 806,540,4 235,293,2 416,211,1 397,683,4 862,1312,1 924,332,3 910,621,3 620,24,4 607,468,3 760,876,2 795,933,3 747,209,3 274,824,2 866,8,5 200,54,4 918,793,4 689,356,5 795,479,4 534,300,4 794,471,3 561,189,4 587,624,3 797,257,4 449,811,4 180,987,2 795,767,2 317,1159,5 898,654,4 630,681,2 937,545,3 758,590,3 693,178,4 408,126,4 379,199,4 902,184,5 768,410,3 843,116,4 850,411,2 867,738,2 789,226,3 307,391,4 748,100,4 473,24,5 341,485,5 81,769,4 497,236,2 851,824,3 505,515,4 807,331,4 221,34,1 895,326,5 829,924,4 278,320,5 823,324,4 850,410,3 760,236,5 644,481,4 620,153,5 12,862,4 804,57,4 380,595,3 772,58,5 917,855,4 678,221,4 825,32,3 643,976,4 683,62,4 271,31,4 767,13,5 878,865,5 499,1468,1 699,172,5 909,750,3 847,477,5 751,343,4 496,67,4 560,183,3 345,174,4 485,241,4 886,410,4 885,1208,2 556,1243,2 862,327,5 600,238,3 315,486,3 428,234,3 824,1086,3 739,325,3 785,288,4 536,430,4 868,1381,3 41,173,5 531,814,4 353,526,4 639,61,3 116,474,5 837,189,4 585,163,2 789,143,4 191,288,4 644,640,5 903,154,4 723,872,3 902,47,4 715,55,5 803,514,5 827,885,1 317,133,5 547,369,3 714,70,3 853,422,4 928,482,4 823,988,2 456,119,2 888,248,3 539,331,4 789,82,3 733,273,4 415,272,4 854,528,4 377,55,4 932,1069,2 738,332,4 621,385,3 821,538,2 706,613,2 550,558,5 
421,716,3 757,22,4 708,471,4 75,202,4 101,240,3 406,658,5 343,63,5 392,194,3 798,285,5 575,762,3 908,528,3 838,120,3 485,918,3 797,416,3 626,51,3 853,923,4 942,580,4 776,126,1 922,321,4 533,290,4 483,1015,4 885,19,2 845,187,3 914,320,3 24,526,4 619,754,5 633,247,4 494,575,3 555,323,4 845,639,1 155,479,5 850,346,5 70,152,4 837,70,3 812,891,1 716,302,4 789,392,2 536,567,2 842,205,3 221,71,4 934,1015,4 578,233,3 746,174,4 17,492,5 842,156,2 931,549,2 803,365,4 654,1265,3 541,320,4 503,619,4 800,327,5 880,98,3 931,210,5 513,156,4 629,8,2 697,299,4 74,832,2 886,139,5 292,450,3 852,285,3 654,583,3 690,177,5 886,827,3 283,318,3 365,16,5 734,514,4 599,946,4 789,72,4 794,418,3 804,1097,3 861,435,4 540,150,3 797,120,5 750,750,4 775,97,4 337,85,4 292,50,3 877,587,2 757,167,5 866,322,3 942,54,5 763,746,3 24,203,5 558,862,5 591,128,5 832,201,4 942,53,4 339,479,5 887,513,5 824,978,4 636,814,2 444,507,2 746,314,4 846,526,2 888,649,2 735,532,3 886,430,3 789,236,4 714,745,5 602,21,4 785,284,3 710,154,4 647,780,4 387,275,2 794,233,4 703,491,5 416,110,3 59,174,5 27,566,4 547,545,4 333,10,4 531,425,5 811,291,3 907,962,4 744,482,1 781,288,3 396,342,2 641,672,2 847,180,5 289,471,4 851,968,5 180,1060,2 939,520,4 330,30,2 919,301,4 652,683,5 326,27,3 298,9,5 862,326,5 594,273,3 849,567,5 644,63,3 845,696,5 652,1138,3 552,0,3 579,120,4 613,275,4 871,273,3 561,203,1 765,480,4 785,180,4 804,21,1 583,221,4 853,1133,3 895,507,2 824,824,4 267,171,5 822,355,3 843,69,4 910,482,3 755,95,4 600,415,3 505,47,2 755,754,3 464,97,4 726,64,2 737,117,3 93,244,1 736,474,4 814,614,2 873,324,2 255,215,5 891,152,5 769,474,5 565,19,4 748,190,4 859,48,2 275,94,5 842,379,3 43,171,4 268,440,1 805,402,4 849,21,5 77,475,3 757,895,5 457,474,4 832,1028,1 853,63,5 654,637,4 847,508,4 6,663,3 932,764,1 475,383,4 248,301,4 832,232,2 654,479,4 750,384,4 181,0,4 538,530,4 335,1093,1 6,10,3 55,929,3 827,321,3 503,43,4 750,54,4 676,538,3 772,385,3 895,120,3 867,216,2 888,865,4 904,741,4 585,264,5 435,263,2 359,192,5 463,287,4 895,1436,1 704,70,5 825,308,4 794,6,5 665,292,3 915,69,4 864,20,2 620,232,3 891,131,5 193,709,3 344,411,3 623,328,3 932,160,2 544,204,4 748,1184,4 710,954,1 424,1012,1 711,842,3 425,403,3 918,139,5 507,174,4 941,49,5 78,274,4 765,185,3 874,173,5 536,106,3 647,768,1 853,483,3 575,323,2 424,95,4 342,386,4 647,46,2 550,379,3 898,30,3 906,317,5 880,356,5 895,807,3 858,14,4 907,422,4 626,270,5 845,467,4 197,366,3 755,194,3 711,190,3 726,178,3 720,879,3 536,722,2 867,745,2 436,1266,4 535,208,2 869,55,5 540,654,4 687,308,5 869,173,5 415,509,4 452,55,5 62,261,4 830,740,2 578,691,4 6,631,5 628,391,4 880,62,4 649,446,3 899,317,4 689,152,5 814,683,4 665,65,4 905,124,4 180,129,1 547,315,4 895,475,2 623,245,4 724,300,4 392,331,4 775,524,2 221,89,2 642,1015,3 845,484,5 908,13,4 795,796,3 654,236,3 589,274,4 405,471,3 827,25,3 371,52,5 780,285,1 822,27,3 621,172,5 737,127,4 304,58,3 740,49,5 845,207,5 748,530,5 880,450,1 455,738,3 931,639,2 502,65,3 905,116,4 473,481,3 828,318,4 621,479,4 832,234,4 565,48,2 504,327,4 314,740,5 795,117,4 266,553,3 798,291,4 681,875,3 642,508,3 923,1477,4 656,301,2 873,115,4 863,400,4 811,327,4 813,671,3 14,471,3 845,719,4 737,140,3 434,478,3 41,72,4 649,550,3 805,157,2 911,172,4 631,567,3 535,221,4 626,8,4 692,271,4 842,181,2 804,23,4 580,6,4 832,323,3 847,201,5 392,819,3 536,659,3 96,192,4 814,448,2 935,274,4 208,248,2 579,281,5 486,84,2 526,507,3 850,530,3 803,176,5 850,974,2 796,339,2 523,95,4 697,854,2 861,199,5 707,686,2 486,300,4 304,468,2 931,179,4 920,321,3 810,307,4 747,317,5 906,244,4 646,28,4 
560,76,1 245,251,1 874,288,4 703,13,3 641,774,4 689,628,1 846,78,4 642,431,5 389,299,5 748,283,4 91,401,3 816,257,3 889,631,5 900,519,5 446,180,5 697,489,3 910,20,4 860,178,1 915,762,3 638,661,2 652,204,1 787,355,4 930,254,4 607,126,5 822,194,4 584,508,4 862,329,2 101,510,3 845,55,5 664,88,4 818,301,5 853,454,2 867,163,2 497,409,3 748,94,3 478,583,3 830,357,2 460,747,1 276,1007,3 862,1430,4 760,1286,1 862,321,1 827,693,2 888,522,4 847,497,5 215,150,3 879,1164,2 757,142,5 495,195,3 757,216,2 479,55,4 158,297,5 593,268,4 468,151,4 598,244,3 787,1138,1 514,303,4 338,8,5 90,661,4 726,407,4 891,366,4 900,229,5 654,1232,3 523,68,4 711,414,4 850,839,3 839,69,3 755,49,4 891,57,4 895,87,5 871,755,4 769,274,5 714,594,3 915,227,3 870,189,2 881,747,5 757,312,4 585,1217,5 734,287,4 885,1118,4 572,422,3 850,335,4 665,955,4 781,354,3 920,120,5 338,210,5 10,430,2 434,201,4 710,960,5 659,448,3 185,176,4 849,704,5 566,433,5 764,1008,5 797,1075,3 473,651,4 781,1081,3 649,562,3 822,708,3 910,182,4 790,747,3 888,1015,3 828,426,4 536,318,4 428,411,4 644,71,3 843,172,5 867,152,2 243,27,4 384,377,1 415,570,3 874,194,4 738,97,3 895,635,3 795,181,4 748,397,3 531,1406,2 588,309,5 942,173,4 850,879,3 888,1096,3 586,680,2 850,1675,2 19,322,4 685,298,5 307,217,5 769,245,5 569,885,2 647,716,4 180,1352,1 654,453,3 839,172,5 853,234,2 867,381,4 876,206,3 649,230,2 330,305,5 866,1158,5 750,172,4 523,217,3 880,107,3 408,1327,2 848,132,5 613,507,4 748,76,3 822,214,4 488,894,4 824,1198,4 616,423,1 824,930,3 847,662,5 522,256,5 710,274,5 496,110,4 904,301,5 783,876,4 658,55,5 406,1262,2 827,1067,4 397,481,5 795,567,4 918,1133,2 888,641,3 634,330,4 794,381,4 895,886,2 5,305,4 834,649,5 436,225,1 862,900,1 673,110,5 326,392,3 895,574,2 436,14,4 621,78,5 837,171,5 866,473,5 652,131,3 838,257,4 862,318,2 6,196,4 151,943,4 663,30,4 611,863,4 886,442,4 629,281,3 510,1526,4 377,163,4 905,472,4 600,428,5 487,285,1 514,537,3 842,433,4 621,229,3 658,293,4 773,11,3 689,65,3 853,491,4 931,660,5 757,454,4 393,430,5 654,1085,3 879,46,4 692,217,4 129,329,4 621,1215,4 893,1114,4 656,300,3 882,739,4 114,643,3 48,146,1 240,285,5 833,345,3 636,146,1 787,384,3 552,608,4 847,488,5 936,13,4 647,574,3 933,673,4 159,407,4 748,539,3 424,434,3 814,601,3 248,426,5 918,530,3 787,588,5 803,581,3 869,314,2 773,830,2 307,355,3 665,1450,3 623,992,4 456,275,4 794,1412,3 505,136,2 706,479,3 885,590,3 659,258,4 867,782,1 20,872,2 642,503,4 888,630,3 832,292,4 720,291,3 726,190,4 532,402,3 931,440,2 877,497,4 879,393,3 864,626,1 295,193,5 392,410,2 453,983,3 585,575,3 728,309,3 715,558,2 191,8,5 879,27,5 659,172,5 668,510,5 446,410,2 523,63,2 31,627,4 523,207,5 670,173,5 450,323,4 459,303,2 723,267,4 12,444,4 937,472,3 896,183,4 462,1027,2 404,511,1 753,290,4 345,1010,1 710,650,4 939,357,1 888,131,4 912,497,3 654,685,2 853,105,3 404,541,1 915,483,4 505,585,2 473,1220,4 772,356,4 435,940,4 822,55,5 888,470,3 715,161,4 536,683,3 689,704,1 794,558,2 845,197,5 621,362,4 560,793,2 221,565,4 346,695,4 839,194,5 931,744,5 467,116,2 715,57,5 937,1027,5 652,174,2 918,0,4 532,280,4 587,712,3 895,894,2 654,926,3 834,285,3 278,1038,4 941,1049,5 745,422,3 93,1009,4 747,113,4 312,180,4 285,407,4 535,613,4 477,63,5 324,407,5 513,194,5 849,195,3 814,221,4 654,730,3 162,325,3 478,195,4 915,70,3 751,349,4 845,1529,2 698,476,3 515,514,4 777,754,2 806,1065,5 112,299,3 795,81,3 497,461,3 881,755,3 879,283,4 805,186,5 829,78,4 942,720,5 842,152,3 478,201,4 659,1064,2 748,183,2 880,830,2 833,315,5 273,595,3 820,844,5 482,514,4 534,185,4 739,872,2 245,55,1 488,879,2 931,1453,4 
932,1027,2 86,714,3 940,474,4 777,68,2 692,290,3 693,193,5 617,132,4 850,1275,2 534,186,2 185,1384,2 824,248,3 830,259,2 256,85,4 479,301,4 697,706,2 568,287,3 541,422,4 5,284,3 525,1006,3 523,525,3 542,187,4 461,872,4 189,236,5 233,630,3 915,1400,3 157,1302,3 845,512,5 550,1219,5 404,428,5 910,922,4 636,741,4 456,527,5 567,126,4 115,649,2 714,172,5 803,636,3 941,891,3 893,315,4 292,87,3 620,54,5 602,325,4 932,558,2 477,95,2 620,39,3 449,570,2 335,733,1 889,233,5 654,721,1 558,1555,3 373,456,1 542,264,4 750,430,4 895,72,3 845,43,1 756,561,3 931,429,4 814,216,3 901,496,5 888,236,4 131,99,4 576,558,3 209,172,4 372,683,4 592,741,4 675,293,4 356,747,5 869,1089,2 757,146,4 607,162,1 405,656,5 897,357,4 333,526,3 543,303,3 767,14,2 410,602,5 892,819,3 503,611,4 862,1679,2 793,13,5 144,337,3 846,425,2 497,63,4 869,468,4 942,420,2 379,78,4 726,1164,2 504,172,3 698,595,3 748,110,3 842,626,2 48,576,1 663,355,3 918,20,2 885,1064,4 658,63,4 813,233,3 704,861,1 858,256,2 792,234,3 825,21,5 789,943,1 652,627,4 827,23,4 814,670,4 485,268,4 485,882,3 391,836,5 832,105,2 653,844,4 263,274,5 532,482,4 839,418,5 103,677,2 775,439,2 497,6,3 896,422,5 534,210,4 938,1050,5 814,401,5 806,221,4 681,317,4 916,277,3 841,1394,4 245,734,4 886,469,3 406,150,4 784,317,4 843,567,4 453,497,3 17,173,4 871,312,5 888,267,4 714,121,4 871,927,2 880,391,5 889,49,5 235,332,3 845,692,5 803,291,2 845,264,5 550,325,4 631,876,1 837,69,4 882,10,2 882,602,4 114,184,5 862,333,5 935,128,4 874,54,3 12,548,4 436,22,4 748,585,4 746,503,5 879,684,4 895,429,3 884,581,2 845,771,4 180,1365,1 795,256,5 453,274,2 860,1008,5 773,7,1 663,641,4 373,805,3 941,96,5 895,1470,1 335,209,5 746,731,3 327,298,2 4,382,3 757,567,4 539,269,4 293,514,5 493,299,5 839,153,3 885,1018,4 727,318,3 128,309,2 463,509,4 899,482,4 732,1337,4 540,98,4 732,590,3 451,473,3 618,232,4 888,281,4 877,190,4 632,49,4 532,191,3 751,899,4 496,127,4 804,392,3 879,239,4 494,377,5 726,409,2 362,384,4 880,232,3 846,219,4 726,1024,2 822,186,5 936,92,4 531,446,4 584,1500,4 778,234,4 877,649,2 762,58,5 346,420,2 659,171,4 402,0,4 770,587,5 680,288,5 500,828,3 392,506,2 390,300,4 565,495,5 614,698,3 767,474,2 85,325,3 624,384,4 662,627,4 931,735,3 642,514,4 326,748,3 114,191,5 585,734,3 804,435,3 717,596,5 662,146,3 720,330,3 822,653,5 649,449,1 775,430,4 225,11,5 772,167,5 926,865,4 842,656,3 494,1187,5 429,522,4 706,424,5 895,483,4 726,738,4 670,569,3 647,383,4 628,85,5 551,125,4 523,309,4 710,661,3 384,29,5 891,116,4 825,1227,3 594,459,4 652,14,3 879,81,3 685,1,3 743,480,3 320,704,3 725,762,2 687,1126,5 505,442,4 863,289,3 872,285,2 867,1508,1 424,63,4 654,793,1 758,470,4 101,878,3 64,236,4 415,868,3 771,751,3 781,324,2 530,328,5 153,640,5 279,8,5 157,194,5 473,258,1 820,236,5 850,63,5 267,387,1 879,248,4 342,530,5 377,469,3 298,854,4 795,1296,2 666,659,4 675,891,4 436,155,2 902,527,4 404,1551,1 527,203,5 720,358,3 632,236,4 605,450,3 405,99,4 932,217,3 272,306,2 317,734,5 59,602,5 728,337,1 740,681,3 644,1158,4 863,560,4 188,178,5 620,720,4 872,288,2 677,99,5 112,332,4 532,579,3 832,88,5 232,303,5 441,171,5 686,293,3 659,799,2 567,426,4 707,14,3 706,110,4 285,2,2 94,237,5 867,578,1 769,332,5 421,294,3 879,455,3 610,341,3 876,726,4 681,715,2 621,256,3 184,479,4 708,143,3 467,530,4 732,276,1 888,356,4 842,257,4 920,928,1 942,160,4 444,993,1 221,627,5 730,483,3 261,215,3 591,46,5 275,96,3 652,692,1 660,0,5 864,545,1 839,49,4 942,719,1 477,761,4 653,24,1 787,585,2 806,171,5 83,755,3 868,514,5 789,160,4 380,195,5 771,747,3 310,226,4 649,354,2 715,259,1 869,339,3 500,695,4 835,321,2 
108,357,2 452,256,3 877,18,4 12,810,5 762,211,4 866,55,5 613,292,3 882,275,5 683,392,4 536,275,4 323,272,5 871,277,3 869,64,3 942,22,4 360,46,4 869,332,3 888,84,3 462,274,5 867,635,3 6,143,5 886,561,5 535,693,5 835,326,3 665,80,4 872,312,5 942,927,5 895,835,3 870,936,3 576,233,3 475,429,4 233,92,3 795,143,5 109,28,3 870,194,5 816,328,4 698,6,2 268,1004,4 844,1237,2 928,479,3 919,291,3 507,526,5 867,238,3 358,49,5 686,322,2 382,165,4 499,167,4 307,171,4 880,55,1 933,82,4 832,290,3 356,282,5 693,140,5 814,528,5 795,446,3 652,124,2 658,609,3 916,534,4 173,283,4 750,195,4 772,736,3 144,76,3 197,203,3 853,301,3 877,461,4 915,238,3 837,209,4 804,707,3 622,602,4 504,206,3 853,202,4 772,169,5 715,635,2 794,168,5 641,583,4 773,90,1 888,717,4 881,422,5 307,1120,3 584,639,2 296,207,4 589,1330,4 14,937,3 278,374,1 298,746,4 397,426,4 658,475,3 715,199,4 933,525,2 863,422,5 881,10,4 872,320,1 706,640,1 352,897,2 832,507,5 392,688,3 702,322,2 520,272,3 863,662,4 794,150,3 896,415,5 828,293,2 779,519,4 279,244,3 676,293,5 772,88,4 567,603,4 861,422,4 813,440,2 765,522,3 384,1534,4 806,825,3 793,115,5 487,78,4 804,203,2 715,208,3 42,1054,2 449,1296,4 642,173,4 787,719,3 868,252,4 503,120,4 828,258,2 679,13,5 886,287,4 61,672,2 665,96,4 23,222,5 879,618,4 436,960,5 751,330,4 756,203,4 659,280,3 300,23,4 84,473,5 879,245,5 846,173,4 647,742,1 845,513,3 455,366,3 695,285,5 853,123,5 830,904,4 845,440,4 863,236,4 906,646,3 762,82,3 803,93,4 670,545,5 379,178,3 917,416,2 827,69,3 891,624,3 344,11,5 775,90,4 711,237,3 785,545,4 745,384,5 824,981,5 915,1219,3 95,513,4 605,80,3 496,415,2 896,98,5 327,274,4 791,839,2 71,270,1 877,481,4 425,483,5 706,482,5 591,892,1 57,490,4 869,353,4 864,147,3 740,208,3 877,8,4 392,383,3 642,160,3 536,88,4 888,543,3 883,322,2 215,64,4 882,386,5 864,270,1 526,274,3 652,190,5 871,904,4 781,311,4 94,109,2 880,455,1 721,293,2 930,126,5 704,418,3 711,417,3 70,97,4 804,20,2 899,1131,1 392,747,3 494,632,5 579,287,5 619,81,5 899,870,1 255,983,3 835,291,5 795,448,4 492,1012,1 891,424,5 300,366,4 523,494,4 801,133,3 858,1047,3 455,190,3 869,264,4 557,8,4 882,518,5 757,92,5 703,299,2 523,268,4 420,524,4 644,653,5 880,191,5 850,1104,4 697,1019,2 58,379,3 125,285,3 525,284,5 253,111,2 888,407,3 886,283,4 885,684,2 222,476,3 825,848,4 920,214,4 303,878,3 900,258,2 847,392,5 485,546,3 404,702,2 864,221,2 909,297,2 437,300,4 348,545,3 885,495,4 808,244,3 452,281,4 24,78,4 832,174,4 814,194,4 882,795,3 84,1038,4 748,428,4 457,1038,5 516,327,3 576,182,5 698,470,3 751,353,2 592,120,4 285,472,3 863,691,2 862,349,1 436,1141,4 647,863,3 803,160,4 775,673,3 452,81,3 346,104,2 783,303,4 893,9,4 867,138,1 938,596,4 662,590,3 920,201,4 646,76,4 289,135,4 539,116,4 787,179,4 662,41,5 719,318,3 707,150,4 864,70,1 805,1097,4 861,518,4 644,29,4 666,267,3 649,314,3 768,221,4 565,692,5 312,215,4 665,497,5 882,1008,4 587,28,3 654,12,3 850,472,4 393,11,4 803,968,4 915,4,3 647,584,3 605,126,4 753,294,4 845,489,4 853,756,3 496,621,2 649,498,3 895,657,4 646,176,5 693,489,4 662,128,3 714,175,5 863,628,3 667,595,3 626,134,4 176,209,4 642,655,4 833,322,2 502,236,4 641,568,2 217,287,2 238,508,5 285,100,5 770,380,3 863,734,5 787,43,4 715,212,5 853,150,4 837,27,4 889,403,4 591,298,1 449,724,3 879,567,5 436,116,1 942,449,1 150,490,4 734,49,5 55,635,4 869,771,4 822,707,4 626,510,4 254,218,5 779,418,4 839,707,4 550,8,5 221,97,4 229,498,4 892,814,3 404,442,4 797,1284,3 888,32,5 736,31,4 845,629,3 853,149,3 730,63,5 747,270,3 749,326,4 408,889,1 7,242,2 885,1266,3 279,422,5 773,584,1 540,171,5 726,555,2 652,520,4 642,1,3 
745,21,4 664,545,2 733,27,4 863,8,5 884,661,3 882,660,4 578,209,3 394,514,4 565,233,3 409,302,3 345,719,2 701,750,4 451,152,4 415,630,3 787,1182,2 496,440,2 353,7,5 881,274,5 757,239,3 863,24,4 893,314,4 792,117,2 654,503,5 696,234,4 675,271,4 544,1187,3 231,171,4 112,741,3 633,545,4 505,711,3 885,201,3 877,193,4 931,645,4 706,386,4 755,738,4 867,144,1 654,675,2 900,661,4 654,1237,2 726,27,5 428,230,2 859,219,3 628,269,3 836,762,1 494,654,5 650,126,4 714,253,1 921,152,4 789,372,3 843,206,4 560,432,1 398,27,2 430,327,4 537,527,5 795,664,2 659,434,4 415,1336,1 474,268,4 842,434,2 154,299,2 815,327,4 674,85,4 697,434,3 839,185,4 532,52,1 485,1610,3 827,327,3 505,738,4 870,359,3 617,110,3 83,814,4 795,355,4 840,312,5 885,782,1 757,222,5 895,82,5 542,567,3 746,82,4 802,324,4 804,628,3 902,281,4 331,384,5 897,688,3 469,272,3 845,10,5 847,605,4 505,203,5 456,366,4 863,568,3 845,661,3 822,424,5 870,1430,4 415,519,5 502,346,5 387,293,4 20,14,4 832,520,4 763,1045,4 763,691,4 587,11,5 792,99,4 497,268,4 654,965,3 822,169,4 737,549,3 880,653,4 704,96,3 929,1314,3 649,624,3 567,1124,4 415,234,2 935,812,5 895,718,1 255,273,5 330,7,3 458,293,5 881,131,5 523,91,4 845,225,4 681,244,3 884,654,3 918,680,2 177,822,2 447,1061,5 748,226,4 864,117,1 832,233,3 619,171,4 206,17,2 748,762,1 307,448,3 792,239,4 879,379,3 326,174,2 827,546,2 522,862,4 302,1040,2 647,94,3 209,131,4 757,174,4 668,167,4 377,472,3 900,1040,5 400,583,3 781,1667,3 727,286,4 836,249,2 706,442,3 825,539,3 885,9,3 865,888,2 888,686,2 830,312,5 235,281,5 542,199,4 812,8,3 495,698,3 344,979,4 619,77,4 889,479,5 825,1408,2 672,306,3 882,723,4 391,603,5 453,686,3 829,228,2 758,747,4 325,198,5 176,143,5 331,349,4 883,267,4 94,945,3 456,224,4 298,407,4 150,1049,4 942,764,3 833,306,4 869,502,4 731,287,4 864,846,5 794,90,5 518,873,5 894,274,5 773,317,1 832,193,3 519,892,2 503,160,4 404,1563,1 806,587,5 373,219,2 386,587,3 879,6,3 734,763,3 379,195,4 925,339,4 71,196,5 814,0,5 709,201,3 534,613,5 644,505,5 377,63,4 532,291,4 268,10,3 761,933,1 310,130,3 140,741,4 797,471,3 845,561,5 86,1179,3 496,231,3 730,485,4 98,788,4 708,553,4 938,1276,5 898,120,5 915,176,3 499,9,3 920,227,3 715,229,3 750,171,5 641,367,4 888,30,3 302,686,1 397,606,3 855,288,1 885,725,1 494,417,4 935,117,3 834,1062,4 932,366,4 787,111,3 89,68,1 915,209,4 772,287,2 10,212,4 787,8,4 377,1091,3 373,1205,2 765,175,2 842,419,3 778,120,3 740,1015,3 804,450,5 536,732,3 503,663,3 941,356,4 839,579,3 904,128,4 922,290,4 335,24,3 89,752,4 787,878,4 906,282,4 931,967,4 122,186,4 933,1284,3 795,236,5 797,576,2 932,167,3 942,97,5 234,95,4 853,193,3 746,654,3 326,160,3 538,339,2 841,314,3 275,186,5 827,511,5 463,258,4 920,602,3 143,71,4 860,528,5 805,46,4 791,595,3 775,482,5 526,474,3 597,750,3 129,87,2 384,567,3 586,259,4 933,659,5 803,1138,3 708,848,4 17,611,4 757,104,2 233,654,3 842,472,2 665,652,4 784,55,4 902,178,5 313,1046,4 720,268,5 513,795,4 325,201,4 906,274,5 8,6,4 10,355,4 715,209,5 781,348,3 931,648,4 720,57,2 591,96,4 803,98,4 72,95,2 33,288,1 459,18,5 911,14,4 796,335,2 708,120,4 895,189,5 906,519,5 720,941,4 393,90,4 617,171,5 874,331,3 453,80,1 775,550,3 804,664,4 550,161,5 591,525,5 769,221,4 824,870,3 893,902,4 681,931,1 735,293,3 806,385,4 847,214,5 869,513,5 791,124,3 450,357,1 838,254,3 879,570,2 789,390,2 882,318,3 623,120,3 94,670,3 715,293,4 795,428,4 773,405,1 540,930,3 755,140,3 773,731,1 703,487,5 892,117,4 696,545,4 871,825,3 273,233,5 111,887,4 446,49,5 824,117,4 739,331,3 804,12,3 839,527,5 860,581,2 768,117,4 843,254,3 853,1225,4 485,105,1 882,1591,5 223,923,3 
186,709,4 737,251,4 609,479,5 847,165,5 425,135,4 886,596,5 681,27,3 903,215,4 764,506,5 537,275,1 839,518,5 587,177,5 467,64,3 646,1062,3 869,581,5 605,283,4 522,431,5 523,301,5 621,762,4 923,6,4 312,175,4 932,150,4 127,274,5 765,225,3 451,1254,2 876,548,4 188,27,4 423,968,1 879,48,3 915,560,3 187,634,2 344,243,3 665,380,3 314,162,3 939,791,2 732,272,4 885,25,4 664,95,3 720,320,3 893,178,5 93,187,4 555,285,4 93,158,3 847,526,3 885,80,4 765,193,3 604,244,3 346,10,5 278,546,1 659,392,2 654,461,3 750,630,5 413,312,4 803,372,2 842,49,3 715,182,2 641,724,4 434,443,3 864,454,4 762,170,3 506,1015,5 80,716,2 847,432,3 789,560,3 804,508,5 392,981,3 0,125,2 602,209,4 698,682,3 362,1055,4 904,115,3 616,652,4 213,181,4 900,27,5 543,269,3 342,71,5 626,124,2 886,411,5 307,503,4 827,58,5 295,468,5 806,1038,4 885,78,5 895,1003,2 14,321,3 888,1141,4 881,738,4 737,312,5 642,567,4 849,49,5 388,418,3 404,1191,1 711,761,4 832,78,3 689,147,3 292,84,3 5,495,4 620,1015,4 691,475,3 404,212,2 554,88,4 797,419,3 444,1533,1 896,97,5 27,443,3 941,1027,4 372,169,5 697,182,3 446,470,4 937,110,5 485,250,5 858,281,3 853,1015,2 861,520,5 495,251,2 663,448,2 833,543,4 567,734,2 693,137,3 390,227,2 875,288,3 843,98,3 782,298,5 675,479,5 832,459,2 747,96,4 915,67,3 847,804,5 522,693,5 794,122,4 839,215,4 360,201,3 488,359,5 792,272,3 863,274,4 917,132,1 869,179,3 451,195,4 566,611,4 17,211,5 839,515,5 247,342,4 666,8,5 683,171,5 605,97,5 850,159,5 310,240,3 398,540,3 868,124,3 665,810,4 166,673,2 405,123,4 773,409,1 234,969,4 664,254,4 454,462,4 651,274,4 806,519,5 658,217,4 654,450,3 647,595,3 266,1239,5 405,482,4 881,210,4 642,66,4 641,733,3 653,273,4 333,235,4 689,780,2 787,714,3 904,747,2 0,82,3 879,576,3 827,1061,4 750,427,4 517,743,4 795,268,3 869,762,4 379,609,2 300,424,4 885,194,4 485,712,3 868,115,4 681,236,3 935,747,2 826,268,5 9,22,5 109,575,2 565,143,3 420,268,3 879,778,3 423,689,3 708,558,3 851,258,4 853,327,1 921,475,1 540,500,4 100,762,3 895,27,2 787,654,3 933,174,4 779,209,5 90,299,4 542,468,4 10,172,5 685,653,5 716,741,5 704,82,4 905,741,3 888,53,3 764,9,4 896,229,4 729,293,4 861,95,4 781,1392,2 842,6,5 275,171,5 703,213,2 95,195,4 333,120,3 159,58,4 891,635,4 681,1046,3 760,747,4 320,418,4 503,291,5 619,594,5 587,87,5 715,273,5 740,195,5 781,533,3 233,1448,4 892,150,4 880,684,2 843,82,5 895,1193,3 895,1041,2 910,92,4 245,596,2 942,558,4 924,562,2 933,189,4 578,69,3 639,32,3 893,60,4 789,1187,3 547,299,5 907,46,3 832,1385,4 591,297,5 748,3,4 845,372,3 90,293,3 654,533,2 877,731,4 757,81,4 888,176,4 822,624,4 57,353,3 848,233,5 532,738,5 229,142,5 888,472,4 631,172,5 267,407,5 881,78,5 789,686,1 886,239,5 933,301,4 748,47,3 805,5,2 427,287,4 885,465,1 929,236,3 649,490,3 331,275,3 874,134,4 661,1341,4 882,970,3 885,448,3 538,58,5 278,283,1 404,1418,2 731,242,5 342,21,4 263,429,5 226,318,4 765,1297,3 892,257,3 787,552,3 623,1011,4 596,274,4 697,88,4 845,429,3 681,731,3 879,476,3 880,526,3 232,659,5 478,198,5 565,63,5 797,584,3 888,90,4 182,738,4 449,321,4 924,244,3 900,120,4 809,322,4 585,181,3 939,115,2 862,358,3 25,759,1 698,877,3 327,650,5 471,264,4 505,62,4 533,108,4 640,269,3 455,1603,4 434,16,2 295,249,2 822,791,3 896,94,3 744,181,2 871,819,3 772,89,4 895,1213,2 304,65,3 794,819,3 867,199,3 89,214,2 525,149,2 839,99,5 639,590,4 528,300,4 788,1006,4 268,602,5 693,70,4 803,152,4 846,24,3 938,219,5 398,461,3 539,1015,4 300,257,4 539,470,4 706,366,4 900,476,3 783,299,4 870,171,5 344,468,5 898,201,4 920,239,1 779,704,5 832,54,3 313,584,2 131,663,5 6,210,5 628,237,5 634,245,5 898,152,5 552,630,5 726,66,4 
504,10,4 879,104,3 851,24,3 559,120,3 701,270,1 322,63,5 776,222,4 789,115,4 637,187,3 702,14,5 884,168,5 825,1239,5 895,201,2 591,470,4 757,310,4 708,272,4 939,320,4 428,283,3 187,49,4 931,614,5 937,404,3 935,1367,5 30,610,4 642,6,4 853,0,3 863,471,4 620,1035,1 668,191,5 745,120,3 649,822,3 787,497,5 523,115,4 239,285,5 670,287,5 281,357,3 923,321,2 915,162,3 746,153,3 666,282,4 540,203,4 536,29,3 624,24,2 839,44,4 711,1054,4 471,199,4 496,540,4 532,448,4 129,770,2 536,210,4 42,72,4 824,677,4 770,133,4 896,210,5 795,564,3 843,54,4 862,299,5 878,254,4 879,63,5 888,168,5 495,425,3 793,108,4 882,314,3 193,222,4 455,1100,3 450,1294,2 239,301,5 301,270,4 776,237,4 675,1233,1 605,650,4 893,278,4 814,70,5 116,251,3 458,306,5 895,819,2 898,47,4 755,322,3 896,549,3 696,368,5 753,292,4 626,527,4 513,897,2 843,920,5 304,662,3 845,1123,4 64,470,4 872,878,2 456,124,4 220,750,4 502,123,5 416,417,4 832,134,4 756,171,4 646,196,5 762,223,5 706,701,3 832,226,2 428,135,4 100,830,3 459,305,4 91,62,3 10,392,4 747,167,3 888,427,4 843,44,4 893,507,3 594,1027,3 863,64,3 255,186,3 404,1543,1 832,744,4 842,163,3 61,930,1 587,167,5 910,506,4 424,10,3 862,682,1 559,1214,2 415,77,2 921,661,3 15,479,5 448,461,5 689,392,4 741,13,5 781,269,4 839,483,5 746,810,3 900,154,5 898,221,4 578,392,4 861,646,5 942,120,3 526,877,1 621,94,4 915,930,1 401,117,4 449,110,4 797,80,3 860,318,5 886,1028,5 470,431,1 394,749,5 887,99,4 921,273,3 12,241,2 620,1184,3 335,590,5 453,610,2 233,279,3 747,602,5 504,180,3 653,587,4 285,234,4 278,258,3 805,270,3 933,505,4 861,237,4 888,191,3 451,93,1 252,517,5 887,643,4 837,918,5 483,719,4 267,1109,3 180,1078,1 386,517,4 317,186,4 853,619,2 410,78,4 654,358,3 774,312,4 164,1118,3 531,281,5 895,1239,4 898,290,4 175,1096,4 278,1480,4 912,167,4 17,603,5 869,82,4 485,543,4 681,683,3 757,894,4 859,203,4 822,639,1 600,920,5 832,435,2 893,147,3 458,126,4 654,75,3 496,742,3 550,53,3 896,377,5 877,268,4 647,216,2 837,257,5 879,875,4 759,110,4 846,446,3 930,282,4 647,628,4 12,904,2 828,285,4 734,320,3 349,426,5 867,853,4 914,333,3 653,254,2 933,256,4 817,268,3 932,210,4 896,478,4 795,477,5 806,830,4 803,237,4 785,848,2 902,88,4 853,712,4 850,832,3 803,215,4 850,339,5 902,641,4 451,519,3 567,422,4 735,747,2 439,461,5 832,23,4 547,274,3 10,450,2 654,42,3 797,190,4 669,704,5 941,321,3 473,662,4 629,1196,3 756,26,4 73,330,4 664,32,2 395,280,3 881,404,4 523,134,3 814,659,4 544,154,3 540,992,4 757,215,4 179,39,4 405,603,3 93,473,5 654,843,4 524,281,4 863,54,4 762,731,3 846,288,5 884,273,5 12,175,3 915,422,3 902,110,3 215,128,4 838,812,4 536,301,4 714,94,4 424,1128,3 652,95,4 805,229,4 755,21,3 884,567,4 405,530,3 751,750,4 912,131,3 896,863,4 568,470,3 888,430,4 930,299,5 797,497,3 746,287,4 111,331,4 935,925,4 838,6,2 886,21,5 267,718,1 895,142,4 665,659,4 559,167,4 587,306,4 330,0,1 533,596,5 532,254,2 303,322,3 803,392,3 886,221,3 804,213,2 659,216,2 887,152,4 916,286,4 746,366,3 264,974,4 869,210,3 585,116,4 22,180,4 845,1217,4 795,390,4 522,41,3 881,55,4 706,631,4 801,357,3 871,271,4 325,93,4 652,669,1 876,287,3 547,253,1 787,481,4 718,654,4 618,120,5 930,895,3 773,567,2 719,320,4 641,968,2 755,1148,5 850,265,3 818,245,4 733,143,2 765,39,3 921,174,3 866,495,5 160,176,2 879,955,3 730,124,3 661,245,5 814,944,4 937,297,4 846,55,1 653,145,3 772,41,3 923,204,4 336,66,4 842,209,3 806,27,4 827,5,1 839,1213,1 405,505,4 279,69,4 891,612,5 795,264,5 631,200,4 906,82,5 145,293,1 889,0,4 885,177,5 770,476,5 619,405,4 58,525,4 404,1573,1 853,631,4 610,1242,3 838,1047,1 805,226,2 712,751,2 839,660,5 748,160,3 476,545,4 
893,1403,3 197,88,5 863,195,4 567,487,5 625,677,1 619,93,5 873,304,4 750,213,4 631,522,3 825,577,5 639,1,4 6,568,4 939,150,3 663,565,4 746,1019,4 847,107,5 807,750,3 770,85,5 48,651,5 668,21,3 923,495,5 838,117,2 845,704,3 858,274,3 547,346,2 482,100,2 462,312,4 879,259,4 821,90,3 654,14,3 637,201,3 756,575,3 868,1060,1 616,495,1 781,1377,2 888,173,4 229,421,3 72,182,4 901,171,4 658,1063,5 488,687,2 898,254,4 853,651,3 882,99,4 801,287,3 789,927,3 754,257,5 502,210,5 398,819,4 403,937,4 302,654,5 697,9,4 932,466,3 278,832,4 853,55,5 863,238,4 663,430,2 890,110,3 804,1628,5 696,279,3 543,876,2 745,131,4 654,802,3 637,210,4 613,288,2 885,93,4 922,104,4 893,528,4 509,747,3 814,81,4 779,203,5 875,434,4 797,393,4 937,828,1 486,203,4 617,754,2 842,1064,3 785,197,5 36,232,4 602,249,5 942,401,2 882,1447,5 805,1047,3 882,955,4 568,300,4 752,271,4 880,410,3 526,278,4 523,650,4 805,44,4 939,172,4 560,506,4 545,4,5 275,684,4 726,587,4 534,483,5 772,108,4 452,762,4 829,624,3 452,450,2 654,1097,3 458,404,3 326,82,2 307,222,4 621,540,2 853,1334,2 141,90,5 393,122,5 826,244,3 800,301,4 861,824,5 835,530,4 722,167,5 806,469,5 183,191,4 918,557,5 127,379,4 610,905,2 834,190,4 698,285,3 749,324,1 837,704,5 488,346,5 41,167,3 863,4,4 220,398,3 499,312,3 93,85,5 273,755,3 746,209,4 436,601,3 585,779,4 43,21,4 860,285,4 497,429,4 746,237,3 885,402,4 795,53,4 789,664,3 806,401,5 826,293,4 832,505,2 338,52,4 592,110,5 879,1257,3 739,339,4 639,579,5 233,370,3 879,237,4 827,581,3 452,451,2 326,395,3 732,457,2 744,49,2 834,14,5 803,97,5 795,225,3 848,675,5 870,299,4 888,614,3 832,1142,4 685,541,1 773,182,4 249,258,1 592,761,4 575,123,4 696,243,5 826,287,3 769,249,5 882,1461,5 448,514,5 290,94,4 765,196,3 550,384,5 718,283,2 436,475,4 902,692,5 108,21,4 345,659,2 467,646,5 496,943,3 773,186,3 404,662,2 898,422,4 888,574,3 58,80,4 805,407,5 920,483,3 789,195,3 450,947,3 879,207,5 136,78,5 121,518,4 672,320,3 480,201,4 806,205,2 822,1045,3 789,71,2 278,130,1 869,167,4 871,267,1 484,888,5 805,75,3 862,905,4 886,171,5 757,52,4 763,49,3 302,76,4 20,126,5 907,526,3 591,970,4 428,191,3 900,1388,5 850,1279,4 516,739,4 605,322,4 853,521,2 566,78,2 274,229,3 769,116,5 559,497,4 867,217,3 902,46,5 926,404,5 934,404,4 661,99,5 897,346,3 789,1164,2 405,175,5 858,475,5 626,1043,2 516,293,1 512,209,5 405,568,3 748,37,3 256,284,5 787,560,3 715,283,3 531,530,5 931,862,4 867,495,2 174,868,3 118,314,5 912,417,3 193,317,5 666,181,5 707,992,4 932,175,3 693,498,4 886,1238,3 12,589,2 523,385,4 902,442,5 560,747,2 395,117,4 132,354,2 902,195,4 867,366,2 866,203,4 494,1132,3 861,519,4 906,283,5 861,9,5 869,287,4 681,1038,4 659,71,3 498,327,5 267,184,3 794,545,3 681,728,3 746,135,5 869,1020,2 915,763,3 707,298,1 832,860,3 862,907,1 473,184,5 585,16,5 713,257,4 850,47,4 796,308,3 343,289,2 320,220,5 123,0,3 693,473,4 884,684,3 212,211,4 513,133,3 639,1257,3 915,170,4 869,134,3 72,284,4 879,844,3 350,897,5 536,701,3 667,346,4 882,201,4 789,1,3 748,213,3 795,235,4 827,1461,3 921,175,3 329,478,5 577,345,3 838,263,3 797,414,3 795,1045,3 867,161,3 180,244,2 886,931,2 243,743,3 869,181,5 791,362,3 755,7,4 902,179,5 104,750,2 850,55,5 177,530,4 904,236,3 483,741,3 585,1045,3 866,11,5 91,68,5 884,583,3 888,68,3 618,390,3 0,230,1 588,333,1 183,1085,4 436,195,4 628,1037,3 906,259,2 900,863,5 879,1138,4 806,541,5 785,172,4 681,173,4 915,427,4 718,126,3 617,55,4 879,1445,4 508,327,1 602,227,3 941,182,3 532,712,2 311,601,4 392,76,3 869,478,5 462,128,2 888,846,4 886,201,5 812,270,4 899,404,3 723,750,2 199,21,4 757,651,5 806,135,5 654,175,2 877,178,4 
711,392,3 346,747,2 870,209,5 889,519,4 922,167,5 872,268,2 740,78,4 898,143,3 921,154,2 871,891,3 706,14,4 328,293,2 647,404,4 648,677,3 143,293,4 406,1040,3 416,770,3 877,167,4 893,882,3 413,894,4 512,738,5 371,843,4 832,171,2 532,479,4 484,301,5 535,499,4 324,1117,3 915,264,4 646,299,4 681,240,4 880,225,3 789,158,3 66,272,4 867,215,2 618,405,2 621,449,1 888,553,4 911,245,2 716,311,5 541,86,3 891,481,5 473,54,4 726,608,3 713,1151,2 761,708,3 408,205,4 898,430,1 527,525,4 893,1257,3 897,242,1 342,662,5 832,429,4 209,113,4 795,478,4 882,1226,3 795,62,3 832,181,5 748,422,4 869,840,2 801,216,3 758,244,3 621,152,4 867,236,1 471,90,5 839,181,4 777,116,3 882,385,3 866,285,5 485,888,4 759,775,5 421,562,3 833,332,5 362,231,2 834,487,5 542,1415,2 850,1375,2 327,577,2 114,217,3 665,649,5 709,478,5 591,682,1 920,923,3 346,3,4 300,89,3 86,133,4 906,1325,4 916,590,3 48,288,4 789,958,3 452,236,4 756,558,4 453,587,3 845,175,4 275,88,5 654,121,2 568,454,3 672,309,5 869,424,4 766,21,4 800,751,4 746,791,5 322,1072,4 723,337,3 452,1036,1 713,99,1 681,671,2 794,200,4 231,267,4 9,699,4 215,133,4 803,1064,3 654,822,2 737,49,5 913,1405,4 861,649,4 797,209,4 787,240,5 662,894,4 866,131,3 398,327,4 574,356,5 478,470,4 851,234,4 473,27,4 895,52,1 532,683,4 861,185,3 857,332,4 681,158,3 895,759,2 814,257,4 97,658,5 915,54,3 404,639,1 840,305,4 891,120,4 914,344,4 654,774,2 850,911,4 933,211,4 408,602,5 926,421,4 714,1216,2 880,479,4 787,626,4 399,342,4 918,431,4 716,545,3 879,282,3 893,125,3 937,405,3 454,923,3 804,345,4 285,628,5 464,150,3 879,37,3 591,426,5 523,432,5 711,1035,5 795,133,3 90,526,4 663,481,5 502,171,5 592,844,3 804,677,4 473,677,2 233,661,3 457,136,5 683,584,2 828,189,4 547,471,2 882,184,5 847,434,3 843,863,3 115,325,2 296,33,3 412,282,5 929,152,2 258,172,4 403,285,1 173,581,4 378,264,4 513,264,4 765,615,3 896,509,3 773,68,4 637,454,3 863,398,4 906,143,5 886,224,4 863,95,5 486,69,3 918,322,4 901,301,3 846,368,1 676,128,5 725,254,2 467,247,4 639,95,5 654,824,2 666,314,4 607,78,5 404,667,1 404,1534,1 415,268,4 718,184,4 830,283,3 555,426,5 642,214,3 899,601,1 267,434,4 483,683,5 445,288,3 850,753,2 535,85,3 845,138,2 366,564,2 623,301,4 781,537,4 815,341,4 584,556,4 845,390,3 614,214,4 615,244,3 757,128,4 645,293,2 889,588,5 296,478,5 911,442,4 921,426,5 331,551,3 795,22,2 410,88,3 108,475,3 550,154,4 879,147,2 755,142,5 108,830,2 186,1064,4 891,191,5 566,422,2 865,268,3 782,285,3 388,160,2 898,545,2 636,590,3 882,633,3 404,732,1 91,925,3 929,125,5 752,749,2 822,160,3 662,97,5 462,245,4 706,1203,3 895,316,4 625,301,4 726,221,3 392,892,3 520,146,4 871,116,4 449,285,4 404,26,1 434,929,3 400,124,3 47,649,3 436,99,4 879,654,4 404,1265,1 932,134,4 447,1021,5 839,615,5 715,236,5 891,416,3 339,210,3 457,198,4 781,878,3 640,335,3 435,692,5 845,435,4 691,1131,4 859,1046,2 118,195,5 757,474,5 716,312,5 879,69,4 822,293,3 848,196,5 760,116,5 390,651,4 473,426,5 863,799,1 22,602,4 895,424,2 714,272,5 649,270,3 839,117,3 895,370,2 915,201,3 404,724,1 915,1207,2 534,1062,4 881,0,5 805,287,3 658,488,4 592,7,3 775,240,1 778,0,4 589,283,2 0,203,5 654,1444,3 591,422,5 177,880,2 600,81,1 784,747,3 910,192,4 895,425,2 565,21,3 435,468,3 654,317,4 876,552,4 896,435,4 465,3,3 869,377,3 891,67,4 98,97,5 639,80,5 922,454,4 333,129,4 4,232,4 628,424,3 311,188,5 864,500,1 853,761,2 533,684,3 863,196,4 723,1590,1 879,824,4 406,567,2 920,303,2 394,747,3 693,434,4 666,693,4 362,152,3 838,116,5 411,430,4 706,12,4 842,615,3 275,268,4 486,21,5 880,57,3 636,234,1 563,280,3 69,738,2 916,245,4 848,420,5 893,753,4 243,64,4 683,380,2 
888,483,4 609,606,5 270,215,5 859,301,4 89,275,4 328,656,3 746,94,3 926,71,5 920,327,5 674,243,3 882,23,4 839,13,5 827,85,3 895,553,2 912,741,3 795,27,3 619,741,5 475,200,4 891,221,4 596,741,4 787,983,3 665,515,5 706,1020,3 714,267,4 91,146,2 787,150,1 824,739,2 310,80,3 879,780,3 615,306,2 817,302,5 863,754,4 828,309,3 616,445,2 262,377,5 895,153,3 681,127,4 801,218,5 404,1544,2 721,309,4 865,899,4 325,647,5 885,200,3 902,128,3 604,134,5 576,822,3 537,181,4 896,87,4 882,256,5 473,142,4 388,481,5 379,233,2 861,58,5 638,693,5 320,132,5 614,1064,4 233,983,2 837,203,4 853,605,4 124,1036,2 428,41,5 326,465,3 842,157,2 620,417,3 629,819,4 933,236,4 942,67,4 641,368,2 266,209,4 459,287,2 772,674,5 499,173,2 329,63,5 912,612,5 763,150,4 862,875,2 670,510,3 381,150,4 726,383,2 787,175,5 177,294,3 934,814,4 746,465,3 344,392,3 344,677,2 842,689,5 842,143,3 910,185,5 540,811,3 939,745,3 870,661,3 862,258,1 932,709,2 402,759,1 464,867,2 754,322,4 882,315,5 773,179,5 537,207,3 38,287,5 926,622,3 605,229,2 715,467,3 867,639,5 915,416,2 20,263,3 939,315,4 795,203,5 879,779,3 534,179,4 547,749,4 863,659,4 881,840,1 688,116,4 757,736,3 467,285,4 373,1193,4 652,198,4 942,1027,2 471,21,5 451,514,4 715,1112,4 221,430,4 891,400,3 723,241,1 801,183,4 890,106,5 814,1298,3 609,515,3 278,387,3 91,630,4 845,954,3 292,175,4 789,1214,1 853,172,4 623,297,4 937,470,3 416,822,2 938,126,5 886,754,5 836,327,4 924,772,1 677,116,4 699,47,4 879,1214,1 845,60,3 814,226,2 204,293,3 200,216,3 319,469,5 902,581,3 822,78,4 652,62,2 899,30,2 706,196,4 862,330,4 592,282,4 869,1229,2 806,297,4 795,338,2 235,594,3 880,7,4 842,439,1 12,361,4 918,236,4 884,522,3 918,714,5 496,773,4 715,434,4 746,603,5 882,731,3 292,236,3 874,602,4 884,224,3 847,660,3 693,426,4 693,481,5 761,331,1 741,293,3 653,595,3 866,202,4 888,685,3 416,150,5 733,464,4 658,505,3 814,8,4 724,880,5 416,536,4 866,30,5 326,120,2 662,327,4 386,366,3 649,356,4 830,236,4 294,231,3 846,124,3 109,1227,3 706,862,4 714,479,5 341,181,5 590,1119,4 255,988,5 503,503,4 630,287,3 781,535,2 610,271,5 859,507,4 795,317,4 759,299,1 626,575,3 748,173,5 656,116,4 12,797,2 895,236,5 811,872,4 921,1109,4 920,65,5 555,301,4 915,75,3 693,204,5 215,46,4 885,47,4 820,110,4 605,254,5 787,648,3 867,1284,2 899,743,2 775,1218,3 390,695,4 757,130,3 867,450,2 659,357,2 587,65,3 896,1050,3 885,267,5 430,331,3 444,55,5 737,257,4 829,553,5 876,87,4 803,596,3 906,1039,5 388,429,5 641,418,4 756,826,3 751,347,4 832,1427,3 926,256,5 678,526,4 605,7,2 880,215,4 633,293,4 362,115,4 151,318,2 891,611,5 704,754,5 720,728,3 346,146,4 715,497,5 706,105,3 891,738,4 550,263,3 449,723,5 884,68,4 932,156,4 696,286,4 716,814,3 751,908,3 529,219,5 869,639,3 900,81,5 827,206,4 426,330,4 869,683,3 351,173,5 806,1077,4 539,404,3 177,132,4 532,226,4 452,271,5 846,1166,5 615,361,3 882,317,4 801,768,5 618,160,4 83,407,5 541,0,4 880,187,4 684,871,2 405,630,5 173,933,4 839,472,5 180,119,1 880,413,5 592,684,3 307,524,5 775,666,2 842,178,4 483,232,5 451,515,3 924,326,3 491,152,4 193,789,1 915,708,3 850,983,3 897,333,3 268,709,1 842,174,4 226,286,4 278,450,1 678,108,3 794,11,4 942,75,4 652,754,2 850,230,4 499,288,4 873,345,3 708,230,3 933,155,3 845,664,4 158,253,3 912,517,4 915,789,2 845,698,3 886,654,1 313,7,4 806,502,3 895,138,2 610,304,4 915,432,3 113,134,4 654,1111,2 845,473,5 275,199,5 75,805,4 341,136,2 746,319,5 760,456,1 143,526,5 341,245,4 935,826,2 485,324,2 599,577,2 693,494,4 535,27,5 615,936,4 866,482,5 470,171,4 906,868,5 535,196,3 773,384,1 933,500,4 941,1203,4 797,253,5 747,208,4 927,1024,5 801,97,4 568,276,2 
910,481,4 542,152,3 757,501,4 614,268,4 62,256,3 725,408,3 591,248,4 536,632,3 715,734,5 935,404,2 456,659,5 293,333,4 797,1031,3 917,706,5 679,49,5 889,446,3 378,402,4 880,1088,1 895,203,4 607,689,4 732,1010,4 697,126,4 209,650,4 177,231,5 896,442,5 907,524,4 757,1134,2 915,180,4 571,285,4 485,1015,2 259,306,3 795,513,3 789,69,3 880,1027,3 698,984,3 449,456,2 757,10,3 621,228,2 929,106,3 760,1151,2 803,741,4 654,673,3 268,370,5 902,522,5 478,334,3 647,683,4 649,322,3 550,81,5 697,612,5 384,250,2 891,421,1 566,198,4 923,561,3 895,590,3 687,876,5 72,128,4 929,870,3 891,273,4 915,71,3 762,59,5 832,133,5 775,94,4 918,24,4 669,518,5 912,183,3 377,931,2 710,217,4 657,23,3 435,143,5 915,95,3 863,714,4 942,121,1 740,225,2 491,650,3 929,299,4 870,351,3 245,110,3 764,846,4 453,301,4 851,0,4 326,23,2 185,330,3 839,201,5 917,1136,5 908,260,5 604,0,4 868,275,4 398,0,4 843,431,5 706,1060,3 931,1020,4 649,578,3 362,179,3 931,515,5 765,190,4 27,195,4 654,606,4 122,133,4 81,117,3 891,767,4 183,836,3 98,293,4 825,270,4 373,817,3 833,750,3 441,68,3 582,482,5 756,322,3 893,878,4 631,72,3 832,1209,1 864,1046,1 457,78,5 0,2,4 302,283,4 853,513,4 245,163,3 879,40,1 828,128,4 935,272,3 105,8,4 879,173,4 863,731,4 877,513,4 706,717,5 453,94,2 591,876,2 624,94,3 931,140,4 895,202,5 803,572,3 694,263,1 889,264,2 658,501,4 654,29,5 464,844,4 877,528,5 86,151,4 465,908,5 631,53,3 929,44,4 936,257,4 795,768,4 895,22,2 428,684,3 781,887,3 931,658,5 904,272,3 346,185,5 395,270,4 804,635,4 884,416,3 532,110,4 781,1387,3 268,805,3 380,1059,5 746,285,4 307,823,3 787,185,3 306,108,5 495,415,1 584,274,4 847,169,5 832,191,5 903,780,4 662,520,3 594,978,3 428,172,4 327,236,4 882,288,5 654,294,3 770,285,2 888,12,4 929,174,2 720,300,4 918,831,3 906,1220,5 891,80,3 4,199,2 727,242,2 472,474,5 109,650,4 895,581,2 876,163,5 654,112,3 765,971,3 866,1038,5 553,727,3 877,317,5 293,0,5 652,470,2 933,171,5 649,72,3 552,1193,5 684,332,1 759,236,3 803,519,4 890,741,4 880,186,4 775,669,3 733,723,3 158,1151,4 696,1024,2 861,495,5 720,63,4 921,746,3 885,233,3 826,315,3 814,1156,2 457,337,3 914,314,4 89,59,4 617,486,4 93,337,4 921,293,4 825,264,5 42,579,3 696,125,5 729,534,2 58,945,1 641,244,4 468,606,5 920,379,4 370,65,4 656,743,4 663,227,4 942,484,5 600,259,4 539,1010,4 935,116,4 726,134,2 804,997,4 534,692,3 619,595,2 746,497,5 477,707,3 895,1230,1 645,1175,4 362,255,3 931,187,3 895,84,3 210,116,4 931,195,4 888,561,3 592,48,3 327,326,3 936,407,5 90,473,3 842,560,4 888,86,4 745,596,4 82,150,3 757,745,4 921,21,5 644,954,4 918,221,3 349,602,5 942,755,2 915,635,3 536,949,3 874,55,5 900,90,1 590,65,2 834,233,5 860,739,4 819,332,5 788,590,3 48,788,4 669,221,4 703,97,5 662,299,4 885,126,4 692,185,2 302,630,4 516,747,4 763,24,2 906,688,4 939,167,3 912,68,2 166,85,4 772,729,3 880,411,1 887,273,4 453,326,3 886,8,2 927,265,5 624,653,3 746,478,5 822,88,5 392,1408,4 762,134,5 166,136,5 842,171,3 594,870,2 861,173,5 585,52,5 40,968,4 797,173,4 346,545,4 314,672,4 859,515,3 12,836,4 710,402,4 692,631,5 926,416,4 893,1022,3 930,251,3 706,56,4 505,1062,5 666,486,5 886,124,5 893,294,3 388,273,4 750,331,3 313,143,3 404,371,1 715,649,3 552,49,4 836,283,1 772,92,3 863,117,4 267,455,2 616,610,4 794,715,3 935,1240,4 300,225,5 912,267,2 898,110,4 808,1024,1 445,337,2 534,196,5 631,227,3 653,82,5 48,98,4 441,1073,3 740,366,2 456,293,2 607,58,5 866,50,3 838,99,3 649,205,4 879,1180,3 521,167,5 40,172,4 877,87,4 91,211,4 869,94,4 492,973,3 532,116,5 804,3,2 851,121,1 880,139,2 805,855,5 177,68,5 591,974,4 917,24,4 693,99,4 923,700,4 748,747,3 263,233,4 830,143,5 
898,662,4 884,142,4 520,126,4 824,1253,1 623,234,4 91,259,1 713,1013,3 642,54,4 772,6,2 300,287,4 178,270,1 353,291,4 850,91,5 607,287,5 547,290,5 895,467,2 781,876,3 617,722,3 746,581,5 803,981,4 632,96,3 569,357,2 940,256,4 223,543,1 292,1263,3 915,696,4 572,653,4 744,13,3 203,268,4 641,1132,3 804,451,3 415,1151,4 895,233,4 757,180,4 310,435,3 930,908,5 677,1128,1 324,70,3 497,250,3 326,267,4 43,317,5 503,502,4 600,364,3 805,1009,3 654,552,2 415,277,3 258,747,4 851,826,2 81,482,5 789,545,1 886,475,1 751,332,3 392,309,4 825,38,4 669,244,4 903,287,4 652,406,1 936,285,4 918,97,5 233,164,5 737,950,2 223,401,5 266,187,5 895,745,3 935,1189,3 544,141,3 449,258,3 880,208,3 806,141,3 765,517,3 863,173,5 882,462,3 517,239,1 675,325,2 867,842,1 730,27,4 806,101,4 790,305,5 266,134,5 865,343,2 865,346,4 689,992,3 867,425,4 576,317,5 769,293,3 824,290,5 157,513,3 58,115,4 830,478,4 585,240,4 64,65,3 839,214,4 806,1408,4 748,325,4 499,248,3 688,236,3 746,63,5 863,654,4 474,326,4 678,99,3 845,173,5 862,343,4 835,209,4 828,1119,2 520,209,3 682,346,4 779,497,5 483,3,4 920,195,5 778,303,3 933,134,4 101,791,3 845,233,5 846,1399,5 897,538,3 694,327,3 714,548,3 918,244,2 663,236,2 822,100,3 935,324,5 839,203,4 774,244,3 714,30,4 896,322,4 199,933,2 933,64,4 501,327,4 895,10,2 706,184,3 503,504,4 652,68,4 885,543,4 879,199,4 311,1049,5 932,1227,1 647,163,4 789,3,3 127,116,5 669,194,4 830,300,2 715,227,4 803,54,4 939,285,3 708,4,4 931,29,4 642,491,4 647,385,4 6,11,5 915,117,2 300,79,3 546,293,1 449,43,3 20,329,4 890,1039,3 473,484,4 734,330,3 649,131,4 452,201,4 654,303,2 939,293,4 720,161,2 394,20,3 871,299,5 448,105,3 726,1048,1 167,180,4 180,1275,1 108,1134,4 835,689,3 621,23,4 922,263,3 550,764,1 862,1394,4 482,67,1 918,283,3 632,870,3 884,755,2 373,1058,2 880,192,5 862,884,1 290,942,4 863,432,3 333,881,3 662,314,4 718,214,4 475,939,3 592,1013,1 180,106,1 602,49,5 679,136,4 801,878,5 681,894,4 607,167,1 599,565,3 144,826,2 900,377,5 880,63,5 939,68,2 353,44,5 884,624,3 884,1060,2 716,146,4 411,168,4 893,247,4 587,66,1 845,601,4 406,161,4 550,217,5 822,205,4 803,68,4 701,288,2 667,992,4 748,392,5 814,522,4 676,287,5 837,712,4 325,492,5 527,421,2 930,285,5 760,124,4 520,124,3 659,746,4 90,332,5 714,194,4 536,199,3 922,99,5 638,279,3 837,493,4 888,256,4 891,434,4 345,565,5 861,96,4 797,264,5 522,407,5 795,987,3 786,1670,1 513,473,5 386,730,1 942,41,5 503,972,4 609,672,4 20,320,3 711,50,3 600,474,4 893,338,4 503,734,5 806,67,4 405,670,5 388,558,3 846,97,4 751,337,3 296,750,4 282,172,5 769,935,5 797,877,4 904,872,3 63,388,4 525,126,4 884,244,2 828,457,3 697,480,3 926,737,3 542,194,4 503,199,4 708,181,4 921,98,4 553,49,4 879,281,2 233,175,3 869,0,5 111,689,4 723,268,4 425,660,4 892,758,3 517,712,5 108,1209,3 536,688,1 824,8,3 794,745,3 693,604,4 201,95,4 895,708,3 291,471,3 652,822,2 918,894,4 869,257,4 658,190,5 906,285,5 748,70,4 342,424,5 867,945,1 65,8,4 839,1450,5 880,207,3 935,974,3 847,442,5 677,297,3 212,194,5 333,809,3 319,55,5 847,581,4 801,668,1 404,671,1 694,747,1 392,832,4 517,236,4 63,9,5 869,177,4 422,315,4 869,473,4 617,237,1 614,434,5 757,342,2 75,292,4 935,123,4 869,49,3 95,233,4 397,78,4 86,647,5 789,1229,2 61,508,4 81,317,4 915,761,3 340,880,5 555,132,5 243,290,2 709,209,4 814,638,2 17,14,4 654,1175,4 548,281,3 918,739,3 816,454,3 787,571,3 710,153,4 638,192,3 62,136,4 890,273,5 426,880,5 912,316,4 773,120,1 404,34,2 416,575,3 302,6,4 91,1193,4 863,213,2 867,60,5 845,1438,2 151,400,3 880,8,3 457,20,2 485,291,4 362,385,1 10,398,3 24,611,4 444,248,2 91,760,2 906,293,4 710,119,2 466,1058,4 
127,506,4 312,224,4 310,738,4 721,285,4 383,257,4 900,794,3 866,651,5 869,272,3 414,683,3 827,750,3 898,257,5 773,450,1 536,959,3 762,279,2 523,513,5 937,254,1 477,177,4 611,6,3 591,285,5 880,848,2 882,647,4 279,240,2 904,293,3 904,99,4 449,721,5 550,183,1 931,612,4 938,117,5 726,42,3 276,0,4 591,268,4 425,658,4 862,328,2 344,683,4 901,245,1 882,312,3 565,134,5 754,327,4 938,297,5 446,21,4 714,760,3 910,547,3 249,275,4 902,51,3 253,162,2 466,180,3 496,32,4 822,233,4 325,195,4 258,285,4 893,235,4 932,788,4 173,86,5 488,358,5 755,94,3 751,1264,3 690,649,5 941,314,4 879,136,4 896,469,4 821,168,4 626,236,4 609,203,1 636,284,3 881,173,5 891,89,2 591,325,4 274,187,2 772,779,4 814,82,4 544,232,4 896,698,4 446,264,4 850,822,3 159,627,3 869,1018,3 915,317,4 706,198,2 275,236,5 832,1018,5 689,0,4 821,750,3 702,590,4 847,87,4 480,190,5 623,1009,4 341,427,5 605,200,4 597,285,5 140,987,3 550,152,3 876,556,4 379,198,3 404,193,1 863,608,3 478,985,1 652,1132,2 880,1132,2 310,510,4 932,238,3 923,195,4 847,513,5 922,684,4 144,234,4 624,854,4 609,377,5 549,1088,3 571,300,4 876,227,4 617,467,3 932,201,2 936,303,4 711,293,4 903,1040,2 895,159,3 793,284,5 929,173,3 279,450,5 880,863,3 869,488,4 561,356,1 895,249,3 93,70,4 173,166,3 683,407,5 889,22,5 898,716,1 197,136,4 935,257,3 886,163,4 654,573,2 822,193,5 869,1007,3 388,552,2 861,514,4 586,915,3 921,167,3 382,285,5 779,215,4 890,99,5 652,545,2 860,25,3 881,94,4 452,143,4 795,761,3 942,317,3 592,973,2 335,948,4 845,51,4 723,301,3 878,293,3 880,184,5 832,454,3 882,1064,5 416,133,4 803,107,3 826,747,4 54,173,4 859,864,4 748,178,4 895,1219,1 755,250,4 881,97,5 638,282,4 761,420,4 614,474,4 138,474,5 659,158,1 503,121,1 828,407,4 853,24,3 845,656,5 513,273,4 565,55,4 223,236,3 835,41,3 785,178,4 750,496,4 877,235,2 523,662,2 773,234,1 775,108,4 765,526,5 449,46,3 275,1198,4 918,275,5 542,323,3 93,366,4 763,863,4 62,282,4 895,650,4 803,528,4 642,602,5 896,1027,4 113,223,3 820,283,3 906,87,5 682,244,2 93,1218,4 937,675,3 300,819,3 755,418,3 499,169,5 915,218,3 115,1243,2 937,126,5 756,253,2 757,291,4 873,356,5 665,973,4 806,88,4 93,232,3 808,339,4 931,649,5 659,427,4 882,210,5 915,172,4 624,478,4 802,260,1 847,430,5 492,880,1 863,187,3 559,270,4 669,473,3 879,55,5 827,895,4 895,704,5 36,471,2 932,237,2 721,129,4 885,127,4 755,29,4 803,49,4 311,168,5 143,961,4 75,918,3 585,678,3 624,199,3 183,508,4 842,180,3 787,288,4 681,75,3 610,885,4 560,700,3 283,876,2 893,299,4 569,267,3 607,610,3 652,685,2 755,500,3 103,309,2 398,53,4 829,750,2 231,743,3 61,115,3 850,301,5 650,514,5 455,149,4 827,302,4 935,268,4 193,603,3 631,68,4 803,153,3 847,461,5 828,256,4 867,505,4 434,777,4 931,493,4 561,510,2 542,461,4 710,70,3 560,738,2 773,231,2 311,835,5 772,1528,5 404,723,1 932,86,4 490,407,5 895,53,2 792,1013,3 628,631,3 789,240,5 649,596,3 797,735,5 680,749,5 441,312,3 803,378,3 503,213,4 930,315,5 607,293,3 679,6,5 302,289,4 881,226,4 915,1097,4 559,301,5 850,21,5 879,422,5 39,345,2 47,301,4 278,86,1 880,1214,1 261,654,4 832,163,2 802,299,3 877,417,3 591,1058,3 812,306,4 845,1205,3 372,470,3 935,247,4 845,202,5 467,954,4 923,30,3 449,615,4 903,552,3 744,922,3 653,301,5 912,8,5 301,332,3 428,120,3 936,846,4 722,88,3 393,76,3 789,268,3 819,323,3 499,422,3 834,175,4 681,157,2 233,502,2 291,788,4 893,292,4 667,301,5 129,680,3 600,99,4 814,628,4 797,62,5 86,193,5 473,629,3 520,420,4 926,1013,3 562,320,5 294,10,4 748,497,4 9,524,5 461,180,4 863,108,5 915,479,4 426,321,3 720,630,5 452,150,3 889,126,5 880,1176,1 869,187,5 763,594,4 795,1162,3 467,431,5 63,171,4 723,1431,1 829,192,5 
915,527,3 362,704,2 925,257,4 760,1011,1 496,90,2 915,630,4 879,120,2 781,750,2 907,418,4 926,394,3 882,182,5 795,142,5 168,524,3 496,568,2 748,272,4 428,1073,3 881,1411,3 366,875,3 841,327,2 335,406,1 754,687,3 786,327,3 832,805,4 824,126,3 285,905,5 663,704,4 776,521,5 609,422,4 935,475,4 893,25,4 900,62,5 540,677,5 662,264,4 602,272,1 822,426,4 845,287,4 654,156,3 312,228,3 863,1111,2 312,581,2 892,160,5 939,46,3 863,94,5 863,938,4 172,880,3 679,142,4 937,24,4 267,28,1 942,63,5 818,344,4 275,331,4 767,8,5 535,1029,3 795,848,4 936,254,3 804,1118,3 86,1117,3 183,628,3 388,662,4 455,55,5 715,153,5 863,545,4 528,990,1 936,123,4 483,731,5 462,739,4 868,814,1 822,140,4 526,174,3 876,306,3 360,154,3 343,462,4 714,55,5 496,238,4 748,195,4 832,741,3 679,293,4 804,723,2 839,655,4 688,180,5 311,123,3 901,306,3 312,575,3 422,99,5 681,160,3 795,171,4 869,12,4 327,49,4 890,408,4 879,356,5 929,704,2 600,933,1 92,933,3 724,287,3 827,354,2 697,27,2 772,49,5 530,1315,4 36,225,5 380,519,5 915,714,4 592,467,3 813,443,2 565,167,4 346,236,4 792,627,3 626,78,3 617,417,3 536,0,2 362,1477,1 335,1117,4 748,291,4 524,741,3 744,167,3 906,1283,5 526,22,5 665,695,3 564,206,4 853,259,3 863,992,4 646,81,4 531,760,4 597,747,4 882,268,3 833,14,4 457,275,5 871,404,4 935,258,3 918,306,4 901,227,3 803,553,2 785,207,5 895,1350,2 846,741,3 853,152,4 885,1066,5 124,65,5 901,422,4 707,1053,3 550,218,5 213,895,4 600,1038,4 442,306,3 652,53,3 871,973,4 918,366,4 747,420,4 560,23,3 890,1277,5 311,458,4 449,90,4 534,465,3 654,694,3 880,239,1 791,236,3 862,306,5 296,99,5 469,873,3 652,37,3 850,750,4 614,261,4 891,950,4 805,418,5 29,134,5 406,233,3 685,426,5 563,301,3 926,400,2 862,309,5 823,244,2 653,1013,3 840,688,5 656,339,4 405,1072,3 488,983,5 390,290,3 825,1230,3 777,153,5 867,555,3 803,684,4 882,152,5 791,117,2 404,1487,1 772,808,1 762,526,3 707,111,1 534,918,4 804,7,3 327,809,3 836,221,3 252,199,4 215,1066,5 723,328,4 5,532,4 866,1153,5 693,201,4 844,1233,4 926,226,2 885,798,1 720,1024,3 787,321,4 266,67,4 794,108,3 617,938,2 882,522,5 803,428,4 796,242,2 941,257,4 932,232,2 920,221,5 621,577,4 10,364,3 445,321,3 838,126,5 911,185,3 408,133,5 565,789,3 475,324,1 803,78,4 903,401,4 676,454,5 913,738,2 926,624,3 789,576,2 891,194,5 232,461,5 534,131,5 839,189,5 789,1470,2 756,124,2 726,394,3 918,260,3 600,64,4 885,720,5 589,590,3 879,209,4 776,272,4 846,46,2 750,595,4 920,1015,4 716,288,4 787,42,3 534,267,3 942,283,2 520,256,3 867,645,5 932,194,4 867,141,1 264,293,4 729,339,3 839,22,5 756,63,5 870,309,3 621,174,4 292,222,4 559,545,2 803,306,4 836,15,2 587,583,3 891,209,4 797,481,3 114,301,4 895,741,1 748,494,4 748,1135,4 900,72,5 863,442,4 400,215,4 847,179,2 129,143,5 609,70,4 888,479,5 822,3,5 766,1120,5 659,117,2 937,1046,3 144,268,5 699,201,3 515,473,5 485,404,4 268,746,4 837,0,5 814,90,3 707,275,2 927,287,3 469,221,3 803,433,4 428,1138,2 232,134,4 701,689,1 916,99,4 773,443,1 748,427,3 505,1019,4 901,86,4 660,514,5 715,78,4 595,327,5 649,518,4 863,281,3 829,549,5 796,719,2 513,117,2 623,311,4 275,709,4 652,355,1 486,404,4 405,431,5 932,264,4 367,843,3 839,606,4 708,249,4 348,283,5 103,120,2 628,267,5 404,381,1 557,99,5 898,741,4 850,203,4 361,346,5 639,372,3 405,1169,4 342,257,5 875,173,4 661,267,5 612,529,5 898,565,3 879,96,4 325,506,2 668,514,5 935,994,3 891,515,5 891,154,2 757,78,4 707,280,4 633,546,4 620,275,4 803,327,4 757,1087,3 552,506,3 59,422,4 588,303,5 496,54,3 258,167,5 882,87,4 99,884,2 757,712,3 518,894,4 57,822,1 789,267,4 732,273,3 835,285,3 391,315,5 576,1041,4 803,151,4 711,940,5 174,135,4 311,528,5 
623,410,4 715,948,3 915,29,4 245,764,2 534,178,4 888,606,4 568,273,4 465,299,3 716,120,2 405,56,4 709,155,4 628,323,2 710,97,5 932,356,4 642,779,4 915,511,5 632,44,3 757,261,5 532,176,4 931,203,4 845,791,4 652,770,2 594,126,5 805,110,3 566,339,3 405,1108,4 816,875,4 867,433,3 933,224,2 505,446,4 850,327,3 896,27,4 882,60,5 885,175,4 772,285,3 885,211,2 707,929,3 834,508,4 879,365,2 491,461,3 915,191,4 874,962,4 196,49,5 477,70,3 845,784,4 926,94,5 681,1131,3 9,152,4 541,87,3 879,409,4 732,19,5 921,82,4 715,739,4 885,732,4 880,21,5 775,22,4 523,220,4 377,264,4 12,59,4 935,1022,2 639,91,4 269,241,5 886,120,5 900,435,4 794,566,2 51,276,5 863,548,3 678,41,4 392,940,4 879,574,3 4,379,3 93,583,4 467,217,4 803,238,4 93,736,4 817,244,4 730,189,5 561,482,4 765,483,4 605,214,4 940,14,4 804,225,3 20,876,2 795,976,2 654,1017,3 566,426,3 293,871,4 803,71,4 822,1069,4 881,509,5 326,32,3 930,296,4 392,249,4 877,795,2 778,595,4 847,484,5 233,1119,3 824,627,4 659,234,3 734,122,3 830,209,5 379,85,4 765,22,4 71,236,3 209,27,4 905,822,3 455,587,3 895,180,5 196,575,4 933,434,4 850,158,3 302,32,4 106,1242,3 794,424,3 828,124,3 638,422,2 795,226,4 411,0,4 882,711,3 931,431,4 754,339,1 631,99,3 672,325,4 726,93,4 405,371,4 797,417,4 591,526,5 896,238,2 755,918,5 428,497,5 598,283,4 380,117,1 670,122,5 462,891,2 91,368,3 709,91,3 571,923,1 828,275,4 795,14,4 362,126,4 659,674,3 57,190,5 600,381,4 850,126,5 865,881,2 882,38,4 325,428,5 726,225,3 888,1152,4 623,339,3 188,488,5 621,738,2 824,929,5 314,474,4 4,104,3 837,944,4 613,234,5 601,507,3 803,1,4 109,30,3 0,206,5 921,446,1 900,865,3 578,326,3 412,275,4 885,422,3 711,39,5 37,450,5 757,198,4 56,167,3 213,208,5 931,494,5 924,875,3 63,918,4 404,791,5 626,244,4 768,596,2 169,299,5 529,606,5 834,27,4 931,1125,5 803,1024,4 900,215,4 918,331,4 41,1,5 143,197,4 928,434,3 903,180,3 428,292,4 787,88,5 727,470,4 730,1085,1 715,632,4 733,287,4 855,293,4 665,442,4 935,19,5 850,1142,5 837,81,4 929,237,4 918,282,4 681,467,5 654,698,2 706,475,3 388,683,4 903,236,5 853,506,4 716,284,5 905,1008,2 587,21,5 781,1610,3 667,68,1 871,590,3 757,229,4 531,297,4 933,817,1 616,667,4 871,150,2 800,331,5 86,684,3 942,181,5 496,412,3 613,6,2 86,371,3 503,675,4 386,654,3 494,209,5 804,432,4 605,37,4 891,78,5 870,1385,3 933,426,4 709,55,5 867,63,5 708,68,5 390,208,5 805,130,4 678,82,5 726,981,4 829,293,3 863,541,4 805,454,3 921,659,3 266,160,4 915,215,4 879,467,3 252,750,3 494,581,4 164,431,5 649,669,3 814,624,4 282,731,4 664,91,4 836,277,3 905,128,4 746,234,5 150,142,5 795,199,5 915,233,4 388,685,3 444,342,1 849,68,5 392,576,4 932,159,3 742,258,3 88,24,5 895,126,5 905,269,4 886,587,4 639,225,5 302,576,3 765,435,4 898,469,4 565,507,4 98,543,4 902,1047,4 765,658,3 134,37,3 698,332,3 531,1,5 205,1432,1 920,525,4 108,55,5 398,138,3 805,174,5 565,171,3 499,461,4 763,277,4 803,670,3 17,49,4 536,310,3 690,747,4 649,210,4 898,1100,5 795,125,3 888,230,3 278,1249,1 605,81,5 915,1041,3 560,116,3 734,12,4 861,281,5 787,422,5 891,1,4 831,242,2 353,511,3 757,221,4 341,475,4 591,139,3 900,242,2 193,569,3 791,761,4 926,402,4 917,1264,1 794,738,1 804,1148,4 904,318,2 620,298,1 748,677,2 647,808,3 603,97,2 932,1109,3 847,108,4 36,567,3 406,174,4 650,300,3 566,653,5 497,478,3 879,1495,4 642,1220,3 750,100,4 915,510,5 888,435,3 180,1280,1 362,16,4 882,476,5 912,482,3 681,364,3 923,479,3 20,990,2 414,173,5 652,691,2 704,95,5 889,285,5 690,303,3 881,184,5 441,76,3 188,13,5 853,511,3 710,213,4 889,166,2 177,202,4 937,761,4 763,201,4 31,49,4 933,131,4 915,734,4 75,1158,3 331,684,4 941,233,4 782,291,4 877,854,3 
929,112,5 804,270,3 933,3,5 428,179,5 938,741,5 832,270,5 496,738,4 923,1,3 626,187,4 888,22,3 507,162,3 655,311,1 806,522,3 738,167,1 900,234,3 291,227,5 834,485,4 550,227,5 576,132,4 659,174,3 621,3,4 252,3,4 593,291,3 424,88,4 617,86,3 870,1071,3 63,355,3 678,431,4 223,106,3 312,496,4 602,88,5 436,7,4 942,217,4 706,738,2 435,868,4 660,184,5 850,297,5 404,1463,1 539,342,4 921,194,3 302,561,4 401,14,5 917,174,3 5,215,5 906,1162,4 535,86,3 732,150,4 822,417,4 435,985,3 654,131,3 420,422,2 641,608,3 589,110,3 845,40,3 565,49,2 605,272,4 787,809,3 231,474,5 820,124,4 836,8,3 922,221,4 329,965,5 523,791,4 839,492,5 478,150,4 295,152,4 845,560,3 726,237,2 863,683,4 895,3,3 932,229,3 889,194,5 885,28,1 405,55,5 329,208,3 532,497,4 793,472,4 777,142,1 751,268,5 716,6,4 456,68,5 888,181,4 740,450,3 720,281,4 449,698,4 863,84,2 888,249,4 419,492,3 824,120,5 398,1206,3 391,325,2 652,0,4 789,78,4 372,1443,3 935,1007,5 392,683,4 176,97,5 863,804,4 37,184,2 915,72,3 652,99,4 327,71,3 806,131,4 550,1302,1 393,545,4 847,404,5 434,248,4 465,194,4 888,626,2 660,227,5 931,508,3 919,309,4 881,202,4 935,1162,5 898,731,3 767,244,2 845,526,5 566,189,4 931,13,4 428,1132,2 632,422,4 872,327,4 667,12,4 27,587,3 19,21,5 653,23,4 926,587,5 485,24,4 785,96,4 806,372,4 863,1043,3 850,261,4 821,100,2 653,13,2 348,984,3 870,186,5 326,432,4 654,434,2 814,78,4 544,217,4 896,432,4 880,1032,1 639,149,4 253,142,4 896,172,3 902,3,4 654,315,4 931,132,4 797,111,3 513,187,5 745,505,3 714,943,2 592,477,5 639,208,5 919,298,2 795,577,4 706,706,5 632,81,4 747,191,3 824,404,5 634,357,1 647,662,1 929,264,3 320,429,3 127,27,5 427,241,4 895,577,2 895,454,2 592,284,2 585,409,3 892,927,3 696,325,4 489,951,2 344,57,4 824,684,4 591,653,5 622,450,4 746,1020,5 658,81,4 471,213,4 933,172,3 408,875,2 706,769,3 233,161,3 505,230,3 681,48,3 803,448,3 809,875,3 909,272,3 392,789,4 456,694,3 23,6,4 200,805,3 921,81,3 221,244,3 881,215,4 212,41,5 869,512,4 748,1187,3 220,549,4 701,894,1 718,273,3 335,272,5 76,635,2 626,52,4 912,6,5 291,192,4 874,417,4 357,257,4 167,844,4 835,162,5 889,384,4 900,392,5 493,237,5 625,335,1 618,32,3 913,312,3 905,470,3 866,1064,5 915,255,3 863,469,4 797,567,4 435,506,4 853,824,3 895,147,2 670,61,5 716,886,5 752,97,5 912,56,4 797,72,4 720,334,3 559,1018,4 895,8,4 843,293,2 278,853,1 832,225,3 302,389,3 311,600,5 869,57,5 55,192,5 846,261,5 918,30,3 915,684,2 710,87,5 839,1638,4 483,120,4 850,924,3 520,194,4 20,407,5 505,520,5 846,316,3 783,333,3 150,123,5 892,975,1 814,225,3 931,518,4 877,275,3 678,567,2 803,646,5 696,323,5 647,434,5 902,460,3 693,660,5 496,62,3 668,356,4 649,627,3 930,458,4 586,327,1 832,839,2 658,610,4 453,605,2 933,491,4 457,115,4 393,167,5 850,689,4 906,461,4 536,481,4 649,603,3 654,506,4 421,338,2 397,110,3 663,94,4 710,185,3 927,164,5 885,708,3 449,600,3 820,483,5 449,185,3 294,142,4 886,256,5 565,259,2 804,767,2 714,248,4 513,176,3 331,1209,3 822,237,5 575,470,4 320,525,3 540,82,5 746,663,2 143,410,4 756,226,4 258,927,4 645,876,3 853,57,3 439,257,4 452,228,2 868,475,1 310,938,2 932,387,1 453,503,2 559,200,3 86,1185,3 371,6,3 892,1011,3 523,5,5 822,630,4 307,258,3 547,538,2 867,431,2 807,339,5 795,1054,3 891,1034,3 829,173,5 663,305,4 660,163,4 773,149,1 814,685,5 302,62,1 877,214,2 765,428,4 921,180,5 585,43,3 795,70,4 864,411,1 353,713,4 371,233,5 909,55,4 12,824,1 449,602,5 846,0,3 763,272,3 486,209,4 338,653,5 797,124,3 804,427,5 935,273,3 456,422,5 487,192,3 918,312,5 253,240,4 503,453,5 938,254,5 669,95,5 896,482,3 644,184,5 825,778,3 832,432,3 193,135,5 933,12,5 529,236,4 822,57,5 393,117,4 
647,227,5 880,574,2 929,234,2 886,242,1 800,244,3 711,141,4 926,142,3 566,506,5 933,413,5 278,1033,4 229,422,5 681,774,1 644,49,4 789,569,2 278,427,1 278,428,4 580,843,5 575,677,3 847,738,5 0,243,2 935,695,2 938,265,2 814,229,5 736,170,4 477,779,3 659,7,2 589,475,3 129,16,5 726,626,3 942,78,5 497,99,3 924,787,3 935,677,3 473,1123,4 150,300,4 654,190,4 542,81,4 456,46,4 644,652,5 196,801,4 921,738,3 478,234,3 889,666,2 757,361,5 221,789,1 820,470,4 767,126,5 917,153,2 781,256,3 496,416,2 882,55,5 91,1036,2 841,751,4 814,612,5 926,1034,4 587,551,1 752,63,4 364,107,2 804,641,4 392,624,4 809,327,5 861,654,5 381,234,5 832,149,3 434,411,3 847,519,5 795,632,5 878,180,4 879,1276,4 554,46,2 915,580,4 583,227,5 889,500,4 857,677,1 81,602,5 494,661,5 624,299,3 298,502,4 845,505,3 427,244,5 346,626,4 697,194,4 879,249,3 302,154,3 287,209,3 834,590,4 279,402,3 658,213,3 561,1125,4 803,654,4 895,526,4 837,174,3 880,94,4 659,209,4 901,478,4 812,265,2 832,216,2 932,180,2 880,520,4 93,247,4 574,317,5 633,949,5 485,457,3 629,21,3 786,361,3 82,478,5 746,69,4 67,925,1 745,454,4 867,708,4 882,346,4 795,47,3 6,420,3 827,178,4 607,735,4 220,736,4 751,749,2 561,322,2 452,54,4 915,289,3 292,283,2 623,2,3 91,148,3 824,99,4 855,285,4 942,507,5 726,567,3 852,269,4 704,256,4 536,1419,1 937,755,3 902,60,4 853,97,4 631,684,2 896,525,5 536,203,3 761,285,4 187,258,3 222,1233,3 565,684,3 58,657,4 605,832,5 471,104,3 931,428,5 938,1027,5 551,257,4 338,91,4 310,577,2 547,596,4 933,791,3 628,152,5 864,587,2 715,494,4 829,426,5 853,296,4 832,180,2 657,68,4 707,335,2 523,675,3 567,1136,4 233,384,2 15,134,4 435,142,2 646,1015,4 795,778,3 658,446,3 453,603,3 915,163,4 824,147,4 794,401,2 508,689,3 616,73,5 520,202,3 295,897,4 233,143,3 647,474,1 930,136,3 885,158,2 638,214,1 746,432,3 778,110,4 626,81,4 870,261,3 19,49,3 875,603,5 756,16,3 534,31,3 732,147,3 773,777,5 869,69,4 29,779,4 63,434,4 587,158,1 742,257,5 896,405,3 866,602,5 232,179,5 63,240,3 695,326,4 0,18,5 37,672,5 200,1354,1 764,169,5 852,303,4 666,474,5 895,264,4 689,231,4 773,547,1 662,675,3 715,629,4 915,754,2 370,175,4 607,420,5 665,123,3 513,136,3 505,332,4 845,1296,3 745,264,4 654,1009,3 153,474,4 269,562,3 803,31,3 762,136,4 457,274,5 879,1057,2 935,124,4 912,199,5 715,514,5 158,117,4 927,486,5 926,419,5 61,75,4 270,427,4 814,232,3 362,371,4 547,249,5 711,177,2 748,239,1 879,1035,2 912,234,1 243,923,4 534,607,4 762,193,5 842,503,2 854,854,4 533,287,4 596,989,2 781,180,3 1,285,4 841,305,4 626,565,3 406,736,4 665,22,4 342,221,4 839,602,5 789,738,4 275,1051,2 785,280,4 570,68,2 872,341,4 94,51,4 327,446,2 471,824,5 458,185,4 891,7,5 900,434,5 893,170,3 257,325,5 576,49,4 715,299,5 877,13,5 620,183,3 55,392,4 726,401,3 898,173,5 377,312,5 275,1018,5 732,543,1 748,1012,1 784,68,4 931,37,2 610,353,3 681,22,4 547,430,5 872,338,3 830,741,3 886,595,5 561,513,1 19,242,4 814,96,5 652,182,3 334,339,5 826,288,3 747,6,4 832,162,3 882,112,4 649,1418,3 879,84,3 896,78,5 17,141,4 650,994,1 466,221,3 862,908,3 255,448,3 781,303,4 921,578,3 647,691,4 423,839,4 496,804,3 397,654,4 789,134,3 721,24,4 398,560,2 860,241,5 885,68,2 938,755,5 891,55,4 918,237,3 653,236,4 879,149,4 477,149,4 726,814,3 882,207,4 672,894,3 377,286,2 654,501,4 932,653,4 830,326,2 93,411,2 279,451,2 756,249,4 641,90,4 45,285,5 249,587,5 803,251,4 342,115,5 885,1009,5 922,279,3 794,190,4 879,230,2 424,160,3 493,322,3 698,545,3 780,293,1 446,210,4 882,95,4 831,263,3 893,31,4 604,507,5 525,180,4 928,194,4 875,526,5 507,227,5 939,691,4 436,248,5 536,493,4 34,320,3 834,131,5 915,558,3 384,239,4 915,766,4 
535,992,3 847,237,4 930,684,4 619,431,4 549,14,5 475,958,3 654,1081,3 2,352,1 118,726,5 902,197,4 842,1134,3 633,409,4 483,13,4 846,416,2 806,143,4 862,291,2 300,298,3 118,171,4 512,434,5 159,831,1 839,502,4 924,259,3 183,479,4 698,269,4 663,156,3 404,180,5 746,1169,2 405,424,3 763,221,4 284,285,3 526,581,2 804,1117,5 822,180,4 803,1243,2 936,325,1 750,915,1 850,326,4 545,342,3 912,226,1 824,194,5 585,355,4 406,84,4 654,792,3 436,419,3 918,125,4 824,619,3 578,267,3 896,207,5 697,186,2 621,8,4 393,417,4 942,186,5 896,167,3 477,1040,3 37,432,5 591,814,3 845,549,4 41,657,2 915,918,5 845,447,5 394,299,3 392,540,3 693,132,5 80,78,5 541,431,5 157,82,5 278,26,5 371,272,5 887,630,4 793,223,4 393,402,4 835,288,1 693,529,5 682,287,3 320,658,4 148,310,3 13,381,5 48,199,3 415,344,5 845,1077,2 831,894,2 767,762,2 642,715,3 805,127,3 941,527,5 452,1302,2 602,226,3 434,657,3 862,303,3 118,286,4 804,410,2 842,215,2 415,172,5 536,505,3 801,258,2 910,133,4 559,57,3 628,41,2 849,658,4 895,378,2 828,85,4 522,201,4 124,288,5 941,658,5 762,11,5 405,8,5 879,975,2 726,107,3 505,640,5 885,8,5 587,696,5 891,356,5 457,193,2 822,173,5 900,193,5 746,198,4 571,99,3 599,1003,4 594,1009,4 392,719,3 863,650,5 576,762,3 869,792,5 565,77,1 806,427,4 932,391,3 670,10,4 496,794,1 867,23,2 755,224,1 863,27,5 828,409,3 853,257,4 906,116,5 832,525,4 183,675,4 658,511,3 497,267,2 918,988,2 588,750,4 376,677,2 845,424,5 654,1577,3 935,285,5 891,464,4 58,607,4 756,142,3 696,338,2 660,85,4 449,60,4 871,844,3 495,67,4 298,210,4 200,696,4 750,864,2 454,51,3 918,115,3 868,274,4 806,204,3 886,108,5 553,716,3 58,89,2 360,120,2 730,693,5 478,142,1 845,199,4 926,814,3 57,55,5 404,434,1 740,731,4 541,400,3 388,417,4 681,751,4 341,8,5 931,653,5 545,218,5 896,529,3 806,421,4 867,192,2 877,173,3 568,117,4 931,478,5 302,418,4 916,755,4 91,840,3 896,134,3 711,509,2 349,189,4 829,172,4 536,707,3 803,631,3 396,483,5 794,201,3 441,91,5 748,21,5 733,482,4 564,85,5 709,178,4 700,325,4 515,356,3 863,202,5 910,583,3 891,135,4 681,624,3 839,21,3 413,10,5 933,613,3 513,99,4 653,507,1 631,549,2 767,681,3 885,327,3 726,185,5 917,215,2 711,41,1 200,0,3 391,10,4 551,627,3 314,465,1 744,19,1 921,454,4 903,708,3 221,471,2 534,847,3 475,432,4 663,413,5 710,940,3 144,233,5 914,345,2 902,654,5 845,488,4 157,297,3 895,187,3 882,503,5 763,370,3 397,632,4 485,49,5 27,528,4 339,968,5 544,66,1 933,660,4 516,1176,5 929,116,3 868,1131,1 327,315,5 714,96,3 380,267,4 842,649,3 846,662,2 585,422,2 483,86,5 765,662,5 633,743,5 275,372,2 877,920,4 550,519,4 869,285,4 586,321,3 880,1065,3 891,662,5 48,282,3 847,182,3 861,471,5 402,475,4 773,100,2 703,431,5 478,469,5 746,1426,2 230,14,4 781,312,5 805,28,4 176,155,5 665,293,3 562,300,4 58,72,4 415,619,4 832,199,4 898,514,3 335,84,3 388,721,2 931,66,2 855,306,4 449,10,5 921,432,4 918,128,5 713,280,3 881,207,5 172,257,4 827,751,1 536,971,3 300,40,3 233,614,5 861,80,5 868,236,4 804,927,3 778,878,3 898,454,3 523,527,4 17,186,5 233,303,3 581,741,3 499,556,3 846,209,3 841,323,4 914,346,5 536,377,2 869,52,2 889,356,5 428,356,5 933,754,4 847,972,5 233,634,2 874,186,4 814,113,5 757,61,2 158,1011,5 918,19,1 465,259,4 415,596,3 797,431,4 880,1479,2 826,299,3 860,291,4 342,465,4 733,484,5 795,692,3 591,181,5 101,1,2 471,401,5 885,32,4 473,430,4 805,878,3 302,411,3 788,149,5 877,284,5 726,229,3 825,28,3 12,898,1 875,510,5 804,272,2 544,1027,4 822,425,4 837,59,4 880,755,4 937,327,2 268,166,1 814,520,4 658,516,5 824,822,4 931,172,3 654,1041,2 881,428,4 773,507,3 594,675,2 733,201,5 391,268,5 898,826,2 896,973,4 902,187,5 644,181,5 523,1539,2 
804,889,3 707,404,4 763,1,3 751,886,1 896,527,3 884,116,4 902,870,3 748,327,4 647,185,5 670,65,5 819,327,2 885,99,4 795,30,4 853,268,4 605,0,5 451,526,3 451,824,5 850,332,5 434,287,4 378,1034,3 893,6,4 730,654,5 921,249,2 912,18,5 504,120,4 886,558,4 763,692,3 880,22,4 765,192,3 895,799,3 275,695,2 541,434,4 755,227,3 874,212,4 283,333,3 935,271,4 886,6,4 785,209,4 749,885,3 804,949,3 933,253,4 291,330,5 151,567,5 607,267,4 599,390,3 377,560,3 388,154,2 915,23,2 619,930,3 621,87,3 757,235,4 920,96,2 626,529,3 833,275,5 880,1410,2 850,680,1 895,32,2 936,863,3 249,322,2 364,318,4 752,312,5 536,429,3 545,108,5 690,181,5 886,117,5 885,167,4 769,239,2 642,146,3 307,479,4 537,272,3 677,221,3 847,185,5 733,604,4 868,286,2 882,1655,5 591,23,4 265,288,3 763,818,3 709,63,4 716,627,5 536,470,3 101,173,4 533,984,4 879,280,4 499,357,4 886,150,5 221,1144,3 654,1194,3 859,314,3 895,283,4 124,94,5 485,99,5 48,701,3 936,235,4 587,927,4 839,638,4 804,1001,1 676,116,4 825,585,4 623,1058,1 408,1049,4 829,565,3 861,766,4 898,160,4 245,225,2 89,136,5 772,234,4 503,8,4 298,380,3 891,575,4 124,84,3 10,381,3 942,608,2 882,278,3 802,303,3 869,1450,3 854,169,2 822,201,4 853,270,4 937,342,4 931,707,4 932,104,2 781,893,2 463,478,4 278,629,4 591,179,5 797,670,2 499,553,3 845,53,3 854,918,3 843,1,4 369,56,5 654,225,3 869,63,5 832,590,2 658,72,4 473,150,3 166,215,4 93,822,3 697,134,3 906,355,4 803,289,4 906,321,5 391,656,5 272,285,3 599,517,5 832,1334,2 770,201,4 839,70,3 845,654,3 785,454,1 688,716,3 885,11,5 882,434,4 361,878,5 797,647,3 772,886,2 261,203,3 620,789,4 654,971,3 617,632,3 763,471,3 760,6,4 682,326,4 523,487,4 144,63,4 768,472,3 893,333,3 873,750,3 405,1100,4 415,3,4 771,343,4 889,473,5 313,811,4 581,99,5 878,684,4 847,653,5 566,181,5 802,339,5 894,987,3 633,92,5 616,451,1 741,123,4 710,1023,5 12,27,5 691,256,4 933,93,4 773,654,1 553,283,3 103,327,3 842,225,3 845,87,4 202,747,2 915,482,5 454,21,4 617,565,3 803,628,3 843,173,4 333,560,2 531,367,3 845,139,4 617,312,4 215,96,4 900,931,4 885,190,5 565,136,5 336,150,5 845,140,4 748,567,4 757,284,5 941,483,5 847,402,4 663,95,3 806,299,5 915,549,2 668,174,4 759,70,4 523,12,4 496,371,4 490,324,1 55,1090,2 874,517,4 789,1184,2 572,134,4 662,465,3 714,976,2 832,428,3 869,496,4 828,115,4 9,84,4 118,348,3 929,755,3 373,824,3 902,720,4 503,704,4 814,870,1 940,454,4 700,123,5 631,484,4 703,155,3 937,242,4 876,325,4 803,650,4 773,379,2 62,9,4 658,513,5 846,180,4 906,482,4 784,660,3 814,834,3 652,110,2 591,930,1 781,1597,2 6,508,5 585,71,2 90,182,5 615,315,4 882,658,3 771,263,4 456,168,5 795,151,3 867,168,5 887,190,5 748,1243,3 863,132,5 709,326,3 611,476,2 614,82,4 932,94,3 828,704,4 906,698,5 822,49,5 906,470,5 326,216,3 690,63,5 902,10,2 867,126,4 610,689,3 879,822,3 824,123,3 885,155,4 897,1295,4 700,268,5 891,514,5 494,418,1 915,203,3 879,692,5 530,893,1 648,251,4 542,68,4 866,27,5 726,543,3 931,192,3 77,410,4 734,300,3 275,451,3 839,524,5 144,8,2 874,357,3 884,141,2 75,850,4 486,392,4 467,203,5 851,251,3 897,342,3 850,482,4 647,525,3 664,455,4 435,1247,3 880,198,5 757,387,3 822,654,5 524,292,3 359,14,3 43,174,4 854,508,3 307,683,3 197,469,3 935,1010,4 686,878,3 658,527,4 917,68,3 642,175,5 544,270,3 424,227,4 15,126,5 436,151,4 398,432,3 345,657,3 879,901,4 824,830,3 647,427,2 266,709,4 494,76,4 681,279,3 869,97,4 648,256,5 529,1135,4 822,11,4 770,168,5 200,1207,4 737,27,4 775,184,4 493,602,3 642,10,4 708,37,3 274,173,4 748,1132,2 510,342,3 542,217,3 89,846,5 747,656,4 362,7,5 804,97,5 706,250,5 81,1058,1 685,805,5 535,274,5 938,284,5 451,1203,4 859,691,5 879,16,3 
698,869,3 534,494,3 503,755,3 834,504,3 520,826,1 910,646,4 781,299,4 884,166,3 885,143,4 404,703,2 392,81,4 12,670,3 499,424,4 904,470,4 880,181,3 785,503,4 647,390,3 12,357,3 576,97,4 822,65,4 769,595,4 277,301,3 473,605,3 709,180,3 720,325,4 91,39,3 392,731,4 893,164,4 907,215,3 902,1066,2 713,275,2 803,796,4 785,97,5 790,303,4 845,699,2 670,450,4 652,366,3 317,293,4 893,19,5 789,684,4 837,92,3 913,731,2 537,288,1 377,802,3 885,939,2 744,275,1 787,1458,2 886,99,2 834,987,3 560,487,4 869,1073,2 459,150,3 629,865,3 932,574,1 140,865,5 839,656,5 789,364,4 454,769,3 932,473,5 520,215,2 681,796,2 797,1434,2 444,92,1 48,173,1 912,603,2 867,725,2 850,590,5 886,127,5 326,317,5 805,674,3 565,135,4 880,273,3 888,788,2 824,236,4 91,154,2 785,87,4 541,292,3 879,762,3 400,210,4 861,543,5 845,356,4 542,58,4 870,323,3 693,203,4 932,152,3 869,420,2 652,78,4 668,182,3 787,129,2 847,426,5 457,96,1 43,172,5 454,158,3 775,508,5 869,721,2 405,1152,2 41,754,4 621,126,5 785,384,4 876,55,5 475,209,4 922,281,4 805,483,4 845,477,4 633,1196,4 795,404,5 880,0,4 850,1286,1 789,948,4 744,168,4 757,87,4 617,143,4 706,9,5 173,142,5 416,24,2 560,635,1 814,142,5 746,734,4 20,671,3 759,374,4 566,151,4 650,689,3 223,569,4 652,738,3 393,81,4 392,65,3 781,1619,3 824,324,5 834,126,4 428,606,3 805,78,3 842,100,3 862,989,1 898,402,3 883,528,5 698,1056,3 509,325,4 707,224,2 750,536,4 879,384,4 871,1164,2 906,401,5 407,309,4 0,28,1 885,760,4 652,141,2 483,196,4 541,694,2 845,835,5 845,943,2 726,575,4 708,664,3 139,320,4 847,173,5 150,134,5 893,886,4 931,479,5 880,237,1 942,1227,3 788,275,5 789,201,3 853,497,3 939,199,3 788,150,2 614,152,4 620,141,3 765,179,4 669,143,4 912,150,4 341,11,5 892,770,3 471,384,5 641,1335,2 20,8,5 471,830,5 129,818,3 541,508,4 696,304,5 114,627,5 788,128,5 644,300,2 895,1266,2 937,865,5 424,742,4 550,209,4 445,320,4 180,1367,1 942,474,5 780,68,3 776,179,5 676,289,1 879,299,3 279,32,3 618,332,2 876,158,4 840,325,4 914,300,2 928,520,5 491,274,2 804,240,2 159,287,5 605,636,3 662,844,3 908,381,5 768,0,4 928,186,5 408,510,5 604,522,5 91,216,3 795,230,3 642,1027,3 906,818,4 434,52,3 823,318,2 502,85,5 4,209,3 325,513,3 659,651,4 889,131,5 845,64,3 938,410,4 942,839,4 619,172,5 912,175,5 903,274,5 520,525,3 658,482,4 906,184,4 758,49,4 751,322,1 342,251,4 874,130,4 806,842,2 874,420,4 839,81,3 884,71,1 772,407,5 603,671,1 453,70,3 200,283,3 853,465,3 464,928,3 428,97,4 751,1175,2 866,527,4 369,169,4 828,528,4 920,209,4 715,51,5 937,1015,3 390,377,3 879,817,2 266,76,3 642,958,3 892,10,4 930,332,5 884,212,3 587,567,4 234,1133,4 618,230,4 697,356,4 378,344,3 536,90,2 926,541,2 641,409,1 635,14,5 822,175,4 660,95,4 885,227,4 869,201,3 405,206,2 775,173,5 93,621,3 858,1008,4 867,497,3 534,191,4 895,586,3 61,6,4 523,656,4 875,287,3 567,190,4 750,24,5 668,648,4 863,81,5 91,24,3 326,175,4 896,632,5 732,470,3 396,356,5 503,513,4 747,401,2 908,879,4 427,891,4 641,1184,4 696,275,5 791,741,3 853,478,4 335,715,2 845,94,3 685,190,5 206,44,3 662,280,3 424,333,4 706,309,4 817,311,2 608,284,5 592,143,4 428,506,5 614,237,3 864,99,4 765,218,3 787,10,2 708,958,4 935,1201,4 846,234,1 842,707,2 424,268,4 681,692,3 405,1146,4 591,891,1 543,299,4 294,1049,5 918,311,2 895,172,5 720,808,1 331,88,5 41,281,4 777,711,3 233,126,4 637,21,5 859,256,3 737,120,4 556,864,3 654,1152,3 774,346,3 641,974,2 614,520,4 896,293,3 748,139,3 896,202,4 773,99,1 587,94,4 232,63,5 889,443,4 696,306,4 93,126,5 302,70,3 12,840,1 895,556,3 895,144,1 861,210,5 886,1119,5 845,805,3 143,759,2 578,49,5 797,570,3 867,777,2 928,99,4 560,434,3 541,89,4 455,32,4 
828,638,4 896,707,2 155,8,4 238,185,1 891,66,4 638,1064,1 921,420,4 72,187,5 937,677,3 805,160,3 757,1089,1 597,21,5 3,359,5 885,1488,1 895,319,3 869,432,3 24,124,5 870,215,3 647,293,3 932,939,1 878,49,4 642,225,2 686,267,5 803,1614,4 920,661,4 880,489,4 397,477,5 503,1036,1 775,181,3 404,45,1 5,110,2 747,812,4 471,553,5 609,194,3 937,116,3 867,508,3 874,41,4 372,237,4 893,339,4 931,1029,2 879,549,4 434,754,2 523,1072,5 773,572,2 762,3,5 879,30,4 730,602,5 623,695,4 647,473,4 545,759,5 720,1118,4 874,326,4 649,584,1 586,304,4 845,88,5 900,171,5 681,299,2 917,165,4 535,482,4 641,117,3 794,639,4 629,119,4 856,891,3 931,55,4 129,218,5 572,518,4 904,136,3 933,515,3 839,413,4 275,142,5 870,525,5 416,93,3 882,153,4 757,67,3 104,879,3 694,288,2 931,166,4 881,356,4 197,127,3 740,356,5 12,802,3 933,153,3 471,915,5 212,1011,3 225,473,3 935,1078,1 879,1224,2 885,187,4 767,1060,1 870,241,3 794,1040,3 804,149,5 59,518,4 798,426,5 756,731,3 645,747,3 392,863,3 832,63,3 398,10,4 404,939,1 626,940,3 302,172,5 917,484,3 851,357,3 939,656,4 105,462,3 915,235,4 932,215,3 698,19,4 362,581,2 605,403,4 429,167,4 213,10,5 632,93,4 924,560,3 645,271,4 748,779,1 863,51,4 572,835,3 400,98,4 903,273,5 895,809,1 806,167,4 790,274,5 916,267,4 714,154,4 863,716,3 879,92,4 875,877,2 457,98,4 398,510,3 488,311,2 895,205,3 845,176,3 842,153,3 676,100,5 842,670,3 853,798,3 691,409,5 295,149,1 871,475,4 849,418,5 615,300,3 302,220,5 880,572,3 638,85,4 726,1187,2 307,410,4 747,215,4 747,654,3 559,1133,3 751,314,2 636,1257,1 915,87,4 319,826,4 562,171,5 740,163,3 98,173,5 787,522,4 549,248,4 927,175,3 869,176,4 288,116,4 886,1382,4 783,749,5 473,377,4 921,90,4 733,94,4 781,294,2 592,72,2 896,242,4 384,1505,4 757,98,3 797,1042,3 787,116,4 792,405,2 261,218,3 379,513,2 231,424,4 64,76,5 789,84,3 591,1263,4 372,327,4 119,514,5 855,346,2 863,72,5 202,627,4 791,24,2 884,134,2 757,404,4 585,695,3 927,875,5 532,918,2 732,949,4 662,110,3 658,344,4 830,293,4 834,204,3 926,540,5 550,199,4 755,431,4 481,285,3 662,293,3 912,342,1 547,270,3 794,997,3 882,510,4 372,124,4 415,764,4 341,715,2 660,293,4 880,560,4 173,707,5 665,743,3 822,409,4 487,650,5 838,322,4 803,494,3 933,315,4 842,636,2 685,88,4 910,175,4 485,1142,3 917,1265,4 670,442,3 494,738,4 773,1117,3 941,660,4 874,7,3 591,885,3 867,67,2 783,343,4 888,868,3 781,1663,4 918,304,4 918,15,4 720,947,1 912,63,5 327,95,4 654,326,3 822,229,3 827,315,5 757,136,5 297,274,3 636,299,3 703,221,3 373,594,3 677,126,5 873,310,4 20,986,3 869,171,4 751,588,4 231,195,5 129,257,4 907,122,3 814,1203,5 290,927,2 142,270,4 667,474,4 696,880,2 676,474,4 895,230,1 882,780,3 750,6,3 200,124,2 496,390,3 851,471,3 836,110,4 845,69,4 654,87,2 485,458,2 733,282,5 805,142,5 594,289,4 470,70,3 803,96,4 824,221,5 150,921,4 742,747,4 674,320,2 869,607,4 787,390,2 534,317,4 711,1219,5 829,660,4 919,332,4 243,1427,4 547,514,5 869,650,3 43,150,4 931,662,4 384,1064,3 815,259,3 919,306,3 845,46,5 781,1512,2 757,749,2 760,282,4 623,761,4 850,874,5 337,478,5 552,486,5 845,232,5 762,737,2 941,878,4 895,91,1 832,678,3 531,916,4 883,126,4 910,82,4 932,383,1 6,49,5 575,203,4 451,289,2 423,687,2 895,281,2 660,203,5 662,323,2 377,442,4 760,262,1 882,895,5 907,27,4 295,245,4 790,301,4 654,711,3 893,511,5 590,236,3 653,366,4 715,30,3 937,274,4 866,6,5 881,176,5 932,227,4 803,401,3 851,116,4 884,814,4 903,795,3 885,384,3 879,392,3 157,284,5 665,645,3 859,628,3 833,283,4 693,480,4 84,171,4 863,127,4 502,962,5 329,24,5 748,201,5 160,224,1 641,109,2 869,126,5 389,689,3 660,198,5 803,948,3 523,130,5 871,1039,3 750,1034,2 756,234,3 
907,222,4 891,762,2 367,163,3 884,422,4 565,384,3 853,543,3 902,63,5 881,182,4 416,116,4 455,771,4 879,160,2 502,302,5 536,1153,1 839,587,4 931,638,5 687,301,5 605,78,3 708,175,4 525,844,5 329,227,5 931,481,5 715,524,3 863,115,4 861,844,4 895,179,5 400,590,3 895,6,4 859,25,3 931,69,4 757,1291,1 55,280,2 900,390,5 847,162,5 237,254,3 620,90,3 150,965,4 932,475,2 844,339,1 155,527,4 415,726,5 503,448,4 792,6,3 895,79,2 321,654,5 799,475,3 723,1433,1 879,355,4 591,150,4 458,270,4 526,1148,4 874,920,5 932,568,1 681,377,3 915,182,4 623,507,4 820,472,3 550,332,5 781,1589,3 76,97,4 933,1017,4 129,751,5 566,1297,5 871,120,4 536,223,3 768,684,3 449,422,5 861,567,3 726,558,2 158,321,5 787,664,2 931,559,2 101,300,3 390,426,5 714,97,5 216,404,3 386,323,4 721,844,5 804,37,3 853,13,4 709,356,4 797,1062,3 641,146,4 103,406,2 932,865,2 654,1630,4 781,315,4 632,70,3 175,180,3 586,301,3 789,154,3 270,962,5 386,172,4 942,171,4 601,242,3 882,65,3 536,130,4 552,212,5 787,469,3 388,195,3 877,738,3 892,297,4 343,472,4 940,221,2 481,248,2 752,303,4 853,1280,2 304,864,3 877,320,2 471,142,4 233,257,2 520,495,2 697,21,1 183,211,4 832,345,5 761,255,3 918,987,3 517,741,5 918,411,2 913,774,3 941,70,5 895,171,5 922,459,4 772,651,3 889,428,4 789,1027,3 279,363,3 885,152,3 879,368,1 398,464,3 630,293,3 803,158,4 832,654,2 497,237,4 863,68,5 845,11,5 891,419,2 746,133,5 939,49,4 488,327,4 325,427,5 794,409,2 133,337,4 891,1123,4 458,931,4 882,169,3 877,392,3 652,144,2 842,631,2 726,52,1 663,0,4 24,6,4 891,435,3 711,77,4 868,99,5 668,251,2 839,498,4 349,126,5 451,287,2 902,823,3 235,716,3 850,7,4 473,1062,5 882,301,5 384,99,4 4,447,2 912,203,4 873,181,4 6,356,5 621,116,4 797,784,3 638,989,1 711,227,3 115,123,3 932,24,2 496,155,5 482,172,4 824,565,5 512,256,4 373,240,5 861,822,4 752,68,4 404,729,1 788,99,5 845,494,4 10,238,4 850,830,5 681,921,3 846,739,4 488,299,5 931,154,3 917,957,3 888,155,5 270,173,5 775,199,4 915,755,3 708,225,3 451,284,3 297,484,3 543,345,4 604,545,2 471,580,4 341,474,5 763,417,4 902,155,5 917,0,3 825,88,5 850,325,3 523,500,2 822,513,5 772,1035,3 461,321,5 888,730,2 894,1013,3 880,503,3 845,364,2 861,221,5 926,28,5 803,1100,3 301,265,2 891,472,3 350,257,5 850,222,4 845,579,5 876,737,4 918,98,4 174,110,4 896,497,5 289,61,2 313,92,1 771,1024,3 718,161,4 822,587,3 41,135,4 415,146,5 346,410,5 649,362,2 660,178,4 849,434,4 229,63,5 780,134,5 748,1033,2 200,1165,3 404,1026,1 449,780,4 885,434,3 621,417,3 177,872,3 505,204,5 710,446,4 879,460,4 862,301,4 895,615,3 850,179,5 942,52,3 839,503,3 293,454,3 909,283,3 942,540,4 867,1205,3 313,1290,1 575,0,4 865,314,4 706,69,3 879,52,4 781,1189,2 861,256,5 886,194,4 740,27,3 327,470,3 608,947,1 452,402,4 781,330,3 766,431,5 871,333,1 935,15,4 855,299,4 795,1196,3 924,446,4 791,404,3 804,424,5 746,21,3 546,320,4 560,638,3 714,21,4 646,293,3 737,3,4 721,404,3 860,320,1 724,872,4 386,152,4 679,0,4 804,587,2 886,167,4 879,297,4 502,380,5 905,299,3 786,747,4 718,467,3 938,14,5 881,545,2 404,756,1 832,859,2 795,524,4 436,400,5 846,495,4 384,692,4 792,180,4 746,95,5 5,181,4 485,278,4 81,211,4 617,87,4 9,39,4 740,130,4 404,340,1 373,405,3 870,325,5 664,923,4 898,426,5 458,476,1 667,339,4 21,208,4 804,1064,5 520,182,3 789,1015,2 353,9,5 428,736,4 653,1114,3 806,431,5 915,267,5 496,171,5 597,259,3 869,557,4 649,1049,3 929,281,4 910,431,3 631,317,5 607,133,3 611,201,2 805,126,5 769,301,2 668,234,2 923,11,4 194,134,5 834,1044,4 800,681,5 113,521,5 804,941,3 681,755,2 777,549,4 662,545,3 458,925,4 641,383,5 12,279,4 915,248,3 891,754,4 899,653,2 619,111,4 869,495,5 913,380,3 
866,272,3 885,86,4 197,941,4 830,155,4 501,753,2 598,845,5 693,179,4 404,657,4 726,552,2 750,120,4 845,793,5 845,375,2 660,495,5 920,70,4 879,770,3 874,651,5 806,402,4 921,287,2 658,386,4 787,519,4 649,503,3 933,65,4 670,582,3 377,549,2 449,520,4 62,284,3 327,50,3 808,318,3 885,215,5 910,854,5 423,258,2 760,8,2 754,298,2 478,153,3 93,630,5 758,126,2 797,71,3 663,3,4 535,0,5 706,274,4 547,251,3 618,596,4 942,232,5 901,274,4 338,249,5 792,49,5 345,57,3 544,384,3 891,134,5 842,161,2 641,1053,3 681,76,3 455,203,3 641,795,4 538,201,5 709,270,3 850,8,4 614,189,3 834,256,3 17,380,4 895,800,2 895,116,2 765,177,4 658,835,4 544,394,4 272,267,5 485,147,2 631,6,3 447,873,3 797,1002,3 886,64,5 390,130,2 61,274,4 781,1142,2 628,275,5 186,203,2 877,661,1 592,1015,4 616,1611,1 804,195,2 845,172,4 832,222,4 877,236,3 909,1024,2 114,82,3 890,865,5 457,51,4 931,483,5 307,256,4 536,675,4 891,95,4 536,513,4 791,1053,1 933,233,2 407,241,4 884,81,4 863,282,5 278,481,4 832,400,2 878,303,4 697,210,2 926,254,4 441,870,1 781,886,4 659,428,4 343,116,3 307,65,4 406,1159,1 882,1403,3 888,954,3 842,526,3 932,1187,1 63,692,3 918,330,4 139,987,3 839,126,4 523,55,4 912,59,3 393,794,2 915,41,5 477,234,2 781,332,3 935,245,4 409,271,4 275,22,5 638,713,2 166,640,4 278,46,4 839,519,5 433,410,5 616,217,2 748,207,5 713,747,5 893,116,3 861,179,5 895,631,2 906,95,5 302,830,4 877,639,1 933,494,4 103,470,3 415,1482,4 338,93,2 406,442,3 495,332,3 591,1275,1 377,68,3 932,651,3 620,889,1 869,457,1 794,131,3 723,263,3 937,49,5 797,1034,4 200,1102,3 386,52,4 762,132,3 715,141,3 487,508,2 747,285,3 469,290,2 706,172,2 292,695,2 847,120,4 804,8,3 774,886,4 846,190,4 527,201,5 785,3,4 581,270,4 501,322,4 665,107,3 867,68,2 658,466,3 767,619,2 75,23,2 454,371,4 345,394,1 648,814,3 270,124,3 845,217,4 847,420,5 631,181,3 698,18,4 867,160,2 748,620,3 659,180,4 285,172,4 323,267,4 846,7,4 222,277,4 787,447,2 925,312,3 868,126,5 390,95,3 797,475,2 780,877,1 384,513,4 775,317,4 887,279,3 142,689,2 918,259,4 565,1027,2 839,970,4 869,316,4 436,128,1 624,704,3 553,6,3 792,121,3 778,116,4 651,747,3 428,745,3 709,11,4 877,534,1 416,120,3 624,596,2 931,177,5 747,632,4 100,1027,3 715,226,3 859,268,2 629,263,2 505,488,4 344,214,4 61,116,4 847,503,3 775,587,4 557,136,4 814,426,5 585,692,3 647,513,2 708,1187,4 881,193,3 881,146,4 804,67,3 906,1243,5 635,105,4 274,418,3 850,471,3 874,936,4 842,229,3 22,450,2 704,362,2 166,183,1 69,992,3 362,38,4 708,97,4 937,1282,3 839,211,4 100,471,3 122,293,1 748,66,1 658,225,4 573,886,4 279,215,5 493,497,4 882,1225,3 682,878,3 746,230,3 785,285,4 556,269,3 879,761,4 642,95,5 873,675,3 632,580,3 832,575,3 754,309,4 733,21,3 560,1293,1 405,94,4 915,791,3 695,177,4 909,299,4 614,27,4 425,616,3 839,644,3 893,901,3 221,379,4 772,317,4 115,689,3 592,283,4 917,87,2 621,1418,2 576,426,4 804,110,3 503,547,2 882,1130,5 659,46,2 542,602,5 902,292,4 479,207,2 915,59,4 861,259,5 508,244,2 594,985,2 846,140,3 606,173,3 478,233,5 861,194,5 377,568,3 880,142,5 537,175,4 888,278,2 189,825,3 637,510,3 496,143,4 881,190,5 885,709,4 708,650,4 0,17,4 820,94,5 870,358,3 576,1335,1 803,615,3 874,478,4 611,242,2 13,275,4 392,32,3 902,323,4 935,534,2 708,832,4 928,171,4 926,470,4 57,729,5 533,823,4 560,181,3 886,1495,4 675,992,5 652,584,2 853,1046,1 528,323,2 879,301,5 895,194,4 485,306,3 880,762,3 300,333,3 693,171,5 892,844,3 253,227,4 893,259,2 698,94,3 195,109,1 881,142,4 681,471,3 641,384,5 652,117,3 446,201,3 93,133,5 720,267,4 662,325,4 797,242,4 803,154,3 702,120,5 732,1374,3 863,185,4 915,149,4 388,487,5 837,717,5 353,743,4 816,299,3 
116,248,4 706,159,5 667,894,3 773,95,2 697,515,2 825,664,5 902,6,2 668,78,2 486,339,1 695,284,4 560,283,1 939,97,4 910,427,4 360,225,3 895,661,3 781,300,3 836,1048,1 880,61,4 577,267,2 605,712,4 877,780,1 884,1220,3 820,280,3 776,8,5 915,158,3 869,86,5 891,81,3 882,899,5 764,150,4 929,175,3 776,522,4 862,361,1 893,970,3 846,116,2 689,62,3 471,263,3 394,317,4 804,99,5 467,23,3 827,330,4 868,297,3 921,809,4 550,720,5 880,418,5 867,66,3 333,37,2 302,239,3 805,301,4 532,207,4 290,575,4 923,143,3 898,517,4 515,581,5 137,146,4 567,528,4 900,401,4 836,471,3 415,419,3 387,218,5 896,139,3 591,314,5 923,432,5 592,552,2 842,213,3 847,688,1 931,108,2 501,681,5 866,221,4 662,175,5 886,844,4 896,500,5 647,1059,2 682,625,3 746,201,4 586,1624,4 493,244,3 920,777,3 910,478,5 704,565,4 902,120,3 631,1,4 750,236,2 893,115,4 734,274,4 373,653,3 0,58,5 854,170,3 825,425,2 188,458,4 587,731,4 936,221,3 888,0,3 882,944,4 663,495,5 942,815,4 192,158,4 918,87,2 377,587,5 499,121,3 638,322,1 862,537,2 773,1227,1 531,833,4 773,71,1 810,894,5 373,26,4 878,116,4 12,521,5 838,234,4 870,3,3 158,357,1 300,61,3 37,1031,4 879,26,3 503,321,4 524,6,3 803,229,4 393,89,3 781,339,3 693,384,4 832,263,2 895,495,4 839,208,4 502,115,5 942,96,2 714,84,3 748,808,3 689,76,3 746,184,5 591,462,4 809,880,4 896,683,2 243,187,4 891,640,5 404,377,4 700,311,3 197,150,4 888,1138,1 803,736,3 0,14,5 837,173,4 787,650,4 540,89,4 895,392,3 888,738,3 931,523,5 56,0,5 930,470,3 353,1038,4 918,299,4 935,1008,4 882,209,4 869,131,4 434,405,3 392,247,4 548,23,3 874,461,4 915,977,1 569,320,1 915,862,3 822,24,3 900,194,5 392,0,3 726,16,1 773,1089,1 787,565,4 757,275,2 855,311,2 292,314,3 885,100,4 642,429,5 536,650,3 898,172,3 492,316,3 341,1015,1 802,288,3 449,497,3 931,221,4 834,605,5 910,1038,4 879,472,3 434,138,2 881,134,5 659,1614,2 746,31,5 912,427,3 863,735,5 747,846,4 619,94,4 893,261,4 400,184,4 834,159,3 267,449,1 304,649,4 722,163,4 330,131,3 931,162,4 883,85,3 916,281,4 911,27,4 608,146,1 832,22,5 824,273,4 845,133,4 370,495,4 525,270,3 681,773,4 874,287,4 547,8,1 757,228,3 535,415,4 652,618,3 372,215,4 931,528,4 706,935,4 654,1043,3 915,195,4 832,31,5 789,9,1 906,224,5 576,587,4 899,477,2 278,61,3 299,1011,4 881,180,5 677,331,4 861,6,5 362,24,3 853,468,5 836,124,5 920,171,4 455,58,4 513,236,4 681,366,3 893,675,3 846,218,4 279,945,4 941,315,4 891,14,4 910,708,5 894,116,3 842,445,3 749,337,3 902,8,3 842,527,3 404,953,4 896,175,5 877,126,4 834,377,4 832,183,3 158,844,1 441,927,3 600,227,5 895,152,4 880,6,4 750,117,2 549,681,4 457,530,5 659,422,3 706,171,2 544,138,3 435,927,4 159,301,5 772,1554,4 775,190,5 797,171,4 556,304,3 746,472,3 392,271,4 837,123,4 542,8,4 870,78,5 517,234,4 893,471,3 658,120,4 816,923,3 531,683,5 513,67,4 343,99,5 805,503,4 566,38,3 885,432,2 907,182,4 747,203,3 453,987,2 737,270,3 906,267,4 703,285,5 748,390,3 843,160,3 302,237,4 892,289,3 560,595,2 853,18,3 937,290,4 386,94,2 202,457,3 867,190,3 748,199,4 787,3,3 520,0,2 495,150,3 906,181,5 853,12,3 698,268,4 327,299,5 915,6,4 550,299,4 1,274,5 275,364,3 492,207,4 683,215,3 748,929,3 931,433,5 59,591,4 845,568,3 483,929,3 62,321,2 606,949,3 296,723,3 649,897,3 520,402,4 915,110,4 416,817,2 884,6,3 654,58,4 696,253,2 718,409,1 939,528,3 179,734,4 691,761,4 922,822,4 180,1214,1 845,301,5 906,927,5 654,426,4 659,208,4 813,412,2 549,257,5 310,427,4 915,641,3 906,1219,5 624,134,5 632,95,4 616,673,3 934,285,5 619,233,3 748,232,5 909,356,4 297,401,3 744,0,2 707,627,3 706,864,5 789,232,3 298,732,3 621,68,4 906,122,4 786,293,3 888,468,4 745,404,2 404,88,1 642,209,4 907,653,3 
891,565,4 664,312,4 886,495,4 884,208,2 881,95,4 82,943,3 806,929,5 600,1115,4 927,268,5 869,588,4 659,162,2 654,478,4 618,10,2 939,212,4 0,110,5 781,306,4 649,24,3 0,51,4 720,318,3 520,264,3 502,43,5 654,332,2 915,596,2 623,404,4 587,1077,4 879,1619,3 373,136,2 914,304,2 910,210,3 915,384,3 877,56,4 915,154,2 794,411,3 255,99,4 832,580,1 91,1051,2 833,627,5 393,1483,4 356,824,3 312,777,2 941,173,5 617,415,4 497,179,4 653,281,3 834,927,3 331,63,5 850,302,4 670,37,5 278,1436,3 880,288,1 918,294,3 880,214,3 872,357,2 936,224,2 916,878,2 882,1117,4 891,764,2 619,626,5 378,842,4 560,525,3 94,293,2 824,406,3 670,451,4 555,522,5 579,24,3 845,418,5 529,274,5 766,480,5 416,140,3 879,382,3 587,312,5 863,144,4 560,10,4 906,743,5 446,23,3 858,380,4 304,44,5 869,30,4 591,408,1 473,728,4 890,120,4 218,432,5 939,481,5 815,242,4 377,411,2 778,274,4 428,229,2 885,567,3 740,254,3 641,1424,2 834,684,4 915,683,3 842,88,5 617,1220,2 467,90,5 621,189,4 932,193,4 863,332,5 606,854,4 937,234,1 525,297,4 881,100,3 773,173,3 863,65,3 384,230,2 897,270,3 879,408,2 143,689,3 902,12,5 850,111,1 618,848,2 879,1092,3 747,171,4 923,177,5 675,254,5 843,929,2 884,24,4 885,27,4 846,195,3 895,449,1 804,64,3 918,235,5 942,281,5 794,80,4 889,522,4 638,27,4 689,202,4 143,136,4 314,78,4 903,731,3 149,290,4 931,99,5 689,157,4 880,321,4 794,61,4 912,460,4 199,238,3 416,157,2 536,462,3 901,288,3 587,820,4 825,719,3 21,525,4 720,328,3 626,55,2 586,320,2 455,93,3 714,124,3 547,404,4 159,249,4 772,342,1 853,695,2 41,734,4 670,120,4 536,889,1 867,157,1 461,327,5 942,613,5 613,120,4 608,312,5 290,404,4 707,677,2 845,379,3 906,1166,5 893,297,3 907,193,3 932,228,1 725,0,4 932,88,4 300,720,3 654,974,3 589,297,2 711,450,5 939,300,3 380,141,3 843,548,3 654,377,1 641,1090,4 449,139,3 903,746,4 693,505,4 415,386,3 505,134,5 540,303,4 124,66,5 842,299,3 915,152,3 654,447,4 551,1361,3 156,243,5 744,274,1 68,221,3 923,287,3 245,432,5 906,1156,5 662,974,4 275,32,4 869,568,2 669,520,4 864,98,1 794,563,1 291,202,4 832,663,3 491,171,3 923,113,3 105,69,3 536,805,3 644,176,4 566,196,5 156,234,5 144,95,5 404,68,4 883,948,2 581,675,2 658,356,4 870,285,3 890,475,5 715,282,4 892,124,3 494,968,4 533,299,4 781,260,2 915,947,2 232,8,5 757,327,1 59,482,5 115,269,3 869,460,4 824,15,3 888,1064,4 854,58,3 797,745,4 264,1015,3 782,879,4 246,99,3 641,778,3 755,96,3 698,1009,3 834,464,3 906,1047,5 867,731,3 803,95,5 881,392,4 494,232,4 451,75,4 547,682,4 304,960,3 268,478,4 587,462,4 801,664,4 381,1267,5 800,354,3 565,107,2 842,238,3 700,291,4 348,695,3 415,254,5 912,190,5 757,505,3 882,511,5 720,379,5 268,696,4 756,147,4 477,231,2 631,474,3 624,49,5 942,150,4 895,492,5 937,221,5 384,196,4 444,1066,1 862,885,3 588,335,1 317,207,4 23,1006,5 157,289,4 449,150,5 867,630,4 909,49,5 853,179,4 662,1160,3 279,378,5 502,282,5 896,417,4 720,124,3 880,175,4 885,183,4 805,173,5 847,140,4 879,1040,4 475,69,3 863,366,5 893,934,3 871,281,5 932,52,1 813,287,4 940,357,2 312,356,5 718,508,2 48,476,2 760,401,3 870,341,4 623,409,4 694,886,3 621,153,4 708,287,5 891,167,4 854,511,4 22,641,3 746,62,3 372,228,4 869,656,5 740,1040,4 884,99,3 452,89,3 238,381,3 915,285,4 534,428,3 912,227,5 879,176,5 188,567,4 327,384,3 506,753,5 879,229,3 762,1038,4 759,254,3 597,242,2 215,188,3 881,130,4 663,521,3 558,258,3 931,173,4 644,22,5 733,281,4 558,260,3 769,99,5 804,126,3 333,217,3 17,27,3 746,93,4 560,736,3 933,208,1 899,99,4 896,180,3 846,567,4 696,259,3 177,327,3 880,254,3 465,6,4 877,153,3 877,1148,4 562,166,4 188,169,4 654,514,4 373,545,5 422,886,5 267,238,3 642,526,3 581,507,4 613,285,2 
392,391,4 57,222,5 921,76,4 756,469,3 931,516,5 898,70,4 863,156,4 560,477,4 697,944,2 206,128,3 915,217,3 824,455,3 926,167,4 641,580,2 701,379,4 786,878,4 390,434,5 664,1027,4 486,93,3 885,55,4 863,1247,3 692,210,2 634,876,3 832,37,1 903,681,4 652,197,4 536,194,3 260,299,5 909,11,4 428,696,3 614,285,4 536,482,4 795,293,3 902,420,3 781,1172,2 572,236,4 891,377,4 31,180,4 512,257,4 831,287,3 757,208,5 17,55,5 938,716,4 606,99,4 84,744,3 895,719,1 750,20,5 841,748,4 478,497,5 901,143,5 707,116,4 931,635,3 118,491,5 222,818,3 768,236,3 450,886,1 877,96,3 905,404,3 652,317,4 342,582,4 825,176,5 726,162,4 577,312,5 513,97,5 487,222,4 710,166,2 567,522,3 408,498,3 626,16,2 592,87,4 812,303,1 901,190,5 94,193,5 671,300,4 756,404,4 893,49,4 327,70,4 488,306,4 681,549,2 268,107,5 877,1040,1 226,755,3 125,309,2 891,71,4 619,49,4 902,271,4 665,650,5 576,1034,3 706,505,2 845,66,4 881,201,4 892,234,3 455,213,4 637,3,4 652,332,5 839,186,3 89,941,4 929,285,3 832,1186,5 425,488,5 485,292,3 292,410,2 377,398,3 544,226,4 197,95,4 869,427,4 898,65,4 804,233,5 58,968,3 792,236,3 789,21,5 823,747,1 109,738,4 304,221,2 863,225,3 746,607,4 801,263,4 621,1410,4 499,160,2 467,4,3 647,762,2 591,511,5 428,240,3 745,228,2 850,1094,3 869,54,3 785,210,4 834,404,3 877,58,3 658,209,5 906,685,4 732,761,4 772,188,5 692,503,5 853,41,4 933,1310,1 732,244,3 845,160,4 795,247,3 594,99,4 541,63,4 845,210,2 493,203,5 344,317,5 736,257,5 275,1231,3 922,1010,4 485,293,2 531,228,5 662,63,5 310,78,4 532,63,5 902,156,4 329,142,5 392,1196,3 923,481,4 886,179,4 909,312,4 536,55,5 652,236,2 797,139,4 319,357,4 915,30,3 882,272,4 502,82,5 477,868,2 591,197,5 612,1156,2 206,176,3 199,1027,2 843,81,3 503,666,3 394,1059,2 923,274,4 396,473,5 435,1488,2 842,391,2 216,10,4 785,274,4 709,300,3 833,285,4 861,98,4 924,216,2 536,324,1 474,538,3 867,502,3 611,475,3 804,139,3 861,929,5 733,161,3 840,314,4 621,1229,1 805,627,3 525,124,2 745,232,4 869,245,3 540,923,5 886,431,5 400,428,3 803,411,2 504,950,3 895,76,4 84,481,4 617,264,4 486,425,3 9,11,5 29,68,5 880,27,5 651,327,4 647,739,4 931,674,4 423,303,4 737,90,4 732,243,2 835,749,3 636,327,4 915,1073,3 941,271,5 772,211,2 405,501,1 860,461,4 931,602,5 29,318,4 579,828,2 275,283,4 499,366,3 746,1014,4 561,87,5 915,819,2 898,739,5 582,285,4 347,368,3 710,407,5 221,332,5 467,123,5 722,987,1 933,210,4 853,825,2 753,126,4 86,395,1 94,391,3 662,281,3 794,430,4 877,215,4 270,426,5 130,535,5 710,957,5 917,429,1 934,273,5 895,100,3 765,513,4 392,142,5 850,455,2 804,161,2 726,110,3 839,643,4 824,105,4 789,258,2 576,1146,4 670,1,4 756,567,4 591,761,5 485,472,3 830,207,2 662,684,4 893,8,4 441,985,1 99,345,3 765,502,3 708,824,2 267,362,1 641,120,5 697,131,4 434,431,3 912,422,3 565,659,4 670,6,5 542,65,3 775,1171,2 699,55,3 781,1664,2 829,417,3 681,142,3 795,384,5 863,402,5 927,113,5 3,300,5 386,675,1 0,87,4 825,1109,4 609,65,3 829,731,5 613,236,2 941,661,4 882,381,3 405,673,4 270,175,3 551,618,3 837,749,4 708,117,5 750,89,3 697,639,2 654,163,2 351,209,3 806,756,4 900,49,4 885,1420,2 850,192,4 927,172,4 616,1186,3 882,51,3 108,249,2 850,247,4 362,27,4 797,49,5 633,146,2 404,1068,1 866,0,4 895,327,1 859,215,4 746,273,4 926,153,3 867,153,3 743,173,4 476,274,5 692,381,4 750,212,5 485,221,3 829,86,4 809,677,4 194,241,4 658,608,4 834,284,4 566,58,5 617,23,2 621,6,5 181,14,4 842,193,2 918,1013,4 880,587,3 359,174,3 765,64,4 632,147,1 839,80,4 921,251,2 118,654,5 882,1170,5 866,181,4 397,184,5 405,184,5 896,140,4 125,302,3 861,187,5 902,213,4 454,1166,4 795,1414,3 681,143,3 412,320,3 118,92,4 150,530,3 920,391,4 
639,160,4 708,154,2 194,840,2 108,27,3 901,327,3 878,1046,2 880,619,2 892,596,4 585,762,4 183,196,4 69,0,4 582,654,5 877,97,4 870,288,3 317,422,5 935,1015,3 292,116,3 623,125,4 795,299,4 910,505,3 295,513,5 893,1280,3 582,99,5 690,691,5 891,825,2 851,825,3 603,566,5 646,195,4 393,175,5 627,983,5 732,275,5 938,282,5 838,105,2 876,172,4 695,898,3 902,11,5 371,263,4 505,198,4 869,698,3 893,330,4 888,187,5 644,745,4 822,865,2 434,945,2 607,508,1 920,1316,2 885,398,3 806,228,4 737,116,3 917,164,4 641,1036,2 883,922,3 901,203,3 526,432,4 882,413,3 905,123,4 915,747,2 756,471,3 920,762,3 737,268,2 922,306,4 462,1006,3 806,420,3 756,251,3 563,923,3 814,178,2 438,267,4 869,630,2 893,1149,4 783,331,4 824,1007,1 485,327,2 869,944,4 617,232,3 398,355,3 643,275,4 654,1089,3 491,477,2 852,333,3 714,366,3 566,522,3 781,1242,3 863,158,4 693,483,4 942,823,4 878,291,4 846,479,3 756,824,3 78,284,5 654,1247,3 315,222,4 928,126,5 341,152,4 884,734,3 630,322,2 645,258,3 797,754,3 793,454,4 787,741,3 703,505,4 413,432,5 891,179,5 886,490,2 487,126,4 318,349,3 429,122,2 772,731,3 648,126,5 796,326,2 587,715,5 40,155,4 900,738,5 806,719,4 418,0,4 784,495,4 534,299,3 931,190,4 896,195,3 851,120,4 362,229,2 794,20,3 598,1047,2 843,587,4 280,299,4 829,423,1 844,895,3 141,267,5 926,767,4 536,78,3 504,94,4 411,194,4 935,814,3 863,222,5 915,677,2 911,478,4 372,230,3 935,299,3 654,196,3 298,398,2 880,392,4 893,288,2 884,952,3 719,895,5 341,41,3 349,1038,4 15,942,3 611,99,4 320,941,3 654,446,4 918,749,3 806,397,3 278,230,2 939,628,3 305,256,4 881,195,4 450,872,5 635,271,5 895,927,3 723,905,1 801,200,4 628,731,5 832,272,3 91,168,5 915,548,3 331,49,5 869,652,4 895,481,3 270,42,3 503,154,3 804,728,3 773,180,3 536,602,4 711,1073,3 757,714,4 869,190,3 637,186,2 913,780,5 893,112,4 748,142,4 926,120,5 923,171,4 893,677,3 519,989,4 654,1135,2 721,327,5 405,366,4 756,81,4 915,197,4 222,27,4 731,293,3 881,289,4 268,267,5 870,650,2 398,1041,3 344,39,3 620,117,3 654,638,3 338,228,3 714,144,2 592,495,5 910,208,5 863,590,4 384,203,1 765,605,3 455,216,3 454,580,3 61,746,3 696,270,4 895,678,3 732,280,2 867,565,1 692,184,5 929,254,3 732,249,1 62,992,2 276,1011,3 795,78,5 725,293,5 822,734,4 765,171,3 939,381,3 893,1193,5 262,180,4 863,317,5 620,2,5 723,293,4 552,186,5 452,651,3 884,173,5 633,865,3 654,1548,2 343,257,3 700,18,5 393,293,4 37,161,5 891,232,5 696,251,1 915,379,2 912,345,3 591,812,4 942,99,5 91,417,3 679,516,4 844,307,4 591,734,5 757,921,5 617,94,3 74,221,5 923,317,5 353,886,4 933,532,3 5,7,4 880,104,3 921,714,3 886,464,5 930,292,4 649,202,3 591,286,3 772,10,2 896,117,5 898,228,2 850,819,3 523,57,4 313,1224,3 404,1428,1 577,750,3 748,940,5 785,173,4 135,256,3 441,163,2 574,602,5 683,72,4 896,495,5 339,195,4 935,345,4 772,22,5 928,283,2 726,809,2 902,663,4 221,684,4 896,280,4 910,153,4 441,440,3 275,635,4 772,719,1 915,745,3 902,345,3 692,610,4 861,249,5 346,712,3 681,345,2 56,256,5 939,203,4 781,320,2 217,656,5 788,740,5 494,207,5 909,97,4 663,182,3 827,905,3 300,754,4 681,234,1 935,897,1 931,527,5 787,28,3 679,23,4 624,96,4 825,78,4 396,528,4 641,925,5 931,356,5 813,668,3 726,143,4 939,11,4 802,258,2 933,549,4 544,187,2 881,411,1 907,493,3 926,927,4 920,677,5 457,968,4 619,419,3 942,201,2 122,1268,2 697,384,4 498,204,5 335,392,3 850,894,3 888,127,5 910,171,4 522,1471,5 293,292,4 785,450,2 715,418,5 780,55,3 748,65,3 469,276,4 847,511,5 867,432,4 446,951,4 270,314,4 893,108,1 665,492,5 905,8,4 893,312,4 781,1013,2 677,13,3 528,306,5 591,236,4 174,565,3 906,1050,5 830,1118,3 565,142,3 550,1050,4 9,285,4 604,929,2 478,337,1 
415,469,4 757,508,5 925,320,3 159,8,3 772,567,1 941,98,5 625,269,2 904,344,4 657,200,3 636,1283,1 420,172,1 921,0,5 942,425,4 747,212,3 25,470,2 591,984,4 692,229,2 315,322,1 17,82,5 9,178,5 313,68,5 907,590,4 804,402,4 805,264,4 741,23,3 789,120,3 681,81,4 882,804,4 494,8,5 470,626,1 832,173,2 891,214,4 167,1027,2 698,1642,3 915,475,2 392,366,3 832,1148,4 859,99,4 787,509,5 842,494,3 185,76,5 933,152,5 388,578,1 327,55,4 910,141,4 653,750,3 895,146,2 726,1205,2 631,257,4 910,215,4 188,1153,3 806,509,5 6,606,3 595,288,3 536,380,3 884,93,2 494,1262,4 616,1315,1 780,49,5 873,99,4 827,169,3 794,1094,3 278,16,4 912,10,4 710,69,5 807,874,4 877,151,4 920,150,3 874,27,4 798,126,4 883,145,3 772,31,4 714,67,4 822,155,5 850,823,4 615,291,4 658,185,3 246,180,4 912,163,2 814,471,1 850,1058,3 248,1166,4 896,672,5 626,147,3 878,750,2 440,99,3 755,274,3 641,219,4 710,316,4 683,731,4 751,1526,1 726,939,2 560,1020,4 454,384,3 906,18,5 853,1010,2 716,239,2 449,755,3 531,1482,4 652,839,4 623,688,3 881,65,4 828,150,4 859,1058,1 155,76,2 858,457,3 631,209,5 850,409,4 623,254,3 404,312,4 879,208,3 587,369,5 850,844,3 684,881,3 861,60,5 842,214,2 425,670,4 876,240,4 895,1097,3 873,124,3 853,492,5 789,1117,3 487,287,2 852,689,2 660,651,2 747,196,3 502,474,2 911,473,3 704,525,3 932,126,5 888,410,2 837,567,4 846,577,3 878,124,5 435,272,4 863,168,5 828,1120,4 415,222,5 870,954,3 192,332,1 820,147,3 804,738,1 184,238,3 372,734,5 703,209,4 861,639,3 42,225,3 874,268,4 889,451,2 745,175,5 144,868,4 605,471,4 193,549,3 141,337,2 369,172,3 853,82,4 305,288,3 880,199,2 880,727,3 663,173,5 874,706,4 604,525,5 80,209,4 605,526,4 587,20,5 660,565,4 269,1006,5 560,691,1 707,818,3 180,298,1 712,271,4 800,342,4 797,583,3 620,107,3 850,22,4 885,771,1 862,271,5 185,404,3 746,191,5 893,59,5 935,23,4 867,746,2 767,254,4 72,245,3 700,126,4 895,734,3 867,1036,1 765,197,4 205,1232,1 804,431,5 119,117,2 806,207,4 668,482,3 6,85,4 431,312,4 888,745,4 915,398,3 737,215,3 794,918,4 881,464,3 727,285,3 850,675,3 451,76,3 41,410,4 861,134,5 806,658,4 554,1012,4 775,88,5 689,88,2 846,12,3 756,173,5 804,945,2 803,495,5 935,323,5 880,135,4 762,25,4 906,172,4 932,231,1 591,457,3 714,1015,4 629,471,3 573,1021,2 861,88,5 367,440,3 588,689,4 876,201,4 428,738,3 378,706,5 752,172,5 853,420,3 863,473,4 623,878,3 560,200,3 395,973,4 713,923,3 636,618,2 779,27,5 939,208,4 891,1117,3 654,943,3 778,124,4 428,650,4 144,10,5 325,1125,2 797,553,2 258,545,3 868,1162,2 441,238,3 795,35,1 895,1248,2 558,21,1 628,55,5 449,1111,3 216,683,5 621,230,4 689,52,2 631,1027,2 22,448,2 459,8,3 590,709,3 715,825,2 933,161,3 617,173,5 858,762,4 881,567,5 942,398,1 901,94,4 926,131,2 424,402,4 707,980,3 888,97,4 377,576,2 893,989,3 783,689,4 644,520,4 886,126,3 896,187,5 787,747,3 665,706,5 91,238,4 886,927,5 714,181,5 61,284,4 757,209,4 437,618,4 920,127,1 829,221,3 762,691,2 829,450,4 860,44,5 327,257,5 787,657,3 428,539,3 850,434,4 652,171,3 853,470,2 694,311,3 256,1009,4 639,300,2 590,84,3 757,349,4 487,131,3 937,927,5 140,293,4 775,509,5 816,293,4 888,492,3 885,62,3 300,182,3 388,86,5 853,356,4 801,199,4 436,201,5 795,549,3 7,357,2 786,358,3 362,788,4 710,581,5 144,925,3 864,546,5 812,749,4 94,678,2 37,1028,1 329,116,5 531,310,2 177,173,5 891,6,4 314,201,3 429,150,4 796,297,3 939,1166,4 333,289,3 436,214,3 741,126,5 706,481,3 921,28,3 496,448,2 560,1114,3 643,545,4 832,577,1 879,540,2 649,207,5 880,42,3 902,105,2 590,711,3 641,3,3 935,24,4 146,285,5 494,1206,5 464,274,4 873,312,3 697,227,3 915,179,5 909,306,2 164,201,4 33,897,5 937,120,5 326,874,4 496,387,4 804,678,4 
847,22,2 651,878,3 454,78,4 895,61,2 37,1036,4 373,195,1 742,8,5 880,95,3 657,529,4 649,190,4 404,1100,3 834,199,4 311,180,4 882,123,5 785,125,4 113,199,3 746,6,4 760,741,2 17,961,4 921,379,4 622,257,4 759,194,4 907,49,4 910,1059,4 886,55,5 783,298,3 942,392,2 468,237,4 847,49,5 916,236,5 842,650,2 876,691,4 630,314,4 870,332,2 762,12,3 926,81,2 929,164,5 918,116,4 902,119,2 327,517,2 667,901,2 879,1118,3 931,442,4 846,69,3 554,251,5 893,69,3 21,173,5 18,293,3 396,13,3 794,94,4 896,520,5 915,824,1 180,1324,1 536,844,2 558,93,3 886,384,4 803,424,4 58,726,2 885,918,4 863,403,4 518,267,5 811,332,5 599,52,4 558,201,1 522,168,5 664,470,3 748,179,4 898,88,4 489,254,1 711,140,3 450,1037,1 939,69,3 698,205,3 892,259,2 822,1066,4 929,63,4 215,2,4 221,1249,1 880,575,3 915,146,1 404,708,1 839,653,4 416,237,4 550,264,4 560,206,3 758,117,5 544,541,2 750,485,3 915,651,4 803,929,3 924,97,4 715,493,5 721,822,3 926,774,3 884,49,3 278,221,1 471,99,5 880,767,3 550,671,1 537,173,4 805,552,3 518,750,4 789,426,4 912,179,3 471,215,4 9,478,5 751,259,3 449,58,4 591,684,2 58,504,4 795,1038,4 842,384,3 895,421,3 510,677,2 912,82,4 446,131,4 895,379,2 481,293,4 333,418,3 737,81,5 842,228,3 550,1117,5 61,275,5 419,285,4 649,600,3 863,126,4 423,257,2 853,1027,2 697,525,2 180,1241,1 560,628,3 895,654,4 902,426,5 625,878,1 846,481,2 663,723,3 654,1143,3 635,24,5 421,270,3 820,388,5 853,755,3 885,404,3 767,596,2 435,591,3 892,475,3 561,549,4 886,500,4 795,208,3 937,259,4 201,178,1 942,405,3 827,747,2 246,257,5 587,691,4 861,466,4 416,65,3 550,759,3 536,714,4 893,0,4 541,743,2 750,480,4 772,1169,3 93,1064,4 880,416,2 425,640,4 273,1162,2 803,161,2 726,473,3 795,1510,3 706,846,5 505,483,4 930,354,2 7,565,3 43,431,5 748,484,4 869,99,4 928,21,5 25,514,4 773,249,3 459,12,3 837,8,4 754,537,4 922,1011,5 550,210,5 596,824,5 176,41,4 644,91,3 929,136,2 928,55,4 870,180,3 781,1291,3 861,97,5 0,12,5 808,332,3 874,31,5 372,189,5 879,1133,5 456,159,4 915,236,3 152,321,3 903,761,2 937,322,3 621,197,4 621,430,5 500,275,4 890,147,5 931,384,2 932,143,4 869,123,4 59,227,4 746,46,5 587,285,4 560,175,4 654,470,3 877,496,2 896,1032,4 858,287,4 880,131,3 116,1058,3 714,628,2 909,8,4 554,300,4 697,497,4 652,481,2 449,68,4 405,1078,2 845,86,4 175,344,5 837,82,5 941,78,5 32,287,4 839,180,3 716,1010,4 867,754,4 455,1420,3 176,237,3 897,326,5 859,516,4 384,22,5 869,573,1 362,383,1 644,55,3 226,14,4 922,627,4 752,652,4 931,426,4 879,108,4 532,27,4 689,635,4 779,173,5 886,418,2 15,32,2 935,454,3 424,257,2 920,819,3 624,164,3 144,332,2 317,166,4 925,301,4 911,652,3 507,78,2 388,208,4 15,409,5 924,815,3 864,474,4 616,301,4 927,47,5 932,839,3 932,166,2 880,419,3 359,470,4 861,497,4 503,400,2 621,225,4 660,194,5 932,93,1 896,64,4 917,663,4 920,1027,4 688,357,4 842,167,3 221,229,4 902,762,5 739,270,2 157,293,1 65,299,5 906,1015,5 824,408,3 621,1227,1 871,545,4 342,1046,1 404,737,1 847,126,3 642,186,4 498,518,3 936,294,4 812,262,3 814,402,4 858,117,3 177,268,4 20,285,3 253,199,3 753,285,3 850,1013,3 689,641,3 866,68,2 815,263,4 770,171,4 635,759,5 706,166,2 795,777,4 771,327,5 626,22,4 765,496,3 846,88,2 895,23,4 776,134,3 836,1008,5 723,894,4 89,134,5 386,11,5 773,213,3 928,432,2 495,1090,1 144,1278,1 400,87,4 733,163,3 918,878,3 763,632,5 312,78,5 424,454,2 233,1009,2 789,182,4 918,874,1 536,339,4 791,14,4 709,285,4 418,68,4 620,81,5 747,407,5 498,1482,1 915,143,3 718,97,5 11,68,5 907,514,4 544,450,3 896,865,5 809,341,5 685,49,4 893,289,2 604,332,4 12,335,2 628,434,4 795,684,4 891,97,5 635,234,4 931,161,4 895,419,4 212,923,4 748,297,4 806,514,4 465,678,3 
343,495,4 941,704,4 940,762,3 804,257,3 882,784,3 751,331,4 325,522,4 879,1013,4 845,214,5 687,878,5 879,1223,3 877,135,4 343,318,1 869,197,4 217,97,5 642,955,4 751,751,3 170,325,2 694,332,2 882,172,4 393,548,4 454,171,4 794,49,3 891,567,4 607,196,5 847,196,5 265,99,5 748,820,3 845,514,5 387,199,5 882,303,3 599,567,4 890,117,4 499,845,3 512,762,3 598,0,4 662,863,3 435,424,4 325,518,5 469,326,3 888,404,2 455,792,3 715,71,3 71,105,4 372,464,4 43,189,5 832,12,2 799,120,4 907,481,3 710,160,4 654,371,3 393,626,5 726,24,3 862,335,2 723,350,1 933,497,3 750,738,3 560,24,2 220,116,4 898,356,4 435,10,5 927,171,5 292,409,2 611,24,3 159,324,3 803,645,4 915,424,5 266,11,5 646,201,4 770,693,3 797,800,3 931,1410,4 871,1375,2 373,924,3 703,196,5 863,48,3 522,1035,4 828,19,3 508,180,4 12,812,1 765,967,4 876,97,5 726,678,5 649,641,3 341,513,5 777,34,1 292,237,4 457,514,4 942,1,5 613,1141,3 861,1008,4 637,225,5 763,6,4 271,133,5 917,528,3 886,142,5 94,461,4 839,636,3 880,519,5 882,549,3 893,959,5 935,220,4 886,114,5 641,392,5 879,555,3 548,322,2 151,659,5 832,75,2 200,507,4 832,196,3 665,68,3 909,0,4 729,150,4 931,164,4 825,650,4 292,819,2 867,81,2 756,144,3 590,109,2 641,190,4 822,142,4 915,187,3 681,1018,5 781,244,4 781,1253,3 591,687,1 660,179,5 720,875,3 322,155,5 93,996,4 922,128,5 249,201,4 453,601,2 794,202,3 815,270,4 115,126,5 491,698,3 192,346,4 781,1087,2 775,656,3 895,182,4 839,88,5 500,543,4 824,865,4 626,689,5 904,878,3 12,881,3 806,49,5 915,565,3 526,317,3 11,126,4 707,288,4 795,558,3 397,482,5 591,1141,5 392,8,4 787,134,3 327,714,2 626,88,5 906,70,5 893,92,4 373,822,1 885,116,2 373,180,3 825,209,5 909,24,3 507,185,3 900,209,4 926,373,4 658,78,4 897,271,4 919,287,3 765,529,4 327,166,3 590,602,5 812,269,5 914,751,3 833,286,2 832,297,5 931,477,4 935,409,3 895,67,3 891,128,3 182,49,2 937,1253,1 910,442,4 726,127,4 938,8,5 705,116,4 883,461,4 869,692,4 891,418,3 859,306,3 853,248,3 879,146,4 929,1047,2 895,401,4 871,929,3 889,161,4 935,5,5 706,506,5 829,133,3 669,1298,4 863,558,4 804,809,2 877,701,1 869,8,5 895,273,2 590,282,4 505,11,5 654,60,3 481,257,2 881,1115,4 898,683,3 781,1669,3 721,695,4 930,314,5 657,918,2 773,596,2 243,192,4 587,215,5 785,483,4 659,256,4 787,735,3 434,54,5 942,193,5 863,193,4 880,24,3 536,979,3 880,514,4 762,316,3 705,23,3 429,302,4 488,878,5 589,136,5 805,225,3 675,285,4 888,293,3 37,767,5 586,987,2 757,639,5 386,567,2 870,1429,3 715,488,4 660,470,4 828,0,4 921,50,4 94,630,4 649,257,3 843,221,3 718,401,4 747,3,4 248,843,5 642,356,5 679,150,5 449,365,3 710,305,5 424,49,5 780,171,5 902,1141,5 672,897,3 939,515,4 843,918,3 424,26,3 456,207,4 863,677,4 658,654,4 882,954,5 928,422,4 838,863,3 789,79,2 538,162,4 713,8,3 404,1528,1 758,23,3 722,177,3 826,937,3 329,81,4 91,596,2 710,199,4 939,301,4 806,392,4 496,925,2 591,517,5 803,641,3 237,236,3 732,296,3 845,212,3 923,426,4 559,21,2 704,376,4 897,314,5 658,1043,4 427,309,4 765,70,3 428,935,4 710,386,4 906,312,5 550,142,4 329,467,5 886,141,1 888,432,4 212,287,4 932,209,3 881,1051,2 392,314,5 421,322,3 850,331,1 428,176,4 939,650,4 730,493,3 540,622,3 495,205,4 867,113,5 805,230,3 903,201,2 939,268,4 655,299,2 795,7,5 877,514,4 621,724,3 765,1049,3 631,202,3 566,82,4 822,41,4 864,1239,5 891,183,4 223,728,3 272,895,4 937,507,4 445,299,3 839,755,4 715,167,5 803,181,4 780,133,5 895,127,4 478,646,5 932,664,1 901,325,3 869,381,3 797,733,3 526,133,5 884,98,4 882,1004,5 882,777,4 781,255,2 789,202,4 814,120,2 824,843,2 839,500,4 498,257,2 903,602,4 434,817,2 928,204,4 932,185,4 870,81,3 915,567,4 591,281,4 763,530,5 862,285,5 
879,383,3 654,110,2 898,602,4 877,583,4 870,330,3 566,488,5 654,356,4 869,195,3 113,171,5 882,203,4 180,369,2 851,263,3 757,60,3 600,386,3 536,172,4 712,689,1 869,476,4 882,11,4 505,84,3 939,99,3 882,284,5 255,20,4 404,169,1 736,159,4 755,982,2 677,299,4 845,801,2 883,1017,2 748,545,3 854,474,4 632,653,3 621,171,5 553,76,4 850,175,4 124,762,3 120,314,4 885,692,4 571,812,4 504,81,4 434,234,4 922,712,5 623,281,4 404,1572,1 708,539,3 843,420,4 471,207,5 662,12,3 345,225,3 787,527,5 449,1517,4 26,1016,4 789,474,3 845,431,3 896,401,5 279,410,3 918,249,3 903,793,4 416,1410,3 24,176,3 794,404,1 850,688,3 714,232,3 891,486,5 221,222,4 752,186,3 867,1187,1 619,1090,4 902,78,4 664,930,3 808,314,5 757,418,4 560,924,3 861,558,4 910,152,5 532,43,4 938,279,5 867,316,5 748,138,4 626,275,2 933,212,4 662,1066,3 765,131,4 785,49,4 547,97,5 266,88,5 787,171,3 592,178,5 478,14,3 652,231,2 452,66,4 869,709,3 932,55,5 633,224,3 863,80,3 6,316,4 880,228,4 93,720,2 926,277,1 933,98,3 896,39,3 528,688,2 706,503,1 936,49,5 886,49,5 777,245,2 853,174,4 158,596,5 307,1117,4 772,250,3 867,549,4 253,134,5 216,824,3 483,160,4 642,233,4 628,199,4 449,404,4 493,126,5 750,98,4 737,196,4 828,257,3 662,947,4 787,661,4 275,468,4 523,1183,3 915,534,3 397,661,2 72,513,4 853,167,4 528,259,4 428,561,2 794,38,4 444,878,2 879,1216,3 703,99,4 881,98,5 161,27,4 429,297,3 200,44,2 905,285,5 73,299,3 861,677,4 180,286,2 888,82,4 523,417,1 62,293,2 225,22,3 143,182,4 918,333,4 916,247,4 394,287,2 824,24,4 882,429,5 879,1243,3 341,1010,3 429,296,4 669,418,4 832,644,3 71,769,4 755,221,2 737,929,3 342,734,5 451,70,3 781,1256,1 391,874,3 645,322,3 654,602,4 893,275,5 662,983,3 737,0,5 708,64,2 937,455,1 900,0,5 328,321,3 814,836,5 932,734,3 616,853,1 933,731,5 404,465,1 641,811,4 503,738,3 886,12,1 931,76,2 891,63,4 777,404,3 436,247,2 803,503,3 882,583,3 720,203,5 793,136,5 795,244,3 893,267,3 428,299,3 830,687,1 552,526,3 249,158,4 850,309,5 59,140,3 585,55,5 797,1502,3 243,1011,2 581,312,5 832,447,3 569,339,3 787,229,3 832,979,3 319,50,5 528,299,4 910,450,2 752,315,4 891,1090,2 504,647,4 37,412,1 649,638,3 848,117,5 702,925,4 792,221,3 177,222,4 521,179,5 570,180,4 852,879,5 935,234,3 921,575,4 795,215,5 748,225,4 845,777,4 233,842,2 863,163,4 292,72,2 313,567,5 822,502,5 845,496,5 505,454,3 866,190,5 629,567,4 536,498,3 251,123,5 920,399,4 881,410,3 296,658,4 709,99,4 266,958,3 377,81,4 876,51,4 885,179,5 485,280,3 931,1115,4 74,865,2 576,21,5 398,918,2 879,400,3 891,237,4 659,522,3 912,99,3 84,662,5 545,891,4 619,587,5 456,251,4 931,446,3 377,203,4 917,71,1 714,712,4 628,325,3 907,78,4 243,55,5 298,509,5 415,925,2 607,233,5 804,644,5 918,303,4 664,236,3 760,281,4 587,150,4 774,314,5 313,1047,4 689,126,4 292,65,2 745,173,5 885,57,4 442,947,1 616,199,5 233,767,2 449,414,3 536,952,3 902,239,4 786,690,4 492,958,2 233,698,3 782,293,3 665,49,3 876,462,4 674,222,1 229,9,3 523,492,4 859,286,3 665,99,4 449,231,4 718,293,2 294,940,4 647,285,1 867,206,3 415,1135,4 415,623,3 428,582,3 757,42,3 931,469,3 747,78,4 467,142,5 447,287,1 458,1013,1 268,672,4 716,1050,3 815,677,4 42,142,4 871,293,3 845,98,4 692,227,2 669,160,2 327,480,3 469,741,4 915,57,5 755,87,1 862,347,2 373,731,4 660,297,3 404,1089,1 348,458,4 534,134,3 678,110,3 876,154,2 295,658,5 550,32,5 789,167,4 842,196,2 832,466,2 938,679,2 158,14,5 832,229,1 750,737,4 895,50,2 326,249,2 302,1208,2 593,275,3 789,282,2 245,174,4 932,410,2 642,171,5 590,510,3 200,32,4 888,146,3 591,590,4 5,236,2 424,683,2 654,1447,3 267,448,2 40,513,4 129,202,4 434,9,5 6,163,5 756,1013,3 879,730,4 89,490,4 
535,94,5 444,1377,2 361,677,2 694,301,4 406,188,4 888,123,4 537,57,4 620,61,4 857,753,4 663,524,4 845,211,5 478,199,5 909,123,3 824,1050,4 590,3,4 886,470,3 746,1040,4 329,626,5 935,243,4 285,182,4 858,409,4 421,233,4 871,716,4 678,750,5 834,513,3 940,297,5 377,727,3 13,523,5 681,47,4 747,299,4 876,530,5 634,741,3 566,302,3 726,146,3 462,285,4 381,251,2 845,180,5 845,270,5 173,97,5 428,10,4 920,761,2 663,316,3 693,240,3 891,759,3 869,712,4 814,658,5 538,214,4 939,708,5 917,1638,5 829,398,5 434,78,4 756,187,3 585,550,2 513,267,4 726,365,3 917,1194,4 850,596,4 896,289,4 434,154,3 266,180,5 221,464,2 295,461,4 542,46,3 806,483,4 789,567,3 918,147,3 773,673,2 798,330,4 223,525,4 91,107,2 454,743,3 720,236,3 891,524,5 587,49,5 880,579,5 665,128,4 587,402,3 200,736,2 581,472,3 918,81,5 404,620,1 884,187,3 605,1010,3 670,653,3 915,116,2 850,1050,2 617,1162,2 740,215,4 877,484,3 845,181,5 704,116,5 785,187,5 889,662,4 584,285,4 656,507,4 317,940,4 884,392,3 238,178,5 744,301,4 802,263,2 614,13,5 888,238,4 879,95,4 730,201,5 396,590,4 473,125,4 883,637,4 882,337,4 848,206,5 647,150,2 471,602,5 587,719,4 766,647,4 863,624,4 938,590,5 412,99,4 884,110,4 397,282,3 269,117,3 915,673,3 885,264,4 863,562,3 783,259,4 434,952,3 885,14,3 598,865,2 885,658,4 775,678,4 199,464,4 675,482,4 681,1227,1 896,120,5 879,760,4 42,730,4 448,212,3 642,81,3 528,689,3 895,1044,3 934,281,4 591,98,5 767,273,3 803,950,3 288,108,3 654,1310,3 793,272,4 696,262,1 814,189,5 888,918,5 867,546,3 794,0,4 659,181,2 502,184,5 726,398,3 379,1100,4 344,209,4 188,184,5 891,61,4 861,482,5 496,41,4 715,233,5 827,287,3 879,23,3 830,270,2 757,615,4 795,42,4 941,264,5 658,1020,5 931,599,2 885,91,3 832,46,5 490,492,4 114,99,5 885,958,3 633,844,3 587,233,5 120,10,2 882,862,3 79,886,4 200,704,3 794,9,4 942,1043,3 792,2,4 544,683,4 310,780,2 888,257,4 664,120,2 932,165,3 632,194,4 346,259,1 909,117,3 415,329,3 753,1196,3 746,643,5 12,777,3 756,448,3 534,503,3 937,596,3 633,244,3 709,419,4 392,585,3 180,1340,1 208,292,4 616,670,4 879,587,4 740,177,5 311,483,5 842,0,3 654,92,3 117,233,5 896,234,3 531,97,5 933,1410,4 813,666,2 293,254,3 781,879,4 762,1179,2 487,133,2 523,428,2 706,461,4 894,293,4 144,248,4 817,257,4 900,422,4 852,339,1 757,432,5 757,446,4 179,180,2 540,708,5 653,222,4 859,711,3 879,247,4 486,177,5 829,48,5 886,377,5 45,261,5 841,339,5 917,167,3 879,233,5 539,146,3 654,1387,3 822,51,3 238,1064,5 814,1132,3 647,526,4 591,1165,3 527,249,3 863,231,4 840,1293,5 25,935,4 795,63,4 886,1012,4 144,217,3 900,150,3 917,195,3 726,1184,1 757,292,3 941,309,4 600,194,3 43,180,4 566,55,4 888,384,3 325,130,2 931,486,3 733,820,2 884,419,4 742,339,3 653,11,5 867,428,2 773,565,2 531,839,4 298,48,4 533,281,5 512,120,5 787,61,3 715,707,4 939,257,5 37,626,5 826,331,3 595,49,5 681,720,4 532,450,2 893,874,3 382,527,4 681,179,3 536,1010,3 891,417,4 264,272,5 893,960,4 224,477,5 641,927,5 937,287,5 400,481,4 804,190,4 401,407,5 814,132,5 832,377,3 513,86,5 558,520,2 773,742,1 560,708,3 323,507,5 879,929,2 915,124,3 408,78,4 789,257,3 937,224,4 692,565,2 918,314,3 775,602,4 795,1031,3 803,412,4 505,195,4 923,199,4 879,80,4 724,747,4 374,1045,2 835,899,2 845,380,4 933,162,4 918,27,4 898,178,2 794,71,3 652,1135,2 578,522,3 916,750,2 887,110,4 404,664,1 869,689,2 633,695,4 179,654,5 757,65,3 756,930,2 845,637,4 536,590,3 382,123,4 746,516,5 870,181,5 242,76,3 685,21,5 921,257,4 645,879,3 47,192,2 531,863,4 652,619,3 532,507,4 607,513,5 921,66,3 854,509,4 110,303,4 718,299,2 634,332,5 893,535,5 405,209,5 545,664,2 902,683,3 864,472,3 633,1161,1 568,332,3 
63,495,5 893,735,4 150,172,5 547,292,4 306,268,4 891,193,4 279,317,5 73,357,2 779,3,3 888,202,2 727,322,3 863,257,5 861,195,5 32,299,4 877,922,3 937,1060,4 889,193,5 746,173,5 446,761,3 641,431,2 582,194,4 668,194,2 750,203,4 94,647,3 865,895,2 850,817,2 804,596,3 846,418,3 822,230,3 235,520,3 822,127,2 477,203,4 496,927,3 607,339,4 541,21,3 893,1004,5 900,635,2 810,987,4 677,276,3 150,27,4 676,739,1 742,407,4 531,1239,2 915,167,4 889,483,3 757,95,5 876,689,4 798,320,4 671,180,3 638,948,3 880,408,4 765,374,2 189,23,3 935,2,4 538,55,2 765,608,3 756,163,3 295,695,4 397,96,4 931,204,5 934,8,1 746,652,5 891,658,4 180,543,1 877,63,5 902,153,4 890,923,5 390,97,4 392,651,3 120,457,1 882,274,4 920,1050,3 193,198,4 897,311,2 63,346,3 647,575,4 775,443,2 876,301,2 806,997,3 397,134,3 470,1218,4 823,258,4 737,224,3 428,795,3 101,0,3 406,218,4 497,336,4 342,1116,3 773,529,5 99,750,4 818,176,4 786,312,5 827,474,4 920,1286,1 777,215,3 879,1016,3 682,353,3 929,105,4 12,514,2 464,835,3 768,1092,3 745,719,3 879,469,4 465,240,5 867,326,4 881,198,5 326,182,3 863,234,5 912,595,1 795,485,5 522,1120,5 362,300,3 769,267,5 906,49,4 542,795,3 377,659,4 871,289,2 839,174,4 99,270,3 859,320,3 744,8,4 645,1312,3 942,30,4 902,86,4 786,285,3 88,172,5 931,194,4 696,8,4 405,293,3 915,970,4 423,299,2 503,167,5 147,172,5 12,8,3 902,59,4 522,530,5 681,26,3 641,207,5 680,291,4 910,373,1 428,79,3 935,120,4 937,1151,3 879,309,3 649,256,3 888,650,4 915,568,2 915,96,4 274,678,3 233,446,3 918,293,3 560,927,2 915,1100,4 93,225,2 806,542,2 683,82,5 853,410,2 668,110,4 911,660,2 918,270,4 311,428,5 926,10,5 407,750,4 847,152,5 894,741,4 882,12,4 941,749,4 921,203,3 260,595,2 871,283,3 157,69,4 900,1046,3 617,482,5 893,276,4 920,193,3 933,419,4 898,180,3 317,1029,2 893,211,5 842,142,2 895,178,2 140,1039,3 745,67,4 581,293,1 832,516,2 631,467,3 692,653,3 804,140,2 940,407,5 562,219,4 773,96,2 795,386,3 312,558,3 863,664,3 880,541,1 156,1015,5 737,210,3 900,450,4 912,530,2 834,484,5 592,392,4 98,328,4 847,483,5 912,473,5 880,185,3 455,108,3 213,517,3 662,1058,2 932,49,4 292,711,2 893,124,3 783,345,4 916,695,5 595,285,4 881,819,3 641,659,3 893,18,4 926,534,3 835,522,5 597,299,4 830,749,4 568,545,3 505,691,4 682,314,4 753,339,2 540,94,4 904,300,4 721,865,4 893,197,4 880,581,1 737,108,4 255,828,4 408,82,3 473,8,5 781,889,1 936,18,1 628,698,3 373,87,3 932,577,1 795,341,5 882,9,5 915,192,4 839,614,5 891,483,5 741,283,3 797,693,3 762,514,4 207,780,3 454,257,5 78,762,5 931,147,2 775,523,5 654,136,4 881,150,5 879,1184,1 377,1167,3 931,188,5 895,28,2 174,97,5 772,540,1 842,169,1 455,13,5 932,183,1 893,189,5 513,221,4 931,529,4 755,65,4 590,193,4 917,854,5 824,596,5 773,1214,1 891,69,4 847,527,3 917,152,1 587,99,1 575,6,5 839,608,4 937,147,3 640,557,5 839,854,4 893,302,4 614,665,2 814,167,3 639,174,5 881,120,4 302,258,3 205,325,1 906,24,5 644,47,4 858,1007,4 296,1135,3 864,743,4 693,71,4 893,977,3 302,80,4 266,1034,4 915,78,3 711,945,4 624,196,5 6,308,3 937,6,4 567,429,3 642,57,4 794,16,2 342,714,5 770,163,2 819,285,4 756,0,4 881,257,3 833,147,4 787,398,3 654,297,4 931,133,4 641,39,4 726,1249,1 891,68,5 23,698,3 847,133,5 913,1258,1 834,134,5 746,128,5 732,115,4 333,1010,4 676,907,4 502,7,5 929,522,2 909,253,1 912,418,5 889,178,5 21,997,1 751,994,4 776,691,5 789,581,3 845,520,3 587,110,1 789,274,4 560,587,2 145,341,1 879,380,4 715,384,1 757,11,5 932,626,2 550,726,5 715,630,5 668,653,5 499,81,4 426,333,5 837,68,4 929,0,3 457,1334,1 871,681,3 714,1187,2 915,249,4 709,172,3 312,234,3 846,1030,2 289,63,4 846,819,1 928,134,5 275,292,4 794,67,3 
915,540,2 932,37,2 220,422,2 879,126,5 933,427,4 370,203,5 642,201,3 458,274,4 934,596,4 659,297,2 341,323,1 880,494,5 885,131,3 421,6,3 922,292,4 833,299,3 206,1169,2 217,430,3 707,1048,2 93,689,4 560,384,2 654,888,3 287,99,5 833,180,5 531,659,4 496,1040,3 682,913,2 931,615,5 882,384,1 463,49,4 915,160,3 926,110,4 915,386,4 616,191,5 707,675,3 487,30,4 434,168,5 58,142,1 289,356,3 840,324,3 180,1351,1 662,318,1 737,527,4 111,314,5 880,1227,3 644,45,5 934,299,4 669,482,5 25,281,4 825,81,3 935,1334,4 434,626,3 930,125,4 804,168,4 869,196,5 148,332,1 410,193,5 886,304,5 895,636,2 663,133,5 88,404,3 591,557,5 670,525,2 559,274,4 942,230,2 215,832,2 604,933,4 929,115,5 829,87,4 910,464,5 842,73,2 879,1187,2 864,684,3 444,270,1 803,447,3 792,105,3 726,97,4 536,517,4 909,173,5 885,565,3 275,1272,2 641,1057,3 837,254,4 710,726,4 864,107,1 802,537,4 835,323,4 654,1117,3 94,49,5 560,650,3 647,219,3 845,62,3 867,377,2 840,257,5 649,289,2 888,2,4 101,683,2 481,312,5 891,158,4 842,195,2 936,302,4 654,278,3 933,629,4 845,81,2 541,248,4 101,1075,2 804,356,5 619,150,4 861,545,4 786,350,3 638,13,5 857,8,5 778,293,5 193,711,3 795,654,3 824,839,4 726,251,2 619,757,2 377,50,3 823,242,1 884,364,3 776,651,5 44,23,3 747,117,2 797,180,5 420,508,2 682,126,4 933,711,4 770,172,4 808,321,3 816,8,3 405,461,5 750,654,3 931,492,5 893,29,4 529,469,3 915,256,3 931,227,4 795,194,5 893,717,3 238,527,5 931,519,4 40,49,5 328,533,3 641,171,5 58,506,4 708,294,3 388,366,4 435,426,3 904,750,3 886,454,5 692,545,1 939,182,3 550,16,5 910,172,5 249,468,4 604,186,5 862,894,5 459,244,3 880,193,3 154,321,2 797,1445,4 689,68,5 649,519,4 937,258,2 723,322,2 607,41,5 893,269,3 513,198,3 647,741,5 544,49,5 904,1050,2 576,293,4 932,38,3 295,1250,5 616,16,1 293,20,3 652,1034,2 654,195,3 918,879,3 641,1031,4 21,730,3 179,234,4 863,392,3 933,448,4 783,301,5 781,751,4 756,117,3 869,426,4 428,527,4 896,24,2 532,626,2 150,663,5 845,462,5 909,596,3 832,818,1 377,955,3 434,584,3 467,54,5 929,13,4 491,473,5 89,515,5 285,1132,4 644,287,3 12,217,1 839,462,5 803,927,4 863,57,5 275,948,3 867,1239,5 428,70,3 805,69,2 108,49,5 931,211,4 804,548,3 733,704,4 837,486,4 267,839,2 845,96,4 618,292,3 501,306,4 897,327,2 542,143,4 813,446,3 804,81,3 806,1273,3 658,558,1 306,120,1 523,59,5 415,302,4 298,353,4 144,543,4 932,163,2 327,609,3 329,383,2 552,482,5 454,735,3 871,974,4 266,264,5 757,318,4 503,99,5 535,204,5 554,12,5 933,88,5 617,213,2 656,6,3 895,951,4 797,827,4 90,78,5 755,150,4 933,169,4 223,146,3 366,216,5 869,87,2 561,147,5 863,508,5 882,495,2 621,426,4 902,272,3 362,1012,3 644,134,5 478,204,3 721,116,4 773,517,1 649,144,3 917,380,5 895,128,4 861,978,5 836,276,2 143,211,5 536,18,4 392,768,4 328,422,4 577,299,4 404,549,2 893,123,5 882,236,3 324,171,4 178,346,3 654,558,2 895,163,4 477,432,3 896,124,4 915,366,3 221,596,1 270,734,4 551,110,3 576,215,4 397,236,3 879,150,4 928,27,4 25,1012,1 451,479,5 322,272,4 895,42,3 757,76,3 750,596,2 471,234,5 932,833,1 787,827,3 888,164,3 880,747,3 879,62,3 885,213,3 590,953,3 876,221,2 879,635,3 722,149,3 523,646,3 331,471,3 886,408,4 867,54,5 285,401,3 931,95,4 925,271,5 649,175,4 846,602,3 278,944,5 737,99,2 746,171,5 392,818,3 486,1219,4 794,582,4 816,287,4 942,203,3 654,1627,2 869,578,2 585,231,3 829,509,4 262,140,5 647,363,5 869,312,4 539,761,4 697,213,1 721,147,3 915,368,2 652,392,2 746,1245,1 773,210,3 773,61,2 536,791,3 803,166,3 804,337,1 882,264,3 800,357,4 644,193,4 772,323,3 471,94,3 602,473,4 807,263,5 921,833,1 870,1433,3 941,327,3 536,977,2 869,691,2 942,131,3 161,293,3 84,161,2 542,561,2 600,405,2 
538,300,5 827,461,3 918,422,5 489,288,1 483,88,4 429,63,4 542,431,4 726,1614,1 576,940,4 581,987,1 845,736,4 306,707,4 917,81,3 617,185,4 918,286,4 942,55,5 704,422,2 406,151,4 885,63,5 837,120,2 918,526,4 404,173,5 278,241,3 931,509,4 783,267,3 917,629,3 616,163,1 531,142,4 915,461,4 806,317,5 934,545,4 869,267,3 628,880,3 714,49,5 233,1002,2 746,168,5 553,275,3 859,331,2 883,920,5 839,10,3 188,296,3 453,731,4 847,175,4 803,249,4 306,830,1 703,381,4 888,513,1 806,448,5 830,507,3 822,126,5 740,171,5 480,143,4 319,596,3 317,450,4 532,69,4 770,127,2 880,595,3 882,395,2 814,203,4 895,653,3 746,16,4 649,613,3 101,315,3 869,1220,3 591,788,4 849,317,5 847,646,5 581,116,3 882,529,3 879,1048,3 866,256,4 771,293,4 893,346,4 757,385,3 906,78,5 12,869,3 840,750,3 644,38,3 582,601,4 567,481,4 853,196,4 861,632,5 895,1027,2 292,431,5 504,754,3 616,440,3 703,493,5 882,988,5 935,716,2 806,94,4 302,115,5 909,209,4 641,793,4 763,938,4 845,47,5 448,85,4 757,195,4 193,653,2 889,175,4 706,653,4 775,6,4 805,689,2 930,844,3 672,339,5 850,348,3 937,257,5 833,12,2 879,1046,3 889,186,5 918,952,3 368,195,5 853,120,1 652,1086,2 533,275,5 462,984,1 658,481,4 935,357,4 398,1059,3 861,968,5 462,935,2 559,196,4 896,119,3 697,418,3 599,758,2 255,933,3 641,63,5 726,615,2 345,194,5 241,274,5 497,1403,3 576,120,5 155,99,4 906,474,3 861,131,5 494,134,3 478,61,3 478,249,4 415,543,2 704,229,4 893,590,4 762,78,5 873,19,3 660,30,3 880,711,3 692,401,3 841,873,5 890,594,3 732,1116,2 921,79,3 772,173,3 888,508,2 873,126,5 698,201,3 428,1111,3 659,86,2 866,299,2 876,13,5 932,434,4 496,1614,3 626,692,2 825,510,3 280,258,3 847,565,4 820,70,5 898,194,4 175,272,4 555,172,3 875,21,4 898,0,3 899,116,2 657,922,3 939,163,2 617,54,2 513,567,4 740,185,5 806,747,4 888,275,4 286,741,3 747,426,4 929,410,1 64,193,4 775,859,3 907,6,3 421,440,4 311,196,4 59,418,3 880,13,1 891,317,5 437,299,4 915,649,4 386,664,2 520,230,2 610,749,5 879,341,3 931,490,5 845,660,4 526,422,3 869,605,4 757,901,4 576,818,3 901,256,3 885,549,4 74,128,3 0,27,4 881,406,2 866,854,5 879,1011,4 826,311,2 933,964,4 163,247,4 846,927,3 899,204,4 631,21,4 915,91,5 926,24,3 890,404,3 720,681,3 699,650,4 193,574,1 879,225,4 941,1220,4 767,236,4 641,939,2 705,236,4 902,174,4 942,624,3 58,52,5 658,169,3 531,98,5 785,418,4 116,163,5 24,81,4 871,627,4 624,651,4 295,186,5 784,268,5 453,510,3 690,321,3 917,196,2 795,242,3 177,172,5 381,513,3 599,81,5 591,747,2 940,299,4 642,407,4 654,914,4 804,418,4 794,746,3 706,133,4 888,58,4 710,377,4 534,133,5 877,739,2 917,964,4 797,1283,3 762,142,3 888,762,4 847,944,5 781,347,4 860,69,4 926,194,4 710,728,3 753,306,3 617,712,4 645,351,1 326,951,2 729,0,4 434,100,3 942,1329,3 846,506,3 404,640,1 853,10,5 408,662,4 120,0,4 290,292,5 871,272,3 881,6,4 463,11,5 886,367,5 307,582,4 623,594,3 908,338,4 388,411,3 885,94,5 33,898,5 864,94,1 495,1040,1 401,99,5 500,272,4 507,214,3 124,81,5 131,1018,3 795,476,2 681,171,5 203,8,5 536,500,3 99,897,4 266,575,3 531,753,4 116,270,4 907,123,3 918,274,5 850,479,5 895,472,2 842,520,2 890,125,5 886,1083,5 869,489,3 690,204,5 879,466,4 499,1314,4 455,482,4 835,259,2 665,565,3 531,452,4 93,1006,4 48,207,4 785,702,3 885,1013,5 861,184,5 664,247,4 652,187,5 705,124,5 278,1273,3 382,603,5 446,473,3 544,385,2 560,530,1 654,1635,4 813,634,2 560,356,3 2,339,5 714,691,3 795,124,4 941,519,5 886,404,5 942,568,2 55,1073,3 441,737,3 620,385,3 916,762,3 439,244,4 229,626,5 931,704,4 708,596,4 523,116,3 746,648,3 883,13,4 547,126,5 552,169,4 932,596,1 685,602,5 434,595,4 746,275,5 37,677,5 886,24,2 657,170,4 757,1046,3 879,731,4 
868,24,2 859,893,2 698,116,4 839,90,5 933,198,4 906,290,5 100,594,2 885,41,5 450,747,4 813,200,2 605,179,4 895,709,4 933,1064,2 803,117,4 770,495,5 839,171,3 704,392,4 704,575,4 942,545,4 250,184,5 921,14,4 787,401,3 20,857,1 833,885,4 888,1238,1 846,65,3 931,487,5 915,712,3 706,8,5 861,1049,5 494,576,1 400,202,4 290,738,3 926,55,4 649,70,3 931,889,1 573,304,3 257,299,5 436,557,3 909,180,1 926,476,3 357,220,5 245,476,4 649,558,3 109,568,4 912,49,4 845,575,4 231,43,4 923,510,5 567,523,2 726,370,2 839,156,4 915,581,4 243,382,3 745,185,4 428,461,4 527,78,5 333,402,4 48,224,2 48,255,4 744,126,2 877,1099,3 265,324,1 926,411,1 410,201,4 640,202,4 845,522,4 920,537,4 911,516,4 704,282,5 410,173,4 891,26,4 535,548,3 866,187,4 757,288,2 935,18,5 915,474,4 742,241,4 933,482,3 25,327,2 841,257,3 590,7,3 762,189,4 247,186,3 885,761,5 882,516,4 663,317,5 876,60,5 523,173,4 333,213,3 371,1211,4 842,356,2 787,21,5 891,226,4 797,943,4 633,1010,4 431,474,4 789,720,3 765,738,2 160,117,2 933,810,4 503,621,4 801,193,4 726,747,4 803,155,4 630,876,2 692,1089,4 584,13,4 540,825,3 845,767,4 898,202,4 109,66,3 921,449,4 681,1152,3 942,384,4 909,830,1 811,331,4 931,611,5 25,1009,2 76,190,3 845,1248,3 194,108,3 652,264,4 895,1680,3 888,41,5 698,1008,4 469,123,3 939,190,4 704,819,3 706,241,4 891,27,4 275,540,3 654,1637,3 144,1010,5 921,390,3 327,565,5 938,256,5 892,287,3 898,567,4 762,818,2 782,894,4 559,1015,3 274,227,4 918,446,4 877,662,5 707,1027,2 236,63,5 814,442,3 917,131,4 772,152,5 843,404,2 689,153,3 664,534,4 605,62,3 483,171,5 402,49,5 930,507,4 696,290,5 592,162,4 373,192,4 773,87,1 841,348,3 341,235,3 787,174,3 932,68,4 926,760,3 406,497,4 415,602,5 10,106,4 616,514,3 83,590,4 794,225,3 931,526,4 853,1283,2 915,232,3 907,171,3 825,189,3 748,161,3 905,990,3 839,207,4 926,70,5 879,256,5 227,274,3 931,46,4 524,180,4 707,1039,2 629,194,4 93,354,2 487,320,3 591,680,1 722,49,4 595,221,3 825,567,4 751,677,3 193,398,2 937,249,3 560,205,3 685,167,5 935,256,3 807,747,4 496,248,5 56,716,4 58,1119,1 787,622,3 803,244,4 644,180,4 913,215,3 541,47,5 652,446,2 879,569,3 906,505,5 879,656,4 652,237,1 730,214,5 837,256,5 907,557,4 795,510,4 803,522,5 722,432,3 619,698,5 179,355,3 654,715,2 806,416,3 889,520,5 409,351,3 598,219,5 623,869,4 752,268,5 877,548,4 525,271,5 896,178,3 770,94,4 898,146,2 893,311,3 622,69,4 776,211,5 576,451,3 357,1158,5 278,575,3 105,81,3 926,1414,4 832,1069,5 927,267,5 769,409,4 845,420,4 434,558,3 449,503,5 8,384,5 812,892,3 415,1228,2 834,186,4 730,236,4 932,203,3 654,1136,3 794,209,4 873,520,5 462,689,4 449,62,4 845,1219,2 710,904,3 69,448,2 861,44,4 692,27,2 656,326,1 863,224,3 729,1011,5 599,78,4 638,834,4 428,662,4 845,727,4 915,582,4 709,276,4 910,227,4 704,172,2 771,750,3 492,169,3 804,144,2 833,514,5 934,312,5 933,81,4 888,694,3 0,171,5 326,65,3 352,312,5 923,152,4 882,58,5 478,237,4 893,1402,3 870,904,3 678,172,5 845,67,3 761,110,2 866,677,3 643,124,4 702,327,3 225,91,2 416,101,3 900,221,4 406,70,3 392,209,4 845,384,5 659,678,2 839,649,4 842,595,3 935,1067,4 895,965,4 151,450,5 888,163,4 825,624,3 415,1285,5 912,130,5 765,442,3 896,1530,4 400,633,1 880,204,4 932,153,2 884,171,3 406,407,4 544,1090,3 737,153,3 882,250,5 467,160,3 930,180,4 882,921,5 486,788,4 914,749,4 505,171,5 668,63,4 614,682,1 846,454,2 942,226,1 233,189,3 436,318,5 803,430,4 928,203,4 535,116,4 536,283,3 905,1010,4 900,825,2 800,680,1 886,567,2 642,435,4 941,688,3 633,759,3 500,92,4 751,1242,4 853,88,4 763,317,5 732,1131,4 804,416,2 860,9,3 650,268,5 924,287,5 704,14,3 715,366,4 415,1167,4 129,238,4 921,61,3 
285,855,2 846,134,4 887,201,4 625,291,1 804,154,1 880,171,4 933,702,4 899,135,2 300,216,3 726,450,5 824,931,3 892,322,2 876,370,5 818,532,4 673,291,4 814,312,5 22,386,3 177,338,3 891,404,4 355,287,4 860,115,4 462,1215,3 726,537,3 863,379,3 757,238,3 884,178,1 121,1073,4 560,68,1 939,356,4 638,508,3 822,101,4 468,509,4 392,865,3 263,516,5 408,528,5 880,226,4 690,78,5 590,855,4 398,181,4 518,681,1 39,301,3 917,198,3 46,1021,3 932,61,1 713,117,5 560,513,4 833,1016,2 846,227,4 12,134,5 711,552,5 23,96,4 129,292,5 235,519,4 298,88,5 891,212,3 891,446,3 915,120,3 435,233,3 922,324,4 654,1283,2 879,61,3 907,495,5 888,1193,4 397,85,3 922,256,5 150,609,5 105,317,5 832,1426,3 748,565,3 934,1047,3 847,431,2 652,143,3 697,606,2 664,264,3 17,132,5 221,1290,2 921,121,2 229,548,5 710,48,4 870,146,5 918,534,3 910,513,3 814,731,5 929,273,4 941,209,4 620,7,5 674,234,1 650,301,5 416,495,3 795,146,5 932,41,1 398,99,3 891,522,5 885,832,5 837,253,3 71,22,4 863,23,5 494,447,5 915,959,4 623,300,3 885,691,3 879,38,4 921,67,4 941,479,5 713,322,4 646,135,5 765,464,3 585,78,4 249,80,4 206,1022,3 583,540,3 663,791,4 424,3,4 906,1027,5 609,134,3 805,1011,4 311,510,5 829,430,3 882,793,4 473,491,4 905,236,4 708,514,4 834,224,2 434,176,5 896,825,4 832,930,4 905,239,3 887,236,5 547,0,4 692,635,1 668,195,3 858,1325,4 532,1085,3 935,0,4 403,875,2 776,287,4 744,935,1 150,201,5 803,526,4 893,1254,4 867,519,4 883,514,4 874,179,5 918,124,4 248,128,5 557,284,5 794,925,2 84,714,4 537,49,5 862,342,5 840,301,5 803,443,4 455,1550,3 896,287,5 923,312,4 902,233,4 474,302,1 881,581,5 915,210,4 746,554,2 935,1159,5 424,299,2 850,1022,3 620,750,4 781,1666,3 934,254,4 879,328,4 851,108,3 795,529,3 877,44,3 369,13,3 895,460,3 822,407,5 704,160,5 879,327,4 327,37,3 434,741,4 681,287,4 842,221,3 900,203,5 869,88,3 22,175,3 841,301,5 786,748,4 935,1198,4 815,299,4 748,418,5 822,650,5 888,473,4 931,388,3 853,221,4 845,786,4 889,185,2 248,122,3 362,259,2 298,82,5 845,613,5 543,326,2 915,68,4 327,280,4 380,482,5 832,356,4 863,76,4 544,448,2 824,1012,2 535,72,4 832,446,5 869,222,4 715,603,3 715,192,5 886,288,5 86,55,4 839,68,4 591,349,4 266,1,3 108,384,4 496,176,4 935,755,4 802,259,3 876,738,4 942,467,2 885,511,1 487,299,4 748,968,4 275,1009,3 829,567,4 314,1083,4 847,1062,5 300,654,1 704,180,5 911,63,4 681,526,3 388,674,3 125,880,5 692,134,4 893,462,4 898,7,4 540,172,5 921,10,5 344,814,3 930,305,4 660,120,2 906,475,4 915,208,3 649,158,3 265,275,3 253,404,3 915,1069,4 85,258,4 799,236,4 870,10,3 781,292,2 726,384,3 794,221,3 456,201,4 703,888,3 406,120,4 879,1043,4 903,1151,4 889,656,5 714,182,3 845,580,4 256,1136,5 919,267,3 895,773,3 337,168,5 377,1146,4 48,903,2 757,575,4 882,1287,4 888,321,3 910,196,4 649,161,3 906,150,4 545,566,4 867,678,3 145,285,3 641,57,3 501,265,3 900,116,4 404,384,1 605,950,2 879,991,4 317,628,4 915,692,3 633,283,4 869,6,4 532,297,4 822,135,5 931,754,2 676,350,2 248,657,4 895,306,3 606,481,5 708,378,3 715,429,5 933,971,3 888,481,4 710,253,2 430,302,4 418,196,5 594,546,4 879,121,3 880,258,3 645,894,3 587,1097,4 902,237,5 896,367,1 804,401,2 333,13,3 730,719,3 422,9,4 65,256,3 220,287,3 884,203,4 640,197,5 942,233,3 815,322,4 565,387,3 135,257,5 757,138,4 485,531,4 658,186,5 853,274,4 641,419,4 312,489,4 900,408,3 313,475,5 737,151,4 871,870,3 879,183,4 847,1064,2 871,353,4 715,504,4 43,200,2 43,30,4 346,1243,3 874,210,5 756,1072,4 824,289,4 806,601,5 623,325,3 715,450,4 942,71,2 832,576,1 459,282,3 795,659,5 708,232,3 502,683,4 649,488,3 416,548,3 12,755,2 220,1015,3 268,824,1 450,257,4 843,171,4 711,142,5 895,1111,3 
456,132,4 278,822,3 750,152,4 633,987,1 94,449,2 639,769,4 900,559,3 894,747,3 558,55,3 842,210,2 665,207,3 832,178,5 197,80,5 12,7,4 726,929,3 794,754,3 839,195,4 82,49,3 304,310,5 859,392,2 626,657,3 285,214,3 737,342,3 12,284,5 726,577,3 681,215,4 766,343,4 668,312,4 843,945,3 622,152,3 939,149,3 902,210,5 626,316,5 319,95,5 532,743,2 540,1090,3 691,1027,3 16,268,4 716,475,4 404,1042,1 536,492,4 641,68,5 757,565,4 275,160,3 891,48,4 89,707,5 339,377,5 549,274,4 207,392,4 585,430,3 306,49,5 927,748,5 935,865,2 449,794,3 906,495,4 576,355,4 489,23,4 804,230,3 795,332,5 270,135,3 517,507,3 706,189,5 832,645,5 249,990,2 767,271,5 12,259,1 770,651,4 485,1141,5 939,160,3 499,181,2 298,1378,3 723,748,4 781,1277,4 924,558,3 843,470,3 471,49,5 938,423,3 911,191,4 221,161,2 832,1273,1 541,49,4 642,392,4 196,750,3 877,754,2 850,1290,2 889,674,5 907,602,4 888,518,4 245,239,3 649,264,4 931,8,5 376,293,5 850,173,5 762,463,3 794,472,2 736,136,5 871,346,2 715,565,3 392,301,4 117,99,5 801,435,4 784,293,4 932,116,2 59,14,4 449,627,4 806,117,4 117,4,2 390,132,4 506,180,5 734,1011,2 893,817,3 767,761,1 898,176,3 886,1046,5 876,474,4 857,688,5 756,68,3 415,123,4 748,379,3 390,647,5 81,133,4 435,186,5 912,170,3 268,236,2 922,293,4 850,1674,3 326,274,4 775,606,4 591,184,5 694,677,4 784,78,4 560,184,4 867,94,2 685,22,5 915,740,3 811,299,5 789,96,2 386,231,2 589,18,5 153,151,4 865,312,1 621,175,4 880,224,2 445,339,2 936,99,3 44,825,3 393,182,4 896,545,4 748,983,3 862,749,4 888,302,3 825,95,5 379,317,4 840,315,4 115,595,5 926,754,5 694,285,3 6,560,4 915,85,4 900,402,2 803,218,3 398,403,3 489,986,3 803,602,5 895,1118,3 664,300,4 682,677,1 847,41,2 471,337,4 900,87,5 649,172,5 456,274,5 497,526,3 531,247,4 586,291,3 845,1220,3 851,1614,2 806,407,3 560,132,3 880,68,3 886,104,3 726,11,5 888,523,4 398,231,2 893,697,4 863,393,3 737,233,4 757,819,4 457,618,2 812,325,3 842,421,2 836,925,1 881,419,5 408,465,4 894,282,4 923,49,5 307,120,3 117,178,5 880,233,3 312,844,3 632,299,4 880,567,4 616,426,4 810,320,3 795,606,4 790,331,5 552,656,5 535,21,5 621,217,3 814,174,3 910,193,4 326,155,4 534,1097,5 482,227,5 489,1011,3 619,1218,3 893,291,4 824,369,3 847,510,4 206,804,3 435,549,4 91,232,3 750,201,4 384,131,4 229,68,4 84,324,2 942,23,4 767,234,2 313,1296,4 907,194,4 678,587,3 881,209,4 657,959,4 558,1400,3 880,819,2 285,344,4 846,651,5 906,271,5 832,200,4 9,609,4 896,410,5 267,1015,3 404,1219,3 697,173,3 861,494,4 499,1068,4 279,275,5 708,568,3 900,117,3 932,120,3 4,418,3 885,558,2 884,28,1 641,154,3 748,636,1 942,79,2 781,935,3 893,1008,4 909,221,4 888,158,3 867,99,5 626,192,5 270,115,2 757,311,3 398,365,3 759,1134,4 471,865,5 884,289,1 814,478,4 803,630,3 556,738,3 465,402,3 248,92,4 866,143,3 551,299,4 670,54,3 862,315,5 788,287,3 199,138,3 676,357,5 748,233,4 513,113,5 750,226,4 269,671,5 882,478,5 879,402,3 906,696,5 432,244,3 907,99,4 16,275,4 547,163,5 895,558,3 842,529,3 893,241,4 362,495,4 576,199,3 816,221,4 715,172,4 642,193,4 697,120,2 553,69,4 591,169,5 797,104,3 795,795,4 889,150,5 176,186,4 833,8,3 902,58,4 451,163,4 490,11,5 869,234,3 888,289,2 928,49,4 337,477,3 926,63,5 864,404,2 845,469,5 885,22,4 872,874,1 681,331,4 654,21,2 587,185,4 757,331,4 917,516,3 638,515,4 531,160,5 891,600,5 462,475,3 652,1100,2 869,582,2 736,500,1 814,704,5 825,525,3 931,495,4 878,24,4 834,133,3 902,1097,5 921,270,3 882,197,5 912,477,4 58,27,5 641,933,2 879,186,5 326,41,3 890,180,3 932,171,2 576,401,4 906,923,5 928,483,3 600,14,1 845,193,4 338,142,5 43,632,3 880,704,1 710,621,4 896,650,3 874,422,5 13,587,4 896,759,5 66,116,5 
847,96,5 834,238,5 781,894,4 879,157,2 61,190,5 900,94,4 837,353,4 716,126,4 929,49,2 797,1508,3 896,88,4 915,205,3 814,391,4 693,237,3 638,952,2 795,283,3 937,285,3 118,257,2 290,576,1 822,182,4 824,410,3 935,1369,4 388,493,5 832,760,2 799,180,4 681,430,4 851,406,3 789,1039,2 158,1131,5 893,701,4 422,291,4 845,510,5 757,840,3 748,78,4 620,794,1 48,714,3 457,82,4 879,201,4 560,616,4 520,67,4 508,308,2 911,417,4 882,150,5 500,677,3 908,85,5 663,457,3 703,57,3 791,507,2 889,151,4 431,292,5 880,1056,1 856,327,3 141,321,2 785,196,3 907,204,3 787,64,4 724,878,4 686,323,2 795,212,4 863,287,5 624,514,4 200,6,3 653,417,4 918,69,4 775,566,2 895,659,5 634,149,3 652,570,1 650,331,3 804,958,2 915,1193,4 550,398,3 933,130,4 654,761,2 221,63,5 323,845,5 773,707,2 726,1027,2 760,150,2 827,19,2 795,422,4 927,133,5 858,474,4 787,166,3 922,741,4 509,872,3 535,509,4 886,203,5 38,301,5 867,6,5 763,716,3 795,282,3 217,163,3 843,175,3 536,207,4 942,448,1 748,93,5 715,160,3 936,136,3 404,1479,2 377,96,5 797,398,5 487,204,4 915,279,2 658,520,5 921,229,4 814,526,5 816,830,1 711,94,4 907,173,3 845,622,1 307,138,3 505,323,1 516,180,4 402,404,5 748,730,3 795,671,3 906,761,5 703,638,2 531,447,4 776,195,5 862,346,2 419,689,5 552,588,5 853,131,5 765,432,3 372,130,4 471,201,5 631,193,4 542,641,3 555,481,5 891,477,5 654,56,3 581,457,4 448,1004,5 587,734,5 614,518,5 557,268,4 491,520,5 404,1004,1 770,113,4 839,356,5 710,227,3 654,864,4 559,49,5 757,355,2 885,577,4 915,105,3 877,511,5 869,1072,5 906,632,5 762,98,4 889,120,2 471,418,4 633,291,3 408,325,3 929,274,4 47,427,4 770,595,4 69,945,3 711,1039,4 76,133,4 889,184,5 931,198,5 935,293,3 850,285,4 818,285,5 914,285,4 653,784,4 12,524,5 723,897,1 539,824,4 942,204,5 915,63,5 910,426,3 542,460,3 14,844,2 888,264,4 410,237,3 879,622,4 879,314,5 877,552,3 310,447,5 893,935,4 766,920,5 916,475,5 803,256,5 570,31,2 626,434,5 477,14,5 382,735,5 144,48,3 393,61,4 653,747,4 173,201,5 608,749,4 189,929,2 566,588,5 456,0,4 879,32,3 663,68,3 379,301,5 931,540,1 6,520,5 600,283,4 487,215,2 641,446,4 534,418,3 877,201,4 805,272,4 880,1163,1 304,484,2 490,13,2 619,259,5 854,581,3 663,53,3 670,171,5 559,507,3 880,176,4 797,37,4 781,936,1 832,823,1 740,478,5 317,457,4 748,229,3 494,636,3 534,49,5 757,339,3 94,218,4 824,747,5 922,404,4 862,270,4 730,844,2 71,80,3 845,57,4 885,583,4 436,209,3 48,1072,5 664,1047,4 5,538,2 659,178,4 832,266,1 816,116,5 896,231,5 157,3,4 654,542,3 617,32,2 398,122,2 560,143,3 935,322,3 170,271,5 707,221,5 569,326,4 330,510,5 591,182,5 762,124,3 795,868,4 450,320,3 794,78,2 853,854,4 805,187,3 185,146,4 700,750,4 818,244,3 797,418,4 386,640,5 537,120,3 932,822,2 869,734,3 621,718,2 68,627,3 862,886,3 907,95,4 906,1053,3 937,928,2 767,99,5 647,87,4 444,150,4 893,277,4 923,133,4 889,450,2 792,0,4 845,1477,4 544,94,4 939,175,4 849,97,1 831,306,4 803,925,4 601,117,3 898,747,4 550,275,5 762,190,4 800,312,5 886,1282,5 200,582,1 845,788,4 508,293,2 536,893,1 941,134,3 585,221,3 922,49,5 935,275,5 805,248,4 644,194,4 532,167,4 939,346,3 118,270,4 362,41,2 151,1034,4 12,792,5 714,120,4 773,264,3 416,208,4 824,146,5 868,314,3 797,472,2 738,287,1 343,714,4 931,810,4 406,190,5 797,727,4 642,3,4 215,146,4 888,337,1 900,293,3 538,257,4 867,132,2 832,522,3 535,166,3 735,323,3 737,664,2 229,194,3 918,63,5 850,127,4 794,104,1 899,128,4 362,651,4 583,180,4 932,283,2 876,450,4 703,605,2 915,1010,4 565,466,3 867,221,3 745,207,4 405,143,1 388,607,3 692,76,2 704,814,3 509,323,1 773,198,4 917,708,4 482,8,2 806,185,4 415,731,5 766,0,5 926,567,5 805,0,4 663,120,3 134,225,3 926,1015,5 
845,269,3 777,225,4 325,236,2 445,298,2 882,46,3 675,315,4 837,44,4 129,6,5 853,741,2 242,7,5 516,471,2 646,401,4 850,1257,3 645,285,3 180,1316,1 898,355,2 895,218,3 769,296,5 623,749,4 329,230,5 668,656,5 405,381,5 933,192,4 253,525,3 516,754,3 915,719,2 858,312,5 412,236,4 845,585,2 710,471,1 672,312,4 750,51,2 715,107,2 302,426,4 404,805,1 942,21,4 795,370,5 940,918,5 652,1243,3 795,94,4 473,548,5 863,199,4 191,110,2 878,120,4 882,366,5 668,270,2 888,1427,3 789,469,4 108,1160,3 905,220,4 795,87,5 801,182,5 720,871,3 344,160,3 393,719,2 27,602,3 809,338,5 881,287,3 757,149,5 867,233,4 415,534,4 313,321,4 524,247,4 863,596,4 681,1266,3 362,272,3 617,147,3 907,126,4 86,127,3 900,727,4 799,274,4 888,59,3 612,602,5 893,285,5 799,49,4 814,654,3 773,566,1 674,271,3 863,1283,3 814,209,2 837,418,5 477,181,5 863,110,3 310,613,4 539,24,4 849,152,4 790,330,1 902,24,4 616,546,1 866,528,5 826,325,3 449,21,5 942,27,4 759,24,2 763,1056,1 919,244,2 504,116,4 839,136,5 827,960,2 877,110,4 770,282,4 419,178,5 928,135,3 59,600,4 559,256,3 888,832,3 715,500,5 505,88,5 333,1262,4 614,282,4 631,190,5 895,1010,2 843,683,3 654,221,2 919,257,4 755,8,2 614,71,2 915,150,3 404,427,1 820,706,5 902,356,5 408,135,4 877,431,3 839,935,4 390,22,4 496,641,3 453,588,2 804,830,4 456,367,1 917,1098,4 322,180,5 537,203,3 62,180,3 877,233,1 698,293,3 57,199,3 794,657,2 82,844,3 933,450,4 714,940,2 467,1133,5 189,8,1 715,228,3 880,379,4 249,628,4 888,99,4 654,1010,3 893,461,4 771,257,5 233,1459,3 915,10,4 880,614,4 444,932,1 471,61,5 647,108,5 892,530,4 180,280,2 670,218,3 57,6,5 478,402,3 530,907,1 346,454,2 886,1027,5 927,245,5 722,747,5 533,918,5 652,430,4 906,747,5 834,203,3 850,251,3 478,474,1 617,134,4 895,57,3 889,433,4 41,432,2 830,128,2 881,27,5 843,174,3 921,199,3 880,196,3 902,29,5 698,105,3 839,674,4 587,236,2 331,819,4 885,48,4 268,1477,1 579,2,5 749,1279,1 876,583,4 584,197,5 818,303,4 681,539,2 726,801,2 534,155,2 565,22,4 654,512,3 831,747,3 120,317,5 356,411,2 180,1357,1 880,138,3 117,426,5 634,267,5 885,88,4 935,126,5 536,370,3 792,8,3 850,283,3 804,400,4 652,1045,1 941,299,5 499,370,4 485,1609,2 285,1052,4 863,472,4 888,942,3 536,269,3 536,495,4 930,13,4 583,107,3 454,10,3 480,499,4 937,289,3 750,602,4 746,1178,1 243,87,4 850,294,5 868,695,2 270,243,2 889,68,4 870,906,3 718,509,4 941,214,5 568,294,3 806,471,4 293,519,5 755,299,4 845,567,4 528,267,5 865,271,2 654,672,3 647,632,3 647,143,4 781,748,4 465,32,4 621,248,5 266,194,4 885,173,5 893,283,3 253,841,3 931,202,4 319,247,5 398,28,3 293,353,3 896,200,5 850,824,4 845,728,4 895,762,2 12,115,5 920,229,3 81,434,5 335,237,3 424,32,4 489,221,3 663,175,4 919,339,4 345,364,1 906,120,4 895,1207,3 711,173,5 918,731,3 245,657,4 723,331,4 344,149,5 532,384,4 138,743,5 676,321,4 692,8,3 866,133,5 465,509,2 681,402,3 891,227,3 757,519,5 494,440,3 789,327,3 869,202,4 797,1336,3 891,581,3 820,494,5 324,385,4 881,69,3 499,762,3 186,693,5 918,263,3 113,170,4 400,662,1 900,429,3 496,72,3 382,187,5 804,404,3 882,189,4 250,479,5 591,6,5 539,514,5 787,202,5 6,172,5 668,120,3 895,808,2 757,341,4 824,287,1 449,355,4 534,1100,4 715,626,4 668,204,4 915,942,4 781,884,3 331,973,4 885,395,2 933,901,4 539,275,4 874,356,5 923,27,4 869,207,4 404,1305,1 926,1094,2 926,62,4 862,288,4 817,287,5 513,189,5 493,662,5 830,203,5 401,180,4 845,603,4 803,379,4 467,771,4 920,146,3 618,545,2 856,19,3 913,401,5 548,257,5 891,30,4 477,143,5 882,57,3 737,96,4 562,303,2 869,1097,4 44,763,4 937,410,3 850,233,4 71,402,3 885,630,4 911,522,4 798,306,3 553,287,3 616,557,3 931,664,2 898,587,3 305,284,4 
291,164,4 618,67,3 715,602,5 341,164,3 805,199,4 771,353,4 654,846,2 554,243,5 618,326,3 487,173,4 806,742,3 918,812,4 867,709,3 781,349,4 48,11,4 495,216,5 888,731,2 0,121,3 151,558,1 467,126,4 194,506,4 459,1170,3 804,293,1 877,169,4 467,225,2 813,183,3 942,731,4 674,749,4 773,1273,1 692,177,5 859,311,4 720,1441,4 911,647,3 941,281,5 538,659,5 724,110,3 444,1008,2 654,960,3 831,312,5 888,91,3 922,339,5 891,1268,5 44,992,4 696,762,4 874,805,4 710,257,4 915,275,4 879,97,5 665,11,4 895,664,1 899,863,2 762,158,3 853,471,1 896,10,2 863,464,3 804,178,4 926,825,4 353,921,4 531,1593,4 740,282,4 891,124,4 591,500,4 243,734,5 406,167,5 888,662,3 531,575,5 871,408,3 274,226,3 931,489,4 882,305,3 906,119,4 650,321,3 854,461,4 933,0,2 392,133,2 894,884,2 393,150,5 554,273,4 715,82,4 890,322,3 861,788,5 942,830,2 621,976,2 644,356,5 9,706,5 763,230,3 647,1625,1 915,738,3 166,464,5 881,454,3 342,259,1 137,741,4 591,2,4 342,316,5 415,95,4 918,659,4 917,142,4 822,218,2 895,714,3 881,968,5 235,410,1 842,61,4 670,236,5 506,894,5 814,166,2 464,512,5 17,316,5 649,225,3 797,132,3 5,536,4 794,143,4 494,153,4 654,714,3 618,55,3 827,115,4 868,1013,4 424,96,2 839,220,4 316,682,2 760,242,3 931,85,4 591,627,3 869,731,2 935,1376,5 853,508,4 497,264,2 833,236,5 550,567,4 846,76,4 825,1,3 855,315,5 900,19,1 897,312,4 451,527,4 637,184,5 835,301,5 535,87,4 451,171,4 926,7,4 341,56,3 928,653,3 702,741,3 932,482,4 926,410,4 206,237,2 857,292,3 931,835,5 893,1152,3 891,98,3 906,392,5 822,153,5 118,470,4 772,263,2 747,587,4 929,280,4 746,8,5 775,636,3 893,24,2 294,581,5 545,270,5 932,423,1 450,884,1 654,953,2 665,281,3 813,144,2 845,174,5 767,221,4 499,48,4 797,394,3 642,427,4 506,305,5 885,281,3 896,648,3 756,432,4 893,706,4 902,478,4 824,1033,4 694,342,4 393,32,4 858,150,2 915,175,4 378,27,4 861,565,3 435,1262,3 38,318,4 268,939,1 405,183,2 591,202,5 270,1281,2 707,351,1 718,426,4 624,485,3 893,638,5 631,632,4 932,66,1 213,720,3 449,1420,4 942,172,5 793,12,4 455,128,3 778,224,4 832,335,2 335,49,4 863,194,4 921,126,3 888,268,4 310,229,5 895,469,2 785,683,4 863,96,4 839,182,5 907,11,3 183,273,4 830,196,4 872,258,1 769,122,3 652,440,3 915,1009,4 888,699,3 94,472,4 916,284,4 795,590,3 904,870,2 644,427,4 828,581,4 751,1278,3 118,1136,5 939,507,5 9,483,5 795,52,1 654,1644,4 853,184,4 449,24,3 879,171,5 849,131,5 789,316,4 526,498,5 292,557,3 903,254,5 867,587,1 196,226,3 404,971,1 889,435,3 442,38,1 726,21,4 898,317,4 756,402,4 76,221,4 633,684,4 584,461,3 838,129,3 805,1128,3 904,321,3 804,366,4 685,193,5 869,46,3 915,1013,3 616,558,1 174,186,4 845,72,4 404,1207,1 896,716,1 587,282,4 931,510,5 404,637,1 931,804,4 895,398,1 895,240,5 116,894,2 585,805,4 806,577,4 936,987,2 520,78,4 773,412,1 84,1074,3 626,228,2 710,1288,2 682,606,5 550,50,5 551,844,3 881,68,5 853,285,1 740,312,4 787,628,1 918,332,4 745,209,5 921,144,3 891,479,4 659,120,2 682,345,4 935,863,4 772,199,4 482,472,3 360,526,4 465,120,3 649,150,3 888,539,2 926,327,4 298,318,3 694,357,5 147,163,4 163,689,4 881,377,5 485,245,3 150,497,5 886,672,5 845,614,5 907,480,3 906,4,5 895,259,2 245,383,2 715,1100,5 696,894,2 692,661,4 605,153,3 813,97,4 670,97,4 404,876,1 649,1,3 923,99,4 910,626,3 25,247,3 415,230,3 665,427,3 428,43,3 247,322,1 906,257,4 746,257,2 331,561,5 915,1596,3 891,201,4 843,240,4 814,87,4 600,8,4 76,182,5 197,427,4 934,124,4 176,203,3 114,695,4 266,182,4 833,267,3 931,135,5 221,1205,2 377,427,3 845,495,3 682,512,5 882,738,2 678,422,3 932,233,3 926,117,5 591,133,5 797,702,4 473,342,3 158,1253,1 803,745,4 531,758,2 386,187,5 869,569,2 590,25,3 912,275,3 
925,291,3 915,843,3 795,648,3 787,316,4 908,169,5 531,126,5 542,731,3 793,1250,4 729,684,2 9,685,4 373,81,4 921,218,1 483,185,4 12,140,2 338,735,3 690,226,4 710,82,5 748,832,2 454,285,5 662,596,3 435,634,3 822,203,4 642,217,3 834,522,3 870,908,3 706,922,5 740,209,3 498,523,4 326,259,1 888,25,4 547,271,2 885,466,4 144,942,3 726,208,3 842,632,3 665,356,4 456,385,3 758,293,5 931,234,2 233,1184,3 710,418,5 657,627,3 535,180,5 804,227,3 681,727,3 785,691,4 69,417,3 863,14,4 942,418,2 907,482,4 879,217,4 748,728,4 696,878,4 920,110,4 486,10,5 924,324,4 895,488,5 850,1597,3 888,532,3 920,258,4 377,37,3 935,903,5 471,355,3 507,49,5 145,310,4 919,312,5 424,292,4 942,238,5 892,357,2 720,221,5 910,150,5 668,96,4 413,269,5 404,175,1 4,374,3 23,517,4 891,21,5 845,27,5 534,497,4 523,133,5 682,327,2 12,181,5 94,432,4 356,825,3 545,120,5 558,512,5 25,322,2 918,22,3 832,249,3 715,177,5 378,432,4 786,287,1 449,227,4 931,1448,5 903,814,4 845,509,4 806,229,4 920,244,1 617,30,4 898,28,2 931,1557,5 617,505,4 824,283,3 415,391,5 846,715,3 55,294,3 854,165,4 720,1392,3 933,707,3 931,120,3 670,561,5 901,7,5 912,94,4 780,301,5 298,113,4 922,8,4 912,728,3 649,130,3 304,68,3 180,1136,1 778,117,5 641,1151,5 879,226,2 386,24,2 863,256,4 930,297,4 886,94,4 939,435,4 698,322,4 61,527,5 840,321,4 895,1,3 559,605,4 895,479,3 692,57,3 907,477,4 758,280,4 437,251,4 905,282,4 811,681,4 900,93,4 824,251,5 912,426,4 889,670,5 931,854,5 874,293,2 670,117,5 536,11,3 933,71,3 665,290,3 621,2,1 223,125,3 683,209,3 797,731,2 668,247,4 934,117,4 391,173,5 373,470,4 912,918,4 33,287,2 710,93,2 777,218,3 282,108,4 719,271,4 863,728,4 744,9,5 650,275,4 428,938,4 718,519,5 921,42,3 863,142,4 832,57,2 804,715,4 523,470,4 909,627,1 659,78,2 52,14,5 832,3,3 541,71,3 787,1106,3 869,356,5 455,71,1 23,57,3 341,1009,1 901,270,2 912,656,5 787,95,3 717,1027,4 863,116,4 714,575,2 896,505,4 888,27,4 864,762,1 910,654,5 692,49,3 650,308,1 861,434,5 726,826,3 726,228,2 553,865,3 793,18,4 882,693,5 922,150,4 403,338,1 808,299,4 710,156,3 757,5,2 647,440,3 550,228,5 930,749,5 462,1198,1 911,658,5 522,154,4 926,551,4 663,508,4 757,540,4 832,939,2 666,426,5 665,512,4 623,865,3 877,1038,3 859,343,3 553,55,4 748,477,5 386,407,4 767,345,3 822,16,4 397,711,2 566,428,4 626,525,4 906,709,4 94,506,4 879,1183,3 855,326,4 550,627,5 838,844,4 803,356,5 885,627,3 703,518,3 803,454,5 353,935,4 335,578,3 638,164,3 489,457,3 906,1046,5 889,312,5 658,163,4 19,81,4 892,285,4 941,434,5 428,193,4 311,643,5 322,650,5 785,457,3 879,190,5 674,426,5 900,1604,5 559,10,4 617,48,3 777,422,1 818,146,5 619,559,4 925,293,3 747,57,4 861,81,4 842,233,4 377,435,4 659,122,2 699,97,3 641,94,5 821,0,4 708,696,5 885,942,3 62,120,1 889,135,5 602,287,3 926,154,4 923,236,4 825,809,3 63,731,4 658,498,4 882,13,3 688,0,3 681,41,5 373,24,5 905,276,3 362,99,5 885,46,4 92,117,3 755,587,4 663,99,5 866,155,5 939,270,2 69,1029,2 346,692,5 591,1619,1 702,1011,4 883,712,3 638,355,2 607,186,4 703,88,5 498,99,4 91,224,3 647,495,4 298,19,3 605,545,4 915,82,4 577,324,1 746,1658,1 939,136,3 918,754,3 693,27,4 532,1000,1 289,182,4 938,253,3 938,840,4 406,93,4 912,435,3 797,940,3 866,327,5 6,638,5 408,853,4 118,322,4 895,386,2 678,285,5 385,180,3 592,774,3 619,421,1 289,150,2 405,156,3 870,225,5 748,976,4 803,228,4 893,1591,4 783,330,4 803,99,5 891,520,5 896,75,4 351,174,1 547,1046,4 652,41,2 553,116,4 597,258,3 322,333,3 689,71,2 668,124,3 687,748,5 888,1552,3 899,236,4 652,409,1 932,226,1 238,38,5 765,549,3 691,65,2 48,234,2 612,88,5 926,403,4 681,283,4 612,11,5 454,545,3 848,405,4 895,312,4 868,150,5 
505,4,4 803,207,5 926,401,4 757,207,4 800,880,3 416,567,2 681,764,4 918,300,3 814,443,2 645,287,3 851,567,4 807,871,5 159,487,5 889,602,5 574,167,5 682,330,2 906,276,5 71,843,4 898,213,4 55,404,4 846,203,4 763,285,4 720,321,4 187,299,4 880,473,3 427,268,5 10,228,4 404,1381,1 757,434,5 789,212,3 778,251,3 882,80,5 905,286,5 327,581,5 693,433,5 536,1146,3 882,353,4 938,404,4 926,721,3 748,400,1 915,430,3 931,44,5 415,110,4 429,126,4 829,486,5 614,85,5 910,602,5 707,456,4 324,1486,3 863,171,5 380,587,3 296,689,5 920,135,4 540,458,5 647,670,3 716,1046,4 747,194,4 313,608,4 889,199,4 895,1621,2 885,180,5 566,9,4 939,81,4 816,272,5 926,755,4 428,155,4 41,86,4 432,299,3 730,1038,4 200,238,1 890,755,4 599,1418,3 845,615,3 654,955,3 742,285,3 932,10,4 775,192,3 874,301,5 632,225,4 601,299,3 888,1109,3 659,1132,2 637,404,3 682,288,4 567,133,5 929,689,3 649,143,3 891,1284,4 525,257,3 789,89,2 923,848,3 933,285,4 320,497,5 649,442,5 681,208,3 889,603,5 532,292,3 578,3,2 795,98,3 781,327,5 942,721,3 180,272,1 720,126,5 867,1075,1 710,722,5 932,6,4 607,215,5 755,621,3 626,659,4 42,253,3 888,880,3 921,68,3 434,207,4 900,767,3 653,188,4 626,683,4 879,880,4 221,409,2 405,25,3 404,78,5 566,704,5 654,1172,2 621,108,5 621,390,2 700,236,5 665,961,3 898,596,2 40,190,4 104,346,3 92,13,4 532,55,3 918,318,3 895,844,3 408,631,3 857,322,2 842,179,3 768,1321,2 864,945,1 895,41,4 918,92,5 353,431,3 763,199,4 760,1557,1 392,1000,4 199,446,4 937,125,4 888,683,2 707,739,5 513,152,4 942,784,2 634,687,2 880,510,5 659,418,2 836,949,2 439,303,5 320,51,3 14,224,3 578,152,4 550,48,3 864,410,1 849,495,5 748,448,3 275,91,4 620,925,3 290,239,4 753,272,3 63,650,4 932,179,5 882,198,4 844,903,3 889,172,4 771,330,5 834,631,5 487,161,3 592,534,3 504,299,4 893,271,4 129,750,5 795,747,5 628,270,4 861,657,5 143,58,4 657,191,4 734,299,4 225,595,3 599,1273,2 916,404,3 560,491,4 703,192,5 942,228,2 933,480,4 873,513,5 269,1108,5 934,0,3 878,117,3 285,799,5 838,0,4 338,212,4 860,462,3 895,641,2 183,734,3 57,167,5 825,182,5 882,47,4 746,402,5 88,450,3 372,167,5 911,506,3 478,130,3 675,249,4 593,99,4 915,100,3 845,182,4 804,921,5 740,66,3 552,604,4 473,508,5 153,522,5 261,746,4 194,365,3 591,530,5 675,99,5 853,708,4 714,1087,1 423,291,4 918,418,5 733,120,4 803,762,4 720,434,4 805,195,5 896,924,5 746,725,2 880,398,4 392,574,2 889,207,5 654,152,2 813,6,4 734,236,4 824,272,5 307,395,4 626,240,4 942,565,4 813,357,2 824,695,3 565,165,4 503,560,4 746,180,5 13,662,5 880,741,4 777,167,5 654,53,2 372,78,4 804,1156,5 642,182,5 562,293,3 46,304,5 801,265,3 928,11,4 84,195,4 486,172,4 144,292,4 278,825,4 680,537,3 212,193,4 641,251,5 335,745,3 529,180,3 853,122,1 13,24,2 617,199,5 935,927,3 706,284,5 902,45,4 726,82,5 522,55,3 588,325,1 560,159,3 756,425,3 942,229,1 615,326,3 886,755,5 767,300,5 879,185,4 850,1253,1 177,267,4 794,153,3 275,412,3 550,659,3 757,12,5 66,1046,3 494,0,4 917,339,1 555,320,4 520,173,4 865,302,4 415,121,3 4,372,3 917,970,4 533,117,4 874,427,4 619,392,5 482,257,4 801,568,3 255,596,4 48,403,3 499,12,5 647,1336,3 453,386,2 220,1207,3 497,76,2 923,428,4 814,228,3 649,173,4 524,146,3 847,198,5 754,300,3 696,236,5 558,299,4 774,332,4 797,814,5 397,7,3 455,323,4 726,471,2 901,267,1 861,68,5 180,879,1 847,482,5 846,194,4 458,256,5 901,988,2 313,11,4 703,480,5 659,443,2 591,134,5 846,171,4 269,55,5 585,977,2 505,692,4 842,430,3 451,455,1 750,483,3 814,264,5 879,66,1 544,61,5 882,194,5 932,575,1 869,801,3 311,90,3 922,925,4 197,683,3 188,472,5 930,343,4 879,178,4 525,312,5 434,97,5 900,1642,5 662,298,2 270,465,4 84,160,4 434,332,3 
396,521,5 451,222,5 837,190,5 797,322,4 918,309,3 726,95,4 678,94,3 726,362,3 61,14,2 578,330,3 933,312,3 915,401,3 702,126,5 930,124,4 762,465,4 121,196,5 581,0,4 654,1011,3 455,228,3 143,169,4 129,793,5 705,322,4 831,677,2 909,404,4 847,518,5 86,1177,3 271,68,4 931,505,4 923,215,4 643,872,4 933,151,4 591,147,2 850,716,3 681,691,3 806,624,3 300,16,4 101,271,3 942,430,4 918,342,4 921,630,3 542,13,4 915,131,3 660,57,4 393,392,4 942,723,1 649,377,3 853,430,3 942,66,4 911,173,3 917,85,4 866,179,5 921,401,3 932,409,3 842,160,2 935,320,3 756,454,3 289,98,4 40,204,4 781,1588,3 922,824,4 863,450,4 860,508,5 863,227,5 619,1479,3 697,132,2 720,747,3 912,185,3 882,513,4 584,651,4 910,162,4 797,167,4 719,901,4 307,641,5 845,203,3 313,1028,2 721,747,4 536,702,3 535,431,4 918,945,4 732,932,1 888,1021,4 939,312,5 103,8,2 883,197,5 652,1230,2 404,659,2 756,560,2 698,474,4 404,388,2 924,218,3 942,200,5 498,646,5 681,1220,3 891,0,5 751,1462,4 824,325,4 506,344,5 748,429,4 188,531,4 937,357,4 406,130,3 867,588,4 942,385,1 849,94,5 861,251,3 576,769,4 233,622,2 937,870,2 552,523,5 893,212,4 630,331,3 603,446,4 93,181,5 772,181,4 906,110,5 721,870,2 89,499,4 717,256,4 333,9,4 540,1408,4 291,482,5 641,758,3 879,1034,4 921,213,2 487,322,1 933,793,4 531,186,4 127,179,5 906,404,4 737,97,4 885,193,3 893,56,4 756,573,3 756,297,4 338,345,5 879,1156,4 912,95,5 532,777,4 654,44,3 879,590,4 649,473,4 869,1045,3 770,110,4 855,269,3 931,152,4 331,244,4 750,808,3 415,183,4 769,472,5 637,174,4 622,184,4 290,152,4 681,23,4 717,281,5 640,482,5 605,235,3 821,188,4 780,231,3 853,481,3 494,577,3 915,530,4 200,194,3 446,236,4 541,172,4 917,461,3 825,384,5 827,462,2 902,627,3 893,932,3 864,417,1 541,383,3 801,439,3 232,214,5 839,510,4 584,29,4 588,331,4 642,117,2 503,399,3 888,8,4 849,161,3 646,297,3 715,297,5 820,458,5 933,185,2 619,417,3 824,125,3 58,128,5 458,470,3 885,580,4 456,432,5 532,754,3 629,731,4 279,221,3 544,325,3 621,160,3 700,0,4 900,687,2 726,378,2 853,810,3 853,627,2 479,168,5 845,130,3 652,75,3 775,52,2 591,1081,3 884,299,4 942,227,3 895,385,3 327,434,4 472,284,4 202,743,2 797,825,5 614,267,4 319,339,2 740,217,4 934,933,4 680,269,1 920,418,5 369,834,5 397,152,4 628,240,5 838,1244,4 465,683,4 850,805,4 644,673,3 626,281,2 636,1059,2 662,1326,4 921,374,2 919,300,2 926,203,4 902,708,4 878,275,4 546,268,3 888,426,4 869,203,4 114,116,4 787,225,4 932,78,3 818,257,2 931,561,2 712,299,2 545,681,3 926,408,4 902,519,4 428,1437,1 342,273,3 898,498,3 863,780,3 922,1276,5 773,524,2 891,469,4 562,870,2 850,1539,2 665,691,3 926,818,3 618,683,4 344,41,2 157,708,5 910,190,5 353,96,3 838,180,3 882,21,3 402,236,5 520,214,1 216,807,2 832,218,4 547,293,3 465,91,4 909,133,3 917,498,4 932,199,4 631,366,2 296,337,2 689,117,4 918,182,3 638,284,1 534,189,4 629,63,5 780,1499,5 792,684,3 882,89,3 934,280,5 192,486,5 497,1494,3 84,519,3 681,53,4 888,296,3 652,471,1 605,57,3 642,1097,4 915,1681,3 763,410,3 903,116,4 896,1218,4 798,190,3 706,25,3 689,105,3 810,677,5 636,14,4 134,4,3 392,786,5 307,536,4 824,745,5 606,18,3 845,38,3 918,1172,3 926,173,3 915,202,4 883,212,4 441,741,3 900,210,4 686,285,3 768,475,4 929,170,1 536,583,2 939,526,3 881,368,3 331,983,2 819,301,5 873,136,4 141,133,5 506,840,5 536,422,2 710,202,4 879,200,4 341,13,5 615,301,5 637,28,2 888,215,4 935,1114,4 647,602,5 888,1151,3 846,704,3 606,237,4 859,309,4 874,522,4 707,293,3 803,427,3 269,553,1 843,417,3 693,184,4 942,940,1 932,560,3 888,657,4 605,183,5 884,537,4 895,391,3 380,418,5 584,282,4 896,669,3 869,252,4 658,179,5 561,565,4 506,257,4 658,161,3 180,825,1 534,381,5 
311,646,5 898,432,4 388,285,2 889,101,3 193,501,4 641,567,4 813,435,3 449,169,5 839,434,4 765,384,3 869,479,5 57,583,5 845,486,4 770,250,5 502,632,5 926,95,5 674,241,4 889,209,4 792,404,3 5,489,5 706,286,4 453,85,2 931,197,4 86,597,2 931,435,3 789,239,3 771,244,5 931,100,3 926,384,4 748,1015,5 675,911,3 863,1032,2 751,682,4 845,548,4 833,325,4 896,116,3 881,264,5 144,267,4 864,257,4 920,404,3 424,346,4 431,1011,5 906,552,5 94,226,2 755,154,4 458,1012,3 885,685,4 915,229,3 882,221,3 897,315,5 672,895,5 842,418,2 550,940,4 69,226,3 893,627,3 581,320,3 834,615,4 659,90,4 576,1027,4 850,317,5 850,16,5 623,309,4 822,25,5 193,207,3 915,2,3 494,217,4 760,476,1 620,72,5 863,342,5 503,461,4 706,1254,3 670,254,5 882,712,3 662,508,4 592,312,4 550,474,5 346,356,5 540,542,4 404,1259,1 649,510,5 877,1120,2 242,305,4 24,691,4 898,27,5 649,67,3 478,251,2 921,134,2 795,706,3 518,345,4 888,124,4 893,99,4 755,1239,4 657,54,4 857,1367,4 672,321,4 654,527,5 697,987,1 921,227,4 812,750,5 333,303,3 492,832,2 416,190,5 483,94,4 931,207,5 869,168,4 200,14,3 751,320,3 614,581,3 720,208,3 882,514,5 812,299,4 513,431,4 665,648,3 193,500,3 886,719,5 792,978,3 898,63,4 898,723,5 914,301,4 932,158,3 482,226,3 177,117,4 905,743,4 416,152,5 105,58,4 796,256,5 885,23,4 912,168,4 931,169,4 547,470,5 884,475,4 222,172,5 844,285,5 534,631,4 22,918,5 801,645,4 935,251,2 660,49,5 932,124,4 585,226,2 523,211,5 888,741,3 449,1125,4 931,184,4 940,272,3 842,198,3 847,478,5 552,606,4 250,201,4 730,132,1 379,173,4 942,216,3 893,349,3 396,11,4 859,69,5 853,155,3 331,553,3 850,923,4 492,261,3 586,244,1 795,90,2 807,269,4 70,99,4 392,144,3 885,366,4 26,122,5 935,824,4 885,380,2 746,510,5 641,451,1 842,659,2 294,55,4 404,581,3 822,87,5 19,203,3 763,251,3 404,656,1 614,1127,1 896,95,5 560,88,4 394,257,4 428,777,3 863,166,4 690,242,1 616,867,4 758,180,5 404,85,1 720,0,5 676,987,4 127,1135,3 937,595,5 233,85,2 891,429,5 935,318,4 939,419,4 561,431,5 641,826,1 804,551,3 879,1467,4 485,107,4 918,381,5 933,580,2 233,493,4 514,258,3 771,301,5 917,519,3 845,525,4 868,1078,2 327,53,3 847,654,4 94,622,3 496,299,3 180,236,5 839,641,4 446,281,4 862,268,3 642,78,4 619,6,4 803,192,4 532,920,2 682,258,3 670,840,2 931,631,4 891,478,5 360,49,5 895,183,3 783,326,4 37,194,1 916,472,3 824,284,3 837,583,4 806,1075,3 941,477,5 926,216,1 1,301,5 805,233,4 891,215,5 233,791,4 13,524,5 636,92,3 713,368,3 558,346,3 626,540,4 841,287,3 361,682,1 861,257,5 659,314,4 775,218,3 89,729,5 292,500,4 775,589,1 891,149,5 860,736,3 668,473,4 573,267,5 439,212,4 775,178,4 527,1253,3 658,518,4 710,132,5 464,126,4 587,471,4 184,301,4 806,227,4 787,510,5 61,214,3 91,921,1 620,199,4 670,503,4 930,99,4 769,128,5 706,1023,5 926,819,4 620,78,5 797,131,4 694,988,3 884,180,3 304,241,5 265,923,2 794,107,3 29,28,3 734,126,4 599,801,2 880,216,3 932,448,1 897,309,4 659,549,2 911,267,2 180,1032,1 748,482,4 37,446,5 888,233,4 912,55,5 448,332,3 747,731,4 917,207,3 115,274,2 906,738,5 915,404,2 820,596,3 863,356,5 915,237,4 249,143,4 918,339,5 877,450,2 932,99,5 837,126,5 924,671,3 825,94,5 261,81,3 724,327,4 205,335,1 658,615,4 552,177,5 861,196,4 604,273,3 906,234,4 893,321,3 585,256,3 354,1428,4 853,704,4 804,78,5 654,381,3 893,1088,2 892,68,5 918,68,3 665,210,4 906,128,5 642,570,3 298,44,3 863,401,3 863,929,3 876,184,4 568,290,4 879,683,4 910,479,4 923,895,4 516,116,4 89,497,5 582,199,5 794,27,4 647,234,4 926,1046,4 708,692,4 876,743,5 879,238,4 896,704,3 863,1108,4 825,226,4 510,339,4 879,94,3 833,271,4 550,3,2 578,24,4 806,1015,4 575,99,4 845,1209,2 924,323,4 706,691,4 267,379,2 
692,442,2 718,253,1 372,316,4 710,150,4 17,115,5 626,469,3 221,404,3 681,167,5 732,99,5 752,327,3 488,875,2 896,272,3 550,84,1 715,97,5 876,970,4 861,150,5 393,927,4 932,272,3 841,302,5 10,323,1 24,656,4 404,575,1 342,1111,3 581,239,4 781,885,3 935,294,3 153,171,4 862,353,1 652,707,2 920,49,4 895,123,4 932,57,3 885,1227,2 765,518,4 895,434,4 898,233,4 757,210,4 826,342,4 25,287,4 212,97,5 21,384,4 880,794,2 659,270,3 780,96,4 829,647,5 319,773,4 754,937,3 879,627,2 438,1599,5 523,225,3 490,683,5 456,87,4 720,1220,3 742,180,3 879,0,4 891,203,4 267,746,3 386,173,5 879,193,5 221,761,3 922,474,5 505,433,4 757,299,2 918,1072,4 415,654,5 804,180,3 720,1294,3 621,227,5 904,507,4 876,661,5 621,500,3 550,88,4 902,301,4 845,1478,3 536,568,2 853,170,4 640,300,4 325,477,3 746,194,4 664,221,3 920,256,3 720,265,3 898,236,4 933,156,2 542,167,3 418,493,3 119,49,4 587,1310,1 824,97,5 775,237,4 942,40,4 98,1046,4 879,286,4 450,989,3 605,440,4 400,609,4 51,190,5 532,520,3 915,88,5 222,120,3 845,659,3 762,844,4 885,64,3 671,236,2 94,1221,2 938,545,4 907,527,4 822,226,1 893,688,3 523,174,3 628,159,4 882,885,3 916,311,2 46,261,5 787,669,3 937,590,3 690,734,5 488,897,3 747,478,4 920,471,2 942,91,5 553,1283,3 673,409,3 804,430,1 772,947,2 795,32,3 6,440,2 866,197,5 188,185,2 292,627,3 795,782,4 732,845,2 20,875,2 353,209,3 495,660,3 932,678,1 434,7,3 757,581,3 654,366,3 48,1068,3 803,173,5 296,1108,3 560,54,4 917,82,4 747,181,4 711,89,3 834,193,4 404,109,1 428,181,4 880,1077,3 683,476,5 503,153,4 500,1096,5 750,688,2 698,531,3 506,306,5 471,1028,4 714,205,4 662,748,3 915,173,5 448,1009,4 755,81,3 108,870,2 920,759,2 663,515,5 662,258,2 723,330,3 832,346,3 825,770,3 670,1302,3 93,558,4 750,322,1 898,189,4 866,422,3 375,704,3 507,173,4 449,515,5 312,530,3 721,596,3 812,689,4 730,194,1 86,848,5 285,239,3 589,292,3 644,197,3 600,430,4 543,300,2 701,306,2 418,299,4 653,14,3 124,570,3 923,518,4 547,412,3 63,196,3 656,454,1 916,327,2 681,943,3 540,394,2 297,281,4 298,60,4 850,3,5 836,716,1 896,645,5 834,309,4 874,285,3 22,172,5 591,259,4 58,391,2 845,194,4 524,712,4 91,432,5 858,2,5 331,301,5 736,173,2 631,650,5 824,104,3 473,447,5 762,86,2 692,627,4 845,940,2 338,177,5 670,552,5 870,751,3 653,81,5 869,192,5 344,217,3 860,380,4 716,257,5 820,482,5 933,656,3 109,402,3 739,257,3 550,244,3 531,522,5 911,654,5 915,805,4 298,178,4 698,327,2 278,643,1 513,195,5 869,20,3 891,132,3 829,126,4 937,251,4 550,789,2 932,21,5 895,257,5 620,234,3 91,854,5 843,94,4 863,530,5 933,69,4 550,95,5 714,236,4 678,289,2 845,391,2 937,244,3 504,242,2 912,188,3 831,49,3 869,1209,1 434,365,2 901,126,3 803,186,4 108,1138,2 932,635,2 819,357,1 922,974,4 931,485,5 863,767,3 664,270,2 931,229,4 536,44,3 279,11,5 457,63,4 918,150,4 596,285,3 933,804,4 483,124,4 918,287,4 344,285,3 460,304,2 913,777,5 770,97,1 576,731,4 392,933,3 654,534,2 924,566,3 433,423,1 842,199,3 268,1360,4 896,179,5 737,1015,3 781,329,4 904,116,3 401,1283,3 737,53,3 696,224,3 895,256,4 175,256,1 649,384,4 917,69,3 931,1138,2 525,590,4 863,78,5 888,317,4 658,154,3 697,65,3 296,283,4 654,689,2 804,70,3 863,372,2 471,548,5 592,271,5 564,922,4 642,46,4 158,8,3 649,656,4 910,477,5 396,357,2 292,97,4 631,11,5 893,1006,3 845,141,3 53,147,3 762,84,4 89,22,5 891,180,4 830,172,3 822,714,5 877,264,3 326,248,2 295,497,5 866,196,4 482,675,4 150,72,4 663,763,4 757,607,5 23,275,5 428,478,4 692,171,3 444,1015,1 891,683,5 795,704,4 75,95,5 781,254,4 772,239,2 561,233,5 451,209,4 197,92,3 926,228,3 755,1030,2 434,721,3 756,227,4 834,286,4 726,878,4 710,315,4 942,49,4 906,236,5 560,158,1 102,55,5 
879,553,3 912,14,3 869,1663,4 923,604,3 302,541,2 870,988,3 641,356,2 918,190,5 499,92,4 933,649,4 450,244,2 362,223,4 864,455,1 888,603,3 882,749,3 889,6,4 662,173,5 550,214,4 588,876,4 785,264,4 453,131,2 935,99,4 598,545,4 665,654,4 300,154,1 910,624,5 929,99,3 643,307,4 759,844,5 611,126,2 628,38,2 824,322,4 348,712,3 610,301,5 851,99,4 874,44,3 243,120,1 796,1022,3 488,682,2 933,190,5 917,497,4 631,50,4 715,134,3 461,345,1 884,337,3 941,123,4 867,79,2 804,120,3 406,649,2 466,301,4 580,136,5 885,1092,1 388,239,3 920,658,5 319,291,3 76,41,5 860,19,4 814,97,4 681,116,4 942,366,4 503,124,4 714,16,3 877,955,2 935,236,4 918,281,4 325,700,4 895,18,2 904,590,4 803,478,4 815,321,4 278,164,4 773,63,3 37,419,5 844,241,4 781,49,3 867,11,5 223,377,4 565,93,2 882,215,4 912,21,5 275,174,5 129,116,5 55,27,5 532,819,2 649,61,3 585,66,5 553,190,5 481,681,3 888,85,4 327,175,5 933,509,5 647,81,5 602,6,5 279,1062,3 838,457,5 864,407,5 495,87,1 880,256,5 801,564,3 289,119,4 594,1046,2 794,1,3 824,832,4 885,545,1 714,234,2 749,257,3 941,413,4 929,23,1 891,28,2 898,497,4 909,126,5 760,507,1 932,182,4 803,1259,3 775,133,4 404,94,3 915,381,4 344,940,3 888,290,3 711,82,4 233,658,3 93,150,5 756,94,4 784,49,5 911,497,5 532,1141,4 746,520,5 525,244,2 911,609,4 893,344,4 210,889,2 910,430,4 559,514,3 640,431,5 933,85,3 814,558,3 662,651,4 918,302,4 392,1057,4 774,304,4 693,14,4 620,803,4 842,422,2 912,142,5 206,791,2 882,25,3 918,1196,4 549,256,4 614,628,4 718,381,2 863,796,3 58,85,3 514,894,4 919,285,2 787,370,3 792,126,5 926,0,5 756,553,3 870,565,3 781,1383,3 909,683,4 746,203,5 658,711,3 939,677,4 485,812,5 752,182,1 839,520,5 24,85,4 865,339,2 293,293,4 605,184,3 539,120,2 909,309,3 681,80,3 781,268,3 932,162,2 751,901,5 499,401,3 845,557,4 847,450,4 357,207,2 726,172,5 832,801,1 868,248,4 845,385,3 518,990,2 930,303,4 906,426,5 911,645,3 434,429,5 630,872,2 937,741,3 931,473,5 93,292,4 874,22,5 762,954,2 850,160,3 937,925,3 715,1268,4 895,99,3 917,703,4 293,294,4 433,0,4 921,431,5 348,24,3 532,608,4 864,0,1 895,210,4 840,887,5 102,97,3 315,303,3 331,549,5 692,116,4 678,482,5 794,422,2 918,117,4 903,87,3 892,219,3 918,8,5 554,234,3 552,133,4 891,195,4 762,1097,3 55,870,2 551,0,3 720,283,4 476,110,5 800,267,5 879,22,5 637,209,4 620,584,4 789,1046,3 824,1048,3 333,517,4 48,299,1 543,301,5 522,167,4 827,984,3 296,152,5 292,1118,1 550,365,5 915,156,4 95,422,5 787,322,3 820,21,5 891,238,4 746,475,3 715,695,2 405,97,4 641,401,4 278,743,2 757,175,5 706,312,2 899,660,4 915,171,5 402,281,5 822,741,4 888,134,2 520,596,2 918,136,2 773,560,1 531,406,2 781,993,2 853,342,3 765,836,3 405,285,3 605,234,3 113,526,3 485,301,5 902,209,4 800,270,5 935,987,3 380,224,3 922,543,4 523,149,2 550,685,3 814,143,4 645,306,3 797,1101,4 892,49,5 839,1673,4 941,677,3 191,286,4 605,197,4 657,234,2 746,658,4 863,671,2 892,818,3 752,88,3 896,370,2 876,85,4 804,370,1 892,23,4 513,1100,4 846,163,3 48,812,3 692,133,4 98,472,4 880,69,2 929,1009,2 605,187,4 877,152,5 476,273,5 871,684,4 663,57,4 711,500,3 869,735,1 311,82,4 750,209,5 665,191,4 382,184,5 311,524,5 793,256,4 710,90,4 704,587,3 757,344,5 903,534,3 707,879,3 452,780,3 895,287,3 895,198,3 895,78,5 893,1294,3 536,314,4 888,81,4 942,584,1 880,78,4 875,317,5 503,152,3 108,594,3 832,283,1 671,219,2 307,529,4 704,796,4 633,110,4 729,267,4 772,28,2 879,375,3 660,664,3 859,261,4 137,496,5 294,131,5 932,451,1 646,146,4 462,268,5 74,863,4 870,96,3 642,1214,3 935,12,4 713,256,3 920,236,3 212,69,3 21,264,3 378,316,5 789,402,4 772,366,2 832,409,3 781,360,3 98,432,4 10,10,2 931,502,4 763,68,5 
283,314,5 783,339,3 928,196,3 917,971,5 584,312,3 861,405,4 467,43,4 629,470,4 888,95,4 263,172,5 715,464,5 681,585,1 243,454,2 845,508,4 290,581,4 485,824,2 662,832,4 627,300,4 789,1013,2 689,513,1 640,257,3 877,1091,3 310,414,3 879,90,3 456,1220,4 806,595,4 879,410,4 803,475,3 673,180,4 536,302,4 828,9,3 304,126,5 787,147,3 10,746,3 689,442,3 644,958,4 578,293,4 290,561,4 654,915,2 892,120,4 710,548,4 750,93,3 660,217,3 770,81,2 828,211,4 48,120,1 580,935,3 199,522,4 905,283,4 877,689,2 876,305,3 921,366,3 822,918,4 81,659,5 9,603,4 819,346,4 914,690,4 917,41,3 681,400,1 935,1170,5 434,408,3 342,477,5 920,184,3 757,236,4 842,481,2 129,443,4 682,257,3 728,345,1 256,935,4 681,146,1 746,126,5 377,24,4 743,482,4 739,1037,4 715,1125,3 891,287,4 755,1651,1 759,747,4 720,1025,3 803,39,3 21,232,3 492,434,5 863,200,5 325,55,2 605,1064,5 91,1209,1 532,739,4 889,447,2 661,1510,4 869,658,4 453,1088,2 934,619,2 879,474,4 880,472,2 891,510,5 638,864,1 880,119,2 681,61,3 659,300,3 901,78,5 870,268,3 710,968,5 63,502,4 730,607,4 937,236,2 839,652,5 787,530,4 931,1455,4 931,501,4 511,185,5 338,643,5 540,1029,3 929,256,4 799,14,4 654,1141,2 795,572,4 587,446,3 374,233,5 787,158,3 252,965,5 762,197,5 591,983,1 879,793,4 534,276,5 845,50,4 405,100,3 845,1040,4 678,317,5 926,425,4 917,1171,3 617,1062,3 803,482,5 814,158,3 508,270,4 859,662,3 870,300,4 931,81,3 12,428,5 847,522,5 786,344,3 268,738,1 733,229,2 850,814,3 652,1041,2 923,526,4 317,142,5 910,25,4 842,1156,3 893,236,4 912,257,4 918,123,3 933,623,4 792,741,3 547,175,4 863,1139,1 654,1244,3 560,98,3 278,543,1 43,96,2 804,212,3 885,54,4 177,1027,3 851,505,4 566,512,4 906,311,5 607,286,3 15,65,4 796,293,3 494,239,4 775,558,4 600,472,3 681,64,3 116,281,5 0,151,5 932,392,2 805,179,4 176,159,4 40,134,4 657,197,5 220,37,2 621,1148,3 513,420,4 656,872,3 658,504,4 499,1056,3 822,516,5 918,110,4 891,299,4 444,182,2 835,186,5 704,470,5 649,601,4 304,96,4 811,301,3 302,190,5 566,493,5 918,146,4 553,1027,3 337,989,4 757,116,4 559,221,4 912,407,5 828,104,3 434,602,3 622,273,4 638,59,3 861,175,5 372,177,4 895,90,2 278,6,5 803,290,4 929,142,2 279,182,3 560,8,4 837,1038,5 928,143,3 314,287,3 708,761,3 587,300,5 20,52,4 629,1078,1 641,256,5 941,317,5 890,126,4 20,272,4 882,69,3 491,1120,2 746,195,2 498,55,4 874,133,5 898,413,2 605,587,5 641,1054,4 838,332,4 129,1277,5 2,345,5 886,1059,5 718,254,2 386,580,4 381,755,3 921,410,1 768,12,4 888,704,4 903,784,5 581,327,3 869,1043,2 728,893,1 915,136,5 748,432,3 891,482,5 937,117,5 928,588,5 845,186,4 457,68,2 644,201,3 629,545,3 888,958,3 406,100,3 934,180,4 653,247,2 329,495,5 558,651,4 895,68,5 832,179,5 931,522,4 922,120,4 649,152,4 829,227,3 895,660,4 464,834,3 787,191,4 939,263,1 900,320,1 915,97,5 910,86,5 84,332,1 690,771,5 448,69,4 906,171,4 738,748,5 846,156,1 267,115,4 483,392,1 896,404,5 931,98,4 841,901,5 523,1106,4 832,25,1 311,198,5 292,161,3 772,195,4 373,116,5 832,152,3 880,650,5 434,440,3 238,202,1 899,8,2 942,426,4 600,207,4 576,96,5 837,152,4 895,299,2 594,107,2 769,322,5 830,21,5 828,474,4 268,377,3 867,64,2 814,227,5 681,803,3 520,238,5 624,731,3 157,6,5 373,476,1 626,384,2 893,270,2 881,745,4 907,662,3 55,65,3 931,512,5 755,402,2 658,525,5 377,1090,2 906,741,5 930,280,3 916,288,4 183,94,4 937,99,5 884,1029,1 641,399,4 360,691,4 406,72,4 772,521,4 802,321,2 791,1163,3 388,346,4 893,51,4 477,31,3 526,8,5 805,317,5 915,1078,2 620,147,4 709,886,2 893,594,3 881,469,4 714,100,3 824,320,3 732,8,3 547,327,4 922,0,3 829,150,3 846,601,3 931,167,5 328,126,4 492,273,5 166,1124,5 822,167,5 187,186,3 495,418,2 
935,1322,4 915,225,3 726,446,3 463,248,2 748,177,4 895,244,4 863,107,3 27,433,4 587,381,3 441,95,4 912,171,5 536,1047,2 894,221,3 573,311,4 781,253,2 900,428,5 323,320,3 867,154,2 647,248,3 765,293,2 550,301,3 42,201,5 910,472,3 889,132,5 536,344,4 746,99,5 309,1141,5 769,110,5 896,184,5 642,4,3 833,297,4 572,494,2 653,454,3 4,367,1 84,180,4 888,946,4 848,37,5 915,272,3 530,312,5 900,1034,4 669,514,2 803,819,4 441,404,3 938,273,5 938,251,3 804,1070,4 473,410,2 863,1424,2 649,634,3 36,61,5 645,353,3 773,216,2 609,484,5 655,339,3 268,1266,1 937,219,4 180,279,4 338,517,5 853,475,3 906,219,5 886,577,4 833,341,2 600,659,3 415,16,2 806,138,2 704,415,3 923,1148,3 555,134,2 918,276,5 906,987,3 863,229,2 895,482,3 822,685,4 285,366,5 706,478,3 641,130,3 293,332,4 942,184,2 748,404,2 907,263,3 285,536,4 639,1009,3 399,312,5 780,522,5 671,1189,2 200,439,2 794,435,3 633,268,4 758,322,4 942,402,4 384,447,3 803,144,3 715,194,1 881,229,5 503,247,4 933,1202,5 565,520,4 836,180,3 942,400,1 916,2,1 750,657,3 740,723,4 842,227,4 932,514,3 748,355,4 108,409,1 124,221,5 931,398,4 654,1261,3 880,446,4 931,525,5 850,338,4 933,601,3 449,1479,3 532,146,1 691,0,4 43,182,4 864,830,1 916,254,3 797,698,3 718,136,1 188,522,4 408,177,5 327,67,3 869,169,5 935,342,3 788,761,3 748,1439,3 746,487,5 64,8,5 828,844,3 621,741,3 711,203,4 320,427,4 609,316,3 610,314,5 279,584,3 647,210,4 918,180,4 863,120,4 302,633,3 832,1352,3 862,354,4 748,419,4 663,229,3 457,482,5 797,401,3 794,207,4 406,287,4 845,427,3 652,181,3 715,189,5 708,737,1 323,741,5 200,1136,4 760,200,2 649,88,4 64,391,5 304,172,3 550,545,2 745,134,1 499,241,3 392,773,4 144,52,2 626,517,4 805,95,5 857,126,5 860,212,5 750,478,2 803,217,4 804,476,4 607,1280,4 885,1466,5 23,150,5 583,49,4 773,176,4 428,471,3 757,203,4 679,256,4 527,212,4 405,600,3 322,1047,3 592,39,1 5,201,3 921,430,4 405,153,5 839,175,3 536,461,3 882,206,3 773,420,1 714,99,2 531,346,4 698,590,2 730,1274,1 441,449,3 535,1049,5 859,326,3 451,212,4 520,180,4 820,503,4 827,344,1 536,526,4 773,81,2 756,637,3 420,78,4 880,71,2 122,320,4 473,646,4 915,639,4 71,648,4 313,931,4 804,1104,2 747,226,3 931,88,5 726,197,4 434,200,4 917,655,4 659,1138,2 641,685,5 365,757,3 845,559,1 716,244,4 647,175,4 720,738,4 748,154,2 689,46,1 577,1015,4 886,227,4 748,202,4 935,1314,3 918,1118,3 458,297,3 889,204,5 434,634,3 863,203,5 737,654,3 576,67,4 876,1401,4 867,654,4 797,82,4 935,288,5 710,50,4 525,122,3 805,87,4 906,106,5 839,198,4 917,506,5 400,187,1 933,302,4 381,167,4 346,362,1 874,479,5 261,233,3 806,495,5 842,426,2 903,1073,4 920,796,3 912,3,4 880,497,4 555,242,1 326,528,3 843,99,4 870,1175,3 473,58,3 883,211,4 845,191,5 630,885,4 847,429,5 531,1010,5 872,325,4 833,244,4 888,736,3 76,227,3 807,326,5 746,714,5 359,136,5 647,903,2 22,72,3 307,428,4 659,635,2 888,207,4 939,878,3 900,141,4 456,355,4 891,476,4 536,1403,2 233,1,2 936,282,4 771,270,4 665,235,4 789,175,3 814,587,5 850,260,3 61,150,5 803,234,5 697,495,3 587,355,4 681,1310,3 617,587,4 832,1005,1 665,641,5 290,832,3 720,227,5 895,801,2 902,508,4 901,300,2 388,486,5 799,404,4 453,526,4 917,150,2 591,602,5 844,302,1 843,124,3 278,1495,3 805,701,3 861,650,5 397,707,3 532,242,3 560,565,3 384,91,3 425,495,3 338,789,2 757,306,3 180,814,3 12,187,4 706,344,5 641,180,5 781,1607,3 456,568,3 576,4,4 748,180,5 647,376,3 881,290,4 757,269,4 787,152,3 570,143,2 87,325,5 850,686,2 173,1016,2 640,527,4 757,392,4 416,472,2 861,431,5 458,124,4 929,708,4 350,303,3 86,179,4 923,180,3 786,905,1 870,270,5 795,661,5 918,292,4 936,236,4 294,415,4 638,60,3 861,90,5 773,217,1 915,51,5 
880,242,2 654,814,2 608,14,5 853,321,1 618,181,4 550,894,3 459,869,2 150,704,5 793,186,5 502,528,2 926,1177,2 939,170,2 896,762,3 933,225,4 317,943,2 638,99,1 520,203,4 879,406,1 213,54,4 832,1627,3 931,447,2 921,384,3 863,182,4 863,188,4 932,176,4 335,172,5 795,61,4 928,495,3 513,432,5 621,768,1 441,1,3 714,1221,2 750,314,3 129,232,4 558,166,3 882,579,3 888,209,4 61,923,1 850,1119,2 560,1209,1 933,704,4 907,143,4 869,11,4 674,268,5 696,985,1 923,275,2 398,46,3 879,939,3 885,203,3 544,691,3 535,385,4 576,124,4 933,143,4 916,120,1 737,239,3 920,173,5 900,755,4 839,178,5 885,208,4 275,771,4 935,6,4 762,702,5 931,461,4 842,172,2 565,264,4 937,409,1 82,703,3 188,581,5 855,271,5 746,1044,4 845,196,4 804,143,3 242,245,4 450,265,2 932,174,4 403,300,3 312,602,5 895,247,4 592,273,3 923,55,3 373,503,4 654,1154,3 873,284,4 668,6,3 58,242,1 750,273,4 932,8,3 388,238,3 942,575,4 243,66,4 238,13,5 921,918,5 182,1214,1 176,220,3 772,67,2 871,925,4 231,285,3 795,449,3 156,257,3 527,677,3 879,1221,4 390,590,4 918,405,3 522,1068,5 809,877,4 362,312,5 681,32,4 741,281,3 245,158,3 912,465,3 798,1544,4 725,1037,2 930,110,3 846,49,4 861,264,5 922,814,4 81,483,4 883,509,5 845,737,4 882,348,2 908,706,5 502,186,5 795,144,2 928,428,4 224,478,4 58,3,4 820,992,4 338,215,3 911,418,4 304,565,3 208,128,2 373,410,3 935,1374,5 616,319,5 314,317,5 231,434,4 275,181,5 487,745,4 853,1282,2 918,21,5 798,305,4 787,293,3 834,1277,5 794,1029,3 310,549,3 906,977,5 867,1,2 659,93,2 893,533,4 654,306,3 641,747,5 680,893,1 434,83,2 522,212,5 925,244,3 428,128,4 440,299,3 292,121,3 198,1353,1 773,552,2 150,379,5 652,370,1 920,275,1 902,126,5 541,193,4 937,844,1 884,548,3 923,8,4 708,16,4 888,948,3 902,927,2 750,659,4 609,175,4 614,299,4 881,132,5 757,830,4 547,923,3 804,518,4 891,496,4 785,99,4 802,304,5 536,603,3 812,242,3 926,209,5 758,236,3 664,199,4 78,235,5 9,92,4 772,23,3 591,294,4 757,319,5 779,426,3 893,286,4 845,746,3 344,180,4 404,1444,1 7,259,3 698,290,3 794,46,3 180,1380,2 163,327,5 531,214,5 435,855,4 533,507,4 869,366,4 628,193,5 603,287,3 535,430,5 700,285,4 911,356,5 536,608,3 652,209,4 12,399,4 825,3,4 158,931,3 540,1040,3 746,193,3 681,1477,3 666,274,4 392,7,3 733,497,4 228,881,4 870,401,3 902,368,4 424,826,1 850,94,4 415,94,3 587,659,4 536,227,3 271,186,5 909,120,1 452,1078,1 804,316,4 496,225,3 909,741,4 546,750,4 487,491,2 909,182,4 746,899,5 787,444,4 778,446,4 829,230,2 886,70,5 853,507,4 926,570,3 270,609,3 900,68,5 824,1010,3 331,618,3 906,780,5 869,938,3 388,256,3 795,233,2 886,608,4 859,780,2 825,398,4 942,716,4 730,519,4 891,796,4 875,285,5 757,314,5 605,1148,4 880,1216,5 270,428,4 875,522,5 923,401,3 915,65,3 769,256,4 926,1283,4 915,420,5 744,602,4 585,938,4 939,6,4 814,238,5 891,174,4 920,68,4 918,677,2 847,131,5 545,299,3 918,94,4 320,58,4 596,747,5 536,68,2 784,136,2 388,617,4 483,37,4 932,225,2 745,82,4 573,310,4 636,126,2 855,327,3 861,201,5 789,209,4 617,775,2 132,270,5 494,392,5 870,304,3 727,747,3 681,7,3 468,486,5 882,240,4 720,1064,5 200,27,3 456,237,5 535,639,4 895,477,5 775,426,3 885,163,4 654,409,2 279,181,3 302,87,4 711,95,5 895,426,4 685,25,5 41,590,4 393,342,3 349,428,4 297,678,3 597,537,4 621,977,2 933,1134,3 709,500,3 881,500,5 801,747,4 803,512,5 790,293,3 678,96,3 494,589,4 869,653,4 915,22,4 362,79,4 916,0,3 850,929,3 863,87,4 726,839,2 859,677,3 287,285,4 638,862,4 642,571,3 929,209,2 649,157,2 888,192,4 814,332,3 623,274,4 278,373,1 767,283,1 433,1196,5 668,602,5 734,755,2 781,1386,3 502,579,3 894,150,5 58,516,5 391,527,5 447,269,5 591,749,5 658,672,4 846,221,5 302,942,2 99,314,5 
748,469,5 881,431,5 754,285,5 19,14,4 326,627,2 879,526,4 114,557,5 556,306,5 367,412,1 942,215,4 882,1011,5 370,201,5 789,791,2 244,716,4 900,1119,4 415,99,5 1,295,3 604,110,3 649,22,3 922,740,5 233,1447,3 670,81,4 923,128,4 804,10,2 20,0,5 915,557,3 906,14,5 846,500,3 843,11,5 43,197,4 882,81,3 787,299,5 859,338,3 885,409,4 545,235,4 245,65,3 829,691,4 879,317,5 333,160,3 915,122,3 907,708,4 814,317,5 927,126,5 882,489,4 910,198,3 936,873,3 654,1639,3 723,1104,1 785,375,3 873,653,5 382,212,5 576,659,3 803,840,4 620,404,5 929,239,1 903,279,5 654,733,3 572,133,4 17,126,5 886,89,5 933,203,4 703,632,5 778,254,4 144,1011,4 836,284,4 893,921,4 536,299,1 215,180,3 644,183,3 514,325,2 872,347,3 942,762,4 805,1058,3 845,79,4 773,398,2 709,293,3 373,30,5 814,918,5 645,327,3 814,1,3 657,126,5 891,525,4 424,69,3 927,275,5 797,1,4 907,208,3 885,52,1 901,0,5 863,7,5 388,142,3 726,490,4 304,483,3 238,432,5 814,27,4 893,873,4 888,482,4 818,254,1 723,886,3 936,507,1 930,312,4 861,91,5 537,87,2 915,709,3 373,183,2 626,1266,4 633,299,3 752,299,1 867,167,3 795,198,3 292,506,4 881,3,4 923,70,5 270,514,5 608,351,1 911,519,2 639,801,3 621,177,4 710,364,3 345,639,3 748,974,4 921,561,3 726,848,2 826,285,3 681,782,2 837,49,5 909,290,1 722,285,3 640,496,5 730,1086,1 920,121,2 467,942,3 785,227,4 902,474,4 922,275,5 314,92,5 565,152,2 877,165,4 392,1418,3 560,663,4 797,110,1 542,190,4 472,126,5 705,686,1 427,750,5 592,292,1 784,994,3 802,989,2 619,418,2 523,613,5 188,58,3 295,22,5 773,192,5 451,97,5 818,743,5 792,251,4 307,512,3 920,409,2 915,226,3 725,24,4 359,473,5 755,567,3 542,527,4 506,293,5 289,377,3 795,322,2 853,944,3 804,658,3 702,99,4 518,1433,5 710,96,4 893,508,4 884,152,2 442,244,3 747,0,4 692,734,4 725,831,5 404,176,1 752,171,3 910,418,5 932,731,3 917,160,1 567,496,2 574,530,1 270,185,4 765,379,2 772,839,1 419,284,5 591,12,5 587,55,4 621,1405,3 826,357,2 692,179,3 888,943,3 415,1132,4 398,245,3 233,655,4 550,232,4 854,637,4 451,1409,1 842,90,3 920,279,3 942,68,5 729,120,4 922,248,4 885,182,5 70,5,3 900,57,4 192,404,3 912,507,3 795,605,4 882,855,5 118,256,4 505,513,5 726,234,3 816,120,3 882,683,3 115,531,2 929,243,4 756,28,2 908,293,3 362,738,3 839,173,4 766,658,5 891,448,2 775,548,5 278,485,4 827,556,2 849,78,5 682,682,3 900,179,2 870,509,3 797,844,5 778,94,5 827,736,1 528,332,4 918,11,3 883,284,4 654,1165,3 921,1078,1 793,935,5 915,1216,1 773,397,1 942,167,2 664,495,3 849,384,5 6,197,3 696,1021,1 649,564,3 888,1068,1 797,609,3 906,365,5 372,186,2 772,69,3 341,832,3 937,0,4 12,540,1 937,272,5 641,1286,2 676,239,5 395,150,3 483,240,3 888,31,4 395,322,4 915,157,2 932,584,1 187,175,4 918,863,2 846,141,3 504,490,3 804,405,1 891,379,4 829,836,5 550,832,3 593,126,4 933,68,5 156,474,3 742,321,3 873,110,3 692,196,3 588,323,1 482,90,3 532,1160,3 12,892,3 929,189,4 322,534,3 934,14,5 748,548,3 310,391,5 279,738,3 676,1010,3 931,1019,5 641,476,5 906,484,5 377,734,4 846,132,3 757,309,3 752,178,2 746,304,5 748,84,4 470,945,2 929,15,1 566,524,5 233,199,5 536,900,1 763,20,2 888,1047,3 726,121,2 747,132,3 730,495,5 698,2,3 772,750,3 719,310,5 638,672,4 540,138,3 345,1187,1 532,171,4 802,306,4 782,329,1 762,608,4 915,240,4 869,646,4 714,143,5 681,175,4 42,891,3 659,1239,3 933,526,3 20,322,2 835,356,5 934,282,4 933,583,4 415,309,5 652,1064,1 900,65,5 847,854,5 907,299,3 931,1148,4 681,721,4 795,938,3 652,1131,1 797,392,3 155,645,4 773,545,1 12,628,1 746,504,5 203,11,4 576,825,4 157,430,5 631,175,3 726,650,3 861,1108,5 898,150,2 644,649,5 51,6,5 779,603,3 324,234,1 781,271,5 338,659,4 867,577,2 869,288,2 377,237,3 
449,1060,4 592,125,5 922,1016,5 710,709,4 572,522,4 333,478,4 805,116,2 182,257,3 56,143,3 894,327,4 773,49,4 804,209,3 918,271,5 605,392,4 932,173,4 750,208,4 658,485,4 543,747,3 486,61,3 898,212,4 434,708,4 641,552,5 789,596,3 261,268,3 939,470,4 917,659,4 43,691,3 641,1181,2 773,865,1 449,233,3 798,1062,4 737,249,4 621,61,4 868,287,3 397,366,3 886,121,5 400,536,4 221,240,3 788,627,3 241,1354,5 496,264,4 54,6,3 777,195,2 931,1034,4 918,254,4 173,1312,4 925,261,3 323,409,5 927,167,5 770,49,4 879,925,3 806,90,5 826,332,3 292,1043,2 589,1008,3 227,55,2 902,191,5 496,49,5 867,198,5 910,473,5 893,826,3 832,640,4 889,96,4 415,299,4 593,198,4 150,1040,3 918,301,4 882,344,3 832,175,2 880,28,2 405,204,2 779,514,3 921,234,2 657,21,4 531,476,4 839,197,3 621,275,4 628,126,5 487,81,4 926,741,5 631,565,4 870,160,5 144,753,3 435,467,4 624,197,4 405,216,4 932,69,2 616,815,1 912,288,5 879,794,2 567,610,3 879,378,4 427,748,4 278,82,5 806,257,3 646,173,4 765,447,3 939,258,4 767,894,2 51,287,3 229,202,2 869,110,3 895,549,2 845,632,3 842,217,2 591,108,4 814,514,5 846,239,1 708,840,4 879,119,2 69,171,5 231,356,4 895,1221,2 726,171,5 658,198,4 428,1011,3 917,994,3 890,596,3 886,131,4 692,483,3 902,80,5 869,283,2 604,63,5 580,1352,4 58,90,4 361,320,2 325,210,4 732,923,4 762,736,2 937,822,4 847,811,2 819,257,3 737,918,4 325,96,4 893,249,4 877,698,1 715,176,2 737,754,3 933,195,5 794,69,3 893,1037,3 154,871,3 781,1088,2 404,1422,1 785,527,5 909,285,3 48,1,1 931,606,4 867,134,5 932,958,1 670,272,4 857,514,4 797,730,3 678,171,5 902,594,2 561,484,5 885,195,3 565,288,1 886,1278,3 899,409,2 377,401,4 293,828,3 906,763,4 685,178,5 434,762,5 536,428,3 937,105,5 641,997,3 543,291,4 268,707,4 708,67,5 405,392,4 903,89,2 891,430,4 684,288,2 478,339,1 576,87,3 911,615,3 897,308,5 333,548,4 860,474,3 101,226,4 59,134,5 863,741,4 816,404,3 800,298,2 915,89,3 6,151,4 778,49,5 192,1089,2 474,301,3 560,196,4 795,76,5 886,426,5 888,602,4 302,367,1 882,270,2 221,132,1 845,198,5 877,257,3 684,874,3 560,16,2 844,305,2 703,522,5 454,522,4 322,321,2 654,191,3 317,24,5 532,675,5 684,885,1 6,366,5 926,98,2 514,339,3 918,222,4 706,419,3 507,654,4 415,658,5 931,840,2 939,353,5 861,747,4 449,398,4 452,24,4 485,243,3 748,117,3 590,522,4 895,729,4 885,176,4 715,1285,2 937,247,1 563,257,4 765,583,3 806,1,4 245,738,2 172,244,4 715,654,4 503,299,4 862,1126,4 895,587,5 748,364,3 290,1046,2 759,257,5 327,45,2 362,167,4 454,116,3 503,741,4 386,213,5 886,392,4 496,120,4 266,4,3 896,22,3 313,767,5 431,254,5 255,229,4 282,70,4 560,941,3 177,1118,4 585,67,4 869,1019,3 477,709,5 726,91,2 658,187,3 891,494,4 560,198,4 681,998,2 723,321,1 847,172,5 930,289,2 500,627,4 900,731,5 624,69,3 234,284,4 697,662,1 652,159,3 836,739,5 642,629,3 765,606,1 882,512,5 85,1175,5 763,97,5 746,177,5 804,321,2 797,14,4 200,799,2 864,470,1 850,11,4 918,876,3 935,1128,5 300,99,5 942,525,4 822,709,4 827,557,3 17,958,3 898,132,3 909,2,2 232,844,4 932,192,4 664,209,4 93,1100,3 777,440,3 896,76,4 869,745,3 619,451,3 866,88,5 891,691,4 292,44,5 373,163,4 935,326,4 295,208,4 587,377,3 212,124,5 896,403,4 795,95,4 480,210,5 907,630,4 492,332,4 342,962,5 834,503,5 896,186,5 895,238,4 915,194,3 869,147,2 930,301,4 372,173,4 942,195,5 386,846,3 889,482,5 431,297,3 356,840,3 711,418,3 781,904,4 275,806,2 868,121,3 857,326,3 902,1008,4 685,317,5 81,21,3 860,936,4 129,271,5 895,569,2 327,186,4 494,1115,3 535,449,2 547,150,1 489,546,4 436,134,4 888,218,2 914,895,2 932,390,1 167,923,2 789,367,2 614,196,4 115,310,3 757,470,3 795,57,3 703,133,5 901,753,3 891,824,4 12,165,5 582,523,5 
825,549,3 654,1553,2 647,447,3 755,255,4 600,197,4 889,228,2 508,704,4 932,762,3 906,331,5 869,474,5 931,0,4 604,274,4 649,269,4 942,8,3 803,707,3 16,221,3 842,82,3 628,274,5 917,88,5 709,301,4 797,944,3 899,695,2 710,416,4 654,1464,2 886,403,4 457,6,4 653,719,4 261,69,4 15,366,3 397,581,2 869,317,5 888,979,4 803,287,1 888,656,4 867,185,2 12,833,1 652,186,4 919,310,3 611,479,4 686,268,4 853,19,2 572,527,4 867,173,5 654,3,2 852,681,4 250,24,4 884,738,4 838,845,2 810,242,3 405,653,4 499,382,3 767,534,3 726,391,4 755,382,3 835,184,5 927,190,5 726,464,2 400,242,3 665,482,5 895,565,4 471,95,5 360,1151,2 734,324,1 937,457,4 915,545,2 504,181,1 921,275,3 449,177,4 178,312,4 922,865,4 638,1100,3 664,416,3 915,1072,4 60,300,1 845,434,5 605,86,4 824,918,1 845,395,5 238,268,5 767,268,3 58,952,5 641,755,5 912,180,3 362,237,4 714,938,4 48,153,5 702,818,2 146,301,4 933,95,4 375,814,3 293,327,4 731,320,3 81,236,3 773,218,4 885,654,4 863,124,4 647,457,2 347,826,4 392,398,4 792,116,4 942,671,5 476,450,5 681,1045,3 832,539,1 765,160,3 654,1636,3 795,126,5 931,520,5 921,383,4 672,301,3 157,124,3 41,102,3 842,440,2 863,1207,2 915,1008,5 682,293,3 684,287,2 922,590,5 681,84,3 623,832,4 345,1209,3 771,271,5 937,840,3 794,433,3 136,689,2 915,536,4 65,1015,3 829,88,5 939,429,4 514,331,3 13,18,5 654,1142,3 756,357,3 830,55,5 832,11,5 335,71,3 292,764,3 666,196,4 422,507,4 536,491,3 841,271,4 939,237,4 822,95,4 254,825,1 942,37,3 935,254,5 654,910,2 478,135,4 506,117,5 383,285,4 586,538,3 889,189,4 937,1012,2 795,93,3 931,474,4 757,173,5 834,97,5 307,777,3 392,571,4 897,257,3 836,933,2 941,196,5 500,927,3 21,791,4 473,46,4 902,409,4 669,185,4 933,133,4 910,529,4 928,97,5 891,541,1 933,180,4 384,200,4 457,434,4 766,176,5 920,293,4 803,624,3 550,57,5 324,1139,3 731,303,5 700,343,3 879,24,4 938,507,5 915,79,3 467,236,4 612,278,4 888,178,3 939,215,4 866,689,5 719,886,5 591,126,5 622,287,1 889,529,4 808,330,2 940,0,5 683,97,4 473,380,4 394,117,3 921,1156,2 882,97,3 668,479,5 843,256,4 290,507,5 898,97,4 869,1013,2 933,473,4 598,279,5 568,327,4 536,744,2 821,94,4 338,240,4 25,682,3 797,929,5 721,120,5 415,679,3 621,180,5 714,281,3 453,139,3 689,427,1 794,824,2 624,960,4 888,740,4 552,654,4 522,434,5 681,745,3 845,490,3 232,3,3 942,273,3 710,21,4 593,519,4 581,24,3 882,744,5 882,407,5 261,698,5 912,468,3 236,285,3 400,190,4 531,331,4 428,216,3 893,136,5 785,195,4 806,430,4 436,65,3 660,168,5 449,583,5 197,672,3 720,152,4 877,50,4 503,1092,1 748,203,4 424,1187,3 706,1141,1 888,198,5 628,875,3 891,185,3 937,256,5 441,89,3 765,711,3 845,1108,3 709,482,5 534,161,3 821,901,4 828,283,3 825,61,4 879,747,4 659,83,2 307,8,4 797,659,3 886,547,1 922,247,4 291,601,4 265,8,4 839,184,5 91,12,4 659,195,4 863,152,5 879,584,1 917,658,4 551,147,3 558,310,3 307,168,5 505,362,3 617,520,2 760,627,4 229,698,4 889,673,3 343,172,5 115,268,3 873,301,5 895,1045,2 797,280,4 936,1006,4 757,1033,4 12,821,3 721,927,3 447,311,1 69,379,3 709,257,2 96,207,5 108,923,3 933,506,4 633,933,2 131,805,3 888,194,4 867,267,4 880,429,4 549,242,2 425,288,2 891,836,5 814,53,3 647,109,3 851,249,4 456,226,4 404,1474,1 245,157,1 869,522,5 536,927,1 157,229,2 654,1629,3 496,183,3 578,227,3 889,495,5 502,508,5 801,175,5 762,497,4 573,753,4 863,618,3 357,1395,4 550,582,3 896,214,4 520,187,4 941,944,5 903,110,4 185,294,2 550,191,5 285,651,4 863,189,4 789,61,3 880,678,1 883,1072,4 853,281,2 279,171,3 745,167,3 498,173,3 726,10,3 932,281,3 895,70,5 704,228,3 929,534,4 762,131,3 709,126,5 824,136,2 478,495,3 940,293,4 928,430,1 638,18,4 890,115,3 428,221,4 795,1125,1 
772,126,5 847,70,5 554,117,4 268,822,3 853,1012,1 473,189,3 888,1486,3 329,731,5 781,252,2 94,385,2 412,272,2 787,1041,3 920,322,4 918,1059,3 392,822,3 938,274,4 341,506,4 118,1263,3 292,501,3 609,605,5 588,327,5 613,534,2 654,1642,5 589,546,4 795,273,5 517,919,3 942,567,3 825,70,5 765,237,4 662,181,5 388,628,2 732,1379,2 906,0,5 652,150,3 298,211,4 140,180,4 404,512,1 861,865,4 912,237,3 920,253,3 909,747,3 311,429,5 386,1197,3 505,227,5 875,530,4 363,677,4 338,479,5 797,925,4 877,462,2 463,126,5 312,196,5 6,597,3 697,514,4 21,67,4 333,98,4 748,185,4 720,228,5 614,707,2 518,347,5 815,1024,4 465,160,2 914,257,2 665,90,3 915,497,3 939,1136,3 285,71,4 803,435,5 723,351,1 499,545,4 935,918,5 659,945,2 942,138,1 814,483,4 547,287,3 846,87,2 302,545,2 408,337,3 354,318,5 773,452,2 500,273,3 789,152,3 534,637,4 912,461,3 711,399,3 487,632,5 760,244,5 781,1609,1 870,346,5 520,299,3 915,938,3 906,325,5 853,173,3 895,209,4 837,282,5 280,747,5 937,684,3 285,402,5 415,290,4 748,738,3 235,303,4 915,11,4 292,93,2 82,87,5 386,726,5 801,669,4 842,525,3 456,450,4 740,944,5 709,302,4 307,254,4 726,55,3 755,419,4 918,257,4 828,254,3 647,38,3 406,168,5 922,1000,1 681,716,3 406,473,3 906,270,5 266,430,4 516,228,3 714,411,2 726,204,5 822,472,3 764,282,4 768,119,1 918,1257,3 523,264,4 847,190,5 724,293,3 185,268,1 654,154,4 880,190,5 888,160,4 681,469,5 272,337,3 789,450,3 869,1411,2 715,486,5 638,513,4 453,63,4 429,6,3 326,677,3 634,301,4 177,110,4 888,422,4 931,118,5 503,370,3 453,747,4 912,194,4 773,116,2 935,107,4 789,12,3 405,970,3 835,173,5 325,1117,2 895,228,4 654,557,4 871,1010,1 933,960,4 863,327,5 250,596,3 931,168,5 852,321,3 718,76,3 715,131,5 832,287,2 773,522,2 880,81,5 845,78,4 806,68,5 96,422,5 647,49,5 658,254,3 547,474,4 621,184,3 754,878,4 194,614,4 486,824,3 553,67,2 752,192,4 144,58,1 916,8,5 523,954,1 547,182,5 880,180,4 893,317,5 297,481,5 672,78,5 449,731,3 920,283,4 863,3,4 930,899,4 715,822,3 737,6,4 906,142,5 486,1208,4 910,214,3 681,72,5 827,651,5 629,1060,2 910,207,4 689,283,4 837,221,4 880,30,5 683,923,2 893,530,3 487,55,4 455,1128,4 891,968,4 918,590,3 11,479,4 787,45,3 392,96,4 560,225,1 801,196,3 61,63,4 633,332,4 288,362,3 526,143,4 775,522,4 517,618,4 418,268,4 605,691,5 547,217,4 906,96,5 474,305,5 17,723,4 159,951,4 467,63,5 807,345,5 888,830,2 592,180,4 757,734,5 847,240,5 343,755,2 69,93,3 931,480,4 79,63,5 846,608,2 825,101,4 867,229,3 922,828,4 129,760,3 701,449,1 892,780,3 554,99,5 882,227,4 696,249,4 900,236,3 707,180,5 493,706,4 932,1182,3 605,177,5 863,264,5 855,747,3 931,1120,5 713,293,4 415,304,3 500,247,4 772,1251,4 867,237,4 191,117,2 140,590,4 803,1176,3 824,281,4 920,366,4 580,1366,5 581,830,2 900,37,3 843,12,3 124,709,5 292,302,4 654,1640,3 404,374,1 696,885,5 888,646,2 606,55,5 845,734,2 804,1007,4 200,470,2 891,632,4 825,186,4 476,65,5 881,24,2 921,654,2 425,480,5 144,96,5 380,479,5 793,248,3 492,248,4 292,27,3 891,678,3 23,323,5 665,369,2 785,496,4 404,444,4 789,130,2 810,900,4 879,93,3 915,108,3 891,1077,3 694,339,4 797,826,4 36,120,2 845,941,4 278,1443,3 931,513,5 710,1189,3 766,179,5 845,649,5 900,321,4 300,20,2 842,377,2 751,354,2 933,501,4 419,546,4 556,1175,5 773,390,1 33,990,4 882,237,4 842,126,2 941,497,5 152,186,2 940,1006,4 902,251,3 25,844,3 896,209,5 626,63,5 842,1038,3 918,291,3 177,219,3 942,824,3 664,1046,1 670,194,5 891,182,5 212,156,4 477,123,4 869,247,4 942,192,4 886,217,5 869,723,4 847,209,5 144,120,2 849,95,4 641,954,3 886,419,5 912,287,2 428,439,1 665,192,4 867,1182,1 547,24,2 777,622,1 885,363,3 48,474,4 881,283,3 932,87,3 449,387,3 
932,81,3 677,284,3 872,306,3 806,482,5 403,287,3 898,193,5 931,413,4 915,1118,3 536,85,4 884,215,3 933,461,4 765,809,2 895,234,1 814,513,1 849,484,5 898,95,4 573,285,3 664,741,4 932,11,4 853,320,3 882,1073,4 918,538,3 541,385,3 795,377,4 406,209,4 693,210,5 720,877,3 707,886,2 120,291,4 895,224,1 879,272,5 654,142,4 560,503,3 0,93,2 842,464,2 763,731,3 941,258,4 503,422,4 550,257,4 649,26,3 183,371,3 814,150,4 542,63,4 457,99,4 398,89,2 715,140,4 908,325,4 746,864,5 653,495,4 726,1445,3 931,404,4 942,469,4 456,97,5 893,304,4 881,175,4 293,1046,3 820,293,4 220,632,3 832,1180,1 566,649,4 503,66,2 806,288,4 805,236,2 795,327,5 249,983,3 845,1177,2 763,105,2 485,545,2 796,258,3 815,689,4 787,442,4 879,167,3 398,342,2 757,918,5 931,196,5 907,731,3 58,601,2 690,1171,5 548,150,3 720,994,3 927,134,4 827,639,2 765,498,3 302,1216,1 15,38,5 561,81,5 223,730,4 607,92,4 926,37,5 491,1097,4 716,259,1 918,1513,2 860,304,4 942,1046,2 98,347,4 803,309,4 448,197,4 841,332,4 213,249,2 932,567,2 560,416,2 809,872,3 636,116,2 360,366,3 830,689,4 936,115,4 233,834,3 93,1139,2 404,1245,1 263,208,5 885,1216,4 614,331,2 286,894,2 591,1078,1 931,209,4 785,116,4 436,461,5 896,848,4 554,404,4 188,179,5 918,13,4 886,928,1 291,248,3 835,662,5 477,99,5 895,1302,4 64,177,5 250,293,3 270,27,5 625,326,4 12,431,4 942,414,1 173,167,1 933,402,4 737,203,4 338,175,4 905,407,4 415,1593,5 154,244,2 879,267,5 654,20,2 861,11,5 882,735,3 591,1011,5 805,80,5 473,404,4 637,152,3 787,27,5 81,581,4 921,94,3 918,173,4 708,575,4 696,244,3 789,99,2 820,13,4 112,978,5 909,204,4 795,735,3 479,97,4 587,6,3 703,321,2 856,303,2 255,929,3 523,518,4 874,178,5 478,270,3 879,626,3 847,71,5 252,317,5 918,814,2 605,925,3 845,428,2 738,21,5 832,10,5 864,624,1 915,736,3 55,747,4 416,364,4 888,297,4 621,1230,2 803,196,4 353,282,4 900,251,3 500,149,5 303,273,4 863,733,3 882,846,4 889,513,5 89,195,4 621,198,5 139,287,3 928,478,4 804,172,4 793,514,5 737,448,3 485,1404,5 338,216,3 921,698,3 845,209,5 560,504,4 916,627,5 121,698,5 144,739,2 109,1228,3 773,839,2 25,121,1 663,201,4 304,1285,5 808,298,4 587,293,4 279,587,5 814,379,3 197,237,4 930,271,5 523,429,3 784,272,3 849,87,5 912,202,4 691,507,3 387,120,4 719,241,4 264,0,5 918,891,3 804,152,4 222,545,5 17,356,4 366,330,4 537,88,4 189,287,5 324,526,4 879,293,4 698,69,4 706,814,2 483,422,5 893,279,3 553,1040,3 109,584,2 869,643,2 337,381,5 693,356,5 162,201,3 82,1048,3 708,0,4 315,96,5 881,587,4 356,507,5 295,136,4 483,467,5 550,402,3 544,194,4 893,15,3 741,590,4 918,296,4 472,149,5 275,299,4 785,94,5 548,404,4 467,8,5 384,521,4 346,1290,1 891,275,4 899,1297,2 773,451,1 769,936,4 623,1113,4 654,68,3 12,903,1 516,130,3 278,968,3 554,264,3 915,155,5 870,275,5 716,994,5 736,356,5 923,63,4 775,131,3 867,761,4 932,96,2 663,96,3 942,218,4 310,169,5 268,123,5 832,343,4 757,1282,4 437,281,5 396,491,4 375,99,4 317,392,5 444,457,2 756,430,4 895,691,4 149,13,4 805,517,3 747,95,5 718,734,5 863,672,3 147,193,5 541,131,3 827,899,2 150,835,4 769,923,5 150,189,4 101,153,3 84,1165,4 293,341,3 30,191,4 915,678,3 907,317,5 424,404,2 786,750,4 401,409,1 903,723,4 931,430,3 882,581,3 248,146,5 522,948,5 167,1050,4 905,284,5 17,951,2 444,0,3 196,305,2 668,55,2 850,474,4 915,460,4 269,282,5 654,648,3 617,381,2 710,714,4 359,143,2 867,726,2 852,260,3 895,646,3 521,513,2 397,0,5 158,287,3 275,1412,1 192,281,5 777,628,2 806,422,5 901,1015,2 837,237,4 660,254,3 573,749,3 43,306,4 526,525,5 173,87,5 867,210,3 681,0,4 636,507,2 862,1295,3 888,645,3 747,198,4 496,181,4 906,287,5 278,900,4 402,1198,2 852,306,1 898,124,3 896,608,5 
88,401,4 787,695,3 828,312,4 581,117,2 864,925,1 863,549,4 598,293,4 428,408,2 763,215,4 550,925,2 395,405,2 931,63,2 732,290,2 415,78,5 821,205,3 869,654,4 589,123,5 576,1045,4 730,590,1 654,304,4 411,207,4 224,97,5 693,182,5 931,150,3 894,0,4 789,754,3 715,516,5 633,407,3 542,946,4 520,41,5 915,283,2 891,704,4 863,800,3 381,285,2 888,208,2 748,46,4 937,275,3 487,704,4 831,259,3 58,1113,5 278,778,3 925,236,3 851,925,3 681,1011,4 457,822,3 593,514,5 746,0,5 904,457,4 454,258,2 803,434,3 820,97,5 660,144,1 748,844,3 926,93,2 667,895,4 776,244,5 659,153,4 610,305,5 809,288,5 781,331,4 454,163,4 902,146,3 918,199,4 893,329,3 775,216,4 715,12,2 404,195,1 882,530,3 180,1286,1 279,232,4 580,921,5 922,410,4 871,892,4 884,160,4 290,572,4 784,173,5 748,442,4 937,288,1 88,106,5 898,116,4 861,176,4 746,57,3 579,256,5 121,27,4 448,592,4 639,688,4 642,720,2 805,251,1 571,283,3 343,301,5 607,91,3 939,214,2 631,133,5 898,179,3 293,404,4 697,85,2 781,270,2 665,959,4 587,90,5 890,739,5 416,301,3 159,126,5 292,126,5 801,332,4 485,251,3 698,763,3 532,7,3 910,419,4 759,97,3 803,293,5 531,915,3 736,99,5 147,126,1 505,685,3 56,120,4 731,268,5 642,654,4 707,1078,1 683,408,3 911,500,4 540,1411,1 885,216,2 733,49,4 931,605,4 921,595,4 891,187,5 714,105,2 346,426,4 500,244,3 932,1036,1 888,512,4 616,497,3 781,341,2 114,507,5 528,874,4 420,426,4 874,1102,5 935,844,4 918,688,2 643,254,4 536,191,4 345,692,4 91,64,4 797,928,3 773,188,2 57,237,5 803,131,4 814,213,5 915,703,3 689,273,3 845,317,5 496,583,4 867,384,2 617,14,3 487,70,3 620,37,3 626,231,3 560,48,2 939,284,4 926,90,4 523,317,4 787,356,4 882,420,5 882,951,3 428,1017,3 803,1059,3 746,513,4 541,239,3 748,49,5 915,214,3 109,946,3 164,222,4 851,289,4 893,165,4 893,749,4 901,878,4 269,264,4 765,663,2 589,675,4 9,8,4 74,472,3 932,221,1 292,508,3 200,236,4 659,88,3 892,95,4 496,762,3 925,268,5 220,930,3 804,184,5 853,110,3 225,178,4 320,477,4 720,160,5 653,404,4 536,24,2 268,514,4 108,179,3 656,257,2 935,311,3 692,209,3 621,473,3 927,327,3 642,450,2 654,691,3 243,649,3 392,692,3 893,904,3 891,161,4 805,99,4 668,474,3 937,292,3 942,795,3 540,257,4 150,558,2 449,1489,3 565,461,4 926,27,4 540,467,4 707,761,5 927,8,5 534,6,5 880,565,4 869,942,2 706,504,4 716,590,4 846,684,2 891,1443,3 819,894,2 333,46,4 208,275,2 494,1038,5 785,0,4 317,339,4 464,407,5 551,280,3 866,293,3 935,1225,3 870,903,3 902,404,4 900,446,3 933,418,4 861,207,2 58,272,2 885,49,5 911,426,5 797,923,3 891,130,4 338,211,4 908,223,5 896,173,5 592,57,4 353,318,3 63,183,4 888,180,4 797,442,3 900,14,5 829,833,1 772,894,2 527,409,4 270,168,5 345,150,4 837,992,3 697,133,3 237,150,2 473,30,4 641,72,4 869,10,4 642,38,4 886,97,3 845,39,2 845,97,4 502,691,3 897,301,4 915,556,4 895,750,4 917,746,3 663,659,3 934,119,3 765,497,4 805,169,5 906,277,5 587,264,5 804,127,5 930,545,3 553,180,4 912,27,3 819,750,1 895,175,5 305,740,1 532,264,3 938,236,5 781,350,3 737,134,5 789,1076,3 874,21,3 82,244,2 869,507,3 786,267,4 573,357,2 302,626,3 814,525,4 620,383,3 42,590,5 42,122,1 799,0,4 846,119,1 879,2,1 517,404,5 654,871,3 711,1479,4 24,175,4 885,10,5 889,156,4 693,187,5 880,731,5 495,659,3 748,443,2 387,52,5 703,339,3 780,178,5 681,70,5 450,875,4 473,55,5 421,266,4 919,271,3 706,51,3 740,150,3 714,142,3 869,3,2 618,78,5 647,280,3 932,95,2 745,227,4 871,1283,3 524,105,2 690,495,5 139,302,5 326,497,4 31,407,3 891,207,4 839,479,5 97,193,5 517,409,3 797,201,2 813,4,3 576,180,5 868,1046,2 842,131,3 471,419,3 398,1541,2 923,470,4 473,123,5 891,156,5 938,470,5 861,484,5 494,572,4 404,957,1 748,711,3 806,203,4 720,241,3 867,171,5 
853,289,1 838,1380,3 901,186,3 863,417,3 861,204,4 765,427,5 602,55,4 788,123,4 825,259,3 17,966,3 906,814,5 449,653,4 664,377,3 853,527,4 377,440,3 879,109,3 531,424,4 850,552,4 632,158,4 652,134,5 347,595,4 748,204,4 933,785,1 805,874,3 523,480,4 859,731,4 720,262,3 617,530,4 902,543,2 736,21,4 751,267,2 896,522,5 55,574,3 864,742,1 885,1,4 780,209,4 848,624,5 605,575,3 302,938,3 869,434,3 12,445,1 746,123,5 203,332,1 924,557,1 877,154,3 879,4,3 915,355,3 187,483,5 845,100,4 388,132,5 878,595,2 839,431,5 625,679,1 803,232,4 662,239,3 144,768,2 817,312,4 573,327,3 647,455,2 805,168,5 665,264,3 793,99,5 302,1085,1 932,440,2 518,886,5 866,171,5 888,549,3 937,992,5 531,989,3 486,312,3 658,215,4 942,738,4 415,152,4 863,150,5 931,415,3 733,293,1 873,274,4 902,1380,4 302,362,1 920,24,3 787,156,5 633,1027,3 915,147,2 891,630,4 400,404,2 9,197,3 688,257,5 4,173,5 890,977,4 920,142,5 23,226,3 847,215,5 738,49,4 868,281,3 746,509,5 912,317,4 834,587,3 722,8,3 934,684,4 536,674,3 693,704,5 936,293,1 706,863,4 898,199,4 560,610,5 825,747,4 772,231,3 748,822,3 805,89,4 883,462,5 706,707,3 534,1169,3 382,199,5 902,650,5 886,273,1 483,209,5 714,777,2 878,110,4 709,78,4 710,47,4 668,478,5 683,247,3 406,6,4 888,7,3 618,577,4 484,751,3 392,612,4 852,332,4 886,1062,1 837,312,5 446,769,3 526,658,4 605,96,5 629,814,3 805,208,3 866,747,4 692,1135,3 647,317,3 874,963,4 748,635,4 489,108,5 440,281,4 850,1012,2 882,233,4 101,6,2 895,95,5 934,116,4 357,1004,4 873,285,4 895,510,5 827,530,4 834,173,5 592,222,5 903,172,3 763,280,3 740,695,3 929,409,3 797,658,4 210,214,5 553,171,5 658,656,5 930,256,4 220,143,4 206,172,3 935,250,4 867,404,1 101,171,3 902,49,5 652,410,2 894,99,4 378,620,4 129,823,3 928,275,2 787,327,4 576,175,5 724,275,4 850,1336,3 888,76,3 779,312,5 827,345,4 592,0,3 362,180,5 924,199,2 861,119,3 847,707,4 562,565,4 499,1009,4 779,49,5 822,133,5 129,92,5 129,120,5 536,777,3 654,912,4 888,1,3 864,1008,5 850,978,3 832,473,5 393,379,4 192,689,4 620,808,4 765,90,5 649,478,5 428,198,5 846,595,3 933,215,1 787,555,2 896,368,4 935,286,4 935,765,3 448,119,1 660,761,2 720,873,3 820,150,4 763,595,3 536,442,3 617,627,2 486,290,3 112,974,5 942,390,2 863,684,4 749,322,3 278,63,1 645,749,3 653,369,2 616,581,4 912,689,3 659,228,2 420,497,4 494,1090,4 805,420,4 675,537,4 720,261,3 912,208,2 377,77,3 879,475,3 715,203,5 275,1089,1 12,224,2 11,202,3
mlpack-2.2.5/src/mlpack/tests/data/casecheck.arff
% @file: casecheck.arff
% @author: Praveen Ch
%
% This file declares the @relation, @attribute, @data tags in a non-uniform mix
% of upper-case and lower-case letters to show that the ARFF loader is case
% insensitive.
@RELATION dummy
@ATTribute height numeric
@ATTRIBUTE weight numeric
@DATA
120,56
45,85
99,200
mlpack-2.2.5/src/mlpack/tests/data/completion_X.csv
2.268917959069277346e-01,1.201769815832157140e-01,2.791472705838512480e-01,4.129696002411505917e-01,7.921637265182315257e-01,5.032788432358387132e-01,3.835664173525583087e-01,8.451234748542018060e-01,3.228975933850825042e-01,3.677241101206029650e-01,3.115613861833520515e-01,2.843637987382546251e-01,8.263612223976396498e-01,2.578882500552053259e-01,3.735987908409875713e-01,4.096958161076692528e-01,4.836063397606226166e-01,1.840499653987820172e-01,5.339870859567330541e-01,6.943075938475559150e-01
1.815514517683395057e-01,1.481438141299214750e-01,3.246365312873157327e-01,6.180958286371067700e-01,8.395422972369482872e-01,7.283799838784086322e-01,3.862362541987202080e-01,9.634855656153386017e-01,3.472450609549778133e-01,2.884095444455062607e-01,4.367424157124643602e-01,4.721652983026459749e-01,8.968181674236590517e-01,3.975936290126179662e-01,3.850861370247186111e-01,4.765619276085552025e-01,4.977252301323637496e-01,1.081737498640471112e-01,7.862189295351967866e-01,9.150673041547885411e-01
4.485725462770236982e-01,6.210431906279154646e-01,8.079162416271070679e-01,7.479165249287595962e-01,9.660638138607120506e-01,6.775024396804142368e-01,6.268219497800838758e-01,1.488129887707428711e+00,4.081229070480736354e-01,9.937536925587704406e-01,9.942593357890014971e-01,9.277558021971723523e-01,9.732006968930746460e-01,4.241106289124610962e-01,5.442549648603086654e-01,4.212243188620748424e-01,5.900020369048364355e-01,2.652600104280545823e-01,8.213977178710969440e-01,9.447098806592240106e-01
3.702964231051512067e-01,3.938319501534544576e-01,6.395889777312965263e-01,8.706414932452468669e-01,1.207707943992026856e+00,9.457458390400318438e-01,6.340944313462977266e-01,1.528845711341907831e+00,5.020885255631167832e-01,7.048629386590496981e-01,8.098977940228742067e-01,8.049719509373847171e-01,1.262115877670982211e+00,5.351700897498899989e-01,5.974078234042232705e-01,6.255845921380651653e-01,7.258190154091623825e-01,2.294328483941174279e-01,1.055931543348610946e+00,1.238692223038866835e+00
3.526016035061635967e-01,2.703621037792456328e-01,4.439314100431043242e-01,4.229666868320861584e-01,9.031752423357746196e-01,4.597582533544301575e-01,5.007159852721363436e-01,1.059317307971314293e+00,3.691519879382180180e-01,6.584508547254678268e-01,4.731792376876083339e-01,3.876907164298784014e-01,9.169708773125819778e-01,2.428012038917182203e-01,4.609839449546466184e-01,4.136321988942701644e-01,5.616141572314510277e-01,2.821873896562767658e-01,5.105189321111793799e-01,6.990347228681695890e-01
4.776614239431458131e-01,3.967295410837491931e-01,5.957997622485082800e-01,4.518458514484933186e-01,1.063991127879416743e+00,4.539370653399564248e-01,6.300085470808846067e-01,1.298949699351007459e+00,4.349946655237709736e-01,9.295503644763772888e-01,6.180235338336663720e-01,4.782498767941839590e-01,1.063025575275893919e+00,2.430658821032348205e-01,5.652089609470616338e-01,4.509587467231170876e-01,6.692198367772439482e-01,3.844592684576819686e-01,5.207452078366189285e-01,7.502845369622384020e-01
5.056457998237853246e-01,5.532375286855771845e-01,7.272338358680021653e-01,4.881611398595514517e-01,9.503154304545203823e-01,4.119876009143376150e-01,6.322290100374869937e-01,1.346923545831661517e+00,3.940692886592210842e-01,1.071956010788616576e+00,8.011397166848406304e-01,6.554305764513597143e-01,9.325968522063492472e-01,2.505047222933129425e-01,5.435261528354763394e-01,3.651050090405553172e-01,5.993729646038712389e-01,3.697943299987761501e-01,5.141127848547054624e-01,6.928395068906985088e-01 8.064260552339440613e-01,9.177319992214625355e-01,1.273416261287542817e+00,1.173494511088804204e+00,1.848081436294184865e+00,1.126013492925364012e+00,1.127445641813147770e+00,2.552912276563224836e+00,7.695028070582169422e-01,1.677483037275186994e+00,1.496622542088449359e+00,1.342675624209609841e+00,1.865740751901193484e+00,6.657448925587777788e-01,1.000914478221888837e+00,8.186106152427350402e-01,1.138850827545863087e+00,5.458898643619807256e-01,1.326285146704942131e+00,1.629686184016899242e+00 1.738422620439678545e-01,1.567384743183915241e-01,2.784381394849713143e-01,4.002518394974652338e-01,5.979703370306685972e-01,4.505220850103247576e-01,3.029192179481221547e-01,7.169537418545651741e-01,2.471528659888790891e-01,3.132373072381882118e-01,3.462479895999485691e-01,3.421883485660967561e-01,6.262180569679969278e-01,2.483049716330763546e-01,2.896125897456474640e-01,3.130639915304364029e-01,3.603263077700779027e-01,1.161228340704642814e-01,4.951422090051941227e-01,5.939392287737175202e-01 4.039456635170143861e-01,5.176489546874343262e-01,6.792690958633013087e-01,5.895486598390766719e-01,8.467228200114418346e-01,5.310638428784866250e-01,5.488301219725905122e-01,1.263870612002495175e+00,3.555434049210253189e-01,8.788683417928322417e-01,8.123858974263451493e-01,7.357475027678473944e-01,8.479815776774547453e-01,3.287810257594642183e-01,4.765747311001282127e-01,3.595012751203302548e-01,5.216642260308997914e-01,2.574900849029022609e-01,6.448484722778824452e-01,7.686516817055363271e-01 3.940846782559529471e-01,4.147717414961812343e-01,5.314428096115817457e-01,2.745963014161701965e-01,6.611575129806924744e-01,1.995841559432231793e-01,4.627733071586718205e-01,9.432515422337117705e-01,2.729784988062347595e-01,8.394228484686327985e-01,5.583141288527634361e-01,4.237136582360849779e-01,6.359725511002094489e-01,1.245788131178181823e-01,3.907073643697869247e-01,2.273802394140331529e-01,4.240316145344125709e-01,3.018514857677208063e-01,2.675618098487977381e-01,4.053152472613138491e-01 6.181385940128862888e-01,7.753320651824442411e-01,9.493655495231986263e-01,5.533211270559912354e-01,9.819006196623043525e-01,3.896925857346590361e-01,7.305044367182026432e-01,1.572348101912398377e+00,4.118710806852865502e-01,1.382958817823869868e+00,1.065249233800557027e+00,8.787280785934115102e-01,9.413207627638698893e-01,2.665065130425555151e-01,6.045051118209724406e-01,3.284817436254397482e-01,6.242767966384996647e-01,4.288307785111500348e-01,5.341110254630515586e-01,7.002714360078681199e-01 2.929236407853995083e-01,5.375045292961507792e-01,5.849436198864613745e-01,3.276623462636701345e-01,2.917804957829630008e-01,1.628580143777714484e-01,3.192001003892946942e-01,7.647813248733912328e-01,1.318063667665049510e-01,7.601259858819182647e-01,7.165563717624559015e-01,6.415369614491145400e-01,2.580284780706327830e-01,1.537451077116215314e-01,2.365621509923022669e-01,4.824290586088247901e-02,1.852122752468948197e-01,1.511663393741338457e-01,2.752925054650718351e-01,2.767917790417023793e-01 
3.036686150026711162e-01,5.088741548959666572e-01,7.019940518817437969e-01,9.187845699751328032e-01,9.295555377881823400e-01,9.172063736459384886e-01,5.341178316295921435e-01,1.415286958013770136e+00,3.963752712126933364e-01,6.737014883809999688e-01,9.567201408953059927e-01,9.937306044691960327e-01,9.754041175353654891e-01,5.599205367827400082e-01,4.858583602521173161e-01,4.860159183826575635e-01,5.462939906370443133e-01,1.197171183316589060e-01,1.065523125996545639e+00,1.143011359415453176e+00 3.422631357449678613e-01,2.213860487484220485e-01,3.979823446324060043e-01,3.932591826509272415e-01,9.171120351530952508e-01,4.496671551666838740e-01,4.920501282725772141e-01,1.021922881227457225e+00,3.730116119067053981e-01,6.140735702178622413e-01,4.075089008512043365e-01,3.210010083282257565e-01,9.338141519905384103e-01,2.280255878839305139e-01,4.589387293709081828e-01,4.264042465506259205e-01,5.710287168605510111e-01,2.866053736369176264e-01,4.882095437920391245e-01,6.897026618730657255e-01 3.780667996848537937e-01,6.806705533316103063e-01,8.522610306419799997e-01,8.982008697364650684e-01,8.542117439745799956e-01,8.082942431138352424e-01,5.711940870538670945e-01,1.490272384107881765e+00,3.692378767891994484e-01,9.050920875655098508e-01,1.128811782008320996e+00,1.126253668216393278e+00,8.734109392420458251e-01,5.244718079861424576e-01,4.911232686296309047e-01,3.963609896800371346e-01,5.069895362553684670e-01,1.574863079920674758e-01,9.856003900484041624e-01,1.036848281384411319e+00 6.883324258806501339e-01,7.107449492784763123e-01,1.003308451906025134e+00,8.577879360163622735e-01,1.539453231544298806e+00,8.264850387549846911e-01,9.359281739938727362e-01,2.048856709250512775e+00,6.369817749657655792e-01,1.403509931769793440e+00,1.133924660048421185e+00,9.738173329176448290e-01,1.545846618401569561e+00,4.773398860971982405e-01,8.317210303940278937e-01,6.659385898396117387e-01,9.566742015114944042e-01,4.982074781111152095e-01,9.700659879672112185e-01,1.251533822910914751e+00 2.193289792584471876e-01,1.444168029996975577e-01,2.818065587621303436e-01,3.633923749930468849e-01,6.918207240471885289e-01,4.249170950590445139e-01,3.510302821331614531e-01,7.711702625950600520e-01,2.827324571482761617e-01,3.794669403393211571e-01,3.149359926256778408e-01,2.816037458866646470e-01,7.164755829946196641e-01,2.219594975519232627e-01,3.351744749340607177e-01,3.465339819716781733e-01,4.239023072800830461e-01,1.733166634953264840e-01,4.584510381735480977e-01,5.963850938307371230e-01 3.809063081518008209e-01,5.465817697684511867e-01,7.721669308951958355e-01,9.586052775992502939e-01,1.106283291516237721e+00,9.653566779438295109e-01,6.326368609364459994e-01,1.598030251157036119e+00,4.672374614612428978e-01,8.109833578557517075e-01,1.013156428293742195e+00,1.020910772735745153e+00,1.151273032905090199e+00,5.791976528722865281e-01,5.762718957159478350e-01,5.599879712525445141e-01,6.592452753751245620e-01,1.919744367288593878e-01,1.115927899774691978e+00,1.240457852259422511e+00 4.097349117171018573e-01,5.057586327159691031e-01,7.362059580237217959e-01,8.695357873373452673e-01,1.148209772988772137e+00,8.875464877592552648e-01,6.510232800554107691e-01,1.572878003935865632e+00,4.806516459123344465e-01,8.401188797848546663e-01,9.279061656787667145e-01,9.039974376823688429e-01,1.186938454467031834e+00,5.209607410316934306e-01,5.947537097079912405e-01,5.660933502038212328e-01,6.923799436450011058e-01,2.429177052172657447e-01,1.018626457793137208e+00,1.178166114246190999e+00 
mlpack-2.2.5/src/mlpack/tests/data/completion_indices.csv000066400000000000000000000022601315013601400234560ustar00rootroot0000000000000017,1,0,0,11,9,7,15,6,4,17,5,9,8,4,17,13,19,4,16,7,11,9,14,4,6,12,0,13,16,18,18,19,6,15,0,4,8,10,19,12,5,3,0,4,9,0,16,9,16,18,18,9,6,18,0,5,3,7,14,4,12,13,2,14,17,17,9,4,9,15,13,17,12,16,14,3,9,19,11,4,6,16,11,19,8,7,0,19,8,0,2,12,5,11,16,19,15,3,2,14,7,0,15,0,12,6,13,15,15,10,13,5,15,5,13,4,17,9,6,18,10,16,15,7,7,14,1,4,10,2,4,6,6,4,4,15,18,13,10,3,5,10,4,1,5,5,18,10,10,3,19,10,2,12,10,2,17,8,13,3,15,11,5,2,1,18,15,13,2,17,6,0,0,7,5,17,14,1,6,12,11,16,8,16,5,18,1,12,13,1,12,11,2,0,12,19,10,6,2,3,14,11,13,8,10,2,0,5,15,3,0,2,10,17,5,10,0,18,5,7,12,0,17,16,3,16,9,10,12,7,14,16,7,11,1,18,15,9,8 1,11,1,6,6,4,19,9,6,5,5,12,11,2,3,15,1,1,19,10,3,19,0,15,11,12,4,7,8,6,15,12,14,5,6,18,15,13,3,5,9,0,0,19,4,10,4,15,6,16,5,0,5,11,17,16,10,13,9,4,18,17,0,17,7,17,6,3,12,9,19,12,7,12,3,9,8,17,11,10,6,2,9,13,19,9,1,10,6,15,8,9,2,14,7,13,2,0,2,1,18,18,3,17,5,11,18,6,16,3,17,9,3,2,5,18,8,0,15,16,8,11,2,12,5,6,3,16,2,12,4,14,17,9,9,1,4,11,10,16,7,1,19,0,9,13,7,14,7,9,10,12,8,8,16,6,12,11,6,3,1,15,4,8,7,6,7,11,13,3,16,15,15,13,0,19,18,14,17,14,13,14,12,3,11,4,13,19,15,15,5,0,18,15,11,6,10,15,13,13,11,5,1,19,11,1,19,12,11,18,6,14,10,13,10,17,5,17,9,18,14,5,0,9,7,3,18,2,4,14,12,10,17,11,9,3,19,14,14,8 mlpack-2.2.5/src/mlpack/tests/data/data_3d_ind.txt000066400000000000000000001375501315013601400217770ustar00rootroot00000000000000 -3.5457222e-01 -1.8368365e-01 2.6444858e-01 8.1483334e-03 -4.9224169e-01 2.0274160e-01 -1.9478140e-01 4.0490080e-01 -7.1873382e-02 -2.1419699e-01 -1.0326522e-01 4.6347464e-01 -8.3140464e-02 3.9471698e-01 5.3785385e-02 1.7803811e-01 2.7633228e-01 -2.5867528e-01 -2.2088398e-01 3.3914680e-02 1.8966136e-01 -2.9169293e-01 -4.9499891e-01 3.8154905e-01 -8.3305058e-02 2.6238128e-02 -4.1686637e-01 -4.5399649e-01 1.0128746e-01 2.9710163e-01 1.4342777e-01 8.6304189e-02 -2.6373543e-02 -2.0394141e-01 -1.1350285e-01 8.3763598e-03 2.4490867e-01 -2.1521682e-01 3.5781714e-01 -3.1353218e-01 -4.9251107e-01 3.1551862e-01 -4.2797741e-01 3.6032520e-01 2.8988399e-01 -3.3291299e-01 -3.2419361e-01 -3.4471148e-02 1.8286010e-01 1.3997382e-01 3.1803832e-01 -3.0074041e-01 2.0462857e-01 -2.0060991e-01 4.3254212e-01 4.9745923e-01 1.9382723e-01 3.7246205e-01 2.6867944e-01 -3.4141118e-01 -2.9420523e-01 1.0676140e-01 -3.5765233e-02 -1.1596380e-01 -1.3496316e-01 9.2431041e-03 4.3294651e-01 -2.2631908e-01 3.5920526e-01 2.0596580e-01 2.3457422e-01 2.9363836e-01 -4.9142660e-02 2.0536182e-01 2.8751580e-01 -2.3402742e-01 6.5653446e-02 1.8254875e-01 -1.7829941e-01 4.9546985e-01 2.0166250e-01 3.9629509e-01 -2.2330340e-01 4.3373936e-02 -2.0020896e-01 -3.6543822e-01 2.5292636e-02 9.6288746e-02 1.9266791e-01 -5.9572221e-02 -7.2832892e-02 3.9557298e-01 -4.3553481e-01 4.7083791e-01 -3.8670788e-01 -1.7363884e-02 -1.2338936e-01 1.9599961e-01 1.9869879e-01 6.2462502e-03 -3.5363348e-01 -2.8900453e-01 -1.8628929e-01 -4.2723502e-01 1.7009534e-01 2.3335150e-02 1.6404232e-01 -3.9052168e-01 -3.2316148e-01 1.8671055e-01 4.0671240e-01 -4.9294956e-01 4.9857617e-01 3.2741427e-01 1.5630246e-01 3.4325714e-02 2.9466008e-01 -2.7607171e-01 3.6718161e-01 3.5391198e-01 -2.0745251e-01 -3.2884976e-01 4.5859376e-01 -4.6373162e-02 4.8215782e-01 2.5469714e-01 1.4676791e-01 -3.2890651e-01 -4.5154318e-02 7.9398907e-02 -3.3397302e-01 4.0020586e-01 -2.7164801e-01 -3.8410507e-01 -2.0455794e-01 2.2129677e-01 -1.5755890e-02 2.0384708e-02 1.8837942e-01 3.4356486e-01 1.8469908e-01 2.2790706e-01 -4.9928631e-01 -3.8470570e-01 
6.3390031e-02 2.6674631e-01 -3.0119208e-01 2.8966640e-01 -3.0928301e-01 -2.2118021e-01 4.2544895e-01 4.9953828e-01 4.4129066e-01 4.7122618e-01 1.6062376e-01 3.5805344e-01 -2.2762941e-01 -2.9299308e-01 -2.5559109e-01 -3.2260091e-02 -2.3699748e-01 -3.7005500e-01 1.1232967e-01 2.0885498e-01 5.1614948e-02 1.6896402e-01 -3.8420200e-01 4.0309906e-03 -3.2469854e-01 -4.6120496e-01 -4.3526766e-01 4.0218274e-01 -2.6554446e-01 -1.1079840e-01 -3.3709688e-01 4.3932881e-01 2.1471414e-01 -1.5106882e-01 -3.3903183e-01 1.6697077e-01 3.1196919e-01 -7.5627399e-02 3.3631471e-01 1.6965630e-01 6.3765804e-03 -2.7423610e-01 -3.2399497e-01 -4.8599495e-01 -2.4564893e-01 5.2615138e-02 3.2559753e-01 3.9796692e-01 -3.0467961e-02 4.5926827e-01 4.3835877e-01 -4.4195794e-01 -2.0701739e-02 8.1282231e-02 -1.6063215e-01 -4.0215463e-01 -3.6706871e-02 3.6981440e-01 2.4158479e-01 9.3934302e-02 -3.1418536e-01 2.6313715e-01 1.9503317e-01 -3.6800478e-01 -1.9766726e-01 -2.4612221e-01 4.7266493e-01 1.4349771e-02 -4.0797124e-01 3.6120489e-01 -2.4536345e-01 2.2555157e-01 2.0212506e-01 -4.3363739e-02 4.4355033e-01 -2.6380844e-01 3.8530467e-01 -7.6044453e-02 -2.9516693e-01 8.5408067e-02 2.1259174e-01 -4.1181681e-01 -1.7109877e-01 4.2219674e-01 4.0149754e-01 1.3994949e-01 1.8317757e-01 3.2594698e-01 -3.2640931e-01 -7.2446103e-02 1.3209291e-01 -4.8279310e-01 3.5910003e-01 1.1468095e-01 3.0407057e-01 2.7160459e-01 4.2406695e-01 2.4937396e-01 1.8960219e-01 -2.2580005e-01 1.6854763e-02 4.7743611e-01 2.8457251e-01 2.6253134e-02 -4.0098367e-01 4.2790750e-01 -4.7888893e-01 -1.0433223e-01 4.3965235e-01 -4.9191031e-01 4.1949411e-01 4.9886860e-01 1.2182564e-01 2.8791781e-01 -4.3276636e-01 -3.3887581e-01 1.9678327e-01 -4.1947175e-01 -3.0648195e-01 -2.1782593e-01 2.6466940e-01 -8.9463389e-02 -1.8850951e-01 -3.1618337e-02 4.4244616e-01 -2.9238671e-01 2.1465232e-01 -2.3899749e-01 1.1442891e-01 4.5299924e-01 -2.5550795e-01 -2.2172901e-01 -4.5928837e-01 -5.7607773e-02 1.7467723e-01 -4.7496020e-01 1.0161438e-01 -4.8004066e-01 -1.2249558e-01 3.0593447e-02 2.9245674e-01 -4.8990684e-01 -2.8503104e-01 -3.7697158e-02 -4.8832690e-01 4.0278583e-01 1.8841569e-01 4.2498449e-01 2.7828787e-01 1.7103432e-01 2.5221519e-01 -1.5912013e-02 1.9738757e-01 4.4495660e-01 -1.6101416e-01 -5.6218167e-02 -3.7337858e-01 -2.6140910e-01 1.1180885e-01 -4.4513281e-01 -1.0347606e-01 2.2583280e-01 2.6623277e-01 1.2020560e-01 -8.5588256e-02 4.6992992e-01 -4.2359653e-01 -2.7397916e-01 3.2341996e-01 -1.1986305e-01 -4.1110509e-01 -2.1817003e-01 4.0589787e-01 -1.4380345e-01 2.6701217e-01 2.5137944e-01 4.1273915e-01 1.9858918e-01 2.2528013e-01 1.1359428e-01 -2.2593932e-02 -1.7657282e-01 -3.9384662e-01 1.5806371e-01 2.1499108e-01 -3.7852931e-01 -2.9332420e-01 -3.7102443e-01 -2.9173453e-01 2.0023035e-01 -4.4976735e-01 -2.7464262e-01 1.0749127e-01 -4.9953718e-01 -5.4056643e-02 -2.5679749e-01 1.0158294e-01 -1.6995282e-01 -1.0936397e-01 -3.7855992e-01 3.5294366e-01 4.0308861e-02 2.2329025e-01 -4.7340930e-01 4.9513738e-01 -1.6314175e-01 4.4339228e-01 4.2409741e-01 3.6857918e-01 -3.8607976e-01 -1.9085327e-01 3.9017120e-01 -2.2332937e-01 -6.2193456e-02 -2.6058773e-02 3.9811731e-01 4.6105726e-01 -1.7588324e-01 3.6450144e-01 3.7694248e-01 -4.8692791e-01 1.5156167e-01 2.7962612e-01 4.1175198e-01 2.1571806e-01 2.4089491e-01 1.2099680e-01 5.2204174e-02 2.0593243e-01 1.2738791e-02 -3.7496523e-01 1.0951687e-01 -3.7629777e-01 1.4003613e-01 3.0267791e-01 -3.9836167e-01 3.1277567e-01 -1.4153989e-01 -4.7779346e-02 -3.7994521e-01 -3.9659461e-01 2.4274958e-01 7.6396069e-02 4.9682852e-01 1.8270081e-01 
1.7065234e-01 4.8390578e-01 2.7685536e-01 -1.0683515e-01 3.6444893e-01 6.2741852e-02 -4.9332075e-01 1.3375407e-01 1.0156135e-01 3.0172302e-01 -3.4692725e-01 3.4056822e-01 2.7381212e-01 2.7974113e-01 4.5846494e-01 -2.8440573e-01 1.5505863e-01 -2.4123748e-01 -1.2344417e-01 3.7761252e-01 4.8250598e-01 6.9084304e-03 3.5330316e-01 4.6197618e-01 -4.5379370e-01 7.8614363e-02 2.7737665e-01 5.1982125e-02 2.7351696e-01 4.2306238e-01 -2.3965303e-01 -4.8730940e-01 3.5856102e-01 -3.7303341e-01 3.2744654e-01 -4.8451212e-01 2.2513773e-01 1.4203775e-02 2.4176764e-01 -4.7820885e-01 4.9748710e-01 -2.1010672e-01 2.1285994e-01 -1.2952238e-01 -6.6915087e-02 -3.8124258e-01 -3.5832476e-01 -1.2230792e-01 4.0997395e-01 1.8356336e-01 -1.0386790e-01 1.7890070e-01 6.3628042e-02 4.0105283e-01 -2.0081847e-01 2.3967555e-01 2.7655998e-01 3.0473662e-02 -4.9628838e-02 4.7675840e-01 -2.9576911e-01 3.7385592e-01 -4.1175286e-01 -8.9423939e-02 -1.7484569e-01 -3.3002889e-01 4.4514221e-01 4.1752280e-01 -4.0377364e-01 -3.0000622e-01 4.2086722e-02 2.7957478e-01 1.6873521e-01 2.4332485e-01 1.5712409e-01 2.3043465e-02 4.6368171e-01 4.7912073e-01 3.5931521e-01 -1.9293298e-01 -2.0539334e-01 -2.5268383e-01 3.5672201e-01 -2.0654778e-02 3.2336298e-01 -3.6829408e-02 -5.6115608e-04 9.8466908e-02 4.0438378e-01 -8.7789841e-02 -1.7891274e-01 -2.2882215e-01 2.3649252e-02 4.9489508e-02 -4.4440089e-01 2.6201183e-01 -1.7024116e-01 -6.4620157e-02 -2.6751256e-01 -7.2596688e-02 3.8176403e-01 3.1043273e-01 8.5328047e-02 -9.3736328e-02 2.4102000e-01 -1.9183437e-01 3.5841668e-01 3.8237926e-01 -9.1808948e-02 3.7676347e-01 -2.8640564e-01 -3.1184164e-02 3.4632949e-01 3.9479369e-01 -2.0671061e-01 1.3739081e-01 -4.7173817e-03 2.3339206e-02 -1.7748807e-01 -2.5368412e-01 -8.0974061e-03 4.0430307e-01 3.7954562e-01 2.7835746e-01 -3.9000668e-02 4.3605671e-01 7.8990616e-02 -2.9757198e-01 -4.8954633e-01 2.7073463e-01 -9.5306349e-02 1.6142108e-01 2.0970042e-01 2.8166790e-01 -1.8162144e-01 4.4805349e-02 1.7656952e-01 -2.3243419e-01 4.5008577e-01 2.5101546e-01 -3.4351137e-01 3.8599682e-01 3.9908196e-01 3.9134520e-01 1.5902158e-01 3.7846376e-01 -1.4019804e-01 -3.6946442e-01 -1.6968978e-01 -8.5624200e-02 -4.6913901e-01 1.2342532e-03 -9.3600802e-03 3.7009528e-01 -4.6340748e-01 1.0667274e-01 4.4806786e-01 -4.2690262e-02 4.5096147e-01 4.6267181e-01 5.5515802e-02 7.9927958e-02 -5.1482671e-02 2.7868807e-01 1.8072170e-02 -4.8281893e-01 2.7615049e-01 -3.8187467e-01 -2.5941484e-01 -2.4630298e-01 -4.2690248e-01 4.1265762e-02 -2.5475719e-01 -2.7623766e-01 -2.6350651e-01 1.3906793e-01 4.1727979e-01 1.0532731e-01 -6.2315969e-02 -2.4152237e-01 3.1303963e-01 1.5779795e-01 4.4278098e-01 -4.0404852e-01 3.4557993e-01 -1.6903326e-01 -4.3051652e-01 3.9668526e-01 4.3069789e-01 3.4165305e-01 -1.7487582e-01 2.5779872e-01 -3.2598081e-01 2.9264295e-01 1.3289293e-01 -2.2789108e-01 -4.8346364e-01 3.1470332e-01 -1.2745460e-01 -3.0749428e-01 -2.5285234e-01 -4.3497576e-01 -2.6847806e-01 -3.4857983e-01 -1.1663483e-01 3.7543719e-01 -4.1867308e-01 6.2639510e-02 2.2112326e-01 2.6878989e-02 -8.1419678e-02 -5.6691341e-02 -3.2965000e-02 3.8357724e-01 -3.3100658e-01 4.9930162e-02 4.4041937e-01 4.6256508e-01 2.8028555e-01 -3.8958710e-01 -1.7227499e-02 3.0201107e-01 3.4229509e-02 -3.6514930e-01 -1.5101897e-01 4.3936177e-01 -4.5384643e-01 -1.1303131e-01 3.4495202e-01 -2.4634910e-01 -2.9527078e-01 -4.4067419e-01 3.5536309e-01 1.8923039e-02 -1.1365275e-01 -3.6434271e-01 3.7175347e-01 -3.0675357e-01 1.8478748e-02 4.9107977e-01 -6.0078659e-02 1.9215153e-01 -4.6026564e-01 -4.2670626e-01 6.0781870e-02 
4.6462929e-01 -1.8859679e-01 3.0139234e-01 -3.4826002e-01 -4.9373687e-01 2.4349738e-01 -6.9839143e-03 2.4690772e-01 -2.7706674e-01 2.1508386e-01 -4.5447645e-01 -4.3058260e-01 -2.2328442e-01 -3.0534000e-01 2.6632704e-01 7.0857311e-04 -1.9911233e-01 -2.6082192e-01 -3.0981385e-02 -1.0076396e-01 4.1621662e-01 4.7313423e-01 6.8880899e-02 -4.3437424e-02 4.7484889e-01 5.4959135e-02 8.9028529e-02 3.4103860e-01 1.5889690e-01 -3.1687912e-03 -1.1680307e-01 7.0037466e-02 -9.4277447e-02 6.3421458e-02 -1.9552330e-01 -1.3160562e-01 -5.8865241e-02 -2.4390024e-01 -3.6066156e-01 3.2366845e-01 -4.2627045e-01 -1.0423887e-02 -2.9058225e-01 1.9359914e-01 -4.3835108e-01 -1.6368150e-01 2.7197746e-02 -4.0719053e-01 2.1152911e-01 -4.6129791e-01 2.4644010e-01 -3.6411503e-01 4.5595173e-01 4.3888976e-02 -5.8952602e-03 1.6904407e-01 2.0632542e-01 1.0646420e-01 -3.7281699e-01 -2.1619606e-01 1.0181470e-01 4.3641581e-01 3.8775847e-01 4.8457183e-01 6.0351437e-02 -4.3888155e-03 4.3533435e-01 2.4014525e-01 4.2825276e-01 3.6712129e-02 4.4061165e-01 3.3564674e-01 -2.5929235e-01 3.6576189e-01 4.1781032e-01 -2.3867617e-01 -4.5638485e-03 4.8720388e-01 3.1823205e-01 -3.2016399e-01 2.4542236e-01 -2.0248399e-01 -2.3382978e-01 -3.9646808e-01 3.3506953e-01 3.4711193e-01 -3.5871839e-01 -4.6799545e-01 2.2032127e-01 1.3750514e-01 -3.9323635e-01 -1.6669009e-01 -1.5425782e-01 2.1467607e-01 3.3774601e-01 5.1053989e-02 1.5664985e-01 -1.5851741e-01 4.2738426e-01 4.4753667e-01 -8.1482246e-02 -2.5426532e-01 -4.0368248e-01 -5.9875329e-02 -1.1757071e-02 -2.2853012e-01 4.9162980e-01 2.2421305e-01 -2.4855016e-01 4.7377227e-02 4.4624572e-01 3.9121093e-01 7.8664038e-02 -4.1086306e-01 3.0773822e-01 -3.3414821e-01 -1.3609092e-01 3.6659305e-01 -1.0558485e-01 -2.8785681e-02 3.1835433e-01 4.6269793e-01 4.5068949e-01 4.2310519e-02 3.6511344e-01 -1.0006495e-01 4.6507553e-01 1.3341872e-01 -4.3617494e-01 -3.3014023e-01 1.0256813e-01 -2.6279105e-01 4.5759796e-01 -3.9910617e-01 -4.7509347e-01 -2.3357792e-01 9.8376587e-03 -1.6426840e-01 2.8846436e-01 -9.2697229e-02 4.8185911e-01 -4.7715488e-01 2.3928514e-01 4.0144786e-01 -3.4585781e-01 -1.3219905e-01 -2.1683047e-01 -2.9090802e-01 2.2714385e-01 -1.8468279e-01 -1.7156457e-01 3.9086333e-02 -2.7777144e-01 1.5335098e-01 2.4463390e-02 1.4521320e-01 -4.9615426e-01 -1.7645854e-01 2.5471290e-01 3.6158721e-01 2.7216241e-01 3.6976590e-01 -2.2350467e-01 3.1672199e-01 -1.0200203e-01 -2.6444863e-02 -1.5162862e-01 -4.0436514e-01 3.7262025e-01 2.1865707e-01 -3.7683487e-02 -8.1409827e-02 -5.6593778e-02 3.5329319e-01 -1.1622600e-01 -6.0020295e-02 4.1881295e-01 -2.7508942e-01 2.8943542e-02 2.4426776e-02 -1.4696200e-01 3.0334553e-01 6.0225251e-02 2.4866671e-01 1.2379133e-01 -1.1637729e-01 -2.7011942e-01 -2.5364145e-01 4.2303680e-01 2.2120006e-01 6.7158818e-02 -1.6879295e-01 -1.5217468e-02 1.1087580e-01 -5.8322366e-02 -9.0602588e-02 1.4335626e-01 4.7034604e-01 2.6517607e-01 2.4319873e-01 3.7031875e-01 -6.6863383e-02 4.0089402e-01 -1.9565577e-01 1.3719769e-01 3.0815926e-01 -3.0742800e-01 2.7167490e-01 -1.7950126e-01 -4.1511056e-01 -2.9647912e-01 -4.8971434e-01 -9.6702882e-02 4.8278833e-01 1.6710105e-01 4.6547962e-01 -2.3494724e-01 -1.0809252e-02 -2.8419047e-01 -2.5106446e-01 3.0305860e-01 -2.3838394e-01 4.7354592e-01 -2.4047416e-01 3.9850726e-01 3.1694311e-02 -2.6835980e-01 4.1304597e-01 3.8421723e-01 -3.6199918e-01 -5.4174155e-02 9.0217131e-02 6.6312010e-02 -3.8436532e-01 3.9675014e-01 3.6175632e-01 4.2963452e-01 2.0942311e-01 -4.8380198e-01 1.9316962e-01 4.6372478e-01 2.8065325e-01 1.2637913e-01 -3.4626939e-01 4.7753988e-01 
-4.5353757e-01 2.5732861e-01 -8.8132238e-02 4.5492455e-01 5.2082482e-02 4.5954673e-01 -1.6244900e-01 2.2417030e-01 1.5036601e-01 -2.8257173e-01 -4.6366856e-01 3.7890632e-01 -3.0763422e-01 -7.6475430e-02 6.4184079e-02 1.1115082e-01 3.6518519e-02 -3.0852734e-01 -2.8912474e-01 3.2909566e-01 3.8041398e-01 -2.8525048e-01 4.0222582e-01 -3.0682918e-01 -3.3449737e-02 3.4029164e-01 4.6808514e-01 -3.4785559e-02 9.4342057e-02 3.9858991e-01 -3.8588817e-02 2.4083287e-01 4.0279114e-01 -5.1497902e-02 2.9216878e-01 -1.0364974e-01 2.5536599e-01 1.7818444e-01 -1.4854285e-02 -2.2401020e-01 -3.2029110e-01 3.3421951e-01 3.6130163e-01 -1.1444228e-01 -3.7294515e-02 -2.5246034e-01 3.0706330e-01 6.1991725e-02 6.4898499e-02 1.5972988e-02 9.9770869e-04 -3.5967776e-01 -3.4866654e-01 -9.7259496e-02 3.9091094e-01 -4.3868600e-01 3.7930650e-01 -4.9200379e-02 3.7580867e-01 -2.5610437e-01 4.8456998e-01 -3.9401764e-01 1.9642462e-01 1.3439158e-01 -7.9459062e-02 1.7195620e-01 -1.6774580e-02 9.8778539e-02 2.9621704e-01 8.2416383e-02 -1.6795541e-01 -3.2873819e-01 8.9969417e-02 2.7425856e-02 1.0740302e-01 -3.7988996e-01 -2.9824078e-01 -2.9854812e-01 -4.5095292e-01 -2.1648542e-01 4.2888071e-01 3.0332475e-01 3.0263044e-01 4.8144516e-01 -3.4886632e-01 1.3082193e-01 -3.9660160e-01 -1.5318220e-01 -4.0134428e-01 4.2350932e-01 -2.5727375e-01 -2.6158959e-01 -4.1614606e-01 2.4922133e-01 1.4538368e-01 -1.6239172e-01 -2.1373198e-01 3.6324308e-02 -2.0263360e-01 -3.6469079e-01 -3.5680454e-02 4.7626994e-01 8.4747918e-03 -2.9197226e-01 -4.2192679e-01 -1.1155518e-01 -4.0184060e-03 1.7544703e-01 3.7048562e-01 -2.9419413e-01 4.6956785e-01 2.8081168e-01 4.2086691e-01 -4.5295232e-01 1.4136469e-01 -3.7470491e-01 4.9163138e-01 1.0415909e-02 3.3672619e-01 -4.8653255e-01 -4.6508343e-01 3.9347324e-01 -1.2160471e-01 4.5167407e-01 3.8330280e-01 3.0935045e-01 -1.3657915e-01 -2.7297165e-01 -3.6985499e-01 -4.6162375e-01 -2.0976314e-01 1.0428657e-01 2.4048246e-01 1.4334443e-03 1.9718921e-01 -2.2874695e-01 -1.7793343e-01 -4.8338689e-01 1.2663556e-01 -3.6612146e-01 3.4816678e-01 -2.6316477e-01 -3.3047022e-01 -4.3979273e-01 5.1261767e-02 -2.9044191e-01 4.3738003e-01 -3.5658507e-01 3.4493368e-01 1.1745161e-01 3.4461391e-02 4.4107938e-01 -2.1602624e-01 2.4080718e-01 2.8642844e-01 -1.7540340e-01 2.1515765e-01 2.8244810e-01 3.2930141e-01 2.2987255e-01 -4.1052006e-01 -2.4564400e-02 2.4250852e-01 3.4680905e-01 -1.9145248e-01 9.5207309e-02 8.9632350e-04 2.5554703e-01 9.0771928e-02 2.9613961e-01 1.3627240e-01 7.7903862e-02 -1.9905782e-01 -1.4212629e-01 -3.3382621e-01 1.8114753e-01 -4.8384148e-01 9.1441564e-02 1.5990443e-01 4.6451939e-01 -4.5425553e-01 -2.3589767e-01 -3.3913672e-01 4.9229611e-01 1.0830177e-02 -3.8507571e-01 4.2263112e-01 -2.7415042e-01 8.3768369e-02 1.3714931e-01 2.2043022e-01 3.4872954e-01 3.7427028e-01 -6.8427777e-02 3.2604673e-01 -2.9445915e-02 3.2748044e-01 2.1552822e-01 1.0106346e-01 -4.9576686e-01 2.9912718e-01 -1.3785386e-01 -4.9413394e-01 2.6144517e-02 -3.3732005e-01 2.2837342e-01 1.7152119e-01 3.0480647e-01 1.8433071e-01 1.7794706e-01 4.0623466e-01 -4.4397500e-02 1.4104130e-01 -4.4768204e-01 2.0040001e-01 -3.4472758e-01 4.6278692e-01 -4.4248079e-01 -2.3293019e-01 4.0002501e-02 4.5350135e-01 1.4670780e-01 -2.4133918e-01 1.3194589e-01 -4.9423000e-01 3.4293933e-01 -3.0717223e-01 4.7932509e-01 -1.2283439e-01 -1.5521682e-01 -2.4059905e-01 8.3159855e-02 2.4126463e-01 4.8167695e-01 2.9397932e-01 -4.2946327e-01 1.8019375e-01 1.8030896e-01 -1.5207591e-01 -1.8000241e-01 2.6334140e-01 3.7606211e-01 2.9041919e-02 9.5984733e-02 -2.0376828e-02 -3.6959834e-01 
-7.0041506e-02 2.0274396e-01 -3.5170591e-01 4.2043952e-03 -2.4927280e-01 1.1890030e-01 -1.7411147e-01 -9.3625824e-02 3.4195428e-01 -1.2136440e-01 -2.2648241e-02 4.5838010e-01 -1.4074605e-01 2.8763530e-01 2.2428882e-01 3.7323314e-01 9.5166013e-02 1.0339846e-01 -2.0564386e-01 2.9044090e-01 1.4190671e-01 2.1774846e-01 -3.7962732e-01 -2.5860528e-02 2.1088930e-01 2.6415948e-01 -4.8926063e-01 4.0570937e-01 -4.9085433e-01 -1.1722659e-01 -4.1538144e-02 4.3414721e-01 -1.3997071e-01 5.0209774e-02 4.0533577e-01 2.8542825e-02 -4.9430630e-01 7.8705689e-02 -9.8206436e-02 6.8962117e-02 4.6518308e-03 2.4591235e-02 -3.0609327e-01 -4.4987721e-01 -3.9678914e-01 -3.8160121e-01 4.6038582e-01 -3.5934842e-02 2.8367543e-01 -8.0190186e-02 4.9750283e-01 -1.5664442e-01 -4.0464592e-02 -4.3018259e-01 1.6857149e-01 -4.4045006e-01 -2.5795115e-01 -2.9754884e-01 8.5022994e-02 -3.8303124e-01 -1.0479131e-03 6.2896742e-02 4.6590802e-01 -3.4357075e-01 7.5013830e-02 -2.2554724e-01 -3.6325033e-01 3.0935054e-01 -4.4927381e-01 -1.6268013e-01 3.3321666e-01 -1.1960333e-01 9.2062977e-03 4.6749414e-01 3.7499118e-01 -3.0150916e-01 1.1615825e-02 -4.2265410e-01 -3.3403431e-01 -2.3755449e-01 3.4843495e-01 -3.0198283e-01 -1.0357103e-01 -3.8774957e-01 2.8321132e-01 3.9995509e-02 4.7741680e-01 1.8590749e-01 -1.3505646e-01 -4.0383738e-02 1.5599577e-01 -4.1198127e-01 -9.4123436e-02 -4.5552201e-02 1.3614869e-01 -4.7458079e-02 1.7938427e-01 3.7490424e-01 -1.0756748e-01 1.1398009e-03 3.6446192e-01 1.5496945e-01 -4.9758711e-02 -3.5930159e-01 -3.5347924e-01 3.2577355e-01 -4.5942782e-01 -3.2421748e-01 4.3254787e-01 2.8411380e-01 5.4014580e-02 -2.0568773e-01 1.5338013e-01 -1.5383006e-01 4.6451764e-01 -2.3345305e-01 -3.6037900e-01 -3.6456274e-01 -3.4808948e-01 7.0752466e-02 -2.6531712e-01 -7.4547456e-03 4.2467871e-01 1.8540301e-01 -6.4496563e-02 -3.1217404e-01 -3.0388332e-01 -8.0118508e-02 4.0101370e-01 2.6193517e-01 -3.1434577e-01 -2.9933009e-01 2.1681432e-01 -3.9896626e-01 -4.8948545e-01 -2.6570350e-01 -5.9762890e-02 2.1718853e-01 -2.2534013e-01 -3.2039976e-01 2.0588425e-01 1.0213592e-01 8.6218398e-02 3.9986142e-01 2.0381199e-01 2.4971308e-02 9.9653743e-02 1.4992875e-01 3.8675424e-01 -4.7486515e-01 -4.2822259e-01 -3.6944902e-01 4.4662214e-01 -4.1848012e-01 -2.9721998e-01 -1.6139054e-01 3.1420461e-01 -2.5525980e-01 -3.1905270e-01 -1.1580951e-01 1.4146532e-01 -3.3536054e-01 2.2113954e-01 4.2390251e-01 2.5851035e-01 4.4609756e-01 2.0668596e-01 -2.9109489e-01 2.3065948e-01 -2.6299320e-01 3.8613024e-01 1.7868764e-01 -4.7368367e-01 4.4232569e-02 -4.0208442e-01 2.2488427e-01 2.5596949e-01 1.5112938e-02 2.2248716e-02 -2.6965593e-01 -3.8280275e-01 6.2700427e-02 -1.9392710e-01 4.1745072e-01 -4.6065040e-01 2.3256356e-01 -3.2837770e-01 2.1165315e-01 -2.8905210e-01 -1.1870734e-01 -2.0069596e-01 1.6513972e-01 8.1496475e-02 -1.1267184e-01 -1.0569744e-01 -2.0749561e-01 -2.9597783e-01 -3.1557400e-01 4.8200637e-01 2.3820415e-01 -3.9966706e-01 6.9683630e-02 -2.5703430e-01 -4.7689982e-01 4.8161428e-01 -3.0067281e-01 -3.0806970e-01 1.1124636e-01 2.8421030e-01 2.0584818e-01 -1.1883260e-01 -3.5153481e-01 1.2715118e-01 1.4750787e-01 1.9691171e-01 -1.9991938e-01 4.3482889e-01 -5.1066998e-04 1.3633028e-01 -1.2254773e-01 3.2088251e-01 -4.2793111e-01 -1.7388566e-01 -5.7823687e-02 3.6767876e-01 -4.6151057e-01 2.4915383e-01 2.0227966e-01 2.3682627e-02 1.8218507e-01 -8.9606102e-02 -1.1910193e-01 -2.2608066e-01 -1.7060067e-01 1.1080818e-01 3.5663008e-01 -4.4202994e-01 -4.5571919e-01 3.3574151e-01 3.4340408e-01 -4.6057197e-01 -3.6100430e-01 2.6155458e-01 -3.3167664e-02 
3.9730593e-01 2.3929754e-02 -2.2513541e-01 4.1416073e-01 2.5433053e-01 3.2517172e-01 3.2690231e-01 2.1199773e-01 1.1474855e-01 -4.2794603e-01 -2.3029507e-01 3.9952132e-01 -4.9699423e-01 4.0182199e-01 4.9419915e-01 -1.7804526e-01 -3.1961835e-01 -2.1257358e-01 -1.7891747e-01 1.0223942e-02 3.6923520e-01 3.4059565e-01 -4.4971092e-01 -1.7783824e-01 -1.0093914e-01 9.0243420e-02 -3.7459628e-01 -3.3788717e-01 5.2793097e-02 1.8878927e-01 3.9830647e-01 1.0226196e-01 2.7837171e-02 -1.7166725e-02 3.3707889e-01 -2.9188267e-01 -3.1479805e-02 -3.2324027e-01 -1.0482476e-01 -4.5133373e-01 2.8109290e-01 6.1332930e-02 -4.0120660e-01 -4.1647945e-01 1.9774764e-01 -3.4580247e-01 -1.2999202e-01 -3.6883318e-01 -3.0685125e-01 2.0773356e-01 -3.0132184e-01 2.2749903e-01 -1.7093037e-01 -2.9438535e-01 -9.1772950e-02 2.4076781e-01 1.6184223e-02 6.5402611e-02 -2.0314061e-01 4.0195151e-01 1.4964686e-01 -2.1392883e-01 4.3848168e-01 -3.1062163e-01 -2.4190582e-01 -4.1882069e-01 2.8719290e-01 -2.4964856e-01 -7.8134926e-02 -2.5787370e-01 3.7353598e-01 -2.9786712e-02 -7.6831093e-02 7.2593372e-02 -2.7392237e-01 2.2705721e-01 4.1113465e-01 2.8356095e-01 -3.1542785e-01 1.3337164e-01 2.6612258e-01 -3.0291000e-01 -4.5321184e-01 -4.3533829e-02 -4.2641539e-01 -1.6885954e-01 -2.3513281e-01 -9.8981946e-02 3.2530202e-01 -7.7547765e-03 4.8063874e-01 -3.2120876e-01 4.6105245e-01 -4.8438809e-01 1.6473393e-01 -4.5823551e-02 -2.0104665e-01 -1.1983006e-03 7.8481930e-02 3.2184629e-01 -6.7244977e-02 -9.4433238e-02 4.5593580e-01 6.9045814e-02 3.3957272e-01 4.2951798e-01 -2.2941670e-01 2.6735160e-01 1.6738378e-01 3.3731982e-01 1.0258504e-01 4.0644580e-01 3.6626244e-01 5.0950235e-02 -3.7788831e-01 9.6678993e-02 2.7747286e-01 8.2553881e-02 -3.5901355e-01 -4.2373232e-01 -2.4410567e-01 3.6089501e-01 4.6210399e-01 -4.0013675e-02 -1.3532048e-01 -2.5737448e-01 -1.3909146e-01 -1.0468341e-01 -1.1026712e-01 -3.4445104e-01 -2.3966569e-01 3.1940475e-01 3.6917738e-01 -2.2112868e-01 4.9068977e-01 -3.9393338e-03 -1.3660091e-01 3.9573834e-01 -3.7578285e-01 4.3120324e-01 6.5427433e-02 3.9281896e-01 -1.5704150e-01 -3.6564865e-01 3.1867359e-01 -2.1785813e-01 -5.6167479e-02 2.7379873e-01 -3.9040524e-01 -1.1484732e-01 2.8272274e-01 4.6274738e-01 -4.6156830e-01 4.4116180e-01 3.3067688e-01 -3.4622888e-01 4.2825164e-01 2.8123026e-01 1.7719869e-01 4.0436293e-01 7.3755491e-02 1.6870769e-02 1.1604054e-01 4.5146553e-01 -1.6501756e-01 -1.1399625e-01 7.1066718e-02 -2.1278158e-01 -1.2191142e-01 4.4849838e-03 1.8001517e-02 -3.7305443e-01 -4.3645682e-01 -2.9253317e-01 4.0223439e-01 -5.6436526e-02 1.2714001e-01 4.7533111e-01 1.7022454e-01 -2.0277902e-02 -1.6366305e-01 4.7366172e-01 -3.5217544e-01 1.5453170e-01 -1.9144571e-01 -1.6063450e-01 -4.8726524e-01 2.1653865e-01 1.9133000e-01 2.6656289e-01 -4.9853817e-01 -3.6077499e-01 4.6586675e-01 2.0025122e-01 -4.6203573e-01 1.1168274e-01 -3.3997515e-01 1.3084982e-02 -7.8571482e-02 -4.6745825e-01 -1.8854047e-01 3.6601257e-01 2.8328396e-01 3.3903264e-01 -1.9193315e-01 -4.9173374e-01 2.9710017e-01 -3.7498651e-01 -2.9279113e-01 -1.1571549e-01 -2.3213505e-01 8.0692150e-02 -4.0020780e-01 2.7925274e-01 -4.0459010e-02 -2.6808147e-01 3.5561046e-02 2.2237991e-01 -3.7502666e-02 3.5909076e-01 -4.3237086e-01 -6.7108340e-04 -4.3341400e-01 -2.3666863e-01 3.7710141e-02 -2.7128300e-01 1.9780131e-01 4.6605955e-01 -1.9011277e-01 -5.1512544e-02 6.3983855e-02 2.6618297e-03 -6.9208382e-02 4.8884179e-01 2.6622460e-01 3.1211848e-01 3.2657240e-01 6.8666290e-02 -1.3886645e-01 2.4764406e-01 -3.9942814e-01 2.1230893e-01 -1.4322464e-01 1.2658971e-02 3.2328190e-01 
-1.0835768e-01 4.7916933e-01 4.3650799e-01 3.2836358e-01 4.8652049e-01 6.0838217e-02 2.9672974e-01 3.5346437e-01 1.3489765e-01 -3.8008233e-01 -1.7988295e-01 4.6523397e-01 -3.5067060e-01 2.1795939e-01 -3.6977589e-01 2.6395624e-01 -2.6845652e-01 4.3675838e-01 -3.6165891e-01 -7.9159281e-02 4.1722387e-01 1.3003448e-02 4.3132348e-01 -4.0717084e-01 -4.1515396e-02 4.3008165e-01 -4.2892175e-01 -2.9351937e-01 -6.5952938e-02 4.6699567e-01 -1.9256962e-01 -3.8833927e-01 6.4844417e-02 -1.1369358e-01 3.1598120e-01 -4.9553978e-01 -6.3705723e-02 3.5581416e-01 -9.7982996e-02 -1.8236261e-01 -3.1923346e-02 -3.0135139e-01 -4.5846690e-01 -3.3755315e-01 -2.5353191e-01 -3.6844027e-01 3.0317706e-01 3.4124085e-01 -3.7071915e-03 2.8964828e-01 1.3260558e-01 -2.6048965e-01 4.9159005e-01 4.6978460e-01 3.1274592e-01 -2.9818585e-01 -3.6543594e-01 4.0445247e-01 -2.6255847e-01 -3.8449467e-01 3.4490887e-01 6.2686486e-02 1.7666737e-01 -3.6199924e-01 2.6174690e-01 -4.0567902e-01 -4.9126845e-01 3.0229336e-01 4.1831131e-01 3.2352538e-01 4.0964548e-01 -1.7583277e-01 -3.1550710e-01 -2.9448245e-01 -2.0696669e-01 2.0663808e-01 2.0407038e-01 1.8581657e-01 3.4120147e-01 4.5622684e-01 -3.8071743e-01 4.8398924e-01 -4.1426558e-01 6.4695532e-02 -2.2787086e-01 -4.4901820e-01 -4.2136398e-01 3.1254117e-01 -2.0764661e-01 1.0407506e-01 7.6918023e-02 -2.4960084e-01 4.8181944e-01 -1.9697974e-01 -4.7391653e-01 -4.2304720e-01 3.4151902e-01 4.5722378e-01 2.2766733e-01 -4.9300062e-01 -2.9296405e-01 4.1527958e-01 1.8129218e-01 2.1192880e-01 3.8613805e-01 -3.4752579e-02 4.8619640e-01 -2.3574897e-01 3.6912487e-02 2.3769508e-01 -2.2458232e-02 4.6278926e-02 1.4764170e-01 2.2973231e-01 -1.2022330e-01 -1.7590389e-01 4.3601710e-01 2.7770499e-01 -9.7007809e-02 4.1029285e-01 -2.4843106e-01 -3.2318592e-01 1.5290738e-01 2.2480254e-02 -2.6088252e-01 -4.4242525e-01 6.4927085e-02 4.1186971e-02 4.3878128e-01 -4.7467912e-01 -4.5404651e-01 -2.4220806e-01 -2.3532916e-01 4.0153201e-01 -3.7426853e-01 1.0580742e-01 2.6278818e-01 4.1443663e-02 -4.7801822e-01 1.1847851e-01 2.9861606e-03 -1.2666861e-01 4.2900771e-01 1.6959699e-01 -2.6743550e-01 -3.3972944e-01 -4.9993787e-01 1.6377096e-01 3.1053537e-01 -3.2067805e-01 3.9186256e-01 1.6494292e-01 -6.9515705e-02 4.6390349e-01 -3.0458362e-01 1.0130230e-01 3.9811515e-01 2.2392374e-01 -8.0480613e-02 -3.3927434e-01 3.9537637e-01 2.3002239e-01 -1.0133808e-03 -4.7157459e-01 4.0182575e-01 5.4325123e-02 -4.4368408e-01 -1.6013670e-01 1.1756586e-01 2.8238852e-02 -1.3000987e-01 3.4399270e-02 3.1438250e-02 9.5408669e-02 1.1941256e-01 4.0006285e-01 -9.6537627e-02 1.5253092e-01 6.7461365e-02 -7.0817461e-02 1.9174569e-01 3.3739299e-01 1.2887615e-01 -4.1783493e-02 4.7992583e-01 -2.7970926e-01 1.1223282e-01 -4.4126016e-01 -3.3138558e-01 1.8579846e-01 1.5631222e-01 3.5650433e-01 -8.5792657e-02 -2.7572589e-01 3.0109402e-01 2.6752527e-01 -2.9420010e-01 -3.4799466e-01 3.2745154e-01 3.7550893e-01 -4.7563661e-01 3.0391296e-03 -4.8349561e-01 4.7379468e-01 1.8138223e-01 4.2068290e-01 4.7314419e-02 3.1765958e-01 -3.2529538e-01 -2.3054815e-01 -4.7900698e-01 -4.9745387e-01 -4.2601011e-01 1.7610232e-01 -1.2650885e-01 -4.1067845e-01 -4.2968868e-02 3.6748834e-01 -5.3148091e-03 -5.6167729e-02 -3.6879269e-02 -3.4652847e-01 3.4532097e-01 -2.9427179e-01 4.2753119e-01 -4.3303226e-01 8.4953586e-02 -1.4256039e-01 -5.4610022e-02 -2.8482865e-01 6.1846562e-02 -1.0622421e-01 -2.7303615e-02 1.2822243e-01 -9.4330789e-02 6.4126032e-02 -4.2662904e-01 3.5327402e-01 -3.4427412e-01 6.7360423e-02 2.1329396e-01 -3.1746598e-01 -2.3443922e-01 -3.5828762e-01 4.4548743e-01 
-2.0893249e-01 8.3846724e-02 -3.3447853e-02 2.8595941e-02 -2.1457335e-01 3.9376577e-01 2.0714294e-01 -2.2132135e-01 2.5272743e-02 2.5990945e-01 2.5813863e-01 4.0630916e-01 3.4974617e-01 -4.2319739e-01 4.0377021e-01 -3.9214185e-01 -3.5188035e-01 3.7071247e-01 -3.2889497e-01 4.4738924e-01 3.2828353e-01 6.3577593e-02 4.9770625e-01 3.5797507e-01 4.3894697e-01 -3.4779296e-02 7.8296794e-03 2.5432149e-01 -1.2851766e-02 -4.8870104e-01 3.8470987e-01 3.0051868e-01 -2.4535688e-01 -2.5107135e-01 3.5445599e-01 -2.5883129e-01 4.8823110e-01 -1.5728420e-01 -8.0084380e-02 -4.2622870e-01 5.6569165e-02 -3.8487639e-01 -3.0700719e-01 -2.1241240e-01 3.7267331e-01 -3.0161361e-01 -5.9059558e-02 2.0481107e-01 2.1704188e-01 3.3444897e-01 -2.8882257e-01 4.5140219e-01 -4.4787154e-01 -1.0663473e-01 1.5577911e-01 -2.8221711e-01 1.5947651e-01 -1.6745425e-01 -1.5013566e-01 6.3797433e-02 3.1571231e-01 2.1336498e-01 -3.5339968e-01 4.2344037e-01 -3.2229485e-02 2.4348307e-01 -1.6011613e-01 -4.9351011e-01 -2.7522763e-01 2.0767931e-01 -4.8106127e-01 2.0299878e-01 -2.6617858e-01 8.9589531e-02 5.8810008e-02 1.8066729e-01 -2.7732065e-01 -2.3978758e-01 -2.0620005e-01 3.5661184e-01 1.3334997e-01 -3.2729313e-01 9.7721898e-02 -1.0462425e-01 -3.3965202e-01 6.1580769e-02 -4.0403096e-02 1.5694326e-01 -3.5132633e-01 -2.6073931e-01 -2.6197004e-01 3.2654441e-01 6.5059167e-02 2.3846758e-01 3.4886392e-01 3.7661635e-01 -1.9219083e-01 -2.8440351e-02 -2.7450208e-01 -4.5826255e-01 2.5658004e-01 5.5663699e-02 4.3022856e-01 -4.5444483e-01 -3.2107000e-01 -3.0722625e-01 4.8797806e-03 -3.6377917e-01 -4.4746688e-01 -4.4137967e-01 -3.9024485e-01 -2.7829551e-01 -1.6905554e-01 3.5087869e-01 -2.8723366e-01 -3.2730072e-01 -1.7070348e-01 -3.5396944e-01 3.0290658e-01 -3.2788521e-01 -2.2026582e-02 2.8916866e-01 -2.7167256e-01 1.4331326e-02 1.2166327e-02 -2.8783907e-01 1.0693427e-01 4.8318599e-01 -1.5046581e-01 -8.9591312e-02 -1.8009494e-01 -4.1004603e-01 1.9695244e-01 4.3802935e-01 4.5640931e-01 -1.6455742e-01 -4.9490827e-01 -9.5532554e-02 -9.9582156e-02 2.8155177e-01 -9.3940607e-03 3.4378484e-01 1.1888056e-01 4.1281642e-01 -2.7638726e-01 -7.9198868e-02 2.5544351e-01 -3.0555118e-01 4.7480055e-01 -1.7635214e-01 -2.9495599e-01 -3.8557539e-01 -4.6373429e-01 -1.2283822e-01 -2.1480440e-01 3.5720617e-01 -1.7365307e-01 -3.6528757e-01 -2.5910889e-01 -3.7716885e-01 -3.0012312e-01 2.4603604e-01 6.0091556e-02 1.2376963e-02 -1.6115417e-01 -1.6742112e-01 -1.5427755e-01 -2.1967651e-01 -2.7557581e-01 7.2280467e-02 -4.3593431e-02 3.0227325e-01 4.4865052e-01 4.8907853e-01 -2.8316151e-01 -2.4741384e-01 5.9338730e-02 -1.8872848e-01 4.8676397e-01 4.3152920e-01 1.4900324e-01 -1.7317231e-03 -3.2562391e-01 -4.8215278e-01 -4.4896181e-01 5.8711572e-02 2.2342873e-02 -1.3932377e-01 -4.1501350e-01 1.1475215e-01 -3.7095623e-01 -2.7674346e-01 3.9298375e-01 -3.8763761e-02 1.6093206e-01 2.5743950e-01 2.2453731e-01 2.0556221e-01 9.0943697e-02 -3.3903647e-01 -1.7822285e-02 -3.0089494e-02 -2.1477218e-01 -6.5019659e-02 -4.2202911e-01 -5.7596584e-02 4.3692480e-01 -4.4386022e-01 -2.2738781e-01 -1.4868351e-01 1.7065177e-01 -1.7252080e-01 4.7363080e-01 1.0202769e-01 -2.8725146e-01 -3.3373133e-01 -3.6363148e-01 5.9309589e-03 -1.3204071e-01 -3.9150787e-01 -3.8812065e-01 1.4212840e-01 2.1971372e-01 4.0696477e-01 4.3623451e-01 -6.3150316e-02 2.9398332e-01 5.2300411e-02 1.0480058e-01 -2.2908108e-01 -4.6948723e-01 4.8396550e-01 -3.4207511e-01 1.3289741e-01 3.9057501e-01 3.6831046e-01 -8.8802705e-02 -5.6519869e-02 -3.8587304e-01 -4.6361329e-01 -1.6472894e-01 -4.1218976e-01 -6.0675440e-02 2.5516259e-01 
-3.5451480e-01 4.5283194e-01 3.6703893e-01 3.8094736e-01 -3.2931213e-01 4.9602550e-01 4.3449750e-01 -4.7952977e-01 -2.0930038e-01 4.6898349e-01 3.5722738e-01 3.8437932e-01 -3.9403551e-01 3.9081058e-01 -4.6277742e-02 4.4153711e-01 4.6802495e-01 4.7074485e-01 -2.5722104e-01 3.2303698e-01 -1.4350034e-01 -2.3797240e-01 -6.0980427e-02 -1.2949442e-01 -2.5400756e-01 1.1888651e-01 -2.1077795e-01 -2.9318765e-01 5.7420350e-02 -2.0450265e-01 4.5761851e-01 1.4978336e-01 -1.9873326e-01 -1.5193626e-01 -4.6306038e-03 -4.3907829e-01 -2.9868136e-01 -6.2538566e-02 2.7013697e-01 2.4006702e-01 -1.8115948e-02 -4.4226796e-01 4.3725435e-01 -1.7389140e-01 -4.9567642e-01 1.3739853e-01 -4.0619703e-01 1.1719233e-01 -1.6141937e-02 2.7308080e-01 4.0685091e-02 -5.4907780e-02 -1.7949881e-01 1.5471656e-01 -4.3876456e-01 3.1674141e-01 3.5940188e-01 4.2403046e-01 -5.1419989e-02 -1.4193092e-01 -2.0095270e-01 8.8168002e-02 -2.5643318e-01 3.2851575e-01 4.9517586e-01 -3.4183062e-01 1.7482385e-01 1.5057421e-01 3.4877517e-01 -4.8593247e-01 2.7565443e-02 2.1422283e-01 1.1329733e-02 -1.1386327e-01 -3.2681953e-01 3.3018484e-01 2.7732168e-01 1.2510463e-01 -3.7133511e-01 -2.3727092e-02 -4.3230761e-01 2.6453702e-01 3.5836384e-02 -4.0540411e-01 -6.7458995e-02 -2.6908225e-01 -2.8828155e-01 -6.6532544e-02 3.2938204e-01 -4.9502678e-01 2.1601150e-01 -2.5594094e-01 4.0971316e-01 3.6599649e-01 3.1424218e-02 -2.8175748e-01 2.3031030e-01 4.0090843e-01 2.7581351e-02 -1.1764112e-01 3.1698448e-01 -2.7212869e-01 8.2432186e-04 3.1338017e-01 -2.1645476e-01 -3.5287155e-01 -7.3805457e-02 4.6927318e-01 -3.6173592e-01 -1.4153659e-01 -4.0589008e-01 -3.2735858e-01 4.5385320e-01 -1.0622835e-01 -3.2539818e-01 -1.7482596e-01 2.0558347e-01 -5.2599241e-02 -3.7875693e-01 2.1584679e-01 -2.9749371e-01 2.3245443e-01 -4.6208833e-01 1.6166920e-01 -3.4213862e-01 -1.7081634e-01 -1.0376136e-02 8.4625588e-02 -4.8537979e-01 -2.0590166e-01 1.8147113e-01 2.6796254e-01 -3.4946955e-01 2.5127273e-01 -4.9415819e-01 3.3182686e-01 4.9000273e-01 -4.1735594e-01 -4.7894142e-01 2.2205286e-01 -5.8806552e-02 -4.8599232e-01 3.8416112e-01 -1.2368643e-01 1.0346144e-01 2.3037524e-01 2.8017614e-01 1.8345929e-01 -1.6941840e-01 2.9423320e-01 -9.8561120e-02 4.1234339e-02 8.9026584e-02 5.6886238e-02 -3.9617672e-01 2.6774689e-01 1.6762140e-01 1.1432529e-01 -2.4725204e-01 2.6337838e-01 1.7146114e-01 -3.1177233e-01 -4.7916229e-01 1.3123660e-01 -2.0612289e-01 -1.0995307e-01 3.1406727e-02 3.1476814e-02 -2.9659734e-01 -3.5143042e-01 -3.9517259e-02 2.4990832e-01 2.7677760e-01 -2.1043701e-01 -2.1563237e-01 -4.7413973e-01 -2.3038073e-01 1.0523788e-01 4.2886193e-01 4.6735095e-01 -3.9341357e-01 2.0153278e-02 -4.1302844e-01 1.1442165e-01 4.6549550e-01 -9.4955470e-02 -4.1206589e-01 -2.5646935e-02 -3.6912721e-01 4.4685456e-01 -1.7195362e-01 -9.3142427e-02 -2.6093886e-01 -2.9734732e-02 1.5179907e-01 1.9811693e-01 3.8117194e-01 3.6635834e-01 7.9162244e-02 2.1237162e-01 1.4952710e-01 2.3686631e-01 4.3327805e-01 9.8898252e-02 1.8555608e-01 -5.5654120e-02 6.0635069e-02 7.5121023e-02 -4.9117910e-01 -5.3832371e-02 -4.7321792e-01 -4.7248896e-01 3.1301358e-01 5.9508975e-02 -4.5516699e-01 -2.6164232e-01 3.2595547e-01 -3.5102677e-01 -1.8170750e-01 -1.9294801e-01 2.0825397e-01 -4.5030030e-01 -6.7016279e-02 8.7789788e-02 8.6383989e-02 4.5741032e-01 -9.2777648e-03 -2.3033178e-01 1.8434394e-01 2.8968163e-01 4.2552633e-02 1.0551930e-01 1.7459628e-01 1.6381197e-01 3.7301468e-01 3.9155461e-02 -7.9720838e-02 -4.2037033e-01 -2.1305589e-02 -7.3347515e-02 -4.9668944e-01 -4.7858989e-01 2.6794741e-01 2.7026576e-01 -2.7255381e-01 
-2.3417152e-02 1.1921062e-01 -2.3166048e-01 -2.1639450e-01 -4.1460370e-01 -2.7762597e-01 -2.5556790e-01 3.0935661e-01 1.6190259e-01 -1.5629096e-01 3.2036302e-01 4.2790493e-02 -3.7813767e-01 -2.9971073e-01 4.0264533e-01 1.7957395e-01 -1.2847373e-01 -4.6977887e-02 -1.4971619e-01 -4.2852534e-01 -2.7374287e-01 -2.8663139e-01 -3.0428847e-01 -2.0344763e-01 2.7420541e-01 2.5311525e-01 2.1424990e-02 -2.6406737e-01 -1.9881386e-01 4.7257553e-01 7.2966931e-02 4.8338486e-01 2.5049744e-01 -3.0208644e-01 3.8096037e-01 -7.6801330e-03 -3.5767068e-01 -4.0758458e-01 3.7625654e-01 -2.1922963e-01 -4.6296514e-01 1.6810393e-01 -2.2397354e-01 -4.0085210e-01 4.3512857e-01 1.2239084e-01 -3.8664100e-01 3.2734632e-01 -1.4991601e-01 3.5152663e-01 -3.8846710e-01 -1.6215809e-01 -3.8027159e-01 2.3903994e-02 -4.9767636e-01 -1.6454585e-01 -1.9724831e-01 1.8309634e-01 -3.9891429e-01 3.0786988e-01 2.6175450e-01 -2.1989222e-01 -1.7186497e-01 -2.0605369e-01 -2.6194482e-01 4.9233756e-01 -3.7759081e-01 -1.1777030e-01 3.7271478e-01 -1.0993043e-01 3.9206256e-01 3.2187832e-01 8.8532123e-02 -7.4645858e-02 4.4376658e-01 -3.2943733e-01 3.9246146e-01 -4.6442615e-01 -3.5387352e-01 4.7248695e-01 2.4614108e-01 -4.5543746e-01 2.8518517e-01 1.1000230e-01 4.4153532e-01 2.1599861e-01 -2.5498868e-02 -2.7814950e-01 -2.5520047e-01 2.2406089e-01 -3.0499681e-01 -1.5264275e-01 -2.0965185e-01 4.2699028e-01 1.1407856e-01 3.8862211e-01 -2.0800544e-02 2.8041194e-01 -1.3467877e-01 3.8212375e-01 1.6504391e-01 -2.3144909e-01 -2.5152845e-01 -1.3800071e-01 -4.9882342e-01 -3.2230428e-02 -6.2998686e-02 3.0959953e-01 2.9120884e-01 5.3318452e-02 -4.9300939e-01 3.8631632e-01 -2.6493307e-01 3.9283242e-01 4.5890460e-01 -5.5827987e-02 -1.0848630e-01 2.5410774e-01 -7.6070754e-02 3.4147830e-01 4.2301220e-03 1.9549103e-01 -4.6605647e-01 -3.1328120e-01 1.8885178e-01 -4.5855610e-01 1.8543218e-01 -4.7254329e-01 -1.0311966e-01 3.4472525e-01 -1.1211325e-02 1.6907639e-01 -5.4561352e-02 -3.4474391e-01 1.3722818e-01 8.1760135e-02 -4.4933936e-01 1.4956730e-01 -1.3212279e-01 1.1975707e-01 -8.6630729e-02 -1.5113389e-01 1.1484351e-01 -1.4267496e-01 -4.7571445e-01 1.5326463e-01 4.9049344e-01 1.0185762e-01 -2.7166686e-01 -1.5717013e-02 -1.8929015e-01 -4.9905147e-01 -2.8417145e-01 3.2021875e-01 -1.0575810e-01 -4.8032706e-01 -3.1985103e-01 2.1679559e-01 -2.7311701e-01 -1.2056903e-01 -1.4711032e-01 -1.6765018e-01 -1.7347100e-01 -2.9789029e-01 4.5441427e-01 3.5649176e-01 -4.8114337e-02 1.5394291e-01 -9.0466741e-02 -2.3091509e-01 -9.6505930e-02 4.2879690e-01 3.8793543e-01 -3.1751438e-01 3.2592932e-01 -5.2650333e-02 2.6350849e-01 -3.8250762e-01 -7.1332147e-02 3.6487780e-01 2.6472731e-01 3.5775736e-01 4.0285765e-01 -3.8707175e-01 2.8054821e-02 2.4741725e-01 -4.7250844e-01 2.0892758e-01 -2.1769330e-01 5.3928843e-03 2.9182765e-02 4.6581561e-01 -9.2082189e-02 2.0358675e-01 -3.2831127e-02 -3.1922678e-01 -7.8111271e-02 3.2483015e-01 -1.6162869e-01 4.3709323e-02 -3.8631601e-01 3.0936876e-01 3.2513657e-01 1.0398040e-01 4.4514535e-01 -2.7209598e-02 1.8814259e-01 -4.1495625e-01 -2.0286403e-02 -2.3993631e-01 -3.5005096e-01 1.0215100e-02 -4.3574471e-01 9.4185513e-02 -3.3406382e-01 2.6962853e-01 -2.0865134e-01 1.7584673e-01 -7.0091457e-02 1.5908382e-01 -1.4080486e-01 -2.0387745e-01 7.6163693e-02 -4.7446853e-01 3.4847903e-01 -9.3011435e-02 -3.5948556e-01 1.6210846e-01 4.7675284e-01 2.2011646e-01 5.2633305e-02 3.5111738e-02 4.2343205e-01 -3.5557393e-02 -1.6761664e-01 3.6357370e-01 -3.7401362e-02 -2.0656020e-01 2.9351276e-01 3.7868290e-01 4.8950026e-01 3.2904737e-02 -2.2947592e-01 -2.2371271e-01 
-4.5108842e-01 -2.3276558e-01 3.7059716e-01 4.8044283e-01 -3.6829134e-01 5.2359227e-02 1.2829871e-01 1.1236071e-01 -3.8444334e-03 1.3598824e-01 2.7481617e-01 4.8076254e-01 4.4910961e-01 3.2208157e-01 -1.0729527e-01 -3.0392129e-01 1.9934786e-01 3.4038405e-01 1.7523892e-01 -2.2346387e-01 -1.7213379e-01 -4.6817135e-01 3.3494712e-01 -2.5391267e-01 4.2603992e-01 -2.9514842e-01 4.2473801e-01 -3.4364046e-01 -2.4990250e-01 -3.2693486e-01 2.3134994e-01 4.5183711e-01 1.0739279e-01 -3.9705217e-01 4.9406513e-01 4.1763334e-01 -2.5837579e-01 -2.1799162e-01 4.2027657e-01 -2.4731346e-01 2.1756288e-01 -1.2351241e-01 -1.6538680e-01 -1.9499917e-01 -4.9365927e-01 2.1790570e-01 -1.9423928e-03 4.8365822e-01 -3.9884519e-01 7.7245742e-02 -5.9364533e-03 -3.1625627e-01 3.0003690e-01 3.0403839e-01 -1.3205707e-01 2.9105310e-01 4.4406788e-01 -4.7952874e-01 -1.0326398e-01 2.1750806e-02 4.3247910e-01 -1.6830515e-01 -1.0502407e-01 -5.0190051e-02 -3.2276253e-01 2.9896233e-01 2.6384852e-01 -3.1220694e-01 1.3857190e-01 3.6669977e-01 4.4203660e-01 2.4933134e-01 -1.9102409e-01 -2.4677665e-01 4.9684330e-01 1.9763342e-01 -3.3506888e-01 3.7655999e-01 1.4936536e-01 -1.3983764e-01 -4.8311246e-01 -3.6759259e-01 8.6012909e-04 -2.9358192e-01 2.0151222e-01 4.1247297e-01 1.8807412e-01 1.1962236e-01 -2.5757225e-01 6.4543832e-02 -3.1224024e-01 3.5846497e-01 3.4440813e-01 1.6362885e-01 4.1053976e-01 -4.6641220e-01 -4.0195120e-01 -3.2322391e-01 -3.9797554e-01 -5.1617603e-02 2.1443505e-01 -1.1669367e-01 -2.2393644e-01 2.9100130e-01 -2.3591496e-04 2.0845924e-01 2.1783956e-01 -3.7404247e-01 -2.3734074e-01 2.9436729e-01 -2.1543147e-01 -3.9290554e-01 -1.9399327e-01 4.3210913e-01 -6.7829705e-04 -4.3730196e-01 -4.3765351e-01 -1.0016295e-01 -4.0195054e-01 1.2109889e-01 2.1615603e-01 2.0844737e-01 2.6750656e-01 -1.9492233e-01 -1.7336796e-01 2.7271797e-01 -3.4063512e-02 3.1985509e-01 3.0270143e-02 4.7052043e-01 4.9974435e-01 -4.4811875e-01 -1.9083898e-01 -2.1374595e-01 -4.8908123e-01 3.8702208e-02 -5.1810917e-02 9.5580574e-02 7.7789586e-03 4.9505132e-01 2.5560849e-01 -8.6653828e-02 -5.6006598e-02 -7.9646173e-02 -1.8743273e-02 3.3349886e-01 -5.9435652e-02 3.7386898e-01 -3.4307043e-02 -4.7592559e-01 2.1211722e-01 -5.3388788e-02 3.5820550e-01 -1.4763267e-01 -2.2612221e-01 -4.8711139e-01 4.3864107e-01 -2.3900375e-01 -1.9662779e-01 3.0462678e-01 -2.8418513e-02 5.5298704e-02 -4.9225419e-01 -2.5866961e-01 2.4256371e-01 1.7473085e-01 1.4691607e-01 1.6979517e-01 -3.4421152e-01 -3.7478998e-01 1.5880144e-01 4.0602004e-01 -3.0270105e-02 -2.8144685e-01 -3.5966027e-01 -8.7201832e-02 9.5664581e-02 -1.8318929e-03 -2.5969191e-01 2.3435873e-01 -5.0940849e-02 3.2842699e-01 -3.7278616e-01 2.5086765e-01 -3.6582508e-01 -3.8575532e-01 -2.5875474e-01 5.9224250e-02 -1.4314318e-01 -7.7202126e-02 -2.9406429e-01 2.9018229e-01 -3.0759296e-01 4.3035837e-01 4.1009312e-01 6.0856923e-02 3.6062614e-02 -4.7112176e-01 -9.3189367e-02 2.0383971e-01 -1.8197189e-01 -3.3326599e-02 -1.4105260e-01 -1.4977287e-01 1.2480566e-01 -8.3068107e-02 4.4430688e-01 -1.7036285e-01 -3.4691500e-01 -1.1775214e-01 3.2272307e-01 2.9406776e-01 -4.8739826e-01 1.3293937e-01 -9.2491855e-02 -4.0130868e-01 2.0132895e-01 1.2060860e-01 -4.6599783e-01 4.7438402e-01 -4.4341765e-03 -4.8709375e-01 -1.1210119e-01 -4.7879996e-01 -3.2516055e-01 4.1528954e-01 -2.1796942e-01 -2.4867243e-02 -3.9573771e-01 -3.3656224e-01 3.3950276e-01 -3.0294311e-01 1.5928411e-01 8.7032545e-02 5.8053485e-03 -2.4879296e-01 -3.6854731e-01 1.7611696e-01 -4.3800238e-01 1.3012379e-01 -2.7996417e-01 4.1310756e-01 -1.3663134e-01 -5.2940005e-02 
3.5676682e-01 2.5350036e-01 -2.6118390e-01 7.1352022e-02 1.4470085e-01 -2.0482314e-01 5.8981778e-02 -5.7709521e-02 -2.7598745e-01 -4.7737941e-01 -2.1257967e-01 5.5350548e-02 2.5620244e-01 -2.3930834e-01 -7.2147714e-03 1.1274173e-01 4.8926985e-01 1.7595692e-01 -9.6310657e-02 -3.6689372e-02 2.3584726e-01 -5.8292309e-02 -4.4699430e-01 7.5330873e-02 -3.0276836e-01 -1.1659412e-02 2.3810998e-01 -4.2565776e-01 5.2844045e-02 4.3227520e-01 3.0511721e-01 -3.6061733e-01 -3.7705245e-01 -2.2659065e-02 -4.4590912e-01 -3.0057109e-02 4.1300444e-01 -1.0981159e-01 3.1616497e-02 -4.2449892e-01 -4.1760347e-02 1.1489459e-01 -2.2841105e-01 4.6317064e-01 3.1287411e-01 -1.7885514e-01 1.9990777e-01 -1.4068608e-01 4.2566004e-02 -2.1722200e-01 3.8061814e-01 -3.9790306e-01 -8.6004480e-02 -3.5149237e-01 -1.5270321e-01 3.0443172e-02 -4.8729167e-01 -4.6727737e-01 -4.3431461e-01 -1.3903784e-01 -4.0518794e-01 1.3934399e-01 -2.0263895e-01 3.9828349e-01 4.2987720e-01 2.4713896e-01 4.6832395e-01 4.2448659e-01 -3.6163864e-01 -3.7229440e-01 -2.7661920e-01 -8.2491860e-02 5.8990175e-02 2.1579412e-02 -3.2268280e-01 7.2995085e-02 1.1561645e-02 -7.9221237e-02 -1.8589861e-01 4.7261462e-01 3.7368159e-02 1.6742135e-01 2.8582925e-02 -4.4043171e-01 -2.2097103e-01 -4.1614000e-01 2.8586154e-01 -4.5005508e-01 1.8004224e-01 4.6147460e-01 -1.3138336e-01 1.7395707e-01 -4.6531971e-01 2.4552892e-01 2.6244031e-01 3.9905797e-01 4.5624223e-01 4.0114087e-01 -3.1104329e-01 -3.5784961e-01 1.7268867e-01 6.8466444e-02 1.1526392e-01 2.8480232e-01 -7.8901089e-02 -1.0475243e-01 -2.2432647e-01 4.6265284e-01 -2.3277503e-01 4.1966756e-01 2.1502704e-01 5.4762184e-02 -1.4216680e-01 -2.2138554e-01 4.4989485e-01 -3.1707898e-02 3.4673317e-01 1.0004809e-01 -9.9203847e-02 1.1246750e-01 -2.8232269e-01 -4.0291765e-01 -5.4889146e-03 -9.1861876e-02 2.3928865e-01 7.6156240e-02 -2.9142266e-01 3.8418186e-01 5.7701437e-02 -1.8854821e-01 -4.7256381e-01 2.9034424e-01 4.6152747e-01 -3.5640231e-01 9.5964753e-02 -8.4825849e-02 4.6624897e-01 -3.3861109e-01 4.1744226e-01 1.4430084e-01 -2.4203924e-01 4.7125939e-01 6.1214560e-02 4.6047756e-01 -2.9573669e-01 -1.9875682e-01 5.1085996e-02 2.8091001e-01 5.6733779e-02 4.0218380e-01 -1.4823092e-03 -4.2366914e-01 -1.3219823e-01 5.8863313e-02 7.7637379e-02 3.1350660e-01 -1.5864475e-01 -4.6871719e-01 8.6086222e-02 -4.8167026e-02 2.8941686e-01 -1.1314086e-01 -7.3534494e-02 3.2221014e-02 mlpack-2.2.5/src/mlpack/tests/data/data_3d_mixed.txt000066400000000000000000001375501315013601400223330ustar00rootroot00000000000000 3.7701413e-02 1.2973443e-01 2.3166497e-01 -2.9617368e-02 3.4942382e-01 3.0428846e-01 4.9722060e-02 -2.9116549e-01 -1.8698914e-01 4.2832096e-02 3.1002101e-01 5.2322836e-02 4.5432097e-02 -1.4444738e-01 -2.6616559e-01 -1.1364752e-02 -2.3325236e-01 -2.1806150e-01 3.7655530e-02 4.2968645e-02 2.8246455e-02 1.2336727e-02 3.6578056e-01 3.9982191e-01 -1.5860756e-02 -3.6623580e-01 1.3252177e-01 7.4149522e-02 4.5368895e-03 5.9796221e-02 -9.0837300e-03 5.1599330e-03 -1.2430540e-01 1.1836750e-02 -3.3381041e-02 1.7781775e-01 -2.0514881e-02 4.5956428e-01 -5.3684205e-02 1.0569880e-02 3.0487112e-01 4.2612903e-01 9.2980937e-02 -9.0090545e-02 -1.4150061e-01 4.4689475e-03 -3.8209815e-02 4.0565548e-01 1.3424374e-02 2.6669731e-01 -2.7543892e-01 3.5389203e-02 -3.5692343e-01 4.5464754e-02 1.0179860e-02 1.3542266e-01 -6.2347154e-01 -3.7247652e-02 -2.1457354e-01 -2.8200864e-01 3.6952679e-02 -1.8923932e-01 7.0036792e-02 1.0459714e-03 1.1587881e-02 1.5181213e-01 -4.0660530e-02 5.4193709e-01 -1.3476842e-01 1.7515389e-02 2.2090011e-01 -3.4951087e-01 
[continuation of a numeric test-data file from earlier in the archive: rows of space-separated floating-point values in scientific notation; raw data omitted]
mlpack-2.2.5/src/mlpack/tests/data/erdosrenyi-n100.csv
[edge list of an Erdős–Rényi random graph on 100 nodes (node indices 0–99), one "source, target" pair per record; raw data omitted]
mlpack-2.2.5/src/mlpack/tests/data/fake.csv
[synthetic test data: records of three comma-separated floating-point values; raw data omitted]
mlpack-2.2.5/src/mlpack/tests/data/german.csv
[test dataset, likely the German credit data: records of 24 comma-separated integer attributes; raw data omitted, file continues beyond this section]
4,6,4,4,2,3,2,4,3,52,3,2,1,1,1,1,0,1,0,0,1,0,1,0 1,20,2,22,5,4,3,4,3,39,3,1,1,2,1,0,0,1,0,0,1,0,0,1 2,18,2,130,1,1,2,4,4,38,3,1,1,2,1,0,1,1,0,0,0,0,0,0 4,22,2,13,5,4,2,4,2,25,3,1,1,1,1,1,0,1,0,1,0,0,0,1 3,12,2,13,1,2,3,1,1,26,3,1,1,1,1,1,0,1,0,0,1,0,0,1 4,30,3,43,2,3,3,2,2,26,3,2,1,1,1,0,0,1,0,0,1,0,1,0 4,18,4,22,1,3,2,1,3,25,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,18,2,11,5,2,2,2,1,21,3,1,1,2,1,0,0,1,0,1,0,0,0,1 2,18,4,74,1,1,3,4,2,40,2,2,1,2,1,0,0,1,0,0,1,0,0,0 2,15,4,23,3,3,3,4,3,27,1,1,1,1,1,0,0,1,0,0,1,0,0,1 4,9,2,14,1,4,2,2,3,27,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,18,2,18,1,3,4,2,2,30,3,1,1,2,1,1,0,1,0,0,1,0,0,0 2,12,2,10,4,2,2,4,1,19,3,1,1,1,1,0,0,1,0,1,0,0,1,0 1,36,2,32,1,4,3,4,4,39,1,1,2,2,1,1,0,1,0,0,0,0,0,0 1,6,4,20,1,4,2,4,3,31,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,9,4,24,1,1,3,3,3,31,3,1,1,1,1,0,0,1,0,0,1,0,0,0 2,39,3,118,2,4,3,3,4,32,3,1,1,2,1,0,0,1,0,1,0,0,0,1 1,12,2,26,1,1,2,4,4,55,3,1,1,1,1,0,0,1,0,0,0,0,0,0 1,36,4,23,1,3,4,2,2,46,3,2,1,2,1,0,0,1,0,0,1,0,0,1 2,12,2,12,1,5,1,1,1,46,3,2,1,1,1,1,0,1,0,1,0,0,0,1 4,24,4,15,4,3,2,1,1,43,3,2,1,1,1,0,0,1,0,0,1,0,1,0 4,18,2,15,1,2,4,4,1,39,3,1,1,2,1,0,0,1,0,0,1,0,0,1 2,18,4,19,5,3,4,4,1,28,1,2,1,1,1,0,0,1,0,0,1,0,0,1 4,24,3,86,1,2,3,2,3,27,1,2,1,2,1,0,0,1,0,0,1,0,0,1 4,14,3,8,1,3,3,2,3,27,3,2,1,1,1,1,0,1,0,0,1,0,1,0 2,18,3,29,5,5,3,4,3,43,3,1,2,1,1,1,0,1,0,0,1,0,0,1 2,24,2,20,1,2,4,1,2,22,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,24,4,22,5,4,3,4,3,43,3,2,2,2,1,0,1,1,0,0,1,0,0,1 1,15,2,11,1,2,4,2,1,27,3,1,1,1,2,0,0,1,0,0,1,0,0,1 4,24,2,32,3,5,1,2,3,26,3,1,1,2,1,0,0,1,0,0,1,0,0,0 3,12,4,9,3,4,4,2,1,28,3,3,1,2,1,1,0,1,0,0,1,0,0,1 2,24,2,20,1,5,2,4,3,20,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,33,4,73,1,4,3,2,3,35,3,2,1,2,1,0,1,1,0,0,1,0,0,0 4,12,4,23,1,1,3,2,3,42,2,2,1,2,1,0,0,1,0,0,1,0,0,0 4,10,2,16,3,3,3,2,4,40,3,1,2,1,2,1,0,1,0,1,0,0,1,0 1,24,2,14,5,3,2,2,2,35,3,1,1,1,1,1,0,1,0,0,1,0,0,1 4,36,4,58,1,5,3,2,2,35,3,2,2,2,1,0,1,1,0,0,1,0,0,1 1,12,2,26,1,2,3,1,1,33,3,1,2,1,1,1,0,1,0,0,1,0,1,0 1,18,3,85,5,3,2,2,3,23,3,2,1,2,1,0,0,1,0,1,0,0,0,1 4,21,2,28,3,4,2,2,3,31,1,1,1,1,1,1,0,1,0,0,1,0,0,0 2,18,2,10,5,3,2,2,2,33,3,1,1,1,1,1,0,1,0,0,1,0,0,1 4,15,2,32,4,4,2,3,3,20,3,1,1,1,1,1,0,1,0,1,0,0,0,1 2,12,2,20,5,3,3,2,3,30,3,1,1,1,1,0,1,1,0,0,1,0,0,1 2,12,4,10,1,4,3,3,1,47,3,2,2,1,1,1,0,1,0,0,1,0,1,0 4,21,3,16,2,4,3,3,1,34,3,2,1,1,1,0,0,1,0,0,1,0,0,0 2,12,2,28,5,5,2,2,2,25,1,1,1,2,1,0,0,1,0,0,1,0,0,1 2,18,2,28,1,3,4,3,3,21,3,1,1,2,1,0,1,1,0,1,0,0,0,1 4,28,4,27,1,5,3,2,3,29,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,18,4,11,4,3,3,3,1,46,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,9,2,13,1,5,3,4,3,20,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,18,4,12,1,1,2,4,4,55,3,3,2,1,1,0,0,1,0,0,0,1,0,0 4,5,2,34,1,4,3,4,1,74,3,1,1,1,1,0,0,1,0,0,1,0,1,0 2,24,2,113,1,3,3,3,3,29,1,2,1,2,1,0,0,0,1,0,1,0,0,0 1,6,4,19,1,1,3,4,4,36,3,3,1,2,1,0,0,1,0,0,0,0,0,0 4,24,4,21,1,3,1,2,1,33,3,2,1,2,1,0,0,1,0,0,1,0,0,1 1,9,2,21,1,3,3,2,1,25,3,1,1,1,1,0,0,1,0,0,1,0,0,1 2,12,2,15,5,3,4,1,1,25,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,6,2,7,3,4,4,4,1,23,3,1,1,1,1,0,0,1,0,1,0,0,1,0 4,24,4,13,4,5,2,4,1,37,3,2,1,2,1,1,0,1,0,0,1,0,0,1 1,42,4,34,1,1,3,4,3,65,3,2,1,1,1,0,0,0,1,0,1,1,0,0 3,12,1,6,1,2,2,1,1,26,3,1,1,1,1,0,0,1,0,0,1,1,0,0 4,12,2,19,1,5,3,4,3,39,3,1,1,2,1,1,0,1,0,0,1,0,0,0 1,12,2,16,1,3,2,3,2,30,3,1,1,1,1,0,0,0,1,0,1,0,0,1 2,20,3,26,1,3,3,3,3,29,1,2,1,2,1,0,0,1,0,0,1,0,0,1 4,12,2,7,1,5,3,4,3,41,1,1,2,1,1,0,0,1,0,0,1,0,1,0 2,48,4,51,1,3,2,3,3,30,3,1,1,2,1,0,0,1,0,0,1,0,0,0 4,9,4,12,5,5,2,4,2,41,3,2,1,1,1,0,0,1,0,1,0,0,1,0 1,36,2,18,1,2,2,4,3,34,3,1,1,2,1,1,0,1,0,0,1,0,0,1 2,7,2,26,1,3,3,2,1,35,3,1,1,1,1,0,0,0,0,0,1,0,0,1 
3,12,2,14,5,5,2,4,1,55,3,1,1,2,1,0,0,1,0,0,1,0,0,0 2,15,3,15,4,3,4,3,2,61,2,2,1,1,1,0,0,1,0,0,1,0,0,1 4,36,4,111,5,3,3,2,3,30,3,1,1,2,1,0,1,1,0,0,1,0,0,0 4,6,2,5,1,3,2,1,1,29,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,12,0,28,1,5,3,4,2,34,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,24,2,27,1,5,3,4,3,35,3,1,1,2,1,0,1,1,0,0,1,0,0,0 1,24,2,48,1,4,3,3,2,31,3,1,1,2,1,1,0,0,1,0,1,0,0,1 4,24,2,27,1,2,2,1,4,29,3,1,1,2,1,0,1,1,0,0,1,0,0,0 1,11,4,39,1,3,3,2,1,36,3,2,2,1,1,1,0,1,0,1,0,0,0,1 1,12,2,34,1,5,3,4,4,35,3,1,1,2,1,0,1,1,0,0,0,0,0,1 1,6,2,3,1,2,2,1,1,27,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,18,2,46,1,2,3,2,3,32,3,1,1,2,1,0,0,1,0,0,1,0,0,1 1,36,2,36,1,3,3,2,2,37,3,1,2,1,1,0,0,0,0,0,1,0,0,1 1,15,2,17,1,2,3,3,1,36,3,1,1,1,1,1,0,1,0,0,1,0,0,1 2,12,2,30,1,2,2,1,1,34,3,1,1,1,1,0,0,1,0,1,0,0,0,0 2,12,2,8,5,5,3,4,2,38,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,18,2,20,1,4,3,1,3,34,2,2,1,2,1,0,0,1,0,0,1,0,0,1 1,24,2,29,1,3,3,4,4,63,1,1,2,2,1,0,1,0,0,0,1,0,0,1 1,24,3,17,1,2,2,2,3,29,3,1,1,2,1,0,0,1,0,1,0,0,1,0 4,48,3,72,5,5,3,3,3,32,1,2,2,1,1,0,0,1,0,0,1,0,0,1 4,33,3,28,1,3,2,2,3,26,3,2,1,2,1,0,0,1,0,0,1,0,0,1 4,24,3,47,1,4,3,3,3,35,3,2,1,2,1,0,1,1,0,0,1,0,1,0 2,24,2,31,2,2,4,2,3,22,3,1,1,2,1,0,0,1,0,1,0,0,0,1 1,6,2,4,1,2,2,4,2,23,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,9,2,7,1,3,3,3,3,28,3,1,1,1,1,1,0,1,0,0,1,0,1,0 4,6,2,12,5,1,3,4,2,36,3,1,2,2,1,0,0,1,0,0,1,0,0,0 2,18,4,12,1,3,4,2,3,33,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,18,0,31,1,2,2,4,2,26,3,1,1,1,1,0,0,1,0,1,0,0,0,1 4,39,2,26,3,3,3,4,3,24,3,1,1,1,1,0,1,1,0,0,1,0,0,1 3,24,2,52,1,4,3,2,3,25,1,1,1,1,1,0,0,1,0,0,1,0,0,1 2,12,2,10,2,4,3,4,1,39,3,1,1,1,1,0,0,1,0,0,1,0,1,0 1,15,4,15,1,5,3,4,3,44,3,2,2,2,1,0,0,1,0,0,1,0,0,1 2,12,4,36,1,3,2,1,1,23,3,1,1,1,1,0,0,1,0,0,1,0,1,0 2,24,2,12,1,2,3,1,2,26,3,1,1,1,1,1,0,1,0,0,1,0,0,1 1,30,2,36,4,5,2,4,2,57,3,2,1,2,1,0,0,1,0,1,0,0,0,1 4,15,3,10,4,4,2,2,2,30,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,12,4,12,3,3,3,4,1,44,3,1,1,2,1,1,0,1,0,0,1,0,0,1 2,6,3,12,1,1,3,4,2,47,3,1,1,2,1,1,0,1,0,0,1,0,0,0 4,12,2,31,1,3,3,4,3,52,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,24,2,38,1,5,2,4,4,62,3,1,1,2,1,1,0,0,1,0,0,0,0,1 4,10,2,14,2,3,3,2,1,35,3,1,1,1,2,1,0,1,0,1,0,0,1,0 4,6,2,35,1,3,3,3,2,26,3,1,1,1,1,1,0,0,0,1,0,0,0,1 4,12,4,19,1,5,3,2,4,26,3,2,1,1,1,0,0,1,0,0,1,0,0,1 2,27,0,83,1,5,2,4,4,42,3,2,1,2,1,0,0,1,0,0,0,0,0,0 4,6,4,12,2,3,2,1,2,27,3,2,1,1,1,0,0,1,0,0,1,0,0,1 2,6,2,4,5,5,3,4,2,38,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,12,4,21,1,3,3,2,1,39,3,2,2,1,2,1,0,1,0,1,0,0,1,0 1,24,2,30,5,3,4,4,3,20,3,1,1,1,1,0,0,1,0,0,1,0,0,1 2,36,2,90,2,2,3,1,4,29,3,1,1,2,1,0,0,0,1,1,0,0,0,0 4,24,4,16,1,4,3,3,2,40,3,2,1,1,1,0,0,1,0,0,1,0,0,1 2,18,2,13,1,5,4,2,1,32,3,1,1,1,1,0,0,0,0,0,1,0,1,0 3,6,4,13,2,5,1,4,3,28,3,2,2,2,1,1,0,1,0,0,1,0,0,1 1,24,2,31,1,2,2,1,2,27,3,1,1,1,1,1,0,1,0,0,1,0,0,1 1,36,2,55,1,5,3,4,4,42,3,1,2,1,1,0,1,1,0,0,0,0,0,1 3,9,2,11,2,5,1,4,1,49,3,1,1,1,1,0,0,1,0,0,1,0,0,1 2,24,4,12,2,2,3,4,4,38,1,2,2,1,1,0,0,1,0,0,1,0,0,1 1,24,2,12,1,2,2,4,2,24,3,1,1,1,1,1,0,1,0,1,0,0,0,1 4,10,2,13,5,3,3,4,2,27,3,1,1,1,1,1,0,0,0,0,1,0,1,0 3,15,4,24,3,3,3,2,3,36,3,1,1,2,1,0,1,1,0,0,1,0,0,1 2,15,1,68,2,1,3,2,2,34,3,1,2,2,1,1,0,1,0,0,1,0,0,0 4,24,2,14,1,3,4,2,2,28,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,39,2,86,2,5,3,2,3,45,3,1,1,2,1,0,1,1,0,0,1,0,0,0 1,12,2,8,1,4,3,2,1,26,3,1,1,1,1,1,0,1,0,0,1,0,0,1 4,36,2,47,1,3,3,2,4,32,3,1,1,2,1,0,1,1,0,0,0,0,0,0 3,15,2,27,1,4,3,4,2,26,3,1,1,2,1,0,0,1,0,1,0,0,0,1 2,12,3,6,1,3,4,4,1,20,3,2,1,1,1,0,0,0,1,1,0,0,0,1 4,24,2,23,5,2,3,1,2,54,3,1,1,1,1,1,0,1,0,0,1,0,0,1 1,6,4,6,1,4,2,3,2,37,3,2,1,1,2,1,0,1,0,0,1,0,0,1 1,6,4,14,1,2,3,4,1,40,3,1,2,1,2,1,0,1,0,0,1,0,1,0 4,36,4,71,1,2,2,4,2,23,3,2,1,2,1,0,0,1,0,1,0,0,0,1 
1,6,2,12,2,5,3,2,2,43,3,1,1,2,1,1,0,1,0,0,1,0,0,1 4,6,4,7,5,5,3,4,4,36,3,2,1,1,1,0,0,1,0,0,0,0,0,1 4,24,4,55,1,5,3,4,4,44,3,2,1,1,1,0,0,1,0,0,0,0,0,1 1,18,2,32,1,3,2,2,1,24,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,48,0,71,1,3,3,4,4,53,3,2,2,1,1,0,0,1,0,0,0,0,0,1 4,24,2,35,2,4,2,4,3,23,3,1,1,1,1,0,1,1,0,0,1,0,0,1 2,18,2,11,1,3,2,4,1,26,3,1,2,1,1,0,0,0,0,0,1,0,1,0 2,26,2,80,1,2,3,3,3,30,3,2,1,1,1,0,1,1,0,0,1,0,0,1 4,15,4,15,2,3,2,3,3,31,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,4,4,15,1,4,3,1,1,42,3,2,2,1,1,0,0,1,0,0,1,0,1,0 1,36,2,23,1,3,1,4,3,31,3,1,1,1,1,0,0,1,0,1,0,0,0,1 1,6,2,7,1,2,3,4,1,41,3,1,2,2,1,1,0,1,0,0,1,0,1,0 2,36,2,23,1,4,3,1,3,32,3,2,2,1,1,0,0,1,0,0,1,0,0,1 2,15,2,26,2,3,2,4,3,28,3,2,1,2,1,1,0,1,0,1,0,0,0,1 4,12,3,15,1,3,4,4,1,41,3,1,1,1,1,0,1,1,0,1,0,0,0,1 4,24,2,13,2,4,4,3,2,26,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,24,2,31,5,2,3,2,3,25,3,2,1,1,1,0,0,1,0,0,1,0,0,1 3,21,4,23,1,2,1,1,3,33,3,1,1,1,1,0,0,1,0,1,0,0,0,1 1,6,2,14,5,1,2,3,2,75,3,1,1,2,1,1,0,1,0,0,1,0,0,0 2,18,4,36,1,5,2,4,2,37,3,1,1,2,1,0,0,1,0,0,1,0,0,1 1,48,2,78,1,5,3,4,4,42,1,1,1,1,1,1,0,1,0,0,0,0,0,0 3,18,2,30,1,2,2,1,2,45,2,1,1,1,1,0,0,1,0,0,1,0,1,0 2,12,2,15,1,2,4,1,1,23,3,1,1,1,1,0,0,1,0,1,0,0,0,1 4,24,3,20,1,5,3,4,4,60,3,2,1,2,1,1,0,1,0,0,0,0,0,1 1,30,2,64,5,5,3,4,2,31,3,1,1,1,1,0,0,1,0,0,1,0,0,1 3,18,2,29,1,3,3,1,1,34,3,1,2,1,1,0,0,1,0,0,1,0,1,0 4,12,4,13,1,5,3,4,1,61,3,2,1,1,1,1,0,1,0,0,1,0,1,0 1,24,3,13,1,1,3,2,1,43,3,2,2,1,1,1,0,1,0,0,0,0,0,1 4,24,4,20,1,3,2,4,3,37,3,1,1,2,1,1,0,1,0,0,1,0,0,1 4,24,2,16,1,4,3,1,3,32,1,1,2,1,1,0,0,1,0,0,1,0,0,1 1,12,1,6,1,3,2,4,1,24,1,1,1,1,1,0,0,1,0,0,1,0,1,0 4,48,4,89,5,4,3,1,4,35,3,2,1,2,1,0,1,1,0,0,0,0,0,1 4,12,4,10,5,4,2,4,1,23,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,6,1,18,3,5,3,4,2,45,1,1,2,1,1,0,0,1,0,0,1,0,1,0 1,48,2,70,1,4,4,1,1,34,3,2,1,2,1,0,0,0,0,0,1,0,0,1 2,12,4,20,2,2,3,1,3,27,3,1,1,1,1,1,0,1,0,0,1,0,0,1 2,9,2,12,1,4,2,4,2,67,3,2,1,2,1,0,0,1,0,0,1,0,0,0 2,12,2,13,1,2,3,1,3,22,2,1,1,1,1,0,0,1,0,0,1,0,0,1 2,18,0,23,2,2,2,3,3,28,3,2,1,1,1,1,0,1,0,0,1,0,0,1 4,21,0,50,5,3,2,4,2,29,1,2,1,2,1,1,0,1,0,0,1,0,0,1 1,24,1,36,1,4,3,4,3,27,1,1,1,1,1,0,0,1,0,0,1,0,0,1 2,18,4,19,1,2,3,2,1,31,3,2,1,1,1,0,0,1,0,0,1,0,1,0 1,24,2,30,5,5,3,4,4,49,1,1,2,2,1,0,1,1,0,0,0,0,0,1 1,24,1,15,1,4,3,4,3,24,1,1,1,1,1,0,0,0,0,1,0,0,1,0 3,6,3,7,1,2,2,1,2,29,1,1,1,1,1,0,0,1,0,0,1,0,0,1 2,36,2,124,5,3,3,4,4,37,3,1,1,2,1,1,0,1,0,0,0,0,0,1 2,24,3,47,5,3,3,2,2,37,1,2,1,2,1,0,0,1,0,0,1,0,0,0 2,24,3,16,2,4,2,2,2,23,3,2,1,2,1,0,0,1,0,1,0,0,0,1 1,12,2,14,1,4,1,3,3,36,3,1,1,1,1,1,0,1,0,0,1,0,0,1 4,24,4,26,4,5,3,2,3,34,3,1,1,1,1,0,0,1,0,0,1,0,0,1 2,48,2,40,5,4,3,1,3,41,3,2,2,2,1,0,0,1,0,0,1,0,0,1 1,48,2,68,1,3,2,2,3,31,3,1,1,2,1,0,0,1,0,0,1,0,0,1 1,24,2,32,1,2,2,4,1,23,3,1,1,2,1,0,0,1,0,1,0,0,1,0 4,30,4,60,1,4,3,2,3,38,3,1,1,1,1,0,0,0,1,0,1,0,0,1 4,24,2,54,5,1,2,4,2,26,3,1,1,2,1,0,1,1,0,1,0,0,0,0 1,15,2,8,1,3,2,4,2,22,3,1,1,1,1,0,0,1,0,0,1,0,1,0 2,9,2,11,1,5,3,4,3,27,3,2,1,1,1,0,0,1,0,0,1,0,1,0 4,15,4,28,1,4,2,3,3,24,1,2,1,1,1,0,0,0,1,0,1,0,0,1 2,12,2,29,1,4,2,1,1,27,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,24,4,19,5,3,2,2,3,33,3,2,1,2,1,0,0,1,0,0,1,0,0,1 2,36,4,28,1,2,1,4,3,27,3,2,1,1,1,1,0,1,0,0,1,0,0,1 4,24,2,9,1,2,4,3,3,27,3,2,1,1,1,0,0,1,0,0,1,0,1,0 2,18,4,11,1,5,3,3,1,30,1,2,1,1,1,1,0,0,0,0,1,0,0,1 2,12,4,31,1,2,3,3,1,49,1,2,2,1,1,1,0,1,0,0,1,0,1,0 4,9,2,14,1,3,2,2,1,26,3,1,1,1,1,0,0,1,0,1,0,0,0,1 2,36,2,24,1,2,3,1,4,33,3,1,1,1,1,0,0,1,0,1,0,0,1,0 4,12,2,21,5,5,2,4,4,52,3,1,1,2,1,1,0,1,0,0,0,0,0,0 1,18,2,20,1,3,2,4,1,20,1,1,1,1,1,0,0,1,0,1,0,0,0,1 1,9,4,28,1,3,3,2,1,36,3,2,2,1,1,1,0,1,0,1,0,0,0,1 1,12,2,13,1,3,3,1,2,21,3,1,1,1,1,0,0,0,0,0,1,0,1,0 
1,18,2,12,1,3,4,3,1,47,3,1,1,2,1,0,0,1,0,0,1,0,1,0 1,12,4,22,1,5,3,3,2,60,3,2,1,1,1,0,0,1,0,0,1,0,0,1 1,12,4,4,1,4,2,3,1,58,3,4,1,2,1,0,0,1,0,0,1,0,1,0 2,24,3,20,5,3,2,4,3,42,3,2,1,2,1,1,0,1,0,1,0,0,0,1 4,21,2,16,4,5,2,4,1,36,1,1,1,1,1,0,0,1,0,0,1,0,1,0 2,24,2,27,1,3,2,4,2,20,3,1,1,2,1,1,0,1,0,1,0,0,1,0 1,24,1,14,5,5,3,3,3,40,2,1,1,2,1,0,0,1,0,0,1,0,0,0 2,6,1,9,2,2,2,1,2,32,2,1,1,1,1,1,0,1,0,0,1,0,1,0 1,24,2,14,1,4,2,4,3,23,3,2,1,1,1,1,0,1,0,1,0,0,0,1 2,24,0,42,1,3,3,4,1,36,3,3,1,2,1,0,0,1,0,0,1,0,1,0 4,18,4,28,1,4,3,2,2,31,1,2,1,1,1,1,0,1,0,0,1,0,0,1 4,24,3,39,1,3,3,2,4,32,3,1,1,1,1,0,0,1,0,0,0,0,0,1 2,7,2,23,1,2,2,1,1,45,3,1,1,1,1,0,0,0,0,0,1,0,0,1 2,9,2,9,1,3,2,1,2,30,3,1,1,1,1,0,0,1,0,0,1,0,0,1 2,24,1,18,1,4,2,4,4,34,1,1,1,1,1,0,0,1,0,0,0,0,1,0 4,36,2,33,1,3,2,2,3,28,3,1,1,2,1,0,0,1,0,0,1,0,0,0 3,10,2,13,1,2,2,2,2,23,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,24,1,28,3,3,3,4,1,22,2,1,1,2,1,0,0,1,0,0,1,0,0,1 4,24,4,45,1,3,3,2,1,74,3,1,1,2,1,0,0,1,0,0,1,0,0,0 2,36,2,27,2,3,2,4,4,50,3,1,1,1,1,0,0,0,1,0,0,0,0,1 4,18,2,21,1,2,3,1,1,33,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,15,2,13,5,5,3,4,4,45,1,1,2,1,1,0,1,1,0,0,0,0,0,1 1,12,2,7,2,1,2,3,2,22,3,1,1,1,1,0,0,1,0,0,1,0,0,1 3,10,2,12,2,5,2,4,4,48,3,1,2,1,1,1,0,1,0,0,0,0,1,0 1,21,2,34,4,2,2,2,3,29,1,1,1,1,1,0,0,1,0,0,1,0,0,1 1,24,1,36,1,3,2,4,3,22,1,1,1,1,2,0,1,0,0,1,0,0,0,1 4,18,3,18,1,4,2,1,1,22,3,1,1,1,1,0,0,1,0,0,1,0,0,1 2,48,0,122,5,3,3,2,3,48,1,1,1,2,1,0,0,1,0,0,1,0,0,0 2,60,3,92,5,3,3,2,4,27,3,1,1,1,1,0,0,1,0,0,0,0,0,0 1,6,4,37,1,3,3,3,1,37,3,3,2,1,1,1,0,1,0,1,0,0,0,1 2,30,2,34,2,3,2,4,3,21,3,1,1,1,1,0,0,0,1,1,0,0,0,1 4,12,2,6,1,3,1,2,1,49,3,1,1,1,1,1,0,1,0,0,1,0,1,0 2,21,4,37,1,4,3,3,2,27,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,18,4,15,1,3,3,2,2,32,1,2,1,1,1,1,0,1,0,0,1,0,0,1 4,48,2,39,5,3,1,2,1,38,1,1,1,1,1,0,0,1,0,0,1,0,0,1 1,12,2,19,1,2,2,1,3,22,3,1,1,1,1,0,0,1,0,1,0,0,0,1 1,18,2,26,1,3,3,4,4,65,3,2,1,1,1,0,0,1,0,0,0,0,0,1 4,15,2,20,5,5,3,2,3,35,3,1,1,1,1,0,0,1,0,0,1,0,0,1 3,6,2,21,1,3,3,2,1,41,3,1,1,2,1,0,0,1,0,0,1,0,0,1 2,9,1,14,2,4,3,3,4,29,3,1,1,1,1,1,0,1,0,0,1,0,0,1 4,42,4,40,3,3,3,4,1,36,3,2,1,2,1,0,0,1,0,0,1,0,0,1 4,9,2,38,5,5,3,4,1,64,3,1,1,1,1,0,0,1,0,0,1,0,1,0 1,24,2,37,1,3,2,4,3,28,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,18,1,16,1,3,3,3,3,44,1,1,1,1,1,0,0,1,0,0,1,0,0,1 2,15,2,14,5,2,3,1,2,23,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,9,2,20,1,2,2,2,3,19,3,2,1,1,1,0,0,0,1,1,0,0,0,1 2,24,2,14,1,2,2,4,3,25,3,1,1,2,1,1,0,1,0,0,1,0,1,0 4,12,2,14,1,5,3,4,2,47,1,3,2,2,1,0,0,1,0,0,1,0,0,1 4,24,2,14,3,4,2,1,3,28,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,60,3,157,1,4,3,4,3,21,3,2,1,2,1,0,0,1,0,0,1,0,0,1 4,12,2,15,1,2,2,3,3,34,3,1,2,1,1,0,0,1,0,0,1,0,0,1 1,42,3,44,1,4,3,2,2,26,1,2,2,2,1,0,0,1,0,0,1,0,0,1 1,18,2,8,1,1,2,1,1,27,3,1,1,1,1,0,0,1,0,0,1,1,0,0 2,15,2,13,1,5,3,4,3,38,3,2,1,1,1,0,0,1,0,0,1,0,1,0 4,15,2,46,2,3,3,2,2,40,3,1,1,2,1,0,0,1,0,0,1,0,0,0 4,24,4,19,1,4,4,2,3,33,3,2,1,2,1,0,0,0,0,0,1,0,0,1 1,18,4,19,1,4,4,1,2,32,3,2,1,2,1,0,0,1,0,0,1,0,0,0 4,36,3,80,5,2,3,4,3,27,3,2,1,2,1,0,0,1,0,1,0,0,0,1 1,30,0,46,1,3,1,2,1,32,3,2,1,1,1,0,0,0,0,0,1,0,0,1 4,12,2,14,3,3,2,2,2,26,3,1,1,1,1,1,0,1,0,0,1,0,0,1 3,24,2,9,1,4,3,3,4,38,1,1,2,1,1,1,0,1,0,0,0,0,0,1 1,12,2,7,1,3,3,4,3,40,3,1,2,1,1,0,0,1,0,1,0,0,1,0 1,48,2,75,1,4,3,1,4,50,3,1,1,2,1,0,0,1,0,0,0,0,0,0 2,12,2,19,1,3,3,2,2,37,3,1,1,1,1,0,0,1,0,0,1,0,1,0 1,24,2,23,1,5,3,1,1,45,3,1,1,1,1,1,0,0,1,0,1,0,0,1 2,36,3,81,2,5,3,4,3,42,3,4,1,2,1,1,0,1,0,0,1,0,0,0 4,24,4,23,1,4,3,3,3,35,3,2,1,2,1,0,1,1,0,0,1,0,0,1 1,14,2,40,1,1,3,4,4,22,3,1,1,1,1,1,0,1,0,0,0,0,0,1 2,12,2,9,1,5,3,4,3,41,1,1,2,1,1,1,0,1,0,0,1,0,1,0 
4,48,2,102,5,4,3,3,3,37,2,1,1,2,1,0,0,1,0,0,1,0,0,1 2,30,0,42,1,3,2,1,3,28,3,2,1,1,1,0,0,1,0,0,1,0,0,1 2,18,4,64,1,5,3,1,4,41,3,1,1,2,1,0,0,1,0,0,1,0,0,1 3,12,2,13,1,3,4,4,1,23,3,1,1,1,1,0,0,1,0,1,0,0,0,1 1,12,2,9,5,3,4,2,3,23,3,1,1,1,1,1,0,1,0,0,1,0,0,1 4,21,2,22,1,5,3,2,1,50,3,2,1,1,1,0,0,1,0,0,1,0,0,1 2,6,3,10,1,1,3,1,2,35,2,2,1,2,1,0,0,1,0,0,1,0,0,0 3,6,4,10,1,3,2,4,2,50,3,1,1,1,1,0,0,1,0,0,1,0,1,0 4,24,4,63,1,1,3,2,4,27,1,2,1,2,1,0,0,0,1,0,1,0,0,0 2,30,1,35,4,3,3,2,3,34,2,1,2,2,1,0,0,1,0,0,1,0,0,1 4,48,1,36,1,3,2,1,1,27,2,1,1,1,1,0,0,1,0,0,1,0,0,1 1,12,4,48,1,5,3,4,2,43,3,2,1,2,1,1,0,0,1,1,0,0,0,1 3,30,4,30,1,5,3,4,2,47,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,24,4,41,2,3,3,3,2,27,3,2,1,2,1,0,0,1,0,0,1,0,1,0 4,36,2,57,2,4,3,2,3,31,3,2,1,2,1,0,0,1,0,0,1,0,0,1 4,60,2,104,1,5,3,4,2,42,3,1,1,2,1,1,0,1,0,0,1,0,0,0 4,6,4,21,3,3,4,2,3,24,3,1,1,1,1,1,0,1,0,0,1,0,0,1 4,21,3,26,3,2,3,2,1,41,1,1,2,1,1,0,0,1,0,0,1,0,1,0 4,30,4,45,1,4,2,4,3,26,3,1,1,2,1,0,0,1,0,1,0,0,0,0 4,24,4,52,1,5,3,4,3,33,3,1,1,2,1,0,0,1,0,0,1,0,0,1 2,72,2,56,2,3,4,2,3,24,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,24,2,24,1,5,3,4,1,64,1,1,1,1,1,0,0,1,0,1,0,0,1,0 4,18,2,15,1,2,2,1,1,26,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,6,2,15,1,2,2,2,4,56,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,12,2,23,5,3,3,4,4,37,3,1,1,2,1,0,0,1,0,0,0,0,0,1 4,15,3,15,1,3,4,3,1,33,1,2,1,1,1,0,0,1,0,0,1,0,0,1 4,24,4,51,1,2,4,3,4,47,3,3,1,2,1,0,0,1,0,0,0,0,0,1 2,36,3,99,2,4,3,3,2,31,3,2,2,2,1,0,0,1,0,0,1,0,1,0 4,60,2,65,5,3,3,4,4,34,3,1,2,2,1,1,0,1,0,0,0,0,0,1 3,10,4,13,5,4,3,2,2,27,3,2,1,2,1,0,0,1,0,0,1,0,0,1 2,36,3,29,2,5,3,3,4,30,3,1,1,1,1,1,0,1,0,0,0,0,0,1 4,9,2,28,2,5,3,4,3,35,3,1,1,2,1,0,0,0,1,0,1,0,0,1 1,12,2,37,4,3,3,3,2,31,3,1,2,1,1,1,0,1,0,0,1,0,0,1 1,15,4,10,1,3,1,3,2,25,3,2,1,1,1,0,0,1,0,0,1,0,0,1 2,15,2,26,2,3,2,2,1,25,3,1,1,1,1,0,0,1,0,0,1,0,1,0 2,24,2,29,2,2,3,1,3,29,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,6,4,47,5,2,3,3,1,44,3,2,2,1,1,1,0,1,0,0,1,0,1,0 4,24,2,23,1,4,3,2,3,28,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,6,2,12,3,3,3,4,2,50,3,1,1,1,1,0,1,1,0,1,0,0,0,1 2,12,2,11,1,4,3,3,1,29,3,2,1,1,2,0,0,0,0,0,1,0,0,1 4,12,4,9,1,1,2,2,2,38,3,1,1,1,1,1,0,1,0,0,1,1,0,0 4,18,4,18,1,3,3,2,3,24,3,2,1,1,1,0,0,1,0,0,1,0,0,1 3,15,2,19,1,5,3,4,3,40,3,1,1,2,1,0,0,1,0,1,0,0,0,0 4,12,2,11,3,3,2,4,3,29,3,1,1,1,1,0,0,1,0,1,0,0,1,0 1,48,4,63,1,5,3,4,4,46,3,2,1,2,1,0,1,1,0,0,0,0,0,1 3,24,2,14,2,5,2,2,4,47,3,1,1,2,1,0,0,1,0,0,0,0,0,1 2,30,3,25,2,5,3,2,2,41,2,2,1,1,1,0,0,1,0,0,1,0,0,1 2,27,2,25,1,2,2,1,2,32,3,1,2,2,1,0,0,1,0,0,1,0,0,1 4,15,2,53,3,5,2,4,4,35,3,1,1,1,1,1,0,1,0,0,0,0,0,1 2,48,2,66,2,4,3,2,2,24,3,1,1,1,1,1,0,1,0,0,1,0,0,1 2,12,0,30,1,2,2,3,2,25,3,2,1,1,1,0,0,1,0,1,0,0,0,1 2,9,2,12,1,5,2,4,1,25,3,1,1,1,1,0,0,1,0,0,1,0,0,1 2,9,2,21,1,3,3,2,1,37,3,1,2,1,1,0,0,1,0,0,1,0,1,0 4,18,4,6,3,5,3,3,2,32,1,2,1,2,1,0,0,1,0,0,1,0,0,0 1,6,1,12,1,5,2,4,4,35,3,1,1,1,1,0,0,1,0,0,0,0,0,1 4,21,2,25,5,5,3,4,1,46,3,1,1,2,1,0,1,1,0,0,1,0,0,0 1,9,4,11,1,3,3,4,1,25,3,2,1,1,1,0,0,1,0,0,1,0,1,0 2,60,2,140,1,4,3,2,4,27,3,1,1,2,1,1,0,1,0,0,1,0,0,0 4,30,4,76,5,5,3,4,3,63,3,2,1,1,1,0,1,1,0,0,1,0,0,1 4,30,4,31,5,5,3,2,3,40,3,2,2,2,1,0,0,1,0,0,1,0,0,1 4,18,2,15,1,3,3,2,4,32,3,1,1,2,1,0,0,1,0,0,0,0,0,0 3,24,4,31,5,3,3,2,3,31,3,2,1,2,1,0,0,1,0,0,1,0,0,1 2,20,0,61,2,5,4,4,3,31,1,2,1,2,1,0,1,1,0,0,1,0,0,1 3,9,0,13,1,2,3,2,3,34,3,2,1,2,1,0,0,1,0,0,1,0,0,0 2,6,1,4,4,2,2,2,2,24,1,1,2,1,1,0,0,1,0,1,0,0,0,1 1,12,2,12,1,3,2,2,1,24,3,1,1,1,1,1,0,1,0,0,1,0,1,0 2,9,2,8,3,3,2,3,1,66,3,1,1,1,1,0,0,1,0,0,1,0,1,0 4,27,2,26,1,3,2,3,1,21,3,1,1,1,1,1,0,1,0,1,0,0,0,1 4,6,4,2,4,3,2,2,1,41,1,2,1,1,1,1,0,1,0,0,1,0,1,0 4,15,4,13,3,3,4,2,2,47,3,2,1,1,1,0,0,1,0,0,1,0,1,0 
1,18,2,19,1,3,2,4,3,25,1,2,1,1,1,0,0,1,0,1,0,0,0,1 2,48,1,64,1,5,2,3,4,59,3,1,1,1,1,0,0,1,0,1,0,0,0,1 3,24,4,13,4,3,1,4,1,36,3,2,1,2,1,0,0,1,0,0,1,0,0,1 2,24,3,64,1,2,3,2,3,33,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,24,2,20,1,3,3,4,1,21,3,1,2,1,1,0,0,1,0,1,0,0,1,0 2,8,2,8,1,4,2,2,1,44,3,1,1,1,1,0,0,0,0,0,1,0,1,0 4,24,2,26,4,3,2,4,3,28,3,1,1,2,1,0,1,1,0,1,0,0,0,1 4,4,4,34,1,4,2,1,1,37,3,1,2,1,1,1,0,1,0,0,1,0,0,1 2,36,1,40,5,2,2,2,4,29,1,1,1,1,1,0,0,1,0,0,1,1,0,0 2,24,2,116,1,3,2,4,3,23,3,2,1,1,1,0,1,1,0,1,0,0,0,0 1,18,2,44,2,3,3,4,3,35,3,1,2,2,1,1,0,1,0,0,1,0,1,0 4,6,4,68,1,4,3,3,4,45,3,2,2,2,1,1,0,1,0,0,1,0,0,0 2,30,0,43,2,3,2,4,3,26,3,2,1,1,1,0,0,1,0,1,0,0,1,0 1,24,1,23,2,4,3,3,3,32,1,1,1,1,1,1,0,1,0,0,1,0,0,1 2,10,1,10,1,3,3,4,1,23,2,1,1,1,1,0,0,1,0,0,1,0,1,0 4,21,2,32,5,5,3,3,2,41,3,1,1,2,1,0,0,1,0,0,1,0,0,1 1,24,1,25,3,3,3,4,1,22,2,1,1,2,1,0,0,1,0,0,1,0,0,1 1,39,4,142,5,4,3,4,2,30,3,2,1,2,1,0,0,1,0,0,1,0,0,0 1,13,4,18,1,2,3,1,2,28,1,2,1,1,1,0,0,1,0,0,1,0,1,0 1,15,2,25,1,1,2,4,3,23,3,1,1,1,1,1,0,1,0,1,0,0,0,1 1,12,2,13,1,2,2,1,1,37,3,1,1,1,1,1,0,1,0,0,1,0,1,0 4,21,2,52,5,3,3,3,3,26,3,1,1,1,1,0,1,1,0,0,1,0,0,1 4,15,2,30,1,4,3,2,3,33,3,1,1,1,1,0,1,1,0,0,1,0,0,1 1,6,2,4,1,5,2,1,2,49,1,1,1,2,1,0,0,1,0,0,1,0,0,1 1,18,2,10,1,2,2,2,3,23,3,1,1,1,1,1,0,1,0,0,1,0,1,0 2,12,2,8,2,4,2,4,1,23,3,1,1,1,1,0,0,1,0,1,0,0,1,0 4,30,4,58,1,4,2,2,3,25,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,12,3,16,4,5,3,4,4,55,3,2,2,1,1,0,0,1,0,0,0,0,0,1 1,24,2,13,5,4,2,4,4,32,3,1,1,1,1,1,0,1,0,1,0,0,0,1 3,6,4,13,1,3,3,1,1,74,3,3,2,1,2,1,0,1,0,0,1,1,0,0 3,15,4,13,5,3,3,4,4,39,3,2,1,2,1,0,0,1,0,0,0,0,0,1 4,24,2,14,1,3,3,2,1,31,3,1,1,2,1,1,0,0,0,0,1,0,0,1 1,12,4,7,1,5,3,3,2,35,3,2,1,1,1,1,0,1,0,0,1,0,0,1 4,15,4,50,5,5,2,4,3,59,3,1,1,2,1,1,0,1,0,0,1,0,0,1 1,18,4,21,1,3,2,4,1,24,3,2,1,1,1,0,0,1,0,1,0,0,0,1 1,12,2,22,1,3,3,3,2,24,3,1,1,1,1,0,0,1,0,0,1,0,1,0 4,21,4,127,5,5,3,4,4,30,3,1,1,2,1,1,0,1,0,0,0,0,0,0 4,24,4,25,2,4,4,3,2,27,3,2,1,2,1,1,0,1,0,0,1,0,0,1 2,12,2,12,1,5,4,3,1,40,1,2,1,1,1,0,0,0,0,0,1,0,1,0 1,30,2,31,1,2,1,4,2,31,3,1,1,1,1,0,0,1,0,0,1,0,1,0 4,10,2,29,5,2,2,4,1,31,3,1,1,1,1,0,1,1,0,1,0,0,0,1 2,12,4,36,1,5,3,4,3,28,3,3,1,2,1,0,0,1,0,1,0,0,0,1 4,12,4,17,1,5,3,4,1,63,3,2,1,2,1,0,0,1,0,0,1,0,1,0 1,24,2,28,5,5,2,4,1,26,3,1,1,1,1,0,1,1,0,1,0,0,0,1 1,36,4,81,1,3,2,2,4,25,3,2,1,2,1,0,0,1,0,0,1,0,0,0 4,21,4,33,1,5,3,4,3,36,3,1,1,2,1,0,1,1,0,0,1,0,0,0 4,24,4,22,2,5,3,4,2,52,1,2,1,1,1,0,0,1,0,0,1,0,0,1 3,12,4,15,3,1,3,4,4,66,1,3,1,1,1,1,0,1,0,0,0,1,0,0 1,24,2,14,5,3,2,4,1,25,3,1,1,1,1,1,0,1,0,1,0,0,0,1 4,36,4,35,1,4,3,4,3,37,3,2,1,2,1,1,0,1,0,0,1,0,0,1 1,18,2,35,1,4,2,1,1,25,3,1,1,1,1,0,0,0,0,0,1,0,0,1 4,36,4,57,4,5,3,2,3,38,3,2,1,2,1,0,1,1,0,0,1,0,0,0 2,18,2,39,1,1,2,4,3,67,3,1,1,2,1,0,0,1,0,0,1,0,0,1 2,39,4,49,1,4,3,2,1,25,3,2,1,1,1,0,0,0,0,0,1,0,0,1 4,24,4,19,4,5,3,4,1,60,3,1,1,2,1,1,0,1,0,0,1,0,0,1 2,12,0,14,1,3,3,2,1,31,3,1,1,2,1,0,0,1,0,0,1,0,1,0 2,12,2,8,2,2,2,2,2,23,1,1,1,1,1,1,0,1,0,0,1,0,1,0 2,20,2,65,5,1,1,4,1,60,3,1,1,2,1,0,1,1,0,0,1,0,0,0 2,18,2,19,4,3,3,2,2,35,3,1,1,2,1,0,0,1,0,0,1,0,1,0 4,22,2,27,3,5,3,4,3,40,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,48,4,28,5,5,3,3,3,38,3,2,2,2,1,0,1,1,0,0,1,0,0,1 2,48,3,62,1,5,3,4,4,50,3,1,1,1,1,0,0,1,0,0,0,0,0,1 1,40,4,60,1,3,3,3,4,27,1,1,1,2,1,0,0,1,0,0,1,0,0,1 2,21,2,12,1,5,2,4,2,39,3,1,2,1,1,0,0,1,0,0,1,0,0,1 4,24,2,63,5,5,3,4,3,41,3,1,2,2,1,0,1,1,0,0,1,0,0,0 4,6,4,12,5,3,4,2,2,27,3,2,1,1,1,0,0,1,0,0,1,0,0,1 3,24,2,29,1,5,1,4,4,51,3,1,1,1,1,0,0,1,0,0,0,0,0,1 4,24,2,31,3,5,3,3,4,32,3,1,1,2,1,0,0,1,0,1,0,0,0,1 4,9,2,23,2,2,2,4,2,22,3,1,1,1,1,0,0,1,0,1,0,0,0,1 
1,18,2,75,5,5,3,4,2,51,3,1,2,2,1,0,1,1,0,0,0,0,0,1 4,12,4,13,1,2,2,4,2,22,3,2,1,1,1,0,0,1,0,1,0,0,1,0 4,24,3,7,5,5,4,4,3,54,3,2,1,2,1,1,0,1,0,0,1,0,0,1 2,9,2,15,5,2,3,2,1,35,3,1,1,1,1,1,0,1,0,0,1,1,0,0 4,24,4,16,1,5,3,4,4,54,3,2,2,1,1,0,0,1,0,0,0,0,0,1 2,18,4,18,1,5,2,4,1,48,1,2,1,2,1,0,0,0,0,1,0,0,1,0 1,20,4,43,1,5,2,4,2,24,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,12,4,10,5,5,3,4,3,35,3,2,1,1,1,0,0,1,0,0,1,0,0,1 2,12,2,75,5,1,2,2,1,24,3,1,1,1,1,1,0,1,0,1,0,1,0,0 1,36,2,93,1,4,3,1,3,24,3,1,1,2,1,1,0,1,0,0,1,0,0,1 2,6,2,6,1,2,4,3,1,26,3,1,1,1,2,0,0,1,0,0,1,0,1,0 4,12,4,9,5,5,3,4,1,65,3,4,1,1,1,0,0,1,0,0,1,0,0,1 2,42,1,93,1,1,3,2,4,55,1,1,1,2,1,0,1,1,0,0,0,0,0,0 2,15,0,18,1,2,2,1,1,26,3,2,1,1,1,1,0,1,0,1,0,1,0,0 2,8,2,9,1,2,4,2,1,26,3,1,1,2,1,0,0,1,0,0,1,0,0,1 2,6,2,5,1,4,4,3,1,28,1,1,1,1,1,0,0,0,0,0,1,0,1,0 1,36,4,96,1,4,3,4,3,24,3,2,1,2,1,0,1,1,0,0,1,0,0,1 1,48,2,31,1,3,3,4,3,54,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,48,2,39,1,4,3,4,4,46,3,1,2,1,1,1,0,1,0,0,0,0,0,1 2,36,3,74,1,3,2,2,2,54,3,1,1,1,1,1,0,1,0,1,0,0,0,1 4,6,2,13,3,3,1,4,1,62,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,6,4,16,1,4,2,2,3,24,3,2,1,2,1,0,0,1,0,1,0,0,0,1 1,36,2,159,1,1,1,3,3,43,3,1,1,1,1,0,0,0,1,0,1,0,0,0 1,18,2,13,1,3,4,3,1,26,1,1,1,1,1,0,0,1,0,0,1,0,0,1 4,12,2,11,1,3,4,2,1,27,3,2,1,2,1,1,0,1,0,0,1,0,0,1 3,12,2,30,1,3,4,1,3,24,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,36,2,27,1,5,3,2,2,41,1,1,2,1,1,0,0,1,0,0,1,0,0,1 1,8,4,7,1,5,3,4,1,47,3,2,1,1,1,1,0,1,0,0,1,0,1,0 4,18,4,38,1,2,1,2,3,35,3,2,1,2,1,0,0,1,0,0,1,0,0,0 1,21,4,16,1,5,4,3,3,30,3,2,1,2,1,1,0,1,0,0,1,0,0,1 1,18,4,40,1,5,2,4,1,33,1,3,1,2,1,1,0,1,0,1,0,0,0,1 4,18,0,42,1,3,3,2,3,36,2,2,2,1,1,0,0,1,0,0,1,0,0,1 1,36,2,83,5,5,3,4,4,47,3,1,1,1,1,0,1,1,0,0,0,0,0,1 2,48,3,67,5,3,3,4,4,38,3,1,2,2,1,0,0,1,0,0,0,0,0,1 4,24,3,24,3,3,3,2,3,44,3,2,2,2,1,0,0,1,0,0,1,0,0,1 1,18,2,12,1,2,2,3,3,23,3,1,1,2,1,1,0,1,0,1,0,0,0,1 1,45,0,118,1,5,3,4,3,29,3,2,1,1,1,0,0,1,0,1,0,0,0,1 2,24,2,51,5,5,2,4,3,42,3,1,1,2,1,0,0,1,0,0,1,0,0,1 3,15,2,23,1,2,2,3,1,25,3,1,1,1,1,0,0,1,0,0,1,0,1,0 1,12,0,11,1,3,3,4,3,48,1,2,1,1,1,1,0,1,0,0,1,0,0,1 4,12,2,9,5,3,2,2,3,21,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,4,2,6,1,2,2,3,1,23,3,1,2,1,1,0,0,1,0,1,0,0,1,0 1,24,4,30,1,5,3,4,2,63,3,2,1,2,1,0,1,1,0,0,1,0,0,1 4,24,4,26,1,5,4,3,1,46,3,2,1,1,1,0,0,0,1,0,1,0,0,1 1,36,2,52,1,4,3,2,2,29,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,21,3,30,1,3,3,2,1,28,2,2,1,1,1,0,1,1,0,0,1,0,1,0 4,18,2,19,1,2,2,4,1,23,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,24,1,16,1,4,3,4,3,50,1,1,1,2,1,0,0,1,0,0,1,0,0,1 4,18,2,34,1,5,3,4,2,47,1,3,2,2,1,0,0,1,0,0,1,0,0,1 2,21,2,40,5,4,3,3,3,35,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,18,2,68,5,3,3,4,3,68,3,2,1,1,1,1,0,1,0,1,0,0,0,1 4,24,2,12,1,2,4,2,1,28,3,1,1,1,1,1,0,1,0,0,1,0,0,1 1,9,2,14,1,4,3,4,1,59,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,12,2,7,1,5,3,4,1,57,2,1,1,1,1,0,0,1,0,0,1,0,1,0 1,20,4,22,1,3,4,2,2,33,1,2,1,1,2,1,0,0,0,1,0,0,0,1 4,24,4,40,5,4,3,4,2,43,3,2,1,2,1,0,1,1,0,0,1,0,0,1 4,15,4,15,1,3,3,4,4,35,3,2,1,2,1,0,0,1,0,0,0,0,0,1 1,18,1,14,1,4,3,4,4,32,3,2,2,1,1,1,0,1,0,0,0,0,1,0 4,36,3,109,1,5,3,2,3,45,3,2,2,2,1,1,0,1,0,0,1,0,0,1 4,24,2,15,2,2,4,3,1,33,3,1,1,2,1,1,0,1,0,0,1,0,0,1 4,10,2,9,5,4,2,3,2,40,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,15,4,33,1,3,3,2,4,28,3,1,1,2,1,0,0,1,0,0,0,0,0,1 1,15,2,40,1,3,2,2,2,29,3,1,1,2,1,1,0,1,0,0,1,0,0,1 4,9,2,36,2,3,3,2,1,26,3,1,2,1,2,1,0,0,0,1,0,0,0,1 4,24,4,58,4,3,3,2,1,27,3,2,1,1,1,0,1,1,0,0,1,0,0,1 4,18,3,22,1,3,4,2,3,28,3,1,1,2,1,0,0,1,0,0,1,0,0,1 1,24,2,24,1,2,2,4,1,35,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,27,4,45,4,2,3,2,1,32,2,2,2,2,1,0,0,1,0,0,1,0,1,0 4,10,2,22,1,3,3,2,1,25,1,1,1,1,1,0,0,1,0,1,0,0,1,0 4,15,2,22,3,3,2,4,3,20,3,1,1,1,1,0,0,1,0,1,0,0,0,1 
1,18,2,24,1,2,2,1,3,27,2,1,1,1,1,0,0,1,0,0,1,0,0,1 4,12,4,33,1,5,3,4,2,42,2,1,1,1,1,0,0,1,0,0,1,0,0,1 4,36,2,74,5,5,3,2,2,37,3,2,1,1,1,0,0,1,0,0,1,0,0,1 1,12,2,7,1,5,2,4,2,24,3,1,1,1,1,0,0,1,0,1,0,0,0,1 4,36,3,77,3,4,2,4,3,40,3,2,1,2,1,0,0,1,0,0,1,0,0,1 3,6,4,13,1,5,3,4,1,46,3,2,2,1,2,1,0,1,0,0,1,0,0,1 1,24,4,14,2,4,3,1,1,26,3,2,1,2,1,0,0,1,0,0,1,0,0,1 4,15,2,9,5,2,2,1,1,24,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,12,2,36,1,3,3,2,2,29,3,1,2,1,1,0,0,0,1,0,1,0,1,0 2,11,4,13,4,3,2,4,3,40,3,2,1,1,1,1,0,1,0,0,1,0,0,1 1,18,1,19,1,2,3,4,4,36,1,1,1,2,1,0,0,0,1,0,0,0,0,0 4,36,2,36,1,5,3,2,3,28,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,9,2,14,1,2,3,2,4,27,3,1,1,2,1,1,0,1,0,0,0,0,0,0 4,30,4,67,5,4,3,3,2,36,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,24,2,78,1,4,3,3,3,38,3,1,1,2,1,0,1,1,0,0,1,0,0,0 4,24,2,93,5,3,1,4,4,48,3,1,1,2,1,0,1,1,0,0,0,0,0,1 2,30,4,22,5,5,3,4,1,36,3,2,1,1,1,1,0,1,0,0,1,0,0,1 4,18,4,11,1,1,2,4,3,65,3,2,1,1,1,0,0,1,0,0,1,1,0,0 2,24,2,41,1,4,1,3,3,43,3,1,1,2,1,0,0,1,0,0,1,0,0,1 1,12,2,8,1,2,2,4,2,53,3,1,1,1,1,0,0,1,0,0,1,0,0,1 2,24,4,28,5,4,3,3,4,34,3,2,2,2,1,0,0,1,0,0,1,0,0,1 2,48,2,157,1,3,3,2,3,23,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,36,4,66,1,5,3,4,3,34,3,2,1,2,1,1,0,1,0,0,1,0,0,0 4,28,1,78,5,2,3,4,1,40,1,2,2,2,1,0,1,0,0,1,0,0,0,1 1,27,4,24,1,5,3,4,3,43,2,4,2,2,1,0,0,1,0,0,1,0,0,0 4,15,4,18,1,5,3,4,3,46,3,2,1,2,1,0,0,1,0,0,1,0,0,1 1,12,4,22,1,3,3,4,2,38,1,2,1,1,2,1,0,1,0,0,1,0,1,0 2,36,4,58,1,3,3,4,3,34,3,2,1,2,1,0,1,1,0,0,1,0,0,1 4,18,4,12,5,3,3,3,2,29,3,2,1,2,1,0,0,1,0,0,1,0,0,1 4,36,3,89,5,4,3,2,3,31,2,1,2,2,1,0,1,1,0,0,1,0,0,0 1,21,2,26,1,2,2,4,2,28,3,1,1,2,1,0,0,1,0,1,0,0,0,0 4,12,4,16,4,4,2,2,2,35,3,1,1,1,2,0,0,1,0,0,1,0,0,1 4,15,2,22,5,4,2,4,1,33,1,1,1,1,1,0,0,1,0,1,0,0,1,0 1,18,2,42,1,3,3,3,3,42,3,1,1,1,1,0,0,0,1,0,1,0,0,1 1,16,4,26,1,5,3,4,2,43,1,1,1,2,1,1,0,0,0,1,0,0,0,1 4,20,4,35,5,2,1,4,1,44,3,2,1,2,1,1,0,1,0,0,1,0,0,1 4,36,4,105,5,5,3,4,4,42,3,2,1,1,1,0,1,1,0,0,0,0,0,1 4,15,2,14,5,3,4,2,1,40,3,1,1,2,1,0,0,1,0,1,0,0,0,1 4,24,2,13,1,5,3,1,1,36,3,1,1,2,1,0,0,1,0,0,1,0,0,0 1,12,2,11,1,3,3,2,1,20,3,1,2,2,1,0,0,1,0,1,0,0,0,0 1,21,2,38,5,4,3,2,1,24,3,1,1,1,2,1,0,0,1,0,1,0,1,0 2,36,2,37,5,3,4,2,3,27,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,15,3,36,1,2,2,2,2,46,3,2,1,1,1,0,1,1,0,0,1,0,1,0 2,9,2,32,5,3,2,2,1,33,3,1,1,1,1,1,0,1,0,0,1,0,1,0 4,36,3,45,1,3,2,4,1,34,3,2,1,1,1,0,0,1,0,0,1,0,0,1 2,24,4,47,1,2,2,4,3,25,1,1,1,1,1,0,0,1,0,0,1,0,1,0 2,30,2,30,5,5,2,4,3,25,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,11,2,21,4,5,1,2,1,28,3,1,1,2,1,0,0,1,0,0,1,0,0,1 1,24,1,32,1,3,3,2,2,31,3,1,1,2,1,0,0,1,0,1,0,0,0,1 2,48,0,184,1,3,2,2,2,32,1,1,1,2,2,0,0,1,0,0,1,0,0,0 4,10,2,28,2,3,3,2,1,32,3,1,2,1,1,0,1,0,1,0,1,0,0,1 1,6,2,149,1,5,3,4,4,68,1,1,1,2,1,1,0,1,0,0,1,0,0,0 1,24,2,24,2,1,1,1,2,33,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,24,2,33,1,5,3,2,2,39,3,1,1,2,1,0,0,1,0,1,0,0,0,0 4,18,4,18,1,3,2,2,4,28,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,48,3,127,3,4,3,1,3,37,3,1,1,2,1,0,0,1,0,0,1,0,0,0 1,9,2,14,1,2,2,4,2,22,3,1,1,1,1,0,0,1,0,1,0,0,0,1 2,12,2,20,1,4,3,4,2,30,3,1,2,2,1,1,0,1,0,1,0,0,0,1 1,24,1,69,1,2,1,1,2,55,1,1,1,2,1,0,0,1,0,0,1,0,0,1 1,12,1,7,1,2,3,2,3,46,1,2,1,2,1,1,0,1,0,0,1,0,0,1 1,18,4,10,1,2,2,4,2,21,3,1,1,1,1,0,0,1,0,1,0,0,0,1 1,48,2,103,1,4,3,4,4,39,2,3,2,2,1,0,1,1,0,0,0,0,0,1 4,30,2,19,5,5,3,4,3,58,3,1,1,2,1,0,0,1,0,0,1,0,0,1 1,12,3,13,1,3,3,2,1,43,3,2,2,1,1,1,0,1,0,0,1,0,1,0 1,24,2,17,1,2,3,1,2,24,3,1,1,1,2,0,0,0,1,0,1,0,1,0 2,9,2,17,1,2,2,2,3,22,3,1,1,2,1,0,0,1,0,0,1,0,0,1 4,9,4,12,1,3,3,1,1,30,3,2,1,1,1,1,0,1,0,0,1,0,0,1 4,12,4,5,3,5,3,4,2,42,3,2,2,2,1,0,0,1,0,0,1,0,0,1 1,12,2,15,1,3,2,1,3,23,1,1,1,1,1,0,0,1,0,0,1,0,0,1 
2,30,3,19,2,2,3,3,4,30,2,2,1,1,1,0,0,1,0,0,1,0,0,0 3,9,2,7,1,3,2,2,1,28,3,1,1,1,1,0,0,1,0,0,1,0,1,0 2,6,2,21,1,2,4,3,3,30,3,1,1,2,1,0,0,1,0,1,0,0,0,0 2,60,2,63,1,3,3,4,4,42,3,1,1,1,1,0,0,1,0,0,0,0,0,1 4,24,4,68,5,3,3,4,2,46,3,2,2,2,1,0,1,1,0,0,1,0,0,0 4,12,2,35,5,2,3,3,2,45,3,1,2,2,1,1,0,1,0,0,1,0,0,0 4,10,2,15,1,3,3,2,1,31,3,1,2,1,2,1,0,1,0,0,1,0,1,0 4,24,2,9,5,4,3,2,3,31,2,1,1,2,1,0,0,1,0,0,1,0,0,1 4,4,4,15,1,4,3,1,1,42,3,3,2,1,1,1,0,1,0,0,1,0,1,0 1,15,2,18,1,2,2,1,2,46,3,1,1,1,1,0,0,0,0,1,0,0,0,1 2,48,0,84,3,2,2,1,3,30,3,2,1,1,1,1,0,1,0,0,1,0,0,1 1,24,1,33,3,2,3,4,4,30,3,1,2,2,1,0,0,1,0,0,0,0,0,1 4,12,2,29,5,1,3,4,4,38,3,1,1,2,1,1,0,1,0,0,1,0,0,0 4,18,2,15,1,2,4,1,2,43,3,1,2,1,1,0,0,0,1,0,1,0,1,0 4,24,2,36,2,5,3,4,3,31,3,2,1,1,1,0,0,1,0,0,1,0,0,1 2,18,4,36,1,1,4,3,3,40,3,3,2,2,1,0,0,1,0,0,1,1,0,0 1,36,3,21,1,4,3,1,3,24,3,2,1,2,1,0,0,1,0,0,1,0,0,1 2,24,2,41,3,2,2,4,3,28,3,1,1,1,1,0,1,1,0,1,0,0,0,1 4,36,2,110,1,1,2,2,3,26,3,2,1,2,1,0,0,1,0,0,1,0,0,0 1,12,2,19,1,3,2,4,2,29,3,1,1,2,1,1,0,0,0,0,1,0,0,1 1,24,4,12,4,5,2,4,2,57,3,2,1,2,1,0,0,1,0,1,0,0,0,0 3,30,4,37,5,5,3,4,2,49,2,2,1,1,1,0,0,1,0,0,1,0,1,0 2,9,4,12,1,5,3,4,1,37,3,3,1,1,1,0,0,1,0,0,1,0,1,0 1,28,2,40,1,3,3,2,3,45,3,1,1,1,1,1,0,1,0,0,1,0,1,0 2,24,2,31,2,5,3,4,4,30,3,1,1,1,1,0,0,1,0,0,0,0,0,1 4,6,4,17,1,5,4,2,1,30,3,2,1,1,1,0,0,1,0,1,0,0,0,1 2,21,3,24,1,3,1,4,2,47,3,2,1,1,1,1,0,1,0,0,1,0,0,1 4,15,2,36,5,3,3,2,4,29,3,1,1,1,1,1,0,1,0,0,1,0,0,1 4,24,2,24,3,5,3,2,3,35,1,2,1,2,1,0,0,1,0,0,1,0,0,1 2,6,2,5,1,2,4,1,2,22,3,1,1,1,1,0,0,1,0,0,1,0,1,0 2,30,2,17,5,3,2,1,3,26,3,1,1,1,1,0,0,1,0,0,1,0,0,1 2,27,4,25,3,3,3,2,2,23,3,2,1,1,1,0,0,1,0,0,1,0,1,0 4,15,2,36,1,5,2,2,3,54,1,1,1,2,1,0,0,1,0,1,0,0,0,0 4,42,2,72,5,4,4,4,2,29,3,1,1,2,1,0,0,1,0,1,0,0,0,1 1,11,4,39,1,3,3,2,1,40,3,2,2,1,1,1,0,1,0,0,1,0,1,0 2,15,2,15,2,3,3,2,1,22,3,1,1,1,1,0,0,0,0,0,1,0,0,1 4,24,2,74,1,3,3,4,2,43,3,1,2,1,1,1,0,1,0,0,1,0,1,0 1,24,1,12,1,1,2,4,4,29,3,2,1,1,1,1,0,0,1,1,0,1,0,0 1,60,2,73,1,5,3,4,4,36,3,1,1,1,1,0,0,0,1,1,0,0,0,1 4,30,4,28,1,3,2,2,3,33,3,1,1,2,1,0,0,1,0,0,1,0,0,1 3,24,2,13,3,3,2,3,3,57,3,1,1,1,1,0,0,1,0,0,1,0,1,0 2,6,2,8,1,3,2,3,1,64,3,1,1,1,1,0,0,0,0,0,1,0,0,1 2,18,3,24,5,5,3,2,2,42,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,24,3,25,1,5,3,4,3,47,3,2,2,1,1,1,0,1,0,0,1,0,1,0 2,15,1,13,2,3,4,2,2,25,3,1,1,1,1,1,0,1,0,1,0,0,0,1 2,30,4,84,1,4,3,2,2,49,3,1,1,1,1,0,0,1,0,0,1,0,0,1 4,48,2,48,1,1,3,2,3,33,1,1,1,2,1,0,0,1,0,1,0,0,0,0 3,21,2,29,2,3,2,1,3,28,1,1,1,2,1,1,0,1,0,0,1,0,0,0 1,36,2,82,1,3,3,2,2,26,3,1,2,1,1,0,1,1,0,0,1,0,0,1 4,24,4,20,1,4,3,2,2,30,3,2,1,1,1,0,0,1,0,0,1,0,1,0 1,15,4,14,1,3,2,3,2,25,3,2,1,1,1,0,0,1,0,1,0,0,0,1 3,42,0,63,1,2,1,1,2,33,3,2,1,1,1,0,0,1,0,0,1,0,0,1 4,13,2,14,2,1,2,4,1,64,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,24,2,66,1,1,3,2,4,29,3,1,1,2,1,0,1,1,0,0,0,0,0,0 2,24,4,17,1,5,3,2,2,48,3,2,1,1,1,0,0,1,0,0,1,0,1,0 4,12,4,36,5,2,3,1,2,37,3,2,2,1,1,0,0,1,0,0,1,0,1,0 4,15,1,16,2,5,3,4,3,34,1,1,2,1,1,0,0,1,0,0,1,0,1,0 1,18,2,19,5,4,4,4,3,23,3,2,1,1,1,0,0,1,0,1,0,0,1,0 1,36,2,40,1,1,3,3,2,30,3,1,1,2,1,0,0,1,0,0,1,0,0,0 4,12,2,24,5,5,3,3,3,50,3,1,1,2,1,1,0,1,0,0,1,0,0,1 4,12,2,17,1,4,2,4,1,31,3,1,1,1,1,0,0,1,0,0,1,0,1,0 1,30,2,39,1,3,1,4,2,40,3,1,1,2,1,0,1,1,0,0,1,0,0,0 4,12,2,8,1,5,3,4,3,38,3,1,1,1,1,0,0,1,0,0,1,0,0,1 1,45,2,18,1,3,3,4,4,23,3,1,1,2,1,0,0,1,0,0,0,0,0,1 2,45,4,46,2,1,3,4,3,27,3,1,1,1,1,0,1,1,0,0,1,0,0,1 mlpack-2.2.5/src/mlpack/tests/data/iris.csv000066400000000000000000000045371315013601400205660ustar00rootroot000000000000005.1,3.5,1.4,0.2 4.9,3.0,1.4,0.2 4.7,3.2,1.3,0.2 4.6,3.1,1.5,0.2 5.0,3.6,1.4,0.2 5.4,3.9,1.7,0.4 4.6,3.4,1.4,0.3 5.0,3.4,1.5,0.2 
4.4,2.9,1.4,0.2 4.9,3.1,1.5,0.1 5.4,3.7,1.5,0.2 4.8,3.4,1.6,0.2 4.8,3.0,1.4,0.1 4.3,3.0,1.1,0.1 5.8,4.0,1.2,0.2 5.7,4.4,1.5,0.4 5.4,3.9,1.3,0.4 5.1,3.5,1.4,0.3 5.7,3.8,1.7,0.3 5.1,3.8,1.5,0.3 5.4,3.4,1.7,0.2 5.1,3.7,1.5,0.4 4.6,3.6,1.0,0.2 5.1,3.3,1.7,0.5 4.8,3.4,1.9,0.2 5.0,3.0,1.6,0.2 5.0,3.4,1.6,0.4 5.2,3.5,1.5,0.2 5.2,3.4,1.4,0.2 4.7,3.2,1.6,0.2 4.8,3.1,1.6,0.2 5.4,3.4,1.5,0.4 5.2,4.1,1.5,0.1 5.5,4.2,1.4,0.2 4.9,3.1,1.5,0.1 5.0,3.2,1.2,0.2 5.5,3.5,1.3,0.2 4.9,3.1,1.5,0.1 4.4,3.0,1.3,0.2 5.1,3.4,1.5,0.2 5.0,3.5,1.3,0.3 4.5,2.3,1.3,0.3 4.4,3.2,1.3,0.2 5.0,3.5,1.6,0.6 5.1,3.8,1.9,0.4 4.8,3.0,1.4,0.3 5.1,3.8,1.6,0.2 4.6,3.2,1.4,0.2 5.3,3.7,1.5,0.2 5.0,3.3,1.4,0.2 7.0,3.2,4.7,1.4 6.4,3.2,4.5,1.5 6.9,3.1,4.9,1.5 5.5,2.3,4.0,1.3 6.5,2.8,4.6,1.5 5.7,2.8,4.5,1.3 6.3,3.3,4.7,1.6 4.9,2.4,3.3,1.0 6.6,2.9,4.6,1.3 5.2,2.7,3.9,1.4 5.0,2.0,3.5,1.0 5.9,3.0,4.2,1.5 6.0,2.2,4.0,1.0 6.1,2.9,4.7,1.4 5.6,2.9,3.6,1.3 6.7,3.1,4.4,1.4 5.6,3.0,4.5,1.5 5.8,2.7,4.1,1.0 6.2,2.2,4.5,1.5 5.6,2.5,3.9,1.1 5.9,3.2,4.8,1.8 6.1,2.8,4.0,1.3 6.3,2.5,4.9,1.5 6.1,2.8,4.7,1.2 6.4,2.9,4.3,1.3 6.6,3.0,4.4,1.4 6.8,2.8,4.8,1.4 6.7,3.0,5.0,1.7 6.0,2.9,4.5,1.5 5.7,2.6,3.5,1.0 5.5,2.4,3.8,1.1 5.5,2.4,3.7,1.0 5.8,2.7,3.9,1.2 6.0,2.7,5.1,1.6 5.4,3.0,4.5,1.5 6.0,3.4,4.5,1.6 6.7,3.1,4.7,1.5 6.3,2.3,4.4,1.3 5.6,3.0,4.1,1.3 5.5,2.5,4.0,1.3 5.5,2.6,4.4,1.2 6.1,3.0,4.6,1.4 5.8,2.6,4.0,1.2 5.0,2.3,3.3,1.0 5.6,2.7,4.2,1.3 5.7,3.0,4.2,1.2 5.7,2.9,4.2,1.3 6.2,2.9,4.3,1.3 5.1,2.5,3.0,1.1 5.7,2.8,4.1,1.3 6.3,3.3,6.0,2.5 5.8,2.7,5.1,1.9 7.1,3.0,5.9,2.1 6.3,2.9,5.6,1.8 6.5,3.0,5.8,2.2 7.6,3.0,6.6,2.1 4.9,2.5,4.5,1.7 7.3,2.9,6.3,1.8 6.7,2.5,5.8,1.8 7.2,3.6,6.1,2.5 6.5,3.2,5.1,2.0 6.4,2.7,5.3,1.9 6.8,3.0,5.5,2.1 5.7,2.5,5.0,2.0 5.8,2.8,5.1,2.4 6.4,3.2,5.3,2.3 6.5,3.0,5.5,1.8 7.7,3.8,6.7,2.2 7.7,2.6,6.9,2.3 6.0,2.2,5.0,1.5 6.9,3.2,5.7,2.3 5.6,2.8,4.9,2.0 7.7,2.8,6.7,2.0 6.3,2.7,4.9,1.8 6.7,3.3,5.7,2.1 7.2,3.2,6.0,1.8 6.2,2.8,4.8,1.8 6.1,3.0,4.9,1.8 6.4,2.8,5.6,2.1 7.2,3.0,5.8,1.6 7.4,2.8,6.1,1.9 7.9,3.8,6.4,2.0 6.4,2.8,5.6,2.2 6.3,2.8,5.1,1.5 6.1,2.6,5.6,1.4 7.7,3.0,6.1,2.3 6.3,3.4,5.6,2.4 6.4,3.1,5.5,1.8 6.0,3.0,4.8,1.8 6.9,3.1,5.4,2.1 6.7,3.1,5.6,2.4 6.9,3.1,5.1,2.3 5.8,2.7,5.1,1.9 6.8,3.2,5.9,2.3 6.7,3.3,5.7,2.5 6.7,3.0,5.2,2.3 6.3,2.5,5.0,1.9 6.5,3.0,5.2,2.0 6.2,3.4,5.4,2.3 5.9,3.0,5.1,1.8mlpack-2.2.5/src/mlpack/tests/data/iris_labels.txt000066400000000000000000000004541315013601400221260ustar00rootroot000000000000000 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 mlpack-2.2.5/src/mlpack/tests/data/iris_test.csv000066400000000000000000000016661315013601400216250ustar00rootroot000000000000004.7,3.2,1.6,0.2 4.8,3.1,1.6,0.2 5.4,3.4,1.5,0.4 5.2,4.1,1.5,0.1 5.5,4.2,1.4,0.2 4.9,3.1,1.5,0.1 5,3.2,1.2,0.2 5.5,3.5,1.3,0.2 4.9,3.1,1.5,0.1 4.4,3,1.3,0.2 5.1,3.4,1.5,0.2 5,3.5,1.3,0.3 4.5,2.3,1.3,0.3 4.4,3.2,1.3,0.2 5,3.5,1.6,0.6 5.1,3.8,1.9,0.4 4.8,3,1.4,0.3 5.1,3.8,1.6,0.2 4.6,3.2,1.4,0.2 5.3,3.7,1.5,0.2 5,3.3,1.4,0.2 5.7,2.6,3.5,1 5.5,2.4,3.8,1.1 5.5,2.4,3.7,1 5.8,2.7,3.9,1.2 6,2.7,5.1,1.6 5.4,3,4.5,1.5 6,3.4,4.5,1.6 6.7,3.1,4.7,1.5 6.3,2.3,4.4,1.3 5.6,3,4.1,1.3 5.5,2.5,4,1.3 5.5,2.6,4.4,1.2 6.1,3,4.6,1.4 5.8,2.6,4,1.2 5,2.3,3.3,1 5.6,2.7,4.2,1.3 5.7,3,4.2,1.2 5.7,2.9,4.2,1.3 6.2,2.9,4.3,1.3 5.1,2.5,3,1.1 5.7,2.8,4.1,1.3 7.2,3,5.8,1.6 7.4,2.8,6.1,1.9 7.9,3.8,6.4,2 6.4,2.8,5.6,2.2 6.3,2.8,5.1,1.5 6.1,2.6,5.6,1.4 
7.7,3,6.1,2.3 6.3,3.4,5.6,2.4 6.4,3.1,5.5,1.8 6,3,4.8,1.8 6.9,3.1,5.4,2.1 6.7,3.1,5.6,2.4 6.9,3.1,5.1,2.3 5.8,2.7,5.1,1.9 6.8,3.2,5.9,2.3 6.7,3.3,5.7,2.5 6.7,3,5.2,2.3 6.3,2.5,5,1.9 6.5,3,5.2,2 6.2,3.4,5.4,2.3 5.9,3,5.1,1.8 mlpack-2.2.5/src/mlpack/tests/data/iris_test_labels.csv000066400000000000000000000001761315013601400231420ustar00rootroot000000000000000 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 mlpack-2.2.5/src/mlpack/tests/data/iris_train.csv000066400000000000000000000024341315013601400217550ustar00rootroot000000000000005.1,3.5,1.4,0.2 4.9,3,1.4,0.2 4.7,3.2,1.3,0.2 4.6,3.1,1.5,0.2 5,3.6,1.4,0.2 5.4,3.9,1.7,0.4 4.6,3.4,1.4,0.3 5,3.4,1.5,0.2 4.4,2.9,1.4,0.2 4.9,3.1,1.5,0.1 5.4,3.7,1.5,0.2 4.8,3.4,1.6,0.2 4.8,3,1.4,0.1 4.3,3,1.1,0.1 5.8,4,1.2,0.2 5.7,4.4,1.5,0.4 5.4,3.9,1.3,0.4 5.1,3.5,1.4,0.3 5.7,3.8,1.7,0.3 5.1,3.8,1.5,0.3 5.4,3.4,1.7,0.2 5.1,3.7,1.5,0.4 4.6,3.6,1,0.2 5.1,3.3,1.7,0.5 4.8,3.4,1.9,0.2 5,3,1.6,0.2 5,3.4,1.6,0.4 5.2,3.5,1.5,0.2 5.2,3.4,1.4,0.2 7,3.2,4.7,1.4 6.4,3.2,4.5,1.5 6.9,3.1,4.9,1.5 5.5,2.3,4,1.3 6.5,2.8,4.6,1.5 5.7,2.8,4.5,1.3 6.3,3.3,4.7,1.6 4.9,2.4,3.3,1 6.6,2.9,4.6,1.3 5.2,2.7,3.9,1.4 5,2,3.5,1 5.9,3,4.2,1.5 6,2.2,4,1 6.1,2.9,4.7,1.4 5.6,2.9,3.6,1.3 6.7,3.1,4.4,1.4 5.6,3,4.5,1.5 5.8,2.7,4.1,1 6.2,2.2,4.5,1.5 5.6,2.5,3.9,1.1 5.9,3.2,4.8,1.8 6.1,2.8,4,1.3 6.3,2.5,4.9,1.5 6.1,2.8,4.7,1.2 6.4,2.9,4.3,1.3 6.6,3,4.4,1.4 6.8,2.8,4.8,1.4 6.7,3,5,1.7 6,2.9,4.5,1.5 6.3,3.3,6,2.5 5.8,2.7,5.1,1.9 7.1,3,5.9,2.1 6.3,2.9,5.6,1.8 6.5,3,5.8,2.2 7.6,3,6.6,2.1 4.9,2.5,4.5,1.7 7.3,2.9,6.3,1.8 6.7,2.5,5.8,1.8 7.2,3.6,6.1,2.5 6.5,3.2,5.1,2 6.4,2.7,5.3,1.9 6.8,3,5.5,2.1 5.7,2.5,5,2 5.8,2.8,5.1,2.4 6.4,3.2,5.3,2.3 6.5,3,5.5,1.8 7.7,3.8,6.7,2.2 7.7,2.6,6.9,2.3 6,2.2,5,1.5 6.9,3.2,5.7,2.3 5.6,2.8,4.9,2 7.7,2.8,6.7,2 6.3,2.7,4.9,1.8 6.7,3.3,5.7,2.1 7.2,3.2,6,1.8 6.2,2.8,4.8,1.8 6.1,3,4.9,1.8 6.4,2.8,5.6,2.1 mlpack-2.2.5/src/mlpack/tests/data/iris_train_labels.csv000066400000000000000000000002561315013601400232770ustar00rootroot000000000000000 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 mlpack-2.2.5/src/mlpack/tests/data/johnson8-4-4.csv000066400000000000000000000072601315013601400216640ustar00rootroot000000000000001, 0 2, 0 2, 1 3, 0 3, 1 3, 2 4, 0 4, 1 4, 2 4, 3 5, 0 5, 1 6, 0 6, 2 6, 5 7, 0 7, 3 7, 5 7, 6 8, 0 8, 4 8, 5 8, 6 8, 7 9, 1 9, 2 9, 5 9, 6 10, 1 10, 3 10, 5 10, 7 10, 9 11, 1 11, 4 11, 5 11, 8 11, 9 11, 10 12, 2 12, 3 12, 6 12, 7 12, 9 12, 10 13, 2 13, 4 13, 6 13, 8 13, 9 13, 11 13, 12 14, 3 14, 4 14, 7 14, 8 14, 10 14, 11 14, 12 14, 13 15, 0 15, 1 15, 5 16, 0 16, 2 16, 6 16, 15 17, 0 17, 3 17, 7 17, 15 17, 16 18, 0 18, 4 18, 8 18, 15 18, 16 18, 17 19, 1 19, 2 19, 9 19, 15 19, 16 20, 1 20, 3 20, 10 20, 15 20, 17 20, 19 21, 1 21, 4 21, 11 21, 15 21, 18 21, 19 21, 20 22, 2 22, 3 22, 12 22, 16 22, 17 22, 19 22, 20 23, 2 23, 4 23, 13 23, 16 23, 18 23, 19 23, 21 23, 22 24, 3 24, 4 24, 14 24, 17 24, 18 24, 20 24, 21 24, 22 24, 23 25, 5 25, 6 25, 9 25, 15 25, 16 25, 19 26, 5 26, 7 26, 10 26, 15 26, 17 26, 20 26, 25 27, 5 27, 8 27, 11 27, 15 27, 18 27, 21 27, 25 27, 26 28, 6 28, 7 28, 12 28, 16 28, 17 28, 22 28, 25 28, 26 29, 6 29, 8 29, 13 29, 16 29, 18 29, 23 29, 25 29, 27 29, 28 30, 7 30, 8 30, 14 30, 17 30, 18 30, 24 30, 26 30, 27 30, 28 30, 29 31, 9 31, 10 31, 12 31, 19 31, 20 31, 22 31, 25 31, 26 31, 28 32, 9 32, 11 32, 13 32, 19 32, 21 32, 23 32, 25 32, 27 32, 29 32, 31 
33, 10 33, 11 33, 14 33, 20 33, 21 33, 24 33, 26 33, 27 33, 30 33, 31 33, 32 34, 12 34, 13 34, 14 34, 22 34, 23 34, 24 34, 28 34, 29 34, 30 34, 31 34, 32 34, 33 35, 0 35, 1 35, 5 35, 15 36, 0 36, 2 36, 6 36, 16 36, 35 37, 0 37, 3 37, 7 37, 17 37, 35 37, 36 38, 0 38, 4 38, 8 38, 18 38, 35 38, 36 38, 37 39, 1 39, 2 39, 9 39, 19 39, 35 39, 36 40, 1 40, 3 40, 10 40, 20 40, 35 40, 37 40, 39 41, 1 41, 4 41, 11 41, 21 41, 35 41, 38 41, 39 41, 40 42, 2 42, 3 42, 12 42, 22 42, 36 42, 37 42, 39 42, 40 43, 2 43, 4 43, 13 43, 23 43, 36 43, 38 43, 39 43, 41 43, 42 44, 3 44, 4 44, 14 44, 24 44, 37 44, 38 44, 40 44, 41 44, 42 44, 43 45, 5 45, 6 45, 9 45, 25 45, 35 45, 36 45, 39 46, 5 46, 7 46, 10 46, 26 46, 35 46, 37 46, 40 46, 45 47, 5 47, 8 47, 11 47, 27 47, 35 47, 38 47, 41 47, 45 47, 46 48, 6 48, 7 48, 12 48, 28 48, 36 48, 37 48, 42 48, 45 48, 46 49, 6 49, 8 49, 13 49, 29 49, 36 49, 38 49, 43 49, 45 49, 47 49, 48 50, 7 50, 8 50, 14 50, 30 50, 37 50, 38 50, 44 50, 46 50, 47 50, 48 50, 49 51, 9 51, 10 51, 12 51, 31 51, 39 51, 40 51, 42 51, 45 51, 46 51, 48 52, 9 52, 11 52, 13 52, 32 52, 39 52, 41 52, 43 52, 45 52, 47 52, 49 52, 51 53, 10 53, 11 53, 14 53, 33 53, 40 53, 41 53, 44 53, 46 53, 47 53, 50 53, 51 53, 52 54, 12 54, 13 54, 14 54, 34 54, 42 54, 43 54, 44 54, 48 54, 49 54, 50 54, 51 54, 52 54, 53 55, 15 55, 16 55, 19 55, 25 55, 35 55, 36 55, 39 55, 45 56, 15 56, 17 56, 20 56, 26 56, 35 56, 37 56, 40 56, 46 56, 55 57, 15 57, 18 57, 21 57, 27 57, 35 57, 38 57, 41 57, 47 57, 55 57, 56 58, 16 58, 17 58, 22 58, 28 58, 36 58, 37 58, 42 58, 48 58, 55 58, 56 59, 16 59, 18 59, 23 59, 29 59, 36 59, 38 59, 43 59, 49 59, 55 59, 57 59, 58 60, 17 60, 18 60, 24 60, 30 60, 37 60, 38 60, 44 60, 50 60, 56 60, 57 60, 58 60, 59 61, 19 61, 20 61, 22 61, 31 61, 39 61, 40 61, 42 61, 51 61, 55 61, 56 61, 58 62, 19 62, 21 62, 23 62, 32 62, 39 62, 41 62, 43 62, 52 62, 55 62, 57 62, 59 62, 61 63, 20 63, 21 63, 24 63, 33 63, 40 63, 41 63, 44 63, 53 63, 56 63, 57 63, 60 63, 61 63, 62 64, 22 64, 23 64, 24 64, 34 64, 42 64, 43 64, 44 64, 54 64, 58 64, 59 64, 60 64, 61 64, 62 64, 63 65, 25 65, 26 65, 28 65, 31 65, 45 65, 46 65, 48 65, 51 65, 55 65, 56 65, 58 65, 61 66, 25 66, 27 66, 29 66, 32 66, 45 66, 47 66, 49 66, 52 66, 55 66, 57 66, 59 66, 62 66, 65 67, 26 67, 27 67, 30 67, 33 67, 46 67, 47 67, 50 67, 53 67, 56 67, 57 67, 60 67, 63 67, 65 67, 66 68, 28 68, 29 68, 30 68, 34 68, 48 68, 49 68, 50 68, 54 68, 58 68, 59 68, 60 68, 64 68, 65 68, 66 68, 67 69, 31 69, 32 69, 33 69, 34 69, 51 69, 52 69, 53 69, 54 69, 61 69, 62 69, 63 69, 64 69, 65 69, 66 69, 67 69, 68 mlpack-2.2.5/src/mlpack/tests/data/keller4.csv000066400000000000000000001153261315013601400211610ustar00rootroot000000000000001, 0 2, 0 2, 1 3, 0 3, 1 3, 2 4, 0 4, 1 4, 2 4, 3 5, 0 5, 4 6, 0 6, 1 6, 2 6, 3 6, 5 7, 1 7, 3 8, 0 8, 1 8, 2 8, 4 8, 6 8, 7 9, 0 9, 1 9, 2 9, 3 9, 4 9, 5 9, 6 9, 7 9, 8 10, 0 10, 2 10, 3 10, 4 10, 6 10, 7 10, 8 10, 9 11, 1 11, 2 11, 3 11, 4 11, 5 11, 6 11, 8 11, 9 11, 10 12, 5 12, 6 12, 11 13, 5 13, 6 13, 11 13, 12 14, 5 14, 6 14, 11 14, 12 14, 13 15, 7 15, 8 15, 10 15, 12 15, 14 16, 5 16, 6 16, 7 16, 8 16, 9 16, 12 16, 13 16, 15 17, 0 17, 5 17, 6 17, 8 17, 9 17, 10 17, 12 17, 13 17, 14 17, 15 17, 16 18, 5 18, 6 18, 7 18, 9 18, 10 18, 13 18, 14 18, 15 18, 16 18, 17 19, 7 19, 8 19, 10 19, 15 19, 16 19, 18 20, 1 20, 6 20, 7 20, 8 20, 9 20, 11 20, 12 20, 15 20, 16 20, 17 20, 19 21, 2 21, 6 21, 8 21, 9 21, 10 21, 11 21, 13 21, 16 21, 17 21, 18 21, 19 21, 20 22, 3 22, 6 22, 7 22, 9 22, 10 22, 11 22, 14 22, 15 22, 17 22, 18 22, 19 22, 20 22, 21 23, 7 
23, 8 23, 10 23, 12 23, 14 23, 15 23, 19 23, 20 23, 22 24, 5 24, 7 24, 8 24, 9 24, 11 24, 12 24, 13 24, 16 24, 19 24, 20 24, 21 24, 23 25, 4 25, 5 25, 8 25, 9 25, 10 25, 11 25, 12 25, 13 25, 14 25, 17 25, 20 25, 21 25, 22 25, 23 25, 24 26, 5 26, 7 26, 9 26, 10 26, 11 26, 13 26, 14 26, 18 26, 19 26, 21 26, 22 26, 23 26, 24 26, 25 27, 0 27, 4 27, 5 27, 12 27, 13 27, 14 27, 16 27, 17 27, 18 27, 24 27, 25 27, 26 28, 0 28, 1 28, 2 28, 3 28, 6 28, 12 28, 13 28, 14 28, 16 28, 17 28, 18 28, 20 28, 21 28, 22 28, 27 29, 1 29, 3 29, 7 29, 15 29, 16 29, 18 29, 19 29, 20 29, 22 29, 23 29, 24 29, 26 30, 0 30, 1 30, 2 30, 4 30, 8 30, 15 30, 16 30, 17 30, 19 30, 20 30, 21 30, 23 30, 24 30, 25 30, 28 30, 29 31, 0 31, 1 31, 2 31, 3 31, 4 31, 9 31, 16 31, 17 31, 18 31, 20 31, 21 31, 22 31, 24 31, 25 31, 26 31, 27 31, 28 31, 29 31, 30 32, 0 32, 2 32, 3 32, 4 32, 10 32, 15 32, 17 32, 18 32, 19 32, 21 32, 22 32, 23 32, 25 32, 26 32, 28 32, 29 32, 30 32, 31 33, 1 33, 2 33, 3 33, 4 33, 11 33, 12 33, 13 33, 14 33, 20 33, 21 33, 22 33, 24 33, 25 33, 26 33, 27 33, 28 33, 30 33, 31 33, 32 34, 0 34, 4 34, 5 34, 6 34, 11 34, 27 34, 28 34, 33 35, 0 35, 1 35, 2 35, 3 35, 5 35, 6 35, 8 35, 9 35, 10 35, 27 35, 28 35, 30 35, 31 35, 32 35, 34 36, 1 36, 3 36, 7 36, 8 36, 10 36, 29 36, 30 36, 32 37, 0 37, 1 37, 2 37, 4 37, 6 37, 7 37, 8 37, 9 37, 11 37, 28 37, 29 37, 30 37, 31 37, 33 37, 35 37, 36 38, 0 38, 1 38, 2 38, 3 38, 4 38, 6 38, 8 38, 9 38, 10 38, 11 38, 28 38, 30 38, 31 38, 32 38, 33 38, 34 38, 35 38, 36 38, 37 39, 0 39, 2 39, 3 39, 4 39, 6 39, 7 39, 9 39, 10 39, 11 39, 28 39, 29 39, 31 39, 32 39, 33 39, 35 39, 36 39, 37 39, 38 40, 1 40, 2 40, 3 40, 4 40, 5 40, 8 40, 9 40, 10 40, 11 40, 27 40, 30 40, 31 40, 32 40, 33 40, 34 40, 35 40, 37 40, 38 40, 39 41, 0 41, 4 41, 5 41, 6 41, 11 41, 12 41, 13 41, 14 41, 16 41, 17 41, 18 41, 24 41, 25 41, 26 41, 34 41, 35 41, 40 42, 0 42, 1 42, 2 42, 3 42, 5 42, 6 42, 8 42, 9 42, 10 42, 12 42, 13 42, 14 42, 16 42, 17 42, 18 42, 20 42, 21 42, 22 42, 34 42, 35 42, 37 42, 38 42, 39 42, 41 43, 1 43, 3 43, 7 43, 8 43, 10 43, 15 43, 16 43, 18 43, 19 43, 20 43, 22 43, 23 43, 24 43, 26 43, 36 43, 37 43, 39 44, 0 44, 1 44, 2 44, 4 44, 6 44, 7 44, 8 44, 9 44, 11 44, 15 44, 16 44, 17 44, 19 44, 20 44, 21 44, 23 44, 24 44, 25 44, 35 44, 36 44, 37 44, 38 44, 40 44, 42 44, 43 45, 0 45, 1 45, 2 45, 3 45, 4 45, 6 45, 8 45, 9 45, 10 45, 11 45, 16 45, 17 45, 18 45, 20 45, 21 45, 22 45, 24 45, 25 45, 26 45, 35 45, 37 45, 38 45, 39 45, 40 45, 41 45, 42 45, 43 45, 44 46, 0 46, 2 46, 3 46, 4 46, 6 46, 7 46, 9 46, 10 46, 11 46, 15 46, 17 46, 18 46, 19 46, 21 46, 22 46, 23 46, 25 46, 26 46, 35 46, 36 46, 38 46, 39 46, 40 46, 42 46, 43 46, 44 46, 45 47, 1 47, 2 47, 3 47, 4 47, 5 47, 8 47, 9 47, 10 47, 11 47, 12 47, 13 47, 14 47, 20 47, 21 47, 22 47, 24 47, 25 47, 26 47, 34 47, 37 47, 38 47, 39 47, 40 47, 41 47, 42 47, 44 47, 45 47, 46 48, 12 48, 14 48, 15 48, 16 48, 18 48, 23 48, 24 48, 26 49, 5 49, 6 49, 11 49, 12 49, 13 49, 15 49, 16 49, 17 49, 23 49, 24 49, 25 49, 27 49, 28 49, 33 49, 41 49, 42 49, 47 49, 48 50, 5 50, 6 50, 11 50, 12 50, 13 50, 14 50, 16 50, 17 50, 18 50, 24 50, 25 50, 26 50, 27 50, 28 50, 33 50, 34 50, 41 50, 42 50, 47 50, 48 50, 49 51, 5 51, 6 51, 11 51, 13 51, 14 51, 15 51, 17 51, 18 51, 23 51, 25 51, 26 51, 27 51, 28 51, 33 51, 41 51, 42 51, 47 51, 48 51, 49 51, 50 52, 7 52, 8 52, 10 52, 12 52, 14 52, 15 52, 16 52, 18 52, 19 52, 20 52, 22 52, 29 52, 30 52, 32 52, 43 52, 44 52, 46 52, 48 52, 49 52, 51 53, 5 53, 6 53, 7 53, 8 53, 9 53, 12 53, 13 53, 15 53, 16 53, 17 53, 19 53, 20 53, 
21 53, 27 53, 28 53, 29 53, 30 53, 31 53, 41 53, 42 53, 43 53, 44 53, 45 53, 48 53, 49 53, 50 53, 52 54, 5 54, 6 54, 8 54, 9 54, 10 54, 12 54, 13 54, 14 54, 16 54, 17 54, 18 54, 20 54, 21 54, 22 54, 27 54, 28 54, 30 54, 31 54, 32 54, 35 54, 41 54, 42 54, 44 54, 45 54, 46 54, 49 54, 50 54, 51 54, 52 54, 53 55, 5 55, 6 55, 7 55, 9 55, 10 55, 13 55, 14 55, 15 55, 17 55, 18 55, 19 55, 21 55, 22 55, 27 55, 28 55, 29 55, 31 55, 32 55, 41 55, 42 55, 43 55, 45 55, 46 55, 48 55, 50 55, 51 55, 52 55, 53 55, 54 56, 7 56, 8 56, 10 56, 15 56, 16 56, 18 56, 19 56, 20 56, 22 56, 23 56, 24 56, 26 56, 29 56, 30 56, 32 56, 36 56, 43 56, 44 56, 46 56, 48 56, 52 56, 53 56, 55 57, 6 57, 7 57, 8 57, 9 57, 11 57, 15 57, 16 57, 17 57, 19 57, 20 57, 21 57, 23 57, 24 57, 25 57, 28 57, 29 57, 30 57, 31 57, 33 57, 37 57, 42 57, 43 57, 44 57, 45 57, 47 57, 49 57, 52 57, 53 57, 54 57, 56 58, 6 58, 8 58, 9 58, 10 58, 11 58, 16 58, 17 58, 18 58, 20 58, 21 58, 22 58, 24 58, 25 58, 26 58, 28 58, 30 58, 31 58, 32 58, 33 58, 38 58, 42 58, 44 58, 45 58, 46 58, 47 58, 50 58, 53 58, 54 58, 55 58, 56 58, 57 59, 6 59, 7 59, 9 59, 10 59, 11 59, 15 59, 17 59, 18 59, 19 59, 21 59, 22 59, 23 59, 25 59, 26 59, 28 59, 29 59, 31 59, 32 59, 33 59, 39 59, 42 59, 43 59, 45 59, 46 59, 47 59, 51 59, 52 59, 54 59, 55 59, 56 59, 57 59, 58 60, 7 60, 8 60, 10 60, 12 60, 14 60, 19 60, 20 60, 22 60, 23 60, 24 60, 26 60, 29 60, 30 60, 32 60, 43 60, 44 60, 46 60, 48 60, 49 60, 51 60, 52 60, 56 60, 57 60, 59 61, 5 61, 7 61, 8 61, 9 61, 11 61, 12 61, 13 61, 19 61, 20 61, 21 61, 23 61, 24 61, 25 61, 27 61, 29 61, 30 61, 31 61, 33 61, 41 61, 43 61, 44 61, 45 61, 47 61, 48 61, 49 61, 50 61, 53 61, 56 61, 57 61, 58 61, 60 62, 5 62, 8 62, 9 62, 10 62, 11 62, 12 62, 13 62, 14 62, 20 62, 21 62, 22 62, 24 62, 25 62, 26 62, 27 62, 30 62, 31 62, 32 62, 33 62, 40 62, 41 62, 44 62, 45 62, 46 62, 47 62, 49 62, 50 62, 51 62, 54 62, 57 62, 58 62, 59 62, 60 62, 61 63, 5 63, 7 63, 9 63, 10 63, 11 63, 13 63, 14 63, 19 63, 21 63, 22 63, 23 63, 25 63, 26 63, 27 63, 29 63, 31 63, 32 63, 33 63, 41 63, 43 63, 45 63, 46 63, 47 63, 48 63, 50 63, 51 63, 55 63, 56 63, 58 63, 59 63, 60 63, 61 63, 62 64, 0 64, 4 64, 12 64, 13 64, 14 64, 16 64, 17 64, 18 64, 24 64, 25 64, 26 64, 27 64, 28 64, 33 64, 34 64, 35 64, 40 64, 41 64, 49 64, 50 64, 51 64, 53 64, 54 64, 55 64, 61 64, 62 64, 63 65, 0 65, 1 65, 2 65, 3 65, 12 65, 13 65, 14 65, 16 65, 17 65, 18 65, 20 65, 21 65, 22 65, 27 65, 28 65, 30 65, 31 65, 32 65, 34 65, 35 65, 37 65, 38 65, 39 65, 42 65, 49 65, 50 65, 51 65, 53 65, 54 65, 55 65, 57 65, 58 65, 59 65, 64 66, 1 66, 3 66, 15 66, 16 66, 18 66, 19 66, 20 66, 22 66, 23 66, 24 66, 26 66, 29 66, 30 66, 32 66, 36 66, 37 66, 39 66, 43 66, 52 66, 53 66, 55 66, 56 66, 57 66, 59 66, 60 66, 61 66, 63 67, 0 67, 1 67, 2 67, 4 67, 15 67, 16 67, 17 67, 19 67, 20 67, 21 67, 23 67, 24 67, 25 67, 28 67, 29 67, 30 67, 31 67, 33 67, 35 67, 36 67, 37 67, 38 67, 40 67, 44 67, 52 67, 53 67, 54 67, 56 67, 57 67, 58 67, 60 67, 61 67, 62 67, 65 67, 66 68, 0 68, 1 68, 2 68, 3 68, 4 68, 16 68, 17 68, 18 68, 20 68, 21 68, 22 68, 24 68, 25 68, 26 68, 28 68, 30 68, 31 68, 32 68, 33 68, 35 68, 37 68, 38 68, 39 68, 40 68, 45 68, 53 68, 54 68, 55 68, 57 68, 58 68, 59 68, 61 68, 62 68, 63 68, 64 68, 65 68, 66 68, 67 69, 0 69, 2 69, 3 69, 4 69, 15 69, 17 69, 18 69, 19 69, 21 69, 22 69, 23 69, 25 69, 26 69, 28 69, 29 69, 31 69, 32 69, 33 69, 35 69, 36 69, 38 69, 39 69, 40 69, 46 69, 52 69, 54 69, 55 69, 56 69, 58 69, 59 69, 60 69, 62 69, 63 69, 65 69, 66 69, 67 69, 68 70, 1 70, 2 70, 3 70, 4 70, 12 70, 13 70, 
14 70, 20 70, 21 70, 22 70, 24 70, 25 70, 26 70, 27 70, 30 70, 31 70, 32 70, 33 70, 34 70, 37 70, 38 70, 39 70, 40 70, 47 70, 49 70, 50 70, 51 70, 57 70, 58 70, 59 70, 61 70, 62 70, 63 70, 64 70, 65 70, 67 70, 68 70, 69 71, 34 71, 35 71, 40 71, 41 71, 42 71, 47 71, 64 71, 65 71, 70 72, 34 72, 35 72, 40 72, 41 72, 42 72, 47 72, 64 72, 65 72, 70 72, 71 73, 34 73, 35 73, 40 73, 41 73, 42 73, 47 73, 64 73, 65 73, 70 73, 71 73, 72 74, 36 74, 37 74, 39 74, 43 74, 44 74, 46 74, 66 74, 67 74, 69 74, 71 74, 73 75, 34 75, 35 75, 36 75, 37 75, 38 75, 41 75, 42 75, 43 75, 44 75, 45 75, 64 75, 65 75, 66 75, 67 75, 68 75, 71 75, 72 75, 74 76, 0 76, 34 76, 35 76, 37 76, 38 76, 39 76, 41 76, 42 76, 44 76, 45 76, 46 76, 64 76, 65 76, 67 76, 68 76, 69 76, 71 76, 72 76, 73 76, 74 76, 75 77, 34 77, 35 77, 36 77, 38 77, 39 77, 41 77, 42 77, 43 77, 45 77, 46 77, 64 77, 65 77, 66 77, 68 77, 69 77, 72 77, 73 77, 74 77, 75 77, 76 78, 36 78, 37 78, 39 78, 43 78, 44 78, 46 78, 66 78, 67 78, 69 78, 74 78, 75 78, 77 79, 1 79, 35 79, 36 79, 37 79, 38 79, 40 79, 42 79, 43 79, 44 79, 45 79, 47 79, 65 79, 66 79, 67 79, 68 79, 70 79, 71 79, 74 79, 75 79, 76 79, 78 80, 2 80, 35 80, 37 80, 38 80, 39 80, 40 80, 42 80, 44 80, 45 80, 46 80, 47 80, 65 80, 67 80, 68 80, 69 80, 70 80, 72 80, 75 80, 76 80, 77 80, 78 80, 79 81, 3 81, 35 81, 36 81, 38 81, 39 81, 40 81, 42 81, 43 81, 45 81, 46 81, 47 81, 65 81, 66 81, 68 81, 69 81, 70 81, 73 81, 74 81, 76 81, 77 81, 78 81, 79 81, 80 82, 36 82, 37 82, 39 82, 43 82, 44 82, 46 82, 66 82, 67 82, 69 82, 71 82, 73 82, 74 82, 78 82, 79 82, 81 83, 34 83, 36 83, 37 83, 38 83, 40 83, 41 83, 43 83, 44 83, 45 83, 47 83, 64 83, 66 83, 67 83, 68 83, 70 83, 71 83, 72 83, 75 83, 78 83, 79 83, 80 83, 82 84, 4 84, 34 84, 37 84, 38 84, 39 84, 40 84, 41 84, 44 84, 45 84, 46 84, 47 84, 64 84, 67 84, 68 84, 69 84, 70 84, 71 84, 72 84, 73 84, 76 84, 79 84, 80 84, 81 84, 82 84, 83 85, 34 85, 36 85, 38 85, 39 85, 40 85, 41 85, 43 85, 45 85, 46 85, 47 85, 64 85, 66 85, 68 85, 69 85, 70 85, 72 85, 73 85, 77 85, 78 85, 80 85, 81 85, 82 85, 83 85, 84 86, 48 86, 49 86, 51 86, 52 86, 53 86, 55 86, 60 86, 61 86, 63 86, 71 86, 73 86, 74 86, 75 86, 77 86, 82 86, 83 86, 85 87, 34 87, 35 87, 40 87, 41 87, 42 87, 47 87, 48 87, 49 87, 50 87, 52 87, 53 87, 54 87, 60 87, 61 87, 62 87, 71 87, 72 87, 74 87, 75 87, 76 87, 82 87, 83 87, 84 87, 86 88, 5 88, 34 88, 35 88, 40 88, 41 88, 42 88, 47 88, 49 88, 50 88, 51 88, 53 88, 54 88, 55 88, 61 88, 62 88, 63 88, 71 88, 72 88, 73 88, 75 88, 76 88, 77 88, 83 88, 84 88, 85 88, 86 88, 87 89, 34 89, 35 89, 40 89, 41 89, 42 89, 47 89, 48 89, 50 89, 51 89, 52 89, 54 89, 55 89, 60 89, 62 89, 63 89, 72 89, 73 89, 74 89, 76 89, 77 89, 82 89, 84 89, 85 89, 86 89, 87 89, 88 90, 36 90, 37 90, 39 90, 43 90, 44 90, 46 90, 48 90, 49 90, 51 90, 52 90, 53 90, 55 90, 56 90, 57 90, 59 90, 71 90, 73 90, 74 90, 75 90, 77 90, 78 90, 79 90, 81 90, 86 90, 87 90, 89 91, 34 91, 35 91, 36 91, 37 91, 38 91, 41 91, 42 91, 43 91, 44 91, 45 91, 48 91, 49 91, 50 91, 52 91, 53 91, 54 91, 56 91, 57 91, 58 91, 71 91, 72 91, 74 91, 75 91, 76 91, 78 91, 79 91, 80 91, 86 91, 87 91, 88 91, 90 92, 6 92, 34 92, 35 92, 37 92, 38 92, 39 92, 41 92, 42 92, 44 92, 45 92, 46 92, 49 92, 50 92, 51 92, 53 92, 54 92, 55 92, 57 92, 58 92, 59 92, 71 92, 72 92, 73 92, 75 92, 76 92, 77 92, 79 92, 80 92, 81 92, 87 92, 88 92, 89 92, 90 92, 91 93, 34 93, 35 93, 36 93, 38 93, 39 93, 41 93, 42 93, 43 93, 45 93, 46 93, 48 93, 50 93, 51 93, 52 93, 54 93, 55 93, 56 93, 58 93, 59 93, 72 93, 73 93, 74 93, 76 93, 77 93, 78 93, 80 93, 81 93, 86 
93, 88 93, 89 93, 90 93, 91 93, 92 94, 7 94, 36 94, 37 94, 39 94, 43 94, 44 94, 46 94, 52 94, 53 94, 55 94, 56 94, 57 94, 59 94, 60 94, 61 94, 63 94, 74 94, 75 94, 77 94, 78 94, 79 94, 81 94, 82 94, 83 94, 85 94, 86 94, 90 94, 91 94, 93 95, 8 95, 35 95, 36 95, 37 95, 38 95, 40 95, 42 95, 43 95, 44 95, 45 95, 47 95, 52 95, 53 95, 54 95, 56 95, 57 95, 58 95, 60 95, 61 95, 62 95, 74 95, 75 95, 76 95, 78 95, 79 95, 80 95, 82 95, 83 95, 84 95, 87 95, 90 95, 91 95, 92 95, 94 96, 9 96, 35 96, 37 96, 38 96, 39 96, 40 96, 42 96, 44 96, 45 96, 46 96, 47 96, 53 96, 54 96, 55 96, 57 96, 58 96, 59 96, 61 96, 62 96, 63 96, 75 96, 76 96, 77 96, 79 96, 80 96, 81 96, 83 96, 84 96, 85 96, 88 96, 91 96, 92 96, 93 96, 94 96, 95 97, 10 97, 35 97, 36 97, 38 97, 39 97, 40 97, 42 97, 43 97, 45 97, 46 97, 47 97, 52 97, 54 97, 55 97, 56 97, 58 97, 59 97, 60 97, 62 97, 63 97, 74 97, 76 97, 77 97, 78 97, 80 97, 81 97, 82 97, 84 97, 85 97, 89 97, 90 97, 92 97, 93 97, 94 97, 95 97, 96 98, 36 98, 37 98, 39 98, 43 98, 44 98, 46 98, 48 98, 49 98, 51 98, 56 98, 57 98, 59 98, 60 98, 61 98, 63 98, 71 98, 73 98, 78 98, 79 98, 81 98, 82 98, 83 98, 85 98, 86 98, 87 98, 89 98, 90 98, 94 98, 95 98, 97 99, 34 99, 36 99, 37 99, 38 99, 40 99, 41 99, 43 99, 44 99, 45 99, 47 99, 48 99, 49 99, 50 99, 56 99, 57 99, 58 99, 60 99, 61 99, 62 99, 71 99, 72 99, 78 99, 79 99, 80 99, 82 99, 83 99, 84 99, 86 99, 87 99, 88 99, 91 99, 94 99, 95 99, 96 99, 98 100, 11 100, 34 100, 37 100, 38 100, 39 100, 40 100, 41 100, 44 100, 45 100, 46 100, 47 100, 49 100, 50 100, 51 100, 57 100, 58 100, 59 100, 61 100, 62 100, 63 100, 71 100, 72 100, 73 100, 79 100, 80 100, 81 100, 83 100, 84 100, 85 100, 87 100, 88 100, 89 100, 92 100, 95 100, 96 100, 97 100, 98 100, 99 101, 34 101, 36 101, 38 101, 39 101, 40 101, 41 101, 43 101, 45 101, 46 101, 47 101, 48 101, 50 101, 51 101, 56 101, 58 101, 59 101, 60 101, 62 101, 63 101, 72 101, 73 101, 78 101, 80 101, 81 101, 82 101, 84 101, 85 101, 86 101, 88 101, 89 101, 93 101, 94 101, 96 101, 97 101, 98 101, 99 101, 100 102, 48 102, 49 102, 51 102, 52 102, 53 102, 55 102, 60 102, 61 102, 63 102, 86 102, 87 102, 89 102, 90 102, 91 102, 93 102, 98 102, 99 102, 101 103, 12 103, 41 103, 42 103, 47 103, 48 103, 49 103, 50 103, 52 103, 53 103, 54 103, 60 103, 61 103, 62 103, 64 103, 65 103, 70 103, 71 103, 86 103, 87 103, 88 103, 90 103, 91 103, 92 103, 98 103, 99 103, 100 103, 102 104, 13 104, 41 104, 42 104, 47 104, 49 104, 50 104, 51 104, 53 104, 54 104, 55 104, 61 104, 62 104, 63 104, 64 104, 65 104, 70 104, 72 104, 87 104, 88 104, 89 104, 91 104, 92 104, 93 104, 99 104, 100 104, 101 104, 102 104, 103 105, 14 105, 41 105, 42 105, 47 105, 48 105, 50 105, 51 105, 52 105, 54 105, 55 105, 60 105, 62 105, 63 105, 64 105, 65 105, 70 105, 73 105, 86 105, 88 105, 89 105, 90 105, 92 105, 93 105, 98 105, 100 105, 101 105, 102 105, 103 105, 104 106, 15 106, 43 106, 44 106, 46 106, 48 106, 49 106, 51 106, 52 106, 53 106, 55 106, 56 106, 57 106, 59 106, 66 106, 67 106, 69 106, 74 106, 86 106, 87 106, 89 106, 90 106, 91 106, 93 106, 94 106, 95 106, 97 106, 102 106, 103 106, 105 107, 16 107, 41 107, 42 107, 43 107, 44 107, 45 107, 48 107, 49 107, 50 107, 52 107, 53 107, 54 107, 56 107, 57 107, 58 107, 64 107, 65 107, 66 107, 67 107, 68 107, 75 107, 86 107, 87 107, 88 107, 90 107, 91 107, 92 107, 94 107, 95 107, 96 107, 102 107, 103 107, 104 107, 106 108, 17 108, 41 108, 42 108, 44 108, 45 108, 46 108, 49 108, 50 108, 51 108, 53 108, 54 108, 55 108, 57 108, 58 108, 59 108, 64 108, 65 108, 67 108, 68 108, 69 108, 76 108, 87 108, 88 108, 
89 108, 91 108, 92 108, 93 108, 95 108, 96 108, 97 108, 103 108, 104 108, 105 108, 106 108, 107 109, 18 109, 41 109, 42 109, 43 109, 45 109, 46 109, 48 109, 50 109, 51 109, 52 109, 54 109, 55 109, 56 109, 58 109, 59 109, 64 109, 65 109, 66 109, 68 109, 69 109, 77 109, 86 109, 88 109, 89 109, 90 109, 92 109, 93 109, 94 109, 96 109, 97 109, 102 109, 104 109, 105 109, 106 109, 107 109, 108 110, 19 110, 43 110, 44 110, 46 110, 52 110, 53 110, 55 110, 56 110, 57 110, 59 110, 60 110, 61 110, 63 110, 66 110, 67 110, 69 110, 78 110, 90 110, 91 110, 93 110, 94 110, 95 110, 97 110, 98 110, 99 110, 101 110, 102 110, 106 110, 107 110, 109 111, 20 111, 42 111, 43 111, 44 111, 45 111, 47 111, 52 111, 53 111, 54 111, 56 111, 57 111, 58 111, 60 111, 61 111, 62 111, 65 111, 66 111, 67 111, 68 111, 70 111, 79 111, 90 111, 91 111, 92 111, 94 111, 95 111, 96 111, 98 111, 99 111, 100 111, 103 111, 106 111, 107 111, 108 111, 110 112, 21 112, 42 112, 44 112, 45 112, 46 112, 47 112, 53 112, 54 112, 55 112, 57 112, 58 112, 59 112, 61 112, 62 112, 63 112, 65 112, 67 112, 68 112, 69 112, 70 112, 80 112, 91 112, 92 112, 93 112, 95 112, 96 112, 97 112, 99 112, 100 112, 101 112, 104 112, 107 112, 108 112, 109 112, 110 112, 111 113, 22 113, 42 113, 43 113, 45 113, 46 113, 47 113, 52 113, 54 113, 55 113, 56 113, 58 113, 59 113, 60 113, 62 113, 63 113, 65 113, 66 113, 68 113, 69 113, 70 113, 81 113, 90 113, 92 113, 93 113, 94 113, 96 113, 97 113, 98 113, 100 113, 101 113, 105 113, 106 113, 108 113, 109 113, 110 113, 111 113, 112 114, 23 114, 43 114, 44 114, 46 114, 48 114, 49 114, 51 114, 56 114, 57 114, 59 114, 60 114, 61 114, 63 114, 66 114, 67 114, 69 114, 82 114, 86 114, 87 114, 89 114, 94 114, 95 114, 97 114, 98 114, 99 114, 101 114, 102 114, 103 114, 105 114, 106 114, 110 114, 111 114, 113 115, 24 115, 41 115, 43 115, 44 115, 45 115, 47 115, 48 115, 49 115, 50 115, 56 115, 57 115, 58 115, 60 115, 61 115, 62 115, 64 115, 66 115, 67 115, 68 115, 70 115, 83 115, 86 115, 87 115, 88 115, 94 115, 95 115, 96 115, 98 115, 99 115, 100 115, 102 115, 103 115, 104 115, 107 115, 110 115, 111 115, 112 115, 114 116, 25 116, 41 116, 44 116, 45 116, 46 116, 47 116, 49 116, 50 116, 51 116, 57 116, 58 116, 59 116, 61 116, 62 116, 63 116, 64 116, 67 116, 68 116, 69 116, 70 116, 84 116, 87 116, 88 116, 89 116, 95 116, 96 116, 97 116, 99 116, 100 116, 101 116, 103 116, 104 116, 105 116, 108 116, 111 116, 112 116, 113 116, 114 116, 115 117, 26 117, 41 117, 43 117, 45 117, 46 117, 47 117, 48 117, 50 117, 51 117, 56 117, 58 117, 59 117, 60 117, 62 117, 63 117, 64 117, 66 117, 68 117, 69 117, 70 117, 85 117, 86 117, 88 117, 89 117, 94 117, 96 117, 97 117, 98 117, 100 117, 101 117, 102 117, 104 117, 105 117, 109 117, 110 117, 112 117, 113 117, 114 117, 115 117, 116 118, 48 118, 49 118, 51 118, 52 118, 53 118, 55 118, 60 118, 61 118, 63 118, 71 118, 73 118, 74 118, 75 118, 77 118, 82 118, 83 118, 85 118, 86 118, 102 118, 103 118, 105 118, 106 118, 107 118, 109 118, 114 118, 115 118, 117 119, 34 119, 35 119, 40 119, 48 119, 49 119, 50 119, 52 119, 53 119, 54 119, 60 119, 61 119, 62 119, 64 119, 65 119, 70 119, 71 119, 72 119, 74 119, 75 119, 76 119, 82 119, 83 119, 84 119, 87 119, 102 119, 103 119, 104 119, 106 119, 107 119, 108 119, 114 119, 115 119, 116 119, 118 120, 27 120, 34 120, 35 120, 40 120, 49 120, 50 120, 51 120, 53 120, 54 120, 55 120, 61 120, 62 120, 63 120, 64 120, 65 120, 70 120, 71 120, 72 120, 73 120, 75 120, 76 120, 77 120, 83 120, 84 120, 85 120, 88 120, 103 120, 104 120, 105 120, 107 120, 108 120, 109 120, 115 120, 116 120, 
117 120, 118 120, 119 121, 34 121, 35 121, 40 121, 48 121, 50 121, 51 121, 52 121, 54 121, 55 121, 60 121, 62 121, 63 121, 64 121, 65 121, 70 121, 72 121, 73 121, 74 121, 76 121, 77 121, 82 121, 84 121, 85 121, 89 121, 102 121, 104 121, 105 121, 106 121, 108 121, 109 121, 114 121, 116 121, 117 121, 118 121, 119 121, 120 122, 36 122, 37 122, 39 122, 48 122, 49 122, 51 122, 52 122, 53 122, 55 122, 56 122, 57 122, 59 122, 66 122, 67 122, 69 122, 71 122, 73 122, 74 122, 75 122, 77 122, 78 122, 79 122, 81 122, 90 122, 102 122, 103 122, 105 122, 106 122, 107 122, 109 122, 110 122, 111 122, 113 122, 118 122, 119 122, 121 123, 34 123, 35 123, 36 123, 37 123, 38 123, 48 123, 49 123, 50 123, 52 123, 53 123, 54 123, 56 123, 57 123, 58 123, 64 123, 65 123, 66 123, 67 123, 68 123, 71 123, 72 123, 74 123, 75 123, 76 123, 78 123, 79 123, 80 123, 91 123, 102 123, 103 123, 104 123, 106 123, 107 123, 108 123, 110 123, 111 123, 112 123, 118 123, 119 123, 120 123, 122 124, 28 124, 34 124, 35 124, 37 124, 38 124, 39 124, 49 124, 50 124, 51 124, 53 124, 54 124, 55 124, 57 124, 58 124, 59 124, 64 124, 65 124, 67 124, 68 124, 69 124, 71 124, 72 124, 73 124, 75 124, 76 124, 77 124, 79 124, 80 124, 81 124, 92 124, 103 124, 104 124, 105 124, 107 124, 108 124, 109 124, 111 124, 112 124, 113 124, 119 124, 120 124, 121 124, 122 124, 123 125, 34 125, 35 125, 36 125, 38 125, 39 125, 48 125, 50 125, 51 125, 52 125, 54 125, 55 125, 56 125, 58 125, 59 125, 64 125, 65 125, 66 125, 68 125, 69 125, 72 125, 73 125, 74 125, 76 125, 77 125, 78 125, 80 125, 81 125, 93 125, 102 125, 104 125, 105 125, 106 125, 108 125, 109 125, 110 125, 112 125, 113 125, 118 125, 120 125, 121 125, 122 125, 123 125, 124 126, 29 126, 36 126, 37 126, 39 126, 52 126, 53 126, 55 126, 56 126, 57 126, 59 126, 60 126, 61 126, 63 126, 66 126, 67 126, 69 126, 74 126, 75 126, 77 126, 78 126, 79 126, 81 126, 82 126, 83 126, 85 126, 94 126, 106 126, 107 126, 109 126, 110 126, 111 126, 113 126, 114 126, 115 126, 117 126, 118 126, 122 126, 123 126, 125 127, 30 127, 35 127, 36 127, 37 127, 38 127, 40 127, 52 127, 53 127, 54 127, 56 127, 57 127, 58 127, 60 127, 61 127, 62 127, 65 127, 66 127, 67 127, 68 127, 70 127, 74 127, 75 127, 76 127, 78 127, 79 127, 80 127, 82 127, 83 127, 84 127, 95 127, 106 127, 107 127, 108 127, 110 127, 111 127, 112 127, 114 127, 115 127, 116 127, 119 127, 122 127, 123 127, 124 127, 126 128, 31 128, 35 128, 37 128, 38 128, 39 128, 40 128, 53 128, 54 128, 55 128, 57 128, 58 128, 59 128, 61 128, 62 128, 63 128, 65 128, 67 128, 68 128, 69 128, 70 128, 75 128, 76 128, 77 128, 79 128, 80 128, 81 128, 83 128, 84 128, 85 128, 96 128, 107 128, 108 128, 109 128, 111 128, 112 128, 113 128, 115 128, 116 128, 117 128, 120 128, 123 128, 124 128, 125 128, 126 128, 127 129, 32 129, 35 129, 36 129, 38 129, 39 129, 40 129, 52 129, 54 129, 55 129, 56 129, 58 129, 59 129, 60 129, 62 129, 63 129, 65 129, 66 129, 68 129, 69 129, 70 129, 74 129, 76 129, 77 129, 78 129, 80 129, 81 129, 82 129, 84 129, 85 129, 97 129, 106 129, 108 129, 109 129, 110 129, 112 129, 113 129, 114 129, 116 129, 117 129, 121 129, 122 129, 124 129, 125 129, 126 129, 127 129, 128 130, 36 130, 37 130, 39 130, 48 130, 49 130, 51 130, 56 130, 57 130, 59 130, 60 130, 61 130, 63 130, 66 130, 67 130, 69 130, 71 130, 73 130, 78 130, 79 130, 81 130, 82 130, 83 130, 85 130, 98 130, 102 130, 103 130, 105 130, 110 130, 111 130, 113 130, 114 130, 115 130, 117 130, 118 130, 119 130, 121 130, 122 130, 126 130, 127 130, 129 131, 34 131, 36 131, 37 131, 38 131, 40 131, 48 131, 49 131, 50 131, 56 131, 57 
131, 58 131, 60 131, 61 131, 62 131, 64 131, 66 131, 67 131, 68 131, 70 131, 71 131, 72 131, 78 131, 79 131, 80 131, 82 131, 83 131, 84 131, 99 131, 102 131, 103 131, 104 131, 110 131, 111 131, 112 131, 114 131, 115 131, 116 131, 118 131, 119 131, 120 131, 123 131, 126 131, 127 131, 128 131, 130 132, 33 132, 34 132, 37 132, 38 132, 39 132, 40 132, 49 132, 50 132, 51 132, 57 132, 58 132, 59 132, 61 132, 62 132, 63 132, 64 132, 67 132, 68 132, 69 132, 70 132, 71 132, 72 132, 73 132, 79 132, 80 132, 81 132, 83 132, 84 132, 85 132, 100 132, 103 132, 104 132, 105 132, 111 132, 112 132, 113 132, 115 132, 116 132, 117 132, 119 132, 120 132, 121 132, 124 132, 127 132, 128 132, 129 132, 130 132, 131 133, 34 133, 36 133, 38 133, 39 133, 40 133, 48 133, 50 133, 51 133, 56 133, 58 133, 59 133, 60 133, 62 133, 63 133, 64 133, 66 133, 68 133, 69 133, 70 133, 72 133, 73 133, 78 133, 80 133, 81 133, 82 133, 84 133, 85 133, 101 133, 102 133, 104 133, 105 133, 110 133, 112 133, 113 133, 114 133, 116 133, 117 133, 118 133, 120 133, 121 133, 125 133, 126 133, 128 133, 129 133, 130 133, 131 133, 132 134, 0 134, 4 134, 5 134, 6 134, 11 134, 27 134, 28 134, 33 134, 34 134, 71 134, 72 134, 73 134, 75 134, 76 134, 77 134, 83 134, 84 134, 85 134, 87 134, 88 134, 89 134, 91 134, 92 134, 93 134, 99 134, 100 134, 101 134, 119 134, 120 134, 121 134, 123 134, 124 134, 125 134, 131 134, 132 134, 133 135, 0 135, 1 135, 2 135, 3 135, 5 135, 6 135, 8 135, 9 135, 10 135, 27 135, 28 135, 30 135, 31 135, 32 135, 35 135, 71 135, 72 135, 73 135, 75 135, 76 135, 77 135, 79 135, 80 135, 81 135, 87 135, 88 135, 89 135, 91 135, 92 135, 93 135, 95 135, 96 135, 97 135, 119 135, 120 135, 121 135, 123 135, 124 135, 125 135, 127 135, 128 135, 129 135, 134 136, 1 136, 3 136, 7 136, 8 136, 10 136, 29 136, 30 136, 32 136, 36 136, 74 136, 75 136, 77 136, 78 136, 79 136, 81 136, 82 136, 83 136, 85 136, 90 136, 91 136, 93 136, 94 136, 95 136, 97 136, 98 136, 99 136, 101 136, 122 136, 123 136, 125 136, 126 136, 127 136, 129 136, 130 136, 131 136, 133 137, 0 137, 1 137, 2 137, 4 137, 6 137, 7 137, 8 137, 9 137, 11 137, 28 137, 29 137, 30 137, 31 137, 33 137, 37 137, 74 137, 75 137, 76 137, 78 137, 79 137, 80 137, 82 137, 83 137, 84 137, 90 137, 91 137, 92 137, 94 137, 95 137, 96 137, 98 137, 99 137, 100 137, 122 137, 123 137, 124 137, 126 137, 127 137, 128 137, 130 137, 131 137, 132 137, 135 137, 136 138, 0 138, 1 138, 2 138, 3 138, 4 138, 6 138, 8 138, 9 138, 10 138, 11 138, 28 138, 30 138, 31 138, 32 138, 33 138, 38 138, 75 138, 76 138, 77 138, 79 138, 80 138, 81 138, 83 138, 84 138, 85 138, 91 138, 92 138, 93 138, 95 138, 96 138, 97 138, 99 138, 100 138, 101 138, 123 138, 124 138, 125 138, 127 138, 128 138, 129 138, 131 138, 132 138, 133 138, 134 138, 135 138, 136 138, 137 139, 0 139, 2 139, 3 139, 4 139, 6 139, 7 139, 9 139, 10 139, 11 139, 28 139, 29 139, 31 139, 32 139, 33 139, 39 139, 74 139, 76 139, 77 139, 78 139, 80 139, 81 139, 82 139, 84 139, 85 139, 90 139, 92 139, 93 139, 94 139, 96 139, 97 139, 98 139, 100 139, 101 139, 122 139, 124 139, 125 139, 126 139, 128 139, 129 139, 130 139, 132 139, 133 139, 135 139, 136 139, 137 139, 138 140, 1 140, 2 140, 3 140, 4 140, 5 140, 8 140, 9 140, 10 140, 11 140, 27 140, 30 140, 31 140, 32 140, 33 140, 40 140, 71 140, 72 140, 73 140, 79 140, 80 140, 81 140, 83 140, 84 140, 85 140, 87 140, 88 140, 89 140, 95 140, 96 140, 97 140, 99 140, 100 140, 101 140, 119 140, 120 140, 121 140, 127 140, 128 140, 129 140, 131 140, 132 140, 133 140, 134 140, 135 140, 137 140, 138 140, 139 141, 0 141, 4 141, 5 
141, 6 141, 11 141, 12 141, 13 141, 14 141, 16 141, 17 141, 18 141, 24 141, 25 141, 26 141, 41 141, 71 141, 72 141, 73 141, 75 141, 76 141, 77 141, 83 141, 84 141, 85 141, 87 141, 88 141, 89 141, 91 141, 92 141, 93 141, 99 141, 100 141, 101 141, 103 141, 104 141, 105 141, 107 141, 108 141, 109 141, 115 141, 116 141, 117 141, 134 141, 135 141, 140 142, 0 142, 1 142, 2 142, 3 142, 5 142, 6 142, 8 142, 9 142, 10 142, 12 142, 13 142, 14 142, 16 142, 17 142, 18 142, 20 142, 21 142, 22 142, 42 142, 71 142, 72 142, 73 142, 75 142, 76 142, 77 142, 79 142, 80 142, 81 142, 87 142, 88 142, 89 142, 91 142, 92 142, 93 142, 95 142, 96 142, 97 142, 103 142, 104 142, 105 142, 107 142, 108 142, 109 142, 111 142, 112 142, 113 142, 134 142, 135 142, 137 142, 138 142, 139 142, 141 143, 1 143, 3 143, 7 143, 8 143, 10 143, 15 143, 16 143, 18 143, 19 143, 20 143, 22 143, 23 143, 24 143, 26 143, 43 143, 74 143, 75 143, 77 143, 78 143, 79 143, 81 143, 82 143, 83 143, 85 143, 90 143, 91 143, 93 143, 94 143, 95 143, 97 143, 98 143, 99 143, 101 143, 106 143, 107 143, 109 143, 110 143, 111 143, 113 143, 114 143, 115 143, 117 143, 136 143, 137 143, 139 144, 0 144, 1 144, 2 144, 4 144, 6 144, 7 144, 8 144, 9 144, 11 144, 15 144, 16 144, 17 144, 19 144, 20 144, 21 144, 23 144, 24 144, 25 144, 44 144, 74 144, 75 144, 76 144, 78 144, 79 144, 80 144, 82 144, 83 144, 84 144, 90 144, 91 144, 92 144, 94 144, 95 144, 96 144, 98 144, 99 144, 100 144, 106 144, 107 144, 108 144, 110 144, 111 144, 112 144, 114 144, 115 144, 116 144, 135 144, 136 144, 137 144, 138 144, 140 144, 142 144, 143 145, 0 145, 1 145, 2 145, 3 145, 4 145, 6 145, 8 145, 9 145, 10 145, 11 145, 16 145, 17 145, 18 145, 20 145, 21 145, 22 145, 24 145, 25 145, 26 145, 45 145, 75 145, 76 145, 77 145, 79 145, 80 145, 81 145, 83 145, 84 145, 85 145, 91 145, 92 145, 93 145, 95 145, 96 145, 97 145, 99 145, 100 145, 101 145, 107 145, 108 145, 109 145, 111 145, 112 145, 113 145, 115 145, 116 145, 117 145, 135 145, 137 145, 138 145, 139 145, 140 145, 141 145, 142 145, 143 145, 144 146, 0 146, 2 146, 3 146, 4 146, 6 146, 7 146, 9 146, 10 146, 11 146, 15 146, 17 146, 18 146, 19 146, 21 146, 22 146, 23 146, 25 146, 26 146, 46 146, 74 146, 76 146, 77 146, 78 146, 80 146, 81 146, 82 146, 84 146, 85 146, 90 146, 92 146, 93 146, 94 146, 96 146, 97 146, 98 146, 100 146, 101 146, 106 146, 108 146, 109 146, 110 146, 112 146, 113 146, 114 146, 116 146, 117 146, 135 146, 136 146, 138 146, 139 146, 140 146, 142 146, 143 146, 144 146, 145 147, 1 147, 2 147, 3 147, 4 147, 5 147, 8 147, 9 147, 10 147, 11 147, 12 147, 13 147, 14 147, 20 147, 21 147, 22 147, 24 147, 25 147, 26 147, 47 147, 71 147, 72 147, 73 147, 79 147, 80 147, 81 147, 83 147, 84 147, 85 147, 87 147, 88 147, 89 147, 95 147, 96 147, 97 147, 99 147, 100 147, 101 147, 103 147, 104 147, 105 147, 111 147, 112 147, 113 147, 115 147, 116 147, 117 147, 134 147, 137 147, 138 147, 139 147, 140 147, 141 147, 142 147, 144 147, 145 147, 146 148, 12 148, 14 148, 15 148, 16 148, 18 148, 23 148, 24 148, 26 148, 48 148, 86 148, 87 148, 89 148, 90 148, 91 148, 93 148, 98 148, 99 148, 101 148, 102 148, 103 148, 105 148, 106 148, 107 148, 109 148, 114 148, 115 148, 117 148, 118 148, 119 148, 121 148, 122 148, 123 148, 125 148, 130 148, 131 148, 133 149, 5 149, 6 149, 11 149, 12 149, 13 149, 15 149, 16 149, 17 149, 23 149, 24 149, 25 149, 27 149, 28 149, 33 149, 49 149, 86 149, 87 149, 88 149, 90 149, 91 149, 92 149, 98 149, 99 149, 100 149, 102 149, 103 149, 104 149, 106 149, 107 149, 108 149, 114 149, 115 149, 116 149, 118 149, 119 149, 120 
149, 122 149, 123 149, 124 149, 130 149, 131 149, 132 149, 141 149, 142 149, 147 149, 148 150, 5 150, 6 150, 11 150, 12 150, 13 150, 14 150, 16 150, 17 150, 18 150, 24 150, 25 150, 26 150, 27 150, 28 150, 33 150, 50 150, 87 150, 88 150, 89 150, 91 150, 92 150, 93 150, 99 150, 100 150, 101 150, 103 150, 104 150, 105 150, 107 150, 108 150, 109 150, 115 150, 116 150, 117 150, 119 150, 120 150, 121 150, 123 150, 124 150, 125 150, 131 150, 132 150, 133 150, 134 150, 141 150, 142 150, 147 150, 148 150, 149 151, 5 151, 6 151, 11 151, 13 151, 14 151, 15 151, 17 151, 18 151, 23 151, 25 151, 26 151, 27 151, 28 151, 33 151, 51 151, 86 151, 88 151, 89 151, 90 151, 92 151, 93 151, 98 151, 100 151, 101 151, 102 151, 104 151, 105 151, 106 151, 108 151, 109 151, 114 151, 116 151, 117 151, 118 151, 120 151, 121 151, 122 151, 124 151, 125 151, 130 151, 132 151, 133 151, 141 151, 142 151, 147 151, 148 151, 149 151, 150 152, 7 152, 8 152, 10 152, 12 152, 14 152, 15 152, 16 152, 18 152, 19 152, 20 152, 22 152, 29 152, 30 152, 32 152, 52 152, 86 152, 87 152, 89 152, 90 152, 91 152, 93 152, 94 152, 95 152, 97 152, 102 152, 103 152, 105 152, 106 152, 107 152, 109 152, 110 152, 111 152, 113 152, 118 152, 119 152, 121 152, 122 152, 123 152, 125 152, 126 152, 127 152, 129 152, 143 152, 144 152, 146 152, 148 152, 149 152, 151 153, 5 153, 6 153, 7 153, 8 153, 9 153, 12 153, 13 153, 15 153, 16 153, 17 153, 19 153, 20 153, 21 153, 27 153, 28 153, 29 153, 30 153, 31 153, 53 153, 86 153, 87 153, 88 153, 90 153, 91 153, 92 153, 94 153, 95 153, 96 153, 102 153, 103 153, 104 153, 106 153, 107 153, 108 153, 110 153, 111 153, 112 153, 118 153, 119 153, 120 153, 122 153, 123 153, 124 153, 126 153, 127 153, 128 153, 141 153, 142 153, 143 153, 144 153, 145 153, 148 153, 149 153, 150 153, 152 154, 5 154, 6 154, 8 154, 9 154, 10 154, 12 154, 13 154, 14 154, 16 154, 17 154, 18 154, 20 154, 21 154, 22 154, 27 154, 28 154, 30 154, 31 154, 32 154, 54 154, 87 154, 88 154, 89 154, 91 154, 92 154, 93 154, 95 154, 96 154, 97 154, 103 154, 104 154, 105 154, 107 154, 108 154, 109 154, 111 154, 112 154, 113 154, 119 154, 120 154, 121 154, 123 154, 124 154, 125 154, 127 154, 128 154, 129 154, 135 154, 141 154, 142 154, 144 154, 145 154, 146 154, 149 154, 150 154, 151 154, 152 154, 153 155, 5 155, 6 155, 7 155, 9 155, 10 155, 13 155, 14 155, 15 155, 17 155, 18 155, 19 155, 21 155, 22 155, 27 155, 28 155, 29 155, 31 155, 32 155, 55 155, 86 155, 88 155, 89 155, 90 155, 92 155, 93 155, 94 155, 96 155, 97 155, 102 155, 104 155, 105 155, 106 155, 108 155, 109 155, 110 155, 112 155, 113 155, 118 155, 120 155, 121 155, 122 155, 124 155, 125 155, 126 155, 128 155, 129 155, 141 155, 142 155, 143 155, 145 155, 146 155, 148 155, 150 155, 151 155, 152 155, 153 155, 154 156, 7 156, 8 156, 10 156, 15 156, 16 156, 18 156, 19 156, 20 156, 22 156, 23 156, 24 156, 26 156, 29 156, 30 156, 32 156, 56 156, 90 156, 91 156, 93 156, 94 156, 95 156, 97 156, 98 156, 99 156, 101 156, 106 156, 107 156, 109 156, 110 156, 111 156, 113 156, 114 156, 115 156, 117 156, 122 156, 123 156, 125 156, 126 156, 127 156, 129 156, 130 156, 131 156, 133 156, 136 156, 143 156, 144 156, 146 156, 148 156, 152 156, 153 156, 155 157, 6 157, 7 157, 8 157, 9 157, 11 157, 15 157, 16 157, 17 157, 19 157, 20 157, 21 157, 23 157, 24 157, 25 157, 28 157, 29 157, 30 157, 31 157, 33 157, 57 157, 90 157, 91 157, 92 157, 94 157, 95 157, 96 157, 98 157, 99 157, 100 157, 106 157, 107 157, 108 157, 110 157, 111 157, 112 157, 114 157, 115 157, 116 157, 122 157, 123 157, 124 157, 126 157, 127 157, 128 157, 
130 157, 131 157, 132 157, 137 157, 142 157, 143 157, 144 157, 145 157, 147 157, 149 157, 152 157, 153 157, 154 157, 156 158, 6 158, 8 158, 9 158, 10 158, 11 158, 16 158, 17 158, 18 158, 20 158, 21 158, 22 158, 24 158, 25 158, 26 158, 28 158, 30 158, 31 158, 32 158, 33 158, 58 158, 91 158, 92 158, 93 158, 95 158, 96 158, 97 158, 99 158, 100 158, 101 158, 107 158, 108 158, 109 158, 111 158, 112 158, 113 158, 115 158, 116 158, 117 158, 123 158, 124 158, 125 158, 127 158, 128 158, 129 158, 131 158, 132 158, 133 158, 138 158, 142 158, 144 158, 145 158, 146 158, 147 158, 150 158, 153 158, 154 158, 155 158, 156 158, 157 159, 6 159, 7 159, 9 159, 10 159, 11 159, 15 159, 17 159, 18 159, 19 159, 21 159, 22 159, 23 159, 25 159, 26 159, 28 159, 29 159, 31 159, 32 159, 33 159, 59 159, 90 159, 92 159, 93 159, 94 159, 96 159, 97 159, 98 159, 100 159, 101 159, 106 159, 108 159, 109 159, 110 159, 112 159, 113 159, 114 159, 116 159, 117 159, 122 159, 124 159, 125 159, 126 159, 128 159, 129 159, 130 159, 132 159, 133 159, 139 159, 142 159, 143 159, 145 159, 146 159, 147 159, 151 159, 152 159, 154 159, 155 159, 156 159, 157 159, 158 160, 7 160, 8 160, 10 160, 12 160, 14 160, 19 160, 20 160, 22 160, 23 160, 24 160, 26 160, 29 160, 30 160, 32 160, 60 160, 86 160, 87 160, 89 160, 94 160, 95 160, 97 160, 98 160, 99 160, 101 160, 102 160, 103 160, 105 160, 110 160, 111 160, 113 160, 114 160, 115 160, 117 160, 118 160, 119 160, 121 160, 126 160, 127 160, 129 160, 130 160, 131 160, 133 160, 143 160, 144 160, 146 160, 148 160, 149 160, 151 160, 152 160, 156 160, 157 160, 159 161, 5 161, 7 161, 8 161, 9 161, 11 161, 12 161, 13 161, 19 161, 20 161, 21 161, 23 161, 24 161, 25 161, 27 161, 29 161, 30 161, 31 161, 33 161, 61 161, 86 161, 87 161, 88 161, 94 161, 95 161, 96 161, 98 161, 99 161, 100 161, 102 161, 103 161, 104 161, 110 161, 111 161, 112 161, 114 161, 115 161, 116 161, 118 161, 119 161, 120 161, 126 161, 127 161, 128 161, 130 161, 131 161, 132 161, 141 161, 143 161, 144 161, 145 161, 147 161, 148 161, 149 161, 150 161, 153 161, 156 161, 157 161, 158 161, 160 162, 5 162, 8 162, 9 162, 10 162, 11 162, 12 162, 13 162, 14 162, 20 162, 21 162, 22 162, 24 162, 25 162, 26 162, 27 162, 30 162, 31 162, 32 162, 33 162, 62 162, 87 162, 88 162, 89 162, 95 162, 96 162, 97 162, 99 162, 100 162, 101 162, 103 162, 104 162, 105 162, 111 162, 112 162, 113 162, 115 162, 116 162, 117 162, 119 162, 120 162, 121 162, 127 162, 128 162, 129 162, 131 162, 132 162, 133 162, 140 162, 141 162, 144 162, 145 162, 146 162, 147 162, 149 162, 150 162, 151 162, 154 162, 157 162, 158 162, 159 162, 160 162, 161 163, 5 163, 7 163, 9 163, 10 163, 11 163, 13 163, 14 163, 19 163, 21 163, 22 163, 23 163, 25 163, 26 163, 27 163, 29 163, 31 163, 32 163, 33 163, 63 163, 86 163, 88 163, 89 163, 94 163, 96 163, 97 163, 98 163, 100 163, 101 163, 102 163, 104 163, 105 163, 110 163, 112 163, 113 163, 114 163, 116 163, 117 163, 118 163, 120 163, 121 163, 126 163, 128 163, 129 163, 130 163, 132 163, 133 163, 141 163, 143 163, 145 163, 146 163, 147 163, 148 163, 150 163, 151 163, 155 163, 156 163, 158 163, 159 163, 160 163, 161 163, 162 164, 0 164, 4 164, 12 164, 13 164, 14 164, 16 164, 17 164, 18 164, 24 164, 25 164, 26 164, 27 164, 28 164, 33 164, 64 164, 71 164, 72 164, 73 164, 75 164, 76 164, 77 164, 83 164, 84 164, 85 164, 103 164, 104 164, 105 164, 107 164, 108 164, 109 164, 115 164, 116 164, 117 164, 119 164, 120 164, 121 164, 123 164, 124 164, 125 164, 131 164, 132 164, 133 164, 134 164, 135 164, 140 164, 141 164, 149 164, 150 164, 151 164, 153 164, 
154 164, 155 164, 161 164, 162 164, 163 165, 0 165, 1 165, 2 165, 3 165, 12 165, 13 165, 14 165, 16 165, 17 165, 18 165, 20 165, 21 165, 22 165, 27 165, 28 165, 30 165, 31 165, 32 165, 65 165, 71 165, 72 165, 73 165, 75 165, 76 165, 77 165, 79 165, 80 165, 81 165, 103 165, 104 165, 105 165, 107 165, 108 165, 109 165, 111 165, 112 165, 113 165, 119 165, 120 165, 121 165, 123 165, 124 165, 125 165, 127 165, 128 165, 129 165, 134 165, 135 165, 137 165, 138 165, 139 165, 142 165, 149 165, 150 165, 151 165, 153 165, 154 165, 155 165, 157 165, 158 165, 159 165, 164 166, 1 166, 3 166, 15 166, 16 166, 18 166, 19 166, 20 166, 22 166, 23 166, 24 166, 26 166, 29 166, 30 166, 32 166, 66 166, 74 166, 75 166, 77 166, 78 166, 79 166, 81 166, 82 166, 83 166, 85 166, 106 166, 107 166, 109 166, 110 166, 111 166, 113 166, 114 166, 115 166, 117 166, 122 166, 123 166, 125 166, 126 166, 127 166, 129 166, 130 166, 131 166, 133 166, 136 166, 137 166, 139 166, 143 166, 152 166, 153 166, 155 166, 156 166, 157 166, 159 166, 160 166, 161 166, 163 167, 0 167, 1 167, 2 167, 4 167, 15 167, 16 167, 17 167, 19 167, 20 167, 21 167, 23 167, 24 167, 25 167, 28 167, 29 167, 30 167, 31 167, 33 167, 67 167, 74 167, 75 167, 76 167, 78 167, 79 167, 80 167, 82 167, 83 167, 84 167, 106 167, 107 167, 108 167, 110 167, 111 167, 112 167, 114 167, 115 167, 116 167, 122 167, 123 167, 124 167, 126 167, 127 167, 128 167, 130 167, 131 167, 132 167, 135 167, 136 167, 137 167, 138 167, 140 167, 144 167, 152 167, 153 167, 154 167, 156 167, 157 167, 158 167, 160 167, 161 167, 162 167, 165 167, 166 168, 0 168, 1 168, 2 168, 3 168, 4 168, 16 168, 17 168, 18 168, 20 168, 21 168, 22 168, 24 168, 25 168, 26 168, 28 168, 30 168, 31 168, 32 168, 33 168, 68 168, 75 168, 76 168, 77 168, 79 168, 80 168, 81 168, 83 168, 84 168, 85 168, 107 168, 108 168, 109 168, 111 168, 112 168, 113 168, 115 168, 116 168, 117 168, 123 168, 124 168, 125 168, 127 168, 128 168, 129 168, 131 168, 132 168, 133 168, 135 168, 137 168, 138 168, 139 168, 140 168, 145 168, 153 168, 154 168, 155 168, 157 168, 158 168, 159 168, 161 168, 162 168, 163 168, 164 168, 165 168, 166 168, 167 169, 0 169, 2 169, 3 169, 4 169, 15 169, 17 169, 18 169, 19 169, 21 169, 22 169, 23 169, 25 169, 26 169, 28 169, 29 169, 31 169, 32 169, 33 169, 69 169, 74 169, 76 169, 77 169, 78 169, 80 169, 81 169, 82 169, 84 169, 85 169, 106 169, 108 169, 109 169, 110 169, 112 169, 113 169, 114 169, 116 169, 117 169, 122 169, 124 169, 125 169, 126 169, 128 169, 129 169, 130 169, 132 169, 133 169, 135 169, 136 169, 138 169, 139 169, 140 169, 146 169, 152 169, 154 169, 155 169, 156 169, 158 169, 159 169, 160 169, 162 169, 163 169, 165 169, 166 169, 167 169, 168 170, 1 170, 2 170, 3 170, 4 170, 12 170, 13 170, 14 170, 20 170, 21 170, 22 170, 24 170, 25 170, 26 170, 27 170, 30 170, 31 170, 32 170, 33 170, 70 170, 71 170, 72 170, 73 170, 79 170, 80 170, 81 170, 83 170, 84 170, 85 170, 103 170, 104 170, 105 170, 111 170, 112 170, 113 170, 115 170, 116 170, 117 170, 119 170, 120 170, 121 170, 127 170, 128 170, 129 170, 131 170, 132 170, 133 170, 134 170, 137 170, 138 170, 139 170, 140 170, 147 170, 149 170, 150 170, 151 170, 157 170, 158 170, 159 170, 161 170, 162 170, 163 170, 164 170, 165 170, 167 170, 168 170, 169 mlpack-2.2.5/src/mlpack/tests/data/lars_dependent_x.csv000066400000000000000000000521171315013601400231330ustar00rootroot000000000000000.770543395308778,0.770543395308778,0.798744597705081,0.608949108747765,0.545321356039494,0.794599054148421 
0.958119979361072,0.958119979361072,0.0374610286671668,0.626840657554567,0.210911407135427,0.453840289963409 0.554797379299998,0.554797379299998,0.97612506756559,0.323186047142372,0.245417155558243,0.731659527402371 0.802059812704101,0.802059812704101,0.574135144706815,0.647878859890625,0.165924137691036,0.915119871729985 0.471793392440304,0.471793392440304,0.554373870138079,0.631778694922104,0.156506751198322,0.361025912454352 0.896709318272769,0.896709318272769,0.149119001114741,0.0486452835611999,0.0122742066159844,0.519625633722171 0.440505611943081,0.440505611943081,0.775265555595979,0.519994700793177,0.235483285970986,0.198502873303369 0.247927301563323,0.247927301563323,0.138449061429128,0.794573726132512,0.997875242726877,0.0078889480791986 0.676748670171946,0.676748670171946,0.943438759306446,0.217336730798706,0.489546146942303,0.970621104585007 0.869923679158092,0.869923679158092,0.526969622354954,0.625752114225179,0.291107058059424,0.785516276955605 0.320178238907829,0.320178238907829,0.919485602062196,0.174321409547701,0.362714490387589,0.874802699312568 0.47439336264506,0.47439336264506,0.133299250155687,0.0750944316387177,0.880165371811017,0.856184191768989 0.283276654081419,0.283276654081419,0.674808070994914,0.69905846635811,0.0187087233643979,0.37274225265719 0.68541223439388,0.68541223439388,0.589103810023516,0.834459997713566,0.49227444967255,0.531986056128517 0.578978698002174,0.578978698002174,0.496746773133054,0.421585951698944,0.278658676426858,0.353599831927568 0.243961337720975,0.243961337720975,0.946794684045017,0.891823311802,0.605879531474784,0.232594644417986 0.320262607187033,0.320262607187033,0.440011841943488,0.0300505138002336,0.37385322060436,0.0201348827686161 0.506977130891755,0.506977130891755,0.886579792480916,0.414973055943847,0.558295612921938,0.389327850891277 0.444202757207677,0.444202757207677,0.738163970876485,0.949507211335003,0.45813559833914,0.700902088312432 0.489585470175371,0.489585470175371,0.561272984836251,0.122781845275313,0.981761425733566,0.724682597909123 0.34476433112286,0.34476433112286,0.359726563794538,0.477582204155624,0.835301015758887,0.472603860544041 0.993986550020054,0.993986550020054,0.211531843291596,0.224698930280283,0.365933037595823,0.77385708806105 0.567668182076886,0.567668182076886,0.999351436505094,0.645848179003224,0.892188291996717,0.281611224636436 0.889694716781378,0.889694716781378,0.811041570268571,0.55332144536078,0.258389746770263,0.0651183763984591 0.81461025448516,0.81461025448516,0.671707708621398,0.302478610072285,0.331338283838704,0.571541028795764 0.350993102649227,0.350993102649227,0.218467470724136,0.982080390909687,0.67267514532432,0.160327741410583 0.600131849525496,0.600131849525496,0.197953358758241,0.877707025967538,0.741751604247838,0.0555056335870177 0.16573429456912,0.16573429456912,0.195367005188018,0.704614557558671,0.61587404133752,0.599304404575378 0.0812725948635489,0.0812725948635489,0.0533260083757341,0.928122595185414,0.119169038487598,0.658267140155658 0.21497818059288,0.21497818059288,0.371621503029019,0.943425714271143,0.439432439394295,0.840307253180072 0.799683186924085,0.799683186924085,0.393569524865597,0.158529347041622,0.11342703201808,0.914355636341497 0.247853476321325,0.247853476321325,0.441765338880941,0.052085273899138,0.0674383144360036,0.898010921431705 0.85085881431587,0.85085881431587,0.635435110889375,0.692502932623029,0.0629085372202098,0.830703402869403 0.396172950742766,0.396172950742766,0.769967890810221,0.662396527128294,0.265327289700508,0.483213061466813 
0.298370959470049,0.298370959470049,0.310864252503961,0.328508617123589,0.847264007199556,0.313291329424828 0.160010410705581,0.160010410705581,0.208954244153574,0.794401464983821,0.185781493084505,0.754146493505687 0.799551450647414,0.799551450647414,0.595569520490244,0.846403024857864,0.772957552457228,0.317673712270334 0.443326942622662,0.443326942622662,0.652574986219406,0.478257794864476,0.432310863398015,0.0438285605050623 0.946347675286233,0.946347675286233,0.396609399700537,0.960392333567142,0.12515452853404,0.028887702152133 0.239954357035458,0.239954357035458,0.592664403840899,0.810386102180928,0.955561304697767,0.454390789149329 0.494607799453661,0.494607799453661,0.683102818438783,0.940642626024783,0.742153347469866,0.457140894141048 0.561606136383489,0.561606136383489,0.824552145320922,0.160492844181135,0.291050506290048,0.497028856072575 0.636981911491603,0.636981911491603,0.0809724545106292,0.945439815986902,0.0318641343619674,0.176899010548368 0.300766218919307,0.300766218919307,0.217299768934026,0.630780449835584,0.804619520669803,0.382383213145658 0.330897266510874,0.330897266510874,0.147956276545301,0.716154860099778,0.803858371451497,0.818178624613211 0.858292472315952,0.858292472315952,0.294939268380404,0.399515164084733,0.129114920739084,0.570928036002442 0.105412760050967,0.105412760050967,0.659894781187177,0.956881555728614,0.799535702215508,0.222845279844478 0.199896053643897,0.199896053643897,0.977230369346216,0.0678093621972948,0.382451267447323,0.930019361665472 0.671980000566691,0.671980000566691,0.611456188140437,0.838611331069842,0.260001812828705,0.305959114339203 0.736317271599546,0.736317271599546,0.804108906304464,0.947173471096903,0.459708407521248,0.0959023812320083 0.14315973687917,0.14315973687917,0.962252646684647,0.383277770131826,0.957621598616242,0.503816117998213 0.202218254795298,0.202218254795298,0.0387519798241556,0.486300072167069,0.562795898644254,0.581493584671989 0.40826775948517,0.40826775948517,0.0513113860506564,0.161927720997483,0.108086388558149,0.257425453979522 0.849958138307557,0.849958138307557,0.743356617167592,0.69245832436718,0.641728761140257,0.221775696612895 0.949406941188499,0.949406941188499,0.824537160573527,0.969788218382746,0.424090675776824,0.767285527894273 0.187720352783799,0.187720352783799,0.101453577401116,0.67355727776885,0.23602771316655,0.781541871838272 0.0685224384069443,0.0685224384069443,0.635784528916702,0.53415762912482,0.595941075822338,0.239885379560292 0.72323187883012,0.72323187883012,0.0232293272856623,0.0389460402075201,0.319255773443729,0.0103388621937484 0.143260676413774,0.143260676413774,0.112624881556258,0.0623767210636288,0.32067215279676,0.731304446002468 0.927025411976501,0.927025411976501,0.260670081945136,0.90092084929347,0.699644683394581,0.502494215965271 0.176128917373717,0.176128917373717,0.685933414380997,0.958509972086176,0.274267208762467,0.854399499483407 0.378598502604291,0.378598502604291,0.881483748555183,0.486840357072651,0.558519888669252,0.265070944325998 0.97910336824134,0.97910336824134,0.744032583432272,0.398992688627914,0.0336542536970228,0.178522098809481 0.47415173240006,0.47415173240006,0.487124339211732,0.424214565195143,0.64435416366905,0.122807318810374 0.51716972165741,0.51716972165741,0.665786263300106,0.290303043089807,0.505876101786271,0.335594906937331 0.54721895698458,0.54721895698458,0.568629143992439,0.952686201548204,0.907047313638031,0.488997620996088 
0.587992592249066,0.587992592249066,0.871070827357471,0.0233417951967567,0.649243521271273,0.823274289956316 0.910697423387319,0.910697423387319,0.661839921725914,0.782616809476167,0.611684447852895,0.73676001210697 0.321067587006837,0.321067587006837,0.163303593173623,0.0877945604734123,0.140272407559678,0.505106888012961 0.410706030670553,0.410706030670553,0.303832090925425,0.379142401041463,0.435574732720852,0.241137827280909 0.167380342958495,0.167380342958495,0.187375672161579,0.770959843182936,0.238817942561582,0.394552333746105 0.000718161696568131,0.000718161696568131,0.162543963408098,0.541685610311106,0.528535610996187,0.635587439639494 0.619446818251163,0.619446818251163,0.654740107478574,0.524835488991812,0.920988468453288,0.839280831161886 0.0395362919662148,0.0395362919662148,0.115660971496254,0.375258939573541,0.72841140627861,0.248257958795875 0.596897850977257,0.596897850977257,0.0713481165003031,0.738359819166362,0.375155063346028,0.00847275019623339 0.0881966631859541,0.0881966631859541,0.226319527020678,0.412145281210542,0.406507096718997,0.901022934354842 0.429164422443137,0.429164422443137,0.135601942893118,0.380811020266265,0.243824498262256,0.566245457855985 0.72073832899332,0.72073832899332,0.198768118629232,0.140477485954762,0.172997357323766,0.60131739010103 0.884889899054542,0.884889899054542,0.634364293655381,0.923971947748214,0.574750137748197,0.510697678895667 0.705741208279505,0.705741208279505,0.803692424204201,0.969692119164392,0.662231606896967,0.109647983917966 0.303548135561869,0.303548135561869,0.459603104507551,0.0027192928828299,0.446989309974015,0.354090734850615 0.115147949429229,0.115147949429229,0.763615214731544,0.248555515427142,0.315104187931865,0.0552782923914492 0.622677482198924,0.622677482198924,0.799018179997802,0.326398127246648,0.524189503863454,0.839016374433413 0.620930234901607,0.620930234901607,0.903093484928831,0.327542515704408,0.194263850804418,0.552262911573052 0.807403031503782,0.807403031503782,0.866500288713723,0.172327949199826,0.797524248016998,0.320657665841281 0.261597996810451,0.261597996810451,0.318909951020032,0.173607711447403,0.795854905387387,0.913758810376748 0.428978954907507,0.428978954907507,0.925567992962897,0.138736971188337,0.766646008007228,0.592030816012993 0.818737738998607,0.818737738998607,0.775342132663354,0.354212455218658,0.346854257863015,0.27323359134607 0.119413774926215,0.119413774926215,0.787705763243139,0.655245966278017,0.993202771060169,0.541557133430615 0.14055386511609,0.14055386511609,0.234273051610216,0.573329999810085,0.779459054581821,0.246409290470183 0.581461552530527,0.581461552530527,0.546827974030748,0.517214486608282,0.0607388555072248,0.61647154041566 0.166419122600928,0.166419122600928,0.0393055712338537,0.76177074201405,0.680417035939172,0.408574270550162 0.928816596744582,0.928816596744582,0.685424251714721,0.92965946579352,0.496865136316046,0.260026418371126 0.728603375144303,0.728603375144303,0.926794296363369,0.819057745393366,0.603705764980987,0.9522351808846 0.447038050275296,0.447038050275296,0.325584794627503,0.79330649250187,0.848912167595699,0.735492380335927 0.225046696374193,0.225046696374193,0.377377944998443,0.741721166297793,0.772323156939819,0.999235172290355 0.0491630886681378,0.0491630886681378,0.34577715001069,0.748360154218972,0.338201173348352,0.45680531905964 0.634701209375635,0.634701209375635,0.432143525918946,0.165281996363774,0.958197456551716,0.763475358719006 
0.15241247182712,0.15241247182712,0.254186156904325,0.90391239291057,0.364452324807644,0.468085650121793 0.906389028998092,0.906389028998092,0.813865840202197,0.0967478179372847,0.909217240754515,0.102584184147418 0.506717499811202,0.506717499811202,0.845513387816027,0.202838632743806,0.353000021539629,0.453393256990239 0.508562345057726,0.508562345057726,0.372753552859649,0.177153343334794,0.120040463050827,0.157713126856834 0.104498882777989,0.104498882777989,0.315593787236139,0.499088249634951,0.890405811369419,0.841395126422867 0.585353479720652,0.585353479720652,0.676449340302497,0.838179897749797,0.0855822877492756,0.729468867415562 0.352760325418785,0.352760325418785,0.331040226854384,0.592209017369896,0.3968675727956,0.690579992718995 0.362486797850579,0.362486797850579,0.117484578164294,0.217327452264726,0.590144062414765,0.0744721973314881 0.309920646715909,0.309920646715909,0.186409746296704,0.0873714161571115,0.679943374125287,0.573747171089053 0.869199776090682,0.869199776090682,0.666699846740812,0.315070114564151,0.282651170855388,0.376006946898997 0.199786175973713,0.199786175973713,0.131453710375354,0.597511313389987,0.108056674944237,0.666857759701088 0.399972369195893,0.399972369195893,0.130327441962436,0.436175609240308,0.812146118609235,0.800018354086205 0.351122235413641,0.351122235413641,0.395375785185024,0.844494342571124,0.878622018266469,0.646628283429891 0.0488236227538437,0.0488236227538437,0.26489243353717,0.925526605453342,0.495759914861992,0.106758111622185 0.957536548608914,0.957536548608914,0.0577582712285221,0.55394846200943,0.945107186445966,0.261169746983796 0.153932305285707,0.153932305285707,0.170468609780073,0.326113213784993,0.193040325306356,0.255554487230256 0.766924206167459,0.766924206167459,0.0909049678593874,0.417826240416616,0.993216006085277,0.337030659196898 0.151809929637238,0.151809929637238,0.20154758868739,0.513795396778733,0.621785229071975,0.770663267932832 0.409166981698945,0.409166981698945,0.916486835340038,0.0710981278680265,0.590903421398252,0.379730458837003 0.608375463401899,0.608375463401899,0.940090198768303,0.295074652181938,0.945960321696475,0.101493935799226 0.736061909934506,0.736061909934506,0.318164042197168,0.447890881681815,0.835820836946368,0.359490954084322 0.64233170892112,0.64233170892112,0.193961575161666,0.157901215367019,0.443961360491812,0.243096087593585 0.402826209552586,0.402826209552586,0.150644771521911,0.53773302026093,0.185187296476215,0.832288629375398 0.405552322743461,0.405552322743461,0.755286786705256,0.127513157436624,0.930539330933243,0.379294647369534 0.129419900011271,0.129419900011271,0.284586386522278,0.058446784503758,0.372513838810846,0.812712400685996 0.335570019437,0.335570019437,0.571504134917632,0.306273883441463,0.416838316712528,0.557531502330676 0.312234999844804,0.312234999844804,0.444254707545042,0.0394677105359733,0.440904777264223,0.020945399068296 0.247299033217132,0.247299033217132,0.733821457251906,0.610647986875847,0.278235272271559,0.326258784625679 0.807046507718042,0.807046507718042,0.640676052076742,0.896434315945953,0.263186002150178,0.653507075272501 0.379803462186828,0.379803462186828,0.00354928523302078,0.693388707470149,0.638776563107967,0.991632496938109 0.705248493468389,0.705248493468389,0.6227061371319,0.459811052074656,0.0412854210007936,0.0257394362706691 0.0131496209651232,0.0131496209651232,0.321820007404312,0.212960618315265,0.485073666321114,0.62209526845254 
0.294309546006843,0.294309546006843,0.957886629970744,0.0720925810746849,0.0972754620015621,0.356298314174637 0.469378540525213,0.469378540525213,0.410474271979183,0.615953365107998,0.301463018637151,0.199949803529307 0.278468182310462,0.278468182310462,0.270293100504205,0.578160049160942,0.986931339139119,0.372385042021051 0.223071841523051,0.223071841523051,0.641179747879505,0.385627724928781,0.534257140709087,0.261503693880513 0.229620956117287,0.229620956117287,0.688168776454404,0.393260131357238,0.544614845653996,0.535374483559281 0.309520238079131,0.309520238079131,0.949012532597408,0.0395275058690459,0.451625783229247,0.659400541801006 0.308049103943631,0.308049103943631,0.695204212795943,0.924639535602182,0.213532728608698,0.984755293000489 0.587537573650479,0.587537573650479,0.782101542688906,0.0625752799678594,0.95176326436922,0.0323775873985142 0.340484286192805,0.340484286192805,0.72715583210811,0.295447092037648,0.0832722233608365,0.158721339423209 0.00525797298178077,0.00525797298178077,0.50199552741833,0.331868754001334,0.456781850894913,0.561971765942872 0.737251290120184,0.737251290120184,0.173577737063169,0.759537681238726,0.641526546794921,0.8635548572056 0.790815666085109,0.790815666085109,0.331423030002043,0.265257194172591,0.420581184793264,0.618206752464175 0.343021306209266,0.343021306209266,0.415855374187231,0.957754531642422,0.324394906172529,0.950096277054399 0.151467205490917,0.151467205490917,0.464836775325239,0.425251424312592,0.531590871978551,0.75175549974665 0.384887027787045,0.384887027787045,0.528386991703883,0.113531364826486,0.118156370008364,0.32806187751703 0.629953657276928,0.629953657276928,0.871463749324903,0.285663897637278,0.639753693714738,0.754417029675096 0.760863705538213,0.760863705538213,0.062380611198023,0.820329068927094,0.107289742445573,0.261314681731164 0.852924212580547,0.852924212580547,0.257427043048665,0.143486212007701,0.297988091828302,0.18872676207684 0.29967774101533,0.29967774101533,0.64874199219048,0.729367177234963,0.129041373962536,0.843802250921726 0.145089966477826,0.145089966477826,0.268982543842867,0.392117839306593,0.936481584096327,0.639356024097651 0.102559315040708,0.102559315040708,0.561309703625739,0.429783878847957,0.70515199447982,0.0963134735357016 0.321284720674157,0.321284720674157,0.997955115279183,0.222558073466644,0.645602829055861,0.0269630649127066 0.552068174118176,0.552068174118176,0.825408807490021,0.846672186395153,0.236213416792452,0.73613910167478 0.257994162617251,0.257994162617251,0.0505063484888524,0.755304714199156,0.845429901266471,0.0257019996643066 0.232751900330186,0.232751900330186,0.890681677963585,0.818943985272199,0.020829743007198,0.506976580945775 0.284186959965155,0.284186959965155,0.463541151490062,0.723432988859713,0.0739412454422563,0.819361584959552 0.233969841850922,0.233969841850922,0.729306100169197,0.0279890322126448,0.0919705745764077,0.783815417671576 0.318426341982558,0.318426341982558,0.104916631942615,0.730990814510733,0.143854098860174,0.359829638386145 0.197641223436221,0.197641223436221,0.277205696096644,0.985921697225422,0.650980514008552,0.602332003647462 0.308478910941631,0.308478910941631,0.830667185364291,0.390310686547309,0.295701578026637,0.794459346681833 0.466019284911454,0.466019284911454,0.53632109798491,0.298557824222371,0.6183152126614,0.350876598386094 0.522982559865341,0.522982559865341,0.0139162752311677,0.713703766930848,0.58265150594525,0.145391793223098 
0.888590852264315,0.888590852264315,0.0716590562369674,0.831634032307193,0.701939321821555,0.476233148714527 0.604137443006039,0.604137443006039,0.683762417407706,0.0189970252104104,0.734672857448459,0.538940965663642 0.335381280397996,0.335381280397996,0.516404886031523,0.637712598079816,0.817540523828939,0.138430034741759 0.0385837701614946,0.0385837701614946,0.306347566423938,0.381967377150431,0.351992441806942,0.159262034576386 0.278883800143376,0.278883800143376,0.675052077043802,0.299731208244339,0.471319406526163,0.696902097901329 0.809923141030595,0.809923141030595,0.914374560117722,0.953134258277714,0.629289220785722,0.763724039308727 0.516456580488011,0.516456580488011,0.223435196327046,0.0487272015307099,0.314375189365819,0.539996706647798 0.198877595830709,0.198877595830709,0.579201703891158,0.295771913137287,0.704956357600167,0.567292805062607 0.124305489007384,0.124305489007384,0.380636846879497,0.0677287306170911,0.935128577519208,0.617421944858506 0.47897565853782,0.47897565853782,0.286699899239466,0.961533318739384,0.4866793602705,0.5041830514092 0.416779418010265,0.416779418010265,0.296650541713461,0.41016675485298,0.403534589568153,0.706583806313574 0.303997287992388,0.303997287992388,0.326594652375206,0.567674678284675,0.747516687726602,0.213588877813891 0.457668769406155,0.457668769406155,0.229032607050613,0.817260816460475,0.126421087421477,0.870487443404272 0.573519257828593,0.573519257828593,0.769175143213943,0.356371976668015,0.740455557359383,0.0667041861452162 0.49594064662233,0.49594064662233,0.272376055130735,0.167576737236232,0.507399585330859,0.0207885678391904 0.990902705350891,0.990902705350891,0.421079489635304,0.122195026837289,0.786411653272808,0.223972936160862 0.119159578578547,0.119159578578547,0.484617685899138,0.159179983893409,0.526446465402842,0.877076966688037 0.00825249101035297,0.00825249101035297,0.461832728004083,0.546467618318275,0.231179421301931,0.426196962362155 0.270562664372846,0.270562664372846,0.582122593186796,0.98280820553191,0.728904620977119,0.361875669565052 0.720435785362497,0.720435785362497,0.658529957290739,0.0088436515070498,0.433530527865514,0.602684148121625 0.377534364815801,0.377534364815801,0.523364462191239,0.732090490171686,0.137234514812008,0.0407253163866699 0.754738048417494,0.754738048417494,0.035105864983052,0.565975194564089,0.926740822615102,0.210182754090056 0.865921065909788,0.865921065909788,0.130658007692546,0.458590539637953,0.415776170790195,0.689433455001563 0.69920409261249,0.69920409261249,0.455422467552125,0.675362611422315,0.699723488651216,0.0494503679219633 0.805330341681838,0.805330341681838,0.210509035270661,0.141314522596076,0.695562741486356,0.673588232370093 0.690989301307127,0.690989301307127,0.670222575776279,0.638796924613416,0.678210513433442,0.487536709289998 0.368577940156683,0.368577940156683,0.195236068451777,0.840230083558708,0.240584616316482,0.838123076362535 0.862264465074986,0.862264465074986,0.692369350232184,0.471990576013923,0.940210580825806,0.48527231416665 0.623960651224479,0.623960651224479,0.683608189923689,0.475773985031992,0.0870378867257386,0.723064525052905 0.123515208018944,0.123515208018944,0.0281406561844051,0.260851332684979,0.116678821155801,0.0838006648700684 0.515511596109718,0.515511596109718,0.162576802773401,0.428130541695282,0.739171740366146,0.416124096140265 0.631256699794903,0.631256699794903,0.954100469825789,0.684717376250774,0.846531821414828,0.393525664461777 
0.50005456013605,0.50005456013605,0.723552323877811,0.811237944057211,0.463678582571447,0.823814715491608 0.836317169480026,0.836317169480026,0.799004154046997,0.66016830294393,0.159381838515401,0.687262010294944 0.934401060920209,0.934401060920209,0.0537555175833404,0.897665447555482,0.657897339668125,0.926599261816591 0.901231835829094,0.901231835829094,0.14715939364396,0.391133429249749,0.408918454544619,0.297349939588457 0.409514611121267,0.409514611121267,0.802803199738264,0.695815495448187,0.886157537577674,0.619819510960951 0.650985729182139,0.650985729182139,0.247550260042772,0.401743698399514,0.284948233515024,0.199218056863174 mlpack-2.2.5/src/mlpack/tests/data/lars_dependent_y.csv000066400000000000000000000071041315013601400231300ustar00rootroot00000000000000-0.390961869386956 -0.662108632735908 -0.785457096062601 -0.449331900803372 -0.348649296443909 -1.36199251376092 -0.70229591941461 1.16603425238281 -0.619432117324322 -0.56444153143093 -0.148003480629995 0.729358019772917 -0.150851936778054 -0.101207775296643 -0.600859709084034 0.295580128207803 -0.656498439144343 -0.537937534507364 0.481975412694737 0.28878194373101 0.736231854418293 -0.835015887394547 -0.31504010502249 -1.71360143530183 -1.09557029488496 0.894629601622 0.276747205993161 1.39295740914531 1.48968757572584 1.42158754263073 -0.806623883312568 0.0800622182432562 -0.751037866808474 -0.151376914000139 0.581457782303914 1.20535438600928 -0.257638132199645 -0.584831652697176 -1.17487018601969 1.14776507811621 0.467618450289592 -0.999192211544141 -0.200733316596597 0.998950976878405 1.52844104659744 -0.911966092186049 1.10854223649949 0.00325751467607915 -0.55084393103607 -0.773959189653397 0.596143366303295 1.18740106606856 -0.340407341485843 -0.887310111662373 -0.562186620896682 1.21423257980496 0.59715467877686 -1.10115240910091 0.715207085479051 -0.0116611572448164 1.04898543120362 -0.328249563695863 -2.09107027878053 -0.244051756337285 -0.568351654801518 0.685664078220725 -0.551196405431256 -0.35217349906452 -0.0722649111412466 -0.0693891912233084 0.882193761412054 1.54182837414555 0.391471044626087 1.15719474921934 -0.143156185746193 1.31696245889179 0.196950188605115 -0.725452543236315 -0.394724327372387 -0.473603130783886 -0.262900037923828 -0.374973117839545 -0.354769138852134 -1.07088467665017 -1.19079648866318 1.0411154825706 -0.286112107569352 -1.43851730623282 1.16347255767323 1.08381756301969 -0.515326196560636 1.47861823206767 -0.856506424723193 -0.0090023553930223 1.1580501452554 1.68580815778114 1.09926331928 0.185408866964281 1.17743926728144 -1.51809465535916 -0.849716476164758 -0.934971309732646 1.70629763463512 -0.193925246829167 0.643095705192536 0.0394855381455272 0.534810921642929 -1.43137116660364 0.841399685712531 1.11806790158153 1.27212438825518 1.16550495289266 -0.212605973007157 0.296374805970117 0.123319525504485 1.40107644582167 -0.693088790634647 -0.814312215894461 -0.147085189353675 -0.633666329551488 0.59891175548546 -0.129044296452776 0.700246837455779 0.0379995286930352 -0.567406820366159 -0.0132774799130857 -0.441641674144194 1.56064155790955 -1.50636721472256 0.972010303754359 -1.02083936473355 -0.231865165755153 1.11024696519598 0.0940651285927743 0.325838771881536 -0.417499177856371 0.811625136528164 -0.910460558254272 -0.870683749672025 0.838110897457227 0.616538767935708 -0.609009230742231 1.13034772826359 0.940826609730721 -0.738411434926093 -0.451536442851648 -0.395174529170617 -1.33307440229692 0.454113327898085 1.40879297070205 0.464821013156325 -0.745400589192286 
-0.110520450863987 1.05994194140658 -0.00943516939878464 0.58482074784115 -0.293470759410411 0.492905235849321 1.56674607191235 0.0328466040082276 -0.200610032537952 0.381865671137348 0.160965742077678 -0.599426455097273 0.406515709822997 0.509706746786833 0.235133035341278 -0.188073323806748 -0.353249259758741 0.591064180247486 0.991031428100541 0.707744514103979 0.390075773000717 0.594191015465185 0.669799201423302 -0.752681938698515 -0.568492457969114 -1.27030528406613 0.839766572928056 0.725506291957572 0.950340574141592 -1.05434320052154 -0.368382870452479 0.158316809451208 -0.29869997408241 -0.429294184781611 -0.310704222181812 -0.247657031053677 0.986545827472582 -0.519424809375778 -0.645653095562011 0.186159746488556 0.389826383208856 -0.291839007288218 0.375069797970355 -0.964826341252774 0.559604409616441 -0.852221241919324 0.579960122006014 -0.663611729629338 mlpack-2.2.5/src/mlpack/tests/data/mnist_first250_training_4s_and_9s.tar.bz2000066400000000000000000002236371315013601400266410ustar00rootroot00000000000000BZh91AY&SYœK`š¯Iÿýxüýý}ÿÿÿÿÿÿÿÿ¾=xxx|øxüøøùùððñàèaT^ùÅI ; ( h*ÙT¥J¥6ÆÍ ª¥}µAÕ5UmªÖª•JHÛFUEI¡KfØLš©UU+fJ© i[hT¨ª†¢P„ŠRˆ"MÀURJIE(P)TP(Š €@@€”5¶)IB•*UU4¦–Ì… QJ5 -jR¶j”Û(‘ p ªà()I)UT)T¥*JR…UªÑ¶Õs5.«UJ[%MVµª5)"U@"Fóh ’ˆ;gFÍ‹&µ–3dÖ²Ëݹ¦„µ¦²k,»@¨€4€ô¨P ìÐÒ  š\é*€¸9€:9*(  ÐPh   @@kB”P E(R¡@*•Q{4 "d B@ÀjY¥IèC@÷À@¡ ˆTJ‰I€$(ÛODdÐÐI©¢‡ªF =CFƒLƒ@€ 4Ñh M4  †‰¥'”ÂaêLš“G¦Bzi¦M2zzI‘²€4Æm2ƒTü IE4di’ž¦@€z€ OD‰D  i©¦M)P@ 4“Õ)J2h€ 4MH& 5Lbh™LÈOJ{MšSG” €dÔÐPýPÚM þ×Ôë}J¶¬[F*ªÅ[EZÅ­ŠÚ­njµsU­ZŶÛZÅZÅ¢Ûhµ¬jÛlV5µEm‹m­­¢ÕmµÍ\«ªÔmª¨ÖÚÚÆÅ­ZmkrÕ[šµ‹khÕTm­QµÕ±jªZÆÆ­¨¶Ö¹ª®U[[E­µ‹mkF«m“mµ¶£Um­£mV¹m¶­\ÕZÖ¨ÚÛbµ¨ÚÛcZªUkš­ÊÛEmXÕ¶¶+mkmŠÖ­Í¶«›[l[kVÜÕ­«rÑ­jå¹µms\MZ¢­hª¨Ú¶U­mŠÖÆ5cU¨«X±VÕ«hÕµ¢ÛUQVÚ¶­Í£VÖ·+mµFÛmlmµ´UZÑ£cjÛh«Eµ«[›[F«šµ]ee•®[\ÖÆÚ£XªÑ­¢­m¹[WM¤µI­µËUr¢¨Û\­¹b¢­±­&ª-Ëô>Wjžv¨£–¤¬UµÍ¬m«F«nm¹[W6£wmÖÚ6ª‹V¢«X¶ÔY–̪*¤ÖÚåTV­–·+jå«2­¹¶Ö-®j¹µËUÖ‹kiúží½öÕW¥jðËÚ¹U\­rÕE¬m‹QknJkmV6Ô[îKjÞ-£U[ƈÕEhª­rÕrµªæÚæ£[ܪ6ÕsVÔlTÍmËUͶ®[V¹&­±µQmF¶¢J”ÕsWM­nmZ·5¢ªW*Õr©fÛ;«h¶®ZŠ´ZÔj×*«•´Tm®k–µ‹m²mEZ-ª¢Ú¹¹±mh´V¢Õk*6#EcE¶5[sj¹¨ªÜæÚŚܵKÓvi6‹ÆåTZ¤¨ÛQkš®j±¨É)¦I¶5I¢Æ­sk‘W(¶«š5ŠÑj•–J©6ܶںræÛTšÜ«¹ksT­6ÓM¢«Iµbխͨª·Msm®Ql[Zj²×’ºªŠÊ^1­ªæµµr¶Ü­nV«wuW6Ú‹[k›Z¹lkW-·5skQjƪëMm¥šÛfj­[ã­]¬´­&¨ª±£k+A²VÕª¾™­®m±l[bÕ¨ÖÔV£mbª“lm½wZJÓ,Ò£U£[AZHÖ›5Iª\Ý¢Í"´ÚZ›©\í¥‰ªl¶l©¥¦))SF“QƒR•;­v%,£Yn뛬©h–”iÛR•Œ²²…Qk”ÕJY©2,¦kº®\«–ÌÚ2%¢Ñ¤Û ’bŒXÆŠ¦ÀŒ !$$'×a$ Ÿç°AHBÈOjI ú¿ûÿkŸúôyÕü_ù¹¡òÙýÿ¿õDkÿ:ÿ]æ¬óñ¿ÃøST£LçýŸ÷¿‘ð•m²šÆªÏölÿfucbì?¹…XÑ¥ùQ­¥u+^ „ιãl_} ²5ÓZû¯oåªÃ†¥ÕF¥’Tx­ðæÏþ-(„¡-L’ÝÿþÜ›õUµ±>u°Ùdߤá|i$'ì°„=G«–,.©ãoü8?û~ÓUU4ëÒ#§P¸µs¾úV„f¯<©ž’ªÉEgª¶Œî:ù­ƒÝUø ' 3»c[*µßþ­Z¹ûþüå;éÇ Ye|’ÈRÓ£Ÿ®—¦”ãf&çÚÕ뱑whÖ/í•­]C¼÷ÃVÌü±ÆßÕývÔè„p('Ú¡é@ˆ_% W8ñU|'ãß©û(¥¸¾¬˜AÛß®C&á¿Öuñ®± F;íº÷G*h(åœ]œp¿¢ºóZßÂÝFUÃK/k3Ç)k}Ò§V0Zv°ŒÎŸÚ[ v<8vPíŽÔÊå¬ZÚ©m´Œ­»fÝb‰Ù³Z€Þ?¹fXÂT)ßFücáC§á”t•ð”O%piæXmÉEËD5íá+÷^(Ž®{ZÐ}ïÅ¢¦·Ëý¼5ÁôÕÜËCñ¸Ù£¢Ôÿϯ ¯ÚwUVOsÞ«0–‘ä"3ÖþŽë0« Úkê×^ÒhI¬¢Ü|µäô{R=¡@ß·†ÿOŠùcç©7c–Ê­»)³Á”êWZ'x[fü^´Q`ñº¹fú†ÜFtôký¼s¾3ßÊûµË9ÆÒù^MÚÔâ,±ñ³:ÕhÌµšš™‹¡ÓNÛ3ñÀBŶ¹á~D[A¢…§_n–‰f*îO&©!UkG;ól°yã-s³/W öì¨K kË+Ûc´Z%Ä€ öÔ¾ÿEBúx®m´Sƒú¡–8ã·IÓÉuišÑ>†ºªí¤Œ_„·Wf;ñÔËVßô,¨…"«4âá^Ó,šÈÕß09‘×ÏRãÌZߣéÝ<æô J+(ˆ– øÅ»ö/úßà2øçh2þ0òʇÿOþ{½ßëA:¬x R^£Ø¨Ä (Ü“[£?àL˜ ü¾ÏÏÕ²¸ïµcžXÃv{³«å÷¿‚Ýp?q-$?*XBuCò°:$7qÏ@Õ}ÓgJEÖÄÔrÌ)ÿó';:­¶›*±´ØCR¨‰@5lT…úRp¡§3}ªÕÄHˆÜ‹k*%†Í~xöË7ÇÈÀµ F¨’’ud/Û k®#’^“­-Í뎇A±ÄpÃÊ­ÃQR×D“bIšˆJ PU¢gþ`žØ\hÆÍZ“­£­e†—JèÊ6 3ëm" ÖE¬¹Î×B‡µT#4ÜâÞª•!9²ÍØáhX@û¾ª›nr°qL»6ó­ÏŽ|+EœÒÃäëµ”ÂE Á!Íɇ ¸Þûv5Ï 0ÜÛÎþigºbñÊ]Á Ƥ€ð¨ÊÕ%2Zë-ræIÙ;·Ó C¯ž–: džHhÃê¬&êÂÅ2LÅHŠE$;²açS4‡V˜²Ùi@Èns–£ÁñI%™,Ç•<5Xl‡•TÉ X͆æI †Ô% , l›Xj@³'va²¶²,†Wïcá…û²]»/zM©&S JJw3¢CŸ×j N¬Ø‹A%ÄÇ œïŠ™=ªÖ)ŽdšÙ Þé a­„68 q@RBD %3$’5DÜÅühÔòÕF(¤&iC’I&ætI6èT“»mV<’!ÿ+, O‚OÕîùkT±{$. 
mlpack-2.2.5/src/mlpack/tests/data/r10.txt000066400000000000000000000047461315013601400202340ustar00rootroot000000000000002.388061771742711503e+00 -7.998843946705324059e-01 0.000000000000000000e+00 -6.055455953973358785e-01 -8.978447303086828368e-01 -8.478705136616015992e-02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -7.998843946705324059e-01 3.426145759425113901e+00 -1.638176200441511243e-01 0.000000000000000000e+00 -4.796520189361042075e-01 -7.078326540851567206e-01 -9.383403917548253093e-01 -3.366186799343440228e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.638176200441511243e-01 1.048508772479271434e+00 -8.846911524351203093e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -6.055455953973358785e-01 0.000000000000000000e+00 -8.846911524351203093e-01 2.023397783192608657e+00 -5.331610353601525798e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -8.978447303086828368e-01 -4.796520189361042075e-01 0.000000000000000000e+00 -5.331610353601525798e-01 2.697683931814737868e+00 -5.616838162810112722e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -2.253423309287867493e-01 -8.478705136616015992e-02 -7.078326540851567206e-01 0.000000000000000000e+00 0.000000000000000000e+00 -5.616838162810112722e-01 2.784761835214184078e+00 -5.704882222059795005e-01 0.000000000000000000e+00 0.000000000000000000e+00 -8.599700912758765359e-01 0.000000000000000000e+00 -9.383403917548253093e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -5.704882222059795005e-01 1.890408204565853900e+00 -3.815795906050489794e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.366186799343440228e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.815795906050489794e-01 7.410893491077712447e-01 -2.289107856837824251e-02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
0.000000000000000000e+00 -2.289107856837824251e-02 2.289107856837824251e-02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -2.253423309287867493e-01 -8.599700912758765359e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.085312422204663285e+00 mlpack-2.2.5/src/mlpack/tests/data/rann_test_q_3_100.csv000066400000000000000000000071231315013601400227310ustar00rootroot000000000000000.339406815,0.843176636,0.472701471 0.212587646,0.351174901,0.81056695 0.605649993,0.45338097,0.623370668 0.269783539,0.482031883,0.36535861 0.725254282,0.477113042,0.042100268 0.529287901,0.776008587,0.303809928 0.098778217,0.318454787,0.983422857 0.685345453,0.837942768,0.540406673 0.503220972,0.268813629,0.41488501 0.160147626,0.255047893,0.04072469 0.564535197,0.943435462,0.597070812 0.663842864,0.276972185,0.02208704 0.077321401,0.032366881,0.826784604 0.794220519,0.319582218,0.266025433 0.466815953,0.864683732,0.684380976 0.680962499,0.009554527,0.484176898 0.257862396,0.875068776,0.326253946 0.695503778,0.695095604,0.955586038 0.569205007,0.662786497,0.036489177 0.604542917,0.250714055,0.232826165 0.928175028,0.871769923,0.327107027 0.362809806,0.270846833,0.917535106 0.567471988,0.09223176,0.018232595 0.30294,0.197331083,0.676067984 0.136497436,0.991079028,0.640906359 0.490191642,0.321877535,0.210121475 0.886240693,0.124811844,0.109638108 0.487537807,0.474289999,0.34183089 0.038698268,0.515865087,0.984297254 0.437309222,0.534489172,0.792665419 0.898099395,0.461121197,0.785225662 0.256850927,0.840446806,0.056158684 0.335408063,0.806637161,0.913551388 0.11329872,0.670392847,0.333361274 0.954403847,0.024104509,0.325578493 0.824152332,0.614355433,0.271931013 0.729647547,0.666093053,0.579723184 0.25675029,0.94037263,0.530553224 0.799877963,0.555666351,0.056606945 0.213120693,0.763046224,0.341926361 0.975873714,0.554796483,0.049489218 0.422782321,0.375502502,0.875514176 0.732474122,0.920181004,0.273895723 0.288070185,0.878065303,0.57017629 0.269706239,0.854626516,0.607709975 0.615118638,0.006748605,0.00278243 0.655373608,0.348029869,0.909502319 0.358287814,0.419322455,0.362741982 0.152473842,0.659459939,0.497284571 0.930791658,0.934132013,0.150924236 0.792977546,0.953203388,0.465884431 0.971953827,0.268751729,0.220474277 0.244730747,0.056636753,0.088649766 0.873554351,0.305649442,0.91790044 0.26662478,0.221646762,0.310857157 0.659541537,0.93018384,0.139339275 0.833616742,0.833734413,0.551027856 0.43405195,0.874582065,0.996443541 0.442896336,0.118977275,0.03127628 0.388886541,0.976070927,0.294801481 0.14757794,0.195944854,0.129778502 0.73209291,0.551685931,0.218866346 0.85393572,0.675733762,0.501776114 0.804291505,0.746786474,0.94053733 0.199998362,0.403471102,0.614783956 0.302029244,0.084831174,0.043490422 0.458371115,0.076068613,0.940418665 0.122287089,0.867270578,0.612001352 0.423331474,0.370629389,0.407480574 0.400056969,0.919523609,0.940521669 0.74852813,0.872400563,0.915423635 0.614934326,0.67621724,0.382278246 0.0184522,0.545825352,0.74127138 0.937453855,0.175662201,0.666301896 0.504358818,0.251308945,0.849159677 0.397410107,0.973000161,0.648601332 0.398342217,0.110698975,0.916968596 0.464980239,0.683124011,0.070633362 0.787030874,0.393777381,0.731973049 0.612845512,0.893440416,0.475331995 0.241219407,0.792282417,0.389574277 0.465756798,0.552685716,0.092092299 0.908028882,0.837528446,0.794160948 0.552741674,0.898614081,0.764312365 0.607116253,0.958698621,0.334887326 0.322583246,0.541387861,0.879874555 
0.124522558,0.229074642,0.510214096 0.049900273,0.471371867,0.367698395 0.261657863,0.105228571,0.748191349 0.216818324,0.700885804,0.34479269 0.896337659,0.634574468,0.203599217 0.961150989,0.920563011,0.795999477 0.120635447,0.744570376,0.107177572 0.696406743,0.788342315,0.173664558 0.577700329,0.493151732,0.989211395 0.270346683,0.586765585,0.208832269 0.171412097,0.116618251,0.53141933 0.596260532,0.819973735,0.531503373 0.120665467,0.556332466,0.643268746 0.546751646,0.563896374,0.079856633 mlpack-2.2.5/src/mlpack/tests/data/rann_test_qr_ranks.csv000066400000000000000000012520201315013601400235060ustar00rootroot00000000000000797,104,105,552,469,788,257,26,42,565,621,640,78,670,595,28,733,714,337,809,278,284,429,92,468,612,449,496,234,110,85,8,150,199,174,819,76,743,716,600,642,647,235,277,292,81,128,446,212,75,467,806,838,753,301,708,151,323,213,535,273,786,651,181,427,474,27,362,828,381,524,375,200,271,342,67,608,205,664,790,486,413,276,587,424,685,214,7,137,118,23,861,17,732,321,328,772,80,182,684,333,179,436,525,722,577,589,349,187,217,758,329,262,593,258,710,372,300,416,652,750,351,655,164,3,471,19,767,49,221,238,518,885,385,397,693,198,634,557,129,254,532,550,835,746,345,215,796,222,675,196,546,638,580,696,153,850,209,163,694,854,158,692,867,403,832,117,763,404,426,616,399,601,868,456,393,418,177,592,377,636,659,514,628,485,679,239,444,721,737,757,144,411,180,896,56,246,202,366,253,442,170,145,545,874,63,453,310,100,367,70,77,688,816,846,89,15,434,6,148,673,751,690,237,352,34,373,95,282,327,852,13,649,114,102,785,646,259,502,648,448,774,139,668,2,126,152,609,127,388,249,686,571,161,160,368,729,794,526,760,464,143,748,359,804,374,36,255,66,801,603,707,624,309,555,335,830,635,111,122,203,529,241,447,261,572,484,54,69,225,405,762,831,480,894,414,521,172,438,369,516,350,898,131,734,759,306,155,396,82,567,51,48,304,274,761,791,9,884,515,883,627,226,764,216,862,727,169,483,415,162,562,798,827,540,44,614,88,702,475,826,845,224,86,789,607,64,229,299,171,575,24,841,519,522,877,899,395,458,147,305,156,800,834,599,31,99,166,314,364,778,263,303,500,534,585,508,297,58,428,98,389,491,260,149,814,382,848,390,681,718,735,121,116,268,730,869,472,73,597,12,793,663,781,863,400,588,72,223,620,645,459,626,60,220,715,527,644,307,108,71,402,386,285,662,554,512,623,824,586,346,94,886,736,879,617,725,452,598,39,190,115,547,146,859,671,293,481,505,840,194,749,811,613,712,706,219,860,361,50,817,218,178,625,371,676,357,312,654,783,820,45,272,878,279,360,661,441,542,206,517,744,432,493,590,14,488,201,583,189,717,876,723,875,563,65,799,829,594,822,185,175,591,615,842,509,643,142,672,825,705,511,316,870,633,596,68,10,839,719,106,770,391,83,33,40,296,370,890,101,52,631,167,677,602,120,576,419,784,611,720,5,533,455,503,322,243,606,233,851,270,46,47,813,457,808,498,41,792,417,887,897,266,564,836,188,682,275,265,711,756,339,341,889,506,543,343,805,865,691,336,248,280,812,32,454,641,325,251,581,107,765,703,288,439,423,138,168,37,666,408,742,465,537,435,176,795,132,460,476,704,422,856,74,881,782,495,55,544,191,93,334,683,406,466,425,192,227,331,741,356,231,38,383,445,893,207,844,549,353,745,281,689,499,747,398,295,513,440,256,470,30,319,726,125,35,379,298,656,269,630,242,739,90,849,59,818,888,443,815,482,650,821,528,308,629,724,103,245,324,570,347,531,823,135,678,769,11,91,57,140,392,43,232,315,556,61,728,487,97,211,409,740,291,866,421,700,193,79,230,882,294,584,561,568,433,637,358,354,186,123,873,340,494,780,136,29,462,431,247,208,376,289,559,197,619,667,283,573,53,610,290,384,755,578,776,286,25,560,313,463,810,87,
410,510,843,124,858,658,330,803,489,1,699,501,697,287,332,900,569,520,365,880,497,895,437,109,380,21,582,119,523,318,695,837,4,787,348,775,558,632,451,687,657,311,320,738,210,204,18,731,669,551,864,892,240,618,134,250,891,773,16,566,184,355,236,477,857,574,553,653,777,228,344,478,363,872,622,195,771,252,605,660,461,579,302,802,401,267,530,639,62,807,244,326,420,548,853,165,754,536,541,473,430,709,674,507,20,713,113,338,479,159,407,173,779,387,84,130,871,183,492,317,112,833,450,855,412,698,539,504,22,604,665,133,768,680,766,141,378,96,701,157,847,752,394,538,264,490,154 263,743,170,39,688,258,212,173,405,22,774,660,402,596,403,238,440,25,848,83,455,156,648,351,812,302,252,45,764,90,509,288,111,143,499,828,182,697,481,149,220,340,711,645,87,154,108,620,99,359,427,88,561,810,733,84,112,38,744,861,852,543,393,456,837,806,315,257,107,485,197,598,410,551,327,354,279,349,126,187,875,330,61,593,663,451,675,504,778,169,559,763,202,588,669,323,460,132,309,537,44,338,37,374,70,796,563,382,716,395,458,667,525,243,856,540,49,421,165,76,527,568,21,301,528,47,412,503,305,441,555,18,850,342,10,313,819,426,720,239,56,482,592,585,50,148,274,53,815,823,798,714,142,353,209,384,755,397,60,30,715,548,780,227,882,105,186,851,110,69,706,201,505,742,289,729,684,267,241,372,898,746,158,371,753,665,632,873,152,508,334,794,203,381,885,167,221,788,52,534,619,404,519,240,652,248,443,436,331,364,192,153,380,831,377,682,318,524,577,251,97,544,804,230,120,487,277,686,809,58,704,450,814,463,493,880,206,869,863,502,758,494,249,138,449,345,420,496,659,464,78,28,218,820,54,575,123,437,888,476,255,556,344,625,845,881,636,807,179,74,724,506,839,7,583,401,459,130,523,247,275,689,367,510,295,24,846,256,387,538,705,92,89,325,572,457,618,245,121,34,690,36,895,67,638,434,399,375,631,467,554,413,307,805,166,347,668,571,465,254,721,35,250,134,46,394,824,282,64,576,447,627,215,172,141,466,737,498,834,843,727,109,835,300,552,756,520,829,217,428,5,507,642,896,566,386,693,29,723,677,854,647,292,535,333,430,287,392,666,767,199,692,140,836,591,643,606,871,317,536,348,549,745,91,115,185,626,335,557,362,322,630,735,196,226,783,847,801,605,2,358,611,608,676,271,11,622,409,624,356,484,587,864,474,102,122,787,357,553,268,297,518,687,701,722,424,12,180,790,23,768,312,176,62,514,19,607,604,521,303,266,259,781,131,695,900,595,281,63,406,515,649,491,601,417,284,157,146,472,707,461,42,136,27,709,868,93,135,698,71,95,171,754,785,826,623,296,681,228,462,773,584,438,234,224,600,581,802,183,363,759,336,674,818,730,661,775,96,411,479,237,190,82,513,874,489,899,343,32,235,308,352,470,486,855,337,453,539,326,75,565,683,533,246,653,483,779,454,772,771,589,244,651,139,678,278,883,33,703,4,163,550,422,293,242,6,822,889,376,639,81,314,211,560,124,205,77,542,369,886,558,713,865,884,594,391,200,155,740,320,144,784,448,821,290,270,731,708,789,368,310,634,232,654,599,355,897,51,161,646,116,680,478,516,808,261,614,286,210,15,68,570,160,1,332,739,85,757,617,407,98,269,717,748,858,866,236,526,734,189,752,204,299,878,129,439,629,766,791,94,872,844,501,621,894,475,66,655,578,260,324,827,782,612,444,119,477,423,319,164,750,425,671,760,770,272,101,765,662,184,712,283,857,562,738,615,72,545,859,672,726,265,609,650,316,795,125,195,751,547,842,31,408,670,429,373,341,445,416,350,691,9,396,749,431,495,580,635,114,419,280,264,736,229,876,3,679,830,747,103,893,813,57,43,891,100,291,541,769,797,816,414,13,719,400,616,298,793,14,641,546,590,79,777,222,710,385,117,346,490,811,762,531,718,208,128,379,360,728,471,673,162,276,702,294,799,435,776,20,150,361,233,389,867,17,841,792,231,72
5,862,613,610,574,418,694,398,567,188,446,838,133,306,512,370,800,285,532,40,147,193,832,432,637,480,16,321,442,80,633,388,597,644,657,383,853,833,468,602,113,586,860,656,273,86,365,825,175,685,564,127,219,194,511,253,517,877,892,55,699,433,817,304,26,415,664,198,500,177,65,732,8,390,311,225,214,159,887,118,696,329,178,700,579,530,339,41,803,522,879,104,145,213,207,840,106,181,137,216,469,569,473,492,151,366,573,223,488,741,191,262,582,529,603,890,628,48,168,497,59,174,786,378,73,870,640,452,849,328,658,761 191,507,96,398,677,604,468,173,289,434,560,169,783,833,670,84,275,499,682,554,873,263,149,333,436,433,11,598,892,128,828,419,31,586,625,559,181,500,832,65,63,56,364,205,588,57,40,283,197,715,49,672,295,568,665,602,393,346,372,693,726,192,86,125,619,387,526,718,525,804,73,109,110,345,807,271,412,13,135,731,735,232,519,394,417,208,882,669,774,38,483,415,218,660,199,99,268,98,43,335,378,171,77,732,664,334,217,859,894,667,473,336,307,124,773,448,337,643,17,685,131,421,528,36,529,68,680,794,265,509,267,155,709,831,358,113,683,819,438,95,459,721,592,795,488,70,114,703,868,704,876,690,316,514,648,108,890,352,260,652,880,827,570,456,796,762,225,724,91,247,659,373,555,874,7,887,222,172,127,58,867,475,168,195,245,301,673,700,224,214,119,870,157,163,846,382,452,512,51,90,864,736,469,671,482,502,675,779,30,839,466,228,72,727,266,472,112,50,567,322,178,286,376,160,21,342,258,480,899,580,771,696,401,849,845,713,220,861,684,158,505,811,338,404,356,787,822,186,881,688,41,647,47,842,253,284,383,852,784,465,399,252,639,231,547,810,463,517,53,591,613,451,556,202,496,14,276,144,351,331,4,605,800,691,102,237,589,329,123,595,558,367,761,305,540,689,133,239,759,229,584,312,872,190,321,834,152,444,390,120,327,360,60,698,385,151,236,524,450,147,484,294,768,734,355,332,743,477,542,844,818,350,374,758,521,772,309,143,656,717,427,649,863,314,248,409,308,723,405,104,354,296,738,862,76,449,720,410,621,348,900,293,201,223,105,303,234,130,302,898,249,264,2,633,478,430,531,850,52,185,616,69,297,494,29,797,198,325,722,132,100,368,486,579,645,585,752,406,310,464,593,471,725,609,361,422,455,274,851,44,573,194,641,453,281,188,546,54,563,653,650,66,847,583,552,129,527,111,640,551,878,479,853,400,370,575,777,180,184,601,87,565,359,71,855,891,238,67,581,282,93,366,183,875,221,177,391,564,805,572,836,292,418,597,814,574,12,655,577,798,523,117,403,536,606,765,751,174,407,802,380,243,371,8,612,711,454,603,339,663,328,445,211,747,504,594,789,569,74,279,487,5,121,216,699,39,895,755,319,313,242,692,78,340,754,745,189,661,261,678,829,728,816,145,492,187,446,85,520,756,362,432,886,126,837,657,767,561,493,377,533,634,55,19,23,241,897,753,614,298,16,59,146,627,22,763,397,285,458,830,447,668,840,748,306,83,97,537,635,571,429,408,869,706,20,457,291,347,510,92,582,381,25,694,788,18,843,259,543,835,141,793,215,193,676,46,244,553,103,530,600,618,140,442,10,413,485,871,545,801,204,35,541,610,716,615,159,815,210,662,532,739,330,776,26,162,857,437,646,290,742,889,251,138,854,535,317,515,423,712,164,896,697,809,156,629,45,24,240,212,246,167,481,476,702,203,142,491,161,6,235,566,823,856,402,737,686,474,757,518,885,62,273,730,27,638,539,280,879,624,632,506,311,411,544,549,443,386,179,226,508,440,388,785,176,94,516,420,607,341,416,654,764,33,719,137,511,826,666,81,841,636,148,707,786,165,708,461,884,596,808,357,288,701,136,256,395,522,431,590,630,106,435,389,599,230,651,257,28,631,623,501,170,893,48,323,42,467,206,34,695,343,61,272,182,548,269,562,620,82,760,153,375,803,576,674,749,1,729,578,304,790,379,626,538,369,227,15,498,497,426,154,534,353,76
6,324,687,428,318,116,733,658,489,209,196,80,490,460,824,166,414,791,858,349,746,740,79,866,9,470,813,611,587,315,637,679,207,877,88,64,75,254,134,744,792,780,817,557,396,825,617,32,344,439,775,705,622,107,503,233,513,250,200,363,424,37,770,838,462,806,219,213,270,812,750,326,365,769,714,320,101,122,644,628,441,300,550,115,821,392,277,262,3,175,883,495,287,255,118,741,89,278,820,799,860,681,425,848,139,642,781,778,299,782,384,865,608,710,150,888 702,573,13,640,868,369,619,122,42,283,409,788,404,233,129,74,839,568,631,676,728,11,510,8,782,166,515,595,367,167,559,200,176,332,41,747,12,887,247,461,603,670,333,363,235,150,78,799,375,455,658,613,683,622,210,385,272,96,585,874,695,664,470,121,586,721,358,714,703,92,637,579,89,98,745,20,164,304,596,418,666,49,270,271,821,378,321,355,322,36,519,797,72,289,636,633,845,157,67,342,444,575,227,102,447,748,352,760,425,35,341,759,87,711,648,326,88,787,454,348,734,109,638,66,223,364,387,286,393,22,678,395,820,769,190,746,550,189,856,380,154,110,174,407,546,493,14,671,388,893,391,243,249,124,258,63,477,17,203,522,469,193,888,770,766,647,376,649,516,90,273,30,169,517,494,260,459,3,716,161,871,878,135,810,680,521,113,737,700,507,717,428,651,33,858,153,615,618,152,296,168,667,672,128,890,320,64,792,256,777,204,31,646,644,742,578,158,524,365,2,463,853,735,5,131,475,25,610,501,311,530,290,740,473,361,783,265,602,642,814,847,293,500,269,216,410,601,817,483,47,266,503,354,467,139,763,398,330,849,356,46,587,268,732,789,718,392,605,137,635,305,312,656,264,195,405,838,353,83,478,162,244,690,79,582,329,831,15,81,765,840,555,687,77,889,51,729,1,597,440,867,91,879,115,544,284,71,639,212,278,837,508,197,351,590,778,796,224,863,144,818,359,492,429,177,704,894,9,628,130,655,846,512,548,141,464,560,343,609,514,691,677,465,4,843,488,520,547,645,509,527,399,476,811,627,857,897,472,862,482,567,328,652,800,741,441,460,543,736,323,584,533,252,861,855,860,173,594,138,757,44,574,795,401,68,498,316,674,841,412,580,870,382,178,70,542,744,780,370,497,292,505,238,442,775,379,864,495,192,650,199,282,679,52,23,577,865,662,60,149,317,626,186,202,886,773,499,660,629,340,263,16,809,518,829,423,267,758,816,165,474,462,823,257,485,881,701,184,220,668,443,541,793,241,403,390,496,762,94,432,366,254,37,653,179,785,242,511,885,709,726,40,396,804,643,111,215,669,107,117,824,569,801,381,155,277,160,698,171,39,654,835,708,719,869,239,458,712,722,848,18,142,384,822,794,583,866,562,563,771,338,86,229,784,892,145,103,236,776,438,240,373,105,7,394,294,715,213,854,334,486,188,426,554,231,452,727,415,545,491,314,288,368,253,299,159,468,827,24,844,182,143,21,414,397,457,136,386,406,710,834,895,598,779,699,504,607,56,225,245,413,665,791,812,581,101,681,697,768,731,752,198,75,445,300,318,805,62,536,209,28,337,424,684,360,132,437,591,170,571,526,313,641,371,319,535,689,374,851,106,570,599,657,420,815,772,112,422,733,350,431,632,259,696,433,134,226,466,331,634,774,529,180,439,803,880,303,621,222,80,502,453,402,147,813,612,484,553,750,6,692,119,207,883,325,19,538,790,675,324,630,661,899,532,767,206,725,876,564,566,251,480,730,183,127,430,344,372,298,57,877,279,140,882,27,211,362,389,58,421,255,262,479,221,688,400,123,456,156,593,624,53,357,65,806,275,884,490,349,347,833,448,540,592,850,85,181,807,274,565,531,781,148,219,707,336,29,435,436,126,54,411,449,551,539,808,191,506,487,97,481,310,201,339,383,588,552,434,755,446,175,723,306,743,751,720,38,685,558,45,346,693,248,896,427,620,307,217,898,749,287,114,828,76,875,798,234,151,73,706,93,754,43,276,589,120,335,451,549,232,891,61,345,825,125,616,301,250,172,308,
417,246,208,842,832,556,534,309,230,852,900,450,116,55,32,537,673,471,617,327,623,739,187,686,104,713,830,302,663,297,753,606,576,69,214,604,377,600,205,761,561,34,764,611,659,513,826,738,281,291,146,572,802,108,237,625,694,416,756,84,523,118,315,608,95,528,82,10,218,872,59,185,194,26,408,50,836,614,557,133,525,285,196,786,280,705,261,295,100,99,419,228,163,724,873,819,859,48,682,489 660,345,322,851,839,594,829,505,185,705,6,393,748,357,358,338,759,835,110,859,891,378,93,233,301,353,491,876,553,560,785,557,484,787,265,78,371,800,494,552,573,524,46,54,752,459,408,587,700,760,471,848,354,21,70,754,709,603,226,580,250,326,269,71,26,166,689,889,847,431,664,203,75,38,892,192,370,276,673,739,42,155,763,51,654,188,588,595,239,308,527,303,462,201,259,609,737,508,133,125,798,616,528,444,817,159,69,894,662,389,273,514,43,712,209,150,572,863,540,790,414,37,872,151,380,614,691,419,589,202,534,693,171,896,681,659,164,511,675,555,690,314,104,383,791,649,167,883,334,793,421,56,525,324,632,106,420,137,652,855,427,533,758,805,225,871,666,7,687,506,58,388,172,394,426,479,39,215,716,99,299,719,395,741,181,64,146,124,764,211,583,446,738,102,238,606,821,234,477,91,415,845,682,563,801,703,316,884,315,893,658,457,439,36,590,382,337,193,475,335,642,714,111,247,368,504,236,411,627,797,241,612,109,788,720,29,438,252,16,628,669,472,717,570,362,794,843,663,711,300,559,849,403,336,617,615,645,598,344,254,348,442,474,482,96,176,320,148,418,844,30,174,10,692,82,248,704,530,105,708,232,33,879,243,608,665,404,377,144,783,727,726,886,213,831,305,369,282,885,750,796,582,268,567,107,556,73,732,11,231,735,599,323,53,807,667,372,333,770,277,406,677,854,786,676,537,841,386,875,428,857,694,607,804,581,728,34,267,4,8,168,860,291,289,657,12,485,145,824,434,809,412,275,165,246,826,867,823,781,216,519,114,568,522,655,520,640,507,476,518,621,139,430,575,806,740,57,184,249,585,468,257,432,812,379,810,19,579,877,297,443,729,149,684,596,20,97,312,240,828,639,161,178,116,747,792,769,544,463,473,218,23,14,198,480,674,671,500,128,648,721,355,309,44,813,636,852,696,83,777,385,373,900,744,644,825,261,493,643,604,564,774,449,547,359,304,490,220,710,492,260,108,543,489,245,397,775,868,453,325,548,699,535,881,224,80,465,861,830,895,862,387,41,278,160,221,576,244,402,512,488,169,779,392,542,452,50,374,455,447,86,814,200,153,736,212,129,865,529,713,458,332,597,686,59,274,266,836,766,767,356,423,65,546,749,503,130,470,678,820,554,152,417,264,330,285,346,61,466,263,400,436,701,631,256,795,31,840,95,789,630,487,157,235,339,634,498,154,360,541,461,271,351,290,536,753,578,350,401,142,620,833,210,87,321,448,600,586,63,445,650,194,897,251,496,375,313,175,182,566,870,25,205,197,521,183,77,523,837,307,422,204,180,549,9,207,222,816,469,856,866,199,734,776,228,195,853,517,811,501,698,565,186,281,140,47,294,707,255,803,223,880,363,122,562,68,495,100,28,755,284,416,170,79,242,838,724,214,55,584,407,441,32,286,647,878,272,306,756,349,219,177,18,723,179,310,718,532,190,464,98,846,873,762,262,364,888,771,24,141,424,435,331,127,134,5,688,680,486,163,610,768,138,715,288,352,405,515,119,551,103,745,611,187,624,123,191,94,668,633,772,516,121,206,81,651,815,253,258,623,84,52,702,882,283,429,538,757,425,189,347,626,637,62,502,135,381,2,822,90,208,92,808,13,834,413,391,751,343,365,1,22,27,451,440,706,450,661,292,227,88,784,454,384,746,115,113,147,864,328,550,743,483,827,842,3,89,287,858,76,15,270,577,298,641,802,112,302,327,48,602,725,136,296,101,481,329,819,670,376,778,318,72,697,593,574,293,731,390,410,117,229,341,773,237,143,230,618,460,656,869,433,467,558,366,126,78
0,437,217,497,605,761,49,887,361,173,66,730,409,545,398,478,799,156,158,874,342,695,722,340,850,162,685,782,832,613,196,898,85,629,396,67,638,399,526,818,118,672,17,571,683,311,765,45,622,625,601,295,619,742,74,60,367,131,592,456,679,120,569,591,132,280,319,35,561,890,539,510,531,635,317,513,646,40,733,899,499,509,279,653 808,47,163,784,611,816,564,193,24,741,333,485,388,617,572,86,776,850,81,876,687,335,183,102,235,581,452,789,336,287,415,191,243,526,172,562,173,769,730,649,671,615,27,64,579,197,216,428,446,395,417,875,736,461,115,833,429,494,66,436,90,664,514,40,114,208,274,718,877,426,612,194,73,104,725,61,587,139,748,858,162,295,576,372,434,524,352,179,65,152,123,716,160,598,161,412,791,218,99,493,613,299,519,557,854,315,367,752,371,267,660,279,100,693,75,545,516,588,464,821,644,171,845,91,44,574,260,740,214,184,246,680,765,763,591,723,34,683,550,249,498,486,390,782,848,478,167,884,170,695,203,330,689,541,777,53,804,133,401,856,809,285,697,879,145,889,338,449,546,511,413,430,479,820,419,420,141,159,694,205,337,668,517,702,234,427,135,153,811,569,745,175,573,82,851,273,537,54,431,79,455,497,293,661,869,343,424,663,96,755,322,211,609,558,827,69,52,239,77,198,751,760,410,221,357,121,309,85,408,630,754,206,351,445,392,450,640,112,178,632,457,761,394,735,30,470,500,623,369,349,387,853,543,101,381,393,794,825,270,652,458,137,747,282,528,120,14,56,169,871,361,560,276,520,375,213,832,678,22,376,147,290,622,383,366,705,291,150,16,442,471,836,893,398,894,377,384,196,780,593,605,506,865,321,504,785,151,378,146,26,631,201,71,78,554,768,655,23,882,447,829,753,596,859,466,831,764,237,774,437,518,584,815,874,665,319,329,39,385,166,658,888,127,129,767,313,118,43,570,181,779,67,746,212,340,886,899,675,552,28,317,7,793,773,647,140,278,210,238,421,798,76,327,416,656,636,303,108,51,423,229,219,355,582,209,872,126,838,648,567,666,743,4,402,462,454,691,248,11,802,226,624,527,586,868,673,674,186,286,566,530,207,264,42,341,778,502,594,192,359,365,269,318,89,722,577,775,713,607,788,323,190,900,830,862,817,620,380,597,251,292,439,414,280,797,368,245,379,703,795,106,525,762,650,580,667,501,892,422,21,826,474,353,847,252,345,411,676,733,891,878,144,41,771,70,259,744,232,553,332,405,551,578,453,682,94,265,109,614,281,448,885,503,756,595,13,621,883,561,824,301,188,681,646,575,354,328,490,806,861,639,534,124,841,619,662,5,110,842,852,254,601,418,111,17,18,155,177,846,45,68,638,399,734,499,467,227,714,559,796,787,97,364,346,441,507,262,284,268,786,342,48,122,717,509,864,616,37,749,134,867,897,95,225,715,199,712,463,84,701,803,128,738,813,488,538,185,610,700,679,641,49,215,684,174,306,272,456,585,515,217,629,535,304,168,316,36,482,158,844,728,606,618,759,302,35,870,223,589,544,783,460,711,15,742,465,440,314,331,531,10,704,633,119,522,228,275,29,87,828,142,200,1,143,189,895,477,707,344,512,685,255,433,432,739,731,182,391,590,288,231,3,98,708,31,88,556,247,438,307,363,563,790,397,699,103,890,887,136,643,476,600,692,373,187,326,807,339,297,222,487,626,406,801,154,625,729,117,20,195,8,637,202,132,389,360,25,505,669,242,473,508,549,224,634,571,757,83,19,407,706,55,719,839,289,472,698,489,356,32,74,857,523,271,721,12,131,156,696,105,157,149,592,244,542,475,657,521,484,116,298,58,165,727,565,834,257,233,362,180,263,860,176,256,642,635,46,690,849,324,814,627,92,720,770,374,93,261,898,236,253,294,855,451,880,603,2,325,72,250,347,628,204,602,654,130,710,645,819,532,659,425,469,688,480,386,653,492,230,80,529,548,495,866,810,50,459,311,308,873,799,63,608,370,382,38,672,818,400,583,686,837,60,709,468,113,651,781,138,792,148,568,812
,348,444,677,732,533,539,350,835,33,823,510,599,496,241,896,6,805,536,258,481,443,750,822,220,240,358,320,491,435,483,107,409,800,547,125,334,863,62,283,310,59,843,513,840,164,724,670,277,9,540,300,266,881,726,772,305,555,57,737,396,604,758,766,403,404,296,312 238,734,236,24,588,248,139,203,460,46,792,655,354,603,432,280,365,22,857,38,322,213,687,403,807,341,269,13,720,126,427,277,160,101,535,842,233,606,475,170,218,343,749,691,81,197,177,583,98,307,430,49,564,828,766,73,103,85,753,843,853,556,437,527,847,806,282,142,66,491,189,630,488,607,208,419,308,393,124,150,877,394,65,644,617,496,647,470,780,239,540,759,232,612,684,310,398,176,377,575,40,325,92,386,54,799,613,244,659,424,494,646,584,204,859,576,91,323,192,64,521,623,2,367,538,86,374,500,304,489,537,45,850,205,44,283,830,423,653,243,83,507,637,573,43,143,338,12,812,773,788,748,173,390,198,447,727,463,89,18,695,519,709,165,880,52,181,861,109,120,743,242,549,719,309,704,732,329,202,439,898,660,210,318,763,708,666,873,108,547,326,784,171,446,881,183,155,790,114,580,619,319,462,241,560,216,468,313,373,231,188,196,387,845,358,677,360,561,558,298,110,471,815,287,190,482,331,680,777,57,714,426,829,389,431,882,235,863,869,454,686,487,212,153,472,272,335,441,596,499,113,19,249,818,90,526,128,413,887,505,296,553,355,611,852,879,652,809,225,39,765,550,851,42,624,443,410,157,574,214,332,740,257,544,275,63,841,289,444,442,633,88,27,378,450,493,625,295,50,47,605,87,892,104,673,418,478,328,689,512,484,396,356,820,123,314,664,578,384,305,700,69,174,94,74,382,767,315,26,563,351,565,209,122,156,417,769,534,848,858,741,61,836,352,492,778,508,839,151,436,14,509,656,896,586,302,570,30,631,707,840,679,274,503,311,425,266,399,650,733,195,722,191,819,497,568,658,867,369,498,362,569,718,72,186,129,674,327,429,407,345,562,756,187,227,800,855,796,635,9,344,645,640,713,220,23,542,406,614,368,523,641,871,517,152,119,745,364,592,251,253,536,701,744,626,391,8,161,798,32,760,353,56,67,457,20,629,591,474,290,263,206,770,158,692,900,581,336,76,411,545,683,464,589,456,306,117,77,477,711,440,70,169,1,721,870,144,80,597,3,53,228,774,785,835,648,279,702,256,453,754,609,363,267,230,598,628,794,219,379,771,259,703,825,649,682,783,51,409,422,258,247,102,449,876,516,899,262,36,182,349,370,543,458,821,347,511,531,294,55,541,712,532,299,661,515,776,520,746,772,585,265,594,140,698,217,885,25,736,21,168,539,479,346,278,58,803,889,408,610,133,366,254,572,154,162,106,557,385,884,504,599,862,886,604,415,200,175,768,339,137,789,300,823,297,303,730,723,791,359,223,688,293,669,579,420,897,99,100,657,179,705,518,490,827,324,643,221,234,7,33,595,132,15,383,750,41,716,524,416,97,264,729,747,860,868,286,461,738,135,761,105,333,878,148,502,618,779,811,84,866,838,551,665,891,372,79,672,622,255,350,824,795,632,402,59,510,452,261,229,757,481,725,671,775,320,95,726,678,222,742,201,797,480,735,627,16,448,864,706,710,281,621,685,380,817,121,184,717,582,822,37,466,600,465,405,361,435,485,348,731,31,376,762,400,552,608,675,116,392,224,271,758,292,875,48,590,834,751,125,894,831,75,10,890,164,284,455,752,801,813,381,60,755,401,654,334,816,17,681,577,634,62,793,147,696,404,96,375,501,832,781,593,694,237,112,388,337,728,513,715,115,288,697,245,808,495,782,6,215,357,193,395,826,11,856,805,276,602,865,667,636,529,451,642,330,615,250,476,846,145,268,559,414,810,301,548,35,136,240,787,459,676,428,71,317,469,82,638,412,639,662,663,316,849,844,506,567,167,525,802,651,285,118,397,833,130,670,587,166,211,146,571,138,530,874,895,68,690,421,804,321,29,473,693,111,514,159,78,724,5,445,273,178,141,172,888,28,737,31
2,226,739,522,533,342,34,814,467,883,131,134,260,163,854,127,185,149,270,438,486,546,554,207,434,528,246,433,764,199,252,616,555,620,893,601,4,194,483,107,180,786,371,93,872,566,291,837,340,668,699 663,29,258,614,195,859,378,184,172,734,538,213,440,849,792,120,437,806,162,848,598,496,135,315,67,723,244,678,650,251,433,209,176,504,466,591,289,350,871,509,476,398,130,102,628,139,211,85,300,388,141,872,665,586,432,844,391,568,19,109,112,560,465,137,248,104,256,521,847,721,375,59,179,336,544,231,718,52,585,879,294,488,566,545,78,581,645,226,309,193,49,612,203,798,27,125,459,171,166,616,487,75,480,793,858,183,463,608,621,561,757,30,319,419,161,685,599,304,270,845,449,431,751,153,156,456,295,870,93,469,21,551,770,601,611,454,140,856,144,99,548,752,658,877,824,260,312,869,477,239,514,642,724,732,851,157,897,400,380,843,895,564,255,830,164,888,187,630,331,590,671,609,700,894,224,748,158,345,420,292,293,221,593,364,113,486,462,167,638,576,523,438,296,254,865,328,342,44,425,89,774,363,76,807,610,323,694,423,33,582,381,313,448,716,696,14,51,132,118,418,661,408,348,413,372,32,503,11,687,631,861,308,285,517,522,512,669,357,311,288,91,876,200,772,84,505,475,263,537,659,262,840,481,347,393,60,785,892,149,747,647,1,846,35,510,217,45,69,124,855,625,695,399,457,615,100,567,632,207,198,126,573,483,679,116,660,74,324,107,154,105,784,885,579,797,674,131,396,709,446,174,580,881,346,556,883,321,145,379,10,219,64,40,337,362,511,451,148,768,528,749,701,565,875,474,762,301,482,633,745,447,170,802,890,761,359,403,3,549,410,644,874,422,334,453,445,8,182,335,12,667,13,853,314,175,767,866,525,147,16,643,56,683,570,340,41,119,28,17,744,720,39,220,88,218,201,574,355,53,94,472,68,77,498,215,893,204,775,298,550,533,341,87,478,653,617,777,66,15,733,277,717,781,742,786,575,227,38,613,371,738,330,392,264,473,664,121,390,495,501,417,70,677,384,274,233,690,479,727,795,681,426,900,783,736,803,834,79,230,402,96,360,110,163,889,386,20,443,764,715,7,600,532,864,623,654,358,857,714,71,891,430,491,750,634,327,325,584,281,873,835,188,133,728,97,624,854,80,691,618,90,547,199,338,809,257,520,34,697,552,450,800,412,826,160,191,825,852,279,527,508,136,589,267,627,189,404,442,708,731,670,769,169,735,181,831,111,269,684,860,516,808,725,242,54,9,18,246,743,266,6,740,604,603,810,427,382,656,672,706,833,194,365,344,409,507,592,250,555,540,291,48,210,832,373,887,689,5,801,290,789,842,310,280,655,23,535,619,387,822,811,47,578,827,273,703,26,571,676,421,385,240,282,828,368,278,415,526,458,816,302,838,577,42,439,397,4,326,117,790,635,787,370,705,249,31,818,494,177,796,668,190,756,103,753,492,539,424,142,524,72,587,713,216,374,406,596,81,349,711,108,500,63,129,247,829,416,763,553,712,646,607,675,746,441,595,46,223,252,460,95,73,354,286,236,214,353,25,320,155,333,369,332,464,637,361,868,758,377,729,804,502,588,680,228,562,837,202,639,534,127,622,572,452,434,788,791,58,152,55,168,626,82,468,83,383,234,629,760,62,241,719,794,306,673,493,307,316,165,232,780,225,557,782,186,546,839,134,702,86,229,776,542,606,536,37,303,343,597,414,470,128,471,351,436,180,821,339,389,376,515,253,299,886,484,766,57,151,150,61,602,754,50,43,305,666,238,730,819,429,878,329,106,265,722,541,405,222,867,245,435,652,726,704,805,235,65,352,276,275,444,297,506,698,812,178,841,519,771,558,196,692,651,317,497,115,773,489,559,22,649,814,813,693,817,192,401,554,205,799,356,92,779,461,657,159,395,896,271,513,485,662,143,529,755,173,699,815,2,882,36,394,707,594,739,543,836,283,583,122,765,185,641,287,367,284,197,898,138,880,636,261,123,741,862,686,146,322,455,411,272,518,499,208,490,759
,682,237,563,640,259,467,268,212,899,688,620,98,605,823,206,24,820,428,531,850,778,884,318,710,243,863,407,737,366,569,101,648,114,530 325,722,105,635,886,180,767,385,271,317,208,533,819,364,164,214,705,497,691,545,893,88,284,193,701,45,254,726,829,348,860,584,228,696,390,368,178,849,372,143,241,327,392,313,583,277,139,746,544,810,435,525,156,273,412,377,589,272,617,875,793,152,53,121,567,600,729,855,517,485,434,382,74,136,882,188,44,203,309,387,663,9,596,60,809,21,811,765,686,82,751,349,353,133,546,571,673,331,43,33,612,637,110,297,480,531,47,891,854,411,50,753,122,532,775,48,200,870,275,486,318,125,643,52,618,218,778,255,628,234,704,315,432,885,341,463,706,391,828,490,458,242,134,233,404,440,28,651,773,890,808,243,90,81,266,96,492,129,441,565,446,708,871,475,826,597,588,398,447,101,184,99,92,416,196,736,289,68,540,10,880,838,18,668,504,120,406,742,473,58,329,822,611,78,562,516,759,692,76,182,578,862,806,304,783,693,236,883,295,889,592,270,190,328,298,724,336,240,721,162,176,713,423,35,25,680,19,744,872,626,206,749,481,848,827,560,32,812,593,621,846,316,694,128,539,818,866,665,867,306,283,561,46,795,381,748,166,410,851,49,62,645,140,671,461,807,642,647,256,513,171,41,443,394,85,189,590,89,261,683,77,185,879,247,559,211,817,301,165,858,863,314,657,17,727,281,515,102,803,522,876,216,640,338,104,379,29,776,141,370,784,712,311,547,716,470,409,595,610,3,350,222,820,462,454,198,895,293,740,551,878,794,150,468,202,831,191,436,366,538,197,550,787,195,622,179,667,707,754,518,495,616,167,841,310,638,798,619,881,580,874,430,186,389,526,654,634,576,718,796,174,487,126,861,847,830,127,785,204,681,488,356,695,675,113,503,153,201,887,31,119,788,577,606,465,219,375,687,548,519,717,94,181,131,426,557,837,656,685,269,93,80,536,351,168,257,857,312,235,625,747,429,569,355,873,602,629,415,226,448,719,343,763,330,451,511,227,641,661,632,512,769,733,363,344,888,679,1,397,159,402,106,396,554,27,36,689,582,506,699,352,494,332,702,521,598,107,770,877,764,568,213,420,346,703,414,324,502,63,624,756,98,804,97,207,723,91,792,57,453,237,574,303,320,853,575,264,563,444,627,323,108,142,698,500,231,884,839,401,477,24,248,79,319,894,294,238,684,388,489,752,170,507,123,658,469,728,40,479,700,660,70,856,209,403,789,672,650,130,439,192,710,38,13,26,282,845,781,340,529,117,239,183,115,175,413,163,579,72,802,431,772,790,732,169,496,232,376,424,114,177,633,896,373,285,73,630,210,322,393,843,249,7,145,768,39,782,148,758,408,161,245,34,674,419,2,514,799,357,566,731,109,553,445,51,605,549,825,869,438,296,417,160,690,433,501,5,833,510,741,646,834,30,762,225,15,730,535,478,383,821,844,367,246,850,678,552,135,61,449,69,832,244,400,542,777,337,173,715,71,528,299,262,836,644,217,520,815,308,221,250,824,900,864,305,649,664,652,608,124,662,87,230,267,14,286,258,614,774,276,868,556,16,766,347,86,65,666,157,620,482,474,711,581,670,54,466,84,369,734,760,335,229,6,491,259,859,771,677,405,599,572,342,738,842,42,260,835,743,688,800,333,287,300,253,428,360,361,586,345,371,205,653,280,779,603,144,613,100,541,326,395,64,558,66,288,437,737,527,268,291,456,399,655,659,292,147,223,601,11,252,615,594,898,609,386,555,37,797,623,83,457,418,158,564,801,334,8,467,523,452,639,187,23,274,483,137,631,215,12,892,224,95,682,220,537,55,573,648,509,67,278,484,591,460,714,103,805,149,498,899,750,151,321,279,676,636,425,212,75,290,471,132,852,378,791,607,359,725,354,813,199,455,59,302,823,111,530,524,585,570,194,407,735,761,365,814,755,459,374,22,251,745,472,263,493,534,816,587,384,499,4,720,604,427,155,339,116,739,669,138,20,56,118,508,146,543,450,154,362,172,505,
358,709,780,697,112,380,265,422,757,307,442,421,786,897,840,464,476,865 658,648,212,728,888,196,767,389,214,327,191,779,556,2,21,322,855,536,525,616,796,132,482,140,774,101,620,682,274,420,663,427,419,532,107,463,215,889,19,499,614,714,350,394,374,409,320,838,613,634,738,470,485,304,144,317,510,250,624,868,668,515,399,247,417,698,585,809,633,55,732,609,211,135,821,178,113,480,603,253,483,98,442,149,856,264,273,527,310,260,680,636,325,59,686,766,850,415,237,193,626,749,348,25,361,694,254,822,382,99,152,815,138,780,574,146,225,849,602,312,684,105,669,245,381,497,581,33,646,76,788,506,507,829,324,772,490,27,867,635,328,6,38,73,464,672,143,615,283,896,301,63,229,52,159,218,20,93,426,467,26,232,891,696,709,486,638,289,673,200,77,94,36,39,591,125,413,130,782,238,814,885,188,840,679,321,110,649,731,334,710,379,773,155,518,390,768,628,315,387,41,791,802,97,882,552,13,851,475,833,414,258,604,256,644,670,400,550,529,129,450,860,601,120,290,665,89,711,339,448,124,457,623,572,437,514,241,443,428,827,864,32,721,236,408,559,718,842,488,23,500,456,386,401,376,828,329,67,820,156,75,723,103,798,571,607,522,577,402,537,109,136,418,469,74,519,826,355,192,707,343,92,799,10,745,392,832,198,267,843,866,468,592,84,861,18,761,123,671,625,887,240,568,356,314,49,151,784,167,460,862,717,421,265,755,775,734,359,823,175,622,377,629,291,383,521,897,118,735,53,763,863,360,326,154,612,373,523,281,296,429,563,342,148,835,295,687,453,783,681,588,598,115,677,617,831,880,637,884,560,364,410,540,742,789,659,695,720,800,179,474,558,435,865,878,873,68,416,368,819,182,627,811,594,302,282,300,512,875,332,494,871,424,363,172,239,336,777,508,538,502,230,22,147,713,543,881,697,168,664,45,248,455,189,185,546,874,689,61,307,509,701,69,165,892,813,586,726,262,365,137,150,685,452,759,423,17,806,837,347,683,667,830,517,58,812,786,227,259,501,582,288,754,12,272,306,703,715,64,524,65,473,210,705,78,643,403,655,893,674,688,285,385,553,600,48,87,702,106,161,836,372,854,465,108,396,90,746,160,121,446,795,597,297,883,305,91,654,765,839,139,366,422,848,504,589,781,712,564,744,249,28,292,647,894,43,268,371,725,354,279,79,50,190,516,479,762,261,736,344,645,131,411,539,5,640,505,548,242,528,217,436,351,319,357,335,330,752,81,804,384,375,243,134,533,257,199,566,208,561,733,870,441,639,492,675,596,183,180,104,318,676,841,459,657,80,737,472,495,751,834,216,228,116,391,369,608,222,708,3,252,34,278,793,204,226,551,756,433,576,653,83,760,439,395,579,666,346,877,37,549,722,316,481,477,534,153,542,727,526,487,750,176,578,570,158,205,466,244,610,747,378,337,438,724,844,535,277,112,128,380,284,126,9,808,730,593,567,825,133,706,311,171,886,333,219,700,824,599,531,491,792,899,605,565,271,678,846,340,246,62,449,573,24,255,184,266,630,223,46,876,425,102,869,117,72,194,606,221,661,293,405,691,166,803,345,235,233,195,757,776,88,60,206,498,432,890,398,397,587,476,353,595,631,807,186,82,857,141,547,489,652,309,66,650,440,220,287,583,127,96,431,642,370,716,805,42,704,478,213,234,269,207,4,407,496,690,660,748,545,57,693,554,785,818,404,164,349,541,187,111,794,471,898,511,294,224,352,879,619,201,47,740,15,801,853,341,275,231,557,299,816,54,163,169,367,85,621,430,251,895,16,145,847,303,753,122,445,162,544,209,8,7,817,530,454,447,331,434,770,900,575,71,263,35,451,787,30,562,388,651,741,270,797,11,632,520,286,758,56,778,611,590,44,14,729,119,739,358,771,580,197,743,769,790,662,764,618,323,100,177,461,845,31,70,656,641,569,503,308,699,181,493,458,280,406,170,202,276,852,157,142,362,174,40,86,810,584,513,95,484,462,1,555,298,692,203,29,338,173,412,51,393,313,872,858,859,114
,719,444 693,23,250,542,108,855,202,106,155,675,643,388,228,837,767,86,487,732,259,806,363,495,292,300,163,727,326,558,525,162,186,91,148,355,448,718,246,401,866,569,547,491,206,197,505,100,190,111,188,152,235,848,755,698,472,788,249,518,60,142,126,678,576,212,375,243,88,348,819,642,406,159,277,418,341,223,719,97,591,867,424,551,455,632,77,666,512,76,280,199,5,733,125,817,93,110,536,114,242,692,389,39,501,737,808,379,581,387,465,513,789,53,405,452,180,745,555,123,309,770,574,498,652,205,78,470,103,868,24,474,25,543,860,393,546,528,151,827,179,58,466,729,683,887,769,241,365,821,432,263,441,669,708,728,825,219,898,422,274,768,896,457,299,829,232,870,92,720,314,573,712,580,724,899,329,647,323,361,449,414,410,290,602,400,264,610,471,288,609,681,599,331,260,307,894,184,175,67,447,173,697,129,17,740,660,144,667,185,55,366,214,254,562,794,754,14,41,279,34,383,653,484,531,433,420,2,544,10,550,499,886,135,480,311,351,650,687,330,456,390,81,873,82,747,35,306,253,335,362,608,238,761,564,282,293,66,763,889,255,787,623,11,844,70,663,281,27,121,85,823,682,752,548,398,658,181,625,634,248,80,170,626,266,651,87,616,141,268,119,46,62,757,863,604,831,627,269,391,552,373,133,538,900,267,679,874,416,47,479,37,257,12,45,402,215,577,588,83,795,584,842,674,386,851,381,812,318,446,509,668,195,216,798,882,705,134,557,26,664,507,750,862,377,303,560,575,1,211,187,28,592,3,875,409,349,775,888,445,94,51,529,96,726,671,408,6,44,22,52,621,731,107,276,113,182,237,618,357,64,116,392,166,153,367,209,883,360,807,124,638,617,423,98,339,554,710,853,167,31,644,105,779,780,800,786,485,245,7,526,482,756,478,545,236,443,661,130,489,524,372,213,149,614,435,287,302,578,517,811,703,595,359,893,746,792,707,841,139,319,240,65,150,210,117,892,473,49,534,670,772,56,704,633,834,691,695,194,839,622,50,891,324,428,635,586,515,364,431,283,815,785,145,229,833,138,583,830,189,685,519,157,665,140,442,759,128,587,38,696,488,594,810,571,872,171,154,854,824,396,601,461,156,589,333,748,353,481,217,659,730,713,716,298,796,192,778,120,132,723,804,395,838,645,231,33,20,59,380,840,208,4,736,440,611,776,218,510,532,760,637,803,79,497,458,496,468,486,412,523,655,301,57,168,858,413,876,641,9,828,354,843,871,316,438,749,40,585,541,425,814,782,118,346,881,368,694,84,702,781,520,191,328,358,859,200,417,530,508,291,774,272,852,672,54,527,483,43,131,63,700,490,813,340,615,382,61,766,374,115,715,656,239,836,73,850,639,582,204,312,369,72,411,735,305,407,502,493,137,426,688,143,430,68,275,304,847,285,835,619,605,699,522,734,709,537,454,112,378,207,467,224,75,439,371,220,174,294,30,503,177,516,165,313,252,744,270,826,797,469,799,721,579,701,689,338,648,801,95,535,556,164,511,620,553,404,777,809,8,172,18,193,533,16,459,69,521,203,711,676,15,102,636,818,385,783,477,345,286,127,178,865,308,566,640,322,567,793,71,606,99,158,820,500,624,629,74,227,475,504,444,462,271,350,514,251,342,802,234,506,284,613,343,434,885,565,753,90,36,325,147,612,742,42,104,295,773,278,822,717,464,879,317,32,273,598,657,437,320,890,427,559,597,791,686,864,160,101,429,198,460,332,310,539,741,857,89,861,415,762,600,196,662,714,399,463,109,790,370,494,13,722,805,765,725,878,226,540,421,230,849,352,29,739,397,596,201,347,897,453,568,549,654,247,356,706,244,816,738,19,877,48,492,649,607,743,376,856,265,476,261,673,183,677,146,221,296,337,884,176,869,628,450,136,680,845,631,321,122,590,297,233,572,334,336,403,764,593,225,436,690,315,570,327,258,895,646,684,222,630,751,394,21,784,561,419,771,758,880,262,603,169,846,289,832,451,344,161,563,256,384 
572,514,309,824,884,402,848,554,259,605,1,466,809,205,208,375,780,745,278,774,895,302,172,248,455,193,492,858,610,583,842,625,500,799,271,65,371,847,305,483,526,512,159,155,722,495,415,687,718,821,523,737,237,14,122,632,728,544,398,738,456,240,198,131,146,319,743,889,752,362,651,312,114,70,894,244,216,329,603,589,185,95,748,15,757,97,626,682,361,298,666,235,502,71,403,668,741,545,162,43,794,690,464,299,694,274,37,898,715,360,107,652,73,708,409,47,501,886,550,681,381,45,835,189,499,567,756,228,671,200,659,629,100,896,617,635,345,354,775,624,645,184,38,170,679,658,160,834,436,871,518,31,379,166,471,144,217,142,646,776,214,574,838,693,439,785,709,7,677,406,16,283,75,174,438,467,124,203,712,94,509,810,270,764,310,40,186,311,711,120,529,563,760,126,111,631,840,424,412,168,346,877,789,437,802,749,211,892,392,897,675,460,387,5,485,559,416,263,601,295,536,742,165,206,306,628,154,585,676,770,67,678,195,832,767,82,285,425,133,663,790,265,777,428,477,826,872,703,784,226,569,769,313,476,608,716,497,405,548,96,225,581,269,602,115,388,474,324,470,744,6,59,64,664,26,301,674,418,171,766,257,19,888,161,669,570,590,391,231,859,833,611,831,116,779,220,457,250,879,740,866,511,137,575,51,365,77,816,32,341,803,702,401,153,827,630,357,447,704,164,281,573,860,653,654,394,883,356,864,364,880,772,449,650,451,806,52,396,9,85,104,758,422,286,656,22,613,317,841,549,750,556,86,373,293,762,818,804,865,382,616,252,454,473,660,640,706,606,618,538,493,289,432,733,855,820,28,353,320,673,475,337,566,814,383,665,74,440,890,183,336,786,308,680,553,3,29,463,397,759,700,54,78,21,647,765,839,655,481,450,88,36,90,267,430,584,792,479,125,649,763,444,277,101,863,672,819,661,17,683,408,370,873,633,552,734,105,594,685,642,634,822,577,588,143,516,615,145,623,342,376,42,504,349,132,260,813,793,395,487,330,688,508,857,224,188,434,853,875,878,795,420,158,169,352,199,421,358,258,515,619,109,837,351,399,564,24,571,297,427,87,729,215,39,828,326,25,787,558,695,423,332,507,730,69,296,489,861,691,692,207,290,103,435,852,334,222,565,591,697,607,35,350,272,480,413,517,83,359,400,562,268,754,547,149,829,150,817,23,717,468,593,141,196,262,582,586,333,315,520,441,340,363,102,510,597,472,490,219,328,505,773,380,246,213,551,533,532,135,254,496,367,899,119,498,236,458,129,121,557,881,91,191,53,620,180,238,453,845,192,419,68,99,662,56,151,366,846,522,796,851,63,736,707,221,372,778,609,867,393,604,600,76,445,80,92,177,781,348,815,390,882,204,307,541,48,527,253,113,670,491,540,292,157,461,768,727,81,20,513,273,521,2,201,637,874,355,304,808,275,335,279,60,800,322,323,726,684,230,484,127,869,893,830,194,452,843,714,136,41,369,338,245,79,140,4,535,720,525,147,751,739,62,746,288,173,227,636,181,648,229,698,705,256,701,108,287,27,568,731,825,431,30,179,66,595,854,411,410,638,55,187,644,862,486,318,377,836,442,374,506,494,578,46,433,276,417,49,791,134,233,163,812,50,849,503,223,771,284,426,8,106,44,280,344,592,528,735,384,300,61,696,534,530,782,57,152,58,811,251,339,761,580,885,801,13,176,266,805,212,18,241,488,175,546,844,232,243,385,148,598,753,123,138,10,542,130,807,539,255,868,210,12,732,543,641,118,723,429,524,34,98,239,719,128,316,197,667,459,555,891,587,303,537,282,294,798,218,242,404,560,699,110,887,261,386,72,621,579,347,596,443,724,89,84,876,139,710,686,446,797,234,612,823,850,614,407,870,209,448,264,117,721,321,343,747,249,755,93,576,689,202,788,190,599,478,519,291,643,713,112,33,331,167,389,325,627,247,482,469,178,414,182,156,622,856,378,314,531,561,462,327,639,11,783,900,657,465,368,725 
175,809,248,112,806,65,355,313,502,27,671,689,542,384,236,359,494,17,863,21,581,150,678,392,838,157,316,115,736,224,645,433,238,245,505,747,264,750,253,130,202,346,740,691,137,290,214,731,246,527,530,2,400,709,694,12,242,89,800,884,874,431,335,526,837,826,490,386,33,397,265,677,466,541,477,429,136,471,116,29,865,272,143,485,767,330,681,650,788,260,714,628,322,367,748,480,497,271,370,387,158,531,88,228,6,801,482,521,730,383,267,771,528,298,870,360,78,610,254,24,484,532,54,364,648,107,574,252,504,425,703,70,733,479,62,327,843,230,784,399,120,333,456,289,14,266,287,11,815,862,805,591,59,207,58,444,439,404,192,13,402,566,824,92,890,1,344,777,218,75,563,139,338,421,339,651,700,286,296,373,898,786,101,455,760,572,619,877,129,379,293,813,323,418,778,295,372,832,105,596,515,586,693,111,624,389,325,612,483,512,310,237,342,716,247,789,473,577,701,273,67,580,764,226,156,661,220,795,793,124,437,584,802,578,576,852,99,869,855,565,822,232,415,51,602,460,567,579,718,380,196,15,176,836,169,704,43,167,894,280,162,702,131,734,797,888,745,834,315,8,597,329,811,109,443,498,428,84,571,413,347,590,535,391,449,46,873,324,491,706,807,39,3,234,432,363,680,262,181,147,794,80,817,187,525,188,401,570,603,609,662,614,464,796,318,326,623,675,336,180,536,36,343,16,140,225,861,288,146,470,605,724,69,10,64,625,669,641,766,828,575,7,841,341,549,672,684,850,361,588,56,665,376,897,600,275,511,132,812,762,849,719,170,440,398,618,469,573,765,696,96,752,223,871,713,737,508,867,445,658,388,627,787,203,213,18,635,171,715,268,251,687,775,276,208,666,728,833,720,40,522,427,420,495,135,98,725,607,611,371,311,552,840,547,141,85,839,368,507,331,423,616,620,685,781,529,71,201,632,23,699,350,4,26,354,20,385,705,608,406,430,409,816,274,394,900,726,231,52,255,638,519,422,407,291,159,305,49,395,782,190,161,185,68,642,853,160,229,773,34,45,283,756,630,857,553,114,738,117,467,814,457,601,249,93,698,489,854,87,362,668,209,637,640,804,722,561,31,476,461,244,285,79,598,820,534,899,499,48,138,179,219,513,320,880,173,550,646,227,9,613,478,436,307,753,634,831,468,604,791,727,103,692,106,520,410,876,104,546,41,47,670,396,278,221,83,821,891,345,569,166,450,301,314,183,30,55,683,172,889,365,543,866,883,441,560,164,154,711,128,42,825,589,649,337,142,785,582,653,374,488,655,256,417,686,366,893,91,294,481,206,459,352,663,759,235,721,453,348,32,153,349,263,38,357,792,44,757,749,284,74,369,514,803,746,827,163,644,744,282,799,309,145,878,191,393,636,790,772,66,885,851,615,659,896,319,195,447,458,200,204,823,647,487,451,216,587,463,465,174,763,548,657,779,783,351,222,829,643,292,673,442,881,667,592,676,19,408,846,493,606,211,500,533,340,708,28,358,712,492,872,100,306,679,448,193,152,626,503,544,732,76,595,717,594,454,639,501,73,621,452,186,540,241,808,82,751,847,780,239,848,819,95,81,895,94,118,695,697,835,844,250,77,617,332,707,378,758,97,633,538,631,197,729,353,735,194,261,328,554,741,742,518,472,168,53,524,555,743,568,583,90,434,769,403,674,486,593,25,151,119,334,557,887,61,776,774,270,537,858,551,545,405,299,517,564,654,198,539,830,215,416,462,212,622,419,279,149,57,121,868,317,496,556,86,475,243,182,629,562,438,446,509,302,723,856,435,652,217,390,879,761,122,148,300,842,277,377,559,113,205,144,558,375,381,882,859,22,798,184,864,308,50,321,516,312,259,297,125,755,37,474,240,382,356,257,892,5,739,110,102,682,690,411,134,63,810,656,860,199,269,155,304,845,178,72,108,281,523,506,510,414,210,426,258,127,412,754,126,123,585,660,424,886,664,35,60,233,177,133,818,165,189,770,688,599,875,303,710,768 
398,477,304,800,843,491,835,540,307,659,48,214,859,479,391,333,618,752,305,767,898,367,55,329,294,269,289,861,825,554,877,683,395,837,446,15,396,721,588,330,343,284,155,104,804,417,353,520,696,850,293,789,113,13,279,718,744,608,306,650,456,89,73,112,172,183,781,889,733,643,481,143,102,128,894,298,286,188,467,715,230,144,803,32,648,54,834,762,529,255,677,80,512,211,240,513,559,480,121,47,784,604,379,530,776,119,5,897,865,574,159,506,125,561,462,97,565,869,364,782,157,126,830,135,582,441,816,436,651,352,528,575,67,895,671,425,397,631,628,523,722,404,174,365,706,535,171,845,679,786,740,186,389,302,612,134,551,243,669,820,522,764,719,637,438,831,687,61,577,426,148,380,204,455,231,751,46,245,569,31,496,658,266,611,142,1,369,296,616,38,317,743,673,151,131,694,821,374,338,84,668,876,737,619,624,788,415,892,259,896,734,490,175,62,309,516,355,96,664,386,434,548,36,241,229,590,216,541,866,822,208,779,69,880,863,71,261,597,161,429,685,510,741,448,487,870,888,501,874,432,450,818,191,657,639,583,484,667,471,149,320,453,408,419,27,443,458,299,382,760,110,115,50,655,123,129,459,332,239,736,147,130,893,376,533,538,444,460,203,832,750,586,849,181,633,428,226,311,886,713,795,570,165,580,6,635,85,777,77,253,663,686,308,281,802,421,136,521,544,154,145,560,881,756,703,264,819,495,856,665,890,621,407,748,573,848,2,292,49,180,23,783,641,371,420,4,555,392,814,383,730,493,224,435,108,640,690,799,806,314,839,210,310,242,447,603,666,468,449,811,384,190,276,630,765,689,139,525,248,489,646,156,357,823,316,785,25,319,879,98,179,606,324,774,711,22,64,288,348,771,766,70,258,100,539,778,723,598,726,225,220,7,91,366,475,469,680,250,285,770,824,232,578,223,755,461,827,503,39,747,705,504,883,629,351,787,323,377,474,738,505,838,361,494,387,563,442,103,724,212,237,10,263,688,83,194,793,759,661,515,589,712,605,854,476,107,301,868,792,885,763,393,150,78,341,431,625,173,346,727,418,18,745,185,556,695,109,526,350,622,3,617,43,94,716,430,189,773,315,482,562,234,403,537,37,111,571,875,675,585,167,500,29,257,775,567,213,691,412,780,790,184,642,295,486,345,388,19,182,546,502,347,871,406,433,852,192,840,26,720,550,697,24,87,138,596,813,280,507,268,326,267,373,202,349,725,534,423,244,427,322,623,532,236,90,378,360,674,265,372,498,256,899,101,277,340,297,20,33,321,864,133,127,170,754,57,287,485,846,472,414,270,41,508,168,106,278,842,452,809,858,187,644,732,79,313,746,801,808,656,514,400,68,483,58,34,178,853,153,847,402,887,222,358,375,44,749,228,221,609,457,753,283,56,518,649,731,82,74,704,169,772,114,465,401,872,176,116,700,327,163,260,146,638,440,363,654,602,51,318,9,857,867,878,63,627,844,543,238,59,707,195,92,275,75,76,614,709,769,336,632,796,117,542,437,303,252,634,197,594,291,739,699,470,572,8,390,17,701,692,798,620,201,132,40,547,735,579,497,553,105,251,558,873,409,328,576,761,729,381,647,339,615,217,218,219,499,88,794,271,399,52,810,11,855,254,385,728,118,584,66,162,21,652,209,531,331,742,166,137,249,626,413,335,660,16,206,35,829,233,524,653,599,833,826,53,344,140,672,141,12,488,282,362,342,758,198,152,492,86,684,613,274,160,120,581,272,805,473,207,797,411,60,536,552,466,196,768,681,439,30,334,549,568,95,394,45,836,325,359,851,645,454,595,478,356,714,517,65,290,370,587,81,891,527,416,93,708,464,607,545,235,702,200,312,882,247,593,757,227,807,262,424,791,817,445,354,884,246,670,273,14,564,610,557,710,124,828,99,636,591,177,841,205,693,410,662,300,815,509,158,42,215,199,698,463,405,122,337,678,28,368,451,193,812,860,422,592,511,717,600,566,682,72,601,900,519,676,164,862 
647,107,186,395,136,824,84,44,193,574,739,451,158,839,762,58,446,626,442,725,252,425,405,287,349,718,274,402,554,75,151,41,78,201,449,820,176,416,848,490,479,460,348,310,367,45,123,169,79,91,251,772,784,795,548,723,113,406,179,316,326,714,587,276,526,409,37,187,743,632,296,265,295,463,198,237,697,118,521,825,578,543,285,691,133,690,518,74,394,156,16,801,73,834,203,80,497,46,250,735,220,17,401,728,729,498,627,248,467,478,808,111,444,360,391,776,445,59,234,672,603,544,537,215,109,358,69,862,6,470,42,419,887,241,415,472,333,806,261,20,313,744,734,883,674,139,342,726,517,356,499,742,650,730,769,253,897,427,134,652,896,439,376,767,454,809,26,830,190,481,778,541,754,898,289,682,434,325,357,430,616,355,563,336,390,688,514,485,540,717,577,423,148,319,899,97,71,219,334,263,724,62,13,692,670,61,661,103,48,229,117,162,550,861,747,68,31,331,51,321,584,469,625,382,339,8,507,57,585,347,888,120,593,246,288,798,643,484,601,383,163,868,24,685,43,202,157,323,363,619,131,649,515,412,161,92,706,874,471,807,604,32,826,135,783,476,82,291,39,738,756,777,662,240,712,233,617,576,293,21,170,710,143,666,50,536,329,206,152,28,104,676,782,608,817,635,338,343,397,212,175,429,900,140,741,859,435,15,545,70,256,1,34,516,89,552,642,137,786,570,857,583,239,787,235,802,396,386,335,671,124,258,759,835,633,100,629,56,781,623,813,791,492,279,561,656,14,399,81,36,473,18,878,610,411,751,880,266,141,146,607,207,698,689,361,4,10,22,116,660,683,244,196,278,194,267,673,513,65,149,345,255,280,197,147,837,447,789,90,637,615,436,236,227,466,799,885,340,88,527,66,827,819,840,753,311,260,5,533,458,775,532,669,271,332,598,247,465,549,275,125,221,658,504,317,283,443,432,854,600,648,307,870,653,800,602,847,213,315,164,30,64,351,47,894,677,87,500,575,773,102,763,640,832,715,667,77,761,618,129,879,167,301,503,638,641,273,257,292,702,708,93,377,865,327,622,790,303,639,486,286,720,108,389,719,174,634,159,644,450,686,768,624,889,249,262,875,740,380,599,387,127,519,322,845,393,681,119,567,665,707,699,364,804,344,760,173,160,695,727,381,852,646,199,95,40,188,418,863,330,29,709,428,558,821,106,655,374,818,530,757,99,511,438,457,318,555,597,489,696,205,53,132,856,305,836,568,33,816,547,841,873,475,611,785,38,542,441,502,792,737,282,230,890,314,659,209,765,844,487,96,420,312,871,238,417,711,407,142,811,172,866,701,60,620,452,115,49,25,580,308,831,231,501,379,178,664,422,128,700,581,185,858,182,884,774,564,181,424,228,192,243,731,495,297,520,494,290,538,612,369,524,121,372,523,829,138,851,668,562,684,588,815,748,510,272,168,400,144,404,366,110,506,426,352,154,165,105,566,130,589,72,408,211,805,298,733,797,609,838,758,560,750,746,341,755,745,27,565,572,328,362,636,571,398,779,796,9,232,2,309,384,3,525,63,551,277,766,591,7,35,596,849,350,867,346,370,437,254,86,893,461,455,505,531,488,764,67,654,264,306,810,371,705,628,166,191,579,337,493,477,359,180,613,112,433,793,101,482,284,736,462,474,882,509,680,94,12,431,208,679,663,19,222,223,842,294,855,606,403,860,217,23,388,468,780,535,281,886,573,614,630,812,687,869,122,189,375,226,592,214,242,556,749,877,52,853,245,703,569,304,657,770,378,320,85,803,216,512,11,771,833,794,693,892,410,559,413,150,846,440,76,713,269,582,385,225,895,483,491,480,590,302,195,721,448,881,645,83,864,177,459,553,621,788,210,850,153,324,373,557,218,631,54,98,204,546,822,300,843,594,528,171,678,823,534,464,114,752,184,126,529,200,496,270,722,522,183,392,694,365,595,259,268,891,605,675,368,586,704,453,55,814,732,421,651,716,872,145,539,299,828,155,876,456,224,354,508,353,414 
87,758,341,551,858,150,754,564,518,318,245,326,859,451,292,442,452,374,704,342,900,290,285,473,615,113,216,683,876,506,886,705,375,741,606,176,440,684,425,96,122,161,515,386,664,445,331,635,584,852,336,364,3,191,571,337,669,416,644,817,793,14,15,310,590,531,786,844,279,641,307,365,256,345,879,463,108,309,167,314,657,154,661,118,731,12,866,833,778,287,788,76,562,174,527,526,383,491,230,33,623,642,178,438,410,390,71,895,889,617,35,687,324,353,800,60,359,855,249,478,93,312,543,242,739,225,838,278,685,487,678,272,116,887,431,234,747,489,697,555,574,389,251,199,282,417,232,508,839,831,861,362,111,194,284,300,446,380,561,498,387,812,742,165,806,428,646,306,408,231,274,270,197,344,183,813,316,297,360,106,837,690,92,483,414,84,602,702,253,7,101,867,546,296,169,655,743,714,185,302,692,870,802,403,427,762,421,883,412,892,699,501,73,213,30,753,520,228,792,436,115,450,262,244,128,732,198,763,897,676,136,828,343,881,869,392,43,846,585,393,772,319,717,147,681,857,885,479,893,502,355,500,32,848,533,673,123,398,805,31,218,666,172,607,238,808,728,680,434,339,252,58,381,462,201,240,241,75,499,710,221,311,880,437,550,210,735,541,400,847,820,173,492,144,261,480,373,349,787,522,816,367,212,507,42,397,200,780,294,512,640,745,465,648,698,204,146,730,186,57,20,207,841,384,567,6,836,528,686,682,888,651,59,328,313,865,171,549,308,573,34,325,840,477,299,193,708,751,724,552,411,689,114,823,217,239,289,608,835,638,896,553,26,85,334,713,677,596,668,854,61,523,214,799,756,691,266,825,402,577,675,333,575,701,293,396,255,13,877,5,19,566,670,720,620,187,162,603,639,454,794,25,271,102,119,572,711,696,791,139,177,163,464,582,388,133,770,148,456,736,807,371,695,532,761,430,598,247,112,441,803,569,539,215,74,471,283,536,482,749,537,797,594,470,267,864,633,44,468,2,460,41,104,634,4,9,700,298,659,768,327,570,534,612,663,455,181,776,774,650,340,448,516,49,715,592,377,419,175,748,632,16,723,89,304,815,226,789,120,643,132,190,121,97,759,703,237,330,263,280,542,248,126,511,265,180,868,860,281,188,11,404,203,18,853,385,459,790,95,420,834,196,656,378,738,589,693,155,67,785,722,131,890,109,517,827,628,662,77,363,202,804,51,56,46,391,884,694,560,166,233,429,444,79,189,320,277,665,21,801,39,258,829,667,8,521,103,556,583,153,124,610,899,64,206,195,591,50,68,184,818,423,141,110,843,81,726,323,765,529,394,264,10,630,519,72,581,810,525,474,718,130,458,433,90,653,332,872,811,579,160,351,22,766,142,315,82,875,376,782,725,832,36,755,209,125,821,600,587,211,795,878,557,288,842,223,616,17,156,604,1,874,268,554,254,764,352,145,613,275,447,509,432,658,740,485,493,773,159,286,170,814,882,891,47,771,488,208,645,37,750,27,23,405,137,303,229,671,849,496,783,614,100,467,580,152,29,734,406,679,627,535,752,709,618,63,649,62,472,737,746,514,250,138,236,301,721,830,767,457,317,647,243,688,777,164,348,779,819,733,845,24,395,426,54,548,595,415,576,544,586,260,674,257,798,407,235,611,55,706,338,524,192,597,45,135,424,781,379,295,443,205,513,578,538,91,399,53,530,151,227,510,707,871,605,335,672,127,273,540,158,622,52,346,86,712,504,88,660,435,609,503,418,28,182,652,140,637,98,38,851,413,107,466,356,497,66,654,769,619,40,357,599,220,99,760,48,863,259,69,873,809,276,505,490,729,545,369,143,80,149,224,291,850,547,784,354,366,716,370,796,105,329,222,409,824,83,469,621,449,494,461,134,719,727,321,744,601,593,382,94,219,626,624,305,347,453,862,475,563,476,78,775,625,588,65,495,358,826,269,368,117,157,361,484,322,179,422,70,486,168,629,481,631,856,558,129,401,439,565,822,350,568,246,559,898,757,636,372,894 
836,145,152,648,587,755,370,108,31,576,516,710,82,492,469,101,819,751,274,835,296,266,441,72,501,543,548,549,61,220,94,42,258,260,75,789,104,829,562,667,717,740,218,283,297,187,221,564,327,121,572,806,840,673,174,689,236,320,254,603,248,803,666,191,333,483,97,442,852,226,642,436,212,213,410,58,547,300,745,760,378,347,311,513,528,660,50,11,32,188,99,857,73,610,375,458,842,182,223,623,424,317,489,387,704,607,560,393,40,113,694,437,210,714,197,634,351,422,514,611,796,265,709,203,4,537,52,637,172,130,356,588,870,440,404,787,148,472,674,275,279,382,430,752,769,465,217,808,59,779,35,394,614,495,626,161,721,150,246,702,742,34,790,873,324,825,253,658,509,417,478,330,491,774,541,171,399,167,715,366,567,776,503,757,517,639,116,380,795,726,809,17,532,164,890,110,364,224,413,287,249,262,272,449,888,134,309,409,216,425,120,114,754,734,861,151,86,485,14,123,719,831,678,237,400,131,316,170,81,341,783,10,638,95,49,713,631,126,405,762,552,641,273,653,13,138,179,739,36,247,352,686,615,39,235,481,737,671,506,696,388,255,662,457,785,278,66,250,176,814,474,630,550,379,446,415,867,661,80,261,280,398,307,299,406,613,518,55,98,340,510,786,838,411,897,269,605,158,466,456,636,346,894,209,698,619,277,281,302,156,703,168,144,185,397,823,820,7,891,496,879,655,264,730,268,862,813,119,556,232,207,690,797,804,486,67,586,178,596,335,812,854,76,74,849,544,173,157,429,293,654,109,770,407,584,886,899,460,578,186,88,165,824,859,720,135,245,291,414,141,801,295,392,570,668,716,381,159,136,534,45,451,571,332,233,781,344,858,500,683,763,818,111,133,214,635,826,511,118,647,37,753,519,685,872,448,725,198,71,705,527,426,536,20,231,772,625,729,190,91,102,482,189,181,782,676,558,728,771,592,132,62,881,765,883,627,579,551,722,38,328,208,621,271,768,577,403,473,470,844,285,708,851,421,691,711,329,868,204,51,712,294,180,680,184,640,439,362,775,794,847,107,251,871,259,200,553,498,484,85,602,743,565,561,512,2,373,242,555,100,693,885,748,843,682,18,672,853,701,865,143,270,649,746,802,573,521,201,732,860,688,377,322,875,723,471,83,1,864,699,15,651,219,105,48,106,339,353,889,23,128,575,43,744,412,192,480,445,718,663,677,5,554,487,540,355,57,566,139,869,354,127,90,750,539,777,468,112,767,290,887,898,146,497,837,314,758,244,155,628,759,357,384,877,609,467,401,799,850,780,454,199,308,731,3,494,523,326,334,395,160,617,679,420,303,431,202,276,137,706,453,633,589,574,477,205,827,12,590,318,766,530,821,54,863,735,464,33,599,238,92,391,643,306,559,371,44,228,230,792,323,65,64,389,348,896,292,807,459,263,764,79,545,310,817,455,374,583,582,252,507,68,234,811,46,53,493,376,684,383,622,350,816,70,845,6,839,893,312,773,257,692,832,361,325,504,697,239,69,196,657,367,461,866,63,585,736,84,96,183,89,416,149,87,447,568,16,665,433,241,336,298,595,313,828,475,805,78,27,349,856,215,670,598,535,427,526,488,142,166,47,876,363,331,815,175,25,365,490,140,93,390,372,515,286,700,557,402,632,9,499,222,337,538,629,800,408,122,618,385,304,846,227,479,644,810,103,822,681,338,727,612,26,788,525,591,154,396,900,531,452,194,882,360,895,581,129,418,8,546,153,659,206,650,756,29,695,434,793,569,733,315,594,778,342,450,664,267,56,117,669,502,345,878,880,163,656,22,343,892,841,30,476,211,243,177,604,749,620,608,738,830,240,423,305,284,834,606,282,646,288,687,724,359,419,358,741,522,289,597,675,60,855,369,444,524,505,848,125,645,520,533,593,256,600,747,508,24,624,162,462,463,193,319,195,791,321,124,41,884,147,435,386,115,707,368,874,428,761,438,529,77,432,563,19,798,652,616,229,301,28,580,225,784,833,443,601,169,542,21 
291,341,337,193,63,732,130,223,459,527,753,170,510,888,812,225,46,460,605,493,499,532,343,540,267,688,81,324,844,176,516,306,135,370,694,691,377,38,881,234,175,138,506,394,501,152,207,37,117,384,59,623,546,730,735,635,252,467,295,218,538,449,385,403,634,353,278,229,484,826,60,192,416,609,296,479,671,124,219,762,683,597,402,680,48,570,824,434,690,255,227,552,288,832,190,4,75,165,330,659,204,34,270,815,639,368,569,393,804,704,725,89,596,54,618,711,505,110,72,657,258,663,358,299,463,177,398,866,120,673,62,238,783,339,430,108,588,860,49,58,443,829,790,876,526,44,445,556,809,92,817,823,573,737,742,389,900,604,239,576,898,744,65,433,608,669,107,813,66,494,825,615,779,897,102,882,425,490,53,410,668,52,522,14,266,599,734,592,189,554,210,765,19,483,872,319,116,397,262,294,869,251,104,757,166,281,798,197,125,342,359,367,230,834,401,263,198,201,379,539,364,64,502,500,259,167,587,253,864,437,868,495,478,594,643,731,543,763,682,51,76,878,106,603,328,519,456,24,710,787,109,571,305,720,272,27,558,886,515,724,652,119,793,77,640,666,362,457,155,548,795,714,660,224,747,144,171,399,544,111,188,780,318,805,12,381,277,466,348,100,71,468,616,633,315,792,133,520,436,148,36,472,891,256,619,877,523,91,630,209,5,134,161,706,105,150,322,491,323,531,614,441,444,717,347,534,87,606,244,853,400,20,574,748,662,448,566,183,774,750,631,598,788,529,126,632,146,627,82,68,286,191,856,727,206,275,452,221,61,307,871,382,405,301,45,142,78,50,103,884,427,297,127,187,8,16,758,766,196,17,625,157,137,271,213,781,485,528,95,470,340,47,480,469,658,770,839,250,290,391,378,719,850,806,386,289,11,101,810,156,789,536,679,561,465,311,93,145,718,559,420,97,855,692,33,18,371,115,802,557,867,586,701,471,414,535,865,67,30,496,31,249,162,96,894,751,73,428,611,514,153,648,254,893,572,518,140,464,822,435,885,241,512,361,841,590,172,357,23,577,423,261,517,674,508,820,816,205,651,761,136,551,2,181,728,564,700,349,612,689,560,352,408,845,41,583,879,489,80,141,600,163,304,21,759,185,786,373,338,284,575,797,375,487,151,821,390,545,303,646,707,854,837,388,383,220,242,415,589,654,212,670,768,264,887,345,721,380,726,366,676,477,369,300,279,346,849,602,697,280,173,194,334,818,132,775,568,215,693,712,509,431,729,647,555,56,222,601,703,764,595,320,387,801,88,686,195,581,672,131,98,582,325,843,620,273,772,446,226,883,321,880,567,35,760,395,245,179,158,432,326,814,42,421,237,351,407,769,25,833,308,26,746,504,796,678,537,562,236,424,462,331,636,656,118,547,800,453,702,310,521,827,363,344,677,374,214,733,695,713,476,858,831,859,112,274,129,178,1,513,248,333,661,69,638,396,70,154,355,74,454,149,123,591,579,655,524,317,741,749,892,293,482,835,335,785,650,128,842,755,174,418,649,114,645,771,685,200,426,99,578,404,147,778,9,417,584,684,653,90,85,723,862,360,777,233,28,716,580,79,863,642,199,461,553,475,799,22,874,503,644,481,392,819,283,327,488,664,276,698,705,309,211,593,247,164,807,94,246,621,767,626,530,899,282,440,40,202,217,160,811,336,84,139,6,708,525,740,507,406,840,15,268,143,439,776,752,203,473,565,617,828,442,784,533,3,376,287,549,563,422,7,709,665,861,313,836,228,486,450,113,791,738,32,356,10,743,354,794,159,687,870,889,235,803,637,350,739,121,497,122,411,736,419,745,607,39,896,265,292,169,216,413,257,847,624,838,613,180,873,314,168,316,696,857,312,808,29,451,182,412,455,240,83,86,55,585,681,542,851,550,438,13,846,830,260,409,498,722,372,43,492,429,622,447,511,629,365,699,208,541,610,184,458,895,675,232,332,298,782,302,269,875,754,756,474,628,890,243,667,641,848,285,852,57,329,231,715,186,773 
775,193,250,846,792,666,745,395,79,701,80,532,579,355,381,230,833,849,38,880,816,356,171,124,337,429,572,852,315,497,613,403,441,672,121,316,274,841,498,652,669,650,18,64,660,394,373,616,647,618,547,867,588,182,12,776,623,566,165,580,133,541,443,52,19,224,530,848,879,309,697,267,68,22,855,107,451,292,739,774,31,199,673,161,635,383,352,391,43,257,379,552,344,333,271,620,821,436,138,304,738,569,582,400,818,300,209,863,412,233,430,503,26,761,71,326,551,790,599,778,603,34,872,154,179,651,508,487,489,118,501,703,469,868,646,740,42,493,681,513,611,284,149,510,832,648,164,888,101,799,168,73,612,370,657,76,482,87,577,854,504,328,791,873,132,882,590,128,674,521,123,382,245,499,524,268,108,169,766,172,262,755,465,789,278,228,35,112,836,421,706,177,724,70,548,476,730,142,507,117,272,715,581,549,878,578,252,831,266,864,526,359,601,206,742,244,217,314,264,219,704,801,291,214,420,378,234,281,363,705,424,401,249,619,522,180,550,59,25,694,645,520,634,643,192,637,678,717,485,203,562,845,533,106,542,605,700,627,319,423,362,367,559,483,325,77,158,93,361,871,103,347,104,653,152,330,823,632,23,626,238,62,793,191,598,684,408,232,88,676,670,795,891,261,892,213,467,205,850,719,772,558,585,502,321,583,100,638,20,163,757,479,235,2,734,773,586,131,877,369,658,711,727,802,602,707,847,263,853,294,702,713,712,839,591,546,174,195,105,8,437,884,66,170,768,127,365,30,749,399,812,289,439,113,387,887,899,769,736,136,241,55,708,691,714,402,557,456,453,306,725,139,463,573,794,760,98,33,189,615,297,336,491,721,360,829,60,732,825,461,610,797,39,545,481,162,350,351,135,824,455,385,239,296,856,743,787,448,231,608,308,109,83,67,411,750,662,631,65,490,574,415,153,6,822,679,826,753,301,764,181,220,900,796,808,809,338,536,687,428,539,656,516,512,472,247,464,342,671,665,242,323,699,406,434,564,689,889,324,147,617,633,440,875,94,201,511,782,838,895,885,307,21,553,99,116,594,332,450,340,529,396,756,509,567,210,95,251,528,293,280,883,419,413,735,40,259,886,636,828,349,335,663,728,287,398,194,680,810,859,514,393,119,731,733,496,57,229,805,817,341,288,282,183,156,176,302,130,690,63,273,515,462,709,211,659,51,783,299,806,668,258,329,354,447,604,216,198,222,720,471,197,236,449,609,786,596,215,571,49,803,896,37,115,561,422,692,500,5,527,716,190,860,544,592,417,317,431,460,677,813,13,237,377,290,327,91,505,741,246,358,311,390,537,4,305,160,686,380,857,814,348,737,767,346,129,876,253,771,409,763,621,442,111,432,218,366,506,375,667,84,827,495,56,628,134,256,47,1,820,185,157,78,146,140,894,649,452,144,486,589,175,122,200,747,837,318,427,748,322,303,102,10,781,29,184,696,474,389,488,255,759,866,576,525,145,893,881,9,371,260,584,565,125,188,61,718,595,226,96,625,695,225,819,137,433,556,372,54,454,14,685,480,36,614,286,45,270,629,531,683,446,212,240,339,655,840,53,69,600,343,11,744,865,275,457,554,698,207,81,120,784,606,48,661,85,221,17,779,16,82,159,729,114,723,535,444,693,484,166,46,3,72,418,568,788,459,519,405,276,58,858,397,416,762,368,41,410,861,353,630,752,320,815,807,92,7,357,898,151,110,143,765,269,835,780,44,364,141,148,494,746,86,435,285,331,445,754,751,492,770,254,204,758,575,593,426,644,178,310,312,248,243,870,534,28,404,384,466,834,869,223,475,478,277,27,785,518,386,597,688,844,50,843,279,90,298,726,345,587,313,607,830,186,167,800,477,682,639,425,851,75,798,710,777,642,202,897,15,641,470,187,654,295,570,842,196,458,126,473,664,388,640,24,523,722,543,208,407,862,32,150,414,74,622,438,811,173,675,560,265,155,283,89,334,890,624,538,468,555,97,540,563,227,804,874,517,376,374,392 
449,609,239,781,886,301,834,504,270,505,30,455,836,262,190,328,754,662,457,701,895,230,184,241,545,114,390,828,768,512,870,630,413,795,337,120,318,853,338,343,401,418,238,192,708,429,313,723,678,841,472,669,159,48,234,548,711,459,485,825,617,162,103,115,292,420,778,885,665,434,562,314,83,84,891,236,128,278,483,513,380,34,734,1,799,23,766,767,527,216,737,215,467,68,445,628,715,479,109,3,759,694,319,298,625,344,6,897,822,409,43,709,78,635,581,11,412,882,440,622,299,64,790,124,561,441,803,223,681,222,693,518,163,893,532,549,515,384,804,570,597,205,51,173,576,572,101,792,611,880,699,89,255,122,397,117,302,143,583,730,283,656,857,593,627,736,692,102,596,284,45,213,57,247,326,606,156,145,644,26,743,818,153,739,367,25,274,499,616,42,427,733,729,104,218,607,827,541,273,166,466,878,808,392,789,770,226,892,354,896,682,414,276,61,373,636,395,228,684,264,399,735,217,134,183,664,92,659,820,753,86,756,259,862,821,219,165,631,290,621,823,268,777,295,517,850,877,691,855,257,471,720,182,648,547,749,372,408,728,37,152,604,211,618,189,591,551,465,400,663,31,14,169,569,18,246,615,265,208,772,176,54,888,199,629,443,707,377,207,867,858,495,787,50,750,249,451,198,864,687,872,424,267,502,22,379,29,813,47,362,802,741,371,300,805,543,329,525,641,65,250,447,861,578,601,275,887,358,839,462,884,793,311,573,376,843,69,417,93,240,94,697,612,269,613,35,647,500,819,533,672,575,88,620,261,706,798,765,874,461,801,308,320,396,585,660,704,587,655,700,363,361,297,809,859,824,36,568,282,683,501,322,600,791,279,605,80,306,890,81,221,788,430,696,537,27,95,542,458,698,752,20,105,15,546,725,846,679,586,356,71,17,224,312,353,453,830,382,170,677,796,419,422,194,868,634,779,556,33,614,564,387,845,521,464,676,139,592,675,668,580,817,619,514,196,774,637,39,552,237,381,16,425,452,38,138,794,727,460,579,340,640,463,815,357,304,289,848,873,854,726,366,248,175,507,285,388,394,172,574,666,55,832,232,333,654,19,690,195,456,97,649,193,79,851,448,75,716,494,645,402,244,369,718,174,243,757,866,584,595,98,281,72,334,879,323,231,639,491,633,714,58,439,225,566,437,602,32,349,550,608,179,842,415,251,837,360,784,13,623,368,674,66,87,140,492,769,538,351,480,305,303,309,74,383,531,370,534,127,565,431,763,582,446,144,523,403,497,242,186,374,473,899,149,398,164,516,112,132,468,875,142,91,53,732,96,487,364,831,272,342,113,28,673,178,49,428,849,478,738,826,44,686,626,123,474,703,760,871,432,484,526,59,557,147,197,76,833,386,807,506,876,107,519,423,7,643,365,245,559,653,742,325,168,689,724,695,46,5,508,157,719,40,280,567,856,332,233,783,200,393,286,126,811,481,296,657,775,202,385,131,863,898,865,171,554,800,661,310,24,524,212,188,136,62,60,433,713,680,201,816,710,12,755,330,125,129,670,181,651,324,638,745,410,705,41,375,8,509,761,814,411,73,82,167,488,860,589,540,558,206,339,535,838,667,204,341,852,594,520,671,391,490,121,317,331,421,141,748,210,287,160,782,99,847,511,187,744,180,496,77,203,21,405,209,469,486,776,407,266,135,598,493,563,751,85,161,70,762,137,293,721,599,889,764,106,336,158,786,355,10,335,416,177,498,840,260,119,435,258,560,717,154,56,52,539,111,773,406,130,881,220,4,712,442,590,67,702,544,529,9,155,345,650,185,503,116,780,350,482,894,688,254,476,288,470,758,277,191,256,438,603,110,883,315,571,229,536,642,359,722,321,632,63,151,869,100,646,652,477,740,227,510,806,829,522,588,844,294,426,148,133,747,404,307,658,327,810,252,528,624,90,797,352,553,347,475,235,731,685,118,2,214,150,436,271,555,291,346,444,146,454,253,378,746,812,263,348,450,530,610,316,577,108,785,900,771,489,389,835 
676,52,355,780,314,845,590,368,228,819,293,152,596,784,748,252,479,861,35,876,778,553,78,341,6,685,318,829,615,444,576,386,347,708,446,269,399,336,844,567,519,385,66,53,776,315,360,148,504,550,199,889,507,288,273,869,589,704,13,55,25,419,374,146,59,10,451,749,871,709,476,56,184,244,763,275,695,138,664,885,58,445,752,371,156,457,641,358,230,310,179,370,369,670,24,264,474,361,213,458,691,253,580,773,883,49,300,797,637,581,651,96,238,503,61,533,710,505,381,875,339,296,852,210,245,573,462,826,277,437,126,688,455,796,750,488,71,835,180,276,716,696,496,822,857,421,327,893,356,222,439,447,755,684,854,188,856,354,548,882,855,599,240,837,20,896,409,287,493,673,471,636,595,847,284,703,74,366,508,249,16,226,610,436,37,274,362,9,719,435,517,367,472,251,586,500,551,18,514,93,726,547,236,820,542,521,657,618,142,783,543,456,390,405,654,73,183,119,217,466,724,388,123,427,426,163,492,80,655,788,732,441,89,669,644,165,672,185,86,299,144,841,430,795,191,681,661,302,613,606,428,879,489,246,555,169,810,880,1,630,633,101,815,100,202,50,108,14,278,881,397,563,121,626,463,140,538,666,203,420,200,378,693,605,272,753,7,415,168,328,195,824,897,525,754,619,117,432,846,625,265,698,616,510,352,867,266,350,219,92,294,239,166,173,569,523,280,206,739,509,546,785,761,888,652,697,282,535,803,717,623,225,799,894,807,497,181,85,248,151,394,890,271,382,401,207,134,94,544,129,801,128,744,48,139,769,817,735,242,29,530,47,671,475,391,190,311,161,70,701,743,17,340,60,321,262,418,170,159,177,537,98,65,711,353,895,122,738,480,477,495,323,68,643,742,334,465,12,62,836,443,528,648,541,794,759,301,192,593,342,634,198,112,270,578,730,137,363,403,640,582,116,584,227,304,298,830,570,461,853,597,481,900,831,662,865,733,120,268,526,263,558,67,359,828,38,103,400,834,631,63,383,469,827,498,607,564,877,713,114,872,614,612,858,501,64,425,781,324,898,868,337,51,473,15,520,848,43,705,650,77,376,357,364,816,307,373,31,722,598,216,802,218,566,214,186,621,878,290,516,592,260,665,308,258,174,57,628,804,768,609,757,147,646,160,818,150,332,677,884,556,627,700,320,113,106,11,189,527,205,110,745,660,659,690,617,87,813,442,814,838,289,267,317,392,667,502,8,571,411,412,164,333,718,460,891,764,105,741,84,687,765,153,27,483,143,588,723,229,789,823,3,771,565,322,699,19,343,402,407,585,154,313,675,404,250,76,645,680,721,440,706,448,149,209,365,36,539,285,863,806,653,513,821,243,21,859,482,305,790,746,312,536,115,429,171,529,534,75,728,82,786,674,41,468,297,587,28,172,787,5,379,124,81,39,805,604,568,393,774,579,491,424,663,422,792,102,212,408,512,33,145,204,256,175,316,522,79,176,292,155,577,309,591,387,345,892,712,132,518,736,490,406,532,237,295,849,431,583,467,90,782,485,380,434,734,740,208,187,224,133,772,261,348,215,257,231,410,825,235,464,760,601,326,306,635,331,196,157,423,377,104,679,870,2,608,832,283,624,40,178,720,694,416,486,44,396,135,770,286,413,91,683,136,649,127,777,540,330,389,247,130,201,843,494,808,167,344,88,107,438,809,211,45,449,375,233,478,873,470,864,484,255,220,839,234,223,279,811,30,259,554,611,658,668,398,95,372,319,54,594,414,433,620,561,338,758,725,798,559,197,656,450,349,639,254,678,682,524,158,459,686,727,707,511,97,281,603,351,702,303,162,779,602,647,72,545,860,182,560,531,731,131,756,714,32,291,851,42,866,23,384,812,506,600,762,751,452,747,69,850,232,629,499,572,417,4,899,109,874,632,118,194,715,842,800,22,453,125,549,454,515,689,46,642,775,767,325,638,574,221,335,346,241,886,729,557,26,622,833,141,111,737,99,562,887,791,862,487,793,193,840,575,395,329,766,34,692,83,552 
94,741,255,17,622,274,237,267,506,70,757,495,575,700,507,302,243,16,852,21,600,259,580,469,751,346,135,106,850,153,660,396,127,273,648,767,290,498,615,47,76,182,707,637,230,193,158,497,124,503,305,84,397,763,788,121,213,152,704,816,858,380,297,483,839,738,422,314,28,667,72,514,447,606,407,459,310,316,15,239,870,379,188,588,561,381,818,639,826,218,617,584,307,624,608,249,263,187,345,485,91,319,35,515,122,701,502,488,830,577,442,596,581,90,867,509,151,424,78,171,347,632,20,329,642,10,557,579,361,574,493,4,784,419,99,133,838,572,568,227,192,612,654,616,37,85,333,51,869,710,863,758,144,416,279,421,795,505,149,79,762,714,631,62,886,112,220,840,49,129,726,299,556,764,175,834,643,357,88,368,898,562,173,210,649,594,735,865,14,400,167,860,116,449,849,295,241,783,57,499,768,492,517,369,384,367,576,474,335,457,326,271,222,811,201,698,374,413,663,376,29,332,706,306,109,521,343,699,873,191,693,611,734,652,686,859,174,891,857,321,650,587,280,148,542,508,573,327,801,625,67,83,119,861,157,468,98,543,882,453,341,531,387,530,787,890,692,797,215,43,727,484,814,50,618,337,242,66,610,282,262,721,473,638,236,18,809,382,444,528,633,36,80,378,293,609,476,348,225,46,585,134,871,161,554,548,439,394,662,478,389,455,344,829,184,156,475,677,223,254,526,33,386,199,162,253,702,418,97,722,578,470,131,203,235,629,658,496,820,854,599,58,872,411,328,694,532,851,208,385,6,537,641,897,431,160,392,73,635,690,894,668,139,325,177,465,304,373,597,853,82,666,105,790,456,480,661,888,354,405,523,451,626,183,159,251,604,195,525,284,190,417,755,360,388,754,812,723,640,19,500,550,679,665,89,68,461,425,774,211,534,555,846,601,217,22,689,205,655,440,443,403,806,760,551,275,61,42,733,95,847,466,136,30,289,71,682,477,353,463,231,363,664,143,719,899,519,257,180,270,467,570,287,749,359,206,200,31,659,752,553,118,281,45,802,825,60,245,538,107,12,264,732,653,823,739,401,592,286,671,656,458,352,130,323,717,623,796,212,547,645,120,533,761,590,744,771,24,265,261,383,185,23,340,836,372,900,471,3,52,269,487,454,298,792,464,512,680,138,128,728,697,695,331,703,511,729,428,560,819,619,278,805,32,772,393,880,123,646,7,196,674,351,228,169,77,879,881,539,406,75,358,300,540,48,258,154,565,339,893,336,408,884,874,436,365,55,313,785,370,110,742,552,724,163,342,657,571,678,198,318,685,234,651,740,292,896,142,238,756,209,716,395,427,821,246,621,355,260,26,150,583,69,39,272,715,9,837,518,591,11,165,628,778,798,822,252,688,605,334,766,301,288,877,44,445,789,753,817,8,864,889,558,564,895,221,141,569,586,426,216,885,779,731,247,189,402,311,219,268,644,486,712,559,813,375,59,696,524,126,607,322,782,705,567,746,38,285,866,620,824,145,441,725,312,793,155,244,841,669,776,146,429,433,582,415,320,504,489,377,747,102,452,815,362,404,687,589,250,430,291,409,748,240,831,13,501,868,794,101,878,827,5,115,887,147,391,462,848,810,856,233,93,765,232,636,432,791,54,720,683,527,132,713,308,546,460,114,226,647,807,769,522,803,108,41,309,446,595,399,750,27,276,613,172,708,513,684,53,178,412,113,479,800,92,828,832,181,410,833,603,743,349,536,450,317,598,168,593,804,277,179,630,366,781,390,549,104,63,164,759,563,634,303,81,248,438,197,777,414,544,711,773,166,780,855,364,770,96,350,775,718,371,207,520,842,86,745,420,56,64,34,516,324,672,875,876,125,675,535,808,170,2,490,737,296,494,103,204,602,25,481,117,224,202,74,883,140,730,437,194,627,435,691,434,1,736,670,862,229,87,214,356,843,256,100,283,294,676,315,545,491,111,448,681,330,266,673,65,398,472,566,709,892,786,40,186,614,137,338,835,482,176,845,423,529,799,510,541,844 
471,637,318,789,887,232,846,557,323,469,38,531,812,107,108,406,794,627,425,649,890,255,276,269,586,95,488,817,620,568,855,650,492,783,286,116,361,863,163,396,465,494,285,274,663,507,395,782,726,825,567,583,187,59,195,460,724,437,533,837,602,225,183,212,298,457,784,881,617,308,639,410,171,121,888,280,115,397,525,376,340,85,705,21,824,76,632,747,448,319,756,246,506,13,526,718,755,544,215,33,762,752,374,206,542,391,64,895,744,336,46,758,123,701,556,17,389,885,532,539,370,86,767,240,564,499,802,91,733,210,751,554,103,893,523,604,490,230,831,658,581,114,32,57,545,656,169,735,500,884,572,50,253,87,292,218,82,162,616,651,74,560,865,574,593,624,739,62,668,310,15,214,34,56,427,418,242,211,708,130,680,847,194,787,420,77,235,477,646,98,476,623,775,166,94,621,842,562,347,270,301,872,835,290,806,763,150,891,446,896,678,426,358,16,378,675,470,345,686,284,408,773,267,190,278,721,124,697,709,727,6,731,312,834,785,184,188,543,272,696,849,118,805,275,555,819,866,750,807,179,549,647,266,570,575,798,344,223,704,42,145,679,131,700,198,551,578,481,482,597,18,25,172,603,22,364,654,316,252,799,305,44,883,127,716,438,734,379,302,871,867,459,714,81,736,165,536,245,850,719,877,405,110,540,68,205,102,839,89,432,830,778,451,268,821,592,375,527,641,117,241,423,845,473,601,281,889,331,829,313,874,820,299,445,315,822,120,493,70,208,106,608,517,303,671,80,707,456,841,615,673,642,9,553,360,702,759,764,879,513,664,367,348,421,669,722,754,682,728,519,359,414,407,828,870,852,39,484,382,749,441,422,670,791,366,461,157,317,894,146,291,816,429,659,452,27,36,589,512,672,742,29,31,3,535,720,860,737,466,428,37,92,222,328,353,468,854,458,142,626,779,521,271,177,873,715,757,613,14,561,393,355,769,487,450,614,43,681,743,643,677,823,685,587,35,690,713,132,486,259,479,58,462,248,90,176,809,676,332,595,158,661,435,810,238,324,384,832,880,813,687,413,293,155,514,203,258,474,156,478,729,109,856,343,254,619,45,703,192,373,144,640,262,19,861,417,2,662,576,691,363,354,394,770,136,350,653,858,590,598,126,197,173,341,882,213,306,606,505,550,612,4,304,294,584,508,648,134,326,496,657,152,780,436,100,826,309,761,20,611,295,655,160,200,247,503,625,509,287,510,399,402,368,26,464,388,342,585,99,498,398,732,515,400,175,607,444,431,224,122,334,520,897,88,491,135,580,143,129,538,875,178,201,5,667,216,390,362,833,128,387,12,83,745,141,147,489,853,552,694,815,11,740,600,250,516,674,652,876,288,497,609,52,559,93,186,125,808,447,797,522,868,104,472,501,72,530,385,229,569,644,605,380,265,618,695,711,47,24,404,207,566,10,153,635,851,433,346,818,244,453,357,138,840,424,349,725,804,289,485,209,864,900,843,196,504,753,660,263,23,327,296,239,67,167,48,352,746,534,154,836,683,51,786,307,79,105,712,257,717,337,622,771,320,765,139,356,30,419,801,838,321,7,180,119,528,869,524,518,634,97,322,571,811,645,249,236,862,415,529,610,371,483,71,369,392,411,151,741,193,251,264,792,140,844,579,101,776,282,439,65,228,96,161,297,442,582,793,480,381,84,594,577,638,800,78,220,60,710,219,164,768,633,892,730,75,273,277,748,351,61,231,403,112,454,857,335,233,412,283,558,772,133,69,1,565,40,777,383,189,886,148,8,760,449,689,53,692,416,591,28,55,170,665,113,455,221,684,434,443,898,688,185,475,234,430,795,73,300,329,502,628,202,878,199,547,137,463,693,182,738,409,630,66,63,859,49,723,629,546,706,279,541,827,848,596,573,781,330,260,174,217,796,261,191,666,372,803,226,537,698,149,774,339,548,338,401,311,631,699,168,54,333,227,204,237,588,365,377,325,256,511,111,314,636,790,243,159,495,440,563,181,599,41,814,899,788,386,467,766 
167,795,172,186,841,81,458,290,441,6,623,637,648,470,276,289,518,41,851,71,755,102,586,347,821,127,257,255,812,201,743,455,175,346,507,713,217,784,356,65,157,308,669,612,232,235,137,718,273,626,465,56,359,657,666,26,298,68,759,883,866,368,260,415,818,789,546,559,73,485,251,593,353,468,651,366,104,363,90,85,860,202,228,408,778,264,772,678,796,162,720,587,300,376,684,453,515,236,268,329,225,513,4,296,55,750,384,710,815,424,254,758,444,301,864,325,43,700,189,83,419,466,138,266,640,48,645,327,503,404,675,27,725,665,53,320,832,335,792,370,166,372,437,357,18,250,199,114,839,873,836,579,5,200,92,341,571,345,197,77,525,650,842,140,887,97,364,761,209,11,547,111,330,533,267,737,621,219,303,271,898,806,20,482,711,502,611,865,155,318,274,845,340,333,803,321,462,814,15,495,604,686,717,164,658,463,350,733,388,690,360,215,285,692,245,773,398,481,712,253,16,597,721,152,45,649,156,783,857,231,477,642,751,707,682,831,32,876,833,549,828,332,464,7,573,596,698,576,808,406,142,76,89,852,169,687,3,305,895,265,129,664,161,701,762,884,726,811,261,51,564,291,775,105,400,386,448,12,511,461,243,558,671,413,427,2,863,312,394,754,824,14,115,180,521,392,624,205,365,178,823,44,835,163,452,307,311,617,527,529,670,618,379,779,389,324,569,653,401,96,519,1,512,75,173,222,877,297,282,560,729,732,40,82,63,702,591,568,741,807,526,70,856,315,545,599,655,844,446,536,69,634,405,897,509,358,630,208,837,727,879,652,145,418,381,608,476,537,745,786,60,694,131,871,749,756,467,870,352,635,432,541,760,302,125,110,538,168,793,190,184,704,744,355,281,628,714,809,667,64,575,387,440,472,176,151,752,590,672,314,309,457,820,510,112,49,850,319,484,416,520,539,660,647,816,505,154,192,607,46,764,354,194,8,378,57,430,662,592,478,390,508,797,238,493,900,685,128,80,234,557,454,396,542,223,99,373,120,489,776,306,188,191,185,661,829,67,377,813,203,103,229,708,609,843,580,187,668,84,555,802,383,636,153,116,724,425,848,31,412,598,283,565,631,817,705,581,95,422,486,269,183,10,595,805,434,899,627,34,181,118,275,391,322,888,244,473,659,246,58,676,488,516,242,739,572,810,343,606,798,699,72,794,61,574,531,862,211,500,35,30,679,294,170,117,50,861,885,375,561,74,362,247,326,108,98,17,641,158,886,393,633,868,874,397,504,123,198,677,141,9,790,763,644,279,121,746,524,616,337,594,585,139,426,734,263,890,38,403,544,150,498,286,638,736,126,656,550,313,78,288,344,278,42,256,757,94,819,788,380,39,328,491,785,735,804,79,728,681,407,777,492,106,875,122,304,709,747,748,62,881,867,548,566,896,420,239,423,395,287,146,858,625,535,442,367,501,361,497,109,716,474,605,801,781,310,237,826,563,216,589,556,894,768,554,688,124,483,834,460,697,135,447,532,227,663,19,382,782,487,872,174,249,689,449,196,133,620,410,523,695,107,610,730,583,342,614,439,113,629,514,241,543,130,791,24,787,855,780,221,849,800,66,226,893,13,179,731,769,822,854,272,47,601,295,639,369,719,160,603,530,528,280,654,480,691,224,299,240,570,703,696,409,602,87,28,456,588,680,475,577,119,374,740,429,632,402,567,100,54,193,351,551,892,144,753,770,159,643,838,469,582,417,317,552,615,578,91,522,799,258,431,438,171,619,411,316,230,22,37,882,338,443,562,36,436,233,252,674,517,385,494,584,348,722,847,336,738,134,435,891,771,148,147,331,830,323,506,471,23,165,177,459,553,451,880,853,29,774,293,859,248,52,284,534,479,277,292,195,715,93,399,262,450,445,218,889,182,683,220,25,600,673,499,207,88,767,723,846,210,270,59,414,827,212,33,143,214,622,540,428,334,101,339,421,132,433,693,86,213,490,613,496,878,742,149,21,349,136,206,825,259,204,765,706,766,869,371,646,840 
757,212,196,344,292,750,90,66,175,440,759,698,6,690,612,134,646,523,464,671,20,307,562,210,549,647,449,241,182,111,1,25,173,51,249,866,145,653,693,571,608,649,399,441,141,132,181,425,131,3,481,663,851,833,432,566,57,246,336,514,369,822,722,342,573,585,14,87,712,349,450,475,352,424,49,198,636,310,587,695,637,488,127,718,360,760,153,18,244,220,62,873,68,781,419,288,707,108,318,772,167,163,390,494,546,705,725,39,99,240,797,340,408,496,365,782,297,98,376,458,784,484,412,296,67,392,5,765,53,309,253,402,892,60,263,655,334,577,499,142,157,557,660,829,598,271,326,581,268,567,222,669,596,622,618,303,838,327,104,478,842,129,607,801,486,682,75,841,305,375,733,383,686,857,477,338,561,274,491,489,719,593,510,520,592,788,347,569,605,798,745,170,282,321,899,41,92,324,348,405,415,22,100,463,837,21,468,58,219,48,30,112,716,862,825,177,118,527,43,214,610,701,795,319,380,81,434,178,191,147,863,17,766,12,27,850,658,345,664,619,381,753,64,616,71,15,4,560,35,407,231,461,601,234,113,329,670,746,600,799,474,207,738,374,859,465,152,384,136,659,735,775,770,206,678,435,796,603,265,55,316,666,26,487,239,480,519,121,226,107,304,672,656,538,876,437,580,251,155,225,373,279,900,114,814,709,439,70,533,224,476,54,164,444,123,720,823,102,858,568,885,517,29,617,130,845,597,204,230,372,7,493,761,726,456,2,755,223,815,628,861,721,294,180,767,752,124,357,137,237,398,110,852,632,621,831,896,201,333,276,256,311,786,828,547,72,80,199,346,300,741,396,332,482,389,495,629,386,185,388,126,470,521,94,208,694,528,827,161,744,742,676,269,46,203,821,884,550,209,397,13,835,730,826,808,216,473,97,217,650,704,609,769,184,221,642,467,661,394,50,16,455,395,430,539,504,281,536,865,414,339,160,777,620,868,428,764,460,559,28,187,24,590,148,855,748,320,553,353,834,312,820,812,572,774,732,82,749,331,165,785,120,158,361,416,794,354,86,501,492,687,117,442,886,400,411,589,535,541,172,537,810,287,506,524,61,604,278,576,200,811,832,800,887,471,197,840,700,595,783,189,257,534,558,878,591,724,11,522,758,743,485,457,860,540,555,233,45,804,532,85,817,358,190,146,168,366,497,889,215,140,626,59,625,654,19,734,193,836,447,657,36,645,526,554,243,227,731,260,847,286,171,135,830,433,710,417,150,803,505,881,893,362,708,854,245,648,213,404,706,697,453,33,895,500,552,431,849,880,689,119,409,368,843,47,551,762,280,69,615,159,805,778,284,586,490,267,32,91,452,166,793,343,363,508,308,644,128,313,413,623,378,874,188,890,848,529,10,651,40,238,74,711,509,429,544,176,377,479,635,443,252,186,512,525,871,103,864,683,264,768,270,792,515,715,138,385,584,317,302,570,179,462,677,254,139,258,306,754,299,751,65,578,8,869,133,624,870,588,853,446,679,846,614,426,763,633,56,228,393,556,174,631,790,218,703,779,44,247,79,295,232,42,330,275,688,194,809,351,89,88,341,802,379,883,325,582,283,195,192,894,445,483,285,675,418,574,242,335,315,211,856,250,602,787,291,101,630,236,371,293,503,125,723,37,684,680,162,641,122,756,451,518,739,599,702,322,23,662,421,565,728,151,454,382,875,272,877,436,370,771,359,34,579,273,813,410,403,898,717,674,391,872,513,891,290,277,438,115,737,83,427,406,747,867,38,807,169,713,583,511,466,780,594,248,298,776,93,229,106,806,727,563,816,897,364,714,78,266,882,639,84,542,154,355,367,328,844,691,543,611,692,387,73,469,459,888,472,262,729,314,627,498,531,665,63,818,301,144,606,420,205,773,109,149,350,668,652,323,685,545,696,422,401,673,502,643,9,819,96,255,507,31,548,116,740,289,183,76,839,337,613,356,259,791,423,824,530,681,448,634,156,638,789,105,516,640,736,143,261,202,667,95,879,699,52,564,235,575,77 
571,591,175,784,888,326,822,432,188,484,41,595,769,178,118,275,826,676,449,736,885,150,256,136,622,92,470,814,623,464,831,546,387,727,204,270,231,877,248,415,502,542,223,215,617,386,280,775,654,786,564,680,325,134,144,534,647,385,493,852,613,320,210,83,316,498,706,873,720,283,636,397,40,24,883,142,108,313,561,496,398,8,656,10,828,91,620,671,413,181,684,409,377,43,500,673,804,425,67,32,725,694,337,205,605,462,36,890,728,262,81,750,22,713,562,28,340,875,491,579,472,13,787,93,466,471,734,184,642,100,712,543,348,889,478,664,473,276,842,576,521,107,12,179,608,612,33,792,494,894,567,26,266,68,357,66,258,37,540,700,243,519,878,702,638,733,658,170,631,246,21,138,19,227,411,459,213,62,721,29,774,862,148,799,455,110,165,530,699,152,558,604,753,30,418,525,816,538,278,191,311,861,802,297,859,691,116,886,351,891,590,314,416,133,523,615,346,322,599,159,457,818,368,54,197,633,25,650,719,679,124,652,393,808,742,345,202,560,328,739,848,220,748,308,435,788,849,773,781,131,469,696,257,552,481,778,402,343,763,74,86,616,201,686,352,586,507,467,367,695,18,31,245,555,4,312,760,332,128,735,189,15,874,90,663,456,761,260,154,860,865,544,791,14,840,129,563,102,837,672,881,354,482,437,117,300,9,801,23,342,838,704,339,242,795,670,514,421,783,58,454,460,823,568,537,431,896,224,827,304,864,833,394,580,305,782,153,400,146,230,238,724,516,168,759,96,630,451,821,554,677,553,120,635,381,810,871,737,879,450,692,295,453,569,693,632,674,609,705,545,447,389,334,839,872,858,7,511,255,744,369,405,690,746,244,589,99,448,893,177,349,844,379,588,407,75,221,621,426,688,661,95,42,47,667,683,866,660,443,477,27,34,298,211,261,528,856,509,60,556,709,499,273,119,882,729,754,649,125,581,414,251,850,557,637,653,89,681,767,559,602,780,722,495,193,797,682,61,489,390,406,103,583,318,121,217,764,772,303,535,293,596,358,819,219,430,327,812,884,843,765,284,241,376,510,166,302,483,130,433,756,180,847,309,269,549,1,678,187,306,222,770,355,208,867,350,72,758,601,777,277,237,410,796,330,366,768,832,628,708,155,186,82,513,887,235,173,531,624,610,572,51,285,145,515,401,640,38,566,446,587,160,757,488,147,790,399,745,85,641,363,574,139,132,207,439,634,611,212,659,317,279,233,104,438,518,319,503,190,550,607,857,526,505,310,547,508,383,163,172,403,517,895,336,486,111,573,267,301,597,868,94,69,77,603,158,529,290,805,171,265,76,106,710,140,55,420,824,424,731,800,39,717,606,194,465,749,626,876,291,548,592,185,501,329,333,59,766,504,752,461,855,126,520,475,5,497,356,199,629,669,619,272,225,687,830,643,161,3,378,264,584,35,162,707,834,395,344,809,123,485,232,64,863,388,214,675,785,353,408,249,845,900,820,360,429,811,803,296,112,375,321,371,50,56,63,423,668,533,87,854,645,6,836,203,97,182,618,114,627,259,593,698,281,740,127,263,53,422,741,806,286,48,57,331,480,880,512,452,565,372,299,582,817,732,183,268,853,445,492,594,551,428,45,476,324,294,149,711,115,169,218,751,167,815,639,135,726,287,359,98,174,20,292,289,539,536,715,527,335,49,689,479,644,789,229,71,206,743,105,282,762,532,898,718,156,247,195,869,436,16,200,598,78,697,851,228,137,307,361,468,771,46,80,101,444,109,738,474,164,892,113,17,794,392,648,73,625,396,490,52,84,216,776,384,458,234,651,362,666,899,614,192,382,176,427,779,240,323,315,541,701,88,870,198,577,380,506,662,288,723,442,665,11,65,841,122,685,575,578,730,141,646,798,829,570,657,835,252,364,143,226,793,254,253,703,419,747,347,434,655,79,714,365,463,417,373,157,585,807,44,2,236,70,370,196,716,374,440,338,239,404,151,441,600,825,274,271,391,412,524,250,522,209,846,897,813,341,487,755 
358,573,369,787,862,351,846,622,370,616,49,255,867,322,293,433,631,717,321,710,897,377,122,368,340,202,347,854,794,626,882,730,495,840,453,14,467,736,412,331,343,309,212,169,789,520,432,602,741,864,365,713,78,16,261,639,775,608,385,678,507,83,98,178,186,240,816,889,668,557,536,230,159,155,894,345,221,291,466,594,218,144,805,51,701,63,809,793,541,327,739,59,597,118,316,610,561,584,191,50,798,688,396,431,709,151,42,898,849,542,97,605,153,595,493,61,544,879,437,728,165,135,812,216,641,483,835,280,723,337,633,585,4,895,660,448,446,486,674,627,731,305,124,187,645,612,224,808,642,799,727,128,326,225,489,208,303,257,704,770,284,756,743,533,468,759,745,31,634,418,91,356,134,231,295,692,115,281,599,110,469,684,248,648,201,28,342,308,598,54,304,747,715,200,5,729,841,450,372,167,564,885,790,522,579,817,334,893,375,896,766,546,192,17,245,609,463,166,716,419,407,567,75,266,269,681,217,629,843,819,82,804,102,878,860,52,226,587,157,477,734,323,788,379,588,874,888,555,873,374,538,769,203,654,669,663,403,473,494,87,277,576,278,517,37,460,551,350,488,698,69,71,67,680,94,219,441,306,285,782,239,107,892,302,632,516,510,519,289,852,795,514,802,160,525,363,290,335,884,742,821,562,12,636,26,459,126,822,108,359,700,750,421,274,826,417,133,589,456,149,72,515,881,672,726,180,827,506,853,577,891,666,312,647,497,861,40,394,41,161,24,703,615,410,422,35,650,425,833,503,725,607,93,414,158,552,539,806,829,395,803,294,250,210,499,690,735,591,553,754,319,264,349,670,796,724,114,511,341,586,646,241,430,834,389,679,95,233,886,100,176,630,386,780,683,21,1,339,443,753,800,39,152,46,455,777,758,687,693,254,139,64,89,409,502,442,718,275,260,773,837,310,490,228,776,531,818,526,7,714,640,528,838,560,259,752,183,465,535,764,613,847,423,596,181,500,547,123,682,148,325,22,244,537,76,154,820,696,593,592,383,744,621,842,408,113,355,872,810,869,712,482,198,30,382,371,491,247,286,706,485,43,785,236,458,721,103,603,292,600,34,521,79,8,751,481,80,711,360,470,559,313,393,590,20,172,504,880,638,524,130,404,116,179,786,444,298,719,352,720,774,86,550,353,578,454,475,96,104,570,611,265,866,388,300,865,168,832,18,689,439,733,84,131,175,618,772,271,484,235,392,373,451,99,391,601,492,534,142,415,215,472,527,220,65,496,354,653,262,256,402,317,899,15,328,272,378,33,19,332,876,174,188,73,768,121,238,498,857,336,480,132,55,614,147,137,376,859,565,767,856,105,677,722,138,390,705,779,830,558,479,487,23,549,2,45,177,858,209,851,474,887,162,357,428,88,732,287,222,566,505,737,366,127,509,530,762,32,68,662,140,738,62,362,416,875,273,173,748,338,234,348,156,656,476,435,694,671,92,399,60,871,870,877,48,644,807,449,214,27,604,194,77,193,136,57,508,760,740,299,665,781,111,532,461,199,164,707,263,686,324,749,757,452,655,66,440,29,652,755,828,556,101,185,10,575,761,580,543,628,13,268,554,863,413,318,457,811,667,447,658,242,619,163,204,307,548,109,784,267,387,129,823,56,868,301,270,765,146,606,58,190,85,445,229,464,438,791,223,232,189,563,529,420,699,3,251,6,792,253,346,691,675,836,815,44,315,211,513,150,53,424,196,288,213,801,276,197,540,112,702,661,249,119,38,651,141,814,397,195,813,330,47,581,572,574,120,778,643,568,36,205,406,501,9,426,90,825,400,243,855,695,367,624,434,381,746,279,117,296,380,545,143,890,429,436,25,649,571,427,617,258,673,170,207,883,125,657,763,282,771,320,384,824,839,523,344,848,297,512,246,74,635,518,398,685,171,844,81,664,659,184,845,227,697,329,625,364,797,462,206,70,283,252,478,411,361,182,311,583,106,471,314,145,783,831,333,401,582,676,623,405,708,11,620,900,569,637,237,850 
662,438,37,776,877,542,761,205,44,536,155,587,709,400,260,61,823,754,404,813,881,87,152,43,584,190,333,799,671,261,789,345,161,666,132,464,72,874,495,399,488,522,105,91,597,146,88,714,518,716,461,796,483,296,126,663,524,306,326,840,567,427,214,9,291,443,583,864,808,337,595,248,3,22,873,31,198,103,592,696,425,32,618,90,774,167,655,553,327,33,527,608,148,263,353,560,815,196,2,144,669,544,247,308,735,451,97,887,742,215,267,651,17,690,508,174,283,851,358,692,528,35,804,6,276,391,634,449,450,74,593,503,649,885,437,675,378,471,822,367,446,230,112,490,718,481,7,849,502,890,591,145,325,180,525,4,644,27,381,781,629,491,872,802,638,834,489,363,540,202,160,122,138,626,289,582,123,13,698,18,809,855,142,779,376,185,130,520,736,243,620,603,673,1,777,350,746,411,150,55,439,826,704,406,880,577,186,870,109,886,448,125,435,389,665,460,100,191,432,65,499,820,415,25,86,445,42,492,766,657,454,535,420,773,705,516,255,572,342,720,812,493,624,413,203,743,819,747,764,171,285,770,226,513,312,694,526,635,762,258,114,440,408,589,509,599,302,349,118,795,137,183,307,452,92,140,798,344,38,613,39,107,860,162,510,472,730,99,21,811,833,654,861,64,884,177,487,36,836,612,869,284,825,254,228,578,5,707,45,110,817,545,98,209,722,711,606,233,853,83,689,504,792,731,426,623,895,129,806,405,835,800,559,767,372,713,187,165,293,269,434,827,511,51,785,153,444,382,758,334,700,322,428,699,295,865,898,693,867,268,748,120,580,676,667,436,505,421,610,648,573,227,151,821,858,843,84,515,52,661,252,259,637,682,73,771,53,633,879,220,390,846,219,485,383,242,538,579,213,725,519,290,235,270,782,650,854,468,462,447,176,47,374,77,141,601,837,484,76,469,622,356,341,95,878,680,757,640,370,656,501,124,892,658,780,719,319,615,740,430,419,697,688,281,568,839,569,57,551,563,195,237,672,550,212,292,674,847,359,365,614,473,234,831,275,500,173,786,882,871,828,82,134,628,423,201,486,379,188,422,710,271,801,210,384,417,54,594,239,249,299,845,402,549,852,199,364,830,576,807,179,62,414,768,564,265,824,790,678,788,246,279,29,684,888,388,34,380,733,737,539,301,357,26,317,158,543,23,765,352,387,257,751,523,332,723,477,729,272,677,506,418,104,78,135,330,681,670,163,749,143,60,56,361,321,724,324,274,373,588,787,894,517,565,476,331,530,323,121,328,555,412,891,664,395,178,455,416,546,616,842,46,16,351,514,85,643,206,750,354,93,343,192,598,168,40,217,753,175,775,783,250,659,642,94,297,803,625,862,401,619,470,441,340,631,480,81,703,453,695,300,838,231,512,371,19,497,211,166,701,641,653,79,113,715,876,532,407,75,397,347,646,223,287,732,816,208,245,755,59,398,48,63,866,280,50,602,712,355,218,262,797,900,784,548,313,863,868,316,320,533,310,496,170,11,189,585,507,586,101,844,621,68,856,108,251,339,424,14,431,128,571,537,256,632,111,115,197,475,605,727,309,286,15,596,396,875,474,311,394,687,236,590,829,752,133,429,805,554,369,531,726,346,136,561,127,119,147,685,89,116,102,679,181,760,630,305,636,238,224,204,106,24,617,264,647,348,581,482,154,131,769,232,556,744,459,12,457,791,49,558,706,303,896,739,304,225,70,897,465,66,241,759,164,841,810,69,58,139,377,318,728,71,207,410,222,360,686,607,157,889,182,149,778,282,498,253,552,386,240,194,294,392,832,702,393,221,639,159,814,899,463,288,244,184,335,734,627,273,278,541,756,20,857,298,574,668,611,529,570,645,409,708,67,229,818,366,604,521,534,763,28,721,717,772,458,683,883,117,609,156,193,738,338,466,741,375,660,478,277,547,80,652,314,336,557,362,30,566,850,10,41,96,8,691,216,793,266,479,456,169,172,329,575,600,859,385,562,200,433,442,467,368,494,848,893,794,315,403,745 
357,623,218,30,465,370,64,141,391,102,827,687,197,642,496,241,397,77,834,136,146,227,701,358,793,423,310,1,588,85,247,184,144,34,474,865,201,589,542,255,304,408,711,665,38,162,164,537,61,166,431,143,703,850,728,156,46,88,677,810,818,668,544,514,841,805,165,45,178,452,219,618,483,602,71,364,390,372,211,256,863,438,22,714,538,607,508,318,691,229,382,828,176,702,641,276,453,128,375,673,16,250,157,414,120,817,689,83,491,368,613,571,584,243,825,678,115,194,216,110,617,629,23,363,400,137,224,597,204,459,437,93,882,68,56,355,782,461,605,191,53,546,683,696,134,147,345,70,742,708,686,770,265,470,284,429,792,448,49,59,775,393,659,309,857,142,109,874,121,177,783,278,610,800,347,620,735,324,239,490,894,628,290,334,753,779,616,856,199,651,421,655,151,435,895,105,72,732,169,576,580,170,331,281,635,106,485,163,336,80,96,154,487,868,506,570,297,585,404,270,213,516,837,308,231,352,365,569,637,19,809,279,839,223,273,890,343,829,860,486,603,579,117,258,360,139,174,455,403,493,113,57,339,737,60,450,248,497,869,632,346,458,468,553,873,855,541,773,186,135,794,646,859,37,676,464,510,261,530,118,337,762,112,554,237,131,815,238,401,307,520,203,104,441,601,495,627,289,4,36,512,108,900,74,764,492,502,208,705,446,442,277,305,796,62,389,752,466,540,378,814,150,55,189,43,551,700,275,6,524,179,532,342,236,212,245,802,460,862,849,821,175,785,314,566,812,385,801,79,371,47,388,767,888,621,424,731,14,519,626,763,624,411,625,344,315,185,329,581,631,323,684,210,772,419,526,698,835,326,454,287,575,692,17,180,230,694,482,286,527,457,572,663,99,190,842,880,784,545,35,205,755,706,795,354,12,484,302,513,434,606,679,872,443,155,225,664,430,578,161,133,528,647,716,574,394,8,222,847,69,667,292,168,171,611,52,704,565,481,188,215,84,745,123,788,899,518,407,101,563,498,777,582,594,587,422,51,196,428,592,550,31,138,11,682,871,195,15,539,26,149,187,747,845,803,612,348,695,335,366,717,719,295,333,298,447,661,718,311,325,819,398,769,864,586,590,831,153,449,511,226,249,198,463,889,564,897,119,111,301,473,396,562,619,771,383,462,377,420,116,387,789,489,272,547,425,715,559,833,662,471,356,412,246,721,86,881,5,811,65,269,376,557,406,359,63,707,883,367,710,167,321,214,699,206,268,173,445,536,866,670,750,830,878,733,353,300,152,729,433,262,760,130,870,341,369,680,807,846,427,114,660,327,776,418,477,893,127,27,671,158,774,633,415,826,374,577,100,181,42,3,697,145,40,432,669,132,573,417,399,202,274,823,634,884,877,361,291,746,48,652,21,436,858,200,558,501,734,797,183,836,768,472,666,875,535,29,791,681,232,478,749,832,630,476,7,499,515,228,244,743,410,712,654,693,267,92,622,744,220,786,90,738,294,820,504,78,598,853,799,653,379,736,709,413,838,221,103,604,568,790,20,548,643,402,505,500,317,439,252,656,24,259,688,340,614,521,766,148,282,126,271,804,330,891,73,556,781,636,107,898,806,140,2,879,217,350,351,657,751,727,552,66,757,525,595,260,824,9,644,531,638,18,822,50,713,488,58,467,395,844,756,615,730,328,234,362,207,741,517,725,235,240,648,233,854,469,843,33,257,475,193,280,778,10,867,780,322,754,861,722,600,674,494,787,263,560,306,373,851,97,266,549,534,852,209,650,13,264,332,724,480,759,440,75,288,591,28,543,320,740,720,658,426,885,808,599,409,182,690,758,509,349,98,380,798,125,761,640,254,293,253,567,39,533,848,896,122,596,503,739,386,82,523,726,25,639,159,41,723,32,392,384,95,76,192,876,87,672,381,312,765,479,507,416,94,816,303,886,91,124,338,67,840,81,316,160,251,319,593,522,609,242,405,608,283,555,748,313,285,649,456,645,892,451,44,299,561,89,172,685,444,54,887,583,129,813,296,675,529 
294,546,113,73,452,589,90,66,362,239,772,466,461,824,679,92,250,248,766,331,527,261,502,390,621,540,61,149,825,18,523,199,6,187,605,818,145,474,787,115,129,203,590,512,233,19,28,323,10,346,168,450,608,809,731,416,84,173,528,688,745,545,394,366,768,632,224,227,352,709,49,381,341,572,333,335,516,108,119,586,838,439,158,660,377,535,778,451,729,76,356,711,110,760,430,60,303,20,215,630,51,74,64,670,429,641,580,438,783,581,657,391,533,81,782,673,208,244,21,426,432,614,176,185,464,31,380,780,96,567,255,54,876,383,138,180,724,741,443,23,171,716,721,820,308,4,264,384,832,612,826,786,355,602,571,301,894,483,42,344,889,672,539,402,828,514,32,870,7,214,789,392,684,891,95,851,569,288,79,324,892,488,287,151,548,652,691,798,134,570,284,793,24,367,897,130,82,619,53,365,804,319,268,573,541,175,669,334,98,401,170,125,299,871,459,487,121,322,453,316,193,338,700,273,78,251,397,484,869,204,854,456,702,560,596,874,363,855,821,272,481,790,69,399,314,421,468,240,719,666,1,337,202,792,56,263,361,791,827,662,472,282,655,329,815,844,485,654,27,370,771,658,796,26,687,186,389,221,503,70,99,752,359,696,40,140,678,269,270,304,431,296,471,493,582,671,388,290,283,15,425,178,900,50,661,770,413,150,634,249,285,192,101,765,52,242,578,498,518,343,732,211,328,534,97,537,622,410,112,761,448,349,437,585,427,470,668,258,845,831,726,449,836,326,400,706,267,754,63,135,103,281,822,888,378,436,722,58,463,497,886,501,373,495,132,201,62,111,393,859,318,509,16,642,317,376,704,858,117,229,482,306,486,104,34,617,542,500,357,423,320,407,593,271,454,803,883,600,418,165,315,718,781,784,398,86,354,153,740,218,690,555,830,490,191,195,536,222,643,386,311,245,810,694,475,172,142,88,835,297,849,408,609,278,566,286,811,321,265,339,29,184,550,3,880,898,302,293,342,521,252,685,428,848,526,403,55,385,701,575,800,47,231,188,797,799,25,206,455,414,310,83,624,794,705,743,647,473,467,644,520,597,167,107,549,579,651,623,405,551,682,440,588,877,462,606,867,353,213,375,382,38,124,257,875,305,899,336,147,295,476,639,374,576,674,663,348,522,372,434,659,823,728,198,511,276,561,395,751,708,358,499,736,156,843,256,868,157,753,137,496,492,371,220,179,77,861,857,564,562,13,114,127,749,33,615,300,313,607,865,627,710,841,853,631,109,163,350,720,618,406,592,489,864,100,517,491,686,785,230,159,591,152,807,616,254,893,155,120,829,91,847,557,212,795,237,404,161,39,223,146,744,22,162,210,543,274,763,351,695,174,45,775,613,879,840,340,559,525,260,598,241,513,833,30,479,737,610,755,196,764,862,360,480,878,565,46,748,650,531,415,872,834,788,292,154,226,253,85,243,529,291,664,515,698,219,9,505,544,11,611,144,689,595,707,636,368,563,839,757,860,232,603,774,228,814,460,48,816,665,645,160,554,506,530,638,583,238,347,105,625,128,189,758,131,458,558,697,412,169,93,538,842,177,881,35,445,801,646,2,896,742,71,262,850,216,629,259,856,667,747,532,106,779,379,446,309,739,87,675,637,424,67,693,166,494,676,17,246,547,813,681,508,887,182,298,102,209,519,235,759,275,41,444,75,808,417,802,279,164,717,43,236,699,205,852,776,94,733,777,626,746,599,649,713,141,422,123,469,750,183,89,628,574,873,190,756,65,325,277,633,656,725,247,80,57,653,116,730,148,680,819,837,396,882,762,409,703,8,640,692,524,577,139,594,734,36,885,419,133,126,207,411,234,735,817,895,369,457,769,635,181,136,584,805,225,727,14,197,507,200,345,307,59,68,12,846,552,604,712,327,587,280,738,683,122,648,510,866,118,5,289,266,767,194,364,387,143,620,504,477,556,44,330,863,478,447,553,217,601,435,332,812,884,714,312,433,806,37,442,723,715,72,890,420,465,677,568,441,773 
159,425,373,379,317,672,431,396,494,564,556,12,790,859,780,319,31,535,549,519,832,536,160,567,190,589,30,602,894,342,813,548,228,662,759,362,459,65,863,155,84,17,398,277,713,268,280,76,327,719,4,671,221,465,725,682,521,573,304,276,550,143,137,322,517,209,588,629,472,851,71,85,324,518,724,504,569,93,183,774,578,464,639,474,144,306,887,698,776,288,445,179,444,721,134,79,20,310,271,403,441,194,279,826,723,148,308,794,891,798,540,153,488,68,645,511,563,467,75,756,34,559,528,259,626,208,701,830,336,699,166,300,427,753,553,32,604,858,89,199,631,812,694,809,532,130,386,663,856,196,875,737,486,673,749,331,886,576,434,691,881,855,104,328,595,740,332,603,161,484,709,614,678,873,26,896,242,453,74,247,606,69,400,43,118,302,763,483,192,257,57,853,156,404,609,554,409,402,278,195,880,646,351,791,39,593,792,625,158,758,610,490,45,624,169,393,301,56,592,586,287,7,198,443,226,350,508,385,900,679,742,738,211,836,844,468,387,818,545,2,187,838,366,539,477,806,801,8,879,803,202,696,168,823,458,113,476,866,432,534,613,224,727,94,312,669,460,408,256,580,650,524,412,383,615,50,36,289,523,372,141,666,732,793,112,382,262,574,337,401,255,407,715,527,129,804,22,520,743,333,216,551,664,420,320,861,375,368,487,241,61,365,231,689,376,40,46,617,131,361,249,423,775,778,541,263,201,692,533,874,773,59,416,768,695,762,284,232,557,651,293,612,835,566,3,371,309,642,354,123,406,338,729,705,37,163,210,456,265,330,899,345,205,54,18,339,292,152,150,898,285,240,116,251,151,80,641,800,238,51,764,52,78,572,274,816,295,281,442,229,122,24,482,710,788,530,577,171,348,526,659,452,779,608,252,515,98,286,870,16,687,315,461,616,542,254,172,14,711,769,716,29,867,656,119,9,583,99,544,658,885,684,787,440,136,647,814,41,6,718,140,596,81,234,841,712,121,264,717,233,128,347,42,890,313,298,417,447,854,547,847,451,634,570,839,325,162,677,142,752,439,374,394,307,463,828,824,86,599,843,95,239,175,77,745,741,571,418,529,797,244,215,102,600,125,649,777,501,1,25,704,165,261,15,435,28,761,734,370,213,355,807,225,188,258,825,380,730,132,733,845,739,862,415,500,303,266,245,204,746,353,591,884,197,857,683,582,635,457,436,648,681,138,147,135,450,892,428,772,33,200,250,410,653,106,786,594,329,513,714,193,170,784,497,220,107,120,728,693,686,507,260,805,466,19,637,164,235,321,13,429,481,248,697,811,108,654,509,561,864,411,810,290,109,668,253,270,531,297,560,627,675,139,543,90,343,426,869,236,852,275,38,437,584,455,377,395,820,72,726,514,703,473,618,103,359,871,390,652,299,503,878,388,176,685,191,448,446,525,822,246,889,688,842,5,621,64,27,126,499,115,364,568,47,707,469,212,269,82,88,174,496,305,834,218,781,620,114,670,478,895,124,146,789,219,619,640,367,883,751,206,636,491,10,720,676,522,413,399,294,565,579,389,817,105,173,638,414,747,316,363,795,771,272,454,323,91,796,667,214,630,597,230,708,438,422,802,237,893,510,766,223,506,750,73,314,607,495,502,690,755,157,489,334,605,23,757,318,66,744,581,538,335,897,154,356,49,492,58,70,760,282,185,101,92,381,479,421,644,340,799,111,480,283,643,562,748,117,207,349,391,833,133,765,177,182,344,203,657,326,623,63,702,475,706,512,731,485,369,311,243,785,537,11,433,67,585,598,850,291,424,829,876,110,449,655,97,872,149,180,267,587,736,558,782,601,184,877,55,217,83,178,296,660,840,611,575,674,273,848,397,35,384,622,819,680,665,127,661,60,552,471,100,346,357,87,462,808,505,837,430,189,62,860,815,341,222,767,493,546,167,352,722,516,632,358,735,392,846,44,470,405,145,419,888,700,48,186,181,827,96,360,849,590,868,628,555,865,378,783,754,831,498,633,21,770,227,821,53,882 
835,188,205,768,740,692,586,247,66,622,300,717,227,283,325,193,862,792,165,848,497,264,378,72,498,449,651,711,21,349,251,185,367,408,38,619,172,870,375,715,753,760,143,237,399,307,306,684,514,291,659,818,771,467,45,705,382,380,258,658,191,739,625,164,194,436,268,675,860,120,743,434,174,117,627,77,465,360,780,712,216,270,454,345,671,568,40,133,9,249,235,794,183,400,396,611,871,316,219,488,591,490,561,275,725,547,429,609,60,80,560,541,124,800,122,470,392,647,621,654,774,138,801,218,59,655,208,457,331,67,494,698,755,670,496,820,90,330,756,415,369,229,238,557,785,617,195,836,1,825,2,176,597,361,579,152,455,96,377,750,481,53,834,881,236,827,416,391,676,427,259,296,318,528,615,50,319,158,802,324,421,822,478,823,495,506,28,273,832,626,819,8,706,134,815,253,580,210,469,267,95,463,435,379,894,303,179,635,297,650,263,202,745,484,846,220,186,493,114,137,744,857,578,215,439,266,260,248,30,486,573,126,544,224,145,512,599,32,232,808,686,461,464,623,109,289,344,804,107,129,501,737,633,12,353,616,720,531,441,559,321,354,549,558,642,160,125,199,290,826,262,479,365,533,277,450,879,691,62,443,323,198,530,153,572,687,535,118,130,550,674,797,847,313,897,140,644,161,652,620,763,395,831,327,567,468,204,472,170,213,803,328,241,58,601,841,787,49,893,438,844,700,417,713,381,828,856,127,723,104,384,778,764,775,466,217,446,245,359,147,704,852,6,91,858,374,293,79,634,398,746,221,571,239,581,892,900,640,722,201,10,150,816,843,789,287,409,424,510,35,798,286,487,636,796,809,211,47,212,679,82,480,614,520,305,736,261,829,708,629,734,851,98,255,231,412,639,517,163,732,180,587,302,476,876,600,817,340,27,731,357,308,351,31,269,790,719,752,71,181,254,548,46,63,840,761,699,795,562,662,17,84,887,777,884,707,355,645,786,151,492,386,672,402,534,393,515,433,521,814,334,566,838,228,604,681,536,883,116,97,552,445,240,791,39,503,526,554,839,833,869,203,168,799,196,64,459,509,413,68,661,638,730,605,444,42,222,265,519,88,574,890,690,688,779,22,406,868,749,878,159,338,701,811,613,584,326,372,781,874,618,276,282,854,782,358,101,55,867,718,52,403,108,146,112,178,366,299,873,14,225,507,85,762,182,364,284,595,543,742,656,69,516,475,563,460,5,423,94,864,452,209,167,575,646,726,482,200,677,141,885,898,51,332,759,425,776,272,43,529,729,337,606,793,695,376,419,702,735,805,689,110,315,527,48,473,309,368,546,184,246,370,583,553,121,387,234,477,274,773,630,442,727,682,485,206,849,16,738,214,788,668,697,70,728,555,405,155,585,373,105,590,565,175,667,285,25,169,73,821,278,4,103,342,230,896,462,685,298,244,716,3,304,136,837,657,418,594,733,250,500,113,100,845,19,119,669,474,637,502,539,577,866,192,765,11,865,895,132,593,78,703,767,166,311,280,696,401,13,74,709,508,339,882,54,448,643,226,102,336,56,537,317,7,603,504,20,513,432,394,556,256,343,312,665,588,853,18,33,524,678,92,748,724,453,414,428,680,23,139,41,872,456,135,810,173,87,207,653,44,34,352,569,362,489,714,407,589,648,26,281,106,252,295,663,812,523,294,612,410,123,861,346,540,766,673,83,694,754,347,598,751,156,830,660,356,29,426,899,389,310,65,877,233,891,741,131,420,37,404,257,769,89,545,505,148,525,602,784,551,807,187,390,824,431,592,532,397,15,242,522,279,149,886,806,81,610,61,458,888,875,115,383,292,157,93,747,511,596,649,770,850,189,641,162,177,631,632,350,491,320,721,783,243,197,570,576,693,388,607,757,75,855,582,666,664,385,859,76,538,483,447,710,128,471,813,430,154,411,271,628,422,333,190,288,772,335,171,86,889,99,301,451,111,518,322,880,371,758,363,499,142,223,341,36,842,608,440,329,314,24,437,348,542,863,683,624,144,564,57 
509,312,226,131,140,716,5,86,311,354,823,510,144,832,738,155,293,345,657,446,110,358,551,366,534,671,229,122,614,66,151,98,97,79,535,863,214,321,810,341,338,378,554,513,185,89,145,235,24,83,271,532,767,853,689,514,43,252,401,468,578,707,599,425,727,587,54,19,484,640,189,414,435,595,28,323,642,225,315,653,779,568,129,761,209,701,560,166,572,208,138,814,128,836,376,103,360,71,327,763,53,42,261,686,488,643,703,58,505,497,788,238,574,203,616,789,296,14,170,458,596,646,224,302,259,205,117,834,29,543,158,221,888,34,228,324,573,755,288,36,180,742,778,870,441,84,385,416,661,342,623,809,542,696,638,369,891,504,73,379,887,464,340,565,683,561,16,878,95,352,829,481,768,895,277,730,602,364,199,492,807,325,485,217,544,766,629,706,299,724,478,563,63,436,899,96,1,454,242,428,739,27,64,604,567,44,682,18,165,33,91,171,503,882,634,265,149,437,204,331,415,363,752,390,287,118,517,255,644,148,881,184,731,195,268,874,570,688,784,310,245,833,8,559,194,136,105,275,361,647,94,371,442,597,106,164,549,820,681,791,569,174,760,254,847,708,286,540,101,475,819,781,806,112,765,304,506,452,460,9,250,792,30,713,70,319,522,247,295,39,176,479,494,610,673,666,421,347,125,65,188,279,900,108,782,804,523,17,675,239,212,55,152,698,10,418,677,300,633,550,843,383,92,598,126,722,349,373,82,695,75,249,620,651,518,134,747,227,860,795,830,547,670,314,477,774,135,615,7,146,215,157,875,802,483,564,793,74,175,353,719,427,592,635,253,87,22,107,257,720,548,445,183,420,160,241,759,718,193,201,333,357,396,51,162,660,605,684,37,624,555,348,461,154,362,850,893,516,292,244,116,827,825,855,575,120,220,72,588,406,776,639,822,398,267,410,307,404,631,222,109,301,737,655,273,233,163,246,877,344,729,332,618,443,711,318,845,289,282,172,56,26,462,52,884,848,219,508,377,704,263,801,594,813,700,612,13,500,619,329,840,69,240,190,735,797,213,67,236,343,429,150,583,869,571,705,692,463,591,489,408,740,77,322,611,305,725,386,566,447,771,585,693,892,258,451,879,465,320,476,334,186,339,270,880,444,852,57,303,472,649,658,480,733,400,697,328,278,562,487,419,865,669,264,294,218,393,521,846,530,197,627,430,382,837,46,824,133,842,269,613,230,557,440,422,187,654,783,512,662,169,181,192,844,223,685,423,202,762,757,769,786,687,794,775,139,389,309,668,728,581,496,59,889,274,617,391,800,862,399,6,590,335,866,317,455,854,280,23,817,179,872,717,153,777,456,306,11,76,284,90,826,102,251,407,411,392,520,132,659,381,161,864,405,890,856,537,207,546,100,424,41,672,723,206,603,558,526,714,368,606,674,297,527,744,667,40,851,745,470,625,710,859,773,417,49,290,426,78,351,519,276,652,394,552,231,62,266,637,142,694,2,359,211,816,439,413,664,785,849,787,482,749,796,403,839,580,15,630,645,450,156,676,501,459,743,736,104,356,47,528,177,48,650,114,608,438,803,434,45,3,531,868,355,885,178,285,622,457,50,898,665,243,196,734,395,680,88,732,507,533,709,216,790,577,372,248,751,119,628,579,499,31,746,20,498,753,21,471,380,835,641,600,876,431,493,167,35,541,316,770,448,85,350,124,867,433,873,298,346,798,113,127,412,198,861,702,291,805,754,715,699,741,691,818,68,384,336,313,756,159,147,626,721,883,130,838,60,538,502,337,678,812,283,200,115,780,99,593,123,799,841,808,536,896,621,589,432,143,772,409,260,636,191,586,609,80,886,556,370,326,367,469,25,726,679,897,449,272,811,397,387,281,656,821,32,828,81,168,486,256,330,495,4,12,137,748,529,524,758,525,648,210,690,750,262,632,173,858,141,61,491,111,712,182,576,388,237,365,584,490,663,234,374,871,539,582,545,453,607,553,232,815,857,467,308,601,831,121,402,515,764,93,894,375,38,511,474,466,473 
774,405,88,584,783,542,431,108,55,391,537,801,115,283,266,122,845,586,506,711,361,107,579,27,733,368,596,456,58,187,186,77,245,190,22,805,36,880,307,614,690,745,354,429,154,194,173,750,324,174,687,630,797,706,196,463,189,155,485,831,534,772,647,235,526,704,164,483,738,42,665,597,221,203,469,38,366,409,668,505,593,241,197,467,747,599,31,116,160,145,296,849,54,464,598,591,851,180,215,563,367,465,393,169,478,753,553,452,63,8,564,674,202,730,451,545,177,581,544,384,803,227,590,204,73,480,137,424,279,40,585,491,859,495,234,779,380,225,830,352,129,201,310,559,594,501,139,654,158,878,114,326,436,294,394,178,514,80,179,500,528,6,874,820,624,642,291,697,522,237,420,150,338,582,595,68,541,81,731,359,818,856,341,795,700,657,92,641,740,681,787,134,600,140,887,71,434,471,304,410,89,382,482,206,896,157,133,551,306,488,84,37,741,723,824,407,167,606,170,24,605,854,768,118,317,312,182,426,109,217,652,60,762,123,53,791,492,328,589,815,770,414,344,449,121,131,233,807,94,87,339,490,561,211,142,692,562,419,775,570,209,466,444,675,827,554,263,484,191,661,440,521,678,287,357,515,861,547,117,325,301,350,399,149,523,468,765,11,151,567,748,640,679,271,897,100,742,49,379,425,813,175,894,135,686,363,238,435,316,286,812,323,223,288,441,816,842,93,883,376,870,493,218,481,156,804,886,10,498,59,292,821,659,587,254,159,649,330,651,472,794,718,219,18,858,608,371,377,494,462,540,284,626,685,691,881,899,408,782,412,183,320,755,847,758,297,346,459,650,69,705,499,401,809,810,835,308,360,207,721,1,610,760,276,181,536,437,781,696,627,715,862,293,51,29,639,808,735,300,497,95,670,369,611,829,340,836,362,21,727,375,453,694,35,74,667,826,743,111,19,104,636,96,216,860,763,432,710,728,396,86,2,784,603,872,438,413,732,806,23,439,246,798,280,583,825,628,417,249,785,454,684,841,200,622,607,351,786,32,268,460,222,34,571,119,773,403,302,855,634,746,79,415,853,509,98,285,682,282,4,788,716,720,557,272,70,309,530,365,3,736,865,778,800,837,128,578,739,756,863,13,274,546,817,832,662,780,244,612,814,574,163,390,846,867,229,153,41,833,470,14,535,46,56,248,265,623,395,888,168,335,381,62,656,257,210,672,319,677,529,457,91,568,461,507,220,132,777,17,875,305,199,48,602,525,531,270,290,617,552,879,898,353,724,802,473,702,52,208,427,575,613,477,857,648,273,635,790,838,776,566,255,261,609,45,508,734,165,327,224,78,474,632,621,386,355,406,345,176,555,374,496,620,418,503,433,725,72,767,99,658,616,764,256,850,796,321,106,749,166,295,385,487,569,549,329,20,423,314,695,633,198,172,524,653,892,247,744,411,44,676,144,518,162,834,422,532,645,688,105,699,143,267,876,192,30,504,664,752,430,712,442,890,147,839,33,713,891,475,709,120,644,819,259,311,527,476,275,47,97,843,230,347,884,12,402,576,240,126,313,214,269,298,112,625,588,67,631,195,421,445,83,489,253,840,388,869,258,193,372,852,383,601,455,793,264,277,693,82,416,262,844,213,278,792,331,5,479,400,141,39,516,332,619,299,811,333,428,638,7,556,315,358,322,577,680,550,239,761,519,242,766,303,707,722,799,103,789,511,226,486,671,127,882,373,655,236,404,900,726,458,85,873,185,895,719,252,349,16,717,43,729,113,510,683,50,520,364,660,448,864,136,539,828,212,580,513,171,25,251,618,337,188,866,877,392,669,28,336,885,893,232,250,76,57,398,637,543,703,517,708,771,289,446,124,565,848,389,548,397,592,698,604,228,281,343,573,572,146,757,538,64,823,450,512,560,759,689,260,378,348,646,754,75,342,643,701,125,751,65,502,334,130,533,61,663,110,66,9,889,152,387,356,101,443,161,868,615,673,184,629,231,243,737,26,666,447,370,148,90,205,318,138,769,871,558,822,15,714,102 
184,650,401,703,830,310,823,627,470,529,120,173,871,464,371,445,419,596,476,558,900,408,139,466,358,220,247,819,864,593,886,758,440,832,605,26,502,586,509,212,201,171,321,240,796,485,412,503,689,867,268,604,20,49,441,559,775,588,448,665,637,25,46,251,325,273,824,874,481,698,367,217,223,272,894,443,231,274,296,524,370,189,790,87,643,42,863,833,719,341,771,12,623,178,332,492,344,557,234,51,753,639,319,522,642,159,48,897,882,671,98,569,261,406,658,74,528,860,316,691,68,254,736,252,740,354,855,350,717,469,592,437,5,895,617,262,606,587,577,573,734,423,235,250,501,478,275,723,798,746,834,270,285,279,462,276,433,364,681,712,404,831,631,318,618,669,714,102,490,388,192,399,214,334,190,820,156,342,416,115,602,564,224,450,199,32,532,432,390,27,150,840,600,287,15,748,816,549,311,195,733,880,791,566,328,822,456,885,365,896,787,581,90,64,99,670,486,135,781,487,280,357,73,312,230,705,266,682,884,808,123,839,108,888,881,103,168,757,300,313,686,410,776,317,660,877,891,389,890,511,444,715,127,793,654,589,305,530,580,85,320,575,293,451,40,633,645,459,465,567,163,86,132,614,166,182,233,207,411,773,228,216,892,435,537,398,518,591,351,837,768,352,718,202,260,496,222,407,865,657,769,536,21,615,22,516,180,806,188,403,571,754,426,458,784,232,56,699,211,142,8,397,876,619,710,71,759,597,817,724,893,544,204,562,483,870,53,427,111,330,6,561,780,500,242,66,666,601,792,460,609,640,130,608,122,298,255,752,795,473,872,381,125,76,329,704,706,543,519,851,186,323,288,656,726,611,213,711,372,468,739,208,369,815,373,625,140,106,869,50,79,439,514,810,744,63,31,347,512,663,825,36,267,75,248,741,649,688,812,138,205,93,174,551,513,286,659,152,414,818,847,259,687,387,693,368,767,340,37,655,794,628,755,417,92,685,290,376,374,813,535,841,361,531,278,661,505,109,664,57,335,24,101,692,45,97,789,495,722,696,434,709,647,782,620,157,284,856,732,821,538,499,308,1,506,565,526,241,297,803,420,19,728,151,461,807,177,676,271,707,30,282,39,28,668,634,161,547,226,258,632,294,281,418,43,117,684,879,467,303,91,494,137,59,756,507,380,799,167,651,843,158,701,415,674,493,508,118,17,735,652,264,887,249,472,859,309,805,34,570,379,804,55,100,116,568,861,366,612,94,331,402,480,114,283,542,452,585,119,613,65,203,721,343,14,442,200,690,436,277,306,377,899,3,219,304,382,9,4,181,848,291,185,113,836,88,405,475,835,510,484,245,35,548,314,128,425,845,560,683,828,148,534,638,104,474,533,858,785,672,315,353,16,680,11,70,172,875,175,853,607,873,144,527,299,124,826,396,395,375,621,842,454,155,673,263,743,23,126,725,69,846,153,539,243,849,257,112,636,378,236,428,295,498,653,497,583,675,58,326,52,850,829,889,7,763,702,198,400,29,766,105,18,345,147,149,424,745,838,446,624,770,136,324,595,244,133,742,349,679,477,708,762,646,572,47,590,38,648,749,801,629,191,187,33,457,641,750,697,541,54,430,392,827,504,301,482,764,814,578,786,95,576,322,83,383,635,209,747,431,563,145,788,96,854,221,327,713,81,729,146,333,131,616,134,302,362,809,179,206,346,348,488,384,556,13,356,10,738,256,360,550,730,802,777,121,525,169,229,237,89,610,61,391,60,737,363,165,662,162,731,491,394,107,84,695,183,772,269,154,765,449,78,393,515,455,129,778,779,584,41,336,599,265,2,603,44,866,337,77,811,774,413,630,553,555,622,386,72,194,227,338,210,878,579,598,67,582,594,479,677,141,520,253,355,868,143,523,761,246,694,421,193,783,797,385,453,800,429,540,225,80,517,667,438,521,197,862,160,678,545,170,844,359,720,215,644,409,852,239,307,110,238,339,574,447,164,218,176,626,82,546,471,289,857,751,292,489,554,716,760,463,700,62,422,898,552,727,196,883 
629,173,309,829,649,766,747,424,205,788,151,180,752,672,612,248,598,851,58,870,883,466,5,297,67,517,320,866,713,480,779,491,365,799,406,149,381,573,768,499,481,383,16,2,812,345,344,308,611,740,238,882,374,140,215,843,689,673,52,291,133,296,247,51,21,15,619,858,860,664,507,34,78,134,876,236,531,109,637,859,40,283,802,193,372,278,733,525,299,252,360,265,412,476,48,392,563,408,118,266,759,404,508,688,869,25,141,892,762,543,459,241,126,574,165,340,662,756,396,863,289,175,867,125,336,546,645,711,426,359,275,684,302,889,730,504,127,771,394,385,729,560,333,694,839,487,229,895,444,538,540,292,646,519,809,96,780,259,615,880,772,669,495,828,122,896,526,162,551,582,303,518,418,746,260,710,1,277,581,110,167,453,477,554,23,119,314,38,724,264,479,502,593,160,429,577,725,82,448,12,687,783,470,753,661,666,534,837,154,885,644,450,330,237,569,192,207,53,370,405,659,506,37,322,335,312,357,211,770,830,524,595,19,822,773,75,514,298,29,398,391,750,602,704,269,818,833,425,778,503,449,878,387,366,601,342,739,838,142,443,496,201,698,198,97,148,188,41,305,871,233,380,20,667,280,91,576,555,159,591,139,231,846,482,397,701,161,410,112,625,473,785,897,369,775,511,145,350,886,699,558,655,469,527,189,824,138,572,88,81,488,419,184,146,718,530,240,281,735,358,432,734,844,874,693,578,624,490,852,681,831,427,705,881,737,707,42,95,108,83,244,884,379,343,465,63,268,137,716,203,819,232,544,153,86,782,835,801,545,69,685,33,571,423,463,338,464,273,196,743,651,13,315,285,586,484,258,249,152,317,549,66,169,800,323,887,10,634,765,321,400,493,89,712,722,179,284,71,102,840,594,334,475,349,754,806,535,348,636,319,458,50,24,256,520,683,389,347,310,708,717,128,539,168,575,407,850,596,271,841,623,456,900,808,608,856,562,217,395,628,368,728,174,417,703,197,216,255,825,501,72,209,433,774,331,461,709,873,676,213,826,682,605,879,452,18,388,845,618,898,865,337,11,328,64,446,804,45,559,677,195,225,588,300,741,436,220,183,592,583,84,797,131,382,468,228,437,875,329,568,553,221,600,445,147,117,212,811,796,751,447,660,30,541,478,738,101,454,641,872,663,441,652,272,202,136,105,55,440,287,218,606,792,607,565,767,46,842,250,823,777,435,150,199,286,650,640,49,509,409,386,166,324,516,434,868,696,182,564,155,616,790,245,35,332,214,528,697,185,658,748,28,888,402,326,550,77,208,257,414,791,65,204,471,542,144,85,589,807,610,413,515,270,290,124,234,62,736,339,862,849,460,609,827,157,60,854,617,599,731,700,375,353,194,276,68,390,721,59,821,135,864,497,87,451,156,647,22,111,757,123,510,114,3,172,816,691,384,227,745,439,604,254,552,467,855,79,164,579,422,32,129,106,462,224,301,613,261,92,304,43,764,695,781,263,421,894,719,90,325,706,393,295,399,121,163,813,557,653,371,311,814,306,505,401,566,570,361,120,373,115,787,438,352,377,116,243,230,803,430,656,702,420,222,186,633,590,293,235,483,251,74,671,890,104,494,755,537,657,98,316,638,680,288,415,36,411,31,820,226,364,8,769,17,798,177,643,665,246,431,107,54,61,784,403,749,219,521,80,57,313,793,274,130,580,223,170,294,877,355,794,597,367,585,853,103,206,200,832,9,94,489,536,512,614,626,47,267,351,7,635,532,327,442,376,416,587,795,726,428,513,529,253,474,620,354,485,744,548,242,262,522,639,714,362,143,176,715,341,642,678,318,670,603,556,100,675,776,99,472,500,723,27,861,627,113,191,834,178,817,187,346,815,356,455,848,567,533,763,93,857,190,621,686,742,457,76,899,73,836,492,14,363,654,761,810,6,648,44,584,523,378,786,26,668,692,732,279,720,630,132,181,282,171,847,631,561,4,547,789,39,158,622,56,679,893,690,805,486,758,307,760,632,239,498,891,210,674,70,727 
496,141,233,583,331,832,432,236,252,704,516,88,637,867,795,114,305,763,303,806,802,480,58,373,75,670,98,718,839,259,663,346,134,608,576,487,314,289,880,360,309,201,163,81,692,129,172,54,312,563,30,861,493,513,518,828,445,577,59,199,282,385,293,119,321,86,404,645,798,810,223,12,144,343,714,287,659,3,436,876,387,418,630,481,102,433,831,440,507,150,198,442,273,767,13,69,318,178,112,502,505,91,377,822,854,117,344,776,833,672,683,42,329,277,371,604,589,435,133,849,256,413,740,101,337,339,490,868,157,527,34,465,680,758,603,284,315,873,132,89,601,790,644,870,769,176,270,860,698,301,751,661,642,708,842,138,898,405,391,840,897,748,245,731,349,882,243,599,250,533,676,594,687,893,79,865,108,334,281,173,450,192,495,239,46,370,567,286,497,416,347,685,242,237,829,406,403,139,322,32,857,536,193,821,458,448,741,588,10,752,472,351,247,682,525,124,74,21,302,431,530,261,229,368,235,115,457,123,869,688,838,511,200,738,742,485,561,579,379,146,135,877,290,705,196,694,690,142,791,721,186,835,324,578,400,62,702,892,294,674,612,25,824,16,411,407,161,156,103,827,606,620,361,420,586,11,396,489,280,285,35,587,666,728,76,573,130,388,106,311,187,697,878,522,654,730,29,389,792,409,248,565,864,354,422,887,265,262,353,15,171,165,28,451,393,342,266,323,615,419,602,627,722,874,508,611,348,548,652,841,657,136,699,883,755,574,291,14,529,471,492,858,636,383,283,362,78,365,380,9,616,83,830,491,47,623,784,545,254,72,853,93,519,384,182,122,159,22,27,871,585,36,110,160,240,179,575,558,33,45,596,6,40,566,166,890,153,626,441,392,364,231,191,595,727,562,720,63,77,710,446,614,782,691,650,591,216,96,777,167,717,244,386,363,477,526,151,181,553,634,564,5,801,437,269,121,711,338,656,780,834,499,899,712,568,794,836,23,126,531,68,484,70,147,886,556,17,307,772,554,2,474,352,888,473,501,399,807,809,238,891,438,539,760,744,299,197,684,298,879,775,213,168,592,209,719,863,19,649,761,66,395,219,158,803,467,498,162,633,667,328,673,228,762,183,359,812,811,118,376,582,60,454,152,551,48,593,617,624,597,540,797,97,569,292,846,131,460,517,856,724,787,818,255,194,43,65,145,590,464,92,693,823,453,848,572,427,695,580,653,800,398,185,177,225,488,815,332,640,366,184,39,241,778,214,881,665,67,715,470,625,732,524,355,482,7,381,678,466,786,736,71,771,735,109,681,20,414,542,227,456,272,175,789,584,120,504,510,546,852,308,837,425,31,494,260,26,449,127,765,686,745,276,689,95,90,756,734,258,847,544,87,651,275,675,417,452,646,53,655,211,701,618,378,217,310,773,140,421,605,263,754,116,52,444,707,426,658,500,783,475,820,668,814,264,671,1,73,215,428,38,104,382,220,410,267,295,100,154,64,202,469,424,713,459,538,855,607,463,631,875,335,402,726,128,557,804,274,816,613,174,660,503,317,523,753,703,170,169,107,296,629,189,621,61,212,372,515,781,143,327,766,788,206,598,412,279,535,367,188,747,345,443,813,313,479,844,204,851,218,476,619,532,643,356,55,394,350,600,483,571,44,512,278,547,80,805,340,180,514,509,320,224,896,326,662,8,306,56,4,647,639,37,18,226,560,268,622,808,336,872,251,221,369,743,528,521,99,785,257,358,749,552,729,648,253,94,208,401,246,506,203,559,609,774,300,817,543,664,429,297,737,581,190,468,49,700,555,733,57,537,826,859,541,725,375,205,770,125,638,415,288,768,486,709,330,333,895,105,374,325,520,111,669,819,357,679,799,51,885,155,195,635,570,779,677,793,207,632,41,746,234,455,341,397,164,319,894,230,884,534,149,82,825,862,610,113,549,447,439,210,408,628,304,550,641,723,232,750,461,271,390,137,222,900,696,430,50,434,845,84,85,850,478,757,843,716,889,316,764,462,866,423,706,249,759,148,739,24,796 
795,133,165,465,365,786,170,48,105,527,690,656,18,672,608,78,696,652,388,763,96,316,485,160,482,647,462,390,191,118,13,10,175,122,222,842,115,689,709,603,639,653,320,362,233,109,174,428,176,16,469,760,847,802,359,661,94,304,266,491,297,816,694,274,470,500,12,221,799,373,492,422,293,356,177,130,635,272,651,759,522,459,215,657,384,726,157,1,168,190,28,869,41,761,368,312,739,90,269,729,265,161,438,513,659,628,658,167,106,238,788,327,345,561,289,750,353,183,415,573,770,423,555,251,21,451,3,768,42,275,247,481,891,208,349,678,254,610,509,136,226,543,599,833,706,326,287,722,232,596,186,593,638,609,673,246,844,281,135,615,846,120,634,841,412,785,88,805,369,424,670,411,648,865,475,347,473,248,557,440,633,614,532,576,516,731,292,471,685,775,754,124,357,267,897,45,163,253,385,342,429,51,93,520,863,27,463,158,166,188,40,95,714,836,837,113,54,480,11,193,662,721,734,299,404,43,417,117,212,249,860,5,699,29,34,818,663,276,544,627,408,767,84,660,22,36,32,580,46,409,260,601,602,164,141,339,716,780,521,791,484,162,747,366,831,398,59,308,101,751,664,745,688,278,612,401,823,640,203,77,280,583,92,467,258,550,468,83,150,139,334,727,776,511,888,425,539,237,303,307,433,340,900,129,781,738,391,85,457,145,512,38,98,361,205,743,815,33,877,551,885,597,107,715,184,856,650,195,372,402,35,517,797,800,514,6,687,149,755,519,852,809,241,140,784,681,67,279,223,210,507,50,850,530,566,867,898,306,381,207,250,235,806,832,579,37,79,178,321,311,771,332,338,461,455,533,563,317,127,410,108,430,489,187,196,774,452,845,255,718,733,698,192,68,252,783,879,487,125,523,4,825,703,814,838,319,525,58,214,643,677,531,684,119,245,700,476,655,358,60,24,427,394,350,589,534,420,595,849,528,330,128,858,704,876,540,742,447,574,26,197,47,545,154,855,665,296,526,450,839,261,798,821,591,746,725,144,824,343,75,811,180,179,510,396,720,386,204,567,671,778,81,352,883,324,395,645,478,558,189,501,787,354,508,575,23,538,227,600,199,764,868,762,882,494,114,826,796,598,810,200,242,584,586,861,542,631,39,625,812,728,499,406,872,556,585,148,15,827,654,66,803,379,143,57,86,322,437,892,131,65,644,70,666,620,44,630,315,819,554,708,7,590,497,537,300,211,642,263,851,305,111,89,828,456,782,477,72,813,431,884,896,291,604,848,229,675,262,335,719,736,389,152,893,506,562,378,830,874,695,231,336,348,829,25,503,667,328,153,592,142,793,744,284,490,466,206,71,56,582,290,772,419,474,483,239,741,91,377,445,682,414,870,104,886,817,529,9,578,100,147,202,713,432,460,486,169,295,405,711,375,217,102,442,444,887,159,857,613,325,766,256,735,498,730,268,346,547,393,298,493,99,403,686,173,82,333,282,710,301,692,137,637,19,864,64,748,881,488,835,449,668,834,565,387,691,701,69,224,370,536,277,581,808,181,702,789,20,182,52,216,329,31,273,294,622,116,779,436,76,138,397,773,363,875,400,636,225,110,234,889,355,559,443,577,441,621,286,331,240,132,873,318,535,794,219,55,524,344,313,257,439,220,632,97,641,679,236,619,62,676,371,453,737,607,752,309,14,594,380,496,790,121,421,454,862,218,871,571,383,801,434,8,611,413,749,341,399,899,623,587,382,878,505,895,367,198,426,53,649,103,479,376,724,854,17,807,270,757,588,541,464,732,626,302,314,765,156,213,49,777,697,548,843,894,285,669,74,283,890,683,30,570,185,374,288,416,853,629,568,646,740,323,201,472,392,880,572,230,756,264,624,605,495,618,171,822,360,228,552,546,134,804,172,243,407,569,792,244,723,560,606,435,418,705,616,553,2,758,112,310,504,73,446,151,769,351,146,80,859,271,549,364,209,820,448,840,458,707,502,564,63,617,712,87,680,674,753,155,337,123,693,126,866,717,194,518,259,515,61 
675,254,338,852,781,676,808,490,203,762,32,323,755,439,436,317,745,851,48,868,885,422,55,263,185,415,456,879,562,552,776,535,465,798,303,77,381,747,576,568,573,500,30,38,785,433,405,494,699,761,395,867,361,42,95,807,731,643,146,449,160,324,282,71,9,98,665,882,858,489,641,153,90,66,887,227,437,241,692,803,14,231,792,103,545,246,590,554,225,309,461,279,452,304,176,543,712,492,154,186,802,550,551,514,846,105,101,895,666,429,342,406,70,706,138,233,618,836,521,838,376,78,878,169,345,613,669,512,536,264,445,727,194,896,730,627,117,589,579,515,728,378,175,480,833,614,223,889,310,719,402,120,583,391,716,118,523,191,656,869,525,541,685,828,126,884,636,22,679,540,124,451,272,496,390,511,27,260,711,125,161,629,446,717,123,85,187,51,782,250,572,403,729,131,267,598,795,151,493,64,473,825,603,633,766,707,373,870,271,891,661,467,413,72,594,302,301,164,410,357,680,681,69,297,379,424,299,321,621,816,336,582,57,789,742,16,488,206,4,558,561,555,701,630,327,796,835,591,718,353,546,865,417,290,623,524,700,697,205,334,401,347,547,371,68,110,252,89,393,861,80,265,3,713,150,218,688,569,114,690,228,83,866,322,549,709,281,386,149,746,644,772,893,286,820,363,295,318,886,765,743,622,291,578,137,652,108,698,31,180,684,533,285,46,794,647,333,294,780,330,404,733,850,840,704,566,769,421,873,486,839,605,664,847,651,720,39,213,13,7,199,877,262,311,599,24,392,104,797,358,829,343,356,92,204,823,854,826,722,147,506,76,588,502,604,463,586,428,366,539,653,91,414,448,751,678,122,152,235,499,485,195,332,812,380,853,20,606,842,325,444,677,112,702,637,49,132,196,181,841,607,239,283,189,777,809,726,484,482,431,307,41,2,226,509,710,557,466,184,667,737,284,344,60,750,574,857,693,130,824,411,394,900,787,635,845,351,389,571,597,517,771,337,527,458,167,382,268,770,510,198,145,522,559,298,441,775,876,508,258,648,724,570,883,274,35,464,856,768,898,871,385,23,280,96,276,670,171,471,537,369,207,749,384,615,409,107,273,516,477,79,814,182,216,655,190,224,874,481,691,495,313,619,610,52,238,148,821,800,790,396,498,62,556,650,577,127,432,687,849,560,249,472,287,261,234,240,73,443,237,331,505,715,649,339,793,6,848,141,815,705,438,177,257,341,663,478,65,398,503,462,248,348,364,528,819,631,278,470,86,620,817,162,29,335,370,601,626,74,518,714,121,892,288,460,447,212,193,208,532,844,37,243,296,501,200,25,567,834,372,435,305,242,469,19,253,159,791,430,863,862,300,734,811,230,134,860,504,752,565,740,526,251,221,166,33,340,703,179,806,163,880,418,58,553,109,507,53,34,786,170,377,157,50,142,837,744,275,113,634,434,420,84,350,596,872,217,270,739,383,144,173,40,638,168,312,725,416,165,425,75,831,813,763,269,349,890,773,15,211,479,440,320,209,158,28,753,657,491,236,483,799,222,668,315,423,474,459,129,497,88,783,544,219,538,140,197,139,741,564,759,580,214,247,81,673,756,215,220,593,93,36,738,888,155,475,617,708,455,128,277,632,674,119,487,99,387,1,832,116,256,61,804,11,827,328,476,754,329,368,12,21,44,530,450,757,375,608,210,178,143,805,400,292,735,136,135,192,875,355,642,736,427,748,855,10,94,289,843,26,43,326,575,365,624,764,87,314,319,18,612,695,201,354,172,457,399,818,723,412,696,374,133,640,616,519,360,758,407,352,174,316,397,778,259,102,232,611,442,658,801,362,531,584,419,82,774,529,188,513,592,779,47,881,426,106,54,788,308,639,293,453,822,244,255,864,408,682,760,245,859,183,686,767,810,581,100,899,59,721,454,45,542,468,609,830,56,645,5,585,662,346,784,17,646,671,660,306,625,732,97,111,367,156,694,520,659,63,587,672,115,229,388,8,563,894,602,600,534,689,266,595,654,67,683,897,359,548,202,628 
815,255,94,615,720,668,399,82,23,489,518,772,97,416,387,77,836,688,375,795,342,184,488,27,612,465,558,517,66,189,139,39,229,224,31,802,46,862,464,632,711,752,269,324,237,170,179,665,316,146,607,761,823,698,180,592,207,245,345,760,370,798,655,197,419,580,112,470,814,145,649,490,201,199,450,28,466,318,716,662,482,290,268,485,648,625,50,42,80,138,190,858,37,547,471,507,853,163,198,584,392,371,427,301,610,692,541,437,58,45,634,539,196,732,312,583,279,491,512,519,804,240,666,183,8,503,79,548,211,72,456,531,872,480,320,789,253,384,779,294,210,308,376,680,705,472,162,770,107,842,73,367,532,410,528,147,653,99,205,618,669,14,846,860,474,777,265,689,504,321,452,247,422,717,554,132,448,105,734,348,740,830,423,784,591,646,93,509,781,710,806,68,555,125,890,74,393,319,352,314,192,313,361,347,892,131,238,481,239,462,88,54,759,745,845,256,95,526,62,48,671,849,750,165,351,208,248,280,110,295,749,26,718,111,52,774,561,228,486,792,691,545,297,559,34,134,217,783,69,176,331,602,574,113,181,572,661,568,675,639,304,330,567,553,811,409,136,346,158,771,458,576,606,332,408,442,871,596,70,287,277,377,356,235,446,542,664,11,91,451,638,738,791,335,897,200,677,86,440,436,762,271,895,161,697,524,244,339,302,203,780,231,159,230,413,820,834,16,889,429,880,571,254,630,212,839,866,41,530,166,264,776,758,744,389,104,603,225,624,404,810,808,149,24,856,565,252,258,453,357,588,182,721,546,619,886,899,434,714,275,152,221,800,855,755,204,283,355,520,119,769,378,372,724,768,794,350,261,143,621,6,508,678,309,187,696,374,832,599,654,746,851,193,87,117,640,827,620,194,575,51,725,457,656,859,405,797,262,40,722,460,420,614,13,142,736,763,747,144,44,96,540,151,191,835,739,499,726,767,502,114,10,865,700,882,549,506,643,778,18,364,220,742,263,709,754,494,433,382,824,338,703,847,336,660,663,333,844,122,135,616,260,98,641,157,735,403,343,831,756,816,65,306,870,366,153,454,570,396,32,719,733,642,536,412,12,341,365,469,33,729,884,775,833,787,47,637,817,743,868,56,250,585,788,821,597,693,223,683,840,629,289,325,867,825,379,85,4,852,601,9,594,148,53,123,160,467,353,888,76,218,484,57,707,354,202,573,402,712,598,582,17,544,459,515,293,103,701,63,874,315,128,43,695,523,687,380,188,708,418,885,898,251,604,822,369,737,154,177,537,682,468,447,876,609,383,500,801,843,786,510,213,274,684,7,483,647,249,334,327,100,563,658,497,329,381,278,307,127,644,421,569,587,498,476,296,796,38,706,233,723,556,807,133,864,782,397,61,681,206,185,395,566,431,534,340,30,298,266,764,475,129,84,439,505,894,270,790,425,173,730,116,533,257,829,445,432,595,627,172,586,71,234,854,106,19,487,516,731,386,672,398,873,108,841,5,799,893,390,757,215,673,828,317,303,513,600,242,60,140,785,311,411,878,15,496,667,137,81,226,126,344,214,92,522,562,25,650,323,305,388,209,550,273,838,424,848,168,90,349,863,286,635,538,702,337,426,581,118,276,150,869,291,310,809,227,3,415,444,124,55,435,360,552,299,773,463,407,626,2,521,259,328,477,589,765,455,169,704,430,281,819,243,579,685,813,75,818,611,282,631,636,49,861,461,623,195,373,900,633,449,141,881,288,896,651,171,368,1,628,89,699,155,577,741,20,622,400,751,501,826,241,560,805,285,492,593,219,35,164,645,441,292,877,883,267,657,29,322,891,879,109,385,130,156,272,608,679,659,551,728,812,246,443,236,417,850,511,391,543,428,694,676,300,363,362,674,529,222,690,613,36,837,406,478,525,652,803,178,535,438,578,686,186,493,713,590,59,715,101,473,401,167,414,115,753,232,67,22,887,121,394,359,78,605,284,875,495,727,326,564,120,358,670,21,766,557,527,174,216,102,479,175,793,857,514,748,83,617,64 
614,241,371,841,728,701,804,528,264,786,55,219,790,518,505,347,632,846,56,858,886,483,47,325,104,445,395,879,653,590,801,594,467,820,374,38,430,613,656,536,519,405,53,49,816,442,419,380,712,787,316,870,289,36,171,828,762,705,124,310,147,250,245,114,20,51,700,880,845,592,596,107,139,135,885,290,470,231,659,827,17,278,815,131,436,233,691,606,281,341,439,176,496,330,133,492,599,516,204,205,805,522,567,603,852,43,102,896,747,524,344,322,138,637,148,246,679,811,476,849,288,151,875,216,386,616,713,568,545,336,369,734,108,895,768,549,137,682,440,501,773,450,242,511,831,600,273,887,362,576,471,188,615,434,763,172,586,269,698,877,583,639,539,794,94,884,650,26,649,601,183,517,311,537,340,612,28,312,644,158,72,494,466,625,66,68,271,37,756,221,477,463,689,201,166,660,795,142,507,75,573,821,585,727,629,746,451,859,262,892,726,525,339,70,523,283,314,120,422,413,662,548,27,348,388,408,346,296,717,840,332,643,22,818,785,3,489,237,11,432,437,620,708,669,351,830,842,481,779,427,553,874,391,324,673,417,718,758,109,327,459,306,597,295,24,99,261,78,406,856,123,276,5,740,210,193,575,564,191,699,236,145,869,394,510,731,184,449,206,721,556,777,893,326,739,433,209,373,890,772,630,685,174,622,112,733,165,683,71,200,569,530,297,97,791,559,222,333,692,343,299,744,862,848,750,495,640,508,872,589,843,490,655,850,722,760,31,217,21,34,143,871,305,372,468,30,366,122,792,317,819,334,359,64,146,767,775,826,623,152,580,95,540,377,527,438,591,382,291,646,626,74,399,321,663,555,187,195,256,398,571,157,232,829,404,861,41,563,812,293,378,538,141,764,735,61,92,113,203,844,664,227,331,211,741,822,610,453,578,352,345,60,2,292,574,684,415,370,266,743,776,225,418,130,627,469,864,642,110,839,509,473,900,789,529,851,397,298,443,675,482,793,223,533,485,81,309,272,802,425,182,121,402,658,277,410,783,857,608,270,710,751,638,883,350,7,444,867,667,898,855,414,42,190,88,356,738,105,521,641,255,170,680,349,690,460,167,235,552,570,48,770,103,175,532,252,249,865,376,579,577,318,617,506,18,178,87,834,799,755,383,588,86,457,502,657,186,493,611,854,647,279,572,337,274,243,179,96,304,287,308,543,780,619,403,806,8,853,134,814,742,479,169,248,319,706,554,23,491,355,458,267,396,363,504,837,694,282,461,84,515,716,199,12,253,328,561,719,149,562,732,79,894,208,384,499,156,125,126,428,833,77,265,302,566,194,13,621,838,447,480,342,224,387,62,263,160,798,441,866,868,320,695,823,215,129,847,582,674,661,724,462,212,244,91,9,361,757,98,835,192,882,421,58,526,154,602,65,73,778,128,424,207,50,106,771,765,240,164,736,393,497,115,420,488,876,181,220,676,431,80,226,89,487,229,360,704,338,93,400,45,817,715,797,173,409,888,670,40,198,581,390,234,284,189,52,781,671,584,307,335,825,258,513,385,464,472,456,196,484,144,803,551,294,474,118,268,140,782,544,753,668,254,275,33,687,628,257,259,605,39,63,723,889,69,514,696,635,547,136,303,560,729,197,389,111,452,19,836,202,329,59,813,14,832,218,534,754,280,429,32,57,76,618,407,748,323,631,116,159,228,784,392,214,665,85,213,150,878,381,693,678,454,604,863,16,163,285,759,10,67,401,446,423,478,720,117,315,379,6,688,624,286,357,180,503,416,824,711,411,558,448,162,531,666,455,368,788,500,353,177,365,486,702,161,119,185,707,426,541,686,367,607,651,512,100,752,593,132,498,546,745,83,881,520,90,15,807,260,714,238,375,810,300,313,873,412,636,796,153,860,251,609,769,800,550,44,899,101,774,475,35,435,565,681,809,29,697,1,652,634,364,808,25,725,654,737,354,730,598,168,155,358,230,761,595,542,46,557,749,82,247,465,4,648,891,633,677,587,766,301,672,703,54,535,897,239,645,127,709 
112,681,222,15,549,381,190,203,485,123,757,458,562,764,600,239,197,75,834,98,603,280,541,476,693,421,70,116,858,85,641,366,59,271,653,771,265,461,709,38,50,142,674,579,260,114,90,429,63,489,235,204,456,774,777,236,182,172,647,759,832,411,311,460,819,694,384,306,101,711,24,466,422,599,412,445,375,241,16,371,866,407,194,619,492,430,828,594,812,169,544,615,261,686,538,156,231,113,310,540,64,234,25,605,242,665,527,502,839,610,532,516,571,43,853,574,181,374,23,282,338,631,54,297,601,1,530,685,292,589,413,3,804,438,110,99,805,671,505,137,205,672,690,712,104,20,324,157,870,659,867,773,213,509,386,394,851,520,107,175,831,731,561,134,876,256,149,847,4,158,754,346,623,835,106,855,608,349,42,350,898,508,198,152,593,607,738,849,18,446,154,856,47,436,879,269,187,730,39,457,795,474,448,470,392,330,636,451,253,472,307,243,214,829,248,634,315,370,613,372,48,302,695,316,74,450,368,632,886,216,753,580,713,645,678,860,230,884,846,270,569,692,212,233,496,522,560,266,790,658,10,176,118,857,120,382,170,676,869,537,385,453,511,455,781,882,630,748,136,131,749,556,796,28,649,278,244,87,581,215,193,743,475,677,141,41,751,369,400,473,552,89,211,428,351,655,426,347,272,11,517,160,893,117,583,669,439,333,656,415,335,378,267,821,124,132,484,628,291,285,587,68,402,332,146,325,651,452,91,760,557,405,226,352,322,591,648,431,826,850,633,179,878,418,312,696,463,830,151,317,13,477,722,897,390,199,490,55,555,624,895,614,184,339,119,388,227,295,518,871,128,602,37,726,389,423,687,883,294,336,542,379,550,166,95,396,576,273,480,319,196,377,708,363,447,762,837,670,567,53,479,618,728,714,150,67,403,359,785,174,616,551,838,577,232,44,621,167,667,465,437,343,822,744,500,206,83,14,765,165,859,483,320,81,344,139,741,410,298,471,135,327,596,65,797,899,442,254,255,326,393,611,300,799,398,252,145,111,705,703,679,86,299,80,814,806,21,251,487,217,76,207,700,691,789,755,529,531,355,689,584,498,284,84,420,698,639,740,287,578,643,188,534,798,523,715,811,102,200,250,414,109,33,286,844,328,900,468,22,82,323,570,425,362,737,568,482,644,173,247,735,750,736,309,650,449,668,417,620,792,539,361,800,40,810,373,880,147,684,31,308,629,337,209,138,69,885,874,573,424,27,296,259,638,8,397,210,499,434,891,404,497,881,872,493,277,52,356,768,478,195,688,553,775,93,409,582,597,704,162,281,654,202,720,725,263,896,155,218,791,178,780,441,357,818,238,547,318,183,73,159,657,17,71,229,662,60,836,459,660,35,78,675,734,823,820,289,663,554,340,721,314,364,873,6,462,788,707,801,34,842,889,513,526,894,304,100,640,622,504,264,888,794,772,219,192,341,268,144,279,585,440,710,514,786,348,9,627,512,49,588,288,733,702,604,727,127,334,863,664,854,140,481,756,290,793,258,177,848,683,723,164,486,416,586,524,408,443,467,305,716,108,376,807,274,406,652,625,329,365,228,495,787,220,845,5,454,861,752,29,892,803,2,180,877,168,510,387,865,763,841,313,94,782,245,565,427,770,57,724,680,488,105,706,293,501,563,58,208,637,802,742,519,852,97,88,224,383,546,345,758,62,186,536,96,739,503,719,121,171,559,51,435,747,130,833,827,125,515,808,612,761,395,609,525,246,548,129,566,778,275,103,642,433,817,360,646,77,122,185,697,626,666,249,72,153,533,191,784,354,595,766,813,189,809,840,353,769,32,419,729,673,464,201,575,816,36,824,401,56,46,45,491,321,718,868,887,223,592,661,746,126,19,543,779,303,598,30,225,545,66,469,133,163,143,12,875,301,699,572,240,590,367,732,558,7,701,635,864,221,26,237,358,825,257,161,342,262,682,331,535,521,61,432,776,391,283,617,79,506,444,507,767,890,783,115,276,717,92,399,815,606,148,862,380,528,745,564,494,843 
773,19,285,785,373,848,561,286,151,797,312,288,438,726,700,199,647,865,17,882,686,491,125,240,77,689,396,804,402,375,412,276,325,626,297,395,306,529,801,660,640,531,45,65,705,270,321,234,482,417,291,890,669,359,190,863,508,649,12,148,3,574,480,121,55,73,331,713,881,578,570,114,162,200,712,197,694,163,742,878,61,397,688,389,230,541,440,229,110,262,117,568,275,671,69,320,661,310,198,509,677,252,604,706,875,156,349,732,430,429,697,141,196,627,11,579,665,494,442,861,511,245,854,188,135,620,319,795,232,317,159,720,629,744,714,641,14,781,296,267,645,624,457,809,866,464,280,893,211,363,251,381,755,666,838,155,833,266,492,873,839,403,394,871,8,897,361,337,548,648,435,584,572,835,365,530,104,292,633,260,62,379,625,576,115,354,243,23,782,545,675,212,525,204,746,392,540,1,519,96,593,469,221,768,754,401,564,590,139,728,427,346,542,476,775,35,137,192,119,344,765,586,231,358,432,122,441,41,444,730,752,282,185,502,465,253,708,84,85,455,224,812,378,790,105,550,535,445,406,493,419,870,565,116,490,228,821,864,44,687,601,91,800,157,332,20,37,5,242,883,376,610,181,616,431,194,729,719,129,371,215,336,617,506,308,758,78,295,112,314,247,842,898,501,862,518,225,335,806,643,353,655,771,436,451,841,239,318,206,63,415,201,138,101,557,691,467,103,846,536,741,789,673,879,589,792,446,400,783,581,513,348,826,891,767,347,250,58,284,128,552,894,147,272,608,259,102,26,553,146,799,81,757,47,236,857,888,718,311,10,329,27,763,667,537,152,278,182,113,515,791,34,362,145,423,390,370,80,134,257,380,158,166,662,315,892,140,805,486,569,634,512,16,522,632,387,583,88,28,829,303,619,618,597,852,733,425,174,399,479,628,226,154,175,498,779,233,504,300,497,447,186,422,165,454,416,808,681,554,837,407,334,900,845,796,851,703,217,404,374,274,470,183,333,813,74,144,426,794,753,83,478,657,750,595,692,533,889,575,36,859,563,507,856,356,161,452,725,471,896,880,264,42,658,2,384,815,123,684,481,179,495,420,437,778,176,341,18,721,449,324,867,351,683,330,86,646,885,421,711,468,249,717,453,377,263,72,527,820,834,682,690,167,780,273,764,93,208,788,876,368,651,562,246,40,56,38,216,743,95,59,737,458,738,611,516,99,770,523,814,832,172,339,366,462,637,309,71,433,639,405,133,254,747,520,884,740,54,769,39,818,874,66,68,642,169,696,630,153,774,830,32,709,727,414,674,67,496,573,567,580,106,304,710,238,305,98,603,631,638,352,693,534,202,177,382,31,475,235,860,766,672,591,802,298,7,872,281,372,699,786,385,664,33,598,269,544,360,180,635,15,734,702,22,538,294,393,13,108,822,6,218,64,109,24,877,556,680,388,679,685,299,413,555,605,751,142,307,485,443,107,89,164,424,75,227,558,100,293,323,248,549,461,408,571,210,895,840,97,615,594,602,588,456,244,301,847,369,383,342,189,731,473,609,290,724,759,150,130,203,57,736,214,222,265,327,131,484,772,219,428,678,599,322,448,654,503,90,60,411,521,50,735,855,52,600,793,326,450,4,70,816,659,340,650,30,268,136,749,205,279,124,653,187,582,256,748,546,434,241,255,87,213,787,587,831,220,261,195,149,345,849,209,120,547,514,173,606,868,439,853,559,168,355,810,277,143,313,886,92,271,410,776,592,823,466,49,391,207,126,487,524,328,670,622,237,756,698,825,614,316,566,474,510,607,302,707,612,343,118,526,644,621,817,695,29,386,398,357,827,472,76,739,517,543,21,623,843,283,636,656,798,111,715,613,9,418,836,53,844,25,505,824,463,539,704,760,500,676,178,850,160,761,489,577,477,48,899,46,858,652,191,287,596,811,819,94,289,193,460,483,528,585,43,551,807,701,258,459,762,170,338,364,184,869,668,745,79,723,777,223,51,663,132,367,887,784,828,409,716,82,803,499,488,532,722,127,560,171,350 
762,168,142,433,363,782,145,22,108,498,715,640,21,717,631,57,664,622,428,743,127,304,476,169,491,642,423,359,270,81,30,3,121,105,245,849,93,676,735,568,597,615,336,368,214,68,133,410,129,15,446,746,841,815,404,650,71,282,277,523,351,809,674,263,511,519,5,202,773,409,458,413,288,373,178,135,629,237,607,756,582,456,192,670,371,725,231,6,230,158,28,864,23,777,362,273,716,58,246,737,225,123,401,543,652,637,662,182,191,253,793,316,361,514,346,758,328,172,367,573,752,440,530,223,26,412,4,795,20,299,222,445,891,209,321,641,300,647,501,95,198,586,625,845,680,278,272,703,306,605,262,636,616,618,681,227,861,290,100,598,863,175,628,828,464,772,60,822,315,399,701,403,668,876,437,424,474,232,508,427,698,604,507,542,524,739,332,522,645,764,729,207,309,255,899,32,131,276,339,326,473,56,90,534,850,16,485,165,136,193,33,80,682,852,827,120,41,462,14,188,624,696,744,287,358,36,407,132,305,228,867,9,706,52,67,832,643,357,600,601,398,796,63,646,17,50,46,557,103,432,211,588,572,240,111,314,692,803,576,798,475,157,755,345,842,452,89,340,66,734,691,753,718,234,634,366,807,606,212,54,243,620,101,495,215,517,488,83,147,134,324,705,761,515,884,454,528,220,297,258,425,312,900,98,784,763,379,69,467,137,489,29,77,411,170,719,808,49,873,537,885,570,110,707,156,846,653,201,335,444,48,496,776,800,510,12,688,141,788,579,848,801,313,149,760,694,65,327,180,176,471,45,855,603,545,851,897,268,375,213,348,238,789,823,541,27,55,153,311,392,747,334,292,470,443,516,590,390,99,380,122,408,481,163,161,771,460,837,244,700,710,679,219,70,251,805,882,493,130,487,1,824,728,818,826,285,503,51,266,608,689,535,720,140,217,667,472,619,381,82,19,402,447,385,585,494,387,554,856,504,405,146,854,675,871,531,770,435,549,24,160,35,552,106,868,738,286,499,448,834,239,802,806,654,741,709,112,812,382,107,819,148,171,482,441,745,333,185,559,665,751,59,377,883,364,430,658,465,555,229,500,778,323,469,587,43,564,248,591,221,769,853,757,888,492,155,838,767,569,794,199,195,548,561,870,529,714,42,594,791,722,532,384,860,578,610,154,31,814,644,118,817,420,126,86,84,329,429,890,194,79,635,152,627,673,39,683,296,820,521,690,13,577,468,502,264,301,684,281,839,254,87,72,835,415,779,461,74,810,490,880,896,369,660,844,190,639,252,378,726,724,394,173,894,466,566,376,833,875,661,196,356,320,840,53,478,732,303,124,649,114,813,740,256,540,450,206,61,34,560,265,790,374,451,455,247,721,174,365,479,648,370,869,150,889,825,506,25,583,97,183,189,699,480,416,477,233,319,438,677,417,307,104,449,518,881,117,857,626,341,748,343,766,556,704,241,322,526,344,284,497,94,421,678,226,78,283,294,695,250,686,115,655,40,858,116,733,878,546,843,520,638,831,609,360,723,685,44,298,395,553,249,589,799,203,702,783,11,177,38,242,302,18,325,261,612,151,785,436,64,109,396,804,330,879,347,617,291,164,179,892,400,513,431,633,422,632,267,406,271,208,859,289,584,765,218,62,562,310,350,293,434,181,651,88,623,687,186,592,102,711,397,457,792,575,730,275,7,595,353,544,759,85,414,419,865,216,872,550,352,811,389,2,630,383,786,391,355,898,657,599,426,874,536,893,331,200,388,76,672,92,442,393,727,862,8,816,236,736,567,565,484,750,596,269,274,768,128,259,37,781,731,611,830,895,342,656,144,235,886,693,47,581,166,386,337,372,866,614,533,602,712,318,187,509,439,887,563,224,774,295,593,574,512,663,167,821,308,205,547,525,139,775,143,204,354,621,787,260,742,538,613,418,459,713,580,571,10,797,91,257,483,75,486,138,749,349,119,125,847,279,558,317,197,836,453,829,463,671,527,551,73,666,754,162,659,669,780,113,338,184,708,96,877,697,210,539,280,505,159 
742,207,136,368,347,766,111,16,132,455,744,655,22,725,638,61,643,572,474,702,113,285,506,185,535,640,394,297,298,62,26,2,116,79,267,856,99,665,733,538,570,600,365,388,184,63,122,406,105,9,431,710,841,827,435,612,49,248,311,558,393,806,670,289,566,561,1,165,737,410,419,421,301,387,141,161,623,242,568,732,641,451,158,687,362,727,257,10,277,156,44,866,23,785,372,259,698,48,254,746,186,124,359,543,607,671,678,151,212,266,787,327,370,477,398,769,296,150,336,528,752,456,463,222,51,361,4,791,21,308,225,390,892,177,283,622,342,646,508,92,173,589,652,847,635,246,274,659,344,618,305,667,592,621,654,243,865,300,78,549,863,197,639,811,523,736,42,842,280,360,730,379,675,875,418,442,511,232,476,429,756,608,484,517,553,758,353,582,606,772,711,247,278,275,899,17,106,318,309,343,493,43,101,510,845,8,496,133,148,162,18,69,674,861,821,176,54,468,33,193,593,686,775,284,331,53,397,180,332,191,869,12,739,55,77,850,628,404,658,590,403,786,45,619,32,39,37,545,128,438,187,536,554,293,91,315,664,792,633,795,467,181,750,349,852,507,126,374,64,694,721,765,753,202,656,354,789,579,241,38,244,653,81,513,204,472,537,93,169,131,330,672,719,514,880,453,539,217,238,216,415,281,900,80,798,760,382,65,504,167,478,29,89,450,129,696,808,85,858,527,884,530,83,676,121,840,660,206,282,447,40,498,759,768,483,11,720,170,810,631,853,764,356,160,751,724,90,373,149,190,423,67,855,673,560,837,897,218,364,250,386,273,770,820,518,35,47,159,325,414,723,358,268,505,422,509,615,444,118,366,137,413,502,119,144,747,481,826,221,699,701,668,264,57,237,814,886,532,179,440,3,829,740,828,812,233,497,59,291,594,705,559,763,175,203,630,485,610,400,74,13,401,461,417,577,482,335,515,860,457,428,152,831,634,862,471,783,436,542,25,143,20,573,95,867,797,294,495,399,825,258,807,800,663,743,700,87,779,384,163,817,112,154,426,464,782,307,142,552,602,713,58,408,885,402,445,645,488,540,234,526,784,303,454,563,72,587,292,567,219,788,838,773,887,494,200,849,731,557,778,199,194,512,551,876,534,777,27,544,762,712,521,396,854,596,598,183,52,803,599,153,824,420,134,125,114,355,439,888,240,110,625,178,597,693,24,735,239,830,466,669,31,584,465,490,224,337,734,287,836,230,102,76,834,377,755,424,107,805,562,879,895,416,715,843,196,613,228,411,718,695,427,145,893,452,555,391,835,877,648,171,378,314,846,82,479,781,269,98,666,109,819,748,265,591,437,227,41,28,500,214,796,328,392,448,288,677,210,345,475,611,341,872,198,890,844,499,34,603,70,220,146,689,546,375,486,260,352,480,636,459,346,139,458,588,878,97,857,650,321,738,380,794,575,692,201,334,524,316,271,533,123,446,679,272,88,245,312,717,235,716,96,662,50,859,164,684,873,604,848,547,626,833,637,350,754,657,30,322,412,583,209,595,790,215,703,776,15,205,36,286,252,19,357,256,624,189,799,395,66,94,371,813,323,883,304,614,339,208,157,894,443,469,363,688,389,620,261,430,317,255,851,249,617,761,251,68,605,263,369,310,449,138,685,60,647,690,155,586,127,749,434,470,801,556,697,270,5,616,351,581,728,86,425,376,870,236,874,487,333,802,348,6,651,329,809,433,340,898,704,627,441,871,541,891,306,226,367,104,714,75,409,405,722,868,7,816,195,709,550,578,492,767,585,231,262,771,103,290,46,793,745,632,815,896,385,661,168,211,882,708,84,564,135,381,383,324,864,629,501,576,681,338,147,525,491,889,516,253,774,326,574,519,522,683,120,823,279,174,569,462,166,757,115,172,319,682,741,302,729,520,642,407,460,706,529,609,14,818,71,223,473,56,548,117,726,320,130,140,839,295,571,299,213,832,432,822,489,649,503,565,108,680,804,188,601,644,780,100,313,229,707,73,881,691,182,580,276,531,192 
417,659,337,779,888,204,847,575,355,454,45,494,826,124,121,432,766,604,445,614,892,271,275,297,580,105,474,815,670,583,863,685,501,791,319,97,388,853,170,374,428,451,303,288,686,518,412,770,734,840,543,551,138,49,223,439,738,452,538,829,626,173,156,230,300,447,796,884,582,334,617,403,195,142,890,312,117,398,481,362,347,104,723,30,820,71,678,780,498,335,776,185,531,20,517,710,720,560,228,31,772,763,370,216,521,373,66,896,782,366,39,759,146,676,574,19,395,885,512,528,336,112,758,251,602,479,813,99,748,239,756,530,72,894,520,568,516,234,819,672,595,135,54,47,508,651,194,707,535,880,615,69,237,101,284,235,87,197,637,635,80,601,860,511,611,593,751,58,658,306,34,225,50,48,413,468,238,231,684,134,674,830,186,764,408,74,268,478,613,88,423,671,769,196,52,647,844,576,351,280,323,879,838,292,761,786,171,891,470,895,704,460,322,13,326,699,496,327,719,313,380,745,241,211,279,740,137,721,750,736,2,762,286,849,806,162,169,572,281,669,843,120,808,263,581,832,871,732,823,214,544,632,244,607,587,789,309,210,703,35,153,693,122,698,154,561,608,497,499,569,36,26,165,616,41,363,603,289,282,801,307,63,886,149,714,424,727,416,318,872,866,425,688,96,662,199,506,267,850,716,875,415,61,556,56,198,127,842,109,458,817,787,476,291,824,547,339,557,567,119,167,406,852,446,622,227,887,359,827,344,882,811,258,421,310,836,115,504,75,217,84,570,549,330,621,83,729,489,841,627,654,663,8,555,349,641,689,774,878,524,705,389,299,371,640,741,767,691,726,552,321,427,402,822,867,837,60,515,400,744,484,419,652,794,384,436,164,262,893,125,254,797,467,694,487,28,15,579,532,655,771,16,44,3,475,724,856,752,505,396,46,103,209,361,378,429,845,422,174,661,795,500,305,213,869,692,754,586,9,550,435,391,739,453,385,610,51,665,717,683,679,831,664,598,33,690,713,130,490,208,480,42,401,264,82,151,810,628,360,629,148,677,472,799,270,294,383,833,873,805,645,440,304,110,522,232,256,455,158,513,712,93,854,317,252,666,62,722,181,410,123,573,229,6,859,463,4,623,536,639,390,369,377,753,108,338,657,865,562,553,111,212,183,277,877,218,325,649,437,526,660,7,328,315,620,525,653,140,250,533,687,147,802,405,126,835,298,775,14,597,276,701,150,187,226,502,673,495,316,438,397,426,394,18,443,365,343,612,91,509,331,644,537,393,132,619,411,461,248,118,296,514,899,53,469,139,577,114,94,492,876,207,206,5,708,202,387,376,834,143,407,23,76,746,161,145,503,858,564,680,816,17,730,596,236,523,633,702,874,308,457,600,27,591,65,155,128,821,430,807,541,870,102,483,477,90,565,399,246,529,656,650,409,269,634,624,725,25,38,431,166,606,21,179,590,851,434,320,812,257,442,386,163,818,473,382,715,800,260,482,180,868,898,857,141,540,733,592,283,10,358,266,184,92,178,55,340,765,571,190,825,696,59,747,341,86,98,737,285,735,364,631,788,356,768,129,392,24,433,803,839,345,12,193,95,510,862,554,545,636,77,346,542,809,642,247,233,861,466,539,648,314,491,100,332,414,450,160,743,222,287,265,792,136,848,546,113,778,259,486,79,242,116,176,273,404,578,804,448,381,107,548,589,630,783,57,243,32,700,220,152,760,668,889,731,81,301,278,667,348,67,261,342,133,375,855,357,224,456,274,584,757,157,68,1,594,37,781,353,175,883,168,11,742,464,681,43,711,471,618,22,70,201,609,73,488,189,728,441,372,897,718,192,493,255,462,785,78,272,302,459,585,215,881,219,559,106,449,706,177,749,379,605,89,85,864,40,709,646,519,695,311,485,828,846,588,563,755,354,253,172,200,784,290,188,643,368,814,205,558,697,144,790,350,566,293,418,329,675,638,203,64,324,249,191,245,527,367,352,333,240,534,131,295,682,773,221,159,507,465,599,182,625,29,793,900,777,420,444,798 
45,632,316,126,436,495,302,329,560,306,694,183,714,838,687,304,30,217,774,198,745,413,377,580,517,501,16,320,893,211,758,515,133,483,737,611,395,185,804,39,9,14,616,493,511,189,181,229,169,636,81,361,256,657,796,406,355,367,559,599,777,206,161,433,743,528,542,453,167,823,4,279,408,608,583,539,461,154,22,550,808,456,422,579,317,336,881,681,841,233,573,354,391,693,375,98,29,213,305,475,223,199,109,724,439,449,420,656,886,731,531,352,592,2,828,552,364,426,11,505,113,635,208,287,662,49,649,748,331,673,301,87,637,607,315,3,781,794,270,152,428,755,704,735,232,48,378,327,882,438,888,779,300,600,566,405,874,598,276,380,861,843,310,111,829,429,235,773,42,308,750,496,650,854,33,891,476,440,5,295,857,246,266,28,389,469,792,762,18,312,24,878,65,473,769,443,286,647,123,353,867,589,437,634,104,502,726,553,209,619,498,392,64,753,105,596,339,194,653,537,101,66,487,402,122,455,454,590,899,447,732,698,522,789,814,739,248,879,780,62,396,770,291,333,558,691,711,73,870,751,86,386,94,869,299,238,247,785,765,529,520,368,603,272,605,847,622,665,200,257,723,545,677,168,641,141,34,125,613,297,159,730,614,756,112,144,597,534,419,458,416,156,382,489,100,747,187,467,514,128,348,344,818,277,445,775,442,362,623,369,134,412,263,810,224,7,193,672,75,296,349,177,617,533,345,178,417,610,259,850,683,182,231,521,508,713,510,366,738,815,446,289,880,551,59,586,432,809,221,216,139,460,700,873,180,51,171,218,414,563,900,565,102,107,8,410,254,212,358,896,116,484,60,562,226,201,679,875,280,151,682,207,323,363,174,588,491,158,466,205,96,124,668,581,642,674,727,478,544,219,612,540,757,658,83,252,196,347,860,36,639,464,728,643,400,67,409,27,718,646,615,153,868,744,258,53,293,1,669,376,889,624,471,176,120,346,782,197,88,631,129,485,350,136,830,887,294,225,479,170,285,482,77,872,313,202,268,149,822,688,764,264,499,253,855,645,63,468,265,424,143,318,627,481,715,827,671,324,465,817,351,335,175,50,587,759,633,661,372,719,459,68,290,701,309,733,801,186,40,35,601,132,84,82,709,135,894,626,115,43,282,699,341,150,555,703,504,725,52,444,842,740,840,398,638,431,547,343,338,825,524,448,876,46,852,575,821,373,593,157,434,702,204,142,106,243,897,767,706,131,95,292,370,625,17,574,379,477,423,862,166,164,871,791,288,179,20,577,778,571,274,557,678,621,13,535,418,394,518,15,337,630,210,689,807,147,859,325,387,848,314,800,334,220,798,214,470,435,250,244,371,655,31,251,127,594,140,877,326,795,72,26,561,721,667,664,321,784,322,572,680,516,381,831,10,430,864,628,787,76,763,895,532,383,866,97,271,541,602,686,165,898,766,844,23,384,192,108,99,403,385,462,685,222,803,457,71,513,269,57,390,388,546,813,374,802,267,90,833,582,890,80,245,799,240,742,401,275,885,741,527,393,509,117,690,595,421,492,494,319,707,328,427,845,155,260,696,543,554,360,284,660,788,230,710,114,203,863,771,103,824,772,41,407,776,273,648,303,892,716,846,138,283,797,74,519,576,697,236,746,752,342,278,584,452,227,651,146,92,729,736,708,463,884,78,130,118,490,298,195,793,70,184,340,47,591,556,585,330,242,652,38,525,568,357,754,835,110,188,670,548,839,145,692,228,160,530,137,659,629,474,37,695,425,768,486,663,249,148,191,506,720,618,56,241,91,549,399,851,359,526,811,865,32,640,816,172,856,79,163,523,684,604,397,705,786,55,853,190,85,6,19,441,488,812,826,806,411,500,760,654,25,119,606,819,503,609,44,451,307,237,538,21,234,215,12,783,507,666,712,311,415,173,832,675,93,536,734,761,404,61,262,578,749,472,162,564,356,820,69,569,497,89,480,849,570,54,450,58,676,255,512,837,836,858,281,365,805,239,620,834,717,332,790,121,644,567,722,261,883 
631,610,14,673,884,324,703,190,65,305,296,754,612,244,116,90,837,579,627,685,840,12,387,21,779,93,418,678,571,225,734,312,173,496,101,641,31,887,265,374,490,597,303,294,369,164,79,804,439,633,603,620,553,453,217,404,390,139,566,879,747,521,309,74,522,680,507,810,693,182,595,474,37,54,836,27,96,252,508,424,652,7,402,154,833,242,524,516,415,24,606,721,132,213,588,625,835,202,23,212,527,604,177,131,478,683,216,849,648,108,232,782,48,694,681,206,117,839,389,413,638,57,679,26,330,331,570,260,466,34,709,381,763,847,245,704,569,238,856,411,254,114,110,341,515,476,3,732,518,896,556,185,194,84,266,32,445,10,276,572,432,368,890,756,792,671,448,532,501,83,211,25,100,444,388,414,362,2,700,56,873,876,78,803,628,353,146,749,674,336,642,594,664,9,821,261,692,622,109,227,272,796,766,161,885,460,81,852,249,850,333,72,510,503,658,616,174,398,483,13,386,843,647,1,76,541,5,649,690,435,391,454,659,676,584,727,170,667,587,791,854,284,591,241,286,624,769,805,687,86,273,557,250,585,222,788,348,365,853,237,28,598,231,743,708,764,442,586,153,634,224,183,552,321,105,318,820,271,89,573,98,166,811,91,582,313,838,51,64,828,858,486,751,22,878,87,675,4,729,485,880,123,848,168,366,306,18,725,130,277,844,592,200,370,660,748,738,308,842,53,762,337,669,450,263,596,897,43,714,246,798,845,407,536,157,654,392,350,469,447,555,686,580,20,824,349,561,567,720,494,542,446,357,819,502,841,892,549,877,470,730,322,531,753,718,509,528,537,760,506,456,458,207,868,864,861,106,646,126,774,148,463,789,534,55,520,226,576,872,269,419,863,408,328,171,382,640,777,393,535,443,342,175,301,740,459,867,548,345,539,120,163,621,97,40,488,869,578,58,300,491,547,267,187,888,768,583,618,461,401,406,61,832,464,790,480,233,761,802,315,473,615,813,293,410,883,713,70,285,546,397,363,741,317,256,262,600,778,198,504,359,346,102,719,247,745,179,656,889,785,744,66,358,728,645,155,243,617,69,264,823,384,829,283,159,423,88,757,115,129,500,817,599,605,874,298,361,731,668,831,62,103,323,816,733,438,875,717,529,755,228,107,128,699,895,165,94,377,726,477,412,281,209,8,452,314,736,99,812,433,533,124,657,440,248,623,696,538,372,499,288,425,236,133,186,204,643,822,71,808,140,137,35,297,339,449,141,421,291,759,794,891,651,775,564,479,498,138,234,178,367,650,860,737,471,60,670,558,663,666,814,151,19,320,465,201,800,82,655,235,41,282,268,707,326,46,420,701,219,607,639,215,636,417,210,513,698,568,866,188,495,540,497,493,750,691,38,632,672,526,467,770,158,716,378,47,383,434,332,590,797,662,192,340,827,862,399,436,112,172,354,614,299,181,783,724,405,409,780,6,644,127,156,882,396,33,543,807,563,289,475,776,899,723,661,327,773,857,519,385,375,351,613,149,45,335,338,457,462,80,881,395,52,870,73,136,255,482,49,487,279,355,574,292,715,258,180,295,229,653,702,118,278,15,752,287,886,581,429,379,799,430,481,682,851,50,195,834,426,575,626,711,189,176,608,334,92,360,512,135,104,311,550,403,665,781,143,565,347,208,376,280,95,394,274,505,492,562,695,364,147,689,325,742,772,602,30,551,609,16,316,710,352,898,523,489,343,122,894,705,167,169,786,68,846,826,218,67,144,635,199,765,44,162,437,223,259,545,441,125,893,77,230,815,150,593,203,373,302,356,270,220,257,825,784,577,371,511,196,818,900,554,111,119,63,544,706,427,451,251,530,712,113,801,152,758,795,344,688,304,787,468,559,29,184,739,275,589,310,735,601,39,722,677,746,455,830,793,290,319,85,422,806,191,239,619,637,611,697,160,517,42,484,560,193,416,142,11,400,855,36,75,121,17,431,59,809,514,428,205,380,329,214,771,472,767,221,307,134,197,525,240,253,629,865,871,859,145,630,684 
729,433,45,431,720,554,258,25,84,301,668,783,67,473,378,58,790,483,628,625,260,89,604,54,749,428,503,302,212,60,135,21,126,65,115,851,15,852,462,528,602,682,454,484,55,85,81,691,167,90,614,555,807,789,366,415,56,79,521,822,622,781,650,269,659,730,70,330,664,166,546,583,255,304,327,68,410,343,570,505,724,295,74,571,687,642,162,97,303,86,257,866,10,599,587,470,810,63,197,638,193,347,276,264,419,777,615,329,172,66,637,633,296,626,572,632,117,445,418,328,787,355,451,177,101,361,78,549,165,129,511,364,889,353,133,713,496,358,785,222,39,342,472,672,509,360,141,553,344,859,289,518,413,384,400,188,700,145,51,408,698,71,850,766,722,578,151,806,376,176,575,159,474,734,516,261,586,92,624,381,865,828,336,712,711,718,234,732,644,709,741,274,453,173,896,9,262,535,211,421,259,251,387,237,888,47,247,409,238,354,13,4,707,811,801,399,112,581,163,29,530,818,809,121,252,253,204,420,283,100,761,48,804,95,64,858,479,515,714,758,733,544,192,439,106,62,149,746,152,202,198,392,506,367,33,612,512,533,821,649,239,416,517,629,861,680,280,560,88,573,594,606,768,150,497,476,836,486,180,179,254,529,256,284,395,375,782,5,148,460,683,566,596,333,891,219,715,59,225,249,754,109,900,30,737,487,294,306,450,272,747,232,161,469,267,757,840,128,872,382,879,405,111,459,44,798,871,26,334,207,199,763,634,565,241,110,719,313,769,645,823,654,407,27,832,697,315,507,331,386,393,243,731,796,670,854,898,227,728,440,391,380,723,833,673,218,210,371,613,265,657,540,279,803,735,779,464,547,155,646,14,574,743,125,91,531,508,767,576,630,674,834,379,7,32,751,867,745,325,372,38,739,539,716,778,187,774,266,139,669,498,536,784,98,34,580,795,684,236,2,31,584,285,369,824,686,275,592,808,300,286,17,736,527,863,341,563,681,748,3,311,120,791,146,725,876,567,404,184,776,432,742,813,383,653,590,185,721,147,314,577,83,11,422,310,827,288,160,815,526,663,24,502,878,593,263,365,676,318,61,771,740,598,488,305,122,452,558,373,37,780,841,800,860,794,213,727,661,699,837,19,183,456,755,873,641,862,134,494,753,579,248,412,843,857,322,168,72,793,411,73,671,182,43,273,233,631,423,892,293,319,403,138,568,468,93,786,174,756,401,458,104,569,447,467,118,324,839,76,868,194,142,20,692,414,537,215,270,656,695,874,897,538,805,812,388,623,35,368,489,542,647,362,886,562,321,639,817,864,726,406,370,229,717,107,499,829,102,171,430,23,648,666,543,552,356,402,195,75,438,206,617,475,291,478,477,627,190,694,203,564,501,819,338,885,849,345,96,750,57,377,220,520,689,435,397,132,500,490,595,706,390,181,556,760,883,103,802,513,80,665,359,679,346,788,244,492,610,551,87,708,143,396,853,323,22,351,640,762,312,744,282,881,130,855,123,609,884,651,773,337,597,820,446,316,677,455,131,208,230,830,114,441,870,69,480,603,169,157,205,320,137,201,281,522,607,144,710,164,308,297,124,660,226,880,240,835,429,292,217,893,525,482,317,842,216,348,582,277,495,385,825,116,471,764,374,6,616,228,278,158,545,170,703,156,797,444,250,601,52,696,461,442,550,519,608,463,136,759,493,424,705,196,678,585,846,153,844,394,186,561,541,77,877,235,772,417,339,899,792,557,242,869,299,895,589,298,309,46,775,1,618,231,559,799,16,635,191,600,434,848,245,655,765,113,465,588,50,154,175,688,532,389,838,894,523,667,105,223,882,887,246,307,18,140,514,491,701,693,449,619,704,349,290,268,675,890,326,534,504,620,636,485,335,481,189,662,426,49,752,398,99,770,287,350,427,816,611,363,457,352,690,685,214,443,524,738,108,831,8,357,340,36,658,12,621,94,40,42,875,209,466,271,127,591,178,847,652,605,224,643,221,437,826,119,548,448,510,41,82,332,436,28,856,845,425,814,53,702,200 
740,62,349,819,390,840,665,380,203,825,224,218,569,695,686,259,601,869,18,884,761,544,93,285,47,669,402,852,471,479,546,378,394,721,335,241,381,453,794,653,616,480,54,66,766,345,387,229,579,555,269,889,537,234,182,867,628,709,38,99,19,450,412,142,32,41,451,770,880,618,578,100,172,197,775,246,679,185,734,876,23,397,751,307,233,445,519,315,149,310,195,384,364,590,69,344,595,393,220,424,739,301,644,710,877,80,273,805,521,497,625,147,198,626,25,485,711,582,466,872,414,236,868,222,210,658,433,764,309,343,183,741,396,811,752,599,36,779,260,333,720,612,408,762,866,517,300,894,231,294,281,312,749,649,845,177,795,283,600,882,800,495,327,863,6,896,464,206,589,683,353,621,523,786,354,558,75,331,635,250,1,313,633,557,74,253,255,4,778,431,617,248,574,225,513,512,638,26,566,98,606,587,276,787,673,533,570,680,178,796,556,446,462,297,717,83,204,159,200,423,758,511,131,401,460,192,454,87,502,791,662,389,92,632,584,117,689,89,35,400,212,798,482,793,186,671,659,411,522,510,490,881,539,151,593,242,818,859,11,609,610,134,780,153,181,17,91,21,306,886,291,526,84,691,367,191,660,706,158,477,238,275,714,505,348,769,49,372,160,379,258,846,897,486,814,530,175,391,860,703,346,707,540,543,322,837,243,410,168,107,404,267,199,94,656,643,329,169,799,514,585,806,750,883,682,735,368,483,832,608,645,311,810,890,784,474,164,102,171,76,375,893,157,334,515,173,161,40,641,179,828,150,681,9,190,826,857,765,303,50,350,55,724,551,506,226,360,223,125,549,767,42,405,112,427,356,320,71,184,254,475,140,129,737,373,891,111,760,550,491,565,441,52,646,701,263,365,57,70,856,425,489,552,468,831,783,409,239,467,426,572,176,64,221,577,768,208,449,296,613,576,170,438,156,413,399,850,678,371,861,439,421,900,849,729,865,651,193,363,496,326,596,128,422,754,2,154,416,829,687,106,347,554,743,507,648,640,888,622,88,847,666,604,871,355,60,484,792,436,898,878,339,43,457,16,388,823,95,685,561,135,369,455,428,789,244,292,45,713,525,213,844,245,492,293,133,518,887,377,642,548,286,716,420,207,230,5,657,833,817,634,698,155,702,227,763,139,270,745,879,459,541,594,295,96,116,46,196,571,137,121,725,559,726,567,647,31,824,374,839,838,249,282,341,440,697,336,10,488,493,465,188,325,675,534,885,755,113,728,22,738,820,68,15,499,201,676,700,152,748,830,27,773,531,415,674,65,337,376,508,668,115,318,605,314,271,20,664,719,603,443,607,435,216,130,370,63,592,308,870,821,586,627,836,277,37,873,338,386,718,781,406,504,77,362,138,527,481,120,731,58,804,672,14,547,268,456,30,97,822,12,240,119,90,8,848,661,536,321,732,624,319,302,535,532,813,144,264,520,498,73,145,148,342,105,289,623,118,211,357,163,652,385,538,398,256,895,774,59,469,597,553,442,417,252,214,853,478,434,351,141,788,432,494,340,692,727,232,165,266,86,782,279,235,284,261,174,361,809,278,528,712,487,332,247,699,448,109,104,500,274,48,742,874,7,629,802,358,472,29,108,757,715,288,563,61,352,85,803,205,299,103,733,114,690,194,730,631,382,298,167,78,180,759,560,827,237,359,132,146,305,843,257,101,562,328,202,429,875,473,842,588,251,280,854,162,136,316,851,28,219,430,684,580,723,503,82,407,262,53,598,529,330,614,458,323,705,756,815,591,265,575,366,461,677,324,650,704,395,189,419,581,602,771,470,39,304,501,418,747,383,143,736,615,573,33,667,807,228,630,637,790,123,776,620,13,215,855,81,835,44,463,841,444,476,772,708,545,744,122,864,209,722,583,663,509,3,899,67,858,655,126,272,619,812,834,51,403,72,568,542,524,694,24,654,797,746,317,564,688,187,290,392,217,862,693,670,56,696,801,166,110,636,34,452,892,777,816,516,753,124,808,611,287,447,785,79,639,127,437 
693,10,330,678,185,862,428,270,218,788,469,205,446,828,784,194,450,837,82,863,601,542,134,339,25,737,315,733,573,341,411,271,285,559,453,480,349,305,864,578,515,414,108,98,673,241,316,100,381,393,191,882,647,484,353,858,455,637,5,33,29,540,472,175,156,55,301,567,860,702,426,76,221,322,585,264,735,118,642,886,165,494,635,505,81,582,593,242,236,283,70,544,288,770,21,192,465,269,232,599,560,153,552,789,871,144,436,623,549,563,748,26,309,459,75,665,660,332,345,859,437,387,809,222,178,517,317,854,167,463,46,634,674,626,675,479,78,849,143,183,625,743,620,865,843,340,355,880,370,172,421,574,760,740,853,209,890,390,451,861,888,527,204,841,53,896,281,493,407,648,618,644,689,887,292,694,152,376,461,323,91,197,639,380,86,438,409,65,676,565,554,337,365,278,801,384,395,8,485,115,732,368,114,816,583,371,691,424,95,603,422,379,460,629,713,9,125,163,131,442,709,386,254,445,431,62,522,6,609,679,835,325,201,497,500,366,701,215,188,298,66,869,284,799,126,511,476,272,488,641,352,857,516,237,462,90,814,892,27,741,669,18,844,45,398,92,42,23,216,870,547,687,266,523,592,155,577,670,224,274,196,504,492,662,187,715,13,364,160,180,96,817,894,600,783,661,148,427,751,508,171,646,832,419,507,879,335,208,327,47,235,119,113,244,417,532,405,164,766,575,710,757,611,883,536,767,225,496,686,716,448,181,819,895,796,346,328,35,449,268,591,884,295,363,443,367,39,112,394,58,730,43,834,135,193,779,848,614,147,12,501,54,706,562,372,88,189,79,19,680,752,28,324,24,229,207,528,212,127,129,478,102,59,566,319,897,198,785,296,570,572,321,61,510,672,518,681,34,22,791,308,682,746,697,808,652,230,89,568,383,724,307,249,261,520,714,80,402,466,519,435,109,627,310,239,267,753,526,656,826,617,440,900,820,728,836,810,93,250,413,177,396,72,265,874,145,37,470,805,717,40,551,530,845,622,671,412,868,696,57,889,491,535,807,558,200,404,636,247,885,851,277,104,664,32,576,852,69,721,608,60,513,245,389,821,233,487,3,739,561,378,811,347,754,158,173,775,867,312,529,541,231,650,275,481,223,162,458,764,763,688,774,202,729,111,831,151,256,703,873,482,761,698,303,51,44,2,257,692,199,20,771,546,654,759,457,214,708,621,769,846,206,362,375,447,597,477,106,564,502,373,121,290,812,432,893,742,30,800,149,776,818,184,140,628,77,602,658,302,823,833,15,589,758,329,725,14,509,612,439,406,217,338,794,326,313,211,606,498,778,377,806,571,73,336,425,16,369,203,827,684,755,430,768,291,11,839,418,186,787,719,263,707,71,663,359,594,397,128,588,49,631,726,103,434,392,538,50,251,756,17,374,94,132,87,825,474,722,514,731,657,486,605,711,452,651,84,258,311,495,67,120,293,240,179,276,423,7,286,252,279,403,227,441,569,320,877,749,234,683,750,533,550,638,282,471,850,294,556,503,56,677,586,408,429,786,797,105,190,124,154,685,142,391,138,360,226,587,798,117,304,734,736,361,539,579,280,213,141,334,645,157,632,822,52,615,840,168,640,38,159,781,630,525,555,41,342,248,666,348,444,136,531,262,475,169,815,410,399,351,416,195,289,872,521,803,122,210,133,107,534,795,139,36,357,595,255,668,842,473,876,388,161,166,780,433,297,300,847,146,385,610,704,699,773,273,83,400,287,176,490,344,483,705,744,246,830,598,802,613,137,690,607,333,580,182,762,553,499,74,616,777,772,712,727,116,382,506,314,782,238,85,792,512,655,97,454,891,260,584,524,700,170,590,738,64,548,838,4,878,1,420,765,596,695,604,824,356,643,101,813,228,659,350,415,358,63,899,123,881,667,220,150,720,855,747,99,318,299,464,354,557,537,130,543,793,723,306,545,624,259,456,343,253,898,718,619,68,649,829,219,48,790,243,489,866,804,875,401,745,174,856,467,633,331,581,31,653,110,468 
356,459,385,787,799,527,829,609,369,682,71,138,866,536,448,403,525,762,261,756,898,450,69,414,212,313,283,862,833,612,879,710,456,845,540,1,492,604,625,330,317,242,157,117,823,482,419,432,702,856,250,781,84,16,303,730,774,663,275,507,406,72,90,148,142,131,794,886,717,679,466,130,143,182,894,364,328,216,458,721,167,210,821,85,562,87,841,778,559,323,662,32,588,234,202,495,445,557,179,83,791,602,429,601,783,75,42,897,868,642,173,437,175,520,422,119,630,858,370,801,112,176,827,199,622,483,826,480,652,423,493,603,9,895,701,363,378,665,516,554,763,463,220,368,704,552,247,843,670,672,749,217,435,350,646,194,553,304,699,824,529,790,596,580,362,825,690,46,579,500,159,461,244,447,227,773,67,318,533,99,335,535,319,547,108,13,421,226,581,63,265,752,648,221,34,726,819,331,393,113,689,872,724,664,453,807,490,889,293,896,768,577,146,49,249,498,409,98,669,485,452,430,18,314,279,605,288,521,867,838,195,798,33,883,869,22,297,574,128,343,592,551,751,499,548,876,888,418,877,511,505,822,214,647,678,522,517,675,339,154,389,440,444,353,2,382,478,264,441,761,125,133,37,680,149,139,354,352,287,745,192,150,893,436,539,575,329,561,259,812,687,589,847,241,476,504,158,395,890,718,716,632,48,641,23,651,136,766,118,280,572,676,347,285,797,348,81,567,420,203,78,587,885,772,741,225,712,586,855,692,892,519,411,757,629,853,11,300,44,155,17,767,631,455,302,21,555,371,802,381,740,512,219,336,95,544,506,804,735,299,830,228,277,152,402,616,666,465,396,818,374,169,309,503,694,585,171,497,294,427,686,134,269,832,384,786,70,276,865,110,161,474,322,806,748,40,29,207,349,779,785,80,290,105,477,793,639,608,750,191,258,54,60,431,570,469,564,215,338,805,839,197,620,253,661,404,834,486,41,771,723,595,873,634,245,800,344,301,391,776,523,840,256,550,383,388,405,137,758,172,232,31,187,705,102,204,795,734,698,528,614,738,668,851,542,52,333,870,697,884,744,479,144,24,291,494,657,132,407,765,307,36,706,200,613,703,140,473,400,674,5,530,30,57,617,468,178,754,257,375,637,281,428,439,4,101,415,878,667,545,186,576,93,185,659,621,271,711,342,788,813,188,677,367,502,361,326,82,88,565,509,392,875,410,472,860,129,846,43,722,583,708,66,122,156,649,811,160,597,151,377,312,449,206,357,733,606,434,246,358,223,424,510,141,65,372,341,715,296,412,532,211,900,38,262,401,239,12,10,267,854,162,190,166,775,96,183,573,849,531,501,284,73,467,170,145,274,844,526,808,863,209,624,755,109,292,737,809,742,691,514,390,55,496,7,8,237,859,107,861,408,887,251,289,386,106,777,218,231,600,379,747,320,77,417,513,759,76,114,743,164,770,120,534,316,871,174,111,656,416,124,310,177,491,464,443,650,541,39,337,14,850,789,880,25,655,837,399,205,64,728,193,58,308,127,89,638,720,782,397,489,816,153,387,537,334,272,645,254,615,305,769,688,515,546,45,446,50,739,671,784,673,208,198,3,594,644,571,524,578,20,240,563,874,278,398,626,719,746,355,628,266,660,248,168,233,582,97,803,311,470,79,817,26,857,163,433,736,121,640,74,180,86,654,230,543,315,760,115,135,282,593,442,270,619,6,273,27,831,295,560,623,636,729,836,47,351,181,471,94,59,558,184,425,196,725,229,201,568,56,727,566,332,189,104,635,286,815,484,243,684,488,91,438,618,451,222,796,700,487,68,360,591,457,19,373,62,842,365,238,764,643,538,658,569,325,685,518,61,306,345,549,123,891,590,346,15,731,426,633,475,213,696,255,340,882,252,598,792,147,810,327,359,780,814,454,235,881,260,683,324,28,481,653,599,695,92,835,51,681,607,236,848,165,732,413,707,376,828,366,224,100,263,268,693,556,298,103,321,709,53,394,508,116,820,852,460,627,584,753,610,611,713,35,462,899,380,714,126,864 
492,733,251,685,893,107,790,453,301,274,164,730,702,18,19,380,829,438,622,487,849,142,455,208,779,52,551,678,470,461,780,558,437,616,200,363,271,885,21,386,497,594,412,419,430,444,340,840,639,744,688,367,321,241,229,248,579,262,686,882,756,365,288,276,482,694,697,837,477,136,658,600,233,170,852,247,63,484,476,166,554,87,504,112,860,157,456,683,460,280,767,452,394,13,693,759,815,462,245,116,637,769,302,58,300,649,186,856,598,188,67,834,172,725,690,78,223,867,544,281,546,130,614,261,532,429,714,6,706,141,811,432,353,857,322,681,621,54,868,655,373,31,49,8,366,647,156,520,459,894,500,90,148,26,114,250,5,140,472,401,2,383,890,518,772,381,677,253,635,176,74,101,22,3,517,264,411,158,728,199,843,876,129,814,653,243,201,698,619,213,560,581,763,189,342,480,792,705,275,389,143,841,836,102,853,654,60,870,510,863,527,314,466,183,441,753,465,505,671,185,341,835,528,134,252,739,91,775,564,508,35,628,567,726,633,454,146,623,478,795,872,9,757,150,534,696,804,819,691,92,493,396,283,587,415,838,221,40,845,72,70,760,37,805,458,715,643,648,440,410,100,68,398,464,62,490,758,258,266,746,346,105,844,64,738,315,846,278,320,865,879,345,479,65,803,85,717,160,713,615,888,238,375,393,210,32,153,820,184,524,858,770,481,358,771,675,607,506,720,119,414,296,723,228,423,332,897,193,727,147,833,861,226,227,128,751,318,584,260,359,298,420,530,214,782,255,748,588,801,700,523,687,28,755,553,740,827,634,889,638,591,495,370,590,737,729,735,734,813,344,329,599,400,873,878,871,84,583,406,816,293,601,812,645,319,209,304,330,887,215,357,862,541,463,242,178,230,778,603,474,644,122,12,82,521,545,875,750,313,549,15,222,446,290,212,402,880,572,124,428,640,666,179,236,891,785,565,629,161,328,282,240,548,348,576,385,7,798,817,491,692,743,826,542,4,847,802,162,254,324,604,194,611,83,159,192,732,557,154,656,20,515,268,657,173,606,364,703,892,620,555,349,434,371,680,131,75,676,76,291,839,256,855,382,88,568,93,800,106,218,361,661,471,171,883,436,48,513,695,777,205,368,334,832,399,516,828,781,448,596,138,59,285,433,895,55,337,535,556,299,450,29,139,239,646,563,786,237,540,512,722,77,632,403,50,731,547,571,149,445,135,610,279,257,277,338,539,766,155,699,362,426,303,42,447,168,169,651,99,670,526,818,608,665,327,689,451,234,263,44,206,709,869,286,570,56,749,339,347,642,851,265,204,41,577,309,659,216,742,51,289,10,180,807,259,177,618,799,501,503,672,23,718,397,333,650,537,561,884,108,417,667,181,630,335,449,109,711,684,631,613,784,94,664,494,133,369,543,323,473,796,586,427,431,788,754,574,152,89,191,244,488,110,80,741,752,582,475,823,145,679,387,217,874,468,287,668,848,502,507,408,825,900,764,390,424,569,762,418,132,175,336,395,66,231,174,182,682,388,121,881,457,71,850,207,24,95,708,272,716,391,413,761,292,810,273,352,144,197,793,808,127,36,190,384,392,886,566,552,589,374,443,498,626,831,151,69,866,295,652,660,435,311,111,486,519,312,317,573,203,187,421,663,343,765,774,17,704,377,351,224,331,198,39,310,360,673,745,701,529,104,536,595,783,806,284,219,225,485,163,45,773,602,898,509,269,326,316,824,625,167,137,550,38,641,854,409,232,350,538,379,794,113,86,98,469,11,636,305,165,896,57,97,822,308,736,47,511,306,624,123,14,81,724,355,592,354,522,416,585,899,707,61,307,103,575,768,1,467,297,525,617,294,830,79,719,425,246,791,27,821,499,496,53,46,776,33,710,404,747,514,267,597,787,809,609,789,531,407,73,120,405,842,118,43,559,627,721,483,372,674,126,605,533,356,270,195,249,439,797,202,117,325,220,25,96,712,580,378,115,422,562,30,578,489,612,125,16,376,211,593,34,442,235,859,877,864,196,669,662 
60,588,381,242,354,530,406,433,579,430,614,79,810,839,727,343,17,330,700,290,827,506,249,618,317,517,51,470,895,318,823,590,218,625,788,399,483,92,830,86,43,12,542,376,650,278,274,162,288,728,58,462,159,522,787,523,499,507,451,417,707,111,125,391,658,328,627,571,250,848,52,195,374,586,682,565,492,175,84,626,711,458,574,500,229,257,889,746,831,296,570,174,490,678,254,112,7,305,302,371,352,239,203,793,552,252,308,759,893,804,465,256,556,39,779,474,491,481,68,624,45,617,336,295,703,134,729,772,378,718,251,188,435,704,454,11,733,829,173,223,569,795,694,734,326,116,407,449,880,268,884,755,342,622,635,394,870,620,411,521,858,866,171,143,739,538,322,666,119,396,714,558,652,840,54,894,332,486,42,264,757,131,312,34,234,309,807,655,75,215,21,878,132,473,578,554,380,576,213,285,875,669,439,717,10,600,775,621,225,719,610,505,36,645,66,546,358,136,690,598,164,13,273,453,191,460,494,540,900,599,685,774,294,842,859,573,282,865,681,25,281,803,365,393,581,802,806,40,882,799,180,527,107,862,436,201,304,822,631,459,555,315,629,205,353,801,605,583,279,344,679,477,537,303,623,123,2,182,616,375,194,699,710,792,155,248,420,602,429,469,323,238,504,503,22,796,97,534,653,247,261,471,636,401,300,818,425,419,566,335,95,446,291,781,327,8,71,702,26,301,177,280,735,628,498,140,246,693,408,868,778,110,253,593,589,794,329,324,634,745,269,359,872,604,5,440,422,764,310,212,259,456,667,819,101,32,55,348,321,480,899,495,93,30,18,437,313,220,258,898,152,356,118,370,185,133,673,856,297,124,768,151,190,518,265,675,369,147,479,179,76,56,637,691,751,562,582,293,501,341,697,423,753,584,90,404,144,350,873,24,651,351,591,684,513,121,263,14,736,756,716,106,871,726,168,38,445,50,544,516,886,698,557,271,53,493,783,127,48,725,183,592,200,228,815,825,222,231,608,126,237,333,15,879,244,207,382,224,849,683,805,403,607,397,861,447,135,611,181,563,235,395,541,255,638,832,742,204,520,852,208,214,199,72,659,808,601,595,432,800,276,61,149,560,206,738,749,275,20,1,687,192,157,47,514,85,845,723,211,69,267,758,283,87,346,763,487,786,44,568,864,706,860,457,642,413,415,286,142,821,489,497,885,94,854,676,708,533,455,284,508,747,165,154,115,363,897,613,767,28,178,306,448,577,80,644,485,452,383,820,81,41,846,660,170,189,59,680,760,585,325,409,784,416,33,567,287,217,289,9,421,587,243,647,837,141,777,441,525,855,414,782,245,202,743,233,386,536,319,366,529,632,100,388,114,510,230,881,272,836,137,62,389,709,461,466,334,833,176,689,662,643,361,752,74,372,876,548,741,150,677,891,511,270,813,46,410,385,543,789,160,896,705,844,3,531,163,70,128,478,226,463,661,98,797,526,166,390,145,103,232,482,349,851,193,824,373,19,771,443,890,83,122,798,240,674,484,360,887,769,314,547,475,23,740,603,400,524,496,339,686,476,467,841,169,196,720,412,664,367,347,748,754,260,512,221,117,850,766,197,671,715,120,561,633,338,722,277,892,670,834,91,424,780,37,442,648,612,377,744,790,241,426,428,575,96,692,266,73,791,654,656,384,883,99,198,113,551,158,161,785,109,236,216,82,379,553,398,468,299,696,88,564,357,519,646,817,139,57,509,444,843,63,731,78,186,472,184,712,450,596,65,721,387,688,559,657,392,210,227,316,765,539,29,355,104,515,545,867,364,405,814,869,16,434,773,108,877,153,67,311,701,663,528,761,730,130,847,102,138,35,64,368,606,835,750,641,535,402,812,572,31,219,609,816,630,580,105,594,167,362,549,27,320,307,77,649,619,639,776,337,262,129,853,732,187,331,809,615,532,146,292,695,668,597,209,672,431,857,4,550,438,148,502,863,640,6,298,89,762,172,488,838,713,874,418,427,828,340,724,826,770,464,665,49,737,345,811,156,888 
477,752,149,595,892,92,738,359,270,218,295,762,659,71,8,300,820,386,723,449,836,38,498,161,814,7,482,591,552,350,770,462,335,509,216,533,181,883,48,336,443,560,461,447,349,345,247,832,517,697,656,346,400,394,310,222,466,167,713,889,803,423,304,267,599,757,610,807,454,160,586,597,213,187,829,210,12,424,421,168,701,20,399,153,853,197,520,641,534,186,747,592,316,61,712,708,813,358,192,155,521,719,220,30,275,734,242,842,640,171,87,827,177,670,772,107,125,849,453,250,578,148,528,207,496,339,664,34,616,121,791,347,565,840,238,649,690,55,866,551,289,32,82,76,325,539,80,478,571,894,594,170,88,3,72,219,146,98,367,360,133,403,893,513,828,378,570,439,530,73,157,13,24,141,456,364,469,78,680,174,877,876,44,798,703,332,257,785,553,279,542,650,693,144,619,384,739,749,180,390,225,819,817,43,863,561,37,858,435,845,429,232,470,371,481,760,393,499,642,102,299,831,627,40,162,704,10,776,667,410,165,583,683,709,602,666,94,737,620,789,870,41,681,95,485,636,778,808,722,81,380,351,252,668,315,824,193,79,868,93,1,740,22,801,625,802,623,718,341,387,204,90,549,356,86,448,771,217,273,663,285,182,821,59,672,253,860,214,286,850,873,313,459,5,825,64,736,69,628,492,886,140,695,301,309,56,119,781,243,480,851,731,402,458,696,658,674,486,763,46,556,240,655,228,326,385,897,132,638,208,811,856,233,249,54,730,408,538,428,502,422,411,624,151,799,357,727,678,751,643,432,633,130,835,559,766,854,519,887,644,721,501,382,622,714,677,653,685,812,445,318,609,319,882,871,867,114,717,343,805,262,590,822,535,229,234,342,366,879,236,354,861,577,389,198,320,441,809,589,398,575,223,63,178,525,433,869,702,365,524,21,266,611,283,109,368,881,546,137,370,562,645,268,306,890,774,473,555,328,278,388,195,564,307,629,331,57,796,810,419,600,671,839,427,124,884,788,112,196,374,568,296,626,150,194,183,631,532,175,669,65,396,185,581,265,744,281,612,891,569,511,255,494,537,754,190,42,707,4,302,843,329,841,334,27,563,106,818,31,206,465,687,554,373,880,464,191,484,675,787,139,288,280,823,613,506,874,750,392,576,134,19,290,504,896,23,312,512,557,274,474,128,176,154,634,518,806,241,689,566,705,17,654,362,135,661,724,472,272,379,96,584,292,230,245,244,646,834,126,758,264,355,224,99,377,179,85,608,111,782,605,848,729,786,413,621,418,164,337,18,188,755,859,489,508,2,769,451,503,615,826,311,129,136,579,294,804,108,660,117,189,101,231,780,381,110,601,756,395,431,582,67,630,330,308,679,488,606,875,97,375,596,321,648,550,635,28,692,733,536,632,743,53,775,404,120,412,593,420,430,833,699,401,460,846,784,455,282,115,147,261,604,248,127,748,684,545,467,790,39,728,348,287,878,531,226,567,847,558,407,490,783,900,759,522,444,529,794,572,259,293,317,487,118,184,314,145,580,476,131,888,372,36,857,201,11,89,651,254,639,437,327,716,369,779,305,353,239,138,767,765,77,143,103,603,303,885,662,588,471,652,544,414,540,864,47,25,852,397,715,732,491,221,199,500,514,258,425,468,260,212,438,548,442,694,793,6,598,361,338,363,405,227,163,276,333,607,688,746,497,169,505,493,797,773,446,203,383,415,62,51,726,527,898,416,440,426,256,855,745,235,200,618,14,753,838,409,156,324,665,297,768,100,66,271,391,52,523,291,104,895,33,166,815,211,682,50,406,352,543,209,83,152,741,585,686,376,541,322,673,899,711,9,215,60,657,710,123,495,246,475,574,298,795,91,800,691,202,792,49,837,463,417,16,105,725,75,614,323,777,434,237,587,735,761,507,844,515,452,68,35,483,830,142,26,479,706,698,676,284,573,45,510,637,269,263,122,158,436,816,205,116,251,173,113,15,742,617,344,70,450,526,84,764,516,547,74,58,277,159,647,29,340,457,862,865,872,172,700,720 
805,45,290,817,499,824,619,311,117,793,250,395,405,582,588,198,761,862,10,882,680,447,162,189,138,632,483,823,276,411,394,282,376,615,201,397,295,684,725,702,703,628,41,80,682,316,348,361,531,419,401,885,692,325,105,850,523,614,55,228,12,613,513,114,43,123,351,745,883,435,666,175,147,141,736,160,644,204,787,864,40,370,679,345,349,521,317,199,57,277,146,608,279,550,132,431,764,344,190,470,704,343,630,585,865,197,350,746,319,329,650,216,143,716,1,515,636,565,528,846,590,182,858,188,104,673,314,727,291,224,231,757,605,772,700,726,2,696,439,341,612,480,367,762,860,539,260,890,101,522,142,288,748,566,797,139,760,200,498,871,767,310,546,878,15,893,426,287,622,616,339,510,471,771,458,365,124,261,720,266,78,530,595,699,174,363,152,35,826,534,742,115,625,165,714,386,589,26,532,121,423,496,294,709,833,421,446,646,179,755,422,335,610,403,800,70,156,244,113,308,789,710,269,326,460,163,389,81,303,718,687,251,211,453,387,227,698,22,62,581,331,758,448,785,106,495,508,577,324,378,476,867,597,56,484,334,816,812,86,638,524,153,765,237,357,6,50,20,285,880,304,537,161,642,362,249,795,731,92,437,232,254,639,384,406,780,145,259,111,402,373,849,896,442,879,393,320,305,808,688,489,623,751,454,443,788,209,390,149,96,558,236,164,47,620,768,542,82,872,500,766,794,658,869,560,806,606,340,801,424,511,488,827,881,728,332,255,99,225,75,538,891,63,219,717,234,148,4,621,206,821,116,697,23,315,874,898,744,441,44,186,38,792,737,652,185,353,248,192,346,804,74,430,226,561,535,298,19,155,381,313,212,256,674,337,877,133,825,579,573,677,664,16,474,533,333,516,158,53,838,301,559,481,509,868,753,575,213,289,556,527,203,130,119,462,798,368,592,208,438,429,271,292,95,609,543,820,754,477,828,265,280,900,848,837,847,576,321,540,338,371,482,284,392,747,71,214,434,779,784,136,461,729,611,574,693,571,892,436,37,803,572,463,859,221,172,491,730,635,895,884,270,42,670,13,262,773,184,629,372,278,497,541,494,732,125,283,59,689,356,347,876,413,627,473,51,518,886,536,791,398,299,741,587,374,342,54,526,836,856,669,568,169,802,410,690,87,151,831,863,281,529,414,217,48,89,94,202,782,46,97,694,354,776,452,512,77,783,472,832,813,137,379,400,487,634,180,93,336,722,459,154,242,691,580,873,706,90,759,8,845,889,14,67,665,230,749,545,85,733,830,76,721,707,517,598,131,514,564,667,660,83,318,626,177,352,69,553,659,469,358,549,525,300,102,388,66,519,268,857,786,569,681,799,355,36,875,181,505,555,809,485,637,24,551,274,502,322,240,607,18,750,676,11,600,275,273,25,61,843,28,107,65,140,17,888,567,651,328,570,701,168,327,399,711,781,194,383,602,404,171,91,103,578,32,196,641,176,366,396,293,593,617,377,599,127,894,870,49,548,408,645,633,330,258,223,834,425,247,243,306,723,418,735,210,657,743,170,109,245,33,734,246,120,380,360,84,451,738,286,506,563,468,323,428,683,662,29,31,479,465,21,778,853,88,557,739,450,296,9,34,841,649,229,708,52,222,98,774,122,187,159,685,178,601,375,675,604,490,173,191,58,183,671,624,844,307,302,272,195,238,861,253,193,661,486,129,562,866,432,811,668,167,507,815,215,64,364,897,108,239,297,814,467,851,586,60,416,144,135,457,647,235,631,520,218,695,724,835,603,464,449,409,653,594,407,655,596,220,157,475,493,456,855,686,3,440,309,417,854,640,79,672,478,427,5,705,777,359,663,719,839,112,740,466,7,385,822,110,790,73,584,840,382,391,715,713,591,654,257,852,128,807,547,656,552,68,899,27,810,618,205,420,444,769,842,134,267,166,445,554,501,544,30,503,818,643,241,369,829,126,312,412,150,819,583,796,118,763,712,263,72,504,100,264,887,775,770,433,648,39,756,492,415,678,752,207,455,233,252 
577,64,341,616,177,857,423,294,260,768,483,111,539,859,810,205,332,813,158,845,697,549,96,397,13,735,220,736,737,342,519,343,248,590,531,450,386,214,877,474,420,305,145,98,707,230,289,43,373,475,102,873,544,481,452,853,470,634,20,27,107,451,394,182,213,40,376,579,838,790,334,32,228,366,611,325,731,73,535,881,234,491,652,499,49,506,741,357,379,262,126,458,329,787,5,128,346,259,222,550,534,117,487,826,863,93,404,688,718,646,734,7,350,362,183,643,659,358,254,861,330,428,778,212,263,456,408,867,174,514,23,555,623,672,677,377,166,868,77,154,633,795,648,871,825,266,372,870,526,127,584,613,738,756,860,219,897,435,448,858,896,666,135,794,144,887,272,512,348,614,650,656,712,892,203,815,129,403,364,275,172,119,591,271,48,400,501,124,570,488,432,498,300,307,781,427,396,55,439,92,821,431,130,840,436,424,753,467,71,649,471,411,360,619,599,54,131,91,218,480,631,261,215,446,381,90,527,47,771,706,842,426,161,620,638,383,657,378,247,193,46,875,285,777,181,606,566,176,654,711,311,855,442,402,464,51,779,894,75,726,683,16,849,8,371,208,103,74,202,854,574,671,280,493,592,85,437,596,273,281,155,553,554,727,137,669,11,418,167,209,82,772,882,582,667,730,67,453,776,473,134,637,817,422,468,886,344,224,363,53,153,146,101,351,405,416,298,246,651,518,608,724,668,879,545,690,169,557,673,808,525,104,775,889,800,459,297,31,465,365,503,872,462,417,318,367,61,217,385,29,695,72,836,243,113,675,770,575,118,36,709,80,605,434,257,108,190,58,2,814,685,26,245,21,165,132,561,368,112,63,558,45,14,567,287,893,198,710,322,485,466,211,125,573,739,521,678,10,62,763,395,647,780,700,729,628,157,94,704,268,746,288,274,352,536,622,52,286,528,602,502,42,745,393,163,168,748,429,635,819,758,492,900,782,607,827,833,30,151,490,136,441,24,240,880,241,9,419,811,618,15,496,415,874,546,597,414,846,788,140,891,486,569,789,682,196,345,662,189,878,824,290,138,578,87,670,864,25,728,714,17,457,197,303,830,369,507,44,723,645,336,725,258,751,99,267,799,848,200,409,593,194,560,178,469,139,277,522,713,674,625,807,179,617,88,851,171,384,587,866,612,783,793,333,121,68,3,235,580,340,59,761,708,551,823,505,269,719,581,732,843,320,295,316,375,559,664,141,630,392,331,115,321,805,355,888,744,60,769,251,680,720,339,188,516,41,484,701,399,820,803,18,655,722,231,742,1,449,529,323,407,256,313,798,463,239,293,586,515,831,391,829,494,37,413,387,22,406,206,809,698,764,359,754,226,39,804,576,156,841,639,186,676,160,609,356,542,513,69,621,120,660,702,195,353,388,692,89,349,696,79,552,133,97,201,740,472,693,517,791,563,679,626,792,328,658,34,170,238,504,33,148,361,150,299,324,374,12,210,192,233,421,207,562,479,440,862,615,337,653,844,444,455,703,244,497,847,301,715,583,35,699,556,283,495,796,765,147,227,123,232,687,162,508,84,302,326,537,812,122,310,784,767,338,511,510,199,370,242,282,641,236,538,822,95,571,852,143,773,105,296,691,603,598,438,66,410,292,642,433,520,100,524,255,500,83,828,389,314,461,454,253,279,890,445,752,65,252,70,57,604,721,109,4,265,532,306,610,839,447,876,315,216,142,774,460,412,237,766,149,390,705,588,750,661,229,110,354,380,175,523,249,543,686,760,308,832,568,755,540,106,749,589,225,548,116,757,564,644,86,565,816,835,585,694,221,304,663,250,684,204,173,806,533,717,191,382,895,184,489,425,594,164,601,802,159,547,834,6,884,28,319,716,600,762,624,818,284,665,38,797,264,530,347,398,270,114,898,185,883,627,180,76,801,865,689,78,430,335,482,291,509,595,187,572,743,759,327,681,477,309,443,276,278,899,747,478,56,541,850,152,81,837,312,640,856,785,885,401,786,317,869,476,632,223,629,19,733,50,636 
479,293,289,139,36,750,13,122,366,433,816,399,194,861,783,187,217,403,616,476,125,452,498,436,414,712,216,157,645,103,182,133,118,131,590,831,282,202,846,355,335,330,530,482,276,111,185,145,42,114,219,570,730,833,707,572,92,341,345,321,508,649,566,428,680,501,79,25,502,715,156,346,455,610,39,389,685,221,334,717,738,605,196,761,110,695,613,204,583,263,117,768,177,852,313,57,281,99,364,760,82,23,306,746,555,560,686,73,554,574,801,161,596,165,576,797,382,1,164,540,522,660,267,343,297,232,141,858,33,599,94,258,880,51,307,287,539,809,174,37,270,786,799,876,491,71,439,463,675,201,643,821,587,743,700,402,898,550,119,456,895,537,208,545,612,604,24,870,86,434,832,553,793,897,256,777,551,429,160,505,726,207,535,138,461,741,653,629,295,706,421,585,38,474,899,147,11,393,296,400,787,29,30,657,425,81,737,10,166,49,146,246,458,879,602,236,170,375,223,416,426,269,678,459,327,93,564,222,682,225,885,235,646,255,338,848,601,659,749,244,135,862,14,603,224,190,129,183,417,711,107,451,450,594,158,89,584,855,571,802,623,128,795,178,814,640,283,483,123,516,826,791,774,148,782,290,406,480,495,16,275,803,48,756,40,376,391,324,331,12,75,507,541,642,578,722,329,424,176,88,72,358,900,155,764,838,543,9,667,240,108,44,153,688,15,348,598,337,561,577,813,432,144,641,186,689,192,467,104,753,91,143,635,710,593,168,716,220,845,781,807,568,661,397,365,754,102,591,6,106,252,137,878,752,411,494,703,105,66,326,733,398,562,567,200,69,20,65,180,762,544,388,195,285,58,126,776,691,214,120,427,311,298,96,209,721,580,651,3,620,528,239,441,231,473,840,889,405,284,305,149,824,849,859,542,167,101,53,634,353,805,626,784,453,351,410,188,349,665,304,151,251,778,664,134,169,226,234,871,430,769,419,638,486,637,404,867,210,193,249,41,50,340,67,893,788,142,526,475,672,238,785,518,851,696,618,22,504,687,333,868,115,336,230,770,734,241,113,109,409,437,203,546,839,511,747,755,371,632,565,292,718,19,332,671,350,742,320,615,533,732,532,625,891,132,468,884,489,278,369,442,206,372,179,869,384,804,87,344,438,655,725,477,684,245,759,360,323,512,556,490,873,727,325,294,213,312,519,806,536,173,669,493,385,860,84,789,191,829,314,648,273,538,444,415,257,683,702,586,575,205,199,250,856,229,736,500,184,773,714,720,692,650,735,744,100,367,420,670,767,611,412,62,883,247,663,310,766,830,339,2,592,377,872,373,443,817,356,47,850,248,877,705,85,772,485,288,18,97,342,130,835,76,319,390,359,407,552,32,731,392,124,857,387,881,819,569,262,454,162,395,78,694,656,211,609,614,488,704,380,517,674,316,484,668,595,83,843,758,559,621,728,864,812,328,80,264,368,31,435,422,300,654,254,547,301,60,181,579,136,636,7,189,261,779,487,446,582,765,841,823,471,697,818,413,828,624,26,676,690,302,233,698,357,527,780,757,95,396,43,525,253,45,679,54,588,470,800,524,27,4,608,874,401,875,228,152,619,462,61,896,633,268,265,627,465,740,21,771,466,514,658,291,810,521,354,322,724,159,644,622,464,70,713,46,381,790,28,445,448,820,628,597,886,423,509,127,52,440,299,792,431,77,279,63,853,472,863,362,408,827,64,140,237,274,847,708,308,723,681,709,748,677,745,763,17,378,370,383,693,242,90,662,739,887,175,854,98,549,531,197,729,815,198,277,68,798,154,631,112,796,865,834,457,888,600,548,499,163,719,227,266,699,280,639,581,56,894,497,394,318,347,469,34,775,617,892,523,212,844,317,352,315,701,836,74,837,59,260,379,309,386,447,5,8,121,647,573,513,808,563,607,116,751,794,286,558,215,825,218,55,534,171,652,272,589,492,303,460,481,520,666,271,418,882,606,496,478,449,673,503,243,842,822,529,363,630,866,172,506,510,811,150,890,259,35,361,557,374,515 
502,656,278,772,893,191,835,509,290,412,66,625,777,54,64,387,826,601,467,633,878,206,344,233,668,77,522,793,546,526,828,611,464,723,237,220,321,874,95,416,495,555,313,315,597,474,374,812,689,803,629,534,270,112,175,403,677,375,576,854,644,289,245,225,348,543,744,868,618,226,662,471,181,117,880,259,84,424,528,323,390,71,638,38,845,108,556,700,425,295,749,347,445,5,587,740,805,506,218,49,730,764,363,135,465,484,97,886,674,266,46,796,123,728,586,30,339,877,550,450,451,79,741,242,524,485,761,47,712,157,778,525,216,885,444,660,501,147,853,658,499,68,18,29,498,663,153,686,443,892,510,33,229,51,236,214,42,141,579,603,35,463,876,599,648,581,711,115,673,267,14,158,15,22,468,345,293,172,733,151,752,863,171,810,503,142,188,540,664,152,529,588,780,162,207,573,831,608,331,305,221,861,840,219,843,721,86,889,457,887,626,377,420,67,434,684,447,398,659,240,410,819,364,154,274,713,88,714,634,652,9,680,397,801,715,281,180,533,330,760,858,72,787,252,520,783,844,799,757,125,530,596,285,541,514,820,316,148,773,45,102,697,87,748,300,593,590,519,454,577,25,31,262,575,19,411,722,307,241,779,311,39,871,78,727,413,788,332,287,867,873,439,665,56,794,111,620,195,821,685,883,353,234,481,122,138,106,837,99,446,850,769,449,275,817,650,477,483,699,116,337,399,813,400,549,335,897,269,816,231,859,847,292,393,256,800,190,507,130,248,193,591,475,260,743,140,707,472,836,646,641,642,11,613,430,750,822,731,884,535,614,394,386,512,709,706,747,692,767,419,376,461,414,852,875,862,28,482,385,789,370,488,739,746,355,391,204,354,895,183,341,848,442,598,369,69,101,670,518,632,687,55,8,13,569,676,870,734,382,493,16,134,291,284,301,466,865,521,109,553,717,580,211,174,882,762,701,643,60,489,327,298,735,455,536,563,10,736,791,584,682,804,758,582,12,763,751,146,409,309,511,98,547,160,126,201,790,672,253,605,104,623,371,781,179,415,388,807,888,782,678,380,325,268,558,149,178,551,121,384,786,173,855,366,184,589,34,732,161,302,235,683,357,76,872,404,2,645,636,753,294,361,395,811,250,418,719,839,578,637,145,132,212,407,890,144,296,565,559,469,537,1,222,265,594,504,694,168,426,459,675,127,698,448,57,802,372,704,59,592,257,627,217,228,272,437,567,609,223,616,392,401,342,26,470,329,279,606,107,531,479,806,508,478,263,639,476,360,210,83,306,583,891,176,532,89,640,239,230,607,869,192,198,6,621,261,462,310,814,75,350,4,124,776,164,156,516,838,517,651,785,7,745,560,280,544,657,604,881,205,491,647,94,561,199,286,113,768,545,742,523,849,100,515,505,80,431,422,243,571,690,574,378,322,681,756,666,82,27,319,246,496,24,93,695,830,487,402,832,194,539,358,150,860,417,312,708,827,373,494,283,851,900,815,288,436,718,737,299,58,251,326,318,41,187,73,308,720,441,114,857,622,40,833,255,48,105,691,254,702,338,566,766,277,792,197,334,62,352,795,834,247,3,177,232,486,879,497,492,630,208,346,572,775,726,215,165,864,351,548,610,428,423,53,435,421,362,200,688,170,202,317,754,213,824,671,65,759,333,383,103,249,128,90,314,438,628,771,568,427,63,619,585,696,809,163,196,120,655,189,129,784,615,898,669,139,271,297,818,433,81,166,490,70,570,856,349,238,368,367,480,798,91,74,17,513,21,738,381,186,894,85,23,808,406,703,43,635,356,602,50,20,110,716,227,473,276,612,432,538,899,679,137,405,159,452,797,37,365,328,527,653,224,866,131,595,258,396,729,118,770,456,617,44,32,842,36,724,562,631,661,264,600,823,841,624,649,725,340,185,155,282,825,182,136,654,453,765,304,460,693,143,710,379,458,343,324,273,554,774,167,52,336,203,133,169,667,429,408,244,320,500,61,389,564,755,209,92,440,359,542,119,557,96,846,896,829,303,552,705 
839,36,213,699,471,819,407,172,60,678,485,599,153,589,577,129,772,817,135,863,350,355,337,127,328,625,545,639,114,269,141,90,282,348,145,702,183,736,668,685,712,686,134,198,427,215,264,438,366,179,512,857,821,609,169,784,300,446,121,398,81,764,641,177,217,326,142,507,871,317,626,315,202,209,456,97,628,256,755,833,239,402,429,525,396,654,132,27,20,227,41,820,138,653,257,414,803,223,222,624,514,273,563,527,809,464,548,451,103,216,723,296,208,684,59,647,493,383,528,737,740,267,775,212,7,598,87,719,170,199,260,642,842,503,537,752,38,607,553,259,390,496,463,796,829,484,258,856,43,620,44,408,700,584,741,174,777,201,313,798,788,123,649,885,158,869,268,581,546,533,509,441,558,816,534,248,298,229,687,360,331,631,583,690,378,591,149,218,812,698,787,21,543,186,882,187,386,74,491,225,339,265,195,590,875,194,409,394,180,467,204,196,710,672,851,46,79,405,10,207,759,765,568,292,449,70,384,62,131,481,794,47,518,178,140,587,688,35,272,664,404,730,275,729,18,224,230,638,89,323,389,785,632,16,307,368,791,774,291,720,502,166,747,330,670,111,17,107,191,853,482,657,436,475,490,351,847,708,72,266,271,392,346,377,358,689,316,139,91,278,373,832,874,489,895,352,501,235,586,536,523,479,890,283,652,734,279,246,277,96,605,130,126,115,426,805,754,1,891,557,870,726,349,822,359,861,674,221,627,319,237,576,835,859,619,100,521,102,531,243,766,879,30,146,806,499,92,50,439,226,716,56,783,220,508,888,899,556,445,71,88,77,834,836,663,93,231,233,276,219,826,184,413,375,572,603,406,63,128,442,151,353,420,422,270,848,285,860,430,679,743,732,29,240,340,592,780,345,39,731,80,735,578,680,877,561,614,147,155,659,596,376,419,52,320,790,473,675,232,206,182,385,242,165,650,610,655,718,721,707,190,159,896,818,883,738,633,432,623,125,299,252,476,288,807,338,280,519,617,844,203,667,824,552,696,733,369,886,309,9,799,374,287,760,228,500,480,465,643,850,866,175,160,846,106,249,676,363,588,188,443,693,513,566,630,5,388,85,637,210,615,889,651,828,559,14,677,872,629,841,251,274,694,645,703,515,293,247,778,862,715,517,281,867,549,604,66,15,855,801,83,665,312,171,13,58,192,327,881,12,51,661,112,757,498,250,324,569,682,739,767,11,541,504,567,468,67,361,238,840,399,113,154,769,570,843,594,49,793,143,887,898,42,314,815,244,753,362,136,714,814,214,421,864,579,573,263,744,813,727,457,167,332,742,22,477,342,435,401,495,236,662,669,311,245,454,101,294,173,779,562,671,593,673,459,82,852,34,494,452,786,516,810,8,830,611,542,75,472,321,23,486,695,164,580,381,119,104,168,823,152,37,33,301,176,897,364,802,483,393,762,86,550,387,771,555,297,538,554,329,365,55,197,697,19,105,530,234,597,379,551,372,666,110,811,24,868,893,211,748,336,692,795,410,318,460,781,255,124,241,469,510,520,825,137,660,770,57,99,162,40,547,116,98,367,544,26,640,575,185,334,431,621,347,751,564,701,25,4,380,797,109,711,705,325,526,648,391,205,48,6,878,492,344,792,84,95,284,585,150,157,305,455,423,343,574,644,437,613,45,434,156,310,634,646,837,333,118,506,306,322,854,200,335,608,758,120,789,776,411,808,602,31,616,635,522,108,397,900,371,425,261,880,461,894,535,69,447,32,412,254,618,253,681,725,68,746,524,831,622,565,416,595,691,453,400,709,357,117,78,658,571,440,876,865,54,612,94,370,892,704,3,600,308,341,65,606,804,560,636,722,838,193,487,415,122,745,717,161,749,133,656,773,424,474,444,768,532,417,462,761,76,845,382,470,540,304,884,64,756,601,448,497,354,706,782,356,53,505,262,488,539,286,189,302,827,466,181,148,873,163,433,418,144,800,511,858,295,763,582,458,28,529,395,73,849,728,724,290,450,2,683,303,713,750,478,403,289,428,61 
583,707,208,669,888,141,755,385,248,262,226,778,579,1,4,318,845,438,632,518,796,104,519,160,802,71,597,631,344,398,697,443,396,502,142,496,212,889,2,426,554,671,413,434,334,394,302,841,582,654,740,370,451,336,203,244,480,211,685,881,749,482,366,277,510,744,605,794,528,75,692,650,230,172,816,196,79,484,530,182,594,90,402,164,860,241,326,572,392,252,720,636,317,59,722,762,837,401,238,184,576,751,303,11,284,736,258,819,444,115,127,828,174,757,674,140,181,849,561,250,663,135,600,254,420,429,613,10,655,98,798,425,523,824,278,747,591,9,871,630,293,3,63,56,372,649,144,517,371,896,390,100,175,32,103,237,25,110,397,384,27,273,892,617,770,389,627,341,643,159,101,72,44,46,557,178,458,130,761,232,854,882,149,827,714,329,161,733,667,310,659,465,759,176,536,383,756,703,275,408,81,793,812,65,873,540,15,853,483,829,410,247,568,287,575,730,411,559,590,129,379,856,648,114,261,702,78,750,419,405,119,492,670,606,477,588,188,562,525,820,870,8,706,177,450,567,728,833,546,42,457,369,331,497,342,832,259,36,850,134,54,746,62,808,620,715,592,657,393,427,143,122,487,415,87,521,811,299,229,699,338,126,792,28,742,314,852,204,289,847,872,382,481,70,848,34,766,121,640,570,887,194,602,327,312,22,165,783,201,490,862,729,436,333,741,753,734,414,797,145,604,304,625,221,343,453,897,124,687,82,772,863,291,249,105,656,399,555,335,377,423,454,446,163,826,323,719,552,769,700,507,646,102,768,638,803,865,577,886,634,464,468,466,713,773,682,695,724,815,239,395,624,404,875,877,874,94,532,373,821,197,653,825,549,286,216,332,433,876,297,442,868,506,350,162,268,364,804,563,452,524,218,33,156,637,476,878,716,215,642,41,271,531,225,170,459,880,660,96,307,514,710,120,219,891,807,511,672,279,296,198,169,578,365,718,351,16,813,831,363,675,665,839,491,55,857,795,199,206,439,615,294,726,29,246,263,678,623,85,608,35,431,193,639,128,704,361,628,893,589,601,285,445,538,684,91,53,731,73,186,846,357,855,421,68,455,106,781,117,139,467,758,612,306,883,367,108,543,754,822,137,346,360,843,550,593,834,708,470,681,209,13,308,587,894,21,301,416,664,280,325,86,77,191,585,522,786,272,721,424,686,89,471,461,31,645,614,486,255,441,157,493,339,300,316,290,417,791,92,787,340,388,243,111,474,183,155,622,173,677,688,861,558,725,460,689,529,167,240,66,253,738,835,456,633,52,767,469,499,723,830,264,213,112,448,353,717,180,679,18,235,51,269,799,276,200,611,752,428,478,607,76,732,368,374,651,573,422,879,45,463,701,305,564,503,598,125,584,748,505,566,735,136,680,512,171,260,535,311,527,790,479,381,473,784,818,489,274,123,116,320,378,168,39,789,696,616,542,817,113,739,337,217,885,406,224,662,842,619,501,513,782,899,658,548,319,581,823,430,242,107,400,539,58,251,234,195,626,283,83,884,386,93,867,146,48,153,652,245,673,358,349,711,231,801,345,281,233,154,765,771,74,84,192,537,375,890,498,472,545,533,435,520,551,838,148,47,859,205,644,599,580,265,109,610,495,228,347,516,179,133,462,603,409,693,809,17,668,447,257,282,321,227,12,359,407,690,676,763,553,95,618,547,800,806,418,189,354,449,166,69,774,504,898,437,348,295,330,864,698,222,88,705,7,776,851,391,256,267,635,288,805,80,131,187,376,64,571,355,207,895,14,158,836,266,745,99,412,214,565,210,19,38,788,534,556,432,380,403,743,900,647,40,236,49,544,764,30,560,322,596,683,298,779,26,727,595,223,777,24,814,574,500,50,43,709,97,712,315,780,485,220,694,760,775,629,810,515,387,60,132,488,844,57,37,569,691,621,586,292,666,150,475,541,270,328,138,202,309,840,185,152,324,190,23,61,785,641,440,67,494,509,5,661,352,609,151,6,313,147,508,20,362,356,869,858,866,118,737,526 
642,238,260,836,786,715,800,432,158,757,59,286,764,534,487,236,721,846,83,867,888,396,17,228,183,436,378,873,655,488,802,507,384,797,327,132,338,746,668,515,519,454,8,3,784,360,334,461,633,759,342,876,374,84,131,821,705,610,116,477,205,317,242,18,21,87,630,880,857,539,578,91,29,44,886,179,453,143,646,831,48,198,783,111,524,233,699,547,265,230,438,313,397,368,130,483,700,419,68,194,785,495,500,556,851,94,79,894,751,463,375,381,40,648,193,253,595,830,451,842,357,69,874,85,343,552,665,587,486,261,401,691,296,893,710,584,151,679,560,446,711,447,224,563,829,546,157,889,404,740,505,185,568,423,745,45,669,172,601,872,653,597,704,827,197,887,579,92,603,516,192,442,310,621,323,611,2,210,656,61,281,622,417,684,93,80,229,98,760,237,533,489,678,73,415,561,780,152,428,11,557,823,574,680,775,694,426,871,177,892,623,418,376,168,592,274,221,97,405,335,631,643,71,248,318,389,264,304,744,815,435,586,63,814,763,72,470,300,27,518,545,632,650,627,282,812,837,549,769,402,475,866,372,362,572,480,707,766,267,364,407,303,593,337,117,190,226,88,321,863,129,285,16,658,180,128,683,529,86,635,125,121,869,371,484,690,297,359,76,737,626,768,895,268,832,409,246,270,884,731,735,594,465,522,150,738,57,659,20,110,637,499,201,89,765,609,325,280,791,293,460,717,849,848,673,565,792,421,862,553,844,582,670,859,654,734,28,140,53,38,234,882,352,275,581,25,355,145,777,290,813,295,450,209,146,826,870,808,726,114,625,36,570,491,551,412,531,370,336,664,628,41,340,469,741,647,154,247,144,456,490,127,302,804,309,865,1,613,843,294,410,651,99,706,649,101,243,188,142,835,598,266,358,254,779,803,712,430,540,385,349,12,24,208,472,693,550,414,212,675,730,217,440,78,753,521,850,636,211,825,520,392,900,787,645,845,445,351,526,596,449,758,320,457,567,322,347,213,776,509,123,162,506,676,284,425,755,878,559,251,739,688,537,883,353,60,388,852,772,898,868,328,7,344,112,345,724,124,473,589,354,206,723,319,639,437,115,273,510,501,82,822,170,324,638,204,331,877,429,689,482,222,576,577,138,166,311,824,796,788,394,535,10,562,696,619,66,452,687,853,605,333,536,223,244,165,218,22,496,269,288,512,770,604,444,793,52,840,189,809,728,448,120,184,276,614,583,136,427,504,379,159,289,433,466,839,618,231,498,175,640,838,250,75,346,301,558,620,105,542,722,102,896,380,390,462,174,216,258,493,834,15,161,373,530,126,118,527,820,476,377,403,227,416,50,186,106,781,365,858,855,356,685,807,147,108,856,571,750,616,714,458,308,225,263,74,315,736,153,810,160,879,431,119,485,65,566,35,54,774,219,494,104,9,245,841,709,330,122,672,422,538,156,439,564,864,133,202,702,350,109,107,33,629,203,255,660,411,137,348,55,819,833,795,292,387,890,789,58,257,588,398,329,271,77,64,761,600,575,259,503,799,215,663,326,467,497,420,67,443,81,767,511,277,492,96,195,163,749,517,732,599,307,169,178,612,756,287,235,528,240,49,703,885,232,441,671,695,554,139,341,641,624,181,464,47,367,4,817,135,272,14,794,6,818,299,523,720,262,382,37,19,13,677,400,752,306,580,176,100,207,806,332,252,698,199,95,256,875,298,716,692,393,773,847,51,149,196,861,39,30,391,585,408,681,743,34,241,312,26,590,634,220,369,278,413,474,805,719,383,727,424,167,602,569,455,399,742,478,291,200,406,508,778,363,148,182,697,361,701,828,366,555,543,459,113,747,666,141,468,541,762,5,881,502,164,191,801,279,729,305,395,811,249,339,860,471,607,754,214,854,134,667,748,798,514,171,899,43,771,434,23,513,532,682,816,42,662,46,544,591,316,782,31,615,652,661,239,686,733,56,90,283,103,790,525,644,32,548,718,62,173,481,70,617,891,606,708,479,713,314,674,608,187,657,897,386,573,155,725 
508,124,337,670,368,827,566,370,290,751,402,47,700,846,765,202,348,809,170,839,847,542,26,415,19,641,157,793,820,398,714,460,258,704,575,312,411,274,874,428,367,220,99,46,764,244,296,70,458,634,63,876,426,362,462,853,578,643,42,102,171,317,254,110,169,18,504,733,824,796,331,1,147,308,785,338,639,41,499,878,207,440,724,412,107,404,822,497,466,240,247,314,396,691,3,149,340,316,145,445,620,186,470,807,872,29,267,838,832,668,615,56,295,364,251,530,647,545,229,869,195,389,801,139,390,447,580,849,280,531,72,565,487,825,678,334,219,868,118,197,681,746,588,843,805,304,318,877,625,242,692,571,659,673,848,156,892,427,510,867,889,741,198,732,191,895,394,431,384,602,576,619,623,882,123,850,49,388,373,175,224,164,533,302,6,246,526,120,568,365,363,629,371,245,635,520,532,81,417,36,837,621,279,831,430,561,719,660,60,815,587,463,235,502,500,114,143,24,357,490,596,257,83,420,325,185,486,117,857,768,747,585,65,781,774,270,582,476,200,153,126,866,423,723,248,749,742,166,795,687,329,865,359,507,523,95,725,894,109,609,618,64,800,33,218,265,168,77,210,854,492,557,183,555,506,32,399,536,277,419,87,489,735,682,174,630,51,468,138,406,206,730,893,512,611,701,23,453,852,543,268,633,690,477,341,885,249,386,285,52,201,261,96,361,521,377,165,353,595,457,483,675,789,883,613,579,309,599,739,826,726,141,693,890,777,628,158,45,375,336,378,871,562,442,255,228,129,260,496,54,694,131,745,305,34,617,710,650,272,58,810,68,519,344,234,196,292,86,43,859,604,21,199,88,289,190,493,455,92,75,627,5,22,669,281,897,93,608,518,385,369,215,144,666,762,422,538,15,79,776,537,514,713,592,648,686,237,176,757,184,653,173,188,395,558,590,119,194,516,697,640,17,743,391,266,155,786,421,501,830,792,552,900,738,509,841,780,37,135,612,167,600,27,287,863,333,44,327,818,503,14,383,326,879,438,495,535,836,799,223,886,572,614,828,676,98,298,770,303,896,814,328,90,448,104,658,862,2,655,766,35,315,310,205,808,488,450,108,642,679,181,656,122,610,182,351,705,842,132,379,624,140,525,178,354,48,382,698,688,616,524,784,76,505,208,840,150,494,515,873,729,680,798,332,192,78,38,121,464,435,115,684,829,498,794,657,231,778,465,728,806,459,161,193,262,598,772,112,644,291,301,101,346,706,311,887,711,97,661,322,559,636,432,151,407,57,436,727,410,759,740,25,833,581,160,665,8,300,405,243,583,209,225,689,603,116,294,597,654,813,413,761,392,71,393,288,39,570,239,821,773,662,400,752,105,61,791,712,307,845,601,163,528,241,480,221,474,685,16,734,172,787,607,213,319,286,756,73,343,645,133,671,134,31,282,664,563,550,444,802,469,760,546,771,269,755,9,69,323,491,4,137,313,179,380,356,424,84,74,152,89,593,397,736,347,544,875,564,330,513,858,358,320,638,146,429,823,416,788,589,94,737,479,264,547,717,663,252,189,203,238,720,297,569,125,159,381,425,817,236,456,783,677,250,403,527,283,467,345,342,517,230,534,861,111,539,844,276,819,142,433,591,622,560,349,55,471,212,695,443,556,30,626,136,646,40,779,475,187,540,376,222,180,891,366,683,53,414,20,11,584,667,127,12,335,409,284,481,855,408,864,360,321,299,812,372,446,148,699,80,271,702,485,696,551,352,85,263,452,82,605,293,529,586,632,401,754,651,674,472,233,716,484,211,574,128,637,652,709,130,461,763,834,549,541,273,162,775,227,573,374,306,769,594,703,216,451,888,67,439,387,577,91,748,790,204,449,835,59,880,100,217,707,548,708,753,731,339,721,10,804,275,478,473,522,278,113,899,177,884,567,66,103,803,856,672,28,606,232,554,350,454,718,154,631,649,758,324,767,437,259,355,214,253,898,715,418,13,482,851,50,106,811,256,744,870,722,881,441,797,434,860,553,511,226,816,62,750,7,782 
311,258,349,261,49,779,188,255,441,597,717,114,537,887,828,242,61,551,510,574,536,581,257,558,185,718,96,413,845,228,524,332,181,432,701,638,405,12,888,264,206,141,422,315,582,191,241,19,180,415,58,693,505,691,724,698,305,542,224,123,424,409,357,362,575,236,302,274,561,839,93,150,386,603,342,469,697,122,259,807,618,607,466,668,24,572,826,429,664,273,211,471,320,832,140,27,77,208,313,647,268,60,324,834,710,254,545,436,808,747,749,59,584,84,549,715,573,139,110,734,227,649,435,290,438,232,408,876,143,683,50,293,741,384,498,120,499,874,13,82,512,838,784,878,595,85,447,636,788,40,798,806,627,770,790,367,900,621,289,650,898,765,21,467,493,748,153,768,112,556,809,657,785,895,113,879,337,494,81,379,554,14,557,42,197,563,745,461,234,526,215,750,71,459,842,365,173,288,308,251,875,270,97,793,130,327,811,225,126,380,394,395,220,796,387,218,209,175,341,579,398,55,383,508,286,158,612,207,864,513,865,492,353,625,671,661,586,704,623,48,41,883,147,653,304,565,480,26,742,794,162,648,321,692,336,18,620,891,339,746,687,92,819,51,577,592,292,345,187,626,775,729,599,278,752,129,168,433,514,154,199,766,356,814,45,443,182,485,319,101,46,534,694,656,281,804,89,539,518,212,17,544,866,309,594,886,488,111,615,193,2,142,172,673,165,161,250,449,306,546,588,500,501,783,406,533,36,642,322,860,420,5,624,800,712,456,489,167,733,695,604,658,756,541,105,600,128,580,127,68,366,184,855,643,171,275,412,283,39,238,861,310,411,263,67,148,107,62,69,880,454,230,164,102,4,3,753,703,205,11,654,116,78,338,246,829,418,527,103,455,328,29,401,525,699,738,799,170,243,468,392,707,847,789,416,372,1,108,815,151,791,479,614,568,515,351,47,149,732,611,450,76,856,670,8,30,445,155,772,635,867,619,776,548,382,622,870,44,25,523,66,280,86,144,894,640,53,431,675,491,121,629,231,893,583,521,198,538,833,370,889,297,570,437,835,458,210,421,6,659,487,285,427,633,388,818,844,146,690,782,73,504,15,201,780,564,696,247,651,725,478,350,316,821,10,560,868,569,75,138,639,186,346,20,686,163,681,402,397,317,601,820,325,452,72,841,355,547,307,719,744,849,846,399,326,203,176,373,520,631,189,700,777,303,882,381,645,457,702,434,740,451,331,291,277,410,836,460,736,233,213,200,344,817,177,824,632,194,714,644,464,376,680,559,507,64,244,655,682,787,641,229,425,763,98,728,135,522,630,132,145,552,333,837,628,256,677,503,276,881,360,873,566,31,716,390,202,221,190,517,407,812,87,497,226,260,463,762,22,852,358,57,730,430,751,609,571,590,174,482,385,391,662,585,159,531,803,364,665,369,377,802,314,265,596,354,271,731,688,769,472,850,810,862,106,348,109,169,32,550,183,300,637,28,613,404,118,94,266,104,368,195,63,608,511,652,605,282,684,739,890,299,423,831,312,754,705,179,840,757,83,490,660,79,663,786,709,204,400,115,530,476,160,767,16,371,578,666,713,95,119,773,858,359,711,287,9,679,540,124,823,598,252,567,393,519,827,33,872,396,610,465,453,801,267,262,496,617,352,689,720,249,272,509,294,99,825,152,235,634,721,589,477,899,298,484,52,219,156,134,795,375,100,80,43,669,495,706,591,428,859,65,269,70,529,735,722,216,426,442,602,830,403,797,462,23,323,301,562,446,473,38,727,676,848,334,843,295,535,475,54,805,737,35,414,34,759,417,792,157,672,871,885,237,755,587,296,760,166,448,56,347,781,470,774,543,88,896,222,330,196,253,363,318,853,532,778,678,131,884,223,178,389,708,857,378,816,74,528,117,486,440,248,125,137,91,439,761,474,869,593,343,7,854,851,335,279,483,646,419,90,502,481,553,506,555,685,374,743,192,516,606,214,444,897,723,217,239,329,822,245,240,877,667,764,576,674,892,284,726,616,863,340,813,37,361,136,758,133,771 
437,700,44,208,810,240,324,88,274,35,682,752,380,459,284,138,677,152,814,264,556,22,644,176,829,212,354,171,630,54,512,202,76,137,325,832,43,843,371,232,352,509,636,600,42,93,40,739,154,372,560,196,645,772,583,96,92,3,712,879,835,633,450,367,792,803,310,410,305,319,361,638,316,408,480,198,182,359,268,186,846,200,52,524,768,452,558,446,661,61,563,791,83,497,693,455,702,78,205,500,119,427,47,225,109,806,538,535,613,221,424,743,390,431,826,488,6,593,259,74,657,418,175,192,417,108,395,409,333,265,642,84,856,510,8,533,763,300,818,278,12,332,464,525,160,241,127,224,714,878,685,581,106,235,146,271,646,234,39,113,627,399,863,442,873,231,216,825,223,11,592,53,388,652,375,569,662,97,434,315,898,842,81,615,754,651,472,855,356,547,530,694,364,238,886,85,330,753,32,492,451,515,609,103,838,217,277,602,323,540,120,31,519,799,561,673,250,568,522,73,181,747,807,69,72,511,126,680,708,57,653,383,819,436,415,874,179,817,833,679,821,400,311,114,378,329,458,681,588,299,89,102,242,737,19,683,168,366,891,441,110,598,290,707,850,861,590,764,118,213,623,447,811,33,457,438,696,169,384,304,219,578,483,346,404,64,864,86,285,660,796,199,258,201,798,298,709,68,204,123,816,2,896,18,639,345,294,478,537,430,736,463,267,706,292,567,760,461,722,174,784,77,273,156,16,552,882,91,206,394,520,775,289,226,51,454,701,479,794,785,741,281,758,128,738,690,551,777,355,489,122,501,597,890,635,668,862,117,820,656,782,586,422,689,548,445,340,474,731,622,317,674,150,872,765,790,482,822,248,667,180,596,788,131,41,209,570,487,725,403,421,800,650,101,71,732,841,823,572,87,320,607,494,632,491,60,786,462,469,508,382,531,844,328,9,253,849,521,379,153,256,601,539,571,845,624,90,360,756,49,612,142,405,166,704,67,502,715,699,194,339,297,828,134,629,900,675,222,25,549,541,641,666,453,428,313,228,376,308,643,402,45,15,188,550,860,94,190,840,244,314,59,670,813,797,448,184,724,112,312,824,611,637,288,111,513,449,789,105,189,742,619,733,801,834,555,678,303,584,713,62,140,143,703,869,564,899,387,165,465,322,195,416,649,887,207,347,420,554,104,426,608,358,82,604,443,781,397,836,659,580,161,566,260,546,296,867,80,672,100,147,466,432,276,243,7,729,885,191,779,58,245,65,529,193,214,20,526,381,866,728,859,812,876,671,468,335,27,595,252,178,776,591,839,407,151,744,734,802,559,499,536,136,605,507,370,892,1,233,498,14,585,493,628,710,185,599,368,155,135,158,490,327,46,353,692,282,634,769,255,229,396,716,664,857,854,148,475,761,172,669,309,263,852,211,348,485,697,691,262,858,773,411,614,883,745,75,665,471,98,392,735,684,444,655,230,532,514,527,28,755,331,557,848,654,116,220,787,711,215,717,406,895,543,771,481,275,770,808,663,562,343,698,518,239,721,130,218,594,373,877,36,321,830,257,295,336,433,302,385,587,17,429,589,575,505,425,618,34,503,391,95,631,129,871,38,837,759,640,157,889,740,183,159,893,29,173,687,616,750,749,610,5,577,565,573,145,726,79,496,369,579,144,727,283,783,287,210,413,338,746,647,439,582,251,249,470,386,767,517,545,374,280,751,504,793,293,762,121,48,326,414,334,894,66,805,676,197,870,851,553,435,730,269,827,621,486,139,306,831,37,528,344,337,748,177,467,115,237,167,881,266,576,686,4,440,393,55,476,377,574,523,506,648,865,780,542,495,107,757,897,620,149,10,187,766,363,617,625,162,365,412,419,389,349,853,888,56,720,350,815,423,164,286,534,291,460,318,26,774,124,246,544,341,362,270,884,301,606,261,99,688,719,357,247,203,795,477,868,23,254,132,170,804,21,272,24,63,351,778,342,401,125,227,484,70,705,723,307,133,603,473,456,880,516,236,141,398,13,50,695,279,30,847,809,626,875,163,718,658 
512,705,257,722,893,133,808,473,285,329,126,706,719,19,26,381,835,498,554,546,856,163,419,211,751,59,549,727,464,489,792,571,441,656,200,314,280,883,37,410,505,597,374,382,484,453,354,838,670,764,680,424,316,198,199,304,623,299,646,873,710,351,278,252,420,649,707,848,537,148,675,563,219,146,861,238,81,460,514,226,476,89,545,92,852,151,459,683,425,282,757,422,417,10,657,758,821,482,233,101,678,774,330,77,360,606,162,865,604,201,62,825,152,735,643,64,259,871,562,340,525,111,673,247,510,445,720,13,708,136,800,474,306,866,367,693,570,79,863,663,415,31,27,11,421,662,158,598,427,895,472,58,183,32,145,231,7,132,515,475,5,397,887,567,724,440,696,202,659,207,43,117,18,2,511,265,372,164,739,192,819,875,149,822,613,215,186,641,654,205,572,558,777,179,294,500,807,667,300,364,144,846,840,128,851,679,63,880,492,869,551,335,458,139,450,728,449,465,660,197,378,839,461,142,264,730,96,750,568,565,23,635,507,742,650,395,167,577,414,791,868,22,771,191,521,718,816,820,700,94,508,467,295,541,438,836,261,70,827,65,84,738,54,790,408,658,621,605,437,477,73,50,350,504,38,456,762,281,241,759,338,82,849,52,741,356,832,286,307,862,879,390,566,61,809,86,694,173,756,653,889,275,327,423,193,56,137,829,153,491,855,772,466,317,788,685,586,479,731,120,401,339,753,288,457,348,897,208,768,154,843,854,256,287,170,763,273,559,210,301,269,488,483,223,784,217,733,530,815,689,587,672,21,697,517,760,837,677,888,603,569,435,393,590,740,721,737,726,802,343,357,544,418,867,881,870,51,523,406,813,305,574,794,684,332,272,268,352,891,220,359,860,494,497,274,138,190,752,573,538,655,108,8,49,560,600,877,745,322,539,14,195,391,276,240,434,876,579,112,442,665,648,171,204,890,787,630,652,127,388,271,244,634,398,592,436,3,786,817,501,698,766,810,564,1,823,789,165,303,333,584,168,609,93,157,203,755,622,172,627,45,550,296,712,160,532,380,744,892,690,619,355,392,341,633,124,104,640,95,302,830,236,859,389,116,561,68,779,125,234,323,692,433,141,882,411,20,585,691,785,229,366,365,833,349,487,793,801,509,631,147,76,260,431,894,83,319,526,583,361,455,12,150,239,624,536,765,218,516,470,703,98,637,430,36,754,463,629,123,503,182,607,262,248,279,377,520,709,166,687,379,416,311,34,468,227,214,632,109,616,527,826,548,601,318,681,480,270,230,60,246,664,878,254,582,71,715,312,310,645,857,232,209,24,581,293,593,245,776,44,309,4,169,796,216,174,589,812,499,575,717,17,734,454,326,615,602,540,884,122,446,674,161,595,290,400,113,716,647,669,578,814,102,608,502,118,368,490,277,524,767,547,407,396,746,773,618,135,66,225,249,447,80,74,743,783,552,451,831,159,638,370,187,872,428,291,699,844,448,506,375,841,900,775,369,413,644,770,362,115,176,346,384,41,222,131,228,701,385,107,874,519,53,847,213,30,105,702,258,714,363,462,761,266,806,251,328,119,242,795,818,155,16,194,331,429,886,522,513,612,313,399,533,686,799,177,97,864,283,611,626,439,353,90,485,471,324,267,628,184,180,387,704,292,781,748,29,729,373,347,178,289,175,40,325,402,666,749,668,493,85,591,596,769,811,243,206,196,557,181,75,780,599,898,580,221,284,320,834,553,140,129,543,39,636,853,383,237,342,469,409,803,100,91,67,478,15,682,345,185,896,57,72,828,344,732,46,555,297,614,106,9,78,736,315,531,334,528,426,594,899,688,88,337,110,518,782,6,432,321,535,651,263,845,87,671,358,298,778,48,805,496,542,42,25,798,33,723,444,713,588,255,620,804,824,625,747,617,376,103,130,371,842,121,69,610,576,725,412,404,695,134,642,452,386,308,235,250,443,797,188,99,336,212,47,114,711,529,403,143,394,534,28,495,486,676,156,35,405,253,556,55,481,189,858,885,850,224,639,661 
29,790,306,87,686,183,359,367,573,93,682,419,717,668,458,370,201,43,846,21,745,280,528,527,730,269,136,216,874,255,785,535,219,409,694,635,362,459,557,35,50,119,716,616,339,278,232,513,244,670,277,63,229,658,788,115,340,228,733,818,862,234,196,504,824,712,578,450,7,695,83,494,444,594,572,523,246,333,15,188,852,343,303,483,600,259,851,748,845,274,713,386,414,507,609,288,191,283,347,356,218,402,62,511,126,643,398,659,867,641,307,627,571,89,870,385,213,567,105,199,224,601,78,344,731,52,700,485,465,610,559,39,611,587,177,84,844,546,569,319,290,574,581,466,37,153,342,67,881,718,882,708,96,360,236,443,715,529,264,108,678,796,604,2,883,86,338,776,111,165,672,300,474,665,162,847,595,379,91,320,897,540,141,206,602,456,765,849,16,265,73,880,197,460,703,423,363,804,92,492,786,657,637,354,254,508,568,634,378,618,453,353,129,705,61,764,451,374,760,433,19,258,614,321,121,631,324,766,893,302,532,729,660,781,787,800,103,890,839,267,685,501,401,102,652,677,723,286,853,625,147,117,79,879,263,515,56,464,872,316,309,599,294,545,675,885,752,810,296,36,671,358,761,148,534,314,125,45,646,406,273,684,642,620,295,40,806,468,493,655,688,17,75,332,110,615,430,383,352,140,628,207,753,262,410,480,424,525,623,536,395,576,412,833,298,80,329,746,70,202,297,42,548,179,268,82,704,503,214,756,725,462,55,139,237,749,551,558,757,838,418,34,884,489,225,596,626,854,323,441,54,636,477,896,366,47,170,189,687,719,898,691,44,174,156,577,417,434,624,868,26,674,130,794,495,486,607,888,405,429,619,426,592,308,227,208,564,58,647,176,85,369,784,479,471,650,683,710,693,74,654,388,593,522,10,181,469,538,822,143,442,487,808,669,281,9,696,133,667,570,598,390,816,762,553,253,158,49,588,132,855,552,81,27,100,114,584,467,311,605,304,510,632,243,608,899,563,204,231,95,499,422,150,737,230,97,325,4,699,803,452,245,361,116,813,782,88,377,549,144,8,341,732,432,834,754,368,555,247,736,639,310,411,99,292,802,560,825,173,622,512,24,400,589,597,792,676,14,215,151,457,233,30,301,739,299,900,640,20,5,157,476,428,112,797,436,566,773,38,137,811,580,720,387,777,585,751,393,326,842,692,221,850,18,743,561,858,242,491,48,146,771,260,190,118,164,891,860,590,251,124,420,372,389,65,198,178,653,210,889,127,193,886,857,256,408,32,382,791,276,59,742,709,518,138,291,661,399,470,122,438,681,238,505,819,239,887,205,371,728,284,630,270,478,807,217,656,502,348,68,272,455,123,90,226,747,11,866,579,612,6,185,427,821,644,740,212,795,539,473,801,439,200,875,64,392,830,767,814,3,864,894,617,526,895,60,266,381,506,498,94,892,702,726,167,315,407,249,275,287,603,556,707,514,836,447,142,741,413,180,496,454,789,815,373,809,51,104,848,435,837,77,271,698,285,727,113,364,863,679,780,252,357,330,645,337,203,629,530,484,778,195,575,831,403,312,734,440,279,550,415,448,663,240,724,71,482,878,828,192,812,829,25,220,873,159,355,542,861,827,871,69,175,735,107,664,533,755,166,738,711,488,261,649,449,472,394,235,160,722,744,768,475,775,66,12,327,586,524,391,721,1,346,606,211,543,554,490,98,194,322,169,613,805,187,774,840,184,186,799,520,763,152,516,223,376,633,155,680,750,384,209,621,248,666,519,421,222,23,106,769,562,509,257,163,282,317,318,820,521,404,662,770,57,583,859,250,835,149,134,779,798,334,293,544,843,154,651,335,53,46,13,517,463,673,876,817,145,714,461,826,101,28,446,701,437,351,168,305,547,76,541,41,345,313,109,869,131,759,380,161,531,445,706,375,22,697,783,823,336,171,182,500,832,350,33,331,349,772,172,565,425,135,481,591,328,120,638,31,416,397,648,690,865,841,72,128,537,241,396,856,431,289,758,365,689,793,582,497,877 
750,273,162,327,407,694,113,42,170,371,758,739,2,625,537,120,695,485,535,621,35,249,583,183,641,568,448,226,206,83,11,17,163,34,229,869,103,736,616,540,588,662,435,461,96,118,157,504,130,3,518,593,848,841,423,494,37,197,391,668,463,823,711,329,624,667,8,124,682,297,455,521,334,398,85,175,553,310,555,613,692,438,87,697,452,738,158,27,269,191,115,876,40,753,475,317,748,86,291,751,153,205,346,434,478,756,710,63,125,203,768,427,388,517,442,759,237,172,373,392,792,456,382,272,70,364,6,717,64,270,312,367,892,98,223,672,387,505,606,160,105,499,611,805,543,278,279,532,296,707,247,642,526,544,534,280,815,284,58,426,818,117,723,788,579,617,76,847,294,309,712,315,628,845,483,313,578,236,515,467,802,700,454,562,656,785,321,660,587,787,742,215,302,289,899,26,119,389,300,421,375,39,173,390,859,10,415,132,232,91,19,59,722,863,826,248,129,549,65,171,556,747,812,267,340,134,370,253,224,110,850,15,796,9,24,865,585,402,709,676,498,706,74,541,81,5,12,619,47,356,217,413,558,275,71,396,603,685,721,775,411,264,680,444,868,550,204,447,128,598,718,744,791,180,633,439,809,547,263,61,295,640,41,437,265,431,644,82,221,190,405,615,594,484,880,377,638,212,144,214,491,228,900,75,806,646,400,122,524,244,551,94,169,466,140,731,839,126,864,513,885,468,29,536,84,840,732,156,227,330,23,577,725,669,383,7,769,252,820,664,862,683,333,149,794,761,178,418,164,271,368,152,836,724,653,843,897,184,440,322,293,336,771,842,569,109,107,238,425,287,713,443,301,608,492,589,581,445,194,460,89,509,610,69,181,614,539,817,255,719,727,740,306,28,147,819,884,639,251,357,4,831,689,813,799,187,565,141,209,661,659,596,795,182,168,607,584,673,366,32,1,497,376,424,663,552,254,533,861,352,326,127,749,554,866,365,726,529,620,14,218,25,686,142,837,827,385,510,288,832,341,811,816,519,752,696,90,728,277,216,734,92,102,335,395,825,323,67,623,457,655,88,472,887,474,379,514,586,473,148,636,803,355,502,449,72,561,363,508,159,814,835,808,883,574,222,833,670,629,801,145,243,493,626,881,612,804,16,489,754,703,419,450,857,690,479,233,46,800,471,78,797,308,161,198,201,451,488,888,250,199,545,60,580,602,13,781,166,834,401,566,45,630,506,522,200,258,793,225,856,259,176,99,810,404,645,345,195,776,600,877,895,432,774,851,276,627,155,409,648,632,523,68,893,516,481,495,852,879,699,174,416,332,829,52,530,807,230,55,557,121,782,762,343,597,446,307,44,80,410,146,763,348,311,507,359,609,151,414,350,573,399,867,242,890,860,477,18,702,30,282,73,665,599,403,512,177,430,501,590,538,292,210,542,647,875,77,855,649,219,743,299,783,476,745,135,422,591,358,245,635,188,464,757,283,108,261,397,779,286,772,93,730,21,870,139,572,874,637,846,417,652,849,563,393,764,548,53,234,360,687,138,564,830,192,643,741,62,239,112,305,189,66,320,328,677,202,798,285,143,131,274,784,339,886,281,691,331,235,193,894,486,453,260,767,354,500,314,319,372,266,853,208,570,786,316,56,654,211,362,268,525,116,735,38,733,604,167,634,111,766,470,511,693,559,666,347,33,714,441,531,708,165,527,428,872,257,873,381,318,720,384,36,729,240,821,433,380,898,770,657,361,871,458,891,349,290,394,101,778,43,459,369,705,858,31,780,150,675,528,674,412,765,658,207,324,737,49,220,133,790,684,520,824,896,429,716,79,256,882,773,137,469,100,298,436,338,822,704,503,595,681,386,95,420,546,889,406,337,671,408,622,465,487,618,54,789,303,97,679,378,196,777,136,179,353,755,592,342,601,482,715,496,351,582,480,701,20,844,48,262,462,22,605,57,698,231,154,51,854,304,571,325,241,746,344,838,576,650,374,651,185,567,828,106,490,560,678,104,213,246,575,50,878,760,123,688,186,631,114 
484,664,24,648,890,271,731,224,114,288,284,656,739,329,151,93,808,519,679,634,873,27,330,69,754,66,317,699,758,222,814,409,146,575,211,546,54,877,349,239,360,457,336,274,443,160,56,789,454,740,496,570,405,413,312,391,455,163,573,881,788,376,175,67,555,654,613,840,628,332,488,415,35,79,859,68,62,187,395,421,691,7,472,102,831,116,722,652,563,22,673,591,182,192,535,560,795,195,14,118,531,588,100,202,478,631,119,874,805,233,150,774,65,608,766,129,124,853,299,444,491,82,660,17,445,236,685,293,507,99,702,318,672,871,243,580,653,326,850,407,314,179,125,335,461,418,2,706,696,894,734,232,123,76,265,34,525,41,305,557,498,537,888,644,825,655,480,508,433,60,219,39,97,497,272,629,319,8,616,25,883,864,33,760,556,247,260,770,564,200,493,752,620,23,791,340,720,662,52,177,442,836,785,213,863,553,148,870,207,872,419,101,367,474,501,666,194,309,600,48,283,813,554,4,29,589,6,698,828,504,368,618,595,797,742,693,89,782,619,728,855,327,624,172,372,743,827,765,815,174,218,544,110,730,238,773,255,417,862,153,37,602,198,707,640,810,529,626,145,568,217,115,518,311,96,227,762,159,137,606,47,193,849,164,526,240,842,108,77,843,861,398,719,20,851,167,592,21,775,466,878,128,830,190,246,375,13,751,131,275,832,641,186,462,676,623,615,423,799,16,633,258,764,470,313,435,897,113,721,406,844,837,300,510,168,769,324,357,464,512,426,643,726,64,772,292,596,657,727,471,494,502,322,846,397,803,884,547,880,506,838,356,384,611,621,549,534,513,737,715,339,463,109,869,860,854,117,756,112,723,286,400,763,578,42,523,178,422,882,136,270,845,479,434,291,347,565,750,440,503,567,241,205,248,617,485,858,569,528,402,104,105,612,185,70,373,866,429,130,436,622,468,432,257,885,683,586,499,408,411,610,152,826,399,684,482,253,700,759,450,453,688,801,263,430,893,692,19,323,393,371,249,598,465,132,138,625,705,351,607,396,370,165,713,389,704,95,716,889,792,668,84,381,609,690,285,278,536,51,439,811,252,820,154,181,574,83,784,72,251,404,757,467,517,868,420,366,661,538,779,143,53,210,786,674,334,891,793,456,637,103,171,75,540,896,228,120,516,577,483,614,268,359,30,543,352,736,49,741,582,576,88,806,328,354,712,725,571,290,451,245,561,106,44,74,189,798,829,169,732,81,133,61,237,225,458,134,473,191,807,687,875,761,783,414,452,364,214,325,155,279,646,887,638,362,58,651,425,533,527,833,183,3,282,647,87,824,80,701,345,55,307,149,680,388,9,437,753,220,566,678,180,559,412,94,541,635,745,865,315,387,449,394,597,663,649,11,768,603,636,550,809,86,771,261,26,581,475,410,481,821,804,234,277,852,834,428,342,85,294,203,781,316,304,686,755,344,289,749,12,579,158,196,867,520,73,492,819,441,204,386,796,900,817,524,486,735,823,590,303,548,199,460,215,15,337,301,500,675,157,876,448,32,848,173,111,166,545,71,509,365,378,630,438,681,142,295,208,267,677,724,197,297,1,682,231,879,711,552,343,777,505,390,718,856,31,226,839,639,645,748,558,188,242,446,355,170,374,515,223,209,244,572,361,729,717,162,542,201,348,379,331,63,521,144,403,424,642,627,287,221,585,310,703,714,495,57,447,599,5,321,650,427,898,539,477,459,45,886,694,121,320,670,107,802,818,235,18,259,605,264,697,98,91,416,298,230,551,346,46,895,139,176,780,141,522,135,431,490,385,184,266,392,767,709,665,229,708,122,746,899,659,127,161,147,632,658,487,350,126,401,601,92,835,262,794,776,341,710,363,812,338,489,40,250,787,216,514,382,667,562,78,583,695,747,383,841,800,358,369,38,353,790,333,256,530,593,738,689,212,469,10,594,604,254,308,206,28,584,822,59,43,50,36,511,90,733,476,302,269,276,377,306,778,671,744,156,380,140,280,669,273,296,587,847,892,857,281,532,816 
533,252,273,180,49,767,15,103,332,456,815,437,141,850,778,170,275,444,580,526,88,432,501,389,418,716,244,173,591,99,131,101,126,121,538,846,258,251,835,396,366,364,502,460,254,108,187,168,48,72,241,605,765,838,675,593,76,331,320,319,462,698,595,407,666,498,54,26,552,680,204,348,430,584,34,350,695,221,367,736,711,596,188,762,128,717,542,146,518,246,81,797,155,848,315,74,336,94,352,769,97,28,326,732,574,569,699,53,485,529,812,163,564,216,525,804,370,1,193,545,566,654,300,325,237,265,98,856,25,557,93,291,883,44,317,340,492,795,203,45,250,773,786,876,527,95,422,504,620,225,588,813,600,747,706,369,896,523,116,480,895,472,243,607,568,637,23,869,120,441,827,541,787,897,289,735,544,405,212,507,702,239,543,194,463,755,630,609,344,731,493,509,62,448,899,127,12,353,314,383,761,16,21,664,510,60,715,4,159,41,125,222,505,879,658,199,142,387,166,378,479,321,696,439,339,66,556,185,619,215,884,174,673,196,277,847,625,613,729,294,151,859,10,627,176,148,89,232,341,685,129,476,489,524,154,100,611,852,548,814,623,123,800,189,821,608,233,443,117,554,819,799,768,162,777,303,490,513,449,13,272,790,33,738,55,408,373,297,306,8,77,547,576,652,656,693,347,399,171,109,90,355,900,150,782,836,530,6,663,219,153,30,140,657,19,409,636,284,635,587,834,470,122,674,179,746,229,424,130,720,56,181,682,734,589,115,718,198,840,760,820,616,612,357,433,759,78,537,14,112,292,114,878,721,440,563,779,124,75,302,671,361,618,628,247,52,18,65,183,710,590,363,217,285,82,167,763,646,197,143,365,322,312,92,208,741,565,713,5,651,567,296,381,200,434,844,889,400,249,329,110,833,839,863,601,175,138,42,586,393,801,633,783,391,334,464,206,390,649,264,119,271,749,642,178,211,235,286,874,454,724,368,690,520,703,425,865,231,234,202,50,35,349,68,893,766,149,536,477,725,227,798,571,830,722,653,24,562,659,281,867,118,311,262,737,740,261,105,152,447,500,182,521,864,478,712,757,374,647,514,307,748,40,356,681,295,733,290,631,491,750,599,665,891,165,403,882,535,318,451,398,210,417,224,870,415,781,58,375,508,691,707,471,745,255,751,330,268,572,573,406,872,689,304,245,186,299,515,832,469,139,686,402,436,845,59,780,201,837,342,684,213,553,461,452,259,622,688,539,632,220,184,228,862,263,754,506,160,793,677,770,764,610,719,775,106,426,392,641,774,645,394,47,888,283,678,309,785,851,380,2,558,371,873,316,458,802,351,46,831,230,875,728,96,752,495,267,11,83,362,135,842,113,337,412,327,467,487,51,701,446,158,866,335,885,824,578,195,466,133,343,67,714,634,240,602,549,450,676,438,483,614,288,481,638,669,79,858,758,531,661,672,855,789,376,85,266,401,57,423,431,269,644,301,496,270,80,169,598,161,662,7,223,191,803,414,497,655,742,853,796,512,739,805,420,825,650,22,624,667,310,226,697,427,486,784,776,64,359,36,482,257,29,639,63,597,411,810,516,20,3,585,871,397,877,242,205,559,385,70,898,604,308,274,617,468,743,31,727,416,445,726,298,788,570,324,287,704,172,621,579,457,71,705,38,419,791,43,484,372,817,603,583,880,465,555,144,32,459,305,772,503,69,278,104,861,435,868,382,413,829,102,107,253,279,841,668,323,792,683,700,708,744,730,811,37,345,379,333,692,214,132,648,753,886,134,857,111,592,550,218,709,818,248,276,87,809,147,582,91,807,854,823,528,892,546,575,421,177,771,256,209,694,260,626,532,86,894,522,429,360,410,453,39,756,577,890,534,190,843,293,388,354,687,826,61,849,84,236,395,338,346,511,9,17,157,643,606,475,806,581,615,145,723,794,328,560,156,822,192,73,540,136,640,238,629,473,282,377,551,494,679,280,384,881,594,561,474,499,670,517,207,828,816,455,404,660,860,164,488,442,808,137,887,313,27,358,519,386,428 
430,195,341,286,6,819,149,221,390,612,736,176,405,883,832,217,111,579,450,627,393,557,276,504,179,750,151,395,772,206,382,251,181,371,662,690,375,48,884,346,271,208,386,307,523,177,235,25,162,297,89,737,613,727,689,735,259,529,190,92,333,519,453,351,528,234,223,226,623,813,147,172,384,566,257,427,738,131,344,830,562,617,431,701,9,631,754,305,572,272,128,571,266,852,127,34,153,186,322,698,255,33,376,829,743,279,580,327,719,700,782,37,553,144,443,758,556,77,152,740,323,646,473,298,336,269,287,882,95,660,26,347,792,285,507,198,413,869,24,72,481,835,789,888,641,105,459,668,713,28,723,804,664,781,811,357,900,576,264,675,899,693,32,574,412,771,116,780,140,559,816,661,799,898,173,856,355,484,142,414,479,35,581,79,210,600,694,416,315,601,312,650,78,455,872,304,125,232,349,253,859,188,49,796,227,246,800,156,117,275,326,366,309,825,521,158,180,201,242,532,488,109,438,514,334,97,616,138,803,478,881,380,399,496,561,688,636,629,577,94,17,885,102,697,231,435,358,66,622,773,178,670,404,587,308,8,666,892,292,775,704,62,842,44,619,500,224,295,175,672,777,763,590,288,756,164,256,515,474,106,212,766,252,802,40,505,154,436,289,46,7,597,733,680,444,787,122,512,471,216,2,537,889,284,647,887,506,65,598,165,21,82,146,633,136,238,352,359,460,573,681,558,410,806,379,639,29,593,311,841,290,22,687,828,730,332,542,135,746,674,667,715,676,492,191,618,80,480,104,53,409,118,871,570,209,418,563,282,5,200,798,268,525,389,114,91,71,41,50,853,555,211,193,74,3,15,751,638,185,23,583,145,87,303,245,849,423,637,51,545,432,69,330,452,673,760,837,167,197,508,291,765,858,822,534,372,12,64,757,222,812,516,605,490,502,454,27,219,705,530,356,98,827,642,18,68,457,213,809,651,831,554,818,595,527,634,874,54,61,422,56,220,101,132,894,567,38,485,684,603,112,685,338,890,640,596,171,614,807,267,893,274,517,463,805,489,239,368,13,678,564,261,391,712,325,786,851,169,717,725,75,582,10,254,790,429,708,184,692,679,547,495,406,857,11,462,879,626,134,225,592,196,425,60,731,205,620,294,476,434,657,815,343,578,47,845,320,420,437,748,652,863,824,378,236,163,129,402,644,531,121,744,686,397,878,283,625,442,755,482,768,340,400,363,364,411,764,441,695,342,230,182,318,848,218,846,649,141,770,568,602,541,594,518,611,52,335,632,648,814,696,204,300,823,160,747,120,604,702,207,100,511,365,861,509,316,653,513,233,876,339,880,621,20,699,446,174,166,161,550,377,836,115,526,270,214,546,665,1,838,451,83,779,328,795,643,586,447,199,408,302,324,711,503,203,551,741,319,635,465,301,720,262,280,499,510,244,778,710,742,565,785,821,854,187,310,119,215,39,535,192,258,606,58,520,367,137,57,337,123,428,139,36,467,608,560,645,440,654,774,875,398,536,826,345,759,749,133,776,734,70,475,691,157,615,810,767,130,374,73,464,477,96,709,14,439,487,716,714,55,84,753,864,387,762,329,30,584,424,126,850,533,317,540,348,548,833,4,843,321,486,591,466,791,383,228,433,589,361,656,683,265,247,538,229,148,840,143,314,549,729,539,497,895,388,569,59,155,189,159,784,483,88,85,63,732,468,769,599,461,867,86,202,45,522,745,671,248,575,445,610,801,544,797,609,19,281,354,472,469,421,67,707,728,865,260,862,278,607,543,31,794,761,76,415,42,793,373,739,110,724,868,877,369,820,493,385,669,183,585,43,243,788,449,752,458,107,897,277,407,263,350,362,249,844,448,808,703,90,886,170,237,456,722,855,306,847,93,491,150,524,396,370,99,108,124,394,783,417,873,630,392,16,834,860,401,299,360,659,381,103,552,403,498,470,628,677,353,655,293,494,624,240,419,896,726,331,241,426,817,296,194,870,658,682,588,718,891,273,706,501,866,313,839,81,250,113,721,168,663 
741,78,129,780,634,821,609,203,50,753,314,324,561,744,653,59,700,847,115,879,828,343,61,138,167,563,303,822,629,288,615,274,193,646,290,446,179,718,812,509,514,442,22,14,696,148,163,328,420,527,259,885,616,377,204,843,464,518,39,394,150,488,354,10,118,121,375,792,876,592,481,64,26,92,820,77,572,38,676,874,190,272,672,318,359,401,630,325,217,87,182,577,187,628,65,308,708,201,33,400,656,263,429,683,867,195,268,846,675,402,613,230,82,581,166,474,530,671,333,849,455,168,853,30,143,469,410,790,233,266,200,623,715,845,626,589,101,784,452,214,570,578,395,815,844,373,116,891,371,681,445,374,673,554,814,19,863,161,397,873,860,497,639,871,220,894,341,415,449,480,417,435,479,855,267,690,51,147,587,97,382,564,444,601,120,312,256,162,761,428,608,391,485,58,826,348,575,66,345,9,668,641,331,742,825,427,503,756,28,834,404,250,433,519,751,85,35,94,174,248,697,662,279,209,265,135,304,103,725,731,758,363,237,685,620,378,565,298,178,470,390,818,392,733,74,669,709,476,658,447,310,869,396,297,383,311,771,866,302,602,454,91,770,196,405,226,57,60,108,878,361,495,238,501,360,72,749,582,46,379,43,327,759,461,289,691,260,210,7,472,438,810,895,357,881,465,232,208,841,573,576,515,857,338,376,842,89,388,124,3,525,240,27,155,571,670,463,107,854,358,752,734,750,875,489,764,747,339,804,633,712,468,778,887,719,484,228,11,346,207,513,890,335,172,651,243,114,127,580,88,775,69,754,316,177,865,898,706,536,24,642,4,722,659,487,153,286,146,180,710,743,23,221,366,599,540,329,278,13,313,364,81,245,667,152,889,49,777,721,424,496,643,37,498,586,387,661,170,18,816,355,544,585,548,837,714,593,173,505,384,534,117,247,104,362,729,434,416,269,491,492,134,499,144,684,426,808,611,549,817,567,287,900,813,807,832,704,252,450,386,234,521,291,244,830,453,175,277,765,720,25,412,632,794,456,547,524,886,595,84,858,478,406,856,421,273,295,760,699,896,877,131,31,695,83,399,803,113,546,522,293,403,517,301,739,255,282,136,596,422,317,868,342,728,520,96,677,883,385,740,380,80,588,502,483,186,439,664,786,823,538,644,36,766,604,748,8,264,772,870,477,635,590,95,68,16,111,67,776,192,76,638,687,654,655,591,257,773,471,785,800,239,224,211,307,490,553,285,381,678,258,17,123,724,370,880,647,41,723,251,824,893,254,241,584,106,597,543,158,732,789,70,840,767,334,542,90,462,594,500,701,48,122,698,356,159,336,443,679,674,227,705,408,235,213,219,15,551,128,850,783,617,528,774,151,32,872,467,569,711,738,330,649,86,692,367,365,516,191,680,56,797,579,184,389,160,508,29,142,788,189,451,6,42,292,884,486,652,315,660,558,535,431,568,600,787,55,216,507,300,119,5,110,619,137,102,482,246,281,197,231,640,791,627,562,276,892,859,202,566,717,448,532,440,75,332,819,353,552,323,423,716,347,726,261,657,707,157,20,181,52,694,249,299,305,225,99,414,745,271,473,624,598,139,555,510,703,253,125,337,702,93,666,864,309,418,769,459,614,73,242,806,529,349,574,2,205,133,735,176,280,40,645,169,650,320,713,493,319,262,294,71,79,831,419,799,149,322,222,47,350,829,100,165,539,526,53,606,862,275,835,533,145,730,811,344,206,140,897,194,198,441,782,504,848,556,1,223,164,188,411,512,284,537,663,183,737,693,779,430,636,494,407,550,457,283,625,559,432,63,436,648,686,827,763,132,296,560,218,838,795,156,682,398,466,98,607,861,229,458,541,781,12,793,605,185,610,809,109,839,171,393,796,352,523,768,736,437,631,215,836,44,746,506,621,372,270,899,34,852,475,141,369,612,802,798,126,425,326,368,409,351,618,112,460,762,637,105,545,805,54,236,199,45,882,557,755,62,622,757,130,21,688,321,511,888,727,833,306,689,212,801,413,583,665,851,340,531,154,603 
515,601,139,122,630,388,113,86,304,95,769,767,120,517,399,185,621,159,797,245,161,132,698,236,811,392,418,43,445,65,199,102,127,10,332,870,100,776,450,355,443,548,644,633,2,126,112,664,85,111,551,211,751,834,590,163,27,38,673,846,786,732,585,468,803,807,114,107,320,306,372,656,438,498,128,252,369,419,342,247,848,384,3,650,666,613,361,239,558,175,397,854,90,618,680,396,672,93,343,648,46,329,171,305,131,826,667,138,366,218,583,655,487,425,768,637,59,313,312,88,722,525,101,326,325,196,173,509,212,330,530,150,888,134,32,532,707,351,744,221,7,429,566,626,204,229,264,168,598,828,533,660,259,395,237,382,695,341,28,91,685,241,810,488,840,213,115,863,209,116,690,187,522,721,465,466,710,225,422,476,894,782,257,531,770,756,479,844,373,681,574,528,273,368,899,45,130,699,158,545,424,200,410,191,820,71,365,248,350,154,42,68,602,855,642,552,240,622,364,156,302,708,841,216,228,379,288,559,489,1,759,193,843,147,170,887,371,746,835,661,718,492,155,249,300,76,136,632,308,359,137,87,409,615,25,572,293,433,875,611,258,501,437,640,876,829,494,733,157,226,709,612,849,48,578,506,703,323,435,146,336,668,143,426,338,167,831,119,346,420,654,290,202,387,799,362,714,184,12,79,684,55,900,39,753,414,447,271,614,451,636,303,281,696,117,575,812,380,739,378,850,186,35,190,9,658,830,141,62,370,176,693,428,267,152,194,785,469,842,802,823,282,665,192,738,777,432,723,169,453,108,401,715,880,688,670,864,49,677,607,616,568,537,749,518,328,214,400,653,471,448,676,238,821,619,701,581,765,297,580,144,617,761,15,133,234,647,596,463,554,547,750,606,24,67,814,877,801,508,83,135,734,591,745,535,23,682,340,353,560,521,634,862,334,66,347,780,562,460,57,72,604,490,584,752,561,37,385,833,73,503,162,316,232,760,74,599,675,651,82,279,81,800,129,711,898,592,417,58,659,519,758,717,464,600,481,84,376,278,529,485,21,47,80,520,873,217,16,731,105,299,106,689,868,763,475,269,727,275,215,789,726,474,412,219,354,563,704,276,178,818,620,798,865,741,483,773,292,593,705,96,235,250,643,890,628,895,97,203,491,496,289,527,735,847,296,393,283,603,123,246,712,335,183,495,421,724,524,869,544,478,331,295,363,567,69,872,8,784,118,254,311,569,452,415,40,549,885,223,815,166,287,124,663,284,274,110,455,539,839,794,866,754,874,774,434,441,52,605,408,317,755,195,879,472,307,713,819,861,582,227,564,298,720,322,504,891,70,56,526,78,686,641,523,742,383,553,125,145,98,18,623,266,60,482,646,265,449,594,256,319,398,816,555,889,882,337,207,781,13,589,53,457,832,285,500,345,687,702,315,822,635,413,671,867,747,30,783,610,99,541,587,771,484,657,44,540,576,394,153,764,358,609,806,571,172,177,691,775,272,790,160,853,222,836,381,205,788,813,778,493,480,791,573,403,793,230,104,458,442,837,5,486,808,260,462,502,301,374,263,557,11,270,534,477,629,407,736,75,339,198,151,719,294,892,94,748,678,538,142,897,729,233,31,884,179,268,505,497,694,627,692,36,631,649,550,140,766,33,516,390,639,41,796,63,792,423,109,536,262,805,674,556,577,405,349,467,201,787,542,586,411,243,716,416,858,375,838,77,208,454,344,224,857,19,852,662,348,871,859,669,459,795,386,856,456,499,310,251,851,22,461,430,510,827,121,597,34,367,352,825,360,700,638,51,402,546,6,389,318,706,595,507,652,893,737,645,277,197,809,860,470,261,29,242,725,255,679,697,314,444,446,512,92,391,824,896,103,625,436,740,513,181,440,588,50,608,253,4,772,89,309,565,165,174,286,878,188,579,333,280,757,624,356,357,206,817,231,883,26,210,324,20,804,14,404,64,164,180,779,439,543,244,327,514,189,730,743,427,182,683,431,511,886,321,148,291,473,54,61,570,377,17,881,762,220,845,149,728,406 
69,633,389,470,654,421,673,528,540,480,361,67,858,771,606,396,134,465,620,401,889,467,163,568,311,368,90,707,894,455,875,717,312,784,755,157,500,267,770,103,68,32,424,290,769,371,332,282,486,840,100,534,23,245,687,543,670,554,438,513,704,12,34,306,501,276,779,805,352,828,148,169,294,434,848,522,358,189,139,602,573,319,731,273,393,107,891,827,811,310,706,18,557,462,263,255,97,422,251,191,580,406,231,732,614,171,142,874,898,788,260,378,409,156,757,246,516,759,138,684,3,447,542,256,756,208,832,642,553,658,388,288,146,859,539,63,711,787,313,373,676,692,493,538,403,257,336,597,862,479,879,564,308,472,578,330,795,515,552,637,764,863,342,193,715,617,523,379,265,391,494,495,458,720,71,883,221,415,162,164,713,283,258,179,183,117,741,555,184,70,22,876,317,377,236,680,656,556,239,217,851,836,651,693,87,767,698,834,277,861,742,548,11,344,29,611,423,92,775,562,194,108,116,385,186,588,381,613,900,753,412,839,168,882,878,309,206,843,504,98,456,690,600,357,639,860,868,140,895,736,286,641,85,850,547,349,296,762,601,248,461,425,482,287,149,758,645,524,366,459,443,262,305,460,397,109,38,170,530,604,181,491,854,695,298,300,439,623,384,723,550,278,624,353,62,726,106,481,820,437,505,511,279,507,110,754,291,655,383,360,272,618,351,688,587,47,6,740,51,207,19,324,847,652,622,54,487,686,667,844,865,268,212,595,558,856,154,369,367,589,77,464,857,575,42,219,533,730,585,302,413,541,400,768,58,96,79,576,574,476,899,427,60,1,101,572,508,355,362,892,120,325,166,492,411,304,468,831,326,244,794,126,232,718,301,682,238,84,773,65,27,175,593,792,789,293,264,292,499,506,801,192,565,314,112,590,343,519,864,20,446,205,370,661,526,161,420,21,636,817,825,128,841,607,374,130,640,127,247,596,873,710,689,328,13,603,608,203,135,803,320,783,226,363,638,786,334,136,668,41,253,121,4,846,104,111,632,339,833,721,709,561,664,631,813,271,174,799,402,750,365,453,414,56,583,782,701,177,407,849,269,57,426,61,599,830,394,646,346,797,99,105,33,252,386,737,502,387,55,49,705,213,185,158,227,48,806,845,316,124,147,702,196,16,545,712,431,815,46,634,867,469,835,436,675,448,442,178,10,807,560,380,897,115,781,819,535,719,204,405,451,804,59,89,80,475,893,488,748,2,223,348,478,341,122,621,484,509,233,776,15,50,826,503,24,285,83,727,669,441,307,372,880,133,66,452,299,53,86,31,714,450,202,382,855,74,644,471,747,793,457,579,95,331,584,155,395,763,429,532,743,392,270,518,72,489,359,884,531,810,197,151,150,733,176,225,242,869,118,816,660,824,243,681,132,234,871,474,628,222,666,888,490,182,780,88,594,159,322,798,73,890,463,791,44,766,167,37,335,433,199,449,498,230,778,520,337,517,43,180,93,744,635,887,28,829,544,39,650,200,870,45,9,663,173,418,466,570,886,685,435,700,295,94,725,430,250,653,419,510,619,610,647,808,318,75,703,190,694,566,615,745,529,209,218,327,315,837,777,338,340,616,214,760,559,321,665,521,881,643,838,40,497,626,7,398,674,404,582,662,739,165,649,224,785,102,546,485,25,796,399,514,240,842,81,216,201,729,114,141,648,198,345,261,254,131,473,145,629,266,567,259,672,592,678,376,749,113,64,347,241,800,5,627,8,428,408,144,734,280,697,215,609,229,390,659,440,612,211,172,536,683,289,129,444,235,303,708,852,477,195,671,821,76,137,738,26,885,210,14,571,774,563,591,724,679,333,746,35,123,78,143,284,818,790,716,329,569,483,735,630,30,354,445,677,814,364,275,722,153,537,496,52,581,586,187,551,751,549,728,281,125,274,822,657,323,237,853,375,625,297,220,812,527,691,188,699,410,866,36,432,249,152,417,809,577,17,228,91,752,82,512,772,525,877,605,356,761,454,765,823,696,598,350,160,872,416,802,119,896 
25,735,255,92,647,287,348,314,533,135,695,380,710,744,557,304,166,90,839,80,765,277,480,507,679,338,65,244,882,195,780,495,127,413,692,661,333,445,670,4,17,78,671,569,381,199,150,444,185,648,207,171,263,681,789,214,323,236,672,796,848,251,170,439,814,663,545,467,67,732,31,418,400,583,596,484,301,249,1,326,851,356,328,518,540,279,864,714,843,206,653,429,371,595,535,216,159,219,289,403,189,334,27,594,238,604,404,676,877,657,393,564,558,47,860,446,220,541,35,290,197,597,115,275,688,8,669,614,399,608,476,21,680,618,176,57,832,656,526,241,311,650,635,611,88,81,312,162,881,691,884,730,139,433,352,396,819,523,232,201,791,808,577,40,873,231,272,801,52,155,703,340,546,781,83,869,559,365,49,274,896,510,140,149,551,478,761,846,7,286,58,878,123,423,797,391,345,762,51,420,822,637,574,449,283,475,625,622,288,646,432,327,99,752,91,700,382,318,709,417,18,243,598,303,72,561,339,701,897,344,652,706,639,783,799,815,126,890,829,228,624,634,354,156,587,673,718,247,858,662,75,211,43,876,235,440,98,629,862,398,362,519,410,473,682,879,697,774,222,111,694,425,750,113,579,242,125,33,606,360,187,698,642,664,221,39,772,443,422,600,631,42,198,372,188,654,378,374,412,102,588,205,828,226,437,627,395,465,610,448,359,512,330,824,264,64,343,704,121,194,387,60,576,306,256,138,685,503,229,800,717,416,105,284,308,727,550,464,775,835,461,124,886,458,208,603,552,847,285,369,61,562,602,894,320,84,294,172,641,658,898,628,55,177,97,505,351,361,556,883,34,601,53,754,450,442,636,887,329,370,623,357,537,316,145,368,527,106,617,169,82,355,740,488,524,675,739,659,621,108,615,456,666,599,36,173,435,453,830,94,530,462,806,630,270,6,651,89,665,585,589,321,836,747,529,193,186,14,660,196,871,544,266,56,154,180,678,406,260,590,227,486,582,153,722,899,483,168,291,141,409,455,146,810,254,119,293,68,733,770,612,217,376,161,826,767,28,405,528,261,54,276,689,516,813,778,497,491,296,756,586,346,373,45,389,785,581,795,215,643,522,70,397,684,563,763,743,76,143,142,474,136,12,257,771,252,900,633,26,22,183,570,379,163,776,548,506,741,48,250,817,667,769,350,716,511,696,358,411,837,619,271,861,3,804,553,856,265,554,62,239,724,234,129,77,134,892,855,616,262,59,337,331,501,16,347,213,573,278,888,204,299,885,854,309,324,9,415,784,384,116,687,720,620,73,364,591,430,542,87,419,645,175,605,812,178,889,190,377,790,248,711,295,402,807,165,578,468,268,114,298,555,74,122,151,693,44,870,543,677,5,100,514,794,705,749,210,792,493,496,766,472,246,866,15,385,840,712,809,10,853,895,560,469,891,132,240,471,532,565,118,893,742,777,128,349,332,184,223,273,547,485,690,489,823,407,86,683,390,85,482,436,782,820,421,802,133,164,852,521,857,50,300,728,237,746,202,315,872,686,731,269,394,342,640,424,267,567,479,408,737,203,515,833,336,292,702,492,366,481,375,520,719,181,764,37,459,874,811,112,844,818,2,281,865,147,470,494,875,805,863,120,157,758,104,592,509,738,158,723,708,427,253,632,441,428,499,179,110,707,757,734,447,838,32,29,245,539,477,319,745,20,258,536,144,607,517,572,174,148,466,109,549,793,233,786,834,101,317,787,525,788,209,580,313,341,571,96,644,726,383,137,638,305,725,460,538,212,46,103,729,613,568,224,130,200,401,325,827,434,451,721,816,79,674,850,230,841,71,218,768,759,414,282,593,831,107,773,297,23,13,11,454,490,715,867,842,225,649,609,798,63,30,487,748,457,463,93,335,498,117,500,38,310,280,41,859,302,713,534,167,502,392,753,504,24,655,760,825,322,95,160,508,821,367,66,386,307,779,192,531,426,69,438,736,388,131,584,19,513,363,575,751,868,845,152,191,668,182,452,849,566,259,803,353,699,755,626,431,880 
707,80,218,443,139,821,99,62,173,587,713,518,67,798,724,95,525,648,380,739,123,423,451,262,366,703,369,387,386,109,48,27,132,164,368,823,184,488,813,562,556,537,322,340,312,91,175,236,121,34,344,776,817,791,490,705,110,390,186,315,260,759,637,290,497,431,15,154,770,553,407,321,320,455,131,219,692,192,588,808,530,536,267,688,183,716,357,21,273,211,5,829,72,818,245,163,584,79,282,742,234,46,466,671,706,529,660,144,289,410,809,146,442,460,292,779,461,59,319,664,667,517,546,254,58,435,20,843,8,426,86,480,887,167,427,550,258,753,314,56,295,685,691,878,694,227,355,717,374,376,348,690,676,696,755,269,892,391,141,659,889,300,432,806,378,802,44,807,276,505,744,521,725,895,394,563,470,326,456,477,555,420,589,440,441,704,454,462,601,743,654,255,231,324,899,87,89,190,395,305,623,25,13,658,740,39,622,71,103,138,90,160,620,857,793,51,49,413,18,298,634,544,661,383,405,7,513,47,419,311,882,42,616,108,150,784,678,358,564,481,206,847,33,695,30,107,65,429,181,567,212,641,579,266,178,145,720,858,444,810,593,53,811,202,794,397,60,268,81,746,722,778,665,274,684,309,689,630,272,29,240,673,73,624,118,573,333,182,171,23,126,709,780,606,856,586,422,323,343,261,220,434,900,157,762,836,458,14,523,100,328,4,66,463,119,626,701,77,833,597,877,617,155,774,229,835,425,332,345,591,43,325,787,834,629,28,653,88,769,582,828,801,359,242,632,674,16,308,112,82,511,17,876,522,485,800,894,287,159,134,447,213,750,751,467,6,22,61,152,527,731,257,280,279,249,335,651,404,106,230,253,318,338,191,201,825,476,822,84,687,675,507,193,166,411,790,883,365,94,539,24,830,785,837,797,331,317,10,415,535,756,542,663,221,327,645,277,543,504,194,69,296,574,471,375,381,459,509,859,596,526,259,866,686,840,600,826,288,421,104,83,41,399,102,888,607,133,541,552,812,156,781,711,768,737,710,98,789,533,76,870,188,281,516,551,647,350,222,353,681,733,113,356,875,285,547,754,371,639,373,342,745,168,479,682,85,627,124,666,367,708,814,677,891,293,187,865,766,482,670,337,207,577,424,849,478,599,55,605,727,732,652,400,839,346,715,179,75,761,702,233,844,568,216,63,57,203,469,880,226,26,698,239,615,760,64,625,354,819,557,752,37,566,502,520,347,382,571,437,763,278,93,135,860,412,827,569,35,824,472,871,886,363,576,816,96,608,398,457,783,748,291,122,896,430,662,244,799,862,575,105,396,370,864,111,491,643,414,127,741,200,850,726,120,581,503,128,36,45,598,303,820,307,519,465,172,697,250,165,631,636,284,869,117,885,775,585,70,483,162,137,198,735,448,403,531,364,275,487,657,302,362,114,416,450,861,148,863,672,500,730,453,786,669,592,248,241,484,224,401,409,115,486,492,246,143,235,116,614,223,628,68,408,74,832,204,729,838,540,846,649,619,788,683,385,721,738,38,428,514,349,339,644,635,316,772,805,3,225,11,264,379,1,436,125,594,215,777,565,9,50,538,831,388,868,389,438,304,161,147,893,406,515,496,501,512,728,101,532,217,197,842,377,650,693,177,149,559,352,452,417,402,196,613,92,499,773,142,554,209,699,433,498,853,578,719,180,2,489,283,633,718,52,270,306,854,286,867,604,445,852,301,12,384,474,765,468,361,897,572,618,549,848,646,884,185,195,446,151,595,189,330,510,758,874,40,851,252,736,609,329,621,771,473,351,170,803,205,392,19,782,796,712,764,898,313,611,243,232,872,464,31,680,256,528,310,294,890,545,548,558,655,334,153,640,372,879,638,97,841,140,534,590,610,734,169,855,238,297,439,570,210,700,78,129,299,506,804,271,815,612,561,247,602,792,580,493,32,723,176,214,560,136,475,237,749,495,208,251,767,341,603,336,265,881,583,747,393,642,656,508,54,757,679,263,668,714,845,174,494,199,795,158,873,524,130,360,449,418,228 
45,792,285,74,701,167,351,347,558,72,697,467,685,644,440,365,244,31,854,11,719,255,561,496,761,258,153,176,863,240,758,494,203,370,669,679,339,515,524,41,63,151,723,633,294,262,216,545,224,620,312,50,271,688,783,84,306,193,747,833,864,272,222,489,830,739,546,420,3,661,94,521,450,591,542,492,238,350,19,157,862,340,261,491,626,282,844,726,846,256,706,452,378,497,632,304,239,266,358,381,186,399,54,476,92,684,416,615,855,602,323,653,567,105,872,393,170,552,109,154,263,595,62,346,716,42,671,471,457,578,572,34,667,556,134,118,850,510,611,318,251,550,582,472,26,147,330,51,874,756,873,710,86,361,213,442,709,502,241,78,665,771,655,13,884,70,321,791,102,131,670,275,473,657,182,836,617,368,106,331,898,592,130,234,640,487,748,861,24,296,95,871,196,453,752,379,355,807,79,499,762,624,631,328,325,466,539,612,380,588,407,324,168,735,91,765,441,403,740,390,18,316,658,300,110,616,310,766,885,260,547,704,702,742,759,824,96,891,847,315,708,480,382,87,622,619,694,336,845,597,136,82,81,875,233,536,46,445,881,334,292,608,287,575,715,889,750,810,281,25,672,376,781,117,531,341,185,43,630,383,273,680,606,599,305,32,821,437,481,648,705,14,60,322,187,581,478,367,317,121,656,163,795,236,444,459,410,511,628,537,430,566,392,831,283,108,384,733,122,198,373,35,493,144,235,119,744,462,184,725,700,501,57,115,210,722,587,559,776,843,464,29,882,461,274,623,618,857,313,456,39,621,486,897,402,76,248,156,707,727,895,696,64,227,190,569,391,446,651,858,36,686,132,809,523,526,603,890,389,460,577,455,636,276,211,174,576,77,647,195,111,423,784,431,422,673,718,738,693,52,614,411,590,543,30,142,513,528,799,181,443,498,819,642,250,12,729,171,645,518,553,417,803,753,598,288,120,59,625,93,848,506,71,17,159,80,579,503,366,560,308,475,674,228,605,899,580,209,192,140,505,458,205,714,249,113,301,2,662,797,432,217,327,88,796,804,83,342,593,104,4,320,743,488,839,736,345,589,229,703,676,362,421,116,268,788,555,826,152,583,554,49,454,634,638,780,681,8,247,207,412,226,27,356,778,343,900,601,9,23,165,448,436,164,816,404,551,745,65,97,785,596,698,371,767,585,773,400,396,842,691,206,841,21,732,514,866,204,520,33,125,746,286,199,129,123,887,870,557,319,107,401,360,395,68,169,141,646,219,892,194,270,886,867,309,408,44,344,786,269,53,763,682,574,158,277,687,451,530,161,426,677,232,512,800,253,894,166,349,713,259,627,295,495,806,215,659,470,338,47,243,465,128,67,245,760,5,859,607,565,7,201,468,814,695,777,200,772,586,425,802,398,197,878,69,388,808,775,812,1,869,893,610,548,896,101,242,414,500,447,124,888,717,712,212,280,424,278,289,264,643,541,711,570,828,418,139,757,463,180,538,434,811,794,435,789,37,173,856,477,820,89,326,689,293,741,90,353,852,654,801,220,372,387,609,333,208,613,516,479,774,150,563,822,415,352,721,474,246,544,386,405,663,231,768,58,529,876,823,177,834,829,20,183,883,137,335,562,851,827,868,99,138,734,149,666,490,769,127,730,690,509,237,675,409,527,375,221,188,699,764,770,484,751,73,10,357,564,573,406,720,6,354,637,225,600,533,535,75,175,311,178,584,825,146,787,835,189,265,817,532,737,218,485,290,385,629,148,649,779,348,223,604,252,683,482,413,191,22,100,793,525,522,303,126,298,329,279,798,504,428,650,749,85,652,860,284,815,143,202,805,790,307,257,507,849,155,639,377,48,61,28,517,433,635,880,838,103,724,439,832,135,15,438,692,394,369,162,267,594,56,519,66,337,302,114,879,98,755,364,145,568,483,678,363,16,728,754,840,299,160,172,449,837,314,38,291,332,731,230,549,427,133,469,571,297,179,664,40,374,429,641,668,877,818,55,112,508,214,359,853,397,254,782,419,660,813,540,534,865 
803,215,224,831,805,665,706,325,41,679,134,616,489,298,330,212,854,834,89,873,744,289,252,70,408,414,593,818,211,436,536,315,409,619,59,425,228,866,445,666,697,685,37,108,598,346,323,649,611,543,589,855,652,286,4,735,569,507,216,639,175,615,515,65,71,316,457,809,874,230,712,329,80,22,808,61,433,295,746,736,96,201,626,217,664,424,244,296,20,241,332,648,274,328,326,624,852,387,125,342,699,564,573,331,778,399,280,820,294,157,461,551,24,776,100,350,504,758,606,725,667,35,856,133,116,645,418,465,441,66,532,692,594,840,597,773,68,420,732,496,547,245,145,529,812,643,131,877,45,842,95,88,595,338,628,72,472,53,528,825,501,225,836,876,200,870,560,240,677,477,139,313,259,516,563,179,167,119,779,210,381,806,451,814,364,317,14,181,841,491,745,113,719,60,659,398,698,174,487,146,197,673,571,490,891,511,214,794,277,829,434,281,651,302,791,263,206,366,234,160,709,844,401,188,405,341,236,284,268,657,466,300,368,531,407,304,565,50,93,743,680,498,601,630,144,566,625,754,375,141,548,817,570,63,479,629,700,592,400,449,308,380,555,539,442,117,132,123,309,860,122,367,205,618,164,356,858,637,12,586,266,83,731,147,596,682,494,171,73,674,696,787,885,265,896,163,557,156,804,689,795,506,688,430,394,553,99,613,38,170,788,444,242,5,703,807,662,82,890,370,730,695,668,763,549,749,869,190,822,209,656,747,718,815,552,458,276,222,187,28,546,875,34,109,823,219,345,43,722,410,789,272,475,182,452,893,900,724,755,158,168,78,739,760,742,373,535,463,505,207,740,191,467,627,816,799,103,32,178,640,198,389,568,672,307,797,111,766,811,530,646,843,54,447,392,255,448,423,126,796,371,439,256,335,865,701,821,422,140,650,293,148,185,25,334,756,704,671,31,385,485,471,87,3,850,715,782,764,390,721,104,142,897,781,849,768,311,591,734,321,542,603,588,478,497,360,517,337,632,711,282,395,762,327,486,584,663,888,239,118,585,581,347,861,46,324,514,716,857,887,880,250,58,653,129,67,554,397,419,237,602,480,759,544,527,136,105,285,519,196,391,889,533,524,771,21,297,879,681,859,261,303,669,769,416,468,291,631,802,867,550,322,143,790,785,429,40,159,838,775,238,306,203,127,124,173,339,162,786,29,273,502,349,720,189,600,135,726,372,777,660,194,374,379,474,562,137,310,152,793,453,184,192,488,610,752,559,233,587,94,847,898,42,229,634,427,717,406,1,523,708,271,819,642,621,388,365,541,579,723,783,13,251,412,199,354,195,443,693,226,288,305,437,572,15,299,183,658,320,837,757,352,733,727,377,154,872,166,780,318,765,636,540,90,567,353,355,404,464,609,86,767,510,112,635,155,161,85,7,824,257,98,69,218,213,895,605,537,172,393,622,107,169,153,798,784,344,495,748,269,384,79,9,830,17,121,690,525,483,492,357,713,882,481,620,75,886,892,27,438,186,614,638,110,227,115,694,556,138,56,678,655,253,864,74,417,576,319,36,421,8,654,435,11,623,343,18,336,575,526,670,361,260,249,462,633,862,33,48,578,473,19,738,839,386,426,518,710,120,101,92,827,561,51,705,102,151,62,729,6,30,235,686,193,675,617,415,676,545,91,114,10,106,369,583,792,500,456,508,314,49,863,383,484,770,482,26,521,835,312,612,753,258,851,750,180,2,358,899,267,150,84,828,243,871,774,57,363,77,262,402,761,55,450,351,254,455,707,751,503,813,220,270,801,534,604,440,580,97,279,362,247,202,881,644,44,476,275,454,868,884,204,428,403,221,47,772,520,460,599,714,848,76,800,231,128,432,691,359,558,333,641,810,177,149,737,513,687,577,522,826,39,832,684,741,647,301,894,16,607,459,283,683,223,538,833,292,376,248,396,661,378,574,81,431,728,469,165,290,878,23,176,411,52,582,382,853,264,702,493,340,130,246,208,232,883,608,512,413,470,64,509,499,348,845,846,590,278,446,287 
96,531,312,349,480,592,491,369,489,501,553,38,814,842,732,284,70,476,642,468,865,461,165,545,294,506,9,593,896,304,836,562,178,667,750,373,417,195,841,82,26,4,437,289,692,239,231,151,313,763,12,616,162,475,733,615,521,518,359,453,670,98,81,290,577,285,632,683,414,844,42,106,281,495,782,478,487,78,112,714,663,383,626,424,263,244,888,740,810,235,546,180,432,672,206,97,40,278,222,338,441,241,194,793,665,221,260,831,894,784,472,253,465,59,739,445,503,572,29,708,19,532,504,215,657,133,741,798,352,668,248,240,464,808,499,16,682,837,223,209,591,783,650,772,466,108,318,630,872,371,882,719,387,603,690,295,879,540,413,643,870,861,259,276,713,696,331,627,126,395,680,542,625,858,6,893,251,397,66,176,761,205,305,72,171,271,765,600,142,201,20,873,157,353,640,550,463,509,189,188,876,723,455,745,104,617,764,716,137,811,608,452,11,622,107,498,293,48,651,543,217,52,243,367,131,419,426,494,900,658,706,775,258,859,864,547,297,853,621,21,319,816,404,449,515,818,826,44,884,779,143,644,83,849,423,212,375,845,619,460,537,286,655,167,339,771,539,508,232,525,633,459,471,330,563,30,36,207,523,408,92,646,791,769,129,301,400,549,314,529,388,326,675,447,153,778,51,462,758,302,341,486,705,379,273,833,317,448,474,252,140,428,229,727,403,23,55,648,128,272,237,337,795,721,511,202,385,652,541,868,817,148,324,711,624,802,277,254,576,699,267,554,863,536,22,351,356,720,392,150,350,389,679,804,31,146,255,450,415,390,899,374,125,35,17,393,316,192,249,898,211,288,60,411,270,200,607,832,227,109,753,67,161,571,216,776,287,219,589,139,61,99,551,703,766,526,579,265,401,479,697,405,742,564,203,488,228,329,871,1,628,279,535,609,485,175,315,2,689,768,737,39,867,662,266,28,558,68,530,599,883,654,770,357,111,594,788,95,43,724,145,635,187,214,822,821,210,172,659,154,147,309,24,881,242,220,446,410,848,631,823,431,584,557,843,398,89,695,292,735,399,325,438,299,567,813,787,130,513,851,213,191,261,15,686,781,533,555,433,786,246,186,100,578,268,691,754,454,5,34,660,105,174,63,502,14,847,777,307,159,269,767,185,136,467,790,363,760,93,676,860,704,855,355,570,320,348,197,218,792,425,505,890,120,850,715,666,620,427,380,566,726,88,79,64,391,897,575,748,41,123,236,358,587,47,736,517,366,421,797,156,225,827,611,168,127,65,688,707,606,412,332,840,470,3,559,247,224,306,8,527,490,164,645,824,53,759,430,585,854,342,785,233,177,693,158,308,582,282,524,629,623,132,493,33,420,376,880,347,838,204,25,409,656,492,434,303,839,114,734,580,730,370,712,49,300,874,458,684,245,641,886,402,179,796,199,442,406,477,799,149,895,674,835,13,637,74,10,173,422,169,345,569,166,749,439,196,386,86,50,170,560,534,862,208,805,574,124,725,435,889,57,113,773,138,618,561,378,885,731,354,605,418,80,702,602,429,484,368,321,601,538,456,819,152,117,653,360,700,365,416,762,743,190,510,250,230,830,722,182,678,649,160,698,612,327,755,328,891,597,815,163,440,738,27,334,581,528,481,694,746,144,496,335,638,76,701,310,18,757,590,565,298,887,75,283,62,552,103,69,744,234,181,184,116,372,457,407,604,257,747,121,522,507,614,588,780,54,256,443,343,829,115,718,183,262,346,110,661,377,583,87,669,381,687,520,673,483,291,226,436,756,500,46,361,85,512,573,857,311,362,807,869,101,482,728,56,878,91,155,497,664,671,516,752,677,193,866,37,119,32,134,275,709,828,717,639,610,344,825,544,7,322,556,801,710,598,122,634,118,514,451,71,382,396,58,613,789,548,812,336,198,135,852,774,296,280,809,568,519,141,264,729,586,596,274,681,333,856,73,444,340,77,384,877,636,45,238,102,794,84,394,834,685,875,595,469,846,323,751,806,800,473,647,94,820,364,803,90,892 
739,7,233,654,249,856,380,156,109,733,537,340,329,799,745,86,563,813,116,861,525,483,184,237,107,708,338,674,503,240,311,147,212,474,375,634,235,488,847,602,578,506,90,94,585,137,229,162,310,281,241,879,726,607,333,840,357,555,23,154,68,652,542,121,219,135,187,517,863,629,467,106,174,277,521,171,704,83,662,880,244,493,539,559,134,624,502,123,182,197,25,689,164,766,63,193,597,175,173,639,507,97,519,723,853,257,504,562,477,484,758,71,267,513,87,685,589,305,355,824,568,386,769,151,75,515,202,849,80,399,57,609,798,574,605,575,76,816,247,111,520,694,614,869,827,319,289,874,328,347,374,592,729,701,835,136,889,320,353,841,888,448,387,859,117,894,205,620,409,596,633,583,673,893,323,631,188,295,514,309,264,359,611,485,169,531,365,148,696,635,641,270,378,224,878,262,344,26,460,95,672,296,74,755,724,252,642,403,49,545,298,261,550,710,773,3,40,215,44,345,702,530,423,382,405,19,496,4,535,601,850,204,352,418,419,543,688,222,273,431,141,860,218,768,35,429,396,397,416,586,293,832,554,200,364,119,795,882,150,753,618,17,830,81,582,140,10,52,112,864,598,700,408,478,599,153,683,667,144,207,145,538,443,613,168,681,101,243,79,155,143,808,887,576,857,604,232,334,676,487,255,573,895,315,612,867,303,142,332,18,342,54,42,238,379,630,572,70,838,569,814,722,511,872,461,812,424,421,637,653,366,288,820,890,732,236,440,11,556,336,687,884,263,253,594,469,12,98,362,45,693,5,846,245,271,834,897,552,221,14,492,39,750,679,464,31,110,58,59,616,765,51,276,127,317,322,544,227,53,176,384,128,149,494,231,892,230,819,314,619,628,486,41,420,579,621,779,114,6,747,186,735,736,742,833,591,350,38,495,479,709,348,394,170,453,714,195,498,435,426,316,138,577,280,398,363,691,580,737,775,560,326,900,801,815,786,794,161,368,284,125,297,198,194,881,325,66,489,730,774,37,643,661,809,663,686,349,875,622,24,883,422,451,760,518,377,388,557,401,877,854,166,99,777,67,526,826,132,678,510,165,617,274,428,770,124,508,20,698,475,512,848,500,829,266,92,792,865,413,664,458,167,645,402,656,286,327,376,746,793,697,706,203,803,265,781,73,139,764,852,391,776,627,209,15,8,34,259,817,130,2,738,465,668,731,360,337,636,684,725,822,89,433,404,471,516,442,251,491,669,321,50,179,821,445,886,677,1,806,210,844,885,191,260,713,61,632,565,272,800,818,65,528,845,385,682,62,649,728,532,392,192,292,811,225,343,372,522,454,740,269,805,626,84,383,427,13,290,93,789,615,762,449,707,313,21,837,341,248,712,717,285,778,36,784,534,570,283,234,490,29,558,719,159,452,406,468,60,256,757,77,346,33,157,178,871,400,780,553,646,690,463,651,666,571,593,85,324,335,441,146,46,278,430,122,163,411,47,425,228,410,354,437,339,695,223,876,836,291,749,692,595,670,623,250,547,831,214,499,476,201,603,581,600,330,759,790,32,104,56,96,610,64,351,133,439,129,648,716,69,246,659,751,318,699,529,447,185,78,268,783,160,625,763,206,566,804,189,587,43,102,828,548,540,650,27,226,304,608,306,381,158,482,371,407,301,782,370,473,239,509,196,294,862,564,797,108,100,242,105,533,807,72,88,417,703,183,756,810,438,873,432,55,358,705,549,279,275,898,258,444,551,802,658,858,312,48,390,172,300,389,412,457,711,788,118,825,527,796,606,287,638,647,459,505,181,761,466,446,22,665,754,721,785,843,103,480,434,254,851,462,30,743,436,588,91,470,891,367,584,590,734,131,523,675,115,715,791,16,870,28,497,744,567,680,524,823,361,546,213,771,126,718,299,393,369,208,899,82,868,640,307,220,660,842,727,199,211,472,356,331,541,450,180,455,787,644,216,456,748,217,481,308,177,896,657,720,120,671,767,282,9,752,415,414,855,772,866,302,655,113,839,373,741,501,536,152,561,190,395 
811,220,92,579,639,715,342,49,25,505,548,753,61,494,459,51,817,684,371,799,308,207,481,36,567,514,523,492,109,153,104,16,203,202,59,818,42,845,533,616,685,724,270,327,240,132,157,593,278,102,562,776,838,736,223,619,168,259,321,703,354,804,654,192,433,545,68,415,820,212,598,474,204,229,393,29,508,295,700,707,493,331,252,525,566,651,81,17,99,131,127,864,24,604,438,460,841,125,191,624,362,320,425,367,643,667,557,385,78,88,679,486,222,690,306,636,291,426,479,536,800,276,648,177,3,490,41,617,151,119,392,519,878,427,334,780,245,463,731,246,210,376,440,754,716,428,176,778,137,816,106,435,550,472,564,146,743,130,178,633,759,35,825,862,464,791,216,730,476,346,507,284,485,786,526,208,453,124,689,361,718,802,457,758,560,663,141,496,772,725,798,79,506,145,893,48,339,299,350,317,264,261,296,399,889,97,302,410,200,408,55,44,745,784,852,213,54,503,28,66,665,832,749,183,348,147,282,235,152,287,790,10,711,93,45,789,580,247,501,770,605,615,244,584,14,116,171,757,65,238,314,612,569,128,160,515,674,650,637,688,351,285,627,499,823,398,105,333,122,783,512,625,629,323,467,412,865,599,86,234,256,437,310,300,386,538,603,15,83,375,555,748,801,377,896,257,635,108,413,396,687,283,897,139,723,592,267,281,336,165,732,170,120,262,360,809,831,11,888,465,882,576,232,668,197,853,848,64,498,237,217,726,774,775,429,57,622,193,661,436,827,819,169,30,849,587,194,258,391,311,574,126,779,540,594,885,899,400,628,242,186,211,806,857,714,143,228,304,473,185,782,355,352,655,708,761,401,273,117,553,20,480,618,280,166,737,382,844,528,666,744,829,173,73,156,680,851,577,155,563,23,767,522,712,861,380,763,199,80,698,520,448,634,21,158,728,702,720,195,43,58,504,224,227,807,681,491,694,795,518,184,26,869,710,881,546,570,581,739,12,319,179,692,226,781,741,450,456,403,836,315,735,847,421,677,673,292,850,190,110,697,239,107,614,225,734,379,322,797,760,815,47,313,877,347,221,510,534,446,67,657,755,561,524,470,8,389,325,502,60,738,879,777,859,740,46,713,821,704,863,84,233,582,764,842,573,683,182,671,846,656,356,335,868,793,449,74,1,854,632,22,662,218,52,98,121,422,363,890,82,163,527,69,696,439,159,586,381,752,591,623,6,539,469,511,290,138,682,112,871,307,103,33,756,497,733,404,135,750,417,886,898,263,602,837,324,721,180,206,590,706,444,390,883,575,434,471,810,860,773,454,236,275,747,9,483,653,265,298,409,96,640,675,447,374,387,243,254,94,642,395,638,537,500,475,272,796,50,620,301,717,513,828,118,874,792,423,39,647,181,161,358,597,431,509,372,56,286,289,762,452,162,63,430,495,894,241,814,478,219,742,164,589,332,812,406,394,565,556,188,551,53,269,835,115,18,455,461,722,353,664,338,855,75,856,13,803,892,411,788,288,660,833,384,316,544,641,189,113,198,746,312,458,870,40,543,709,87,85,172,136,341,149,133,468,559,27,686,365,248,328,260,611,277,858,407,826,175,77,318,875,294,607,530,672,364,489,517,187,253,140,873,297,369,808,214,4,445,419,167,100,420,326,558,255,751,529,359,609,5,547,274,357,552,588,771,402,111,670,397,337,824,196,532,621,830,95,840,606,293,699,578,19,834,466,658,231,366,900,626,482,209,880,345,895,571,154,373,2,630,76,645,205,613,787,7,691,368,766,521,794,303,600,785,279,451,649,201,72,114,678,516,370,876,887,266,652,37,305,891,867,70,443,134,215,271,554,765,644,541,705,805,250,388,309,405,866,531,340,608,378,669,659,349,442,330,727,487,230,646,601,38,839,344,416,488,631,813,174,595,477,572,610,251,549,693,568,31,729,91,424,418,142,414,123,769,268,62,32,884,144,432,343,89,701,329,872,484,719,383,542,90,441,676,34,768,585,596,150,249,101,535,148,822,843,462,695,129,583,71 
749,339,176,816,861,580,759,377,55,637,68,624,589,218,239,212,854,796,164,843,825,228,240,57,514,309,579,813,317,487,642,414,434,659,56,379,196,883,368,619,667,668,63,103,612,382,325,694,639,626,603,810,575,202,4,688,597,489,295,723,315,556,443,45,100,393,565,848,842,187,708,369,40,5,859,53,336,318,713,677,152,93,643,95,728,331,348,456,87,197,490,595,298,207,407,656,844,424,88,231,714,616,528,270,736,444,180,865,457,149,345,622,7,770,222,229,474,826,594,689,629,8,837,97,208,615,553,362,543,35,602,671,535,874,573,757,159,359,793,554,555,151,61,404,764,653,83,864,127,879,204,27,525,237,564,48,380,24,548,789,395,282,866,846,342,836,600,162,679,423,60,254,126,401,547,216,157,79,773,113,534,838,372,812,416,243,14,281,805,398,711,247,743,28,590,466,750,286,452,136,174,754,661,435,891,576,120,856,301,872,515,285,608,215,726,391,235,370,352,119,663,839,399,108,358,482,117,432,403,670,349,440,385,621,538,305,483,168,110,756,753,394,658,562,224,635,705,772,531,90,546,782,508,177,510,686,641,521,551,337,223,492,453,598,427,258,251,219,350,819,50,252,178,613,66,366,833,572,17,647,227,25,806,84,631,638,593,167,69,760,783,727,870,147,894,99,571,102,815,695,849,480,628,465,299,475,37,685,11,211,817,563,266,23,744,777,634,161,880,275,673,652,716,706,558,684,892,163,824,181,742,798,654,752,491,583,209,271,118,39,472,845,131,89,807,135,485,122,768,496,766,374,356,319,468,888,900,739,829,244,322,105,683,718,745,494,592,545,585,273,681,246,463,709,855,831,30,130,188,682,214,418,614,693,296,735,75,696,867,437,587,851,109,518,392,156,371,522,213,767,493,334,128,220,822,703,850,537,194,620,179,86,184,42,314,707,784,648,13,459,570,516,85,1,878,738,775,747,284,690,148,145,895,725,801,741,201,632,762,415,577,664,644,520,376,530,584,260,596,650,333,293,724,292,383,512,697,875,203,242,507,599,357,852,43,363,488,758,884,885,862,256,77,582,241,38,486,461,330,263,662,397,808,505,455,264,31,447,425,189,335,877,500,419,828,74,186,860,676,841,236,294,618,795,388,470,478,701,761,832,451,261,98,720,857,361,54,268,790,734,320,190,171,104,248,238,477,96,731,107,389,400,506,678,106,665,183,737,269,748,588,312,304,310,417,544,277,429,115,765,449,198,182,365,586,691,504,313,495,205,797,898,154,290,567,511,680,405,6,412,649,355,871,568,604,288,471,479,519,704,830,9,172,291,323,302,276,413,740,133,280,195,351,627,12,221,259,715,375,804,781,232,746,702,332,249,847,297,840,287,717,640,439,192,501,347,265,549,509,660,175,811,396,210,611,72,230,132,15,776,402,233,92,193,367,893,625,431,70,381,552,217,81,94,780,809,386,484,787,191,442,91,2,853,78,116,699,623,473,503,329,779,896,609,566,144,881,886,62,338,185,560,578,44,143,59,633,601,225,26,755,657,124,863,67,307,481,460,34,527,36,645,557,33,672,279,49,234,539,606,719,326,134,166,422,607,882,140,129,591,436,58,710,827,523,364,438,785,173,200,226,763,541,16,669,150,165,47,733,3,29,199,712,153,721,636,316,698,497,123,65,21,51,306,532,729,540,569,536,346,18,821,446,561,794,406,20,426,803,255,529,769,353,890,751,114,19,321,899,303,73,64,771,137,835,818,76,300,101,274,445,778,22,344,245,311,341,722,692,410,868,121,146,802,513,630,328,605,139,360,257,138,142,869,574,125,428,390,464,823,897,340,354,408,155,111,792,409,448,550,687,814,52,834,158,267,411,651,499,476,502,610,774,82,71,791,387,700,581,559,799,41,788,732,786,646,441,889,46,542,373,262,730,170,458,800,343,524,253,430,674,283,617,112,467,666,454,141,384,873,10,80,378,32,517,308,820,289,655,450,324,206,160,250,327,876,533,421,433,469,169,420,526,278,858,887,675,272,498,462 
423,160,354,430,57,829,279,300,364,692,639,88,553,884,837,244,123,685,313,715,606,605,176,506,79,747,142,587,812,299,530,334,219,537,677,521,414,48,887,345,285,183,288,215,664,217,290,3,262,442,71,810,509,602,641,800,385,627,120,42,241,387,344,297,353,122,325,402,700,836,177,90,316,498,461,422,740,107,370,864,393,596,595,626,4,549,804,375,556,304,164,409,331,821,74,67,138,248,298,630,396,85,416,847,815,152,472,551,781,736,752,15,482,172,330,702,646,195,168,817,239,585,625,277,351,324,398,881,158,660,34,431,670,508,621,197,319,883,1,119,611,835,756,882,720,162,432,775,704,8,735,761,689,783,842,310,899,573,368,777,898,748,10,634,283,844,209,673,205,623,766,687,770,895,147,866,232,480,173,332,296,11,603,103,106,453,678,270,357,500,287,666,151,399,790,406,253,167,381,184,867,321,98,834,190,365,808,306,113,489,447,427,260,730,474,131,189,126,289,568,528,95,266,514,336,125,617,117,838,637,857,478,228,644,681,494,638,565,411,77,6,886,202,732,264,600,534,60,714,786,227,771,363,581,413,9,707,894,163,743,717,62,843,14,412,350,211,203,206,760,716,719,405,397,708,111,233,532,415,204,191,703,439,795,80,582,69,484,272,116,31,662,820,663,394,794,63,533,661,322,27,628,823,371,522,889,433,148,501,134,39,137,153,545,261,237,224,348,418,561,564,636,601,856,504,597,25,655,488,854,456,12,695,868,779,452,359,108,622,554,540,787,653,511,145,471,96,361,220,52,541,130,853,404,127,419,520,436,36,144,813,212,492,305,118,128,139,59,23,865,578,129,208,32,43,17,697,559,178,16,658,87,28,473,294,879,307,612,150,477,360,55,271,588,750,657,741,75,161,642,389,688,833,765,555,519,29,100,788,181,789,376,417,479,577,486,2,182,682,645,495,58,826,579,26,73,620,258,718,744,839,607,875,676,448,738,862,20,50,542,92,343,38,193,891,362,22,434,767,552,84,576,273,890,575,580,295,690,830,257,893,400,609,635,792,292,274,557,35,803,671,314,281,586,221,782,869,82,734,772,21,466,72,240,822,467,656,133,710,724,358,499,265,780,5,421,845,712,105,201,665,200,468,53,566,136,440,457,567,469,631,828,268,512,19,860,293,476,407,819,713,827,840,379,226,146,76,311,496,491,124,757,763,420,870,445,441,614,669,598,802,382,308,309,318,516,784,254,725,243,275,174,342,816,245,874,706,132,758,435,535,462,526,317,490,56,335,711,591,818,733,112,517,723,143,759,61,449,560,180,229,403,328,825,574,252,463,593,392,873,390,861,538,24,608,388,115,301,207,680,562,807,185,649,222,154,651,705,49,858,497,99,691,302,675,437,590,569,94,583,256,531,694,340,238,475,778,223,539,529,199,721,242,186,338,487,366,709,652,798,525,791,753,849,159,503,81,156,93,572,91,249,523,33,464,374,218,45,214,149,278,276,46,599,451,592,755,378,527,699,880,346,391,796,303,668,793,225,809,722,18,624,654,104,640,806,754,171,323,121,369,610,157,686,44,320,459,629,785,102,187,799,831,349,613,410,41,548,383,198,729,401,373,701,196,589,855,51,851,246,444,558,571,745,315,170,483,450,513,616,679,179,395,355,386,68,841,247,263,594,619,425,380,897,352,633,54,235,89,97,751,544,114,37,110,604,424,672,728,455,878,141,255,30,674,618,615,236,524,267,493,801,446,797,505,86,234,329,485,284,543,101,683,693,814,327,846,438,650,536,13,805,684,78,510,66,768,515,764,135,647,859,876,341,698,372,291,742,213,518,40,269,811,547,773,337,192,896,188,408,280,384,282,443,848,312,667,776,70,888,109,216,563,696,832,502,824,155,632,65,659,367,339,210,230,166,231,863,326,885,643,250,7,850,871,507,165,454,460,481,169,546,570,333,584,648,749,356,731,251,429,550,259,377,900,762,286,140,426,852,194,175,872,465,737,726,746,892,347,774,470,877,428,739,64,458,47,769,83,727 
595,114,270,360,41,830,86,103,291,601,746,344,250,863,803,118,316,621,436,686,292,475,366,384,265,745,228,395,627,126,226,109,108,267,524,787,275,245,871,454,430,379,367,329,427,87,178,80,96,152,169,774,741,786,600,730,164,451,162,187,320,648,552,322,513,338,85,181,701,711,247,224,352,502,203,326,717,112,466,834,567,576,349,699,48,673,603,145,446,225,28,742,148,850,153,46,373,89,306,725,240,6,408,783,736,433,626,256,543,562,811,51,487,272,391,789,494,38,195,706,515,584,505,268,192,341,132,874,13,536,17,410,869,242,453,380,359,841,119,18,390,795,771,891,666,110,401,700,579,168,572,780,664,765,798,314,899,484,190,663,897,523,193,710,425,804,44,822,157,519,802,594,788,898,254,767,413,406,269,438,545,185,583,220,321,662,582,448,461,679,499,481,97,388,895,179,71,215,365,282,793,81,2,752,529,121,729,84,63,223,196,258,473,856,672,78,73,298,105,419,560,332,570,443,362,15,553,62,654,396,889,214,525,328,387,768,651,512,592,261,59,879,32,698,101,295,218,209,440,687,142,658,491,457,234,36,695,887,389,810,641,19,836,69,727,460,123,302,83,714,782,792,638,277,739,204,504,568,376,33,201,734,156,733,30,527,238,315,221,8,31,659,775,635,705,703,251,420,415,216,55,464,900,212,709,877,459,7,565,91,117,4,60,544,88,450,557,222,683,588,824,589,308,805,310,766,197,463,330,759,159,120,753,844,676,173,610,75,779,634,781,778,539,370,434,646,14,416,68,24,456,34,885,577,340,642,825,287,43,146,661,241,645,606,259,10,11,12,56,748,647,227,207,147,76,122,712,533,104,67,439,208,175,243,199,851,445,754,37,625,566,283,273,317,538,796,876,257,113,521,134,819,837,843,681,346,107,3,613,383,801,528,643,363,414,554,106,386,609,371,213,151,740,550,149,177,444,357,853,622,719,418,857,640,715,611,866,116,198,274,21,100,232,72,894,623,45,507,620,722,95,750,555,862,690,657,92,713,696,172,890,219,403,486,707,596,285,304,130,684,660,163,382,833,324,691,828,252,680,574,174,675,40,355,762,278,669,138,670,531,639,682,561,886,98,337,880,697,294,470,462,154,497,200,820,331,633,166,532,608,693,763,369,751,165,809,249,262,624,738,465,864,720,296,131,74,135,431,818,400,42,728,498,508,852,158,636,399,808,514,776,184,480,429,441,374,619,526,569,598,239,93,217,861,303,849,605,53,814,530,790,800,501,559,743,27,476,506,549,816,724,248,229,882,264,704,150,716,813,398,64,452,358,872,333,397,668,447,161,845,260,875,677,23,644,458,115,65,57,575,342,838,167,511,356,182,629,495,39,772,546,128,848,235,868,723,587,281,336,300,233,266,731,478,289,534,586,312,558,564,327,581,176,347,488,744,180,839,694,628,655,649,826,797,402,293,136,339,77,455,297,171,540,255,409,253,141,54,496,125,535,70,202,301,755,407,692,688,615,832,823,510,678,791,364,770,761,47,632,630,186,405,665,423,471,806,799,22,305,5,375,426,9,593,20,518,361,769,637,1,29,656,859,381,842,350,189,467,318,94,888,469,422,509,449,517,807,16,737,288,368,757,417,749,563,188,309,591,351,548,551,334,210,590,139,319,827,99,442,392,732,482,489,892,477,653,61,35,335,183,726,617,26,133,129,821,377,840,607,437,873,140,66,194,483,777,578,299,831,522,618,702,735,747,815,49,237,385,325,541,313,144,614,756,881,127,865,263,674,571,143,721,785,246,378,50,817,290,599,25,773,855,835,602,884,428,503,492,160,794,236,124,758,348,650,404,155,896,432,472,424,516,345,205,784,435,867,671,58,878,137,393,520,667,829,244,858,111,394,284,537,311,556,52,82,170,468,812,353,860,616,485,79,760,846,493,411,191,708,280,102,547,286,490,354,689,585,271,474,573,421,612,279,343,893,652,580,323,542,764,412,90,847,685,500,631,718,883,211,604,372,854,230,870,307,206,231,597,276,479 
784,1,260,744,348,850,462,221,100,765,429,393,317,719,688,136,667,845,53,879,532,463,190,212,127,689,438,746,334,323,293,184,287,504,269,574,259,575,786,666,656,587,70,94,601,224,283,267,410,306,341,885,747,519,204,846,408,573,25,187,16,673,566,120,108,134,229,594,881,528,567,160,168,218,578,151,692,156,736,874,121,443,582,487,237,619,347,118,80,241,54,694,207,703,96,319,685,252,194,605,583,216,584,676,862,258,465,597,332,379,741,133,214,627,22,652,603,395,453,830,616,284,819,185,63,598,209,800,154,309,130,684,757,621,646,665,17,759,342,223,546,620,521,833,853,440,282,886,162,421,199,459,751,668,820,140,848,266,409,856,854,330,468,880,47,894,296,499,513,610,526,556,614,857,416,480,169,279,629,320,144,449,628,593,189,489,236,81,771,641,706,131,488,195,841,307,439,9,508,114,549,345,145,735,791,308,554,466,105,596,333,289,612,625,807,8,85,255,43,316,761,638,371,353,446,61,457,14,355,631,797,179,315,364,336,428,711,73,158,522,232,816,304,774,50,402,383,500,300,482,390,851,607,78,413,228,813,859,98,731,590,69,803,165,517,51,4,24,196,878,481,671,301,540,510,226,763,713,103,295,215,419,478,516,285,739,116,238,89,247,240,836,896,533,882,509,298,321,727,565,361,591,864,368,576,834,272,242,251,48,456,109,95,110,470,715,602,46,869,563,808,766,531,873,491,826,494,356,710,536,370,394,832,892,733,233,376,42,432,192,678,893,107,234,674,380,58,33,464,115,762,38,796,99,326,871,898,633,310,13,273,31,782,737,562,83,219,137,112,451,802,62,369,175,437,431,442,79,97,280,314,197,220,545,286,888,191,837,405,643,682,572,12,407,538,530,714,138,10,783,198,696,654,683,865,651,460,101,343,548,659,305,271,117,445,770,270,568,318,386,331,227,417,174,492,472,753,677,675,795,381,278,900,829,843,815,726,254,471,274,245,352,248,291,842,164,132,484,749,789,87,604,718,724,661,717,436,891,523,2,863,479,435,811,362,290,452,609,498,887,877,217,74,764,23,387,790,183,672,397,230,611,400,485,754,86,414,11,704,382,474,875,501,768,366,52,707,884,497,750,403,243,695,506,571,344,148,388,793,844,708,657,205,822,329,745,75,104,812,860,276,699,520,213,5,30,57,263,818,59,20,730,337,740,623,384,200,686,645,776,821,82,433,427,514,560,249,170,392,723,398,92,210,775,527,883,690,21,788,76,861,895,64,153,728,141,702,542,155,769,827,67,561,810,473,663,93,639,701,624,483,125,327,758,142,373,203,543,512,637,313,729,622,186,231,430,28,365,172,828,680,709,555,760,363,15,866,206,358,649,773,401,752,6,738,448,559,246,268,502,3,615,722,66,535,354,324,36,135,809,34,161,39,166,65,890,461,755,476,600,725,261,537,551,658,653,157,385,467,423,180,60,188,511,55,171,515,90,425,325,375,434,496,288,693,124,889,867,147,700,553,647,687,505,277,420,835,294,328,350,244,644,534,681,257,732,778,72,102,123,56,655,119,201,262,426,88,599,721,143,349,617,664,338,636,606,541,68,27,377,691,77,705,801,122,581,767,302,406,7,35,852,589,399,698,29,208,211,679,202,265,178,547,297,458,359,748,469,518,173,372,106,264,779,618,831,222,167,281,176,396,849,152,150,529,662,146,720,840,447,855,539,84,411,756,422,139,340,897,193,367,412,825,586,870,441,44,424,126,235,391,525,339,697,716,149,777,608,824,632,357,557,577,558,544,303,742,503,299,71,630,669,613,839,798,37,495,311,346,868,524,18,712,444,507,32,579,858,389,642,670,799,129,592,585,40,626,806,45,838,26,564,794,493,569,580,785,477,570,250,814,113,781,404,490,475,111,899,49,847,650,292,322,552,805,787,177,181,335,374,450,550,455,91,454,817,635,225,351,804,163,418,378,159,872,634,780,128,734,743,312,19,660,253,275,876,772,823,360,640,41,792,415,648,595,588,182,486,239,256 
317,569,171,19,417,522,34,90,403,169,815,546,316,779,634,162,296,155,808,231,322,260,585,399,687,521,163,38,756,24,387,178,36,92,566,854,181,482,730,180,205,292,646,581,135,63,74,405,3,240,289,297,648,841,744,294,40,139,586,723,776,594,471,450,819,701,172,100,252,639,108,494,436,590,165,377,487,243,157,462,860,466,72,694,426,577,699,385,727,154,369,761,143,759,524,137,357,48,306,659,4,134,107,592,283,715,633,238,693,515,658,451,572,133,805,692,164,176,82,277,518,638,69,286,441,55,290,738,128,537,331,52,883,183,101,250,747,665,486,66,114,677,728,793,224,25,326,218,809,612,782,796,329,573,461,388,876,492,23,191,867,579,555,349,845,339,30,881,16,194,803,362,681,873,214,790,641,324,127,430,893,517,300,221,627,713,688,831,149,604,347,752,39,428,897,116,37,655,106,472,749,219,271,464,549,125,620,210,211,200,126,147,392,878,474,513,208,449,432,308,195,401,764,304,158,291,413,512,810,91,843,371,765,404,459,884,376,842,851,361,506,737,60,346,345,272,298,323,601,622,12,185,275,773,42,335,307,714,850,668,438,360,596,424,853,855,509,705,78,248,799,671,838,7,706,319,425,246,526,62,226,772,201,664,112,138,726,265,356,269,440,247,280,499,562,624,491,303,111,1,431,148,900,47,712,702,476,152,673,359,330,220,188,788,15,302,636,490,511,393,768,170,168,391,54,545,608,370,21,703,287,407,410,447,348,354,731,364,864,847,771,301,823,341,452,760,309,777,28,245,51,333,818,890,496,420,708,5,445,547,857,553,400,542,225,239,94,197,457,804,327,576,89,670,338,416,720,849,234,311,406,435,551,29,96,468,611,497,279,484,398,448,628,173,340,832,887,662,478,80,253,745,770,806,372,18,384,217,676,320,682,617,861,485,175,199,558,314,637,274,212,381,767,716,473,264,50,129,852,179,802,373,414,223,574,159,789,423,350,266,86,117,605,22,859,896,390,374,237,541,375,732,495,780,560,434,20,273,623,575,736,13,187,65,769,846,85,73,446,196,216,131,674,826,742,722,554,556,429,552,580,653,186,227,456,529,678,645,382,470,750,415,661,879,479,598,863,241,305,419,318,130,160,332,886,433,898,207,122,281,488,571,475,584,684,583,422,467,394,284,565,824,666,249,523,358,602,483,792,700,409,454,630,190,820,153,880,53,791,88,412,444,469,325,278,56,821,872,505,599,61,229,174,751,99,489,244,383,588,871,640,709,837,870,672,228,230,261,734,568,353,654,276,875,203,477,557,741,825,313,103,635,256,813,544,378,894,150,45,795,123,827,593,288,822,336,481,109,87,118,41,743,35,95,334,589,182,710,352,609,177,132,811,618,885,869,380,437,606,141,619,102,504,848,84,530,669,657,781,167,800,830,427,559,877,528,11,778,685,421,455,833,844,754,368,44,365,386,119,262,610,367,696,543,704,254,6,531,625,81,690,83,667,460,758,587,206,550,856,784,817,312,656,766,344,835,363,43,755,647,689,70,567,533,502,600,561,267,418,161,644,64,202,748,215,535,548,735,293,198,68,443,836,268,889,27,453,797,649,9,899,775,77,104,866,236,539,270,807,698,740,538,79,786,442,519,295,785,14,680,607,532,10,757,75,570,626,2,366,501,839,724,563,858,258,263,222,184,597,379,762,242,120,525,121,840,458,829,140,233,650,71,257,697,76,868,787,209,729,828,675,719,621,614,733,166,498,232,439,816,142,146,616,564,874,192,739,8,299,310,632,613,753,315,67,145,652,59,683,235,718,801,798,395,888,783,507,595,57,651,686,516,503,124,534,763,31,862,527,189,193,204,500,110,691,834,895,251,508,707,660,282,105,578,794,97,717,32,115,582,98,397,343,33,26,58,865,328,642,615,342,663,355,679,603,93,725,408,882,113,17,337,151,814,144,351,285,213,510,520,514,591,136,396,812,411,480,631,259,493,536,402,774,891,629,156,389,746,46,321,711,643,49,892,463,255,721,465,540,695 
420,764,127,359,870,59,509,281,353,28,548,777,504,209,85,289,733,140,815,225,678,26,644,218,846,65,453,314,570,229,615,380,245,310,298,732,132,869,105,267,394,538,628,599,123,287,161,816,363,525,637,110,516,649,495,29,292,24,757,890,847,536,400,397,768,819,478,573,244,196,472,687,350,370,636,258,57,468,322,30,821,126,174,377,836,347,517,565,646,164,695,720,237,247,742,598,737,266,271,355,307,601,100,66,53,807,431,665,592,190,228,820,362,540,824,312,9,739,388,27,625,358,252,270,514,215,542,128,520,222,748,169,760,653,46,561,776,81,862,466,89,137,296,219,114,422,138,199,682,891,659,411,40,72,6,326,330,217,201,97,301,398,884,383,872,118,435,711,391,8,404,12,194,337,457,445,641,121,543,316,895,873,54,702,773,539,407,856,406,424,510,699,523,260,813,251,522,791,93,521,309,673,735,5,842,409,122,736,462,679,293,134,494,643,474,750,408,590,618,103,177,790,775,83,124,651,73,769,667,162,379,496,795,547,486,823,94,809,794,734,864,116,507,31,492,460,591,754,624,165,264,92,236,729,145,789,76,82,889,238,34,713,64,784,796,859,677,788,303,154,448,261,756,158,308,511,707,133,399,498,332,427,640,191,562,86,878,178,371,786,861,149,187,99,771,155,743,96,356,306,866,14,838,148,501,77,263,660,449,546,818,630,426,657,477,576,725,537,684,108,680,74,429,32,130,421,893,109,374,254,655,832,153,61,1,580,612,582,694,722,606,189,726,170,755,584,686,778,544,613,205,627,336,881,645,639,828,288,871,714,738,622,369,650,609,604,531,616,808,503,241,717,279,886,841,852,348,801,386,762,226,652,829,317,157,47,532,375,826,328,390,839,689,231,78,571,704,840,648,143,470,414,257,415,436,197,850,619,418,528,175,459,798,368,42,246,879,545,300,239,413,675,396,493,876,705,203,443,572,48,488,179,284,119,623,75,234,783,765,343,512,476,853,342,319,898,770,192,10,430,610,487,635,224,323,216,441,338,198,710,101,210,69,295,425,835,207,367,875,240,302,200,668,708,817,346,19,749,20,294,855,483,759,349,3,564,335,831,25,182,663,554,693,611,868,563,444,255,642,724,88,272,156,774,810,579,896,549,195,450,193,58,439,541,894,43,417,506,497,50,456,364,227,160,685,560,822,387,751,661,697,37,574,278,324,463,837,211,491,144,21,558,412,291,259,63,696,880,147,752,180,389,185,273,311,41,2,634,184,858,620,827,804,863,551,595,365,51,527,60,79,805,721,701,480,33,792,629,698,578,670,485,172,360,555,381,877,16,433,276,107,333,376,730,607,183,676,552,351,159,318,250,473,91,382,731,280,621,851,102,235,519,524,712,761,812,84,567,781,341,715,471,111,848,334,290,452,718,632,274,865,745,484,605,882,700,262,467,345,62,313,691,499,268,690,405,593,533,666,45,780,432,479,874,654,221,402,849,688,373,669,586,899,617,683,500,248,727,758,469,393,339,600,340,283,569,23,437,530,277,887,129,188,844,242,90,141,589,361,557,583,95,603,515,709,454,461,451,4,664,575,38,392,152,800,125,867,746,674,357,830,719,265,233,888,22,18,806,490,779,766,489,52,423,518,614,232,656,223,438,327,577,344,662,465,814,70,416,428,378,626,596,401,305,249,202,585,559,785,566,384,366,455,811,633,681,320,594,136,67,68,556,482,897,142,706,608,285,833,834,446,329,647,106,747,740,534,167,372,803,139,638,243,163,529,354,206,297,166,115,892,117,395,753,55,587,186,204,434,526,403,282,304,631,763,782,481,502,253,672,900,703,15,71,98,767,513,286,602,173,410,442,447,553,176,857,845,13,793,80,854,464,212,146,325,458,214,475,112,802,168,321,508,535,550,419,883,213,597,39,49,658,799,208,44,275,797,588,825,131,440,87,331,787,104,181,11,150,385,772,352,315,220,269,151,7,692,728,299,17,581,568,230,860,505,256,35,113,135,36,716,56,171,723,843,744,885,120,741,671 
680,618,82,520,851,298,501,165,143,186,545,808,258,148,99,177,832,413,678,544,505,25,647,56,803,184,591,391,200,191,372,180,266,193,58,778,49,888,122,514,619,703,516,550,86,230,166,797,367,318,705,410,716,681,295,236,202,51,660,878,715,709,585,306,661,772,300,554,578,20,638,674,271,243,564,96,181,479,570,250,713,158,141,428,810,510,156,308,369,142,533,817,106,328,700,652,837,208,228,473,347,586,272,33,257,789,527,561,227,11,412,770,238,692,667,433,52,682,532,164,764,244,459,222,263,398,301,209,431,53,701,377,830,577,110,732,602,60,860,446,64,85,255,364,401,526,112,468,361,893,314,319,240,135,145,220,344,87,170,302,345,75,889,689,773,423,383,691,521,94,371,27,216,406,595,117,622,63,694,352,876,877,190,781,748,632,163,766,654,606,720,366,623,168,862,116,511,666,211,494,61,542,655,32,890,249,47,663,424,587,131,50,684,677,726,616,281,649,390,18,451,855,782,62,253,523,90,637,287,149,528,215,785,269,179,805,320,581,695,806,843,188,450,242,288,223,388,807,284,40,336,294,455,448,120,761,360,173,861,429,67,635,219,753,815,729,485,665,241,464,421,400,717,232,296,572,836,396,203,440,343,355,524,78,607,305,850,29,229,699,828,460,480,151,884,42,771,30,335,389,858,57,879,118,633,136,224,582,373,444,834,518,333,452,490,760,819,279,854,254,824,313,277,226,109,688,895,6,449,39,467,846,470,329,66,340,662,500,670,604,733,529,445,35,848,621,567,596,559,599,414,489,471,801,704,833,896,359,853,592,403,502,644,799,734,487,474,594,751,153,560,642,375,868,839,859,280,590,299,765,19,669,813,278,169,268,512,664,775,539,630,873,508,71,3,624,754,802,506,354,234,574,262,522,712,273,857,540,88,687,231,476,740,130,21,546,867,702,124,43,192,690,127,315,883,769,323,645,668,189,171,17,609,427,820,260,265,777,809,95,536,357,844,327,408,885,727,324,77,679,583,629,790,84,517,462,417,646,16,503,218,194,14,453,161,814,341,307,880,463,608,108,558,811,686,107,83,737,114,23,838,651,759,513,70,276,285,711,175,9,719,791,763,724,866,316,486,580,743,840,4,293,411,822,816,673,869,376,456,710,430,28,432,767,892,54,264,207,741,245,119,426,26,73,478,436,745,407,863,384,556,176,217,547,178,303,756,252,615,379,221,286,543,425,439,128,339,852,10,856,274,297,80,441,475,267,92,492,437,718,825,894,597,795,730,588,613,7,326,204,368,728,610,804,639,100,735,742,786,736,643,365,214,472,233,497,812,55,380,139,65,356,551,706,482,289,573,465,256,394,330,363,614,261,495,626,575,275,831,15,534,631,683,496,823,800,196,283,780,182,537,419,317,722,498,304,105,598,457,565,776,438,322,611,792,864,246,648,353,1,563,342,493,101,798,415,617,650,708,34,755,270,351,887,399,74,507,779,744,442,707,549,899,337,787,183,548,870,627,620,115,562,757,206,310,552,248,370,174,97,881,146,259,886,41,201,387,435,198,469,385,152,484,235,697,569,197,568,38,579,555,5,405,199,826,282,882,499,422,395,845,557,504,348,865,123,76,768,162,625,530,746,98,290,723,509,44,576,311,212,89,593,338,658,381,829,150,447,600,93,589,454,386,187,481,525,628,409,788,601,239,657,418,774,714,750,157,721,346,134,247,671,292,898,251,675,392,374,897,784,458,91,827,68,874,749,402,291,104,762,45,725,102,362,612,147,350,331,488,312,891,46,466,821,111,641,358,144,103,416,541,225,121,818,841,605,640,154,332,847,900,477,69,36,13,603,636,349,693,404,634,685,378,519,48,731,849,172,698,159,752,653,461,138,213,393,397,584,81,793,382,137,738,538,571,553,842,483,434,132,185,676,794,31,129,535,758,321,783,59,531,210,167,672,37,491,12,79,72,872,195,334,325,140,205,24,835,696,566,22,656,420,133,796,155,515,237,160,125,8,443,113,126,739,875,659,871,2,747,309 
131,809,226,143,822,74,422,329,498,11,633,630,639,453,278,341,470,16,854,17,717,147,613,396,823,141,256,191,813,231,735,480,215,338,540,698,261,742,335,95,146,292,706,641,218,279,185,702,271,615,468,9,336,658,696,13,307,100,779,881,873,351,257,477,829,799,551,510,19,491,228,616,409,515,606,419,121,399,86,56,863,240,216,428,767,266,771,695,810,222,733,559,339,366,707,444,462,272,324,337,209,526,48,300,23,755,401,652,811,445,239,760,497,267,872,332,91,661,204,66,392,505,98,319,660,80,648,306,519,443,676,51,683,618,81,286,846,322,775,384,171,374,455,327,3,245,250,50,842,862,838,590,24,210,84,397,535,393,214,43,483,651,817,83,891,22,370,762,202,59,556,144,340,488,280,737,646,273,269,312,898,776,73,431,719,512,636,869,114,316,234,847,330,382,764,343,432,827,75,538,598,663,710,157,589,469,364,693,430,643,371,254,275,681,187,790,446,506,732,295,20,550,716,207,101,659,199,795,851,211,439,650,754,687,673,831,69,880,840,521,818,305,463,18,609,584,668,546,806,424,167,47,108,853,188,679,5,252,895,247,156,675,148,701,752,888,748,826,302,8,578,304,782,120,421,417,379,31,562,458,298,580,642,429,427,6,864,349,452,741,816,4,40,213,408,411,623,253,314,162,803,93,808,194,449,265,353,610,572,576,647,631,423,796,365,284,547,682,317,132,473,2,485,46,182,169,861,334,227,561,703,700,35,33,88,697,607,604,744,821,514,10,860,357,499,614,669,849,415,560,62,653,376,897,529,260,507,180,824,749,877,685,118,369,354,624,486,554,751,783,58,723,166,868,718,730,495,875,403,629,459,564,758,287,179,67,577,127,766,197,177,655,773,360,289,627,690,814,699,54,585,375,436,467,109,137,721,608,680,308,315,492,825,555,151,49,837,309,518,420,525,553,665,667,787,481,119,163,588,39,768,387,77,1,311,41,416,656,571,494,404,501,791,262,441,900,694,165,92,189,586,448,356,524,230,103,359,53,493,794,255,195,221,125,672,830,110,344,781,105,44,283,736,573,848,594,164,678,107,563,793,381,612,174,117,740,460,850,71,433,601,186,557,603,800,734,570,26,406,412,299,236,28,569,792,442,899,605,21,122,129,277,440,263,883,232,530,677,183,30,689,478,531,303,763,611,820,388,545,812,724,99,788,72,575,520,865,178,496,25,32,704,321,205,149,96,856,886,398,513,124,413,301,310,130,60,55,662,140,890,325,523,874,876,368,533,115,212,711,135,7,804,720,599,281,142,759,516,587,320,558,626,192,405,746,293,893,97,377,541,200,484,294,638,756,170,688,537,347,52,243,345,249,45,285,778,38,819,765,372,36,331,464,807,705,798,123,728,692,386,797,434,116,878,136,342,709,774,772,42,884,870,591,595,896,313,237,394,418,290,139,855,628,543,390,326,534,373,472,158,727,527,635,769,802,355,223,828,567,241,597,536,887,761,532,715,57,378,843,438,686,138,414,549,282,670,12,385,784,517,866,159,274,640,489,193,128,637,474,548,731,104,621,747,579,367,645,437,112,632,503,244,539,184,780,65,750,858,801,233,833,815,76,160,894,70,161,708,770,832,857,201,90,622,264,664,407,738,134,634,568,565,258,666,450,674,219,291,246,600,712,729,457,574,102,15,475,596,684,504,593,79,402,743,389,617,466,544,64,111,155,333,582,889,113,757,789,203,522,839,500,592,352,328,471,583,620,133,566,805,270,391,476,173,602,447,296,208,14,85,871,358,451,528,87,435,224,242,691,552,383,487,581,276,671,852,350,739,168,348,885,786,153,176,346,841,297,456,479,68,150,126,509,508,454,882,844,27,785,251,859,238,34,318,542,426,248,288,190,714,61,461,196,425,410,217,892,89,726,175,78,619,657,502,181,63,777,725,845,235,259,106,400,836,229,29,154,268,625,465,482,363,145,395,361,152,362,713,94,198,511,644,490,879,745,82,37,323,172,206,834,225,220,753,654,722,867,380,649,835 
808,301,58,621,767,616,429,72,13,455,498,772,133,379,346,63,841,668,422,770,408,127,488,11,672,415,559,521,92,176,204,53,222,238,17,801,23,871,424,608,692,745,279,350,226,155,144,708,335,189,643,733,815,689,170,567,210,212,389,796,440,776,641,177,451,625,147,509,802,104,658,505,173,168,493,12,416,328,693,615,519,246,257,459,705,594,71,79,107,98,240,848,34,503,496,538,851,146,163,561,400,407,399,259,588,715,517,490,96,24,593,597,164,736,375,557,235,563,497,482,799,215,662,152,29,480,120,499,239,46,494,508,867,539,293,783,291,348,809,308,187,271,349,644,677,471,109,755,140,866,111,354,483,364,476,118,613,61,192,601,636,21,863,842,541,756,277,688,502,280,423,198,384,679,556,137,457,60,738,324,778,844,380,788,630,639,77,565,766,681,795,114,581,88,888,70,427,378,320,322,166,377,419,300,893,157,195,554,243,530,84,35,741,731,834,305,86,535,97,19,646,852,753,101,316,248,201,338,161,283,710,51,735,160,82,771,518,282,512,804,749,495,330,510,54,175,270,790,121,126,332,591,555,172,159,640,617,536,740,598,260,382,528,600,817,464,184,391,142,750,437,544,638,326,374,445,869,574,59,311,255,362,409,199,465,507,727,5,76,515,713,704,769,288,897,150,700,48,456,434,791,230,895,136,678,474,214,393,281,218,797,275,158,249,443,825,832,37,889,387,874,547,278,595,193,829,881,15,551,131,323,798,721,702,352,154,606,252,631,417,800,787,196,7,857,564,289,296,477,386,585,219,684,614,642,884,899,435,762,302,211,244,779,846,758,241,306,388,577,138,744,405,363,768,792,822,321,307,134,671,6,527,723,319,153,659,373,820,683,622,725,859,224,80,83,633,823,676,223,570,73,690,414,626,843,396,826,292,47,719,412,404,649,10,91,707,794,739,103,44,117,572,135,181,854,752,487,722,747,469,125,4,853,674,878,533,463,685,784,27,385,261,763,262,670,793,549,392,353,813,370,680,839,309,620,623,365,833,89,188,579,250,62,651,139,751,376,369,847,742,810,50,327,862,411,122,406,603,355,30,757,709,691,523,368,39,314,431,420,16,726,880,764,827,818,66,607,805,746,868,32,229,568,806,828,605,761,273,664,835,592,245,318,855,849,337,74,18,840,580,25,573,112,31,174,182,522,339,887,108,263,436,95,682,329,232,612,397,687,587,548,49,529,433,481,268,151,748,36,873,294,123,26,657,500,653,343,220,666,468,883,898,297,667,816,398,716,106,171,484,648,504,511,870,611,336,553,789,837,773,562,206,236,654,38,460,695,207,367,299,65,534,629,545,347,351,298,356,128,637,432,540,604,472,458,344,774,75,760,200,698,575,785,186,856,775,357,102,701,221,225,428,526,478,532,312,43,333,274,743,543,191,85,447,582,894,272,765,394,129,696,165,514,228,830,462,448,596,669,113,627,68,227,865,132,9,486,584,734,381,673,444,885,169,836,14,782,892,410,729,203,652,824,287,276,492,569,264,81,99,821,295,371,882,8,453,618,183,64,266,143,331,254,90,566,552,28,624,286,360,425,162,520,233,838,402,861,216,124,358,860,304,619,546,759,290,383,656,130,317,208,858,265,285,803,251,1,421,441,110,41,439,372,560,334,780,413,418,609,3,513,267,315,446,576,737,466,209,728,442,256,814,253,634,706,811,56,807,599,237,589,665,69,876,454,635,205,359,900,675,426,115,877,242,896,697,178,345,2,663,67,724,116,542,712,20,583,401,718,461,850,197,537,819,247,516,558,217,40,185,610,403,269,875,879,303,650,52,313,890,891,167,342,94,100,310,645,647,661,524,711,812,234,475,202,470,845,467,449,491,489,686,660,258,341,395,628,550,213,720,602,22,831,438,501,531,714,786,190,485,390,590,730,145,452,694,632,105,732,78,473,361,179,450,87,717,194,42,33,886,93,366,340,55,578,231,872,525,699,284,571,148,325,703,45,754,506,479,141,180,149,430,156,781,864,586,777,57,655,119 
636,168,340,835,632,773,771,462,236,798,131,176,756,647,600,288,595,849,43,870,880,491,22,313,55,525,345,867,681,511,776,512,405,805,398,109,408,546,754,524,500,388,32,18,821,382,383,302,645,740,260,883,347,112,206,843,704,693,64,252,106,290,261,92,12,19,634,856,859,652,537,53,122,149,875,263,541,156,661,853,23,303,812,198,356,283,707,534,276,298,360,239,441,455,67,404,559,444,160,264,787,420,548,676,869,26,147,891,743,544,449,241,145,597,129,327,683,770,428,862,291,179,868,173,338,588,654,686,454,358,287,701,240,889,752,514,116,765,370,410,750,547,318,666,840,516,262,895,397,492,495,270,664,519,807,142,736,279,650,882,732,656,465,825,77,896,565,108,591,609,277,542,403,706,296,678,7,306,602,148,79,431,496,564,38,123,299,25,735,268,485,467,617,182,333,607,741,82,482,41,657,785,475,762,637,680,531,838,192,884,670,478,336,194,566,205,247,87,368,425,677,501,37,344,372,325,373,213,731,836,489,596,16,810,772,34,536,245,10,393,359,729,623,716,289,816,831,427,760,497,487,879,401,324,635,339,746,832,88,434,507,224,688,211,63,100,197,44,341,872,212,357,8,692,278,136,572,583,165,611,184,214,844,470,417,720,130,435,151,618,461,792,897,377,758,505,150,371,890,722,539,679,366,575,181,804,163,589,102,128,490,436,220,127,734,540,233,292,719,374,389,749,845,873,711,570,573,504,854,660,827,414,708,877,744,700,45,137,73,52,221,885,323,365,460,54,284,118,728,237,826,253,506,75,115,778,818,813,522,83,622,49,580,411,473,350,488,297,207,703,669,30,348,254,577,472,251,204,188,322,557,96,158,809,363,887,28,629,769,330,406,479,99,725,727,146,231,62,133,841,603,311,452,310,761,819,520,361,608,332,447,69,5,274,553,689,346,352,301,717,724,152,503,153,549,412,851,614,235,842,576,469,900,814,586,858,533,228,391,642,395,738,157,459,665,114,230,286,830,499,107,202,423,751,331,468,723,871,667,210,802,702,631,881,418,3,415,847,593,898,864,376,21,285,47,424,791,58,579,671,190,222,594,328,739,426,216,161,604,587,76,789,135,317,453,226,387,876,335,560,578,266,624,445,91,143,124,811,806,764,457,658,61,526,429,726,141,456,643,874,651,402,641,305,209,164,111,94,392,269,234,610,774,628,529,783,13,846,232,828,784,433,166,238,314,682,585,15,513,381,416,201,351,494,464,863,712,199,556,98,590,757,203,6,315,246,552,714,169,655,763,39,886,334,343,561,80,195,225,419,793,74,248,439,532,174,33,619,815,574,448,481,273,300,105,267,86,745,375,866,850,438,638,834,185,68,852,581,582,721,718,396,312,196,217,40,399,715,65,822,139,865,502,48,480,171,626,36,101,779,93,463,144,24,113,797,710,342,223,748,446,538,227,527,466,855,120,183,598,450,42,159,117,437,208,329,648,255,103,337,46,782,649,775,242,409,893,699,57,295,672,407,281,367,162,134,817,592,620,355,275,824,308,484,400,558,567,369,155,390,125,800,458,321,385,140,249,215,803,442,674,705,379,259,132,668,562,257,229,515,172,60,690,892,50,518,755,535,615,95,282,627,698,265,413,59,430,29,829,218,353,31,790,20,801,170,639,685,271,432,85,51,90,747,421,768,250,543,84,97,293,795,309,138,599,186,193,258,878,386,781,616,384,517,857,70,187,244,799,1,110,471,510,508,571,630,71,304,354,2,662,551,320,443,316,440,555,808,733,451,476,528,243,474,653,378,477,767,530,272,256,493,601,709,294,119,191,697,380,612,625,307,673,633,554,89,687,737,126,498,521,730,56,860,605,72,121,837,177,796,167,362,823,349,422,848,545,568,786,104,861,219,621,695,753,486,35,899,78,833,509,27,364,640,766,820,4,646,11,613,563,394,794,17,684,694,742,319,713,606,154,189,326,200,839,644,550,9,569,788,66,175,584,14,659,894,691,780,523,777,280,759,663,180,483,888,178,675,81,696 
463,134,374,459,48,844,283,300,358,719,620,93,518,879,839,255,141,710,275,737,560,616,171,506,61,760,165,599,786,312,487,334,243,529,665,507,434,49,887,388,317,207,269,204,672,235,311,16,277,412,78,819,527,587,613,814,396,636,104,15,188,413,372,296,328,107,321,394,725,829,198,94,327,497,442,418,751,125,420,871,343,606,602,621,10,565,778,347,502,319,149,417,339,820,65,74,155,266,304,633,416,83,457,848,825,143,480,516,747,730,759,18,479,192,292,706,661,173,190,826,253,578,638,294,336,356,362,881,156,652,43,471,656,483,634,222,287,882,8,136,617,838,749,883,740,180,452,795,666,4,691,745,715,790,852,320,899,566,391,791,898,728,13,653,228,856,212,643,231,639,756,698,779,895,167,854,229,492,193,350,216,19,626,117,100,469,655,220,401,523,316,614,162,405,785,411,254,144,425,183,860,305,88,840,189,357,804,281,126,468,450,443,286,720,503,113,186,139,256,571,564,106,250,532,365,114,625,99,806,637,858,444,213,608,651,462,660,493,360,85,5,885,197,750,247,577,494,73,679,787,252,789,407,514,438,23,734,894,115,752,729,55,851,25,390,302,176,168,219,775,703,727,373,435,704,129,251,568,398,200,208,692,406,799,91,611,50,488,276,102,21,687,836,680,419,796,69,544,662,344,22,640,811,395,530,890,445,142,496,137,47,132,157,499,267,265,240,333,447,588,575,659,585,864,520,618,12,649,510,847,428,27,724,874,792,423,359,109,598,511,543,805,597,513,158,460,92,325,230,59,567,120,853,335,140,453,531,458,24,130,774,187,533,322,146,124,147,64,26,849,607,118,236,11,46,31,693,489,181,37,644,90,34,475,313,884,303,635,131,501,400,58,241,581,748,642,733,56,151,657,364,696,834,762,586,538,40,97,771,203,797,381,370,465,589,528,1,205,676,629,481,67,813,548,28,77,631,293,707,761,816,604,877,694,478,753,865,36,60,522,105,342,39,209,891,295,32,466,781,580,81,579,297,888,594,605,298,717,821,224,893,426,615,641,780,260,306,550,35,810,689,326,261,583,182,772,872,76,746,763,20,484,72,273,831,430,647,101,732,716,361,536,270,777,9,377,841,736,123,217,669,223,508,66,539,153,340,439,603,504,646,837,274,542,2,863,288,440,449,830,685,828,835,392,194,145,57,323,509,448,110,767,731,464,866,436,378,619,670,624,815,349,324,331,346,545,742,199,722,258,301,177,353,818,280,876,726,122,768,354,555,477,455,282,515,63,366,713,553,824,755,96,476,723,164,765,53,461,559,201,226,387,351,822,526,278,386,610,393,867,410,862,551,41,576,427,112,290,214,695,570,808,202,674,248,138,677,671,45,859,540,119,697,257,663,403,609,524,95,572,225,519,718,299,268,486,754,195,495,561,154,675,237,178,284,517,383,714,648,800,556,758,741,842,172,512,84,170,103,584,86,246,505,38,414,380,242,33,215,166,279,272,17,552,467,549,769,402,474,699,873,385,409,794,318,645,809,234,783,711,7,628,658,111,623,812,766,160,330,121,345,622,150,667,51,337,441,627,798,98,179,803,827,376,590,451,44,485,341,218,702,355,422,709,148,612,861,52,832,206,375,582,593,735,338,161,482,431,535,595,668,175,408,348,379,70,846,264,291,563,600,389,384,896,397,654,71,221,89,108,738,574,127,42,128,601,415,673,744,490,880,159,244,6,684,592,573,263,534,233,500,788,473,801,521,82,227,352,472,259,547,116,683,712,807,329,850,456,681,562,3,802,688,87,537,75,782,525,739,133,650,857,870,369,690,332,309,705,239,541,14,232,817,558,776,308,211,897,191,454,315,433,285,437,845,262,630,793,54,889,80,245,596,701,823,498,833,169,632,62,682,363,368,210,238,185,174,869,314,886,664,249,30,843,875,546,152,424,421,491,184,569,557,307,591,678,757,371,708,271,432,554,289,382,900,770,310,135,470,855,196,163,868,399,700,743,764,892,367,784,404,878,446,721,68,429,29,773,79,686 
820,294,165,717,800,570,554,219,76,508,369,786,180,193,230,188,861,710,300,799,445,183,483,61,650,382,670,626,8,309,226,161,366,332,16,702,124,882,254,688,748,777,233,323,287,286,265,750,467,248,703,727,785,544,73,575,328,275,396,774,353,766,648,200,319,586,243,615,814,50,742,543,201,126,584,74,395,420,751,579,373,232,358,375,745,559,10,137,54,224,290,823,154,376,532,663,868,281,228,477,529,533,490,160,599,682,466,557,38,31,510,660,133,793,244,458,289,667,629,499,804,144,724,227,81,601,195,362,368,52,583,623,802,607,391,824,210,213,822,446,262,149,218,464,705,613,168,770,27,865,13,179,503,278,441,167,374,83,317,645,398,11,867,856,410,743,419,489,647,343,250,209,261,436,651,9,426,123,795,346,649,860,415,834,617,551,33,440,813,646,817,37,699,131,838,197,550,345,424,348,42,455,507,247,896,255,108,635,352,597,207,147,763,542,839,325,216,569,156,91,694,866,689,163,409,329,206,361,23,384,534,107,674,174,102,638,537,122,377,831,769,355,457,527,129,225,305,828,88,72,462,631,606,71,276,701,643,394,641,517,234,450,438,668,726,311,202,334,271,762,270,453,497,449,251,516,877,621,85,443,351,217,479,101,598,588,691,78,152,603,747,729,779,252,897,82,719,110,535,565,818,298,858,259,600,338,214,505,215,267,832,387,260,120,573,841,825,77,892,402,853,604,356,578,299,815,883,68,664,40,383,827,709,679,342,203,536,312,454,241,735,796,58,70,864,461,381,187,614,475,684,279,521,428,672,889,899,552,790,306,29,237,791,851,803,339,434,494,624,7,755,406,470,761,830,840,205,135,235,728,39,574,725,431,266,608,333,810,740,627,734,870,184,170,116,469,692,665,240,653,162,589,246,472,859,506,843,404,3,752,302,370,487,45,182,749,805,773,67,106,204,633,20,97,863,797,591,775,594,524,6,51,847,704,878,582,282,721,821,103,519,371,767,405,452,630,612,425,386,798,430,593,846,119,592,632,488,844,47,176,427,388,150,713,28,654,480,460,862,759,826,172,256,819,344,41,320,622,326,19,758,673,768,605,318,63,223,411,423,36,657,885,738,698,835,69,408,816,784,880,90,350,644,836,711,658,545,349,718,854,580,164,331,850,849,236,127,60,852,590,22,400,43,117,181,242,522,341,876,66,307,413,59,733,125,315,435,485,563,677,531,98,540,476,546,380,17,620,48,869,421,231,132,528,619,595,379,263,602,288,881,898,141,513,781,500,760,159,87,429,656,478,567,812,700,285,561,739,780,811,680,155,297,481,53,504,484,272,473,118,185,336,587,636,178,393,330,459,257,693,539,407,716,556,515,321,807,24,792,104,744,687,697,157,782,681,372,130,696,292,190,523,492,340,642,280,4,273,143,776,448,46,145,439,417,895,403,686,296,121,695,12,337,84,848,571,512,659,754,175,625,142,138,871,79,100,637,611,706,502,628,555,886,166,801,18,806,894,239,609,34,690,794,136,322,335,562,401,2,57,808,399,308,888,26,378,568,258,115,389,105,422,364,21,675,541,49,530,295,456,549,139,327,284,737,520,873,95,99,493,746,194,708,616,655,354,301,722,5,245,114,855,363,134,809,253,65,291,548,62,25,444,491,463,433,789,314,558,669,14,365,171,277,199,634,756,576,303,715,498,113,833,390,666,783,712,96,707,671,293,474,753,169,874,538,451,80,437,900,560,360,32,872,153,893,772,191,416,35,553,173,788,75,482,518,128,447,526,730,496,857,112,418,842,357,640,465,316,1,274,525,222,94,884,829,189,661,30,442,887,890,177,269,208,86,212,741,412,678,596,765,837,238,581,93,347,731,495,471,367,468,732,723,192,158,486,511,683,283,714,685,89,845,566,639,662,585,787,148,397,414,547,771,56,359,757,577,151,564,198,618,392,249,324,196,720,220,140,44,891,111,304,432,109,385,221,879,501,736,229,572,211,146,514,15,778,509,313,264,186,92,310,268,610,875,652,764,64,676,55 
781,66,271,835,609,807,698,332,127,794,189,342,541,585,571,202,749,862,1,885,793,421,108,194,139,573,447,849,398,424,549,335,359,699,232,313,299,704,730,656,655,564,7,36,733,315,330,365,574,544,363,887,600,251,111,851,581,626,42,294,34,517,416,73,13,101,431,808,884,480,641,145,112,119,814,159,588,180,761,861,20,312,731,272,380,423,428,307,100,255,213,510,306,511,122,427,745,352,157,384,746,371,589,599,864,163,260,832,456,368,568,254,120,708,21,429,630,687,497,853,513,156,867,160,162,643,410,713,331,240,267,743,507,839,720,695,14,710,457,353,661,487,320,726,859,551,224,893,175,586,244,248,722,537,800,115,750,191,543,877,753,397,595,875,35,895,466,219,618,596,296,496,414,751,400,446,62,238,715,199,109,557,546,694,141,266,171,22,817,438,693,212,644,142,633,441,674,33,503,70,474,639,366,723,820,508,435,757,168,830,495,350,533,327,774,106,164,190,182,308,768,696,200,301,406,208,344,124,445,769,614,345,166,601,528,183,637,71,31,558,383,742,516,760,148,636,666,569,489,376,479,873,529,128,525,360,796,816,117,556,493,176,739,252,273,23,69,8,285,881,245,458,110,660,300,203,775,686,79,502,196,214,755,375,412,764,169,288,96,509,434,841,897,387,876,399,281,293,852,714,570,624,678,469,341,783,170,471,113,86,590,310,167,38,684,738,464,129,858,432,691,785,752,868,612,773,676,355,842,476,668,519,797,882,729,460,184,97,178,45,442,890,133,235,683,174,188,15,685,220,828,154,631,41,247,866,894,777,524,37,324,18,756,672,617,241,393,268,216,450,780,40,389,290,628,572,246,55,146,379,356,181,249,737,322,879,76,790,707,492,594,662,12,559,591,261,422,150,49,848,385,482,449,425,855,778,616,269,361,505,473,151,87,140,459,786,403,535,206,520,532,233,329,83,659,540,844,732,392,838,340,309,900,845,802,854,547,311,536,413,369,593,264,402,724,118,237,354,787,725,130,357,670,652,491,615,648,889,488,65,803,619,494,874,262,135,453,789,680,898,886,280,5,566,10,289,772,161,584,436,279,404,610,433,727,195,225,89,647,394,256,870,328,531,522,64,454,888,498,770,417,257,711,580,295,278,99,658,834,850,587,576,123,771,490,690,61,228,799,865,377,472,455,218,74,93,107,155,718,88,137,653,504,744,448,627,47,809,378,833,801,215,298,325,415,635,304,85,358,664,418,149,253,620,545,869,706,114,701,25,811,883,50,46,560,239,709,592,75,702,812,57,822,613,465,562,134,407,468,611,741,43,259,553,283,275,58,561,735,485,348,521,420,314,80,323,53,622,291,863,819,514,682,818,286,30,878,303,582,607,795,467,530,51,463,204,443,452,201,705,28,810,608,11,567,209,364,2,39,836,48,205,56,91,44,880,625,548,265,623,621,282,277,408,675,821,172,317,634,381,147,84,72,597,54,210,651,222,287,367,207,697,712,527,499,187,896,857,32,462,477,565,526,316,198,185,826,484,346,250,336,762,347,719,243,606,688,230,90,297,26,759,319,158,396,270,102,349,754,338,583,602,411,274,339,673,692,82,59,500,382,4,763,871,103,534,740,518,372,17,95,806,657,217,638,27,263,52,791,125,211,105,736,126,703,326,640,649,409,226,153,24,136,689,554,831,292,373,227,165,229,856,258,193,669,395,121,470,872,386,798,677,221,598,840,173,67,302,891,68,179,321,776,451,823,645,29,351,186,92,506,646,231,552,444,276,642,765,813,538,539,437,333,632,603,405,575,665,305,177,391,475,478,847,579,16,343,419,390,829,717,143,650,515,430,3,728,766,284,604,679,827,60,804,486,19,318,825,144,782,116,523,843,334,370,788,663,605,700,223,860,131,784,629,716,550,63,899,9,815,563,152,426,483,758,846,98,388,132,481,577,439,654,6,555,792,667,234,461,805,104,236,362,138,824,578,779,77,721,734,192,81,512,78,374,892,747,767,440,681,94,748,542,337,671,837,242,501,197,401 
135,565,313,45,257,590,114,242,529,324,751,275,575,860,717,267,23,236,741,252,561,450,470,557,516,589,30,190,879,134,604,392,80,338,695,711,374,137,826,85,52,65,622,530,417,131,159,173,59,477,75,419,482,744,777,463,228,354,526,531,712,421,331,478,743,549,382,240,241,784,9,335,471,631,356,518,567,169,54,594,802,541,314,655,223,510,853,566,780,249,439,552,316,761,395,24,41,139,359,612,98,81,127,722,466,535,563,458,849,681,634,291,619,5,767,653,376,203,20,496,256,667,148,328,586,42,508,793,192,665,206,70,764,389,292,27,719,787,211,68,362,762,746,810,293,13,438,318,869,342,870,813,420,649,605,449,899,608,176,378,894,753,248,207,778,469,94,843,11,353,796,532,705,891,55,885,543,473,4,416,854,197,385,16,460,606,758,736,32,502,100,845,8,509,872,326,104,611,144,424,863,383,274,638,154,340,727,310,182,409,369,345,168,840,263,506,286,285,550,514,162,64,596,453,172,336,523,498,895,344,829,581,602,633,679,805,405,866,783,61,299,809,126,443,475,546,540,46,773,732,34,380,188,833,218,147,368,808,718,636,574,296,666,237,686,825,547,635,145,311,775,643,726,99,702,184,86,227,609,136,194,768,415,754,28,187,559,484,433,266,282,253,408,576,214,734,251,486,347,40,210,346,890,205,603,794,517,180,662,350,67,272,230,791,71,50,373,607,193,432,570,245,457,555,265,398,315,583,122,842,507,119,397,571,536,564,615,341,803,830,621,363,868,524,107,663,317,756,60,158,93,372,789,874,287,121,358,101,271,519,900,544,246,259,21,290,116,128,303,893,244,497,53,488,95,123,723,864,270,96,623,260,321,219,166,614,562,355,247,396,232,97,626,465,598,757,836,492,489,191,479,661,800,748,185,161,103,226,828,87,703,568,766,616,404,117,325,82,713,556,476,177,865,731,171,31,201,18,769,379,883,585,493,255,301,337,832,183,62,527,44,302,377,57,878,888,234,366,474,387,284,624,198,881,495,381,112,222,779,618,806,163,452,170,857,690,69,300,157,375,178,278,628,647,677,814,684,384,542,737,348,520,49,89,617,657,680,595,480,668,599,160,464,818,212,678,861,239,58,79,560,129,130,48,792,233,897,448,118,90,462,700,440,371,456,710,491,629,156,472,742,819,812,410,577,388,501,454,553,750,428,548,804,88,875,400,838,261,688,150,521,600,361,258,196,213,889,760,672,294,77,269,343,715,29,613,429,413,580,858,403,357,856,785,504,120,76,528,759,632,407,545,483,749,36,591,425,579,656,66,124,640,295,776,687,273,876,334,204,867,289,852,512,141,807,332,447,238,174,221,231,728,2,235,224,554,165,834,167,763,113,19,682,651,782,739,442,637,411,422,627,308,525,797,22,537,823,620,770,102,704,884,490,467,850,216,149,673,669,642,352,896,831,847,63,208,220,186,17,441,444,446,692,199,733,436,15,423,431,33,533,175,418,659,569,714,280,217,839,685,892,181,459,822,327,795,485,133,871,729,468,306,597,155,654,658,572,367,500,189,671,264,281,827,72,430,650,652,539,209,105,641,846,309,798,84,132,837,689,25,882,747,39,305,745,349,675,143,886,664,772,330,262,820,202,503,511,720,142,725,716,437,138,648,268,298,694,37,195,660,786,696,551,898,164,229,78,319,386,250,801,115,111,333,6,699,558,701,297,312,706,1,414,455,283,815,821,153,390,691,625,824,365,709,461,35,513,215,610,676,401,10,693,573,851,399,740,125,279,320,402,721,698,38,225,26,645,288,816,277,639,848,877,92,771,765,339,774,51,391,412,592,630,351,683,730,3,887,323,146,43,47,494,276,799,781,873,445,427,790,588,83,110,646,844,304,697,7,370,364,200,515,91,74,56,12,752,487,644,724,426,534,109,817,707,73,578,601,811,322,14,393,435,735,406,307,538,360,708,151,582,593,106,499,880,587,152,505,140,670,394,451,855,859,788,254,481,835,179,584,755,738,243,862,108,434,522,674,329,841 
789,382,86,745,852,526,647,203,22,500,300,766,353,187,184,138,864,718,369,798,684,104,438,5,664,291,607,692,150,311,448,209,318,443,1,652,74,889,282,617,700,738,201,285,361,261,206,774,508,423,676,737,727,492,48,568,390,267,422,834,469,699,542,106,345,593,359,740,811,54,724,511,98,50,747,12,302,355,716,578,415,116,421,269,790,466,145,266,102,134,402,785,142,303,531,653,866,281,117,383,574,567,433,149,615,648,364,749,217,16,424,695,51,783,363,362,265,770,573,530,760,63,753,118,115,547,339,331,418,4,624,591,772,771,374,802,287,215,845,470,307,105,143,437,694,606,65,794,114,888,130,132,444,204,429,78,393,6,343,666,410,83,881,843,528,764,474,477,623,284,193,131,188,456,575,85,367,35,786,218,750,870,317,835,597,479,8,512,797,541,782,168,717,43,823,226,641,406,347,283,70,627,616,235,894,365,67,775,309,769,273,108,703,504,803,411,178,509,242,24,631,868,643,66,312,403,97,451,216,461,484,220,628,381,278,620,454,262,389,824,813,336,535,478,158,395,525,830,299,32,439,654,513,182,295,735,587,398,732,431,156,502,387,683,697,446,254,379,232,762,210,356,482,462,171,457,867,540,26,520,244,152,673,53,612,544,744,40,81,728,812,701,800,162,895,44,704,33,655,589,855,276,850,256,510,350,94,613,126,251,848,475,227,137,637,826,796,103,886,288,827,557,514,594,327,773,893,27,721,60,585,840,649,669,314,375,471,313,426,259,665,799,166,10,859,386,440,271,688,496,682,337,473,549,603,883,899,609,847,342,222,221,739,828,788,400,499,522,659,110,707,396,405,814,853,856,119,257,176,746,42,532,741,518,197,626,252,768,832,521,656,873,208,233,146,416,640,679,274,658,280,506,199,404,838,546,862,472,52,706,223,264,497,7,136,710,841,729,11,175,328,600,39,58,879,795,634,755,533,539,68,15,863,680,860,610,250,733,819,170,527,494,779,385,447,754,638,308,392,743,407,507,821,169,493,534,565,846,55,255,428,408,133,759,31,635,414,583,880,806,825,121,239,784,409,18,324,605,243,62,780,564,805,519,296,147,127,516,346,30,576,874,686,646,857,82,366,816,757,869,57,263,572,837,689,580,705,524,708,839,483,139,229,818,877,219,76,120,833,614,107,335,49,45,241,237,577,224,858,123,371,334,247,685,124,476,489,558,485,657,498,177,441,370,455,351,173,678,14,854,352,179,79,465,551,602,329,315,523,419,861,898,268,561,720,505,713,164,59,358,598,517,767,776,644,213,584,672,734,781,763,93,183,432,159,401,570,228,590,113,129,301,488,660,167,260,341,588,249,715,621,332,726,566,413,357,807,161,842,111,711,650,622,225,748,639,245,322,667,430,238,661,399,434,586,172,75,297,144,758,554,195,109,377,553,892,450,599,198,148,608,154,290,71,836,674,481,569,778,88,611,96,84,876,112,41,632,691,645,467,548,675,896,388,752,56,820,890,286,529,87,604,742,92,200,293,536,464,90,9,849,458,196,885,2,298,491,348,47,436,101,460,452,36,690,445,29,442,316,538,633,128,289,174,714,490,882,191,160,495,736,212,681,696,731,258,294,787,80,325,234,831,338,91,765,240,38,270,595,20,3,373,556,412,559,777,272,601,562,25,319,153,181,248,537,723,550,417,698,453,73,815,368,668,793,651,17,642,693,189,468,761,214,891,592,427,89,340,900,571,253,21,851,95,884,810,155,304,23,545,190,792,13,391,480,151,384,582,687,394,878,72,326,844,323,629,380,378,37,306,435,185,100,872,801,277,552,163,376,871,897,292,230,186,46,279,756,425,596,515,725,817,157,722,77,463,730,503,543,360,581,670,719,99,122,625,449,671,344,712,702,19,829,636,709,619,662,822,140,397,320,487,791,61,333,751,560,321,563,194,618,275,372,354,205,663,207,64,135,887,34,192,349,28,420,165,865,486,677,231,501,211,141,555,125,808,459,330,236,202,180,310,305,579,875,804,809,69,630,246 
313,800,182,343,874,13,544,359,412,29,504,730,602,215,92,355,688,112,831,143,749,62,618,300,845,47,418,339,673,298,704,466,296,392,382,672,206,857,116,197,320,465,643,601,210,329,207,810,403,615,592,65,413,578,531,15,370,67,776,889,858,445,322,426,775,812,554,630,155,279,441,660,364,395,692,335,34,469,235,7,832,126,250,337,843,248,622,662,703,208,744,613,330,202,748,591,681,323,299,272,342,632,91,105,40,781,378,720,697,284,139,827,384,484,849,229,46,777,369,42,527,372,239,302,590,179,633,107,570,292,767,144,671,702,78,489,808,109,853,492,156,163,280,141,69,423,173,148,740,891,733,415,12,64,2,360,289,261,263,90,246,499,880,257,877,79,475,663,389,20,391,45,168,262,422,518,619,171,486,287,895,862,33,652,753,472,463,856,334,328,435,766,511,307,715,357,556,821,98,517,381,726,778,32,793,485,164,787,478,724,390,214,424,567,365,791,456,547,691,175,113,745,723,117,120,694,93,804,751,244,304,588,762,646,594,802,50,840,797,693,868,104,546,4,558,545,685,714,718,227,283,87,172,794,218,792,28,70,890,157,48,732,36,786,743,869,713,806,354,102,430,189,729,198,278,488,603,83,452,541,331,420,700,225,568,59,879,269,414,822,865,85,125,99,664,209,708,146,411,316,870,52,771,217,436,75,274,696,451,563,801,680,458,684,510,493,651,605,540,84,534,49,503,16,201,294,892,195,400,347,725,830,71,21,5,679,559,612,648,722,505,110,788,251,689,535,707,809,562,637,178,675,238,884,586,516,734,318,873,736,818,650,245,532,550,661,565,640,817,589,137,710,260,885,841,844,333,835,432,752,338,628,828,379,199,19,512,234,847,228,301,825,712,344,161,513,607,838,683,134,549,321,241,351,311,233,842,669,502,461,138,440,782,446,94,165,878,470,350,367,500,645,464,522,872,665,224,388,491,54,566,275,185,72,490,81,211,769,728,444,523,542,846,375,243,900,774,145,44,305,627,421,525,273,219,122,479,222,293,758,76,267,142,291,476,829,183,439,871,231,205,268,678,582,834,398,27,735,18,405,848,408,764,290,11,659,317,850,10,285,584,447,604,507,867,636,394,170,575,642,167,295,111,739,755,528,896,631,136,356,114,88,431,416,894,56,457,593,397,41,548,310,312,230,721,595,837,377,638,717,727,14,686,188,341,539,839,256,425,123,1,649,362,240,204,103,768,881,236,670,181,438,253,177,259,9,8,677,97,866,483,737,836,860,448,609,277,118,560,30,23,811,780,569,450,31,795,521,580,501,695,506,174,270,655,346,876,51,482,309,169,286,288,741,626,149,690,614,404,140,366,193,459,101,353,765,192,706,851,166,159,494,433,761,676,770,63,674,750,429,754,524,58,855,297,264,538,731,656,186,875,814,530,583,886,553,324,371,308,133,194,759,467,315,596,453,581,477,653,80,763,473,498,861,701,303,409,854,616,374,599,639,899,711,557,585,190,617,773,386,462,242,496,352,265,536,3,480,625,327,887,191,151,813,345,60,73,654,407,600,635,132,668,577,699,402,519,383,43,698,620,96,361,152,742,127,859,803,719,376,789,738,213,271,888,26,24,823,572,807,824,373,82,442,434,647,332,644,249,474,399,551,387,624,526,779,57,449,363,471,587,623,396,314,180,115,576,634,756,543,410,255,481,805,597,573,368,495,129,77,37,529,552,897,176,666,658,254,746,833,417,393,508,128,629,747,555,147,454,785,237,598,306,106,460,437,130,336,89,74,893,154,349,705,86,579,119,282,509,571,325,266,348,514,667,815,428,611,258,520,898,757,22,135,158,798,497,226,537,121,340,358,455,610,232,864,816,17,820,68,863,406,160,150,326,515,124,468,184,783,153,380,419,561,564,401,882,162,641,35,25,606,790,281,39,220,784,687,819,216,443,66,427,796,187,95,61,212,487,682,385,276,203,319,131,38,574,716,200,53,533,621,252,852,608,223,6,108,196,100,772,55,247,657,826,799,883,221,709,760 
694,31,182,605,256,855,329,106,108,705,592,322,365,831,752,59,521,783,205,847,558,455,178,237,139,708,259,643,599,179,364,126,123,449,412,673,198,467,864,536,523,451,118,104,561,84,156,134,245,296,187,870,725,640,410,824,310,524,39,207,131,631,518,107,283,169,174,502,851,671,391,95,157,308,527,163,700,48,606,873,360,478,519,579,122,616,586,154,268,133,32,698,121,788,58,119,547,105,130,634,460,69,469,744,844,282,503,576,562,509,759,67,284,456,186,701,549,286,265,815,534,407,737,109,92,452,208,865,51,426,42,546,826,572,566,520,142,834,234,73,496,727,639,879,805,239,257,863,457,373,483,633,709,713,830,114,897,337,288,822,894,501,383,843,224,884,137,680,323,554,677,567,692,896,251,707,194,272,459,281,421,349,580,427,173,545,432,238,647,624,600,392,293,200,889,228,292,57,388,85,731,302,72,758,716,230,661,400,17,551,274,223,508,753,743,15,19,168,66,332,662,499,445,348,333,11,479,16,637,570,867,218,387,464,468,617,668,347,378,377,149,871,159,745,38,443,424,344,488,614,215,814,505,309,311,96,772,886,252,754,610,9,836,77,622,262,26,90,75,853,632,717,471,418,619,103,651,626,165,151,94,591,447,642,100,635,158,225,68,148,147,777,882,565,850,630,196,319,665,420,253,533,899,255,620,876,287,112,372,8,303,43,14,331,315,589,557,89,819,537,813,690,514,868,413,796,441,422,602,697,389,260,802,885,726,269,465,3,618,438,703,875,396,249,552,495,7,190,305,20,649,4,859,398,241,810,891,504,210,25,608,49,722,654,395,18,80,28,54,695,736,61,201,171,290,301,582,357,27,129,397,99,144,458,160,887,226,801,314,597,595,454,78,402,583,656,820,138,12,711,185,742,763,761,807,541,325,21,560,425,732,359,472,188,416,679,204,442,470,433,324,102,636,354,384,300,660,522,765,748,638,341,900,771,797,764,823,127,318,294,79,278,193,115,890,490,46,444,721,749,22,644,625,848,641,666,291,861,658,52,888,363,430,735,598,439,298,535,385,869,837,113,141,794,111,588,838,125,672,550,167,613,229,358,770,180,531,53,686,493,532,828,492,854,261,143,827,856,351,628,453,97,594,353,706,246,500,380,702,755,684,729,177,785,316,799,74,184,734,845,461,806,674,166,36,2,55,242,818,216,6,728,542,623,779,352,450,609,712,688,812,117,390,350,417,473,553,369,506,648,244,24,135,832,367,883,653,1,803,334,835,880,317,375,714,30,587,556,356,804,798,83,548,860,304,682,71,650,746,486,361,217,243,829,277,289,494,487,411,781,219,833,621,63,463,381,13,266,60,760,590,780,371,678,258,40,809,446,231,741,681,212,800,81,816,596,539,343,222,475,64,543,715,267,370,394,538,87,346,723,155,481,37,152,312,862,342,793,571,652,669,574,693,719,526,559,65,273,271,408,153,41,336,419,197,136,330,62,409,140,415,321,474,401,704,276,866,821,403,756,757,540,659,670,213,603,817,150,584,516,227,568,581,575,366,766,784,29,101,34,145,577,44,440,93,414,175,655,718,45,203,664,789,270,747,476,431,295,128,192,840,233,573,740,306,528,808,162,663,86,206,811,507,601,615,35,211,376,563,374,429,146,434,406,379,285,795,297,423,280,564,247,299,881,510,768,76,91,236,82,593,773,33,88,345,733,183,778,786,386,874,355,47,404,683,612,382,214,892,340,466,607,787,676,857,264,50,327,189,368,362,339,482,710,825,98,841,485,769,569,328,657,667,405,462,110,767,435,517,5,675,792,774,750,858,199,448,497,181,846,489,56,739,393,604,176,399,895,338,529,530,689,124,513,720,220,782,775,23,877,70,437,699,578,730,511,839,275,525,191,738,120,685,248,335,279,307,893,116,872,611,326,172,696,849,687,221,235,544,313,250,512,436,254,428,762,629,164,498,724,209,480,232,161,898,645,691,132,627,776,263,10,791,515,484,842,751,878,240,646,202,852,320,790,477,555,195,585,170,491 
577,576,72,194,697,436,152,47,211,124,741,772,132,529,398,112,683,229,767,381,242,82,663,157,799,387,422,104,443,39,215,71,88,26,263,863,48,811,463,380,468,580,596,589,11,79,66,676,84,129,569,320,761,821,548,209,25,24,643,853,758,737,586,393,779,787,114,189,434,273,408,637,363,444,222,172,361,372,400,339,824,324,12,623,684,606,365,216,535,109,389,854,42,616,655,418,719,55,259,636,68,330,144,292,197,810,639,239,383,163,593,660,432,478,746,629,46,396,309,135,733,482,186,235,261,190,151,537,183,251,545,161,890,237,37,584,671,370,770,192,2,409,547,651,277,231,185,293,579,851,531,632,275,388,276,296,712,256,15,167,704,225,838,581,833,344,117,850,223,102,659,136,508,740,449,455,668,141,475,425,894,805,236,595,752,736,437,828,445,667,628,523,317,278,899,19,162,666,123,507,416,245,433,182,852,59,332,364,290,252,22,23,619,844,687,543,174,605,325,87,350,753,837,139,176,356,226,549,511,17,760,158,836,160,166,884,375,720,816,691,744,520,150,282,233,93,173,670,327,314,121,156,411,597,9,607,343,480,867,610,227,491,453,649,870,812,461,700,100,340,675,600,829,45,557,487,751,337,360,142,266,633,208,397,352,180,832,60,254,465,689,368,342,347,843,322,707,113,75,108,729,40,900,13,738,452,378,299,566,391,681,291,210,654,147,644,813,319,794,348,856,207,69,257,3,695,858,86,128,355,220,724,479,376,143,188,755,424,826,776,820,412,645,119,773,750,410,694,201,429,159,371,718,868,665,743,888,73,715,570,634,534,585,777,565,297,196,394,652,474,488,641,199,827,677,742,554,739,214,615,99,601,756,41,76,338,602,650,539,563,573,788,562,16,50,793,877,792,476,137,120,725,582,732,626,49,726,321,333,583,516,591,845,246,31,404,801,592,399,38,65,590,473,546,798,611,83,439,823,116,498,95,489,305,797,126,598,672,680,54,267,98,803,110,722,898,599,382,70,685,494,745,748,467,594,497,111,493,247,503,532,20,14,168,499,865,181,56,784,244,426,51,646,866,735,438,298,709,249,179,786,717,530,405,218,316,538,686,269,131,802,706,791,862,774,440,763,423,625,754,57,170,281,682,886,618,895,134,262,575,504,268,485,766,861,303,313,234,662,193,232,702,311,106,464,367,711,483,878,521,448,328,315,406,568,81,859,43,768,171,307,264,555,415,401,29,560,879,165,830,125,205,62,664,285,366,118,407,567,822,819,887,727,864,782,414,470,34,552,431,369,730,306,883,481,283,696,809,860,631,308,524,228,716,289,484,885,44,80,527,32,673,638,541,701,334,528,153,92,175,64,624,318,96,457,614,390,435,657,248,379,419,807,533,889,874,302,206,769,36,559,115,450,814,294,458,326,647,658,392,806,635,346,630,857,808,33,783,572,89,564,587,749,469,698,101,522,578,454,91,747,272,553,831,544,90,191,693,762,238,764,198,873,243,842,335,353,825,785,771,496,495,790,551,349,757,274,105,442,386,848,18,459,839,195,451,526,265,284,240,518,27,253,505,490,609,351,714,78,345,219,127,705,224,891,103,796,648,506,130,897,688,280,107,882,138,287,561,492,661,608,734,28,603,669,513,77,728,67,471,331,604,58,759,94,789,428,133,540,200,781,622,519,613,413,421,447,178,778,525,556,502,204,708,472,855,286,841,145,146,486,403,169,875,63,834,620,301,892,847,627,420,815,358,869,509,446,250,184,835,5,501,374,514,818,74,612,61,430,341,849,323,674,679,35,417,558,10,359,255,690,588,510,721,893,703,640,270,148,840,881,441,258,7,203,692,329,699,678,310,477,517,460,164,373,804,896,140,617,466,723,536,241,395,574,97,621,295,6,765,155,212,642,187,213,300,871,354,542,384,260,731,656,336,385,288,795,221,872,4,217,279,30,780,1,456,52,85,154,817,362,515,202,230,571,149,775,710,462,177,653,377,512,880,304,271,312,500,21,53,550,402,8,876,800,357,846,122,713,427 
726,294,215,839,845,619,774,421,73,673,44,562,629,297,317,239,833,820,107,864,853,301,154,94,420,357,561,847,399,504,665,459,444,694,106,285,256,861,454,618,648,636,29,64,655,408,354,650,649,654,563,844,541,131,10,721,630,538,233,662,247,505,407,35,49,295,594,866,857,284,700,299,40,9,875,81,386,286,709,717,79,121,681,91,688,313,430,479,105,223,474,535,347,241,335,633,822,446,96,207,742,607,546,356,782,337,140,879,510,229,352,576,11,756,158,230,523,834,595,734,592,12,858,109,240,623,575,428,540,80,568,685,455,886,622,723,117,449,741,543,605,216,78,439,789,644,108,882,151,850,257,34,554,293,610,51,437,50,582,827,453,374,836,849,253,868,614,103,679,476,60,326,157,440,514,314,95,116,762,114,427,803,410,798,328,175,32,174,806,366,687,277,732,38,529,507,761,217,472,104,269,773,640,506,881,612,194,863,281,885,557,342,574,152,705,349,245,311,363,187,669,808,296,150,381,461,161,390,470,706,350,484,274,659,591,192,492,144,54,711,704,464,661,597,244,676,738,730,577,149,555,819,494,178,545,647,657,580,447,340,287,442,495,551,322,170,222,146,368,846,46,255,99,638,77,321,810,593,19,652,209,22,831,130,620,656,520,235,72,747,745,746,887,173,892,163,508,148,852,712,825,530,566,500,251,534,45,684,8,179,788,558,263,14,754,750,581,169,870,291,624,672,760,755,604,663,878,248,848,275,767,752,660,791,547,616,132,237,71,15,402,869,138,133,769,87,450,97,779,456,787,359,371,218,393,880,899,765,804,188,375,83,668,680,713,482,601,515,536,362,683,166,457,651,832,802,41,113,182,642,305,345,552,722,332,784,43,686,865,418,567,817,92,569,483,110,303,431,176,801,527,312,164,204,823,733,826,519,278,596,208,55,102,66,389,710,719,615,36,516,611,452,137,2,854,701,809,729,236,725,213,206,900,748,776,783,258,590,714,478,570,690,579,526,422,424,539,262,646,626,280,250,689,392,372,501,715,883,310,219,559,631,434,872,85,249,486,796,867,895,877,290,39,528,160,90,544,370,394,351,602,344,793,481,512,298,37,383,468,279,252,873,417,369,792,84,189,876,637,816,327,307,628,751,276,404,365,724,781,835,443,339,75,696,811,441,53,316,770,780,401,201,268,142,225,210,400,82,677,120,364,445,560,678,162,695,101,778,220,775,625,343,266,292,409,586,333,308,200,707,460,193,228,382,589,731,549,288,498,134,764,896,122,172,518,469,666,488,7,463,664,265,884,509,584,346,387,405,438,675,842,4,181,300,379,271,156,473,766,202,336,227,325,598,3,224,198,728,403,838,813,260,739,735,302,184,862,360,821,391,718,621,396,168,415,234,289,588,419,697,139,843,426,128,608,68,320,89,5,785,323,264,93,124,272,890,653,398,69,475,532,270,63,153,744,840,330,425,772,261,341,100,1,812,76,159,702,578,384,487,246,794,894,639,496,186,889,874,21,306,273,542,531,70,127,30,667,617,309,58,693,699,141,830,119,361,489,451,42,517,27,682,556,56,641,232,62,195,600,603,720,423,143,177,324,627,860,126,125,606,334,23,716,859,413,414,503,759,259,147,214,737,585,20,635,112,231,16,763,13,65,136,740,88,749,573,385,708,458,185,28,6,33,397,522,743,497,583,448,283,24,829,435,490,777,331,25,367,841,282,571,758,378,871,790,67,18,318,898,190,52,118,727,203,805,807,57,315,145,155,499,757,47,358,211,355,380,753,703,432,837,197,123,771,548,613,348,643,205,353,221,183,212,855,511,98,377,462,465,800,893,338,429,477,238,86,786,466,376,550,674,815,31,856,243,180,304,692,436,533,433,587,799,115,111,818,411,698,632,480,828,59,768,736,797,634,329,897,26,599,416,167,691,267,513,814,242,553,135,485,670,319,658,61,525,671,521,171,467,851,17,74,388,48,572,395,795,196,645,524,254,191,226,129,406,888,565,493,471,537,165,491,564,199,824,891,609,373,412,502 
mlpack-2.2.5/src/mlpack/tests/data/rann_test_r_3_900.csv000066600000000000000000001003301315013601400227340ustar00rootroot000000000000000.637150933,0.126536213,0.823749724
0.637940649,0.951567917,0.397777975
0.344749598,0.517031469,0.48297473
0.296545224,0.419944602,0.99985146
0.707956343,0.985929306,0.942420811
0.24734852,0.001808114,0.489512545
0.29395388,0.751934338,0.924845167
0.306350765,0.676837884,0.614397758
0.387029257,0.668882644,0.316866608
0.166701475,0.220250465,0.70788096
0.666366134,0.343214579,0.063804166
0.970614577,0.514452347,0.643280872
0.084297811,0.906111319,0.590434434
0.163302217,0.226212634,0.074753132
0.20207705,0.197835179,0.217985026
0.413236066,0.640190511,0.520645448
0.807941459,0.463910044,0.996796367
0.208875906,0.182468954,0.876826443
0.743474185,0.840439019,0.143677671
0.266758693,0.103719005,0.920995789
0.030607849,0.973154392,0.814015083
0.237753714,0.374336732,0.44138091
0.83212984,0.547216604,0.371699647
0.302411666,0.58054099,0.303141758
0.949214871,0.783756542,0.457582838
0.32776739,0.133095384,0.351183944
0.673471065,0.432009028,0.761641303
0.120361022,0.494421101,0.954699616
0.049975694,0.857405242,0.133753572
0.314326245,0.599784238,0.698931698
0.10972582,0.994733888,0.603365409
0.246939825,0.79385323,0.576049373
0.420949269,0.55824091,0.684730016
0.085903635,0.678776288,0.759533545
0.221902971,0.606683148,0.183625782
0.934582003,0.263106456,0.195228637
0.276550653,0.563455012,0.477130256
0.939865401,0.683543172,0.98694541
0.090714119,0.155392084,0.183225576
0.546170002,0.226065658,0.757518873
0.635725491,0.259656977,0.803254796
0.768135532,0.329687113,0.784471673
0.67201594,0.69314804,0.216292884
0.731388623,0.632648812,0.298465113
0.030104188,0.531279626,0.68605789
0.404907965,0.617707054,0.646985633
0.38264213,0.522044947,0.606066308
0.850778503,0.771072538,0.780038925
0.333386945,0.62981651,0.838539888
0.144526261,0.90723358,0.672092924
0.803193149,0.545698586,0.740250704
0.144775421,0.073065649,0.81327723
0.800150626,0.077947117,0.498989131
0.805355858,0.282274855,0.111520406
0.432276345,0.614069782,0.04562788
0.119740317,0.122788948,0.68461108
0.188596378,0.666133286,0.753645204
0.143050522,0.393902986,0.609633117
0.754401856,0.84172035,0.387786256
0.97549575,0.97044364,0.621482928
0.735098473,0.96738673,0.239086021
0.825090649,0.153687653,0.520111132
0.720848546,0.211391499,0.513430909
0.572411742,0.56579983,0.313933048
0.766584951,0.704264072,0.103088529
0.933914925,0.70795791,0.378434849
0.232266382,0.864968616,0.664769493
0.180748316,0.792633394,0.983236654
0.320744207,0.073646797,0.915148464
0.058415284,0.478244018,0.171213592
0.613274471,0.423949271,0.899198164
0.83818587,0.622457639,0.496368891
0.547369341,0.5160996,0.318684775
0.489079348,0.504840066,0.174865371
0.133510366,0.873938618,0.95342181
0.355477984,0.610358907,0.32242224
0.32167355,0.132961802,0.381440702
0.660257981,0.59386003,0.570704079
0.519799486,0.220676336,0.85452965
0.097125446,0.037837774,0.581579153
0.801485909,0.741547848,0.06310355
0.413142247,0.303102946,0.30224609
0.07746447,0.555846316,0.750106689
0.593760097,0.256631753,0.179035377
0.819000445,0.86578977,0.797167379
0.644052663,0.148335877,0.377067692
0.02037784,0.835405997,0.192438566
0.248506314,0.951214215,0.492923258
0.387445752,0.862693509,0.11983047
0.411437123,0.512831692,0.516380652
0.481199694,0.970780992,0.565521666
0.967908564,0.168755985,0.447517833
0.280607962,0.670538365,0.548021587
0.402044213,0.121532495,0.136718448
0.83696286,0.739549154,0.495218329
0.652215616,0.664877651,0.838254198
0.846246408,0.411635906,0.96601722
0.359827733,0.627436225,0.666295882
0.522326573,0.496565812,0.404066784
0.614406114,0.160072022,0.269439305
0.221722954,0.558736063,0.890699947
0.561777087,0.782270647,0.792345194
0.385698506,0.295964873,0.697613223
0.101162968,0.27600378,0.239798872
0.075127486,0.14163579,0.728168103
0.982440842,0.583109151,0.395072917
0.69628067,0.26642599,0.283866713
0.073093261,0.914332418,0.925554624
0.01642578,0.927883834,0.248712685
0.11636724,0.556067816,0.248282085
0.487453151,0.058684617,0.294624957
0.813726551,0.860917181,0.678149491
0.492581545,0.501803813,0.193032429
0.642766795,0.422421802,0.950458987
0.662519175,0.950448071,0.157126432
0.548815699,0.127905654,0.23337741
0.159163516,0.345059322,0.586704542
0.40029112,0.928563882,0.954476476
0.587201396,0.44357769,0.797926632
0.026827624,0.206281621,0.680220462
0.884217164,0.266754666,0.652197582
0.475019281,0.447732834,0.14299077
0.193076354,0.317892868,0.976621856
0.515208981,0.512331237,0.422351595
0.336671812,0.870606258,0.364554196
0.438596677,0.333836845,0.801388791
0.194389409,0.929245672,0.589545825
0.205377525,0.079938747,0.187363423
0.426814991,0.823224852,0.707435026
0.262972959,0.517545732,0.19872636
0.720354434,0.847649202,0.709246477
0.355306192,0.303943053,0.835051265
0.949975427,0.106134411,0.204516092
0.106374426,0.874129261,0.971439223
0.14517828,0.371147898,0.695954142
0.739099753,0.331888701,0.890413781
0.627551297,0.9001009,0.177324543
0.047488938,0.224289129,0.220822902
0.912785118,0.79570392,0.838242455
0.49717293,0.703176431,0.754883589
0.090976094,0.502530756,0.657999889
0.194042479,0.284561692,0.14516165
0.409960603,0.285564554,0.097001811
0.310205693,0.003434942,0.173823303
0.233583043,0.118822434,0.816665508
0.513843271,0.539640669,0.864405207
0.40692643,0.436463418,0.369798489
0.126544008,0.159580886,0.933714485
0.286441339,0.872974675,0.04454198
0.964565622,0.910027403,0.897861798
0.203121728,0.899714292,0.085173744
0.445639841,0.360999337,0.016645134
0.307793993,0.117750087,0.562967352
0.281587526,0.174834541,0.274581395
0.119660773,0.099740072,0.484016211
0.511583585,0.54938211,0.339766424
0.188451695,0.073022292,0.006123739
0.346586219,0.49567248,0.234826476
0.225242461,0.587641331,0.725805817
0.075409614,0.208266437,0.826377328
0.204076002,0.04779427,0.040457828
0.050861901,0.763043363,0.256073344
0.972150662,0.792678045,0.909955027
0.506115605,0.031837525,0.903340416
0.804010111,0.955685921,0.175896939
0.092926989,0.062223289,0.821308211
0.363715968,0.726101463,0.79168981
0.787381816,0.338102828,0.005758252
0.484331698,0.495765424,0.891180155
0.241982415,0.277129738,0.561477087
0.484161267,0.286665154,0.03556541
0.211600046,0.304292614,0.395789513
0.372524976,0.202611617,0.166595985
0.265124748,0.017345601,0.037686194
0.701786714,0.420334817,0.714000487
0.034048463,0.651290563,0.050634716
0.802331316,0.558297752,0.291679579
0.344037056,0.467477672,0.358504649
0.639463582,0.425507582,0.954817361
0.602885138,0.374751922,0.374607167
0.993637385,0.955212301,0.16550343
0.955669008,0.745723993,0.889786752
0.365337348,0.19682491,0.506234866
0.7457291,0.51831627,0.978818087
0.92625289,0.631584997,0.443128894
0.786168714,0.264993195,0.263960382
0.316681591,0.61079768,0.089349247
0.858371024,0.834969763,0.174819213
0.525393487,0.243792169,0.955241627
0.720242053,0.143419208,0.402799979
0.749292304,0.217311863,0.799688479
0.246462289,0.958943724,0.142358796
0.528138907,0.590786754,0.948225902
0.454799161,0.510565688,0.295103038
0.953069085,0.021533141,0.116332423
0.188120341,0.690529852,0.623168048
0.318359731,0.758493036,0.91843922
0.726077549,0.902046947,0.327147423
0.386752461,0.338547997,0.651921958
0.707225745,0.584329479,0.37703596
0.060288975,0.494620757,0.075518168
0.237652566,0.962903992,0.824801251
0.535945075,0.958493881,0.754701994
0.064404553,0.235151293,0.39448081
0.979476468,0.347342952,0.99138709
0.189166661,0.798328607,0.697048046
0.180560013,0.342106481,0.174983336
0.28337819,0.962425666,0.955845318
0.593924663,0.66654314,0.570147835
0.114749593,0.903677338,0.957687266
0.151925114,0.716482401,0.637800283
0.235669594,0.580788646,0.528893286
0.778117587,0.250968708,0.684104646
0.747849981,0.214563448,0.02984775
0.720813243,0.066656345,0.737883757
0.626964368,0.953760147,0.459809098
0.469018562,0.720549931,0.518332767
0.821461664,0.507041049,0.514946331
0.384160041,0.953174654,0.443907617
0.233220889,0.511502601,0.369065624
0.434955659,0.150497671,0.76574469
0.8958592,0.481635774,0.942994014
0.979260732,0.445148596,0.323549157
0.334878174,0.403760723,0.385124629
0.460214884,0.33828675,0.592783427
0.518346254,0.909618383,0.6009723
0.338370801,0.317375424,0.337490389
0.636668843,0.96449714,0.481975016
0.025064304,0.923419227,0.119203699
0.048318449,0.53489191,0.76133984
0.491930784,0.016568024,0.112619998
0.17743988,0.903969674,0.481918653
0.981634317,0.513179093,0.316557669
0.02560158,0.930375993,0.563316641
0.017997936,0.890571459,0.4580491
0.96277821,0.443025655,0.083145161
0.419576578,0.112060055,0.531294103
0.494454706,0.954168063,0.047922651
0.800000835,0.673332473,0.064026809
0.870702162,0.510095577,0.863030178
0.851121904,0.916229763,0.781903614
0.159726434,0.082081261,0.19548317
0.362450326,0.788524336,0.826141196
0.270846003,0.098989879,0.574494436
0.406889772,0.838173717,0.436699777
0.035503139,0.853255007,0.642800341
0.083155666,0.952721164,0.708076056
0.847697478,0.56519776,0.894660498
0.037841045,0.984301359,0.365909559
0.177721428,0.418447797,0.157612683
0.429370039,0.508723836,0.767724035
0.071851749,0.216253471,0.819600825
0.578083664,0.212360494,0.627380646
0.380746754,0.954034946,0.11483721
0.211278539,0.560080096,0.685450354
0.770737322,0.813954563,0.79322567
0.318759117,0.06983,0.664250133
0.059856737,0.06677071,0.26622355
0.968241527,0.953861837,0.311894576
0.504226431,0.06220937,0.289105117
0.256406511,0.249902695,0.348997399
0.674888311,0.860374,0.605942473
0.246067727,0.048342783,0.343006159
0.830735494,0.783740344,0.677522751
0.99887952,0.341758368,0.229922444
0.731699282,0.940258743,0.10886285
0.541383735,0.910293019,0.381124662
0.750868727,0.848911762,0.265718422
0.425671591,0.626146239,0.622684142
0.214013066,0.091251581,0.864057899
0.545601885,0.310480085,0.046543211
0.517244356,0.115819763,0.248517895
0.872633121,0.50117097,0.12009094
0.255496857,0.472006579,0.796438566
0.468962035,0.26918685,0.131735945
0.742353904,0.528441793,0.565922864
0.85366711,0.2676075,0.914062206
0.447698287,0.149534939,0.670156644
0.445589481,0.6431063,0.225580433
0.357872915,0.788565726,0.814611643
0.580287142,0.506307991,0.527031912
0.500500265,0.365277722,0.04677688
0.141881394,0.926001483,0.86894952
0.221717771,0.366035312,0.125658418
0.600339909,0.684670388,0.826168927
0.307898392,0.20966968,0.752966481
0.959700077,0.899536378,0.491452813
0.230433688,0.613941888,0.415683508
0.495527265,0.634504412,0.370199526
0.506575734,0.986633413,0.84941237
0.761764339,0.963921599,0.828872018
0.348601654,0.087553061,0.791174897
0.104944192,0.102179531,0.905877926
0.375324247,0.246387607,0.301420991
0.875454272,0.118686164,0.988824311
0.17698346,0.393647261,0.159870783
0.917659703,0.583236755,0.630992101
0.285048123,0.469986869,0.37272766
0.011480822,0.597073945,0.904116141
0.313259229,0.510005423,0.894823085
0.795838324,0.911141124,0.928152818
0.164974957,0.359128099,0.60236716
0.983429159,0.003861397,0.083218217
0.242529745,0.562773547,0.664077813
0.765913188,0.194009625,0.286229668
0.070781352,0.102661854,0.249285398
0.511452125,0.418997177,0.284014634
0.439472205,0.891870259,0.82363463
0.580892549,0.466753672,0.140496383
0.615517449,0.738921356,0.461546367
0.824697707,0.698589656,0.941554339
0.46610398,0.902958283,0.688012984
0.523365471,0.691567649,0.547171487
0.545929937,0.714552317,0.041938604
0.32756288,0.701840615,0.927731162
0.761874356,0.276228477,0.886668834
0.979442228,0.298771691,0.591610911
0.374731022,0.860510449,0.321638525
0.8074911,0.097011746,0.930723417
0.453431338,0.206882669,0.431005917
0.910029309,0.03223923,0.493150704
0.2897017,0.170401689,0.739971322
0.024666309,0.777054677,0.769170439
0.05624039,0.089983601,0.64642539
0.149696037,0.539762835,0.702098143
0.676100319,0.000479419,0.639516981
0.967411256,0.893394783,0.958913773
0.158669993,0.527294695,0.347808355
0.181672491,0.532695548,0.988953142
0.053598946,0.497693858,0.118111495
0.132496571,0.985450674,0.753931807
0.87586561,0.732063591,0.884137731
0.419609591,0.012639269,0.645369169
0.102047486,0.008854525,0.658344391
0.123913855,0.210708056,0.499395878
0.159685659,0.968477268,0.586268979
0.834269522,0.369645239,0.245380904
0.637297781,0.768550638,0.48870442
0.778386961,0.376787501,0.03205647
0.67713794,0.632054697,0.000672655
0.860752189,0.140567399,0.326727043
0.220600271,0.039797462,0.871431738
0.373493897,0.910009286,0.043303147
0.269453424,0.571833998,0.346704152
0.919787568,0.373470212,0.873193468
0.776952353,0.362003265,0.172733797
0.575862615,0.900415576,0.599884308
0.616882997,0.8845633,0.13177173
0.366855251,0.729104299,0.950578149
0.668847681,0.753527405,0.660832331
0.264243456,0.308498641,0.912106098
0.542527865,0.880831766,0.535728949
0.460634645,0.013712653,0.152280892
0.828209711,0.921304334,0.049084108
0.874065663,0.473229025,0.545232499
0.731220357,0.126627169,0.996060848
0.943461868,0.033256065,0.992038738
0.211193798,0.522810965,0.907780013
0.767158364,0.967162642,0.888059793
0.689583275,0.841550923,0.377520241
0.147705388,0.959063909,0.031580823
0.654707489,0.752912445,0.305046055
0.628378168,0.075829853,0.719349441
0.886468112,0.185491156,0.719710557
0.749470564,0.448017109,0.897349202
0.492693185,0.884164268,0.633427171
0.44686733,0.7934547,0.773846432
0.630683325,0.776592453,0.708944434
0.814848973,0.845977344,0.643222219
0.016975156,0.729138989,0.058020996
0.511298247,0.07057554,0.733405098
0.784480806,0.738595698,0.373688534
0.530814843,0.44312087,0.691107945
0.944091316,0.957332961,0.639542386
0.771047017,0.811962024,0.977774991
0.87020688,0.755962661,0.925248114
0.458700988,0.334773333,0.095844508
0.533831151,0.912609619,0.027149015
0.524625598,0.652693277,0.497418106
0.805674264,0.723021478,0.80073208
0.113696528,0.650247192,0.344709776
0.826900827,0.593783006,0.550936366
0.940655423,0.740273144,0.630218018
0.141520315,0.632429144,0.838610834
0.39673597,0.503240828,0.590691376
0.037602886,0.040815285,0.620639119
0.716116291,0.506754028,0.253596249
0.619782298,7.76626E-05,0.676065593
0.496033457,0.98742451,0.984019601
0.649314148,0.147470427,0.489967654
0.691622038,0.161245902,0.647851723
0.936526892,0.590442875,0.939555093
0.604802621,0.838823011,0.251219058
0.071190531,0.67647138,0.597666328
0.019410183,0.495778133,0.44031324
0.726411874,0.262687025,0.086312948
0.830480537,0.135077568,0.079159787
0.950841893,0.769723105,0.47873095
0.611417896,0.84114966,0.395349789
0.181347141,0.287776713,0.883076078
0.200712222,0.873964629,0.571505353
0.65202277,0.084117342,0.250545655
0.342561024,0.202306216,0.079726003
0.584301932,0.122693153,0.129858724
0.591176502,0.051275102,0.876431468
0.165946295,0.474087103,0.856717365
0.839385948,0.763414504,0.961778868
0.528260865,0.865453126,0.680673095
0.076050301,0.71693581,0.15210816
0.780443967,0.33197709,0.73242445
0.363327494,0.164977224,0.185099911
0.687912867,0.396104619,0.249748592
0.88391393,0.554502064,0.089705278
0.33788714,0.686247878,0.252660937
0.19163616,0.441496434,0.513458703
0.478908993,0.15156254,0.818829745
0.918896553,0.899169945,0.780767514
0.782967436,0.327693122,0.755050753
0.32558364,0.492239506,0.12339517
0.047070459,0.693552034,0.508452959
0.109465204,0.821862145,0.632136838
0.826253828,0.610682399,0.632137891
0.162364171,0.5709024,0.027035072
0.479768494,0.607203769,0.077566143
0.897031412,0.795684932,0.974415558
0.801002173,0.551618649,0.876984199
0.123312093,0.411438516,0.901446561
0.594677287,0.32833558,0.914104796
0.741635419,0.14325589,0.115905361
0.08993896,0.243272135,0.742401503
0.116491314,0.690400792,0.020805328
0.180855336,0.599454312,0.340688071
0.087037755,0.006886539,0.952560809
0.300603611,0.113658264,0.797478049
0.832235841,0.05963984,0.771465426
0.095194013,0.247650851,0.801344581
0.300632189,0.150924198,0.086360387
0.874625368,0.700861247,0.713919826
0.863383564,0.57922769,0.870911826
0.11913471,0.767551415,0.50918181
0.556749667,0.691513618,0.782003681
0.197331319,0.827247513,0.779623914
0.987023902,0.734883462,0.623629089
0.420615082,0.614082171,0.741891207
0.312249031,0.014913149,0.070878868
0.974642188,0.983123549,0.086275706
0.783360774,0.814835668,0.67625897
0.540478752,0.254940938,0.449867885
0.048763621,0.290768213,0.625363258
0.697965851,0.033892112,0.612844092
0.724879255,0.708375839,0.525641059
0.747562377,0.173208535,0.263779612
0.867179342,0.213616814,0.754428508
0.02226162,0.326141353,0.081963664
0.627227744,0.116451144,0.409565408
0.543129433,0.092850944,0.54072763
0.281594806,0.709633472,0.876793176
0.35647452,0.063874296,0.965050871
0.045168661,0.497624359,0.186815072
0.524949861,0.944601324,0.332059785
0.126474627,0.02739514,0.246752374
0.208604998,0.568408651,0.772918262
0.125784169,0.514833609,0.514478954
0.154512957,0.373291441,0.993402025
0.233618131,0.572616698,0.016411005
0.999890963,0.570275565,0.216853317
0.486828361,0.379924401,0.696213866
0.075314427,0.667395497,0.863855433
0.86294927,0.812782874,0.997533964
0.031445186,0.249022328,0.973324576
0.326573891,0.118171329,0.965763005
0.332020059,0.604459411,0.538268842
0.706622108,0.694323961,0.209014536
0.932949763,0.08165582,0.356510191
0.75591714,0.880443277,0.240181713
0.227219665,0.515538046,0.063202431
0.069200681,0.150851636,0.361221939
0.902427408,0.646942656,0.504832272
0.262382978,0.180972368,0.403132445
0.032506623,0.656194,0.257345113
0.959652463,0.776117592,0.653289283
0.778669537,0.171816058,0.383820737
0.64856927,0.78342696,0.966231461
0.638608998,0.323023815,0.667259556
0.120265759,0.176019011,0.416173717
0.275065523,0.921190579,0.324061946
0.490137925,0.337844445,0.135339916
0.724097632,0.992269402,0.410123181
0.296958503,0.142356399,0.479483213
0.092381103,0.57773093,0.290898447
0.89183933,0.312149005,0.295126666
0.669251799,0.071453982,0.955861716
0.938378225,0.324238979,0.455589077
0.762236627,0.048617283,0.120655973
0.886194063,0.842136906,0.886167779
0.420448588,0.826040453,0.209811195
0.496120113,0.140244984,0.010275807
0.291770734,0.089337397,0.940136172
0.823744617,0.442752205,0.79506829
0.86635257,0.308919721,0.929313191
0.124187371,0.515507145,0.3952627
0.515643261,0.514493405,0.592216269
0.435577703,0.202265522,0.749380396
0.851215206,0.581140662,0.909262689
0.97276388,0.305964393,0.119556192
0.833642983,0.44267292,0.574065373
0.908658096,0.985442117,0.032891222
0.120536868,0.898167052,0.754847347
0.328480689,0.206500348,0.883388839
0.584233318,0.127164736,0.934356548
0.520904286,0.085542266,0.469645136
0.118804512,0.276694477,0.255706174
0.669152609,0.480169645,0.350044668
0.784599588,0.030844507,0.672270616
0.97462202,0.984822685,0.801402402
0.09061512,0.20599842,0.288943446
0.500630874,0.668012143,0.326107661
0.243946646,0.885842685,0.356343047
0.704519934,0.112411764,0.840776533
0.064722176,0.148130565,0.724221405
0.069998846,0.826917642,0.285248236
0.463142105,0.129132053,0.071693121
0.065672617,0.491471158,0.143248345
0.345719852,0.550477283,0.417188691
0.523811405,0.923188335,0.366706095
0.57113315,0.798590349,0.465646081
0.828359309,0.886833757,0.470994632
0.649200809,0.422037446,0.338970547
0.991959241,0.065292471,0.545926733
0.402707667,0.892315167,0.157737898
0.583371677,0.915247643,0.510882162
0.286752954,0.119216908,0.422178531
0.000574842,0.932477989,0.322762631
0.521100182,0.182516345,0.799539149
0.217552185,0.32460329,0.001286413
0.129263953,0.832799191,0.746800354
0.859133069,0.682500693,0.035727655
0.081296267,0.499283963,0.851895509
0.709384988,0.14985208,0.186521894
0.247922963,0.253358356,0.872326832
0.203028631,0.068652472,0.553487984
0.292370767,0.925595124,0.401383438
0.721522222,0.300176493,0.452098604
0.622021123,0.308001842,0.51395483
0.601298816,0.268135963,0.584441602
0.207949629,0.407128704,0.699430418
0.152216375,0.92660356,0.07049208
0.997031345,0.789488864,0.194662825
0.14170589,0.513011324,0.250918681
0.979853004,0.246273698,0.732371057
0.441466086,0.428787477,0.680856737
0.513859379,0.668402062,0.50429415
0.32103853,0.59436219,0.481843963
0.466004374,0.019901121,0.225087815
0.546731744,0.359957666,0.776590304
0.088133727,0.021028123,0.579299556
0.172044151,0.237278834,0.567876411
0.576325796,0.86256513,0.487980769
0.459957415,0.004052068,0.41344615
0.72021758,0.906208873,0.049850195
0.835505139,0.006504875,0.716129577
0.974913096,0.06350265,0.945758998
0.538076764,0.931252476,0.05429443
0.921879308,0.750002283,0.120075272
0.825790117,0.095295707,0.471769578
0.667512779,0.726667248,0.68041055
0.604774928,0.209313615,0.803678279
0.058678158,0.457882119,0.491090679
0.46503574,0.647148555,0.063745514
0.268569925,0.07151649,0.354414339
0.309997568,0.048651773,0.652050824
0.852057231,0.800064591,0.378993288
0.101844132,0.975250128,0.919521375
0.879950774,0.012524944,0.243977924
0.71298613,0.410784591,0.766666426
0.253953963,0.18863912,0.353408633
0.859540187,0.786140568,0.50468592
0.885165537,0.182373738,0.365436093
0.919226953,0.132590959,0.305319302
0.794222067,0.325843691,0.81503301
0.360472386,0.828503699,0.992751302
0.568328182,0.596642015,0.166689456
0.495797608,0.390533497,0.466894225
0.497383703,0.057721092,0.136501948
0.18770586,0.924785691,0.325442341
0.693138587,0.351786889,0.499636742
0.898980429,0.759285754,0.006488642
0.203362481,0.362873482,0.576750046
0.178651329,0.720602676,0.881219809
0.176525065,0.325805008,0.029694687
0.280908733,0.527522643,0.545345238
0.370750152,0.138599939,0.044930538
0.675097184,0.14761356,0.378589866 0.735023127,0.793326142,0.751658301 0.589712544,0.569527756,0.006401988 0.528971516,0.297342992,0.454367414 0.691477287,0.799565463,0.424110191 0.261622015,0.848996059,0.848455301 0.401014342,0.684428894,0.631646442 0.16646465,0.252704215,0.907185556 0.100875707,0.566947803,0.906685851 0.434813596,0.104021401,0.167032575 0.525475323,0.508926771,0.950312938 0.159164103,0.298161029,0.813651341 0.688364345,0.371765734,0.533450516 0.712069354,0.849924822,0.351626269 0.322500041,0.141195673,0.954104724 0.146595062,0.93264431,0.190821916 0.71991816,0.904994255,0.945180752 0.025505056,0.369278227,0.225567491 0.450884297,0.163076541,0.835655337 0.666130325,0.52707414,0.82767262 0.747584223,0.050899988,0.253442115 0.525074918,0.930938393,0.27765909 0.940041036,0.129750051,0.169526547 0.976328221,0.406056506,0.156213454 0.413206486,0.217043404,0.425652131 0.108491931,0.963192763,0.498477601 0.958709036,0.585116585,0.507265441 0.048428848,0.713725414,0.728970388 0.587791364,0.896305822,0.279922122 0.086686919,0.740059232,0.914875869 0.422027713,0.086096483,0.419750985 0.767716034,0.871663257,0.103971292 0.549835043,0.371430165,0.801009346 0.557408598,0.341725364,0.279171927 0.071240148,0.765613908,0.173767574 0.713230298,0.779720404,0.253165546 0.572322236,0.663937254,0.045664107 0.428432377,0.161070991,0.891029544 0.818292324,0.971164957,0.271696059 0.269446053,0.962766931,0.051526478 0.515277086,0.74833971,0.351491465 0.796419252,0.556278732,0.361314209 0.801556269,0.987424165,0.117197305 0.782772261,0.05866778,0.982749779 0.21806961,0.609256862,0.798461899 0.699205142,0.038761394,0.271238908 0.534754129,0.27476979,0.163606178 0.003518131,0.437675965,0.388250875 0.619198012,0.090710318,0.566559914 0.178576562,0.885793567,0.022734794 0.578539981,0.281190469,0.008260142 0.177713211,0.393560621,0.052236228 0.846158221,0.357695748,0.875170299 0.127568308,0.638314871,0.946658268 0.767138325,0.621405933,0.564104167 0.798451074,0.40443786,0.599831193 0.616223487,0.665752297,0.971012789 0.267441096,0.388352985,0.430687937 0.923867358,0.654582643,0.464037122 0.492137227,0.706258913,0.378247168 0.536642887,0.555595419,0.104998227 0.992969717,0.688862613,0.896407883 0.454975157,0.851727744,0.144297419 0.317976254,0.620102227,0.416793119 0.440632343,0.535615753,0.913356284 0.791010869,0.962116708,0.627040144 0.926826073,0.382456611,0.465806072 0.568904993,0.514101455,0.724489494 0.895517901,0.391005356,0.347893715 0.289875186,0.830981849,0.92116788 0.95185048,0.996829271,0.970163256 0.079055453,0.999386589,0.528208258 0.926932102,0.147799896,0.417138668 0.244651465,0.832349744,0.221104338 0.179560876,0.149581841,0.97827318 0.869778794,0.116050413,0.930858226 0.681347988,0.700100934,0.003010153 0.688804753,0.087819887,0.217246073 0.054919581,0.536206628,0.011960678 0.640496257,0.193125181,0.654595034 0.879605152,0.152112809,0.50946439 0.336877078,0.352944356,0.032651908 0.578287892,0.410740871,0.424981809 0.655610763,0.370342392,0.021605292 0.184746216,0.078627828,0.615262076 0.335250916,0.744164606,0.7834867 0.086006226,0.796624922,0.100735176 0.278674471,0.483655368,0.117132599 0.994681992,0.915583798,0.682419845 0.077364925,0.488968443,0.762836001 0.460939585,0.226843633,0.262301782 0.998409563,0.464398025,0.918229672 0.221191504,0.605272697,0.236818579 0.305532514,0.107986913,0.285771959 0.429457882,0.021852143,0.417044654 0.4398254,0.904405397,0.587007492 0.472361927,0.615492219,0.311474339 0.4847793,0.830454499,0.692963217 0.525054945,0.760690911,0.176296268 
0.117729529,0.425190139,0.763022992 0.435815483,0.901034288,0.68353143 0.310722347,0.711502874,0.050054312 0.692557474,0.756865138,0.823601442 0.748561397,0.302607431,0.404056776 0.370478834,0.749199053,0.220199408 0.686929375,0.172808164,0.22046762 0.037511035,0.299597568,0.543432459 0.513900441,0.892613907,0.740051648 0.389543522,0.806516669,0.891439062 0.053758187,0.367104684,0.356060944 0.450039969,0.18662041,0.022226949 0.481122219,0.376490604,0.455652341 0.97009151,0.252002631,0.121449418 0.322174741,0.359645571,0.785282495 0.904310053,0.730301338,0.994210513 0.450101531,0.92830086,0.086584177 0.456948101,0.90305291,0.216589856 0.430158828,0.574385535,0.812451667 0.958800913,0.229029132,0.004822368 0.641856333,0.757170989,0.097059421 0.442276634,0.278413528,0.877655305 0.036927777,0.425286999,0.92305997 0.996003678,0.902465847,0.265142606 0.306340939,0.260744837,0.528606261 0.098272048,0.162476078,0.354882218 0.658054373,0.890822429,0.9000076 0.087284546,0.695167739,0.026293663 0.667310433,0.902843368,0.248946207 0.451887926,0.995052067,0.181712955 0.721298527,0.006611482,0.727102995 0.180137144,0.38951174,0.678305837 0.420761331,0.419860176,0.010656383 0.788488075,0.180473318,0.708019695 0.662265015,0.757397169,0.348937464 0.22732873,0.663301685,0.39923678 0.716892599,0.552981067,0.089832495 0.177215605,0.465175647,0.887666589 0.4010009,0.597937203,0.09497585 0.259096154,0.591668012,0.145793124 0.7855796,0.541345166,0.383678057 0.201753532,0.613603748,0.879697044 0.825321851,0.452349759,0.192581377 0.171266337,0.782789247,0.848185787 0.989170718,0.575391852,0.643933271 0.224216552,0.128615538,0.261286445 0.355440689,0.629457955,0.902600249 0.72784327,0.282293864,0.605943451 0.210467186,0.748327916,0.269725684 0.703080367,0.411052005,0.029450281 0.611720264,0.653108765,0.115754888 0.625714261,0.426502244,0.253625516 0.080879639,0.231561531,0.000776511 0.580765049,0.214103901,0.655333535 0.411287343,0.079075761,0.794277642 0.710073858,0.646863988,0.71074505 0.335569397,0.900645276,0.683474835 0.967747154,0.579773932,0.534024604 0.766717973,0.582199309,0.533102234 0.383468743,0.426721157,0.027251934 0.490400205,0.117276739,0.92366954 0.526437331,0.70107653,0.671085752 0.889392656,0.764668251,0.594183178 0.638642815,0.578480214,0.97861599 0.87668719,0.16462794,0.216101311 0.42672965,0.578827138,0.263549989 0.811170473,0.093966938,0.225951223 0.099089206,0.263591386,0.882393744 0.38399777,0.327948679,0.494541301 0.183583616,0.008025085,0.345896483 0.584960878,0.5469813,0.968535684 0.361345034,0.854037953,0.527327995 0.984905322,0.997741532,0.876521812 0.074758264,0.39928899,0.847634791 0.78330323,0.392062416,0.024783838 0.467728166,0.712167022,0.024533141 0.587280899,0.398576247,0.573112113 0.964829971,0.025982741,0.969019811 0.9497508,0.659436309,0.204878206 0.657359903,0.347373583,0.193308068 0.186434557,0.521059421,0.070439079 0.870109867,0.062761012,0.710077454 0.217962469,0.288311322,0.190708548 0.955539243,0.022311215,0.71590241 0.625665814,0.76136552,0.988044588 0.597252746,0.710748192,0.314068902 0.516054372,0.327282916,0.54307302 0.271367679,0.738701611,0.304169987 0.933804469,0.580994455,0.210076964 0.127919156,0.599299518,0.585857959 0.676065679,0.558987708,0.958866142 0.316141871,0.460898294,0.141769324 0.471335921,0.089770919,0.358606362 0.623875078,0.120949677,0.031070096 0.279561054,0.756633154,0.523821594 0.367638452,0.041473293,0.205100917 0.194748444,0.554149226,0.891998106 0.41189445,0.060780804,0.739908884 0.463521747,0.175865472,0.535693142 
0.945971006,0.966028962,0.856940254 0.183047078,0.337562524,0.181769865 0.594627884,0.198176957,0.150059332 0.843270928,0.530723522,0.928016742 0.223830394,0.396224789,0.671524797 0.660767374,0.651553136,0.816830801 0.435601302,0.067504838,0.286367496 0.118647364,0.597413606,0.736034901 0.130876628,0.718657894,0.132667782 0.512036173,0.807939768,0.573980493 0.651567779,0.146952948,0.239972065 0.288725439,0.224872447,0.043641949 0.13707238,0.381109232,0.022199238 0.754226814,0.167426623,0.961971718 0.951586322,0.053557001,0.223348551 0.618926676,0.885546611,0.123622882 0.790423531,0.278666859,0.501354777 0.038612914,0.868235102,0.288826116 0.488859959,0.478054033,0.700027159 0.862804894,0.011591559,0.750381881 0.994070885,0.954113216,0.968886216 0.452966461,0.985185262,0.402556559 0.163204173,0.188199516,0.352205827 0.15850908,0.505182571,0.583169832 0.135779826,0.409087768,0.238200196 0.643385144,0.86154063,0.14538336 0.50233965,0.544662955,0.992305772 0.208435385,0.031950832,0.061424365 0.866478253,0.391456921,0.511463088 0.4937369,0.216683838,0.68183869 0.635277683,0.264963125,0.828569956 0.57036797,0.199089208,0.947261901 0.622849636,0.554898686,0.300444481 0.148150252,0.793195105,0.95852649 0.118643776,0.375521816,0.127817104 0.758672306,0.928120507,0.147843091 0.988902496,0.305378105,0.027460368 0.101391422,0.187140233,0.666743757 0.742622491,0.913697728,0.538923383 0.093250323,0.083342814,0.253041857 0.769590781,0.9991462,0.438612548 0.729371479,0.304770086,0.732577389 0.309854988,0.231328158,0.907015378 0.357043464,0.291981607,0.210471606 0.310867898,0.310831132,0.021305479 0.099716251,0.743995352,0.892636908 0.41508308,0.015438634,0.257251295 0.53442204,0.552940574,0.911759333 0.066875817,0.519643391,0.683239895 0.960228558,0.637860456,0.564663828 0.166667197,0.282113595,0.909573438 0.400063729,0.629753113,0.314970443 0.708945745,0.167807931,0.868195558 0.371947838,0.749772529,0.913374887 0.364252703,0.719347038,0.968988396 0.565947998,0.47317603,0.848594323 0.963005103,0.86347636,0.213376655 0.010974265,0.115488107,0.918644935 0.579274525,0.748172658,0.195517101 0.054742886,0.089561473,0.35514667 0.352904397,0.177453817,0.485671073 0.86540568,0.455589491,0.325840682 0.826269285,0.742045207,0.836774969 0.075485913,0.446267336,0.134777488 0.123130773,0.10695964,0.319080831 0.353341713,0.250920125,0.94582804 0.934151416,0.641155987,0.332526901 0.183094596,0.975798892,0.512697523 0.931523642,0.525759501,0.067066893 0.171012136,0.581683693,0.603794825 0.489763176,0.561915728,0.886623062 0.427818728,0.227974683,0.462025302 0.059325421,0.726266371,0.692412984 0.770271664,0.743519141,0.117959307 0.107862896,0.552555172,0.592259145 0.445007388,0.046308389,0.69499137 0.056486616,0.370154602,0.498507879 0.347798483,0.541312622,0.44955603 0.01637411,0.777726654,0.346640124 0.918778501,0.247274577,0.931656904 0.468325578,0.552066653,0.233304727 0.558842714,0.30110019,0.237582706 0.520406065,0.396600845,0.627623904 0.42717615,0.55961213,0.312743984 0.043819454,0.060632818,0.168267929 0.151405047,0.276450913,0.385322692 0.864539894,0.203199707,0.865006307 0.866179018,0.649792248,0.369625823 0.566181508,0.155001949,0.751738414 0.022193506,0.262524266,0.378478591 0.835870282,0.436869514,0.439857307 0.54507765,0.825712044,0.425012638 0.180124959,0.284189803,0.059324375 0.91303517,0.659662103,0.021990781 0.068890512,0.857174742,0.245915138 0.146299591,0.2282098,0.992357695 0.279495766,0.087424865,0.532747766 0.095737503,0.107245868,0.190786801 0.276947216,0.537071712,0.654100689 
mlpack-2.2.5/src/mlpack/tests/data/sensing_A.csv
[... truncated: sensing_A.csv, a dense numeric matrix stored as rows of comma-separated double-precision values in scientific notation (e.g. 5.996066833600036637e-03) ...]
13534331824e-02,-5.145220511362446139e-03,-2.648836089921957800e-03,7.485108352397303912e-03,1.806879516306711527e-03,6.241852399353295215e-03,2.449455268416330517e-03,3.798737915582918890e-03,-4.852863110644108253e-03,5.776947975903345056e-04,-2.192951067196800483e-03,-2.430878637335402693e-04,-7.850070829330757722e-04,-3.525776441530836195e-03,2.114678274394841094e-03,4.675869321300007465e-03,4.262753102707839831e-03,2.364355414447021683e-03,6.872175539795699790e-03,3.051640989442821507e-04,3.295518279902607487e-03,9.666843459543687659e-04,2.606090097573420840e-03,1.086172151880686679e-03,8.248552804246064851e-03,-2.040891064014332002e-03,-7.897469738031029363e-03,-1.434582780717987872e-03,2.505266496877715538e-03,-4.642622598090346241e-03,6.401056545946215773e-03,9.972114070012735865e-03,5.055188324220616890e-03,-2.078949008336011210e-03,1.705769142833032503e-03,1.428610590328164903e-03,-1.549668204060668459e-03,-2.485801156075614828e-03,4.404046147204953524e-04,3.043329210064651871e-03,2.679824612742054608e-04,7.123118379503165336e-03,1.067577669959510721e-03,-2.035813215294507948e-03,2.509495782706510757e-03,-3.567880344048680874e-03,-9.295557808088395543e-04,-9.302386909434204321e-03,-4.767188241903248629e-03,5.388184620023820336e-03,1.324240875616742653e-04,3.034604670117315578e-03,4.087750176231534811e-03,-4.052027955807173572e-03,2.150971282252813355e-04,-8.251781355186664502e-03,1.252830290544671571e-02,-5.915615943379280012e-03,6.660757804153515050e-04,-3.669406040868866941e-03,-1.063570123954214524e-03,4.092608023426116265e-03,-1.419846316435686290e-02,8.244554368183017786e-04,2.080498270084484361e-03,-4.751413859854755443e-03,6.796471970541986297e-03,-3.188155000172172848e-03,4.980560520823873807e-03,-9.268479930968059194e-04,1.019350816801908042e-03,-2.181199904206629694e-03,-5.731301123900118307e-04,-6.140514866996339867e-03,-4.986413966063278289e-03,-7.376832864903730934e-03,-8.017000620081907517e-03,-8.767462013479417407e-03,-1.222341938594784065e-02,-4.803961396291336373e-03,-7.210513938159583648e-04,-6.234489116445250320e-03,-1.641172781300941519e-04,1.863616882131914187e-03,-3.156684235378637900e-03,9.030565161902315719e-04,-1.742837116317145054e-03,-3.856558208805416915e-03,1.346091723483730605e-03,-6.358828999748789576e-04,7.106754509109375166e-05,-4.254294409790008828e-03,-6.026832179805311461e-03,-4.368640878197106205e-03,4.244487837740514025e-03,-3.754028922202729511e-03,-5.732909660183535018e-03,2.724426572708240051e-03,1.463351137778469077e-03,1.652665748173973289e-03,8.557069047417270244e-03,8.080221167325042989e-03,-1.863293815294234925e-03,2.328520014637072526e-03,1.037168333295912992e-03,3.411149769663896457e-03,1.068753212752733550e-02,7.083349397854737939e-03,-1.049850819798264952e-02,3.182802002717651391e-03,-1.294397374833583633e-04,-5.790981187881310069e-03,3.664687145642275389e-03,1.140207456619006564e-05,-1.343642630207145076e-03,-2.401473979270738173e-03,-3.496639553997186404e-03,9.568765996184975999e-04,1.876924910852360850e-03,-1.151665050905602934e-03,2.902537452455914487e-03,7.202398630940573922e-04,-8.778191657833686254e-03,-4.193218632589301546e-03,1.042900742287681572e-02,-3.905295982713279275e-03,2.545073771079732743e-03,3.423767385044156910e-04,-6.924637309143365400e-03,3.678952457619876111e-04,5.919837558742903350e-03,5.243020615377047642e-04,4.711375034706867873e-03,-5.653806269592831502e-03,-9.159270223296375052e-03,1.381767166750697668e-04,3.113214835705143107e-03,1.020930534104596936e-02,1.702641841866879557e-03,1.293568509521233513e-03,3
.325051164190987504e-03,1.396062999289086372e-03,7.974950173710412121e-03,-4.287189845134090592e-03,-1.252184736129583441e-02,6.455895815223935789e-03,4.812677145021342605e-03,8.163409920855244431e-04,7.441485366794277530e-03,-1.490677310470514246e-03,3.856613762835515730e-03,-5.726283237405276408e-03,3.408482200915127422e-03,2.053982447973922917e-03,-3.575500708713280889e-03,3.865954794185900208e-03,-6.430359641727354490e-03,5.413784745428786016e-03,4.462705458511569828e-03,2.046685542054793756e-03,2.568587051488272193e-03,-4.621620667040192941e-03,-2.308497561527424885e-03,2.998887702093789350e-03,-4.799631779876998433e-03,2.926814857208169703e-03,6.948489142735717744e-03,-5.495055855064993411e-04,-2.104939291548812118e-03,1.828118283419916058e-04,-2.603747501418222511e-06,1.044978998318176963e-02,-3.429313781981594084e-04,-1.765950224685544981e-03,-9.946314552560281321e-04,-5.179968183239421105e-03,-2.512434151014222850e-03,6.775049699194456675e-03,-1.245967849455556682e-04,3.396193942199775273e-03,-4.475236866989960351e-03,1.552031143249330957e-03,-4.077364836125496952e-03,4.028202618477473053e-03,2.835625508461634380e-03,2.593187661245145281e-03,-9.840682624537607892e-05,-3.613924686920875345e-03,2.230001595102639161e-03,3.332128366722348408e-03,1.314291849211391945e-03,-5.125239978885012294e-03,3.018820593146068904e-04,6.527550455437604315e-03,-2.972038594974154777e-03,4.935018883039019727e-03,6.257044293732590134e-03,1.081599276966176658e-02,-9.692824302834475436e-03,-1.658972026365429162e-02,6.504469932042878343e-03,5.006749256377621832e-03,8.242974675276933463e-03,-3.967024390891747665e-04,-1.958517953635017638e-03,2.758306772939152652e-03,5.627484780309412009e-03,-2.601257470812810661e-03,1.168789175459157532e-04,1.180231983730351854e-03,-1.288510866155629948e-03,1.824119131993575922e-03,2.184226464157254782e-03,7.188509352208260865e-03,-7.637035054917342075e-04,6.440642495800910639e-03,-2.185959992124678324e-03,-1.220232962675078239e-03,1.761137757901349611e-03,-6.311197666980984493e-03,7.885438453797782160e-03,-2.294427436153065845e-03,-9.263355508676010194e-03,3.972039160858573115e-03,-2.612168872999556107e-03,-1.092579265095386821e-03,2.711462749813422710e-03,-1.101472105830081372e-02,5.043968969140238783e-03,5.576240176152523823e-05,-7.242960625387743004e-03,1.698557665412685734e-03,-7.498692136375321136e-03,1.008953152755119424e-02,-8.992978539275305397e-03,-3.166385979563062468e-03,-2.802502450451887714e-03,2.763450736270052933e-03,6.293731539182166254e-03,3.685533021613851719e-03,-1.953775145050575760e-03,1.016067281959077105e-02,-1.824992722627209697e-03,5.654154486948361619e-04,-3.627331285182879288e-04,9.498818232410052810e-04,1.659720564912812221e-03,-8.201964142302810984e-03,-1.043279862010030624e-02,5.899221926762200467e-03,-2.987314744560154011e-03,4.813797417884960010e-03,1.121406924512718039e-03,-4.963590536005277798e-05,-5.705223349501567162e-03,-5.511138053015252405e-03,1.561992700913623043e-03,1.520660506108080642e-03,-7.249979604322560021e-03,-3.905338582507911650e-03,2.688327966619248957e-03 
1.590281230647549429e-04,-1.099270040960662661e-03,9.724505974827877750e-03,-7.163481179599012934e-03,6.415245428308985476e-03,3.821821304569789080e-03,1.448018772254675924e-03,3.128854776448528659e-03,-4.348560514322400219e-03,3.360238609599415418e-03,2.847525346679946525e-03,1.947795146500027165e-03,9.693488796459920906e-04,6.080339216424134399e-03,1.973791235459504672e-03,-8.617773668286593178e-03,2.100735640107939586e-03,-2.888943317712681837e-03,3.984791342766630731e-03,4.434552153892426171e-03,-1.084270614997095376e-03,-2.714944929552900316e-03,-1.578892315333273524e-03,-1.330976612248887647e-03,3.480616389366275078e-03,-2.545706547491369525e-03,-6.361737207905122059e-03,-6.134967329698683136e-03,-1.520345310626710104e-03,-6.701876359891919925e-03,2.857424886445711874e-03,5.520155067689565438e-03,3.081489127258001635e-03,1.581312719771469722e-03,-1.698775561792410793e-03,2.950637004338549797e-03,-7.331455624241994644e-03,-1.257152622135886523e-03,-3.522080835264721019e-03,-6.593265109580881324e-04,-7.309631362310112061e-03,5.678394765407391784e-03,-3.615173978820985265e-04,1.238745720759653430e-03,1.214606906857077298e-03,9.871805202722907027e-03,-5.657826640700027885e-04,-4.928331163829787614e-03,-2.005438951035824324e-03,-9.175816825224566249e-04,3.285821693659641846e-03,-4.982837077384706659e-03,-6.713056491454915189e-03,3.557963273025055613e-03,-3.583303026697951064e-03,7.266118506252658947e-03,1.224854979857258204e-04,-5.722049518943853920e-03,-6.940618425155357062e-03,-2.634625090694188085e-03,1.887826849262667136e-03,4.599985517272680430e-03,2.554754275606903845e-03,4.326198562724058172e-03,1.736819570201210807e-03,1.533328750830630343e-03,-7.817802722865205625e-03,1.246985261480622461e-03,-1.101768955351963269e-03,8.556461573473024508e-04,9.008086062681310013e-03,-7.131861794476186851e-03,4.747895387474229158e-03,1.331212427011168285e-02,6.598354791305760352e-04,-3.184828456134235642e-03,3.392343324421243696e-03,1.929589490275779393e-03,-3.280169735482602652e-03,1.216081397756173937e-02,-1.505996354673322076e-03,-2.665772873884076494e-03,-9.575920758477527231e-03,2.502700017172391610e-03,1.014833259333835884e-02,-5.718996096341583645e-03,-9.166393366845002402e-03,-2.440240238617771146e-03,-8.113837173583915408e-05,-1.879399637463396869e-03,-7.853559205520305175e-03,1.037439139422827259e-03,7.894091388817379984e-03,3.564212599397930357e-03,8.347201607572744125e-04,-1.324180298024649552e-03,2.033083879070492379e-03,-4.842936543149922550e-03,1.118573759116257360e-03,-9.687475493590853165e-03,-7.410245567148275592e-03,6.678862732037128651e-03,6.488697269809238187e-03,-2.250361191062983778e-03,-1.716928354110357300e-03,1.915868399249048793e-03,5.256998548009046701e-03,-9.203611018568045102e-03,-5.504726731310899575e-03,4.689024980355789574e-03,8.824114832358323848e-03,2.188774610144461532e-03,-3.955448130951849488e-03,-2.725558992254656907e-04,1.295839636729870584e-02,-4.288607856759464425e-03,-4.386801851707957701e-03,1.298823604348063078e-03,-1.761011786975593242e-03,-1.064829814917747468e-02,1.184726770443624792e-03,7.120118192808513879e-03,-6.108939966025568429e-03,-9.521467276503839394e-03,-8.333297133638849913e-03,-1.762190625903392984e-04,-4.467479924618913446e-03,5.823090262996219804e-03,-6.365219770960775340e-03,8.743840693961182691e-04,-5.852019817474903712e-03,4.975670193990237682e-03,-8.658082633610040407e-03,-3.760710019846492946e-03,-2.056145165902817200e-03,-8.424597475454143752e-03,-5.201190791026408071e-03,3.015181580388118232e-03,-6.366879045556646764e-04,1.837717
689193204022e-03,3.632987065302753504e-03,-5.253608844655909341e-04,3.813777595013249141e-04,2.104179425783328592e-03,-2.561021646985644010e-03,-2.187831680345861569e-03,-1.057949383444432324e-03,-1.777178953292271428e-03,3.160458292082338888e-03,1.470371782199300604e-03,-1.233123956556832256e-03,2.777997962762842037e-03,-7.219129376027564465e-03,-1.144160258597699299e-02,-4.533438370213480409e-03,4.771974260103638288e-03,-1.942262381135335253e-03,2.754940493016836116e-03,9.001065431462763888e-03,3.783433661660749736e-03,1.422172631009381225e-03,2.345187465827173155e-03,2.041021226553022937e-03,-9.592312774923889354e-03,-5.375511581261357107e-03,-2.002233674918264036e-03,-6.531254065122614992e-03,-5.373673706008210479e-04,1.379353841677840101e-04,1.576983679803255255e-03,7.867651668441888457e-03,-7.333803412218286265e-03,5.552125156339121972e-03,1.439395260305361681e-03,8.260611514077243617e-04,8.919975185223619554e-03,-2.486722481539826914e-03,1.854309875068568770e-03,2.887572703160458821e-03,-4.006288902130736643e-03,-1.096327152879916431e-02,-7.980137496706301084e-03,5.983419068727441510e-03,-8.552973344739512283e-04,6.017433879151221424e-03,1.384029119438412664e-03,-4.479139402126357423e-03,-1.240521693857582573e-03,6.590409713445497322e-03,-2.721368954330875226e-03,4.660869641742794098e-03,3.519081487320694507e-03,-2.516717504862976858e-03,-6.423686199342154395e-03,5.005101701037862174e-04,-2.228954316038573354e-03,-1.949611736504081646e-03,4.058808828205181206e-03,5.742287716571727735e-03,-5.474770124878365792e-03,-2.497915587758140518e-04,-2.521888077996654615e-03,5.262780035944664451e-04,-7.309842233168536768e-04,-6.617642247009869992e-03,3.724316233698705600e-05,-1.314022555187091806e-03,-4.783406509828432546e-06,6.305102197856444698e-03,-6.544146924386836208e-04,-5.740636163709247029e-03,-2.387735146007549571e-03,7.412061521200505017e-04,2.327672087345470514e-03,1.915747060787011146e-04,-3.114011202394460429e-03,-6.208041007540057200e-03,3.301036514694286571e-03,1.091902039534013930e-03,-2.717513130520307846e-04,-2.244007456842557393e-03,2.079520506416059761e-03,-6.572335800395758681e-03,-9.429992905444615006e-03,-3.927920281986422494e-04,8.805894022276645680e-03,-2.224491430409477472e-03,6.941210420225929316e-03,1.099870968985925887e-02,-6.107554439721802654e-03,-3.766365686924935587e-03,1.491383204579722075e-03,5.325098430621066047e-03,6.110971507482729215e-03,7.097881912052285587e-03,4.638367114338382523e-03,-1.880981437014732447e-03,-1.768132790577337269e-03,3.957193035947888238e-03,1.639318767004582745e-03,8.894549770284105455e-03,-5.957508139759381182e-04,-8.552429308311254846e-04,3.242980885256820014e-03,2.055988892169583349e-03,1.674499816034055949e-03,-2.085574526530290483e-03,5.156686421671457903e-03,-3.661968029880250872e-03,-7.748178537415579582e-03,-1.197456744209079082e-03,1.178275164211279235e-03,2.499113770182347875e-03,4.475388059904028743e-04,-5.290592736283987116e-03,-5.223166011869509072e-04,-7.100475928450158090e-04,-7.410398474902169137e-03,-4.705309639186974584e-03,-2.425762222899606035e-03,5.763193258125917998e-03,-8.896915391252044683e-03,-1.149584816689541017e-02,-4.602496286340700474e-03,-4.624792608147597350e-03,4.994721046006798774e-03,7.801432591142861729e-03,-5.989703514969647521e-03,4.952265044043638494e-04,-3.088839148121587386e-03,-3.906684685516946705e-03,3.396902209358716382e-03,-2.935636442024399870e-03,4.499815458000292097e-03,-2.620329431109892811e-03,9.483318459459740349e-04,1.481713392019009222e-03,-2.591607235641083147e-03,6.96579273293528
6412e-03,1.158506739445352290e-03,1.743129356539628279e-03,6.943513116751885402e-04,2.213300530458914338e-04,2.159127951077915043e-03,-2.665751375136459281e-03,-1.575021181159747376e-03,-1.011395513943770703e-02,-4.147336832647035253e-03,-7.047703126026830679e-04,1.050504954211427082e-02,-6.597600824077540053e-03,-6.637867325994123609e-03,2.930232962517667628e-03,-4.985290705441534041e-03,-1.941913308374771654e-03,-7.255352943034551125e-03,1.185707724461111364e-03,-2.030453661914324338e-03,-4.980832845290866090e-03,7.743037033652893329e-03,6.270116818340395505e-03,-3.697663010766656568e-03,-5.457564699037163042e-03,-1.643405819999626486e-03,1.491585733842642602e-03,3.590215115734390287e-03,1.382039307872438480e-04,-3.427122949982543479e-03,4.753453113609214416e-03,-3.597876646542787476e-03,3.655137529575717623e-03,2.612038336288063922e-03,-6.883854610178843664e-04,-2.054229986693332805e-03,-3.303138057797892942e-03,-3.839897015885007821e-03,-5.970004121244307238e-03,3.749328894056853847e-03,7.909395778707998745e-03,3.825616435197720308e-03,-6.953949607344400317e-04,5.248384135929650383e-03,4.085912248470638733e-03,3.910887307553795843e-03,-1.235352780116078138e-02,-9.239521378302896493e-03,-4.840748600922926623e-03,2.076281708792403376e-03,-2.121105807557726285e-03,2.672153569370517776e-03,-9.695779140997748272e-04,8.351525913292310102e-03,1.560940320612553894e-03,1.285049538702497237e-03,-1.180352406488278938e-02,3.145324548532749294e-03,-4.271661365902517001e-03,2.054029480483585829e-03,-2.623399032220735195e-04,-4.032739973175343724e-03,6.968734763145377487e-03,3.211482121653435034e-03,-3.165674282487248269e-03,-7.161837302747262046e-03,3.538680487601187068e-03,4.253292118132470699e-05,5.660558311175820784e-03,5.130244347119968136e-03,-1.859860282590989743e-03,-7.706299829758777101e-03,4.951563980279891954e-03,-2.800524306830347570e-03,5.385562093385007831e-04,5.844250118550203994e-03,-1.520446269394197655e-03,5.522217967902157999e-03,-4.628473299761287976e-03,4.326772397619703218e-03,-9.029830676750281657e-03,-9.297929112837005497e-04,-3.424220839401763330e-03,8.427085952568306310e-03,-4.010791389125535275e-03,1.168014838974575672e-04,-4.503518950942837795e-04,-7.564902533057613109e-04,-1.023191547236268657e-02,2.627427368473590936e-03,2.579600311184705968e-04,-2.136395352730698083e-03,-2.678076271121285733e-03,-2.830459039639192332e-04,-1.147416550711956567e-03,4.505799010537272879e-03,-6.272828728767010538e-03,-3.716748082768988031e-03,2.380773034927709687e-03,1.168310281048974336e-03,-4.963865104238058970e-04,-3.211643463396488419e-03,1.031515439742150195e-02,-1.783508814148326285e-03,9.663669914062861591e-03,9.582035028531764656e-03,-4.977820992483454349e-04,-3.627560222916382127e-03,3.305993107826452754e-03,-1.918139199765167427e-03,-6.729606938368415912e-03,-2.166276714577640081e-03,5.251569806346920860e-04,-5.196861062900890761e-03,1.022143400262976289e-03,-6.125751962307717478e-03,2.670994134074594953e-04,3.343350288201531358e-03,-2.211696466944092741e-03,-5.485821371676729858e-04,6.545610274350117594e-03,-2.566204736854379043e-03 
-2.691987043896553779e-03,-2.277120697009402551e-03,-2.350281499157457193e-03,-7.135121666878455147e-04,1.155587482947326682e-03,2.315524987322822251e-03,-3.534448091494350085e-05,2.439669620337512473e-03,6.905157220890433423e-04,1.097050909400721329e-02,-6.723098246111895021e-03,-1.164467656122823149e-03,-2.983334849687017173e-03,-1.629149020248586970e-03,4.240792316453917930e-03,1.042954765300246802e-03,-7.436421387155529340e-03,-7.311049976434550422e-03,-3.129727330780006801e-03,-1.752681005968995337e-03,-6.948104458707971866e-03,-2.002943182876856104e-03,5.783323378096567879e-03,1.762925775503109251e-03,-2.069545575944002017e-03,-6.593709229067117169e-04,-5.272188512308353643e-04,3.809806066625877341e-03,-4.561574462704503716e-03,-2.402432894446271307e-03,-3.349089808188914202e-03,-1.407619828411895650e-03,9.621064398193861049e-04,1.102569617232176686e-03,-3.215744430295900888e-03,-3.483908059805529856e-03,-1.203324585283442952e-02,3.121437823900433031e-03,1.313920762440704644e-03,-2.482863118616912817e-03,1.512146363991739113e-03,9.877997638075043663e-04,-6.207261325136140609e-03,-2.379429074559600860e-03,2.787592016800607975e-03,6.451210649434862415e-03,-3.659134328730513381e-03,3.494222482520497469e-03,1.213792486730904882e-02,1.792806620373119716e-03,-4.887804632570828985e-03,-7.312204895684072800e-04,5.219007621338629319e-03,-9.862882717453239079e-03,8.063888972526056607e-04,-7.070674224738888840e-03,-1.785574254330580526e-03,-3.907788399203961267e-03,2.073617626738571886e-03,-6.337190778415217191e-03,3.855693914089536908e-04,1.792950743292539900e-03,-8.214134057497877658e-03,-8.004578269623358186e-04,-6.030148960332527841e-03,-7.448954543312601324e-03,1.041674396890549751e-04,-7.914592652846105109e-03,-6.469395997976936296e-04,7.821171640085602603e-03,-2.673451927125585371e-03,-1.805277722479231703e-03,4.909487416160791297e-03,7.343622176867253652e-03,-7.324012820508448048e-03,-1.146492864688574689e-03,-3.272449956396291781e-03,9.491191991271538328e-03,-3.598011404275696604e-03,6.954439580160009203e-03,2.179556611454234495e-03,-2.192106692245114185e-03,1.331815015848218653e-03,3.169909921349888687e-04,-1.197585435839573775e-03,2.857364934314622455e-03,8.342708344766759307e-03,3.055706317580515752e-03,1.292838481328684955e-03,-4.937703714316239723e-03,-1.654737131851281070e-03,-4.637509634112851366e-03,3.095045029963879295e-03,-9.285938300100039962e-04,6.658375180913801532e-03,-1.750617183232855206e-03,2.761318382259536543e-03,-1.656897397687804837e-03,-7.599893326678489741e-03,3.763748852057084097e-03,-1.106399241794808058e-02,-1.124299971320902314e-02,-8.124623770415053889e-03,-3.703961039021410641e-03,7.311834889847710836e-03,-1.477260885709751406e-03,-3.177949723398222626e-03,2.974710313162506978e-03,2.079082044652610060e-03,1.599007950114060290e-03,8.426158531736185897e-03,-5.966651634613722582e-03,-6.146779648574744043e-03,2.553061358780982486e-03,-1.235972337949803150e-02,-7.883826892345860426e-03,-7.229504561415208905e-03,-7.885765332671572456e-04,-8.540049270777245322e-03,-4.604995868227005663e-03,3.075380547142118892e-03,1.162734198242390375e-02,7.456093615968876175e-03,1.049443983916510149e-02,6.358096886839526007e-03,-2.199852006137611657e-03,-3.205446619150580448e-03,-3.170667911169468278e-03,-9.468264914445119343e-04,-5.967413073085707162e-03,4.215958272613767183e-03,-5.808365192571623807e-03,1.408100321994799030e-02,1.061476967828289516e-03,7.322777191606535908e-04,9.742714839744518254e-03,2.957810875966456875e-03,1.212315142432429313e-02,-5.633972254702968030e-03,-8
.072044296694958973e-04,-4.121343093764841562e-04,-1.112734512739889598e-03,4.371057769090968236e-03,2.823257863783941966e-03,-2.296543268300783348e-03,3.692459510558341088e-04,6.670091800342173999e-03,1.033944928637532071e-03,3.125678479861498851e-03,-5.574893388373450623e-03,1.966340962430359815e-03,-8.573419897627278588e-03,-3.159452059252845346e-03,-7.384537705702318676e-03,1.660761419969664912e-03,-4.320813572956356576e-03,5.903170480886469652e-03,-8.316857494725882205e-03,4.214143963333463402e-03,4.660054305373605155e-03,-7.919024172714070678e-03,1.381134461618996334e-02,-5.794836123428563201e-04,3.498333427775097960e-04,-3.420922267196633160e-03,2.169771344321593833e-03,-9.742382001854000412e-03,2.136343007796391049e-04,-1.329472874683699547e-02,-3.498561179994478815e-03,4.443693354729291375e-03,-2.169261563431940446e-03,-7.075886813485701525e-03,8.833476183755939112e-03,-8.635351517885431899e-03,-6.042326375052676623e-04,1.925245552954200998e-03,-2.572685079222809638e-03,-2.833504610322348793e-03,-6.535756236276064890e-03,-3.003849831686731045e-04,5.901230190530238652e-03,-2.636756202397328146e-03,-4.163152484503449338e-03,-4.542885144955085240e-03,-7.666105122453405370e-03,-8.367695623259575541e-03,-1.279317233411476039e-02,2.641360976867104717e-03,-5.045378804919052954e-03,-1.172827867224046465e-02,-3.697560618975298443e-03,4.029586794874884595e-03,-5.492286626317747535e-03,-1.239642960191685494e-03,2.093048934232729990e-04,-3.372050271023984639e-03,2.235272352128298695e-03,4.865926527079560138e-03,-1.335267901226263546e-03,-6.524229295622641263e-05,3.126981666445837350e-03,-1.361188521395154463e-02,-4.509802876363390098e-03,-3.992757931362025253e-03,-3.518748669904930501e-03,-3.865686098417901121e-03,3.127944549798472064e-03,3.194090099962710975e-03,9.943446842484993109e-04,4.360306328777455338e-03,3.794495921249451016e-03,3.803902197149016361e-04,1.194517457930775566e-02,3.690251987936177171e-03,1.244406919301393240e-03,4.938568323476818199e-03,-2.406299564827403797e-03,-7.094350131939933073e-03,-3.686164077865598538e-03,5.029812350483366112e-03,4.172734017839231820e-03,-3.255972365311412403e-03,-4.776819674084950909e-03,-1.593608911683076085e-03,1.664569535463557900e-03,5.479160655128221050e-03,-1.902534575864217418e-03,-4.549581207961390862e-04,2.043206133275035641e-03,-5.562630290341609961e-03,-8.544780768739461060e-04,-2.556812960120675559e-03,-1.297435889776168104e-03,1.386090787215937814e-02,2.979730618503718606e-03,-1.287080934566637441e-02,3.936210318370231102e-03,3.904887828758532835e-03,-7.778802191701060867e-03,1.661792901831313013e-03,5.375616575922551564e-03,5.450194462049410236e-03,-3.749454832747787115e-03,2.016250456427639095e-04,8.134967412571156168e-04,-6.633701429198588656e-03,4.523534591062065673e-03,-5.708874467434462731e-03,-7.583891061997069527e-04,2.109638552538295551e-03,6.071099621671253478e-03,5.387631382471692702e-03,2.870338918743110725e-03,-4.337402284100472065e-03,-6.513410466855731884e-03,7.832705946752174624e-04,-2.221556915391388522e-03,3.102806529800343732e-04,4.902022488677622476e-03,3.382736192605766308e-03,5.276420163422826035e-03,-4.569306957952815572e-03,2.482954477399321788e-03,4.897800180325214726e-03,-1.846602875638081714e-03,-5.078760222395415323e-03,-1.015264053438348461e-03,4.407175177482308519e-03,1.036846851648525695e-03,-8.853791616450844108e-04,3.274535411810664421e-03,-2.441606645914778072e-03,1.512212111371727353e-03,-1.254066024206268275e-02,-7.688981111718807930e-03,2.908848716891835728e-03,-6.249048306714418723e-03,3.4428332
83570886135e-04,-4.063513964375836500e-03,1.220718280980141691e-03,2.903103925783718977e-03,-3.491472081859534581e-03,2.771879892019415460e-03,-1.017139537460218061e-02,-7.569078681838222085e-03,2.092524985443599238e-03,5.853090034295796483e-04,-2.074536682257115041e-03,-7.662433395919535182e-04,-7.641568917929956496e-03,-2.884286717481086490e-03,-4.698917435263350040e-03,4.120029170851917562e-03,4.725848835262346642e-03,2.320066886873441462e-05,-5.643182676290959925e-03,-1.406948336557960200e-03,-1.792940312923830323e-03,-2.147577580687397378e-03,4.436172459884754789e-03,-1.767996986035942760e-03,-4.806899248167802174e-03,3.669985222360659573e-03,-1.878920576182768233e-03,1.372777686202152547e-03,-7.970123822228678337e-03,6.102105206693017548e-04,5.526073472957403546e-03,2.240753721591494840e-04,-3.084057803318442439e-03,4.659808893675354731e-03,-1.244418758587277586e-03,5.724191485564909462e-03,1.363675948951678044e-03,8.098267293593803271e-04,-1.255039464651005500e-02,4.156888896713536834e-03,-1.146178769833683430e-03,1.019954066525489457e-02,6.231700222623775057e-03,5.734448810836744598e-03,-1.046315012432651008e-02,7.036197442461997707e-03,4.296136446953097675e-03,4.109416637606433213e-03,1.156403107263926386e-04,7.762488892096597662e-03,1.792302203926183840e-04,-2.428471698591257947e-03,-1.789635900942486120e-03,9.288754468137547585e-03,-4.206206433514252802e-03,8.596410559353305503e-04,9.883891156995975799e-03,6.310932911088647235e-03,-5.102307632021546580e-03,-2.214503040789920964e-03,2.451947716788301676e-04,-3.513069477276494976e-03,-3.262593595387747181e-03,-7.133777045083748800e-04,-7.408306987480448892e-03,4.762290019979173450e-03,-4.922373557576336083e-04,1.298439596903205873e-03,-2.514701249810597384e-03,4.944488154462203799e-03,3.677380461844574795e-03,-2.259772786383726102e-03,9.057480874506536545e-04,-5.634219710470496877e-03,-3.995943625393435887e-03,-3.663567296557332462e-03,-2.081513416148875022e-03,9.107130731657486940e-04,-7.900126379914704016e-04,2.679813427859522191e-04,2.597285062399401576e-03,7.753593055845579493e-03,-2.363266040673594418e-03,7.375184113809978709e-03,-2.835250373677564682e-03,3.058281936495784066e-03,2.347898212395395281e-03,6.258752664979217763e-03,7.894529609268589610e-06,-3.395075546120304147e-04,-5.960537282641631580e-03,5.034513708634903370e-03,2.435169231295484116e-03,-2.522483598764941985e-03,-2.999738996592805733e-03,-2.020456261623552151e-03,9.617850519877809909e-03,-1.676893511165282458e-03,6.028386873840293689e-03,-3.783979773455851264e-03,3.504128390558618016e-03,-2.095594533639669448e-03,-9.226903701836675640e-04,-1.843172422583302461e-03,-5.309910495853677713e-03,2.991839414183756346e-03,-5.790779357906097359e-03,-4.090153622298834876e-03,1.516821274461103812e-03,9.054107710430447584e-03,-4.005162672126555412e-03,-2.419407567998997383e-03,1.595521699950993568e-03,2.263878915162319608e-03,4.017904631417088088e-04,-2.778065366381470436e-03,-5.821503275166109090e-03,6.510675141314648841e-03,-1.481099210195884266e-02,-4.652701386371035290e-04,-3.776540719243911062e-03,-7.502553280890587612e-04 
1.342460900241773669e-03,-1.811520151331098968e-03,2.245813390335882689e-04,-2.711577495548434490e-03,-2.909214993466001115e-03,7.283650853306259873e-03,-1.186745455729639950e-02,-1.689515023114178914e-03,-7.688335639882965325e-03,7.619421421899510473e-03,2.529146906368829727e-03,7.405862486016870881e-03,-1.390076970311833991e-03,-3.499109515415778013e-03,-7.990687742195643276e-03,-1.981788773237048995e-03,-5.912999497070304568e-03,-1.869907817904803773e-02,6.465840826450781550e-03,1.460254796920586835e-04,4.215464047338014948e-03,1.129282250667011352e-03,1.000354580983625478e-04,1.378698055755852876e-03,4.305112902705105805e-03,3.684643769016910514e-03,1.320172543565798591e-03,-2.816767408759777731e-03,-3.266586770173791204e-03,-1.157924002825481014e-03,-7.229059688374285010e-03,5.226417665678959566e-03,-4.166105194057240571e-03,-3.514151062555985212e-03,1.725149095963684522e-03,4.729002133627200830e-03,3.016659000337098065e-03,-1.571791961512664744e-03,2.991672000790606203e-03,-8.910550817625413275e-03,7.368804505321575014e-03,4.486883617732141151e-03,6.317244219654257094e-03,1.172050119734400131e-03,3.584988051303687751e-03,-1.327241518506050433e-03,-3.939222986697334984e-03,-5.966640513945062341e-03,6.506393028063073639e-03,-4.139718274050504208e-03,-7.252209673731408249e-03,1.435473521018032148e-03,9.634482991682803406e-05,-3.650538108469294249e-04,2.984802942231187979e-03,1.091730146394207315e-04,4.636907663287472697e-03,-3.774923288497156627e-03,5.134410883387520962e-03,4.397105436255279408e-03,-6.775569298996047922e-03,-4.534831870607679283e-03,-5.757251559243612912e-03,6.311032108630973197e-04,-1.606005276107828851e-03,-9.462360606559031432e-06,-1.273636815454762825e-03,1.570452086613651627e-02,-1.225119806547720312e-02,4.405521107064327806e-03,2.417820720264453327e-03,1.753102058972879403e-03,4.420723500689761995e-04,-8.695450800919184457e-04,7.931178125758799741e-03,-1.924340432539937105e-03,5.535014351138046150e-03,-2.613791004215489035e-04,-2.350996692508087928e-03,1.546236668972097213e-03,5.483352125719049251e-03,6.914940693469501069e-03,1.920561587125182719e-04,-1.881256224942317039e-04,-7.459444022153572350e-03,4.995126188523769419e-03,2.789590844620654011e-03,-8.023491561466609967e-03,1.506066512580628682e-03,2.781601449456359040e-04,-6.777753265143680525e-03,3.185667130112532393e-03,-7.956247859043440824e-05,-3.280492729314396071e-03,2.298268419649128827e-03,-6.832646858006869925e-03,-3.389909550858333752e-03,7.454987539644696709e-03,-9.651744503264351712e-04,2.260568455676551739e-03,-4.244649398673546944e-03,3.311665219051712194e-03,4.885453299333299138e-03,-5.208652161001213476e-03,5.501966231985182644e-04,-3.137577209572680725e-03,8.127229128040312431e-04,9.755042228181611420e-04,-4.713189831171956355e-03,-5.571911213049821412e-03,-2.970457941958299362e-04,4.098537309890033092e-03,-4.209795771235205590e-03,-7.753488502008326622e-03,-6.432662309268783775e-03,-3.887482965196717303e-04,6.128726542509834938e-03,1.486108771231652546e-02,5.737166338781900569e-05,-1.154474937753836167e-02,1.165645514799095372e-03,-3.708166160593208589e-03,1.734317438498712578e-03,8.128258275379208336e-03,6.619331427182252245e-04,4.784281941591804997e-03,-1.899184056840058589e-04,1.847906841620193096e-03,-5.685829838119046270e-03,-2.617379984983484072e-03,1.284468046485198398e-02,8.359338184170976224e-03,-5.599307287217471577e-04,-7.094545622464550018e-04,1.866865576758750672e-03,-3.359195943673971146e-03,-6.447816711324167414e-03,5.639679491538666413e-03,-9.567365482148875930e-03,8.00238280549
5499452e-04,-1.178908700977489808e-03,-1.397948599156556559e-02,-2.026629494298058057e-03,7.362860099608562435e-03,2.417852703559159131e-03,5.091893761695644172e-03,-1.673556451845890436e-03,9.423153261758399762e-03,1.414185080828140283e-03,-3.496404885578866038e-04,-1.060281960463280333e-02,-4.923306691115850002e-03,-7.412237414112935255e-03,4.884010786145534131e-04,6.855108784955299926e-03,-1.147103966541608263e-03,-7.138107569961711814e-03,6.867977103421529089e-03,2.900937800227292983e-03,-3.826017325927797905e-03,2.536832822988843023e-03,-6.420818859853953675e-03,3.594450980481737803e-03,-2.207280848301356943e-03,-3.095359955350661212e-03,5.804008293807699130e-04,-4.590221862076217067e-03,1.547578526241382693e-03,-1.052274329771020212e-04,-5.292273943290328776e-03,5.953215925824681225e-03,2.394734500513102243e-03,-2.378988805860487565e-03,5.159644848727954793e-03,-8.572871070247813780e-03,-7.921564501543302411e-03,4.870420359359632200e-04,7.784708587736428528e-03,-4.816267994776198859e-03,-3.488339548440884310e-03,1.664221262610284880e-03,1.259318458145620989e-03,-2.210386295517057387e-03,-2.229547486821428347e-03,-6.837864596228577715e-03,-5.640951440659859248e-03,1.032918452926607101e-03,-8.733419461567568098e-03,6.647630075004602214e-03,-7.440930824704517464e-03,8.044479359090071042e-04,-1.372813775404479195e-03,-4.171671844453749174e-03,-1.634421288962391392e-03,-6.134803131864527415e-03,2.338398332128019310e-03,-1.663982897686387078e-03,3.295782588712819951e-03,2.249391717384671956e-03,2.133071416437492401e-03,-6.557657112776850081e-03,-8.941237097606778879e-04,3.752017134548614615e-04,3.023824324712112710e-03,4.716202632540730799e-03,2.398834834692138620e-03,-5.018230507483221504e-03,-2.427570713421700176e-03,-5.687704274484529747e-03,1.187511952667714499e-03,-3.770472289797288570e-03,4.492719964859896893e-03,-3.804679301336853954e-03,1.170706141934913999e-03,7.362092548822390947e-03,6.874279998561227262e-03,8.189910759976656157e-03,7.459542445407706807e-03,4.832072545781650099e-04,4.658753696279531713e-04,1.507632745629800593e-03,1.523484143275561416e-03,1.017908276481320137e-02,2.415942508102886938e-03,-6.653274299588890842e-03,-6.016497533465768079e-03,-5.980809083301847316e-03,-2.153482300644455043e-03,6.075782653324733780e-03,-5.683885718750001856e-03,1.654472981569606992e-03,1.060580198582562273e-03,-2.703340229923864059e-03,7.445619271847152103e-04,-8.966191184124443214e-03,6.575630399488891463e-03,6.074116174001691591e-03,-4.527469153258413884e-03,-2.698698238427763452e-03,2.886521089423982507e-03,6.339883932442707398e-03,5.612484783166006054e-03,-8.332214585188736991e-03,2.512997684581034976e-03,-6.839583051000209038e-03,2.038520536331603090e-03,7.412730132891885183e-03,1.887186096060525318e-03,5.044742747856693519e-04,-9.110508355922298379e-03,-9.390772928308563187e-04,-1.701024457901997118e-03,-1.363390284473659604e-03,-8.688182063080376882e-03,-2.169821442420197862e-03,-7.569218664717926705e-04,-6.449903365214947580e-03,-5.670104304124175959e-03,2.905434419055198236e-03,-3.310634342989050914e-04,6.306541300423988347e-03,-1.818756986787074077e-03,-5.463273264818186738e-03,-6.309056877587290550e-04,-2.454991788786562226e-03,-4.411688019036355372e-03,-3.876753466036640412e-03,-5.559975825290387232e-03,1.814039482070994381e-03,2.933881165392510180e-03,4.531249185157018404e-03,-9.559557977131777178e-04,4.936476247352625767e-03,6.571245924240531194e-03,-4.888813449963143667e-03,-3.273381654599812843e-03,1.419399132585556908e-03,7.080965946817452394e-03,1.981893866419261185e-
03,3.928898031050482610e-03,-3.113910388719343721e-04,-2.718054189361884850e-05,-6.488965146109550858e-03,-2.003530879731127250e-03,8.874783362644863378e-04,-3.064009467017389087e-03,-1.134761279889283901e-02,3.530397178378511403e-03,-7.013999052826551655e-04,-2.680275361964389209e-04,7.383129309056610592e-03,4.903549991037549870e-03,7.551362190040163125e-03,4.353365474665705712e-03,6.665904917699384639e-04,-3.571596922674738495e-03,-7.267352769436407310e-03,2.097246813801901394e-03,-7.099471820477158863e-03,-3.077640435419622751e-04,-3.911223557160495821e-03,-3.064395458603110488e-03,-1.668354059070787038e-03,-3.513652228022681821e-03,7.585991910241582890e-03,-8.455888304648869445e-04,1.549739670649016160e-02,-2.448440719632445292e-03,2.621348970289584697e-03,-1.918312595772657262e-03,-1.851938758413370321e-03,-5.360365436562348519e-03,1.413049266078275070e-03,3.064576507214021063e-04,1.548853575787266165e-03,5.000585154565461224e-03,-3.682634225862159872e-03,3.307870225203256298e-03,-2.523628859447440607e-04,-3.920779070122962229e-03,-1.051148334492205648e-03,5.019655832143134384e-03,5.907901425475894225e-03,2.506217900565813148e-03,-2.331075735568680345e-03,-2.848008619842325867e-03,3.699385404462898416e-03,-7.816000070377100012e-03,-4.270561644304608908e-05,-4.624992990545844646e-03,-5.633723197747601483e-03,1.512112958177955734e-03,-4.590701132050897977e-03,-4.293437411457597802e-03,-1.016687901005440455e-03,-8.836978667805967316e-03,1.850671512061268301e-03,8.896241329290417027e-03,1.303817168235793869e-03,1.827848943481754068e-03,5.415510269653978082e-03,2.315366708909747870e-03,8.221148440701608190e-03,-5.390312069872750349e-03,-4.099401896470396961e-03,8.036001825642718772e-04,-6.347294063131936760e-05,-4.197850557251671388e-03,1.353807304835747023e-02,9.791132966298092891e-03,1.886436090643151854e-03,1.412318024335709743e-03,-9.780002776953492741e-04,8.012216711917462438e-04,-5.720906419825338117e-03,4.793596300805989287e-03,1.584360287432074821e-03,3.182907739748339743e-04,4.925262718641159297e-03,-5.158140021149277719e-04,-3.676796283356700921e-03,4.317676204473991017e-03,-5.915693215130542583e-04,-7.259457627550648141e-03,7.642311145834470958e-04,6.503956919709031996e-03,-4.677914584550625816e-03,-4.282646868804564078e-03,-1.362592812350557083e-04,3.194163959882530095e-03,2.134342244464776991e-03,2.535242676480388576e-04,3.126731570361124431e-03,2.665454168872391592e-03,6.057919331784600812e-03,-3.042560193382204598e-04,7.738213016549877650e-03,-7.387965363089522668e-03,2.223940345542828063e-03,2.239979628339344112e-03,-5.147610356340759363e-03,5.360401558354235124e-03,3.538624677804011465e-03,4.997551158071436399e-03,1.007300221095158883e-02,-1.564134670386709697e-03,3.361804958768823369e-03,2.540042943410612385e-03,5.587989219153897090e-03,-7.480215811648929618e-03,-7.028465536260895298e-03,-4.716049965763246220e-03,5.993454414503657582e-03,1.765950030213044378e-03,-6.647224098765763985e-03,-1.104944001836456573e-02,-1.569985351071087869e-03,1.003421478593123119e-02,4.844343413622073310e-03,-2.981334880671678792e-03 
1.015656830809414345e-02,6.576649768691541359e-03,4.570564433748562291e-03,-2.266850091677440320e-03,5.878498532868409350e-03,2.660930671067051039e-03,-2.623436295794394878e-03,3.787864598855654463e-03,-3.457066449424185759e-04,3.588862448650850150e-04,3.364858168249422292e-03,-2.031555142996411146e-03,6.370982464845914225e-04,-3.375278919848395589e-03,1.046340643432938210e-03,4.329155264805770523e-03,2.503248922183676662e-03,-3.272694116574768105e-04,-1.559671183253389781e-03,-6.438268890626553873e-03,2.040307659807282642e-04,2.308997572188504609e-03,4.408862657684303223e-03,5.980700649059661304e-03,-6.798447182913887159e-03,-7.511923324876656170e-03,5.353137730072358470e-03,4.244621562141920323e-03,6.227580634080766980e-03,5.088645242825155530e-04,-4.796755649753378474e-03,8.943660283171446989e-04,9.609236699761805741e-04,-4.057487980427524135e-03,1.234148135194024448e-04,5.561378947140275565e-03,-4.734601471469461411e-04,-6.242377479151559050e-03,3.299280879184519812e-03,-7.587620661837911513e-03,1.509936565908410978e-03,-6.965342140310150018e-03,-3.822742425906182785e-03,-1.035152467154464104e-03,1.878476598184025985e-03,-2.486245336528653568e-04,-8.221489568448279994e-03,-6.009573646003433731e-03,3.688683804038964207e-03,6.657336291759878436e-04,-4.458028511294460927e-03,-3.341980664486796787e-03,-3.472533355720401789e-03,9.476846804342870892e-06,5.535896098399137148e-03,7.223652199846811520e-03,3.152861712374885935e-03,9.166159718122447904e-04,2.049208701299254191e-03,7.460745742078007947e-03,1.416298538995898817e-02,-2.323405140547291862e-03,-6.626400445583913688e-03,2.770374320118384635e-04,5.146668680145008966e-03,6.876277581018805200e-03,2.649882055512127986e-03,3.629496335083827289e-03,-1.810712510870818122e-05,-8.280462990915558527e-03,6.232701703541795347e-03,6.721630220418455329e-03,-1.637518126283189334e-03,1.151213198006744917e-02,-3.381778125793313480e-03,-1.522508228429486372e-03,-3.853482022864838149e-03,-5.668178308434544677e-03,-9.690816788093116474e-03,-5.695535115823160792e-03,-3.619334706130331139e-05,4.924720277718136301e-03,7.332365931879502143e-04,-3.572485908897116234e-03,-6.821143963072737008e-03,9.808742634678127396e-04,1.083710860068442389e-02,5.611300385230106984e-03,5.722105500132205515e-03,-5.046651377696054208e-03,-2.424098417725506060e-03,2.102331621716911278e-03,6.734008602489850205e-03,4.734720699813340511e-03,4.693701145045813677e-03,6.159586207882663872e-04,3.864175259184294761e-03,-4.527302791240435560e-03,-9.332976192726638890e-04,-1.393754265756845653e-03,-3.865983130604205321e-03,4.742923002202921456e-03,-7.041605911633342209e-04,-5.231708531516481714e-03,2.461339222601083721e-03,-4.802650739540685956e-03,1.667914768670448198e-03,-1.742817523674646636e-03,2.330394513420018694e-03,-4.707574429232238254e-03,-3.122436534701200118e-03,3.533976159810463778e-03,-2.983742543437691713e-03,5.304497323999789703e-03,1.154238902215896749e-02,2.495521888943559932e-03,-4.425799057413265340e-03,-1.254325668757619956e-03,-3.418526301537866581e-03,-2.939919061572409103e-03,3.213770301642830955e-04,-3.187338621257429679e-03,7.551125611514854924e-03,1.983810545860714616e-03,-2.929115880399753573e-04,-2.555103455350732789e-03,3.192104401499831073e-03,-2.790196072893980778e-03,4.349405157652639839e-03,6.406346456834409973e-03,7.216122127097891834e-04,3.919780941672472950e-03,3.851711893406275459e-03,2.000457021949797638e-03,7.191134830508001848e-03,6.133956386648523182e-04,3.653680043566791529e-03,-3.460990780813350768e-03,-6.541914352958084870e-03,-7.2291889869962469
92e-03,1.759820531663906090e-03,2.510955019724258296e-03,3.973005210991519334e-03,-2.260034199550436246e-03,2.987587904465355791e-03,-2.214439089399610882e-03,3.082664853602044234e-03,-4.778756532867996850e-05,-2.536118630677158901e-03,8.472221271406366397e-03,-2.476734054258840009e-03,-1.973062132861634708e-03,2.990847520641219410e-04,-1.524907496627906592e-03,1.634874393095260038e-03,-2.923742116274360373e-03,-5.157093219973088970e-03,4.577496916584499614e-03,-3.500849979331964804e-03,-1.054136329145066242e-03,-1.071147955741626537e-02,-5.652029664649084288e-03,4.447925118539371170e-03,-1.016164512890042676e-03,9.665219546937912779e-04,3.880166809741296147e-03,3.794341816469410430e-03,5.308281674015154000e-04,1.483439757746183533e-03,4.057372358464502934e-03,-1.019617407395120466e-03,2.769488953229999346e-03,-3.554920900113328099e-03,-3.433538399211388207e-03,-3.088787134077119155e-03,1.385847113873475266e-03,-4.868884070670082172e-03,-1.910185712833821275e-03,5.689742525468767693e-03,-2.756876477978269618e-05,2.062363031695693325e-04,-5.490620761712324167e-03,5.660860503570892648e-03,-1.989310435671316941e-03,4.492137446143450522e-03,1.017479146699439008e-03,-6.862724033344009579e-03,-5.264737977975502033e-03,4.386149472895505518e-03,-1.119728676963018801e-03,3.578817050068183736e-03,1.195976918029441544e-04,-4.871414379184250965e-03,-8.505691512153190420e-04,-3.673806541749439960e-03,4.237991745261453252e-05,9.992324027705308878e-03,-1.631007470856556726e-03,-3.207516395724882650e-03,6.705629809623204693e-03,3.214342374484914673e-03,-9.906113075288238562e-03,-2.166000712269054024e-03,6.437825755292355019e-03,-5.247719224003316195e-04,5.602108145846367709e-03,6.304780138194228910e-04,2.574405034757973505e-03,-7.608847380197434913e-04,2.058305735474815455e-03,-3.274346610453136852e-03,3.551456842188049478e-03,5.188274975209222935e-03,1.065006456450449425e-02,-1.242415525779923203e-04,-2.602695425559099166e-03,-1.967010139030715699e-03,-2.578380300418180800e-04,-4.497054824660225852e-03,-2.461587427955637831e-03,8.243806180425338956e-03,-2.045797086516364571e-04,1.463623647723908414e-03,-3.589391201918834453e-03,2.812848339008062679e-03,-6.713857264673312938e-03,-6.546943272338746553e-03,-1.834554883337102208e-03,7.491481882770827279e-03,-2.988989387924507243e-03,6.720256366201968028e-03,-4.943075890713652908e-03,-1.826669891555405406e-03,-1.030260028021047318e-02,-4.467792954412228013e-03,5.310739675228869430e-03,-3.025765134317819375e-04,3.003327046767930754e-03,-1.113082922345982190e-02,3.271893141107361583e-03,3.365303308465249915e-03,6.166002576083129405e-03,-1.015651997997974961e-04,-1.130844960821228559e-03,4.109590354937637646e-04,-9.359455804124099804e-03,-1.162939509060377009e-04,2.099033076064204997e-03,-2.703814569063528635e-03,-1.869114391516135112e-03,-3.345049742159459868e-03,-8.763574301728394055e-05,-7.262645574179414816e-03,-5.046102407834202400e-03,-7.520058321506638624e-03,1.067438823250103398e-02,2.426215025122108182e-03,3.062555845639514367e-03,8.833853893108666792e-05,-1.524076693770770943e-03,-6.079354849635301488e-04,6.527914024055975206e-03,-3.554008392929710464e-03,-4.000306051250095661e-03,-8.362677791998673026e-04,-5.867511653130673193e-04,1.302800871642840162e-03,3.131395709102950979e-04,-3.554803605459970397e-03,8.722034015467419049e-03,-7.996278725553808767e-03,2.098273657286498124e-03,-6.176492618716748415e-03,-1.258781549179133425e-03,-8.258635808984420099e-04,4.089088291608167572e-03,-6.465332051674992260e-04,-1.261967948491382591e-05,6.486079412118265358
e-03,6.316862384791121186e-04,-6.288984239824018350e-03,3.010027974228072969e-03,2.885286856050245808e-03,3.096454915264969070e-03,-8.049896569374818955e-03,2.068738016035270855e-03,-2.696131183113235560e-03,-2.223962630652064759e-03,-3.005639689411539629e-03,-5.358387482622021386e-03,4.026759586519134890e-03,7.097186415249862393e-03,-1.161412410934857878e-03,3.490286409501275333e-03,-5.497021356135715041e-03,2.783079939308664638e-03,-5.466058125809548094e-04,6.153695864261904923e-03,1.404691403512510744e-03,-1.402931895909590278e-03,-2.094665924220152561e-03,-7.387725135997660550e-03,4.675986425238295427e-05,6.124344593992062449e-03,1.284293279140524000e-03,-4.074514339039199927e-03,2.027932669988233814e-03,-7.915614117077179185e-03,-3.641314520968011448e-05,-1.093335065398945124e-03,-1.761048793192180424e-03,3.889215095350868922e-04,-9.630677077946072154e-03,1.706961984859229654e-03,-4.620239274100068107e-03,7.438596641216904670e-03,-3.064633669979090636e-03,8.688032888083941502e-03,1.034409069654798703e-05,5.714145793329143165e-03,6.343724373854849226e-03,-2.513351936106905139e-04,5.029406157439181638e-03,-8.302142581107712499e-03,8.532212291445231619e-04,1.510668325453487027e-03,-5.511605124332641555e-03,-3.650687183671650921e-03,8.099673647246178904e-04,5.211415926020467396e-03,-1.254859741548905276e-03,1.518221083605758234e-02,-2.682540889870766668e-03,6.769589522433223754e-03,2.095857888108356836e-03,3.664151513428032768e-03,3.734114193277183070e-03,3.333876601042185038e-04,-8.665447800806358458e-03,6.986038331970355689e-03,-5.845144149606733286e-03,-2.549941574471103079e-03,1.124856737418531219e-03,-3.773278639987722675e-04,2.309027380596554661e-03,2.029774679925088702e-03,-4.725096180914266017e-03,-1.167607848651186627e-03,1.769585811527077477e-03,5.721474432896588198e-03,2.976152121863810847e-03,-2.413057987631414721e-03,5.930963454938126917e-04,-5.045667317374782478e-04,-3.855743861030901543e-03,8.923302545202113634e-03,9.052268898342256953e-03,-1.000105088064162304e-02,1.658333390700909675e-03,9.082005545731901275e-03,6.405554466828375371e-04,-1.201088937086306017e-03,-6.252268381466802652e-03,-7.951914955840008736e-03,-3.342680199067107200e-03,5.239302206217683056e-03,-1.611665635694489425e-03,-3.626080850739468297e-03,7.076001227222632368e-03,-2.598125782654632249e-03,1.059189780341803855e-03,-1.150922722147604271e-03,-5.602035244799708635e-05,1.623037264676986191e-03,-3.218023122428585476e-03,-6.507678556523481192e-03,-1.004144350130445687e-03,7.680280953275833537e-03,2.255360261246631623e-03,1.591132438824375802e-03,6.253768817665110794e-03,-9.770177580105558757e-03,-6.282895731001026133e-04,-7.988596694347861315e-05,5.895928197971547075e-03,-4.369238929596373083e-04,-4.889058291679522494e-03,-2.543862489559668011e-03,2.561338110085389398e-03,-6.236994486244310269e-04,5.625987811165756788e-03,1.276229241227624066e-03,-1.800566939735611782e-03,1.090831026841021381e-03,7.487774334227980354e-03,-2.898773568772473452e-03,-5.751263678608363461e-03,1.873091568047243480e-03,-1.965748585021912659e-03,-4.827855938266596182e-03 
9.479198226528374278e-04,1.624371594137407346e-03,3.825462690565430320e-03,1.286657196776531085e-02,-1.226590614084574699e-03,7.258237151155237698e-04,-9.081811349140864187e-03,2.584231065950497001e-03,-4.124506649150936136e-03,-9.243480669968334382e-03,5.887392264056890909e-03,-4.093642191144071692e-03,5.273719963625253873e-03,5.265110193894207043e-03,-9.313165779227498484e-03,-4.681247748879287934e-03,-9.198103462081261283e-03,-1.329674510148705271e-04,6.135899477931375896e-04,-7.719358611767768337e-03,-1.091980874747537286e-02,-7.201027520983224045e-03,3.354659011898789098e-03,-1.565588440162098962e-03,5.450968293134328272e-04,5.581028516599060435e-03,4.449981275259387537e-04,3.180685674111425471e-03,-1.142663957907946552e-02,-5.920034540526412925e-03,4.968057152932321002e-03,-4.624742834068114642e-04,8.040048283830007400e-03,-5.007270110022641468e-03,5.543300589841572423e-03,2.524617194451677250e-03,-2.074171648686828299e-03,-5.195720035327977548e-03,-7.735127074841590018e-04,2.828468636906794880e-03,-1.331133974121389207e-03,3.290866138794026874e-05,1.575448432517652896e-03,5.151708292441018952e-03,-2.086779824149039356e-03,2.841063275202697011e-03,1.013699027696628695e-02,-6.786566789926807945e-03,-3.658517290689666801e-03,1.611551552671672174e-04,3.224460424520152711e-03,7.610945643616971604e-04,1.385672490006967846e-03,1.093074850861040781e-02,3.689516981245023926e-03,2.679060710058277402e-03,-6.804703531083968089e-03,9.367839016935792379e-03,2.696277152139753882e-03,-1.302785033255024483e-03,2.347743137618462967e-04,6.198526895837886720e-03,-1.060221638084094843e-02,-7.753461293912898913e-03,-1.300219308760067020e-03,-3.295325433859174705e-03,-1.167327990959713194e-02,3.621676733856954693e-03,-1.654731885635148765e-03,-4.051812597264916466e-03,-4.988254991272551148e-03,3.804471943136931208e-03,8.172791652968632720e-03,-4.851599855065205115e-03,2.705217780412536022e-03,4.449891436988723942e-03,3.776007423647312605e-03,2.263433841281928260e-03,4.046544278206331245e-03,1.229431567131092319e-03,3.430627202759565109e-03,6.235912270662078392e-03,5.930583185290462349e-03,-3.303366531183685867e-03,-6.542478786121958337e-03,1.226868689462508669e-03,-7.002240902594059242e-03,2.379169668065307924e-03,3.414206697012615518e-03,2.646588838934940680e-03,3.933669476301698484e-03,6.890177806524396681e-03,-8.549037519369090052e-03,-4.957315424611918007e-03,2.562881286793897151e-03,6.919605963299538774e-03,1.244433240468178006e-03,5.905616622177373241e-04,-1.700200926865630489e-03,4.760007194852818035e-03,3.168561786189611103e-03,-8.381043612099436805e-04,8.697169354136244021e-03,4.222050344391901910e-03,1.962811517293877508e-03,-4.928289096787503987e-03,-9.546939602162815217e-03,-1.599301650569426260e-03,-2.117655728247147887e-03,-1.719798775338037076e-03,-5.579961670629124751e-03,1.260830165741480026e-02,-5.751952136327677509e-04,2.435964469535937139e-03,-1.773946896513442246e-03,5.966865086774210229e-03,3.062276114801325191e-03,6.807056206888937103e-04,6.613999364637433709e-03,3.663927350887045964e-03,2.132267909315802533e-03,2.001612945206445934e-03,4.954256063618379426e-03,-2.028812188241297613e-03,1.456733639074171823e-03,-3.725719449575460359e-03,5.152689526373405160e-03,-6.788720174483267936e-03,1.329163641176536303e-03,-8.253349337286400300e-03,6.568117314640131493e-04,2.103641843183259603e-03,-3.225156104356640373e-03,1.531765593808407213e-03,-2.431137828807683390e-03,-5.515171805560590591e-03,4.863045536967579643e-03,1.681874745421652658e-03,-1.913602392346123180e-03,-8.649448868669194421e
-03,-2.068773042912943911e-03,1.326560507498781346e-03,3.641559342312492313e-04,-2.013788917427154941e-03,3.110855337327794647e-03,-1.131935795718221061e-03,-2.571802520312624243e-04,-4.061406402815216990e-03,6.912983297795343221e-04,-2.087603051635763748e-03,3.274807649230247167e-04,1.334489379417304550e-03,-1.969553947933404704e-03,-3.363907765390036688e-03,-8.252050728755889325e-03,1.093769695676347943e-02,-4.710004447546819323e-03,-9.555619306272893770e-04,-6.642233935295457827e-04,-2.225178733114951523e-04,4.996271691874046815e-03,-5.912494236715191628e-03,-2.908427417062492502e-03,7.061679507814653351e-04,-7.608143328046970168e-03,8.742931038906642457e-03,5.792176681965263349e-03,-1.464444440627784335e-03,-3.933525242313011018e-03,1.067254842515650897e-03,-4.262495665633694798e-04,-2.030834573548432730e-03,-1.931137147298455422e-03,8.996166245101957712e-03,3.990318872667918257e-04,-8.198861752351099819e-03,8.245773679535842171e-03,1.069294565637800417e-03,-8.355938680628179505e-04,-1.066141471347717591e-02,2.317311856129540808e-03,-1.031819919336395518e-02,6.614504907129257344e-03,2.233451511017043364e-03,1.656887918427595377e-04,-8.217870229889800288e-03,9.035444588956795340e-04,6.970490486638602004e-04,2.011704296637308043e-03,6.501600643611920444e-03,4.866388213202484450e-03,-2.926109047549553359e-03,-3.719928207264190569e-03,-7.867025176895530922e-03,6.122239727728760739e-03,-6.314830935690836020e-03,2.191077657406638717e-03,6.164201282421958444e-04,9.117511510655788073e-04,-8.960594718310247115e-04,-7.164985975279060436e-04,-3.984952930087135720e-03,3.660907163570195286e-03,3.097972567768142232e-04,7.195094878291501128e-03,9.698594364159646500e-03,-5.579086464023579571e-03,8.900834811288204848e-03,-6.010953344781152716e-03,-1.296038205968052040e-02,4.011943322047442119e-03,-2.979078515987380882e-03,-5.444124251370763452e-03,2.364796488581405107e-03,3.844710148702071371e-03,-1.745206186032225012e-03,-2.056796417302035867e-03,2.715280878468501972e-03,-3.522086886809420395e-03,5.586912530018806923e-03,5.468623648190702215e-03,-1.063830367935057839e-03,-6.104029587883981478e-03,1.589001249845266263e-03,-4.432150659182009476e-03,-4.273838912803716583e-03,3.768861160676447927e-03,2.129482370094688218e-03,-9.148964972537117560e-03,-1.993397126238619869e-03,3.230250865802213392e-03,1.251064855831987045e-02,1.632231875969528080e-03,-7.260128411490440921e-03,-2.061724170238280959e-03,-5.523252441008603096e-03,1.430560304567307130e-03,7.647223192918967995e-03,-8.174372136104600091e-04,1.993719114847189184e-04,3.993089097208943165e-03,-6.851732949179836427e-03,2.711692154271302144e-04,-5.257708223687063490e-03,-3.565539277543075818e-04,1.099826548154849565e-02,-2.952862200737147932e-03,-5.046004833253282329e-04,-3.090181902996466667e-03,6.560493591927866672e-03,-4.158992383081166859e-03,-5.299272914462907361e-04,1.774829505057358252e-03,9.805193036179186625e-03,1.526485389932351116e-03,5.764184861090981826e-03,2.923857071560586858e-04,9.011152485644749596e-04,4.664774058656024486e-03,9.373935549992897052e-04,3.354091607252096714e-03,1.025012107600508510e-02,-1.185145507279564787e-03,2.737520057421646011e-04,5.725175174311261488e-03,4.279498749099559796e-03,5.781366906758807385e-03,3.911058276280149175e-03,1.244220168590789641e-03,-1.701058786356576293e-03,-1.345539423002874461e-03,-6.681078200558965285e-03,-5.577414479290518671e-04,-3.252866711343262163e-03,-2.392414331972631501e-03,1.746045554941590779e-04,2.219062634219541846e-03,-2.893861289493288459e-03,-6.958439050071377023e-03,-1.39609
7760539617184e-03,-5.960433476938716175e-03,-3.791812892586911808e-03,1.296557395335728828e-02,4.477228601890991672e-03,4.500879881708091251e-03,-1.107815225776014533e-04,-2.678888405162437383e-03,-3.859731210760450359e-04,8.503714817718981394e-04,-4.053335571020158165e-03,-3.083361761526926762e-03,-6.036753181365634921e-03,3.426581560893832840e-03,-2.215821529937789757e-04,-2.932160803092597693e-03,8.153204335235663498e-04,-5.267608727455550671e-03,-2.925017801995313067e-04,-5.290171623556516947e-03,2.948231992341585148e-03,-5.910181574522630046e-05,3.007306284142650769e-03,1.668582117623100547e-03,1.318044673674379072e-03,7.065358376440875532e-04,-3.971513346685516532e-04,1.084188532812575996e-03,2.874591600794065231e-03,3.779638344773970319e-03,-7.389236042601975314e-03,1.948882960835442548e-03,-5.753101444284395780e-04,4.682808338149163294e-03,1.997343572002550308e-03,3.559726299573859245e-03,-5.029998522590644304e-03,7.654042102712637664e-03,-1.091682629217064979e-02,5.848417835301457195e-03,-8.157117161707323066e-04,1.149405352954176521e-02,8.224591058249977604e-03,-6.137027926245713786e-04,1.005467300215317356e-02,-8.972392593504215433e-03,6.576669401809266680e-03,-6.010696201807428675e-03,-6.124502527854361984e-04,-1.633874850740321075e-03,-4.509528858390081732e-03,6.274589377022800904e-04,-4.028883891592514692e-03,-4.461764154179557367e-04,1.283030608354115658e-05,1.012967326807205116e-02,-1.751113155858900977e-03,-2.024165620375846259e-03,-6.060602105666525879e-03,1.027322901432988281e-02,1.826688490545647862e-03,8.129148068299441726e-03,-7.287345469480192330e-03,-1.315598672250672084e-03,-4.170062336370869080e-03,4.332881515547334331e-04,5.012206826631386558e-03,-4.172902246428961473e-03,-1.051932608018732278e-02,2.690129275070425510e-03,-2.767288458604720731e-03,-4.532569882592324927e-03,-7.486077863669397120e-03,-7.530589945985893862e-04,-5.794111380078275503e-04,-2.249900913432001902e-03,-7.008169023245329129e-03,-5.317298914926335031e-03,1.380469881967791605e-03,2.540898009861410501e-03,9.563597797595243394e-03,3.484302999239127958e-03,-2.785376022841435338e-03,1.101794366029529293e-02,-5.746337468504940371e-03,-7.561253552729256013e-03,-1.096125300359892044e-02,-6.391724570644158332e-03,-2.160948441758544722e-04,2.674984360320529966e-03,3.985460621469058204e-03,3.277797685768035936e-03,-7.433632046936001006e-03,-1.698009500113681477e-03,-2.801713058726903929e-03,5.815115462034899964e-03,-1.165075083314943060e-03,7.364408670889405683e-04,7.661767527448950718e-03,1.195938047103696500e-02,-7.805984252191619337e-04,2.144932739916134874e-03,6.326578337985839205e-03,-8.653546662954063632e-04,3.768936392176266362e-04,5.528909277996773506e-05,-3.452323553720927380e-03,7.798032526776244033e-04,6.888743356064395973e-03,-2.739440475724150281e-03,-2.562550250786625652e-03,-2.664636460292497868e-03,5.629674247299805943e-03,4.350979832026380563e-03,-3.486240184907945985e-03,8.103920226015233139e-03,-1.571205775197691409e-03,-7.019556063567811317e-03,4.906473309670421855e-03,2.064377195323445838e-03,-4.308074425340254615e-03 
-3.095699758121144788e-03,-5.123246837058626976e-03,1.607418109665629169e-04,-3.603808800730671450e-03,-5.997489458395226236e-03,6.174634943200491477e-04,-1.263992733537778156e-02,1.665484086841497813e-03,-8.344896609371812643e-03,1.923654175025086720e-04,-1.081808904610401766e-02,6.553644705621356446e-03,-3.890046835611560717e-03,3.340031626691112340e-04,-7.100498943630131725e-03,5.978426010812228536e-05,1.126680776233268677e-02,-6.507598572382041792e-03,-1.073999500560151969e-02,-7.937435403170268718e-04,-6.676699389049500647e-04,-2.970796267008944842e-03,6.079108513710809736e-03,4.877147930146893615e-03,1.511341652003440851e-03,1.142619314916258448e-02,1.578213010712903623e-03,-8.446048967332610985e-03,-3.671997863467275306e-03,-2.932024444781351270e-04,7.135487085083153233e-04,-8.359718723418294375e-05,2.160925182293365037e-03,2.845309885749802790e-03,-3.163180231962496765e-03,4.937393422118035828e-03,-2.456961728574427479e-03,-7.247379419878336934e-03,-3.917382721128064016e-03,-1.558190279102337475e-03,4.636389272621656461e-03,9.414387154889729270e-04,-1.775756498952961492e-03,-1.125324740741090674e-03,1.565831875261474923e-02,-8.561997841863722841e-03,2.363503526052886019e-03,1.332404333434656762e-03,-1.985081636075443703e-03,-3.824529857387268510e-03,-5.418668124443384165e-03,7.686138789980255168e-03,-1.215523501418837261e-03,4.357791100151998422e-03,-2.387999973848928172e-03,-1.028950788935448571e-02,9.038085854071593568e-04,-1.643763099033516105e-03,7.389544441393070158e-03,-5.354422469108206448e-03,2.471795488319300949e-03,-3.095171785804491323e-04,2.572194864883365750e-03,1.641018006271634851e-03,7.389672694690009418e-03,1.561982515717646113e-04,-3.262748777911423422e-03,3.920949693155810489e-03,8.242167745533900136e-03,8.435158915632667669e-04,-3.433878750299054674e-03,1.253201081715191526e-02,6.067204371595005287e-03,8.432084156382392342e-04,2.156189681020950115e-04,5.308948512504340364e-04,-6.686246164008982466e-03,-6.015634963788642382e-03,-5.786725090310749706e-03,-5.010782818052917156e-03,5.427791635786191442e-03,2.591401426122320276e-03,-2.386586820525991772e-03,1.288753910113225247e-04,-7.964017663761986882e-04,-1.048647312445816389e-03,5.430051244272386148e-03,-3.387416587206534130e-03,-2.972217812719500293e-03,-3.496530552843557689e-03,-9.363561069642603596e-03,2.888270074741001222e-03,4.663209117442298698e-03,-1.236708143799492053e-04,1.812356432868351399e-03,7.070651237809253906e-03,-2.063797679678513151e-03,1.038762206206414289e-03,-2.964548392940162189e-03,-9.618470213808819232e-04,2.071475860316806649e-03,5.061117815344605593e-03,4.431348230226784569e-03,7.936987847252763989e-03,-8.856373170065885703e-03,-5.268799919447864424e-03,2.959776181793707592e-03,1.369964865994898178e-03,-3.559015888498191695e-03,8.187894513320225984e-03,-1.442228582017477225e-03,-9.103422494923969766e-03,2.507949186028941586e-03,-1.232565253080218844e-05,-4.437942514912014344e-03,-7.534449438256376856e-03,-6.528779934984557838e-03,1.005003931181000162e-02,5.936277257922018165e-03,8.178882880495834395e-03,2.906106757811289729e-03,7.419723636188518996e-03,-6.604258741138192704e-04,-6.191628368485143676e-03,-7.828302600983773687e-03,-9.544859954576143763e-04,2.160219945309534324e-03,3.568981712756508478e-03,8.296929033959267452e-03,-1.184113130766258765e-02,2.806642284390467156e-03,-1.636534975632854272e-03,-5.967068358873086481e-03,6.688050097545259268e-03,-7.753106497095303348e-03,-2.663988057088754677e-03,2.481163478517298040e-03,-4.745114742785514449e-03,-1.494025722621714915e-04,2.600342
374811134231e-03,3.269719338948171537e-03,-7.995610401409551649e-03,-8.871865759385271291e-03,5.338298537655090589e-03,-8.712406418995282276e-04,-2.849200285462303184e-03,-3.756055845142802860e-03,-6.113377589422780249e-03,4.193460517321519372e-03,-5.442233250685584584e-03,-1.306524469815540691e-02,2.277711003011997307e-03,2.395432408999094865e-03,2.862769441881171661e-03,6.044164835773890507e-04,1.104166171849624463e-02,6.298044658737522901e-03,-5.232036123483659734e-03,-4.213280882984233276e-03,1.288250136584279592e-04,-2.632069704301946960e-03,3.437651236366183471e-03,-3.102484870459553800e-03,5.546723448187619761e-03,-3.556025496289271816e-03,-3.086477852050337201e-03,4.002610752032269986e-03,-8.953048945242932447e-03,-6.259027295950946382e-03,-5.381420205518881034e-03,1.434394168970008147e-04,5.882968586299052415e-03,-5.973361736531417401e-03,1.474929919484412695e-03,-2.984428839759500560e-03,-2.196524625882823479e-03,6.958129610636414010e-04,3.095507528591806556e-03,-1.404776030797982378e-03,1.161980663079064486e-03,2.693508966611090380e-04,4.357133432695686585e-03,3.217255722045542742e-03,4.986564548491421306e-03,-6.891387327564620080e-03,4.124033430137008516e-03,-1.658225312984892277e-03,-3.436535137661872897e-03,4.124321732344094133e-04,-5.944526607645105563e-03,8.286464247536255712e-03,-6.848493216883330507e-03,1.047667026036120698e-03,2.133661205292724464e-03,-4.933567860030280312e-03,-5.515301455510660672e-03,-3.251768986635832862e-03,1.282492089879797926e-03,2.668575001327032088e-03,-6.660537825276261620e-03,4.887189347627413950e-03,4.668870710116391999e-04,3.292947362956189471e-03,3.212659504847363220e-03,-4.667260427054018349e-03,1.977715502034072415e-03,4.087192637345705634e-03,-2.787817003433968981e-03,9.353302760850811710e-07,-1.868540098771315043e-03,1.490277229037941529e-03,-6.788318123937778659e-04,6.674970301565780716e-03,-7.534339235670992410e-03,5.526228947250574203e-03,1.009803962557023066e-02,-7.852209329763272863e-03,1.687529103740184958e-03,-2.894368515468213280e-03,-3.484225709605034103e-03,2.623480056214105394e-04,-1.490924465463251735e-03,2.640483478970145990e-03,3.058414366864081833e-03,6.270603573192333469e-03,6.551064683563968989e-03,3.726130938086901802e-03,1.720875030758664949e-04,1.488057388330489821e-03,-7.193443284201785255e-03,8.572549136639611572e-03,-7.322117277738632822e-04,3.501931333738835406e-03,-2.957152205124667036e-03,-2.197558071281093504e-03,4.032993877629736153e-04,1.599538538183901482e-03,1.426372466335519649e-03,4.020429308236898522e-04,9.132879829170739011e-03,-1.485124708378855133e-03,1.722304341736437781e-03,7.760578622602539584e-03,-1.589526295682559777e-03,-5.790099342986077847e-03,5.171013921249615133e-03,-1.758059687730734308e-03,2.940248221827871516e-03,-4.215921302941531097e-04,-1.956266123296588558e-03,1.039093657852469595e-03,-8.478662820774116732e-03,1.251893872904979305e-02,3.277189840847524217e-03,1.568220057686692389e-03,2.692296664533232100e-03,-8.247300161485655520e-03,-1.368791017981139642e-03,-5.512216663187923318e-03,-4.794620615035698891e-03,4.377525200432990792e-04,-1.228602058685178931e-02,-4.768400658201870507e-03,-2.737120958035913192e-03,-7.954981488317907018e-04,-5.250913330885145086e-03,5.343638999147855111e-03,3.107517151900259476e-03,7.707859202543887109e-04,-2.535206398533523808e-03,-3.408868681294460854e-04,-4.375836656086286997e-03,5.569839250761674486e-03,-6.170895922118094581e-04,-6.007403374658013516e-03,-6.004955552019056522e-03,2.153834687443718140e-03,-2.746576222838374519e-04,-1.918100515716178586e
-03,-3.686306934668817908e-03,7.574038396693801023e-03,-1.312391101707133809e-03,3.982524007104660049e-03,1.906726525669247795e-03,-5.984298756127852045e-03,3.037608807369646200e-03,-2.468092938582452500e-03,1.842283318064197274e-03,1.595942085203905280e-03,1.263944228000368720e-03,-3.252618204413694394e-04,-6.440032517639478855e-04,1.470749325368355667e-03,-7.514564679992117788e-03,2.322142217051465039e-03,3.353075405667464096e-03,5.318154958123398612e-03,-5.338165859473232135e-04,-7.064284446698494702e-03,-3.412780745568517321e-03,4.607500343098267295e-03,7.739044515643968107e-03,7.892299475044632293e-03,-2.291797060128027922e-03,8.615185320228358618e-03,1.685426816376996238e-03,3.534825966018803806e-04,-3.413340165564334328e-03,-4.275648872798979740e-03,-8.363641791326582170e-03,-7.264419045520274440e-03,6.291241900974409379e-04,-2.370546978576800606e-03,-4.834291522955497591e-03,-8.971649488869360301e-04,4.433085376067647312e-03,1.827232830588115245e-03,-1.479624890497913819e-03,-1.631415396276211056e-03,-2.035858301842212014e-03,8.361591074260908002e-03,3.659157704219719552e-03,-5.624741318389244599e-03,2.995183950083890904e-04,-7.963440021520393555e-03,-9.883017774478144828e-04,-1.933950574998593696e-03,-3.229123864138865908e-03,-3.492934354892939311e-03,-3.403258002989099570e-03,5.894198804110617300e-03,1.034118788249473869e-02,-1.019035966851094144e-03,8.786613131965952464e-03,-2.796277335216839320e-03,9.422455234880268817e-04,-5.304159699721601971e-03,-5.651879704332508393e-03,-1.008038417874960492e-03,1.682318624138100932e-03,-3.304454256004459890e-03,3.897009399802749390e-03,2.429763444404234481e-03,-4.108053697326620023e-04,9.098505746416539475e-03,-2.395131285874002851e-03,-4.575674846952717981e-04,-7.196466549647340116e-03,5.773390840290583415e-03,2.338507095406754739e-04,4.270394741354804208e-03,-1.237346619074793878e-03,6.180327441781070276e-03,-9.952327755582362698e-03,-5.203180718136880649e-03,-1.997020460886695954e-03,-6.387065536061567461e-03,1.705530950326639454e-03,4.233916708207386727e-03,-3.403811276986670655e-03,4.791019724450871890e-04,1.678871148272433576e-03,-4.619434876383913743e-03,-7.228865593838570448e-03,1.075254878145617474e-02,2.512147524479173798e-03,-3.488556831420408077e-03,9.285135135233825790e-03,2.328488332985165134e-03,3.450180293316466775e-03,1.042859655974914639e-02,-2.715373082130354693e-03,-4.880090543029567615e-03,-1.757013418478854325e-02,1.070255469311725757e-03,6.747277491762797459e-03,-3.115025931639549251e-04,2.808292554105410420e-03,-2.686341802229450914e-03,-4.179360019690777674e-03,2.464337901442552586e-03,-4.472853929861419359e-03,-3.649625814541034973e-03,5.038988013372098888e-03,3.141522948738404569e-04,-7.406694322536965143e-03,5.496398828489606342e-03,-5.635414994619451085e-03,-2.653815706562080018e-03,4.622665163600549534e-03,-2.129496186051103163e-03,5.882695650762879633e-03,2.882668812002677893e-03,7.701732590705397119e-03,-2.312043748358256177e-03,-1.124089026517889004e-03,1.261652654833843139e-02,-2.619737443431296477e-03,1.119274701483939909e-03,5.314074252679736737e-03 
2.643445452815701077e-03,-5.594934965237297400e-03,4.311890849621439689e-03,-5.151824872949191392e-03,4.191304109888114096e-03,-4.088141923721615688e-03,1.295592883620230729e-03,-5.698010853030198442e-03,1.073333423357323950e-02,-2.891047247704662659e-03,4.195040764901009425e-03,-2.120192576196896529e-03,5.467320459863570523e-04,2.433652484183744871e-03,7.590092623764336729e-04,-1.097174615936722682e-04,-5.730529909609326710e-04,4.907440537744989055e-03,4.132015874606588708e-03,-1.725224199994785084e-03,-2.938279081522560208e-03,5.454499153612620693e-03,-3.537391857884934071e-03,-2.596147866167775996e-03,-5.320095521397407400e-03,3.909118550243953320e-03,2.328326031834263878e-03,3.865757190199844637e-03,-8.245158135694004905e-04,-9.725898679102873076e-03,4.539972401706317273e-03,-1.603025555399909116e-03,8.007024296832535287e-04,1.910595432115600855e-03,-1.076118860998506038e-02,4.161144717096666203e-03,-2.662591774311634085e-03,-2.444047149794261603e-03,2.185350815626788731e-03,-1.184656155340207961e-02,3.199880833403197580e-03,-6.879016018892887205e-04,-5.390762940555266815e-03,6.502979250113241410e-04,-4.717255770039175712e-03,-5.411850227972700780e-03,3.314175167792946138e-03,-1.569928746959033637e-03,-9.509940387796827349e-04,-9.530545793441806690e-03,-4.032218466897877739e-03,-2.168989222958771039e-04,6.967426949819009990e-04,1.240728523824288006e-04,-8.386390663824751105e-04,-1.249908391940868745e-03,1.762689810584302380e-02,-3.107150368937630809e-03,2.656597145369599729e-03,6.002086721182151551e-03,-7.026915139107152777e-03,-1.847582385342266082e-03,-8.378921615332310391e-03,-8.254495582362283979e-03,1.211577623770273421e-02,-1.311115659491588793e-03,-8.463824606247257257e-03,-5.572460429516509816e-04,2.711476154129980904e-03,5.808231940677904132e-03,1.593801804914560293e-03,2.030888842875496619e-03,6.630371965031230195e-04,-6.063803621159061675e-04,-3.018508798353789369e-03,-5.576110612902057673e-03,1.004566293524240643e-03,6.857176578492055891e-03,3.987322188979741861e-04,2.726728253442555882e-03,-4.102417039760264221e-03,3.293782354542224337e-04,1.739697707529075594e-03,9.044509042474309715e-03,2.277193949844668469e-03,3.032421885991947331e-03,8.023217372560962138e-03,-1.981945784395903413e-03,-8.733025057295541738e-03,-3.406670799204725391e-04,2.966262543482055258e-03,-9.896293634652469917e-03,-1.676021020933327315e-03,-3.329980455336093662e-03,2.085496106549545280e-03,6.603443148283471946e-03,9.770027691859559799e-03,-8.447719995146242555e-03,7.274075486209774337e-03,5.102858371050076128e-03,-5.170767130691501616e-03,4.816032260717913877e-03,6.226411733854175182e-04,-2.489536945113709175e-03,-6.711487025965864796e-04,5.826537120002547518e-03,-5.625813066524640955e-04,-2.024587221864405321e-03,1.189541024984836371e-03,8.450078940215342621e-04,-5.595002217164445242e-03,-5.919823123481491797e-03,-1.122916391939739883e-02,-5.427010160704734366e-04,-6.776269131256030913e-03,-5.718877235169101021e-03,8.495247504582548528e-03,-9.461231347806275710e-05,1.176981881520187045e-03,6.346225414515133416e-04,-5.599717137937499679e-03,6.061945298034132177e-06,-8.494040523500559318e-04,6.280619296553219787e-03,1.588211754532546649e-03,-5.150975689706725237e-03,2.696114319536919383e-03,-5.989663491458368030e-03,7.897614872909219583e-03,3.893543637931782229e-03,5.080011425572128481e-03,-5.296556173340185962e-03,2.865385049144584476e-03,-3.835974958479386378e-03,-1.651475796255631430e-03,1.285552751995400330e-03,3.637493542310856701e-03,4.327415174099274314e-03,3.130945600656177569e-03,-1.16946274
0933137563e-03,4.674435860500928547e-03,-4.853328924548647345e-03,1.685966190432861217e-03,-1.416192877129334371e-03,-1.917347093494561937e-03,2.144713514001188748e-03,-3.178289747260087516e-03,1.616113154467538554e-03,-4.982448140474501387e-03,-7.973284189024626434e-03,-2.611919291849982422e-03,5.756378271646416661e-03,-3.982820760158819334e-03,1.349045628253711313e-02,7.294133482869349927e-03,2.056882928874281639e-03,8.243324330254455518e-03,3.771414717753439859e-03,2.662631397205001901e-03,3.612345243863921557e-04,4.526489732003120305e-03,3.136791058617279318e-03,-5.093838443201764623e-04,7.885335422927890886e-03,4.005931490079552362e-03,-3.515306646385984394e-03,-5.147310660939351419e-03,2.029644046731240558e-03,3.632391763844860632e-03,-3.671266569818046333e-03,5.437509732557826124e-03,-5.805173119408611748e-03,3.865325711487311959e-03,1.068908133132052962e-02,-5.109037429116958413e-03,2.893609477803630540e-03,-2.014292038241104719e-03,3.898454402427942080e-03,7.119868141423988522e-03,4.930818984147612369e-03,3.462266028333543279e-03,-2.139177717718120531e-03,2.043882696397177241e-03,6.934691033919662640e-04,-6.669017116606910066e-03,2.599098869260320181e-03,-1.266521756765059742e-03,3.335546894939494470e-03,-5.847025639918516232e-03,-1.216898987369001992e-03,-2.715807891769389006e-03,1.423607531185528416e-03,1.106653055229100784e-03,2.781366362762109374e-03,-3.439590761825983990e-03,-2.014275865902714815e-03,1.846309041792048146e-04,-2.992877143085551344e-03,-5.210778224268785078e-03,1.628686123516324405e-04,2.161360390421413272e-03,-9.356424112204039800e-03,-1.300089421125978206e-03,4.060306715808813770e-03,-1.790931305209128317e-03,1.106767469645782848e-02,8.927045341614122179e-03,7.062311148727816457e-03,-1.057049597578709962e-02,8.654398176700546768e-04,1.987861362380802070e-03,1.709035314528099337e-03,5.299073361689148748e-03,-3.055714781093597093e-03,-3.023604986851763550e-03,-1.116320011185891609e-02,4.507888145821541299e-03,8.202211615725067337e-03,-7.945766664239867910e-03,2.945883828670424649e-03,-1.610309991972379111e-03,2.884490036399809353e-03,-3.417100281278999396e-03,-1.131803139502521099e-03,8.205298982066485183e-03,-7.021662085033111324e-03,-3.147490804952703634e-03,-7.172757501733410986e-03,-1.047647532753546648e-02,-8.026128036990393816e-03,-5.932857537936189231e-03,8.462762899548974130e-05,-4.186792475735874656e-05,-8.589448973108043325e-03,5.645943140910207603e-03,7.681126786346228509e-03,-4.247837447798784240e-04,-6.703673710475246614e-03,2.365248859834500825e-03,-6.746359934916241162e-03,6.027654924544335749e-04,2.851770274567452974e-03,-5.101355963426322786e-04,-4.773642006564961932e-03,9.730448652795152184e-03,6.994845206295932996e-03,-8.861788721141145714e-03,5.874545511622612445e-03,5.907465246898360028e-03,-5.567184388440829176e-03,-3.179092979290619198e-03,-1.061937527893372715e-02,1.009262260670354752e-02,-7.003735379628667559e-04,8.097113746442270591e-04,-2.791645472894895268e-03,8.775321984559417490e-03,-7.625203228985182914e-03,2.054468753290466729e-03,-4.743854783490006853e-03,1.884474043376418257e-04,2.932238821672651890e-04,-4.201845253921616431e-03,2.962643820531095807e-03,1.780622924176580900e-03,-3.424148278148784075e-03,-1.454220655738260341e-03,-2.648736258156602057e-03,-1.002687621689037859e-03,-6.961019404246095730e-04,9.774412006128342309e-03,-2.927530207964778447e-03,-4.675286422933648566e-04,-4.377258776418039427e-03,2.711579419574739955e-03,1.531842655575720719e-03,-2.842426473067979215e-03,1.087885309185017413e-02,-7.891896388514771377e-
03,-4.590894362344267315e-03,-1.267095469529164707e-02,-4.017497702794850176e-03,-8.117100023920995749e-03,-1.359451283296024895e-03,-1.048730203980208046e-03,1.065801785933408533e-03,-4.988622063217867421e-03,-2.607310302795482859e-04,9.258289176964569793e-04,6.407063995608582724e-03,-7.265838383022803736e-03,-1.986147882546062012e-03,-1.216864172799679740e-03,1.402839993668207955e-03,-5.896611923471684925e-03,-2.304238712212848279e-03,-2.346941611666638304e-03,2.803620503807691469e-04,1.679466150793745664e-03,9.924579899118002920e-04,-6.923186105101801783e-05,3.087792463411059320e-03,-4.643474178386253551e-03,2.012379981892782263e-03,8.729243352646825181e-04,-1.183649455975006003e-02,-4.076722191425810604e-03,1.057542394512861039e-03,-6.438341499199559280e-04,6.412958590754819048e-03,-3.203440995266479581e-03,-4.966931617281667571e-03,2.972190687832914312e-03,-3.417169348059400594e-03,-2.672437270645613289e-03,8.438261265293685309e-03,5.819240012208548455e-04,9.094504552610493606e-03,9.373286119040145567e-03,4.951793852978375772e-03,1.329829496985494028e-03,-6.474085907115959687e-03,-7.209866156499947817e-05,7.047641917654662649e-03,1.733600359651980144e-03,-1.987362847205638005e-03,2.338595533670224502e-03,-3.516951503988854162e-04,6.866033791322286896e-03,-2.739908101959826099e-03,-9.213272749421930544e-03,3.162605185960118588e-03,-4.218825495759413930e-04,8.323499992935626344e-04,6.331178568138528216e-03,1.872010342337525638e-03,7.651678555907144401e-03,1.083608140980301228e-03,4.388204358141367840e-03,-8.360435682027754847e-03,1.838483081550850405e-03,-2.029448010958607388e-03,1.326790487656104951e-03,1.097996460198752237e-02,-1.628448083197710011e-03,-3.397208299461210255e-03,-1.470926006504553411e-03,-1.291410084071641308e-05,-3.433476514092786112e-03,-3.439006177147745785e-04,1.055935710590530895e-02,-3.363010254226566492e-03,-2.450466754782663122e-03,-2.388916408767083593e-03,2.763003859087077504e-03,6.558883583649183398e-03,-3.423384985633008606e-03,5.290067966520517993e-03,-2.397086336149072648e-03,1.027670049755597488e-03,2.633313408489279701e-04,-5.510580723541618278e-03,7.815308421013359569e-03,-1.971030391964779128e-03,-3.969955596875362341e-03,-2.669690578247645683e-03,-8.142243949249288945e-03,-2.833489309118399514e-03,3.611125340603219425e-03,-6.003029370979136303e-03,-1.156627429533195126e-02,3.653220647829806718e-03,-6.513597693016117025e-03,-1.568554934257959622e-03,4.256535589813716190e-04,1.287654097651370205e-02,3.908171661839023037e-03,-7.188528159101849769e-04,-3.703731379579096490e-03,-2.127982815273708403e-03,3.688336133447679514e-03,-4.086939312652863171e-03,-4.284750124322695732e-03,-2.216893050581189139e-03,1.113563514753978541e-04,1.828397612125681737e-04,4.499005004346684077e-04,-5.196363609499228715e-03,-1.201012907032039320e-02,8.053854130049682009e-03,2.541593210680269214e-03,2.587476617920016382e-03,2.194088921652928310e-03,6.884087316691286582e-03,1.264591256829659992e-03,1.987371451185047152e-03,4.001800390456570523e-03,5.500137061909152550e-05,1.499762452762793373e-03,-4.038237142957048185e-03 
-1.853450663018008744e-03,6.568047516776001366e-04,2.968551639736193536e-03,2.799562511503565429e-03,3.380057744181940143e-04,4.960482147131993653e-03,-7.455625677234027117e-03,-3.334860559694335404e-03,-2.998983307813237777e-03,5.937460708308884942e-03,-2.866862698631429587e-03,-5.688331431615515493e-03,-2.369730158884301129e-03,-1.020822489097726951e-02,5.515873290629328893e-03,2.200464210946239275e-03,-5.012948375650815285e-04,5.837522222992239627e-03,4.739406652972366976e-03,9.035905888083572285e-03,-5.133890255663698006e-03,3.272860279969863530e-03,-9.879086560006357723e-03,-1.088081874008318050e-03,-4.196929096890606933e-03,4.259000038041845480e-03,-6.631712528315208965e-03,-4.925639058278244312e-04,-1.423731896536296199e-02,-2.033069637635998017e-03,-4.076564943302136458e-03,-3.790393406946594560e-05,3.406486943341928246e-03,-1.072561426596015111e-03,5.576194381746740585e-03,7.589380101561347611e-03,6.333804370769497739e-03,1.284258827357211748e-03,-2.728289060499249335e-03,-6.996668957419828472e-03,-5.147462162181948644e-03,5.500642674181166654e-03,-8.139225710950497407e-04,-2.777133826699337230e-03,3.675631148409433998e-03,-1.663243038503599627e-03,2.138220180389936199e-03,-3.985897710954176137e-03,8.209622736442868246e-03,-5.324401469526957552e-03,-3.564011058677582066e-03,-1.115258917282578284e-03,-2.868356862858531010e-03,-2.716034669063277420e-03,-3.495199414316452289e-03,-2.550327260896574980e-03,4.997935012639619611e-03,1.208739490215198420e-03,-7.030098050046579110e-03,-1.021159064224936956e-02,2.161163749850022152e-03,7.228679304375960045e-06,2.903410206162442843e-04,7.378546494929726259e-03,3.984607654598825051e-03,-1.056368922380836357e-02,-4.105992769576287393e-03,-5.918249444956735078e-03,3.830045242558902888e-03,1.561675383831750454e-03,4.507565583370351861e-03,7.100040023196549019e-03,6.178251249601527870e-03,-6.053088357030636480e-04,2.783681326649493568e-03,-2.758856064589060297e-03,-2.124362021263806119e-03,1.158098244937371204e-04,2.389075565864607394e-03,1.552734958301624311e-03,2.346177189326249194e-03,-5.211582870266756422e-03,-2.269264538830508427e-03,-5.236473372295828016e-03,-3.598010140963055908e-03,-1.791000016814368531e-03,2.244371253810271624e-03,9.086303729278917204e-03,2.324803053200596124e-03,-8.051049646010943750e-03,-1.704081929889870268e-03,-6.169888748778093457e-03,4.817583857496830768e-03,-6.823950439771080297e-03,-1.200906028315589956e-03,1.081534112396527296e-03,3.965769765432625334e-03,-2.544104399384161533e-04,-3.753118225064445979e-03,9.499020230163928227e-03,-5.583098450867146883e-04,-1.284630170240743741e-03,-2.412865697186869149e-03,1.058582582514561686e-02,-3.615981326537110168e-03,1.435643545981606399e-03,-1.108767890725375271e-03,3.189217674152674056e-03,-1.021305796192418982e-03,-1.419563940509817150e-03,-3.525739620463069334e-03,-2.844711660197293303e-03,5.733620290153210522e-03,8.026822098086135865e-03,-1.078273143620551121e-02,1.122943556550109762e-03,1.849848688801245282e-03,-2.103664082430358861e-03,-2.990413740566057839e-04,5.831681414609602744e-03,-1.022095069206455526e-05,-2.271272822140322362e-03,-3.639969702316630043e-03,6.785941749962605234e-03,4.852606523308755464e-03,-3.122327909116033584e-03,-4.889520337374203725e-03,3.611839878616052244e-03,9.494505200030797787e-03,2.583544213259725969e-03,1.376694659272139840e-03,-5.967942395842439074e-03,3.257243860921730117e-03,9.158269461408778938e-04,-3.964476947977806261e-03,-6.916750600038978946e-04,3.650059489613003989e-03,1.099983369813874526e-02,1.455506044147975931e-03,1.5125
49479202469124e-03,3.163959327402234706e-03,7.238807110773840502e-03,-2.221428464314301968e-03,-5.845113110924424898e-03,4.165732303736526497e-03,-6.592496239806087087e-03,-6.817181386143835072e-03,6.223637367663673557e-03,-1.022344274131142469e-02,-9.937953607113830801e-04,1.506145167191611650e-03,1.955987402429920727e-03,5.650194827203374251e-03,-4.572044146209984158e-03,-8.286546462397809815e-03,-2.954783468350679742e-03,5.251772029357127010e-03,5.803295507474838391e-04,3.142678702231365708e-04,5.824859531271557068e-03,-7.909525947984736433e-03,9.580535458189863920e-03,1.471763223679018731e-03,-5.375347513540427134e-03,-3.123594791716715127e-03,6.150105127619799605e-03,-8.427396478861348336e-03,5.165346240852984605e-03,-1.998801826374873181e-03,-3.474850367618563876e-03,-3.964920433873120380e-05,1.805552634507008548e-03,6.107826401977160119e-03,2.053138217041862272e-03,-5.090242232695715158e-03,8.251354702479879628e-04,2.101686738951635738e-03,1.182725169910637348e-03,-1.564776356623400009e-03,-1.686361498999139471e-03,-8.216220998277456999e-03,-8.741515351580829820e-04,8.630733979505748887e-03,-1.620306541382781880e-03,-2.725098546514419674e-03,-8.650099608537837161e-04,-1.592288982990039490e-03,-2.972013103310710593e-03,-3.415665102217585545e-03,-2.961506807989965767e-03,-7.345154464683961888e-03,2.782155612986549238e-03,8.119702123280968242e-03,-2.564668186520848662e-03,3.354399238762455215e-03,-1.406184246700767663e-03,7.036710325545365461e-04,2.189607759098881508e-03,-6.520248412653389079e-03,8.608199128771211114e-03,6.025295444258414734e-03,-6.845861268800748980e-03,-1.224873485555332290e-03,-1.014465250641524230e-03,8.081232696392348994e-03,2.060035586587997351e-04,4.250926956361401020e-03,-1.637554379947572369e-03,3.136702587673663965e-03,-4.648244525546067654e-04,-5.057332820919507171e-03,5.690934809682494586e-03,3.361980622067677841e-03,-3.412130096983923694e-03,-3.148310480359695247e-03,3.136287379232170379e-04,7.258769925475447131e-03,6.546694959573523542e-03,-5.191554148749836367e-03,6.537884656252338812e-03,6.649990121403858041e-03,-5.425568567158438023e-04,-6.341482799356571234e-03,6.330498843892767673e-04,6.378602719221080838e-03,3.491769640431747167e-03,3.371778014340150349e-03,4.632711821418937503e-03,-2.191331491707743453e-03,8.828104460585271774e-04,-3.226332689763540713e-03,-1.523850159975281490e-03,-4.073094723673813776e-03,5.608503286524802291e-04,-5.547780762563775565e-03,-8.482743426723617433e-03,-1.177574961286553950e-05,-1.556803488004702236e-03,-1.731592269836995960e-03,-1.415335729853392163e-02,2.061777755741180020e-03,-9.232608887359617864e-03,8.790160203436774141e-04,5.621053229372988472e-03,3.942893278438730671e-03,-3.703922823406288149e-03,-7.606385002986265978e-03,-4.612822627968350860e-03,2.543599618193465722e-03,4.288419921396196150e-05,2.488396440699645537e-03,7.671156998042247309e-04,4.719209863546672788e-03,1.014795224012235951e-02,2.699214486121917587e-03,7.385708710332342802e-03,-2.746089549369330578e-03,6.137321272556371839e-03,-3.516949600390502188e-03,9.230126817976612615e-04,9.316421812387669366e-03,-9.565020938030499228e-03,-3.443997382872301862e-03,-2.357013616144874521e-03,-3.937570759282565253e-04,2.787120710524250249e-03,-4.843245393837087112e-04,8.977433191948580235e-03,-3.711603070617702609e-04,-1.214755221942070062e-03,3.735234338602116001e-03,-1.368254172910085293e-03,2.917246384032456518e-03,1.405263785328118853e-03,4.040817318623800454e-03,7.527469890339691036e-03,-2.092354419611753592e-03,-8.847947139397058236e-03,3.175988706135239
939e-03,6.090094259256294273e-03,-5.823584892521741982e-04,5.692575693988653068e-03,2.344514170479647559e-03,-6.382572016762746991e-03,1.508279291135943868e-03,-1.926157900268683342e-03,6.003493590557005578e-03,-2.564134796823067195e-03,6.921596759778142656e-03,3.836129787911197954e-04,-5.366429426002195142e-03,3.809031753679517282e-03,-6.969410608474720201e-05,3.103811593006404988e-03,1.295879080128106188e-03,-4.106627022474705103e-03,2.668417682384898880e-04,6.491095637485186712e-03,3.018483649109829879e-03,-1.692855380813270078e-03,6.547559718908517890e-04,4.060186863396054655e-03,1.460143174641443786e-03,-2.131047899523753723e-03,-4.457429790517667550e-03,-7.277612418691704714e-03,9.793658585667767546e-04,-4.028441437021292211e-03,-2.682973499369130637e-03,-1.145224018727680889e-03,1.216720102823987300e-03,1.309161147903886116e-04,-1.135791881833317666e-03,-4.963945124888661901e-03,-1.389678464702452480e-02,-6.139700154975141766e-03,2.084035743974498901e-03,5.943860630883300057e-03,1.071161840719424101e-02,-3.364358841066362408e-03,5.204963120437839340e-03,6.697824484915612664e-03,-6.937785843541412201e-03,7.792786751189412220e-03,-2.905143751653316200e-03,-4.620931622321668370e-04,4.450123146919497674e-03,3.821604589875371352e-03,-3.893372855081400196e-03,-1.732711109618803027e-03,-2.738665005525163162e-04,4.418902306541883616e-03,-1.246881349082327659e-02,-6.841183024148837256e-03,-6.030716871772315128e-03,-4.713898826256836608e-03,-6.081909557866056661e-04,-1.521729101373274452e-03,3.022281618419816150e-03,2.773377265255336840e-03,6.839334676799763016e-04,2.908654722416781011e-03,6.629217932286895869e-03,-2.838147714546024845e-03,5.390441796741239980e-03,3.186466398440979640e-03,-1.219923050977344062e-02,-1.196163087556860721e-03,4.008781797162799636e-03,-1.979547068455342427e-03,5.830489458923091700e-03,-3.779753330145702722e-03,1.843394314523207978e-03,3.468755122579356403e-03,2.094633292014562095e-03,2.172765923421498668e-03,-7.701371835756701083e-03,-1.238031575720923740e-02,-6.714394089364598452e-03,9.485505805396624371e-03,-1.002921943804487709e-02,-4.713432575192401559e-03,-1.047459005313776045e-02,-3.545477017657160460e-03,-6.044256134506786155e-04,-2.794757208450600675e-03,-5.941725822725779345e-04,8.188006365704307609e-04,4.002266830205838954e-03,3.851849708280680705e-03,9.596047742659566566e-04,-1.570147075067378185e-04,8.432631045530287334e-03,-6.189458201213982221e-03,-5.510315163958658287e-03,-4.107187832521810161e-03,6.251435667705914929e-04,2.213222462220251156e-03,7.299602979325374788e-03,-7.346679131117738468e-04,-5.647351964475307885e-03,5.191814990456834929e-03,-4.850438699100798946e-04,-8.709502665597512044e-03,-3.429405699864902530e-03,-2.031157650207543203e-03,6.103593160779545099e-03,8.443589234231845334e-03,-5.230136144973295334e-03,-4.711144106758439337e-03,-1.966992259163663059e-03,2.358683079194418691e-03,1.018688840965646658e-02,6.160455833026923060e-03,1.786888451792828340e-03,1.188856657193786224e-03,-5.326269361298635336e-04,-8.151995186883358169e-03,3.184357552419384615e-03,9.244626467562449407e-03 
4.350048199337707387e-03,5.315387723252985690e-03,7.705019348705701918e-03,-2.164367108465739369e-03,4.747317854499571825e-03,-3.203840174330279279e-03,5.915205192152006748e-03,-3.237885049861275507e-04,-4.943519632679381492e-03,-3.806120607232916803e-03,3.154662377824236658e-03,-1.727395918021510865e-03,1.298508189486438993e-03,2.188192025877754628e-03,-2.108461734628789500e-03,-1.179638293336865429e-03,-1.430118747275735186e-03,1.071782751092765313e-03,7.136872706690344098e-03,2.895195194719277741e-03,1.163227881527700153e-02,-9.440954085105250637e-03,-2.127077189746261025e-03,-7.597590308492598206e-04,7.328686746156818678e-03,-8.867898419642736068e-04,-5.267764570530967388e-03,7.736134438935705904e-03,-1.282418190295417674e-03,-7.806614361578870275e-04,-3.498021006339896086e-03,1.193283563762967577e-03,6.571603395031361435e-03,1.768388100468600314e-03,2.972280647795540621e-03,1.804620052579121231e-03,-3.284493709054752287e-03,2.173173008831963350e-04,-1.910764497537125774e-03,2.637267632802154857e-03,3.126400527688403923e-03,-5.518518051714258778e-03,5.911614715530076093e-03,-7.491512082548522355e-03,3.092713938481852966e-03,7.304686612069358209e-03,3.591037666225672335e-03,-1.301809504617943043e-02,2.877311663779372197e-03,-2.386233143725043342e-03,2.425028225772199193e-03,-4.616908391425885057e-03,3.152069580199738912e-03,-1.346927300366478585e-03,1.404051457210266952e-03,-1.049338431511190415e-03,8.125769013127814508e-03,2.801965851290786572e-03,-5.674624308153803330e-03,1.040639251084105868e-03,7.878801986421312201e-03,-2.643447770491757359e-03,6.023430567163452244e-03,1.323306563198207476e-02,3.426969287693344712e-04,-4.536422530082829217e-03,-1.020547048603642282e-03,-7.616206473671136533e-04,-1.183198406380393201e-03,1.263772043425358607e-02,5.800049757736425518e-03,-6.466925729487753727e-04,2.857338520529839783e-03,3.381161362510631471e-03,7.413775906622101231e-03,-1.395893067803004838e-02,2.974866120061060975e-03,-1.232059065909918080e-02,-6.819292123667010028e-04,-2.471986006149183828e-03,-1.074223478755711742e-03,6.681668978152093159e-03,9.964980429586175678e-03,-4.967164470697741242e-04,-2.583108234127840137e-03,-3.136802052174238620e-03,-4.769144329651022959e-03,6.379298609680199381e-04,6.615306785362338148e-03,1.152731353510591875e-02,2.313072649451589080e-03,5.834506485183888445e-03,2.874920517107409403e-03,4.592994597584962833e-03,1.551127704774076356e-03,-9.181402854695073362e-03,-3.363399623009238372e-03,-3.115624627910583335e-03,1.003028789596851697e-02,6.387050790481175978e-04,4.118863137519667832e-05,-5.683753862566122417e-03,6.997548159596765627e-03,1.853558818572351554e-03,7.009979959652429737e-04,-1.381787916445617950e-03,-1.542310374132611472e-03,1.811241546164499587e-03,-4.464236509722076349e-05,-1.816768856748096368e-03,7.347611326797498336e-03,4.746525583469206680e-03,-9.690093765627681363e-03,-1.997988339312994823e-03,-4.019611600536645451e-04,3.417365099465276192e-03,2.014305716457912183e-03,6.354965510098426677e-05,5.847082991557846221e-03,5.603017957821721309e-03,6.228764454463893083e-03,8.038037403797156782e-03,8.130574420549152581e-03,4.891295422090004806e-03,2.389709065167236034e-03,6.376480746928795199e-03,3.436151781928401070e-03,-7.992215008809437984e-03,1.408068818157105166e-03,-3.831401688679032976e-03,-3.040525083795553264e-04,1.521926421677592635e-03,7.874707471246916377e-03,1.313303604325121452e-03,-2.430076216913103129e-03,5.002089255958930628e-03,2.860344584759348056e-03,1.100096285884406052e-03,3.530030497698800548e-03,-1.773082650892300171e-
03,9.434245549726947080e-03,-1.729558099700690159e-04,2.093590203315875411e-04,6.153267538348743647e-03,2.584470588929444444e-03,8.108933089314909878e-04,-4.498967636853545143e-03,-2.820912225826958334e-03,3.096168081614052809e-04,-3.354924249954466427e-03,1.343181125474923786e-03,-2.706118545217314850e-03,-2.519305850363590202e-04,5.118843705115718942e-03,-2.811901587091781337e-03,-1.051624418225233400e-02,6.612859571594030149e-04,-3.983118763643156424e-03,4.694961131720689143e-03,-3.749904146093180281e-04,-3.707837509591988684e-03,4.326488140132477532e-03,2.753134821544618756e-03,8.405997249022035753e-03,-2.945512889495701010e-04,-1.732800832398071313e-03,-1.120157446452687763e-02,-5.645680364278714974e-03,-6.426721609685615184e-03,-1.132163421124127156e-02,-5.147249869278483839e-03,8.113287413370407408e-04,4.171618270495670334e-03,9.379516213266046384e-03,-4.359045558866817167e-03,5.458476349112947185e-03,-2.288424040380259306e-03,8.341912794234623103e-04,-1.019712549324902628e-02,6.567389326650653440e-03,-1.018007123511896267e-03,-1.648493120442090891e-05,-1.741442484987453573e-03,-3.159485529370598643e-03,3.222678288517983464e-03,-9.478503416647203819e-03,7.888653414507822842e-03,-1.017033170933093721e-03,1.229952881719289029e-03,8.818950879476590825e-04,-3.658577341921393205e-03,-2.101909559234846694e-03,5.269709619582237971e-03,5.046524805408173896e-04,-7.299939155068170191e-03,2.123141766349869678e-03,-1.712412697064690653e-03,-5.255056743105993543e-03,1.764455935971278840e-03,2.990742912631124545e-03,-6.825270182300106903e-03,-1.163279622151357912e-02,-2.225413303206913874e-03,6.383594001144192627e-04,-1.922432257754182758e-03,-2.598844660269165191e-03,2.029676723026516919e-03,1.047674297278852057e-02,-1.407382693975982139e-03,-7.360364458448357718e-03,-2.873006752552916248e-03,-1.817756375691388388e-03,8.561560590878843427e-03,6.187612608867156666e-03,4.422389002966394152e-03,-9.038894189378863728e-03,-4.360483833618460786e-03,-3.017631496937557296e-03,-2.438484662800170094e-03,-1.065961883809246613e-02,-6.086683986167320398e-03,-4.359742229497558762e-03,2.454824336734774161e-03,-6.378805609557115265e-03,6.108467631449314081e-03,-8.444173361156562252e-04,8.118150588308403054e-03,-4.203207791474469768e-03,2.049106116622986633e-03,-3.804920843344477849e-03,2.797164336190054515e-03,5.047676813943478879e-04,1.788963496581082863e-03,-1.149459541433920859e-02,-6.100773693029887257e-03,1.457393791041925943e-03,5.686637413947333471e-03,2.490958958836293497e-03,1.052963916573918185e-03,-6.552774273240372094e-03,-2.801895657651221234e-03,-6.520451442973747985e-03,2.405906388341465946e-03,-6.429524037528069891e-03,-7.554696228856905282e-03,-1.903809867888348387e-04,2.981616437755136260e-03,8.422555269415243559e-04,1.867118239465788204e-03,4.666676605913099344e-04,-5.751563383994830192e-03,-7.169397858377322443e-03,-9.649926065301025297e-04,2.598158882360442241e-03,-6.051311650638834666e-03,1.949609780860252003e-05,-1.561138485485105243e-04,-1.353934008581913822e-03,-3.605318361096969751e-03,-4.174145990256740955e-03,1.798089411236373841e-03,-1.053450397500959278e-03,6.619263507899433678e-03,3.289409294690434030e-03,2.851332776212210218e-03,1.856549237058344399e-03,9.420317953856510371e-03,-2.450757244750691958e-03,-4.273314704133475764e-03,-6.438546221766129618e-03,5.029832053316609514e-03,-2.709131461785285831e-03,2.007552250333059091e-03,2.834283708159102046e-03,5.816322980555969175e-04,-6.120310593372603339e-04,-6.480517509399064471e-03,-7.272456925179006382e-03,3.117075282647357309e-03,-
6.607822196495011409e-03,5.341276141622122747e-05,-9.445085785972459624e-03,-1.032897325317042037e-03,4.746840018629744883e-03,2.811215884459561300e-03,7.533981800686507038e-03,8.013293136347054632e-03,3.591346448976925551e-03,-1.386613860857371146e-03,8.058129430822773970e-03,7.485041901089245324e-03,2.558682252056638639e-03,7.817232835109096806e-03,-5.007619635250050091e-03,-4.177864926318203449e-03,-1.784010994446347037e-03,-1.342716569180053096e-05,-4.506131968175866760e-03,-3.340154609926883865e-03,-4.230834066461941143e-03,-5.639534428671116699e-03,5.794845260594749629e-03,-9.814593726720161407e-04,-2.369117375507786335e-03,-1.651631206802272548e-03,3.704423802913608520e-03,4.166006004815953935e-03,8.027623403489642095e-03,-5.920630347430745274e-03,8.346007815873874875e-04,5.141314606028762890e-03,5.638705663376159492e-03,6.625199380977697975e-04,6.919145137217164523e-03,-1.141235310689555672e-02,-9.064788527284319236e-05,-8.420067469260034305e-04,1.856529250413220154e-04,-9.122438163979153247e-03,5.854708429742741689e-05,2.877163108668752672e-03,-7.405766337081279814e-03,1.910647121629167151e-03,3.523645900622012460e-03,-5.215256939087549622e-03,-6.775607837567162658e-03,6.381678589678313994e-03,-2.080492593079086468e-03,-9.289995442390007116e-03,-7.071739522300383995e-03,-1.155576753886032983e-03,8.942726678201338639e-03,-3.321205707657014401e-03,3.279063696733198834e-04,6.560035684861467535e-03,-4.394901460327057391e-03,9.461134398779505461e-03,8.989822932808241851e-03,-5.527947749631893087e-03,-2.949802397050675391e-03,-1.087891996184088723e-02,3.584606136021303326e-03,-2.612440448724746758e-03,7.426300197471828214e-03,-8.330364938788048209e-04,1.047981155526366698e-02,8.498289839805195020e-03,1.173691397263272075e-02,-7.686961585896365245e-03,-1.008305581650571943e-02,1.195267088510127036e-02,2.701580123780125773e-03,-7.448976288921727203e-03,-3.448893049471493059e-03,1.089437899846043122e-03,-3.184446453567357782e-03,1.614545857565335366e-03,1.604497164624885640e-03,-8.965684639677215517e-03,3.813342117544492316e-03,-3.902123406554477113e-03,-6.200520556246935829e-03,-5.630697999245863535e-05,2.535515166137893711e-03,2.828796417589664845e-03,9.414576389768473594e-03,-7.532507622835454136e-03,-6.450468928709468704e-03,-4.663992559422099082e-04,-8.296266645222621916e-03,-2.858623792812513796e-03,3.681129529941223281e-03,1.767016290451293276e-04,3.277196422860268084e-03,-3.024960061810014721e-03,-6.387640872657474695e-04,1.169737920587579378e-03,5.419728524873332690e-03,2.442628398916883541e-03,6.845199167029841075e-03,9.011460434094505784e-03,-3.995586797548357841e-03,2.890505577860530070e-03,1.612534325356285527e-04,9.882196691631366103e-04,-1.553245346931194900e-03,-1.615286109790213011e-03,-2.009594366042481879e-03,-7.389242777270011234e-03,-6.195268234590690058e-03,-1.793743080940189650e-03,-1.173029928946822155e-03,-6.259553862936813318e-03,-5.466716151516119032e-03,-4.034726869210450775e-03,1.643298202425562057e-03,1.102092885811896102e-03,8.667316106221961344e-03,5.004692244126291052e-03,4.460399958608096856e-04 
7.182955535310750136e-04,-4.129937746069514519e-03,-4.087517778746136733e-03,-1.021120485500576301e-03,2.630511342274852865e-03,6.289717912183867728e-03,9.500481829925200034e-04,-1.180064596215350077e-03,4.739812057664215782e-03,3.173968830283484428e-03,4.475155884329915477e-03,8.013982324515521449e-03,6.624699633975751623e-03,-1.707094927134748114e-02,-5.126047103138260848e-03,5.175590114436935500e-03,-2.029259464866010540e-03,-1.565579684958217418e-03,2.224828452511692922e-03,6.165600526455070757e-03,-2.813126210452317460e-03,-3.223304988872784625e-03,-8.476499765476438715e-04,-1.648587331609540577e-03,-1.163486773415314899e-03,-1.641075517422376515e-03,2.757117694660350411e-03,3.483773501101263578e-04,-7.265746454335167588e-04,-7.320357243991859478e-04,-6.617105320937997177e-04,1.781516636048114484e-03,3.731501766900844827e-03,-5.545387839166030647e-03,1.130834894868522431e-03,-8.302514216889822496e-03,4.332659751673313860e-03,-2.050131476938445076e-03,1.601679633035708004e-03,-7.958456419284872484e-04,-3.097607270329681001e-03,6.148717325094375623e-04,-5.703080186934783536e-04,-1.285173046013556811e-03,3.349247980442864557e-03,-9.375288177772464135e-03,-6.803545284712829953e-04,-5.621558207678289086e-03,1.440334082309160501e-03,-4.945672408009845443e-03,-4.225748805806416042e-03,5.699145724160302982e-04,1.515865277124290450e-03,-3.969059572063862902e-03,3.359915981268762203e-03,5.553328605851338452e-03,-3.196388174864559992e-03,1.359406563756325979e-03,3.057368148167794418e-03,-4.187372518686011645e-03,-2.190204659705944669e-03,-7.158712068723780374e-03,3.589692993734205397e-03,2.453926723020894178e-04,5.875505729910638492e-03,-7.371760522745934852e-03,-2.060351788304388976e-03,-8.612835889805714462e-03,7.115830922151538361e-03,-5.414828142428431031e-03,1.225228029315423535e-05,-5.333989285394262828e-03,-6.756033492603490814e-03,-4.205543609731668430e-04,-4.227919839370846387e-03,2.040088902445911200e-03,1.550138821653438163e-03,2.583086946827204376e-03,-6.383985852556636406e-03,2.474216108289330196e-03,1.008831499155504741e-02,-3.170176341942593408e-03,2.811977572519692905e-03,-1.054454496643331129e-02,2.547153580615915940e-03,2.270419086613764421e-03,-6.626273509046210798e-03,-5.196023238832743647e-03,-4.813878396144774703e-03,1.289855659113949872e-02,-3.065142153063505313e-03,2.129473405180323971e-03,-1.052131187645766706e-02,9.146368494215413592e-03,1.045739036996258704e-02,-3.470395865110309595e-03,8.835101543093104928e-03,2.543989954855271213e-03,1.868596704637709655e-03,6.886721007007465083e-03,-1.730348800278378113e-03,5.607841839034320014e-03,6.867231001401141906e-03,5.597578784051843170e-03,-3.164691726163704980e-03,-6.878172703642249607e-03,-4.814247118988386649e-03,-1.356494210555216464e-03,-9.261829622477625937e-04,-2.386348468629966805e-03,-4.062297768421100491e-03,1.642860351843211143e-03,2.771333544716159721e-03,-1.147605477224412397e-02,-3.043216608204546685e-03,9.399816224919031390e-03,8.775669141767708514e-03,-1.245531666907320626e-03,2.739491379665695732e-03,2.787835538323812402e-03,-1.246207622534881675e-02,-2.198159956061789509e-03,-1.609344501880480499e-03,-6.831058918339114050e-03,-1.632521876110295915e-03,4.857004597594061744e-03,-4.190279897446353222e-03,-3.044832930960153072e-03,7.493192101227466595e-03,-3.525051747710391398e-03,1.344080875693565505e-03,-1.030500147415649515e-03,4.347855916956363566e-03,-4.004253416231376775e-03,2.063250438883191054e-03,3.695278188884514557e-03,-1.080607425915667240e-03,-6.403477709340696725e-03,3.536197498909430734e-03,-1.247
497454204008578e-03,6.869116880482542542e-03,-2.001412010958427898e-03,-5.592027887332929638e-03,5.676061324796826194e-03,-3.454063894338605615e-03,-3.259060593093617442e-03,-1.644563968632294310e-03,1.012628436934256363e-03,-2.355051409175672444e-03,3.450870350496593100e-03,-5.208862684131304426e-03,6.703150728575158271e-03,6.486677566929760887e-04,-1.484344870750772555e-03,1.668886009560053554e-03,3.872254344396243615e-03,-6.061539593661545607e-03,-1.575634178669251071e-03,-2.623016664314652049e-03,4.725980164461715365e-03,-8.326301286131800039e-03,6.867536031608491472e-03,2.395721805962385768e-03,-4.291838229189900641e-03,2.569111796579045572e-03,-2.552134735609075129e-04,-2.690154973325331513e-03,5.502014415023590531e-03,3.406573703464925613e-03,6.324779657449921752e-03,4.063987982005115920e-03,4.512060468056223149e-03,-4.764673748420162301e-03,-2.880692123479639509e-03,-9.008022363180757030e-04,2.534875087516489951e-03,2.969276801660691249e-03,2.112876534798009773e-03,-1.471443404079450469e-03,-3.840605910993925669e-03,-2.924885305713320876e-03,3.991530031155546537e-04,9.543535033615559030e-04,-3.957096952139443034e-03,-1.533205985857482396e-03,-3.216639163479852355e-03,-8.874392750757364307e-03,-1.942556398911272734e-03,3.226857818111251129e-03,-7.171455706230770236e-03,-4.680873951157145559e-03,-4.886475136311556160e-03,9.189720882422381873e-03,-4.761839226179422763e-03,6.349565012495271034e-03,-3.725200421336483190e-03,3.636015653653860025e-03,1.836282239893910788e-03,-3.227850014408535986e-03,2.565194251863296519e-03,-5.087872355243796548e-05,1.518304224056241710e-03,-4.736839717115133401e-03,-8.958297038828818898e-04,-3.917305320841703298e-03,-5.431535606893402209e-03,7.700440569981207434e-04,-1.901046813277218342e-03,-6.980290858498557072e-03,-1.024587585136252582e-03,1.847934402027606379e-03,5.316206712521232523e-03,-4.001306298588550199e-03,-1.552030458871054078e-02,-4.753215052569008912e-03,6.095481775210373140e-03,-1.116644145139328911e-03,7.573539285781256090e-03,-1.583373320199504396e-03,-1.186434076747190083e-03,-4.347618783453264212e-03,4.372862290173154748e-04,2.832758006902983284e-03,7.097215739821032161e-03,-2.253266665801264244e-03,1.655544869340193478e-03,1.390426484390221455e-03,-3.546726735201504322e-03,-6.638771185988289998e-04,-4.038583021838539761e-03,-4.952398165789696424e-03,-1.039151112425932937e-02,-4.858400471185531696e-05,5.174251910767841290e-04,5.516192167508340155e-03,-2.143769831854473555e-03,-7.306186109381381227e-04,3.041114007072100861e-03,5.586320122850612149e-03,-5.505693002617854589e-03,9.574160110881330121e-03,-3.664099434830418722e-03,-1.051616900659688983e-03,-1.047820660968342240e-04,-3.086103961564300602e-03,-2.647431388506010785e-03,-2.211791444397456612e-03,-6.052143455017455634e-04,-3.335885834088090501e-03,7.911445402086455733e-03,8.207842323516764942e-04,-2.687963184951490132e-03,8.648709993857108674e-03,-9.494027466144895622e-03,-5.455809680674150557e-03,3.835811200592410038e-04,-1.032936597911915909e-02,3.241969450820928692e-03,-1.143359455615006355e-02,-5.220284357238630553e-03,-2.761271515796372665e-03,-6.757482451357819594e-03,6.605901001827673390e-03,-7.147754341723320734e-03,-1.266149542046805928e-03,-7.196519882245931872e-03,-9.419140687367939802e-03,1.506823863605998145e-03,-7.910585988760094125e-03,-2.625765826040512424e-03,1.146602218685766582e-03,3.678225042104740078e-03,-3.064656180640265137e-03,-2.000584461098670660e-03,6.315714489597893094e-04,1.968372383037458222e-04,1.073475104657977113e-03,-5.027937098830853775e-03,8.8
71255128624440725e-03,-3.842746511558396520e-03,1.435734630013276553e-04,-4.919036451921949410e-03,2.510796549336066254e-03,3.696354691029264412e-03,-9.568558236491553348e-04,-4.423675567269680191e-03,-3.996828481380151031e-03,-9.202366578722382968e-04,7.094642680593604873e-03,1.620311316743327677e-04,-6.252549016977200502e-03,-4.402245851657033901e-03,-3.752472320387022654e-03,1.042705513627731687e-02,5.669333624709275120e-03,1.320008401482271592e-03,1.028168931186306759e-03,6.593713637818021667e-03,6.365884673242660878e-04,-2.575709747102805213e-04,-2.393234725101131208e-03,-1.471402438482553995e-03,5.063102053991872485e-03,5.279992771486529020e-04,1.435949032630267361e-03,-5.100909984223950429e-04,-8.193851699375960616e-04,-1.765159178733626509e-03,-2.094428823564309372e-02,1.101900651035574684e-02,-1.054662014464992532e-02,6.561854975107790948e-04,-7.930991202547775509e-03,1.016417424441942675e-03,5.641258134108197327e-03,-2.834508573716608849e-03,-8.592692449447400169e-03,-2.999312904490557610e-03,-1.462467635128026191e-03,2.981734992078914018e-03,3.015769261973890239e-03,1.210333737200290031e-03,-1.350720979015784589e-03,-8.279066822933106931e-03,-4.626622188703865407e-03,-2.690756020449382082e-03,-1.279669125939620266e-03,-2.613155460854760422e-03,-7.806483005576651465e-03,9.246916333657563807e-04,4.886166199897905467e-03,1.163990986935881621e-02,-1.022614091427292982e-02,-8.048964159673202209e-03,8.369933832236721785e-03,2.593774781118684137e-03,5.420447854160289486e-03,3.739516345716919956e-03,3.085512392818371471e-03,4.911471094650943052e-03,-5.551070882335969875e-03,3.426004818708931667e-03,-8.048779484277743604e-03,-5.012676949696969267e-03,-6.629742263084734653e-03,-3.341912344750893465e-03,7.733402033189679635e-03,1.325044391980389601e-03,7.528023934894652151e-03,5.017252269089441524e-03,8.344817132976860893e-04,-3.243547137100802769e-03,6.539309401575983498e-03,4.282911980022760350e-03,-9.495228877637748588e-03,3.664441545509654861e-03,-6.534932938776054530e-03,-3.490197413452013515e-03,-4.183941620233892940e-03,-2.442166644240274798e-03,-2.094840039455780813e-03,1.833756881957817730e-03,1.772464495724605750e-03,7.984159709556013992e-03,-2.201516471739517391e-03,-4.418499097810177920e-04,-9.750924988784810329e-03,-8.393666279834273089e-03,-9.553835186854968669e-04,-6.869649131591971936e-03,-7.619920510051276033e-03,1.035336332473575081e-05,5.350386748514513770e-03,1.184027601597113788e-02,-6.352003817349516357e-03,-4.845021695533043189e-03,-1.832668045763260990e-03,-1.265048198174086903e-02,-8.033556976120198633e-04,9.838232157219461548e-04,-7.138724332387670277e-03,-2.285820756019330078e-03,-3.140295352936091355e-03,-2.777767921321658718e-03,-1.072725275440585338e-03,-4.717785749375738059e-03,9.336293255742710712e-03,6.972271923958604442e-03,5.138586148715103274e-03,1.090826980451765167e-03,-3.842643092490103870e-03,6.861414097455914574e-03,-2.561890144542957725e-03,4.233395789450182349e-03,1.947101226586324700e-03,-2.745370083087152625e-03,4.648277773459698337e-03,-5.440232450035840765e-04,-9.503572449527612894e-04,-7.012502138158974163e-03 
[embedded dataset payload: one of the numeric test-data files shipped in the mlpack-2.2.5 archive, consisting of several thousand comma-separated double-precision values with magnitudes roughly 1e-5 to 1e-2; the raw values are preserved unchanged in the source tarball]
8107355432199e-05,3.109906494822802913e-03,5.803752645769214707e-03,-5.776336648475029721e-03,-5.494093502788364135e-03,-2.623570473754469230e-03,9.849484249439108621e-03,-1.591098637740129360e-03,-3.807276584349921812e-03,8.348514018938607215e-03,2.175493820555150055e-04,2.280043744279037967e-03,1.145434201014274315e-03,5.850572742543137285e-03,-4.063474101382707863e-03,6.569170369351114841e-03,6.502040346379125849e-04,1.849958977160296109e-03,-9.762433591604756768e-04,-2.831763992757168735e-03,9.806772025824591656e-04,-2.082040858109230220e-03,2.484388397231441017e-03,5.286292335154890547e-03,-5.146957961415503380e-03,1.137467689621078420e-03,-3.834865261886239926e-03,-3.693484728440855295e-03,-7.405567471787487385e-04,1.322185211788287455e-03,1.579961825997337537e-03,5.258264360819177178e-04,4.162848392251024439e-03,-2.206600183608767481e-03,7.066955863273234779e-03,9.473172636617774311e-03,-2.617174776708114815e-03,-6.800174603864883637e-04,-4.754165625730987724e-04,-6.492607062077381495e-03,3.547519322142423260e-03,6.499007049210422417e-03,3.588415463357425526e-03,-1.155547088485874918e-03,1.619665410976294613e-03,-5.271225178052072260e-04,-2.580925386214211192e-03,-5.418917166171022780e-03,-5.118142377210624719e-03,2.615376647083304459e-03,-7.556181051460809060e-03,5.877210240767673927e-03,-1.043201855312450100e-03,3.584309238161276657e-03,8.412270607881486588e-04,-1.987622403375651978e-03,7.595762423298406570e-03,8.527892837686927227e-03,6.328164619335142037e-03,-3.545817169860110505e-03,-1.519417669687616029e-03,8.743741338370398944e-04,6.999971607040259870e-03,1.112613817011802540e-03,-5.803337901946559532e-04,-1.006771438342559874e-02,-1.354870769971304803e-03,-1.457633408518838177e-03,2.019610802820703387e-03,6.676726736878232926e-03,-7.888210589085270247e-04,3.842402027661903554e-03,-1.169737337159735798e-03,-9.695108285062993200e-03,-1.240433129430506447e-03,2.044287047628877939e-03,1.003125310362186582e-04,-2.711148149617170679e-04,-6.055617052319541178e-03,1.183272770933087477e-02,2.375437776049874310e-03,-4.436605513614098341e-04,3.263228727592925107e-03,4.415585775556350344e-04,-6.287810537128786341e-03,-2.283207681105279153e-03,1.067415045596096504e-03,1.148578311692222825e-02,-2.092203075976613780e-04,-2.380495347440833749e-03,1.893708520636605032e-03,-8.707491998615705248e-04,-1.032225735688229416e-02,3.833247628572816323e-03,-4.236700378824035870e-03,4.347819966401532746e-03,-4.193468743685983997e-03,-5.244352240892809474e-04,3.286952945498708943e-03,1.742173920449265273e-03,3.282457491490847062e-03,2.277798881607996390e-03,3.996051001974289252e-03,-7.580085195847260579e-03,3.452478759893284291e-03,-5.234301226949640735e-04,-5.408081443904442197e-03,2.022469668200090141e-03,-3.514222353128304815e-03,-6.352445289169062793e-03,6.836887794373028987e-03,3.227243171450450056e-03,3.688035150076609658e-03,3.922862100182555986e-04,4.175926573409733701e-03,-3.345188591879360435e-03,-4.864901894526782976e-03,-1.322053442944461175e-03,-5.822854002498528865e-03,-5.578108705221691832e-03,-3.916432308573887869e-03,4.262957037611355381e-03,4.855644792902100854e-03,-3.637633650513295066e-03,-7.162130288594814771e-03,-7.470337317113009967e-03,-5.822295906452362724e-03,8.328361824462276065e-04,2.288660357902577887e-03,6.125660479825900586e-04,-1.129776118032754968e-02,-6.714112237860061887e-03,-9.066145625365873081e-03,1.097087638960984012e-03,8.998365564422970741e-04,3.139927191560362013e-03,5.839881865238935946e-03,-3.482310657804217960e-03,-2.203678100557861664e-03,2.12881612929943526
3e-03,5.450589174688119480e-03,-4.336522989699933758e-03,-1.164405950424737910e-03,-2.965956084380317031e-05,5.448476104730841027e-03,5.662407487096619493e-03,7.773428909196093174e-03,5.397154050603029940e-03,4.045737176111954651e-03,-3.363360691409917301e-03,-1.424615621941554091e-04,-3.286343755899878011e-03,-2.762188716645855562e-03,-1.132513522313979967e-03,2.939594112647462773e-03,-9.022207834350230368e-03,1.431716883286537553e-02,2.719487263911785253e-03,6.074833939312418614e-03,-3.662249641823913694e-03,-2.046511476777050013e-04,-8.496304498180730705e-03,5.871859485210996488e-03,-2.251659291445986307e-03,-1.024124200848134866e-02,-1.258390884596646874e-03,3.879739093885832373e-03,-5.417642871423259364e-03,1.072022112410236911e-04,2.521576940814060971e-03,-5.154675983601236106e-03,2.133987006514039692e-03,5.296620766343384677e-05,-1.490082968629663019e-03,4.363608253953992107e-03,3.568466951913006925e-03,-3.640454688691841401e-03,-1.116902076465344819e-03,1.891038687692963606e-03,-5.218249974269487459e-03,1.235074223073894667e-02,2.173538655801224989e-03,6.592512882889731834e-03,7.457148364005418070e-03,3.128969120256218719e-03,3.476895576714220896e-03,-4.158906790026083156e-03,5.018488420079461436e-03,5.669861929543424002e-03,8.765304023511897311e-04,-6.609611183572349641e-03,4.060622675727047944e-03,2.392564977219896385e-03,1.312495162394942960e-02,7.403543729882190554e-03,3.103532975693377972e-04,4.603144846272330844e-03,5.303429299580947852e-03,1.730994186354641593e-03,2.479202890184797631e-03,1.019991960602266261e-02,-1.903228368157487433e-03,8.922496686841760796e-03,1.438982406841345443e-03,-7.888294791973015285e-04,2.116855067929436767e-03,-6.537925266918146426e-03,7.253496899885125519e-03,-1.301244713048184854e-04,-6.634547105975559028e-04,2.493038591150859647e-04,-3.211313976819676289e-03,9.777462552792146912e-04,-5.335567571127890901e-03,-3.764636355747473170e-03,-4.450986351225782491e-03,-8.047904307070898045e-03,-7.064136633285687502e-04,-3.647949415941944239e-04,-3.488491672852052534e-03,1.147395502197740263e-03,-7.920587064609225722e-03,5.342747476488916099e-05,2.266632232091838131e-03,-1.964006222952589028e-03,-8.993236724781743241e-04,-3.381778088813774258e-03,-8.230722659080664388e-03,5.425733660939484858e-03,5.867502284426082647e-03,-1.056458883392242740e-03,-7.200000792496230768e-03,-2.351422046574237670e-03,3.334383640239302338e-03,7.281416874398799650e-03,7.872436506082557822e-03,-2.763225122295715885e-03,-6.659603786899229855e-03,2.551545074508296783e-04,4.582435825269087597e-03,-1.183267604110637382e-03,-6.263777600579703043e-03,1.878747725206281584e-03,-3.498640437992191037e-03,-3.671031187318082433e-03,-1.257578916806308469e-02,-4.856210541882975684e-03,-9.380362399553580921e-03,1.185897498310926586e-03,1.259177362227935143e-03,4.628945187344054961e-03,-5.233235858533493573e-03,-3.437013167056585734e-04,4.358292354676221274e-03,3.173392634704228117e-03,3.929581242524563499e-03,-9.378450098304182483e-04,-3.295487970854673286e-03,6.923775243283653274e-04,-1.195693523358754831e-03,-1.087032087001548256e-03 
2.581284073683964841e-03,5.121086343814238752e-03,4.563716159104789886e-03,2.957601355730199335e-04,-7.099014935601528388e-03,-2.491889023714000551e-03,-7.874307796246690638e-03,9.258687297456968598e-03,-2.304652912391461344e-03,1.106674309898839862e-03,-2.478341339556020245e-03,-4.158364626921351101e-03,-3.692865760528528645e-03,-1.702516024145448933e-03,-1.604653679288663235e-03,-2.795402577948357357e-03,-2.812741268909241775e-03,3.940565955963189725e-03,-1.454472622664895179e-03,-5.809308574566986877e-03,8.391521224560063996e-03,1.888472153062242332e-03,5.374571896980916488e-03,-8.062887621794179865e-03,1.279475372995676416e-03,5.079048804488399578e-03,-3.709668010297246729e-03,1.612889525498081627e-03,-2.750344620055709087e-03,-1.756615893942806497e-03,-6.106444353394281654e-03,4.832803510487086729e-03,-1.346080337526218279e-02,-9.408657282414210898e-03,2.276265739846792743e-03,-9.453935226046582610e-03,-4.449074513249043003e-03,1.104709120121836856e-02,4.612557008210833691e-03,-5.859574069414038959e-03,-1.297780261098411885e-03,-1.809270925651887201e-04,-2.817043019803804407e-03,1.858397386218405044e-03,-1.574776195591156940e-03,2.173221674375016687e-03,1.543977371068341398e-03,-8.736712842321836189e-03,-3.331056792905555657e-03,2.294148212702731111e-04,3.905404374165438123e-03,7.458532438396023832e-03,-1.009528953002381164e-02,1.099672727547812943e-02,-7.557828518207250490e-04,-1.612598105458185830e-04,1.354711455179803117e-03,4.317792336073845820e-03,-8.710865128803321452e-04,-6.046324302177556904e-03,-5.096022109766300615e-03,-5.134318395181833999e-03,-1.190656254841341829e-03,-3.078475725616490076e-04,-9.621539182646080034e-04,-3.388892014582656383e-03,3.841970432801310523e-03,-2.985670895690760440e-03,4.920816162092662809e-03,-3.376898033826185481e-03,-7.963116419767704221e-03,-4.618686566888179947e-03,1.426483831773002128e-03,1.462063302676583007e-03,-4.199504218333370217e-03,-6.644166010997071270e-03,8.198272549403315748e-03,-4.002454075086278248e-03,-4.770584707996365983e-03,3.391287502658834468e-03,2.732020551895325815e-04,-8.490445613006214476e-03,8.475900359558750582e-03,-1.155253706245200698e-02,-1.148140293843823677e-02,2.586483387844956227e-04,6.312197935395313150e-04,6.365378914465183793e-03,-1.070594005952447449e-02,2.441785823825503177e-03,1.952205599216849630e-04,-5.622616733585935906e-04,3.163196358418267785e-03,-1.819668180730160635e-03,2.112410595696584403e-03,2.891156280821156020e-03,2.045722255271696828e-03,-5.233051290671093814e-03,-5.120748570316088245e-03,-1.043560271080948461e-02,1.781202844614608680e-03,-7.787267769317059119e-04,-9.973269596504318046e-03,-6.295166910911226266e-03,4.728527179400805573e-03,8.599428949021228896e-03,-8.422244099780521256e-03,-4.453457838848379723e-03,2.631748403713752692e-03,-1.733963616805954660e-03,1.658399291579936482e-03,1.706809786123870531e-03,-2.823736041961811497e-03,-1.635402849262897196e-03,-6.821926905216691393e-03,6.588133501586755127e-03,-7.600034912786798706e-03,-1.365182221607448045e-03,-2.430040909032301320e-03,-7.456573944591109518e-03,-5.277138337965819838e-04,1.305260012097588266e-03,4.971925047196884724e-03,-1.254113530216610694e-03,4.495301241043333238e-04,7.536745859615763196e-03,7.807220020421307138e-03,7.826280386874294448e-03,-9.027685848931134294e-03,-9.650821991760564697e-03,-8.319934805976057779e-03,-4.819102750556462054e-03,5.047574855007175287e-03,6.432014866113550458e-03,3.804132757743200650e-03,-1.398645368230834289e-03,-1.691711312237815070e-03,2.732130732498202434e-03,-3.689933528889440331e-03,-
6.320724326649223949e-03,-2.276979454396243878e-04,-1.007318483018535861e-02,-3.957525889479654793e-03,2.536983693926307508e-04,1.003858076097110391e-02,5.484344887277364800e-03,3.156732444361932868e-03,-1.308260901172750897e-03,1.895599319586834626e-03,1.685966062983584608e-03,2.583252716976996294e-03,-1.387040920792640894e-03,1.624203959672858646e-02,8.580529521401094109e-03,-1.089903272478826186e-02,-8.291329010006601510e-04,-4.810103843961489704e-03,-1.097673529595432794e-03,3.460301915076254183e-03,-7.521867494026579417e-04,1.464384681286739478e-03,-1.657698759041035738e-03,-2.668021584782480273e-03,-9.112666263687593630e-04,-8.788126366550286922e-03,6.002931517479762578e-03,1.937304598136837684e-04,5.682261043926827536e-03,1.095783300398177383e-03,-3.924179520230959735e-03,-3.344693596300116634e-03,-1.287181462345984287e-03,4.095070948878384853e-03,-4.649052962605429266e-03,-1.982678924760838306e-04,-7.056620614411320859e-03,8.331315526963391921e-03,3.301233585087748855e-03,4.971228015166477728e-04,4.187907951575816669e-03,-5.182269309858492795e-03,2.019421933168582747e-03,-3.777047933132327440e-03,4.125103009467832524e-03,6.761129932258824879e-04,5.344272569888234431e-03,6.002059665817895302e-03,3.947565789461112903e-03,1.934848567798127660e-04,-4.257100333872255137e-03,-2.428675688908018670e-03,-1.011725790347741544e-02,5.373883452958130511e-03,6.510233213683457032e-03,-1.056885246881709333e-03,-4.655967269012157607e-03,8.230140426006010571e-04,3.856178623436327447e-04,-4.310970401339917803e-03,2.676668306441166382e-03,-3.774645638004463580e-03,-5.404392211185647362e-04,9.504090592052161831e-04,-7.289052285938484553e-03,1.390204106807580711e-03,-4.397891084885246528e-03,7.069903064040027616e-04,1.072797318536185826e-03,-5.442865403904399366e-03,-3.623076691053109515e-04,-2.957011429906297034e-03,3.660785184175477554e-03,-1.714607303834022548e-03,2.188696905939530816e-03,3.096217310887802113e-03,9.603648586033323650e-03,-1.304772107570803530e-02,4.701625477851779054e-03,-2.120467781360286705e-04,-4.480652647029571313e-03,-7.136001932745277172e-03,3.385537227268559102e-03,-2.765334954955690414e-03,-2.724541189106235908e-03,2.508184737357300759e-03,-4.721928232349486242e-03,1.793527141184603560e-03,5.096188009587816412e-03,1.023724158547196367e-02,-1.358478679500679234e-03,-6.583666974094347455e-04,2.981202199926057551e-03,6.548733579497098903e-03,-3.708027772382224176e-03,-5.560539310220467997e-03,-4.535894975426165571e-03,4.649572677860381795e-03,1.697459896861762060e-03,3.795295659128220499e-03,-4.554949843584744570e-03,-6.241078645736119687e-04,-1.610281371075746384e-03,-4.475540971017098399e-03,-2.203502171073566315e-03,7.582781864171578290e-03,5.704143834223005839e-03,-2.408071223129311812e-03,5.683750805007600077e-03,-6.481242434256187773e-04,-3.360040635686262645e-03,-2.873096079724458702e-03,-5.099337291036072639e-03,1.379560061686629924e-02,4.252682761644337693e-03,2.012791749568022988e-03,-8.911433933456731904e-03,9.484743494735325964e-03,3.526749578111724320e-03,1.030698436369296977e-02,-4.293559078258728234e-03,1.812125023698670997e-03,5.369016009244315275e-03,-2.274495443425863812e-03,9.287672530311692418e-03,4.413686103575838009e-03,8.418871460799925641e-03,4.438521983931994867e-03,-1.968717129123299329e-03,7.977038222799237924e-03,2.609576512259968121e-03,-6.355009560587213373e-04,-4.164522483501256647e-04,8.987102838201258687e-03,-1.806304094643805926e-03,2.841445445024444114e-03,-6.291094057015726888e-04,-4.900919977262878802e-04,-3.473670759477931382e-03,-8.177960932
360348609e-05,2.779466617225727094e-03,-1.237468366055435995e-03,-2.961889826758655128e-03,5.152027279879298552e-03,-6.897851274346720671e-03,4.020318181149827065e-03,-2.974596176473001067e-03,1.006440573468705794e-02,4.216165547160578148e-03,-8.732226271158575678e-03,7.497781037531084164e-04,1.582570819772208823e-03,-2.769414619822082105e-03,9.914447290082215036e-03,-1.514007094290789092e-03,-9.266667600890995948e-03,-1.984218497095507818e-03,2.701686275767510408e-03,6.449899866475687681e-04,-7.922650774563945800e-03,-1.559578165930773945e-03,-1.579675791342638068e-04,-4.575059425944154323e-03,8.527864289671761855e-03,1.834201985685841689e-03,7.066990742530840190e-03,-3.369759973778492686e-03,-3.952805755017775938e-03,5.094595993693751788e-03,1.191074737820889970e-03,-2.999836337055704176e-03,9.612330398002166473e-03,5.393408391928443402e-03,-1.879018672648518360e-03,-2.882226604443082277e-03,1.122931793705660153e-02,1.857430223614702034e-03,7.592284124337882363e-03,-2.587866832000318331e-03,-2.636179282563869109e-04,4.773626840473315508e-03,5.293164619711716347e-03,-2.131487412272167845e-04,1.039042708082630728e-03,-1.769720915487122542e-03,1.401339949714818000e-03,-3.645800517376768420e-03,2.751421825836359030e-04,8.823036427160287423e-03,4.852284874537941307e-03,-2.961662613065451158e-03,-5.443711758334550919e-03,-1.085812756457429050e-03,3.505315966772126063e-03,-3.957302654013134663e-03,8.399493556637553280e-05,4.239069956645433700e-03,4.229788263235911318e-03,-3.588408768944547321e-03,1.139072008987541268e-02,-4.121118892819240417e-03,2.086483109858332588e-03,-2.092415248724108830e-03,-7.893673998124635383e-04,6.255156142510233212e-03,-5.154372191637188487e-04,3.895690819774381020e-03,-1.582937850134499488e-02,2.157926968951941367e-03,8.962503557461441511e-04,5.250050348040991022e-03,5.553697946358991840e-03,7.385046196511414491e-05,-3.653375626263704028e-03,6.560560083454914856e-03,-4.011488376917962673e-03,1.019543486891731458e-02,-6.732535566702100606e-03,-4.989469610913436007e-03,-6.833239945406631086e-03,-8.114877853122299742e-03,-9.205024574062804039e-04,-1.795617382840913922e-03,4.042857456963590626e-03,2.893225663595812552e-03,-1.190405756129123659e-02,-9.663453327937167672e-03,-1.650223930643524625e-03,9.182841658691832595e-03,-7.085823546814291104e-04,-1.256368168854215227e-03,-3.545083595919248064e-03,-5.490590990587299741e-03,-1.168999912227290314e-02,-4.449713051480521341e-03,6.270915783072738984e-04,4.220948050047824258e-04,-1.665458071312580493e-03,-9.232698379648372755e-05,-1.628740037370167773e-03,-5.736473647216380062e-03,9.748529664906207268e-04,6.246733379833484083e-04,-5.153695923497187380e-03,-8.207703357714084703e-04,9.840320583317803405e-03,5.380049325803127995e-04,-4.733906363782354465e-03,-9.109076239809551199e-05,3.317007247703226275e-03,-6.429310625182720410e-05,1.858222557708014405e-03,-5.004225688464967493e-03,5.849810902590410824e-03,8.086850320484323352e-04,-8.391725612915609811e-03,1.145706674087546657e-03,3.755272039004777743e-03,3.055640489521276038e-03,1.826304907037221267e-04,1.671113874364120282e-03 
-1.672217636440772844e-03,-4.492893084021127269e-03,-4.832438114070510514e-03,-5.392523150776591494e-03,-5.275725346250219024e-04,8.893315840871838032e-03,1.900106233454839375e-03,-8.434635718051246289e-03,-4.997617744271196961e-03,8.573359327448079950e-03,-8.166371150445278157e-03,2.642048710476259128e-03,-7.431309177978986241e-03,-1.244833808116136819e-04,-1.437501901035514529e-03,-7.276166770452368281e-03,-5.766749682416902768e-04,2.203752662037694425e-03,-9.650261103440275698e-03,9.973433738347766472e-04,-3.195784424814244237e-03,-3.353312243738401210e-03,-1.365083320014070590e-03,-2.983180550324145402e-03,-3.703435020482136452e-05,-1.623141850713502718e-03,-3.692710891908396187e-04,4.975580098775995883e-05,2.833629477286930733e-03,2.229107022152139472e-04,4.883084171425774393e-04,1.012441127529843448e-03,-1.902320171467938251e-03,6.723454709387251651e-04,1.275103990858366797e-03,3.350647050851500094e-03,2.992166090502698314e-03,-9.105184900178203353e-03,4.037324863341300481e-03,1.673055050568185171e-03,5.326394119749357052e-03,-2.328563619975764810e-03,7.160766469367516908e-03,-3.749880572699836588e-04,-2.200779424258785585e-03,8.823496183190010804e-04,-6.345583118447510607e-03,-5.492237526038060841e-03,-6.272434250985675511e-04,3.586416957165209328e-03,5.478542699870529026e-03,5.919801638689717142e-03,-2.850930815892640884e-04,-4.230688749944417140e-03,-3.244278932381215058e-04,2.216215615797175083e-03,5.720764898644000070e-03,6.711722547567668985e-03,9.177765626307368002e-03,-9.031265148987283086e-05,-4.819390932202213644e-03,-2.857054425143028785e-03,1.212712668827267562e-03,-1.858548412484597491e-03,5.677562900404212708e-03,2.859276165597874394e-03,5.341967060316054868e-04,2.897303824772841548e-03,-2.238840302779172148e-03,4.836384833085557132e-03,-8.042263532945110802e-03,2.678073816622972584e-04,1.695731219378466366e-03,5.663131727992099830e-03,-3.323435230336905491e-04,1.372773352063785465e-02,3.087136189414535145e-03,2.875690850121251363e-03,2.588532527763300018e-04,-7.500332503774150179e-03,-1.195914434193112545e-02,-6.838083467446319508e-03,-1.178445455864986224e-02,1.741126873026672180e-03,6.385191118991646528e-03,3.637588233913379310e-03,-2.892183609969779974e-03,3.874435416101335104e-03,3.926283572131824585e-03,4.026095489840644977e-03,4.510922355017986038e-03,-6.956897946877725614e-03,-3.545659792135683053e-03,-5.496862210622514150e-03,3.991305643008898410e-03,1.062842395163605264e-02,3.755783928077946910e-03,-4.030573207762322072e-03,4.222471341012719141e-03,-5.593403429980999854e-03,2.625678635486714679e-03,-5.753873489185754898e-03,-8.147002871381446576e-03,1.409342297101859494e-02,1.387922107159922846e-03,1.100437152046860115e-02,4.795329408637926533e-03,3.688282973713146763e-03,1.437416530279551994e-03,4.461411140752747327e-03,-1.036317638465554221e-03,3.002622585727187168e-03,-2.981435322343643692e-03,3.291198886001096661e-03,-3.257789505495176617e-03,-1.133879875603785838e-03,-1.883664764928871067e-03,1.128316049730008580e-04,-6.608738166694458002e-03,1.078146527379296362e-03,-4.520143622339710919e-03,7.419513348206601550e-04,5.081695393080038793e-03,1.112926790883198534e-02,-1.069920124270609240e-03,-3.793101259349361543e-03,-1.278085474105672675e-03,3.968863721255577390e-03,-5.776306128951464239e-03,9.509922522038362705e-03,-9.329056894367068753e-04,1.299849704417079845e-02,6.047125733269061339e-03,6.160332064684322919e-04,4.047033426956106114e-03,4.458734711218322738e-03,-2.939926317375184985e-03,1.387337105178426330e-02,-7.842643181065899957e-03,1.8460514561424
98368e-03,4.704639889033182185e-03,-2.523422820209251204e-03,-9.628675741183248841e-04,8.690885631265541332e-04,9.561252606825885667e-03,-3.267524756766080306e-03,-3.149493658673622760e-03,-1.666548780888542965e-03,-5.704022519243127226e-03,-1.539972712683162831e-03,5.825621205428531402e-03,5.567410757203101503e-03,9.027740117681449031e-04,1.019716901269454405e-03,1.942920738867863701e-03,-7.720428324931579572e-03,-6.157993120210540801e-03,-5.210382022202077364e-03,-9.993778710505388935e-03,1.213526100570897675e-03,8.559463959961688059e-03,-1.190124766972678325e-04,-3.246651276253717088e-03,2.787012367302350163e-03,1.020236047833200481e-02,-1.550717010528797953e-03,-9.664148742910213391e-03,-6.832990365141134066e-04,8.993675738113814230e-04,4.903107359703125657e-04,-3.549400686702997281e-03,1.124422066958763542e-03,-9.067948965804486020e-03,1.791543609439154620e-04,-7.774475477403014782e-04,5.931543689327697412e-03,4.998392218094319453e-03,-6.796124802203299189e-03,-6.864346683070227670e-04,7.072687784593764335e-03,7.188766316635974247e-04,1.757998967244154328e-03,-1.565924384628350442e-05,4.873135446426493655e-03,-4.138671967904399007e-03,5.137542955557020813e-03,-1.049630000763139039e-02,1.618571177457489530e-03,-5.639467536007577303e-03,2.085038292599152368e-03,-4.565129836348753509e-03,-1.723495454774605362e-03,7.415761889848858758e-04,1.705608176020790342e-03,-7.427760072576132869e-03,-3.689834428546127747e-03,-6.974020692308892659e-05,6.898210246387169453e-03,-2.944886824144874165e-03,-1.411045597936576648e-03,-2.859093963568531873e-03,2.848635259315755036e-03,2.920851862049814694e-03,-1.977610825302291656e-03,-1.824726556541830238e-03,6.243242195369306212e-03,-2.473297853768539976e-03,-1.567781727431341934e-02,-1.243584366929360429e-03,-6.038937564239082838e-03,3.625642908288379072e-03,-2.254502126103749646e-03,6.895710431120103149e-04,1.758799240834610627e-03,4.533411003359179466e-03,-2.267008100211777722e-03,-5.632429408734920470e-03,-7.940164030347469969e-04,4.972125232565203189e-03,-1.600119066196650265e-03,-3.397799196917915959e-03,1.779421910048194894e-04,2.513535114733787476e-03,-1.827585225709003848e-03,-3.982761958291333899e-03,2.358373770418222253e-03,-2.117289967736147790e-03,9.731881982585599208e-03,7.897461542164711064e-04,3.652129146026098058e-04,-8.029916911741282956e-03,5.223418389039235360e-03,5.353743493587020431e-04,-3.948198726589208056e-03,8.981774765549572560e-05,6.344408430279263773e-03,-6.059144849346410726e-03,-1.010703387581252111e-02,-7.386826822509753865e-03,8.791944861991460303e-03,-1.135150697580053101e-03,2.997845056328531099e-03,5.040517341638589381e-03,-7.439312021078454094e-03,3.638645455511044496e-03,7.170572292883222940e-03,2.951393562599964951e-03,4.279313760093002295e-03,-3.006880319163369314e-03,-6.926880863185384991e-03,1.012762653545830338e-02,5.538590874292865888e-03,-2.742591750361909279e-03,-2.888821164699404039e-03,-5.257185230303268366e-04,-2.964263470277361291e-03,-4.596172947934903464e-04,-2.027918844850609258e-03,8.184805265942853318e-04,-3.897096135389890338e-03,-8.628082508750627589e-04,7.071181953422232824e-03,-6.670612860790994282e-03,7.060113452045936079e-03,-3.350549593319329840e-03,-2.385087518243835143e-03,5.690264343652990178e-03,-8.014911770497488725e-04,1.439738987025792998e-03,-1.896006974731590574e-03,-2.595427988405065670e-03,4.642030390871143726e-03,-6.145772582431156122e-03,-4.724263600544482446e-03,-2.163087979061459679e-03,2.998103981583653994e-03,-6.303692716201464950e-04,6.790350035367029736e-05,1.894305645887562175
e-03,3.623239760647970712e-03,1.198324597241452912e-02,3.185210507660234114e-03,4.174430196301598915e-04,3.733178746915152314e-03,7.873360137879257745e-03,-1.059396461222625145e-02,-1.020239149800738262e-03,-5.864485998177688085e-03,-3.856069889929573339e-03,8.650478565406669257e-04,7.085917856642954225e-03,-8.959678086227124182e-04,-4.388634999455642144e-03,-2.152560531795212444e-03,-1.197681595003066701e-03,-4.604517885735949674e-03,3.267379290082829228e-03,7.711709176491208552e-03,-5.328330619913688136e-03,7.567786554091485542e-04,-6.777388054056802946e-04,2.795927276880068053e-03,-8.221362470656370935e-03,5.860357438977740509e-03,4.381288504209196639e-03,-1.044886423964366123e-02,6.344927977707299875e-03,1.424634899856690173e-02,-7.912075963936098660e-03,-8.933493309888006564e-03,4.269901088637541309e-03,-1.618617981031876324e-03,-7.349111527497857212e-03,-1.388334684754517811e-03,6.948619187616193044e-03,7.895189699278923551e-04,9.319576110246629089e-03,-5.698611452168374919e-03,7.338499040348486768e-03,-6.726466080369223818e-03,-2.619127241906682359e-03,8.333151020309159315e-03,1.062106564537500068e-02,7.387128921064696485e-03,-1.181819644139722215e-02,6.735957373120711293e-03,1.641926575849797249e-03,1.642269235638810568e-02,-1.798151246847186574e-03,8.917721566809052149e-04,-4.130807557413932948e-03,2.672444017388950037e-04,6.885866970128671008e-03,-4.480206186066457087e-04,5.967262717597652394e-03,3.389710108844075018e-03,9.091418957793660771e-03,-6.079547152754076131e-03,-1.729113887265658952e-03,-1.100589136371373303e-02,-1.938543513661671611e-03,-5.713689692234548423e-03,-6.106121862633764712e-03,-4.715947113512045964e-03,6.608395041765598339e-03,-7.389988783215377215e-04,-3.453912715021757035e-03,-3.635141691081777461e-03,1.871768279284472501e-03,-1.539015982727835003e-03,1.994031507436292046e-03,-3.481608669478599913e-03,-2.678768755088239539e-03,5.112976152360694353e-03,3.852543886468124106e-03,1.905444465534371747e-03,-9.450903167533754312e-03,4.845852626878929185e-04,1.717405787735844843e-03,-2.723874705106617451e-03,2.958144555838816605e-03,-4.336581226247210145e-03,-1.170563185919984818e-03,-7.290816074792095570e-03,-7.933321316264565007e-03,-3.818308172594514060e-03,-9.805098722750198170e-03,-3.875881372033940404e-03,2.692325271605742424e-03,-1.997473375363360663e-03,5.414476292819306660e-03,-4.606091904265376103e-04,7.203065518577195757e-03,-3.376927925813542637e-03,5.551865496842450006e-05,-5.101718063926766536e-03,-2.711864356352098031e-03,-8.751761850903989892e-03,-3.672333315959319753e-03,-3.109309955585273875e-03,3.740332526075342001e-03,-4.030273393960510063e-03,-4.767177907645651393e-03,-1.496848815271951353e-03,3.517310336839159918e-04,7.185376039630847478e-03,1.233096236665687630e-03,3.556957577930622543e-03,-5.805994486842429303e-03,3.582412306622634931e-03,-6.472471036887415471e-03,2.329233753618065953e-03,-4.672577360714960312e-03,1.002523326821444863e-03,2.463932921315834662e-03,-1.843145584868432357e-03,-5.509181005425854358e-03,8.409704521855328532e-03,1.309151874529302208e-03,3.206499310934305021e-03 
-7.167429430712121685e-03,-6.242780397849645229e-04,3.030691218057549735e-03,-3.482742844609130074e-03,3.361183166715323630e-03,6.497286741311811102e-03,-7.168191649338624509e-03,-7.997757475484808948e-03,1.231310424369292528e-03,-3.636410371966733533e-03,2.645417693710724449e-03,3.259575436201725580e-03,-8.119805999648288530e-03,1.117010009341302089e-02,-5.523087885511891552e-03,-1.915771183657099868e-04,6.381731241578386497e-03,3.458771017614128279e-03,8.961614384751256587e-03,-7.442247746159722475e-03,-7.957457156356153097e-03,2.632811021801534628e-03,-2.094233313473877096e-03,1.453880348216939934e-03,2.030605677043457698e-04,1.672000717182350546e-04,4.019473424258354112e-03,-8.368673826985733449e-03,8.562566491369744126e-03,5.517567703656931465e-03,1.140604850148211518e-03,4.675715060746724917e-03,-3.337887018743026656e-04,2.849717003181092417e-03,-3.849896942560606500e-04,6.482193179889149393e-03,6.610525637868562565e-03,-6.009325649644550868e-04,5.646975810324905524e-03,-4.217120467699619901e-03,-8.540974342767166869e-03,3.817620211144622921e-03,3.205267084474959505e-03,-9.669898677664824985e-04,3.506227967262733232e-03,-2.061642794965420997e-03,-2.300201939305407742e-03,8.806246856427966951e-03,-9.037325870953140036e-03,-2.227035946674033907e-03,1.820719654543233247e-04,-2.397132704384396466e-03,3.766828872737073967e-04,1.354207739756689219e-02,7.901378029959308172e-03,5.678017293874926610e-03,-1.758480137351916932e-03,4.949368598266701150e-04,-2.583903892557072764e-03,-4.405549779127192916e-04,4.411878909632822345e-03,1.266277504864553663e-02,5.550563569925181379e-03,4.938702693472279276e-03,-7.197515565765109057e-03,-8.004217376902357517e-03,3.465685475332836926e-03,-2.766671058939606728e-03,6.293266679959959482e-03,-4.416752819885678798e-03,4.611987900682605439e-03,-2.946225423365606694e-03,-1.748024566022598732e-03,1.061887453481324267e-02,-1.201204857022511105e-03,5.526324591599423956e-03,-6.975228058147707119e-04,-1.043745780754573235e-02,2.576281836097008178e-03,3.743097287425853435e-03,3.839205824673240443e-04,-3.877037902783882217e-03,1.693325166789876697e-03,-7.920138403791135689e-04,-1.501458694257778114e-04,-3.686530356410868629e-03,1.160826845710518154e-02,-3.888644579594892608e-03,5.655703076402419753e-03,-2.032377718813545720e-03,1.742932445636374795e-03,3.528814895501732572e-03,1.513659785235921306e-03,3.465674087417258947e-03,6.676990151149312519e-03,1.393443871793835324e-03,-6.137245858152910218e-03,4.155336279892936011e-03,-2.916682837488288223e-03,4.510252088299176315e-03,6.227008203960634862e-03,9.577301806241351762e-04,1.548396354118479609e-03,-9.445040520298370176e-04,8.689325867988428008e-04,5.268388337124623073e-04,-2.525113590114477953e-03,6.548684483164472769e-03,-1.650172355610769878e-03,1.748138297609114862e-03,-1.920550472687284314e-04,-5.250665095558287691e-03,6.357955476737877298e-05,4.830237656870207778e-03,9.080151913577286585e-03,-5.639621463922522837e-03,7.220256732436168340e-04,4.476582805114199485e-04,7.880099430340751967e-04,2.130893964679619684e-04,-1.186633299772727086e-02,-3.716367557784290478e-03,-4.869761471575332831e-03,-4.506301821530162358e-03,-4.919647974094105806e-03,3.085072949943156156e-03,3.271836769735934902e-03,2.530233043157842539e-03,3.021876310660533485e-03,-9.334991528318995271e-03,-6.923675698367907569e-04,-4.111854581899432141e-03,6.621348958480904878e-03,-6.443754131178444948e-04,3.422886004497429323e-03,6.027485479297448046e-03,5.422632846564976253e-03,-1.460390357233224117e-03,5.479465844792773266e-03,5.426445907021890981
e-03,-1.404739473984632043e-03,3.414608062224064409e-03,3.504776657377389314e-03,-8.414907900952419556e-03,4.780328626940414868e-03,2.889024809455816196e-03,2.228259730732688130e-03,-1.185837855024957435e-03,3.465229974872040524e-03,-7.714342987434052719e-05,-5.766389823695448023e-03,-2.475227949862083936e-03,-2.701931550049668871e-03,8.067298742784953861e-03,7.629375338308888805e-04,3.619135408000381630e-04,2.758077561668109863e-03,7.428611813593627260e-03,1.035847643582401461e-03,9.891027463860547347e-03,-9.868594202362681797e-04,9.344717153035605670e-03,-4.606676376434607964e-03,-8.021056334720258821e-03,-4.635071217189236695e-04,1.482214179274743632e-03,-7.926194594615375288e-03,-7.228944165118650041e-04,5.525913320622464694e-04,1.978563044144111133e-03,-4.440344119415777328e-03,-4.606335911313735117e-04,-6.777744195960211208e-03,2.071028557809224523e-03,-6.979850615695704606e-03,-1.472496101418522890e-03,-9.516901123935900958e-03,-2.928066842181334601e-05,-9.330381316458915067e-03,-3.591968174090173337e-03,-7.721367312358717698e-03,1.304279877488422350e-04,4.381542222209500355e-03,4.826775525610853615e-04,-4.607518538395939493e-03,-2.818547315132452491e-03,8.925768083363706720e-04,-4.536123314229276692e-04,2.062746363851170723e-04,-2.572026024831369426e-04,8.442024817215731730e-04,1.694797623809619685e-03,3.736469303713859692e-03,2.967918551184011572e-03,-3.878016480102528668e-03,-6.975357227281296819e-03,-7.381645429412918465e-03,5.648682671786522599e-03,-5.023919007721361699e-03,-8.360618009238408865e-03,-3.382833256027390715e-03,8.618299493979903261e-03,1.227218263556294816e-03,-3.708658097224522797e-03,1.591981839897609269e-03,7.065639524987499080e-03,1.055326858808010694e-03,1.370307681627474811e-03,-6.277406771497657557e-03,-5.234448280549556728e-03,4.654266634968118668e-03,2.062544999024097311e-03,2.652463981388585718e-03,9.167235032282667131e-04,-5.516499897908012928e-03,-2.801577268755406792e-03,3.990304202068833093e-03,1.317669017407580414e-03,-2.363741671991646288e-03,-4.191397609154477971e-04,3.084297518159510691e-03,2.113265241156807219e-03,3.271420250888439183e-04,6.135776579531528139e-04,4.315731095409079676e-03,-4.323695426134505949e-03,-5.663603052335338013e-03,-2.084162512396480361e-03,-7.533751983873381193e-03,5.107907331870657763e-03,5.126380358724519533e-03,9.781126850218355151e-03,-5.101973649964287351e-03,8.000949826182050112e-03,2.790980927456732054e-04,1.393115243071794138e-03,-1.346592122358733793e-04,3.128991208389716311e-03,1.075765568946556328e-03,-1.408604193193153898e-03,3.761896339009960979e-03,-3.191413901130770413e-03,2.188101684863518782e-03,-4.610695380370314830e-03,-4.133044364885672672e-03,-5.106970767557106805e-04,4.731338286151927537e-03,3.429168334951147455e-04,-5.384263247367598870e-03,-3.312277251563252190e-03,6.149030455500623972e-04,-9.735961224436521025e-03,8.531475958871367893e-03,-2.744077764376212647e-03,-5.383714926236682352e-03,-1.946857655617123279e-03,-3.839241139270884069e-03,-2.842885682586956432e-03,4.430489108993509659e-03,-1.994269589752270793e-03,2.611135463828016031e-03,1.516243359791359050e-03,-8.317314810382774037e-03,-1.177324957097043712e-02,-2.757019736422672285e-03,4.265079461409054806e-03,-2.442774293090244336e-03,2.452831749332284525e-03,-2.721755028840626683e-03,-2.379398157386241209e-03,1.471863058143729535e-03,-5.373578137199692961e-03,1.152280885974797317e-03,6.956132200980361981e-03,3.134449490700834222e-03,-2.375207532340903836e-03,3.585346573398858882e-03,1.345877277726655052e-02,-6.137033494092168293e-03,-1.088
621381783281758e-03,3.390790869349213574e-03,4.138517679488580396e-03,5.890377193656432496e-03,-6.645317150374097266e-03,2.112262621128023748e-03,7.599396200819189201e-04,7.251925555323189554e-03,9.562998472240604115e-04,-1.674353110214646173e-03,-1.487244036129228928e-03,-7.404062899557146428e-05,-8.745091412262534850e-03,-4.629922945373713197e-03,2.456191056805394519e-03,-4.687957690129465269e-03,2.755871899632818776e-03,-1.853307969580554734e-03,-1.872551609049793314e-03,-4.654395166458458535e-03,-4.776180045500380902e-03,-2.619906648856410412e-03,3.982521463718034934e-04,-2.784148073362074406e-03,8.819044585027295546e-03,4.821083405438615285e-03,8.627505102226730492e-04,-4.312185664743544561e-03,-1.502197295102807029e-03,-1.064703065334983694e-03,3.921011388168354518e-03,3.742104379494314449e-03,-1.658442654875778811e-03,-4.183891547638497478e-06,1.039890389111128473e-02,1.028874460602957792e-02,1.834190421833708021e-03,3.332608788821052673e-03,1.058364799156265475e-03,-1.265085725462071270e-03,-7.487675663439476159e-03,3.529744049688090038e-03,-1.218250987431596490e-03,-3.936736312693406466e-03,-4.884312153698579934e-03,-8.512724720097863225e-04,-7.598406157803112848e-03,1.713146120560722673e-04,9.798883721727669367e-04,-5.182358782022915148e-04,2.493339501066659317e-03,-1.229694014412483069e-03,-6.182616026475471274e-03,-7.391137884392131457e-03,-3.262121187924339055e-03,7.066205515264109907e-03,-1.894419714058395579e-03,-3.078351264941132839e-03,1.789643129666996374e-03,-1.043344920368405142e-02,-8.430013292823085747e-04,-2.734612958122853606e-03,-4.401790937495661006e-03,-8.946974474273498845e-03,1.610209498546119567e-03,-1.701163084713621862e-02,4.114482967681714308e-03,6.815963616553835300e-03,-1.774339197744763700e-03,1.131689258841851037e-02,4.748342129820588321e-03,3.366466587948665698e-03,4.912422285907936705e-03,1.145085752390685024e-02,-4.960668057634880050e-03,5.976487033577996232e-03,-9.189340021084501167e-03,3.499812615009762016e-03,-2.724771458973541862e-03,5.752434091934721834e-03,-2.137701176948266467e-03,2.973685698570207106e-03,6.715120630846578738e-03,2.526671462766614824e-03,4.442153955925696898e-04,-4.351796469539123192e-04,-2.753159712694627954e-03,-8.402704954453317152e-03,-2.586562338953271849e-03,-6.114353996256823888e-03,4.343982464381412369e-03,-1.186984797660468036e-04,3.497438977171567903e-03,1.741172525167693870e-03,9.605855307373637993e-04,2.758792956170931857e-03,-1.575598658448858908e-03,5.932179464389848722e-03,-3.514489331333157940e-03,7.848221195169244324e-03,-2.880517804580607841e-03,-1.769415117918642609e-03,-4.233316869123514466e-03,4.016637951285128780e-03,-2.052509782463741907e-03,4.055964992968966176e-03,5.753504125777875525e-04,9.236829947226721623e-06,-6.277366290768064292e-04,-7.820046466072948232e-03,9.195658358337338318e-04,3.968069787486075764e-03,-2.644629422536085017e-03,1.116606157485816239e-04,-1.569264417639255546e-03,4.442482942676498694e-03,-7.222738995340643705e-05,2.615306900701050603e-03,4.817213227543261020e-03,-3.206085674746148289e-03,-1.037119587661115665e-02 
5.794020228421087153e-03,-5.134996724723847952e-03,4.803520462066211173e-03,-1.321225496726402071e-03,2.701860981479912039e-03,-4.966733796773852710e-03,6.336784164203788407e-03,7.129507832631680822e-03,-9.149294219402817618e-03,-4.285264206556000169e-03,-8.854462270998826254e-03,2.964882181348990793e-03,4.436702596121572077e-03,4.028313969505916033e-03,1.957777603236220913e-03,-1.731700957019277056e-03,3.294328710533631186e-04,1.926874234686456328e-03,-1.538285037979396776e-04,-9.423641330940648111e-03,-2.412994558080670988e-03,8.682483409483323505e-04,8.035423247689098269e-03,7.048432347779168619e-03,5.951731742368269323e-03,1.679556046097791617e-03,-4.777339258775287895e-03,-9.518154440592056786e-03,-1.559144974227705878e-04,-1.171421634524764726e-04,5.938831828360286932e-03,7.683926850098648153e-04,1.202020612181417796e-03,-2.609610852766952160e-03,1.524544888853848453e-03,2.070515714698125970e-03,-1.870001736487608788e-03,3.288356525591629772e-03,8.035302863583854843e-03,-2.054009723153497945e-04,-1.474851239302946496e-03,-5.294157638591236170e-03,9.132288099170734899e-04,1.511247289997977956e-03,1.801482106476914175e-03,-6.894960238973226515e-04,-7.066053941454231288e-04,2.472565786607644391e-03,-3.396921504192700839e-03,7.532686332732347126e-03,-2.477830797724091955e-03,-1.967640250988134890e-03,-2.786134566249463557e-03,-5.750507558458293686e-03,-2.517265895343099007e-03,-5.232135428536184574e-03,-9.345535329789170231e-03,1.091353198774573305e-02,5.721519592170756872e-04,-9.856396232896911733e-03,2.586488320600837613e-03,-2.753238734399224459e-03,2.549186540035609707e-03,5.523472079087048299e-03,3.765095127515270031e-03,7.816240474437597188e-03,8.850475502889653540e-04,8.409056429906996977e-03,2.027099059899641217e-04,-6.353239825560077873e-03,-3.574108356698500483e-03,-1.675695871606899768e-03,5.424076637033961937e-03,-1.178340961607149103e-03,4.423727775398007649e-03,-7.200057741337069148e-04,-3.703964191802432138e-03,7.212957631766006572e-03,1.848707687369682004e-03,-1.483692888648389897e-03,-1.940064289516034218e-03,3.969564433745636708e-03,1.518926916466606467e-03,1.863556145154037164e-03,1.427292083202452667e-03,-3.872792547439218858e-03,-3.695209423950704484e-03,-3.499054372974055426e-03,-1.181113104106984099e-02,1.657524498764650954e-03,-2.763723189496742998e-03,4.096166366398751660e-04,5.419109613119354844e-03,-3.228338715664290717e-03,-4.783286975088111258e-03,8.727516613981474436e-03,-3.879873001443192616e-03,-1.113301524795236915e-03,-2.967883956503053131e-03,4.556516800914250757e-03,1.648741644482539176e-03,-4.464234596283239015e-03,7.787522795908419747e-03,-7.081394366574560173e-03,-3.205051185924184462e-03,-1.043679190937146567e-03,-5.752222768890419627e-03,-4.360274145418534773e-03,3.705660993366411424e-04,8.436097791509267890e-03,-8.186262654954243323e-03,3.056894500779067190e-03,-6.237860714695269565e-03,1.856775805414310294e-03,-1.909754210995303204e-04,-3.247573259106031895e-03,-6.514226033204638389e-03,5.727910283907424599e-03,2.922025633698691147e-03,-5.709176033171515716e-03,4.363145691322334145e-03,1.048656505167008509e-03,6.045592474238032331e-03,8.353843091417101005e-03,2.860619233944586802e-03,-8.514825983073842988e-04,1.216955244650502364e-03,-6.468708819861454865e-03,-4.440339746870677028e-03,1.402561682226838476e-02,1.041554262274124384e-02,-2.534473859797331616e-03,4.017264741084772967e-03,5.072660916463356262e-03,4.533816905893634820e-03,-1.555774119601736373e-02,-2.848459615850496267e-03,3.200433491038702678e-03,-1.794112996916804800e-03,8.1536783865
20715748e-03,1.301956302473491730e-03,-9.121252566184005761e-03,9.526869789232154983e-04,-5.200861607216731089e-03,-2.463821972003961847e-03,7.453615254841219765e-04,6.497542562115244356e-03,2.262383739772278347e-03,-4.123450500946299058e-03,-5.300446081600782879e-03,7.527391566737471652e-03,6.661242257152533803e-04,9.428674921067014278e-04,-1.486617309515164340e-03,3.245248478143957210e-03,2.305869387114717339e-03,-2.657622494062348434e-03,9.491585471460685028e-03,-2.463690217294999853e-03,1.323605362942648859e-03,3.882393741749847458e-04,4.807500677159222066e-03,-2.456260886724906650e-03,-2.226354151068874257e-03,-4.998326522312386035e-03,-9.936015438326419730e-03,-1.628429273175402685e-03,-2.925495356265550660e-03,1.767876783190230373e-03,-1.284520496661818376e-03,5.740587893880110676e-03,7.016025786161060249e-03,-9.722703885854026493e-03,4.221061255269441780e-03,2.996043544615351831e-03,-2.874342268167361363e-03,2.442402042699557199e-03,-1.916796800815631164e-03,-1.752913616393899829e-04,-5.487053300771867030e-03,-3.922594911678040953e-03,5.402935956400721842e-03,-3.806855929245416340e-03,-1.465075008042231869e-02,-2.321536114456423418e-03,4.301282030885117402e-03,-2.732810234477051070e-03,-2.309725863748367421e-03,-6.371937735975431900e-03,-1.857307638547217804e-03,6.350964697411985813e-05,6.530288187354206563e-04,1.437116680247368201e-03,8.354675576625492789e-03,-2.095426479683116204e-03,8.738366968087378506e-03,-4.351616530082463158e-03,7.060793356594223004e-03,2.381936238380071621e-03,4.965100512704858672e-03,3.977757866142757510e-03,-1.235515958608569657e-03,-2.572605177076273293e-03,-2.212575474574040267e-03,-4.853494594889433585e-03,-4.597787569630272411e-04,6.150863390918463736e-03,7.368383388016788021e-03,7.675598512950103890e-03,1.793078661654568624e-03,-1.211329446045410815e-03,5.561658352239373659e-03,2.699873074675009001e-03,-9.342358023009506669e-05,1.294000913370551946e-03,8.554171436755061975e-03,-4.409219609581622838e-03,-3.209237316555156420e-03,-7.645661260177763442e-04,-4.060566801081917643e-03,-2.968532344166999876e-03,-1.956755968703886167e-04,4.395308515589938533e-05,5.059154556125727668e-03,2.560432093877381202e-03,-9.671426631859474793e-03,-3.813748421981128477e-03,-4.832827194089154080e-03,-4.969250479775092627e-03,3.792449011486844716e-03,3.416751845190180853e-03,4.292082254238228575e-03,-3.165922679500123970e-03,3.600886583163512462e-04,6.258183570731395799e-03,-5.043923407978686928e-03,9.975888055288994333e-04,-6.315043722101605292e-03,-9.463753503277176973e-03,6.817770982806586888e-03,7.678999463076184757e-03,-3.686695648485591430e-03,-9.307599915509660871e-03,-8.939076238499888902e-03,9.709566001301550217e-03,-1.267798260070387636e-03,3.508435342724222552e-03,-6.740283060125092963e-03,-3.334715262271041505e-03,5.373726368815967894e-03,4.204983106695251403e-03,-1.007665652200570236e-02,-1.338891365663902409e-03,2.277171679117566579e-03,-8.178322846927704422e-04,1.027755128400778886e-02,-1.319132037585003022e-03,-5.411571917684761951e-03,-2.333240792583924153e-03,2.227628243938489360e-03,1.379255638333440380e-03,-5.917516573830223191e-03,-1.925362460277423016e-04,1.477726184653814327e-03,-7.376166817303984717e-05,-1.349635724965610184e-03,7.960616129605002297e-03,-1.152506567964323159e-02,7.022106157216544652e-03,2.550810573428519655e-03,2.235025361841556143e-03,7.956079577261586204e-04,-2.242870288744671509e-03,-2.528424218288020508e-03,-6.346207731818718015e-03,2.654307197681577985e-03,-4.017999356515974257e-03,1.730056471669041496e-03,1.416144084902898120
e-03,-6.199955632057840144e-03,-3.930883679640463299e-03,-5.600128769878857720e-04,-3.764945913393256006e-03,-4.195168667024243118e-03,-6.206888799421098361e-04,-8.859604458368780305e-04,6.556250793604829867e-03,-7.731188694509488930e-04,1.797473850958761525e-04,1.073196135602246318e-02,-2.539735966041090000e-03,5.628850239656158980e-03,-2.773863854738094648e-03,5.794266489840734988e-03,-3.355290310946415673e-03,2.259413269102111734e-03,-9.953600754276346191e-03,8.670111141156313511e-03,3.144450001888913094e-03,-7.928592665442551254e-04,4.759036176353131226e-03,1.461741864290455259e-02,4.780965765921863127e-03,-7.555011973714157412e-03,2.682489459206605174e-03,-6.740780978927904052e-03,5.274282987756214423e-03,-3.395455128176143658e-03,1.517803660907868441e-03,2.453147141562930789e-03,-1.389081122702700124e-03,-6.732956824285617203e-03,-5.273093756966915763e-03,-4.557846671968126054e-03,-7.301153105796851644e-04,-2.676261030200451606e-03,2.737419497934882179e-03,-6.309780223965206700e-03,-9.245841423836732072e-04,-3.159954807007677238e-03,2.153102081123341605e-03,5.296368499718274656e-03,6.848786476417252807e-03,-1.688789247644684275e-03,-5.263370680403085290e-03,-3.055331822606435765e-03,1.366964216638371114e-03,-2.369097114956377880e-03,-2.547795587299515711e-03,6.977681156258911093e-04,-3.751996440619251218e-03,-6.629344905922719992e-04,2.860037548355019114e-03,7.035665123594044695e-03,2.642589608750380709e-03,3.756710350663798256e-03,3.143885394127575970e-03,-4.780749099811613771e-03,8.136256382370267090e-04,2.735952529502929612e-03,8.977019675258792603e-03,2.121015013284092109e-03,2.174714938632401305e-03,1.699504600118071024e-03,-7.884364198218944339e-03,-3.788309125865451862e-03,2.343837374351100122e-03,2.955570166053387866e-03,3.495000507535395390e-04,8.689878704695770326e-03,5.732324968333693191e-03,-3.509984817840612788e-03,4.936369856947226201e-03,-1.191457986948499638e-03,-1.092784719155429594e-02,1.082807659144852108e-02,-6.958486887823091792e-04,-6.394769944132969522e-03,-4.732230774915744016e-03,-4.369077444850837447e-03,1.048395654964777435e-02,-1.184963326014042183e-03,-1.652413766183602820e-03,8.840508141544209039e-03,-1.011919169116555495e-02,-3.268268568249009434e-03,-7.254294148110997965e-03,6.915957572746309935e-03,-4.827452744398391324e-03,-4.027684208810760474e-03,3.882488902417367740e-03,-5.265712993647962172e-03,1.239719401936345729e-02,4.190588613185507877e-03,-3.180921572521722054e-03,-3.932734114424902075e-03,-4.818138253813164518e-03,2.233931920970222158e-03,-7.402749739154989954e-04,3.261302191229513772e-03,-1.089613312354521281e-02,4.048903775401791784e-03,3.204547976694556259e-03,-2.165327113429501345e-03,2.842598854479409971e-03,-1.636018830713992320e-03,3.934496978947637363e-03,-9.192549652423489745e-03,-2.655579939222653570e-03,-5.392894265442154007e-03,-4.183425186901159644e-03,5.475241002328786084e-03,2.611579086660847059e-03,4.138192083034034248e-03,2.906761542071137436e-03,2.601660637526755776e-03,1.491007220815504910e-03,-1.003852240140648382e-02,1.731329921775380618e-03,1.300461827786143819e-03 
-2.628462004830780987e-03,5.464205326882248456e-04,5.244668386365606248e-03,6.407025079574543355e-03,-1.133543936529997310e-03,-4.010838234073415241e-03,-8.387973032593216277e-03,-3.310730942435253111e-03,-2.908438970042499187e-03,9.543730498282548087e-04,5.586212999142498893e-03,1.784868107028662193e-03,6.797462968300863745e-03,-5.259721211294600927e-03,6.185052068775371593e-03,6.966007373128675715e-04,1.542004283147388466e-02,-2.239248525779867667e-03,2.872867936979706799e-03,1.051493768151505427e-03,-2.883127032135270347e-05,-3.407048940293580172e-03,6.026916685660910211e-03,3.350848742874074342e-03,5.921650443659216010e-04,4.981991242005968934e-03,5.517727444318699069e-03,-6.986288297673568307e-03,7.783591814482211137e-03,3.723685115305393721e-03,9.685626523428910589e-03,-3.239050477982726144e-03,7.856104949126464093e-03,1.607846870896264624e-03,3.574294881239282063e-03,-1.350462710992636700e-03,2.156027701131617205e-03,-1.079808794473101352e-02,-3.494514828734739250e-04,-2.806850661838553637e-03,-2.520336028080357783e-04,-5.435234327150740251e-03,-4.698513866875394032e-03,3.617702668684234703e-03,9.571242282680546276e-03,3.021337161338627161e-03,-3.292356071959066932e-03,-1.415147882414032098e-03,2.073174442813921058e-03,-3.757856791406739578e-03,-1.287452206471525768e-03,5.780305993439267820e-03,-3.948414907643789037e-03,-7.213643308365002421e-03,6.977000286092183493e-03,-6.514455338562910938e-03,-5.000675451959341912e-03,4.646808468985370172e-03,-3.420605721897194973e-03,9.592306620497243960e-03,1.001553042279480966e-04,-1.735345619687782108e-03,1.401320913034987303e-04,5.732108849156733292e-03,-1.091047904724161410e-02,-3.258874115126942847e-03,2.635546282321322899e-03,-9.341122824579244175e-04,2.564890028501869059e-03,-1.197200378610123548e-03,-1.271172403800632442e-02,2.993036263636529408e-03,-2.495877393255501592e-03,-4.725361115237600683e-03,2.407960211817224572e-03,-3.419054947827835245e-03,5.105155035676914657e-03,2.059536073395980098e-03,2.905987776786669993e-04,5.210236835084861538e-04,-6.090000927840248136e-03,3.718513867461008954e-03,5.939204516725907733e-03,-1.166131968891760858e-02,1.134242807921490492e-03,5.652074331247222347e-03,1.648726647167694315e-03,-3.664648009705618421e-04,7.118152620586580862e-05,3.694684393270204759e-03,-2.198359638476872057e-03,3.730646628894543869e-03,6.279105132903044575e-03,1.404331371437140305e-04,-1.889704259635381200e-03,-7.774570131106781426e-03,5.422097627098629823e-03,8.072703406641748569e-03,2.686800636420375206e-03,5.070847013752674201e-03,5.835333558920093207e-03,4.421294376823281004e-03,-2.023755221072643610e-03,-4.974572376215447705e-03,1.732027595990816727e-04,-6.972096212827987548e-03,7.201046626746539225e-03,1.498313228871683790e-03,4.889058644091385207e-03,-4.249289587020511905e-03,3.554155047594607323e-03,-7.767831335872553943e-03,-5.681611166216809020e-03,1.172079233555833877e-03,1.977467917174798029e-03,5.515596683439307965e-03,7.506037719167055026e-03,-9.399574877006254894e-03,7.358457933593612679e-03,-5.932248784089296303e-04,1.601158888080677668e-03,1.105608944780312472e-03,-2.860160278491326075e-04,-6.551293937032233623e-03,-1.872068704109245687e-03,-2.961301950984340271e-03,-3.433933386430841443e-03,-5.179811279260700813e-03,-5.137184411703615144e-03,4.556662894481997537e-03,-1.672507110097317925e-03,1.075965314659566106e-03,5.214177813466037113e-03,3.947793901637446784e-03,5.814287815161038077e-03,5.061654932708033704e-03,3.050434429118985442e-03,-2.703772021914425255e-03,-6.687884000248521315e-04,-5.2694635105799457
82e-03,-9.950134406366993242e-03,5.188347893103275633e-03,-6.865845831128971830e-03,1.190248335291135438e-03,-1.327462333239793572e-03,3.984262695868717383e-03,6.367696392779144225e-03,-4.699637867153259136e-04,-2.556891789546373531e-03,-6.960130361124905289e-03,2.191083134567597870e-03,5.568915292396395961e-03,5.262000464857911029e-03,-1.838200881512010859e-03,3.745283676067119464e-04,2.963172598955175416e-03,-9.122817020425002046e-03,1.138320621575387905e-03,-3.056764535782978756e-03,-3.421062457343758238e-03,1.106570661993076538e-03,4.696561601150906970e-03,-4.421652121962395081e-05,7.309527913468906110e-03,7.617979337394702871e-03,-8.863425835881648049e-03,2.280201182965891065e-03,4.024550466109246688e-03,-3.779709552900668845e-03,-3.469384725207434739e-03,2.825504950526927517e-03,-2.017318127871752832e-03,-4.831139783926928391e-04,-2.082423405416904388e-03,-4.463200864640987781e-03,1.908371729660543860e-03,-5.687975502766081000e-03,-2.033166387020524882e-03,-1.741638954988669608e-02,5.333941368363585324e-03,6.433158495213683881e-03,-5.168233564278300676e-03,9.951781422334858313e-04,-3.029653669276857557e-04,-5.179503112860314208e-03,-4.319329073854087132e-03,2.726774860564925695e-03,-3.785493199233372439e-03,1.692547285875196103e-03,1.053201794460034701e-02,7.055176153590755374e-03,8.294799390962455388e-03,4.405192311498488944e-03,3.518912146388283348e-03,-1.350051794493528591e-03,-1.734182927115843426e-03,-3.064907374814286340e-03,-6.229975837921750657e-03,-3.922832691992117482e-03,4.851946475980161745e-03,3.872382068609494556e-03,1.249560332168511321e-04,-4.090251105856371021e-03,-4.437500440416546586e-03,-9.286927772489727076e-04,7.610649188173524218e-03,9.380090188379744745e-03,7.206410801735252088e-04,8.440664926964288814e-03,2.025215473172016674e-03,-5.082124623862223746e-04,-4.497749496569533562e-05,-5.710690094933650388e-03,-6.471543847710630598e-03,9.776002444089558745e-04,4.403755569699331629e-03,4.016898009583966082e-03,-1.363329777378914642e-03,2.194440703099720131e-03,6.471378298138362808e-03,-5.328287600020939745e-04,-1.896953269231003211e-03,9.703784484031962101e-03,2.323021412540870155e-03,-6.187307303521325658e-03,2.343092668710991273e-03,4.400801149549133685e-03,-2.027907534214914726e-03,4.678242021395530303e-03,3.002898128486029047e-03,-1.661507009623547186e-03,3.434762897351609310e-03,1.929319310732571270e-03,4.179757760091067434e-03,-6.347704626905666288e-03,3.027896451070704199e-03,3.354418782496861214e-03,1.634259618184669832e-02,8.195856132756748798e-04,4.825370368349716881e-03,2.815105960750207293e-03,-2.488582386518480884e-04,-9.053103588280345506e-04,-5.954667223954615996e-04,1.138357689793293749e-02,-9.801268673606009646e-03,5.047020634231851001e-04,3.674621337618986464e-03,2.330376038248927785e-03,-4.681924523700226048e-03,7.367464804933030603e-03,-3.921077808335245336e-03,4.085497546203006002e-03,2.416796322768111852e-03,-1.825941811091477381e-03,-6.079703214520593016e-04,5.102216588232485854e-03,-1.291284622711143465e-03,1.331172913555010544e-03,7.357259550889386733e-03,-4.933070315956492209e-03,6.485930267193807004e-03,-6.542834138882494890e-03,4.614722252141871539e-03,-3.472796022364170627e-03,9.462497023908177790e-03,1.593455781815734246e-03,4.808545418888434363e-03,7.098728562944149958e-03,-3.843079814985970316e-03,8.505101457016392766e-03,-1.219589523857975453e-04,-6.106584232520009727e-04,9.659694434480249441e-04,1.468794455971963141e-03,-2.041350599304137935e-03,-5.217164805623826160e-04,1.179361886014109601e-02,-2.435970618506969141e-03,3.61868923
2351105390e-03,2.970901047591149088e-03,-2.858037116375538450e-03,-5.740033752085237258e-03,-2.934701885537471105e-03,-9.279381955382182828e-03,5.780573995266483599e-03,4.129142389777637825e-03,-3.526458688796946673e-03,-3.590271026178998987e-03,-1.874360437123659742e-04,-1.034105974239171548e-03,2.806935450951994373e-04,6.427608460132246994e-03,6.322554044877201976e-04,-3.461930138016801762e-03,8.109336555827219295e-03,-2.008624096119294034e-03,-4.777058126604841887e-04,3.585902646643865811e-03,6.602771872601763533e-05,4.643185337984692511e-03,5.974785959523620966e-03,3.182594533927238321e-03,7.259751591592540632e-03,5.325584609047695483e-03,1.943862786791086686e-03,4.846984454434689893e-03,4.994158882774361700e-03,-4.603636650143442914e-03,-6.797610362555391895e-03,3.293129797347769082e-03,5.955671434838959166e-03,3.790201217076025111e-03,2.735462180064768354e-03,5.694531377846253489e-03,2.883118028548966628e-03,-1.131781354926173801e-02,9.402049672473969356e-05,-6.735425373986298272e-03,-6.087801569880976066e-03,1.325316156927667036e-03,6.996957226885686283e-03,-3.428751837299174286e-03,-1.690587344652125321e-03,2.052878698087927910e-03,7.411800837010177820e-03,2.822265873527529841e-03,3.766376493080684455e-03,5.241233802750279967e-03,-4.483761569031416774e-03,-9.922711939393574938e-04,1.035761654844960349e-02,2.904739591167737525e-03,2.605472407150688768e-03,8.003523951159146471e-03,1.127385024593215851e-02,-5.347291749876674320e-04,7.918834363328047798e-03,-7.853708419436925339e-03,-1.389880531749120886e-03,-1.912899474155487249e-03,-2.741442378987866014e-03,-9.328634836075691680e-04,1.217169240754136612e-03,-2.688462515729048413e-03,5.748010122895416728e-04,5.589527168674628468e-03,-1.516110532508505590e-03,-1.186487936544131441e-02,-2.508636595093157763e-03,5.882541965813196591e-03,-2.570243132336659259e-03,3.790256515860711013e-03,-2.901738735631909181e-03,6.223854853087310392e-05,-7.573075013336563049e-03,3.174801801048510175e-03,9.919810185213843653e-03,-1.603057815942169478e-02,-4.487031507186818417e-03,-6.547154757945845534e-04,-1.232538859916106457e-03,9.528026348286505742e-03,-4.666816041914234083e-03,9.770854401881830589e-03,-1.540886923063627262e-03,1.198117153282007158e-02,-5.861298228819483572e-04,-3.782585294655528476e-03,1.598490223049650084e-03,1.601002886846619819e-03,-1.070917346524882579e-03,3.547709962426765472e-03,-3.009639476027494917e-03,-1.006949255873533988e-03,8.113868439343051298e-04,6.560689121259928071e-03,4.553936085430260704e-03,2.561083271922245485e-04,3.257653998051069418e-03,2.059022131744839192e-03,-4.942648587111627551e-03,2.928989505146506987e-03,-3.514332429765977366e-03,-4.291775228619689429e-03,1.803915353161721774e-03,1.978493106837947348e-03,-4.075705067911849534e-03,3.522782823624407301e-03,-3.296876528098569015e-03,-8.629818315354918432e-03,-7.694419744734667249e-03,-4.212305538020542570e-03,5.868881514522473510e-03,-2.875980050347449069e-03,-3.786989517655944739e-03,-8.790569841108241030e-04,9.319671820001305864e-04,9.356941486177509622e-04,-2.885033853447827121e-03 
-5.496287638922633858e-04,6.791084410932905074e-03,8.907086168135262480e-03,7.087789341606916087e-03,6.517219130725658040e-03,1.998513895652072846e-03,-2.402341193707401793e-03,-2.639385013866888275e-03,2.574850106943431163e-03,-4.579979542871147856e-03,1.556854932455882350e-03,-8.498751490825590810e-03,7.476936359700861977e-03,-8.788696390530070443e-03,2.300000700389209264e-03,3.585705926030330771e-03,4.337395167192796032e-03,6.160279567673530854e-03,-7.608253576162200783e-04,-3.715132287367368946e-03,-4.194701501278373743e-04,-6.098691260700988409e-03,1.590034418990639645e-03,5.072067693890229328e-04,-4.613166995891755488e-03,4.082845946749585724e-03,-3.024528102165444793e-03,-3.491388567558730265e-03,-1.390001765529108721e-03,4.382446861261030287e-03,1.161869501906590517e-02,3.308840274541994132e-03,-9.031259188102332570e-04,3.146741352662180049e-03,9.339140388583955926e-03,9.124531683764786072e-03,3.319102785236803521e-03,1.443752448567069092e-03,-5.236238951068940446e-03,5.516774053723167738e-03,-7.574759804001717119e-03,6.911598073296949807e-03,9.275645322505568589e-04,-4.699413603749686905e-03,1.115914470185844047e-02,-6.041293014045279793e-04,-3.846768264492325098e-05,-2.731423572105725825e-03,2.030605117764089276e-03,-7.419121954758394616e-04,8.223687040447045377e-03,3.631123114263945951e-03,2.289974252353328948e-03,-3.616706819520504324e-03,1.367108205148767977e-02,2.411028337060958527e-03,-1.169925500637043238e-02,-8.916070680526770251e-03,1.746782584907353563e-03,-3.338881624519802332e-03,9.600504221132745084e-03,-2.210027442573897060e-03,2.752017771958956659e-03,-3.100448600753658197e-03,-1.416804622377603037e-03,-3.007369925966144552e-04,4.810042207000936841e-03,-3.135694236267242384e-03,8.860411578523342407e-03,-7.932580174674699891e-03,-7.912434953580914192e-04,3.045345899926047394e-03,3.655456217574401816e-04,-2.329299768026112279e-03,-5.019981872313175435e-03,-7.247573192177487241e-04,4.209677076822878131e-03,1.029706448816595198e-02,-1.846811531760828148e-03,-1.710249159672557731e-04,1.958113831228378704e-03,1.021000964386364201e-02,3.809337618558017620e-03,6.031482702885956155e-04,-1.075187175582094888e-03,4.274737633778761051e-03,-2.530695106571314613e-03,6.284818614564663224e-03,-3.348438379746430070e-03,1.518586945405886923e-04,6.237571265098813475e-03,-2.281291598218267418e-03,2.152813516159821864e-03,2.440372940396417969e-03,6.083131634105445537e-03,1.820547821807859228e-03,7.754871742560049894e-03,6.732490856387158933e-03,6.695463266974548827e-03,1.321731509231987775e-03,-1.170397741491239606e-03,-1.622719072348862909e-03,-4.662589613056663022e-04,3.826846858353458256e-03,-4.988299691733262542e-03,2.009025898096269448e-03,2.796462895416951949e-03,1.773398840791508930e-03,-2.391949404808477089e-03,-3.438511863652673681e-03,-3.417165774091587491e-03,2.676392216913714406e-04,8.510038014902489983e-03,-4.727805823991430498e-03,1.619788318434718091e-03,-3.383267670427948966e-03,-7.752884663109487276e-03,5.923142925926112856e-03,-4.597857291204123648e-03,-5.181206763734389799e-03,8.833801417445229925e-03,-2.149615681397259049e-03,-2.495018385278186413e-03,5.579197895452873458e-03,2.157395081925829071e-04,-2.364614435052765423e-04,8.887490625697880937e-04,5.338292927791803144e-03,6.620510213714459781e-03,3.482919674657358093e-04,1.973666856328805397e-03,5.877685169265380039e-03,4.412683437660542933e-03,-3.804394672321262036e-03,-3.280899304539365395e-03,2.167029246190271732e-03,-2.498453539778954912e-03,-5.919138751947647381e-03,8.831208322768630296e-04,8.46033140256390037
1e-03,5.438727590073645299e-03,-9.004085054104458974e-03,2.218753799072089705e-03,2.672934856966523425e-03,-8.408120154455654693e-03,5.632975720718128541e-03,-4.898321335178331022e-03,-3.635975356355555365e-03,-4.858058336938551013e-03,1.381687850843153591e-02,2.411931187236790279e-03,1.139439435487019613e-02,-3.185551031439960452e-03,4.067617843749403612e-03,-2.988757121609217468e-04,9.136217797839515237e-03,4.015083485380981088e-03,-4.953576297911164820e-03,-7.483047520394244673e-03,-1.150207357624140336e-02,1.308362737911575790e-03,1.008524359372358703e-04,4.740432788783450167e-03,4.407056327776826855e-03,4.499599808957104388e-03,-2.989619609978162382e-03,1.044222696433295440e-02,-1.185830406289588128e-02,5.341566716016397064e-03,-4.589340141742811845e-03,-2.477157067719271827e-03,-3.354890689995794137e-03,-3.781085947411394997e-03,2.958818235146033933e-03,-2.668194895206311024e-03,-7.418639252502918786e-05,6.061798914913318903e-03,9.733332725414316211e-03,-4.421952136026458154e-03,-6.312830506978989349e-03,-3.847601068033353229e-03,-3.773095048055699358e-03,4.010422619231414874e-03,-7.368401691795038472e-04,3.673989384636827064e-03,-1.250062330318934124e-04,1.142969995258491696e-02,-4.372351784044774432e-03,2.907248615535625805e-03,3.119492921351009126e-03,2.187340717689869197e-03,-2.057231018061085690e-03,-5.894276479297429596e-04,3.831346196280709365e-03,6.020003201593146155e-03,5.891018486742741634e-04,-1.135056630085960793e-03,-8.847118178676309161e-03,-1.098671503502531184e-02,-3.841637999407140989e-03,1.345677584413435815e-03,1.226976179495650613e-03,-5.407399515811103265e-03,-2.143697570858628561e-03,-2.735489740747187024e-04,3.942487493214125193e-04,-8.706001212763938082e-03,-2.680744341353279072e-03,1.566456604573550652e-03,-6.537560909175420699e-04,7.346628540563448183e-03,8.804013598983145492e-03,8.255001919960556177e-04,9.650913942024927361e-03,4.165064452130501013e-03,-6.045517687208198584e-03,3.111586915679934607e-03,4.373686925668622376e-03,1.333507736820998798e-03,-3.595916164785899552e-03,3.801709119424291541e-03,5.129842034057626420e-03,1.300885470346494280e-02,5.522180004195200408e-03,-7.605477932148555636e-03,2.056872506058009687e-03,-4.400876326629653626e-03,-4.377888621383157521e-03,6.558648874596007851e-03,-4.006048093410595132e-03,6.301379266048832452e-03,5.474533245030384181e-03,2.690552061753421088e-03,3.111009713208446378e-03,1.015521745319688464e-05,5.108645915290045345e-03,-1.496358172223932964e-03,-1.553484258720365602e-03,3.363062476665781494e-03,-5.607472080042244164e-04,6.104778965556393461e-03,4.694413447954979245e-03,3.558609063309446999e-03,-1.503553503406473762e-03,-1.386573068646981127e-02,5.186865643350665848e-03,-1.750410821038863118e-05,7.364855805580826885e-03,-2.636143211102295447e-03,1.036446228662914019e-03,3.213739766426317989e-03,-9.251703634078286848e-04,4.337527647719844649e-03,-5.718014930064771288e-03,7.564201353465914955e-03,7.222723874836736377e-03,6.900847743129476242e-03,-9.196987765005046724e-04,3.640401448885907752e-03,-1.599983678796934594e-03,8.804207971300870988e-03,2.373975371014115654e-03,-2.118182945378912010e-03,-7.248719998020838537e-03,2.672580255593492472e-03,8.206604874809363126e-03,1.399365709107229820e-03,3.052534975288468425e-05,-4.596185957979994838e-03,-2.980190107325903122e-03,-2.769635378334435287e-03,-2.352184159227957861e-03,2.609767739638100149e-03,2.605381294214672520e-03,-6.680936036730206716e-03,3.370907562720884456e-03,1.123886716712157443e-03,3.948721887624654855e-03,5.309381061255221551e-03,8.34430871834
1564984e-03,-2.424738775996885306e-03,2.403201616525208698e-03,-6.943380008573422652e-04,-5.011634780278466025e-03,1.634304194672170580e-03,-2.960079253790989643e-04,3.404238717274188364e-03,-6.804322853661986387e-03,-8.373762013397140908e-03,2.608478206832800031e-03,2.802184631369440335e-03,4.235401882167304233e-03,-2.705734652374127379e-03,1.307201783903054687e-03,-1.543206991062377872e-03,-7.354450069525749964e-03,3.861045237104301486e-03,1.274513970220606147e-02,2.442250972818103393e-03,-6.971046537230458744e-03,-5.072761979083509863e-03,7.623664083784333252e-03,-4.139877394313618898e-04,9.538953547554974641e-03,3.217846724266096217e-03,2.971653036020589170e-03,3.562697320093697294e-03,-1.147367819745306709e-02,4.692106675470691783e-04,1.344789078683567354e-03,-4.151272285977213386e-03,-2.505888383222219969e-03,-5.459828354969976927e-03,-3.404611810636969864e-04,-3.362462410754864239e-03,3.811598118111676065e-03,-7.051977242694056763e-03,3.162357540188200783e-03,-6.966304409411705478e-03,1.973483140878741124e-04,3.774877549860735336e-03,1.010437014119804251e-02,7.086897162838699110e-03,-6.727743356295938504e-03,1.702311466900950166e-03,-2.902377384161299381e-03,5.261055140693637021e-03,-8.884554763998448942e-03,3.359699597310565004e-03,-3.289381960114443754e-03,9.048952558301402369e-03,3.638605792850451142e-03,6.295297227146576767e-03,3.549635134952943555e-03,3.374456174315505676e-03,3.444820406769530299e-03,2.410499153941719156e-03,-5.299736683016643447e-03,-4.061718470385490727e-03,4.231048715596436494e-05,-1.303586536213734571e-04,-6.658703692680877258e-03,-2.893685655784493556e-03,-7.689640147304781410e-04,-4.121756594073203682e-03,-6.404626112531452975e-03,-3.063500838463362359e-03,5.799645311968153541e-03,4.047635759203776183e-03,1.371075566744293722e-04,1.281411308789036970e-03,8.370360749021817093e-03,-2.514635498306064254e-03,-2.889635395357542918e-03,6.899427344422103328e-03,1.863659737046102251e-03,7.500748006349033936e-03,-3.573739084743940853e-03,5.521135506090799408e-03,-1.412033814197376550e-04,-1.420745743422309991e-03,2.320835192863202230e-03,5.378570026843590771e-03,5.287688423382536085e-03,2.538127552050495860e-03,-1.791064194527844117e-03,8.380556268220231558e-03,2.039785172605771812e-03,-2.948502614713542636e-03,1.792731077823449390e-03,-2.858798802210620483e-03,5.869385247890852343e-03,5.002623627423668021e-03,-3.620650177848288610e-03,-7.450595209094079685e-03,8.137322546449913749e-03,8.352471550730270183e-04,1.695853721371830042e-04,-4.140113115250821860e-03,4.280608927664240546e-03,3.348005718297289521e-03,-1.450447617429402852e-03,4.781984352890890529e-03,5.445093138636124958e-04,-5.643016347390635186e-03,-8.770774778591169246e-03,-5.071302810932674932e-03,3.272707247975675067e-04,3.847934120055687950e-03,-4.243401968517553210e-04,1.744740071122303842e-03,-1.894539024756453386e-03,-7.555092045356671775e-03,3.548802083339685186e-03,-4.437223138708624136e-03,7.490200093515874541e-03,-9.222194315355124574e-04,-9.763263745376909173e-04,6.726146604809346390e-03,-1.817516009925909627e-03 
-9.995579128086514581e-03,-2.347628822573098219e-03,-3.719594646906449528e-03,4.096982308912324421e-03,4.319215357272408219e-03,6.489083990358147577e-03,-4.248248217289374144e-04,-6.107960324041701410e-03,3.515561219594591155e-03,1.831002966270091912e-03,1.386555246067518177e-02,-3.586637530876649223e-03,4.160573495819429660e-03,3.143324305163847608e-03,-2.750617514543007933e-03,-3.640767899408594308e-04,5.033909907296575149e-03,8.458084801660643512e-03,-4.461866968833603717e-03,4.989939698006886325e-03,7.461251611371178850e-03,-3.400482562017119719e-03,3.863187591769375387e-03,9.619872226111637892e-03,-2.866676127353769317e-03,9.497748412019684297e-03,1.088814822817964385e-02,6.974198350203451987e-04,6.547085909006709165e-05,1.234277272090794804e-02,2.028003813047182108e-03,-8.162585471510728011e-03,-4.930254362648428881e-03,-7.363796110068210332e-03,7.352200485864773686e-04,6.042007306042408710e-03,8.069827980561527711e-03,2.387068679636331590e-03,-1.111387596801330932e-03,-2.368546591135130882e-03,-5.008479831515822052e-03,3.706251007801372212e-03,-7.029691091963063271e-03,4.103281722544383989e-03,5.834299989856307420e-03,-3.596780776094463377e-03,2.230098238252111419e-03,-3.409798697647544244e-03,3.784442428959007912e-03,8.779512800307868320e-03,-1.394934616215166324e-02,-4.687200682888744557e-03,-1.269265574590103035e-03,-1.476271092671217408e-03,-4.546564581119886704e-04,-6.048623447691924807e-05,-1.093060824416927385e-03,-1.629194494953684199e-03,7.625138348249155262e-03,3.713134537999211900e-03,-6.042840674308014651e-04,5.112402368599703109e-03,-1.011214110408879298e-02,2.025309240861132998e-03,-4.758334811504329361e-03,1.962268297321219813e-03,1.091526090840054183e-02,-2.086177555708503776e-03,-9.407788772679268052e-03,2.476291122002155596e-03,4.797212707520670874e-03,4.801540856499934319e-03,-3.231503775060532400e-03,-1.444548135559585401e-03,-8.762771394923932894e-03,-1.147221550155850048e-03,-2.664549279970368067e-03,-3.977029291798764819e-03,1.186831205486384589e-03,5.348669378159516194e-03,-1.350550017848887140e-03,-1.036508540695265258e-03,1.906259252651930618e-03,-5.718628882794550040e-03,-4.108382517946234345e-03,5.206113904969246302e-03,-7.762833976292455966e-03,2.457228127653031911e-03,1.582040753327215764e-03,9.050564832304220861e-03,-4.628261212055772081e-04,7.564392818230253161e-03,6.879664199404012144e-03,-1.102282593120798744e-02,-1.165823620123248618e-02,-8.255940651971565626e-05,-8.150002166237936335e-04,-8.071400073669366890e-03,-3.065244186524220937e-03,-8.370208153999678102e-03,-4.841000848804753731e-03,9.377858456379246227e-03,-1.393735969703442223e-03,8.686404997188723956e-03,2.135600444988991053e-03,9.935548200105206486e-04,-2.627769943247299997e-03,-4.870893138765858125e-04,-4.795653467089669442e-03,9.173290575446949383e-03,-3.317110470960186932e-03,4.737587877884876148e-04,-5.632000962720021178e-04,-1.130080498458022791e-03,6.118449126198328460e-03,6.760690582656349139e-03,-1.114622649136769086e-03,-7.608953136846559155e-03,3.250369742816656190e-03,-2.807855635114052655e-03,-2.422426351356901132e-03,3.687460385831246770e-03,-6.293306154543317626e-03,5.393098150300728276e-03,-1.117002421751009042e-02,5.508479868684480404e-03,-5.578845686188097473e-03,1.515899726805314957e-03,-4.049291494064092591e-03,5.602914582062851548e-03,-5.467052555601607279e-03,-1.737294857980341625e-03,2.886552481095509045e-03,6.390700633273071161e-03,3.489880377926631722e-03,2.002994511578718318e-03,5.995539633683777576e-03,-3.754594981271414805e-03,7.471937359074409639e-03,-5.504570
459436372055e-03,8.354968448663209826e-03,2.919122801417267307e-03,-2.275739073085309653e-03,-4.032198553965453337e-04,2.841749057311527236e-03,2.027528180393574873e-03,5.434258749931442464e-05,5.743076097433237430e-03,-1.703343120921086470e-03,2.172763053881993350e-03,1.622414748677098589e-03,-5.601322951590528322e-03,-2.091654631442066686e-03,4.398998068466660610e-03,9.670948224767432488e-03,-3.263284269405576817e-03,-4.903549047426232314e-03,-1.048874628052415270e-02,-9.551700867726694091e-03,6.095698347406343365e-03,8.013754603481020033e-03,1.000670238889597285e-03,6.082293321703997543e-03,-6.080522602172552804e-03,1.130406789023490890e-03,6.361215975560725877e-03,-4.650287163579499249e-03,-8.014305902352981612e-03,1.604740986522378227e-03,5.044407142733647702e-03,-3.919080830175727774e-03,-1.806835695644906387e-03,4.864881170152582715e-03,-1.531446608037569188e-03,-3.386988679837324304e-04,-2.123250284567591850e-03,6.317177025316279880e-04,-1.045771918980744394e-03,-5.270169154438631050e-03,3.973958273403714256e-03,2.526669568209121731e-03,5.731996639262481978e-03,8.615237603669770620e-04,7.642360149240001385e-04,-2.649519543249135699e-03,1.361640641221594601e-03,3.172855913720503806e-03,-1.208281986566737661e-03,4.383185957915244901e-03,-5.588323182888876861e-03,-8.650690113591451291e-03,-8.633676654660409067e-03,1.062324297280585039e-03,5.238012739796211494e-03,3.129071171786121548e-04,9.246776794399837532e-03,7.601671481396701097e-03,-2.966380433489030819e-04,3.425251248304145724e-03,1.908699644967528416e-03,-2.571297937476723153e-03,2.959754672810488408e-04,-2.597092919572342382e-03,6.581762150963398365e-04,2.897453460520656686e-03,2.335153835106241506e-03,-4.283781938618660620e-03,-6.198546301230329833e-03,-1.336797162022284940e-03,-2.980006416512331890e-03,-2.756028411717819336e-03,-8.113965888311723334e-04,-8.537038768158615107e-03,-3.582934049887681518e-03,4.893911318501433698e-04,-3.277462424905006663e-03,5.862240047753522823e-03,-3.988791434686261264e-04,-6.850453436904041862e-03,-4.627455302213709723e-03,6.486122194289274320e-03,1.708933295102435046e-03,-2.822351947291061418e-03,3.183274168152894646e-03,2.288717061350109029e-03,9.355414135933191183e-04,3.777487446953106958e-04,-6.433243707942392156e-03,4.321941131880569983e-03,-6.324410193661545929e-03,1.120413270331435630e-03,7.414205970152945276e-04,-1.020428901476836629e-03,-7.671444237817701649e-03,3.240606760644198169e-04,-8.592514646209839860e-03,3.089873081266051973e-03,-1.027299125244320044e-02,-2.800555382477188954e-03,2.873986909375290163e-03,6.627047003664203982e-03,-8.144609922706576646e-03,-5.951354028968569576e-03,9.649974967849687295e-04,-5.096492831611345968e-03,-1.795547574515694660e-04,3.448011499237905531e-04,6.627093154425062714e-03,-2.455864000548135199e-03,-3.287083243684297239e-03,-1.550634634546812184e-03,6.006470406476121723e-04,-2.618490077137455505e-03,1.724056437691741210e-03,-2.689051106347769161e-03,-8.577780708405901774e-03,1.022245853022490936e-04,7.807115103904999510e-04,7.738961234140129090e-03,1.425619930657798424e-04,2.568383238791009573e-03,5.206436854691256187e-03,-1.186691590188839948e-02,-1.039105629212569654e-03,-1.015638099600273472e-03,1.973532041916908376e-04,4.433991479025643778e-03,-2.008929008448445078e-03,5.857431009897383876e-03,5.823564532890878167e-04,2.311396336362268183e-03,-6.305142974242066052e-03,6.988171218240824337e-03,6.088703321944863127e-03,-5.138706020995236629e-03,-1.996907390650986842e-03,7.944146280797194327e-04,-3.309821750378453501e-03,-5.934095415376693453e-
03,-1.273799393663241917e-03,5.051428048171054987e-03,-1.983630854196867868e-03,-5.111233571978001169e-03,7.097775917448644577e-04,-1.953939064024605085e-03,3.575948307382505100e-03,-2.913511873168597027e-03,8.003219704849750399e-03,2.809836937749627100e-03,-6.289011054735418188e-03,8.387440037292762107e-03,5.738779310730893549e-03,3.834815122679761847e-03,3.317172383724370163e-03,2.834508280458554290e-03,-7.091431686354138057e-03,-5.988043898418606188e-03,7.159237601510418450e-03,6.264160449532618982e-03,2.773855799315257629e-03,4.255515980318130950e-05,1.352133494554197445e-03,-3.491114695608815363e-03,-4.604538791543479233e-03,-5.834659407799393065e-03,-7.709645165695499201e-03,5.751619261176082847e-03,-7.214457951019070860e-04,7.582703144232792909e-03,-7.648281222241811589e-03,-8.514464394057655230e-04,-2.594740918786949627e-03,-5.531813688193163922e-03,3.525001001556473095e-03,2.192240197718995070e-03,-1.068660525756038281e-02,-6.065712256036334075e-04,1.241563886085018248e-02,-7.702354843476775202e-03,-1.371324532785543919e-03,7.126870153794096809e-03,-9.989292003070248359e-04,1.465734534037092763e-03,-4.325389318651745411e-03,1.827467090295771014e-03,2.323632313254069773e-03,-3.858386613622910837e-03,2.789362078472717565e-03,-5.968957235289225165e-03,3.390224521252088643e-03,-2.100688033537738419e-03,4.710703568791954621e-03,4.278306342663244273e-05,5.082657807551388580e-03,1.174951491914133186e-04,3.640283966610579139e-03,-2.528563282222076603e-03,4.138735096214477514e-03,1.522733569407095765e-03,-3.773294032367390351e-05,-1.666059429133730620e-03,2.622951556617500457e-03,9.217071146441298615e-03,-6.256872366555111045e-03,4.214287638457172268e-03,1.313897616161469354e-03,-6.769324418641347891e-04,1.960191643387191197e-03,-3.353554335615694000e-03,1.138411701789762524e-02,5.762884672669765611e-03,-7.225613627682418716e-03,1.446808854312012791e-03,-6.984086745817134206e-03,-6.530640709004027246e-03,-8.696713888579411453e-03,1.739048226150680413e-03,-7.454194591410420560e-03,-2.561006630119566554e-03,-3.133217429992480197e-03,-7.605094074605115448e-04,4.647104899889015726e-03,8.265330775847741507e-03,6.074225377634196420e-03,9.103890551706792997e-03,-6.196015957680918104e-03,1.644367886085666144e-04,-7.457381167466189259e-03,-6.951363300003930669e-04,1.479838165330530980e-03,4.127591259721072672e-03,-7.705569353243794245e-03,-5.202795719112300599e-03,7.988420909263630951e-03,1.202558617180928685e-02,-4.351173283505976896e-04,-4.007983900505522482e-04,1.121166306466273195e-02,-7.238173090385671693e-04,2.541590610508520295e-03,-7.612750527827854934e-03,2.978475291713283263e-03,1.635723120362775470e-03,2.253333757972673500e-03,-4.167407253677989781e-03,5.689284208466922725e-03,-3.906116583840957951e-04,-6.096176617605876188e-03,-1.005137031487366231e-03,-1.820256774933069017e-03,5.624658777421362790e-03,7.805720109385100303e-03,-5.061361666865996783e-03,2.783801932306119269e-03,-1.189886109874216467e-04,4.423907421770499095e-03,-3.710725607429154940e-04,3.357820860003470857e-03,-8.239380048514264795e-03,5.051347330094603863e-04 
3.142614183492974025e-03,4.580731184607707805e-03,-2.652312546527405782e-03,2.075219419682315485e-03,-1.100186478082884004e-03,-9.723827404031637131e-03,2.575066234987147349e-03,5.384841151661544888e-03,1.216670234538851612e-02,-6.204458574803192392e-03,-1.560330827035589715e-03,-1.777158700202075003e-02,-2.108880661752343937e-03,1.944006200412418074e-03,-5.696188127660394414e-05,4.649512070291354944e-03,1.737580514403301926e-03,7.421330419855503950e-03,-4.168693895442152943e-03,-3.295601938831148672e-03,-1.280236273903746780e-02,1.434343696224785972e-03,-2.392769591160994933e-04,-6.838765192726338482e-03,-1.931412379166203615e-03,5.159869881829607687e-03,5.458399371248121718e-03,5.690343282577664877e-03,-5.757417289330841971e-04,1.659577342893960592e-03,-1.310182741953125471e-03,1.428887055349254906e-03,-2.622599661217795558e-03,4.155951417745295563e-03,1.480049632759986977e-03,-5.784316321515198429e-03,1.962733481716423620e-03,-2.271711911617917915e-03,-3.945201684979208947e-04,-3.536686854692647208e-03,-1.443703830213499901e-03,2.272642480310177546e-03,1.335753018500780839e-03,7.052266328175737196e-03,3.534432449033432511e-03,-6.803336228217334025e-04,6.509152615071839634e-03,-4.683178302186023247e-03,-4.336296573825036867e-03,9.739684151679204108e-04,-6.545506184945752703e-03,1.980068788354486051e-03,2.027801406807522213e-03,1.851708729897662362e-03,4.534094246715405715e-03,-3.905681819773933021e-03,7.744668855283927573e-03,2.974630309374656778e-03,-3.392823868346154202e-03,8.016461592337599876e-03,-9.725270530346047512e-05,-7.563169326193318462e-04,6.917684001591492896e-03,-2.541013565135978765e-03,-2.663060200861658549e-03,-7.282757065764544797e-03,-3.112075496821484731e-03,-3.149121411538944801e-03,-4.401327812511718514e-03,3.153946352890275081e-03,-4.171329384899911952e-03,4.319090097459759268e-03,-1.921405727657955970e-03,5.528478619440909021e-03,-1.164995012457577608e-04,-5.501569907400787746e-03,9.067881736011616095e-04,3.588015620022620006e-03,-1.813775997566281425e-03,4.376064079411086036e-03,-1.335341395005063252e-03,-5.976803963327067355e-04,-1.029897372679006683e-02,-6.431363963362372350e-03,3.036824798641232174e-03,4.595181086491578054e-03,-2.174216364043856774e-03,3.548493915490009976e-03,2.014551748901422489e-03,4.222312970812286230e-03,-7.046153974805413785e-03,-1.864953064333538203e-03,2.087855166205077051e-03,-8.200128603873230821e-04,-8.927574347944075214e-03,3.433387667007182110e-03,-1.797672561087556783e-03,-2.595458682762000289e-03,-7.221848409180110263e-03,-1.774610595615731898e-03,-4.489510668004788362e-04,2.058002110497064371e-03,-7.688879059959795456e-05,3.139141834679971214e-03,-4.502019010801310700e-03,-9.397341626781527532e-04,1.842385831342004121e-03,3.884398883317911715e-03,2.996768981404409043e-03,1.149706689914552042e-02,2.646013981341638170e-04,-4.184541738729151590e-03,5.156327638658125996e-03,4.668511813390504445e-03,-3.329431578785371076e-03,-1.672493886289466453e-03,-5.374624037203940170e-03,1.537683820727790826e-03,1.170216375485991873e-03,1.340738807389636224e-03,6.183230854672156461e-03,1.230434386465411082e-04,-5.873188190069942097e-03,1.396190212310793194e-04,-6.530949169477499178e-03,2.878333683213741396e-04,-4.244011901631249487e-03,-2.456840737313629487e-03,1.155089584214667297e-03,3.155313142267725246e-03,1.678672379630671764e-04,7.054742508803353747e-04,4.679785211607967801e-03,-3.236336834416839257e-03,-2.174518722230330459e-04,-7.277772739689204913e-03,-6.355306796516362218e-03,-4.633823463348698407e-03,-1.169575000610006358e-04,1.133139
912680426742e-02,4.813682884707705216e-03,-2.613425465675201041e-03,6.539026084730941097e-04,-4.187518961077689970e-03,-3.761300592505101379e-03,8.228115601129926474e-03,-5.722992428837077490e-03,-4.972505849585609657e-03,-2.982369238691246361e-03,-1.343058383730533515e-03,-1.562961232590810191e-03,-7.354740773442376353e-03,-1.652969277815311564e-03,-2.042231178824823259e-03,-3.497217889819966789e-03,-5.748430178107714027e-03,-4.592834673681226755e-03,2.662245946189553073e-03,3.394459043851332999e-03,9.578747628086059377e-04,1.666343627668800975e-04,-3.334243448992499619e-03,-4.713810348270570287e-04,-2.852455727758769315e-03,2.612373604902041516e-03,-7.673853173524513779e-04,2.422523222782351372e-03,-5.275308200940286366e-03,5.059396985172148579e-03,-3.462658525866039051e-03,-4.075775973991517878e-03,8.678814275670371486e-04,4.561221643798476187e-03,-9.305794749015524128e-03,-2.607609175441519267e-03,-5.038548497488060628e-03,2.726096283110405680e-03,5.011481389929785431e-03,3.512265839163818387e-03,5.942408942249418832e-03,-5.218944201418119697e-03,7.493009737472324655e-03,5.514601370377835224e-03,3.194103701512672391e-03,5.512315730222545028e-03,3.238507437882067689e-03,3.648434570913066484e-04,8.509103202330661406e-03,-8.246883446558590444e-03,-1.024893839489487112e-02,-2.733184362372286739e-03,2.469246306811409206e-03,1.925283422997558114e-04,4.579012750278390990e-03,-1.157782856200276688e-03,-4.810234671465137002e-03,-8.050185667413117460e-03,1.613419692316277491e-03,-7.795100349556878354e-03,8.712181563247664920e-03,8.524493074313348347e-05,4.576686338366259222e-04,-2.387292284579733970e-03,2.147974686544129683e-03,6.349036893806428786e-03,-2.283921995143190029e-03,2.806463337017967279e-03,2.863200219540156660e-03,1.088294300083536559e-02,7.868283876667629337e-03,-5.831303972972591738e-03,-1.482887067885578459e-03,-1.813346229063333374e-03,3.323435242834911485e-03,4.575510050372930300e-03,1.464608262951041205e-03,-1.186488584360594414e-02,3.267344627609344420e-03,8.052941362750046239e-03,2.261233096058886619e-03,-4.041914135066280826e-03,5.122709610846975029e-04,-5.017026329966001522e-03,3.535647993197929551e-03,-2.047555309032764604e-03,2.797997970220225759e-03,4.211595602592608026e-03,-3.793439868927688495e-03,4.470389653883343264e-03,-4.211106049089682890e-03,-1.137370521993824428e-02,5.290943291091878241e-03,-4.096093213986120793e-03,-5.031929083331735698e-03,7.200812678685547877e-03,-7.264184256412139704e-03,-1.434326209973000275e-02,-1.514626776087645845e-04,-2.219392786545677267e-03,-1.463048996599992414e-03,-4.053657703687514624e-04,-1.004603346443318224e-02,9.056208175289997848e-04,-3.150259304844786005e-03,-7.238705103334374487e-04,7.585087611100534231e-04,-1.912322712599790310e-03,6.461252331778672038e-04,-1.913971272393558219e-03,9.252755702320722073e-04,-4.629261558484578428e-04,9.825182714889815760e-03,7.222205256587373298e-04,-1.216009031620651076e-02,9.881369655915519471e-04,1.364703585329216399e-03,5.759754688841809724e-03,7.169579165118823705e-03,-2.240884812137656015e-03,-4.477433173282855360e-03,9.093065729067283109e-03,1.850852215336430380e-03,2.327624465210946948e-03,3.394901937820358345e-03,-3.358045360364996131e-03,-1.381553933164729291e-03,7.707964600024460981e-03,3.514086497570210760e-03,4.281422778390515244e-03,7.828119492785430436e-04,-5.972117705417205429e-04,-2.611819984294149691e-03,7.954526762071758086e-03,1.461256391930043814e-02,-5.111448332073311764e-03,3.013346145594547783e-03,2.745009309101944205e-03,3.822316970163736755e-03,-2.304052066320796521e
-03,1.819387001937703254e-03,8.681812968761712032e-03,-7.186181578903883850e-03,1.548780965462258352e-03,3.063746835104731031e-03,5.828364666235430480e-03,-4.581735430233578772e-03,3.069806874476072728e-03,-1.381274820126948818e-03,-2.212757352342087022e-03,-1.559547961193642794e-03,1.333324195883999874e-03,1.510434086818558154e-03,-5.583627415193806931e-04,7.928700951092595636e-03,-2.419930661469483612e-03,5.644507819726295519e-03,5.864623527190471631e-04,9.099654157059933191e-03,2.139067933657293977e-03,-8.392565919689192258e-03,-4.328648106712544227e-03,-5.766461386443666633e-03,3.929531466601317476e-03,-1.966766499434113424e-03,6.314275073691699713e-03,-3.210014875894512485e-03,-1.040859144638865523e-02,1.119132793236964074e-02,-1.054290657267174455e-02,7.041321658356088087e-03,-3.035819692174070595e-03,7.055947771939077677e-03,-9.399000896527598056e-03,-2.266033973034787489e-03,5.897585412961494104e-04,-6.885640207558419400e-03,-2.603044039117264220e-03,-1.370707213784519689e-04,-2.850678835476213647e-04,-3.690959489576226514e-03,-3.821195149823497679e-03,6.405072716310884241e-03,7.085368686146854486e-03,1.290123304523562190e-03,1.593496333528333225e-03,1.414416199796407134e-02,-1.918277117392749951e-03,-4.173651330197507801e-03,-1.173762338085067173e-03,-1.306456979425964400e-02,4.690743765792134388e-03,5.832907095212751768e-03,2.335571321872479886e-03,-3.238742579250383953e-03,-1.265811654649455498e-02,7.698268961411981574e-03,5.392748315877085932e-03,-5.264377226495262421e-03,1.007007714685656703e-02,1.890791319563662164e-03,-1.814273918229339063e-03,-3.127743961090528879e-03,4.159197591680876063e-03,-3.373237224496337602e-03,5.268427224152034613e-03,-4.038497564506410634e-03,-2.389806245884341495e-03,-1.550152692204049892e-03,-5.067363165590225635e-03,2.509177057382355400e-03,-2.019067729400675533e-03,3.423687435768325849e-03,-4.725548923943446479e-03,2.487289472976527901e-03,8.703650015570097570e-03,6.490630597936202124e-04,4.845026000790537447e-03,-3.242784684170162655e-03,-5.901755764587386308e-03,-9.091569478638193832e-04,5.323610229198898897e-03,2.498768612816429716e-03,4.644067818376720544e-03,-8.136096017144621440e-03,-2.580768773955344686e-04,-1.540941862416426241e-04,1.443645656727465115e-02,-6.635026712742698660e-03,-3.742775233894444446e-03,-6.091387611659439434e-03,-4.984545314591707667e-03,-9.840282694928507642e-03,-3.264836989572343900e-03,-6.174997156529416159e-04,8.843870145793521556e-03,-4.850134225546925623e-03,-1.941998501473035091e-03,-2.343674250739177702e-03,2.453816960292567410e-03,-5.423792832554978930e-05,-2.127859630806493586e-03,5.416961226425352728e-04,-9.728244324268931625e-03,-4.993815726536316758e-03,-1.712186550781611815e-03,4.490061769736360640e-03,-7.757456518925301414e-03,-4.087394498717395776e-03,-8.858277409563143287e-03,-2.987374484789706748e-03,-5.450104479222569499e-03,8.781338117564872411e-04,-2.851078635254585859e-03,-4.190776304868405050e-03,5.628686976394839045e-03,-1.108422290118643702e-03,4.370910958661652218e-03,1.423425013224975883e-03,-1.018372135069772234e-03,-3.446271507711200655e-03 
-2.547151733112047191e-04,-6.716633619418716254e-04,5.772980449823043002e-03,8.796043927185092509e-03,1.015356721382761930e-03,5.073818050654749397e-03,3.419517689449581583e-03,-7.940830583572674843e-04,1.040743879410650601e-02,-1.523280638553446134e-03,-8.610030557581138666e-03,3.180633465960219178e-03,-2.640976632199741950e-03,-1.894023065489357314e-03,7.831042721597025053e-03,3.810877210239091168e-03,-8.194645683656416602e-03,7.067181794745893298e-04,4.668664301887685852e-04,-8.393565717738929455e-03,1.480174078326165332e-03,-3.245209345033943892e-03,4.969258887152384375e-03,1.071028742254746076e-03,6.210172997537465739e-03,9.162559229868102659e-03,-6.441470733811419103e-03,1.320511186925893247e-02,1.036825067472401480e-03,1.454179547350589871e-03,-4.066398910237828401e-03,1.534926562913764905e-03,6.341739412698347818e-04,1.543642314690384446e-02,-1.394161447871980486e-03,5.887026519068523471e-03,-6.075588610736179116e-04,-1.996226564246843943e-03,-6.606173009407277814e-03,-3.391266750869305086e-03,3.177274144323656221e-03,-8.361215421288365054e-04,5.780043391732344812e-03,-8.288677935849706260e-03,9.827559194732665357e-03,-7.389116668433659378e-03,-2.600421020549096175e-03,-8.823069789587991568e-03,-2.658381895251876997e-03,6.023170002550553952e-03,8.105537857697601642e-03,4.331961095521473168e-03,3.218774841658385549e-03,2.108852716399813639e-03,7.762103314801711840e-03,3.072672237162065410e-04,1.570525809645520736e-04,4.600140715340291300e-03,-1.324880784773083174e-03,-1.484509701945288529e-03,9.985524436545177573e-03,4.184991208964742096e-03,4.125822156282962609e-03,-2.889733450672048089e-03,-1.172302237919703970e-03,8.353648520559682142e-04,4.252434048390406514e-03,-8.187260320701061217e-03,1.242182580739763925e-02,1.052038192212580506e-02,2.270495564658059004e-03,2.196849867215298988e-03,-9.928723045285737213e-05,-1.285140811949277498e-02,-8.072390120711019773e-03,-6.324531001051607575e-04,3.521640710164048154e-03,3.156092603560305999e-03,-5.547707205275379840e-03,5.189212182284598128e-04,-7.461687323805738133e-03,3.373124596324776256e-04,4.793116964067129556e-04,1.994878639561677332e-03,7.173282871269242092e-03,1.289676371064707406e-03,2.597720021946314101e-03,7.837417132063029951e-03,4.650368331938409261e-03,6.405329862096416360e-03,-1.385984228336468701e-03,-1.249166162557835587e-02,5.269923447063669213e-03,3.208935556781097355e-03,2.395852266171950330e-03,4.313551842350152299e-04,-8.872338937390451949e-04,5.507226966071284433e-04,-4.184326641760087277e-03,-3.230659567827799412e-03,-2.784988847143294276e-03,1.370921121675226437e-02,-9.536780096378819765e-03,-3.545587723006887650e-03,-9.191118954027313603e-03,-2.760634389711460676e-03,7.759576451225769222e-03,-2.831694639506939209e-03,-7.644856288162991086e-03,5.760030348700058596e-03,6.559233058432326936e-03,2.443349180665215035e-03,2.608592051410700205e-03,4.789331412448557916e-03,-7.404990599285764426e-03,4.692004263985753883e-03,-2.532096155243960004e-04,-2.004523155298323055e-03,5.839620766725003159e-03,-1.107263034037167006e-03,-5.265988257869685767e-03,-4.188627257277129610e-03,-4.810744672195038174e-04,9.625769755339091136e-04,2.251504553046397399e-03,2.367335069579594616e-03,3.096730888448737552e-03,-1.473617630412024215e-03,-1.054065516395338506e-02,-1.257934927352171665e-03,-7.524415884945546106e-03,5.084523566240734503e-03,-8.155343847083171629e-03,1.967335568975044774e-03,5.196734875323539074e-03,5.061277572974594297e-03,5.130738069138821868e-05,2.039242474739686936e-03,-9.449022689708145748e-05,2.788068845264924777e
-03,-2.255789862511881159e-03,-3.523683902332448888e-03,5.162764525156939011e-03,-5.067426246523755277e-03,8.099245644530387103e-03,7.620391619718497669e-03,-4.380806750429111322e-03,6.100238161272916043e-03,6.362193725899436408e-03,-5.099437797519747358e-03,-3.119174545146135430e-03,-4.696226166302186825e-03,8.198347922159853612e-03,-4.819549887323125795e-03,-1.255689072327437162e-03,5.544270721541624640e-03,6.624520163936525874e-03,-7.086840009646746576e-03,-2.685903822296661220e-03,1.153614960045037897e-03,-1.634306348322500701e-03,4.321195470990273589e-03,1.340337369759510115e-02,-2.834222345230195082e-03,2.729181498064767717e-04,4.955979881833848244e-03,2.072237204253588321e-04,-3.383487466154530467e-03,-4.524367032019859479e-03,-4.142121178499818569e-03,-1.652742118139071889e-03,-1.077670909381357733e-03,-8.106387537364439025e-03,4.552984177508374798e-04,-1.592519932079896131e-02,8.128176960592791919e-03,6.423898391907002662e-04,-8.348937823138127823e-03,-6.669680521429292852e-03,8.743049849750291966e-03,-4.162892301030729492e-03,6.505731374874030746e-04,2.757300023080664405e-03,5.483934150030617474e-03,1.229458278970633860e-02,7.947503037018960673e-03,-6.797626859293612583e-04,4.655630872890182889e-03,-2.712748581684432028e-03,5.183563262491403152e-04,-3.147795398279007104e-03,1.992404431279255934e-03,3.026775120512988256e-03,-3.875227562652760072e-03,-3.187030065914940591e-03,6.198340860944472064e-03,1.393679245666717970e-03,5.511040282119601154e-03,-2.713708697620215941e-03,7.429075975696715524e-03,3.407941919869586587e-03,3.404097341236065038e-03,2.595578109189619880e-03,-1.516296495405925144e-03,-2.889063495873662752e-03,-8.055167456566758588e-03,-1.383275676865010351e-02,-4.978028353836170226e-03,-4.300441530613370415e-03,-1.064466997556103890e-02,6.504255675607808866e-03,2.315785073368729387e-03,3.166839072259534579e-03,-1.844278359332984351e-03,3.041647622554793061e-03,2.988594373367533738e-03,5.879491258298110909e-03,-5.244814978904598426e-03,-4.661879144570280943e-03,7.468617170985798748e-04,6.688969216467286491e-04,1.278631027529934837e-03,5.790684244803152511e-03,1.471939494690965074e-03,-6.452830447756082756e-03,-5.812629247561366952e-03,-3.157656507731977560e-03,1.676701566480133911e-03,2.829544902845928037e-03,5.644929323627443339e-03,3.768535200115291899e-03,4.530250527168959949e-03,-1.260553648641607851e-03,4.057897638423138553e-03,-1.213082688458972432e-03,-3.002324010842726681e-03,-1.668652223007223217e-03,4.021133095511225317e-04,-3.990857636496519009e-04,-6.753887520062753363e-03,3.914773087914759538e-03,-8.038504357618133159e-03,4.951005094767769663e-03,7.386169468030291977e-04,-2.323463716970158160e-03,9.196462584505968665e-03,-6.449547319604501637e-03,4.972383018449303207e-03,-2.968926439058019312e-03,-2.845876735350456096e-03,8.381273038490231683e-03,4.733020234425831287e-03,1.673938520605799387e-03,5.496333900930155079e-04,5.448637189509509746e-03,3.357909381065931481e-04,1.140604821311714959e-03,4.996325147441576554e-03,9.606486163152971186e-04,-1.271252531353911667e-03,-4.078342069784502387e-03,-1.769595278632426485e-03,-9.337878102544407838e-03,2.563080804009519251e-03,-1.417988641765928178e-03,-1.151283957717010422e-03,-3.228254472367330860e-03,4.061597371595377856e-03,-6.204722189915777737e-03,-3.610052316465403241e-03,-7.699148019057782538e-03,-2.677960932598096901e-03,2.790310350517840286e-03,-1.132921478070998645e-03,-3.200290028404929182e-03,5.980206124488184581e-03,-1.129613688032478445e-03,-3.938331009830410567e-03,5.486880624012376029e-03,1.098713
389997863464e-02,-5.798492324372717631e-03,7.573421218260433484e-03,-2.857472124502598125e-03,-3.119383420712158565e-03,-3.966253846357302264e-03,2.888241124476961449e-03,-2.096816102600717897e-03,-7.398365954638767839e-03,1.991875788388644021e-03,6.404576775753736788e-03,1.908212767962579005e-03,1.342509953247541317e-04,-1.358738116123933779e-03,4.917003402876938276e-04,-9.146284618977533476e-04,-4.571302994130216693e-03,3.181417779264948328e-03,-3.159192483166733956e-03,2.213577933786844607e-04,1.666397755729797986e-03,-6.031037220217591067e-03,-3.661713361319055140e-05,-9.174208919892044151e-03,4.736789137539826355e-04,-7.413089150062985785e-03,-5.731584837430827445e-03,7.524970194321719923e-04,9.885443086424812020e-04,4.117313223648152130e-03,4.829333094982559034e-03,-7.607886564005498023e-03,-8.549807968987434975e-03,-3.568680519750467781e-03,5.474278574947148875e-03,-6.950184017855479991e-03,3.169319164148480701e-03,1.590963173176672147e-03,2.806667969668652767e-03,1.057953929150078423e-02,6.597179175989519723e-03,5.009428261843648124e-03,5.629064144750504581e-03,-3.967612692954911036e-03,7.878804293197210970e-03,-1.027886684227335154e-03,-1.178814639345558300e-03,-6.002564456762576710e-03,-1.206629280320670238e-04,-7.257783431342481988e-03,4.785173392270955506e-03,-1.043655071265886525e-02,-1.361807535686472862e-03,4.501802623922115422e-03,-3.725819079413300594e-03,9.927008431212082720e-03,5.900961091545998786e-03,8.018505521091508909e-03,-7.553519901950950417e-03,3.407375382055760106e-03,-9.740294537307864903e-03,4.973109637481169278e-03,3.905606101913829641e-03,-6.370748057762950535e-03,-1.890842987103068121e-03,3.460840689428607269e-03,-1.551393161803355522e-03,1.569475755395306120e-04,1.988212349283774265e-03,-6.374080700748558856e-03,3.801526612476701251e-03,2.374387679117570711e-03,-2.697945224793522109e-03,6.824847507683536908e-03,1.764784058053116224e-04,-1.624006256559018275e-03,-3.855132483966721996e-03,-5.609997409121040895e-04,-4.226166043199257490e-03,7.721664496759517680e-03,3.121444562012833102e-03,-5.011436598538718777e-03,-1.128283193212868338e-03,5.740864475186450988e-03,-4.915073843568402686e-04,5.622859910507029300e-05,-1.785809342840660556e-03,2.897579450608279376e-03,7.850601890342227482e-03,1.270829118146195964e-03,8.066406590703437476e-03,-4.359346503060059484e-04,-7.470136450563174060e-03,1.490898691226252501e-03,1.386084040397755988e-03,2.169978680210390542e-03,-3.781877549805466240e-03,4.963930397978820117e-03,1.477901060188617759e-03,-4.765859862880773672e-03,-4.084211448506982881e-04,-4.668997638273959978e-03,7.812854758946438400e-04,-4.808788190437309623e-03,3.862245734605453118e-03,2.032227896452960369e-05,-7.050067888113921086e-03,2.961458474116585547e-03,1.916174248792601844e-04,1.672710540906295878e-03,-6.847839196171963102e-04,1.822568427589880491e-03,-2.832519954645765966e-05,-3.599350218015398995e-03,2.875154292303381143e-03,7.379457597946209330e-04,7.145182617774635189e-03,-4.524890253684713155e-03,6.362729124047868242e-04,5.752527273999648826e-03,-1.382237888603932759e-04 
-1.124876206957930655e-02,-4.839808964600371063e-03,4.622766485022814062e-04,-1.260566528387991637e-03,-1.738204871243553351e-03,-8.037306152954363722e-03,9.194986354991751751e-03,-9.590676379571474314e-03,6.440571545037593586e-04,2.733436404831412429e-03,-7.606157258343903237e-03,5.020919853334638618e-03,-4.655939223611011216e-03,-2.972776102209602218e-03,3.313749583640460562e-03,2.224129162424998835e-03,2.394254889695008378e-03,1.158975321747838247e-03,-2.885360534431793533e-03,2.605996752716148055e-04,7.124748218919832107e-03,-2.053703207305268043e-03,6.804356461051773485e-04,-4.639306909835038198e-03,1.859720951856394522e-03,-1.703372731898730947e-03,-1.693249645068777660e-03,-1.809305659280146302e-03,-8.829104194544484851e-04,1.866800221015972399e-04,-9.866215584554034745e-04,2.188677078196391620e-03,2.879328373041612475e-04,-2.682476059972378670e-03,-3.534963686309243718e-03,3.052520788700756587e-03,-7.528276525793905355e-03,2.161155896774386628e-03,6.716054159537410140e-03,-4.533524144853502170e-03,2.899526361279948158e-03,-9.037419127072753239e-04,4.078531757997948953e-03,4.863752202116412848e-03,-2.459181334904017233e-03,7.473254785760025977e-03,1.686901491762226842e-03,-4.021423871080708964e-03,-4.523594339284134347e-03,-6.338114913069613568e-03,-4.241021066176959078e-03,-6.735379881043489572e-03,3.462534857458098199e-03,-6.125352492943937112e-03,-4.856032201198902734e-03,-2.502650615525389482e-04,-1.588999242885026663e-04,1.537555082101952101e-03,1.257815071916875468e-03,8.363436922628445999e-03,1.413413275917918370e-03,1.443100611751601528e-03,1.227828629394259402e-03,4.112075593249825758e-03,1.703154119223564090e-03,-8.251943423468178357e-03,-2.123276745156073893e-03,2.041977236585649016e-03,1.111892238322704195e-02,2.905552864886087818e-04,3.913094971897453783e-03,3.354796759564547819e-04,-1.525508532030552932e-04,1.457928077607829713e-03,-3.363210347451110142e-03,-5.035543777328632849e-03,2.185882565920128570e-04,1.502067065711423929e-04,2.317346989299565087e-03,3.285373778519892936e-03,1.734951590848624135e-03,-8.632337116337469515e-03,1.131546554343121347e-02,-1.977392966740926586e-03,7.339098315395913008e-03,-2.564441696739236387e-03,-1.152824915299873257e-02,-2.438053480892983776e-03,4.985014078770516437e-03,5.485331387526446020e-04,3.480880222703979822e-03,9.103948980925731863e-03,-3.814253351064840914e-03,-5.003704970602884834e-03,3.477044148486187636e-03,2.566920344438130674e-03,-1.929006419292755717e-03,-4.651609488390726424e-03,-7.350310310150605592e-04,5.797500146479690926e-03,1.711068599850596762e-03,-4.106928178207503582e-03,4.335698114869076733e-03,-4.846399612233640020e-03,-5.884323175356012811e-04,-3.350096069084913353e-04,8.644251152730868032e-04,-3.729549110282860507e-03,-1.961770478869224923e-03,-3.348115477509452584e-03,-1.020349048695025265e-03,-3.613266561193544070e-04,5.093651032654947577e-03,-6.428641959271995734e-03,7.798396812046604412e-04,5.524635605341114003e-03,-1.081025138248156844e-03,-1.510771631738442537e-03,-9.322141249428832557e-03,-1.242015841643191905e-02,2.229186950791529464e-03,8.041727471777018743e-03,2.415271436499475972e-03,-4.210946958879101254e-03,5.152112693370743124e-03,-9.734657171260497829e-03,5.745932222987784306e-04,-5.563446683211935480e-03,1.061220901351663101e-03,-1.741982039252988439e-03,6.542956644255739848e-03,-9.256325946619150397e-03,-7.754069024362640096e-03,-1.608828960801651428e-02,-2.040803611933625188e-03,2.886660618919088383e-03,3.954703881414176406e-03,1.458921312179459326e-03,-3.931217575960722563e-03,6.10166286
2650258429e-05,-5.495933316856586696e-04,1.860540588376265181e-03,1.239041914314882066e-02,-2.736759474246611558e-04,5.746638713482040035e-03,-6.176322660540177825e-03,-2.236418441383505266e-03,1.659781649635741615e-03,-5.580437207106984064e-03,-6.847135447641573835e-04,-6.513359884028935808e-03,-9.108846298468826347e-03,4.864613365546757739e-04,2.059993394531378468e-03,4.954140838990623609e-03,1.971937264187807910e-03,5.876068563109177259e-03,4.114429276223181657e-03,3.627316338057666716e-03,1.549202070657786871e-03,7.101177272628884929e-03,-7.839750264017420270e-03,3.047337505227495479e-03,-4.588733461077588657e-03,-6.833322243331108915e-03,-4.469277106007005923e-03,-8.598971787908464559e-03,9.566198046237130593e-04,1.200165572465749933e-03,4.764334034356834495e-03,-4.888885947976940802e-03,-4.002993086768595610e-03,1.691162772503698557e-03,-2.737786205024123846e-03,1.786831877802957935e-03,1.795406574173771504e-03,3.275663022948876579e-03,-8.348298891307904092e-04,1.530132711209893298e-03,-7.464171346062912181e-03,-8.692468360362836863e-03,-4.506893608391795807e-03,-4.862846142852944413e-03,-3.866243725751281231e-03,3.669153033044822605e-03,-1.848418432736705086e-03,-2.880485786352649999e-03,-2.915525160575820735e-04,-1.318793569226332785e-02,-1.394072949050070625e-03,-5.372863899304842691e-03,1.862034027471354009e-03,4.621730813391618925e-03,9.241492336026304460e-03,-5.508217377307421565e-03,4.555442990543067641e-04,-1.224163941889620886e-03,1.266615332201277352e-03,2.925469769178205490e-03,-3.326768477495867411e-03,5.078338897128605728e-03,8.440089961240881608e-03,2.108786640038037407e-03,8.503029782361836179e-04,-2.060476231192840801e-03,-1.884681294464776106e-04,-6.681685413839695727e-03,4.615736754486229193e-03,9.488990873200776541e-03,-1.982333901834598070e-03,3.433627875277910340e-03,-1.978164603447171276e-03,-7.882339326363924092e-03,-4.202945318516775053e-03,-5.091950552487611685e-03,-1.207007467950147990e-03,-1.637639009696353941e-03,1.944147783957307076e-03,2.002437652837973491e-03,-5.064079746013860905e-03,-2.773959398042491649e-03,-5.897740695053273023e-03,-2.448656732328041100e-03,2.944055213707415805e-04,-1.517706339193137383e-02,5.860175730719561701e-03,2.613527241446486106e-04,3.878769618936641749e-03,2.059513013760325140e-03,1.205116110471414134e-03,-4.887290540198400632e-03,-7.075603434572998868e-03,-2.819027503024977999e-03,-1.914879707679557289e-03,5.691920779666054427e-03,-7.378244400187222935e-04,-1.420921820405665614e-03,-3.626578167976883364e-03,1.201035972435350423e-02,-4.906743025549999485e-03,3.932584306128362496e-03,-1.777389326172810211e-03,-6.318110375232678308e-04,9.521540529230239705e-03,-8.824696410137984268e-03,-1.200416273123392332e-03,3.320459573137990623e-03,3.801069831814884569e-03,-9.752274557013243918e-04,-9.125303460511564185e-03,-4.536796165818859569e-03,-9.999249188259309168e-03,6.938361029093934387e-03,-6.152821733195874832e-03,3.683996856662609051e-03,1.616627039716013719e-03,-2.408902773219032730e-03,-5.312263774625015401e-03,-8.572545068078791727e-03,2.375452545196470987e-03,9.804557100023464974e-04,-3.838069821615324763e-03,4.255418579784894101e-03,-2.789021395078679008e-03,1.530642581617042532e-03,3.726725138642638345e-03,2.078118804878612870e-03,1.352450176752638617e-03,-5.196298745601772833e-04,-1.423759287919127261e-03,-4.845297977733076938e-03,-1.485567575239265864e-03,2.697360646971971865e-03,-2.938789538700812502e-03,3.714119045097583754e-03,1.431591525898904572e-03,3.748839426021094608e-03,1.415198373244814503e-04,-7.79180427802245
1680e-03,9.473148339033728854e-03,1.927736971300078334e-03,6.883845199315888862e-03,-3.165134680478498205e-03,3.835019974220281111e-03,-4.598728010878624234e-03,1.521387842445294644e-03,-7.822459834252600314e-06,-5.105847164354809439e-04,-2.958273678463499336e-03,4.897520707589526727e-03,4.661132450329972220e-03,1.499211638419051425e-02,7.372207172651750080e-03,5.195575563697216528e-03,3.985187746688759712e-03,1.234630607546129827e-02,-1.059003610727189508e-02,-2.311783932389759998e-03,1.229679279522093421e-03,-1.017021863223404980e-03,-2.050560248438014401e-03,-4.625719153880955212e-03,2.204178005289405412e-03,-7.912284672283444523e-03,-7.606713126040171545e-03,1.201228158217168681e-03,-4.118494612737899911e-04,2.737034377584910555e-03,-1.572428060778338589e-03,3.539445913547786468e-03,5.019618364998356189e-03,-7.659238810440686419e-03,1.714364622127182141e-03,-1.483151225698872396e-03,-2.628410520883355840e-03,-9.023187212867515069e-04,5.179392840549373704e-03,3.118984542428618507e-03,-1.033750378448190094e-02,-4.771544475682464731e-03,1.060535730057596661e-03,5.008442622041847333e-05,1.843953327402549579e-04,-1.201777551104539184e-03,-1.589203679475840580e-03,-1.916639507681985844e-04,-1.234423579142446140e-03,1.951964064565846202e-03,2.687351594820974851e-04,-2.933200032952242996e-04,-1.132659271728271924e-02,8.392740755278761504e-04,6.442363077420354411e-03,8.329771804309347993e-03,-6.069254964702078259e-03,-7.685947589946045405e-03,-5.094731384430743661e-04,4.405973921953247781e-03,-3.096999351132078727e-04,2.042570971285039439e-03,5.467052801695895456e-03,1.692236894578483513e-03,-1.975703417832197755e-03,-8.438191593447996741e-03,-5.696579634455688733e-03,6.646361232767426797e-03,-1.877185950339483288e-03,-8.297864626384121936e-03,-5.967152327195107686e-03,-4.897801633273540615e-03,-8.803497355098511765e-03,1.511620457089167925e-03,-9.678516375898277638e-05,-2.407988878570732020e-03,-8.600167291286332436e-03,4.459983117945889863e-03,-4.687729780778334202e-03,5.299265792052820084e-03,-6.670735441499590912e-03,1.447443559721333598e-03,7.077598862016420966e-03,-2.942898530274584874e-03,4.097516124589721184e-03,-4.551565394563641454e-04,-3.439202851904584780e-03,-3.343257749419209501e-03,6.309590524346224763e-03,6.166370250526503199e-03,4.933443990668785533e-04,-1.363098531667269108e-03,1.192914779395903282e-03,8.657265732644822340e-03,4.977793418109297506e-03,-5.600844997507152187e-03,-1.030675416974552970e-02,1.051850632296336824e-02,-3.043839059493942331e-03,4.274258727379227560e-03,-8.465845497086312054e-04,-5.469170946968945728e-03,6.986411486675783537e-03,-2.867273529174211655e-04,-3.752043482133828566e-03,1.281692028775720943e-02,-4.380250196158686112e-03,2.353466451978139182e-03,-7.274866727010147888e-03,-1.197611147565681581e-03,7.969394044051253618e-04,7.490855124837117462e-03,-7.196881309849752217e-03,1.663479075064837635e-03,-2.881678322335259543e-03,3.869131351791229873e-03,1.903748657697889668e-03,3.758424197364690789e-03,9.099502796137294959e-04,9.093595359486825180e-03,-7.167192534964036792e-03,-5.154959232166138501e-03 
-2.648809726794285903e-04,-8.155160665263545480e-03,2.365659583613559814e-03,-5.218357779937588328e-03,3.186654743833060820e-03,-5.377448395449040651e-03,-3.536072655983590386e-04,-1.142752615837134937e-02,-8.289889623392094177e-04,-5.543004656992529637e-03,-1.868716651336308756e-03,-2.711708616218025999e-03,4.635442032101392759e-03,2.745735848387017772e-03,3.437312387800757311e-03,4.271013885640602241e-03,-3.951764101777642708e-03,-2.676376001135842998e-03,-6.358675663407040721e-04,2.426033757055267616e-03,-8.933194583005173856e-03,-5.566541581708497714e-03,-1.305836588448629557e-03,4.037311847339718381e-03,-6.751734730870929437e-03,4.721774726430453747e-03,4.344611424670514858e-03,4.119962888058180503e-04,3.563940203920082459e-03,4.048006739088045799e-03,1.630878977580670162e-03,-3.919296918230465686e-03,5.823109836392917089e-04,4.593981366793752942e-03,-2.692758068100637066e-03,-3.675377235122607219e-03,8.271180602872196114e-03,1.208940249553943844e-03,-3.752525734172275102e-03,7.761495695422881806e-04,9.587088870540792784e-03,-1.622395345110013267e-03,-2.757032650731382705e-04,7.848890108084923684e-03,-8.019211958274347074e-04,-6.911446578289192483e-03,-7.518851204362380107e-04,6.197685207155897123e-04,-9.935714764030328591e-03,8.829035205957793894e-05,2.110447657943725647e-03,2.300702825475489568e-03,-1.855909563240710570e-03,-6.830441396616381044e-04,-2.691417301303170983e-03,-2.504202853104747079e-03,-3.984238044622387571e-03,4.466162109663832785e-03,-7.840017632069601859e-04,-5.627698081196356485e-03,7.998113871603271599e-03,-7.662891104143901636e-03,1.645278175063331905e-03,-4.789138544488304346e-03,3.851196388559762363e-03,-3.515918111428692483e-03,1.263954553321934744e-02,9.680908044989547682e-04,4.723230666028892764e-03,-4.121706936879678158e-03,3.708099979616085156e-03,4.589761393335423621e-03,4.013354147814802292e-03,3.637847551856687126e-03,4.643234648827653487e-03,1.607143147604082282e-03,1.728907359090608390e-03,-1.067417879282844990e-02,-1.256585748111948859e-03,-4.511391531105635178e-03,-9.292755576028556216e-03,5.320923231305718475e-04,-1.453958190494998885e-03,-9.008418298766584847e-04,-5.223133043547347181e-04,-1.539453217953967463e-03,-4.230000435355939052e-03,7.253310370387897445e-04,6.495462824278466036e-04,2.728822329407001347e-03,1.065946730045278044e-03,-8.847485311481940919e-04,-1.080588656567854286e-02,4.953934214250409196e-03,-4.216025730549366642e-03,2.076854728594681055e-03,-3.064941966174003496e-03,-2.840814824967962098e-03,-4.353482430081837863e-03,3.043630316495489398e-03,-8.344836902355411536e-04,-4.328590934530642323e-03,-4.659619475077968111e-04,2.080616190571182942e-03,1.002270716265330668e-03,-7.741147745967460851e-03,-4.916126559878137604e-03,7.020063222871286528e-04,9.166466161067145582e-03,1.737363375830807896e-03,3.338460154033481408e-04,-2.407039392608261580e-03,-5.079227575576011726e-03,-7.597062060987661014e-03,-4.349073040079740049e-03,3.210691701065341522e-03,-1.632029612920569452e-03,2.460873513731587359e-03,-3.206213450510948717e-03,2.191923589353146282e-03,-1.036982645725448467e-03,-1.199777649602545030e-04,7.293101563342042036e-03,-8.322454174069205193e-03,3.961178462228306940e-03,-7.571968020361734969e-03,-2.035944124377194776e-03,9.844698258956393472e-03,1.055765765720028358e-02,-1.666049666605338812e-03,-1.592962476844749084e-03,-6.887563709106693695e-03,8.593401579491864728e-03,2.349136867301928125e-03,-3.050797058578504518e-03,-2.539450679450579954e-03,4.873986413711589799e-04,6.838685458568391322e-03,7.269250597490231440e-03,5.9508
58421429880418e-03,1.368517199742585240e-04,1.891250923989489224e-03,6.494830783903081384e-03,2.188235857683378492e-03,-3.291902055741349666e-03,-3.458826354390122631e-03,4.052199926679041046e-03,-3.369765706690580013e-03,-2.479716846805033945e-03,-7.366395735582480751e-03,4.353672421983080477e-04,7.726865177512923927e-03,8.327078543192135279e-03,-3.906615234644927187e-03,3.653712875054581434e-05,-1.128586057396716892e-02,-6.160851561609938416e-03,-3.931436080744420657e-03,9.532378283768756094e-03,-4.909496042012609276e-04,1.614120557064966125e-03,2.382998725860736855e-04,-4.613508273597421517e-04,-1.479694469821714493e-02,1.073331195778107986e-03,-2.783642930090029100e-03,6.937011944179846592e-04,1.782675753387991807e-03,6.750380743693913796e-04,5.744256629591697888e-03,-2.709282982512265479e-03,6.594184630554780595e-03,-1.595088842847018672e-03,-1.747767709679262085e-03,-1.996501932325154931e-03,9.302515814720128950e-03,-2.454198481526663912e-03,-2.012113328795023161e-03,-3.447351083312696560e-03,2.201824289320905136e-03,7.653484026581212386e-03,9.687874136866267633e-04,-9.012594631060377422e-04,-1.469496812201562773e-03,2.822818699357795141e-03,-3.075822874060441404e-03,-2.049788521602610367e-03,3.630338524085251640e-03,6.556592960009622988e-04,1.394502421731437091e-04,5.952892557805138443e-03,-5.448085877673075790e-03,9.913602988157544163e-04,3.748468560993925818e-03,-7.138621630573607595e-03,-2.473222200411535204e-03,1.049760391415044533e-03,8.848361519048719703e-03,1.768833880999763630e-03,7.978375927698257847e-03,2.702753698017866177e-03,-1.478288349997330725e-03,-2.882883086329456317e-05,2.213138411293295773e-03,-8.360453054803101622e-03,-6.206400766362048411e-03,-1.406537538648817412e-03,1.819138105187932458e-03,-6.883381118151018531e-03,-3.135489597686792404e-03,1.325845105516970787e-03,-2.368648332098202995e-04,1.085022181615741483e-03,-3.307779060204401201e-03,-7.717280796885807315e-03,4.382750386608048020e-03,-2.705801977616672377e-03,2.493177419510035717e-03,1.486193092734803363e-03,2.867957967951604258e-03,-2.501304241147748238e-03,-5.434182141087573990e-03,2.603635533582702739e-03,1.021978285491480709e-03,-8.836804471775731515e-03,8.639386241682008374e-03,-1.038898460151371555e-03,-2.513607127838577131e-03,-5.921373227172052611e-03,-4.436516728676869435e-03,-5.752920501330944549e-03,-1.689877890105179453e-03,4.237833813621776601e-03,2.479156982658747062e-03,5.702559836302265126e-04,-2.408886308332622728e-03,-1.033948681835527954e-03,3.122424978341123338e-03,-7.041280251371961028e-03,4.547288470564740843e-03,-3.328557212929050910e-03,4.197386820141879755e-03,-1.616795752970899905e-03,2.733670909926599023e-03,-2.253746475303059600e-04,-9.719649930574860569e-03,-8.228795372908708590e-04,-4.604872603685782224e-03,-2.914893148484823393e-03,-8.949296950190078776e-04,-1.098056438660352692e-02,3.086461177035875955e-03,2.965887099033234688e-03,4.394101075936292927e-03,-2.624518995961261977e-03,-5.371649129366188810e-03,-1.498951111132640953e-03,-1.740210274034821049e-04,4.578868597653338972e-03,-1.803141738160850011e-03,4.769867462973387609e-03,1.184739581174657419e-03,-6.341101092986729890e-04,5.363136342387485669e-03,4.438183495115756612e-03,-5.168694170973957801e-03,-4.015771687350839017e-03,-2.171521765855263797e-03,-9.340458001329331877e-03,-1.712095103892957238e-03,-8.975598724130413181e-03,-5.101962460691778813e-03,-1.278595238527637087e-03,1.566718901567999223e-03,-8.349440381684214473e-04,5.714426026753085738e-03,9.678015837952560737e-04,-4.780389592837851307e-04,-6.08956162
2998861112e-03,3.878800958269866192e-03,2.069544948847499254e-03,-6.913373433449122515e-03,-6.391111678997654644e-03,-4.895714846352755903e-03,1.033037417701704926e-03,-2.170770751542540691e-03,-2.908808131540994383e-03,2.392204572356704771e-03,2.017983806166777968e-03,-5.754361839133477485e-03,-4.200046598718392386e-03,-1.146019278581414912e-02,2.910995757360637735e-03,-2.301849691366830881e-03,-1.365014222764684981e-02,-3.634985763238124910e-03,-5.562876038515228785e-03,6.675861406477755049e-04,3.106919274645705137e-03,-4.025975415892861162e-03,-1.445467329277273826e-03,-1.298355502045243004e-03,4.180805212126713008e-03,1.423682687442070832e-03,-4.283133358090430304e-03,-5.596754941560274418e-04,3.269175802840112999e-04,-3.474177425685685491e-05,2.862569986624551293e-03,4.417498761952913695e-03,6.160259824072696544e-04,-5.170156942228490947e-03,9.822858907284131788e-03,-6.746091284993069141e-03,-2.786286027945209529e-04,2.626454415089815239e-03,-1.670491468300085419e-03,5.160557682035826765e-03,-2.010964023885908510e-03,-3.636535756410885951e-03,-3.552206894292957206e-04,-4.526976756192337419e-04,-6.781472878078200622e-03,5.644637452491546928e-03,7.810350912735006257e-03,-1.783414533515638228e-03,9.317964380344213784e-04,-3.576210535720734940e-03,3.478682907638557496e-03,3.212497484909575957e-03,-4.335846335493214777e-03,5.632324652864980144e-04,2.593614396796880238e-03,6.436879731699263196e-03,3.777236003698835297e-03,1.864384290104782174e-03,-9.463953497893971487e-03,2.544743414372754343e-03,-1.298155432659183492e-03,-7.214244188510366904e-03,7.478462572110765837e-03,-1.186284905418353696e-03,-6.881057701334278298e-03,-5.861450441377560189e-03,1.439638260817171708e-03,-4.775680684850167924e-03,3.470455990594990370e-04,-2.748220439552334204e-03,2.670848572564897051e-04,-3.207779342873437554e-03,-7.065651957280654101e-03,-3.544777812881420853e-03,-8.444929081536729949e-04,-1.763478855313006582e-03,8.852984104251737391e-04,-2.182783069704290560e-04,-6.831942080042125191e-04,-6.271911714514151644e-04,-4.502638838756232650e-03,-2.193878450999818888e-03,-5.502841126677470232e-03,1.662669247981226998e-03,8.403405280361064518e-04,-8.210885917117119459e-03,2.980959686119897482e-03,-1.123141308007308443e-02,1.003409455906414645e-03,1.878838534146336833e-03,2.183114332135792715e-03,1.442072388442222153e-03,8.611230276881851747e-03,-6.047854808158726649e-03,9.694593670800520646e-03,-3.855670572136571470e-03,-4.674946844782170569e-03,4.865874828970179894e-03,3.379762927916423625e-04,-3.223034652701890421e-03,-1.584644126785460455e-03,5.121776977805542024e-04,8.113690719368795226e-03,5.739917606391054319e-03,1.953724882430432706e-05,-3.351185415949589565e-04,2.216199576499819043e-04,4.810991504544728618e-03,2.974745900219623231e-04,1.427345063218333349e-02,-3.090443032017481232e-04,-3.605322139831080306e-04,2.230411455666226608e-04,4.308199884159294793e-03,-2.468248806581785880e-03,-6.881797688745249462e-03,-3.502455785318694861e-03,-2.854511168420976932e-03,-5.812268083345098826e-03,1.295102379479752888e-02,-3.619355158175963528e-03,4.465681991917226298e-03 
1.941675239402445425e-04,8.900301397446337345e-03,1.067851016070716676e-02,-1.866259269495406997e-03,-2.836017106702395322e-03,1.900920506061497131e-03,4.486434536644343628e-03,-2.055299207961746363e-03,1.341808169091738331e-03,-2.265600453184959603e-03,2.447747979906895327e-03,-1.098344848754988917e-02,-5.410607885900264175e-03,2.012110754719258991e-03,-5.096791016973106769e-03,2.427118037503586852e-03,1.064005352082926861e-02,-3.272094805483126943e-03,-1.155545526006956317e-03,3.239544219416364002e-03,3.529016228124865623e-03,5.078311293237272421e-03,-5.937150858818267783e-03,1.196499553966936516e-03,-2.281734799961135200e-03,-3.321085631179014966e-03,-6.780827383371349978e-03,-8.260471452955954344e-04,4.159903193684357132e-03,-4.309435517646019077e-06,-3.484905159091781363e-03,-5.818070432236267450e-03,-7.385631276996975844e-03,-5.414042215679168088e-03,2.634093442030755860e-03,-2.774048833376818388e-03,-4.001852931948042687e-04,1.248962860952663274e-03,-5.520647436609876779e-03,4.741487489390342445e-03,3.009339501264787688e-04,3.400853226923784139e-03,-8.833508192153224003e-04,7.215894340321854289e-04,7.171511614208599117e-03,2.107684566692294946e-03,3.014717541422897926e-03,-1.167222014012643007e-02,3.574439028333195167e-03,5.754589652327926109e-03,-6.639342581246463620e-03,6.992181382116242384e-03,1.539123064534192280e-02,3.790324097152255722e-03,-5.584061765306607555e-04,-4.748304463151773944e-03,4.525043075082020756e-03,-2.286361711314661895e-03,2.978244762184494672e-04,-6.777638323538768585e-03,8.747980670531957034e-03,6.272920575635979393e-03,2.298294219963261786e-03,3.470316955498293967e-03,-3.763636891244955766e-03,4.266850559453522554e-03,-2.484691011959129243e-03,1.003786077511856396e-02,-2.558136309844138978e-03,5.768556717333499229e-03,-6.024097433486973706e-03,-4.045812460609842127e-03,-6.350677555268559316e-03,2.769848998919926478e-03,-8.534870090535252862e-03,-2.946981796317857567e-03,1.666311264088522065e-03,7.760698212254054512e-03,-1.787788493334271228e-03,-3.085230960691750764e-04,-5.924041917218899472e-03,1.815942711648899392e-03,-3.958625824886942266e-05,-3.522641893888589417e-03,-5.218839049623958011e-03,-8.993649543270764574e-03,-2.658151583249030326e-03,4.518190994216523941e-04,-2.441230098277918752e-03,4.139677426355192931e-03,2.095798133310012180e-03,3.261740572295204667e-03,-9.309792769398328796e-03,-4.334528358154392247e-03,6.257689683236354085e-03,1.587966954162563313e-03,-6.733787783340739255e-03,3.327123680500889552e-03,-2.029539243000667674e-03,5.814394472588618051e-03,-7.662813209074575450e-03,1.846713371407754638e-03,7.534952774647373300e-03,4.801324083469800667e-04,1.115550171805223048e-02,-2.093826364508470506e-05,5.174271680075409434e-03,-3.900598569442918946e-03,-5.610457163521141623e-04,8.915863929572601402e-04,-5.525211291091423026e-03,-9.067534533772916849e-03,-6.019560795700159596e-03,4.394212300005315662e-05,2.137822553240176920e-03,5.962948804511701215e-03,-1.441757603159855936e-03,-6.741368281931911163e-03,-4.526695284577979196e-04,2.384827033439426691e-04,-1.140166139333875350e-03,-3.400306474941196689e-03,-3.389375168043651937e-03,8.421708852679723675e-04,-5.068682800979024128e-03,6.990160457245439898e-03,-3.023899472111697424e-03,-2.692757329972979546e-03,-2.893990086838799537e-03,-4.304589046102124013e-03,-7.797646192650440830e-03,-6.575515085734185035e-03,7.912202217247695435e-05,8.337847372142813754e-03,-3.061211945478045128e-03,-5.764885463982023783e-03,4.155672310812112170e-03,-8.940240935267233791e-03,8.146501308961519999e-03,-4.4575
82019663396643e-03,2.321290754299720933e-03,-5.261591572296615837e-03,-3.913840593239311869e-03,3.325783176378668306e-03,-1.467765000052403609e-03,-1.260219459426240550e-02,-7.690828753047170008e-03,1.522295321828245592e-03,5.401679481333428352e-03,-8.096290901537872911e-03,-5.883120116705923067e-03,4.638316859761852824e-03,-6.347730593203653078e-03,-6.957274625085311907e-04,4.173586765573211559e-03,-7.202882342133204441e-03,-1.369218938462036551e-03,8.204933385004064411e-03,1.208367053494623566e-03,-2.482120532460798529e-03,-1.721653290753479091e-03,-4.332384863606374668e-03,7.736789898016417730e-04,6.289784640810557048e-03,4.063143848589183341e-05,1.086460903441796404e-02,-1.259652661867959522e-03,-8.702931439499487706e-03,-3.300284122785506272e-03,-8.933178154084014163e-04,-7.452918930279825395e-04,1.318147877157670698e-03,2.939064202267644386e-03,2.865778509278407889e-03,3.047569603117847303e-03,9.180051086245293729e-03,3.826492267244143396e-03,8.197117267332250162e-04,5.118355413808855017e-03,-1.000549554619804826e-02,-1.245304824168365311e-03,5.103266235104809501e-04,1.225154335028037214e-03,-4.817846670748160767e-03,-1.458207962717772465e-03,4.387834722966752615e-03,1.236024473200412295e-03,-7.454680319117426915e-03,2.985901077299831473e-03,-6.491349550514128632e-03,4.901042104641038441e-03,-4.158081489752789317e-04,1.422973447256332079e-04,-6.204064304739207412e-03,-4.045270032693177274e-03,8.929211032375527005e-04,4.106779175227351364e-03,-4.301990982168202998e-03,3.946824948587041200e-03,-2.518235586452635330e-03,-4.193817497380398877e-03,-6.386274533484892491e-03,-2.687209346759286081e-03,6.102067305860088206e-03,1.777982651138540872e-03,4.445115038477219158e-03,7.906077466377692611e-03,1.629206370891796211e-05,1.900866100359571969e-03,1.394109744602092154e-04,4.407768021700593096e-04,-2.542320831576161920e-03,2.343653325837994836e-03,3.660592239103892774e-03,-3.761256936697087447e-04,-3.025029219460267926e-03,-4.063410344790886752e-03,-8.486038377493169971e-03,7.392574147774067135e-03,6.281010914371114480e-03,1.051916833294281976e-02,3.594976815278992915e-03,-5.812465204059201127e-03,-7.732609091873698199e-04,5.122733946574272064e-03,6.306651110084361689e-03,-8.746431835513265168e-03,3.126318732023553211e-03,-2.409088925283472873e-03,3.189850439029368566e-03,-5.592889331875330146e-04,-1.005573410342665459e-02,-3.491805267676367786e-03,-7.560939526554700044e-03,4.249923789583178423e-03,-5.619163441189593475e-03,-7.955125280046573127e-03,1.419318197453164855e-03,8.123431052060666943e-03,3.415677577642902904e-03,-5.811829947828784336e-03,1.894237296612756713e-03,-2.615986054520749076e-04,1.848133157349947592e-03,-9.721287395450853817e-04,-6.958838995188601842e-03,7.645775930592588719e-03,2.083623958537992194e-03,2.427377841205144698e-03,-2.235798687770864778e-03,-3.850467900785756094e-03,4.731464070668913759e-03,5.947978023491831343e-03,1.894197951877514335e-03,-8.168632261139945061e-03,-5.277429968552325296e-03,-1.193463141715780246e-02,-5.826803705623579722e-03,3.140877731985470753e-03,1.768136785269209480e-03,-5.595986936607027021e-03,-1.015943987540734788e-03,4.487301583106872646e-03,1.630038935878756323e-03,3.258070866214255335e-03,8.741171455887757069e-03,7.622975506525701307e-03,-4.552109704634585419e-03,-2.951295828960062966e-04,1.039225271317612692e-02,5.073933376203609152e-03,-2.038202956074135468e-03,1.001210853725517484e-02,-1.302075133370556113e-03,-1.213410071496225659e-03,3.563625311281624127e-03,-5.090189534259179603e-03,6.855052602532166021e-03,-7.03581528045407397
8e-03,3.394786029152214828e-04,-3.681878840987271332e-04,-4.759876054297652002e-03,-1.838036315644653567e-03,3.202224841436153869e-03,-6.153324512046134606e-03,-3.524772956724764317e-03,-7.978404258579574774e-04,-4.037675784742500266e-03,5.095058402534205153e-03,-4.794423402265108290e-03,-5.607940837970413388e-04,2.456876213172401804e-03,1.045854931174182391e-03,2.287008238246913613e-03,2.661318582061916217e-03,-1.168919038194325341e-02,1.721880411026933911e-03,2.377784370248999402e-03,-1.528575723348313872e-04,-5.036102602226711841e-03,1.167706126132228702e-03,9.445125067144689510e-03,4.849653833564633761e-03,1.491092990611233952e-03,-2.098985812555193963e-03,-4.022152689979309123e-03,-1.542701607609835114e-03,2.908098500337710952e-03,-4.911203346660563372e-03,4.282852649397908735e-05,-1.067350449288869893e-02,-6.130041455208203732e-03,5.553287374403869520e-03,-6.329216842743043674e-04,-4.720365524471919474e-04,-2.549936883178195116e-03,-1.595875145269358246e-03,1.009299050177899643e-02,-2.827717479544972733e-03,-2.353602193431906121e-03,8.076307166970218701e-04,1.267586859363078978e-03,1.308187528817211797e-04,1.407800134918811650e-03,-1.142882352625689078e-03,7.946335566998996916e-03,2.575377804585540342e-03,-1.759315384804609520e-04,1.717067996782579292e-03,-9.361402111325836575e-04,3.346133658627855213e-03,3.970659349789708997e-03,-4.275035079755159888e-03,-3.746428341627264236e-03,5.364458300508684677e-03,-5.363657059204472027e-03,2.017374743724593793e-03,7.174121330139262166e-04,2.211672167934944627e-03,-8.225579717865510315e-03,-3.059830747147475327e-03,-6.582877775546486405e-03,-3.806737718556658327e-03,-7.528244060797968130e-03,-2.238525611203398980e-03,-7.039058250786793189e-03,1.902547029426611901e-04,2.164989941882840795e-03,9.557851966521488949e-04,3.668845991906362607e-03,2.843160271432327961e-03,4.704435593854209598e-03,-5.379808487174933887e-03,-5.911707905598442142e-03,3.025490685107547553e-03,1.085428597168460949e-02,-1.455651783860511519e-03,-2.987187034272595267e-03,9.326992194727608265e-03,5.008949819076401709e-03,9.031746497805382204e-03,-8.258835413068470882e-03,-6.685565403060464429e-03,-3.803793069681972189e-03,1.037643308677681491e-03,-3.302710198920465971e-03,-8.099725528679496342e-03,-6.048794617286756131e-03,3.986496633262909088e-03,8.372054830112727339e-04,-7.192964107238240465e-03,-2.169616136541687324e-03,-3.825092329967127395e-03,2.366187661824693204e-03,-2.452761709197957330e-04,-9.866349876800519295e-03,6.448622663385831719e-03,-1.411996988000965110e-03,-4.056830904562562534e-04,3.845650584152223057e-03,-6.058055464947535024e-03,7.376246646283467212e-03,-9.528045201468516093e-03,3.584565761234215914e-03,-5.875645859080540732e-03,-5.515167253287789687e-03,8.034028756652284581e-03,5.793035302705456385e-03,2.504579391689202173e-03,9.956677945095996837e-03,-2.099728571862532513e-03,-4.136365053542908309e-03,3.635424143485368499e-03,-3.615024032958671918e-03,-1.144500461963873263e-02,-2.876261360568871384e-04,1.787216879639961457e-03,1.568863855050154580e-03,6.172436750128352557e-03,-6.112012917867056666e-03 
2.234727389488501068e-03,-2.708580868000551572e-03,8.496372768950536357e-04,-1.896430294253084953e-03,-2.379475830705444789e-03,-1.020775312003331485e-02,-1.404509946189280429e-04,-7.317630072062775333e-04,6.280662140790927751e-03,7.671941150642752442e-03,2.085052095592687844e-03,-4.825035850704455752e-03,2.013007486162120036e-04,2.290156393308827570e-03,-1.613868695379772067e-03,7.623656605794958173e-04,-3.656841220942113214e-03,2.696274953325575405e-03,-2.361782526440022855e-03,-1.588675800940963839e-03,4.324289438204948280e-03,3.274285039291304628e-04,6.832545093503820770e-04,-1.212276592821812278e-02,1.171130554590400080e-02,-4.324701132595971768e-03,-1.290474790206271299e-03,-3.867345090695094344e-04,-2.501003036798783979e-03,-1.350984214405446908e-03,4.321000418947664780e-05,5.673460119094612605e-03,4.609732896431054452e-03,-2.662319168550776431e-03,9.834733559517720800e-04,4.757768272213224077e-03,3.855862136130915399e-03,-1.279019899022829651e-03,1.398057605016456715e-03,-4.641385930145738685e-03,5.247406871632473262e-03,-3.211879950323521214e-03,-8.725420179945923838e-04,-3.915185138440749162e-03,-3.830264220771672102e-03,-4.209635909577945281e-04,2.523154899108694940e-04,7.256550304587734875e-03,2.863099053056669327e-03,7.661156468633987214e-03,2.276775423187111635e-03,-3.666511276797619883e-03,2.782474661223755062e-03,7.065502902748364324e-03,-4.617905291069284400e-03,-4.119043957740591011e-03,-3.490837145429498412e-03,2.420396158057113410e-03,6.677624098066070948e-03,2.815891379054777637e-03,-6.236798574530597571e-03,3.920228864003776070e-03,-5.229026422796532493e-03,-2.939922551820448472e-03,4.834711656740363697e-03,3.742791012556735686e-03,-6.229659331495788306e-04,2.401144204036858611e-03,3.392812695274501859e-03,-1.233365649096431621e-02,1.120018597344071484e-02,9.056505454717625817e-03,1.497170333872964790e-03,5.583296489358422453e-03,3.167296290246983566e-03,-6.519149405390687289e-03,-6.873522194353806451e-03,5.185153993294069029e-03,5.217409557849429953e-03,-2.062367735209076191e-03,3.767979801147914795e-03,-1.226639132583028612e-03,4.499538437276535269e-03,-5.829758959558504691e-03,-2.115434947625963614e-03,-3.447960093852744500e-03,-4.959285806703248381e-03,1.579148502959089144e-03,7.425859695079455730e-03,7.684763164044411572e-03,-1.295829365291371882e-03,-3.486003592161333528e-03,-4.736282543049214616e-03,5.430546893183499889e-03,-2.243434277004536376e-03,-7.399403615721845615e-03,-7.414237228157334776e-03,3.504158554843862534e-03,-1.734544062565997495e-03,6.001793007037949275e-03,7.100755087602590807e-04,-2.236303459210146286e-03,-5.294739913192736402e-03,2.287588852716901551e-03,-4.080410860449021522e-04,3.495414266931054156e-03,-3.995218108657465871e-03,2.520575604092883514e-03,1.474091642450659585e-03,-6.970941997786720030e-04,3.179015050538510755e-03,3.748026551473239583e-04,7.527895793476162972e-03,6.453165837075856837e-04,4.989622952133340034e-03,-1.183893874071611845e-03,-1.386784899287549569e-02,-1.112661848133209095e-03,-1.065668523016895936e-03,-3.026210603101879299e-03,-4.675158225918957591e-03,-7.814984701772748716e-04,4.465007043334977971e-03,4.413131513028052157e-03,3.936122590493377517e-03,3.044286303557333130e-03,-2.081812437977434860e-03,-8.476398969290060981e-03,-1.419678532850680418e-03,7.164831085191839454e-04,-1.325395249940943638e-03,-4.908817563226486932e-04,-9.683472773322031454e-04,-4.509725760228079292e-03,1.009960590748662165e-02,7.664638662726428950e-03,2.201655952978650698e-03,-5.310735766069663022e-03,3.228186750301195382e-03,1.672155197
760812702e-04,-1.062618633722574833e-03,-9.226187072417746339e-04,2.986837554897972188e-03,2.233733673904274731e-03,-6.458419925869663021e-03,-4.508657933307437635e-04,-1.760383358503676528e-03,-4.060856950901986333e-03,-2.198145720336421461e-03,-6.207078971335400555e-03,-5.065849507739320946e-03,-6.571524758548101313e-04,4.226117489688731993e-03,8.259147051001641815e-03,-6.874742892049471275e-03,3.237343490176531040e-04,5.151739800538349817e-03,4.786202882223278623e-03,6.209186672193998599e-03,-4.951640658996719381e-03,-1.146549328377523963e-02,1.053105471200238737e-02,1.266728399351664102e-03,2.885867305731501173e-04,-2.877329854704514713e-03,-3.124620449632825592e-03,2.941162553623715217e-03,-7.199586023697563016e-03,-3.259003811118503426e-03,2.348896817764711473e-03,9.527166875270774368e-03,3.587602352233843720e-03,6.479414764330860069e-03,-6.395362293826106813e-04,1.305913043312234784e-03,-3.674932032930795003e-05,5.834918648696605682e-03,9.289968069990622412e-05,-4.468597122405096843e-03,5.517529287545758221e-04,-4.203630997165439497e-03,3.257639590287749343e-03,-1.054171619081654011e-03,-1.464772988410456382e-03,1.003673654825916863e-03,-1.907367247643565250e-03,4.526434507144895709e-03,5.001902178229593321e-03,1.394828302335902707e-03,-3.251956562182924636e-03,1.432413343324547667e-04,-2.224411393308086109e-03,-1.649528508659847898e-03,4.847883891232000406e-03,-2.093335547180814494e-05,-2.319381712863874834e-03,1.368500104687797418e-03,-6.716203030264704388e-03,2.979392097800603507e-06,6.178134689117614591e-04,5.739891860228425388e-03,8.859910768121698546e-03,4.893982609763596232e-03,-1.415682256680656035e-03,-2.507975481357895345e-03,-2.027383137559050311e-03,-5.418911555926170763e-03,-6.732784480254235156e-03,2.876501078414046581e-03,-7.891294150138729660e-03,5.563726369381888533e-03,-2.410628682469676421e-03,-1.642220280468653952e-04,7.291385789405384864e-03,-1.065091993667068981e-03,3.484547839934416021e-03,1.729838145921725259e-03,4.582265528943978324e-03,2.952897937115046053e-03,-3.614464604414770361e-03,3.680551233276241861e-03,-4.627415547375249251e-04,5.435740625762379083e-03,8.473994349132209572e-03,4.824217813500016050e-03,4.533355566318437085e-05,-6.504535985703125116e-04,-1.676355448749660944e-03,-4.489427168325335702e-04,2.360281713546794665e-03,-4.115115395834305356e-03,2.841302765112661138e-03,-5.747891999082743167e-04,2.892018019299954969e-03,-4.021104349465136688e-03,4.529663949930548897e-03,5.220802323441316346e-03,4.797257146359160986e-03,-5.038730564629874051e-03,1.319134620376315441e-03,8.560164639387680269e-03,7.117219197504849430e-03,1.742096821574569496e-03,-2.209595042072180337e-03,-9.388553180567475037e-03,-2.087962596380102051e-03,-2.834888862262900221e-03,5.743094964771607025e-03,1.294343694391712242e-02,5.664613549731778962e-04,6.000022868067597659e-03,-9.122327008578689361e-03,-5.105937957724320485e-04,1.717491526459901090e-03,2.227655696387127477e-03,-5.992398963743023662e-04,1.555159989597347352e-02,9.626772483520461970e-03,6.263434106881650247e-03,6.911204116389678062e-03,-4.461846467727296345e-03,-7.717655640029537530e-03,-6.516389710693007951e-03,1.779352355453121034e-03,4.294800922293196335e-03,3.724368738446069520e-03,5.289640831960586327e-03,-1.356912483799854177e-03,-4.039439260598589286e-03,5.470882455961733476e-03,8.679413026964616763e-05,9.110680616994382142e-04,-8.656196684792001432e-04,3.269140015041923242e-03,-7.818039261833406237e-03,-4.857714836900784740e-03,-2.846333433695487879e-03,5.192646643162014512e-03,-8.321988259861514900e-03,1
.248402329331076968e-03,6.815143131345321190e-04,-5.784572240902552225e-03,-1.626791281100936192e-03,1.848348514525104526e-04,-2.476574030499134518e-03,-4.139918490724901604e-04,3.249029708701220605e-03,1.342007285155471727e-02,8.051223120845002988e-04,6.436412240996205493e-03,-2.603094512009250343e-03,1.887047821515331876e-03,7.095878870108486905e-04,-7.658541568916888255e-04,-2.362397304579122795e-03,3.375234176821552152e-03,2.690359860467127044e-03,6.309429990812025386e-03,3.375874914066547341e-03,3.015435187380021125e-03,3.980991482639432456e-03,1.406689059210864540e-02,-1.541156449463299818e-04,-4.495087653610999566e-03,-3.266036699987695784e-03,3.041108558689818633e-03,5.142529659064033233e-03,-1.524305581457270024e-03,5.666537131763678230e-03,2.370926006656785996e-03,1.015201093956654733e-02,-2.748455788509244786e-03,-3.590729113045465753e-03,-4.307631093124399818e-03,-9.365035390134663143e-03,7.992457985364973113e-04,-3.323011847815123508e-03,1.364874786614342361e-02,3.860797088124324691e-03,-5.127157384930022127e-03,4.549845501517003551e-03,-2.539657203796688145e-03,-2.254432190636171569e-03,4.529156434781370956e-03,2.855774335451501046e-04,2.002826443732445404e-03,3.510515298626145156e-03,-8.301032348527992780e-03,-1.928116452906827611e-03,2.749905934569258324e-03,-6.323474841274254188e-03,4.787410895749902827e-03,-5.048751115675546158e-03,3.199656217029426489e-03,2.963057845304670722e-03,5.610482944898667959e-03,9.184171121692561932e-03,5.020925817549770805e-03,3.686574707616757911e-03,-3.040707770379752744e-03,-6.492578490502003532e-04,1.284695018493688459e-02,3.672698799883435377e-03,-1.675170063648400223e-02,-5.656934613661718410e-03,3.233852776513899247e-04,2.294572897368902641e-03,-2.892801484492872870e-03,9.876963915432183938e-03,-1.961206994599517389e-03,8.614697184564661016e-04,3.087065431958980685e-03,2.348338163121794879e-03,-2.503396252653385047e-04,2.774037362790249695e-03,3.040826497673866473e-03,7.782944358921170536e-03,-2.716388507548695048e-03,6.096987508061692436e-03,-1.965800935885264004e-03,-1.435897413524763041e-03,2.382239777174840079e-03,4.602624387467104013e-03,1.734123109158376089e-03,4.682755337350490675e-03,4.655396772559688111e-03,5.506750109219545226e-03,1.562197478059577579e-03,4.138610023104051690e-03,1.130133443689650954e-03,3.347093562638122060e-03,1.886168734172436520e-04,-1.343977646015950475e-03,-1.960639170312383252e-03,3.145534779644045247e-03,-8.784547138933321633e-03,4.526353525584692408e-03,-5.220949784406815446e-03,2.555704164802373359e-03,6.101658282756909505e-04,-6.358086145086130382e-03,-6.036854458210486399e-03,-6.922049856497240750e-03,-1.130629753181144719e-02,-7.604801729029707705e-03,-1.091885246056117659e-03,1.964756182450749615e-03,-7.682312509733131740e-03,1.797992863484336543e-03,-6.666724966273348402e-03,5.588486208286818496e-03,-6.254880597998322724e-03,1.802881418384495987e-03,6.562306428974498673e-03,5.771190490922057173e-03,8.071718061293693940e-04,-2.028435888552365131e-04,-1.152799413295485520e-02,-5.310737958460157865e-03,-1.039705446192341377e-02 
-1.177328930777101976e-02,2.216303341590577201e-03,8.087553775318891483e-03,-1.489827508292922717e-03,1.989926144161476528e-03,-9.961296584183985600e-03,-3.426278229661006407e-03,-1.381478693487150954e-03,-7.928755831631061762e-03,-1.401080436450079930e-02,-4.021492777016840402e-03,1.306782571384624550e-03,5.302207371760917810e-03,8.752939203693934495e-03,1.185512030674269876e-03,1.446266474952386319e-03,1.822210817674350643e-03,1.585309681672042795e-02,8.145416430556480167e-03,8.669944932301948057e-03,-7.434244670726687524e-04,6.426917844922904952e-03,-4.417405756505669097e-04,-6.042171613876347151e-03,3.749988364127617921e-03,-3.259835216136174919e-03,8.615475091152968029e-03,8.329641102795361240e-03,2.330524820894614921e-03,-1.497751064027531831e-03,-2.839805923065991198e-03,1.055244996895342638e-03,2.677379847133401752e-03,-2.088525264059784819e-03,-1.032444283687466123e-02,5.268093564320665452e-03,-4.396971052162100546e-03,4.177670020857926145e-03,-3.669757217166476695e-03,5.862640082144145344e-03,-3.409608881986976245e-03,3.596183613343457378e-03,-5.471949781194702716e-04,-1.658308950637261666e-03,1.179104082627074499e-03,-2.416894999757437587e-03,-1.580354217919970440e-03,5.785401485280308037e-03,1.522366032338983962e-03,-2.987195382565661222e-03,-9.110054946410775512e-03,-4.674090541663602529e-03,4.953009505394247690e-03,1.521676247906129887e-03,-1.486398744608014022e-03,2.329335163234104271e-03,4.603868850101521838e-03,2.664137386242180182e-03,-9.271394909374985546e-03,4.434833532162319207e-04,9.001364699093785879e-03,3.649212347171776989e-04,-9.617863022819008552e-04,-1.925582934069537113e-03,1.721112605683548899e-04,-1.992623029636125375e-03,-3.044960338501297506e-03,9.164575665831385578e-04,-3.567097910884329311e-03,1.273890225856913125e-02,5.229015428282915261e-03,1.032441028930094692e-02,-3.682951618903401031e-03,-5.301338953410850326e-03,-6.947906243037047305e-03,2.743773109281792515e-03,-1.992348858731720403e-03,-3.933374821537469028e-03,-6.402419880756202684e-03,-1.270437226458205789e-02,-2.179673676245353579e-03,1.225359915471324699e-03,-5.035935031115149584e-03,-5.615048552918424675e-03,2.305253373913958553e-03,4.064758603411445723e-03,-7.534075708862607822e-03,4.558693378626391471e-03,9.471817274736124906e-03,3.032724561644937252e-03,-4.766725996221482346e-03,3.493283802339167132e-03,-4.419225379970210393e-03,-4.329328047181031126e-03,-3.978525907813539736e-03,4.739180751886566852e-03,-3.663736569941537010e-03,1.504224006050084014e-03,3.382708020351779185e-03,-2.710390248772084985e-03,-1.208975270037751645e-03,5.184816705091989213e-03,-8.015932128982689031e-03,-5.213606917252014764e-03,9.347151260170013728e-03,-6.220756017775763745e-04,1.743746336597975198e-03,1.936065755542035888e-03,1.240830102389661911e-03,-2.415545602618352199e-03,8.535155250204892394e-03,1.067167857378356188e-03,9.213935764727230679e-03,4.300111310344524075e-03,1.958586458730978558e-03,-4.631982398865503760e-03,3.731759788462554652e-03,5.981689304394266840e-03,-2.714033658750284166e-03,-1.097334203111042031e-03,8.056727382979888327e-04,7.831377343855264184e-03,-8.548355768058578212e-04,-6.136085648867035079e-03,8.456552091549612068e-04,5.217012542077078575e-03,-3.740566526229524764e-04,-2.035027748801947547e-03,-9.561668013426730811e-03,-1.515632014538109522e-02,-1.107840134692464860e-02,-2.042805947613991150e-03,-3.653532916769353274e-03,-8.386650455494210543e-04,-6.866580859063379193e-03,-2.783107661209904479e-03,2.824546498506293256e-04,-7.486194598481248769e-03,1.590743036951180328e-04,-9.148500
023863550407e-03,3.174515752052101299e-03,-3.709420520857919243e-04,-3.492351897181363365e-03,4.304098125215059800e-03,-3.095674725875156071e-03,2.878443701748076809e-03,8.147324678058697573e-04,1.145880860434128796e-02,1.031536989524846522e-02,-5.544072006638890542e-03,2.002939407652185467e-03,-8.531996936092838685e-03,-7.845301533489189294e-03,3.168926212525560379e-03,3.189555121832527850e-04,3.175514369241718197e-04,-8.890766161154549374e-03,3.784778225027026887e-03,-3.469015031825248372e-03,-6.717402277520735480e-03,-1.045701381689352442e-03,7.535344115875931489e-03,4.761319026082430454e-03,1.942329968163884583e-03,8.394994838955126965e-03,-4.984970912138927783e-03,8.194273364092433468e-03,-7.812186321999041623e-04,1.490732706403403692e-03,-1.273307556603854368e-03,-8.161117663307057568e-03,-3.886008272845651278e-03,4.020829497855214589e-03,-3.211761100504841905e-04,-5.071560781162959101e-04,7.999720903025243218e-03,1.806313196291790260e-04,-6.726333079975402307e-03,-3.811736544849703579e-03,-6.043879814836703585e-03,-2.138874876729127041e-03,-1.635665169851863816e-03,-9.024774571442066573e-03,6.683050652522465156e-03,-3.027800757631382740e-03,3.970795710555370672e-03,1.906137491211005325e-04,-5.190596406829893191e-05,-5.983624848712500340e-03,-8.133165390390151989e-03,2.490197973589848048e-03,-7.333684282594552829e-03,-1.847150407127336712e-03,-7.540893589514386861e-03,-2.625000704107722377e-03,4.012537404862553253e-03,-3.219553961979804407e-03,7.856551480263780597e-04,7.313763085120293661e-03,-5.497755539434319915e-04,-1.403055184970967637e-02,8.159280915863679909e-04,7.957778073972060669e-04,1.698950857292718872e-03,7.128897922105471331e-03,9.249477210836600655e-03,-2.644914413650643198e-03,-5.245074624629327172e-03,-5.760060536752136667e-03,-7.863529227710224986e-03,-6.057400782217579882e-03,4.030770127533895263e-03,-4.731513005541060357e-03,1.972012317187107726e-03,2.696615060557305127e-03,-1.335903099293072943e-03,-5.590259531408342877e-03,-2.461562998037402326e-03,-4.270534300468318070e-03,-2.465356546393068088e-03,-2.285438469303723800e-03,-2.558386144727340522e-03,-3.251925065509995912e-05,9.259943054301441462e-03,-3.003614805704632722e-03,-9.113297495365419928e-03,-4.728734884807775721e-03,-5.513331367201461516e-04,-8.552902359194520729e-03,3.590694741549051508e-03,1.425120983584055555e-03,8.995359811429231179e-04,-2.747006650711072018e-03,7.075983558334245039e-04,-3.178180106624469738e-03,7.998391759908136077e-04,1.911053866373052090e-03,-1.212870810111506070e-04,-1.732825340801366848e-03,-4.740233753958175353e-03,8.664505524783434887e-03,-1.879829003277062828e-03,-4.478743140842796934e-03,-5.582613684828366916e-03,1.002709393738372918e-03,6.911224534705938264e-04,-4.912841145968256835e-03,-4.715992382272186575e-03,1.091105549179957271e-04,5.131315469712792975e-03,3.353106290468079424e-05,8.330970498378805281e-03,-1.536856634820705191e-03,-6.297532689731709897e-03,-3.540034413833873482e-03,1.024827807426813200e-03,-1.551705426580884993e-03,-8.792704234876170741e-03,4.038129826161222877e-04,-3.416291600316667589e-03,-2.101883825654659471e-04,3.129940759629500253e-03,-3.978749260841235071e-03,4.247501810680837445e-03,5.188792313071740204e-04,-3.348375017583932363e-03,-4.371668197189535962e-03,1.184427730994535568e-03,1.143834750147839285e-03,1.719182793876506505e-03,3.595334254403544615e-03,6.916417813956255919e-03,2.672123426728947957e-03,-3.469115890813807748e-03,4.696358301414819622e-03,-2.022321703777109174e-03,1.184420731884656660e-03,-8.069039571081976919e-04,-8.283613652
303038408e-03,-4.449669504601664986e-03,3.636547738314417806e-03,1.974622520479622008e-04,1.283782463668073533e-02,-1.789411507236562081e-03,-4.600911824839569973e-03,-2.232945218577157744e-03,5.820197360834567998e-03,1.558176742822998517e-03,1.004060414582199495e-03,-3.997368288826125618e-03,-4.250172277570532509e-03,9.110339343005448114e-04,-9.097865484682467890e-03,1.636905774112813695e-03,-9.743420028516171661e-04,-1.052607400838997614e-05,-2.019231899216527865e-03,5.075001258085099290e-03,-8.543065372214003403e-03,3.818314644427727063e-03,-7.190036432237755263e-04,-1.850742027146761023e-03,-3.639088008695849771e-04,-5.358894523639938165e-03,-6.233948250790710688e-03,6.707611666514127352e-03,5.444659268353508816e-03,7.363731286981180235e-03,-1.848877082226557450e-03,-7.439965066970049071e-03,3.833942645758972242e-03,2.806830665052890538e-03,-9.740110022313227395e-03,2.696577249440667689e-04,-4.509967427096989862e-03,-3.200900874481499379e-03,7.411710266932466755e-03,2.484641229198354413e-03,7.533109015736112572e-03,-6.128112739954397030e-03,-3.556516953718045311e-05,-1.092711230307986947e-02,6.589146332522907365e-03,1.143564734602667233e-03,-6.258766181356988263e-03,-2.069726400309035950e-03,4.758292159338324556e-03,-1.356029819775723348e-03,-8.979671041135094296e-04,-1.188877679612322658e-02,9.270438426668834581e-03,9.559358397278549038e-03,-8.323413653985357614e-04,-8.266894330589491871e-03,-4.142869488304314063e-03,2.558639830804054164e-03,-2.474814373425992262e-04,2.885282336159556652e-03,-4.858870767056864583e-03,3.932080252990538365e-03,3.193070467392886656e-03,1.114570589679246201e-02,-1.987463768360982121e-03,-1.538288213605847271e-03,5.629605798526624363e-03,4.328506069979990746e-03,-1.504035940484147512e-04,-3.899328536347227130e-03,4.191798598141623956e-03,-7.890631142248834620e-03,2.086809328631897536e-03,5.913154630578225379e-03,2.920136815220915338e-03,-4.069508542506202614e-03,-5.198994089146300769e-03,3.134310854338672250e-03,5.299069634253717256e-03,-4.575873696558123947e-03,-5.768289982625374526e-03,-3.041422334558601098e-03,9.885955717498166873e-03,9.353478696628651605e-04,-1.410829263376535371e-03,4.980189923073193720e-03,3.772920378376049596e-03,-4.440312309052440835e-03,1.253167914951755545e-03,-2.934910318385211200e-03,3.461086764807132492e-03,-6.661192313045405378e-03,-1.378750593359714212e-03,8.167931626158879757e-03,2.898208100895398581e-03,-1.780904297870313305e-03,9.168885674654693105e-03,3.425856331031289746e-03,5.711464699196340446e-04,5.081448806612480495e-03,4.509648580392966975e-03,3.376319517314628656e-04,-6.729047185005696680e-03,-5.077095823767845822e-03,4.410490849717806083e-03,4.143603022183207571e-03,1.935940314941237512e-03,-6.630944715324615241e-03,-1.057966109004038202e-02,-4.743581315652742195e-04,-1.000805311135575040e-02,4.555744530702076495e-03,1.070521817330910260e-03,7.105451582816766677e-03,-2.768456264245354632e-03,2.042084542591022893e-03,1.796537932018565367e-03,9.353191936784981467e-03,-4.089080420738236063e-03,7.095549936705302339e-03,-2.898932604689864918e-03,9.229959999795261591e-03 
-6.748561794300185664e-03,8.119479491984485028e-04,-2.913613853103964434e-03,-2.979835473358418124e-03,3.066676257258514355e-03,2.079496119004196810e-03,3.668827143897603407e-03,-7.424245547142155691e-03,1.868360246955596280e-03,-2.089118554940338959e-03,9.270462044714920932e-04,-4.305856164333352748e-03,2.924231321829512918e-03,-3.835423453427049510e-03,2.475050282885827950e-03,9.947149346737986161e-03,8.864449496310397233e-04,1.417204345859648029e-03,-4.276758300563583866e-03,-5.743867416573201004e-03,-3.343115620680084821e-03,1.074593188390029043e-03,3.731317272839331910e-03,2.332901773927271299e-03,-2.697540380459352398e-03,-5.141000182547571414e-03,2.307144568983713565e-03,2.523097405979108591e-03,2.579153549180679059e-03,-4.245465178588061356e-03,-3.729696120519619142e-04,-1.661315658761715547e-03,-6.329130294041366428e-03,1.812639498779183107e-03,-7.875791985535162418e-04,-9.858140294933741103e-04,1.561681530646255546e-03,-4.195899428361117523e-04,7.387432704112566195e-03,-4.702735512174578345e-03,2.825245245962511743e-03,8.249685676569506673e-03,7.964593162326482831e-03,3.618308432492189290e-04,-4.806742618763144154e-03,5.356787896579061868e-03,3.201189487000073632e-03,2.783222919879292222e-03,4.223655271166616709e-03,6.442059471952319870e-04,4.063379465108559081e-03,2.180285360832553629e-03,4.560612692541108140e-03,-5.717687513403202706e-03,-6.142144816832874332e-03,-1.088198937809946526e-03,-6.998170319824818277e-03,-6.596353471994776708e-03,1.665577454432630531e-03,4.084888064204168952e-03,9.536983813007492237e-04,-4.465972725341229264e-03,-4.604882701493830284e-03,3.732596872442439824e-03,4.954850582895816910e-03,-4.763726016212648656e-03,5.420307037575270229e-03,-2.080844177441993053e-03,6.783823013215443086e-03,1.362830042835813931e-04,-1.491416215937722096e-03,7.698550109311423379e-04,-2.181365771691663909e-03,-6.013144509040233574e-04,-6.817430507520989835e-03,1.445431033959137265e-04,4.369310367506737229e-03,1.688583456445764226e-03,-1.353573592838481815e-04,-9.675122383150778008e-03,9.600924862656247047e-03,-5.931991320299693192e-03,-5.924863184136062132e-03,-3.978079516509878955e-03,8.218612242866801462e-03,5.353306661863125454e-03,-5.079404802115510334e-03,-6.342652335370251374e-03,-5.160700949119083510e-03,7.178731324110699911e-03,5.975217435899492398e-03,9.176883759919877287e-03,-1.243874129227694149e-03,3.886303105944686424e-03,3.648409121493027471e-03,-4.845137219551764150e-03,7.850995225035312802e-03,-1.334872055954396250e-03,-5.450041605081339287e-03,1.774300473029694764e-03,1.739411210365142792e-03,6.050875526617915771e-03,-8.924659750113339216e-03,5.677027096572856771e-03,9.805998695678417448e-03,3.535684337572431255e-03,-4.620558343135947911e-03,-8.829624057842657206e-03,-7.854710168926212419e-04,-3.024936898246261430e-03,-1.297869024263902625e-03,-1.635960210922212527e-03,8.321748571817553716e-03,7.899300427565925098e-03,-5.668819562546639768e-03,-1.665714273261919773e-02,3.026377045017049350e-03,1.556194388053656484e-04,9.607020890753812043e-04,-6.619576670588797444e-03,1.478068984323489387e-03,6.063706793999618962e-03,-7.394651527926417341e-03,-4.082497345744028115e-05,-2.157529821866224426e-03,6.698789330479300958e-04,3.318053746100207285e-03,-4.916284691365139549e-03,1.624941041440398394e-03,1.680593051302898102e-03,5.477119978045582802e-03,-9.064696800873038585e-03,1.054419719975513035e-03,-7.774392733699795341e-04,7.711887411925585391e-03,1.571688127433116915e-03,3.615022474029254938e-03,-7.647069014118672205e-04,-4.323815181460312872e-03,5.60848145111670
4118e-03,-3.846375009907442022e-03,-5.829088407198995507e-03,1.797089326608823081e-03,1.928728236687829705e-03,-2.225520099551125518e-03,2.100140518956902097e-03,-1.717332573060346155e-03,-5.684805062078229082e-03,7.263813617860264945e-04,2.853107499782162829e-03,-1.078429365908930215e-03,2.425045047058981083e-04,-1.116301961599054475e-02,1.250436895603249717e-03,6.503090302873183997e-04,2.668811890898248936e-03,4.573281329829090368e-04,1.094592224916061604e-03,8.215050772963181092e-03,-2.485813600259732849e-05,-1.076787830143556036e-02,5.997521968633527135e-03,2.324059045461403152e-03,-4.102488987720085897e-03,-4.917354586430130667e-03,-7.128066169298793477e-03,3.265010760100830415e-03,4.860477209611654623e-03,3.405065444340871477e-03,-8.727386563767761812e-03,5.610329513141915081e-03,1.639497555500214580e-03,-2.061528196093556729e-03,-6.024718741410105109e-05,2.334652930005285017e-03,2.683121286077970349e-04,-7.458197362975994490e-03,-7.746018363472957809e-03,1.625968975054743314e-03,2.330427409924421163e-03,3.316933496690899989e-03,6.001401679917164195e-03,8.270020435312023729e-03,3.274274345621196354e-03,3.939104160838789899e-03,-2.005056230774577251e-03,-6.693607853250825752e-03,1.648325778599178800e-03,1.339010690208273201e-03,3.160374526444193009e-03,-2.166288790662611059e-03,6.885940671610813954e-03,-9.681722602513827269e-03,1.032032228558891693e-02,-1.706964798034512763e-04,-2.480036260998285617e-03,-3.782103752886198465e-03,-3.800609473882944969e-03,-6.765128998232930804e-03,3.698365742285241418e-03,-6.420936401628520290e-04,9.465053415373677476e-03,6.853209147202911182e-03,6.576330460963344558e-04,4.809199267858911379e-03,7.630831524090420613e-03,-3.466979565989543235e-03,1.740188214795798487e-03,-1.328333704582618279e-03,-7.156563713745375636e-03,-4.659286139113257702e-03,8.643319880744805214e-03,9.867218889095913728e-03,-3.171078609745860166e-04,-5.699009112602934461e-03,1.975533822350261041e-03,1.580336026397395665e-03,-5.827387009140248299e-03,5.133214074263773609e-03,8.167964441007839402e-03,7.302933780625362262e-03,-3.460449072129296883e-03,6.434036031200635140e-04,-7.613268927392667293e-03,-5.858136829326705299e-03,-6.437048484557826925e-03,-1.755260053136367725e-03,8.039003028235484558e-03,3.394497470280852681e-03,-1.773658426387475463e-03,-3.086593772904256110e-03,3.244239813992291430e-03,2.711732745752682382e-03,-2.048421259924825509e-03,3.541833100774647181e-03,1.541743543615262384e-04,-6.839503991802851034e-03,5.176655319434132416e-03,-2.810454106661329932e-03,-3.814149888738981465e-03,3.389415107131551683e-03,-1.829117305670917510e-03,6.004044175467331684e-03,-3.701376019193342807e-03,1.904865141305299653e-03,9.715513700801594113e-03,1.028397586253242471e-02,-3.572672827381416245e-03,7.764440478094598690e-03,1.068470859187005233e-02,-2.223992724083063575e-04,-4.087957889394524744e-03,-8.929132233795258541e-03,-2.189061802161094885e-03,-1.221510846502094853e-03,-2.751039611508161644e-03,-4.907518580609099464e-03,1.217745877076543321e-05,5.468770349504450089e-03,-5.900817259285187112e-03,-3.791014346805846730e-03,-4.125122693516075460e-03,-4.519249848224342466e-04,-6.835045186676478625e-03,-4.942216163456094290e-03,-6.453977909293710270e-03,-1.972368851864308645e-03,-8.662611525433350657e-04,-1.111485859732156507e-03,2.651931748982407945e-03,-2.240487535866204048e-03,6.162829466000017097e-03,-1.018513059413668834e-02,-1.064135120159919942e-02,1.069020867193274391e-03,6.610100517803081113e-05,4.605643983553281764e-03,-3.019428038512665495e-03,2.699508200632882039e-03,-4
.327198861630866464e-03,-1.938127374295368888e-04,-9.132412501015024081e-03,3.588112519729590600e-03,-1.546985188377226648e-04,-1.729015160107847098e-03,8.458700298845869478e-03,7.025071466293281684e-03,5.917942777708204080e-03,-4.668170367241005533e-04,-2.870897579846505305e-03,5.142874815588180729e-03,-1.801139249977240725e-03,-3.304211546692198496e-03,-3.112886173784498983e-03,-1.144803004095441834e-02,1.098608291991473411e-02,1.805000159987047662e-03,-6.833489384216652300e-03,-9.788500181569857565e-04,-1.002521686762670484e-02,4.155787806254946683e-03,1.983459180642138317e-03,1.760389149031373838e-03,3.044514571972607633e-03,1.533284456157422374e-03,-1.220570867659702234e-02,-2.723436957325170611e-03,7.634434093972371389e-03,4.489519926445043919e-03,-9.229391584227971540e-03,-3.243394252841275832e-03,-4.560019444762832691e-03,-8.955662150476777824e-04,1.594654119630812021e-03,3.494956762397151938e-03,-1.308995892711466158e-02,-2.000908123432351606e-03,7.425978421725094696e-03,-8.035234282434523074e-04,-5.089686025797686853e-04,-5.541011838032548070e-03,5.684849213634125646e-03,-6.974552599597566658e-04,-6.632710674751703291e-03,4.570652143320947693e-03,8.319197060695738763e-03,-2.027337580024106585e-03,1.690556880773570790e-03,-4.134925436807167058e-03,7.238346177935189689e-03,-1.749906591838956390e-03,3.624367804571732455e-03,-5.755386629852245368e-03,9.646986138387676074e-03,-3.060155345911300204e-03,-9.400979070061390129e-04,-1.123058004830249437e-02,5.416323317217790678e-04,-8.366468110419297138e-04,-5.141746470584173181e-03,-8.336001023096012083e-04,7.025190040530437247e-03,-3.947113367534005324e-03,1.077120668172678979e-02,-4.046718354619577122e-03,-3.331381313344558212e-03,2.816773094785139219e-04,-1.335868576480964652e-02,-7.053422661124883301e-03,-1.828527886601017359e-03,3.442243107859411285e-03,-1.122276933793511094e-02,7.297665611747512944e-03,-1.910916963747960043e-03,-1.725510061604768140e-03,-4.687943268507599806e-03,-3.802578128230717745e-03,9.415676577374073458e-03,-3.531674996117575849e-03,-7.691682549358911693e-03,2.493840917571267437e-03,8.369836918677266493e-03,-2.195038961452235819e-04,1.089106952254525942e-03,1.468418505184675756e-03,2.870523770229015809e-03,-6.283346937780145622e-04,6.846984499823593941e-04,1.564890385302171666e-03,4.323254057368899027e-03,-6.476540830271709705e-05,-3.969049029372057405e-03,7.113416274948721786e-03,2.649265438100325588e-03,-5.491424438540662943e-03,-4.217081858893993273e-04,-7.371151764128482109e-03,3.779768614371784539e-03,-4.160124981902975014e-03,3.340555086977424221e-03,-9.168367218653666537e-03,-5.933536179132387414e-03,-8.499333353557108478e-03,-2.417421093918923312e-03,4.246462967024152921e-03,9.134898223881462651e-04,1.257259369792975258e-03,2.905062453915168395e-03,3.307503328494887374e-03,-4.290853128720125559e-03,-4.819155901007008574e-04,-9.686817563871087536e-04,-4.913519502361869858e-03,1.889506229539048791e-03,-4.886751634406785727e-04,2.908623652730257803e-03,3.338930120788665898e-03,1.064320157045901653e-02,-3.800105009006467249e-03,2.011602380827179175e-03 
-6.246260732870313921e-03,7.030935575874515051e-03,8.646373139623105344e-03,-4.409511038534383656e-03,-5.673495356463818286e-03,1.043677118128048462e-02,8.331843577202867335e-03,4.177355291704394798e-03,-1.388121146772200703e-03,4.003817302224371190e-03,-2.428373135278447028e-03,5.999655605749557724e-03,3.992285464967385827e-06,3.859992560217246314e-03,5.638953408185383431e-03,-5.328503338586673514e-03,-3.560068401064813738e-03,7.748531803926984353e-03,2.674974484287788359e-03,1.393187955200075661e-03,-3.027489997627709137e-03,-4.234446941157602590e-03,-1.869219412960370415e-03,8.473508499449566161e-04,-8.937117621881774873e-03,-9.746405465250081168e-04,-9.666783632451747196e-03,4.873808979906935415e-03,4.889897167562666687e-03,1.159933315086607297e-02,6.872619884004655018e-03,3.804234978471125392e-03,-1.181222364852023378e-03,-4.563595358459369977e-03,-9.020481600711014250e-03,2.444794858943991588e-03,5.670768048448960631e-03,3.116269591186974445e-03,-3.000409931649226838e-03,1.395297562618796590e-03,-2.651013499427909859e-03,4.531176557922475187e-03,-2.095896053002797334e-03,-1.851541147507217979e-03,-7.220971631227852613e-03,4.459692799935518343e-03,1.642772369155868877e-03,-4.469791071524049572e-03,-6.776373391530430071e-03,2.943356661193173400e-03,5.353563213484588716e-04,2.170675142569313942e-03,-1.308021369185048167e-02,-7.191290954662818771e-03,8.402785028200623549e-03,-2.859447335236953852e-03,-9.238453243038232537e-03,1.398384343496662883e-03,-6.341956668049568402e-05,6.639454496797079532e-03,1.185062295650061645e-02,-9.258309877412041399e-04,3.381413011753028018e-03,1.064387107567649855e-03,4.080904480315630258e-04,1.950284287788830789e-03,6.126587963534134602e-03,-1.897478346024492448e-03,-5.052977440711773661e-03,8.055583824449361269e-04,-8.065947242217125224e-03,1.199182976612152285e-03,7.403283311765635963e-03,-5.246262378608346731e-03,-4.984218911331954674e-03,-7.749534426064954998e-04,-1.329077284594278171e-03,-5.613401222596760103e-03,-1.509173469473146928e-03,8.455415840828556934e-03,-1.954595303539748401e-03,3.823799900780024911e-03,-2.012378905121947441e-03,-1.003204002040450490e-04,-1.296880371173159087e-03,-3.102545533540064949e-03,1.767535692164490637e-03,6.212099399482682617e-04,6.165927694488315680e-03,-5.398145370018678918e-03,-8.741122639997827778e-03,-1.438655292257731526e-03,-4.040294532159539355e-03,-2.725304523607672419e-03,7.958797471062265677e-03,-9.030360651727530746e-03,5.083465107953318712e-03,-1.561937154969409488e-03,4.891400864733422629e-04,-3.302386036859434378e-03,3.906092125216830725e-04,2.031249931766327451e-03,-7.887213375726556328e-03,2.765301226791466206e-03,2.016596398508219319e-03,9.408906520146960092e-03,-4.187054439506503190e-03,1.074136250905660499e-02,-2.905877994659671320e-03,-5.926691929750466277e-03,-2.253218946189764534e-03,1.647097377369497991e-03,4.449553432768912908e-03,4.440244071255328336e-04,-2.961712326383926746e-04,1.852642761529329878e-03,-7.409085339319890044e-03,3.893307907436756852e-04,-5.887916243496853740e-03,-6.248507432884705878e-03,8.772562511249658301e-03,-7.975523724873872719e-03,1.678850939953879448e-03,-4.196922811588422834e-03,-3.197289673598694044e-03,2.988696566951840831e-03,-2.566782398176317963e-03,-8.288102939229843522e-03,-1.967030468633346359e-03,-3.356714760127447322e-03,-9.684620406044257583e-04,-7.593522095078518094e-03,-4.576696783322533463e-03,2.847434903779297744e-03,7.326220926444017463e-03,-4.113270010052919902e-03,5.647849926677835086e-03,3.334461242587335138e-03,4.573842814686162987e-03,-7.998154
[elided: raw numeric payload of a data file bundled in the archive; rows of comma-separated double-precision values in scientific notation (magnitudes roughly 1e-5 to 1e-2), wrapped mid-number by the dump, with no recoverable prose or code content]
301070575447651e-03,1.337369778215777294e-02,-4.955150299039360728e-04,-1.092667478757945558e-02,-5.476791569923224884e-03,5.137386873347699577e-03,1.446624347751183513e-04,-4.030721542985840448e-03,7.485872656758208767e-03,1.053689136769490496e-03,-2.353276364913080287e-03,-2.667788868486599841e-03,1.297630673786510664e-03,6.802305588612778771e-04,-2.318451912554290832e-03,2.861784319524894853e-03,3.893604660637555673e-04,-3.045197670792201220e-03,-3.614186537523838508e-03,-6.783486787105425138e-03,1.264504657472384150e-02,1.295693485079704157e-03,3.497044245797594885e-03,8.196138451093163205e-03,6.701870439942731947e-03,2.355702710994697028e-03,5.851902075323656138e-03,2.045227732939669182e-03,2.448708214774706661e-03,1.694264828706702015e-03,3.531087406925866295e-03,-1.152211430879740948e-02,-7.426464016219484765e-03,4.649219415878932428e-03,6.154606223327212591e-03,-8.095482073438449097e-03,6.069462426688381076e-05,2.507573842084851855e-03,7.362722816541127611e-03,-4.659311621165734157e-04,-3.248144744872330601e-03,-3.274012572281559895e-04,-3.830830550442307098e-03,1.337264396062738496e-03,2.136121705745292008e-03,-1.071934791406022828e-03,8.038719027544579190e-03,-8.399652276842958787e-03,1.039649225019258130e-03,-1.793147756158536971e-03,-5.497101405749658595e-04,-1.498411516658225374e-03,3.705520089443019739e-03,1.338557531224626989e-03,-1.290445445764459072e-03,2.694710938511856238e-03,-6.449008319969209112e-04,4.765303551872218310e-04,2.330874573599086392e-04,-3.829267827784401702e-03,-2.046233805605602197e-04,4.255398427543720559e-03,-1.114654369939437062e-02,-4.249001789442993261e-03,-4.957746354913194174e-03,-8.593713446322417612e-03,-5.896287717027077309e-04,-4.967329576752602636e-03,-9.770172280830543426e-03,-5.190449738693612980e-03,-1.454848802292640431e-03,3.074956930317695493e-03,-3.922404335115274067e-03,-4.522946790809281685e-03,-1.779655501287874487e-03,-2.950193782301139492e-03,-2.799530955680611089e-04,-7.448826104690516210e-03,1.564369375527048175e-03,-3.052896875304821501e-03,-5.888911629688040571e-03,7.854501724394610113e-03,-3.019698678532991426e-03,8.586829117275865050e-03,1.693094951381951541e-03,4.143661484764434196e-03,8.384962023458612801e-03,-9.686289974013088120e-04,1.580444153922464911e-03,-4.268929459569834745e-03,1.088747647517474514e-02,2.514603467660512116e-03,-1.685131007561876124e-03,8.225132069298330545e-03,-8.406531397228257305e-03,-3.089215936714812243e-03,-1.133610952851008338e-03,-2.372340653759236109e-03,3.264317750457553500e-04,-8.250391237903938017e-04,3.153195469382550319e-03,-5.389295229379508119e-04,1.748403065586788107e-03,2.307997814181212033e-03,-3.327188213645830214e-03,1.952462414283334007e-04,-1.436779589489797478e-03,-8.356479211902165726e-04,7.082006139319437128e-03,-7.091806294992160017e-04,2.374540344691648559e-03,-5.327263490233925987e-03,2.128738642255747094e-03,2.044534698406660824e-03,-9.402429834172409658e-04,3.909889062319307833e-05,-3.045796396513977388e-03,5.728262498865938600e-03,6.541370901244526401e-03,-5.562676674605380144e-03,1.644886735242082412e-03,-2.539765896282845502e-03,2.953819329795236582e-03,-8.450142685988790842e-04,-1.713882881656932479e-03,2.957492733591880860e-04,3.764795924406894815e-03,-8.231038947297372038e-03,-4.279109353045012186e-03,-3.610595571434948702e-03,-9.792556304810179704e-03,-4.287020865706040601e-03,-2.142978697112052355e-03,-8.478235228029998294e-03,1.972719436705394885e-03,4.960924144981399518e-03,-1.165491003965405314e-03,7.662760977072786625e-03,-5.554531809139503368e-03,4.7045118626
10964300e-04,-3.392873067476754462e-03,1.541873089377209871e-03,7.435923956733061363e-03,-1.111185638603705500e-03,6.815451698189441251e-05,-4.025391468666617359e-03,-4.089103915785620696e-04,3.440761509032536254e-03,-4.138839254949458306e-03,1.612716981504689305e-03,1.556380338159381142e-03,1.088626463461508627e-03,-1.934298642882874416e-03,-4.561870555716727397e-04,6.085357371438198347e-03,1.520308831524625577e-03,5.337612789724051170e-03,1.179824351650344411e-02,9.959877358280291946e-04,7.446876157197101966e-05,4.000790349276575272e-03,6.717257791365096632e-03,3.674550794354304942e-03,-8.630363811147054770e-06,3.752951365124083342e-03,-2.132607431527727953e-03,1.012068060232437609e-02,-1.057621604107327872e-03,6.283972904256439340e-03,-7.532002641881477972e-03,1.648177091932053785e-03,-1.042859622964654315e-02,-2.410230674298784962e-03,-3.277619307431590880e-03,-7.518017398236233372e-04,1.002625728324507345e-02,5.751154704443441588e-03,-5.131535415889989550e-03,-4.475689133653183716e-03,-4.149589791580581000e-03,-6.615638086953323739e-04,5.383587429692046275e-03,-1.752003360134967228e-03,9.089888986916722785e-03,-4.110891829770326432e-03,2.396206361444936598e-03,-5.328259868366260420e-05,-1.808139839435069039e-03,2.859863057359364035e-03,-5.587789379435017384e-03,-7.295173537010277674e-03,-1.309408475634109628e-02,-6.476790775667484517e-04,1.329842584577590331e-02,-9.209811949403874784e-04,6.171941265237762542e-03,-3.769072636589290443e-03,-5.527368769115055015e-03,4.600971197423745678e-04,-6.308301733274105164e-03,2.534089215642090342e-03,3.469778237207702450e-03,-3.086197907706170369e-03,3.755356321051686163e-03,-4.800224623276878530e-03,-8.825556327803702611e-03,-4.764316257267757655e-03,-4.850580006242149328e-03,9.063426869503612635e-04,3.660136747019571667e-03,-4.045169975074713181e-03,-3.095754181894826065e-03,-3.078331039167775916e-03,-2.831508592590855668e-04,-4.467531553690591052e-03,1.545715890819900724e-03,-6.288704440543650166e-03,-4.181386866578773627e-03,-3.936317197628846280e-04,-1.347904397636850559e-03,5.152981653968807980e-03,-7.282361718882838023e-03,6.472071480059786441e-03,-1.262141880738125704e-03,7.240370041682374878e-03,-1.679305465117352306e-02,-3.347012756420767073e-03,-5.060382870102231138e-03,3.497029881609041977e-04,2.281071422394995250e-03,2.835587364321162738e-03,4.450004565470821669e-03,7.221229305545997373e-03,5.232046572825020669e-03,-4.800075730178840136e-03,-1.537529087317055215e-03,-6.959241664548879158e-03,-3.277145462209242639e-03,1.543240643075218541e-04,-4.753387029732496390e-03,1.390917397386213323e-03,-4.183806056980688380e-03,-3.962992764241023147e-03,-1.626554142934882137e-03,-1.500253810378681697e-03,1.253778961177138196e-03,5.014485011743794254e-03,-2.452668217371448394e-03,-9.911216302064711822e-04,2.485875399146691631e-03,-5.890475368839038573e-04,-3.479129507873310757e-03,-7.405613503883327550e-03,-5.984470649980372890e-03,-1.280520353512082073e-03,-3.414368001179310646e-03,-9.563162878053469611e-04,-3.453924360449410914e-03,1.845758304321306296e-03,2.818937714415984362e-03,1.640115975657904443e-03 
5.720097688770757818e-03,-3.508586582721697888e-03,4.004383747863940295e-03,5.357517442267228057e-03,6.314522974824108084e-03,1.297426708210023790e-03,-2.154877683175480084e-03,-5.581479611652982904e-03,1.384380043383388404e-03,4.893734438380569103e-03,6.546237725013038210e-03,-1.039960356653388826e-03,3.521518550631374655e-03,-3.227615673302079539e-03,3.426772270972376069e-04,-4.795748474055038131e-03,7.385931457306604570e-04,1.602367920763936825e-03,1.124457471865926932e-04,1.329152882083112989e-04,1.220836630822948665e-03,6.084629952044438329e-03,-5.438277102048282192e-03,-6.252262012532982215e-03,-1.535031845300676746e-03,2.243711260191511524e-04,-4.931122259834351540e-03,2.332238846391537085e-03,-4.278716398050489388e-03,-2.007050061567815756e-03,-5.377966970612420815e-03,-6.812378993475171497e-04,-4.338041447439490786e-03,4.726155139267816142e-03,5.863697130845293981e-03,-1.126758454336105122e-02,-6.310401203028596093e-04,1.037524107074975534e-02,3.607869784160886491e-03,3.096766339359162228e-03,-2.715448183949439962e-03,3.237565950186582168e-03,2.719573652200369535e-03,-4.245836034403671980e-03,-2.848768016943000175e-05,8.374864338527581928e-03,4.705869799249184416e-04,-4.511107405965575703e-03,5.452951473105848053e-03,2.559286440003977290e-03,1.052118925365022562e-03,5.631082358612278689e-03,-1.189668601292887505e-03,3.861487564606282119e-03,-8.493903000330456712e-03,9.605329193797414438e-04,-4.288148679208252448e-03,1.055017866486738804e-03,-7.505483455934297554e-03,-5.820021448467655248e-03,7.762051631638644880e-03,-4.352022672932455080e-03,-7.006285692112909635e-04,8.385822380762420966e-03,8.608139843618589377e-04,1.262893834397534228e-02,2.847019191337252903e-04,4.591374329356189520e-03,5.531346303321116840e-03,-7.246375889487794396e-03,2.528224911755755019e-03,-6.089854436670242080e-03,4.956544444106188683e-03,2.335016411891541795e-03,-6.186339317808470413e-03,5.687929767660916593e-04,-8.720171803221274734e-03,-3.972303535334064349e-03,4.377164870997557293e-03,8.385149050572841649e-04,2.461130572763456630e-03,-4.114320089822052010e-03,-1.809449953620800682e-03,-4.634153501062753043e-04,-9.996672228623888767e-03,2.713944997706912998e-03,-1.203739881170326270e-03,-5.971442424552207175e-03,-1.802764797018184843e-03,1.880756334371022111e-03,2.839906690268947584e-03,-2.163555851273720517e-03,-5.677332667134464203e-03,-1.121014225768963242e-03,-2.062196990094624261e-04,3.627544990741883840e-03,-1.299990480491626001e-02,-1.135873893390308481e-03,-6.887423026255769416e-03,1.258189713934155553e-03,8.382597462642181821e-03,-2.875122108534964790e-03,-4.084104916778634638e-03,6.313731512442252204e-03,3.211702344973145409e-03,1.298935816539467962e-03,-5.233837135431509621e-03,7.016269373392126571e-04,2.982131642476605615e-03,2.166936913468993538e-03,1.027601363911247656e-03,-4.430193168431319911e-03,-4.904275149070875910e-03,-3.444059630494693963e-03,-6.881761760813675209e-03,4.753986198242403372e-03,6.532680635807938761e-04,-6.528941545799833625e-04,-5.635513650589261264e-03,-4.372710738841841145e-03,-2.842379578208746528e-04,1.177629136594483816e-02,-1.653436808113018658e-03,7.093996685654068227e-03,2.951422234991854087e-03,5.046459415466837684e-03,6.048690386578585779e-03,6.798722049953327164e-03,-5.106244986277488733e-03,2.270497475115644970e-03,7.438129038502687721e-04,2.826364693439322937e-03,7.987543220734260457e-03,1.876529324155060694e-03,3.273324310270150916e-03,3.736360696977445578e-03,-2.552586170291964909e-03,-6.599078878725196965e-04,-4.208316842394642897e-03,1.052901027590460
829e-02,5.678696903954363190e-03,-6.918983614060719017e-03,3.600212650729995266e-03,6.701250542181850373e-03,-8.722530963376346619e-03,-8.419884619073258678e-04,3.641044610183831378e-03,-4.428476370937420845e-04,-1.064927303006605284e-03,5.441390845380659515e-03,5.106769018425727676e-03,-3.040950548478589966e-03,2.978181580052609561e-03,2.902308060167165232e-03,2.628223544741798388e-03,-3.469092829726396106e-03,6.799004545157526095e-04,1.802040084412181000e-03,-1.320683270726770225e-03,5.052384493572417354e-03,4.785760306753409146e-03,8.695929266832134324e-03,1.864670412963870478e-03,5.677866213098257953e-03,-6.263182848750829118e-03,1.974491506276202912e-03,-2.062584028828359090e-03,1.179445631218168766e-03,-4.197312852002095529e-03,1.277213986162932264e-03,-4.553100082592007047e-03,3.281548717843816524e-03,2.372911864605551768e-03,-4.757860514664170263e-04,4.995251073500072596e-04,3.417549111912216667e-03,5.948014490156380493e-03,6.454648401753371352e-03,1.522984029052498267e-03,9.288944219695772359e-03,3.449769310453759154e-03,-3.024527493002204109e-03,1.149098463584449607e-02,8.666568310302942557e-03,1.191176522120221057e-02,-5.407500735560891690e-03,-1.593058967909641993e-02,1.265920575600838283e-03,2.227051311993102217e-03,8.316615890398294450e-03,3.590267972059987909e-03,-3.888443236423391822e-03,8.275442304409366678e-04,2.268484347706965516e-03,2.043582208954332086e-03,7.460508742012846592e-04,-2.450242383177766431e-05,-5.551576729099663796e-04,1.044949964500331259e-03,-3.715660784514757814e-03,3.298681031354934196e-03,3.492177308295824588e-03,8.808212564185404284e-03,-4.174242283374580628e-03,-4.454536593486877957e-03,-2.570319880526897218e-03,8.890233325879999191e-03,1.295165035626906188e-03,-5.743859400882984391e-03,2.923095777654347287e-03,1.770009558093142135e-03,7.537410152706505674e-04,3.331173294993950481e-03,-1.022975162237020228e-03,7.793544515833150839e-03,-1.228399846278919582e-03,4.661091140424928649e-03,2.239151402387822270e-03,-3.070734878724431204e-03,2.706839945568984170e-03,-2.087662941842948362e-03,7.617167851691920323e-03,-7.056437771078980653e-03,3.942700269051916534e-03,4.151734647617330771e-04,-1.231155301678891513e-02,-3.514916107987737983e-03,2.317546469084088168e-03,2.801971611901486400e-03,2.577198287567324282e-03,-9.026676939696953034e-03,-2.303665511886878955e-04,4.421687717382601773e-03,1.942375385216826627e-03,-7.410610887862499434e-04,-5.446484474333655387e-03,-5.442157761403782374e-05,-3.546078223172772180e-03,4.100229314404701432e-03,-6.436102082552027683e-03,-7.303087119518590978e-03,-2.614301818023865524e-03,-5.600273565840167486e-03,1.299199541827541181e-03,-4.536089691082922969e-04,2.122137617213053738e-03,-4.665410694479741986e-03,-6.052076569386425579e-04,6.636361453151767755e-04,-8.698350292433734005e-03,3.751187289967008748e-03,-2.227356661596868487e-04,-2.301762124800949254e-03,2.327278763362098258e-03,-6.682316745187212061e-03,-4.665400968052234830e-03,-2.702239519575157906e-04,-9.162695892051389351e-04,5.577538555153229750e-03,4.980529809933879545e-03,1.435680063950743298e-03,2.011514251112156947e-03,-7.082438736195705020e-03,-1.008141241539678174e-03,-1.044225173221646936e-02,-8.718639607637565514e-03,-2.906180768937890319e-03,1.977473982266489926e-03,-3.704936984432841054e-03,-2.254126736941668618e-04,-2.323767967813050641e-03,-1.409796133896059440e-03,1.994668107297862657e-03,-2.789111471078447018e-03,5.403581271336523745e-03,-1.102275040376471697e-03,5.066459598855748564e-03,5.773822036934932612e-04,-2.499112932147865788e-03,-3.637599
052502804662e-03,-1.177017512077990508e-03,-2.863315843533308265e-03,-4.932433570738733140e-03,-1.018120447548247209e-02,5.502909818299371245e-03,-1.333119082763606312e-03,1.770940000144151328e-03,3.693297427880555973e-03,6.014858860420226484e-04,-1.197899530940653304e-03,-1.047315400696608757e-03,6.432655680216343985e-03,-2.359921555450395381e-03,2.229650761430970914e-03,-4.954906930061427420e-03,-3.790996864682285011e-03,-5.335352356932041885e-03,-6.108566526102336344e-03,-6.098571780392202796e-03,-1.002760885792813774e-03,3.340010124500822006e-03,4.010467814497830491e-03,1.993384768255143918e-03,-5.698320256441327149e-03,3.710100227711372791e-03,-1.475214630178423849e-03,-5.554614620540214952e-03,-3.131912167701928016e-03,8.631806686094849884e-03,6.218848536425867178e-03,6.646943925393088322e-03,2.150562510135596486e-03,-9.675516377587481355e-04,-2.510644808125207187e-04,5.153594743439609410e-03,8.926475237974887747e-03,5.605275252035964422e-04,-9.364215995273932232e-04,-7.125952303197983931e-04,1.050824781607884650e-03,-2.500507434711702302e-03,-7.688295746327339711e-03,-4.756722945778964814e-04,-5.707313384049805729e-04,-4.403409040659558675e-03,-1.032229651894712823e-02,-7.766438977208352072e-04,3.108068885232353530e-04,3.908529159413704564e-04,-3.476654667968317406e-03,9.592468015112917690e-03,-5.274502130638756246e-03,-5.876164977602417114e-03,9.476400480960909395e-03,4.250601441194560605e-03,6.699850508926258373e-04,1.019306627856749782e-02,-3.864742100981058588e-03,-4.804050180476965651e-03,7.117647550588697414e-03,-7.563277250239096132e-04,3.380934203650078941e-03,1.257668077691966556e-03,-3.863412965535453335e-03,4.223118155692128124e-03,6.415226991799464172e-03,-1.829500268425957423e-03,6.341975117327341982e-03,-4.886426626977562237e-04,6.682514989395283225e-03,-7.680757485983048805e-03,-3.793159653355942639e-03,4.539071488235298969e-03,1.612583215637925817e-03,5.764158499064558037e-03,-4.529324149319244840e-03,-7.051254825897803995e-03,6.575543225304590622e-03,-9.183610033992958171e-05,5.412044386557997820e-03,-2.481957237976961834e-03,-2.080948005401019475e-03,2.427248091149840619e-03,-9.803875581412036661e-03,-1.074953387215277006e-03,-1.709390660054077343e-04,2.573908956638149958e-03,4.107443751768647777e-04,1.542940773142306016e-02,1.278340759217229926e-04,-1.371398954097545128e-03,-8.276322356075876269e-03,-3.061055851234749923e-04,4.129415063235810680e-03,2.572268081469274419e-03,-6.925258648618441834e-04,-3.326995233022757032e-03,-6.770916424999143604e-03,6.621016178040660741e-03,6.289620453642508213e-03,6.624284478789300122e-03,-3.321342675106257053e-03,1.138831666747012697e-04,1.302779166079306560e-03,3.598150510676470775e-03,-1.099187360252727383e-02,3.487551735816032429e-03,3.944591829443759232e-03,1.752796003569022940e-03,-1.643838857826426542e-03,-6.786362030702698406e-04,3.429085477914426035e-03,8.385571957701986375e-04,8.410506023256639554e-03,6.904423877940529114e-03,2.102875719661016687e-03,2.734273667880894425e-03,-2.223428122255354227e-03,-2.607322119493310671e-03,1.557787787354859843e-03 
2.707856641419746982e-03,3.133512823275860868e-03,1.899047046275285098e-03,-7.773186794028829304e-03,1.322645840878094157e-04,-1.040199349571908211e-02,2.733529769174125167e-03,1.426924233587092148e-03,1.801983343399224911e-03,2.020819272021906390e-03,-2.157780805032815784e-03,-4.844962210561411589e-03,1.780941397523687440e-03,-3.418246534541753527e-03,-2.372437360656224801e-04,-1.998385814727800074e-03,7.418279292888225671e-04,-1.457754723450152556e-03,6.736714441807867618e-03,3.814646943524044906e-04,-3.100963161359318685e-03,-8.702785829403328777e-04,8.246754541002048952e-03,-2.795918761627426592e-03,6.446194087486370287e-03,1.284706376418783555e-03,3.400528957355041480e-03,2.384313359501916835e-03,-4.128660785256284618e-03,-1.316375989962123615e-03,3.456143351132316428e-03,-2.120691870943193502e-03,-3.562078912670606397e-03,1.006458552356750405e-03,3.826171208394816767e-03,1.357655540394306533e-02,-2.860999499424427949e-03,-4.185918679039234841e-03,-2.246171615389768155e-03,-1.381635130230094745e-02,-3.957324863856145382e-03,5.034623945355885810e-03,8.926638174749691040e-03,4.535285471762710505e-03,-3.117878523390803062e-03,1.749216333233185672e-03,-2.874888258588596205e-03,-1.498912573094112995e-03,5.893876256567942389e-03,-9.423377949440633894e-04,2.979614801598147455e-03,4.102900611946021237e-03,8.247227000655995427e-03,-4.810357117326316545e-03,6.623981883815857445e-03,7.459299544904575419e-03,1.615553485888601964e-03,-9.446277257166180597e-04,6.061184135729476112e-03,-9.851368007794073650e-04,-2.844412487506670605e-03,1.095122775147949409e-02,2.925408850764782409e-03,-5.582735695749757414e-03,-1.096151965254935648e-04,-4.449830214010079728e-03,4.984045974170622957e-03,-3.307692378145649006e-03,3.186328941860142819e-03,-5.212322111000950754e-03,4.085437439914327978e-03,-5.448991824545341761e-04,1.277848786409317922e-03,-2.249218841452345612e-03,-1.209983616418889261e-02,-1.234072959327181529e-02,-6.247210014834379821e-03,6.271885657537527511e-04,-1.943402756924541381e-03,-1.063884651119714319e-03,2.466760430527111924e-03,4.850671353464301207e-03,1.600230509576839070e-03,-1.241171483391239130e-02,3.042469803944020633e-03,7.907306433837931955e-03,6.585164814395177285e-03,6.692862083155365349e-04,-6.275616059154161958e-03,6.897917007831275496e-03,1.803266309826439878e-03,2.426251836059714877e-03,9.384863293695807490e-03,9.051505627404701814e-04,3.146299100153376882e-03,-2.716854908147270809e-03,7.581644726459560273e-03,2.445738258908241019e-03,3.199307439666077609e-03,-6.072122345804044363e-03,3.582762662598621990e-03,-5.319649823360299290e-03,4.948366188435015904e-03,9.250548062087342359e-04,-4.482398917967933831e-04,2.324527512269669669e-03,6.035047880550691588e-04,2.655444485157519921e-03,-5.023219504507031412e-03,3.440776017055066117e-03,2.005417277831809432e-03,-2.460829135373752887e-03,-9.251651623627159737e-04,9.821620907508952624e-04,1.220194657860037418e-02,7.917018379886002488e-03,-3.495184244765513551e-03,-2.829999925984301966e-03,4.372107347001461496e-03,1.089336367928146927e-02,-6.230655803162411452e-03,-4.115008090175180551e-03,-3.270714561021093817e-03,2.711288751818995580e-03,3.417931681112026075e-03,-2.263071748876591995e-03,-1.016857209952733935e-03,2.509571191211260738e-03,-2.225440228941992071e-03,5.170328390363504618e-03,-2.114398054811757102e-03,-2.445456419142315839e-03,-2.219470283050277524e-03,3.309088073055767992e-03,7.514024885318552199e-03,-7.050653912362278217e-04,-5.386074107409969183e-03,5.519875189836606112e-03,7.688095366392323424e-03,1.2700025227823007
19e-02,-3.821075572755683325e-03,3.691976411492885364e-03,-2.203098771711368108e-03,7.033567389601242011e-03,-1.248074442940013936e-03,8.420316975943758706e-03,-6.473104018838543930e-03,3.384297672069556343e-03,6.808626095084339622e-03,1.043013490431055762e-02,1.154704201747301043e-03,2.417009515588893539e-03,-4.808692857210361115e-04,1.291269621222850559e-03,8.224956541572637711e-03,4.280556125914896162e-03,-3.297461921768718874e-03,-6.684736936608002243e-03,-1.089723622399316454e-03,7.193336014062268538e-03,-5.425203852990758725e-03,7.634664086682507950e-04,-2.810912648366765704e-03,-2.130331612724131914e-03,4.137877813406400269e-03,-4.608698348737885406e-03,6.287113632556061774e-03,-3.079074298003949429e-03,1.846563283109989946e-03,1.050189117144420016e-02,1.145242154338920598e-02,4.638917751997805386e-03,-1.176483401640376859e-03,-3.557549865625657002e-04,-5.456405309738576043e-03,-8.507294495732534547e-04,-3.200816372935483267e-03,-2.261972998019391636e-03,-7.924036313188963232e-03,1.078036921281346622e-02,-2.847563912885273888e-03,-8.063105977354214152e-03,-7.448313829791487935e-03,3.692488177407152617e-03,4.226915610691755988e-04,1.101441047109150227e-03,-8.444481328759346345e-03,5.302429291094973643e-03,-3.896255481879858876e-03,-2.852035480438240133e-03,-9.022606442600094884e-04,2.051715613984084457e-03,-9.457847190596604166e-04,-4.228736127212654773e-03,-5.986528184406377830e-03,-6.275058059880413153e-03,1.748503157636445773e-03,3.491228202610357611e-03,-3.411409964084084930e-03,4.742612267031779717e-03,4.438470175763304867e-03,5.048079579683569830e-03,5.104754823694938187e-03,3.259500821590238795e-03,2.586053798755487583e-03,-1.758752470568186775e-03,-2.206459815709556783e-03,2.757806183859927950e-04,-3.718102217774784789e-03,-6.205430648277579601e-06,3.457616023687589812e-03,-1.580296868235344645e-03,-8.720878542843096479e-03,7.567555639836337673e-03,1.903296544279732246e-03,-3.406791877760637337e-03,1.481013601261142428e-03,5.348186149727586665e-03,-3.163835310643077492e-03,7.344579706635667032e-03,6.746378789990549246e-04,6.058742114604216515e-03,3.786997790805550230e-03,-4.379734240203532122e-03,-2.829970142769382049e-03,3.996171687249109274e-06,4.747139039330935256e-03,2.150293768653722872e-03,-1.537919181027613044e-03,-4.256420327692557459e-03,-5.856286209614394028e-03,1.825842774212448321e-03,-4.056264075119382791e-04,-4.731008391604615143e-03,3.372943787103508013e-03,-8.205217840685287986e-04,7.763106122469076199e-03,-6.822768801864702564e-04,5.746934538363405488e-03,-3.981067029201236507e-04,7.936408974540180806e-04,-4.267831502273645126e-03,-4.147786160297685376e-03,-7.202901004316023502e-03,-2.262842479331930636e-03,-2.704769538295310766e-03,-1.136084657902258592e-02,-6.792914168142636690e-03,-4.381130944524949146e-03,7.484852232116212924e-04,4.885563941210798421e-03,-3.146795825103433902e-03,-7.369787486410963630e-03,2.300675702337445436e-03,-1.690840066569698199e-03,-9.437873246849966952e-03,3.465114249182757369e-03,-7.330572701030697511e-03,-5.980947917387607199e-03,7.777129244028431823e-04,-1.444590546148244838e-03,-8.393754016845762431e-03,-1.651265542702918046e-03,-3.016146012799976722e-03,3.427758749127978333e-03,-6.038134969467758581e-04,5.526842966578467131e-03,4.082590085235866632e-03,-1.148289711322418106e-03,-1.097370517953387935e-03,-3.165081211223933020e-03,3.055852914553462436e-03,1.576618421385018792e-03,-1.460193766269309327e-03,4.466586816700376418e-04,6.480317994281850624e-03,3.596950407848603214e-03,4.351066156574601163e-03,4.931310781406328045e-03,
-4.206818954297796356e-03,8.361417866411091637e-03,-1.001316103878086830e-03,5.282547065637593735e-03,-2.086462655583662309e-03,2.651927231291046315e-03,-9.712499148183971476e-03,3.480805622991441106e-04,-2.406429907817634727e-03,-1.303157402590482536e-03,4.419859553452533743e-03,1.278210596649456514e-03,2.614214422100134046e-03,-1.620591511869785872e-04,-4.658408016423258753e-03,-1.409309260847364245e-04,2.805382187376582060e-03,-2.665553502163216151e-03,-7.229518833342643471e-04,5.741450858461753445e-03,-7.191556581682483421e-03,4.876974273012472071e-03,-1.485859641572874219e-03,-3.308051388054479396e-03,1.375603287647003637e-03,-1.212981267668210295e-02,-5.076037057236222096e-03,-1.251773952923724946e-02,-1.107475200326772874e-03,5.410231539325877949e-03,-3.055655073592430330e-03,-8.156656600089323694e-03,-5.012476702352251963e-03,7.129608521693572842e-03,6.532923312867834446e-03,-3.883957803885644197e-03,-8.084721741548964169e-03,-2.292000789485828475e-03,1.529564038186618765e-03,-1.779915751873978845e-03,3.290204637229123008e-04,-4.259793346706211049e-03,4.472683776498392362e-03,7.973705331240662705e-03,-5.392531206019259699e-03,-2.857363080388869089e-03,4.237193649690824786e-03,6.599398522402676048e-03,-3.414000929309515993e-03,3.100079180380212445e-03,2.117172106837901871e-03,1.000732170809359549e-03,8.348035705926057804e-04,-8.224828785590087774e-03,-2.003091872056724822e-03,1.123737616922042097e-02,-3.445116755530160699e-04,6.988462549250010897e-03,-4.654482191699701836e-03,4.107043897702584469e-03,-2.259188606062541218e-04,2.415282508248985365e-03,4.532920109827154385e-04,-3.923698769368023052e-03,2.053140303636457296e-03,-3.122148152726775174e-03,8.719117192863423335e-03,1.142819046573621908e-03,4.293601552495670062e-03,2.560801901723254361e-03,-3.331991558836730548e-04,4.765564399944447263e-03,1.519934334685081427e-03,-8.501339598966009938e-04,7.846148008744127655e-03,-1.941013325702339344e-03,-2.447936663277377415e-03,5.544332184363433280e-03,1.160152165822899697e-03,3.591188551810100150e-03,4.735088108761326478e-03,-3.257642751219870752e-04,3.676031760244619605e-03,1.263924638680231135e-03,2.342075865774841028e-03,6.875206641142586964e-03,-2.621048789858215679e-03,3.024666626610518320e-03,-2.175038908038454989e-03,-8.220914271939081669e-03,-2.238522437256666373e-03,-8.567813039139443934e-04,2.510255726154884785e-03,1.199351912562291551e-03,-2.020872571927521347e-03,-7.622661822145345269e-04,6.653451593156348154e-03,-1.191362007135280190e-02,1.038545478965070606e-03,2.502619086692320621e-03,1.409670677008510254e-03,2.980101184934785667e-04,-2.347327984093706878e-03,4.430405398816717605e-03,-1.330148754070714688e-03,3.300196480212480056e-03,-2.949866949778537600e-04,-3.179583903404087364e-03,5.415755452506774634e-03,-5.393229346323175251e-03,3.572739166674905771e-03,3.677112293369734605e-04,-7.251079788164747940e-05,1.593001475412003740e-03,2.741452285545013052e-05,-8.908790574643849630e-03,5.215519860727176223e-03,3.714831579817761114e-03,1.217637642445386532e-03,5.752514532746931010e-03,-3.610247453532984194e-03 
8.713106676393029236e-04,-3.955221883738534693e-03,-7.478987025702621763e-03,3.379800008804036571e-03,-1.037137637224094452e-02,-9.631165577837774716e-04,-1.319254554844617039e-02,2.398733445311433676e-03,8.497673514435045636e-03,-8.106115400767039889e-04,-9.736172054160463843e-03,3.559866239041386150e-03,1.941631529323648150e-03,-4.787015536222630034e-03,-1.537015576982334823e-03,-1.290468733932491167e-02,-9.752023818018163911e-03,-2.734892604873998597e-03,-3.581797855496241835e-04,-4.810866960827088479e-03,2.035918568805301747e-03,4.467403470354429015e-03,1.509859632155478686e-03,1.594897456605839578e-03,-4.847902511296599097e-03,-4.598412996531123752e-03,3.796379670686869548e-04,8.582855171584309606e-04,1.347022632207486251e-03,-3.593188987533687401e-03,5.191521356780707131e-04,-1.571531612364771230e-03,4.284436788390547698e-04,1.975227874964831029e-03,-3.210529476027502733e-03,-1.599386118305360467e-03,6.684902239133637750e-03,4.953830693181727508e-03,-6.977507555497156067e-03,-2.089029954761434903e-03,-4.356739007372298182e-03,-1.506252064364623248e-03,-1.995818538803928888e-03,6.011461889095254946e-04,8.057582967833798113e-03,2.107940823802193983e-03,-8.890505147088641549e-03,8.725084069613099277e-03,6.413959734615258708e-03,6.554924429230262611e-03,-1.438243493070496181e-03,-4.584138213806029276e-04,-2.162901361539922625e-03,3.784373530820458499e-03,1.670344163426758276e-03,1.776266159687355873e-03,-8.647029207108929486e-03,-2.907045621866322788e-03,-6.678774453199950827e-03,2.675831796472161270e-03,2.924781020789446115e-03,1.224279356784195496e-04,-6.671709220849609276e-04,-7.544621667393881532e-04,-6.420757212468750086e-03,-5.033165164935939073e-03,5.159260654882416239e-03,1.054020133721910140e-04,8.569353454542296420e-04,-6.888584891350794936e-03,-6.464331808580714780e-03,-5.887903001411282653e-04,-2.730052896217000091e-03,2.413232536467663659e-03,1.230289225830814414e-03,4.418052349183292310e-04,7.984647885454157360e-04,2.499618407202380214e-03,-3.380629275028387230e-03,3.196356285589225469e-03,-2.941696358940518088e-03,-9.257476257264307673e-03,5.639091521369528869e-03,-5.859558749008488866e-03,2.133752459871178942e-03,1.809507347667904794e-03,3.941428656236830794e-03,8.443880916490731359e-03,4.814274767934320631e-03,4.535526407003906066e-03,-8.530010638826520308e-04,-1.346624086454960637e-03,4.066907939440771516e-03,-9.289909820173592944e-03,-1.012204286470519236e-02,7.282664543244392504e-03,3.092995561529804285e-03,-7.096807716570803677e-03,-3.958981773698086806e-03,1.874335688618438940e-03,2.835836273121017988e-03,-6.587135141483921839e-03,-7.065771274917716400e-03,-5.540897896097653902e-04,3.683296202034019255e-03,-3.917417611748170644e-03,-1.664530904586638237e-03,-3.442702320767055887e-03,4.232004132866834870e-03,-3.113512171882089861e-03,-1.864812050919077745e-03,-3.057867865817950458e-03,2.281169208387341229e-03,-1.695424647233265743e-03,-1.832901972161225497e-03,-1.397844723722176455e-03,1.452367760519618616e-03,1.129167824404831398e-02,-5.828568370912401604e-03,2.381126953278352859e-03,-3.896611417505483222e-03,-1.093133365422845928e-02,3.363782224195005065e-03,3.078635263216188567e-03,4.254145584228167555e-03,-6.471187278076600798e-04,4.437621508351573610e-03,2.574533995246423412e-03,6.111331362544337661e-03,8.814421214511898930e-03,4.904121222137381407e-03,2.677652084067872439e-03,-5.347539971445858763e-03,-4.916437011596066185e-03,-7.873383915668902971e-03,6.565981633786270241e-03,3.952704341794606967e-03,5.001543034592096336e-03,9.734463302803938664e-03,-5.04981872
6874310063e-03,1.486865778685875520e-03,-6.070553469989052250e-03,-2.905362491615937938e-04,7.680482471469383260e-03,3.333787752534509048e-04,-2.877685638424926258e-03,-8.225904795238382985e-03,7.961148274739102484e-03,-5.436709372571053508e-03,6.881780250025155629e-03,1.246655739100915178e-02,-7.003731052425930803e-04,-1.924851533966655823e-03,5.710345267904574961e-04,-2.743406688883796244e-03,5.633760129650525151e-04,2.847052331330455349e-03,-7.329860187725455514e-03,-4.880454906682148738e-04,2.774611149442987243e-03,-5.715669397732160836e-04,3.411622404864111859e-03,5.020508483831906746e-03,-8.465837291890199873e-03,-5.355545445462825231e-03,-5.878265562626885758e-03,2.271687017248154392e-03,3.273750424349188893e-04,-8.365497319450179584e-04,-3.859697363222827182e-04,3.787232500143552626e-03,-6.754610401473408639e-03,4.982837584502988922e-03,2.252947858940483779e-03,-4.618562248836230802e-04,-4.915753831077815210e-03,-2.051435532148925944e-03,4.082196759423824782e-03,8.619097778876263008e-03,1.648048356766371701e-03,-4.713716168169845844e-03,-1.583785967225226007e-03,-2.745375549042925693e-03,5.249461615363977193e-03,-1.614883945302192326e-03,-1.352460706004269056e-02,-7.575571213829551849e-03,-1.028029465766963158e-03,-2.133381606091706503e-03,-1.113594051616165030e-02,1.734965372459586598e-03,7.078729698327653266e-04,2.979034984360523410e-03,4.559720911465258049e-03,2.474313799020579855e-03,-2.794690327457579563e-03,-2.102971071762629179e-03,-1.530975942077966376e-03,8.997938288775740763e-04,-9.185457461213049693e-03,-3.721770041294452346e-03,-4.858377770783783465e-04,-8.125028017660945040e-03,1.950958672591329235e-03,-9.629607232974149184e-04,-6.875544638593965828e-03,-8.498609596806863979e-04,-2.895837637219816040e-03,-2.399093389990953739e-03,8.314145872101585608e-03,-2.315097014877848421e-03,3.675598206169215245e-04,-2.862178799079082795e-03,1.002591942737903613e-02,-4.273477192692738853e-03,6.055645406574443508e-03,-4.129416903177880152e-04,-1.252442689730190808e-02,-4.333157690656972393e-03,-3.799288359760280218e-03,-2.268941539928067980e-03,6.761490109534323660e-03,2.753069493007326285e-03,4.505238156597046045e-03,7.180201662557624855e-03,6.144991381579670677e-04,-5.099365994456833522e-03,1.624135794453532224e-04,7.179078192471483391e-03,-6.172757165271581647e-04,1.887477113441648037e-03,6.720600792030111820e-03,-9.592069656615997628e-03,6.271759288291869558e-04,7.674294251337874032e-03,2.936190615437656547e-03,4.725168344313408590e-03,8.017727686945658877e-03,2.713299239598956848e-04,5.776115430862450821e-03,-7.360110226856242562e-03,4.469608328713784405e-03,-7.571644650240780261e-03,9.555720996034515469e-03,-2.118467754004042542e-03,4.237198152548330918e-03,-2.266717422646527709e-03,1.301177829485726783e-03,-3.320560443240329492e-04,-2.323888535030212057e-03,-6.076281613604743838e-04,-5.453878847671781403e-03,8.627896083767993760e-03,-1.306461865953131504e-02,4.484372977033385869e-04,6.863964750436228092e-03,1.112562280930578205e-02,-5.730843025594702794e-03,-4.017261549736422452e-04,-1.806382981812708430e-03,7.659205325438918095e-04,9.844637926542463679e-04,3.912608058227184070e-03,2.002620881832209345e-03,-2.623761562457254011e-03,3.512161211977038108e-03,-6.515131370218417081e-03,-7.384746014258054998e-03,-8.954726413494665032e-04,-6.864122129142170327e-03,3.103427092561158247e-04,-3.862400075503246225e-03,3.734166420134386183e-03,2.554418419081537408e-03,-2.675848002363959924e-03,4.286662806185714651e-03,1.462510146734764295e-02,-1.112216012211426040e-02,1.5381617530945979
88e-04,3.788379484903208871e-03,3.587564623145819351e-03,-9.437387199402465883e-04,-1.259108651182200495e-03,-4.395796242662505966e-03,2.201677282395110742e-03,-5.645619604458259645e-03,3.237325650845406747e-03,-1.260976845906275389e-03,3.866125753808191418e-03,1.003401776067254850e-03,-4.686764936966830025e-03,6.079011634592005269e-05,-2.231031048088286898e-03,7.809701079678495195e-03,1.577588112536417462e-03,5.856067584292537487e-03,-3.234815673086505628e-03,3.381859962753824226e-03,-3.677712248900114576e-03,9.240307001493378676e-04,3.783146342867911831e-03,6.105656790175204018e-03,3.382319745225545720e-03,2.654033111436777861e-03,-2.392753684717583915e-03,4.175270938600894946e-03,-4.899666431148991946e-03,4.814099965859220363e-03,4.752505680869139465e-03,-7.729052515419178836e-04,-1.204343033464245994e-03,-4.153525049066338591e-03,2.153299589994112605e-03,-3.573256400594468970e-03,7.658741222839870197e-04,-3.784899869752838750e-03,-7.448120135565014857e-03,-1.236620216258443872e-02,-1.724555501933626184e-04,5.117981509893163895e-03,6.282992388988720622e-04,2.833207570338411394e-03,-3.148315665642913673e-03,-7.428765127913926347e-04,6.496182259318546322e-03,-9.475003443209956437e-04,-3.356073548510935239e-03,5.068661821700680935e-03,6.264151734308344086e-03,4.692190786680218093e-03,2.958876041805412038e-04,5.199248734397777898e-03,7.986287880464014097e-03,-1.025672846096340775e-02,-2.854036915309280525e-03,-4.273858012437338656e-03,-1.350563214200256644e-03,3.579053966447220004e-03,1.122375763978515840e-04,-9.179680499808722145e-04,-5.788747102381886292e-03,3.556967813267653596e-03,-6.278908262434989250e-03,4.526708702048055271e-04,-4.734106650825661670e-03,-4.601532956547833321e-03,-3.504959325780557678e-03,2.894666793806829463e-03,-5.796355347443977665e-03,-4.056245276054656554e-03,2.488963411244762922e-03,-2.756192110513599330e-03,1.067465194403677785e-02,3.176810795064382784e-03,-1.282041752080866374e-03,1.111704916459996424e-02,4.054656038476666530e-03,3.820560337532830000e-03,1.959667959050392493e-03,6.146968127755968181e-03,-1.207927578602571370e-03,5.108735360842084176e-03,4.325185302138242864e-03,-7.964067722489033899e-03,-8.352746929222491790e-03,-1.039125191027694649e-02,-1.146976578097481632e-03,-1.823784377983316040e-03,1.996465013129768325e-03,-6.571513285944857383e-04,-2.095327018180657790e-04,-9.340532910828548783e-04,-9.973656741128883105e-03,-2.775477360620551368e-03,1.350778095565989762e-03,9.834271632952204870e-03,4.196356623058490262e-03,4.634405461552893639e-04,2.280036136623065333e-03,-3.797519197694193549e-03,3.133793784863237938e-03,8.129590066719273242e-03,3.081747130815392071e-03,3.391697870633536994e-03,-3.526028828672230723e-03,-2.555768457895887802e-03,3.609662997889700586e-03,-5.289496040575778282e-03,-1.785306451283381879e-04,2.922204356005045508e-04,-3.158810730243827389e-03,-3.944223682700145721e-03,-1.066828427833998831e-02,-2.534075715715786766e-03,4.875617430623487014e-03,6.359535393611278481e-03,-2.255577345264262422e-03,-4.427434032141733776e-03,-3.145235103822199233e-03,-8.712465263195480134e-03 
-7.960502451205218530e-03,-2.904390174877401372e-03,5.495450475461673319e-03,5.736838000208894714e-03,2.280505398974912949e-03,-4.736860637089566938e-04,-1.305572117489840929e-02,3.229097635909013598e-03,6.896164114143057185e-04,-7.694730805308716949e-04,-5.682836238748391854e-03,8.533247130783894879e-03,-2.040197993980928919e-03,-3.943198147981009580e-03,-7.532207807766279177e-03,-4.902265257554054741e-03,-8.034968031995656096e-03,-2.132433376008231046e-03,3.139101544361664296e-03,-9.429770235050570666e-04,2.653437621195498062e-03,-2.555214695507735268e-03,1.487308251608912644e-03,-6.235800490482274264e-03,6.378605737524482323e-03,-3.865776604201511760e-03,6.135992168353876682e-03,3.474231320238444637e-03,-5.028545839289982715e-03,-1.113029967835538897e-03,-3.921128397294213440e-03,-6.184215517886075021e-04,-1.794436528999807448e-03,3.128159149653537974e-03,1.965324718779230210e-03,-3.028823506011728544e-04,-4.034637645361060371e-03,5.355580335745945345e-03,-6.096154612495669530e-04,-6.407037002199005710e-03,-3.005012497586373628e-03,3.619782366248597196e-03,-3.863098010926038100e-04,-4.301741290137797019e-03,1.086601364551153213e-02,3.152123722843204294e-03,-4.546657919692953694e-03,-8.071701748297299694e-03,-3.037692942873037715e-03,-8.738055661345008052e-03,-1.153187553782374303e-03,-4.875222033285452755e-03,6.366923617027416167e-03,3.161937122373894410e-04,9.282173242334503438e-05,2.708543561505270120e-03,9.428651546718699525e-03,-2.262297302667278746e-03,7.389626728452232396e-03,2.158396543138584815e-03,3.370517354930429622e-03,4.968314639055365634e-03,2.243400703462650539e-03,-3.902231977980706330e-03,6.877690606596487122e-03,-3.146284679948901496e-03,2.166684482798481633e-03,6.651617622849039131e-03,9.809837704262143465e-03,7.819578524412612933e-03,-9.378483928245113328e-04,-8.969025280985113042e-04,-2.040459548910272813e-03,-4.111184597123133423e-03,-3.178255002688516132e-03,2.684736644177623203e-03,-2.800528365326810949e-03,-6.921392092309420693e-04,5.234291767141797426e-03,5.018324141359244844e-03,-4.592400311020820164e-03,-5.309001336606701538e-04,7.021229143773427333e-04,-7.338274870657227620e-04,1.700946228562430756e-03,7.786977484543953186e-03,-2.978017270097579189e-03,1.380561283380425880e-03,4.454620667999623779e-03,-6.074510558727295746e-04,9.586011008090056587e-03,-9.787395986958722163e-03,-6.887308451523494780e-03,1.982932461946850476e-04,3.038852210985353193e-03,1.474457129675081418e-03,-9.670266819962433950e-04,1.377770440068511529e-03,-9.549677761555616651e-03,-8.216734222500327684e-04,1.169847611583711353e-02,-4.356653624694987841e-03,1.951567359898885839e-03,4.196845644994295216e-03,-1.126121272656358809e-04,-2.551737657611899499e-03,1.425475577328927336e-03,-5.837121030318867879e-03,4.600491646392567031e-03,6.681522471566880665e-03,8.499087105989612599e-05,2.275155040693131216e-03,-1.383506960883747845e-03,9.683096540891236725e-03,-3.770321746453557041e-03,9.288671117817923900e-03,9.920205192882176892e-03,-5.757979798104316451e-04,2.636008285200288747e-03,-2.875682532330888202e-03,7.625790861996340105e-04,6.330501990357660994e-03,7.993073226403779249e-04,3.556139529148317872e-03,2.079278095304895428e-03,2.965101898745263476e-03,4.190515760422265645e-03,4.639428357703816348e-03,6.916124576801493994e-04,-1.488981073355577587e-03,1.882664223042432436e-03,5.592977824861287625e-03,-2.097077909708653195e-05,1.249217006279611762e-03,-2.095178555310135606e-03,2.922133596770441159e-03,-7.718525147314843869e-04,2.673242111604311883e-03,1.743742268015639144e-03,-2.12293060415
7012915e-03,8.593944205795775448e-03,4.326372462013131949e-03,8.637898158963927715e-04,-4.198760783156340291e-03,-8.450063510284657256e-03,-3.126824185144056606e-03,1.507455485215610305e-02,6.608519096934471499e-04,-3.622528892851375158e-03,2.563222104310404047e-03,1.138596209780139840e-03,1.001136395120393562e-03,7.886178067446277277e-04,4.005930945742071890e-03,-6.037123634581892384e-03,3.369092524948727029e-03,9.305827954868712767e-03,-9.306801914877217349e-03,-1.071093276726158526e-02,-1.000299207340751406e-03,5.256583605656368376e-03,-9.762450561659497547e-04,-1.091626466597715815e-03,-7.214852659265525749e-03,1.305343240810043605e-03,3.808465071318640691e-03,4.051332448529341643e-03,-6.726799532395982655e-03,2.651192818159540249e-03,7.208370158675257364e-03,-6.256189813549865170e-03,8.115714931494802734e-03,9.889712834339240213e-03,1.797764987411453689e-03,9.216508685956867689e-03,7.894505459414091850e-04,5.051684955453848496e-04,-1.710373977699408904e-03,-1.770570226430167324e-03,-4.385397721442512581e-03,3.200015867451182455e-04,5.674315057454820503e-03,-1.009080676687079941e-03,-1.663066428657533884e-03,-1.254592745051461879e-02,-2.785085948324776423e-03,9.914860889211065190e-03,4.160195169316460370e-03,1.610271265255246729e-03,-1.639145061836892216e-03,-1.392006463646832618e-03,6.761789381034161509e-03,-3.618291884166180417e-03,5.481463586243438130e-03,3.212310143950778286e-03,-5.327356786397380480e-03,1.880276655691391326e-03,3.878207790501408958e-03,-4.573204584073889065e-03,-9.012312723296419040e-03,3.177937441929297446e-03,-5.100637573457141895e-03,3.237030108164066795e-03,3.335366737472779657e-03,1.130535907570005914e-03,-4.265611886382512409e-03,-3.730058905887318355e-03,8.460256625387142446e-03,-4.340612421352993931e-03,4.586445976171827157e-03,-4.049757797495171646e-03,4.087900121216338396e-03,1.213439821623804983e-03,3.988127109831222407e-03,1.459168217479576068e-04,-5.145222161696704280e-03,-4.865505222337171247e-03,9.891262026950542116e-03,-6.831618597818712958e-04,5.624793834702841245e-04,7.811385395896101175e-03,3.139495064940557882e-04,-5.093248253996125003e-04,-3.307111623995771452e-03,9.087417508026598631e-04,-5.036461544720314751e-03,-8.779121211950910789e-03,-8.444270298202890038e-05,-3.833528625655089912e-03,-6.692418822085734447e-04,1.177226232155670349e-02,3.640593642577380756e-03,6.494307715818816926e-03,-4.422702917399232679e-03,6.060579337906617800e-04,-2.957298526787582003e-03,6.738629213104088515e-03,6.487068123864618997e-04,-2.644148064465181702e-03,-3.322045615812068906e-03,4.129451089801582098e-03,-5.058375353658939311e-03,1.151578076843809938e-02,-6.569073260727458646e-03,6.698394693116370598e-04,1.251505626074471758e-03,-9.584038585656237935e-04,-6.351266716310388985e-03,-1.813527980429358033e-03,2.288180489870760177e-03,-6.748872163312264263e-03,6.891608783249571126e-04,6.174375130472142814e-03,-5.971122676907195809e-03,-4.692223238174811177e-03,-9.178238647173926450e-04,-1.314136386723238799e-02,-1.562852811047509959e-03,-1.254225850107675965e-03,5.118059456483233481e-03,4.607169367546026101e-03,-4.848098406087940522e-03,-5.547138578800029675e-03,-2.364744737210064049e-03,3.642889279205646733e-03,-5.457433055570678516e-03,7.329783736866396331e-03,6.050605297054194309e-03,-4.078374307792941370e-03,-7.139320338807705196e-03,5.056690403345913930e-04,-1.687114224600271299e-03,1.403249971221724214e-04,1.881489834333498852e-03,-8.246608589579630883e-04,4.716523293625753295e-03,-2.675693456291697823e-03,-3.262948609187531039e-03,-6.069911926563942642e-03,
2.289776676745148513e-03,1.000302864186082942e-03,-4.011214595633891093e-03,-4.548865102978514631e-03,-4.328360145509766409e-03,-2.719874150625782409e-03,-1.618044134196519037e-04,-5.261288607883643845e-03,3.044278489227155403e-03,-1.836819610323332189e-03,-1.363387260423487872e-03,-5.478604014267743003e-03,5.962618429590242977e-03,-8.664672042281001615e-03,3.278168982112794876e-03,-1.397734668892011472e-04,-2.069603316555671153e-03,-1.207103725981269367e-02,-1.238462694676735097e-04,6.148753658262483442e-03,3.927057172997712583e-04,1.496222690995894361e-03,5.128905977432894614e-03,5.498683622490951883e-03,-6.083338371318132258e-03,-5.143108198054513079e-04,1.085641863489499384e-02,4.810285379183877955e-03,3.436722057476692370e-03,8.164357482777905017e-03,4.070469115950831572e-03,-6.483192676148078967e-03,8.175535588514548993e-03,-7.405193659720231154e-04,2.442765755874249864e-03,-4.436328117774856507e-03,-3.447749444731764822e-04,2.238157130885854839e-03,6.004054141843719050e-03,7.238013128777641067e-04,-4.773728987022324635e-03,-1.088697866778769001e-03,-6.176452724761294193e-03,6.565983520761265770e-03,4.302604414462383488e-03,1.432512447778833695e-03,-4.309881209588197707e-04,-1.151959046589756674e-02,6.956673649559638739e-03,6.620363263183858156e-03,2.635605409474703342e-03,1.748451089948636688e-03,-2.759007320868405149e-03,5.828562378899237745e-04,-1.351182206846501550e-03,-3.903334342965742668e-03,2.492212591389098659e-04,4.319107079320856288e-03,-4.365157795160554306e-03,-7.447574309549892220e-04,-3.178863274670780132e-03,5.682106494197747097e-03,-5.836694391702465512e-03,2.546118845040888819e-03,-3.685387834597446313e-03,4.208967909534890202e-03,-7.302962049368017568e-04,-8.963020802614487170e-04,3.262208889483043158e-03,-1.491347420421021419e-03,8.510568194376936027e-03,5.396226785701885124e-03,2.280879292286389082e-03,-6.002761580037947915e-03,3.001511791667543696e-03,4.602250208636364699e-03,-1.990437223455552088e-03,-3.072443468482902434e-03,-4.778667213761399538e-03,-1.090822129411009441e-03,8.384281254976762299e-03,8.642214937622780258e-03,-6.525097988079245695e-03,1.604470142224544843e-03,-3.192535090382306491e-03,2.808020054758008505e-03,2.266478313181514298e-03,1.225107665436723711e-02,-4.873018807242086338e-03,-3.424481323326078420e-04,-2.631998896102833840e-03,-5.804461269885014853e-03,2.336186257969910703e-03,-4.889732246874795712e-03,-4.068833907153407078e-03,-5.041700231170061046e-03,8.704702804383752240e-03,-9.202663335538341718e-03,-3.500015365426923947e-03,-9.057765647273505584e-04,-2.275316088296850455e-04,-2.438243555750105454e-05,-6.567695527055473180e-04,1.081545111488987736e-02,6.425412489035532693e-03,7.079780507051494469e-04,-2.689108072149087435e-03,-1.003338705050611747e-03,-6.829994767410711189e-03,-3.389176327126313271e-03,1.296737638444025792e-03,-4.521862050426374638e-03,-9.392098601919549776e-04,-6.454182173600105793e-03,1.675156832528642290e-03,5.773064723155072776e-03,-1.230701940586907510e-02,4.239168168354617763e-04,-3.886827107170778579e-03,2.725573446669378799e-03,1.478359985757671941e-03 
-4.652185106842444268e-03,4.108495735604024944e-03,4.788533265983594724e-03,-3.155262565729746293e-03,5.287502678123406734e-03,5.564077848039816927e-03,-5.000745893857275540e-03,-6.821137408054585684e-04,9.518981383586396297e-03,-3.220585776575437403e-03,9.182415237713920791e-03,2.065578218169780624e-03,2.888407118769182377e-03,-1.977237064104731684e-03,4.269508246123319869e-05,1.436197232537776746e-03,4.975403450132786717e-04,3.561667037609241374e-03,-4.906989673685973816e-03,3.155991557509769604e-03,-3.924555342072413508e-03,8.006496858147313725e-03,6.206017809145921577e-03,-2.007055778213734550e-03,5.561479582345029218e-03,-3.748986587348764402e-03,-8.673262327917716577e-03,-4.427490263622346246e-03,2.735256080164526638e-03,9.812308643500321226e-03,-7.919182468313449089e-03,-1.635619021888672997e-03,6.681382621140286374e-03,-7.448572615690601676e-04,-3.125820976875654227e-03,-1.369470707240239970e-04,5.495211014435860339e-04,-3.309191541404722658e-03,-1.458471096728176496e-03,2.942120427358871273e-03,6.601453400877468555e-03,-1.339716720263128781e-03,4.856596934674800472e-04,3.089402654174958334e-03,-3.390883140293008031e-03,1.400967389382574502e-03,-3.603813643870434815e-03,3.908219171445059641e-03,-6.498596912705659895e-03,6.279797280797997858e-03,-8.088699064483433705e-05,-3.125846711304246239e-03,-5.325612405892522251e-03,-4.407807869254055586e-03,-8.923450881678680890e-03,3.358256485570926351e-03,-5.532148883834310180e-03,4.275151683454999664e-03,-4.903627542443615879e-03,1.718112808012548084e-03,-6.748054231030126311e-03,6.209869908437303486e-03,-3.301973625850075037e-03,-1.748354117038702221e-03,2.321072212026487492e-03,-7.154256500191479712e-03,-3.619619545469720742e-03,3.918922235422451529e-03,-3.272048986354857657e-03,-1.845440377446719987e-03,1.022602837696958908e-02,-1.278315175713649063e-03,6.821873103438282245e-03,-1.546152767237094243e-03,1.311231789143757660e-02,5.898561970509640616e-03,-4.119319071339822697e-03,-4.747150148177948538e-03,-6.781813567906145104e-03,2.570983185235041057e-03,9.770422153849047178e-03,-6.753747276457213074e-03,-3.791957455056767755e-03,-2.129784852559872110e-03,-3.233280350876201626e-03,1.144391496995636947e-03,-5.655927105055239511e-03,1.119345680855874173e-03,-1.802339450883405940e-03,-7.649791881799454171e-03,-1.234563110558019707e-03,-7.473907541713713937e-03,1.698209476025997486e-03,-8.336866624531985223e-03,5.034214780086340539e-03,4.023667083995306239e-03,-5.319090492311965776e-03,-1.153470592287988159e-03,-5.433100691393162075e-03,2.305889838407605263e-03,3.021685110420103265e-03,7.759537453742523100e-03,-2.134762378502426568e-03,6.996180454436911125e-03,4.366297564009397232e-03,3.247922441863854842e-03,-2.067171296169875109e-03,3.695240845751897579e-03,-6.062954050062831224e-03,1.396227169437042623e-03,-4.874335678181729130e-03,-1.208234685646860686e-02,4.147423998991328804e-03,-5.573041590793339529e-03,-4.483947374117087416e-03,1.439477111508574931e-03,7.293692907239451156e-03,-2.416554872977164526e-03,-8.778472229618642514e-03,-4.282046442932234583e-03,2.150689819718359062e-03,-1.095302599609247231e-02,3.088267898209993804e-03,2.603150487697997693e-03,8.296746603810687890e-03,6.239715602176909744e-03,2.242946809952634044e-03,-2.333507089868304060e-03,-2.173015504339651568e-03,-8.385592179046013131e-03,2.190813551554662841e-03,6.307663616903228833e-03,-7.563271538297594750e-04,-3.328856855634877558e-04,3.581097719221294166e-03,6.105556233492445478e-04,-3.010762099325122940e-03,1.483377282369218201e-03,8.408981025602993084e-03,-1.12082
6293959932551e-02,-4.084227130018797114e-03,3.663102219204612472e-03,1.301697371943693476e-02,8.517771649735931226e-04,-6.777026195432077002e-03,1.798974083490459993e-04,7.879307990560528024e-03,-4.217918408285187097e-03,2.202991733204088653e-03,4.904886752528827241e-03,-3.045419847237257078e-04,1.238084777619655706e-02,3.469524183614961215e-03,5.346100300535818785e-03,-7.157218037369871000e-03,9.030379189543833590e-03,2.577051264767087936e-03,3.349545428704999231e-03,-3.141517164793676889e-03,-1.747127427830589544e-03,-5.383548439095988021e-03,7.597071370184556131e-03,2.409705358736894353e-04,-2.333846888330541169e-03,2.217916260346159893e-03,-2.944400203164151963e-03,-4.875220431178294879e-03,-4.043428151433622539e-03,4.997195001607290972e-03,-9.508127393331300805e-04,-1.756996011716096173e-04,5.762537112716954561e-03,3.784665544241319103e-03,2.387170958671364605e-03,1.703184116161658148e-03,6.080598362291286807e-03,1.199115002518198053e-03,-2.352036265081668912e-03,-5.083923051447716487e-03,7.253166957547461805e-04,-3.638095493859847158e-03,-1.136953517542961360e-02,-2.905862646893098925e-03,8.056844647191249015e-03,-8.301122522667837150e-03,6.483485412680154575e-03,-1.123620087191166440e-02,-1.359149146524571590e-02,7.219956980982137132e-03,-1.500093358533773286e-03,5.013269412632195658e-03,5.375895024780926769e-03,-4.602194953978432264e-03,-4.245970346356555368e-03,1.997278163832491744e-03,3.651403934984617187e-03,9.780100460498469123e-03,1.328293101708184765e-03,5.896378832972714104e-03,2.208758151502863433e-04,-2.283906356598857691e-03,6.111065787773940425e-03,-3.140418456034718957e-03,-1.192002480687975050e-03,6.142469456561820695e-03,5.454124185298127835e-03,5.209189722973611020e-03,9.649033083787198319e-04,3.213602500544442998e-03,5.432708025436282923e-03,-6.642936808971626125e-03,-5.879597016357260677e-04,-4.690624613752896604e-03,-9.998740112839035743e-03,6.362115249763156122e-03,6.222606453899439836e-04,-5.809507566535200042e-03,4.260090895254718679e-04,-1.158308058960620494e-03,-1.956562791370318900e-03,2.133755511988515586e-04,-3.485194280556980478e-03,2.652792307040276621e-03,8.814664583544899734e-03,-2.514277736117153002e-03,-1.554350533796882480e-03,6.402502771533428311e-03,-1.676560044288183914e-03,7.125087957632280332e-03,-2.056090669288623655e-03,2.691162429955466177e-03,5.905530565751614808e-03,-1.148202339521604311e-02,-6.894077380307672698e-03,-6.916134735934895486e-03,3.595356088042319601e-03,-8.181072982210138914e-03,1.515554030430488978e-03,-9.361386500245516204e-03,3.373809845249282641e-03,-9.928918605943713727e-03,-3.742761963615253828e-03,3.163964511310574622e-03,8.888774854492248120e-03,7.996778388501480511e-03,2.423878568101707140e-03,-9.636648329290127546e-03,-1.021083801095444554e-02,1.639492375208231878e-03,2.601476336123437596e-04,3.696494325141188100e-03,2.242871285139506204e-03,8.436048600800379743e-03,-2.880337019850741885e-03,2.394213586193477510e-03,4.866426352386155822e-04,-5.876349591319573063e-04,-5.699169417010037202e-03,2.261046721056729578e-03,-2.203788037267380082e-03,-2.515548667598301990e-03,3.092965535438337368e-03,1.898273569026539750e-02,3.377378541654363613e-04,-3.937431942057231619e-03,6.386731479004430397e-03,1.331139835010730762e-03,-3.333465145256017475e-04,-9.080927698999464136e-04,5.261073344443787086e-04,-5.184141116336210380e-03,1.474868097692681040e-03,5.986726567635891789e-03,-3.335678508869181617e-04,-1.987225197147663228e-03,6.137889476542785806e-03,-1.054096950839497789e-02,3.703005213172677851e-04,-7.134580803874523112e-03,
-3.394986338582229538e-03,8.740600040504451634e-04,-6.543357987620880556e-03,2.514397049491741681e-03,-5.782303224893596723e-03,-5.962183187582510528e-04,-1.095772309137114410e-03,7.262981694901133982e-03,1.384078968628387743e-03,4.818470576516984370e-03,1.326439218098737858e-03,1.666140840289535418e-03,1.043298842448405622e-02,3.733490759221211840e-03,2.408782757953942602e-03,1.309479610651398049e-03,-3.310159413730449657e-03,-1.074976526637488536e-02,-2.418741998205999489e-03,7.235361564170500297e-03,4.878020454246951090e-03,9.653554136103059333e-03,-2.765807954040862705e-03,1.093327289506928340e-02,-6.033730967435507742e-03,-5.102148174606963363e-03,-5.585139486419849961e-03,3.070108674534873482e-03,9.424892950312747150e-04,1.440165025411932953e-03,-2.166621490881630568e-03,2.654310106720138914e-04,-4.704335245372988882e-03,7.884648799584718926e-03,-4.206081771799321037e-03,-3.670975770767975852e-03,2.791262535196590254e-03,2.497617663042321829e-04,-4.573829783809604636e-03,-4.908576771421024713e-03,-4.699473213814190330e-03,-3.297300072465079047e-03,-4.282522442088427042e-03,6.065892475180459029e-03,2.274613881420362461e-03,1.530135599222362227e-03,-4.951039753973688132e-03,-8.284150883858747150e-03,-4.989418964524263214e-03,5.216987104841428260e-03,-6.574527203365617897e-03,-5.624602014975246117e-03,6.621419480877193016e-03,5.938201465802388571e-03,3.560737526345148039e-03,2.059143446637270395e-03,-6.063081301269049751e-04,4.012852543263140982e-03,-5.463275826400135898e-03,3.785521988447860912e-03,2.829686536986157457e-03,-8.702236493550574020e-04,1.788892563936735026e-03,7.735305385321872111e-03,1.909619564917304762e-03,-1.331530014923516889e-03,-7.610945492641071203e-03,4.051578103411222417e-03,3.934187724418235048e-03,6.613770533932794397e-03,-7.360822508450979391e-04,-4.380259657766595593e-03,-4.184895439287395630e-03,-6.444407322927827794e-03,-1.145568831168836631e-04,-3.420491637108373333e-03,3.079093506426293023e-03,-7.811987581379658228e-03,2.266477043823477214e-03,-4.372840929059372203e-03,1.018135643287502567e-03,-2.169067457160714883e-03,-3.374986965293849152e-03,-4.957485873792284378e-03,-2.199302680311451472e-03,-2.861088734788378926e-03,3.412862584242550023e-05,7.389803773728115775e-03,5.978399892156388389e-03,4.615795920749296045e-04,-1.888309069765416442e-03,-2.559357214976078123e-03,7.187579909043780718e-03,5.580764132568286420e-03,-1.489975636209991744e-03,-6.081513970737352460e-04,1.494254071499514190e-03,-6.673986945135493981e-03,-1.180403654595864500e-02,6.072042742819722116e-03,-2.604244668951182558e-04,-2.777092749646791359e-03,2.885291335165612175e-03,-3.958504101048616365e-03,1.900882012815569358e-03,1.076997947020102812e-02,-6.797174874245961659e-03,-5.919011273683038969e-03,2.753028199059984887e-03,-2.558965014826811938e-03,9.001151248518112326e-04,2.462418305535523603e-03,3.245252494303524880e-03,-5.915118875923857343e-04,1.110198738643587528e-02,-3.226833910791924217e-03,-1.846749658317685481e-03,6.949997394794229719e-04,3.863739995722220971e-03,-4.168885342143769149e-04,-2.826070654832288625e-03 
-2.809317388235830180e-03,3.020327361312952815e-03,-5.553504794136246252e-03,-1.268639611514325296e-03,2.374068715117417117e-03,6.868562980176916993e-03,-1.827243307256836173e-03,1.011795255813375573e-02,-1.398856852717132119e-03,-2.275221022267741918e-03,-3.739897236858278490e-03,1.157433339571184081e-04,-1.387459514813840430e-02,-3.540171808963497695e-03,4.751243057855688494e-04,-1.760530376563941221e-03,-2.230977599894044903e-03,-3.279701541101460831e-03,6.189339816858815263e-03,5.299236382971902158e-03,-4.742038501865230432e-03,7.336360371823540810e-03,-1.581980574514833251e-03,-2.599530714334568129e-03,-4.011911735408491323e-04,3.229232947833839042e-03,4.225905737146360731e-03,-8.233360099435215167e-04,1.375160345565348960e-03,1.575409412312334069e-03,2.733259481206610185e-04,1.879363025412631198e-03,4.289693581419773243e-03,4.062928183452631822e-05,1.042737578488777891e-03,4.865445777168081985e-03,-1.751001655190544256e-03,-1.412344666039321841e-03,-5.369241479746465068e-03,-2.172084595878070318e-03,-7.875310546950585466e-03,2.165531712295508816e-04,-1.691951159611137373e-03,7.990099651412330833e-03,-3.881459292636792700e-03,6.331161839724954582e-03,-1.004401685717795081e-03,4.162531640455604913e-03,6.807731977344165910e-04,4.899835543370755463e-03,-9.405371417889933638e-03,1.822228662027777606e-03,4.732138675485930528e-03,3.228079743208698161e-03,-6.763470539365815624e-03,5.114972354672571442e-03,8.873453340462062952e-03,5.422670877052116790e-03,1.236588960609603634e-03,8.762392726881687577e-03,-1.643470091143894906e-03,1.195673804644111068e-03,-4.762053709116640873e-03,5.180342466532713609e-03,-9.116521053319833617e-03,-4.956617868063144770e-03,-3.329960403605296184e-03,-4.162220421780633318e-03,4.171007496085962795e-04,-8.428716104173392077e-04,-2.166350411838880319e-03,-3.719622871591033407e-04,3.984809248894203605e-03,-4.039055518270363854e-03,2.835297259383305277e-03,9.750661705805446083e-04,3.847574509978361930e-04,9.082431224989556684e-03,-4.386235426132685175e-03,3.948952450815381050e-04,2.420458322826324598e-03,-1.318128488907603212e-03,-1.559460593266941359e-03,-1.234401003012175306e-02,1.238360799207759416e-03,-1.882336001206304016e-03,-5.449016354282695297e-03,1.146864573186296267e-04,2.002510627122553420e-03,-2.696241568699946772e-03,4.746787653630965900e-03,-1.074929076520598527e-03,-3.563598286775062317e-03,9.382978563254798640e-04,-9.450626388692613797e-05,-5.321786552157397286e-03,-7.046415696292828400e-03,3.647444610160002938e-04,-3.962289774777506823e-03,-3.922862559754892661e-03,-3.529248201424110484e-03,2.745686473405649104e-03,1.558685478741955559e-03,-1.023801450827354492e-02,-1.088507779053922588e-04,6.906319186427714802e-04,5.006763261539264216e-03,-3.948597875276354738e-03,1.326695847903147131e-03,-1.742431671593251945e-03,-6.584389415475343753e-03,1.192762894318598766e-03,8.382735479798498340e-03,-1.479969651003937639e-03,1.045680085617164837e-03,2.679716925054014248e-03,6.541555683895784734e-03,1.061997948505763976e-02,1.536680524844136588e-03,2.266131921248248481e-03,-2.978352382858372692e-03,-4.855667732530927552e-03,-4.009477358367040910e-03,5.580547525430244390e-03,1.862505065876286607e-03,4.499715545744959258e-03,1.659231991710556934e-03,-6.323184390017162586e-03,-2.171299737027172598e-03,-1.540358638481701455e-03,7.385166938750973090e-03,-7.671192600269730912e-03,-1.623516729912137220e-03,2.850308489823331817e-03,5.661144645181463708e-03,1.665772315982341030e-03,4.180664214281948690e-03,2.723150122431278360e-03,4.493593609096807433e-03,-6.60009756338
5166942e-04,2.459722380919243754e-03,1.278235381727866292e-02,4.148051313888576411e-03,3.451290387242225786e-03,1.512010049111340041e-04,-2.512655814963104736e-03,3.526366344202307677e-03,5.313348594320139832e-03,5.148118283624428769e-03,-2.804048551688819097e-05,-3.384461440459092443e-03,-9.357591282142269790e-04,4.744272419229334306e-03,1.554291405533101637e-03,-3.264555726635300255e-03,-4.290819356967265179e-03,7.952956752215262909e-03,5.882604720762289741e-03,-5.694410119160687837e-04,2.736212018549921167e-03,4.453156998912862581e-04,-3.676330264163381063e-03,-1.546081347508736778e-03,3.573826912446290832e-04,-5.343965638914673771e-03,1.113221962969038669e-03,-1.233750161170345076e-02,-7.718897749219014691e-03,1.824833382358076721e-03,-1.399979095141729816e-04,1.063370570154062531e-05,-6.995072083070861353e-04,-1.892628335657841080e-03,-3.717853568146264540e-03,1.887511351026037584e-03,-6.911332464924034450e-04,2.782111553432671958e-03,-5.425515272224662629e-03,-3.132831914507367387e-03,-4.736318685498155413e-03,-1.768667599904232219e-03,-8.699374219232511654e-03,-6.510577798955389725e-03,-4.371162580603334125e-03,4.827873224874305695e-03,3.391702253482453688e-03,4.554186804971120141e-03,9.571249241397386878e-03,-1.283210470313208857e-03,-7.951830120387578821e-04,2.006188234733374614e-04,1.333967958413651098e-03,1.531465405714699766e-03,7.915727080802880886e-03,4.119366435428879758e-03,8.363839921808511116e-03,-3.312917332267044466e-04,-9.404635642008522681e-04,8.556452844434849284e-03,-8.493102076347300647e-03,3.828217398772321665e-03,-4.661682666204977794e-03,6.056360456694939026e-03,5.403955326504409663e-03,3.740523311784480116e-03,-5.047360087052801467e-03,-6.579709845085360189e-04,-8.102439369175190326e-04,3.217055969149830615e-03,-4.757862068681602795e-03,-3.262394520005085924e-03,-6.431381794344333269e-03,-2.590716092699037101e-03,-1.469085510740916929e-02,-6.028147340442199581e-04,-4.760942186054266390e-03,-2.932068205912808094e-04,1.080684105330378909e-03,1.818144184918179347e-03,4.532195700664816244e-03,-4.059268535971262588e-03,-7.253070036814325593e-04,3.504994015623746212e-03,8.694977660433197335e-05,-6.365103367949850703e-03,-1.866860264395851917e-03,6.371209845232967892e-03,6.095388010177325323e-03,-8.505258748366715735e-03,8.539187710604427680e-04,1.397657754945457126e-03,-4.312905069756933860e-04,-5.792739304568425909e-03,7.740269426648773744e-03,1.182057241995749857e-03,1.689684912025167735e-03,2.860560342919996057e-03,7.966921581516482609e-03,2.080255690885755761e-03,-3.806666393749310283e-03,-5.985125950269755239e-03,-5.211396380047211667e-03,-7.180165465856217389e-04,6.956602174668460349e-03,-4.914165961365668349e-03,-5.535826004508249661e-04,6.421010299452827551e-03,-3.185045661992075804e-03,1.870141272941338011e-03,7.028076088879777091e-03,-5.370341143956933877e-03,1.505935645841585419e-03,6.399135184072892585e-03,-1.846277304521544383e-03,-1.493190590712663128e-02,1.646269280383227170e-03,-4.125576509587429268e-03,-6.981773772309320591e-04,9.675263710016620070e-03,-7.862544327595205260e-03,-6.062966178833925461e-03,-3.498038325727360204e-03,2.725899618463245837e-03,-3.013849432530882506e-03,-1.405109304375090325e-03,8.349957594388748156e-03,1.647967421444575600e-03,2.523348631007108888e-03,2.875267576944416214e-03,6.433473430936267564e-05,4.991521024687059320e-03,4.665307219732513916e-03,-7.274277655322520207e-03,-4.686102954664947938e-03,2.588799855984473108e-04,3.500278352222516674e-03,5.976595146992168782e-03,1.007188725964261531e-04,2.029614900501245517e-03,1
.048832774252074496e-02,-5.601158419166132538e-03,1.324916505604108478e-03,-5.832905636523870061e-04,5.078859479755217995e-03,3.473501787814290299e-03,-4.336244069835066064e-03,-6.076220651550981802e-03,-4.794366583055054316e-03,-7.847521167130748140e-03,4.266055954240836022e-03,-3.678245832835005946e-03,-5.704531659698694208e-05,7.716768707749415215e-03,6.970940050002163990e-03,-8.809613597634604124e-03,5.247372823957173471e-03,-3.266160918569092908e-03,4.242732423173323567e-03,8.325176607713669422e-03,5.688316871927938390e-03,5.282570516628734038e-03,4.431460437388180275e-03,-3.306489064036663254e-04,-4.241714849491541074e-03,2.330185531550619735e-03,-7.588331363473430199e-04,-7.767991615225094959e-03,2.031966567400640862e-03,-5.314885752111692683e-04,-7.243008000670782221e-04,1.794266414898378981e-03,-1.491706010072745567e-03,-3.990868309464738403e-04,1.100605127652724922e-03,-6.469231331443930227e-03,7.948221966072930959e-04,2.473858083230419305e-03,4.461519864642878724e-03,-1.183845600095594433e-02,-1.005630877019820027e-03,-4.528134237976277204e-03,3.067977173814848742e-03,1.385982652959141847e-03,-3.132711097212062448e-04,-5.871280373069901999e-04,2.315843619882603736e-03,-6.250093022958812770e-03,-5.418642501197694882e-03,4.832109311824036441e-03,1.298567779889687850e-02,4.007944600785535300e-03,8.095204244687773457e-03,-6.923131097015236025e-03,3.466489348833662592e-03,-4.656120710673060034e-03,9.585640678665250172e-03,1.709399809135396552e-04,4.682443501542208697e-04,3.631573568440719327e-03,1.866410556151395540e-03,-1.217715312447726619e-03,-3.448709200113027600e-03,7.397830875933123114e-04,4.638452362887192024e-03,1.651851111736647249e-03,-2.088026350367268823e-03,6.639155589746746980e-03,-2.172671979704495632e-03,2.052693936272076761e-03,3.269111861829846529e-03,-1.101039299454851812e-03,-7.367315111465560021e-03,1.764275737574259092e-03,3.546019284987227908e-03,6.727002608511745488e-03,-1.972191155424161263e-03,7.287027404250289234e-03,8.914812670721684179e-03,7.827629493009392869e-03,-5.049705914165761138e-03,3.666519231161555203e-03,-3.622799736317441812e-03,9.689100194255149262e-03,-1.282980043337359714e-03,-5.227799372715735866e-03,7.689798874506375594e-03,1.795965238086967087e-03,8.375540940314766050e-03,-1.714088951168029652e-03,-5.028365118912738949e-03,4.006751819495286103e-03,7.105579029189066850e-03,-5.253130692072695111e-03,2.546936270976993526e-03,-7.559931203967626412e-03,-1.306782078812819894e-02,-5.546782445488665806e-04,-4.335053077841110464e-03,2.127829799558778913e-03,-3.318412235892918664e-04,-2.923903632070983926e-03,6.842905708983826329e-03,3.395167498901382596e-03,-6.383644782579319873e-03,-4.544536526157752845e-03,1.433701898781867840e-03,-1.110329535334729847e-03,-3.771336838756297622e-03,3.542850860547523597e-03,-5.705342542299933657e-03,-2.434479447202110656e-03,2.911671743204373702e-04,3.606711149418959268e-03,7.475706925447716221e-03,7.219925426214911755e-03,-7.633897614328095070e-03,-3.563579600733900437e-03,-4.191521417246566426e-04,3.433694217777727431e-03,-6.002063848281083370e-04 
-3.467549970242904574e-03,5.028178056742054099e-03,-7.796702221568505269e-04,4.227402376889634848e-03,7.202887931293715587e-04,3.715853302455157138e-03,5.405268748336509478e-03,-2.403773238835463721e-04,-4.284302167392538448e-03,-2.269417681777569432e-03,1.023684364157968589e-02,4.191593644263877892e-03,1.787417119098270221e-03,-3.042784363326292863e-03,-5.843713748608273841e-03,2.569347938496356187e-03,1.079613573150988923e-02,-2.417563973923217858e-03,-8.450232229607097453e-03,1.364451795222815580e-03,1.266368764484631119e-03,-4.226886414172796765e-03,8.778473573847674419e-03,5.910744333752543725e-03,7.617528046834327144e-04,-2.592240410880683639e-03,-2.542141366826837934e-03,-3.386849765069781769e-03,3.438188181549060082e-03,-3.130115169414954875e-03,-2.932896781599957873e-03,3.472186942455585297e-03,-1.311989081005457934e-03,2.975545358446223670e-03,8.182726823611489314e-03,-2.931387807739896812e-03,1.579155233958464549e-02,1.529611196621533239e-03,-3.737952366639316244e-03,1.981168841456413827e-03,-4.893368904806277148e-03,-5.230133863503059512e-03,-3.976417207014743341e-03,-4.869658193763858056e-03,3.776487417053308476e-04,3.464807113118187430e-03,1.636106431442416534e-03,-5.557926716349840049e-03,7.958825644467594843e-03,9.300487423322684058e-04,4.054818124651689976e-03,-5.844375926462068915e-04,-1.327940182050687569e-03,-4.614415528943473406e-03,2.877377874642740151e-03,-2.343360313735621311e-03,3.901705809559016715e-03,-2.131149413763181583e-04,-6.554379009203892791e-03,-2.266819933499624167e-03,-3.713543638830665156e-03,-2.234700720919729350e-03,-6.477236818163744150e-04,8.451267942042926893e-04,2.568303137612369062e-03,-3.829369230275569014e-03,7.343126952294039256e-05,5.623950450162384755e-03,-1.942696281582166650e-04,4.765130951291472912e-03,1.889103266911635638e-03,9.190261052113289295e-03,-3.394551466653141902e-03,1.066512401568921534e-02,-2.943086469759936385e-03,-7.145693010846066937e-03,-1.820757702399813276e-04,1.787443239723840709e-04,4.055863463299822598e-04,2.349442889450390233e-05,-3.444517075315363559e-03,-2.909163262692596608e-03,-6.649876613099800840e-03,-1.166759506084392344e-03,-7.532121417950010450e-04,4.752009904833593215e-03,-4.030476093850768153e-03,9.285391697331075306e-03,1.117268200575397517e-02,-5.133345191360185605e-03,-3.637405351184831021e-03,5.731169435358039917e-03,9.586200041353253737e-03,5.156402312359146395e-03,1.157858755856640970e-02,-5.606643782393551552e-03,1.179315836310669552e-02,-9.614177707624434535e-04,-1.119938947612837384e-03,-9.373252736897218478e-04,-6.054747593693675031e-03,-1.756010072474700843e-03,1.523926829813724399e-04,3.361154062467012777e-03,-7.702409120156900393e-03,-2.886637693998126818e-03,-6.446233135979489819e-03,-7.139676053615444514e-04,4.840492309320056606e-03,3.709732445250720715e-03,2.704984891535399012e-03,-8.559829701352772435e-03,-6.839116940099116239e-05,1.127141778440286747e-03,7.123816254034055773e-03,1.084582762281051599e-03,-1.681746790293519667e-03,1.074845529936080402e-02,-1.137425026757093218e-04,8.356677807616104056e-03,5.954530614738904291e-03,-3.780599993265367463e-03,-8.023415524409870450e-03,-6.592158368675456355e-04,1.562632882025274646e-03,-3.284066106301513689e-04,1.072749718620033066e-03,9.281368814896731438e-05,2.095031395611476974e-03,1.772446600454182473e-03,4.999275072404854754e-03,2.163461067166921534e-03,-5.097712089966994695e-03,-3.609795126473509894e-04,-5.387057484049228434e-03,-2.132555547854351682e-03,-9.742605052388404697e-04,7.293327660436514344e-04,-9.615418968757668411e-03,-1.047207
423704877476e-03,4.518481924405686953e-03,2.749184156079349968e-03,-4.291303487029016835e-04,4.942408300324125509e-03,2.738015159347927541e-03,-2.063348587205366801e-03,5.321533346335842636e-03,1.492294396424393333e-04,5.346031553263932718e-04,-1.133722859878980721e-03,1.751146488647774887e-03,-1.296337160237872720e-03,4.560666253913062129e-04,-5.214026303794258924e-03,5.314552124200870240e-03,-4.514887533388222893e-03,-8.661850156632369853e-04,-3.699877255772332420e-03,-1.792429186920773678e-03,-1.633468154365715639e-03,3.173724910543299776e-03,-4.477640142667027243e-03,-9.250980076443758669e-04,-1.182821823105383030e-02,2.115282499563520338e-03,-2.195198428026676370e-03,3.864676375268724564e-03,-7.734465629098438537e-03,4.476336986643836106e-03,8.484083384450964022e-03,-1.729407604836803497e-04,-5.498586013392877921e-04,7.372493111781019915e-04,-6.101070835489519686e-03,1.589137677420438552e-03,-7.665482819830799974e-03,-3.366915063183070159e-03,3.786199923868591730e-05,1.992393283262184397e-03,5.769858471398251268e-03,-9.353354823117518728e-04,-6.615621212439166867e-04,-2.174742127647782877e-03,2.571861149240745135e-03,2.880058188866103323e-03,2.815839756743457496e-03,7.804912946543451045e-03,5.888447691911844438e-03,-2.483212953331532039e-03,7.583906540855105344e-04,-3.253693575771759473e-03,-6.257432423507616401e-03,3.111902461372508932e-03,-1.684841009377854035e-03,1.279647150010300590e-04,5.110792278951766486e-03,-9.221505749229504459e-03,5.240819919144175561e-03,3.113803520862299095e-03,9.873252282165581595e-03,5.111542889819210671e-04,-8.825612770027508810e-03,5.146087510702599711e-03,-3.777207707633941443e-04,4.554999304057581852e-04,-3.448814433961244676e-03,6.270572863756065235e-03,-7.651028536317394831e-03,-2.192716127560797798e-03,3.176286798743561696e-03,-7.679634917665910561e-03,-4.351596440042545914e-03,5.406986003947650277e-03,-1.958121376509125205e-04,2.885246393602644999e-03,-1.608858991457982456e-03,-6.585718045369789476e-03,-4.444092923962436256e-03,-6.675979338962035811e-03,6.765066862632844848e-04,5.455335468567785659e-03,1.089720534982137334e-03,-3.434823147037770457e-03,-3.052331681322096972e-03,1.273517857402557685e-02,1.407692488778901699e-02,3.461467373239591333e-03,-6.955823875370698538e-03,8.378311557256448716e-03,-8.795148746586895633e-03,-4.685581278309145989e-03,2.368286740886111838e-03,-5.297437579483513584e-03,1.767438082489105472e-03,-5.576368077332020078e-03,7.320315063323137080e-04,-1.899711754848950381e-03,-1.352340567770511052e-04,-7.034918069291497267e-03,1.012160705483798606e-02,2.156964325368865461e-03,-3.663967643334620997e-03,-1.381743296912923448e-03,2.607444832073756787e-04,-9.902070605507138149e-03,-4.803752872274589039e-03,1.056766604802145820e-02,1.907499684277063614e-03,1.589993212165923767e-03,-3.078183837618617792e-05,-3.489218346301621587e-03,-2.693348883519379976e-03,6.166070318540983579e-03,-4.091235163016028117e-03,8.123663537335654638e-04,4.065341593089566267e-03,-2.174709215726145211e-03,2.423258414776802063e-03,1.262721082864951515e-03,3.806290071278767544e-03,-5.455051232290840033e-03,-4.939077280264349105e-03,1.320248540043926890e-03,1.631548361689398726e-03,-4.678513591180822641e-03,6.860617285279807867e-03,2.175652237770202106e-03,-2.044397601408540471e-03,-7.851331462537192163e-03,-4.646212991832639715e-03,-2.378575894046120083e-03,2.134314014058868222e-03,-3.470446157554865499e-05,-2.417623060008606845e-03,7.698671208956655611e-03,8.984362043831704836e-03,4.723810934731705394e-03,5.449118375374368932e-03,-4.96971618378398206
4e-04,2.665417320575886695e-04,-3.664981698495926778e-03,2.411947936034572991e-03,8.294924029631164236e-03,-7.125012683840462567e-03,-1.465740156983018480e-03,-3.054650373105681233e-03,-2.204726273903788707e-03,5.058042614352279559e-03,-3.543485275680768008e-03,4.506031283877391831e-03,-3.507620465709279656e-03,-3.427940410235676139e-03,-3.419804186720169521e-03,7.857248143312765137e-03,-2.231731585085711470e-03,-1.516733415466868734e-03,-1.302131540137710592e-03,4.824764844292683125e-03,-6.679563504552088818e-03,2.964780102552337909e-03,5.906167133662349779e-04,-4.235566036124090335e-03,-2.869488507886949844e-03,-1.262696918273028364e-04,5.341899175083116187e-03,-4.709210523193851222e-03,-1.627042345574706748e-04,-2.277346348471819947e-03,5.684433590014812201e-03,-8.354310591230849209e-03,1.255452432840599734e-03,-1.794167677596333367e-03,3.437527115779374269e-04,-1.004019114117374496e-03,1.881501550295860796e-03,-1.787577688538174005e-03,1.157245387087705961e-03,-3.105426756712557738e-03,-3.107850992841464533e-03,-4.235852823912829475e-03,4.984703903024130999e-04,1.242968164203161596e-03,-4.853868232692472935e-04,-2.419349222022528726e-03,3.529769611307288519e-03,-3.891988612328442596e-03,5.737895896160975090e-03,3.099938878210678657e-03,2.213016451655413358e-02,3.837300395681256943e-03,2.700212835447783882e-03,-6.812761626815959891e-03,5.490006423178490913e-04,8.513352474481589507e-03,6.423526173308749879e-03,-8.474370196782295486e-03,-3.872960427742301592e-03,-8.124007756866814811e-03,-8.489955857614825711e-04,-6.008097241754859760e-05,5.302604509323217974e-03,-5.426196056266111879e-03,-2.574357322162391156e-04,-4.543143259553622006e-03,-9.630484104155763794e-03,2.353547330416330685e-03,6.424859459568739613e-04,-4.328941405016929820e-04,5.249203524054589222e-05,1.212991115176670570e-03,4.117761858589872107e-03,-1.045400162953202816e-02,5.001394728019062721e-03,-5.000953337348490978e-03,7.686512284836267798e-03,6.299954773426007237e-03,-5.748451667350734477e-03,-2.102454033834055532e-04,2.896941890184344306e-03,-5.389267405414573976e-03,8.696644908597582491e-03,-6.087849870289782292e-03,-8.469523150368859360e-03,-3.783924590702808424e-03,-8.772250372351565045e-03,2.182602789443651575e-03,-1.185801134414181580e-04,-5.698950839001979372e-03,-2.849176038490396773e-03,7.421932485126852977e-04,-4.044600082603948613e-03,5.592717478544291521e-03,-4.173314725631469858e-03,-1.070150292488800505e-03,1.164956238761544194e-02,6.726524376670086833e-03,-3.899675429367426178e-03,6.513298466043841370e-03,8.333107321200535975e-03,5.571204964153534094e-04,1.219625889385955730e-02,-4.315841476300983845e-03,-3.166446153564330367e-03,-1.012708348891453933e-02,6.147003765916807250e-03,1.196055403883715049e-02,-1.484415082737835893e-03,9.436929353649359947e-04,-2.148737132799978002e-03,-4.164943519597598398e-03,-2.286715949296718418e-03,-2.667412328563282132e-03,-1.503142884009134837e-03,-2.511474494054558634e-03,-1.262995044640662741e-03,1.039135112308554442e-03,-6.645286708983095137e-04,5.562987336480752255e-03,-5.696975142768087239e-04,-3.140220245903547949e-03 
-7.013715000873442171e-03,-9.089920128413985639e-03,7.688338308974447012e-03,-4.258427040189111980e-03,-3.202901274103028221e-03,1.093373117960003658e-02,7.002701724329325750e-03,-3.320444536251969565e-03,2.323704062240481327e-04,6.328359653618112089e-04,-4.635827321699086123e-03,3.612043774257116431e-03,-5.265612739823958416e-03,8.577787158408998070e-04,3.851374914245972621e-03,-5.240697829177094931e-03,-6.284226979249309640e-04,6.827725217707192318e-03,1.377058906863132259e-02,-5.562664577603309082e-03,-1.903961158664401517e-03,6.050497208997268443e-03,-3.776127330680907027e-04,5.771930219911012780e-03,8.509934750939194756e-03,7.011377512422265271e-04,-2.224784334152376392e-03,3.408498574388013538e-03,-1.981370274470675881e-03,3.730653110021042197e-03,-1.533477426242591890e-03,-5.297658061915417682e-04,2.028910183685292759e-03,-1.489299171764315146e-03,-5.047384641422486551e-03,5.004971410106414526e-04,-4.690474051805709060e-03,-4.106014006313972627e-03,-3.606874955037944736e-05,-9.460961150720887530e-03,-7.407592145413161499e-04,-6.232872207850331109e-03,-5.832008679492217988e-04,-5.096719461997070363e-03,-1.516142635502990780e-03,5.822246492903683686e-03,-1.528787119192203878e-02,-2.984257359267725087e-03,-2.004434911600944721e-03,-1.139272896166862101e-02,-1.870025480289846508e-03,4.919253304418170526e-03,4.892123730726014377e-03,-1.275505199035173454e-02,-6.584879778903702033e-03,3.073472750971408159e-03,-1.277804305886061061e-03,5.248596937701172256e-03,-6.291868332761021802e-03,1.636828284071395424e-03,2.110705812362402599e-04,1.160248800025920086e-03,-5.549185242873131210e-03,-3.443845963982038356e-03,8.911326610512024845e-04,-3.247401678470349367e-03,1.677978641878658623e-03,-6.163526739932515333e-03,7.705722337776608685e-03,9.779614361886924349e-04,1.024237661299528049e-02,-8.785447435177841076e-04,3.696724612788312471e-03,-1.576507212781631286e-03,-3.256632078598213631e-03,4.908972420112972350e-03,-4.578027501102660372e-03,-4.509338124919957079e-03,-1.348750784705113727e-03,3.856468388575621815e-03,5.766461039083947519e-03,-2.252836328038887968e-03,7.201591251576394508e-03,3.923748105156680578e-03,-1.487061159293283016e-03,-3.487208746077885124e-03,1.739855242151978161e-03,1.791068827339123686e-03,-8.298537145009067181e-03,-1.207958006653433091e-03,-2.965110099782054982e-03,-5.569960388115218451e-04,4.930471897504856531e-03,-2.729428502262446366e-03,7.426707011106800649e-03,-5.298811290477449011e-03,-6.161772436336235628e-03,6.421319445630358121e-03,-6.933708818503655710e-03,1.866453128346814636e-03,2.488011946755713799e-03,-8.238919566752229279e-03,6.077609208759743309e-03,4.842347604830888816e-03,-3.828671470112732667e-03,-4.947645952257610953e-03,-4.852194531042990609e-03,2.220175137063011832e-04,-8.856219164231324313e-04,-2.231789871984602661e-03,-7.979023200184712575e-03,-2.019795503933630079e-03,-4.633515057231927040e-03,-7.582956636556179584e-04,1.589086069787992034e-04,2.904175947073636688e-03,5.217940260434959093e-03,5.570979092343492436e-03,1.834220883980961251e-03,-3.207065209128051077e-04,-1.957714214769191302e-03,-6.697742394404228262e-04,-5.244471516625672479e-03,-9.049167402384105524e-03,4.065965880129598052e-03,4.196213801359897563e-03,-4.071291593576588375e-03,7.766395578824037034e-03,-1.544993982829730942e-03,1.386193590191689765e-02,-2.695042981133366749e-03,1.373985670625728871e-03,-4.663102574076988770e-03,-4.963262588576987898e-03,-1.865534782915286276e-03,6.415386622878207698e-03,-6.629734609099328241e-03,-1.055858369789391992e-04,8.707023016639275611e-04,
1.703547493399918712e-03,-1.425707364114678745e-03,4.986591422992764366e-03,7.764256567841975243e-04,-1.369330097324088225e-03,-1.801970809714982307e-03,-1.954857175168626081e-03,-6.321742978724126903e-03,-2.480200976506397124e-03,1.725534760125223971e-03,-7.152008167741594549e-03,-3.468419036066116169e-03,-4.650107372170425232e-03,6.873919124032225499e-03,-4.627769917459458558e-03,-6.131045129640570746e-03,6.132326102587204800e-03,-2.367238210819518334e-03,-3.522398556294640930e-03,-1.233765611077742175e-03,1.870208613495437117e-03,5.586200776216292130e-03,-6.032883162215412279e-03,-1.226610038603018552e-02,2.927514771646833654e-04,-2.293089839635126942e-03,-7.926237466423975003e-03,3.038730114363942698e-03,-2.706907961270870779e-03,-8.470620444010659722e-03,-1.129796005138572305e-02,3.252298377700126204e-03,6.492429044783108727e-03,2.588315589889561130e-03,1.740260761830963315e-03,-6.151246881010971612e-03,-2.316961040576231371e-03,3.376692917836375193e-03,-3.096916426061819844e-03,3.940891408131032628e-03,-4.713228838772219077e-03,-3.772720639091728129e-03,-4.749158931859014283e-03,-1.565984949845405580e-02,-1.461141056512068681e-03,-4.787596822105930940e-03,7.511961035047142431e-03,3.138551239093989240e-03,6.881546118324369070e-03,-3.223257242332515371e-03,8.343958034376170591e-03,-2.097505762437884037e-03,3.973456392695197750e-03,-2.181864483864023006e-03,-2.436693394923529401e-03,8.331069127625673892e-03,1.253172807394779299e-02,-5.065842672332606343e-03,-1.025356884851812535e-03,-5.467227510914475291e-04,2.770233570479745024e-03,6.433784177918587532e-03,5.780026951898311434e-04,2.479446010064808963e-03,-3.695294464103701226e-03,1.722037948057138917e-03,-7.874912066778707101e-03,-6.332071551248545822e-03,-4.504320263862952346e-03,-4.152710438941963067e-03,-5.289742850504335417e-03,-1.498183316372624216e-03,-8.991744997701876709e-05,6.503155830570346704e-04,-3.816180349051069803e-04,-5.120266339859362061e-03,6.797888252794234525e-03,-3.725777869186992775e-04,8.101258737449309804e-04,-2.441591276602928396e-03,4.771893720365179975e-03,1.744514339206868067e-03,-2.791392807225438148e-03,4.392262721760805712e-03,-2.758341242631145301e-03,-2.858926280346962140e-03,4.250253564871740372e-03,-3.242071010319347822e-03,-4.398014146125976340e-03,5.380657876447888718e-03,-4.081059115295154228e-03,-5.291130438637459479e-03,-4.230479326116652660e-04,1.394562045897975540e-03,1.904391514286820665e-03,1.768074783328935041e-03,3.399139644010025236e-03,4.563891140540308165e-03,2.779688858602303182e-03,-3.069199207161114537e-03,4.517892685369160645e-03,-8.706156148162766395e-03,-9.301901030271422094e-03,-2.197244014261246371e-03,-2.519352194674263131e-03,-4.352494023347002437e-03,3.219878786694114602e-04,-1.388067269642263802e-03,-6.002839163769882469e-03,1.128230177585179232e-02,9.121772798118430933e-03,-3.147943468508296555e-04,3.808618058973932234e-03,-3.896382063228339982e-03,-4.969235057657482753e-03,4.218835061289913238e-04,-8.687484900978800773e-03,-6.928013662033233148e-03,3.308554709515885723e-03,8.121431323442373281e-04,-1.168877219469430934e-03,-2.153531151761194085e-03,3.847522560955223166e-03,-1.546786634968901107e-03,8.036927519620736329e-03,-3.570099924214680236e-03,-1.189599017468425312e-03,2.809851370271657014e-03,1.256941841377715681e-03,-2.274088382925980485e-03,-2.798663767921045608e-03,9.516585143922357036e-04,-7.041469512472467093e-03,2.405164074006949440e-03,-4.706307245817543256e-03,1.843507321130945925e-03,-9.775067353371458961e-03,3.827781899017983040e-03,-3.014838044939157583e-03
,1.707433975596784085e-03,3.441126382884561810e-03,-1.898239969855909691e-03,7.992684127082855207e-04,2.300978328411422587e-03,-1.773572588041069133e-03,3.921626420654118414e-03,1.445218403657415720e-03,1.924486468352718362e-03,-5.000365546105923174e-03,-4.554529375936363506e-03,-1.892369500889770715e-04,-2.191233413329640375e-03,-5.328245478971772517e-03,-2.986296439815107340e-03,4.201388582362019196e-03,-6.388748454374202843e-03,-5.127445456671385175e-04,4.646218084368417987e-04,4.776907093243636278e-03,-8.668455052568512301e-03,3.055112612334375498e-03,1.822978405513209438e-03,4.814854673357867976e-04,4.644654277508359838e-03,3.270959042475279303e-03,-2.286808106805981143e-03,-8.386400808503216434e-04,-3.885441421916221039e-03,-7.792684687152280415e-04,1.036997804774853794e-02,-7.852142420292612668e-03,7.474334523005428041e-03,-3.883532776603526582e-03,-9.629722037484536590e-03,-9.159761641766457890e-04,-2.240178238569433527e-03,3.338762670797839477e-03,3.113954347417085492e-03,-1.173919947160398581e-04,3.124001740438527280e-03,7.650337969501094532e-04,-5.354044619401033579e-03,3.999463690904526275e-03,4.381269908351036885e-03,-1.912823208149362674e-03,4.079986661270551735e-04,-6.712735491395879772e-03,2.527823023373254076e-03,-4.325568305399721258e-03,-3.796263142488045042e-03,-2.565035976000492029e-03,-1.243717699912344965e-03,-4.781554706382909123e-03,2.428919833797257670e-03,-2.611343128029716080e-03,9.444010835136958798e-03,-3.349818596151781219e-03,2.199441173773003641e-04,-9.982104713754675479e-04,7.556823280399443087e-03,5.250289798419572208e-04,-1.044637496696209072e-03,-2.585707366715898122e-03,8.742127916764621393e-03,7.676203157236293796e-03,1.172587693652939189e-02,4.143928946871548030e-03,-1.895065485799485305e-03,3.664328748044556969e-03,-1.224515278869503466e-03,-6.192083818104696102e-03,6.408590546357334841e-03,3.301830039119569519e-03,-4.941526584335421100e-03,5.529268806487581955e-03,6.310638179411263982e-03,8.174438330105550116e-04,3.395334935792700994e-03,1.686446643391930960e-03,6.647693361546651650e-03,1.322454699710075982e-02,3.886788402789748131e-03,1.480896904964548169e-03,-3.810197044943078749e-03,1.268294566985661651e-03,-5.605983900531247809e-03,-5.909146791244115470e-04,4.296279873232774138e-05,-5.686286886159272295e-03,8.492995318504625163e-03,-3.046869546109583996e-03,-2.828580511835773227e-04,-2.168625928826671805e-03,8.632220938915958347e-03,5.094973987775178528e-03,3.422458966145162755e-03,-1.595748570750901534e-03,-8.086377446894627702e-03,-3.481372859834045916e-03,2.858342774482268292e-04,1.036138536016447537e-02,-6.018940392907487329e-03,-3.199135653755505113e-03,7.383471973710992653e-04,-1.086022617654003970e-03,-2.496775956632944207e-04,2.725240455215694867e-03,2.641908079066225892e-03,-5.038299518826378116e-03,-3.183496806686923059e-03,-5.042295736365031041e-03,1.688488857349486089e-04,9.891198500728204043e-03,-6.352829883404973246e-03,4.747924270099167579e-03,-2.181338627138284993e-03,1.179366232710387166e-02,-4.502729509881831654e-03,-8.552338014462575059e-04,-2.215591285480079562e-04,-1.110940338910550813e-03 
-6.819629699580004832e-03,2.250457877667602202e-03,7.456764401312558979e-03,-4.156446719921882336e-05,-5.534347255617970592e-03,-2.728868896610046803e-03,8.422430305286986510e-03,-2.161812443730211364e-03,4.339445369493508640e-03,-2.965856029143566579e-04,1.074392706924709277e-02,-1.807426057581179406e-03,-1.886507501104944806e-03,-4.447840116854408742e-03,-3.053978789711800021e-03,5.276075274793766163e-04,-7.063777547286308536e-03,-2.989456676110024448e-03,-2.443829829797047057e-04,7.396754811868770305e-03,4.251893131260939805e-04,7.036763608583874834e-03,-4.234452324484811944e-03,2.140929921487567424e-03,-3.789116292447334937e-03,-2.181903819952547927e-03,2.877552736375584876e-03,-1.794489227226226139e-03,8.701521321339623135e-04,4.827250901285461138e-03,5.333916878748445715e-03,-5.780991912696392503e-03,-4.765986928162350630e-03,-1.255386552615280838e-03,1.889201948790628386e-04,-5.010881941052200807e-04,1.587874016463905729e-03,-9.644439456912274819e-03,-1.963874544495506410e-03,-4.371607779522538977e-03,-4.572144158566699042e-03,1.103681712786837472e-02,-1.716356677645924228e-03,9.780681300802052808e-03,5.834404214394454946e-03,-2.562294090431965852e-03,-9.160379544656101214e-03,1.149442810777321451e-03,8.723577709485145701e-03,1.313576637469549479e-03,2.855467645385796876e-03,-1.155005139795634234e-03,1.823492111983412685e-03,-4.300350078019941600e-06,-3.855754965116765017e-03,1.654980101329008241e-03,-1.530781125910723409e-03,-5.532917171371679911e-03,-7.186214724761103359e-03,-1.155154916152343909e-03,8.772422310282266883e-04,4.728596373700343948e-03,1.045935051546008788e-03,-3.623015781378744523e-03,4.503568259817960009e-03,2.935343499563379359e-03,-1.265611487400007326e-02,-3.109929585512036417e-03,-3.044495333538232117e-03,-7.148896120430263867e-05,-9.096054099880101921e-04,-1.309143200617787946e-02,5.085641476482541910e-03,-4.393978259451523406e-03,3.967750583196716536e-03,-1.984276758620412244e-03,-2.407083062757503153e-03,8.735056484762202325e-03,-4.866405792637271653e-03,8.297842488770514231e-04,-4.097028725145179101e-03,-2.543264598682383124e-03,4.778752911079327259e-03,1.388604825009147467e-03,-7.721585499740202122e-04,1.499734331308309743e-03,8.642407514671523669e-03,-5.210823851840314276e-03,1.172518402792571782e-04,-3.624924701904619351e-03,-5.775577525729867030e-05,2.802270326333670090e-03,-2.605351113228579398e-03,-6.124349774444869630e-03,-6.465305359663346164e-03,6.680191196387234853e-03,1.274260461865370231e-02,-4.056080418607176802e-03,-4.646092773838325120e-03,-1.443618484598563996e-03,-1.727840231001214974e-04,-1.694408606968963484e-03,-3.626115316951887980e-03,-2.366967476314902653e-03,3.133796653159959494e-04,2.421797847235509886e-03,-5.457785382936933835e-04,5.918288020630944066e-04,8.914890873506284633e-03,-1.540724731254792005e-03,-2.813775422096726625e-03,-9.198889664055739240e-03,-4.126326722157997937e-03,-3.550239025676320184e-03,-4.795759162684066004e-03,4.812458150451443331e-03,-4.488360023034783548e-03,-8.763505866322513990e-05,-3.378727968359142644e-03,-4.222236189187360961e-03,2.577503155489256972e-03,3.289303802389281252e-03,3.733827407281734706e-03,-5.966515390149678549e-03,-4.835106230866931080e-03,-3.840642652306398581e-03,-2.850002228593763410e-04,3.834262381159679513e-03,-7.224224312204860499e-03,-1.246298297056567563e-03,-6.595900313316558489e-03,8.665134968096611890e-04,1.398350402464469113e-03,3.796709265416084184e-03,1.763951164029065434e-03,5.641217021762256947e-04,1.969149799859141927e-03,-2.509604573671306554e-03,1.439298325867036142e-03
,6.879119859036777855e-03,6.195078214088412492e-03,-7.044804020700901059e-03,2.056781296701277843e-03,-3.246193836725589071e-03,1.241736630392412761e-03,-1.995986627446428522e-03,-1.285428009680785605e-02,-3.895493411704145040e-03,-7.009650369024253945e-03,5.411637081066384899e-03,-2.280431325289117989e-03,5.302981803167844746e-03,-1.555219790187112783e-04,8.256338516682153877e-03,1.542740151770764963e-02,-2.951040814821844372e-03,3.934250414877775610e-03,-4.030798395618798505e-03,-1.501945129883089066e-03,3.158109072772679465e-03,-2.122965822447188140e-03,2.563673581785873637e-03,-2.556611160216889968e-03,2.887815349082507432e-03,6.339123833834965811e-03,-7.401512169489170810e-03,6.475092584553559204e-03,-3.622465564191566139e-04,-7.395624258070188431e-03,2.675169069503943210e-03,4.409074574106650914e-03,-5.952024251715033352e-04,5.754697468873529870e-03,8.866039243288013905e-03,1.071803338583503110e-03,-8.330811406601686814e-04,-9.733994171914188767e-03,-3.608931751407532234e-03,-2.857958964751488185e-03,-3.177721940074690939e-03,7.004841377510445362e-04,3.263939212971356685e-03,-7.309280712000551863e-03,-3.415809444414730830e-03,-8.277252927744272815e-05,1.006229237912920245e-02,8.684618525460076994e-04,3.180923490551713939e-04,-1.016691028817114442e-02,7.940036037054646820e-03,4.403797445320265132e-03,3.889782478032636093e-03,-2.837769617155927460e-03,-1.745267162501352651e-03,7.102421442509625552e-03,1.159084189259394423e-02,1.559176876563236934e-04,3.963862971075070470e-03,4.988088660485054567e-03,5.283076354200990851e-03,-1.057571359098905981e-03,1.364585740873370222e-03,-1.282284408962135278e-03,6.100182397325880530e-03,-5.441259218047880472e-03,6.990213513450344203e-03,-5.170699425713925004e-03,-4.892163021673199197e-04,4.490423540246936927e-03,2.832249334969106497e-03,2.208172654317689873e-03,-6.560283508232496973e-04,6.190923766775228466e-03,-4.308401820478213046e-03,-1.122300104555522954e-02,-5.759733082976357076e-03,7.074546361756786118e-03,-8.823845435876271140e-04,-6.460374697392921904e-04,-1.719798699561413168e-03,-4.871541099637641085e-03,-6.763657096757422563e-03,1.655990271617651961e-03,5.535340868206209755e-03,-2.067531925285920856e-03,1.598729554098485967e-03,-1.862948458373517866e-03,2.081207893565168755e-03,1.233095864065172301e-03,-1.124379987069418392e-03,-6.677137480667902705e-03,1.801965501633069275e-03,-1.142687457717135330e-03,-5.215025971641063266e-04,1.778914692228849691e-03,-1.160956743195640760e-02,-3.651910923223590424e-03,8.805224999122503865e-03,-1.970294438547356942e-04,-2.809614551283860350e-03,2.225660353837393812e-03,-2.594727577205679318e-03,-1.188377216220563165e-03,-4.369045018662020934e-03,-1.840117177359375938e-03,-1.005836374732033568e-03,8.992070125562091551e-03,-4.368982052718735180e-03,-1.728808355578683245e-02,-2.694401585947403793e-03,-1.073371074190074053e-03,-9.463766682467397023e-03,1.183655168851626555e-02,3.072441506968702669e-03,-2.717250031255712042e-03,-6.026565187704092941e-03,2.508146972959703958e-04,1.190993780953741562e-03,-2.053882870485283611e-03,3.047131100583966301e-03,-8.748300782650250684e-03,-7.441413029934548784e-03,-5.494000270291164853e-04,-1.565495000734916983e-03,-7.201115985072656378e-03,-4.660358717153216014e-03,-5.069504141286706143e-03,4.146903435811874830e-03,3.478363778868882525e-03,-3.087957441410588318e-03,3.064280721974068158e-03,-8.446615393657468801e-03,-7.856969871059198834e-03,-4.686259007462027434e-03,2.190133438746619688e-03,-1.056542214173417224e-03,-2.810199059851616488e-03,3.281344805256121932e-04,-
1.227303714861653280e-03,-2.949289021884412155e-03,-5.897353336657120727e-03,-2.890498181156108803e-03,4.387825515656068771e-03,1.774189632856686995e-03,7.991894292342083955e-03,2.023653004460957035e-03,3.957788721234658212e-03,-1.434294373095665277e-03,3.076862932237074081e-03,9.012416226436672020e-03,3.543586833659929762e-03,-4.225670725692100109e-03,-2.305119398482637950e-03,-6.246173871408925750e-03,-4.450092624394198781e-03,-5.841913775019283786e-03,2.907014694479168266e-04,-4.096214563320962725e-03,-2.790280494876697798e-03,-9.397934205865207625e-04,3.894936667932853442e-03,-8.814291208663978163e-04,-5.240891151377579868e-04,3.336141873569206922e-03,-7.247637759667822921e-03,-1.296302146725015212e-03,-2.007930535574499898e-03,-4.994103602713984361e-03,-3.073343996105263893e-03,4.355722412509953519e-03,-2.470426049077160263e-03,5.688872048307044328e-03,-2.384042251128590424e-03,-1.886334619321876046e-03,-1.074579230294504285e-02,-2.067313481916766785e-03,-6.899953015233688779e-03,-3.238508538653032953e-03,1.747937519929178892e-03,2.337896145899314663e-03,-1.919027003219259727e-03,3.111763558374525031e-03,4.507786899284442657e-03,4.989891248315873711e-03,-2.260630230323673780e-03,9.993591726660680866e-04,6.140805629375443303e-03,1.129194154610877431e-02,2.414472323874753378e-03,-2.584415468229476088e-03,-9.786997245636482046e-04,-3.513767019715852754e-03,6.033659762902196955e-04,8.346880435375274125e-04,-5.282541683935619917e-03,5.427064325632680215e-03,4.918006383264912802e-03,-2.103447663307315800e-03,-2.388721006723658422e-03,2.439096538723433023e-03,3.631975198471906405e-03,2.860024549116679284e-03,-4.840371788663478704e-03,7.232466847276785085e-03,2.643809951607872365e-03,2.872789575190552898e-03,-2.698089937951589205e-04,3.134764584208674012e-03,-2.822053962357042728e-03,-3.477169314999875582e-03,3.358922694067211119e-03,-1.328042447488716419e-03,6.409487622883391737e-03,-1.184447992240740674e-03,-7.217279013001494034e-03,4.846354470880481075e-03,-2.551480079315753685e-03,-3.451551921859127414e-03,8.453808562382755104e-03,-3.039444101185572161e-03,-2.982771460652967541e-03,-3.414123006258040771e-03,-2.037428482070833755e-03,-4.030967783144647106e-03,-2.830065801970528278e-03,2.446049883457732804e-03,-1.299535081880776348e-03,4.351032265670199718e-03,-6.796038057091624736e-03,-2.582435470638098050e-03,1.149268383056938547e-03,-1.721659570508799440e-03,2.049390473760642154e-03,-4.872763626454785189e-03,-2.092879573455135988e-03,3.473780219947369876e-03,7.134385265680772553e-03,-1.306579616622212912e-03,-1.221480233393868299e-03,7.909159220405333549e-03,3.229578835165022193e-04,4.312559525270646087e-03,-4.799033984799161781e-03,2.185527824055449354e-03,-4.143215456471746258e-03,9.312317465230575929e-04,3.197914827715412321e-03,-1.746828130813231307e-03,1.264685050452835470e-03,-3.436584284122120702e-03,3.536162775637829493e-04,-5.115189631947307872e-03,-1.369656571464890078e-03,6.066007112415353932e-03,1.309822018723793549e-03,-3.526243478312538968e-03,1.988681562540295118e-03,4.196845205542902406e-04,-3.872519667055160614e-04,1.006898793865371525e-02 
2.448504311748752899e-03,4.831649089683340108e-03,5.521494943290361675e-03,-1.294341284534649382e-03,-3.833689550125284968e-05,-8.879667944046544872e-03,8.159701535151959312e-03,2.126273318985873878e-03,-1.428877440290223302e-02,-8.629710878446460176e-03,-3.300420392342419927e-03,4.636473631254878561e-03,-4.727895600479750313e-03,-1.368129142374544492e-02,-9.910945214530821924e-04,-8.979339231774350824e-03,-7.512733093209913483e-03,-6.812672428000536573e-03,4.466312552497900138e-04,4.955337072334633705e-03,2.443505182280085019e-03,1.638633260171699088e-03,8.015908453077022489e-03,3.726968772440782986e-03,2.122030686383985249e-03,1.130277409752715543e-03,7.931704778934232578e-03,-2.718513152889256903e-03,-1.676178294583608873e-04,-1.878194934417148606e-03,7.481534284806059566e-03,4.094000637830090376e-03,-3.316900057127058522e-03,-2.589465857643748550e-03,-7.102986830091377081e-03,5.198333406634501538e-03,-5.189844055672362434e-03,1.319123919571045002e-02,3.780834786971571954e-03,-2.729080042058941880e-03,-2.653045312630784251e-03,-2.521531505847749138e-03,4.640340110877676357e-04,-2.859999701177423637e-03,-2.363423458782842792e-03,5.846512247169144416e-03,-4.219621941425664643e-03,-4.011122726291536239e-03,3.214386100402498447e-03,2.118410465312003596e-03,-7.050232472194738198e-03,-3.504950164828388084e-03,-1.243039798413158648e-04,-2.461841417647478002e-03,-5.672383860151186848e-03,-4.331050967413218461e-03,-1.308260767772602975e-05,1.972151523480372010e-03,-5.269600895380715036e-03,9.967407123816449457e-03,-5.928129034273162297e-03,-1.271436928103632496e-03,-6.689586088174925664e-03,-1.652260810046248625e-03,4.784942798628915098e-03,-7.356622766826905954e-03,4.178936958594797030e-04,-4.278780561282162498e-03,1.114926383601465721e-03,-3.363946227744489537e-03,-2.525781723743524693e-03,2.589062581043345863e-03,-3.959905171856029082e-03,9.652408541894847913e-03,-6.415884784678010239e-03,1.705172514255217644e-03,-1.343390069754672712e-04,1.070495984806908198e-03,1.141419050387420045e-02,-9.351232854373564204e-04,6.612433457383528213e-04,3.293009689441359859e-03,-3.756603520251668746e-04,6.942972231509264179e-04,1.297430460363881329e-02,5.211542904086329669e-03,2.662454686906791292e-03,-4.714400111847256818e-03,3.333807650473307425e-03,2.502969794193796085e-03,2.677815950170563380e-03,-1.429732989219130836e-03,4.317082370737395508e-03,2.295275493094399075e-04,1.215589448479647249e-02,-7.409611487557863849e-03,8.074303809914100752e-03,8.156924025172712731e-04,1.840156233281656055e-03,-2.729272270932090599e-03,-6.647070502664240675e-03,7.396729601846991839e-03,-1.168542913865224658e-03,4.523668466142669389e-03,-4.952857433549542367e-03,-4.431025333669342918e-03,4.461684210986283615e-03,-4.258609507892954261e-03,-3.244407731100061500e-03,-3.488865019433455995e-03,-2.684929780449459361e-03,1.043375467532426867e-03,-4.030980753849144546e-03,6.555493321116186503e-04,6.660546077713977876e-04,-3.212333410171957105e-03,2.563528626524481198e-04,-1.839635410549003919e-03,5.800324622235792336e-03,-2.100848346924885299e-03,3.525561842102529174e-03,-3.367764220601893177e-03,3.784553751836010078e-03,-1.198447951889919589e-03,-7.990160500340024244e-03,1.002043024969210584e-02,-1.047140890242630532e-02,-5.996810788715237206e-03,-4.407626950755774015e-03,1.888621669990884003e-03,4.460070703809240170e-04,6.586701029768761602e-03,9.101632254565436564e-03,5.706245726058896146e-04,-4.028425550978564397e-03,-3.128408353038412363e-03,-1.768559617613399935e-03,-4.752809941872799572e-03,6.738053208058213377e-04,5.58086
0275943125975e-03,-4.861331812372800959e-03,3.911403875096789232e-03,2.323592275764201596e-03,4.518113203208562641e-03,-2.313890933041363388e-04,7.491015159569204811e-04,4.501434213181644550e-03,-3.994817853020997921e-03,-8.698629323226232928e-04,4.738409596240835200e-03,-9.317297587117296179e-03,-5.301188828787569036e-03,-2.380956390792936250e-03,-4.508342855083984991e-04,-5.428501799756183660e-03,-4.298271954886816420e-05,5.250692586092705998e-03,4.378809694438919563e-03,1.869328624834299931e-03,1.834089029676961961e-03,-3.293182063008871091e-03,8.375489796068436715e-03,6.667079136174645579e-04,1.576587872495862838e-03,4.359940078358132999e-03,-2.856684649266460433e-03,-1.665764107475700999e-03,4.668331389703865922e-03,-6.400912776467968836e-03,7.085267086184633993e-04,2.622717694576267699e-03,-4.456619081147234970e-03,-1.983433038134941543e-03,8.881752495456892230e-03,-8.057865677192995249e-03,5.339744441365406897e-04,6.456715419307579133e-03,-4.139151830129389475e-04,-1.176167196851766756e-03,5.752936376023300331e-03,1.104937758421718243e-02,2.175814445630811205e-04,-1.143999716122200746e-03,3.585627553052187031e-03,1.589179882766993211e-03,2.442250899608921296e-04,-1.358490776417772914e-04,5.570938640815956827e-03,8.006502850000374316e-04,8.070650967026689153e-04,-2.843725944520160073e-03,-8.284862628543568364e-03,4.325435097357782296e-03,7.941156310864740087e-05,-6.023700437687845707e-04,4.837613345689661080e-03,-2.242555949830849198e-03,-6.668902518519545866e-03,-5.548600333554009474e-06,4.974349093105714416e-04,-4.635074846802362050e-04,-1.292636688737238207e-02,-4.477733710608679772e-03,7.350172830128907106e-04,-2.062588931872167287e-03,-4.264957087062397866e-03,-1.271390496006842563e-02,-1.124229999728734025e-02,2.051857291504317893e-03,-5.175906102901051912e-04,-4.591809250441314903e-03,-3.782183613240872738e-03,-4.000156850798432945e-03,-3.558006510091822356e-03,-1.473928789015884529e-03,1.364998724740915451e-02,-3.205249825570291675e-03,4.110416366687242619e-03,-1.746955475245524046e-03,4.954897901753550077e-03,6.750946683044265536e-04,5.262435803634944384e-03,-1.490117747623524182e-04,9.896055096600393439e-03,-3.752273318687377682e-03,-2.194607748599659153e-03,9.378744259204251435e-03,-1.912219563732412526e-04,3.737540022536336645e-03,-7.094627683381612634e-03,-1.874424351204290228e-03,-4.416164376829090701e-03,-3.128838473986223206e-03,4.252165122933579573e-03,-1.160704667629548076e-02,-1.213241860321327085e-03,1.032183877201517906e-03,1.954901107637922655e-03,8.365269691486619530e-03,4.157543902794956503e-03,-7.092594928240532207e-04,-9.001324096590927748e-04,-5.108067147085653542e-03,2.357655881776899061e-03,-4.334266750416967419e-04,1.148666512471468830e-02,-1.430982999802945859e-03,-8.481818408364078139e-04,4.641744293247420002e-03,-5.142472305542311432e-03,1.085165233408266414e-02,6.592171843185077283e-03,4.215868358832113548e-03,6.669433609980339461e-03,-1.099648120453585853e-03,3.017188126936750682e-03,3.094017105325866531e-04,-2.555617373087081673e-03,-6.126428506386445531e-03,7.325208916339432871e-03,-9.558877369543010687e-04,-3.975332688092455234e-03,-4.382808865088758631e-03,4.900192190225125845e-03,2.362885034392412428e-03,6.819612339230605481e-03,-5.838755464256190075e-03,7.098494250770033542e-03,-4.775295183755283272e-03,2.475655738540463377e-03,-4.875848368898373121e-03,3.616006515167413671e-03,1.852465565959458523e-03,-1.405180058434432437e-03,1.320665083310890685e-03,-1.819011567488541554e-03,8.909056665446675460e-03,1.144111820421586092e-04,-1.18861314011208
4951e-03,8.348320098133408737e-03,-4.109869928259330014e-03,-8.648492440003206425e-03,7.271410148415448187e-03,2.363409328095857249e-03,-2.636482429053269797e-03,-4.785317813038956342e-03,1.049395602722962690e-04,-2.944421067939180341e-03,1.664189566110348215e-03,-1.651941112539902180e-03,-1.225871553279019944e-03,-3.976107877280699032e-03,-3.267522914111224350e-03,-6.130611268226784753e-03,-1.110599473813238085e-03,1.284559586865145894e-03,-5.084890303045215771e-04,2.268775069030645134e-03,3.482642377522558123e-03,-9.051191617425922986e-03,-1.716151481981138338e-03,-3.639332789848417470e-03,6.783020131887089639e-03,-2.931478040874398866e-03,4.352500719100369994e-03,5.480167540902959773e-03,6.733356201853644279e-03,1.169029338287512314e-02,4.950284730609925098e-03,-4.540618666620464296e-03,3.199518366216601035e-03,8.390693109257119477e-04,9.793791149220392594e-03,1.437897578586974168e-03,4.593691428180754531e-04,9.805206295570528460e-03,8.851239230732031900e-04,3.276806005131292689e-03,-1.465734539601845198e-03,-4.769983235546791482e-03,-1.028477323536399271e-02,-4.312669892014333803e-03,6.733296285215737906e-03,-7.402834733948505767e-03,4.126164629912792807e-03,-1.112657130364783607e-02,8.030132367471493060e-03,2.348098052778455503e-03,-2.257904710206874895e-03,-4.836658615316344507e-03,4.433924166078553365e-03,2.755745937519413213e-03,-1.302799672650110517e-04,-2.615326241897722621e-03,-1.497208237453319683e-03,1.531977573340445554e-03,9.838375792086521705e-04,7.415827729220764554e-04,-6.441840016325730590e-03,2.985974240166139760e-03,1.130681342683228022e-02,1.476777110208459220e-05,-5.254600022195687024e-03,-5.486700501212239851e-03,-3.048348152742888094e-04,-2.122468620997385209e-03,1.981792191547160527e-03,-2.157622807202110807e-03,-2.374384199877935166e-03,-7.046528939161671862e-03,-4.099678845854749787e-03,-5.747252076810174110e-03,1.568219374798555726e-03,-2.710377576478690230e-03,2.657708185810925185e-03,1.021938463199389014e-03,1.030201017132922805e-03,-1.106208447815522182e-02,2.365895586040066105e-03,2.964935817045737630e-03,5.251055191875102527e-04,-4.951583872144374801e-04,-9.494840757616417569e-04,2.624110964934851450e-03,8.492957609015474099e-04,7.106875048140305455e-03,4.477225029928578320e-03,-3.670919878972449431e-03,-2.441838467362563575e-03,1.065848241546330996e-03,3.923420088193504095e-03,9.102941561485469691e-04,-4.961596867052932465e-03,4.531976427932174131e-03,8.009448373421393375e-03,6.534292342846256298e-03,2.975823381752288471e-03,5.953588685356813077e-03,-2.389865808325294418e-03,1.249590611383683512e-02,-5.342066930342941056e-03,4.591875557633163206e-03,-3.472119520430109671e-03,-4.548969324971201787e-03,-6.096330188278493122e-03,2.279562223268896595e-03,2.799814090604277415e-03,6.927792674310424663e-03,-3.484539807806453712e-03,8.737240142219116976e-03,1.021233453264500449e-02,2.270714446187588244e-03,-6.142873415404239736e-03,-4.474354671546860486e-03,-1.581968087180829832e-03,-4.934822486608764734e-03,-6.242606603000288960e-03,2.559531996705228640e-03,6.390451915353526600e-04,1.947671375042682018e-03 
1.132687772088484569e-02,2.858685559345399882e-04,9.771220819279152064e-03,-5.039718232248969511e-03,5.063539025476408023e-03,2.151274394388470994e-03,4.145673902682778245e-03,-5.906213079595204127e-03,-9.449379589951498182e-03,-3.590750036621541061e-03,1.210717030489673676e-02,-5.363571696147349972e-03,3.035225745631839594e-03,5.641839646742597032e-03,1.000453719937512923e-03,-5.003997396044250826e-03,-6.014930578900344908e-04,2.942042044851598745e-03,3.507646646424584656e-04,-9.022232431047812716e-04,5.259683422553761319e-03,-7.061268493055683840e-03,-1.361432982937001331e-02,5.378880829110196243e-03,3.740704306371986018e-03,-4.340122473679207046e-03,5.671179571072472014e-04,-3.389656265024012724e-03,-5.888342220776240791e-04,6.418606174300618521e-03,-7.739042627848035510e-03,4.000463229469106152e-03,1.684913436485083226e-03,-1.576552608334459357e-03,2.022533358850874766e-03,-1.950171855892267054e-04,-8.564019291196717590e-05,9.640569698690162892e-03,-5.232710003744233014e-03,-1.365241138376324182e-03,7.282853028908780257e-04,2.711590733291453000e-03,4.708307142535742874e-03,6.997659455576716800e-03,-1.681358274754737826e-03,-6.226303786774945413e-03,2.714766083861809391e-03,1.273843352019703168e-02,-2.614143482364988426e-03,1.240488142078858234e-02,-7.470924998597851426e-03,9.338975072319453605e-03,1.630870355799824475e-03,-4.254660935742069720e-04,4.019719709680956952e-03,-3.298454382633425087e-03,1.227105236397978052e-03,3.613314745098954606e-03,7.219260785566060032e-04,-5.160948127196700493e-03,9.013372301308467723e-04,-1.636987384914150587e-03,-2.484384880004789162e-03,3.366813285098215619e-03,3.569426594503880695e-03,4.652253801287280742e-03,6.702945524954162689e-03,2.920625084185990688e-03,-1.748939624565974378e-03,-6.798044739453422587e-03,-1.139238731380539998e-03,5.116128081117419374e-03,-2.352752503657377849e-04,-5.041313146327049827e-03,-1.080697123468473547e-03,5.388216403663455452e-03,7.563657695738952233e-03,-1.314600938180400524e-04,-1.353856051987514623e-03,-9.691209353208757272e-04,1.191190700777163469e-03,2.862157200253288934e-04,1.087931156277224766e-03,-5.173903413381717407e-03,-1.444388817858523969e-03,4.943766951942436816e-03,3.864846985660045243e-03,5.268873438228320258e-03,8.220016634154376708e-03,-3.547639065466677761e-04,7.553807667732816145e-03,7.366153703940669061e-03,6.922332845681511589e-03,5.873673702049199419e-03,-8.858147905783234684e-04,7.844552968769825940e-03,1.166155850449737165e-02,1.043420444052006388e-02,-7.370860454595114068e-03,1.016612177132935300e-02,5.223402855063040591e-03,-6.117518018191604214e-03,6.705981669068564188e-03,-4.251210912458594851e-03,8.310293537080886556e-03,8.220425598055948468e-03,-6.737068273429958065e-04,-2.286127040122534412e-03,-8.080724926746405161e-04,2.970863461529501051e-03,-3.138611211915307703e-03,-6.997498578918839515e-03,4.635110902726590864e-04,-2.083564074318693983e-03,1.034922476310744721e-03,-1.508146673323268157e-03,-5.036308716106037940e-03,-1.510654490659152225e-05,3.269886009281229491e-03,7.514781007420127988e-04,-2.442956923005154546e-03,-2.164355943943246669e-03,4.915584360045059491e-03,-3.130508340255474132e-03,7.652974151524117793e-03,8.202122271158664157e-03,1.121313759136028515e-02,-7.199738504202078324e-03,-2.047863618876485242e-03,7.345900170853703406e-03,-5.978401374268331979e-03,-1.873073135061515196e-03,-4.061081492538752211e-03,-2.886442587841034444e-03,-5.383278589312512873e-03,5.911884161528171977e-03,5.707701661733032656e-04,2.068790180558188231e-03,-3.345876046063034261e-03,1.86623533095095
5602e-03,-4.258406543284061138e-03,1.877541842873294529e-03,-4.862354108941378643e-03,4.208082800885396338e-03,-3.051223854233228901e-03,2.295923812876843155e-03,-3.570950570250788249e-03,-1.024111929640357174e-03,-6.622791107423802652e-05,-2.509926278753115639e-03,1.055470510224192516e-03,-6.980091498892915594e-03,-5.493407447388549449e-03,1.064010737331216264e-03,-1.066911051689679261e-03,3.664720882456498283e-03,1.977942341980882743e-03,2.221769726015867825e-03,-2.716208419501741841e-03,-2.537995571853692359e-03,1.829522404111965121e-03,-2.293520711063720338e-03,-4.495501987759777454e-03,-4.854322521074746176e-03,6.117604506740240491e-03,6.086510738235726727e-03,-5.475204746741684772e-04,4.928117168271481906e-03,5.500065668316816980e-03,1.070642852640178722e-02,4.682366503368198124e-03,1.972335359221764721e-03,-2.630958180521106391e-03,-3.334446344588973699e-03,-3.785600701036426793e-04,1.697555471781342406e-03,-1.737317092253295073e-05,1.316163168561694345e-02,-6.066341668902753767e-03,-6.959419605374906996e-03,-1.204339396335319426e-04,3.343221553962602739e-03,5.900591230125667823e-03,2.896121319881885935e-03,1.148802818549806261e-03,-3.261775597583359276e-03,1.058614711558029118e-03,2.386380148683116883e-04,5.945182491923268857e-03,-3.528282231853973293e-03,3.547741667631421156e-04,9.458314938188177104e-03,3.759216308853932133e-03,6.563258806579752407e-03,1.269297226979423064e-03,-7.963143617541298264e-03,1.180789021601622848e-03,5.735584486189874119e-03,-2.543518945075128314e-05,-1.617209602716034294e-03,-8.425569586221709137e-03,1.815515583244861880e-03,5.515274792206152805e-03,4.064052288103399933e-03,-4.952768807769761803e-05,5.074128234542263881e-03,-5.717366265491091552e-03,-7.967749770321377531e-03,-4.829571895790090059e-03,-7.157717582482340862e-04,-2.611277359858729902e-04,1.148897875711027200e-03,8.036117267080511092e-03,-2.504200513568187372e-03,3.622274389159069099e-03,-7.861263131545472710e-03,-2.311194057835788598e-03,-5.768980736738827005e-03,-4.421017953826317584e-03,-3.503078817110626304e-03,-2.389905988093876579e-03,8.112582023427319239e-03,-7.007666776321287705e-03,4.455033286435060302e-03,-1.040163573440776833e-02,2.587142293433034122e-04,-4.933574736869163375e-03,-1.042204854521557706e-04,-6.274076303583437861e-03,-4.670317508980276547e-03,-9.864388472316880895e-04,1.054878806115194757e-03,-2.621179143557105288e-04,-4.817759432536549563e-03,-6.812232469153339971e-03,-1.133944728608272468e-03,4.536183756075955371e-03,7.574051042052131789e-03,2.944868005712905201e-03,2.033568977302202194e-03,-9.528383996011267898e-03,2.847586035609181226e-04,-6.712671565370633905e-04,-4.658533281744695574e-03,9.372269956817079473e-03,-8.609085648811187763e-03,-2.060152838347343739e-04,-3.177598675101826666e-03,-9.788413038033414674e-04,3.663413483251548499e-03,-1.017062887356352981e-02,7.876411768730438509e-03,-1.042858483065504287e-03,-7.070247024042864911e-03,-1.571361566640283244e-02,-9.848249201175501063e-04,-5.579039120729677170e-03,-2.454227478612178766e-03,1.417562092482405750e-02,3.006458156740600147e-03,-6.767400665784390097e-03,4.055735695540590817e-03,1.382171712975863642e-03,4.983517801808858699e-03,6.442742010435494049e-03,-2.000739400165849752e-03,6.436369329583320649e-03,-2.930699544762649638e-03,1.789753155935767601e-03,-8.742350506651279168e-04,5.583014020832328776e-03,4.143753540681079789e-03,4.254638744839885881e-04,1.140876917762806159e-03,-7.724529729676513599e-03,7.748940688909856196e-04,-6.780092769761301291e-03,-1.268071431449674207e-02,4.885993399482909142e-
04,-2.818401786851856133e-03,1.242962922736108242e-03,-2.885904923150966594e-03,8.600106530248763023e-03,8.089529528759131782e-03,3.206075259058244111e-03,-4.911627174859946841e-03,-4.916623730417607648e-03,-2.773321344491340001e-03,-2.170948483542141892e-03,-1.262447245458591695e-03,-4.145713600111638857e-03,8.138936775426959883e-03,4.455884799833954510e-03,4.018447926533325401e-03,4.099588772464031472e-03,-4.231278239963079564e-04,-4.857201493953754862e-03,1.543605282334784869e-03,5.323048077917546909e-03,1.216948384393998217e-02,1.009914425424182464e-02,4.054366622508476410e-03,8.423200012073998530e-03,9.539454742921069444e-03,5.493036912904293145e-04,-1.893572174902767450e-03,1.398017278438321975e-03,2.744802607599763217e-03,-7.607469401635338219e-03,9.241270351321614592e-04,6.531468129807652817e-03,1.021667107715180760e-02,6.361668476737101195e-03,-4.456592817093569560e-04,3.384589789856438848e-03,6.612741997389619784e-03,-1.455810554091378555e-03,9.178408815751390795e-03,6.550132947090983072e-03,9.470521925502918851e-04,3.062308775562546173e-03,3.199898330664059567e-03,-8.419832077466866613e-03,-1.386294030246993650e-03,3.675395433688119409e-03,-3.637461537638321269e-03,-6.633844249794536470e-03,-5.668468384652509788e-03,-1.280362317723858240e-03,4.894060281602425830e-03,3.567334043858774815e-03,1.137845500076661029e-03,-2.111556763778252559e-03,5.194775534860417675e-03,-5.937101317385673482e-03,-1.392739205243462806e-02,-3.090095702583965567e-03,6.723013271617898218e-03,-5.792446079753548090e-03,9.178694954534594858e-03,-5.972192510561719937e-03,-5.282607475396969982e-03,-7.355877657252438487e-03,2.188876009849429621e-03,1.154453111045027763e-02,2.769429256243345454e-03,2.080715931479571335e-03,-8.700056305195274717e-03,-4.663592734004923072e-04,2.473240528838087718e-03,-4.263845741268459423e-03,4.157637696158161117e-03,8.739069060974094180e-03,-7.790024280118770621e-03,-3.521099936994899395e-03,7.570300561666798433e-03,-1.147419599758043142e-03,6.840063473067249829e-03,-5.314389011694916828e-03,-4.095436679436954656e-03,4.267531845167468643e-04,-2.071639269049429556e-03,6.810755365394274667e-03,-9.078486231200162508e-04,2.052140769506040959e-04,-3.536963124260321389e-03,5.001381991316365856e-03,3.032574714522011787e-05,3.003788263963263431e-03,-1.811363515075467600e-03,9.789753905058808982e-05,4.509808346922024227e-04,-3.961017070807516371e-04,-4.574630031911045523e-03,-6.358195892299940179e-03,3.130536102626298688e-03,5.087758476682267827e-03,4.840234726043828040e-03,2.933301558221529468e-03,-6.795400493644489053e-04,-1.404514922318703769e-03,7.365835321229792163e-03,-1.830156434251168762e-03,-2.344799299287857360e-03,-1.417014077392687605e-03,9.365104388844275318e-03,1.600442238686549138e-03,-6.948193656097109011e-03,1.819831335130822668e-03,-9.359522939676834935e-03,5.778634915441656882e-03,6.795579407940999066e-03,7.129501230678085034e-03,-2.902153000980524020e-03,2.541650877176459405e-03,-1.825345751840890028e-03,-4.306745988141840766e-03,-1.143902810942103859e-03,3.909538014413891467e-04,4.953010567410650461e-03 
-1.058886601310423878e-02,7.373028047803586831e-03,1.893423570659863312e-03,1.325882038270600680e-03,-4.895742852097067187e-03,5.085578647089011167e-03,-2.499176845797751369e-05,5.892687661229316712e-03,7.280835360124725437e-03,4.230199762825423947e-03,-4.057573702610653768e-03,7.172963251746936747e-03,2.366392803865745818e-03,-9.258007724001973346e-03,2.208356625531901745e-03,-8.541637745208534999e-03,8.661357306181165444e-04,-4.666651643882977277e-03,1.115741938672611989e-02,9.156115109398710187e-04,-1.732292840991621037e-03,-5.923146506426273756e-04,2.145042705676293657e-03,-2.605242512090972726e-03,8.971398565186495047e-03,1.763339965178575353e-03,-2.858400274523783570e-03,-1.930973118737990251e-03,9.549218502729661445e-04,-4.259463954137631542e-03,1.083137784400273196e-02,3.688554380752995677e-03,-4.746931492912151440e-03,4.816769530497714592e-03,-4.995621353640315385e-03,-6.684123620315929196e-04,4.127381806669394755e-03,-4.770377139141404035e-04,1.002672103700083701e-02,-2.118140835185710214e-03,2.314699981322340585e-03,-7.492743067440624759e-03,-1.024305420924607113e-03,-4.427188087047513598e-03,8.962018431371827562e-03,-4.942586943325529350e-03,-2.819721466199690690e-03,-1.028371144841271415e-02,-4.058560327580807217e-03,-2.654671275467198471e-03,-8.838620405128720658e-03,4.221789732850932814e-03,2.550990935844313010e-03,7.154337991532032891e-03,3.842547481423593241e-03,-5.762243753792143514e-03,4.341958335121577986e-03,-3.996249726992237572e-03,4.480547273211162550e-03,-9.889113803276273346e-04,1.213247885112874250e-03,-1.555091732204984083e-02,-8.179267788225570365e-03,1.460531514297439860e-03,6.070667414330181352e-03,-4.248862467033514244e-03,6.612148453039264073e-03,1.103067607318087423e-02,-8.493242000995880267e-03,-4.810596771327569840e-03,3.094582528594376838e-03,-5.553766304489547202e-03,-3.195074750537012535e-04,4.794558064326413099e-04,5.151001067422447503e-03,2.038325206730858680e-03,1.355411369232497124e-02,-8.936888536125689453e-03,-5.131865029133191730e-03,8.067386515357240565e-03,-6.191246373468672730e-03,-9.152496087850785222e-03,2.979104035019503570e-03,-1.698395712514760847e-03,1.064088993642163007e-02,8.557804999543991648e-04,3.589251796916471948e-03,9.495706910423942423e-03,1.198036487194219625e-03,7.916142307379711238e-03,-4.445613708896451258e-03,-8.998728058657741100e-03,2.008675055311311651e-03,-5.550080060192619756e-03,6.569709955652677617e-03,-1.603443147151506010e-03,-2.024602112164393137e-03,-3.963733861934593677e-03,-4.003966700254819704e-03,-1.895332964900920238e-03,-1.197183467219726767e-03,8.375902335391358089e-03,5.186321648733459777e-04,5.201201453121574898e-03,2.742476095143994881e-04,-1.578772669326989871e-03,2.893902397261570274e-03,-4.337519109828131582e-03,-1.350615258558005407e-03,3.266993071058058029e-03,6.968293080240987869e-03,3.195856376618174430e-04,2.122259154883971571e-03,3.786976651718905462e-03,2.172541378551574257e-03,2.675147035039856114e-03,-1.354151788299506019e-03,1.871683441579264761e-03,-3.926499113780763418e-03,4.300809255787024429e-03,-8.962784465126321255e-04,4.750272190735327534e-03,2.023523199926134837e-03,-1.445659450087489160e-03,4.796492623856056514e-03,6.175899748160503487e-04,-6.105774276311017913e-03,-8.574343523260791362e-04,-9.776458314515104955e-03,2.186498382688437624e-03,-6.820814907568081148e-03,-6.077697550936013351e-04,6.759563738955779930e-03,-3.098449839029727287e-03,4.769274376749788155e-04,-1.585561549137944190e-03,2.803381140934544605e-03,-7.049701726269432846e-03,2.313488432782520154e-03,-1.13375578475
0482224e-03,-6.754272583023884780e-03,-4.111565476485946370e-03,-5.165662246690504514e-03,4.088437153733482862e-03,-7.706935649110282713e-03,2.137587832175509746e-03,4.266729749066432500e-03,1.010184148620697081e-02,3.331741261867848629e-04,-7.511218385126308510e-03,-4.024914544573987563e-03,6.872293971778005092e-03,-9.916795882767340947e-03,-3.900852641584567383e-03,1.281417322403621351e-03,3.595655557582376656e-03,-6.729674478034855349e-03,-5.491814187013331931e-04,-1.950167631080167499e-04,5.960976350113082398e-03,2.769602610319577994e-04,-1.249239464644406362e-03,-1.479918189726188741e-03,8.255818483927208906e-03,-3.279906744295340171e-03,-3.249920677286764759e-03,4.666531707072903504e-03,-7.641908842219857648e-04,-6.964010782442833260e-03,-4.677130114084002491e-03,-3.982447954425248106e-03,1.958936319550128047e-04,1.423863477707345074e-03,5.399164020669987990e-03,-8.673062502483829331e-03,-4.174552390782392333e-03,6.288837390169383160e-03,-1.679344483652949003e-03,-4.371324845211786193e-03,4.315553991721100824e-03,1.909564047766998136e-03,-1.125644887873998926e-02,-3.806603026941745693e-03,9.426963164482805857e-03,1.167898809963295820e-04,-3.405833375887804427e-03,3.310865747847093355e-03,-2.968782563451680383e-03,7.122759370163393201e-03,2.749543214220437522e-03,-1.847152633568003308e-03,3.593663187110801658e-04,-3.075606236059901921e-03,2.623629719704018756e-04,1.108840926011019860e-03,8.585993392318899686e-03,1.238381196344860496e-03,-1.115649988091152803e-02,2.221903711329492170e-03,-3.170055755170387776e-03,2.624688287579322573e-04,-6.128718858923726427e-03,4.432066848815578719e-03,-2.811772009048979157e-03,5.179158713554322946e-03,1.067975594643713968e-04,8.848944155925829187e-04,-4.648134836321205811e-04,-2.867968213658672171e-03,8.125183710800003289e-03,7.209657998762849726e-03,-1.033175352238641272e-02,2.696162423583815370e-03,-3.182722976372236341e-03,4.444668813581885598e-03,-3.301144714776816074e-03,4.889733599372111895e-04,6.582280525393335857e-03,-1.646736899793928375e-03,-2.246113177398323130e-03,5.450727198456302436e-03,-5.981906234264605486e-03,-2.130801941490882961e-03,4.576883182078948613e-04,4.495830257275939407e-03,4.194152497658440969e-03,-3.841832202729333207e-04,-3.305668501976063035e-03,-1.872864794213039211e-03,7.724889487190212192e-03,-7.665649695598982134e-03,-9.053652323456550313e-03,3.999315073124353609e-03,4.513456171162018478e-03,5.212032080921023477e-03,1.513118975686331786e-03,-1.280744317702238759e-03,-1.314903056801198806e-03,3.424315996292194401e-03,1.583502440024790996e-03,4.615073619657550884e-03,-1.445808576592462610e-03,-3.100312448933017096e-03,6.362012643395820351e-03,5.338919988112536368e-03,-5.876533954998534003e-03,1.193691258734507973e-03,-9.056506100500060288e-03,6.180188024365731309e-03,2.653105765535262138e-03,-7.069122847205196354e-03,-5.604987631011224686e-03,-1.380910809875235618e-03,-6.299973750420710542e-04,-3.898240956838303767e-03,2.370156087135162799e-03,9.116366921236278981e-04,7.965502314841199291e-03,-9.682667310651253689e-03,-8.194716136916017912e-03,-8.519938377924697300e-04,4.722947758965211768e-03,5.433172820894146930e-03,-6.911170664269768865e-03,1.766768135266106850e-03,5.138051213480859108e-03,-5.412605364268353566e-04,4.679008899442789037e-04,-4.020784965562227732e-03,-1.288731709784554300e-02,2.103651089456360073e-03,4.227166461594682672e-05,-3.716216046625626154e-03,2.209855667171868631e-04,-5.185301747256310448e-03,4.340711971097384499e-03,-4.878374630653766614e-03,6.431435707945180880e-03,3.240714007038521249e-03,
3.411205472189378363e-03,1.739864418828132153e-03,-1.959160692660530650e-03,7.826058705919769140e-03,4.988179106106024711e-04,-1.039181998317034554e-02,-2.471664255888190564e-03,-6.083692504128886992e-03,-4.412761384253128735e-03,-3.722390302889416139e-03,5.196469394179385314e-04,-1.654086838993227681e-03,-1.606494404823977052e-03,3.885915353927172928e-03,5.267782295662967566e-03,-1.289782621471061196e-03,-2.874529715796195987e-03,2.923137357361182010e-03,8.777156347208379603e-03,-4.852717302051265391e-03,4.289574069145500948e-03,-4.473056968586569884e-03,-5.923820490908361268e-04,3.770632921293844489e-03,3.864225083869553441e-03,2.980053879882296666e-03,4.159885714267693921e-03,2.762577463606755851e-03,-9.781746529887054824e-04,6.573795640818680570e-03,8.387530241007181905e-04,-6.214439423981745316e-04,-2.255289708162252094e-03,-9.944103170807234819e-04,-2.762677997944615240e-03,-1.851429395425580006e-03,-7.049622727099151467e-04,3.785633047747799041e-04,1.444832966562888598e-03,-2.449011222150527210e-03,-9.534139343475258504e-04,1.422589255495424268e-02,-7.272169144643925512e-03,-8.760498442104984809e-03,3.689761956711939874e-03,3.200750721655288948e-03,2.901082344755534156e-03,4.589144691112524710e-03,3.719578292333203945e-03,-3.901993630289330578e-03,6.658413504866742341e-03,-8.639647857727139829e-04,-3.188707543105683950e-03,-9.996717752259443555e-04,-9.558099139329364474e-03,-4.949405026395018586e-03,-2.033794289432525049e-03,9.479624036039037719e-04,7.936586701401399449e-03,-9.906740860669116522e-03,1.090832030662672184e-03,6.948218314119855923e-03,-1.759738998872973975e-03,1.620132162271569701e-03,-3.643045670937653663e-03,-4.944058882055352676e-03,-1.755673824547212538e-03,4.074430913200123572e-03,-7.960702085092800917e-03,-8.253480706585500717e-03,4.485098206908605563e-03,-6.700332948024041897e-03,-4.066486002746531533e-03,-5.037149863580475890e-04,-4.420525659801955928e-03,-1.922084843229274285e-03,-5.328348766481823436e-03,-5.781712928873682664e-04,4.024443939544209738e-03,-5.985566755471649972e-03,1.552578726285263038e-03,1.465713569665337904e-03,5.395279080810138315e-03,-7.702772589362662931e-03,4.230050626935512141e-03,2.031103687512227952e-03,-4.304606265863996500e-03,-2.338892033685732556e-03,-9.258815267723804164e-04,1.252487850411949455e-03,-8.971430098951933742e-04,4.335725757587256292e-03,2.272524659892751712e-03,-8.517575294650124931e-03,-8.256475454455619936e-03,1.074945736994306514e-04,1.969168709941818314e-03,4.215129492698370568e-03,-5.243756377804953579e-03,-4.448440175814949958e-03,1.310769707317337553e-03,6.467395575754689499e-03,-1.439587540399830338e-04,-5.225172144188122529e-03,1.260214410501193320e-03,2.111034176193920479e-03,1.070484887530183914e-03,1.294460909925093488e-03,7.178889205645843166e-03,1.314168438468845106e-03,-3.881115829592230067e-03,3.675687744996131157e-03,-7.646082846073832748e-03,3.233433699288800877e-03,-9.585692983041490048e-04,1.169631524103271168e-03,-5.744302132455330299e-03,3.762432007723106892e-03,4.442322861304069150e-03,2.074387220726728147e-03,3.454087190958172668e-03 
-6.697050894949031731e-04,-1.211755899074739781e-02,-3.871592044337568428e-03,4.416587266807491495e-04,-3.456535634002754105e-03,6.166895765802021985e-03,6.021652114501619725e-03,2.273231872525448716e-03,-7.660695558726999914e-03,-2.045169237718447684e-03,1.307472946783802027e-03,7.779624569065653064e-04,-2.152944107585989168e-04,-3.534591284001779680e-03,8.711120189998880492e-03,-2.327483723666070193e-03,-3.516832804456661125e-03,-1.121476030044266797e-02,-2.977999676646136094e-03,-3.451125086884958629e-03,-5.198891934833591369e-03,3.588813699290845007e-03,1.062043827239657129e-03,3.178270078955510065e-03,-2.655336772031204485e-03,2.301418381261485010e-03,-8.285786892777287057e-04,-2.370463941019063624e-03,9.046838129342588478e-04,-2.959856319559937832e-03,4.114346552095194710e-03,9.161875711286243760e-04,1.971312887908617962e-03,-2.964265268360008476e-03,-2.410769663796504415e-03,8.278204482219001653e-03,-9.387725519025587459e-04,4.154212410539628343e-03,3.298514035123934363e-03,4.090782013699298912e-03,7.913084004370708789e-04,4.732362801921585066e-03,4.551166933896897901e-03,-3.148486032654741144e-03,-9.226168008845334056e-03,4.910216926157310799e-03,-5.658378739672349778e-03,-2.277327666754815039e-04,2.561569146261614017e-03,1.449872471043531707e-03,2.729938022792520842e-06,4.400672080996519716e-04,5.296548158312342283e-03,1.610060930589888965e-03,-7.974328611929955637e-03,-3.933177329621234328e-03,-1.793788804139475647e-03,5.311109086098722391e-03,-1.076925592216921440e-02,2.643829851098519670e-03,4.033130085635366208e-03,-1.120759336892623512e-02,-1.633863309493329124e-03,-5.343654567631052170e-03,1.702862644455407741e-03,-1.435877381242993888e-03,-1.610546321938271640e-03,2.899142715610165152e-04,3.836285680027708665e-03,9.649595360091307861e-04,-1.337094039539169291e-03,-2.813238039571589518e-03,-2.162286528728950188e-03,-1.667802659017022073e-03,5.517784822676854778e-04,3.436575205857728740e-04,-6.991694545768941012e-03,2.226930154710368877e-03,1.332051411516278414e-03,-3.897798775667127812e-03,-9.030497608882216308e-05,5.548198548300056245e-03,-2.005364941531970376e-03,3.078849423515195392e-03,1.253936131357328600e-02,-5.590388867115560732e-03,7.695939232775499562e-04,1.918844950831918904e-03,-4.804254369616990650e-03,1.123539911867665018e-02,5.456006805043761912e-03,2.534540412342479229e-03,-1.525509630105427233e-03,-2.972366622089457795e-03,1.431609232832496475e-03,3.261088082419146255e-03,6.104650800473394694e-03,3.030757508737431331e-03,-9.828582718978226551e-03,4.527312651232902156e-03,-1.915903951390967563e-05,6.419984599893244562e-03,4.667287347859583151e-03,2.817676570083743528e-03,-1.550407060272962576e-03,4.803841764824536703e-03,-2.016049654091947065e-03,1.421745484351493675e-03,-5.643001921880765222e-03,3.157830015896054363e-03,-6.781314859280859232e-03,-4.040335836610436243e-03,-2.165982042867338041e-03,-9.989377633012818617e-04,-1.656588240934625620e-03,-3.952911877391963695e-03,5.509395834023862390e-03,9.433929074995276234e-03,-5.421273383419042066e-03,6.139961178476109095e-03,-2.296263933202114969e-04,-3.296245839678593314e-03,-1.029765710500928078e-02,6.415779133216728373e-03,-5.601439669331489263e-04,5.153603537915957093e-03,-7.753560381355676691e-03,5.731912724375555664e-03,4.845668247705812483e-03,-1.014713510141873175e-03,7.252554491826981517e-03,1.240989586876981021e-02,-1.822641702673189878e-03,-4.777752699619882554e-03,3.487291828103123402e-04,-3.221022354076444414e-03,1.727503087723662885e-04,9.214640142146792237e-04,-1.990976107594668188e-03,-8.174114879
625929506e-04,6.097816654064335505e-03,1.949375218525031773e-03,5.986444926444537892e-03,-5.559297444514952127e-03,7.580746855242194090e-03,-2.205553477888348105e-03,1.152354286076262384e-02,4.295657906083163828e-03,-2.712943828551321514e-03,-1.902332059486548852e-05,-2.092926256855582104e-03,-2.465974322818989106e-03,-7.903697820883725036e-03,-1.732154222606064730e-03,2.102959188387450336e-03,2.684812036443896474e-03,3.225178580610196192e-03,-4.655792651474862515e-03,-7.024394835146328606e-03,4.360462137312685373e-03,4.239643932238993870e-04,9.320829671599953695e-03,-4.387153263749053386e-03,-6.261222488809543721e-04,4.156875360922217440e-03,-7.694793655905999771e-03,-4.879678307176981041e-03,-5.089232118745934917e-04,-4.409888972774052979e-03,6.443793823310168915e-03,9.750268297274714696e-03,-5.302912411985194995e-03,-8.660083648982574772e-04,-1.008158855125215747e-03,-5.602933482233335340e-04,7.822471692682089750e-03,-2.016856827071253529e-03,-2.138506155561563718e-03,-5.264111786931265947e-03,-3.347513566477745665e-03,-5.410286036953401538e-03,6.508974142487332533e-03,6.864401467521745069e-04,1.065230212891533738e-03,-3.747009469017978898e-03,-3.969517802961043673e-03,-1.636877111004168871e-03,-9.948756001474465954e-04,-1.038736786791943228e-02,-8.083212065852006349e-04,2.924467752675247254e-03,5.179140614378329073e-03,-1.700143397695829056e-03,5.022433550549920515e-03,1.825128130253569946e-03,1.113686232901281783e-02,-3.220672889322551743e-03,-6.940319431996776374e-03,4.545135485996154405e-03,-6.869709398866127512e-03,7.690553676630552081e-04,2.874384891705156664e-04,2.537768161368714973e-03,-1.740678323072905294e-03,-4.941415578533615328e-03,-6.650831619556006602e-03,1.747816018417424179e-03,4.066178460281426842e-03,-1.437220790615713643e-03,3.117380146268756980e-03,-1.504218106279998045e-02,-3.175722156484638362e-03,2.224402290854325757e-03,-4.391674675704291056e-03,-1.878538557689742711e-04,-1.492964948961769678e-03,7.569413602656496418e-03,5.366739899934017642e-03,-2.536737411303774956e-04,1.370599883469611176e-03,-1.812572546695736348e-03,-2.326354099193946989e-03,-2.040794791887442627e-03,-2.911278868155599709e-03,-7.744986676010810625e-04,-3.549352007385946865e-03,7.253295609510743624e-04,-6.946924608044639099e-03,-3.669571952530133706e-03,8.986390614212098199e-03,-1.409654429804187072e-04,5.185696623965582383e-03,-5.904350952752315460e-04,-7.387092385526676172e-03,-2.814545198849325355e-03,-7.255885899555433727e-03,8.918524747961874451e-04,3.745090038158050468e-03,1.781125579105682401e-03,-2.376407360641094330e-03,-7.652007868211739214e-03,-3.468698044248090962e-03,7.214551792992247858e-03,1.078959354121191913e-02,-6.760142331002414948e-03,-2.308500859222326323e-03,-3.468331039953420191e-03,-1.368868493847646452e-03,5.848452732745264282e-03,2.996543179717246676e-03,6.909062519570401129e-04,-1.553407943533014444e-03,-3.301942696395934393e-03,-9.806155293818276560e-04,-6.606380553031683187e-03,-7.459528857383834061e-03,2.500287639771130513e-03,-9.790585965272701700e-04,-5.628060947709350066e-03,2.989594629733871751e-03,3.290826390197387156e-03,-9.299356076980317895e-03,-2.143099170613948622e-03,-7.619733244839748364e-04,-7.012910870708421833e-03,2.057977322263189121e-03,-1.003992210056362314e-02,4.158349206665938689e-04,2.153632121999496819e-03,-8.199407727896160583e-03,-5.341989328392657797e-03,6.338740000413153712e-03,1.201185105084033936e-03,-2.052164166719688387e-03,7.350618684988938570e-04,1.046991681644307212e-02,-2.005554734931772227e-04,-2.165085055501492477e-03,5.1934294
86543216581e-03,7.786728568316519418e-03,4.802965120190593774e-03,-2.489421925364808559e-03,-2.634460712493739282e-03,1.817719809035335879e-03,5.752818122541803782e-03,-1.721241895483935141e-03,1.176847934462816551e-03,2.295063478220431101e-03,2.285245753521747154e-03,1.838242787019202056e-04,-3.486747204072587064e-03,-9.675160543937046803e-03,-2.373555525020582305e-04,-1.488268307727553348e-03,-1.167500444605683351e-03,2.264484998164134014e-03,6.449061127439001916e-03,2.264153081818529468e-03,-1.025800699566174409e-02,-3.907513208087421905e-03,2.762433766131713528e-03,-4.348583707597841047e-03,-3.606636413181305012e-03,8.978536942949073493e-03,5.629744002110070693e-03,5.403698080742296711e-03,1.669951453285800635e-03,-6.065599983438786647e-03,4.842827765924056886e-03,-2.298907197816695090e-03,-8.979030497616223225e-03,-7.621349059115655215e-04,1.557443603638858668e-03,3.389655416324272464e-03,7.610221361677083594e-03,2.564144153978567715e-03,-6.140737569502915591e-03,-1.061610669765197237e-03,3.679433656235678394e-03,-6.800975329616843271e-03,5.722692220382039584e-03,-6.580442253105105166e-03,-2.181697819535453192e-03,-9.219079416742053876e-03,2.976547191609540260e-03,5.771315430095674739e-04,1.935539456825293098e-03,-5.207799238684842659e-03,5.145713447314924360e-03,1.813592146140747012e-03,-8.848271630884644440e-03,-9.209976972935450881e-03,4.990146585475182216e-03,-5.382878889547165757e-03,5.169372508034342228e-03,6.386546179602401417e-03,2.142744467866291094e-03,4.784494694430862886e-03,4.755676366357467280e-03,4.752511299686680048e-03,-3.843127943528811506e-03,-5.568805561224798861e-03,-5.461014000484826734e-03,4.578400682342449735e-03,1.306385731892666232e-03,-1.498950225966180208e-03,2.850528322955240418e-03,3.952256077751834830e-03,4.227954131854515935e-03,4.047709705602951097e-03,-2.818795110602123621e-03,1.188504544079059533e-02,-3.869928568672516105e-03,4.075905935707998390e-03,-1.098373670688805419e-03,-5.211015590664434295e-03,8.606697675732304488e-04,4.057915672657699986e-03,1.055498298331960030e-03,-9.156593367731867940e-04,8.219291068544503070e-04,-6.182345053693014776e-04,-6.474460790788773464e-03,6.601262346906687506e-03,2.927458359381701560e-04,-2.966032499342119693e-03,-7.130217063464144855e-03,-1.300707032349580385e-02,3.698711944612217257e-04,-3.077387889950421954e-03,-1.397403643879806050e-04,3.965539896090401230e-05,1.068176313037996240e-02,-2.463731746101200607e-03,7.168205952869184140e-03,-2.684151954894194603e-03,-1.109212290876215694e-02,-2.798414433889184934e-03,-3.144010128945836247e-03,-5.510214394165781869e-03,-3.164273230774666303e-03,2.640148654302779639e-03,2.509831674070578129e-04,1.448751634196148146e-03,-2.577307543683257738e-03,5.422585296042055185e-03,1.670182602450777940e-03,-6.266344936789052661e-03,1.079378129064421888e-03,-5.506015699556543290e-03,2.869315205635828130e-03,-4.882834342422727285e-03,-1.015949586504204076e-03,6.947110170295272939e-03,1.180860142381857068e-02,3.030733088037463035e-03,6.247436653977096743e-03,1.545416623203067037e-03,-9.596547666981805266e-03,2.601307274165777939e-03 
7.640650755011325727e-03,3.271854971183206159e-03,1.367161424354790169e-04,2.228556167691416335e-03,-1.015648633183137504e-03,-4.069527920496865230e-03,3.139939000974716846e-03,4.006222300866404094e-03,1.182970288731963237e-03,-1.355266203324390680e-03,1.305498926234231229e-03,7.471697808904869320e-04,-3.643501092692144269e-03,5.734097736531328614e-03,5.114243936805880353e-03,-4.592087660594052455e-03,-6.341904812656350837e-03,4.513723659839240406e-03,-4.592974029989973714e-03,6.841032491024428078e-03,-8.943995496638897482e-04,-5.476402939832670895e-03,-3.652512698519239098e-03,-1.391154094044540213e-02,-2.827561112171707823e-03,7.996536747857909549e-03,-1.586589126957772215e-03,-5.497626872910701653e-04,-2.672097158297754259e-03,-5.174249280463946027e-03,-5.803555948176190513e-03,-2.534102964523344756e-03,-7.720398430914997973e-03,5.981786438114176235e-03,4.766945397537056522e-04,5.394806267654496043e-03,-9.563407545203468640e-03,-7.652940630189933660e-03,-5.250330449946534993e-03,-1.297950336958277441e-03,-3.043634911614776211e-03,6.821160888769533387e-03,-4.039221144658978330e-03,-4.957909128249496887e-03,-2.843039547619424096e-03,6.537593324445546347e-03,-3.299884805443617508e-03,2.667322544262512845e-03,3.593840932298980259e-03,8.641846481705538208e-03,5.282070969422996559e-03,5.522471173535122187e-03,3.736384702721129696e-03,-6.118145604229911078e-03,6.095773764009063123e-03,-4.289297762611979717e-03,-1.015905036739164075e-02,-3.848651924581815431e-03,-1.277566491227943250e-03,-2.381071383313560809e-03,2.521632363845421887e-03,5.906508899301483526e-03,-3.984947394109981597e-03,6.853358258260631997e-03,-1.127131075420558220e-03,-1.976468773297026128e-03,8.087377386057305675e-03,-3.139808217835745653e-04,-5.593925951302233628e-03,-1.083688005699502167e-02,3.325255937205092230e-03,-5.197568161339530778e-03,8.621223807395370087e-04,6.856631346298154399e-03,3.251064834523812157e-03,1.285532805435351264e-03,-2.678473329055037350e-03,8.780426689223256188e-04,-9.957620440150104658e-04,6.475419123069404981e-03,4.288467720030789999e-03,-5.867380154239882316e-03,4.420407224830043801e-03,7.786656507857410078e-03,8.297857701507033515e-03,5.504981813825002984e-03,-5.122404976321083547e-03,3.176910598926388756e-03,-9.007929532290807157e-04,-5.440048308961283052e-03,-3.276345269325394729e-04,-2.498027156971228003e-03,-1.991596020168227756e-03,-9.816501195422818599e-03,-2.128846402398582114e-03,-4.513662086036347200e-03,-9.477292253680059606e-03,-1.844727907907402469e-03,-3.243239150383513125e-03,2.923160921506077776e-03,5.449006960124014309e-03,3.330433257552532084e-03,-3.751528297477449859e-03,2.829864810450766904e-03,-2.468125693318084634e-03,8.229300949394139850e-03,5.153755040067295717e-04,-6.171850852185006798e-04,-7.662229313704517659e-03,4.026317479197657860e-03,8.341174877591976330e-03,4.177970841799092706e-03,3.257498131958727530e-03,1.142210863984889462e-02,-8.596923787414343168e-03,-3.591191795280014240e-03,-3.833715200594479088e-03,-2.980985207612050505e-04,7.897224839926995574e-03,1.715020873465159619e-03,8.894531870255066586e-03,-9.844817901027803214e-03,-4.037711602505668004e-03,2.608534034604930059e-03,3.179890768130089803e-03,1.044791925232482939e-02,-1.342615729554121756e-02,-4.524824465455529361e-03,-2.368021219430029804e-03,-1.551110494159687637e-03,1.246255718815204990e-03,-3.231728549526995641e-03,-7.641808759918202083e-03,5.099804853473446095e-04,-6.221847412572150023e-03,1.645149975986104915e-03,-9.167238384392705245e-04,-8.832374329569396554e-03,-9.904208936643841329e-04,-2.4
46109991815149237e-03,4.682692040895853128e-03,1.712248568030666322e-03,-2.110312314869092643e-03,1.579669404038155922e-02,-7.593809103892606299e-03,1.853456862499478809e-03,2.855046158875622266e-03,-3.888145823222027841e-03,3.228909397198704775e-03,2.524850691519320463e-03,-6.296613433762734448e-03,-1.160631590914573859e-02,-3.154806459501228750e-03,-9.402193155614982367e-03,6.149134093806544674e-03,7.520822093528464411e-03,4.138701173124076545e-03,-5.643362950038684732e-04,1.485020441912541372e-03,8.447938283434629821e-04,-9.853988917834786629e-03,-3.300526226457283288e-03,6.469010083714346084e-03,5.528587783422665020e-03,-3.667653635708670919e-03,-1.121379460083523488e-02,-2.092731105418097880e-03,7.978897413246188557e-04,2.976183066538383634e-06,-2.773429062072751451e-03,-8.716246015049063101e-03,1.428836255293053576e-03,-5.111020441559953505e-03,-5.247590204636292180e-03,-4.758374936783216214e-03,-1.098174923379842752e-02,4.392456186749499857e-03,-8.798713616514306510e-04,-3.736654777644640366e-03,4.222692865359127458e-04,-3.715930810100987695e-03,5.042496654790483597e-03,5.933351214427101845e-03,-4.158183106558627565e-03,2.498318419872363246e-03,-8.239263205344194915e-03,4.377444712072382262e-03,1.246459800175145002e-03,-6.629437382902927424e-03,-8.859788315467584732e-04,-2.165798392581198481e-03,5.308636486923103658e-03,7.772156365080804463e-05,-5.584544681034312374e-04,3.425911839867951027e-03,4.422170636757855218e-03,1.059005175256518168e-02,6.382698660131505899e-03,-2.399415980133392211e-03,-3.514317100919410724e-03,2.045985690888922626e-03,2.389318972051086842e-03,2.148174410141077875e-03,5.360425964555063250e-04,-2.536025897691281030e-03,7.299617548359465906e-03,-2.622009135433843625e-04,8.321220129375939481e-03,2.592963857713665062e-03,-2.710527480189435859e-03,-7.519430599110456248e-03,3.005598229617275664e-03,4.030351492248409294e-03,3.874338635190577827e-03,1.675669707457154997e-03,1.200478353062994037e-03,-1.269834247155416642e-03,-1.256999565324525697e-02,6.193592815075598529e-03,-3.463346850230331845e-03,7.891845629451738758e-03,-5.649705851760365022e-03,2.054475736696482081e-03,-1.507840635551220901e-03,-1.969553299495767785e-03,8.594915241532604527e-03,2.202170645578261678e-03,9.675634042835641949e-03,-7.151362381531648806e-03,9.890192157969938795e-05,1.553063542845928074e-03,-6.177368966020843752e-04,-4.316287636975312154e-03,3.922617149566871679e-03,1.657494187384814938e-03,-5.873149386315754811e-03,8.681377497790446554e-03,5.477820066006422982e-03,-3.215281936127139736e-03,7.545418603545525106e-03,-8.544037413378282111e-03,-7.939766742339410873e-04,2.361793513413949861e-04,-7.679467464822549525e-04,-3.759424183098398781e-03,1.267969328232844417e-03,4.712716688811425039e-03,9.065348200805688900e-03,-9.977430315052200280e-03,2.244984638054012618e-03,-7.227753152931755106e-04,-6.971859305122208922e-04,-7.560500244880228454e-03,-5.111489503264806875e-03,1.684680437226337632e-03,8.407708125227206708e-04,6.651763950329150866e-04,-1.758955291044621543e-03,-1.469059034821422265e-04,-5.752181822112545882e-03,-5.279742516435319451e-03,-1.989224756797241134e-03,-5.283340350302891814e-03,-1.624846852834452684e-03,-5.629859762900579160e-04,-2.610570658995596365e-03,2.240202760643766129e-04,-1.678300481390605520e-03,8.871829070978535028e-03,4.362567567477987698e-03,1.778419713346280967e-03,8.197789306051045432e-04,-4.496038277974558246e-03,-1.740489400064738294e-03,7.958518289675061211e-03,-1.467548412475269378e-03,6.924078498004224407e-04,1.691516732090647007e-04,9.34462404312116
5652e-03,4.048765902761567934e-03,2.420904816058199946e-03,1.829581779575540018e-03,-7.006311173783701281e-03,-5.655264003940587564e-03,9.099159352168529474e-03,-1.564868048068537507e-03,3.491760912902145374e-03,1.930052128178133932e-03,4.007233608062215832e-03,-1.403354055170657859e-03,1.457863513200633680e-03,-7.487874522485568531e-04,3.864706047976389225e-04,3.322928207988612068e-04,4.571972556261152260e-03,1.007139533509820951e-03,-1.402563342464995281e-03,4.578147405385609055e-03,-7.353163752811327399e-03,1.922645438175740436e-03,1.531035557443291215e-03,-6.444001536129312369e-03,-1.357444638201486288e-02,3.085521880719838562e-03,2.141764950555686241e-03,4.920785040148385921e-03,-4.175687426070990096e-03,-2.269078136802786441e-03,8.367433727405990118e-03,-3.605162377265920624e-04,3.409102215845621008e-04,-1.574323273443326877e-03,-1.224442679790373363e-03,1.827565099576437336e-03,9.549891258248713018e-04,-8.493036788469654833e-03,2.393798700796847135e-03,-3.117440779124871358e-03,-2.410759196295542053e-03,-7.165783659575028497e-03,3.751062315209187630e-03,8.263036894668897639e-03,4.035975908885804965e-03,2.085415955442473587e-03,-1.408780847562000286e-03,4.493751663570061568e-03,-5.712636089697674738e-03,-5.685027872476678193e-03,-1.083720731198390259e-04,-7.357893777746200674e-03,7.905939955921228307e-03,1.930283449101733535e-03,-5.259093931321405253e-04,1.660074241377484505e-03,-1.460670691395592755e-03,4.025883274541985565e-03,2.139868478653907374e-03,1.480331179490120778e-03,6.118084938372005512e-04,4.354508702531179921e-03,3.725348870966242992e-03,8.478562752594140037e-03,-3.435569556952827903e-03,-2.047430854468476266e-03,1.298880912718902280e-03,1.352162441966518226e-02,-4.602095098950913926e-03,1.923421135210302401e-03,6.870536876903916889e-04,-1.862063671073853127e-03,7.264231608242057532e-04,-1.311146591996543542e-04,-8.360234255163023764e-03,2.311563009905556829e-03,6.782644956725830936e-04,-7.311952858167207903e-04,1.551292291761099994e-03,-3.479726619354286433e-03,2.022044094226976233e-03,3.171132168573988653e-03,1.124125629616052301e-02,9.645114475340158498e-04,1.151654084312310651e-02,1.862390318613939454e-05,-3.607454434258518423e-03,-2.838626265930746951e-03,6.276000266320378405e-03,-1.963819829701937691e-04,-1.016710230355584964e-03,-5.165704390427454601e-04,8.940003560679232394e-04,-5.227020049097435075e-04,1.768454535075730284e-03,-2.736988069959995790e-03,1.574683146909303414e-03,-2.647162606320660558e-03,3.960406856634565882e-03,-3.805885174914594698e-03,-8.260546089361832725e-03,-1.225100717914396481e-02,-5.290420578876671096e-03,4.628621129788863114e-03,3.025122309962378561e-03,4.132410870873353889e-03,1.168935374515380149e-03,2.379434354114071549e-03,1.280565595315330671e-03,-1.483639121245629994e-03,8.222370996878817020e-03,-7.024219164009700735e-03,7.099500145752663319e-03,-7.402737655236161715e-03,-6.509289548239198531e-03,6.960576239159818471e-04,8.571681319845573035e-03,-1.129179725273226839e-02,-4.715598445463429697e-03,-4.584281038258821188e-04,-2.316235907865346090e-03,1.221685172376735240e-02 
-6.828830749349093119e-04,6.998551159313817771e-03,2.374723961791033993e-03,-7.551906807733058764e-04,2.681323670442957007e-03,1.091216719774221536e-02,-6.146274301597105978e-03,-8.815117165607008060e-03,5.404959085270519790e-03,-1.952066100877235299e-03,-1.018582623489583221e-02,1.525291271362625746e-03,-3.518707501686198051e-03,-1.655866316881689270e-03,-2.843105359428400088e-03,-4.600526858407664142e-03,9.538080650316396053e-05,-1.272164908918781926e-03,1.060678555401760315e-02,-2.960449004882922605e-03,2.183624628456689795e-03,5.292012227582572163e-03,-3.851767054661116210e-03,4.921453277246016188e-03,1.153716608172527039e-02,1.031020322706376058e-02,5.686864680302949199e-03,-6.871114553718271969e-03,-3.824641392517911121e-03,-1.227233901456786054e-03,-4.208825840065124474e-03,-3.122749555012163149e-03,-4.169542509791609357e-04,-6.440596005995834076e-04,4.887428830164682741e-03,-4.793008762175192394e-03,3.244074208356457090e-03,-3.893999955333509767e-03,-6.111355373279499931e-03,5.958105855007458712e-04,-7.824961298113356986e-03,3.209916685148970703e-03,1.752993916776115742e-03,-4.971245302434922596e-03,5.162026419605583033e-03,-5.713925609684492903e-03,1.420117674455759291e-03,1.471038937073491102e-03,8.478540260738466963e-03,1.463629153304606134e-03,1.170460233293526544e-03,-8.562623918204466346e-04,9.131538002068433774e-04,7.945191072479273336e-03,-4.082082141031074737e-03,3.133519617544526341e-03,4.293821930635374068e-03,-2.795327034476033801e-03,-3.394365242262921412e-03,9.836922275042276828e-04,3.304706356952882102e-04,-3.356911872884473218e-03,-3.198753157623422487e-03,-3.824454736390422950e-04,-1.686796480629561496e-03,5.872810301982596057e-03,7.390211728104533091e-03,3.353028670273584550e-03,4.046149521466797230e-04,-2.625097841212917314e-03,4.649638103368601214e-03,-5.096961570416745635e-03,-3.975587662878524685e-03,-8.756851496782960863e-04,-1.026572692947728613e-02,-3.583108891947263223e-03,-9.071912135219756951e-03,5.829076390559104030e-03,7.786197852296315031e-03,2.593058583503700300e-03,7.167436067922979860e-03,9.980978035063248904e-03,1.031549721414722976e-03,4.159484414107417379e-03,5.189488520354482506e-03,3.904505220355901883e-03,-1.084723242797673787e-03,5.644694390558685666e-03,2.099396410100783095e-03,5.704744144113526108e-03,2.249249543805730521e-03,4.667920135787630154e-03,1.356368739568543716e-03,5.668473675038725700e-03,2.272540315637134462e-03,5.007752700737243268e-03,1.616323740124259861e-03,2.171778946597040846e-03,7.177131693952375387e-03,-1.093610515674659639e-02,-2.362914220905231081e-05,-6.636268972968821685e-03,-5.872432148830729337e-03,5.065214874358590913e-03,-9.584036290756258250e-03,-1.161024701172125220e-03,-1.063954907133307258e-03,4.706399356919698657e-03,-6.098921192559570922e-03,8.678276217729248218e-03,-1.753256075060291228e-03,1.588664224414210136e-03,-3.413938982248983147e-03,-7.314288971788467791e-03,1.466929133546697852e-03,-3.123710143169242115e-03,6.222241807059331791e-04,2.952112966751780417e-03,-6.094494858259829305e-03,6.113249188333962428e-03,3.104288090483616337e-04,-3.057669668592241739e-03,-1.324134675841080776e-04,-5.999249914828144563e-03,-1.951189026476613745e-03,3.778303006252991831e-03,-7.115699990934277845e-03,3.253045989726805701e-03,-3.062390365458207437e-03,6.438829522992180893e-03,-4.903787645933956225e-04,3.787452241611422021e-03,1.217157803607592107e-03,4.367519271765016135e-03,1.086615566168354363e-03,-4.497016232351157467e-04,-2.061301909292767576e-03,-4.615292136830322252e-03,9.218476720745681641e-03,5.3884264853034
82276e-03,-3.811037363592758090e-03,-1.400719762920569524e-03,9.616043891086327455e-03,9.213130315050358995e-03,1.442702814029974204e-03,7.656286381595964891e-03,-5.080054098621237857e-05,2.738389779054948951e-03,2.292175259846596959e-03,2.529982736164328345e-03,2.944325620158217121e-03,7.519843427080662515e-03,6.358666092791761919e-03,-8.447256736581117489e-03,-8.398110443660072736e-04,-1.844013068079769068e-03,3.411933572975616846e-03,-5.745847701658174595e-04,3.060980927112604173e-03,2.997205998909075429e-03,6.173067753846146928e-03,1.228419759903591132e-03,7.707113193154515630e-03,-5.838731265308683659e-03,6.353853700722400695e-03,4.814580480192895591e-03,9.653106835082313902e-04,6.652612525069259361e-03,-1.456926924068261894e-03,5.636335082287227509e-03,9.556426312347893887e-03,2.760788566878663499e-03,8.228652254187045906e-03,-1.677052597369704540e-03,2.045014815797712316e-03,-3.227654297265068403e-03,7.246472451961027961e-03,4.335314145158667906e-03,-1.934563962807319369e-03,5.383109896412158549e-03,2.993120154032414908e-04,-9.428580019435944815e-03,2.124891829149911336e-03,8.216934787568503859e-03,-1.162069553452734980e-03,-2.038465297966832353e-03,-8.150042238190590219e-03,1.709995896017824785e-03,3.031374262594153327e-03,5.517173418281716771e-03,-4.547631922769308777e-03,1.347016237342029202e-03,6.162286171631228653e-03,5.157096842270589548e-04,-2.864688857847872815e-03,-1.462216360309740717e-03,3.094529446321189151e-03,6.617106194125615959e-03,-2.137566528256779064e-03,1.843088738450598084e-03,3.649994660039528396e-03,3.600293147639829902e-04,-1.125606595846390806e-02,7.129074078352009777e-04,-5.981869086687910224e-03,-9.773904500929939815e-04,3.380151177787187825e-03,6.708779671886744810e-04,4.210981475777941276e-03,-2.921684509477117091e-03,9.791290128022562406e-04,5.004986340780834871e-04,-3.801429309299318628e-03,2.727486876358167778e-03,-1.972970011873776821e-05,3.816893610118604532e-03,3.404486620540591538e-03,4.610202004696458376e-03,2.597994953944532479e-03,4.023656922909650199e-03,1.962268646462892417e-03,4.991588442069715831e-03,-9.071009909558362325e-03,9.432634425103305492e-03,4.674746499561459497e-03,-1.934228072078328408e-03,4.359932935838668235e-03,5.486522280866843642e-04,-1.349558976080747963e-03,2.398903218233152442e-03,-3.141212422228728107e-03,-1.007419855865110403e-03,1.669689656738667163e-03,1.063449277356157087e-03,-9.081686346058468178e-05,3.699588179438775219e-03,-8.368207375060748854e-03,9.733754948183827305e-03,-4.964830579438551765e-03,-4.716525090384607842e-03,-8.366061994893344081e-03,5.656150430811591676e-03,-5.138506234272702949e-03,-2.047238624755094282e-03,-4.885666484819412361e-03,-5.552908929092261088e-04,3.509667330222833621e-03,-2.150480693273026846e-03,-4.891482230067619418e-03,-1.603091162183832834e-03,-6.300226194796436276e-04,1.071220550006439542e-04,4.112601656779908688e-03,3.544100040844611228e-03,3.664208279615932800e-03,-9.345533977468510481e-04,8.011522659072133762e-03,-9.441349390119152182e-03,5.546323311926498007e-04,-3.992343103410531634e-03,2.426964236304587214e-03,-4.073514606212237828e-03,3.228169302208261805e-03,7.202750597827036090e-03,5.717134582386026141e-03,7.088333749418001227e-03,6.954667575473664695e-03,2.486198342146102042e-03,2.088027519744339693e-03,-1.344265300784425231e-03,7.743033771192172482e-03,7.251259317189505914e-03,6.518027551927775207e-03,6.549526485752215320e-03,1.077474359503594135e-03,4.157011018594612348e-03,-1.029509053353032128e-03,4.383104342295518832e-03,-1.448775391974021836e-03,2.94016077442375801
6e-03,6.070517623129204586e-03,-9.447667009811181252e-03,1.935335308164672649e-03,-4.034001233548798956e-03,4.080863998029588013e-04,7.278297958918795475e-03,5.477611302118167574e-03,2.177169652345341440e-03,1.048933365698060879e-02,-6.819976917052731659e-03,-2.375810643854280989e-03,1.188311762952350007e-03,-3.110568050304627838e-03,1.734298261779153045e-03,3.099939179556983337e-03,3.942867203141702317e-03,-9.481838815319864819e-03,7.015131409110060036e-03,-7.721482460884812517e-04,4.207980737640737608e-03,4.321752791366579357e-04,-7.528423427687063081e-03,5.271401789092033247e-03,5.713652014781959131e-03,2.086545950422817684e-03,3.587193516975927801e-04,1.400413713334348062e-03,-2.345554297345239271e-03,-1.877011888327829526e-03,7.628754824652284570e-03,4.354363940864985731e-03,3.589650412193134878e-03,-1.083961128861541702e-03,-3.356671764418378079e-04,-1.179476722315182108e-03,1.617109575039077157e-03,3.367834752685832603e-03,-8.090590634677853610e-03,-1.101574359740987996e-02,-3.002948280948852366e-03,-5.318876308540665399e-03,-9.811429086497595528e-04,2.234816873182558956e-03,-8.330899361742000024e-05,1.705786260867191498e-03,-2.244000861385089026e-03,-2.964490808461517964e-03,3.301601430542153940e-03,-5.306496444145835517e-03,-3.625480202429198339e-03,-2.310875746817636373e-03,-7.689000378099836684e-03,9.715468948240933364e-03,-3.060860438277537409e-03,-5.095916526631084041e-03,6.105689453175838083e-03,3.699598756795856140e-03,1.086887383093604852e-02,4.541139907085766335e-03,2.607327039384968372e-03,6.912360209540295043e-03,3.343853663915390909e-03,2.556134266178786179e-06,-4.844564284281229621e-03,8.894869764194044109e-04,4.651183138530060393e-03,-1.824582532441064702e-03,3.320129312562574136e-03,1.455600763595611474e-03,-4.476515939629665229e-03,2.502484721527322830e-03,-7.198501781444300489e-05,-6.104756618840503392e-03,-4.597776253650245372e-03,3.228989306133130816e-03,-5.700099615037297271e-04,-1.482880241752098597e-03,6.284145473805707004e-03,-4.708224929835015844e-03,7.915368950096528895e-04,1.111477411283714155e-03,1.267879237732628906e-03,9.292053237205496335e-03,-6.701205049753511216e-03,-2.247776663796163376e-03,-6.025808125460350127e-03,2.171309168264017476e-03,4.635953415636216365e-03,-1.805658220979356257e-03,-3.136172273362422547e-03,-7.443767311893759089e-04,4.503698640064346566e-03,5.196039089984452568e-03,-8.798189475172416701e-03,-2.161537173701530857e-03,-1.084993465316368361e-03,-3.277397910716809293e-03,1.344949125966584617e-03,4.944879107269493750e-03,1.622510978971168204e-03,4.540923006754910528e-03,4.239700952225882238e-03,-9.953747053069962256e-03,3.149425626715785572e-03,-6.071240304039146607e-03,5.050548386202005462e-03,4.957017216960666640e-03,4.291486894480685439e-03,5.040778661286181295e-03,1.067335261750416249e-03,-7.201500980096041437e-04,1.077308145452359443e-02,3.086073650842225541e-03,-4.728514286921293650e-03,7.309532540742100064e-03,3.546788715518695012e-03,3.578337767550856651e-03,1.901224970006800546e-03,-1.715846479404507232e-03,-2.624765244487443569e-03 
-4.858877410298441000e-04,6.815211292739873847e-04,-5.644719266401917318e-03,9.864682571341940390e-03,4.663863659758842907e-03,4.715154555988644927e-03,4.676154915937748464e-03,2.403492107796086855e-03,9.155135579702811804e-03,1.791532241440778371e-03,-8.292531861626787834e-03,1.461848155190761467e-03,5.501981117862590753e-03,-1.592504053916809180e-03,-1.545636630622984359e-03,1.321806105307113963e-03,-5.064061558898213210e-03,-6.079049844616820492e-03,-1.339899483124868101e-03,3.112988108451544746e-03,4.539190985980730460e-03,-7.388278777650055056e-06,-2.135473170994547226e-03,-7.821285415837452015e-03,1.189317534450887170e-02,6.665906894038156096e-03,-2.811914214203659577e-03,-9.878331515154566752e-04,-2.144022860971536888e-03,-2.625807002814676844e-03,1.572606165499225482e-03,2.326126951255911256e-03,3.058399782190137587e-03,-3.032969734982516740e-04,7.269575292081567608e-04,1.140650334960377314e-02,5.840365907873061374e-04,1.197314176532929004e-03,-2.294491693021869123e-03,-5.497834918292161983e-03,-1.391521170961320872e-02,-8.452272152490081149e-04,9.030988147260742005e-03,-1.303549468350629313e-03,-3.269241571829216344e-03,-3.534772523346033871e-03,-4.025424725206146540e-03,-5.345391136020438954e-04,-6.574129724261316456e-03,3.163176892638228273e-03,7.055775118856013814e-03,-4.171541245209678915e-03,7.135901791638230495e-04,3.237447459915081781e-03,-9.027819818383242545e-03,-2.385637546871842863e-03,4.101043106726502165e-03,3.077406492085578860e-03,-5.822676236845846474e-03,-4.989762450121019467e-03,6.498543187315621202e-03,-5.553204487175461383e-03,-1.025073674199964910e-02,-4.752511369379648458e-03,4.990781592789992997e-03,-3.492099733274007297e-03,-4.081000560362996014e-03,5.889501056650646958e-03,-4.893205188874150124e-03,-7.147924202338419401e-03,2.886184240281859485e-03,-4.710395855208611426e-03,-2.607792161558241013e-03,-1.110888718321048976e-02,-5.514765909116806607e-03,3.477329716356527406e-03,-9.760285027876063058e-03,1.036644568202742776e-02,2.691601594255205437e-03,-3.418656148470641983e-03,-1.556299168732946155e-03,2.222413531448210865e-03,1.147811084296592593e-02,7.043323336335478770e-04,-1.855617054043087440e-03,-3.176474582220940607e-03,6.858646286354837338e-04,1.021109655771439140e-02,-6.299534432191365892e-03,3.227467275208278723e-04,-5.057613609043237583e-03,5.742842829782340668e-03,-2.842032755638930312e-03,-1.111391825400575130e-03,-3.204696022831150966e-03,4.698735505481423103e-03,-1.147636999384185489e-03,-1.117093490193379803e-03,-9.126474370084068297e-03,1.840541937179073070e-03,-4.115795781042100136e-03,-9.978848421027823514e-04,-2.738115111224533223e-03,2.847307887286476807e-03,5.254475448453729651e-03,3.798169973472983758e-03,-4.765282020292804056e-05,2.451130781583191979e-03,3.198699382959956116e-04,-2.681394379600919887e-05,-1.026644792876406625e-03,7.626262462373103318e-03,3.853790132766499350e-03,-5.291816762118290753e-03,-3.316750383289610699e-04,2.357001627555587960e-04,-1.411330773035270698e-03,8.852970651096756308e-03,4.492920030364546888e-03,-4.939084017205239673e-03,-9.182621897360793714e-04,5.094958295182875962e-05,-9.420909229503830751e-03,-6.787525341455610296e-03,-3.802161278621076380e-03,-1.112858064583114800e-02,-3.465470588464088664e-03,-1.654033145865045368e-03,5.514454079289765956e-03,-5.658497341511549037e-04,3.366302742985530358e-04,-4.273450458327849449e-03,4.284172567326281593e-03,3.350622063792521761e-03,-2.788330679907910677e-03,2.896085275533452023e-03,1.120145966051235356e-02,-9.367570944328615715e-03,4.622250146507082998e-03,-1.4
32582442124515987e-03,-1.046118412305487196e-02,-2.963681941076275628e-03,6.336319419508092306e-03,6.694542011255271395e-03,7.774704649262991797e-03,-4.923022334933539008e-04,-1.124787678486142105e-02,-4.534699055631317803e-03,6.986567042166162873e-03,6.399500151515632312e-03,-2.183076058780781398e-03,-3.367507504408705395e-03,5.254749113952081677e-03,-3.606996549947731352e-03,3.913827005874422407e-03,9.292915261282636702e-03,-2.491392923158688641e-03,1.918542618859768846e-03,-2.804200251051422780e-04,-2.959810676903349130e-03,-4.624542160510556818e-03,-4.666180074510389852e-03,-3.230070220909464691e-04,-2.952129038819727332e-03,3.350266053242803577e-03,5.165279855605594238e-03,-2.669349385544803662e-04,1.805692395549886049e-03,3.686131827083918757e-03,2.827100574648570407e-03,-8.756825908550395707e-03,3.180994209561868215e-03,-3.824857323618245674e-04,-2.026665440714828757e-03,7.332644570075592776e-03,-8.650670286770209202e-03,1.152929160789834852e-02,1.240516924259566769e-03,-1.950427738306046094e-03,2.548231396193111498e-03,9.659570929154568308e-04,-3.990785058656894611e-03,-4.299656462814752458e-03,3.295030558177137765e-03,-2.204131627470869333e-03,-4.590852264869470407e-03,1.018740722263694783e-02,-8.758882532156158501e-03,-9.578723113230949715e-03,-6.917707035421666838e-03,1.050300895602010280e-02,-8.733269956427739354e-05,-1.052624951910161640e-03,-3.089230207566460052e-05,8.030628145033016137e-04,-6.572322657254911320e-03,-2.731447984403400069e-03,2.651378987915113238e-03,6.690809077780197900e-03,-2.601754158160791714e-03,1.895754000650515378e-03,-4.729665127825625169e-03,9.291443297882057703e-04,-2.171343901709056005e-03,1.810476169450760380e-03,-4.391605600623415401e-03,9.330267827890908333e-03,3.398501999766889888e-03,3.813406088375774234e-03,-9.473298785584531242e-03,-5.660620784857904467e-04,-3.043289887778616917e-03,3.417691954335447518e-03,-1.047233500351714116e-03,3.259984420323284917e-03,-6.259785365329974087e-03,3.103448741784735603e-03,2.796355412888009307e-03,1.714781320155491819e-03,3.341421285697889843e-04,1.356847369832788310e-03,3.204174547671479086e-04,-8.320445709307535548e-03,-2.667277832319413060e-03,-1.728989725751014844e-03,-7.523334598441296962e-03,7.040792339888586604e-03,-2.807347615308967374e-03,-1.349259502463466022e-03,-9.795511870874492114e-03,4.317777168182701021e-04,6.291573342859352845e-03,-5.491250327427622813e-03,9.615179077845326062e-04,4.492614776315779698e-03,-2.907838022670935704e-03,7.281983832059396762e-03,9.656368958129084995e-03,-5.401057876813782077e-03,5.701288515242002378e-03,2.977339648586338136e-03,1.571704494420181602e-03,2.782257372498213050e-03,-8.479312478501501577e-03,3.313971700637375999e-03,-4.976901631410123560e-03,-3.300453767100212845e-03,-4.893698325775152659e-03,-5.617058188853262315e-03,5.617880221964013585e-03,7.280681808632821946e-03,6.259421840108684264e-03,-9.826817625667233144e-04,5.354053091466350230e-03,4.488055891683725464e-03,-1.018959693737793264e-02,2.194276977397037649e-03,3.697651189416383335e-03,-1.024630251572113994e-02,-3.600856845044585457e-03,5.639977618051956847e-03,-8.280951362266373086e-03,-6.375328065693801945e-04,-5.631007108539167177e-04,1.790419060536019898e-03,-3.268572057684999109e-03,-3.226018163736367114e-03,2.321067638530654141e-03,1.578172821927131906e-03,9.340805325961311530e-04,-1.187398999871351613e-03,-1.211409009938526011e-03,7.654506139429944093e-04,1.014543277484067536e-02,7.066406672948294214e-03,-2.519849510482412638e-03,-3.368899620567644324e-04,-1.552231916991500581e-03,-6.703485404
306911213e-03,5.150556191918195456e-03,6.260359905423188744e-03,-1.183464860607807414e-03,-4.316864266356474321e-03,-2.340867192896306081e-03,9.396359038195236865e-03,9.171349943020745341e-03,-6.266016008009749234e-03,-6.459779313414097538e-04,1.412898161790763184e-03,-5.584632114066230384e-03,1.264244201360081583e-04,-2.208862224907134821e-03,2.273061847225625858e-03,8.362573030396709337e-03,9.666984289807965813e-03,-2.121457890832450979e-03,-1.057474010566617330e-03,-5.344162462632267956e-03,-8.164917051733581663e-03,-1.686883313840845154e-03,-6.556724399021816342e-03,4.905070974997608781e-03,5.508002705365307302e-04,4.591117655534198566e-03,-1.728398733732275372e-03,-1.583224481328624573e-03,-1.348665235616358822e-02,7.359132167451814133e-04,-3.392967394664228586e-03,3.817361789788133349e-03,-8.455140896296049627e-04,-2.199950095650364795e-03,-3.477171256897708222e-03,-7.304194060527163107e-03,-6.498458243332801533e-04,2.196368315856770184e-03,-1.438815832614988126e-02,-2.845738519670093416e-03,-5.297089971917900812e-03,1.081448051776363883e-02,2.360172965458002181e-04,-3.471284775200537704e-03,8.718921214768365988e-04,-2.075322548198526174e-03,3.849048959773522256e-03,-9.022368268418240120e-03,-5.738301721734961631e-04,2.544252701083428176e-03,-6.554235240573726140e-03,9.926600841256520008e-03,-8.960952246168165389e-04,2.877268300509606015e-03,4.894651556044032946e-05,-5.377389687769172123e-03,1.221571922021781912e-03,4.721976800391228246e-03,-2.079223063020180489e-03,8.960105732607337101e-03,-2.701819564642059940e-03,-2.908728274461262059e-03,6.259936848294547253e-03,-8.673477865070049198e-03,-7.280374291108570456e-03,4.815882773464568931e-03,-9.450078148726058155e-03,-9.634107664180819328e-04,-8.583644390003889943e-03,3.251337275224238776e-03,-2.719641265722977548e-03,-1.164255249062090374e-02,-5.321228972105657151e-04,-4.537144397023886756e-03,-7.277365764052557383e-03,9.675116452198057743e-03,4.398960756503193792e-03,-1.641802220850715685e-03,1.173754885918416695e-03,-3.634962500697931512e-04,1.279579069335799388e-02,-1.968822561473436168e-03,2.877088202827108600e-04,-3.577592269337179394e-04,-7.022634741722487070e-03,-3.571819065669286364e-03,-3.087235903113124780e-04,-1.218479900088489595e-02,-1.057639098002476552e-03,-9.798756308259915469e-04,1.583570733194405002e-03,-1.291964067154616766e-03,-1.869184587730128534e-03,-7.775672173895000145e-04,-5.077018497387762903e-03,-3.121663809617883801e-03,-8.719890565303617608e-04,3.709750508671727509e-03,2.374274788934042230e-03,-3.431439080155917597e-03,6.327123373065924976e-03,-2.409805044000258838e-03,-7.406204671698795976e-03,1.834906770173232128e-03,7.150683927548614214e-04,-9.832946277690601811e-03,-3.924753732280454599e-03,-1.057702285628093000e-03,-5.339165151431104085e-04,3.522592829376503410e-03,2.836948166819668916e-03,3.949750834523230170e-03,-2.480123717547125760e-03,8.059068830981255122e-03,3.263287505846372734e-03,-3.489240038900855417e-03,4.906272584511495779e-04,5.622805520955589031e-04,1.375007800419732369e-02,1.050781424272287205e-02,2.006523056383180317e-03,-6.947050855333535523e-03 
-7.573532618243026225e-03,-4.078974475904690623e-03,-1.481668776852257223e-03,-7.354930974203509070e-04,3.376995048602314704e-03,-8.213148619194783964e-03,-8.664218683095106094e-03,-3.542802449294054495e-03,5.506157243334697249e-03,2.131047226553445337e-03,-4.850758996612219119e-03,7.782315018603379840e-03,-6.426750492651296665e-03,-4.810734698545760836e-03,-5.867466996809526415e-03,-3.705453851373666924e-03,-4.903708683639723502e-03,-6.817632832349709675e-03,-5.839016078240139886e-03,-3.701761223030060593e-03,1.207591895027500077e-02,-3.685796057002096483e-03,-5.748943337019326791e-03,-1.114915990987165768e-03,6.735985678793741591e-03,-4.715574556575304228e-03,1.287564370156874275e-03,-1.012252710982530389e-03,-7.657455349099028842e-03,-1.509847637738694027e-03,2.263551035963778327e-03,-3.362272114827360319e-04,5.550572193527779884e-03,-6.882944643295926292e-03,-9.354155368747400735e-04,7.960370746175580992e-04,-4.823274837427181209e-03,2.109989817141559761e-03,-4.456659581510647762e-03,1.078309669806057185e-02,-5.292905735649887020e-03,-5.266781211810175195e-03,-1.004959203689399783e-02,5.958334594816737557e-03,-4.820672333749016943e-03,7.421607347736274007e-03,8.817769205912181010e-03,-4.812549624383505135e-03,4.429131015627221085e-03,5.667013421947872734e-03,-8.683628445335907567e-03,-7.138354299095079833e-03,-6.823003558612939119e-03,4.699018972389563953e-03,-9.784029170228753479e-04,1.036867993388421129e-03,-3.276305432136659220e-04,-1.730856559741151017e-03,-7.138843102031139365e-03,-2.951423628240953511e-03,-4.949214416832869827e-03,5.577288231772916955e-04,4.403455739686683128e-03,-3.453856841906194873e-03,-1.226414696646379362e-03,-2.357085370432591368e-03,8.535578346812281196e-04,-2.017592550970533430e-03,1.051465639015957548e-02,-2.243470510265490464e-03,-7.089616189287815430e-03,-2.617992992243019928e-03,1.564710757870478748e-04,6.099756266986732650e-03,2.703601015590758206e-03,-7.024063091981488927e-04,3.600411089481404467e-03,-2.530655088616401253e-04,-6.182708035891939895e-03,-3.838340625874514089e-03,-4.248500914974727601e-03,-1.877439115978022386e-03,-8.993819771658487228e-03,1.489232477898345779e-02,2.995474728188497922e-03,4.786220135172590887e-03,-3.137270826781152384e-03,9.420628833347348061e-03,-6.049264368053640893e-03,4.472230197741611062e-03,2.135636101235512813e-03,8.696331947090388606e-03,3.047991539389405431e-03,2.164191362047024510e-03,9.645850231194385768e-03,-2.156290927781865600e-03,2.393172641451462527e-03,8.353185589466375835e-03,-4.930694541545683519e-03,-5.141624642063300984e-03,7.785475342589743067e-03,-3.768022306948275169e-03,1.789191564617269043e-03,-3.477652715150927865e-04,6.621376841397103030e-03,-4.621814590235337299e-03,-8.994774134670813215e-03,-1.042597447199805800e-02,2.836022384768630260e-03,7.209291983909729565e-04,3.016815332461043019e-03,-2.395105435326495141e-03,-1.820666838301437593e-02,9.865879575019062889e-03,-5.311692304164187563e-03,-6.323011649766421133e-03,-6.753416325170590040e-03,-1.967917190105792670e-03,-8.550160141879935543e-04,6.261279735156949008e-04,3.441106062770565421e-03,-6.319902003501821734e-03,2.575053083570007142e-03,-2.304113055637751020e-04,3.309910746853470236e-03,-5.305709256056678064e-04,1.520406426601376374e-03,-4.197408610514546598e-03,9.505916195426202669e-03,2.622771277408689639e-03,4.156783036554342718e-03,8.101180030330137199e-03,4.714739238068288950e-03,-4.636830878357599099e-03,1.146189546091295509e-02,1.796675201801283331e-03,-1.415765613247908870e-03,3.115462461636282418e-03,9.400676641470930364e-03,5
.591136764546185932e-03,4.825209822700401302e-03,-1.049855049811474969e-02,-7.864110130143263030e-03,-4.433053511858705761e-03,-1.108073359443521822e-03,1.490096000496470919e-03,1.463864374669597047e-03,1.455701886285452147e-03,-1.724265695125406020e-03,-2.856504971600572727e-03,-5.110595202413582318e-03,4.924127333882265950e-03,-1.291607660681760340e-03,1.467115301909693598e-03,-4.903247163008069183e-03,5.223739551183170099e-03,2.329558920748451677e-03,-5.565422885230320894e-03,5.613936450318327692e-03,-3.398464473982907522e-03,2.005049142710803745e-03,1.015916758115651373e-02,9.585752993444581274e-03,6.642703706715841466e-04,-4.341204840100318721e-03,3.077603655807740080e-03,-2.120924310120012295e-03,-6.594784011311972428e-03,-4.525803311329475338e-03,-3.613873815225160683e-04,-5.938179033310880990e-03,5.569205309418017065e-03,7.017640622292577179e-03,-6.443164773550165252e-03,4.982605880251058869e-03,-4.628262159082555924e-06,-4.795750453320954225e-03,1.134144720952649050e-03,6.953976205153726206e-03,-4.352985295738729700e-03,-2.977965179588925280e-03,4.395852368770603903e-03,5.678538024721513366e-03,-1.049886931479399998e-03,3.254589878427802292e-03,-1.725042430706275910e-04,-1.029503850376667536e-02,-4.956331999866616170e-03,9.101379434933349472e-03,-3.711119094977122549e-04,-5.933772786940590301e-03,-5.829755565409214073e-03,-6.210951093948512310e-03,-7.612471221574865221e-03,-3.393611231294065868e-03,3.273341896610016625e-03,-5.019881348279834135e-03,9.286522789793301685e-04,-1.306917293054346303e-03,-5.710568686666085171e-03,4.182828004016348432e-03,-8.087654220300515570e-04,-4.469219480975064433e-03,3.246819330659010336e-03,6.868905599204686539e-03,2.582685978888056046e-04,3.501469789595673242e-03,-3.486751399414524197e-04,-2.062301558086456442e-04,1.078548610873359542e-02,2.251165372588006738e-03,9.956287146299742583e-03,-7.145929385252951368e-04,4.718695019840144465e-03,8.496417295223471569e-03,-3.746665652348874723e-03,3.817134090269853029e-03,3.802480899458659039e-03,-5.659279111296013770e-03,8.129950272585810198e-03,-3.299367100761953567e-03,-4.111336871135693252e-04,-4.444873770823816461e-03,3.782444129653003868e-03,-8.481835028552103459e-03,-4.394500291694639765e-03,4.406160505024896796e-04,-2.303659842115217838e-03,2.857578046481498496e-04,-7.843903424863570700e-03,-1.817131918324530609e-03,3.541191839661330873e-03,2.164914736428459434e-03,-7.599402451301699030e-03,-3.148728774256644419e-03,2.153904430108549792e-03,5.065677493030471684e-03,-1.643730919594029111e-03,7.342600264889619262e-03,1.154366923780552126e-02,-9.195814880826616794e-05,4.403216757138690880e-03,7.968533551799595899e-04,2.710163281375245394e-03,2.975244691699584481e-03,-4.256032416586519491e-03,4.560102005444586527e-03,1.735410078412878351e-03,-7.536673649608553391e-03,7.706169456631515279e-03,-8.452447831850076754e-03,3.566188915660063481e-03,-2.332594784814943836e-03,5.576690541699443580e-03,-3.308570482779489823e-03,-4.450174303778105418e-03,1.621219624785447415e-04,-3.321662257333873360e-03,-3.221058080099559965e-03,1.161598078888722609e-02,1.148105617577326737e-02,-8.604015790344164477e-04,7.007923289652193827e-03,1.357301371848757855e-02,1.094032579003849197e-03,9.810210975850139608e-04,-2.255604363751443546e-04,5.515383474148575008e-03,-9.875133788750109845e-04,-3.854966123009768205e-03,-5.205855304325841988e-03,5.651317657196520614e-03,2.717226046642812112e-03,-2.222496903820262142e-03,-7.762870918775961401e-03,7.743022134188885059e-04,-7.494794303920174552e-03,-1.107716112154652445e-03,-6.2033518
53709517319e-04,9.365367909320087420e-03,-3.540171225865881496e-03,-4.742544852210948091e-03,-2.749226141381366263e-03,2.750696441324214109e-03,-3.994164448610637921e-03,7.342746870929923488e-03,5.843191595342811691e-03,5.493373150122888258e-03,-5.093378669204787906e-03,7.531555311026380322e-04,-3.481231230223118776e-03,-1.288760781598382449e-02,-3.903468360276609542e-03,-4.077410211225472939e-03,1.560560816859115187e-03,-1.800979369766098461e-03,-8.167294457337350572e-03,-1.498691480186989909e-03,1.420717832740848010e-03,1.894592915221791213e-03,-1.405129021250934034e-04,-1.802335231856398191e-03,4.758634112171591761e-03,-4.551475877168379737e-03,3.600593106056644826e-03,-2.314115675058540152e-03,-5.903111009509799147e-03,2.343208384250194867e-03,2.488094667088549487e-03,-2.047949665750360519e-03,-3.394079432393389689e-03,6.632484761135028391e-03,-3.037152247749228678e-03,8.245203747957195739e-03,-7.571597494466102835e-03,3.946990431416611792e-04,1.442127381696586309e-03,-1.099547369658503441e-03,-3.912076655123760370e-03,-4.194407630557659729e-03,3.713394037471074490e-03,-4.008959869036656312e-03,1.106124532386198274e-03,-1.175271875713363671e-03,-2.871599708943983233e-03,8.081586119380663319e-03,-2.602122156240861510e-03,7.061894247264268072e-03,-6.783882733229998178e-03,-6.769881781555615527e-03,-4.405831491179677470e-03,1.461481171044024365e-03,5.618609318210855338e-03,-4.469974200624939227e-03,9.876248685229342683e-03,1.120803357214282618e-04,1.017496591719813624e-02,1.511154188683474023e-03,-1.349007074118704387e-03,2.746073504968805836e-03,2.705323125842278801e-03,-1.194889970335042111e-03,1.969705832317739450e-03,-4.379280233196696805e-04,3.005507326220579339e-03,8.982762276275258135e-04,-8.307568588864791765e-04,-2.582694351295356048e-03,9.845792764466848243e-03,-2.650405301431622059e-03,-3.172092350207021422e-03,-7.608451319992216334e-03,2.443380329568953929e-03,-1.603873093605665120e-03,-7.987822139144760275e-03,-1.022269488517946277e-03,9.272230881025323238e-03,-3.917728285844558622e-03,5.962989441931603428e-03,2.130646403017538704e-04,7.977643734123443867e-03,1.522234577671846903e-03,8.424755462002881182e-03,2.785932289866425646e-03,-3.068836992051470511e-04,7.584576407403452111e-03,-8.878465578074279732e-03,-7.169304172725807006e-04,3.253061893413949392e-03,1.195276128610067712e-02,-1.693191142640835698e-04,-8.910976517482006171e-03,-6.152460490814254043e-04,6.372965005728628042e-03,-1.729064613782934625e-03,-2.623682946049503070e-03,1.208927669848662914e-03,-4.450525615651213483e-03,-5.630603116567871758e-04,-4.202801753651305358e-03,-8.178644273067442027e-05,1.527995222127240520e-03,5.077093181741214496e-03,-1.201969432098383764e-03,1.023466822179714504e-03,5.893477200654183637e-04,2.139169268165684763e-03,-5.945639633574499247e-03,-8.499376333244359084e-04,3.336704689676548525e-04,-2.030344313792625463e-03,-4.673253097499260010e-03,2.410841292826012780e-04,6.038554129199326381e-03,4.380258011812386727e-03,-7.005792707923167405e-03,6.364387267813883912e-03,-6.077394688946993057e-03,-3.197342147658785959e-03,-1.082519063164255830e-03 
-3.747493771474818235e-03,1.461537229747065071e-03,-4.020512679761466257e-03,4.627382099488290199e-03,-2.679952686560431972e-04,5.216915815206426589e-03,-3.407963102584370650e-03,-2.435613613967389617e-03,8.601658298076341478e-04,-3.162557526626900810e-03,-1.356831353533578246e-03,-3.105390515500384974e-03,7.429492379413928441e-04,5.058058284033046907e-03,-1.356512827265142279e-03,-8.714388187130880231e-03,5.202382897766628586e-03,2.302402248357858418e-03,7.242668578945994293e-03,-4.931922233723169333e-03,-4.546969739172485037e-03,3.318904675016319441e-03,-3.605039286545637899e-03,5.053115876266970176e-04,-1.780846207908451948e-03,-8.296718918144251401e-03,4.663696068193954117e-03,3.106233762147284864e-03,-4.768619236937025538e-03,-5.374034621000756433e-04,-2.003136442398460455e-04,9.462748411758918404e-04,1.541061858112944671e-03,5.054209268346591954e-03,6.493727262174590550e-03,9.259960029530755685e-03,-1.080891740039486258e-02,5.467631605477885418e-03,6.737665149884305808e-03,-4.353750054562842940e-03,3.054157219976519257e-03,-3.217695732235592489e-03,2.543283681311560036e-03,3.532644365876736175e-03,-8.214419222984499502e-03,5.463191902266389911e-04,8.390587062955676611e-04,-5.008324385488320714e-03,1.223566224840205083e-05,-7.803391425487925508e-04,-4.254656657261549004e-04,-7.056761953927559276e-03,-4.751974196097416905e-03,-1.844782517325930897e-03,2.081636266244840834e-03,-1.698929021869826327e-03,3.529821964610643358e-03,-3.654664460558672973e-03,-1.023542161049528776e-02,3.655592019665100320e-03,5.729740708545656702e-03,-4.760838962182481135e-03,9.904208608380871137e-03,-5.796011913755634093e-04,-8.230107269566121556e-03,-4.077086024137290855e-03,-1.692721830362211232e-04,-2.203087739724161919e-04,-2.844490899673765474e-03,1.708503609369821635e-03,8.177293472359532106e-03,5.041950578446137247e-03,7.129167061653022208e-05,-1.133434243278558327e-03,-2.484635502341028378e-03,7.729297294264803053e-03,2.776487378553049376e-03,3.772803340178489898e-03,-7.690036358020330316e-03,-6.871617666620638901e-03,-3.236257793082637085e-03,-3.683617873480696334e-03,-8.737427136495341806e-03,-4.790702737793240545e-03,-5.501181521035994034e-04,1.855240890764073096e-04,3.390817186278897544e-04,-4.403235744510155918e-03,-3.861594622803081987e-03,1.283513236878444652e-02,1.653248955200660713e-03,4.308572746356885246e-03,2.011431196074348320e-03,-6.221297009753229239e-03,1.886080512451693975e-03,7.242140904556221802e-03,2.215368111650839727e-03,7.248477722010366123e-03,-1.988711039588298671e-04,8.450773590366439728e-03,-1.430507023750407589e-03,2.284723661508091582e-03,2.541233404037505656e-03,2.302868084170427100e-04,-5.714718715279860006e-04,-9.263811855850553889e-03,-8.837615902410965452e-03,-5.091250323637791335e-03,-2.034859419263813744e-03,3.843916002129512668e-04,6.693285845905698125e-03,-3.737385991516325393e-03,-1.238270166381854991e-03,1.566543242329346578e-03,5.104540400420589340e-03,-3.614371363108698828e-03,9.119315538952493111e-03,-5.730902810327803514e-03,1.723345993858232950e-03,-6.006636321712573341e-03,-9.582931156976226550e-03,-2.239138135129211803e-04,-4.569226782337419690e-04,1.581217611414692872e-02,1.101301386919289159e-02,-4.590872834509088750e-03,-1.284347834479502778e-03,4.616917675568803919e-03,-3.061181849443273158e-03,1.267434313576960674e-02,1.606773578356329767e-03,-1.095871347175917006e-02,-4.326347857276790707e-03,-1.188338376357681025e-02,4.103083897035086393e-03,-9.844427558059743594e-03,3.990825248935678422e-03,9.223873816569669792e-04,5.752504529402454464e-03,-5.24364
4486645525522e-03,-2.093471621009164571e-04,-2.707113451239489058e-03,-3.691992621727493550e-03,-1.263685528356157071e-03,6.431469081992546855e-03,-6.264327524763194360e-04,4.200701557203629852e-03,1.204825944165343297e-02,-6.438560140783284626e-03,-7.841733749921968311e-03,-1.266550140990074617e-03,5.261862929825773631e-03,-1.976438608188406821e-03,-6.987950836924049745e-03,-1.113036274015288948e-03,-3.103035221904382104e-03,-5.173641260096871648e-03,4.069672785801712635e-03,6.058423909259415653e-04,-4.010071627671807468e-04,-6.659864662880641585e-03,5.129187824765056927e-03,-5.629961699036447817e-03,-6.833761367847151005e-03,3.464517065141889569e-03,-1.159861263961252387e-02,-5.993059255825946598e-03,-7.322167511583851755e-03,8.130556883391072687e-03,8.820485282285669970e-03,2.586577823545891264e-03,1.062171910769058687e-03,-2.467223942148401555e-03,-1.317397938268411589e-02,-9.233860473070216665e-03,-4.108847814295093484e-04,-1.728803580810904040e-03,2.378793059518590283e-03,1.399927244001808696e-04,-1.823273866833351881e-03,-1.206754806426646367e-03,4.230312073971338998e-04,4.924970309407268772e-03,-1.822930054048066753e-03,4.655396172216506368e-04,4.503965236139194535e-03,2.666869740955148029e-03,-2.978025358639020855e-03,-6.356800417239477467e-03,-4.573798623163502692e-04,2.340841027081459660e-03,-1.853558604692539732e-02,-6.123070050175568766e-04,-8.219949883632840558e-04,-6.816030887866412168e-03,-1.898268692674238978e-03,6.193437700515930840e-04,3.315710911203099878e-03,-1.546900137807511420e-03,1.671578722411445070e-03,-5.471684519768909216e-04,1.059304652186287415e-02,-5.188446284191805926e-04,2.365978811233417599e-03,-5.966574452335271332e-04,3.850944678125291405e-03,3.186497426769861985e-05,-7.497528754938557029e-04,1.901534137321230584e-04,-1.716909286868601543e-03,1.658026447396134732e-04,-1.341494647156159757e-03,-2.466193008324931660e-03,-3.613862741548173798e-03,1.565962233450926555e-02,3.733985159388034884e-03,-1.154034852902009051e-02,-5.179534354245300155e-03,-2.690583025118055936e-03,1.750844270143160327e-03,-7.535802735698722728e-03,7.572200315928864084e-03,-1.033789037695841129e-02,1.406409635267826799e-03,2.606209795107474175e-03,-4.901264042037690831e-03,-3.761562516492786452e-03,-1.107756266780459808e-03,-5.018478611299935846e-03,1.421475588491064425e-04,-3.354238946153019485e-03,5.195603809821112973e-03,8.882297353090904269e-05,1.612548354788578397e-03,-7.662225245477030135e-03,6.887257451931968338e-03,-3.365288939694315983e-03,-7.444880448638909951e-03,8.230909166526457349e-03,2.408459858998479065e-03,5.187836192853936303e-03,1.083439097548862147e-02,-3.933154441028463219e-03,-4.647644672409272613e-03,-3.699336389625140862e-03,1.956442695280162080e-03,-6.303433971945726806e-03,6.136679769673152388e-03,8.862489947191726615e-04,2.180915457402820290e-05,2.510319740800550248e-03,2.876491813022226049e-03,1.520038976237136734e-03,3.579436781698634175e-04,-4.958524537210932167e-04,4.585576532526049361e-03,4.420477429663876440e-03,-1.065655360887447599e-05,1.536437274264740999e-03,-3.468822502305081087e-03,6.721161482717249221e-04,-3.710034366614788515e-03,2.638853680419669858e-03,5.944580717326845148e-03,-2.412236215843261290e-04,-3.133323246705353986e-04,-7.796233905754123249e-03,-7.237196845151440579e-03,6.266670785611286537e-04,-3.065987295555184112e-03,-4.050995763423748315e-04,2.729638195159626250e-03,-3.558463894305720614e-03,6.790042891866563714e-03,1.079900542471346446e-02,-5.113660693490005779e-03,5.732468965123993856e-03,-3.802342519390322639e-03,-7.038049915
017777474e-05,-4.697279372945760596e-03,-8.716477811275366583e-03,-5.642719301825234327e-04,2.415136689526075579e-03,1.143278107965639730e-02,-5.642705839427594522e-04,1.033395932250372358e-02,1.908449150100672122e-03,-6.724141799362245986e-03,5.747675192136204349e-03,4.789572350434306326e-03,4.714713607573446999e-03,-3.824473793569859673e-03,-4.124863371357241279e-03,-9.012273789264863217e-04,7.321195856735759769e-04,4.014911791133837608e-03,2.559944600993582786e-03,5.419263743205538462e-03,-2.251472141063302554e-03,4.922956039520284648e-03,-2.692649656181281270e-03,-7.800400010787461071e-03,3.379341772075657429e-03,-1.512311673215381352e-03,6.310971460771740496e-03,4.370809837070129163e-03,-6.835725531930593067e-04,2.276663934500493432e-03,-2.210153510287260654e-03,7.885335937805679077e-04,4.429477887581889303e-04,-1.612909999169769536e-03,1.145781560045040087e-03,7.596381097035446658e-03,6.402596721748703319e-03,-1.671706633145203817e-03,-7.165286235770765635e-03,5.054238428742093003e-04,-2.222200219644374000e-03,-1.954039033374253744e-03,-3.116497611693636038e-03,-8.940494879648293661e-03,2.455827921501010317e-03,-4.768572960346448633e-03,-6.466284824505429303e-03,-2.441871034709804447e-03,3.331386410489636080e-04,-8.721070295618485457e-03,-4.650626768764663083e-04,-4.887673138534696666e-04,8.163489086277167387e-04,1.100841433830778131e-03,4.166371031349731281e-03,-1.921962405402500456e-04,3.082917755745807746e-04,-3.757334197193678681e-03,1.649726090146956536e-03,6.874440206447939364e-03,-6.794447873778408468e-03,-7.826067592116337651e-03,-4.915885822095491771e-04,-5.524812947768895956e-03,-7.249979040264081060e-04,-3.960010363505423597e-03,-1.234067911908532994e-03,2.283585452523847256e-03,-7.065572889910567977e-03,5.099401187663295427e-03,-4.400145320022722135e-03,-1.941545482085394207e-03,1.117313665863278131e-03,-7.188562974381960654e-04,-7.373383664143418927e-03,-2.920503617010331791e-03,1.474818882788369668e-03,-4.375837652554144010e-03,1.561169051672329881e-03,1.000257908370910881e-03,-4.035917365019786075e-03,-2.092999482447925261e-03,2.881623770653587643e-03,-8.030598113356279778e-04,-6.645625571214600416e-03,7.228085371021746725e-03,6.143522842875072474e-04,-6.653873366421840171e-03,2.311991604678010140e-03,1.411892881065365942e-03,-1.681105010948039796e-03,-1.023074182132346216e-02,7.958706700532579281e-03,1.121138542835016776e-03,-6.866984938961943376e-03,-3.217361846790496843e-03,4.115404448138409652e-03,-1.309841798153742491e-03,1.598391144479635291e-03,7.512747331117167497e-04,5.041346885235992517e-03,-1.989038540784342918e-03,-7.673666086903799255e-03,1.646112066241256760e-03,-1.237373036198004435e-02,1.849698079133724410e-03,-1.273981984755918374e-02,5.474880252433393310e-03,4.957803044669268494e-03,1.870764569755310443e-03,4.799681101045868807e-03,2.677751878164826797e-03,6.997410870757253543e-03,-4.137648751409940193e-03,1.213540736011553718e-03,-8.272956051212637793e-04,5.802814660531464483e-03,-7.781331924447408869e-04,2.050983270295878708e-03,4.239568369868319114e-03,-2.366875382341121058e-03,4.652946309965742128e-03 
3.403576261936838709e-03,-3.135070293263856993e-03,-6.000939269199209204e-03,7.851368091513193787e-03,3.557795903507784529e-03,-7.676764490088498413e-03,-5.853562823572220058e-04,1.227503880519710144e-05,1.202238917243161421e-03,-1.223379251035055073e-03,-4.214373391726459854e-03,1.043140500228769123e-03,-4.343686377801172539e-03,-4.011610929631139627e-04,3.039914241648715258e-03,-9.297172202431888341e-03,-1.639696308504700033e-03,3.785658152387630306e-04,-5.217192267261205191e-03,3.603784697795492462e-03,-5.216406904543178027e-03,2.295965962304356497e-03,1.185647748473988954e-02,-4.677212084447292077e-03,-1.861709111574222050e-04,-1.174490422843472819e-02,1.590812025058045091e-03,-7.814503016432366306e-03,6.841355577159666116e-04,2.938058749538220780e-04,-1.814962773421540559e-04,-2.558131371463237829e-04,7.087068693784635831e-03,-4.236761677489410405e-03,1.583233115601119035e-03,-4.400731395833593941e-03,-6.288433993369987353e-03,6.054925399168621832e-03,-2.050670172453410590e-03,-1.907460886941077585e-03,2.114989547159175885e-03,-3.866337200576774446e-03,-7.968421939174079273e-04,-8.717261660771016916e-04,2.177320035718366149e-03,6.930368363392115129e-03,-2.201834429063845820e-03,-1.747996821739060619e-03,-3.726683466270062656e-03,4.654482616643600807e-04,6.555150224336991731e-03,-3.827000351242322226e-03,4.092907489671886168e-03,-3.997851530187951016e-03,2.912855368891013007e-03,-5.732023053410880038e-03,4.270261110675347381e-03,3.118684803505645739e-03,6.760058237898470233e-04,-1.997762965727681566e-03,4.838401814091726309e-03,1.013063814958824020e-02,6.398165852982155323e-03,5.956318660161229878e-03,4.175990761414942465e-03,3.200857080935945106e-03,-5.529101407074549224e-03,1.070796512124922275e-03,-6.014614204307972174e-03,6.544385920290486831e-03,1.035326677172715079e-02,-7.761143143872097257e-03,1.210352538719468470e-02,-2.830309943007349328e-03,1.125519661420163603e-03,-1.602637230746656316e-02,-1.815761174587299199e-03,-2.869934528481736438e-03,3.428124467347848630e-03,-5.298679227397596996e-03,1.665507560972374120e-03,5.648573291228315384e-03,7.579914471465380721e-04,-9.367618980576801263e-04,2.175330018761769540e-03,1.844140919310351991e-04,5.160509352360884367e-04,2.616766918260423833e-03,-9.537881219585139791e-03,-1.006089874864547068e-02,-3.338315275383301351e-03,-1.093890808048160208e-03,5.599364746957406422e-04,-1.888494872304759568e-04,-4.596366033711976086e-03,1.847971282580664391e-04,1.287148634126181190e-02,-2.718993914899015658e-04,-6.764620889275702389e-03,-9.860564663641897126e-03,-2.019013887143064796e-03,6.501240242230469334e-04,-9.481081030891089960e-04,-1.012917043871653516e-02,-1.725197687517984963e-03,1.657390774072974027e-03,8.920806700200083553e-03,1.836982619326973628e-03,3.053312894481572011e-03,-2.228639774918914334e-03,2.049755125289140121e-04,8.147657567057061886e-03,6.793852021531071594e-03,-5.207695000521697301e-03,2.666021264760988930e-03,-4.218459211426418831e-03,-4.875360680816532948e-03,3.740466383152471572e-03,-4.104293656494682578e-03,6.489010532970343123e-03,5.887997233245475734e-03,-7.391170378414912111e-03,-1.775905223200119993e-03,-4.133154361254761343e-03,3.760649717520940689e-03,9.786593888930460172e-04,-2.858207141405196341e-03,-5.142739141520718225e-03,3.557929859047455172e-03,2.667311484903619329e-03,8.952589684625778324e-03,-1.667679759456475175e-03,-5.460504322656876616e-03,-8.116114505403731680e-03,1.854240253063453711e-03,-7.127771080147861198e-03,6.969739494436376359e-03,-5.710601830893229265e-03,-1.019614425472794780e-02,1.1988284
75051421857e-02,-6.308694302229543582e-03,-1.914098137144569283e-03,-7.517048858457994059e-03,4.517200755211757575e-03,-1.626726177722496438e-03,2.110673445795176358e-03,3.334869535749490632e-04,-1.009427050074816062e-03,2.128365069447794421e-03,1.891768154668637975e-03,-3.008359562542476523e-03,-2.091289074530753233e-04,-4.029771676214208135e-04,-7.613574304900079482e-04,8.353634810464972233e-04,4.100036951419671824e-03,2.234068150650787496e-03,1.236996480386869796e-02,-2.421860260081884052e-03,1.029811982976987401e-02,3.303350346107222345e-03,7.599975643859367431e-04,-4.794835209046776313e-03,-1.729455791190450797e-03,-2.297480023052970630e-04,2.922714746542628365e-03,-1.156502471060146626e-03,-3.895463416878765500e-03,-2.808707761069611977e-03,2.300075403073271758e-03,-4.851716990629848826e-03,-1.561490805375307040e-03,2.385411652715456379e-03,-5.528178022382853295e-03,-8.373218114636647377e-03,8.414953234592355714e-04,-2.655734272027220500e-03,-5.436864436301337526e-03,7.461702114467452622e-03,-4.353582194706781885e-03,1.265977140862277949e-03,4.700887927668966765e-03,-3.337731574846926601e-03,-3.514431802007115819e-03,1.685573834295429289e-03,2.899559768497098761e-03,-7.248014461445081470e-03,3.989718574195334246e-03,-7.452377727197380050e-03,-2.987946493075987163e-03,-2.480329960968286583e-03,5.435272632430581535e-03,-1.406334907553959688e-03,-4.957752080452386455e-04,2.199765576037881684e-03,-2.166885262542439950e-03,-1.075505091512743081e-02,8.859552082872510642e-04,-4.426565122248952641e-03,-6.589316812372934991e-03,-2.571467013327027006e-03,-9.056591360209654759e-03,-1.068720474849899635e-03,7.845486813134665109e-04,5.991657671935178564e-03,-4.056010558707273672e-03,3.024583530803027359e-03,-3.794154174749476711e-03,-4.967793358524641260e-03,-9.700794993290940164e-04,1.514051012007162174e-03,6.841604385680412348e-03,5.918662672702649542e-03,7.089747215237302133e-05,5.408701348397195775e-03,1.666470190233557992e-04,-6.854816997747794662e-03,-1.125154685705600892e-02,-3.092677224614915916e-03,-4.861711751576610306e-03,4.333099923410515565e-03,-1.067835114718361164e-03,-8.567339331526726848e-05,9.368339857147923652e-03,1.260152048119134822e-03,4.941618471270852256e-03,-6.713715598934191613e-03,-4.322395530127034861e-03,5.315904919574402504e-03,-5.643211777773956758e-03,1.764536828936578289e-02,5.342484508139512447e-04,8.022530000998477281e-03,2.918555030158934572e-03,1.470013015089759699e-02,-3.323451295098553719e-04,2.014063343001341704e-03,8.034394810812842472e-03,2.058731189729128951e-03,-6.554303266853279096e-03,-1.633066919682304779e-03,2.421680510652964198e-03,-5.665391295020322064e-03,-1.967419811132585777e-03,-4.901307440535246883e-03,9.885395350591347566e-03,-7.640297521598117016e-03,6.962996552506721740e-03,1.872279728096970649e-03,1.399369379254126284e-03,-2.536873477599144080e-03,-6.794069605929578017e-03,-4.624546254707652751e-03,-6.575059504660278262e-03,1.820362421396630763e-03,9.346026261590950407e-04,1.488350520012710456e-03,-3.407572689963083833e-03,-4.165607902317200115e-03,2.503954076221682687e-04,2.077550129256615016e-03,2.281487619836381994e-04,-3.058319637559850281e-03,7.400296060553360721e-03,3.750888606457010443e-03,5.499862301461140015e-04,-4.118028617435353066e-05,-3.651202312094222087e-03,-2.415931506429477415e-03,5.210489598182235088e-03,6.496592413579054502e-03,-5.646914271999868215e-03,-5.454156861400807389e-03,1.162970874452082595e-02,-4.954168808442570814e-03,3.936028862100330629e-03,-1.900420366870593254e-03,-5.838739689418030385e-03,4.31344225148346
6661e-03,-1.742302981318170213e-03,-1.852879681990377350e-03,-4.659395508574882064e-03,3.153586971068720983e-03,4.733476522720493976e-03,-4.555072712022504612e-03,6.322146496044490048e-03,-5.851684985678757590e-03,1.361688270458276048e-02,4.899242947312280536e-03,9.295810232635039541e-03,-6.682433469313824449e-05,-1.304684706309161514e-03,-1.300533006587280835e-03,4.881031650453480334e-04,-5.558547896451691145e-03,7.607276978671207498e-04,6.086081135567737721e-03,-8.775050693221627513e-05,8.094454056093911259e-04,2.518921524156911805e-03,-4.090590764450756020e-03,-3.546978508933210789e-03,-9.739524721475942629e-03,-6.191453904775797226e-04,5.373285243413139685e-03,-2.117764386104230030e-03,1.783448076551839281e-03,-4.860772069891558652e-04,-9.486948712737548056e-03,-3.740074450112991934e-03,6.942342620994198556e-03,-1.193919881811592552e-02,1.252424180532038869e-02,7.377327309321889647e-03,1.647441817019817175e-03,-5.947180421716033449e-03,-1.067253301135958829e-02,2.594680190131731356e-03,2.519017209391692051e-03,-4.726066677336280067e-03,-1.261594693905561480e-03,-5.057828823122781196e-03,-1.119865226444018147e-02,-5.437704376530725671e-03,1.126340239165250778e-02,1.756456312664522301e-03,2.263917980806080723e-03,-6.499232948584251698e-03,-1.319016482378370433e-03,-7.629142423911612625e-03,2.656827104994193521e-03,2.373714413131193610e-03,-3.395709163557306381e-03,-9.111906747201303617e-05,-1.451625246242963527e-03,2.873721932321016050e-03,-4.987005461363785801e-03,-3.161991785395097546e-04,3.121225476667194906e-03,-2.014198287808546123e-03,-6.735878873924185670e-04,-5.248886925229631975e-03,2.315226151300095372e-03,-3.467336604654993291e-03,3.449388841602401765e-03,-1.820581866661462583e-03,1.964976881312705762e-03,-2.431068387015405605e-03,-2.125967281177110115e-03,-7.543621082305845002e-04,-1.099259925051080874e-02,-2.132356712002961546e-03,-1.955553291510227481e-03,-7.633077419352718614e-03,6.254628298708229082e-03,-6.716115906251406316e-03,-7.152485207517588275e-03,-1.921844876901035752e-03,3.764667621091801223e-04,-3.450914936075335430e-04,-1.662325302034401917e-03,6.826888337856116054e-04,-7.543319255073506306e-03,6.222153587341622619e-03,-5.229582388326647728e-03,3.631399620702430858e-03,3.416045192923336093e-03,1.054642194372363857e-03,7.333363124603883496e-03,1.385313866304086405e-03,2.026695758582860307e-05,4.195222118825645691e-03,-6.919187157228182767e-04,3.809317815974297677e-03,4.680757099546597327e-03,-7.164898595078345381e-03,2.629197026262051779e-03,-2.397110193382020831e-04,3.373970048265016340e-04,-1.439426777124317098e-03,-4.489749805342562079e-04,3.668866106236740986e-04,3.921019733380384613e-03,-3.907615194001833395e-04,2.093709297978535964e-03,2.902990338408730116e-03,2.218192738738395883e-03,1.639964555285713726e-03,-2.298291982959207568e-03,6.904432494354450882e-03,-1.264021576860401615e-03,-1.198413138015850220e-02,1.077521657740208340e-03,-3.660673020430386688e-03,-2.828545107605143129e-03,7.134112264313374752e-03,2.570346655593182001e-03,6.766941211710552143e-04,-1.387738106380090840e-03,-9.192217306469104357e-03 
4.409901403180009269e-03,-5.856970507903049017e-03,5.412796191539001123e-04,-7.311451097315711106e-03,4.189916751373454033e-03,5.523250021105684422e-03,1.646333358631811789e-03,-1.005703585530569712e-03,-6.743448749124020605e-03,1.916689381413263019e-03,-2.418549081982996219e-03,4.292897783305798208e-03,-4.987745922650745761e-03,8.025949123293200623e-04,-3.932478198759812096e-03,4.169956012314790733e-04,4.464984063398911088e-03,-2.019329719580557295e-03,5.199923419600659298e-03,-6.909878595867278636e-03,1.352222490788910179e-02,3.568095261701704547e-04,6.154041095056116475e-03,2.794297385103605420e-04,2.385131141179058106e-04,-1.725241884709888870e-03,-1.250783571374599864e-03,-3.424592227214168227e-03,-2.302611854987006088e-03,3.123676762007731797e-03,1.598507370642978428e-04,-6.421956330439086182e-03,6.859904867502361614e-03,2.526379040976040741e-03,1.116019727724102328e-03,4.448546901435158560e-03,3.048349717796456932e-03,-1.341996399641924185e-04,4.033034911277923358e-03,-2.559275297590554808e-03,-8.095162304560388341e-04,-2.378932175316473413e-03,-5.024743823521361265e-03,1.007311226970772572e-03,-2.206183141173729406e-03,-4.477025343932474442e-03,1.615247662347524911e-03,5.215962280640211793e-03,-1.146407986935055277e-03,-6.154932186149521256e-03,1.221492989892110467e-04,1.530878840319854772e-03,-7.538153733245594144e-03,7.977712203236980715e-03,2.462795873670155580e-03,-3.483156545752041271e-04,-1.116064257361455703e-02,-4.515184897811365394e-03,-7.324084927894657614e-03,-2.723418626178096373e-03,-5.118780741775950417e-03,3.560040075764456333e-03,1.121452254822977689e-02,5.027044373186676917e-04,6.823000620854471204e-03,-3.098868196873295292e-03,6.955315326487792761e-06,6.430018242460502215e-03,-3.091486550883112625e-03,-5.622119725930916365e-05,-3.303904210908332768e-03,-2.808709084006347288e-04,4.164962533114143460e-03,-1.424356339834041653e-03,-5.337649215943802636e-03,9.292213202855317518e-04,-1.327844196215823220e-03,4.713091895372892130e-03,-2.587818661807503740e-03,-5.223272009841167102e-03,2.735283929072024854e-03,7.192112279639797934e-03,4.183360942017920383e-04,-4.395935167899596256e-03,-1.545651877502442065e-04,-2.300841220776236786e-03,-7.196405879359324409e-03,-1.086764272597730459e-02,8.676534593292170236e-03,-5.247834290301571676e-03,1.138444490336170496e-03,-3.422484789558251621e-03,3.574969747132453884e-03,6.402735321426730394e-03,1.599057374768107038e-03,-1.524911390134721075e-03,5.103758873109480454e-03,-7.774833745262366667e-03,-1.511800654554824831e-03,4.849411572319824865e-03,-9.580101098605081937e-04,1.422055799594793783e-03,4.525081144586339671e-03,-2.199874145065812327e-03,-1.078020326873495652e-02,4.457528433266243727e-03,5.252543319248859262e-03,-5.612366599355721880e-03,-1.232926901884839237e-03,4.275013513456357543e-03,5.797823570738428432e-03,7.375656791904211659e-04,-2.936659876039363331e-03,1.174865919415702190e-03,-5.257139477811015663e-04,-4.542871744265549014e-04,-4.444997860353863683e-03,-6.908592674553870952e-03,2.305535936533084206e-03,4.819399081087514559e-03,-4.764388626825967477e-03,-6.043512347522530734e-03,1.547233307238355001e-04,-1.691894138354247974e-03,-4.637062862328424767e-04,-9.167664830179370191e-04,-5.186431050879373224e-04,8.394355337823898458e-04,1.132865724472381435e-03,-1.029920244386416626e-02,3.758945956533659219e-03,-3.006736161329106195e-03,-3.815529242539736788e-03,-5.109963331531532614e-03,-5.664787453644155348e-03,5.021279402695218337e-03,3.757755804413367181e-05,1.025988262154727020e-04,-8.230845026605635318e-04,8.262888
436813498050e-03,-7.606877244390778184e-03,3.828918892066744423e-03,6.675520596822673422e-04,-1.188539007314609814e-03,6.888346171468493970e-03,-1.220710540213256574e-03,5.839591140041832610e-04,2.358829840256286212e-03,-4.787994899665203724e-03,-4.458703646119541392e-04,6.151953157713515793e-03,-2.036768692120543602e-03,7.836043476558198504e-04,3.422605135292923787e-04,-7.137352919955476833e-03,-3.874946636733076627e-03,-6.042546256620926967e-03,2.680737896010494409e-03,-5.025804001645547883e-04,5.643214519175062879e-03,-3.952956620371527244e-03,-3.267563634765481301e-03,-1.169635644507336161e-02,-5.925462290650877761e-03,5.880076319440784294e-03,6.522452453571996565e-03,-4.150591294601197736e-04,-2.098658335837247542e-03,-9.170610242928417880e-03,3.136235885920626160e-03,7.270270468839682906e-03,5.729426442534182179e-03,-3.621197808820837607e-03,6.666709323290332702e-03,3.394056354839643241e-03,-1.008130606225440644e-02,-3.199458747254643222e-03,-4.138013324223898011e-03,5.954548816693913497e-03,3.387738722254446302e-03,1.729857277467073415e-03,1.210350782570535945e-02,-1.731195504955952529e-03,3.121584056621515800e-03,-3.153772249771051930e-03,-7.847186715978450924e-03,8.162378632326464274e-04,2.262490908716775415e-04,-1.079803105062702084e-03,-1.024311801570781727e-03,-5.032882988747690300e-03,-5.927411014423651127e-03,6.323855921112396181e-03,1.542898191178596230e-02,-4.908793297257661031e-03,1.646031004658500638e-03,-2.439298273808120608e-03,6.154040653452845164e-03,-2.342421984147420155e-03,-3.155618975165318512e-03,-2.578012433739291197e-03,3.153395169813558423e-03,2.763469102240540421e-03,3.519868265527270525e-03,-7.132710961131887191e-03,4.599919789042610820e-03,-1.290249968273687008e-02,-2.106097736261046838e-03,5.458757089029616857e-03,-7.608401290970754315e-03,1.151496242669291462e-03,3.987419749229803058e-03,5.236271798295459926e-03,6.358131574383404927e-03,5.287971754648290723e-03,-1.357411787526758857e-03,1.654310071945569276e-03,4.426710046971688191e-03,-6.956012114405034624e-04,-4.163976083976663085e-03,-1.676175483897939531e-03,1.815603421878982755e-03,-3.435547756220723795e-03,-6.759049908355411335e-03,-2.570911221205467515e-03,-4.888646015427761463e-03,8.598575752084097051e-03,2.004988908582648870e-03,7.285502142276426875e-03,-5.907113740971874213e-04,4.778599888078419525e-03,-1.996236709841965780e-03,1.028800313846564909e-03,-5.459924745229220028e-03,8.944632067792290114e-04,-5.289214314311091458e-03,3.110970785936860092e-03,5.433841634852443203e-03,7.695140089348292273e-04,5.823487236431946525e-03,-1.553915574862032439e-03,2.861590215898651181e-03,2.083191325146943954e-03,2.341990168143465254e-03,1.056447345269401726e-02,-5.056053687951046413e-03,1.881007492589213204e-03,-6.143921325715106900e-03,-2.705699342830965651e-03,3.407400265479875161e-03,2.329064839263767905e-03,-9.819130974141106888e-03,-9.266456832554890652e-03,-6.536375965565324395e-03,-6.375230062798361956e-03,1.895681279978868403e-03,3.180407677475602640e-03,-3.038639419970586182e-03,-3.516338037028748920e-03,-1.692886361515632317e-03,-2.592275288151498110e-03,4.715211645189731036e-03,1.520277151401661607e-03,-2.499758157775812308e-03,-1.770246465545002130e-03,1.257714831453612828e-04,-1.200581347828838909e-03,-3.086872752979315671e-03,2.800097480601759867e-03,1.374946754194706841e-03,-5.174002276677811332e-03,-2.951281178621418481e-03,-9.021130055260468544e-03,-1.009825783159349656e-02,5.518450633788664177e-04,-3.255207199102894307e-03,-5.483782758651892589e-03,-3.445683171344541152e-03,4.74461395638740
3256e-03,-3.148092423070663042e-03,4.615488133423818924e-03,-4.881350265965525440e-04,-2.636806015974340126e-03,1.250539312904490460e-03,-6.943176641864571598e-03,-2.318438042201320269e-04,-2.861244973355118060e-03,-4.636940267457305420e-03,-4.252006560664419420e-03,3.827392682700318793e-03,-4.095026238530299553e-03,1.461602549410463692e-02,5.764463838021343343e-03,1.406449110972602355e-03,-5.312718960861513746e-03,-2.698055450139142091e-03,-6.206005934764106946e-03,4.302740655614808313e-03,-5.349110716428612418e-03,5.419205012389777427e-03,-6.738164011565014183e-03,9.939335972721254769e-03,-2.466836250765181652e-03,7.899917710286190331e-03,5.863352245623629569e-03,1.902148460322826721e-03,-3.299840864031069413e-03,1.035243978090724876e-03,1.705538826527806199e-03,-2.229911108385686496e-03,-3.030544213932808567e-03,-4.077035891577105559e-04,5.185663096915253678e-03,7.409598334096103309e-04,-5.093843161608943050e-03,-8.412450913117297839e-04,-3.639414470348357154e-03,6.339200792647360638e-03,4.579731664293524782e-03,1.430505966672389589e-03,-2.680864096821362810e-03,4.497210805210823982e-03,4.658427318442256646e-03,-5.475406844671361004e-03,-3.146960575905056144e-03,1.609504511298580614e-03,3.763230448407004457e-03,-3.918297667928521792e-03,1.423715999220255676e-03,9.317968003759054957e-03,-5.733374378806240131e-03,-1.017050953149308424e-03,2.510068035504738754e-03,6.620576447302523558e-03,3.231478781520685708e-03,1.635292462650151475e-04,-9.988296528074204830e-03,-4.206573573313056660e-03,2.803714756686866497e-03,-5.065544074881921793e-03,3.110703930438793539e-03,6.038463862339331877e-04,-7.240858106463877834e-03,-1.243579191966284097e-03,-7.143148287589098575e-03,3.593588860516405952e-03,1.101607106630019678e-02,-6.273126589090512878e-03,6.477767863356962905e-04,-5.311632331483893445e-03,5.030464016675927248e-03,8.124549368930532615e-04,2.134730733562281279e-03,-8.669724458945411369e-03,5.141946579943617104e-03,4.860967011789995955e-03,9.845508220141286487e-03,-8.276662965476501649e-04,1.498647780861570586e-03,7.860190233619521270e-04,-1.419550047829747216e-03,3.486825651766189450e-03,5.449238626946184333e-04,-2.737738977190316489e-03,2.180846205065113935e-03,-7.543450589716518828e-03,-4.905749356557740845e-03,-3.309339594775442340e-04,-7.543259244765274964e-03,2.489654912811165195e-03,1.247412598546944690e-03,-2.877310680940816864e-03,-1.091124001637381376e-03,-1.463297224595514617e-03,-1.808379765618511501e-04,1.129473657225756001e-03,2.128107129096203521e-03,4.157917704395706383e-03,2.237914431644156928e-05,4.233160425186410086e-03,3.031124746390671983e-03,7.467047294888753918e-04,3.592111274886386051e-03,-3.071575452550346589e-03,2.002270509583749334e-03,1.051675948156780916e-03,6.746514017078601884e-04,1.693662906663141125e-03,2.328499684112062939e-03,5.184352824923789578e-03,-5.642005705007421866e-04,4.754495672967887119e-06,5.508980806775255019e-04,1.858986400386248867e-03,-1.451434537103630596e-03,-5.527221398388604788e-03,8.379311189698190063e-04,-4.667495356644042227e-03,-1.434661494624678074e-03,1.005042078818463519e-03 
2.677131613449803670e-03,-6.151211629028889051e-03,-2.983091088875263700e-03,2.842356267661363652e-04,-4.146752901142272750e-04,6.792252867282500516e-03,4.716258562207273006e-03,4.819023362960888311e-03,-7.822505951519450468e-03,6.353255652827588355e-03,4.914810425223044635e-03,-3.526192516253784555e-03,-3.645868450609590899e-03,3.964084439201987810e-03,-1.532025632723059069e-03,5.789318726350672212e-03,2.034597599212748832e-03,1.769105093106767288e-03,2.613940518962900791e-03,7.087141361106068585e-03,4.723624620752570925e-03,7.019557803605475620e-03,7.269313101123823194e-03,-1.297226627358447374e-03,1.810698106355101500e-03,2.085541415756134165e-03,3.122795298595379583e-03,2.517612287537543490e-03,-3.711695272923016750e-03,1.241217511553511762e-03,5.015662747408670486e-04,4.771646016055926601e-03,-1.251682673355217559e-03,4.384230351932496411e-03,9.990723044448271417e-03,-4.804828105612572210e-03,3.582437364061418113e-03,6.874146159063613225e-03,-2.481988855560811509e-03,8.738501174720574027e-03,9.607990559807344857e-03,-4.369461357125611033e-03,-1.248206206805426268e-03,-5.261710649280014473e-03,2.817475353865514422e-04,2.021230244534214220e-03,-1.132961259085107572e-04,-8.048408107330759682e-04,4.219685311144683731e-03,9.525166631238578393e-04,-6.645999689655132138e-03,7.787239723589234157e-03,2.818763076910725681e-04,-7.961515134001992400e-04,7.643274740967639924e-03,1.770243372821783982e-03,4.722507751896014121e-03,-4.467302821565218018e-03,2.907393133449473997e-03,-2.573338945781683106e-03,3.205228373915756067e-03,-5.499086487146469899e-04,-2.946619894419774937e-03,-3.576144842285750546e-03,1.297345751320771101e-03,9.123190466875501789e-04,-1.623919235963497203e-04,-6.573544724347916675e-04,7.341459142969443408e-03,2.210807117303120305e-03,-1.830106928492886273e-03,-2.198401120103753766e-03,3.426604195850306220e-03,-1.232975888298585773e-02,-6.352340676416381830e-03,-4.358908164439973670e-03,-3.527142471972900470e-03,-5.377918203144919050e-03,1.070753397925358966e-02,5.716916529394646014e-03,-3.018924519251251458e-04,-4.743486327026716354e-03,1.085941161710267562e-03,2.694111661256084946e-03,-1.180809522496002045e-02,-1.081086967329817836e-02,6.569408103536948651e-03,5.288838336026203740e-04,-4.093962473039229675e-04,8.185049251213658613e-04,6.256287634093848742e-03,1.790452913472458151e-03,9.900326155249640438e-05,1.775258446966941368e-03,3.578102449013224710e-03,1.198525666698729841e-02,3.673417801594417473e-04,-9.551847937932475855e-05,-4.400111069540363871e-03,6.157604147204675214e-03,-1.553794470055521685e-03,8.099496966034553866e-03,-4.595355982428878452e-03,1.289479424195686449e-03,-5.171566596191729502e-03,5.259446343098161947e-03,8.359004374842077039e-05,1.360996664363178787e-03,-1.564569526352189511e-03,1.566906360690920128e-03,2.164449269813768697e-03,1.623855888510125287e-03,-8.204674079437174194e-03,-7.253492845355489008e-03,-7.001878523375925506e-03,5.716922138221177709e-03,-1.141006042866828801e-02,6.472594384829341921e-03,3.480641428084005774e-03,1.921386026379529608e-03,3.908439384729939410e-03,3.255841110376040976e-03,1.646719473102293066e-03,-1.078971283093917632e-03,8.639254362198169751e-03,4.670202232070346806e-04,3.748487177717154794e-03,1.013638700353228618e-03,1.719990417968806989e-04,-7.167478985116183636e-03,4.050923094356074713e-03,4.514422809526697986e-03,5.367816971973951058e-03,3.884994324405181766e-04,-7.444702475601046483e-03,6.880113308609598147e-03,2.204207404899306486e-03,-8.046339338940701686e-04,-2.081224381596793668e-03,1.812766024882219709e-03,-4
.929499576719766074e-03,1.559203439476210956e-03,-1.849711878921586963e-03,-1.792297187209062692e-03,2.050769013987228506e-03,7.789774166869026803e-04,9.045277229915101719e-03,-3.544086923124141909e-03,-1.382278566879530102e-03,-1.638435190411981256e-03,6.238494299040806684e-03,-8.952513253822982648e-04,5.427859795340227861e-03,1.014445524462726476e-02,-4.637630596676585969e-03,5.583873520464343113e-03,-2.209521870233693581e-03,4.376716192487309191e-03,-7.198049405659147533e-04,3.685234186233564933e-03,1.127753121500912892e-02,2.005024703317585377e-03,-5.012581443453662140e-03,6.316173517879880919e-03,2.582180860184187226e-03,-3.560075921231770340e-03,2.582554863681105293e-03,2.323912566936319098e-03,8.717892705039819584e-03,-4.871077546242440735e-03,-1.042631241185354440e-02,-3.037702214004288337e-04,-6.972014365124355945e-05,-3.635105487563243271e-03,-8.741911834817824023e-03,-2.611976799676986967e-04,2.040777329486665077e-03,-2.291241181768406601e-03,-3.620309518620765631e-03,3.447690204299574870e-03,4.070640787296227062e-04,-1.404469461072069617e-03,9.877524591312537502e-03,3.017731394603573834e-03,-6.999975225247213537e-04,-4.383880372229548121e-03,5.609236122341564046e-03,-1.456745490563259374e-02,-6.743556765243301394e-03,-5.231903732547553421e-03,-3.377167026949165810e-04,-4.292617688059306887e-03,-1.621235353219054433e-03,8.303527045864636524e-03,4.409975740138581506e-03,6.349217043575747926e-03,9.823464407500995255e-03,1.768724197954966168e-03,1.079506778046331072e-04,-3.201615319360499087e-03,5.645841550025634380e-04,-5.979780062857507038e-03,-1.043900251134950025e-02,1.062681396775742686e-03,-3.823176614830539524e-03,7.384838289898235097e-03,-5.377177741271711893e-03,2.855044824474053315e-04,-2.469595624936481680e-03,-2.305830164046914540e-03,-5.606079764032736307e-03,3.369910532832262086e-03,4.671085014301494184e-03,5.872505690505199979e-03,-1.637626318207061277e-03,3.315142488647916056e-03,-3.277272640078231214e-03,7.066134990492023427e-03,-1.010248575089597520e-02,-1.931595862768577211e-03,-3.056951334778345696e-03,4.034660257099928829e-03,3.026707202928641781e-03,4.734657301586737651e-04,6.409355514819635967e-03,-4.382501783226419936e-03,-4.110307704257927150e-04,1.067970638440176250e-04,-6.490479004657351281e-03,-1.317357831020069285e-03,-3.772025488982104404e-03,-1.174551888236530055e-02,-6.720502389410536528e-03,7.329916097043441547e-03,-1.505386803052229366e-03,5.699857710957711770e-04,1.485780335135291773e-04,2.313247324707857870e-03,-1.868145595648869339e-02,3.780241117613991619e-03,1.151326391126104420e-03,5.608098863123471946e-04,-3.191535934808566177e-03,-5.662262383400386463e-04,5.716500607059633692e-03,1.942292372588619006e-03,2.137143139942167432e-03,5.842988030877649744e-03,-1.077384102603440481e-02,-7.945472131986057798e-03,-4.486823980687339522e-03,-1.958650077359573322e-03,3.367622579420781123e-03,5.411683704838837067e-03,-7.025792268359974321e-03,-1.746124519098766516e-03,-3.557249072027953221e-03,-2.973999927557815018e-03,-3.573995194758594184e-03,-2.343070635861444926e-04,-3.542845961055737358e-03,-2.496205683166042438e-03,4.570686148384746350e-03,5.084248722414614653e-03,-7.304811489799027321e-03,3.812175174222361042e-03,-9.174206476948131017e-04,-7.217710545571940500e-03,-5.021935117525663621e-03,1.071645973437602983e-02,-2.087574579211467010e-03,-8.825054535999084021e-05,9.444473119118342030e-03,1.575411343916417132e-03,-4.499912379912941393e-03,3.704641050014692528e-04,-3.560020777614962550e-03,-7.546426658916479714e-03,1.060936920753917509e-03,1.96847
7208616052545e-03,-3.654800147107524662e-03,-1.375916245054580392e-03,-6.972413107803109543e-03,-7.235675773806273837e-03,7.629273746225857800e-03,3.782938316122920822e-03,-4.591299201064171463e-03,-3.420729502545283990e-03,-3.203014997523272438e-04,5.515711870038205941e-03,-6.939557943416746477e-03,-3.534693057584523297e-03,1.981171439757871931e-03,-4.067713342323428863e-03,-2.083386603799498578e-04,2.488158923308920936e-03,-2.472062276890278631e-03,1.611447699413637042e-03,-2.127887747796028840e-03,1.732015484949695578e-03,1.624117752991317961e-03,1.602850120664041107e-03,3.839961354755750626e-03,2.827276505982731069e-03,-9.598990057677329879e-03,-1.963608504741858919e-03,-4.088793749837477337e-04,1.022555630960876073e-02,-6.062140644376012985e-04,3.783385881192186714e-03,2.284252765667429703e-03,-3.788083075438937464e-03,-2.745528275467636233e-03,4.684419427464996122e-03,1.867914799310460301e-03,-3.061666040492828922e-03,2.352142254741756977e-03,4.603231304538455380e-03,-4.135320601261852102e-04,-3.074645506471190490e-03,3.530833498670425414e-03,-1.167555913363725172e-03,5.353379342597934304e-04,-3.178439246472410974e-03,-1.655663270518480806e-03,8.192072983167208999e-03,6.059808179763941441e-03,1.675480032219568401e-03,-9.016085808316540509e-03,-5.363061300082192298e-03,-3.454254640951082166e-04,1.192269159356911488e-03,-3.037284316701609720e-03,-7.631338728430529100e-03,3.411528846977084865e-03,-7.547941336054610206e-03,-8.970407407597656918e-04,2.775444560484242092e-03,-5.545034160786410779e-04,2.078431412160028271e-03,-1.436929687794826068e-03,2.970063207205728161e-04,-2.806027931462176054e-03,-8.937180853257407735e-04,-3.182125115540540084e-04,-9.992671964662792200e-03,5.461139415134206795e-03,-5.142969704408389964e-03,8.361991825411226389e-03,-1.286861434269491282e-03,-2.055355960624683995e-03,-7.494617530861493078e-03,-6.518071137122988593e-03,1.255240243511616775e-02,9.420876439560146864e-03,2.955710336097492567e-03,-2.177502625035557596e-03,3.481662681432415220e-03,6.282943606857040349e-03,-9.961651948466560685e-03,-3.034964692781157332e-03,-4.023355904909204153e-03,2.755260623761793780e-03,-4.279845833665160779e-04,1.166509134851688445e-02,-4.811155239694064195e-03,3.020369122607920740e-04,-3.556691573853737378e-03,7.912990268426374832e-03,-4.053834537787625279e-03,-2.960864937554199758e-03,4.952317329017869507e-04,-4.954205269167868270e-03,6.705680306246520651e-03,6.672273105536993473e-03,5.678223313702066097e-04,6.351315476370910339e-03,1.752074614979843691e-03,7.201874699991304973e-04,3.904428868929871350e-03,2.643247603481147911e-03,-1.627014819555975362e-03,-3.736215792045707862e-03,3.603340089626884922e-03,-2.160922257994853362e-03,3.763257108586233208e-03,-7.481542580755441881e-03,-2.874351014829444346e-03,-3.242844158412954564e-03,7.131429256523348897e-04,-7.026659037990506777e-04,-1.300185470810804461e-02,-3.573384222217918741e-03,-2.009076329461498278e-03,-1.466756384110802134e-04,1.700958360287566301e-03,7.931543326373591881e-04,7.407190008028133842e-03,5.434160852205625024e-03,-2.695348758955358383e-03 
[Data file contents omitted: many rows of comma-separated double-precision floating-point values (on the order of 1e-03), apparently a numeric test dataset bundled inside the mlpack-2.2.5 source archive. The raw dump also split individual numbers across line breaks; it is not human-readable document content and is elided here.]
898032382967290e-04,2.457116616850019922e-03,7.155565767511541317e-03,6.252736670438005517e-03,4.893542325098427202e-03,1.036072497368136294e-02,4.094455974953553103e-03,-4.187584112207131011e-03,-1.436825750339654206e-03,-1.524648851680630076e-03,5.351335881745875565e-03,-2.100580792455719430e-03,-7.455515681836681098e-03,4.004148826820164869e-04,3.856962655628336147e-03,2.549045999056987095e-03,-5.776766680311938747e-03,1.223000814746119658e-02,-9.016723604513899801e-03,1.205317692740815252e-03,-1.013311932377809754e-03,-3.104818141468621614e-03,-6.371890630665237761e-03,-1.830668987253739836e-03,-6.577365626928843856e-03,-3.268180124541326340e-03,4.382003996615387205e-03,-6.418493182402035258e-03,-3.351456685585004619e-03,7.880486190380877861e-04,5.679587784482360846e-03,-1.457519620651794067e-03,-2.455266603252691617e-04,2.764845255332067271e-03,3.379089538397433458e-03,1.756752205257100866e-02,-6.546115699797587853e-03,6.608194863647044899e-03,-2.992660930918518772e-03,-3.819318348263226349e-03,-1.531291559982005156e-03,-4.236679149584327703e-03,2.856018210663120185e-04,-1.841764965637956823e-03,-4.080666867702609571e-03,-9.982374655756827767e-03,-3.356153290860137365e-03,6.739631650082109071e-03,-2.837293028566122838e-03,-2.825498934142300043e-03,8.555525659754533579e-03,-9.414509825295309248e-03,-4.453000068320318514e-03,-2.364374139272746363e-03,4.197582505883068991e-03,-3.206461149071668931e-03,1.204495684460874134e-03,-3.905874064180160470e-03,1.532578191266499102e-03,3.964485557127366173e-03,1.620770464762484684e-03,4.056941807661571843e-03,-3.052527800691859361e-03,-3.717194581137337958e-03,-2.205867709559576270e-03,1.942592013474522860e-03,5.465381950708456463e-03,1.678418598972669304e-03,3.899976830935790971e-04,1.881499715047831294e-03,-5.056766241135886514e-04,3.971600798580971793e-03,-1.079520444069951646e-03,1.874739373280865902e-03,5.740520904884872802e-04,2.491589328506057209e-03,2.205331942606434358e-03,-7.821476852969199561e-03,-7.358982851164224195e-03,8.546995938830584394e-03,1.877268016604240848e-03,6.584380869701030731e-03,-9.261358007810742421e-03,1.919112080801088204e-03,-2.690141407333126056e-03,-2.581784145187696663e-03,7.299432603308682321e-03,2.316856254616014420e-03,-1.408904037937801007e-03,2.396804352114611115e-03,-4.973544652990616322e-03,4.530989650461800765e-03,2.061657477279981322e-03,-1.121166108793951455e-02,1.391622851332320740e-03,-5.679473004624248670e-04,-3.875395764583396309e-05,2.504305510733498209e-04,-8.163728274292798848e-03,1.474066359237157604e-03,-4.260983394809600248e-03,-1.113990724518771312e-02,4.036746881379948970e-03,-8.539218612183173624e-04,-2.933197729624088812e-03,1.067844088049255298e-02,3.816105913956604166e-03,3.003159041034725567e-03,8.095283242674879750e-04,9.532345324748654411e-03,8.140456238886397550e-04,3.867792196588744087e-03,8.410385742253145488e-03,3.295790837710194880e-03,-4.044474562322130181e-05,-2.691407538513421616e-03,1.785061793396468212e-03,-4.555608010158381134e-03,-8.116624277810194354e-03,-3.789417276486489305e-03,7.568874485670673558e-03,-1.907802311597582860e-04,7.052182287625197686e-03,2.347441156516021522e-03,4.339201234486956757e-03,-5.700123110772957417e-06,3.157868866812043023e-03,-2.932278461635174065e-03,-1.315101339120314022e-03,8.256002038643941261e-04,-3.683353451824167811e-03,1.376452997385032217e-03,-2.949427311035613406e-04,1.390640460916603774e-03,3.913547530314745101e-03,6.338888367172132964e-04,-8.638067176426960855e-03,-2.257388255089258782e-04,3.588355001405957919e-03,-6.9914456460637582
59e-04,-5.332590246461597558e-03,-1.316021881234459004e-03,1.936126632307727264e-03,-4.671404797564387458e-03,9.011703446155182708e-04,7.524155811884664785e-04,-2.403766258615084531e-03,9.227134104940238474e-03,9.223712623567839955e-03,5.597869624954699211e-03,-3.462655360850377603e-03,8.637132463273555424e-03,-4.584617827173799974e-03,3.494845598199789512e-03,6.863891597477329767e-04,-1.321140661684821332e-02,8.279727254541694001e-05,5.194443786551572982e-03,3.194490509594525348e-03,4.364491672668341024e-03,-7.452825585574943080e-04,2.892606058535729472e-03,6.768675522466182336e-03,-7.856624084891131003e-04,7.297960471732397973e-03,-3.275574771158633759e-03,-1.688519757432720148e-04,-1.271097424466967676e-02,7.144260148311725248e-03,4.763194726446102166e-03,-4.163574179423214423e-04,1.771304836017397258e-03,-7.625054167041641837e-03,8.204346223781085190e-03,2.110653905947691960e-03,1.485473130019314761e-02,-3.439648630840468416e-03,-6.062577119580631405e-03,-8.542193206036799175e-04,9.534481931399047330e-03,-2.611081892985222131e-03,-4.304604359042856279e-03,1.534434161808361733e-03,-1.164655652471777332e-02,6.047425595488200786e-03,1.215229888514238708e-02,-1.074646132198346128e-02,-4.018277900842905266e-03,-1.755219591806985282e-03,2.320229905432011526e-03,-4.191746914794358854e-03,1.471944014628428230e-03,4.509822876955835343e-03,1.623173330604284290e-03,-5.643268444287395831e-03,3.004166678902635019e-03,3.701796467725553420e-03,-4.484886197007141063e-03,2.568481892897340763e-03,7.603012882348314343e-03,3.722460993006358916e-03,-5.399554304467629913e-03,8.457343448887671804e-04,6.904431989527916140e-03,-4.627749515767089748e-04,-9.646135008019383894e-03,2.011726834189237791e-03,-4.540704544092159789e-03,-1.314371007976267158e-03,-3.844758183555672167e-03,5.620248381084914924e-03,-2.553154228993233417e-03,4.086739266919790781e-03,-1.818973112123821500e-03,1.270661974883378420e-03,4.652900128067028081e-03,4.881370283624417682e-03,5.333898160599898942e-03,-5.617918038225633119e-03,-5.496923057761199383e-03,1.003477322661863562e-02,1.048034839998784017e-02,-2.137989170835628058e-03,-1.384053727374468272e-03,-2.001374282222071876e-03,-4.108550442337192792e-03,-4.482304633301091579e-03,-2.503866581327520834e-03,3.234238948095406394e-03,-7.284666192219999437e-04,3.233994386397816539e-04,4.550704603870948071e-03,-3.261711615990933212e-03,-1.471312337421573006e-04,6.349572600776068158e-03,4.204301221037333967e-03,-1.608493765528964991e-03,-7.207232017803898161e-03,6.950196311252225013e-03,5.643900224283590270e-03,-9.797334812387108530e-03,1.032760474835953948e-03,-2.449329893901838924e-03,-1.051902718171023911e-02,-2.942652090145683914e-03,1.589423846694280407e-03,-6.005906336333312638e-04,2.782048613976821280e-03,-2.258512031831535084e-03,-2.587202056534173428e-03,-7.575462662085205925e-03,5.757430000196341220e-03,7.171965618160840205e-04,3.517723749328657227e-03,-1.435645314926809390e-03,5.311550991549305288e-03,-2.334985191604634861e-04,2.974654607738772688e-03,3.213554590529475990e-03,-1.720549639641310007e-03,-5.908773399577413668e-03 
1.735392574460755972e-03,2.012234086913989792e-03,1.241614335318145038e-02,7.809902327626532317e-03,-1.557322275979289425e-03,4.543210645856981394e-03,7.442079201798311450e-03,-7.826520854635730408e-05,-5.601223695330498700e-03,-4.897083256359272344e-03,-6.898045877007601525e-04,-6.170992884202493485e-03,-1.185882990523119337e-03,8.635342251286749066e-03,7.624885305968279008e-03,2.017784372600794733e-03,6.049506874444296120e-03,-5.874651277712139574e-03,-3.883718268673686143e-04,-4.652919506140378025e-03,-2.530755396718238539e-03,4.697681694959825898e-04,1.134835859337055865e-02,-4.965642176684681788e-03,-3.556554295189901134e-03,1.070731303910807874e-03,-3.225240198200370056e-03,6.658925032650789624e-03,-4.413813241800600314e-03,-4.959346961530657939e-03,-5.310775595656299676e-03,-6.426356109120998951e-03,2.637093804093197484e-03,7.915599437686601748e-04,5.036973014473296621e-04,3.107638301078110600e-03,-1.123754950781518649e-03,5.639493583583670734e-04,4.038650102076576324e-04,6.479310170260803997e-03,-4.784920102277598949e-04,3.691690788238434235e-03,-9.678776051202844094e-04,-1.034309586023393970e-02,4.675382144721470547e-03,2.538293202423363287e-03,-9.262966637062903380e-03,1.662272271055895189e-03,-6.112064729899065619e-04,8.817810369898352110e-03,-2.553074775807402490e-03,3.878878247864903615e-04,-9.161018285111429196e-03,9.118739642495826345e-03,-7.054702439637442271e-03,1.327558325645781341e-03,1.029630162997477406e-03,8.182879098294203180e-03,-1.792517569219186091e-03,1.551111002591117841e-03,-6.065434177194253818e-03,5.136445943943796291e-03,-8.601231003596261724e-04,1.659023027801666489e-03,3.264076310866363337e-03,2.026057226461846616e-03,-2.280444693407066101e-03,6.950517224988901448e-03,-2.468446921141193278e-03,3.422425901967441914e-03,-3.477339315815788316e-03,-4.567911446765046853e-03,-5.190827569090165038e-04,4.997782042921655038e-03,5.852629381395281494e-03,1.723456175044730610e-03,2.241540569809537427e-03,2.085105060117464250e-03,3.867505082996592920e-03,-1.102278449678501145e-02,-3.588317049277798144e-03,2.701800829649166936e-03,-3.199229095601904913e-03,-5.549657557997712647e-03,7.326735177085179872e-03,-4.569374900254006937e-03,5.166262548264180174e-03,-1.051362765815585902e-02,1.001677229614052547e-02,-4.175039036144872866e-03,-4.687830251576366979e-03,-3.235265341918280337e-03,-1.048071131731002394e-02,1.103667994388934410e-04,-2.855188119045626912e-03,-1.544590924976615987e-02,2.282275822342131298e-03,-5.806124287510728080e-03,-3.308049923687445080e-03,-7.482256731347966408e-04,9.232784852391343066e-05,7.800126314794108423e-03,-5.197970520810240094e-03,-7.021123953350126047e-03,-5.718947444666285830e-03,-3.774361735172656641e-03,7.823763406919648656e-04,2.184616298754359261e-03,6.236037133324818680e-04,-1.722059857144222854e-04,-3.541317169495974537e-03,-2.824880068672253747e-03,2.814410699329839092e-03,4.133977172300095683e-03,1.006980840003667086e-03,-1.444382074304418001e-03,6.506140782626634173e-03,-2.478583575071915349e-03,3.225790727291798048e-04,1.266310205980247924e-03,-9.745659019857474221e-04,1.082655382575946042e-03,1.262938677028165382e-02,1.539126855156233405e-03,8.587337973260441432e-04,-1.417585886636921286e-03,-2.162993316512403733e-03,6.579491646363360216e-03,2.565528134462230492e-03,4.503255965084854108e-04,-3.793857776348806836e-04,2.514602882495469826e-03,-3.304684877490341680e-03,-5.233578233196505523e-03,-3.423708406892658155e-03,-3.260083056572504051e-03,4.838809388645540568e-03,3.689778962005729312e-03,4.145519193823729055e-03,2.43184419671
8986433e-03,2.323585910923696256e-03,6.981585506757134740e-03,7.632401353333441094e-03,1.141701533257793305e-02,-1.728511896085755143e-03,2.193250428204427532e-03,2.396468219564872916e-03,-1.037282096135199458e-03,-2.447167388205273141e-03,2.289641425045354194e-03,-5.900471276978416779e-03,5.575840861441852189e-03,5.850159315347363415e-03,-4.719660223920946543e-03,3.627143875541187501e-03,5.787153983238286538e-03,-1.282983629320028291e-03,-1.694120277170066977e-03,-1.550075454104870827e-03,-3.970636830912325534e-03,-3.768573475905413246e-05,-6.738558138391657903e-03,2.770160203378274535e-03,6.792351332545929453e-03,1.969585419972138488e-03,-1.013944942479426230e-03,7.509053156214583712e-03,1.988130490934056707e-03,-4.509519056525186469e-03,-6.861995088403133845e-03,9.289335430768198149e-03,4.944044868391243332e-03,6.760469829750523352e-03,3.044436463378906183e-03,-1.074723076964382651e-03,-4.386615365134068772e-03,6.115379392493673627e-03,-4.077181439772842308e-03,1.080434207194877656e-02,-2.315943532006165245e-03,-5.510719435476756078e-03,-1.255348645549700598e-03,2.650629672109144214e-03,5.126340937675218784e-03,-1.980830109142030313e-03,3.695240588423354122e-03,-2.532414762345436537e-03,-1.024666652348577706e-02,-4.659243541338847425e-03,-3.422755919744601642e-03,9.649275169031823821e-03,-4.393140441761488740e-03,-2.027833837525788024e-03,-2.052292878808570017e-03,6.033389025424959126e-03,-2.960637505218872284e-03,-4.705645681280832583e-03,1.895373465721977586e-03,3.058067043702566989e-03,-3.138861815134541631e-03,-3.365300074637011737e-03,-1.289749692455606327e-02,-8.544069387551712566e-03,-7.306648441336252055e-03,2.437365877610632101e-03,3.342999066335997169e-03,2.412563402838140397e-03,-9.822376834288677055e-03,8.673020676995211661e-03,-2.884040908939180690e-04,2.338009113916285960e-03,5.217199613164417299e-03,-7.440718023146768167e-03,1.024581159323005412e-03,8.285560237722325305e-03,1.635191254778429722e-03,2.132363761059639687e-05,-2.964401953271513998e-03,-7.356835859512577368e-03,-1.094122162734643099e-02,2.078527880434295066e-03,8.711229322975740791e-03,6.636936552752810067e-03,-5.975922990225032011e-03,3.214823340923561850e-03,4.450328335292737941e-03,-7.111537977633105787e-03,1.709370594862596698e-06,-2.783048282661123446e-03,-9.342928924934643109e-04,6.179747634169075734e-04,-6.030601394758895728e-03,7.851177084738725567e-03,-1.787413601610222209e-03,2.765104449447987507e-03,1.109796708055449908e-03,1.488499075173548274e-03,-5.179467185176105451e-03,5.688536412092472239e-03,-7.962120682757088327e-03,-4.776526884412561971e-03,5.159592930104167467e-03,-1.877933533266958619e-03,-2.055871132762385634e-03,3.795915578441564579e-03,-1.742190125577798825e-04,-5.501489466123674413e-03,-3.325203686470963940e-03,-3.770150127069066964e-03,-5.864397849512860590e-03,9.788472141125542994e-03,3.167737759980475583e-03,6.497881508529104733e-03,-4.863166325141763703e-03,2.754193577082020457e-03,6.818948604729294441e-03,4.846247784811564510e-04,3.183376502119937990e-03,-2.517159965197747398e-03,8.540630422254438022e-04,-3.492196317539322890e-03,-7.223584889024284653e-03,3.789389581790630641e-03,-4.178592717701294298e-03,-2.809963470324773008e-03,1.121846976158221467e-04,2.131216476623180173e-03,3.159109204322870477e-03,-3.965300319577307053e-03,-4.515635809134169870e-03,-4.621605756466121498e-03,-8.125662342116415568e-03,7.121366169619584505e-03,1.314225217054817291e-02,1.842905383263295817e-03,-1.154004629510088815e-02,9.591054769625439666e-03,-1.071940917631794381e-02,-1.095214221233320028e-0
3,-3.761954646696710151e-03,2.564861838185779775e-03,4.173223878020380570e-03,4.940898908141925366e-03,2.672839194330667214e-03,7.512579796849180522e-03,7.014478762861740997e-03,-8.497276677547934212e-03,1.290946044889345560e-04,8.557611465675896839e-03,3.554572035521974451e-03,-5.317676804407587762e-03,-1.952700973840964822e-03,5.041629240681455397e-03,-5.029553261801044360e-03,-2.115737726111262176e-03,-4.785363090015240864e-03,3.008836583968857621e-03,-1.319580509130477896e-03,6.200470837020143761e-03,6.652223638552071106e-03,3.134368822136313678e-03,1.491075362702442642e-03,-6.172504102944743540e-03,9.584670816917265817e-04,-3.019914755821445206e-03,-1.157120557399023489e-02,1.440692071427814112e-03,-3.324915072849440791e-03,-1.098568792297743499e-02,9.065003638585530672e-04,1.645394949330760380e-03,1.647376184992959194e-03,1.492300309601870623e-03,8.217613211975288365e-03,-1.842421735453388413e-03,-3.401901415425792000e-03,-8.518686671785138942e-03,6.480466787940258877e-03,-9.594795083213772391e-04,-1.730814512586197453e-03,-1.751198557453814263e-03,-1.520619730608414443e-03,-2.146432017641158266e-03,4.311326434000758816e-03,3.200698207356032169e-03,3.310701049095306474e-03,3.820341822060559049e-03,5.458536373156253237e-03,3.150039699051347935e-03,-2.955738605160627627e-03,-3.980944098984824826e-03,-7.715164216052535070e-05,-1.155732852342174037e-02,-3.324743448297449305e-03,1.576417205701579997e-03,4.671785992122872051e-03,9.154210720191419820e-04,4.966223634909186285e-04,4.098569881844668292e-03,-6.498850560158300413e-03,-1.159930926156099934e-03,-2.504236767731041354e-04,3.614012142839235879e-03,3.517387449443510911e-03,2.329382341788369809e-03,7.293846993185602867e-03,4.433484276999847228e-03,1.053611311853672704e-02,1.011877709583148318e-03,4.481827815819406681e-03,1.528541089111206709e-03,9.894733953919794892e-03,-2.056099582445386226e-03,-1.238011608368644323e-03,-4.468652551291341611e-03,-4.966938132803331435e-03,4.070504273094103717e-03,-4.990878803892856170e-04,-2.986346045610383491e-03,-4.641477150644285864e-03,-4.664468149348229703e-03,-6.904215200583603551e-03,2.441919067591267947e-03,-7.107494115183141028e-04,1.732088144380388404e-03,2.915192309894756167e-03,-6.312280664499714776e-03,1.191520188625351398e-03,-5.423040255092269855e-03,1.614792709551493609e-03,-1.282153994400221956e-03,4.984415869165226623e-03,-2.250999469940176672e-03,3.290411599581120894e-03,9.894879015109995807e-04,-6.714002166540808218e-03,3.455391036536401440e-03,-8.399969535186836006e-03,-7.569128414830876735e-03,-4.833692019111481171e-03,-7.734033020102778350e-03,7.108047672097943422e-03,1.766215528341192427e-03,-6.043857032211111084e-03,5.520166904671819190e-03,1.594830953218375532e-03,6.576166473656967423e-03,-1.009478544965469304e-03,-7.657640191274733311e-03,-4.003162687942519136e-03,-2.990141113473455004e-03,9.746790774630906262e-03,-5.671072039911243673e-03,1.375443876294439854e-03,4.234429771980086250e-03,1.924526594044840633e-03,-1.561546827644206235e-03,-3.290556613388108138e-03,-2.348823221839286841e-04,-4.832684401802576739e-03 
3.939084700022896343e-04,3.086122538590602409e-03,-1.654926186316734099e-03,-4.219636054967877171e-03,8.018617319188691556e-03,1.105091503392231257e-02,-8.744366353566387917e-03,4.105793689197321551e-03,-9.572834604548692908e-03,1.460473149374507231e-03,2.467359870182787287e-04,-5.866955413570344290e-04,-8.596444185468769643e-04,-1.110280206082161700e-03,5.186658932845410533e-03,-7.583909567433535712e-03,-6.448890091864358659e-03,-1.876555659428995370e-03,-2.683027528493462256e-03,-1.061643460286677138e-02,-8.491195074359509862e-03,-3.929085258364824022e-03,-3.236433756254684241e-04,-4.248252224941468334e-03,2.381149699795330130e-03,-2.896006392446456856e-03,-1.549578779585545013e-03,1.707003901111393959e-03,-2.875820373100054176e-03,5.272854685069001957e-03,-4.195630189580664207e-03,-2.368704193017500542e-03,-1.035957596149978234e-03,-9.837295361032241670e-03,-5.617785318979786301e-03,1.557243667631940873e-03,-3.259179970901080739e-04,-4.310395121532973640e-03,-2.872874938998745166e-03,9.424235648888148282e-04,-9.867428414170376083e-03,2.968533319547401322e-03,-3.896446513096118135e-03,-3.713793436976763065e-06,-1.864704483701873708e-03,-1.830542210329344759e-03,-2.904022344790230248e-03,2.943022639741176266e-03,-9.075865049133557483e-05,-3.706994414821324340e-03,-2.657965823535263401e-03,-3.040477647192647761e-03,-5.175969549209343684e-03,2.539970920378286506e-03,7.281534372825328866e-03,-5.919976652257850566e-03,2.801803363933224223e-04,1.637299210123874943e-03,-1.909658925861905648e-03,-4.390512617298701799e-04,6.811591787904279667e-03,-8.057041889406450427e-03,2.939009555619679845e-03,9.894325046718558964e-04,1.041845825407263166e-03,8.647002912197622113e-03,-2.607196644361677289e-03,2.472596801919551245e-03,-6.018726297893976481e-04,-3.641389109072122512e-03,6.589140750822622232e-03,5.735200629825286736e-03,2.670834954273079362e-03,-3.919859753526406566e-03,5.702055455986891185e-04,8.873272780786480365e-04,-1.274414011142434820e-03,4.173672395857129860e-03,4.604624147465909145e-03,-3.042964528516421598e-03,5.109223882130403856e-03,-2.599492617255691018e-04,-3.372987995858916632e-03,4.990526112530361885e-04,2.694526995459475740e-03,1.801895002326527615e-03,1.037503718763068024e-02,-8.452469761617552660e-03,4.235355289043918683e-03,-1.053375131168655769e-02,-8.245488893340868763e-04,-3.635099279266958659e-03,4.160054873448839133e-03,-4.545905893911648954e-03,-6.101481734344718459e-03,1.092910483874342392e-02,-2.376855589709511891e-03,1.207195224977476445e-04,-8.693279082197560544e-04,-4.985852000336283771e-03,9.223283997313660998e-04,6.122195270071051658e-04,-2.428382585040384390e-03,9.892432879499385261e-04,-9.971820478082446190e-03,-9.291735337797084326e-03,3.172037161618416947e-03,-6.022318192071520609e-03,4.020182420949681111e-03,-8.053749848249884200e-03,-9.006418715708720130e-03,-2.080734243092356023e-03,-1.217131686999362115e-03,2.120753837511828676e-04,-2.028820974992293254e-03,1.890167967428562712e-03,-3.637578747732535429e-03,7.492593432404330740e-03,1.285429796967404482e-02,1.254201259683342118e-02,-5.343855574350717813e-03,-1.037346941878145544e-03,-9.558308057256711260e-03,8.205168837695453465e-03,2.165886148839373231e-03,-6.347614809427309154e-04,1.398893817398975991e-03,-1.708311055927727411e-03,6.185373226647314718e-03,6.438970672827164232e-03,-4.346904283115415185e-03,4.814397615586780976e-03,7.150106625568495265e-04,1.078528012833867081e-02,8.230313714808164868e-04,5.622047900950440796e-03,-4.555513021633036258e-03,-2.892401858085363788e-03,-5.649499629625927234e-04,3.
664406482484589000e-03,-4.543481595162953898e-03,-9.567018469792723870e-03,-3.271321373676297148e-03,-5.550958120051007263e-03,9.093955858642531779e-04,1.126300722177320116e-03,-1.007229253579529013e-03,2.350434621563658819e-03,-4.067517068521483287e-03,-4.880246753274737267e-03,8.406523292199133759e-03,-8.852469025134090952e-03,-5.522845777302778751e-03,5.368022002122329293e-03,-2.967953590956848623e-04,1.757202749440188536e-03,8.189303045154261071e-05,-5.820154647056069441e-03,-6.653323077557646119e-03,7.008289242119151996e-03,-3.641845377144460518e-03,2.686680849514207995e-03,-4.056698877038916393e-03,-3.276372787465712481e-03,4.821784778112570002e-03,-2.962403069803234978e-03,5.687703632184872710e-03,-5.597685205113091256e-03,-1.989874756595372274e-03,-2.892707982542622788e-03,-6.673778365928212213e-03,-1.295280919698726029e-03,3.184023526388658158e-03,-2.379960508508498739e-03,-6.776059576859136922e-03,2.569784683692128389e-03,-1.873721837952153696e-03,4.228974524150944147e-05,-1.966022907008316105e-03,-1.110985643172218273e-03,-1.935679066325508716e-03,-1.023736207052474041e-04,-1.340856348404624023e-03,-6.645595548203484144e-04,3.668972563849245451e-03,-7.076657129278744046e-03,-4.083719746362441446e-04,5.984388928537532661e-03,2.846662455792348112e-03,-1.497058076533201764e-04,1.621012174677956795e-03,-1.106554905879023921e-02,-8.211802086690265026e-03,-4.353073264946879378e-03,4.441417332700710210e-04,2.238290535667624391e-03,-5.219946816121560570e-03,-9.110163468207454693e-03,-2.422980654482975987e-03,9.081811994004168435e-04,-5.015903371474754978e-03,-2.400616494575059674e-03,-2.576957011024354552e-03,4.136469565361336129e-03,8.802772232566691757e-04,-3.561229976739713806e-03,-8.802746281848765212e-04,5.162705027714302713e-04,4.440839132879508856e-03,-7.847444839098741739e-04,-4.724395746318495876e-03,6.117806089313000510e-04,-1.054823842351772234e-02,7.036384773586211927e-04,4.650410933119988183e-03,-2.376333109837441664e-03,-3.500754851302382772e-03,-4.238204993969864505e-03,-1.013071466019703016e-03,3.642753682873154601e-03,-7.282779048253025132e-04,-1.414396934240329598e-02,8.241418398452406840e-05,-5.673705157472558672e-03,7.533809908922889896e-03,4.577148493205939264e-03,-5.497930695624413805e-03,-8.557606239733854449e-04,8.753950599255244528e-04,-6.828592560338488798e-03,9.886160677807571961e-03,-7.337470619574113818e-03,2.240712909741386562e-03,3.369159411861563126e-03,8.516006600119403833e-04,4.305806905368805663e-04,1.264759064792619035e-03,-7.341716116326233235e-03,2.149626294408258590e-03,-4.794167621282139670e-03,-5.723960477777419349e-03,3.543157970420603153e-03,6.393744742229011575e-03,-1.816399283016840007e-03,-3.734208490879748019e-03,1.518794329353662358e-03,-3.069224543042266273e-03,7.452083894734586739e-03,3.514815789124120450e-04,1.236099270074246222e-03,5.883369767623020108e-03,7.268590064789282349e-04,3.076644766578432149e-03,-8.894379795957201625e-04,2.186999100121587904e-03,-1.705745382042240773e-03,2.171026523812634706e-03,8.721656669210656881e-03,1.327675852888023393e-03,5.625705985460091726e-04,1.159158225113227298e-03,4.195175266427404325e-04,-5.132950535061016166e-03,1.574962528126831176e-03,-1.756269101293726417e-03,2.289723668427332600e-03,-4.242186653926004712e-03,-3.070046485722166801e-03,6.100377009828972415e-03,4.699834220393251348e-03,2.714253939031982619e-03,4.016442890847368372e-03,-5.031872452184425921e-03,7.009520611613149002e-03,-1.136163415686241868e-03,-7.177612286744136129e-03,-2.914772923528246218e-04,1.590768341359562795e-03,-1.01694
0226969219319e-02,-3.379764376124517870e-04,-2.677018370456812554e-03,5.585976317480402520e-03,6.634016093110440757e-03,-1.700068664971653154e-03,6.120196285538797958e-03,-1.220832203362273022e-02,-2.054922297679678475e-03,5.431365905526653291e-03,-3.156262387035703681e-04,1.071750129761847234e-02,4.095549920105425747e-04,2.764927711364025768e-03,-1.093707932776877799e-02,-2.377624171642009256e-03,-1.387312695169851074e-03,1.004411452495904307e-02,-4.137147422440027277e-03,-5.503545180508466125e-03,-4.309592229534055935e-03,1.043967932395720687e-02,-2.986834145223965509e-03,-2.631052393231112212e-03,-8.169796784929686355e-04,-1.992705419380280934e-03,5.703051445076451169e-03,-2.620937280643052033e-03,1.703375771231666449e-03,4.676159892132315116e-03,6.997320692750963814e-03,-1.095934202516087377e-02,1.960181808009403386e-03,-3.350224556967788033e-03,3.374626838142742637e-03,-3.002473239703868594e-03,-2.913468011783109865e-03,-2.002261693954488463e-03,5.991621670153546297e-03,-7.243809510838227547e-04,-1.186502015727715440e-03,1.567332121336753852e-03,-1.600514208823340927e-03,-2.813331763546968352e-03,-8.355959246389184705e-03,5.328273145600498362e-03,-9.202984861702517925e-04,-2.565539881399635496e-03,7.156198994565690120e-03,4.896550431797179991e-03,4.603942257500080043e-03,6.989315795881727994e-03,8.045019267401908383e-03,8.557272010105589207e-05,4.830335570322033360e-03,-1.532101414919249472e-03,-3.279488029026196919e-03,-1.799532987642698265e-03,1.781078429780983546e-03,7.877592599699011905e-03,-1.469540588083914764e-03,-3.264057216668858098e-03,2.357232531980561936e-03,1.013284732712624011e-03,1.695824362388671010e-03,-2.107402270790156764e-03,-2.316949777988610171e-03,-2.515617909858971091e-03,1.698684751414280625e-03,-1.923015523075604456e-03,2.000415210106869148e-03,2.943371394092032627e-03,-2.049862330856941188e-03,-6.387197848760296261e-03,-4.556244187158929072e-03,9.144270191446386654e-04,-1.587474698291250303e-03,-9.462887187048315760e-03,1.613738538039357837e-03,4.503460929334503378e-03,-5.040991921619122039e-03,1.275214429234805026e-03,-4.562951341834582328e-03,1.414621726913648111e-04,9.159557618769319665e-04,3.217875418881065500e-03,-1.002939896606710665e-02,1.045914418348241548e-04,1.130361064560493102e-02,-1.033548423604352222e-02,4.217830450125201538e-03,-1.349417936779643765e-03,9.851373430330135315e-04,1.491559084718432557e-02,6.841791632484196806e-03,4.648420928509536081e-03,-4.888074070973768594e-03,3.291993613430929319e-03,-3.952035807993425215e-03,-6.325867330792498203e-03,9.848239745558876740e-03,4.888725336991666358e-03,1.377498285658296717e-03,1.721142680788151705e-03,-3.469413128234922431e-03,1.758377257128449162e-03,-2.736527496300367440e-03,-6.512687953518733232e-03,4.147314458962431463e-03,-1.513217954759032437e-04,1.293045778429798183e-02,2.763363093154530981e-03,4.472548232052324206e-03,1.823112167399702603e-03,9.090058677190005279e-03,2.303303834139654085e-03,1.590791590458581154e-03,-5.910979930814356381e-03,-4.685316533671842962e-03,-2.531865082531555705e-04,-2.082780837795200190e-03,8.162578094643825957e-03 
-2.520627622714628562e-04,2.318891836378042786e-03,2.341503213481561842e-03,1.707172942081549129e-03,-5.751496299029232838e-03,5.539818917416739104e-03,5.239263411040240119e-03,7.273607688113659445e-03,2.276447883000220270e-03,4.734389102364527750e-03,1.972554050494215640e-03,-4.489329440421839859e-03,6.756456811751704143e-03,-2.205640997798324465e-03,2.601877138532782303e-03,-4.166660767575732990e-03,-2.224029827080803781e-03,1.119389240994803254e-03,2.119009503720753734e-03,-1.859375116988971595e-03,-1.324853000428155201e-04,-3.282794307497820079e-03,7.046965010616256658e-03,1.675198837330089518e-03,-5.810954129230362641e-03,-6.528861406603005992e-03,-3.800510580846090843e-03,1.316623055979494967e-03,-4.612461375146236116e-03,-1.103327044152667500e-03,-2.817630295496966093e-03,-3.630201368081661939e-03,-3.409495626365111488e-03,-1.141214366712070636e-03,9.872070262421071135e-03,-5.447082629101525006e-04,1.311116553370999822e-03,1.073334155473039571e-02,4.257073840511480835e-03,-1.411156535022722684e-04,1.053578171201790701e-03,-8.655624354196197329e-03,-2.753357856276403195e-03,-3.882735881432896295e-03,9.254148175631969923e-05,-5.266227123387229314e-03,-4.107581749888958127e-03,8.188186857635133126e-03,-1.062383019050883995e-03,-2.928675754884406662e-03,5.097270911475781669e-04,2.483620620366317096e-03,-1.804810720137180798e-03,8.451894553735969998e-03,-4.885676777491773475e-03,-3.194733400630561226e-03,-1.932822114679004196e-03,1.625042827149490547e-03,-1.006257114074098215e-04,2.896624242326512576e-03,-1.882327899850498847e-03,8.963339220635838447e-03,-1.128905895936410664e-02,3.374708232801398519e-03,3.419207714877508381e-03,-8.880720419039684829e-03,-4.819403137421095218e-03,4.378156438364446540e-03,-5.574327447584674633e-03,-4.483225979277283800e-03,-3.557728614240305339e-03,-2.207750874259069296e-03,-1.509448375624200837e-03,1.957990354402688784e-03,9.217439684288895527e-03,-2.643848440959558656e-03,2.151053184487108742e-03,9.121491763392321813e-05,4.036957745493866163e-03,-3.406272624978311410e-03,2.971484880440116591e-05,7.430601545903683155e-04,4.068885150892511543e-03,-6.541871220623556200e-03,1.968506031028002291e-03,-2.341976984166114443e-03,4.651505058260244074e-04,-1.108416533668369350e-03,1.639207604234015417e-03,-6.043827697862028461e-03,-2.391214606887657763e-03,-8.936685955336987129e-05,-2.469525022416968647e-03,7.080086495460284970e-03,7.808175312566241434e-04,-1.029022927789425418e-02,3.535245761830083187e-03,-8.891112279759581447e-04,-2.132918004275243060e-03,-8.437018897036074883e-03,1.259146487099868737e-03,6.778326476972175160e-06,8.041015270322643058e-03,-5.679497066153663641e-03,3.701217650070455934e-03,5.607339156587330155e-03,-1.301593038107974002e-03,6.311368210281961908e-04,-3.313551057787502080e-04,7.382872441642900378e-04,-7.228638691938463895e-03,6.793822556221398509e-04,-2.512859677340663258e-03,4.692803974683302549e-03,2.424070567885785229e-03,6.318880582120683063e-03,-6.475564301473993832e-03,-5.186397707016744338e-03,4.857005905566581967e-04,1.831527294736642105e-03,-6.726617808207447854e-03,5.638943251247220505e-03,6.333731025830639273e-03,8.589227604160901489e-03,1.000251020598353279e-03,5.562871782229588009e-03,-7.233570101439576287e-03,-1.045304115838610126e-03,-1.011214097958635511e-02,4.572929258841398718e-03,-2.269016329024512932e-03,4.062186467410928287e-03,-5.362014263222013646e-03,3.205231561222154479e-03,-1.327112478966182337e-03,-6.960084686103518679e-03,1.551951787047933101e-03,-3.288645658632005035e-03,4.134186428461817005e-03,2.42347791
3619102950e-03,-5.537579131839905251e-03,-3.036357511770601102e-03,-2.881638331923880910e-03,1.566901071700846967e-03,-8.546273783410936447e-04,-5.933490715980042863e-04,-3.860301877877270454e-03,-7.176751518685638154e-03,-2.014619148270992317e-03,2.403072218102316176e-03,6.176806703790729651e-03,-1.775755874054113330e-03,-1.022132243743724238e-02,6.274846109411714654e-03,-3.239173333948689587e-03,-7.051437636590736748e-04,-4.318251401304993090e-03,-2.602141798800406444e-04,1.017436957355912245e-03,-9.950143194212078546e-03,5.088170525146535131e-03,-2.828510389505969804e-03,-1.253177488802150949e-02,9.902686751478459275e-04,3.450965089029971178e-03,-7.403534459754695929e-03,1.015805884653332410e-03,-8.388159779697088210e-04,-5.121283638864093483e-03,4.435259233433444873e-03,1.890596750308041422e-04,6.717113318630918775e-03,-2.871473874623768196e-03,-1.876286772560573904e-03,3.535072408533823738e-03,6.147894037393625624e-03,2.503900602261795866e-03,3.438650706914471097e-03,-3.067766759031795298e-03,-1.216312711404914787e-03,-8.671366200567537852e-04,-8.288322784677438354e-03,4.222495613095935069e-04,1.999591735490274368e-03,-3.420301488402410686e-03,-5.650418440646719567e-03,7.451777692514135153e-04,-3.692378155156279709e-04,-2.951227055663549568e-03,-2.298405759907851814e-03,3.054917449217571437e-03,4.737466404481765862e-03,-1.112260725119825801e-02,3.872555904361104089e-04,5.814385125934173502e-04,8.826648459119073795e-03,-1.529161293245441620e-03,-3.310345021900408698e-03,-1.005687396251979220e-02,-5.227372961277989595e-03,1.115799304645965530e-02,1.095706940454520333e-04,3.923356727551157580e-03,-1.313789909562617926e-04,-1.090353489101606940e-03,3.579072251229292127e-03,7.079477906907348950e-04,1.293465227378185227e-03,-5.008241458144467540e-04,-1.609613644395389669e-03,-1.908803301556906114e-03,-2.769367004891873808e-03,-2.462339175195495935e-04,4.817132197383949474e-04,-3.959319582891527786e-03,9.450944420354377359e-03,2.395917029579900607e-03,-6.711117843994793275e-03,6.143469193624620170e-04,-4.801282387474505205e-03,3.515138690902475539e-03,-2.507792320853644008e-03,2.794893215726148433e-03,-8.581570057915408750e-03,2.520008360274830165e-03,-3.126638960977714665e-03,6.538761064788397442e-03,5.566353113159413710e-03,-1.609427071915900561e-04,-3.286358898201103412e-03,-1.549369549792025368e-03,4.856076255584626023e-04,-1.063992134839483565e-02,-5.061367112110487336e-03,3.237748315223194697e-03,8.543624337373784822e-03,5.342078773154691415e-03,-8.045889673580880171e-04,-2.420244846373122410e-03,5.410081986350350099e-03,-5.591216743322251388e-04,1.197325551833840077e-03,-4.204266211000371595e-03,-1.091257231341548773e-03,2.204098589105954486e-04,-8.023390608518721559e-03,1.940075900582957473e-03,2.105988139293518604e-03,6.844013766315663189e-03,4.572313099540296737e-03,-2.564239058670741500e-04,-7.820520674698094562e-04,-1.096168521135513184e-02,-3.317465516737234943e-03,-1.548956574676840824e-03,5.997443159547315335e-03,1.919085255003907808e-03,-6.464941020176357203e-03,3.072333480663803906e-03,-2.283129187794061179e-03,-1.059081016752864415e-03,6.760349979866510546e-04,2.696184268417744969e-03,1.162854166197549964e-02,6.066933472796244044e-03,1.341544275400640947e-03,-3.576322493977431710e-03,4.081478214034856174e-03,5.042734256355588772e-03,1.955569359344488511e-03,1.548039268458646775e-03,3.161358749977730056e-03,-7.276549523867047176e-03,6.051496244481114999e-03,-4.539217766763871918e-03,-5.512765094886271783e-03,-6.822226206652528234e-04,8.461522957250614679e-03,-1.21744729727549
4993e-03,-1.968563405390599125e-03,-2.934785859601046244e-03,-2.744842066905088074e-04,-5.754737299024548398e-03,8.002557623511678472e-03,1.185874617937709985e-02,-9.053017561174766387e-04,-1.491658889483307411e-03,1.920018888802633170e-03,-4.718297675322067133e-03,-1.128667848091786420e-02,-6.557171956391140422e-03,7.237740388928400222e-03,-1.632298232971255477e-03,6.143052836549438636e-03,3.329559816004251747e-03,-4.764922568778085822e-03,-5.600821406930902788e-03,2.653331711898786541e-03,6.120470035690312501e-04,-6.725840593742447859e-03,2.818009038375600666e-03,3.827648289290486432e-03,-1.901261675576849247e-03,-3.383081158198527651e-03,-3.482030497473325662e-03,3.649718612241439550e-03,-3.229772542634017259e-03,5.364459339887153386e-03,-1.468495079330216224e-03,4.148063293430066280e-03,5.368914798003804334e-03,3.176184942757194658e-03,-3.784423854913006140e-03,8.951488644568860592e-03,-7.409352070805298382e-03,-6.243203472796866882e-03,-5.822709108702239615e-03,6.240992908195775422e-04,1.429528551263866028e-02,-4.295957995067979825e-03,3.203783150785180760e-03,-6.069372366304301956e-03,-2.245740911896599711e-04,3.526378960542946549e-03,2.107775683168602518e-03,4.342192085151540260e-03,3.309571121778574938e-03,6.991894546958786494e-05,1.292880806967404731e-03,-2.339864948308342969e-03,-2.459902364475812300e-03,-1.506444072332144410e-03,-4.264589190706808905e-03,-8.218548043194263270e-03,4.832306196045257907e-03,-1.417468003163998864e-03,-7.329944564634833810e-04,-7.898911025764993000e-04,3.858425330721351743e-03,-4.724439272833427382e-04,-1.086862223588600483e-02,1.732184381896534038e-03,-2.697950929025543301e-03,-1.168860733285261369e-03,8.839639022786597092e-03,-4.766617426747105438e-03,3.162662723463393038e-03,2.115981093557330896e-04,-1.932048174176703779e-04,1.798570451383263219e-03,7.896748219712795142e-03,-1.017747171244997098e-02,-4.246607201389556406e-03,1.469717375512289391e-04,-7.774045332781708700e-03,-2.925447350328563758e-03,4.093347664335815310e-03,-3.401789760958195442e-03,4.801723954860045128e-03,2.811217142336084295e-03,-4.425123362733310826e-03,1.545876657058887832e-03,-8.451594312835474312e-03,6.244563497387747159e-03,6.465893425809731559e-03,-5.054244288460599862e-04,7.925392119035562510e-03,2.104255287020409364e-03,8.528098154057545816e-03,4.146302892511635925e-03,4.400693264143209701e-03,-1.702592839176853392e-03,-5.433558114095182745e-03,-6.043478869926327719e-03,1.730977390047282313e-03,7.035331911450294282e-03,-3.411738399635845347e-03,-5.850994290730467898e-04,-2.167980040760754921e-03,-9.325395100281515942e-03,-2.263182965851599088e-03,-1.001921457938686338e-03,-5.545445937116307705e-03,1.060224673616361359e-04,-1.066584853281105040e-03,3.926528327050267386e-03,-1.665646040879788280e-03,-7.434155102440840115e-03,-9.436251817728909209e-04,1.299082725877529539e-03,-8.245183249350493491e-04,-5.109561268059975569e-03,2.633649301646734560e-03,-4.044216128316554214e-03,-5.322974860950418878e-03,4.143205250023543926e-04,-6.630532682511654577e-04,2.250130078080856484e-04,4.136410041499728220e-03,2.902181688398405759e-03 
3.591588112127206550e-03,-5.177324711294459605e-03,-4.658431424080174826e-03,-8.750778726913530309e-04,2.975689844251661748e-03,-4.802637926138155974e-03,-2.285603865956406175e-03,5.414887186313201151e-03,7.126505575170622060e-03,2.812538447745492869e-03,-7.839528723272014450e-04,7.898867130160153877e-03,-5.434569619875322284e-03,2.435642663590077001e-03,-3.166366845760298053e-03,3.643675262395746923e-03,1.200398678314633502e-03,5.102439010662281234e-03,-4.689222069549787639e-03,4.232605495714226139e-03,5.218235594004918924e-04,4.363517176717164256e-03,5.838466330382309673e-03,-4.284615547048692234e-03,-7.953126467844955083e-03,-3.558888246381959608e-03,-7.642075812358596100e-03,-7.634155508271391229e-03,3.490328604392072855e-04,-5.981296952896999683e-03,-1.038976606137419559e-02,5.189448242090403431e-03,4.726060915941921944e-03,3.933515931925545689e-03,-1.214850416682209744e-03,-1.362696264415284636e-03,2.419939958178903266e-03,4.584113623198221230e-03,-1.390639101839593640e-02,2.467154715888550420e-04,-6.296718863227468432e-03,3.805996444409973306e-04,-3.547979510473487849e-03,-7.759169689466985029e-03,7.151668541675160369e-03,2.349013182350637684e-03,-5.503407484874774103e-03,1.617866395738429578e-03,-4.430826014809839675e-03,4.073947015813254857e-03,-1.085793966655181048e-04,4.353902138524812053e-03,2.384070362273104596e-03,2.128469710041948435e-03,-1.953116884648742682e-03,5.133091050635774436e-03,3.779647447827791471e-03,-1.055433442926047337e-03,-4.525187817596035104e-03,-2.851974031332010079e-03,2.000321006490073561e-04,6.058734263204892641e-04,1.265369766375080062e-03,8.605483189444148445e-03,7.039660568910019890e-04,4.553710267397171447e-04,-3.470781686341388277e-03,-7.856286720969935314e-03,5.726832008684605829e-04,-3.973273380603064078e-03,1.015174730214009466e-03,1.058622088238585007e-03,5.296494749150907656e-03,-1.814078018504537530e-03,-2.659545228356275434e-04,4.215281804942945174e-03,3.068802582578305724e-03,-1.699730651726845442e-03,3.317193350336055811e-04,-4.085980961242317855e-03,3.883957139379572068e-03,-8.579780956958396598e-03,1.028391183251798485e-02,2.212883084796123372e-03,2.593159658152061325e-03,-7.844869368944663857e-03,-2.569388667426873891e-03,4.693591667284956886e-03,-5.704244335926441853e-03,3.068638101798443207e-03,-6.978339245596511080e-03,-8.753656502674154322e-03,-6.023287450016802033e-03,-2.214662012307506631e-03,4.600482122718501515e-03,-4.074881744358784495e-03,4.586639443919141891e-03,9.035567500118323048e-03,2.045446646427296917e-03,-3.612351472777869995e-03,-2.539753025367724910e-03,-5.067722264383092007e-03,-4.194555684238532897e-04,2.686974658152866801e-03,2.913054525854759901e-03,-4.760986961455645171e-03,-1.440792605324680122e-03,-3.416621782302409378e-03,4.303760688850515048e-03,-9.113868038629615795e-03,6.027249011692794450e-03,-9.108154698477291880e-03,-8.984967848288087514e-04,-2.841537348873756589e-03,-6.681343631125468037e-03,-4.317017540993809029e-03,-1.649864310952555389e-03,-5.545071343830620058e-05,5.130196875377423171e-04,-3.262200071132395802e-03,1.055431358236788011e-03,2.645053156828280905e-03,-1.014929067586273735e-03,-4.878396205203348358e-03,-7.064182032124386931e-03,3.833803198912402819e-03,-9.766480905927977726e-03,5.851794601281763535e-03,4.481588099394119562e-03,1.519449261026757907e-03,1.244159019157434426e-03,-1.469223341448583527e-02,-1.266026934875664686e-03,5.472458704367978363e-03,9.002452848512336112e-03,3.936061921551360643e-03,-7.812755748527329749e-03,-1.838430722522112724e-03,-3.363317920270601693e-03,-8.2698371
88586304927e-03,-5.043802999636776141e-03,4.084736184755575691e-03,-1.100578576704181431e-03,-4.467471669244354147e-03,1.352689391686445666e-04,1.706382422054609571e-03,-3.072994237614969266e-03,-1.647259370854333663e-03,9.979625434820426841e-03,1.766709335462681709e-03,5.019357060679627888e-04,1.958108718334239330e-04,-2.814607571473640331e-03,-1.055421681445192338e-03,4.271519368022962462e-03,1.238961636864710557e-04,-2.253888943480423027e-03,5.231694466576418401e-03,-1.382966784814059023e-03,3.456416448910245263e-03,-4.182856496612789941e-04,-3.645838293122086269e-03,1.892791853710360656e-03,1.178993717973458389e-03,-3.185520837419721519e-03,-5.022505574042116817e-03,-3.960825727639496682e-03,-9.020881374956588039e-03,-4.479042285199486342e-03,1.367178034101112702e-03,-2.371500404715099885e-03,-1.566309500547952149e-03,-1.868964633035229139e-03,-4.719582353204004183e-03,-3.363603542003064557e-03,3.779385573253123912e-03,4.508497073238081597e-03,-1.003281624000943416e-02,8.351019726259942266e-03,-4.443005490033367054e-03,-3.366041276567481762e-03,-6.439545711909412141e-03,4.129204955437103383e-04,-3.520689881266133740e-05,1.010144720460604949e-03,7.396505282883689830e-03,5.684209112332506107e-03,1.714201016345725387e-03,-2.871003153584488453e-03,-1.078241438015682934e-03,5.811667830754423114e-03,-3.653673983054590499e-03,4.672952505451760490e-03,1.445689763066656684e-04,8.525534720547437231e-03,1.484610853120262031e-02,-2.892396585870023705e-03,2.464251212314573676e-03,3.847231024391500540e-03,2.106125823856691559e-03,1.218251851623761801e-03,9.668024257838269287e-04,1.117943286608455867e-03,1.332272514212712788e-03,1.409400830732855658e-03,1.227908914546390869e-03,-2.003730416787662116e-03,-3.113754857984592055e-03,4.394578909988928507e-03,-3.266582714159429589e-03,-9.131045264571658812e-03,1.842967292519360773e-03,-5.334915931334433155e-03,-4.959164312579742752e-03,3.158059270406356547e-03,-6.635496705691624119e-04,-2.236429578888123049e-03,-5.384625989034415652e-04,4.679935239037010322e-03,5.096093714899579963e-03,7.125043572739859024e-05,3.551411252210415724e-03,-2.988569325461537009e-03,5.770000363742226399e-03,4.466148942266436397e-03,4.555208831740562535e-03,-1.478146984362573935e-03,2.164486635781393432e-03,1.292510095618811666e-02,5.736258184757917237e-04,-3.176660420559924980e-03,-3.865616222289145962e-03,-1.575967518411658603e-03,4.696002089561656170e-03,7.405831801403773076e-03,8.804261871708773599e-03,-1.371147579661072258e-03,2.958641440699562265e-04,7.028492938371069115e-03,-3.795276527591997639e-03,5.999648459328196792e-03,-3.770861717354064956e-03,1.101644216497656538e-03,-2.332833319014038145e-03,7.791442806891471159e-04,-9.299633765891670905e-04,-5.711392258296759013e-03,4.555016995088262931e-03,6.344033618599236729e-03,3.174592422646086917e-03,1.568543674628854233e-03,3.572115352710781427e-03,3.124833316845470796e-04,2.849831473342064915e-03,-1.233593545640416546e-02,3.565728827939331890e-03,-6.923081848753710743e-04,-5.650983102442767858e-03,1.004631981625866283e-03,1.375007906585892000e-04,4.804314177075485852e-03,-5.495867702306113205e-03,-6.808175245092216131e-03,-9.127363932766871871e-03,3.206020526585287599e-04,1.469680020289126617e-03,4.346116798917197717e-03,3.562242385077655496e-04,2.429888794544429655e-03,-6.019399856574739020e-03,-7.321053260233656781e-04,5.729182279849179618e-03,7.066233671555686166e-04,3.155510885744117634e-03,-1.172692546892252973e-03,-2.035589219451174221e-03,1.526543845567618091e-03,-1.329432650406849438e-03,-7.081687418910654133e-03,1.
969484526701095143e-03,8.559051952899984425e-03,8.831148297453514763e-04,-4.352996690723831032e-03,1.432453597706983498e-03,9.949471782890491681e-03,3.732398905808787404e-03,-1.428546629998094972e-04,-2.692852176401591604e-03,-1.778626732833543797e-03,7.088568757023839834e-04,1.268187850917622218e-03,5.174253302977991839e-03,-4.141572748425571132e-03,4.256450032044807943e-03,3.257606535264459188e-03,6.073249799617966641e-03,7.861383260551453242e-03,-4.047909733149552061e-03,2.322458116531848308e-03,3.061241716211329962e-03,3.479778915811253433e-03,5.001361587887488865e-03,-4.908069096687375701e-04,2.070447481070045344e-03,1.223924454602253777e-03,-2.892617256770152554e-03,2.120272749859224929e-03,4.570612718580959431e-03,-1.961095254692279792e-03,-5.355651816539030302e-03,-3.218368471857754355e-03,4.318599953807483738e-03,4.650804264040230354e-03,-2.767670346750020476e-03,-8.242672017537531690e-03,-2.329597868147343168e-03,-2.043003387303139479e-03,2.661497809883907957e-04,-7.274465892107896160e-03,-6.170467705311905027e-04,-2.534312902428643901e-03,6.390563287022366958e-03,-2.118593954878401821e-03,-3.303424249649773627e-03,-2.788429669983448315e-03,-1.299183120207236287e-03,1.106127332945820371e-03,-2.235839278454873353e-04,-9.233171904522982643e-03,5.454457683615536803e-03,5.826264997323156621e-03,6.559957972482794754e-03,-5.940712338520983717e-03,4.434390216133650631e-05,-5.387784738556126475e-03,4.648682519198206464e-03,-2.932542084323857580e-03,5.924591044258229779e-03,-3.585369935437574766e-03,3.843553569465206314e-03,6.900275807159737471e-03,2.553203681024839803e-03,-1.134976058453202995e-03,4.709309593952606780e-03,-5.925967892435005102e-03,-3.101210664753578216e-03,-8.994947792226423017e-03,3.812607220356645666e-03,-4.462156003354400199e-03,-4.010021780254678474e-03,7.461956351712388004e-03,-2.317401076662334216e-03,-2.857275366517916110e-03,5.138427576720942802e-03,8.539423038054943290e-04,-7.181786888832909950e-03,-1.786722218247466026e-03,-1.625934288248372973e-03,-5.565415019757423767e-03,2.653107405284846516e-03,-5.401995595342865865e-03,4.435852652627978246e-03,2.706130876153779578e-04,-9.000152801743568215e-03,-9.000347483225741227e-03,6.298764434053753108e-03,-5.197843879181183319e-03,-7.892590381448853082e-03,9.528890761432628684e-03,5.822974067694886013e-03,6.817486289634631588e-04,-1.171563844511075599e-02,-8.797128161273264687e-04,3.992210890944051550e-03,-7.292570084070978433e-03,-6.123089824462710738e-03,8.569022017894426316e-03,8.979619411267564591e-04,7.436726992763632860e-03,5.303728475952633817e-04,6.948638240263793489e-04,-4.478503442527453048e-03,-6.049536215523155635e-04,-8.127941584458330329e-03,-3.192687270413335863e-03,-4.818716213697261941e-03,5.942237194753651093e-03,3.667305093365979055e-03,1.046222907212567086e-03,1.701019503017136636e-03,2.731337675165271262e-03,1.717084012211851120e-03,-4.651173265881500610e-03,6.379715587529309487e-04,-2.055893527449927528e-03,-1.321546343906619160e-03,-1.252660796567350333e-03,-5.503882173761981238e-03,3.394979487232863222e-03,-4.543195951648529816e-03 
-2.582092048520396709e-03,-2.620485214679908487e-03,1.782894968111215037e-03,-4.017312193146485230e-03,-1.074115578184719859e-02,1.067001023856287535e-03,-4.217335229661438420e-04,6.874363184045037545e-03,3.660828244209171200e-03,-2.940556605208103236e-03,-5.461443898242363156e-03,-3.736795832850975038e-03,1.524724834086698365e-02,-1.196825060609067780e-03,4.392061771407571556e-03,4.487048155291885679e-03,-4.352821023822232155e-03,-1.547556855450998295e-03,1.032084864694346026e-03,-1.121104598369759197e-04,5.904266928434106967e-04,-1.949145539817555240e-03,-9.124938282256611286e-03,-2.479658209733955499e-03,3.489785851086891957e-03,5.581389528490409389e-03,-2.621219215628238942e-03,1.076912605804579252e-02,9.045973278810041799e-04,-2.466671804974937109e-03,-7.384480360147886897e-03,-1.549253908147989536e-03,-1.047336857367286815e-03,-2.424479231308523225e-04,-2.557687494896697302e-04,2.119172852384494481e-04,2.522724895144513261e-03,-7.293073649701440828e-03,-6.912730044100257910e-03,2.850739704056503773e-03,-3.288730847153103435e-03,5.558352344512546006e-03,3.726995540225821416e-03,-5.896673485024919378e-03,1.080986581934630791e-02,-5.828245908035723129e-04,-6.496959321823496739e-03,3.208251320900911818e-03,2.448704669969198723e-04,-1.355556824677162661e-03,2.133665694283735838e-03,7.868190395427921910e-03,-8.427582772345163604e-04,-5.231691315257061027e-03,-5.724601654390777845e-03,-8.967995956896851126e-03,7.178477528807805652e-03,3.660674142263102386e-03,-2.844606986598410998e-03,-8.276408706856979922e-03,5.234388487634535590e-03,3.443327811613909259e-03,-2.229289351795913764e-03,-1.177483616580174419e-02,-6.860923741414381169e-04,-6.732505110342673973e-03,3.722644023775089316e-03,-8.782627052159482778e-03,4.473773477109548600e-04,-9.911896385523227826e-03,5.931110460982856776e-03,-3.559312439328407660e-03,2.017951695080581666e-03,1.244395483855819046e-03,-4.875221760369166830e-03,7.542877724369558175e-03,3.126211569560030591e-04,-9.121882287690013733e-03,-5.807413191635202483e-03,-1.310620216421952942e-02,1.737727827865801628e-03,1.372312905243467957e-03,1.380012595777004719e-02,5.208951798895432306e-03,-2.713207968189260133e-03,9.258938847422414770e-03,-3.577769493615382684e-03,-4.367692282663941140e-03,1.925093663444124407e-03,-1.169116570511109050e-03,-1.113671919500198093e-03,2.908926298185127244e-03,-7.262374289526003258e-03,-3.854106799893383672e-03,1.369264998114935999e-03,7.802976626879647680e-03,-4.336235522529815339e-03,-8.812976968466801689e-03,-3.385829710326583499e-03,4.265160119015999791e-03,-9.136486008968394690e-03,4.675845448413582287e-04,-1.012087094638987675e-03,7.102218529487343279e-03,-3.577558134654811386e-03,-2.671981055558826233e-04,-6.746760105593862947e-04,-5.927588978041259220e-03,-5.408366410828572071e-03,1.323076940581563714e-04,2.280958297345446906e-03,1.543997530678775307e-03,-4.215965978458786553e-03,-2.691104431295561050e-03,5.621908919071507381e-03,4.091810673782415690e-03,-4.916449177339067729e-03,1.725444929424065294e-03,5.706819989388510483e-03,-2.665562247814578546e-03,-4.881231900923022797e-04,-4.362839375042843436e-03,8.728246776495317749e-04,5.722429926092603876e-03,-1.909049856538573261e-03,6.836450116120247091e-03,-1.012734044563369865e-02,9.642950756719061503e-03,2.625491164260442250e-03,1.970896271988865868e-04,3.072461788304613715e-03,2.567241643752444201e-03,-2.093307703016042930e-03,-3.468501444350232527e-03,-4.269174561677642477e-03,2.700311155537738015e-03,1.157135902436169650e-02,2.851801018651140690e-03,-1.138578067770790875e-02,1.061
[data file: long comma-separated matrix of double-precision values in scientific notation (a numeric test dataset bundled in the mlpack source archive); raw contents elided]
3e-02,-3.137395420630204133e-03,-8.893049034044973530e-03,1.036913296516112255e-02,-2.930757666681561065e-03,-2.020672964538394734e-03,1.078456053949445638e-02,-6.101182925331591865e-03,3.427665977278153246e-03,-1.420065519822961671e-03,-7.104456015207284963e-04,-2.024557564873073409e-03,7.825814652771545532e-03,-2.421495721266815442e-03,3.197324404244917141e-03,6.428629686302001498e-04,-5.271963179360226494e-03,5.701811553178674948e-03,-2.866194092330064027e-03,1.027804973047045066e-02,8.881291809159344922e-03,-3.969708482267063701e-03,-7.087730893247786920e-04,-6.644414337867664798e-03,3.572675270382439511e-03,5.916233631296220936e-03,-6.732208053387945049e-03,-1.010383839443689587e-03,6.891417716709042317e-03,5.175597688687003789e-03,-6.761287991066382980e-03,4.007194755581685273e-03,5.089225375102474427e-03,2.721594015989401566e-03,4.182639390954112873e-04,-8.320583065162374080e-04,1.085691064584120430e-02,-5.814136317066430533e-03,-2.895107397472057500e-03,5.129977114788919663e-03,-3.522988200291228816e-03,-4.661320518850272689e-04,1.082562642400075430e-02,-1.097708797991448718e-03,-6.602467658660118519e-03,-1.830459831565775866e-03,-3.830918446159419736e-04,2.650815344005158936e-03,3.991160977453740016e-03,6.935885789173307572e-03,3.276922220340938892e-04,-8.311599353802139739e-04,9.974493830069580420e-04,-4.742429240296739238e-06,1.030521304853747669e-03,-6.014983667983513560e-04,2.872785507561641683e-03,1.230302861246859204e-02,5.693525324957171910e-03,5.476350765894206617e-04,-8.438107341165618789e-04,1.558637240401483095e-03,-5.251820647657576719e-05,-4.409765766433030275e-03,4.443780144474018291e-03,-9.074214048057933218e-04,-1.346313672993051718e-03,5.449910457515594504e-03,-1.512777214735806211e-04,5.831592342258266892e-03,5.105520185478440442e-03,6.999611116742123114e-03,-1.583624687760652955e-03,3.674446904814204946e-04,-1.178633610921068879e-02,7.962400973186135530e-03,-3.106195458962715452e-03,3.670040881841386653e-03,4.039487654248607139e-03,-7.362113622824515910e-03,-1.687251365622097774e-03,-1.436875382661841831e-04,4.804038742376558722e-03,2.865854410285923436e-03,2.749874405649703306e-03,5.612352536950664626e-04,-5.701336129828383711e-03,-8.021768302389771510e-03,5.812849280716783293e-03,1.098453834091111603e-03,1.447381473175725353e-03,-6.187775397664243941e-03,7.241226474508236435e-03,-1.806913991925983786e-04,-2.873199455941390004e-03,-1.280980734884264692e-03,6.212215649381581592e-03,-1.331811187317780663e-03,4.322706066337420744e-03,-4.951452128976436586e-03,-3.336472069072465244e-03,1.602377938482032419e-03,-1.306789734442325357e-03,9.306372737493194373e-04,-2.669864985588850641e-03,-1.299444985504315817e-03,3.128827593294558271e-03,3.059440578250412907e-03,5.907968975453997511e-03,9.417733511055695339e-03,-6.660013205021135703e-03,5.192334868444939064e-03,-5.803130192826974100e-03,7.884471214938874525e-03,-3.639583153421843860e-03,5.626636088246732802e-03,-4.564775735955426891e-03,5.865178882003972867e-03,2.730152292755547668e-05,-4.735219087724399723e-04,-7.101489631103265025e-03 
2.440074725215037304e-03,-2.907517558953308639e-03,6.640409866118139777e-03,-1.052570753121359788e-02,1.647690121570556622e-03,-2.138201065297743071e-03,3.999153392800265487e-03,-2.892867390309437469e-03,-5.461814133998456810e-04,5.252018732953871866e-03,-7.139055915858932150e-03,-4.552576228491042945e-03,3.722657315926883922e-03,-1.574223838536330938e-02,3.935455148203074842e-03,1.266480463244510843e-03,-2.156349552821283655e-03,4.178082679036466442e-03,1.738979491653122817e-03,-2.034575692971264844e-03,4.213123679360040487e-03,-5.029340903148745226e-04,3.551242691026005975e-03,1.016422803433833164e-02,1.656995505065587959e-03,-2.959581184993042777e-03,-2.919449312726671991e-03,-1.954889029376937263e-04,1.765668485983585374e-03,-4.557213830952373812e-03,1.250567459842951909e-02,1.173601823109483378e-03,-2.563418440573498724e-03,-6.427058462484507538e-03,-9.193340321303757404e-03,-8.591958794120233348e-03,-4.019968694580712022e-03,5.186538773528973599e-04,3.856943726972114714e-03,5.440765381704438207e-03,-3.920718164663257108e-03,-4.579686811094311853e-03,2.935315888907118025e-03,-1.447971675765340758e-03,1.664800132346706818e-02,6.565227550603710931e-03,-1.331475539189913787e-03,-3.306551732304779338e-03,4.181071135120595882e-03,5.195375010718431691e-03,-4.383990431698202295e-03,-5.287180738167192133e-03,-5.905062688131680095e-03,6.259002650160287208e-03,-2.903316511042697712e-03,-2.954900263669126784e-03,-2.698492105104312224e-03,-2.288621390549801498e-03,4.599649261586577845e-03,-7.371590270523209650e-03,-1.844473343904026036e-03,-6.020164117124796564e-03,1.058858600447114967e-03,4.699080171604844494e-03,-1.818447085890393918e-03,5.024676499489146063e-04,-2.210999632044390734e-03,2.630935002248395741e-04,8.420933714276695320e-03,-2.566196020491388723e-03,2.890753107925290912e-03,7.335875073744260212e-03,-8.952854615050263865e-03,3.305009945286791363e-03,8.740913284296675939e-03,-1.230536975601594085e-02,-5.266699029011815206e-03,-2.709195000340591884e-03,3.278754762558923602e-03,2.670035917664847504e-04,-4.060729921136482096e-03,-9.426133341059343695e-03,-4.440000413365149988e-04,6.647041253232832507e-03,-7.255856512010399771e-04,6.457746625153993128e-03,3.661768726136493957e-03,-1.387509076899048422e-03,-7.757949017646052249e-03,2.240941139599917763e-03,6.735473589637820842e-03,3.482258018580985503e-03,2.238727493359290305e-03,3.026978572856121527e-03,4.895703778808673315e-03,5.617364252765413488e-05,8.535053708035792233e-03,1.796253390971914825e-03,1.357222263442911872e-02,4.565003831486497479e-03,-1.076513467715277395e-03,-8.040209158326148617e-03,1.395223269372612230e-03,1.391494919805987354e-04,-3.674411890357612547e-03,-2.015274438925845923e-04,1.985364411738057590e-03,5.658596911512032632e-03,3.147765972005647263e-03,1.494468633827809347e-03,-9.723717065698130740e-03,7.316550657748382676e-04,-9.737859028114631901e-04,-2.375762021679396463e-04,-2.457453012039731693e-03,2.165227383193487395e-03,3.376639007064021099e-03,7.589760758158870836e-03,-2.285125276715833671e-03,3.249172375030506908e-03,5.763584049353202496e-03,1.353062239435705782e-03,-3.234668613612597370e-03,-1.107224741376068144e-03,8.100589446992550666e-03,8.265080499404159689e-03,-6.529715625566705108e-05,3.971359029123570352e-04,7.073861274846527346e-03,6.808262272609496429e-03,2.808588983680597364e-03,2.354244946870209104e-03,4.593759742447128293e-03,-3.809501889380049067e-03,4.118964717100739165e-03,-8.295357413717124526e-04,-5.782928726853938696e-03,-1.453152075411017277e-02,5.375677502224124847e-03,-1.1650530832590
32056e-02,-4.667041426197334614e-03,1.921566412616696438e-03,6.724681016904716924e-03,4.144323677856727450e-03,3.006009478169226895e-03,-2.517515303833839499e-04,7.840345259974160555e-04,-1.704937293520002693e-03,2.590308189405964804e-04,-3.168688500033999683e-03,3.955846594725784954e-03,2.323064048223407591e-03,-3.587869163302630140e-03,-8.497132521619203682e-03,-7.137931594202333307e-03,-7.468889334580028003e-03,3.641723836356752656e-04,6.959888705437700454e-03,-1.834861823423785884e-03,8.782302889703767768e-03,-2.159128749511410577e-03,6.761713968644095414e-03,-9.252052926156590848e-03,7.384156203191140355e-03,9.797216997099940025e-03,-9.163732329110708258e-03,-1.458093920077388993e-03,-4.067988668331486320e-03,7.362492867803687412e-03,2.345268058080583924e-03,3.079247794575304658e-03,-1.477382735079323106e-03,-3.946745086466734147e-04,-6.577292141179064740e-04,-3.310427878183704693e-04,-4.575000296722987400e-03,1.666119672661519561e-03,-2.917750769923129796e-03,-3.763741160682623158e-03,-6.430699938874262847e-03,3.833100668555437015e-03,-7.690773784813065804e-04,2.781590063147106485e-03,8.486009014587478014e-03,8.204477235851281164e-03,1.576336512884013268e-03,-6.708363339413162679e-03,6.583606752038582977e-03,-2.086087703285577268e-03,7.410281386346254746e-05,3.576032411070102692e-03,-5.330043546192277625e-03,-1.565483865998810748e-03,2.650382524352127862e-03,3.756727304650980294e-03,-6.934647630481610876e-04,1.357340193445223230e-03,2.411855112284639995e-03,1.314955257381904665e-03,-2.227184574533294745e-03,3.336121187266649721e-03,-3.578441510061508134e-03,3.013137940993529558e-03,-1.221139861174582245e-02,2.709156150414599937e-03,-4.184329446215137409e-03,-6.953910557398084065e-03,-5.684826840350919522e-03,-5.603483204539540935e-03,-1.971197853562603030e-03,4.802047475327023661e-03,-6.817423831959073220e-03,-3.456533258919490797e-03,-2.837343970200004954e-03,1.962994454955003382e-03,2.693900798594231483e-03,-8.511880312353463504e-04,5.196021558877736077e-03,6.227800385776271737e-04,3.842497622047597733e-03,7.321222886362319104e-03,-3.468421387536826592e-03,-9.064285112709480688e-03,2.125971390813468764e-03,1.203642140341318681e-02,7.640870936876592925e-04,6.011897385220858238e-03,-5.414349967712252937e-03,2.183152396138277832e-03,5.407923179863658933e-03,-3.082268042662377958e-03,1.236399711461952291e-03,-5.288435220810119505e-03,-2.242432615432752869e-03,4.607264878952688431e-04,2.116185901661901746e-04,1.571184571913299827e-02,-3.929756160696597049e-03,4.797939868014691571e-03,-3.633266941479818598e-03,-5.710179262076085782e-03,6.059724942341518893e-03,-4.130020419541396287e-03,-1.227757248243461878e-02,-6.709859269876406471e-04,4.244742411428742770e-03,-8.931188952401802883e-03,-7.152426757880822258e-04,-4.414792376729014152e-03,-6.272896483915057358e-03,5.915920931320230863e-03,-3.000296078375335688e-03,1.002198070611802581e-03,2.269998824204931369e-03,3.421278318163519655e-03,-2.266831230236696429e-03,-1.957555967970979517e-05,4.800040044736580101e-03,8.703778677621997265e-03,-8.679885836944787811e-03,-2.955273551260113093e-03,2.540631567590444055e-03,-5.957447747173796188e-03,-5.717326728594318347e-04,1.229048013270008085e-03,8.159574719550109767e-03,1.822161208856243103e-04,1.123340953497591879e-03,1.651109313219847842e-03,3.208496782539896949e-03,1.340839091111236748e-03,-9.959799597152289505e-03,-2.374747609694570407e-03,-3.357419991706290380e-03,6.568209507625380299e-03,7.242831784718945995e-03,-2.609562962155084070e-03,-5.841452304288672834e-03,-3.232273086323703311e-03,-
2.590919327036807912e-03,3.793578595309926062e-03,8.227675543931285912e-04,7.443151578079763809e-04,6.981889796587805071e-04,-5.633002277771453209e-03,9.685843162835860651e-03,-3.135590024011671289e-03,4.285805925503864688e-03,5.271853930406526952e-03,2.536232964812189586e-03,6.432949861349881483e-03,6.860688228910728249e-03,8.333024352060135892e-03,-2.717374994315725507e-03,-7.486231577604295431e-03,2.152373364455714699e-03,-7.039405616664380465e-03,-1.484523318835178816e-04,3.640741707853875631e-03,4.464644925023842498e-03,4.701316132201257408e-03,-8.208707727923526994e-04,-5.995543013957317820e-03,-1.665724746371604866e-03,1.812192833327857036e-03,-5.741813181861691413e-04,-2.876036334378939251e-03,5.868314450086810179e-03,-3.316065863679289964e-03,-4.501292456019712930e-03,1.353058693972250190e-03,-4.750314490884062837e-03,-4.396104629351836775e-03,-5.240311226175986895e-03,4.041081116312595159e-03,2.076372924178427790e-04,7.650808086115506697e-04,2.517616695097154612e-03,-1.892249774016617654e-04,-3.853378455851376363e-03,-6.426460776497512281e-03,1.454608872318484620e-04,-3.712454908600414105e-03,4.158000052523211712e-03,2.005689049986479535e-03,4.364559931705348783e-03,-4.474223879461401536e-03,9.585280729663267149e-03,3.010104706036664888e-03,-4.871263740085107589e-03,-1.883138601494385462e-03,-6.423933204567826814e-03,-5.026977490513224837e-03,3.169751136927100466e-03,1.797624659986629420e-03,-6.330931209475697133e-04,-9.944179915589592897e-03,-5.869812030130815671e-03,6.711532596667894000e-03,8.417111770608510721e-03,1.082659675141998609e-02,-5.238019969243920375e-03,-3.802191394056537738e-03,1.128851502769558091e-02,1.426302768254577311e-03,3.109736874857375009e-03,1.679168325795601631e-03,2.900103547014149050e-03,-5.352582572487519889e-03,2.746090366840653850e-03,3.153860870753579133e-03,6.894763878952362215e-03,4.841170890268771572e-03,-3.174832766853102981e-03,1.893916083863868044e-04,-5.940002637250538578e-03,5.167249752842269922e-03,-4.128401091229224121e-03,7.742889519619674740e-03,7.992120733665338678e-03,-2.430476560734053798e-03,-7.307673876456626666e-03,-6.467734052716584726e-03,-1.892965872013256451e-03,2.696853568310839284e-03,-1.271752813980906675e-03,5.852176752746974728e-03,-2.250617661753301791e-03,1.167111364696511245e-03,-3.409281444051049954e-03,1.033043323875651801e-02,-2.038242584708496903e-03,9.455047669649683205e-04,7.946560380849220298e-03,-3.936342719741241767e-04,-2.809772949386916182e-03,9.915964649796826791e-03,-8.632163827933654566e-04,-3.431919290718688561e-03,2.287991822635323212e-03,-3.225980067039936790e-03,8.510604353151145218e-03,2.393848655678578233e-04,-1.622810639043560611e-03,-8.846303728972047462e-03,-3.069237973544715597e-03,-5.073481450979653272e-03,-4.387667844100040342e-03,-8.256123197017973744e-03,3.401263245840678833e-03,6.511236975400888638e-03,5.135728509214759763e-03,-3.801642134102470490e-03,2.256932503473531217e-03,-1.200289317389805193e-03,3.610436214336066978e-03,-4.117846838009898980e-04,2.786735539487130136e-03,-3.349259277226804277e-03,3.246789232623271265e-03 
-6.275791800570630532e-05,6.265345177340807259e-03,-4.963816295395787276e-04,-4.120477880197552858e-03,3.397786998073769327e-03,-4.382408811916566928e-03,3.096075317060234620e-03,-3.028734852868109843e-03,7.330144739557318406e-03,5.546343128560419708e-03,-2.175431299258699658e-03,-2.523000121801940131e-03,3.234825586720824469e-03,-1.388806702855761681e-03,-7.399090773319062526e-04,9.897342363972287584e-04,1.267104843455474101e-03,-1.626026147664070892e-03,3.207386547070152638e-04,-6.197811513502621619e-03,4.019355431685740603e-03,1.104204997509445306e-02,-1.072483550678235073e-02,1.831597840484717977e-03,2.304160493524720198e-03,4.796854199174526399e-03,-2.263764990185585142e-03,-1.498818678407957267e-03,-1.526162528013255693e-03,8.611606695242992465e-03,3.563109295525803116e-03,-1.756880941988608153e-03,-3.898676350098012871e-03,-1.189315256453016625e-03,-2.248721068477771163e-03,6.305182190084047782e-03,-4.724391492199989410e-04,5.417001408878165981e-03,-6.777965659950340414e-03,-2.922981938555906014e-03,-1.207454763929851835e-02,-6.997100009163476422e-03,-3.477580714568808177e-04,-1.742211577007306104e-03,-5.224348716189928729e-03,-1.146600827011184150e-03,4.857433356563381336e-04,-4.808964684060817313e-03,6.135117177019964270e-04,-9.529867867166150544e-03,3.609602541439891569e-03,-7.012771865366096598e-03,-6.425381844874880101e-03,1.423874427754311112e-02,4.129794781522015505e-03,2.473032715600777427e-03,7.219013221411217304e-03,-2.633709354776225895e-03,4.793812060799544812e-03,-3.525078248559954694e-04,1.111811610018059596e-03,2.750146199475302783e-03,-2.885596523192000395e-03,-4.355963339390837505e-03,-5.840842520141139964e-03,-1.171984684272425661e-03,4.443565297375080304e-03,-4.784783840073036118e-04,2.600836902549712084e-03,1.303446790873245571e-03,1.280121789977454495e-03,1.890352337630062454e-03,3.433877555892426059e-05,5.527604833758431417e-03,1.472082393958341418e-03,-2.955980151098878180e-03,1.636050664357136582e-03,-8.264687557553910104e-04,-2.688392138709706433e-03,5.570389912874802903e-03,-3.415723835356319557e-03,-6.557077336927139531e-03,2.911430386447729293e-03,-6.074470569445758125e-03,3.335685558667745004e-03,-4.325183404486464961e-03,-1.125941864787002519e-03,-2.306250698414277073e-02,5.903886126988788091e-03,-2.529443075643077014e-03,3.397179881242941529e-04,3.922670638466706162e-05,3.926183863846875732e-03,7.078676018361737370e-03,2.506185259637628854e-03,-1.048989451194993171e-02,-5.328822570939046894e-04,1.714088327371438665e-03,4.276147882214156003e-03,-1.247949698536882568e-03,4.195091810337157369e-03,6.456023350565933024e-03,-7.512763280129123450e-03,-2.825617310413332077e-03,-1.337461410144783879e-03,8.372383947477527594e-03,4.444593172298242452e-04,3.590711023204112146e-03,1.812816942880215174e-03,1.592037960241216867e-04,-3.479937530878193189e-03,4.870694448776651322e-03,6.645500435716149290e-03,4.388431555033265805e-03,1.589584487522936523e-03,-4.187298995307366795e-03,1.571757847363063195e-03,-3.240118668186839357e-03,-4.264518447624625862e-03,-3.744868106833619363e-03,4.700390352819549221e-03,3.786561007469709233e-03,3.263615226029081828e-03,-1.523372135285207566e-03,-3.941140689764188569e-04,-3.659055522988892244e-03,-9.518162442579051682e-03,-9.215672857141005798e-04,3.207774445203725924e-03,9.016695295714033040e-03,4.094844597909268365e-03,5.179418750742472398e-03,3.881434618622757200e-03,-6.776539983171039200e-03,3.391125001574784507e-05,-1.546968236598329598e-03,-8.700110154630916193e-03,-1.533401030196331755e-03,-5.526207451818058655e-03,-2.3008233
15227465676e-03,-3.764812870697833904e-03,-7.431196381648123850e-03,2.461146649833094013e-03,-1.214230740243234285e-03,-8.702871159939168074e-04,1.379470296275387082e-03,3.811812105358685244e-03,5.562516397570675154e-03,-8.207371009252459023e-04,-8.903784844940160247e-04,2.655762287709864276e-04,-1.512279776395116871e-03,4.949343518900394152e-03,5.282382107239242189e-03,-1.939666378626499774e-03,1.703470153738187443e-03,2.273933715703305332e-03,3.361300341581474702e-03,7.990474133887925934e-04,-1.162393383390301774e-03,1.331005949381099248e-03,1.236187671687686077e-03,-9.132309361183896289e-04,3.148103266761397880e-03,-7.881477480197577896e-04,5.063640335716820123e-03,-2.694673617289107277e-03,7.611769164770272079e-03,-2.374739299720874698e-03,-6.775859427752307929e-03,6.806624454483942502e-03,6.940933694290558897e-03,2.753902916517090866e-03,2.996026399420149890e-03,2.463654322413686478e-03,-3.827598098561153144e-04,2.529693542294784558e-03,-1.581119933200585354e-03,1.610148233938562363e-03,-4.872846295118189557e-03,1.655863469459443148e-03,-4.107133421118405563e-03,-2.122112381929784890e-03,-1.124317661267073199e-05,3.412496389475409495e-03,1.696373364747482152e-03,-3.312110857226076201e-03,-1.412368408086501765e-04,-2.173095115115331218e-03,-4.606599692183384353e-04,6.777609691364149067e-04,1.858335895780394761e-03,-1.065934609500252504e-02,-4.167396106740807460e-03,1.142818025840693086e-02,-1.033709209191312206e-02,8.406729416228149107e-03,-7.292857646384753693e-03,-2.082820196610509975e-03,-3.423096919393060419e-03,-4.203924759076755821e-03,7.381808550697610896e-03,9.157902668873524809e-04,2.362540871718054290e-03,-4.505119305690992622e-03,5.725328662126711113e-03,1.191864014000003575e-03,-1.986151118674686633e-03,1.651951345605396975e-03,-4.189209767575335051e-03,4.491065228781865662e-03,-4.836690288549962830e-03,1.217418696760569168e-04,-1.390829166549059593e-03,-1.448967518627165526e-03,1.221990083087960005e-02,-7.860873436824112898e-04,2.873446669021194983e-04,1.719415392873846699e-03,-9.896531616730255485e-03,3.276110684111604651e-03,1.259235540130719923e-03,6.622119104967282358e-03,-5.002204709333836138e-03,-8.745696776480269691e-03,2.627528298480137593e-04,5.459436389067591098e-04,2.660843828482587631e-03,7.014505870991549590e-03,-1.073664112361458678e-03,1.544011317126142918e-03,-2.878310901680114497e-03,8.431724372610607983e-04,-7.662003932291025325e-03,-1.074183874742851976e-03,4.040667358244549867e-03,2.593871893985456173e-03,-2.330131470986337647e-03,3.324078877794579053e-03,-9.445814896658090085e-03,-1.275990564009168212e-03,-3.339163292610560375e-03,5.767881724255807981e-03,-6.623204322749241119e-03,-2.942077546763138970e-04,-8.747360544331799284e-03,3.774132390130907101e-03,-3.519208103844363462e-03,3.038929316979604566e-03,5.067094662017831465e-03,6.955385725789793409e-03,-7.802394151833018637e-05,-7.088477226218021891e-03,4.786710318422125916e-03,8.548116446253924789e-04,-1.540465917683057125e-03,-4.741006451925978927e-04,1.501693661470569199e-03,-4.515979920236565595e-03,-3.256776400639913461e-03,-1.670067239182317304e-03,-1.120708813038957330e-03,-6.081262674332340440e-03,7.200846665044089573e-03,-1.233225344638366576e-03,3.028865159879233493e-03,-5.951193942085850758e-03,-4.219409737654280894e-03,1.279400421662649223e-02,4.308013803784537486e-03,-4.949525091807272317e-05,2.376306429151853281e-03,-4.008693171363107970e-04,-2.905350053100442132e-03,3.990804968298431430e-05,1.094933208753898733e-02,2.112096587254692730e-03,5.689648788612403953e-03,-8.758412575855642826
e-03,5.831107191383002632e-03,-5.400279533965574030e-03,8.029297571991400552e-04,6.419127699734321433e-04,2.790941420389862586e-03,-1.130722968030839666e-02,-7.031715175007132064e-03,7.944745801226471907e-03,-5.504848432348549009e-03,-5.899269210072649768e-03,-3.119602108528911084e-03,2.608586373924672453e-04,1.663250004469876957e-03,-4.756346920952789561e-03,3.226231311356277109e-03,1.937400600820673524e-03,-9.109478794886700578e-03,2.095024482679886529e-03,1.290850887663560998e-03,4.061343435066668700e-03,-7.160669066608903871e-03,1.135927555167650684e-02,4.021701943378362329e-03,-7.112449857239737383e-03,-3.090432847328301435e-04,2.433917695362392518e-03,-1.557505340452804490e-02,-3.608117899000114464e-03,9.053211560654961937e-03,6.900534247170598059e-03,-5.471906219969227200e-03,3.553622497339794985e-03,-3.303094982107697593e-03,-4.406269616657777777e-03,4.147052776364142275e-03,-2.737072777662372548e-03,1.559444181438095775e-03,4.452958790992112871e-03,-1.523297748234728579e-03,8.211205110501931456e-03,1.115273438106299678e-03,-1.225870816853203132e-03,3.025506879848151223e-03,-4.404519703821883556e-03,-3.339932538961392760e-03,2.031386157585616907e-03,-5.224231993890804913e-03,4.261530191244847568e-03,-1.800076244821797447e-03,8.599453633136114675e-03,1.512804435109870848e-03,1.877904825608038325e-04,1.246058087585450369e-03,-4.285236042246450688e-03,3.753058693325888109e-03,-9.113047252787472366e-03,-3.403338373551473717e-03,-2.153859860199980845e-03,-1.373892994336845842e-02,3.620353270896086578e-03,-5.165136587014702331e-03,-3.019515594038617301e-03,2.433725660109108569e-04,-5.404636028068204867e-03,-3.920273091246353515e-03,-4.270326214521769775e-03,-8.760092380906402479e-03,-1.825713744801328109e-03,4.743570273928755451e-03,-1.477551520792445318e-03,5.137620866854635676e-03,2.668340932875492514e-03,-1.749784011287130406e-03,9.736429204079525852e-04,-2.560656069575575032e-03,-1.528593572185824295e-03,-4.760262002324115206e-03,-4.106697075625684833e-03,-7.972427859289768493e-03,-4.498844801324662981e-03,-1.462084226391721772e-03,-1.954339427457131573e-03,6.376069031050342036e-05,-8.155470494583443378e-04,-9.628566695187719751e-03,4.776399000524575791e-03,3.434962317273109866e-03,7.552620999288711147e-03,-4.607736442461277983e-03,1.820206380422236241e-03,5.153646759399436138e-03,-4.507073573991529115e-03,1.112686018354766375e-02,7.417860016370959354e-03,-1.304089344464087492e-03,5.198013029752251415e-06,-4.037648345328447713e-03,-7.150905839024068039e-03,-2.311414131565516698e-04,-4.233904094695271729e-03,-3.376761978911731448e-04,-1.839521122825983970e-03,-3.661230702806588660e-03,5.259931979324257564e-04,-4.943883929436653120e-03,7.786562923744093714e-03,4.951575838021626645e-03,-2.204906837703461252e-03,2.211766418603328262e-04,-4.809321431661379466e-03,-5.240809877822317792e-04,-1.143625089698276261e-03,-6.156179005746487609e-04,1.351242762476533412e-03,-2.644218742614053846e-03,-3.357098988416152945e-03,-1.738915709747536082e-03,2.191854156325469575e-04,-5.799004764420285715e-04,4.593987023126507117e-04,-5.698351034828682410e-03 
2.216144854638955712e-03,-4.481131557207892200e-04,-1.737712826495824777e-04,6.064446399856298492e-03,4.459075746603556600e-03,2.663286303967780586e-03,-5.943684695823393338e-04,-1.672130281550396466e-03,-5.870576672407148804e-03,5.026310687650708924e-03,9.345762096675970911e-04,-3.179928834575708723e-03,-8.597273658677294025e-03,-4.563218518463159519e-03,1.837346524935510976e-03,2.634444632864763741e-03,9.456103568057928595e-04,6.522446275207207820e-04,8.246646273846715974e-03,-1.521333670189673002e-03,2.947581689551433647e-03,-1.963746510871231758e-03,4.859359666015671811e-04,6.485629334659688983e-04,3.213269915549093504e-03,1.209804230390318890e-03,-6.208535952440534696e-03,8.146365547760799422e-03,1.016877645443144997e-02,3.730602722166233027e-03,-2.834321660153165095e-03,1.921272850067316795e-03,-7.848322464974700693e-04,1.361585840426588574e-03,6.417884494962087160e-05,1.264244294351980248e-02,2.726277964261626316e-03,5.925756272884085026e-03,-1.220400712387654032e-03,5.274970977773608960e-03,-2.582649924123707800e-04,6.840110919805035029e-03,3.716322009530224901e-03,-6.742930498013149918e-06,1.183123107046699611e-04,-3.744690215031752992e-03,1.749054794622512642e-03,1.291608183115257845e-03,-5.887726723835712099e-03,-5.555978353447608466e-03,3.588590831817091778e-03,4.475623327124388963e-03,-2.284201322353721000e-03,-8.673481805084789012e-03,1.346788811084969175e-03,4.886777868412432377e-03,-5.671445684681885408e-04,-2.941828831610188329e-03,-1.008505798178458736e-02,5.344857245519583563e-03,3.851234095458126901e-03,8.982302797289011889e-03,-8.003981896673654020e-03,1.699498711675752425e-03,-1.597921540386993159e-03,-1.604420440838265913e-03,-5.690550701226751960e-03,-2.374125996291228270e-03,1.146633156291490367e-03,-4.109802113930017403e-03,-9.171594600489621496e-03,8.722609431479818112e-03,-8.906784960077959029e-03,2.615400778753495647e-03,-6.280172866513358126e-03,5.826774416742221303e-03,3.997850522833933107e-03,2.260382004728073851e-03,-3.142182491941987454e-03,3.786612456803196362e-03,-1.203557966202704714e-02,-4.515292882976500687e-03,3.289857749293748773e-03,6.369603432978261474e-03,-1.066111436374602567e-02,1.384084845527778053e-03,1.302394931494295881e-03,-4.708359088377402145e-05,2.036193364238319571e-03,3.093883693727673793e-03,3.253955635012424370e-03,-2.777507518935616506e-03,-8.357310327140954689e-03,2.471298477751659442e-04,2.312934629950729598e-03,-5.494224509585139520e-03,-3.389871153084795738e-03,1.822671344837539266e-03,-5.118241991138484590e-03,-1.750645807669762993e-03,5.215608147227926948e-03,-2.555166785838687733e-03,-5.738575698817713865e-03,-8.609352629414715463e-03,-1.188488897540004906e-03,3.636535817924178810e-05,6.695726417671151939e-03,-5.546946256246631200e-03,-1.007780028103972929e-02,-1.778194454081413181e-03,1.648841347430613207e-03,6.642466681307266328e-03,1.281978865774585728e-03,1.761314313045974711e-03,7.288438852223353900e-05,1.471682866488523709e-04,8.660550472591007493e-03,-4.108986470887547073e-03,-3.878270759569308994e-03,4.124458697591730158e-03,-9.025079346055897567e-03,3.609410409988825837e-03,-5.365353419615376089e-03,-4.240299038986007456e-03,1.686130907904277677e-03,-4.626205070450648341e-03,3.968300527786890707e-03,7.332582861526080976e-04,-6.114810201164188209e-04,-1.683922939245113565e-03,-6.665433345451894642e-03,3.687173893083766849e-03,-1.857419394538537601e-03,-2.746646525384901973e-03,8.243004483966536074e-04,-1.807469285880007083e-03,-3.009569935485668663e-03,-4.463996042028837465e-03,1.542598300508733582e-02,7.5877567376475
75805e-04,-5.666763718250259624e-03,3.664739854429651788e-03,5.363740563770007762e-03,1.188718850824353420e-04,-9.804737692685220815e-03,-2.461649659562179827e-03,-8.660332423003738777e-04,-2.572122442041384002e-03,1.907837549944046800e-03,-5.541943120593881798e-03,-7.647002282761913676e-03,1.567391966808178230e-03,-4.856456199531017023e-03,6.376690821212970635e-04,-3.596504477913188775e-04,-1.771656912776256631e-03,8.359557030005125219e-03,-4.221684634334763389e-03,-2.240270990216443607e-04,-9.696087638390628056e-04,-7.517191071022476419e-03,-3.266743488864316119e-03,6.717677850562293364e-03,-2.057412963359024280e-03,5.607122725674780657e-03,3.611269703115847080e-03,-5.372820505947669550e-03,-5.155475791877468621e-03,8.706869401215806131e-03,-5.071850492302846201e-03,5.759502864752795090e-03,-2.070459254909363771e-04,-8.412737131723386965e-04,6.740427002481634046e-03,-7.608445563116596026e-03,-9.401217463501735164e-03,2.954684646604466974e-03,-3.018780252230326890e-03,3.069972034656071654e-03,-2.657969287610500563e-03,2.571160775951555032e-03,-1.670926647175792435e-03,6.658456254353831497e-03,2.870258416155265727e-03,-3.998199921218976993e-03,-1.133928466826303501e-02,4.739828675574726986e-03,-3.499970332663800403e-03,-8.861064688887129726e-03,-7.962623280094068001e-04,2.942638552468796430e-03,1.500660315251948042e-03,-4.709753325206199326e-03,-1.700196714232918857e-03,4.496129620972438910e-04,-3.349744390677323485e-04,9.763547557186126255e-03,1.152645619277762289e-02,-7.979028970736679396e-03,5.640285264187633665e-03,-1.035218273986723662e-02,2.351266709796255051e-03,1.253319169587074032e-02,-6.879184469768786364e-03,-1.783816396539106210e-04,1.255025563004050931e-04,-2.449709026846243979e-03,-3.080868763546674733e-03,-6.519151413987255006e-04,-1.023643658631971307e-02,1.046525629701010145e-02,1.046387152429529904e-03,-9.278171206088470929e-03,-1.226645829606522534e-02,-1.655454205553911345e-03,-6.661653146398775066e-03,-2.244830666012641047e-03,-1.023725211414830069e-02,-8.083686803453966202e-03,-4.009335975722443102e-04,-5.036073513396914676e-03,1.454643320144885900e-03,4.609262098443487774e-04,-6.454185531662885028e-03,1.770379313378262452e-03,-1.576168642871609432e-03,-4.707780013140690743e-03,3.428512321111814535e-03,4.553786466720377395e-03,8.152244936029175723e-04,8.395907248514229872e-06,-3.466781766863540819e-04,-1.923789422956635178e-03,-5.873799266570674249e-03,-6.279633257934478206e-03,-7.524382695048132362e-03,7.537130056766567533e-03,-8.175737926460703488e-04,-1.001142563729455915e-03,1.380038215625005796e-04,3.301637942910047845e-04,1.884499824912817508e-03,-4.113676599520495886e-03,1.099301195917044838e-02,-5.625461872449829398e-03,-7.068024529165969731e-03,1.674938489254590782e-03,4.776743362699852752e-03,7.338722430576546420e-03,3.136689385020648407e-03,2.578204218170180011e-03,-2.486782117060731649e-03,-4.351012316318477943e-03,-4.030201295905759822e-04,8.933949068295109208e-03,3.031689102051914400e-03,3.690300136319675799e-04,3.983109436949530680e-03,5.471613942962742783e-03,4.983573987223864979e-03,-2.728702648131642924e-03,3.791687614055131578e-03,2.436745860612509352e-03,6.022447131075303606e-03,1.600619741618338615e-02,5.333775391859644360e-03,3.448509766582790077e-03,2.753402932262750350e-03,-2.423748100386356220e-03,5.400429695576092513e-05,5.239041465055987263e-04,-9.911938690362755155e-03,6.296560745353592522e-04,-7.013877255787889092e-03,-3.204331335547381480e-03,-1.878199988319984330e-03,4.585744556928375917e-03,1.355595783401368970e-03,2.782471397742777418e-
03,9.507703944062027498e-03,4.715359732931738435e-03,4.969171841798120451e-03,-9.872515184146555753e-03,1.764376317197424802e-03,2.595506984971101237e-03,-6.184553277365244239e-03,-7.444830928889512554e-03,1.049949504729848302e-03,5.491175868878522999e-03,-9.668639860480504264e-03,-1.461365292906626531e-03,-9.610934201944280539e-03,-5.020204957251359025e-04,8.910724311310898359e-04,-1.717272776471010258e-03,3.256956725499980925e-03,-5.983477169147994289e-03,-1.127383375386435373e-04,-5.676603587719825615e-03,3.379538370492391180e-03,1.144486010971880810e-02,5.002349244830109863e-03,3.177197553040588245e-03,-9.737804741669689717e-04,2.055248355382018109e-04,2.755542906898083688e-03,2.425916666968226925e-03,-1.820649767248196972e-03,-9.042095754466160330e-04,1.347544217856764745e-03,-5.794271126514241935e-03,-5.987661758553640499e-04,-6.692540870851050420e-03,-5.791482972063807626e-03,1.176739481734314762e-03,-3.148484478680012300e-03,-1.800608246336535788e-03,-2.979968808988645110e-03,-9.497360756075705346e-03,5.547764110741777895e-03,1.519115096107239707e-04,-4.858184531902266790e-03,-6.038657326212448773e-03,7.305941763150436766e-03,4.042956118054000063e-03,4.760978020425426886e-03,-3.163264113752991234e-03,1.549746660077754024e-03,-1.775883883299508859e-03,-7.383720621134624504e-03,-3.393694967699809658e-03,1.021482048806167560e-02,1.207833789868422141e-03,-7.281695674314163889e-03,5.191215353956966927e-03,7.253733192269449549e-03,7.970115975545141035e-03,8.797170368618788261e-03,1.358817909773313119e-03,-4.517440753219710732e-03,-3.773638020386511655e-03,-3.847816075169970939e-03,-1.481314274792109708e-03,-5.487915388483310952e-03,1.148177714993293746e-03,-3.154444759881274828e-03,1.014590158474525930e-03,4.323182189291734263e-03,1.388826415940634489e-03,-8.345936437484528835e-04,1.472167303292928452e-03,1.966726014944833207e-04,-3.269790798148340671e-03,5.700953846739921158e-03,-4.032343640699618224e-04,-6.638754564627247050e-03,-1.300442421397739797e-03,2.929377858315395690e-03,4.710244724097901146e-03,-3.060608605563660666e-03,-1.379666549691986903e-03,-1.178107355588190255e-03,4.464215926270761158e-03,2.303474816440737299e-03,4.106237799166970008e-03,-1.608016374360861020e-02,-9.523270580111979633e-03,-2.062454508985967112e-03,-3.796285749739869084e-03,8.800632193291356084e-03,-3.470353301317692825e-03,8.546557490928606560e-03,4.334368807600652920e-03,-7.195066256031626417e-03,-6.366883428591969471e-03,2.829500141336726161e-03,-1.352753663993330746e-03,-3.586726705512916467e-03,3.662593107774351955e-03,2.253875284186132762e-03,3.089188666200884045e-03,1.779665626183117230e-03,3.410232768196903245e-03,-2.794565129799606116e-03,1.204958102303082244e-04,3.045078993419731133e-03,3.691331584191615174e-03,8.575282465917393035e-04,5.535129103525965322e-03,-6.964783538611715339e-04,-1.020217694892991238e-02,9.064276202128507091e-04,-3.916814441486499501e-03,-4.159192683116924930e-03,-1.656953589411706417e-04,-1.352733291669827023e-03,1.466077338333945934e-02,3.094543307648660400e-04,1.894875568850589755e-03,2.276886541286822323e-03 
8.276694819971912656e-03,-3.230189867823457799e-04,-7.245778888102681267e-03,-7.050223705802033565e-03,8.538181080050012348e-03,-2.251265957880552344e-03,-4.316314746431257864e-03,9.244795870614735131e-04,1.666190790679594746e-02,-5.011487607613077707e-04,-6.556837107438366619e-03,2.744452896170865904e-03,-5.742812284064940757e-04,5.120538643558186775e-03,-5.319621343816658375e-03,5.246943606783786851e-03,-6.274503644154189909e-03,6.166511980546099999e-04,7.873958043079770782e-04,-1.019148943423653884e-02,-4.333814761454935741e-04,1.351424965351667685e-03,-1.454027485863274407e-03,4.562339003948045341e-03,4.239109281497444731e-03,9.560591765229008859e-03,-2.309456557736105497e-04,-1.664226120078508538e-03,-2.052956988917705415e-03,-1.429772749533539779e-03,-3.974012332927245525e-03,3.319396235673039309e-03,4.971055602752287583e-03,7.228782385212690308e-03,6.002081023550983468e-03,1.122417831904013938e-02,-1.275563428769365239e-03,-9.210824451308188571e-04,-4.854952760779928690e-03,1.078114130933422224e-04,4.921901265998607916e-03,1.869517431476528779e-03,-2.807854898905291600e-03,-2.291672404947975566e-03,2.259303179387719355e-03,-4.331055786237932720e-03,2.030493851695200878e-03,1.229369030084343992e-03,5.088323141570854496e-03,1.496365661882178774e-03,9.661268979319749626e-04,6.129423761165347928e-03,-5.369331329747877198e-03,3.220533056406640893e-03,2.127063094474337425e-03,-8.422493859518799314e-04,1.940822422970580902e-03,6.628301964228297538e-03,-2.222277557356925736e-03,-8.806160030769140216e-04,-9.912109039023446477e-03,3.481997924686494172e-03,1.908378786031932105e-03,5.841049409895030095e-04,7.147157037654090252e-03,-2.371129918013004621e-03,-2.886592512930155705e-03,2.862968334161070799e-03,2.162582413343246048e-03,2.969981133948263515e-03,-2.228668614166615190e-03,-1.993502621086108619e-03,-1.323944169085938469e-03,-1.657471802414127535e-03,7.193778043692730488e-03,-3.086370749437037340e-03,9.027630007182616077e-04,-2.102701427299733146e-03,6.046710304583202689e-03,-3.975109025518407807e-03,6.339388539256615544e-03,-3.131391016064977227e-04,5.198965596640179807e-03,4.176066865719572398e-03,5.211306150463051789e-06,1.898666606075275713e-03,-1.113867255269898691e-03,5.902504799072854963e-05,-4.209145870308640333e-03,-6.024953468256476555e-03,2.561814560469939488e-03,2.472201149721146548e-04,2.111597579979244965e-03,3.193373954986904444e-03,1.249604975269721181e-03,7.536081614909027372e-03,-1.236544008361148373e-03,-4.448416604586051537e-03,3.975093967537639272e-03,3.614648493051105405e-03,-6.466920847249427000e-03,3.118948952280491300e-03,-2.736331677277917176e-03,3.953787097403964508e-03,8.464187539022357751e-03,9.572771247130436659e-03,-4.145644670666749111e-03,-4.817765466567956907e-03,7.470851195829159019e-03,2.753226258374552766e-03,3.505590870708213264e-03,-3.078083181430906661e-04,-7.778537492310201584e-03,-1.025809872447810620e-02,7.212123908912624389e-04,-5.137512206088933574e-04,-3.204061840198576421e-04,-4.237602480594054172e-03,-1.402950095519654283e-03,5.683008242775392481e-03,-3.247124587868771128e-03,-3.324548957271823962e-03,-6.712433795963074386e-03,1.288915464106654436e-03,-3.338650601987917278e-04,-8.677573991374892609e-04,1.024880980306437303e-03,3.405668096661571258e-03,7.190217103266857354e-03,-2.380670748824689417e-03,-2.663952023998822274e-04,-7.503379786652572200e-03,7.094912558748972531e-03,-9.313046853834964708e-04,7.984522820033930273e-04,4.280019029111310942e-03,-2.644170129575158703e-03,-8.681644060062888553e-04,9.081275407819996287e-03,-6.09415164384
8586333e-03,-4.985328005076701020e-03,-1.049294263106268985e-02,-4.545032431973755442e-03,7.553176175942574733e-03,-3.092233962204789591e-03,-4.540064798265945438e-03,4.589188628396676829e-03,2.356110553357993777e-03,-7.706592630359876232e-03,-4.612826733184956099e-04,-9.501143073288361579e-03,-8.207265532218370116e-03,4.382629209946604237e-03,6.130588585719016874e-03,-3.441612629499700773e-03,-4.619087078823302505e-04,4.649539614867061188e-03,-8.140203462970143494e-03,6.039454151787231745e-03,5.174889194327691316e-03,-1.671556651700904165e-04,-9.783184581598498852e-03,5.716873321387090905e-04,2.765330549777502908e-03,-2.010916925194040916e-03,-2.534655744053633183e-03,-8.943611355915937963e-03,-2.938078850719975585e-03,-2.299335516347163222e-04,8.689298997667996424e-04,1.305044109301420991e-03,8.579822296657805061e-03,-4.433001674170796227e-03,8.752897672081112052e-03,4.254987813542123430e-03,2.053709851512109871e-03,-5.623206758204714946e-05,-1.951738720745642376e-03,-2.020420852208035679e-03,8.267667137570986098e-04,-2.827505484132779995e-03,-2.052239329745743270e-03,-2.101718246434491118e-04,7.875924341400317163e-03,-1.180194350006557279e-02,3.290901199849261428e-03,-2.440999954756205910e-04,2.828341453394631034e-04,4.162308512475696408e-03,2.641102752531057207e-03,-3.450969319971345013e-04,-6.605134750863625968e-03,-3.806134055236169451e-03,9.659717758842037774e-03,1.536398661296610804e-03,1.256751822129466897e-03,-5.272478425119584214e-04,-1.197497204985510747e-04,1.282108638129614590e-03,-1.183395147532167535e-03,1.169601869330167028e-03,-3.164527606133803184e-03,-2.406871023194482023e-03,-1.211706285818738580e-03,-3.368330842167618791e-05,-5.681450612179441036e-03,5.198260832874105307e-03,3.887450005125553387e-03,5.469190155028125395e-03,2.090149985248434834e-03,3.325149789362744340e-03,-2.251417680391649620e-03,5.381518058367926877e-03,-1.475210234220678245e-03,-2.588064069794547908e-03,-8.435202098799244111e-03,-2.353891822913196236e-03,2.514564472034004497e-04,2.545946180444467414e-03,3.670057077317683187e-04,2.504957050996384454e-04,-9.648991544703182724e-03,4.638383659243115222e-03,-5.929933859563159444e-03,3.560163177004244681e-04,7.209251152193550801e-04,-6.622779122240731071e-03,4.067233252978459538e-04,-6.590303681568589374e-03,-3.234126417277430419e-03,-2.233824002634877699e-03,9.498940696829484944e-04,3.264827079068196095e-03,-1.314086949761310297e-03,-4.320731175301221853e-03,-1.177540748594061663e-03,-7.801785328059573747e-03,-7.327605987544733544e-04,-6.761155073638415273e-03,3.815598951131364980e-03,-5.959164655897306387e-03,9.968476833247007837e-04,-4.882400715487317244e-03,4.847080858153581320e-03,-1.687075345868769075e-03,-4.260891008572526527e-03,3.436712204621026039e-03,-1.036083061553712617e-02,-4.181341498729248748e-03,-1.170462140745186230e-02,-1.052737686290415543e-03,1.211231194879033380e-03,6.340858283992802796e-03,4.180682040030064134e-03,-4.098102872114517598e-04,1.302563777251997427e-03,-2.889821237803473410e-03,-3.197801386363240562e-03,-4.139382135636793941e-03,-8.507516165742981529e-03,-2.993965296327972484e-03,1.813570300191903696e-03,-4.195639532622610218e-03,2.900197401185811326e-03,3.475459826990124571e-03,1.059005767418805775e-02,5.507024882384211825e-03,3.973920948830599736e-03,-6.678170156769338596e-03,1.181404679188575307e-02,1.867823010265950209e-03,-1.076831076054542147e-02,1.445684936623126637e-04,-6.060330411835253023e-04,4.146940816971138311e-03,-7.353677413679937831e-03,6.169156727272163791e-03,-3.328227451178085786e-03,9.277396698487662
061e-04,3.513322638445846250e-03,-2.736555197393566660e-03,2.363183881999015960e-03,1.185489676131729888e-03,5.309868279311861540e-03,-3.904429744419496157e-03,-3.298704540833511615e-03,3.046618821529880217e-03,-5.406909922746664342e-04,1.315125872823054388e-04,-3.711613279863660349e-03,-1.037887856723651581e-02,-1.902576506846308637e-03,-8.974853585116531482e-04,-7.784053888183198842e-03,-3.965029440726738214e-03,-7.694310951215752210e-04,3.803972839881823893e-03,-2.007629785199849942e-03,-3.223740527156819177e-03,1.444060139455950742e-03,-6.131036659842205479e-03,-4.881990354758130105e-03,-2.176661531005421799e-03,6.843028083240357664e-04,8.205755579037563030e-03,-4.616781922068330900e-03,9.293387420780572020e-03,-4.038531589749845031e-03,-1.110094457808503618e-03,7.632102287895604908e-03,4.163374715999236531e-03,-6.273445499944606436e-03,-9.248181237745787750e-03,-7.737077804589561833e-03,-5.707516912892407653e-03,1.159967026867835026e-03,1.557253434521796561e-03,-1.185453434077356032e-03,-4.174862956515032648e-03,-5.567999184395554940e-03,5.131103745823781780e-03,7.578197840959123577e-03,-2.331758544874496452e-04,-4.595561518777364560e-03,7.266784512380563416e-03,8.158755721342476159e-04,1.785503905039395990e-03,-5.201443355846851156e-03,1.067524377933999668e-03,3.884461942923603858e-04,-3.464292591684129535e-03,-4.238962203972915579e-03,5.193817609310166436e-03,2.552886165097483732e-03,1.564026352617892178e-03,2.787166947223175106e-03,5.432627857413055254e-03,4.771946171324251444e-04,4.761225382804838181e-03,8.990478432815060799e-04,5.222079619383289580e-03,-1.435712500889596956e-03,-5.828873089172049993e-03,-1.043080941333064844e-02,-7.438965969501960991e-03,-1.066939790130909047e-02,-4.151398999319566162e-04,-1.602317412724960349e-03,1.014121683000823032e-02,1.989453581921045715e-04,-1.753098305574380495e-03,-6.290264612808646981e-03,8.506920757671011498e-04,-5.620552764365728711e-03,-2.692422075346950988e-04,-3.471811374724755560e-03,2.836074959407271570e-03,-1.041610464051041622e-03,-1.745207441867779050e-03,-5.592980715021310389e-03,8.838148759932268686e-03,-2.986487999347211663e-03,-6.598204115493922026e-04,-4.471643362212167350e-03,3.568078753659935810e-03,-5.705600847711305573e-03,2.104831272145530978e-04,8.191355102826185924e-04,-1.360078515073677094e-03,-2.353484152597866939e-03,4.058523259493020032e-04,-1.589705660125756426e-03,5.569787215793689863e-03,-5.179478896084153440e-03,-1.727057967628032329e-03,-1.800603220851479478e-03,-4.848838234130153045e-03,-4.263552959193393430e-03,-3.062439266977570393e-03,-1.037101366618912134e-02,-3.481717882072362692e-03,3.185685778447757809e-03,-2.666259342396636901e-03,-8.006323632658598176e-05,-8.663393694279595755e-03,-1.020477156230239541e-04,3.894324150242784149e-03,3.269544621185694992e-03,3.352491420641865068e-03,-2.208884838736251827e-03,-1.138016841360614366e-03,3.572135051436418104e-04,5.318399540900404891e-03,1.074200431471878823e-03,1.056730387471111485e-03,-3.350470319782622615e-03,-1.284629853684717167e-02,-2.958480304673622580e-03,2.276081469947711499e-03,-8.860099998941274504e-04 
6.167402176793242803e-04,4.785816015879963280e-04,-6.088689442188617747e-03,4.532536114867461233e-03,1.952887272088439392e-03,-4.843431779386401376e-03,-5.460285278786581112e-03,-3.064163547101687694e-03,-9.072732336787473579e-04,-3.909830443588663620e-03,9.240411150803146470e-04,3.077662219947738514e-03,-8.772341100571054429e-03,2.298223644075522362e-03,-2.725165509966504668e-03,4.013013378028533126e-03,-5.796665333429208179e-03,-1.687864050919670031e-03,3.583104236176775335e-03,-1.051586026692201241e-02,4.465587208138746718e-03,-6.819269851366466240e-03,-9.041683167443576377e-03,1.884439015528023543e-04,4.890695173132545218e-03,-5.826428845266917907e-03,-4.213897777033966119e-03,6.781459952770080594e-04,-7.018087749998307492e-03,-6.377118058047716136e-03,6.517680530470240305e-03,-9.516071412591533933e-04,-1.049598088079394323e-02,-1.787286323970117569e-03,-1.606097421891929502e-03,-6.694156152788240111e-03,-6.200928983518209231e-03,-2.188410695045150782e-03,2.225712619506089139e-03,-3.915767630274048783e-03,3.128640072947770460e-03,-1.316321420591466890e-03,3.759734372575276873e-03,7.435144480195227633e-04,6.896212995296207085e-03,-1.190123939687150753e-03,2.760924669340841103e-03,9.384159204057484008e-03,-6.594927755783796218e-03,2.639528637236397256e-03,-7.948785427172000775e-03,-4.551517574957831603e-04,1.800056142415223893e-03,1.077723125748149356e-03,9.702163100855299566e-03,4.422812114468897826e-03,9.261669521736211677e-03,-1.713066609847606944e-03,1.841959115943316580e-03,-1.292825963272187413e-02,-2.542660271492536758e-03,2.479350660475266883e-03,4.543579754315628903e-03,1.075117404192317029e-02,2.101380409484069831e-03,4.573375334659681118e-04,-1.908890195702739280e-03,-1.605101435744379218e-03,2.286064784821289532e-03,-8.098663714413277537e-03,-1.259122734068375896e-02,-1.205893739990157457e-03,1.712894030005128986e-03,2.532209053242437911e-03,4.860390472244019086e-03,-3.420157529925376642e-03,1.828175042720728239e-04,-7.956817259491796900e-04,-2.444429575123150300e-04,-4.424363025810007510e-03,5.390328247948369088e-03,8.227080247696704624e-03,4.049246806578744168e-03,-8.993458106490775580e-03,2.023487656519888829e-03,6.362806820747516322e-04,-5.655300573312616605e-04,9.268526295539352561e-03,1.906591013387016554e-03,-6.467164160536215470e-03,-1.234079704013576779e-04,-4.915193042400921308e-03,-8.011107897689134782e-03,7.166975121517707263e-04,-7.728641598418755157e-03,6.875347970513551003e-03,1.040937811669105997e-02,-1.515791621148061645e-04,6.821933031909956091e-03,-5.427889194931452034e-03,-2.039854824158781818e-03,3.628274650780812828e-03,2.407496299683251594e-03,-1.999367568956593802e-03,-4.673722988602145405e-03,1.326722527134080757e-03,-9.863681319060223565e-03,3.675159480720209082e-04,1.460064817620447368e-02,-4.571618297725839157e-03,-5.782616114284623403e-03,-1.496602153577591271e-03,1.728509023153327164e-03,-3.367590230497949019e-03,-1.873105501450872705e-03,-5.598568007506779867e-03,2.577096948299769513e-03,1.473205602116234910e-03,4.500449547281027904e-03,3.499628299888443358e-03,3.959813356964927956e-04,-2.278265013939342973e-03,2.902907166077354269e-03,7.034369681028689557e-03,5.467026137785477736e-04,-3.616118524102167678e-03,1.433296948168745260e-03,1.171693349838690262e-03,1.068755404552378802e-02,-8.231513725179637891e-03,4.009517284229913679e-03,3.767571029581911080e-03,-7.585299426036005294e-03,1.409252163097089380e-03,8.736748418178046571e-04,-7.700839908610576656e-03,2.944421514691944464e-03,-3.781207674437162772e-03,-9.747346428023245801e-03,4.1178298151
75690772e-03,2.333950968869044889e-03,3.911558388113693836e-03,2.533904401346179832e-03,-7.831988134559072015e-03,1.157222945181262536e-03,-1.693731455307488729e-03,4.844924166817152775e-04,6.425196188228747898e-03,-7.779151943981356453e-05,1.906119820535049150e-03,-9.414757134984153356e-03,2.331221131589119450e-03,-1.830689330256341082e-03,-5.454470718082206002e-03,-7.333329981296550504e-03,-9.289721619221529003e-03,-5.806128463363738031e-04,-4.436727813994150414e-03,5.242343912097855579e-03,1.809401713714151125e-03,-9.078100395557137797e-03,5.116858179365638831e-03,-1.434461380455591666e-03,2.415623038802810780e-03,-4.185100052721811049e-04,-7.966624110573800391e-04,1.540705110585398503e-02,9.867856421563584758e-04,9.026247027997683428e-03,-1.349209172351273182e-02,1.384598699022829543e-03,-8.784545066254848336e-03,1.002488021737228154e-02,-2.148762827408063746e-03,-2.901720965574808471e-03,-2.088890501409022306e-03,-2.263182375399824808e-03,-1.582855805389690035e-02,3.537320668320008323e-03,1.004561901314356305e-02,-5.098873016415292163e-03,-7.549541370206319484e-03,5.731553825717965048e-04,-6.231303967407837534e-03,-7.130756335612600458e-03,4.897887019261965361e-03,-3.569344444787468261e-04,-5.831199442309071114e-04,1.063611322366378063e-03,8.015016911863445670e-03,-2.416325285391005082e-03,1.532161142307309799e-02,-3.519601992642306700e-03,2.200326024385362497e-03,3.128243443722581525e-04,6.126573526241118998e-03,-2.251683282341296593e-03,-2.490902948340040311e-03,-4.816886635057020249e-03,1.727114993324281632e-03,4.259413417976572712e-03,1.572143247738323150e-03,-7.418751259617284606e-03,1.000238149202515605e-04,4.259073762669089713e-03,-4.779014235313760496e-03,-6.430315198056229219e-03,3.172996146595893387e-03,-2.368845678411033578e-03,1.678251574778143879e-03,-4.610536723280333343e-03,-1.619934410885769159e-03,-2.585024999457617365e-03,-4.979974124996702084e-03,9.713382294362537389e-03,9.815392247305000303e-03,-4.230563312166119895e-03,1.574288321145838297e-03,1.038129998735676414e-02,-1.147778070227495166e-02,-2.786712510920330566e-03,4.794090944352804272e-03,9.287523969632642193e-03,2.169631028333146149e-03,7.149743062012643355e-03,8.302951898129721023e-03,-3.763907325606154382e-03,-9.450801606765605418e-04,1.948771952607981613e-03,-1.365500151385076473e-04,5.822282908283688517e-03,-4.521255465789780943e-04,-5.587106475603849376e-03,-3.926670726380995444e-03,-6.410299093939400908e-03,-2.797848212033412010e-03,2.833463939025679303e-03,3.675868101029244536e-03,3.497839367914870861e-03,-3.128775049881637863e-04,6.581616519699072174e-03,-1.231917114389082836e-02,6.758285528411091085e-03,2.582555907744878664e-03,2.896125506012294393e-03,-9.910564540185974255e-03,6.018201674952742067e-03,-1.380698065049017686e-03,3.027085846475944132e-03,2.983957428226282314e-03,4.901800352753492643e-03,5.559128292451332234e-03,-4.277283391366109438e-03,-4.034068875975202126e-03,-2.704360285875853642e-03,-9.091934327794008383e-03,-3.088043316120469162e-03,1.844057403903326658e-03,-1.283571553685684818e-02,1.799993327137060445e-03,2.287669905177843691e-03,-3.810425074654451554e-04,-5.817481209129160384e-03,-2.338114323121288050e-03,3.771325761585375110e-06,7.367672657224130610e-03,1.484223913154615951e-03,1.061058127806221692e-03,-3.286505549669752752e-03,1.732823236767272760e-03,-5.338603080330461825e-03,1.305319828082412851e-04,-4.523729849200714727e-03,-2.197709846935885520e-03,5.306229882603588285e-03,-3.219038131476370610e-03,3.132859562128819178e-03,-4.261089530531492024e-03,9.545914591385401982e-
04,-3.135720379501596967e-04,-6.077467506386956264e-03,9.993747428570752545e-03,5.677062828603651919e-03,-1.754081250964228999e-03,6.312272955963565968e-03,-4.558712022702179033e-03,-1.077954370275303712e-02,5.438754835430719033e-03,-1.693191716754228972e-04,3.378494369099006248e-03,4.194595864505659515e-03,-7.282342537887556569e-04,3.149486904894125295e-03,3.271785655738120198e-03,1.600448942895069723e-03,-2.747362710545577826e-03,-1.322815874622412791e-02,-5.095157661519078969e-05,-1.015847959379806258e-02,-9.479277060365218829e-04,3.476945926084197558e-03,2.181720452540470648e-03,-2.019292980748143944e-03,2.693630865289976199e-03,-1.866016520482800378e-03,5.489523737439734987e-04,8.422549803621139346e-04,2.049379325930934630e-03,3.745470381120373851e-03,-2.546670536579220302e-03,1.327591126238288196e-03,9.237362613538647668e-03,-5.053636767112954994e-03,-4.143495679733596310e-03,-4.596191133622228205e-03,-2.962709022297133760e-03,4.343349682261459822e-03,1.176178869442542213e-02,-2.896742240171643317e-03,1.523488989184806691e-03,1.136627707179999102e-03,-4.445772678692982635e-03,-8.411278645045174723e-03,4.590871448274009767e-03,-5.235800085138032564e-03,-4.343937246219010430e-03,3.589343851647173404e-03,-3.087036125041052714e-05,-5.757978809291229051e-03,4.146803793929182066e-03,9.072903300835218716e-04,-1.339503683233653165e-03,8.404750639736717488e-03,3.503339781163452779e-03,4.711332852395691724e-03,-1.236554138546555083e-02,4.445836943334805121e-03,-1.565297077208587830e-03,1.257842909820801715e-03,-3.165059710345133424e-03,2.659436779495149249e-03,2.254616769784854435e-03,2.387150993205665490e-03,4.889861753159118092e-03,1.742533791116519185e-03,-4.999850839883667949e-03,5.598882480159224351e-03,-7.232537516479245476e-04,4.296282795876862280e-05,-5.027272591993444013e-03,1.390477934629849414e-03,2.274841832896646720e-03,5.358430763029742652e-03,-1.381018532845587447e-03,-9.784813160113117216e-05,-7.764599908076785274e-03,-2.570012776551469820e-03,1.046423674842848241e-02,6.919738063868495709e-04,-6.173088344038122829e-03,-8.946448275499592831e-03,-2.222810319045350445e-03,5.085507165062868283e-03,4.270519560954795166e-03,1.361424521678261971e-03,-6.271425201864497083e-03,-5.774478669820058566e-04,-6.783542792242362035e-03,5.657093291709431107e-03,3.250518303728587403e-03,1.221807872318126905e-03,-6.256119466865417943e-03,-5.012843940147598994e-03,7.373673768506100630e-03,-5.591799362818707965e-03,2.357629772343623059e-03,-4.618642802601273696e-03,1.718539266894862378e-03,5.219298860447136972e-03,-2.860204922652879751e-03,5.880706827807488657e-03,5.701087792030277816e-04,5.425322298849804062e-03,4.947266683671274617e-03,-1.301149046577333279e-03,-1.330206416180740921e-03,4.925472859860700187e-03,-1.118545308834470670e-03,-1.151837143146693674e-03,-5.621812609998244961e-03,-1.395339935215495451e-02,-1.812430517510491565e-03,4.979050950335466193e-03,2.314813065461540089e-04,2.788044621980767610e-03,-5.401887669996781327e-03,-6.688892452549269211e-04,-4.079541717266789262e-03,5.304568751617646767e-04,-5.741037779593255452e-03 
9.370175588713275730e-03,4.317084662827069819e-03,-6.737783419213510053e-03,-5.366889648926374129e-03,-9.968721817292395052e-03,2.455744992128700097e-03,-6.176607095633698634e-03,-8.455289587649334523e-04,3.662542836972793666e-05,3.916785357737896048e-04,-1.457148948193675610e-03,3.801983031154146635e-03,6.536646513514188760e-03,-6.984347180408591402e-04,1.340118018638185565e-03,7.060697701085443499e-04,5.277284745805470474e-03,-1.145771421464189792e-03,-2.323538034856314442e-03,6.103245396152023709e-03,-1.701430353976439392e-03,1.292375322717860021e-02,-2.151119444630553244e-03,-6.319433750771648144e-03,1.008478799720216763e-02,-5.436971765820455306e-03,5.775502556372541204e-04,-4.677787551749094420e-03,3.081728308162281441e-03,-2.479640909285228245e-03,4.469112200521207189e-03,-2.586397294134719664e-04,-5.379613134394408032e-03,2.876396419452522009e-03,6.489396128950625058e-03,-8.916187205156491519e-03,-1.675835284135757108e-03,-7.264907673403462468e-03,9.403605054529638393e-03,-4.666161781598083397e-03,3.835390305358934038e-03,6.331466833626711067e-03,1.634835476573791345e-03,-1.584104376386479950e-03,-2.775072543391194409e-03,8.622048316420155914e-03,1.052856544197702110e-02,-5.484775411985193900e-04,-6.925818421595155740e-03,-8.209122977027162420e-04,6.909390296191715748e-03,-6.055600994361191869e-03,-4.179878836036652820e-03,-1.731189093473182701e-03,3.234018708685591510e-03,-2.156537110733097642e-03,-4.133994626746603611e-03,2.406181869101716667e-03,5.298958535664061165e-03,-3.254717003905554986e-03,-2.421486423053757428e-03,-1.562832030058939605e-03,-5.707199811932362095e-03,-4.980708164228131452e-03,5.810212301905027599e-04,3.968977940161540902e-03,-4.091858803608653013e-03,2.190692467215880294e-03,-6.182439716214855295e-03,5.852481064164479808e-03,-3.476995641288383484e-04,7.815312403414393511e-03,-1.145743583796960275e-03,5.103226236871138792e-03,2.138001270493675252e-03,-2.433605246049267909e-03,6.652461003375222982e-03,4.489758769435796271e-03,8.925910597579723126e-04,3.835293949930888759e-03,-1.633901293157233314e-02,-1.347746462927906438e-03,-2.895745077330938010e-03,4.005328719675425864e-03,-2.877323922140234821e-04,2.099050974719912730e-03,5.228399788929198773e-03,3.986543701866220035e-03,3.881010639279504609e-03,6.592877763261565521e-03,1.788621715711021431e-04,-4.175075026865756461e-03,-1.745990324376869222e-04,2.806228113092091702e-04,-6.977513773032874116e-03,8.077386021624576029e-03,-5.773453398237122537e-03,-8.537643835926412433e-03,-8.702400724751389871e-03,2.279610344249941543e-03,-4.241402024121100102e-03,-1.663420720968885116e-02,3.432528437810971299e-03,-5.658633382713504703e-03,-9.075924638143786916e-03,-4.051297557546009939e-03,5.958737284875184614e-03,1.750887907587113986e-03,4.272181269630830579e-03,-1.305906778917333426e-04,-1.955563366592677661e-03,5.320379594544695674e-03,5.499945568320562493e-03,-3.311073797698513353e-04,7.387112387381246370e-03,2.639536996614518394e-03,-2.898389887926386844e-03,-5.725088203281438343e-05,4.763693205810700347e-03,3.207287121648430550e-03,2.228392362941226598e-03,5.855430316157998229e-04,1.810631484926070265e-03,4.625823138674284123e-03,-5.820296137956895016e-03,-6.088369842420089141e-03,-1.433252645942884561e-03,-6.613147287177678982e-03,3.898246778020693620e-03,-7.186970771726648721e-03,-5.875337472848901968e-03,-9.034052977751861127e-03,-1.186851734374751166e-03,-3.761404974280011144e-03,4.394414328091314666e-03,4.225113075235212984e-03,-2.338925588932093576e-03,2.621460399891163955e-03,1.162771123845722033e-03,3.0642271
37360382713e-03,-6.176117563146175590e-03,-1.456340953195665389e-03,1.466898403762177889e-03,5.990376833036817859e-03,4.142856367400265041e-03,-8.354075901237621726e-03,-2.829895966719777819e-03,-8.450677633521709239e-03,9.321472781759264446e-04,3.951903051905724176e-03,-4.438397622517729381e-03,-1.060242339010187195e-03,-2.096993302857204713e-03,-3.475251719536684956e-03,-3.282247557868193336e-03,-4.151297058654043401e-03,3.553203296016317798e-03,2.745202928740738367e-03,4.413689319736353867e-03,-4.497013256952487291e-03,-6.633046754040187142e-03,1.296472088026107789e-03,-5.685080861878376302e-03,-1.627950253708208492e-03,-2.826757502399839233e-03,5.938689478044848966e-03,4.502727688577964275e-04,-1.291300045843471597e-03,5.851525249923376915e-03,1.147248587447159685e-02,-5.862399110945195647e-03,6.585211635066467182e-04,-2.219730886983505361e-03,-3.860125976485382412e-03,-2.599697744735179981e-03,-4.618684120459943740e-03,8.215879436889223300e-03,-3.124880283864477516e-03,-5.414117928727838966e-03,1.053967708506305979e-03,1.010156344547704721e-03,-4.212671200366661928e-03,4.128653635914605402e-04,-8.663842133181975588e-03,-2.039143994427085587e-03,-5.498890871238907609e-03,6.032781302380088063e-03,6.670832466718156113e-03,3.717625935456894248e-03,-1.618906785336784136e-03,-1.477866191445067470e-03,6.204470940868331286e-03,-2.300848498032190444e-03,4.719874662557116581e-03,-2.255513219956648170e-03,-1.053944536040720206e-02,1.228820511345962360e-03,1.506703710961335456e-03,-1.169999269599602751e-03,-2.047895213838329519e-03,1.298603175724334993e-03,-6.507673703948732916e-03,-5.092987689918530500e-03,6.727238742620766064e-03,3.809153004807450896e-03,-9.583994295876786590e-04,6.776882245073051828e-04,-8.615886741743809720e-04,4.595626830339806686e-03,-9.221748335388673529e-03,-8.772889311378249868e-04,4.913088273854351873e-03,3.690658124203900943e-04,5.837495836235557199e-03,-7.532866143860163406e-03,-5.274245615268171802e-03,4.999067273025968854e-04,2.064099626458933568e-03,-1.614521862516980419e-03,6.752870964119530965e-03,-2.426941632794385635e-03,1.294509925846345534e-02,1.058610833793519475e-04,-8.083176709727170773e-04,-6.485772107485045929e-03,9.096853471931941673e-03,3.856053855817125845e-03,4.225747064444905325e-03,-2.713048513411705533e-03,6.512641284303478766e-03,-3.512686822287024602e-03,6.797557082052613046e-03,-4.462477584773491090e-04,6.583591705119862204e-04,-1.775804564092100851e-04,-3.462102558790256576e-03,-1.279493374916769542e-03,1.616361166662436642e-03,2.938162432726921808e-03,4.515615191696906346e-03,4.414691312595034160e-03,-1.060655734006866113e-03,-3.284476618724826297e-03,3.046510167065718568e-03,-4.419592274658767049e-03,-5.404289998107368077e-03,-3.894569032269334843e-03,-4.899849780155621053e-03,-1.038786732602483978e-03,-2.741388692321909421e-03,7.472967347823561250e-03,-7.570683933141558741e-03,1.935042324419968636e-03,-1.975530214908493426e-03,-6.019361375210352354e-03,5.223513317973576230e-03,-8.566914723862324374e-03,-1.342361681498598688e-03,-1.903134091881960926e-03,8.116321258868101560e-03,3.771184787457597300e-03,1.370477520481322908e-03,5.577878963496658848e-03,1.122288522084909741e-03,-3.128207279349612922e-03,3.340210626188950226e-03,-4.016319640697842835e-03,-3.005142918338182985e-03,7.274163075787926439e-03,6.575233574978234068e-03,-4.755859261987141484e-03,3.977799468361002070e-04,4.342796150969442139e-03,-2.695628086942607020e-03,-5.094708257269691537e-03,8.936491621357465012e-03,-3.367669065645704368e-03,5.488462906729971863e-03,-2.991357064907
779716e-03,-3.579148090777484800e-03,9.325412488102079525e-03,-7.087607010933320330e-03,6.964200098890691894e-03,-3.322655824453719584e-03,1.465113134918368887e-03,-1.503303243884831055e-03,-5.027652987838094820e-03,4.419156457652782463e-03,-5.025545945959899140e-03,-5.736656542102226523e-03,-7.576330300176417620e-03,-1.352863467124522379e-03,-3.968342008019187447e-03,-7.876074043088956717e-04,-1.082181244255247306e-03,7.632257166438631904e-03,2.405604433441861114e-05,-1.024018901822577356e-02,3.704221399773915957e-03,-2.092443826092075208e-03,7.437606327463713515e-04,4.452780887200211667e-03,-2.147685063479171823e-04,-2.902657884993830716e-03,3.310149679440033826e-03,-3.937825143467843340e-03,-8.922319850101079056e-05,6.454905181124039742e-03,6.763220300522659184e-03,-5.664620854933683294e-03,1.217222328964113120e-03,5.109087373336668007e-04,-4.797523529327352393e-04,-2.850891577866769121e-03,-9.820513136388993314e-03,-1.353956406707505474e-03,-4.296755674920048861e-03,8.748810926672208679e-03,8.338641373247739725e-04,6.063787404196482878e-03,3.835353014002528850e-03,-3.650470710587575603e-03,-3.888045105556724949e-03,4.307529813419555716e-04,-2.594477665653336252e-03,-3.858754757079349128e-03,-7.841543760774670122e-03,-5.404983850927619522e-03,-2.508418711967011928e-03,-3.567185264687838982e-03,-4.812496881231450967e-03,-1.471236447521483094e-03,-1.918420916655499226e-03,7.985485983853039468e-03,2.037413352806540564e-03,-1.699180249256860382e-03,-3.789538567513231569e-03,-2.085885306196243703e-03,1.853755142155867272e-03,5.325646454465227456e-03,6.440606480271651420e-04,-1.154821758033738703e-02,7.879029293411745488e-03,-2.698781287650450691e-03,6.612486186321814632e-03,-1.233343682709443952e-02,1.487088482683086334e-03,-3.321479006563685370e-03,9.450022696591602739e-03,5.142084926884957566e-03,-9.168767173982135966e-04,-5.060267118956322027e-04,-8.756606322487247305e-04,-2.410087766698862377e-03,-6.202402562679220147e-03,2.816540198965683528e-03,-2.740348192517549301e-03,1.546133928559853023e-03,1.058809252811635480e-02,7.228715402498731533e-03,-6.064133928615339335e-03,-3.240525682429704069e-03,1.378612463198438115e-05,-1.070944898622934287e-02,3.845212638520583391e-03,4.538306079422220300e-03,-5.140383253483580585e-03,8.088338413606106433e-03,1.209442123741394978e-03,-3.752890373932083693e-03,9.183281450493614370e-03,1.092938859370917726e-02,-6.203862353846870082e-04,-6.280321479049412285e-03,-4.978771152312387420e-03,4.640352562208116365e-03,-4.067563467595202743e-03,5.484632824955222759e-03,5.750810174989423422e-03,-5.446766403398364646e-03,2.021360607767352761e-03,-3.009992812409154235e-03,4.756488598450666315e-03,-7.082598803007711386e-03,-1.266868406697486092e-02,-8.991275445538567807e-03,4.312818781142726269e-05,2.720229535487321808e-03,-6.430917080727711378e-03,-7.628966512487359020e-03,1.605563228214190773e-03,1.931479327242215148e-03,-4.599581940923352409e-03,-7.214062314584946886e-03,6.159570963244691104e-04,-7.869367481972819670e-03,9.006886119023926537e-03,3.635695351687555366e-03,-1.043005555422865313e-03,2.437050567963968024e-03 
8.133282771529479335e-04,1.002860247902495573e-03,2.029435350880727691e-03,-1.180968983940450766e-02,6.086651043323151704e-03,3.635943995887943713e-03,2.894588784130271370e-04,-2.039648750952417679e-03,-1.564751957615197718e-03,1.902778280747139663e-03,2.878327521717623932e-03,-1.854030705136396756e-03,-2.465556309851602099e-03,-1.337486275960476229e-02,-3.293935891274093163e-03,-1.953064368868949506e-03,-3.277548825543656359e-03,-4.222591229030249666e-04,-1.474456363077152835e-03,-2.041998943137577315e-03,8.969274098143581106e-03,3.519683504392758476e-03,1.771940108473758031e-03,-4.426294228262751471e-03,3.904628503695726453e-03,-6.164494177320181505e-03,7.445409864891579634e-04,3.711903904920120945e-03,6.625871834444148518e-03,-3.156425967944046234e-04,6.685719677382305536e-04,-3.074907388601765771e-03,2.205937193424354053e-03,-2.401776346465491277e-03,-1.416984396647612580e-03,-4.019739137340576883e-03,6.031514514270374468e-05,-1.669949246561243644e-03,9.858206288921837410e-03,-2.499417031306370128e-03,-1.735129707289222818e-03,7.165573990406824863e-05,-2.804249543053309764e-03,-6.977756807416111290e-03,-4.801888815532791288e-03,2.155344187011148330e-03,-9.374095166893995842e-03,-5.110708449785241435e-03,2.038825012442953104e-03,-1.639495752358124080e-03,-4.527120759836053381e-03,5.401800099095883413e-03,1.038321665663550575e-03,6.080594044198027112e-03,-6.689991093261452330e-04,-2.504667914675409984e-03,-7.010010644351175393e-03,-9.377978582550180325e-04,-1.547123169061796538e-03,-5.216868921390394910e-03,7.469386462190134857e-03,2.189740421583172801e-03,1.109118046110547321e-03,8.725457442319851584e-03,6.846009343695961570e-03,-8.657834053173870358e-04,-4.501734479220334713e-03,-2.796749666010160066e-03,-3.573053274330467276e-03,-8.263325858012393740e-04,2.464664496745630831e-03,-2.867317960154433694e-03,-3.660972370815269517e-04,-1.647658440104994093e-03,4.607584885674894173e-03,-3.374657302392336088e-03,-5.980052731552219433e-04,8.489465037456976615e-03,6.170411726378626296e-03,1.347435099571132932e-03,4.826498605971963826e-03,-7.733526346408023909e-03,7.099404043061597822e-04,2.301700079767226355e-04,-3.454700540915359597e-03,2.692140896876095077e-03,-7.385716764458698236e-03,-8.958179563263517567e-03,-4.864627588010906299e-03,-3.788031219994091384e-03,1.068838343482787258e-02,-2.464141024535406246e-03,-4.171906215366134180e-03,5.854173173220095228e-03,-1.583626576073046775e-03,-1.113036554115300709e-03,-8.649800509730997886e-03,2.113511709894308043e-03,2.415338136172544541e-03,2.935393550138557656e-03,2.478566993626429155e-03,-2.656061395591296625e-03,3.156273638456863137e-03,2.763152854274877022e-04,8.486846566911150982e-03,2.032003909065350448e-03,-4.425738624033506534e-03,-8.927950411847761225e-03,3.262899768954282721e-03,-1.941764438821889319e-03,1.907192337252829007e-03,1.118772788959597171e-03,-1.832845677418983585e-03,-5.268214525823049395e-03,1.031244276553291636e-02,5.691335407184740409e-03,-6.077779022134616356e-03,2.426388147938059615e-03,7.608261547353973838e-04,1.344777250566457898e-03,-2.431723935646236488e-03,1.216551810286100113e-02,-2.888014291056369056e-03,4.049728250178932093e-03,7.913987451283964122e-03,-5.058777289501194263e-03,8.944770999388501967e-03,6.954875523912995572e-03,-5.275286343424607019e-03,-8.156296119410938114e-03,5.575400375368299265e-03,1.732946599877074195e-03,6.886308328336036150e-03,-4.518215326471142559e-03,-5.821188869376920857e-03,-1.138838799732213788e-02,-1.156745035992871005e-02,6.922951462888585206e-03,-1.777043571428558837e-03,4.48935
6261857473216e-04,2.370579081514510844e-04,1.656760173144262993e-03,3.093828761030966400e-03,-4.138120284742033243e-03,-3.148886283601867730e-03,1.015178640080783429e-02,-5.660952919245510785e-03,2.486169713534026014e-03,-4.289637228777202268e-04,5.535416775550579087e-03,-4.741867105529755377e-03,1.588198487347109800e-03,-5.592295725986342822e-03,5.402727345665609381e-03,2.727922910224480236e-03,-7.163131566231756460e-03,5.871884339566943098e-03,-3.666097684665003617e-03,-1.208278347431190067e-03,-8.343577250723926237e-04,1.386975801109330058e-03,-6.705504705150341915e-03,-4.993848872152898896e-03,-6.407154394128171046e-03,3.223617111011298914e-03,4.724771892641988109e-03,-3.287861532505427769e-03,1.102481339978622632e-02,4.433667914242471302e-03,-4.245251400036806864e-03,9.881188130680385734e-03,1.095038961439442210e-02,-4.559779238202597319e-03,4.916597931278201238e-03,2.076696969871590862e-03,-2.936419744273931556e-03,7.596339423995707163e-03,7.395941049019075751e-03,1.470728962037570189e-03,2.672961892371281643e-03,-7.856616370250287537e-04,8.519358400133554207e-03,-2.464357301944407764e-03,-3.065477908233565224e-03,6.695709513010109429e-03,-4.366101994389674605e-03,-6.846505257035764326e-03,-6.937678632353932312e-03,-5.666871032278975040e-03,-1.280721436974818425e-02,8.588272125672721021e-04,2.427079445767644811e-04,-5.340421444281691875e-03,-4.468443826607059605e-03,2.211314625381378875e-03,-2.879433815264475539e-03,-5.934752287383968138e-04,1.536153223840386068e-03,-7.775946831818352639e-03,5.683425898889839928e-03,6.427264817990187111e-03,-4.827795907847034704e-03,4.742752224843404564e-03,6.698177594730063411e-03,-7.452932511640619415e-04,-6.873550535194604101e-04,-2.621188309195404947e-03,-2.958607590209009087e-03,-1.050042140114132092e-02,-2.865723743860609102e-03,6.287487611796880108e-03,-4.225337716947743377e-03,9.733224203163144033e-04,-8.959018443826334208e-03,-5.617317877375564705e-03,5.803133121617778598e-03,-2.670917708871354927e-03,-9.276951233813305817e-03,5.531758621309005447e-03,-1.779017355877592456e-04,-1.770154401001361151e-03,4.397899150458981464e-03,2.551716104210088355e-03,2.890950802170519449e-03,3.131290077749906563e-04,6.531027703435692520e-03,5.374720449288140044e-03,-6.503691043550913339e-04,-6.651192482507217556e-03,-1.113311837600030571e-06,-6.778446910352523003e-03,-3.064986325626352717e-03,-6.707043824974103126e-03,1.478220215938101743e-03,4.936066003933794501e-03,-2.700985677786637510e-03,-3.323707146414492039e-03,2.954783755930489100e-03,1.878026802761196580e-03,-4.679711572898997800e-03,5.285781598245001094e-03,3.530038267237137466e-03,-9.223015415000808230e-04,4.367350356137645294e-03,2.772721644534841532e-04,7.911048023596030029e-03,-2.860372013677454049e-03,-3.863517930588500262e-03,-5.056303433672844870e-03,7.265404832959680877e-03,-6.590805373365565822e-03,-1.876321188556169832e-03,8.345330799186379380e-04,7.803164744809352018e-04,3.129999488337975588e-04,-3.635960208271047257e-03,-2.363858064838713323e-03,-5.220674006267142855e-03,-8.648777892766749270e-03,-7.651307319820760573e-03,-7.137312976154369833e-03,-6.130034905593904036e-04,6.011206069138422259e-03,-1.697901401722076535e-03,-3.959031683324954788e-03,-3.827596917842409231e-03,-1.306250752045274265e-03,1.350304976874522117e-03,5.291119170876742821e-03,5.414325310688009171e-04,-1.290652783879712431e-03,5.412110868559333321e-03,-5.432856846221115546e-03,4.485775824258045504e-03,-3.733643781375917559e-03,3.481587679095956623e-04,8.092195490566157572e-05,1.768120531271808599e-02,3.654616407198
553376e-03,1.322992099497885890e-03,1.948257263556271539e-03,1.927585366044433151e-03,7.893265197080358980e-03,-7.452272957499964968e-03,7.734728684149541882e-04,1.923237300486244994e-03,4.455917344173570063e-03,1.880360612543744434e-03,1.339479011328175188e-04,-5.186818403474235083e-03,-7.784880158682734586e-03,6.163805896353160597e-03,-2.899627784236139243e-03,-1.998477328242173265e-03,1.057586404455926332e-03,3.753275714041077928e-03,-1.218855238619166118e-03,-4.868837602013300313e-03,4.029846257967888493e-03,2.292952463571749575e-03,2.323939528400001397e-03,-8.295290809084263431e-03,-6.356945367521301717e-03,9.848698629702371324e-03,-4.499487190196189458e-03,2.542354366492161847e-03,5.515794542498461163e-03,-5.175881868230534574e-03,1.511116810379379552e-03,-5.069470563344983041e-03,2.143728300379114070e-03,-1.202823092162931019e-02,-4.530259795632117503e-03,-3.994054415612690776e-04,2.408452705323142068e-03,-6.642318738477251058e-03,-5.902374762417197246e-03,-3.363960012000981629e-04,-8.479382632586017141e-03,-1.305607925089107349e-02,4.261383363187888282e-03,-5.713526666701589790e-03,4.272124093130345857e-03,1.889679289077879484e-03,3.761159686921703143e-03,-4.503895007038214576e-03,-5.102534693212860961e-03,-2.639273678558262670e-03,4.303063259730527217e-03,5.264564336989093971e-03,-1.258819870136584961e-03,4.409563074526546417e-03,1.112400878821929008e-03,-5.478549954976910143e-03,8.798210215305343751e-03,1.280592764678917673e-04,4.420388036479820822e-03,2.871194414598862710e-03,-1.878211027416153405e-03,-8.369023487814274825e-03,3.810184052963367946e-03,3.529510048832855320e-03,5.311733638837185315e-03,-1.150602804517179329e-04,2.609580125161605705e-04,-1.403351136514969632e-03,5.591991248093543254e-03,-7.781877283381191321e-03,-2.680987339116219174e-03,8.356226503754669507e-03,-5.917212408564223071e-03,-1.641557361361388597e-03,7.902373130187771016e-03,-6.287903386082788204e-03,-8.960158203067532592e-03,-1.194450834892394828e-02,7.397652739224806209e-05,1.210622296463058052e-03,-1.188693396815669254e-02,3.754157119762905476e-03,-2.498228434373533577e-03,-1.637934136167162142e-03,-1.850474924089509079e-03,2.410779321379319905e-03,-2.159721234261043586e-03,3.420841635617800133e-03,1.338556908682746963e-03,-6.590208226627262272e-03,-2.459470043960082595e-03,1.631468404706496247e-03,-3.274558684847254757e-03,-1.236733009295193147e-03,-1.723146743627149277e-03,-7.534960100390186884e-03,5.633677324813866037e-03,-6.567392375443803672e-03,2.521165094759077337e-04,-8.373516518604932554e-03,-2.184026774293669048e-03,-1.677728649203061447e-04,5.013512099318800062e-03,-2.847330963260464090e-05,-1.618707244291974498e-03,-4.154028364866634883e-03,-1.199851041686961868e-03,-6.781668584741456021e-03,7.658771938186822266e-04,-3.732394751520518549e-03,2.314346643786064067e-03,2.045525902952466998e-04,-1.876971680594631932e-03,-1.584982450630621547e-03,-7.000909027952434242e-03,1.889741552693831577e-03,-3.123006455776590300e-03,-5.321312382937835872e-03,5.341179248601611237e-03,-2.881646864769719235e-03,-4.017055194861457001e-04,-3.143865883470139521e-04 
2.608031503251476754e-03,-2.461875749569505511e-03,7.556820104640462211e-04,4.808915810742968831e-03,-9.534661194609306582e-04,7.640379794778016714e-04,-1.385177567184159297e-02,4.832824544753810091e-03,-1.173760337122393624e-03,3.183987370127556744e-03,4.513631265796594523e-03,3.227146803774766978e-03,3.039361889002094568e-03,-7.330133997495077817e-03,6.352250356493048013e-03,-6.325953511212512037e-03,-8.258492277052522507e-03,3.864669876793118408e-03,5.383109292066007301e-04,3.617441583187595215e-04,-7.822983769661795818e-04,2.553222946904926821e-03,2.867000916520386251e-03,4.417817960958967879e-03,-5.267424161313522153e-03,3.280763364902451046e-03,-2.982343359237596848e-03,7.654802788267788547e-04,-5.664410933071502806e-03,-2.059367122799579414e-03,2.515891799663916815e-03,-2.237178347568691533e-03,2.071743778752959276e-03,6.571411276877725700e-03,2.232441269427377917e-03,-7.397001327220232951e-03,1.956714203501220238e-04,-5.462507625511823020e-03,-5.494200124857393820e-03,5.365760878588620060e-03,-7.865010278460544957e-03,4.935242949244383458e-04,3.061023198372871471e-03,2.058209540690486225e-04,-2.020806584992564797e-03,6.450795852177048165e-03,1.034212775321719652e-02,9.434786514476392605e-03,-8.109890297391630226e-03,-2.016377438128078275e-03,-1.295300841391639184e-03,5.219744392664042026e-04,-4.092137009105868868e-03,2.777863928176606961e-03,-1.409491652164464271e-03,-6.288434476645216216e-03,1.556007440249725657e-03,-6.733365766464088215e-03,2.216406190476602131e-03,6.295285423179267960e-03,-6.717024022877176974e-03,9.163436586113453408e-03,2.961378048599231597e-03,-8.737278885008565796e-04,1.382034932786347162e-03,1.171201473459022001e-02,-8.114682223999282462e-03,-1.757265122996576833e-04,-2.734842632972001118e-03,-4.513074923614783344e-03,-5.768303588401096742e-03,-1.956300829074907911e-03,4.399712493800520506e-03,-1.914727778714198341e-03,-2.104744602756988391e-03,-3.333838798808829405e-03,5.082375587034162687e-03,1.101437561191005157e-03,-3.220879848425026691e-03,1.862476232100947898e-03,2.302333883371881877e-03,9.460964830857446954e-04,-1.029978908744094875e-02,1.326521818971395901e-03,7.388682168218760685e-03,2.715357577552333489e-03,-8.977136340170458568e-04,1.395935026195224758e-04,7.361874489186315609e-03,-1.490330211654769706e-02,9.994742980827968359e-03,-1.064825197965266028e-02,-2.597054227629595410e-03,-8.176115544509232266e-03,-3.556805782596640392e-03,-2.902740260257917655e-03,9.529272482874363848e-03,1.189570479450122382e-04,-1.064919592524894926e-03,-7.179542971084930045e-03,3.906572616780859541e-03,1.067769028761196733e-02,-5.104736969794349986e-04,1.886770471069896269e-03,-2.861614334709448697e-03,-2.987068063159395524e-03,-4.244079390997106105e-04,-2.468069745750450360e-03,3.903752273898123282e-03,9.369231359642673715e-03,-3.250954108245283848e-04,1.090114801574367312e-03,-3.768037724050291633e-03,-3.462040931163013445e-03,1.631898246904920670e-03,-3.655089163943953286e-03,-3.133732778533070661e-03,2.569154951618058801e-03,7.931576114578968570e-04,-1.452066798964128918e-03,-6.987474074839932457e-03,5.911037147511160467e-03,8.020935426271637216e-04,-1.116895845564085457e-03,3.617417733465369978e-03,-6.137244153286080910e-03,1.825519071159941494e-03,1.911395695960094220e-03,-5.223162951335249236e-03,-4.934932070618376111e-03,-7.493170331936416240e-03,-3.773228033360323617e-03,-5.546121077608478014e-03,-4.577862627663559399e-03,4.699323999797659634e-03,-4.057096348407889831e-03,1.554006013128387837e-03,9.642098680585847812e-05,2.728265807453028602e-03,-9.666544557
939986751e-03,2.712327895466857836e-03,-1.072254169058650241e-03,3.211293833654547784e-03,6.726185827910189166e-03,-9.299612646139598926e-03,7.765568775175121612e-03,5.262675670055518527e-03,2.913891401086974448e-03,2.740976450629306008e-03,7.692537684643388149e-03,-7.706331945451814353e-03,7.128569510570804783e-03,-5.235930468658646364e-03,2.358755158344674318e-03,-1.180148955166966711e-04,5.309509553822462733e-04,-5.677262514276748551e-03,-2.357065100370223185e-03,-1.989291313310188360e-03,-7.014163856259693700e-04,-2.911535236788487940e-03,2.856379819663717578e-03,-7.361029862532971965e-03,3.707629507715631736e-03,-1.365988457514958265e-03,5.914039019991801892e-03,-1.561149398931414986e-03,-3.381459166763261474e-03,7.988009853080731831e-03,8.862356073817155566e-03,-3.447618122914683590e-03,-1.761284593094478157e-03,1.753336590005227270e-03,-8.017967407274865718e-03,-8.420035248970418260e-03,-5.405884121992105343e-03,1.445415495749588540e-03,-6.709054209140545046e-03,-1.512176761051356455e-02,-4.737589917087140987e-03,-1.132403297205686202e-03,-3.987862872737825588e-03,-1.241031392669180019e-03,7.624337017219987717e-03,-5.286407207500070206e-04,-4.932795578906404819e-03,-3.187117325496631112e-03,-2.087432343858685192e-03,2.204171890278963826e-03,2.845837354739377129e-03,4.361284983819624719e-03,1.362827820347015844e-04,2.711143430833692566e-03,-2.033823220375493603e-03,-3.747380972468431545e-03,-2.149379626887713054e-03,-1.491736463757695023e-03,-4.579314996921504677e-03,-2.772859571369536667e-03,-6.075270399483181580e-03,-2.744185067324449081e-03,-4.299179655076079513e-03,-7.547203931157171926e-03,1.741249417371506765e-03,-6.555824460566025448e-03,-7.015607810437710524e-04,-1.550239749973933564e-03,2.482611060744120690e-03,5.128099875871662636e-04,4.294341559062635726e-03,8.711987861201968275e-04,3.142677895732012730e-04,4.031531499603826715e-03,1.206200611273945244e-03,8.687458220487206950e-03,3.707048067336573497e-03,-7.456025045902269638e-04,5.498509690201701730e-03,-7.887945206254017710e-03,3.824506467736027079e-03,1.167329241418588182e-03,-3.658422412612006777e-03,4.403240558935461388e-03,5.179326079472156008e-03,-2.696172082416463129e-03,5.528906874186971607e-04,5.228458606673830160e-03,1.042039953650908055e-02,-1.088064884239314144e-04,1.846786700832985179e-03,-1.282175034664795538e-03,-2.404408541076156775e-03,8.664142300563429724e-03,-3.677221165280959540e-03,-4.967831897494892450e-03,-5.909424258606976905e-03,1.264575238806834971e-03,-6.434842246273175977e-03,2.565334635629756138e-03,-6.053476677660326634e-03,-2.948175108856906056e-03,-6.933844217367523058e-03,1.876492979851064006e-03,4.020890750820117938e-03,-4.465824972560548771e-03,2.032516456091236663e-03,-6.189351322423363670e-04,7.075959596065165697e-03,-7.823693694029229539e-03,-3.628614881788391928e-03,4.864671744729712026e-03,8.658159354021026377e-03,-2.109217837566272884e-03,1.192834117009736530e-02,-1.718807719561369620e-03,1.645288218675838507e-02,2.400453194776389337e-03,7.693060325102469080e-03,1.976115457801858998e-03,2.852246910075211059e-04,2.094811718249891229e-03,2.724963516983220941e-03,-1.689942933900736091e-03,3.309813621448464728e-03,5.487954784499387943e-03,1.471633606494671077e-03,6.031677271452937431e-03,-1.631212740514259010e-03,6.935925154729709850e-03,1.124917619166767788e-03,-7.141998874893910020e-03,2.400793933069385669e-03,-1.423601461315586025e-03,6.034457038294431010e-03,-1.899557471689996809e-03,3.024425186746099344e-03,2.547329072695703276e-03,3.416360379683918216e-03,-1.143390602836885650e-0
3,1.399961162150092225e-02,-2.223935565280027449e-03,2.550996875334778872e-03,-1.735968996372635499e-03,1.548783441789480624e-03,1.029562372871281085e-03,-4.246617828702736047e-04,-4.593395576596259570e-03,-3.507281549839921971e-03,2.128213359269289992e-03,1.611144163803789010e-03,5.690289500927750085e-03,-9.468266751692587220e-04,-1.423624335920742898e-03,1.678696279212253391e-04,-5.620909326295924630e-03,3.222312633956296492e-03,-1.170998653348484173e-03,-3.898756406655356707e-03,-1.390314947497982116e-02,-1.695246317374441260e-03,1.864204282429335943e-03,-3.802281958623484649e-03,-3.874374653909786869e-03,-3.396269628837778227e-03,7.148853758553156204e-03,2.093143122059617622e-03,5.074068378149180526e-03,1.681595023505997851e-03,6.421636923326283150e-03,1.361557725524308547e-04,2.104571916465950397e-03,-6.276988456249135533e-03,1.130680704647889744e-03,-4.429861457912672405e-03,-3.828211312569009504e-03,-2.285252643173420108e-03,-4.863841044426681342e-04,-3.840794603330253184e-03,5.124632313484789835e-03,-1.222941685500159026e-02,-3.206736468928443780e-03,-9.637081288647399852e-03,-7.704641768629122943e-03,-2.274743706933858318e-03,-7.089755489259037466e-03,-6.106524692210057829e-03,5.873095795482637278e-03,1.015874960948728173e-02,-6.896056450527372587e-03,-4.180128255307113960e-03,-2.188654526891695585e-03,-6.869293865259152795e-03,1.662249621169857279e-03,-4.692293223954813186e-03,-2.152598040828619923e-03,-4.816154855222474010e-03,-5.520150786859266689e-03,4.291132466131601653e-03,1.220111057930010033e-03,1.610034245978581892e-03,7.050989104998982727e-05,-2.445707815964697808e-04,3.368681220740030961e-03,6.024884902018640652e-03,-6.690867866135671513e-03,6.056643528682161860e-03,-5.473518565971577897e-03,-3.362612512179577462e-03,7.431318409269574952e-03,2.991440061973572265e-03,7.585443687711830064e-03,-6.777995994463310511e-03,-4.192619747239947729e-03,-3.492751100554742586e-03,-3.172503877149798236e-03,-8.278090672153749427e-03,8.137572765797773319e-04,3.073839450954110077e-03,1.229776685821538582e-03,-1.271019565165559936e-03,-3.437066490644776883e-03,3.572465796881807432e-03,2.798571247650482284e-03,-1.470374027345121596e-03,-3.481752132126068617e-03,2.456321036966526219e-03,-4.756589013421323384e-03,1.648936479341789720e-03,-1.707324368654466844e-03,-4.829143307316212559e-03,-8.973301562942498084e-07,-1.687166466793644896e-03,-1.539798780140196575e-03,-4.393124267189834481e-03,-5.993143190437656784e-05,-6.740600463775277967e-03,5.170541778800402492e-03,-2.695165491867734277e-03,3.414235256879884391e-03,3.978006847901558070e-04,1.197397665102692784e-04,2.256983456020643288e-03,-5.132403793461279978e-03,-1.417188057764937146e-03,5.791563815025959489e-03,-6.285834534765103419e-03,5.681731083712371864e-03,-9.516147775618377658e-03,-6.406604167522016863e-03,2.398948982849994239e-03,7.120415113107906452e-03,-8.404724963285786776e-03,4.825690473160509572e-03,2.151645246380883350e-03,1.925986762089863628e-04,3.725871562888867852e-03,-4.545368605532998569e-04,-1.086236191037936311e-03,-3.818610742194666137e-04,4.070634700649321605e-03 
-5.494242000903279931e-03,-4.169031017790234038e-04,1.618558124006766420e-03,1.049593297105926156e-03,-4.342045786909424880e-03,6.254372303346897310e-03,5.719533602703681836e-03,-5.515755657465525388e-03,9.692967075162207654e-04,-1.273081195874565825e-03,3.196602167033222967e-03,-4.414626559703173185e-03,1.706807304432466504e-03,-2.270178404106742293e-03,-3.018022929497755219e-03,9.176259577044564020e-03,1.025168721572408074e-03,6.348101555334000230e-03,-6.915696139627324282e-03,2.907296980696098734e-03,7.282734998478835979e-03,-1.243140830449855801e-03,-1.352192172156261359e-03,-1.550865479673457217e-03,3.989295771524713381e-03,-2.428575501534902242e-03,-8.323381621951033565e-03,-4.117161632226091616e-04,1.886109154342572106e-03,-5.081839900488544295e-03,3.113121651184697254e-03,-3.386858084243530957e-03,-1.021361635222571953e-03,-2.144706610276849151e-03,-1.307930765864722886e-03,1.051464286974374517e-02,1.047229477309989491e-02,-1.425735301835161272e-03,-5.654235681192213824e-04,-7.724436916961541742e-04,-8.383245522186432222e-03,7.190469256702611105e-03,1.036328744989201177e-03,9.334300451710145219e-03,-3.749488474166076668e-03,-2.778189073326125472e-03,-1.096276313116500587e-03,-2.259380877191598507e-03,1.000225807507515614e-02,5.417220410559677769e-03,2.501756507992398525e-03,3.126562474907856361e-03,-1.634073386931806033e-03,-5.643655249088717560e-03,2.760225953004740926e-03,3.649811169507941305e-03,3.992917700900554950e-03,1.216359693156528546e-02,4.192536717450580895e-03,3.165362031424914877e-03,-1.359267881246518407e-03,1.303585896894947317e-03,-1.097629390757244843e-03,-1.877455031065177219e-03,2.540499604396615103e-03,1.290677478058032887e-03,-7.222324157773939114e-04,1.297231048284425104e-03,4.022381557398484360e-03,-1.312245375604760914e-03,7.183921719709919496e-03,2.302687675134483351e-03,-3.383113269062331427e-04,8.417215340471337348e-03,-3.337490720389911384e-04,9.092853606561493440e-03,-8.174748516190537273e-03,-8.522468151033002134e-03,8.839443405535845136e-03,8.366435972662468121e-03,-6.196301250625741927e-03,-1.567771285327198205e-04,2.232661707279301033e-03,6.129991470664163458e-03,1.217644623005273950e-02,-4.004164116063385626e-03,-3.606287857191621407e-04,1.018493270330718186e-03,2.415537754766256158e-03,1.930216709692912517e-03,2.668293785767096322e-03,3.371469767690898033e-03,2.242051339119624422e-03,-5.314122324580347091e-03,8.036059758967983640e-04,8.096079882379035673e-04,-5.176865318550653849e-03,9.385055971030098997e-04,-4.403951674727971111e-03,1.946456597231983757e-03,5.401184494518289662e-04,7.984707855030952900e-03,-6.872168915608863581e-04,-3.292860376944308984e-03,4.937708398255540379e-03,-7.728412845607104779e-03,2.877151626769092890e-03,-5.988123971848249155e-03,1.547023454118276204e-03,-4.596497749620419293e-03,-2.080452488640208153e-03,-1.213902399667300160e-02,-9.431598561318238548e-05,-9.764499448076375751e-03,9.316462697798209153e-03,-1.454528505211605781e-02,-6.140966249706313008e-04,4.999429635656753475e-03,-7.419256267063680139e-03,-1.019370735090460375e-02,-1.025164549066329492e-02,2.595688891281073760e-03,-1.939381877577599569e-03,-3.971696297215095671e-03,-5.928892333188205167e-03,-5.366122679625950215e-03,8.039605748557885831e-03,-3.273989762022743236e-03,5.923404347613416302e-04,-4.032129212875687312e-03,3.541732961004199955e-03,6.229692674984954430e-03,-4.083963883083213675e-03,-2.680303934465585406e-03,-7.888730193191992833e-04,5.433955740758071064e-03,-2.816650388050087692e-03,7.204819132403893732e-04,-5.606431762475640179e-03,4.9741817
04531647260e-04,-4.397108379379265682e-04,3.934788643085306188e-03,1.967842243978670629e-03,-3.988021044404401610e-03,-2.309232706748315291e-03,-4.630529633987081999e-03,8.934825736974629298e-04,4.290740342058118294e-03,2.311858677456465894e-03,5.614390607535641071e-03,-1.760466815372465320e-03,6.648735481264612598e-03,3.937618399348127960e-04,-1.039767709803359787e-03,-5.462472835935373340e-03,-3.462215813151451379e-03,6.103566529905961064e-04,-1.188563998336969636e-02,6.682651652448700491e-03,-5.336785815785319645e-03,-2.517846073912923758e-03,-3.561140704796592147e-03,9.532008065236357783e-03,4.529157754691579864e-03,-5.763046946840588540e-03,8.756844841043455105e-03,-3.187874263108283687e-03,2.796870269020848478e-03,-1.485849358752131541e-03,4.726728957030064925e-03,-6.601074165107043666e-03,-8.077844380150386036e-04,-3.252038986559225870e-03,-8.038847636653493169e-04,-4.397411447778657410e-03,6.097678332458594981e-03,-5.671016834759888027e-03,-5.763195257210945721e-03,-6.599828904320993381e-03,7.330879729962056835e-05,-2.393045912853406475e-03,9.758529252490758497e-05,-3.228684179165778462e-03,-2.044843269542808397e-06,-5.794393716401413999e-03,-7.322634696089571141e-04,-7.589040491753835591e-03,5.012823212097422543e-03,5.373673233291190003e-03,-4.044532117478726388e-04,-2.185890451229849287e-03,6.107237935717404337e-03,4.332488707441876571e-03,1.244794142933311476e-03,1.762518317376586844e-03,2.013747865202301490e-03,-1.224637087158155250e-03,1.864694422691364839e-03,-4.619427476714271200e-04,-1.004772412508633979e-02,-1.447555242198559928e-04,-3.196737470309993483e-03,1.245135012665315273e-03,1.163240196283747349e-03,3.725468075165618963e-03,2.758100583385069619e-04,-8.621886127180724115e-04,-1.134645966596027795e-02,4.611913842015907990e-03,2.223779525723076564e-03,3.966493157544193592e-03,1.729882356402449514e-03,-4.173353529725560597e-03,-3.614435476935290350e-03,4.692224462687133446e-03,1.394074062391293441e-03,1.580762286270125322e-03,2.105756952136200039e-03,1.685182784897258212e-03,-1.987717118926207380e-03,-2.333485660399568090e-03,7.988500738448924346e-03,-1.090524732963621488e-03,5.788024131499398296e-03,5.402164120395547796e-03,6.748829640443307137e-04,-5.644458092467337373e-03,-1.597831103525842490e-03,-1.129629590014923773e-02,-6.216543341371612123e-03,-6.563334663676937721e-03,-3.005970051734353082e-03,-9.749871804407558157e-03,1.432626189500155189e-02,-1.197928564822946674e-03,-5.260298887695846574e-03,-9.391547600045216729e-03,4.350866075826371982e-04,-2.548187797606948305e-03,-4.393996227539729411e-04,2.233329221805000191e-03,1.692657649635181342e-03,-6.549839486332261580e-04,-4.263001834689518661e-03,-1.086516801995767801e-03,-1.875562115127466043e-03,-1.283814681568589550e-02,3.814715195207835780e-04,9.664018035185611283e-03,-2.472488220277478232e-03,3.266118757141130754e-04,-3.537558138100126929e-03,3.563671860181532799e-03,3.477932979211619327e-03,-4.514730343902694735e-04,-2.464084158299400238e-04,-2.196416471688918202e-04,4.546016172021675496e-03,-3.042716142192443581e-03,-3.565040424953389107e-03,4.353396161570188480e-03,-7.931477702844301134e-03,-3.227054880131797771e-03,6.326913079591948248e-04,-1.305375680155878128e-03,-2.820938362061754974e-03,-7.551087996024600200e-03,1.063129916178004453e-03,-8.868236048187587428e-03,-8.179673627298760668e-05,7.371631172363714247e-04,-3.764177203976252183e-03,-5.528291509611323908e-03,-1.340267944524366726e-03,9.344204934324046785e-04,3.592091432901604701e-03,2.115319108882195848e-03,4.307063827469134742e-03,-2.392748032
267646117e-03,-2.849290224061265617e-03,-9.679520480236637400e-03,-9.251396583054902591e-04,7.556641219407503676e-03,-5.009870331044072832e-03,2.440175575734789860e-03,8.229012169292376083e-03,-3.413799149200450085e-03,-1.108598858717233480e-02,-1.014940345819951000e-03,-1.029536626595103281e-04,5.114128334373831694e-04,-4.953838515477619261e-03,4.780872825536671387e-03,-1.238199832043067198e-03,6.296842939360125686e-03,-2.031161104068604940e-03,-1.099341358150462619e-03,1.823016146926172837e-03,5.927638977896048690e-04,6.862608527468505581e-03,1.925998535421693039e-03,-2.894181995878356787e-03,-4.419145722392743146e-05,1.000955508505516111e-04,-1.177349302218400756e-04,-2.302311089178254219e-03,-1.778061056821751001e-02,2.922714656936131663e-03,-5.103611021111151107e-04,5.698539083157089583e-03,-1.781961210442143621e-03,-3.917764314600642207e-03,9.141644740961827470e-04,5.334745299433871907e-03,-8.850143784544843789e-04,8.651504064938901660e-04,-3.148016252705567852e-03,-3.523579468761041909e-03,-4.266604979742591973e-03,5.212726352400450826e-03,-3.204987978074296754e-03,3.609445476383778533e-03,-4.520067272906818571e-04,-1.365864284687358160e-03,-4.705581542008470571e-03,-4.046402610956487816e-03,-7.340203963919017527e-03,-7.659698786425539214e-04,5.018245559421992841e-04,-2.497384498856805753e-03,7.711712628969139482e-04,-1.322648066122083339e-03,-1.818382135137793648e-03,5.000912700786365288e-03,3.309248933711620458e-05,1.457801444098383607e-03,-2.817010616565695201e-03,1.379354544731732873e-04,-6.195668640273384320e-03,5.808820213460280397e-04,-7.490088022070230840e-03,-4.532142493715285089e-03,4.300238516805970045e-03,6.199362284785250865e-03,5.296856497791219663e-03,-5.127568099804758741e-04,-5.383000424123756062e-04,4.791089219632761742e-03,-5.079557397446673832e-03,2.127488011939908559e-03,-3.151025867667885897e-03,5.846503814987643634e-03,-6.284660898538333221e-03,5.388128143512840341e-03,5.302380491944393842e-03,-2.953457737296684253e-03,-2.600649312791087318e-03,-8.902094798604062807e-04,-8.345184113330319370e-03,-7.480957211712575307e-03,5.502290390523497261e-05,8.400685456912660234e-03,1.040747839754681270e-02,2.663725059736678745e-04,-4.073651579578150411e-03,-3.610103096200341019e-03,-6.315713546744882584e-03,3.194740257410893115e-03,-6.688404942062037110e-03,-7.274862865910022500e-03,-2.966839337994251705e-03,-3.920501825778437664e-03,-3.028153259833074769e-03,1.748201911240793117e-03,1.292671043732128173e-03,-6.225318770085204456e-03,-2.783895163370645343e-03,-4.718903368872416106e-04,5.118152365134484225e-03,1.066024751081886190e-03,-2.522928897633367178e-03,1.248982229052014031e-02,-7.989975879943317719e-03,-7.829157589424502045e-03,-5.762590460790776789e-03,1.411393416641037665e-02,-1.746523949292032394e-03,4.379155655978016724e-03,-7.442251145087037858e-04,9.182868471253874745e-03,-5.155096868064368287e-03,-6.366902930565232314e-03,-7.430537206877498718e-03,-6.100098924395780386e-03,5.694959540232339204e-03,2.251547339706921023e-03,-2.719920026409728808e-03,-2.369147898738429864e-03,1.215244796310443673e-03,-8.757275919756694112e-04 
2.126443439238956835e-03,-5.736763545008424434e-03,-3.893305862937287715e-03,4.702513351731170410e-03,-1.039650006693015319e-03,-1.578135705276160214e-03,-6.584178554602036597e-04,-6.720963318314466736e-03,-6.321543796911093319e-03,1.744154883767277172e-03,-1.096196725651109533e-03,8.111496190506380793e-03,2.102923970541166563e-03,4.881627063496267308e-04,-6.730551589669253945e-03,5.055064899333636813e-03,-8.369963234674733868e-03,3.871645585738278574e-03,6.466565252234018901e-03,5.624988031265550081e-03,-7.408555010060697762e-03,-5.290155859670921633e-03,5.893631497108337003e-03,5.045590513347335965e-03,-5.214754598593333151e-03,-8.124997785941383277e-03,2.977153029323096212e-03,4.410842610612595097e-03,-2.527531976238144538e-03,8.584719630801620344e-04,8.117447582501888595e-04,-7.107667625522559769e-04,1.930753749759428267e-03,6.986616732015895779e-04,-1.261056083074933200e-03,2.476997303338259772e-03,-5.863500160574262009e-03,9.823591879750924460e-03,-2.731982117136429883e-03,-1.186521922469401056e-03,-6.970531853647190558e-03,-7.131587649093397083e-03,-2.329917527958076922e-03,2.992947395985246471e-03,5.583470228694874769e-03,5.725892608447481422e-03,-7.957262646052253030e-03,6.535992058377115659e-03,-4.217031489892053564e-03,-2.405121235865859044e-03,-2.374514099271590826e-03,-1.614406116944680156e-03,5.296229618949161409e-04,4.478561821087341174e-03,-6.987909529749079286e-03,3.230023885913531605e-03,5.805792034821707741e-03,3.988367718454994756e-03,-3.770950651671601140e-03,-5.814178048623473011e-03,-3.662522575690566051e-03,-1.150263052143260089e-03,-3.409341131179400002e-03,4.207954630484003408e-03,1.037110897431512378e-02,-6.361446551360230676e-03,3.722778198436125976e-03,-7.327631017705197800e-03,5.114660628919865011e-03,-5.817557830864579504e-03,2.978116968804840007e-03,1.421212105030005132e-03,-6.520532942547950170e-03,3.888248248518712843e-04,1.098040944916655985e-03,-6.947688195301136523e-05,6.481952818325189755e-04,1.074673355063328513e-03,-1.303625381052862320e-03,1.481065182268582168e-04,5.233703074162432845e-04,4.387306560266210836e-05,3.946819866995928891e-03,-6.540220221888845206e-03,-1.894557235921232258e-03,-5.945356554231130339e-03,2.753649624010548035e-03,-4.637777086445012652e-03,-9.280616582096775508e-03,-6.518220508847615002e-04,-8.672487776446009475e-03,-2.655619549029201564e-03,7.277569752276776688e-03,2.426615328252428299e-03,-1.093940528980016317e-03,1.280131792884001661e-02,9.407017501286177655e-03,-5.679352406478235110e-03,2.235267119156210489e-03,1.409040976826869251e-03,4.048980794320250834e-03,4.048139543384486139e-04,-3.758203600132562568e-04,-1.106115878200564315e-03,1.210822170260046973e-02,2.926674311521529261e-03,5.681024915984015863e-03,9.364269689234001187e-03,-2.368445749276403781e-03,-6.840325999252078182e-03,-2.204852989503704001e-03,9.858970248013405706e-04,5.680776970715161259e-03,-1.566160167828520647e-04,-6.629529659561155149e-03,1.062087391491781455e-03,-9.418978940335635472e-03,4.649547067142660757e-03,5.288540118432133452e-03,-2.495780039260527081e-04,8.643653751837379953e-03,-5.051366669088879196e-04,-2.790934389651482644e-03,-2.353869478350338260e-03,1.151037942040596942e-03,-3.878959389745922242e-04,-3.536287609753791666e-03,-4.716001695066919107e-04,2.455478744353719808e-03,3.536166713933310349e-03,1.035285898586055416e-02,-5.192957509297196211e-03,5.826198016485464350e-03,1.073729957022469261e-02,-6.848043273423187575e-03,-8.521680823166646185e-03,-1.068894894353325910e-03,-4.486477075740150262e-03,6.219432363482966380e-03,-8.6027534
33480808416e-04,-5.388578372833890268e-03,-5.026959711654063051e-03,-1.590647770848820476e-03,-8.501767037526215126e-04,1.076172316546792924e-03,9.520651291173004961e-04,9.927294409684181680e-04,-7.266885901796012268e-04,5.636445903264370762e-03,4.143159928592103809e-03,6.330317203688309510e-03,3.436566246851379455e-04,9.482896240711463989e-03,-2.033452200165021539e-03,-4.628996374950585142e-03,-8.070456167817140550e-03,-3.299037546626029053e-03,5.352485520862911537e-03,-3.815957203791958165e-03,4.058079599715324302e-03,6.852811058178983651e-03,1.960745361382006200e-03,-6.919876027721699871e-04,1.886419252719560629e-03,-8.907459496959544429e-04,-1.018778080692889394e-02,-2.801145742233469241e-03,5.289008984032807920e-03,5.341878440481348869e-03,-1.809050843615452649e-03,-3.795669262089601537e-03,-1.171700032755641253e-02,-6.508561653054378534e-04,8.466642612856369087e-04,6.344155493289778576e-03,-1.239044872003296953e-03,2.644658848493526126e-03,-1.416915514275690971e-03,-3.824334972923441010e-03,-2.512052504990115124e-03,-1.244291498438533520e-03,-5.370664640648197506e-03,1.065249043988801547e-02,-5.829918700263967378e-03,-5.931174956473678683e-04,2.242844138500858699e-03,3.222651089356318339e-03,-5.063300997173059582e-04,-6.772509056810116848e-03,-7.432600398096921024e-03,-8.394945499050489963e-04,3.834477427545771554e-03,-6.599930952202548527e-04,-1.966836039966893095e-03,-5.265434435886658625e-03,1.723509925947810202e-03,-4.476831262744646912e-03,-3.283491974890796923e-03,-2.014696085420949242e-03,-7.453428100025403548e-03,-3.586561764120813978e-04,1.126770672317406116e-02,-1.177169677531939235e-03,4.086879137675985597e-04,-3.320835971821589736e-04,-2.538024713939276023e-03,-5.532103787527619526e-03,-6.938375883798274246e-03,-3.746291053212179138e-03,2.360606981905518601e-03,4.370974017382163110e-03,4.378368034319745283e-05,-1.044811618604415992e-04,-2.516821187223106191e-03,1.117508911399971248e-03,6.204499024947554911e-03,4.929105349100732295e-03,1.613070085038164096e-03,-1.975999920198636024e-03,-7.785985853690114301e-04,-7.028334963348269876e-03,-4.594874663413950171e-03,-3.832156070469697109e-03,9.547736764277361260e-03,1.702300359587097635e-03,-5.251733193632346884e-03,-5.446784857686175207e-03,2.607653521826529828e-04,1.820698425204108403e-03,-1.522289939832991747e-02,-1.718219281288020216e-03,-2.510880091237389228e-03,4.384261215463374113e-03,-6.576093500658637681e-03,-1.220785645824184551e-03,8.719705896114119878e-03,-5.373441259218142245e-03,1.426712223279092166e-03,5.331816381500136676e-03,-4.247012031119636044e-03,-4.186622805894815377e-03,1.278189087403705472e-03,-8.009412735096704469e-03,-5.849796322232866247e-03,-5.086758013670159874e-03,5.009076015577069475e-03,2.563408540687358694e-03,-3.707963926360898464e-03,2.327357428164483853e-03,-2.402220534567376092e-03,5.134242103274421582e-04,2.120250498277074071e-04,5.707710945955314467e-03,4.796391616814725707e-03,-3.815759735004408196e-03,4.541699941971201297e-03,-7.789444475200161741e-03,-8.758832514918768464e-03,-4.723352337417614788e-03,-5.341292682631025579e-03,4.952544608646735139e-03,-3.663269253852758072e-03,5.280025188065767643e-03,4.500583526529370292e-03,7.601025659974149952e-03,-9.838433532419119804e-03,-7.315655475714856297e-03,-3.797480099434764889e-03,-5.777125309126025267e-03,6.928567852222303203e-03,1.398235345064774253e-02,1.121628637564514586e-02,2.353181165814238404e-03,2.240397454678676094e-03,2.005892768529415025e-03,2.207143809102094737e-03,2.638533805004689842e-03,7.937161807193194099e-03,-2.2479816365
01820459e-03,-3.224686736337359621e-03,-9.592705959403208769e-04,-1.820098511930822848e-03,-2.080834600595203501e-03,-8.385503839147124931e-04,3.368437261612507324e-05,2.368013845200565761e-03,-2.868793772057808542e-04,-9.133089910639353905e-04,-2.663180027543513278e-03,3.518770864200007186e-03,-1.995419017858632987e-03,1.205483096524516060e-03,4.305815522102869673e-04,-6.861071285279663444e-03,-4.217346343555693731e-03,-4.642267469909164800e-03,-9.210245616670029861e-03,-1.701762457237033068e-03,2.579865300241807741e-03,1.476757320137524546e-04,-6.060900386183092590e-03,4.502779002874847218e-03,-1.720186253215827497e-03,-9.532668836633529166e-04,-2.041740055704979479e-03,4.091258807599618651e-03,-2.116958331374795173e-03,1.498119522114999902e-03,-1.974999846112841968e-03,5.581240283365972513e-03,3.803945284940846074e-03,6.098963441657715388e-03,-6.821986830055191617e-03,-2.496046551683627629e-03,-6.590417436134868344e-04,4.383463798371546594e-03,9.060188821215195724e-04,9.657028245291964701e-04,9.081170785449196522e-04,-5.279155392397118056e-03,-5.500007855392452516e-03,3.923865137246159221e-03,8.894787420710799017e-03,6.672371727632168134e-04,-6.774125306341412071e-03,-3.000183149895037371e-03,6.691704324527687417e-03,-5.435985751096293617e-04,4.113998962570007863e-03,-1.253969029384073910e-03,8.063653958269715802e-03,1.043468912342820085e-03,1.387566164005548741e-03,3.317875020094107879e-03,6.480565473912691839e-03,4.142068645802285398e-03,1.834870172330853528e-03,-1.820170169679870764e-04,-1.965411205396968772e-03,-3.157744221271711546e-03,-2.052870972079182815e-03,-4.042655233724198015e-03,6.302495624182007268e-03,5.949838198413854619e-03,-5.904637906721172010e-03,1.448654569611581384e-03,-7.528380245970622657e-03,-1.173603262804236852e-02,-3.658946144797477554e-03,-1.448298794702746958e-03,-5.995482382153693134e-03,2.670179266605224940e-04,-3.860181045319662808e-03,2.857386416288293287e-03,-2.471595438431228430e-03,-2.328936148325520469e-03,5.726821790067198291e-03,2.286133470668929809e-04,-1.062187693650078592e-03,-2.920243887780739865e-03,1.660894891507088203e-03,5.231573960955229231e-03,3.669206568888369515e-03,5.626745892017154423e-03,-1.787348805994125626e-04,2.380101627512908307e-03,-3.845191363785244090e-03,1.024030857505045695e-02,-2.146106011905866973e-03,1.553757338881853797e-03,-6.983313651268391970e-04,4.420608473318112484e-03,2.317978475808782184e-03,-6.397439539316731455e-03,-1.133050962050876059e-03,4.897780407125652651e-03,2.919070297666592935e-03,-1.160291955169498760e-03,-4.315156596982915038e-03,8.683618583192550828e-04,-2.601636567654385789e-04,-7.366967405789548971e-03,1.929292298517907536e-04,-4.552327128844025727e-04,3.601334328891636020e-03,-9.552874874349865557e-04,-3.221677654592140956e-03,3.310671730341986163e-04,7.558507028835075719e-03,-1.030558297367715950e-03,-3.378214557565033141e-04,-6.433132473318054281e-03,-2.710577657145640459e-03,-1.286838285864821615e-03,9.254228653482824904e-03,-4.495663253962009845e-03,-5.344191160650066906e-03,1.266470211680911307e-03,-3.052470896562443914e-03,4.905273747883099125e-03 
6.049073329338325138e-03,-9.588952380880075288e-03,5.072752942401375503e-03,8.486024647195280005e-04,4.808972905845919875e-03,9.089065424925171777e-03,5.589269014422415527e-03,9.639367498012313060e-03,7.295455910561977550e-04,-6.850409554453524338e-04,-2.047921088861540811e-03,-5.474030461357237308e-04,-7.033355667956553599e-03,-2.738419461790242079e-03,1.085462632043397799e-03,-9.690808118266522201e-04,1.062424460466543201e-02,-1.583049625740638397e-04,-1.500836753338540821e-03,3.138056933318263673e-04,-3.149006231757368705e-03,-2.621437634659339738e-03,6.188168825881507247e-03,5.235144376858465176e-03,4.632578536322099622e-03,-2.274891211372263248e-03,1.035810908521347853e-03,-7.996537973611614683e-03,-2.178799477259775897e-03,-2.963767383518597486e-03,-4.839512540573830297e-03,-1.123214637167298724e-03,2.286247169359344189e-03,1.462640033799411643e-04,6.347897388800287019e-03,5.987918369659200879e-03,5.619445521678626335e-03,4.192302293697279351e-03,-5.354628239319992808e-03,-2.482444229625525075e-03,1.986720701972838777e-03,6.344403480888325016e-04,-6.938571302736856999e-04,-6.752918345558708499e-03,2.031399674759948575e-03,2.333815713202035558e-03,-2.775758849361982788e-03,-3.894504693944888987e-03,5.867078632109179633e-03,-8.682239317856730101e-03,-3.403428017028703516e-05,-2.163378820550683045e-03,8.441950296457272291e-04,-2.953411097119142126e-03,-1.034090085028760431e-02,-7.002208219211852012e-03,3.511926874526464116e-03,-1.561852994161491703e-03,1.981099004588966414e-03,-6.001364443142660939e-03,-3.700328236153047762e-03,5.305543830224431975e-03,-5.087354717464341071e-03,-6.310215754587267765e-03,-9.976519359018488648e-04,-3.203136590174794542e-03,1.144221201087519636e-03,-2.872952336445928206e-03,-1.695391132431680074e-03,1.579098590316016905e-03,-4.713708294892873622e-03,-1.106925467395613975e-02,-5.317208588352308196e-03,2.379165794612493731e-04,3.140783906620191537e-03,2.947923524850455494e-03,7.706239141604740231e-03,-5.464547019952905801e-03,-4.730548591241620117e-03,4.317573625828880803e-03,6.114516922654149658e-03,1.330143603580991605e-03,1.293483591912852546e-02,5.350147306738357346e-03,1.328378440849050475e-03,8.067675491484354797e-03,3.817587137516108634e-03,4.123141186774171088e-03,-1.584194579030348332e-03,3.743691973621922180e-03,-3.805017842086839876e-03,-8.628771546722296265e-03,7.894590001011529343e-03,-7.447413208094440067e-03,-3.977601732346592429e-04,-3.890133403009132188e-03,9.567354765874003025e-04,2.389861337875724863e-03,1.017945207086260575e-06,-1.214936635953562391e-03,2.348854894314331837e-03,2.607760517632928907e-03,1.577166036662380234e-03,2.505215911366113959e-03,1.303545609383941015e-03,-4.427558581660097532e-03,-4.007373092163599507e-03,-3.860759124441638050e-03,-2.950225083174851017e-03,-3.400318449035940205e-03,5.830229332466197942e-03,3.599890842758561842e-03,2.615550223335421243e-03,1.664765780328917926e-03,-7.960250888475661410e-04,-8.119006163691438235e-04,-2.400936988222136716e-03,6.425958691593322385e-03,1.427341161194426670e-03,-6.794161159099756757e-03,-9.781122002306932156e-03,5.452957535241940643e-03,-5.293270957171129559e-04,-8.391937716162834252e-03,-2.086365657276949195e-03,-9.793147461570061996e-03,4.073367009809918983e-03,-5.446148478266520619e-03,-9.597348786653385781e-04,6.132106599038692797e-03,-4.259095838545150052e-03,-2.938079679483013816e-03,2.468350324709248436e-03,6.409688535768044704e-03,8.167673464296384619e-03,-2.893797273309871732e-03,1.819280134529782439e-04,1.189104996701630373e-03,-2.841494629549014012e-03,2.04586361
7101715409e-03,-1.237387242526318161e-02,5.547186125002498300e-05,-4.071937330827733563e-03,-6.165476316297311072e-04,-3.484172634131460673e-03,-7.688492085292403244e-03,-6.996367897288205406e-04,-3.008327776067863495e-03,-6.061651850053050017e-03,-1.321252619785418490e-02,-9.708441214656903309e-03,-7.465148052245463173e-04,6.839794167759558584e-03,2.621645198181199021e-03,1.073725991149363521e-03,-8.876204685672428292e-04,-5.152075061432968839e-03,-4.700312137543839058e-03,2.836545495703650215e-03,-6.161781741974069714e-03,-1.889920515507282923e-03,3.400794431243182609e-03,2.634893472950583775e-03,-1.157298862787716452e-03,5.298867085104176818e-03,2.703384856159308895e-03,-3.364004644628455082e-03,6.842603368806720243e-04,5.154247045321771331e-03,-2.317974264437743827e-03,-5.520925038451575569e-03,1.647885167965086259e-03,-6.546411127476730370e-04,4.511978383260932361e-03,7.811343773881750455e-03,6.283848428160501709e-03,8.396474852750733867e-03,1.425695455200945124e-03,4.927708828575797645e-03,-3.488361548027531060e-03,1.049711189533443213e-04,-3.603367615327556384e-03,-3.544178308581608875e-03,3.967330639614229451e-03,-2.101277741516994427e-03,-3.042602779361265348e-03,1.262082500070271784e-02,2.351679041291963087e-03,-4.090437468810156213e-03,2.200663466221601094e-03,2.624488287384229169e-03,2.419579456472394866e-03,-2.019548394736190211e-04,-1.421364646273882114e-03,-1.267229837181357843e-02,1.946875228473449399e-03,2.398985693494116642e-04,9.122034793409792078e-03,-9.065687781429873290e-03,4.265756214084768898e-03,-9.458005407606721177e-03,1.098933973528098485e-03,7.981085556940017284e-04,5.552233550422332230e-05,-9.919095810037310400e-04,3.767935056625738204e-04,-4.052427633338891687e-03,4.138988564890643505e-04,-6.765023518411545679e-03,4.397262742878271791e-03,6.285192491744195920e-03,1.535053153103672951e-03,4.603531710617339394e-03,-8.856784458960332770e-03,7.077986263861866688e-05,6.458969406113618597e-03,-6.808004269454262384e-04,-1.227969000398320895e-03,1.581419472338972980e-03,-7.092371493177620362e-03,9.158486779239822378e-03,-1.606761961277095095e-04,-2.861093355940915947e-03,-6.866072231542629661e-03,-1.781342870012040053e-03,1.524768063506333569e-03,-1.525309790111652999e-04,1.494069777961232323e-03,-1.051863628115068394e-02,2.475144390372634694e-03,-7.498011965829891714e-04,1.690512887265529195e-03,-5.366981761357377152e-03,-1.354280652745866280e-02,-1.036758932788998944e-02,4.172482894012959076e-03,-7.352292638285922236e-04,-1.401099482921161011e-03,4.925070768893711863e-03,4.514186218405375275e-03,6.026521050773277037e-03,4.271487578274108313e-03,3.080383384541600291e-03,2.426085804560715927e-03,1.534910903932868451e-03,1.881956549079390195e-04,5.927423663674623311e-03,-4.465126949536890773e-03,5.276712918558058851e-03,-1.922650072558761006e-03,7.736848293096470867e-03,2.137657306076606607e-03,1.147128574300606283e-03,-3.207346796885775057e-03,1.068075222363154206e-02,2.684881872294016172e-03,-8.719547235386938908e-04,-1.652260272622976779e-04,9.811307906405003151e-03,2.398335567633920907e-03,4.227180585610654890e-04,1.074578898606230737e-03,1.322272623887691062e-03,4.591545614677309747e-03,-9.707866555971771116e-03,-3.161066910826168964e-03,1.876209908655292061e-03,-2.001071519125991426e-03,3.470732834851751342e-03,4.557427031898534341e-03,-1.064509816686784113e-03,-6.337074925355764936e-03,-1.160006799564125188e-04,4.240058189019958214e-03,-7.280044786519965748e-03,7.361569479200995552e-05,-5.646109797104452606e-03,-3.939506909387421006e-03,-4.431289484428568154e-03
,-9.055660577043968262e-04,-9.067970094289584670e-03,1.766804248666392673e-03,5.731576590900473896e-03,-4.703976962127436219e-03,2.987374781976462855e-04,-4.910201123417918569e-03,-1.079700158077604236e-02,-6.143986746982308192e-03,-2.912241902345987302e-03,-4.096821903986550169e-03,-6.172792240852596410e-03,2.838768401736281154e-03,4.642102449481750812e-03,1.362517404593650665e-02,-3.735133765288238452e-03,-4.206548950051365761e-03,3.427939906348471174e-03,5.924747573260169542e-04,1.628871835903090287e-03,8.035531389975618041e-03,1.418011113673777477e-03,1.064352202275926224e-03,-1.444588602189504981e-03,8.521186302481064540e-03,5.482000548353542155e-03,5.471048840318594279e-03,3.144110343782533998e-03,-5.367278606357101314e-03,1.254886794950770771e-03,1.031042257359591173e-02,6.063554637629391640e-03,-9.390922286721789872e-03,7.413324706924259364e-04,7.715500715706213614e-04,-2.705731655347013483e-03,2.115257198930176313e-03,-3.631567193684146454e-03,1.595290269812120747e-04,2.325618911341195433e-03,-8.324936731135797532e-04,2.298396315473574640e-03,-5.834374811068131737e-03,4.257215029871420399e-03,-3.557179263323974328e-03,2.981395948310545290e-04,2.236828973272266766e-03,4.916434007048483233e-03,5.408141465070034556e-03,8.498102343783191584e-03,3.821576057223326477e-03,-2.930224659436781159e-03,-5.623965934964332385e-03,9.250005668880538143e-03,7.740812718463495855e-03,-5.543205694036430591e-03,3.016395501750203292e-03,-5.700883695172408692e-03,-2.756720263666168590e-03,6.454848587112381724e-04,-2.362067405453995331e-04,-2.756755461594157484e-03,6.098477185710414646e-03,-2.624531270603864229e-03,2.640812067301706503e-03,9.002634282186635847e-03,-5.951315443383474322e-05,-4.396993171048200859e-03,9.813783696051426100e-03,-1.060270049244722672e-02,1.933922800613020786e-03,8.015860499696473088e-03,-5.885086101414864664e-03,6.091395734143014926e-03,-8.758974536284398066e-03,3.450973798998204379e-03,-2.399610332052894666e-03,-1.825394498922420020e-03,-1.022369664300968803e-03,-6.041766228481196266e-03,4.401921339786337567e-03,-2.141723038518714584e-03,-2.897047408876572173e-04,2.716657139076724022e-03,-5.750907129588721245e-03,-6.127505198962561808e-03,5.415457459929260732e-03,4.364694491708106215e-03,-4.698775299377473853e-03,-6.822573736098533001e-03,9.579178885981826791e-03,9.282410365318597908e-04,5.636316396236863355e-03,4.245061705206980111e-03,-5.585220391051502059e-04,6.400465368603708467e-03,-1.228694872196398665e-03,3.134808776393918661e-04,4.716055568246039879e-03,-5.875881829932412190e-03,4.310121698261992589e-03,-6.896176223371554281e-03,5.179989861032895451e-03,5.565536529114586614e-03,-8.777980670651171864e-04,7.648441466924864839e-03,7.894323509281658477e-03,8.475443963531578348e-03,3.817654604380747458e-03,4.872244278443804005e-03,-5.550985679724805555e-03,5.093984700438695945e-03,4.658546142863744660e-03,1.921665727930988408e-03,-2.181991266114327067e-03,7.102422854549522886e-03,-5.073351212939527805e-04,2.071143158838609916e-03,-7.396473833286330325e-03,-7.432050429058109295e-03,-4.796835721976373812e-03 
1.782891758075273768e-03,-6.338035731925040217e-03,-2.741315717055706723e-03,-2.776233807903090694e-03,-1.861239900591186254e-03,9.783969627270921402e-04,-2.157936664894282916e-03,3.033258649679237105e-03,-1.161831811370700289e-02,-8.031508641879575236e-03,8.135459342571693935e-04,4.225748167551337045e-03,1.088566557329829869e-03,1.269192718830058995e-03,-5.345605291025864016e-04,-3.338000928294753821e-03,2.400851646050948269e-03,5.618585427090487701e-03,3.315647166568326965e-03,-8.687653512337330372e-04,1.301789466551830346e-04,-2.232778815421619916e-03,-7.331431538618544963e-03,1.536131981597183569e-03,3.385723110876311375e-03,-5.009662819774296852e-03,1.980696271047904713e-03,8.027514813421784273e-03,5.935004275535274735e-04,2.418663250136073462e-03,-8.576835830146968195e-03,-4.001318360334668589e-03,-2.177663238963123849e-03,-1.880678267202020703e-03,4.983573895158628468e-03,-2.399501774270608196e-03,-6.400006762563528566e-03,3.326410257641124711e-04,-1.187985258033202787e-03,-9.285096691070280489e-03,3.908756077672758192e-03,1.836399007176009414e-03,-2.632775945082005669e-03,3.997636776452955475e-03,-4.729672001599586652e-03,8.031435967535884707e-03,4.122777509374882547e-04,-1.040066979659093512e-03,-4.210201567669416903e-03,3.811120212656267034e-03,-3.647441958345118071e-03,1.574731212039786341e-03,-2.503303799898560274e-03,1.397039918997504491e-03,-9.745921840709286313e-03,-5.088131522867770425e-05,4.235473266562480438e-03,2.324138664231308743e-03,7.664056046341857255e-03,-1.761499075858386550e-03,-1.563698432331025664e-03,3.640815320387731039e-04,6.505359169473531183e-03,1.275490513383383852e-02,4.039751204961150761e-03,6.767024784032877639e-03,-2.786464695889514630e-03,6.058683065011049966e-03,6.419405515086007256e-03,4.969664009858106024e-03,6.481987874403772297e-03,-5.754312869209806270e-04,8.407905238014242236e-03,2.928203819981780275e-03,-2.112481284446813580e-03,2.385142861845176640e-03,-1.057470699725760431e-02,-2.097832805132341435e-03,-1.329138654680897058e-03,-1.041964583603320962e-02,-4.688857082815895902e-03,3.399540375638635914e-04,-5.548876761880009931e-03,5.954883186427255152e-04,6.004001315263274383e-04,-3.208890368495100585e-03,-4.554373428529766808e-03,1.701304563315259727e-03,-7.715983129627004486e-03,5.367067740982740982e-03,-6.909406393675058579e-03,1.261485917737901205e-03,-4.483973229753212587e-03,-2.720343892778338737e-03,7.850012681357810412e-03,-4.466261674891537636e-03,-1.833268255028046951e-03,7.549670363263408582e-04,-8.122211495615426144e-03,7.151719458307264493e-03,-6.276597368160356447e-03,-4.351776619317251037e-03,4.954221519508404149e-03,-1.518886285391237443e-03,2.146689933594183967e-03,-6.704037989352057240e-03,4.694614073692794654e-03,3.405583608520509377e-03,-4.052864543923649816e-03,-2.106966894609036203e-03,-9.500525889045308842e-04,-3.173147962368057066e-03,8.188111921221865805e-03,-5.123645360846299478e-03,-1.332661054147293812e-03,2.969317918275576201e-03,3.156901631691064192e-03,3.923436511698051750e-03,2.660022266957082476e-03,-4.216014888728243475e-03,-1.128299198245516716e-03,-3.701634872650230376e-03,-1.357989198790662301e-02,2.277930320183369026e-03,-1.228599658956069538e-03,-1.959557796483875607e-04,1.534744155113878334e-03,-5.058300536559452200e-04,4.491234736066490943e-03,5.082934530178160812e-03,-4.819510770126241364e-03,-1.634082604048774784e-03,-7.897893174054464865e-03,-1.591065224691875796e-03,2.399709836477722895e-03,6.236323243006022636e-04,-5.410426007389129537e-03,9.107581171320157723e-03,4.675783774745230839e-03,-4.587770
011659546288e-03,-4.868419289113190551e-03,-1.731916101524705104e-03,-2.826716908099744657e-03,-2.570766815219908009e-03,-2.929879923200349751e-03,-1.930123885125548111e-03,-6.781906838952830689e-03,4.514725719379724007e-03,1.760936881843363794e-03,2.236118025744983882e-03,2.096787782922738489e-03,7.633572662278297234e-03,3.958393176958556687e-04,1.310389339594476366e-03,-2.894285311964023971e-03,1.477027940363238563e-03,-7.069687312528633027e-03,2.690750308545823005e-03,7.730376972659874259e-03,2.728443863366014632e-03,3.444124768689069659e-03,-2.168460578351358217e-03,9.662898011712310869e-04,2.737127442324874867e-03,6.259131180926759061e-03,3.967333459107097796e-03,-2.037133816201825744e-03,1.589646931230765357e-03,2.410236154508442127e-03,2.747959635254860339e-03,-8.815128580552791804e-03,1.612518811815912088e-03,8.330058740618985838e-03,2.900206940377972331e-03,2.665299466882081920e-03,-1.902435374948794299e-03,-5.642022288321378067e-03,-3.942089784204872230e-03,5.678840941257716837e-04,3.141218157554526592e-03,1.515951971573967303e-03,6.078973800327623286e-03,-1.019973257039707808e-02,-1.903527735549604208e-03,1.013827833748036020e-03,-4.606642621857829321e-03,-1.384802423088097787e-04,5.477140006045004908e-03,-4.120128377359302471e-03,-1.226879479748175595e-03,3.948944851053488338e-03,-1.007546885963276663e-02,2.956424780363203341e-03,4.472621483511489068e-03,-5.437877412790522065e-03,6.356072200235059373e-03,8.611794893779410598e-03,-1.171229223231851600e-04,3.667827773014456939e-03,-2.113445603990247936e-04,1.163154198700542792e-03,-2.003014015591983010e-03,4.191479465149183618e-03,3.262395959939530608e-04,-8.133116981862724068e-03,-9.326856950375630312e-03,8.241550702099614822e-03,-3.972864423869357578e-03,9.579490911495194827e-03,2.942815985296252674e-03,-4.341481341965652309e-03,6.649862302386279518e-03,7.209728898934411601e-03,4.188075397166320102e-03,-2.450914284771736791e-03,-2.626507096825370351e-03,8.613508874203998984e-06,-2.207590335097475102e-03,5.078499492388357725e-03,1.024633724119644596e-02,6.146532387649727112e-03,9.386360543399647821e-04,-6.440809368249976767e-04,-7.630194979854616581e-03,2.019962197777900974e-03,4.706046408570350482e-03,-1.439468313496146097e-03,-1.474809288365927810e-03,7.289378587175834867e-03,-7.638372441660674222e-04,3.457798941877517594e-03,1.234609595968410049e-03,6.290478998598960855e-03,4.798307728076278873e-03,-8.413945047291565560e-03,-9.531708664190944929e-04,-3.095577089683290272e-03,-1.933852537579132695e-03,6.344745505242075333e-03,-3.095357919200641354e-03,-6.661392031830640041e-03,3.568415727914505710e-03,-3.389006175663640714e-03,2.371317469444778579e-03,1.777952948434919755e-03,4.916207911823535642e-03,-3.202973971695285017e-03,-8.634324258131032795e-05,-5.297037915849469279e-03,6.682064237712950598e-03,9.554216869300196854e-03,-9.694720728420504890e-03,1.186986996055767844e-02,-4.501845305445869964e-03,-3.450659206920557963e-03,1.513988502252709820e-04,-4.528861623943922048e-03,2.896599942638779668e-03,-1.386778742524310062e-02,-7.670205627475171629e-05,-4.032741582261318430e-03,1.448113007105692775e-03,-6.671792325871363054e-03,2.388333165427515298e-03,-2.172342979780980590e-03,-5.577560605692906924e-03,-8.417897443989635995e-04,-1.099314663926347058e-04,1.040432621940392405e-02,-1.450863385960833975e-03,6.765095561180323180e-03,-3.088649932031388868e-03,3.770408970624264822e-03,4.715688697897848884e-03,5.270535034177964949e-03,1.479635683773049661e-03,-2.490658381888547455e-03,-4.518647698783485704e-03,5.617962466039124608e-03
,-9.262473074404430550e-04,-6.381843916532669745e-04,5.358005410555473184e-03,2.171880053900806726e-03,-2.432039656825043996e-03,1.142591455389268922e-03,2.781997060247015305e-03,5.059856122659249673e-03,4.471570073680756173e-03,-3.333939431856533563e-04,4.946800604933941810e-03,5.037306462194282054e-03,-9.617368937468945625e-03,-1.159745499267235760e-02,2.749016334040780673e-03,-4.204893154635690883e-03,1.772574794238450466e-03,8.714290172800700368e-03,-5.418531622032786259e-03,2.739974443278136475e-03,1.999827404649770751e-03,-1.022889032092474254e-02,7.409110932916507068e-03,1.143329934134204237e-02,-3.182134234992892424e-03,-5.776122718042576640e-03,7.134015522142757477e-03,-3.540643445411031411e-03,2.955132909157601705e-03,-5.845186479858102517e-03,9.931933104693715930e-03,-4.413080795309017208e-03,3.668618311003963260e-03,6.941139364955740791e-03,-3.592808984652449706e-03,-9.328725961045354793e-04,5.291019996040566102e-03,8.058544088641234060e-03,5.926178531457305075e-03,4.728390457493313157e-04,-2.612495488654918447e-03,9.910017855289984367e-04,3.462647470472070575e-05,-7.581755347086874784e-03,1.886573131275811948e-03,2.153517701597477355e-03,-5.181576626283363948e-03,-6.488750174709123630e-03,-6.189958094913969658e-03,8.568384936880214103e-03,2.857885358944541737e-03,3.941123998106413372e-05,5.164591606117680846e-03,-7.908830072048390197e-03,-1.473052859933833645e-03,8.008280893684953730e-03,-3.341870424170707062e-03,-6.092319590117894340e-03,2.597048150510303677e-03,-2.891305197387705837e-03,5.401688160879983085e-05,-4.663432642514516433e-04,6.421835208024829870e-03,1.369529200168152425e-03,4.055177915427709417e-03,8.848144803031646882e-04,2.602934414356519906e-03,7.951375064962583464e-06,-5.121753882565150025e-03,-1.177907807438963736e-03,4.671082020961263119e-03,2.444615957326179076e-03,-4.222526383615349439e-03,-3.874408336699685686e-03,-6.071988055296506691e-04,-5.343071142301947588e-03,-3.947072468969652428e-03,1.619201390092305480e-03,-7.425646254454801136e-03,-1.854968421032103453e-05,3.555849773960574156e-04,-1.126958159812325652e-03,1.385416477986043098e-03,3.330921922004339555e-04,-1.389295224879456836e-02,-3.032780111801620590e-03,-1.242832552541416188e-03,-3.127058419203280430e-03,-1.266285120536492486e-03,-6.193350630591803845e-03,7.114863972017915726e-03,-1.017575711857170856e-02,5.428393826803839042e-03,7.382618249719220642e-03,1.214634610371929935e-02,-1.838861395565829452e-03,4.520767177555452300e-03,-1.370912747060152410e-03,9.929031176272621120e-03,-2.623262687906925336e-03,1.328188176005835018e-02,6.099238826251003262e-03,-1.459378481191719694e-02,-3.492100561029362876e-03,3.456497146828219069e-03,5.044631102782103083e-03,5.546485143065109326e-04,7.223950071626797625e-04,-1.766762316540154394e-03,7.820699560276814276e-04,-4.752185433620996664e-03,4.808346434942494148e-03,-3.426391492716138708e-03,5.171242452739508583e-03,-1.063235034724006531e-02,-5.772764361093040558e-03,-9.917303834769053070e-04,3.082194314335380547e-03,-4.936158821175223692e-03,5.275415600968017850e-03,-7.844309835254590149e-03 
4.343554744058412444e-03,-1.499598732788713596e-03,2.901362257875980655e-04,-1.128278268840623294e-03,1.724104918843145679e-04,6.171691164102671538e-03,-3.244630849078669870e-03,7.340338380731729201e-03,-9.407609549986366282e-03,5.688543547055525777e-03,-9.133670808938594912e-03,2.001631024770772916e-03,-1.173328684604465523e-03,2.377963465618765631e-03,2.940939245087698019e-03,4.545111583426127838e-03,3.297877713858235384e-03,1.135121430457519892e-03,2.013267114882802498e-03,-4.136664999090830866e-03,7.523997808044515347e-04,-1.260889943903704393e-03,-1.744838512728289805e-03,-1.880211083830246903e-03,-4.554436170496895496e-03,2.301566365799058694e-03,-6.520242782051528449e-03,-2.441657416618225884e-03,1.125273117401506991e-02,-4.068379987918332649e-03,4.537505681221696077e-03,-1.065667288660391123e-03,5.253525111139216962e-03,6.267561934051816685e-05,4.091751341498358480e-03,-4.749678084431786882e-04,-2.357785847186067655e-03,-4.301063197546099205e-03,-1.845591145481354986e-03,-4.360476400097981070e-03,-7.012130550054821235e-03,1.580089691303123919e-03,1.641328551542393628e-03,1.990574629126522308e-03,-1.911482973637444478e-03,1.007783699785604753e-02,-1.099448524322466786e-02,5.174440390580316122e-03,4.799243661566626046e-03,-2.593927282778837450e-03,2.829037151037455938e-03,-8.400592082554597401e-04,1.475417683887705552e-04,-7.600741290114934514e-04,-9.705336141524938207e-04,3.880187131785040565e-03,-4.310682761390917904e-03,4.518926561676345557e-03,2.436304891039606495e-03,-1.489092116386376034e-03,4.774301334056429980e-03,-7.113591814444639172e-03,6.534984039866211653e-03,3.972997116924533237e-03,-8.795871821639364430e-03,-2.876265623535666054e-03,3.438996611577340139e-03,6.330570362938524494e-03,4.175409323678437937e-03,3.285042106199850733e-03,9.742506065172082744e-03,1.633266847317130926e-02,-1.119224506340069439e-02,-2.769466393529023112e-03,9.536989239302348393e-04,-4.866289934844194448e-03,5.407670637999208857e-03,1.899421824098866392e-05,6.740388012620846143e-03,-6.758790221827788540e-03,-1.437346941591434750e-03,3.998294667575777103e-03,-6.787366708971432315e-03,2.796997522741952440e-03,-2.097675184885529863e-03,-9.974566149016398861e-03,-1.574308262581832320e-03,3.436022334853703307e-03,-6.161300943454904241e-03,-8.360595977552428074e-03,2.828826695873452174e-03,4.575848351560162040e-04,-5.817333245302892201e-04,4.594169285694985408e-03,-1.352785651474461412e-03,-8.692803075950931990e-03,2.303730117955144711e-03,-8.392406227091509484e-04,8.190212234656255030e-03,8.467279378368348236e-03,5.183735494496431511e-04,3.844472703021479826e-03,1.547169067980763962e-03,-7.226157502791916154e-03,-1.049151373584494623e-03,-3.522897876717286645e-04,-5.262986343886950046e-03,9.220012112754520867e-04,1.054269556670583367e-03,1.505128774252574271e-03,-1.027049476859358962e-02,2.712330994307882766e-03,5.264275689928175038e-03,6.422023638954848894e-03,5.281474853539832107e-04,-6.288246221181125438e-03,5.342992266781655328e-03,-4.888531119563921910e-03,-1.817899964016474494e-03,1.052434576788162726e-02,3.600173150397110045e-03,1.392371825531455365e-03,1.749561104346023096e-03,-5.057066938983854898e-03,2.931935035828095904e-04,5.020831916843208030e-03,1.136670774027453241e-03,6.753865908014696374e-04,2.358320582578623918e-03,8.298957454556787211e-03,5.922694344713069770e-03,1.350510106213920767e-03,4.690222126336174129e-03,1.691767938110365051e-03,-1.102448005220350684e-02,1.112092619224686789e-03,-4.687841594923475817e-03,-4.613898462602702630e-03,9.204309331307465765e-04,7.358699875199552383e
-03,-1.667987876884328936e-03,-1.869458498549857487e-03,4.475988029253778300e-04,-6.967497093497519202e-03,-4.678352078095381929e-03,-3.104379474458280382e-03,1.990615018792040239e-03,2.141991785470036876e-03,4.575288649436384282e-03,-4.039638970085119782e-04,3.755902804402595839e-04,-4.157998622569673321e-03,-7.161667116904083669e-03,-2.976870415729672426e-03,-5.295730190704847108e-03,-4.681417612396519129e-03,2.738313734045073455e-03,-3.219704365264445308e-04,5.672199451922456219e-03,-3.163528648683627299e-03,-5.431473263187019815e-05,4.416116971199628952e-04,3.024838886266332204e-03,-1.148784953714165891e-02,7.801798247298040749e-04,-3.166278170179040365e-03,1.059888103195818533e-02,1.786780058699834889e-03,3.821310004009380139e-03,-5.646275353043387138e-03,2.394666329787526436e-03,-5.172514317782587155e-03,7.597923471771896205e-03,2.944325632673702297e-03,4.542057559123801463e-03,-1.214197374247905949e-02,-1.591649022593463010e-03,-7.122924119119753220e-03,-8.297619640595541241e-04,-6.457764930405236964e-04,1.840685198945184002e-03,-2.755644743777737552e-03,4.695527649157546848e-03,3.480147499239017155e-03,-1.540034738290318008e-03,-5.079774068306563105e-03,-1.456972005913111271e-05,-4.267597358352905653e-04,-4.505519542688818727e-03,-6.210101228912820112e-03,-5.051715343611602738e-03,-5.709769634630040966e-04,-6.406396908671134396e-04,2.755364870817341143e-03,1.214337810235360804e-03,-2.380874533899907629e-03,2.520194859810212084e-03,-8.898199155809900437e-03,-2.785525168184954745e-03,4.715602488376069781e-03,-3.434843460678813163e-03,7.433440569317249086e-03,-8.689883555843077104e-03,2.374314906943795003e-03,-3.660988892134331690e-03,6.120469525357222912e-03,3.652346801584211008e-04,-3.496759267599383516e-03,1.991412236533235836e-03,-3.358186380563542035e-03,-1.042446911811161555e-03,-7.362819109998627876e-03,-3.002990449999483509e-03,9.039446737817901853e-03,5.574363316333982160e-03,-1.648734629904773314e-03,6.644234321710538148e-03,3.205115619899583622e-03,4.087681955634403334e-03,3.720070709747344464e-03,-1.011167153472491281e-02,6.173022658315211603e-03,-1.749778968433784998e-03,9.382863112654117555e-04,6.801972210727259306e-03,6.856648881505017111e-04,-3.224061510922743203e-03,2.265004187362769112e-03,-4.260075743074134662e-03,4.285882341195810727e-03,-1.810332981405349782e-03,-6.025123133503573064e-03,-6.097985098720760076e-03,-9.836405493085498969e-03,5.191390681096716073e-03,2.069387120405379032e-03,-7.525888434588238335e-03,-3.464592287665007762e-04,-1.424230546152348677e-03,-1.436468574675732121e-03,-7.129416343036604729e-03,2.613630637079784793e-03,1.873116611183760910e-04,-3.755507162448331639e-03,1.026394139043231490e-03,1.386355814542355146e-03,3.315371994362381180e-03,-3.794526129763069102e-03,3.700212933092658727e-03,-4.813806782312152026e-03,-7.177394378419234451e-03,-5.596197969479107907e-03,-8.982894729292308783e-03,4.628414062746127704e-03,-1.504885599278210466e-03,-1.549028029758614880e-03,6.316081834807969579e-03,6.459159293949308595e-04,-6.533106104052881224e-03,-2.762835173091407904e-03,4.500188310326121155e-03,3.009639649338119883e-03,-3.016391939613052506e-03,2.108080819456484029e-03,-6.472427005693296743e-03,4.022689087881106337e-03,1.982632018766373669e-03,2.294948225128923963e-03,-3.676004867980832251e-03,8.530349867789508225e-03,7.265194123110428094e-03,3.034795899254935452e-04,-2.898154097685815499e-04,-4.852816235898107719e-03,9.962698588820073295e-03,2.937441729464565412e-03,1.651535633587723284e-03,3.322129976866435453e-03,-6.593765901919358338e-03,4.
896746260270299668e-03,-8.378053863575188678e-03,-3.590966492150459398e-03,1.673787017655179767e-03,4.119804415166303593e-03,8.725356021188891654e-04,1.796286032631034613e-03,4.096745739378295295e-03,-7.032382177975491332e-03,7.682185221181675634e-03,3.993335235653093160e-03,5.921404379273809962e-04,-4.359993817093231992e-03,-6.190828903763674358e-04,-2.403530748037962378e-03,-5.225643769171926974e-03,-8.643481174993881749e-04,3.182629699179704771e-03,-3.494933593133260803e-03,4.097972188662975940e-03,6.016074870780053720e-05,-8.237662066607037681e-05,2.752541113172587029e-03,8.108759383215787334e-03,1.081569290099712068e-02,7.327562990081816149e-03,1.906869386851802253e-02,5.123821555550027614e-03,-1.510170151112650953e-03,1.057350246771373388e-03,2.869265543104241017e-03,1.792541696189685471e-03,2.832891249558182190e-05,4.678396043804280979e-03,3.042838789446202820e-03,4.021655891750876982e-03,1.158833887422583940e-03,-2.447160686119265443e-03,-4.868307451464608422e-03,1.013595447964661146e-02,6.158041581657758799e-03,-4.088269215673344045e-03,5.112361886549437323e-03,5.228154645435587403e-03,-3.327668470863191525e-03,4.729360210789234644e-04,-5.457168763908381952e-03,4.214736316789650244e-03,-1.628286697737075855e-03,-4.366736291250535376e-03,1.150020917985408687e-03,-3.484497399166898782e-03,-3.917774498849777194e-03,7.523581592750608685e-03,1.346097043551311087e-02,6.762191771644049301e-04,-1.166506737455640097e-02,2.378786647084642734e-03,4.422550611869777853e-03,7.254035856670333453e-03,-3.571766613560274075e-04,3.833876409259595022e-04,-9.407284600056478596e-03,-4.018318854277024878e-03,-3.849249547387683691e-03,-5.326496260100554818e-03,1.013688628096584207e-02,-3.910513538201502472e-03,-1.200058175225955474e-02,-1.001598164281058620e-03,4.563188139235748505e-04,-1.478556924427914003e-03,4.440265592095832096e-03,-4.962905554836734810e-03,8.180335123123367788e-03,-7.443372951258750989e-04,-5.829339808612128866e-03,2.830080415127355243e-03,-1.101695439290423023e-03,-3.643461831821266563e-03,-5.325551304715120740e-03,-4.863981602853979271e-03,1.148307723190875344e-03,1.430942064243438195e-03,2.166570886081892362e-04,-3.263729024995696421e-03,-4.822606631897443340e-03,-1.537293684513170444e-03,2.956681356510921978e-03,4.572678813356170768e-03,-3.640569677223787107e-04,7.204612740213901453e-03,2.091872773587609598e-03,1.505073386708268056e-03,-7.291629771738003356e-03,-5.157089607985543538e-04,-8.057143094540560394e-03,-5.383206831298384919e-03,-4.089990638915122143e-03,1.049809163458803422e-03,1.015408833383709027e-02,8.397253554586514871e-04,-2.469045649490338688e-03,2.396576448323633514e-03,1.942731559756551228e-04,6.831240701448034998e-04,-4.882360659344994053e-03,-2.694373494194541533e-03,-6.705772968182074911e-03,-4.975053128501313870e-03,2.807023641342034671e-03,7.223283897976620627e-03,-3.659630904607393855e-03,-1.509753906556823063e-04,-1.619253558090359701e-03,2.055371127884139107e-04,-1.192158837259212718e-03,1.316614527667240870e-03,-1.877024937973641172e-02,4.287315691944403560e-03,-9.581837788908543693e-03 
-1.135208136643374617e-02,6.441720094814270756e-03,-1.050621963388116958e-03,1.187028185572158029e-03,2.207225953190207425e-05,-2.880529044222365082e-03,5.888248032722954994e-03,1.050228373094112475e-02,6.830455629018550821e-05,-9.730451722626131013e-04,-4.652041318912012238e-03,-3.115673027337411641e-03,4.626883278879720569e-03,-1.503099773716958106e-04,-6.858347289838374536e-03,-6.721808514848673366e-03,3.365546382665975805e-03,5.684938768903338192e-03,-3.159859967619076549e-03,-5.389854560551795865e-04,-3.742589771621771411e-03,5.951394999470781787e-03,2.538563761271380326e-03,2.791555593631286306e-03,9.777185991756080774e-03,-4.622241363133453886e-03,1.825910359800800298e-03,2.100214868601886530e-04,-1.056698570607108667e-03,-4.425654762673235550e-03,3.970959524411719378e-03,-4.434312324127594705e-03,-1.102989086323757917e-02,5.102338797665484545e-03,-4.693171598932684152e-03,1.400671434631749462e-03,1.007755143006643442e-02,5.278973012603469656e-03,1.438451238810192420e-03,6.630889657679502492e-04,-2.573446271350215368e-03,2.003543032751293855e-03,2.505864479569177780e-03,-3.453782962951500082e-03,-6.190827474843341172e-03,3.541589668217990858e-03,2.675333461181493826e-03,-5.148487916075965287e-03,2.270593985206316705e-03,-1.216983329532627788e-04,2.597751586820930855e-04,-1.941675990627650785e-03,-1.961097522975427474e-03,5.671553088503708887e-03,5.497312942010058358e-03,2.042679259992435069e-03,-1.311879476832745767e-03,5.441343896937213265e-03,-3.206638822794962741e-03,4.235258833301271293e-03,-2.823043770509489018e-03,1.340752665376123733e-03,-7.022733952894919841e-03,-1.499002385718458297e-04,3.206174506082134102e-03,-2.970955985519033952e-03,1.415078077545359843e-03,9.731671712116074340e-04,5.720346944842772882e-03,-3.669335334464135122e-04,-1.743465013211239860e-03,-2.666570905100826804e-03,1.593375669763648578e-03,3.812197429705092497e-04,4.860874356370423026e-03,-2.932276503102517699e-03,-2.201878778992618290e-04,6.486704179446784943e-04,4.724312196979366454e-03,-2.974967830874432643e-04,7.974800707822840909e-03,2.054481296503270216e-03,9.074198019388248285e-04,2.018346013288314748e-03,-4.243236754142165439e-03,2.508239349741444787e-03,-1.597282565058545092e-03,-1.015106724500003171e-03,4.584240386963144530e-03,3.466368726045484638e-03,-1.623752121231657750e-03,-4.550968851901389563e-03,-4.929632448840390833e-03,5.089599913077237506e-04,-4.530076630598786798e-03,1.106674727423329063e-02,5.548884232301759881e-03,-6.898449124727351785e-03,-8.167174583914544456e-03,-3.957006765928036164e-03,6.305089399596251754e-03,-2.448224890976529681e-03,3.007128253579351065e-03,-3.380420176870295185e-03,1.358456556143418934e-03,8.135055445751578204e-04,2.131324537407810506e-03,6.459456150653795567e-03,6.921937446526518420e-03,-2.451970584452841957e-03,-2.754308688612762315e-03,4.720724817403276069e-03,-1.142453809145111679e-03,3.570502296702230630e-03,9.955991490718670532e-04,-6.318337723520235302e-03,4.058949563509252345e-03,-7.804606974289381334e-03,3.531801778433160156e-03,-2.971454685193186587e-03,4.203542261520242933e-05,1.437005373911297854e-03,1.023886985220694677e-02,1.501103726387531989e-04,6.617654820498055507e-03,1.316667648147336030e-03,-6.020837933852767054e-03,4.533403923115556303e-03,-4.490926782330958815e-03,4.226397879759175892e-03,-1.927534502927539953e-03,-6.941406072935667713e-03,-8.197783240682234712e-03,-4.002132752003562111e-03,6.878208210410113183e-03,5.919405287862114333e-03,-1.260805818361631150e-02,6.407868690597935332e-03,1.420421715321419339e-03,-4.615240728445571
712e-03,6.678069805237558075e-03,2.160034951851938327e-04,-3.399170205614514099e-03,8.978616400427588684e-03,-5.193172890758632892e-03,2.219860531974203986e-03,5.716576479723532722e-03,-9.767754939581146847e-04,2.664030316677130656e-03,-4.634107546089084699e-03,-1.580424477326057130e-03,1.513966856054108296e-03,-4.128533165142423376e-03,1.294464979654263471e-03,-1.129297862562118719e-02,1.353473025590570464e-03,-6.001470306137460936e-03,-4.846671336998122429e-03,3.541299188699339063e-03,-6.485470455747566546e-03,7.126209782969309499e-04,-6.493449581267158634e-03,2.601123311438312095e-03,-3.872474517681049819e-03,-2.252047914871192335e-03,-3.673899583527694915e-03,-5.128776743733782714e-03,-1.270342624065488456e-04,-3.043433541546507626e-03,-5.486023813268296918e-03,1.260518748086386209e-03,7.929033208670524318e-05,1.473805335063023674e-03,-1.456886161797939531e-03,3.865658349514126089e-03,3.151600834526408108e-03,1.542334376110108575e-03,-1.886396917177691904e-03,-2.460768862520521915e-03,-8.608065821973042396e-04,-3.201773598995291074e-03,2.187367341913040372e-03,3.239113271648279128e-04,-6.924421913406040956e-04,3.188023035982671168e-03,2.126018199382282304e-03,9.452599792520884319e-04,-4.306888796636228374e-03,-2.667476260522922901e-03,6.599021643811734880e-03,4.271701823826259098e-06,5.378317331921046718e-03,4.707333467259192453e-03,-3.673993931687774203e-03,-5.022338107533495452e-03,-1.476561224875224060e-03,2.859450948756776193e-03,6.682561744027636433e-03,2.120718133607108619e-03,-9.144381423146856133e-03,-4.944129501091305834e-03,-5.538287347466102142e-03,4.741901449602225864e-03,4.307277424694740293e-03,1.847125155154783144e-03,-4.133308718534932542e-03,-1.663991003068936245e-03,-2.238422156988712722e-03,-3.119749975676963053e-03,7.466103542625303173e-04,2.902597659789639840e-03,8.104040444822312519e-03,-6.442958608733453767e-03,1.231029896591214791e-02,-2.066348082450916290e-03,4.495701316235767991e-03,-1.321998610940806635e-03,1.771221544516171642e-03,-3.140784269067841555e-03,-5.898911743695332485e-03,-5.648876873037037352e-03,6.890498543086463496e-04,-7.959251214487848142e-03,1.328051599509617962e-03,-2.438246985480387215e-03,4.544147380435669195e-03,6.691765233621188927e-04,9.382844607963413547e-04,-2.638523828727340481e-03,1.209318404375564308e-03,3.441898947760021598e-03,-5.768821304045653603e-03,8.247072903400322688e-03,5.920993204973475955e-03,-2.867658116928573520e-03,6.270012110028850769e-03,-3.138602465676071374e-03,1.163451041915361615e-03,-4.583225766717679596e-03,1.160386947528200229e-03,-2.583005739585578740e-03,4.429631823116485408e-03,-4.038545619688936605e-03,-7.146582257555230029e-03,-8.442390287043620781e-03,-9.377711104551261691e-04,-8.455980453157095492e-04,-9.761638964247911117e-05,-4.217969652740159898e-03,-5.260622676886900217e-03,-2.599872835080395926e-03,2.064587467956074170e-03,1.001863027493498336e-03,4.426089041985121347e-03,-7.803538872837150267e-03,1.908373660876747091e-03,3.678878197441379661e-03,-1.527661332416028767e-03,2.612127112214341230e-03,9.938263305434586609e-04,1.709002822044723788e-03,-1.279934021916340854e-03,-7.788976362625131555e-05,6.886696892987122275e-04,-5.853763103924099036e-03,5.373873482055455163e-04,1.118938563648501448e-03,1.103305801419894160e-03,-6.216221449761828114e-04,-4.047979219442545892e-03,4.630835276027181827e-03,1.156270723390828399e-03,-9.272377352216575400e-03,-2.876890462449717845e-03,-9.241239406516537497e-03,-2.025110746850408445e-03,-1.597954546655595094e-03,-5.521878355762396134e-03,1.719697434453786597e-03,
5.777139509243760587e-03,8.022351235088449189e-03,1.412100512623308936e-03,2.012481499891450275e-03,-7.289357662730193204e-04,-3.478173894411260535e-03,-1.942285768307819033e-03,3.687521695756959018e-03,-2.675037104153201589e-03,1.756079219997614110e-03,-9.107765031103529779e-03,4.602643821505161652e-03,-8.687498988863001995e-03,-7.943859171708658740e-03,1.757173520212701099e-04,1.688234512118946904e-03,-3.010116754994450819e-03,1.013398696867913625e-02,-1.114310512857658898e-02,-5.571611036167362259e-03,-1.186029087331937158e-04,1.026519247282230108e-03,3.167463813370764540e-03,-5.363928228943813138e-03,-1.301452586002345771e-02,-3.372662837194901307e-03,3.353525129151743874e-04,3.299271169453281156e-03,2.370126262252266955e-03,-2.058621544757941509e-03,-7.046452654131669761e-03,-3.028535603689205455e-04,-2.198755269769154651e-03,-4.631100184704864224e-03,7.929782799270318136e-03,1.608219173386577806e-03,-9.926301665465174506e-03,8.135443643774466910e-03,4.198428547387155277e-03,-8.675358794513582730e-03,6.618657227819025810e-03,-3.620065520260349668e-04,2.135143982462727862e-03,6.845984562128591341e-03,1.370552998315322261e-03,-5.093234994210290786e-03,5.020538594833649955e-03,-1.357029233107131302e-03,-1.607764955233780338e-03,-3.304564758283118873e-03,-3.100133066985126355e-03,5.456937508994158699e-03,-2.528497007412701305e-03,-5.866138084558369763e-03,5.725929018934157672e-04,3.021429497328182064e-03,-8.686374664798881054e-03,5.356680824393459696e-03,3.147857728202007807e-03,3.573047685008957745e-03,-1.812259824369411151e-03,2.512669949490750707e-03,2.607055287141168097e-03,-4.201662262741702632e-03,2.501206923685838062e-03,-4.506691172732939232e-03,-5.595400647536147472e-03,1.260830183413923569e-02,9.398325845306511328e-03,-5.993945585030109298e-04,-2.630874276766095778e-03,4.806784102586378338e-03,5.258058969449256248e-04,-1.285145556427192640e-03,-1.261313089153946396e-03,4.111426639869196313e-03,1.326285488928846809e-03,-9.677369862316720318e-04,3.181056745319325169e-03,8.219508696420334964e-03,3.834727953257582259e-03,-4.141492142431966485e-03,3.314659409509123841e-03,8.111733604779266041e-03,4.560212556327890993e-03,-5.946665495416229158e-03,8.538377686274271992e-04,1.218669036695390565e-03,1.222346971546603607e-03,-3.753651838172629603e-03,2.287519740646861127e-03,-2.735166001410882942e-03,-1.154342018223720534e-03,1.416174657972456546e-03,2.917970853334773160e-04,-2.536996772781539952e-04,-5.516196967663914395e-03,-4.501430812631713389e-04,-2.021397321949336846e-03,1.521820892876322691e-03,-6.747921516796896550e-03,-1.796205493471512288e-03,-6.813309961005104519e-03,-1.078596821237741031e-03,-7.501958728496698977e-05,-3.544610429723353052e-04,5.308588223142685254e-03,-4.441489380812404901e-03,-2.198437307373341757e-03,6.978954322407819946e-03,-3.982415282077204259e-04,1.012864768240228416e-03,9.723551769560115268e-04,3.899362991297180593e-03,3.138269242750541411e-03,5.172627198227196764e-03,-7.265504955626491020e-03,-8.864710961920289553e-04,-4.796071663304575169e-03,7.225278626310030223e-04,-3.099834078122016331e-03 
-1.208023590045637042e-03,-1.302137813701323421e-02,-9.265166764214013712e-04,2.358758781702049740e-03,1.009412995531660390e-02,7.315776408060322908e-03,-3.629945646909442120e-03,5.969259992416207569e-03,-1.033001984919850727e-03,-8.632122717313835819e-03,-1.404987198540379135e-03,-5.006044763821755270e-03,-6.002118810891957611e-03,4.383221034813624718e-03,-3.979488240610026470e-03,-3.254520921371071348e-04,1.402070527989635192e-04,7.876741127113134075e-05,-4.695074265784893695e-03,-3.581543475808383111e-03,6.151591522308394976e-03,6.565617984898321473e-04,-2.391601579942019464e-03,-2.251218654810107755e-03,-6.257866566953757329e-03,-8.920190890235464923e-03,-4.134488790451015805e-03,3.285032700437582297e-03,3.140884344631794060e-03,3.416933537124582261e-03,1.529614484580176653e-03,-2.393220092915609391e-03,1.927952891344129720e-03,-7.486510437398131643e-03,5.894753758123839719e-03,-1.625529409624605626e-03,5.006060932963370298e-03,-5.657935885112202069e-04,1.317387658894722795e-03,-5.616900601412780011e-03,1.018737957875510101e-02,-4.813049213906745415e-03,-8.741419042398928391e-03,3.281210732727692005e-03,8.702450172636034403e-03,6.307582409642134395e-03,-3.008485442855794874e-04,-1.008766809013234792e-03,2.544580391671778189e-03,-9.912819533962983001e-03,-5.890523936785020155e-03,-4.403380565190938463e-03,-1.258335671088357290e-02,-6.540650384327246526e-03,-2.788768916745496419e-03,1.069144058905184373e-02,-3.969933698296145767e-04,3.301431605372546325e-04,3.510121218495052507e-03,5.946450414977164870e-04,1.006777202105696087e-02,6.632350877319926306e-03,-7.696828793224090755e-03,-3.095998431621287302e-03,1.986989127169959662e-03,-9.453157487884334504e-04,5.668684890618834113e-03,3.114855588922016329e-04,-2.355692199950249442e-03,5.828575795266184188e-04,-8.249224217366383019e-03,-1.264231706669845072e-02,4.450890582692906827e-03,-6.460187663130400465e-03,3.484250144882066347e-03,-5.352368241451188179e-04,3.548599573626368565e-03,1.120367286351238792e-02,2.026516811044832764e-03,-1.774480635672613226e-03,-2.571832449139532448e-03,-1.122808506803956970e-03,1.218272318405000561e-03,-4.681015052472502214e-03,-3.502855231288655360e-03,-1.650150292903690419e-03,-3.011818376817105170e-03,-1.451801509654985275e-03,-7.503387993010899454e-03,5.348917895522282973e-03,4.714089425386897014e-03,-9.404486136322862816e-03,5.506600441798311860e-03,-4.944365614204858893e-03,4.569105152053594930e-03,-1.332945930480596529e-03,-3.449727454617753254e-03,-5.930678158686556942e-03,-8.747290641821126207e-04,4.686529964074859049e-03,-4.193242433268805185e-03,6.225372413976656624e-03,2.972421164173112869e-03,-8.452345612507223757e-03,-1.110978860129096949e-03,-3.301071108997537164e-03,-4.850736068241877884e-03,-4.037863513453264323e-03,5.009043833549124171e-03,-3.438770945251778607e-03,5.267923338609556069e-03,-2.772102892584243309e-03,8.840229430486609696e-03,-1.269587175832887068e-03,-4.131988242333521806e-03,1.307297986041802002e-02,-6.007682238891270780e-03,9.827046784213094507e-03,1.162701733087958406e-02,2.024116742269716461e-03,8.086304790552566873e-03,-7.526238974280754362e-03,8.033825959155428773e-04,-3.226003399177498204e-03,2.658451624029086694e-03,1.160221772161891627e-03,2.135325502939484747e-03,6.174185541399145570e-03,4.058064381844708746e-03,4.159443618523926169e-03,4.927821245911443750e-03,7.981509938275687527e-03,1.863973509952860427e-03,-2.269833596915164613e-03,8.596780308657077374e-03,1.116460296759312285e-02,2.539737514153445758e-03,1.097452807154108371e-03,2.745051903641305534e-03,1.5889440
50382768173e-03,-1.107760282722629677e-02,-5.097675295323973906e-03,2.210668378440870056e-03,-2.709808107706960720e-03,3.505764315933915937e-03,4.814287194590186406e-03,1.263752814298923577e-03,-2.272047080151226063e-03,-1.964914051022208490e-03,-4.855526173013364685e-03,-9.777417865928740223e-03,1.458822491315764358e-03,2.590787150428823255e-03,4.198663934065279164e-03,-1.163826019991280949e-03,-5.944678949936654626e-03,3.947914702309401024e-03,1.060548042031357087e-02,5.008587372193043855e-03,-9.123870780557390278e-04,-3.737710340885555017e-03,-6.186652858147627127e-04,-4.047982456824908079e-03,3.936655380182185054e-03,2.721106814299627773e-03,-3.158858100475555025e-03,-3.726735464609484249e-03,-1.098960194172719437e-02,-3.359464981708220298e-03,7.367503188291200412e-03,-7.914561467584605228e-03,9.803892588650402543e-03,-1.945649529826442518e-03,6.074225397765309342e-03,-7.832797440555688134e-03,5.119597493372404599e-03,-5.712635160976948547e-03,-3.346475100575006670e-04,2.946391428488293485e-03,2.904906279719959936e-03,-3.641433299156962566e-03,-3.491691779030925598e-03,-7.072051390531660384e-04,-4.997418392795978902e-03,-6.733912147589961403e-04,-1.640678888081679391e-03,2.492102233960508898e-03,6.626793161842663149e-03,3.001815863937646375e-03,-1.124815333656680339e-04,2.145400723303931504e-03,-1.188492614867812068e-02,-4.906541526410366193e-03,-5.059951467032051953e-03,1.245312392226684273e-02,-3.991146299073694732e-03,-1.184600358041180534e-03,-1.076563654778068136e-03,3.145993207426246024e-03,-3.452501872330618945e-03,5.417841913393814206e-03,6.071088282231861671e-03,2.625194094803630189e-03,-7.574494879113530211e-04,8.781859009445584438e-03,1.711599465957737518e-03,6.825963701923371853e-03,-2.899736030081302880e-03,-4.677876477493347028e-03,7.907455288765002227e-03,-1.848663759010191622e-04,-1.657229780588618780e-03,2.725857423615499816e-03,-2.013450741696547024e-03,8.622145910350566961e-03,3.163854338532794477e-03,-6.576038435154139480e-03,-9.826566429279162684e-03,-3.376481155716844988e-03,-1.474327497416047473e-03,-1.024893067754947080e-03,3.292819244212944027e-04,1.969283475367857982e-03,-6.007756586013596536e-04,4.752768783851893213e-03,-2.020271742834024026e-03,3.630296612954957928e-03,-2.278783429007261510e-03,-5.091375504038446811e-03,-7.430109985414418096e-04,2.900808877377071982e-03,-1.445514574775571496e-03,7.494334335109409904e-04,3.151068406351126717e-03,-7.586305703101692537e-04,-4.578002813086612215e-03,4.687303320046606707e-03,4.787017119001593483e-03,-2.075566231138413140e-03,9.076930652412453469e-04,1.116425480578123706e-02,-7.240902847854759940e-03,-5.167799585142213159e-03,2.240038156236423723e-03,5.185986193379994377e-03,2.183566031253605239e-03,-7.029561458862705788e-03,-4.191928592904777505e-03,-1.279120847435994443e-04,-6.879706584416687976e-03,-6.311631849755689480e-04,-2.509348854724600237e-04,-6.260464313030190183e-04,-2.519177977373696781e-03,-4.653913517380489986e-03,2.603763844053588999e-03,-1.846317780593803885e-03,-3.257774767678427077e-03,5.152660058048991708e-03,-4.440679913415067868e-03,-5.907319057994465418e-03,-2.247921070094555107e-03,5.426606857524391390e-04,-7.871846833027466503e-03,-2.777207440456388662e-03,4.197516272951563356e-03,1.464119064433450600e-03,-2.886845669303190857e-03,1.217789528521273992e-03,6.402440305969494985e-05,-4.509905448088761225e-03,-4.375160161200840336e-03,-1.536840935204140078e-02,-2.261200235102707370e-03,-1.055045012784896954e-03,-3.579888197272179749e-03,6.988590804661542729e-04,5.748701648897320583e-03,9.135951
122109617784e-04,3.414755048350523025e-03,-3.666260107196860074e-03,-4.667183815763529739e-03,-3.684877761267999624e-03,5.385072364467315131e-03,-1.232788683487836753e-02,3.395769883394742969e-04,-5.974061196657669444e-03,1.650362523390701275e-03,6.869717341352712600e-03,-8.182735228168259685e-04,-2.240813111103091924e-03,-1.935568791949564662e-03,5.476183449324362358e-03,1.897859957657675603e-03,2.708408767688979374e-04,8.672939223552065049e-03,5.421616870511427461e-03,-5.339939144433770643e-03,5.077028795453361956e-03,-1.453045804514836617e-04,2.117859712749907853e-03,8.125559275467921991e-03,-3.533808698345037008e-03,1.256880429558774812e-02,1.134795651065857117e-03,-6.940187185480094180e-05,-9.197760762295266110e-03,4.124890674622324997e-03,4.407393025183088155e-03,-2.529839729534274443e-03,-1.838654293173868325e-03,-7.313126618916277258e-03,3.810016345173335931e-03,7.877504233811427267e-03,6.657798626727252055e-03,1.036067902327576589e-04,8.996350530996082354e-03,4.033433627900819343e-03,1.464891856765335176e-03,-5.522638029608105750e-03,-6.728607033959784801e-03,1.818390457781287077e-03,6.518857443822055793e-03,-1.044274430503243497e-03,-4.857717057781713878e-04,-1.772955034125067025e-03,6.523082283897282778e-03,-2.913413979351893966e-03,7.822521371958966697e-04,4.677469708424750465e-04,-2.980318464519775539e-03,9.792728479870436466e-04,5.432069559276803995e-03,-5.042654422683233613e-04,1.705746107492809097e-03,5.192110938299502113e-03,4.245053532570883595e-03,-4.012399711945549650e-04,7.135803420538698290e-04,-8.535133311054113325e-03,-6.054125251909137258e-03,2.721898129238648368e-03,-6.763288176783957377e-03,-8.228121189257345605e-03,-5.468831720604490744e-05,-3.840768196710076660e-03,-3.425600574188536560e-03,8.783829677403527311e-03,1.216152988746173509e-02,-4.396164623727427277e-04,-1.601275555927575769e-02,1.113701704501632306e-03,-4.003801934818822586e-03,2.067577053758342526e-03,-2.837793074049185967e-03,2.280987874014008248e-03,1.548307980003137737e-03,1.442613335177745787e-05,-6.869820823831649173e-03,3.530017253580199250e-03,4.699112708918283532e-03,3.697403678943494455e-04,-2.381441083005257382e-03,7.297983882401205998e-03,6.693087591210859473e-04,6.640433479018757225e-03,-7.499337147440683811e-03,-1.577159745847971127e-03,-3.447963633394535675e-03,5.848663294112869081e-03,-5.054783564268526194e-03,9.294481132306798329e-05,-7.188145125612045890e-03,-1.565277956120602731e-03,-7.327577327063531688e-03,4.958089908030196766e-03,1.241890922319554712e-04,-4.322895033581994166e-03,5.982205729082620653e-03,1.868401468546959480e-03,5.921442360906268264e-03,7.283306600063830030e-03,4.950410697739198590e-03,4.524235455442528133e-04,2.423017610197148418e-03,-4.089424092770676915e-03,-5.285251481380129693e-03,-3.261457474068154357e-04,-8.040156293408526984e-04,2.389398681368309907e-03,4.475234287939901874e-04,5.630121989490788868e-04,-1.606238188370543406e-03,-8.537887897292430542e-03,6.778046386693849061e-03,2.080979918288379960e-03,-1.594771549532966043e-03,-9.691921042753998518e-04,-8.356863642207879722e-03,-7.002360339151807340e-03 
-5.501139992128824784e-03,8.478460533651886732e-03,9.438129788444561555e-04,-6.241221851198144877e-03,-7.651032577701046174e-03,8.051972161381893214e-03,8.315511753812376491e-03,-3.489487968935652516e-03,-4.777219304144982720e-03,-5.248315725200308583e-03,-1.693571711491111088e-03,1.158772278648890824e-03,-5.862611473927844229e-04,3.127137170355851706e-03,4.379576539433687431e-03,4.000153422230328173e-03,1.448349185707603626e-03,1.012195141194710719e-02,-4.608405840209638289e-03,-6.557779563870420533e-03,-4.174488845260085519e-03,-6.586070500889219283e-03,3.496506725219844167e-03,-4.534483257718702025e-03,9.653147002433450736e-04,5.454309628199914030e-03,5.553860495502402871e-03,-1.305498445629398735e-02,-3.105937350997975063e-03,7.410020418160878555e-04,-6.957228560326871036e-03,-6.509646864839692057e-03,-5.030189419920770913e-03,7.213285004594151501e-04,-1.011094603396879045e-03,4.420337171385307733e-03,3.602467092173104908e-03,1.511626329300954632e-03,-2.112860969443337931e-03,1.816541393655919786e-03,-3.657583911504494988e-03,-6.968934263541554010e-04,-4.133269906291272243e-03,2.842537493711105433e-03,-6.455880516984361349e-03,7.674343128142714458e-03,-1.306581551651724536e-03,1.166192751596435402e-03,5.534702559662171492e-03,-5.491236361163632021e-03,2.977676300279050170e-03,1.980690782418913307e-03,5.164053544759949869e-03,-1.280239675304959031e-02,-3.233461780691696159e-03,2.310047189413442364e-03,6.201957010914374568e-03,-1.473059440028593944e-03,5.278015455991994648e-03,-1.031101269356723971e-03,1.959230854820385656e-03,7.136575994377054475e-04,-7.136310450832570804e-03,-1.330477864190632298e-03,-2.525067252789207298e-03,-9.865901704649280603e-04,2.029837772309262976e-04,4.997318865909337882e-03,-4.417724828879207068e-03,9.005022840391601474e-03,1.859426654516546174e-03,8.452302694491696869e-03,-9.023949779254291867e-05,-1.752449267233171463e-03,-2.639080699519101743e-03,4.948214378512297718e-04,-2.444510898141706839e-03,2.639983365285044226e-03,-1.993169998856655304e-03,2.938346323242960710e-03,-6.090403132584385620e-03,3.064994820571330207e-03,-8.421757346576871570e-03,3.202556213475834732e-03,8.659789084723278074e-03,-6.176812315675049743e-03,-7.309131156576522542e-03,-4.657859635308120652e-03,-2.064962589850823614e-03,-7.795356969714482137e-03,-6.977731042397874311e-03,5.234800536704280367e-03,-3.178163526577064846e-03,4.852768278148373617e-03,-2.447393815207190996e-04,8.080841296287482461e-03,-2.745246801125095313e-03,2.078053320957519521e-03,1.678440201062921531e-03,-4.529549402928298267e-03,-1.024371150893628284e-02,-4.515579869545544543e-03,9.930145607167813453e-04,1.117885789004868704e-03,8.054150334193588973e-03,1.055010407256367841e-03,-4.165273017198195711e-03,7.150012248013223534e-03,-3.333503853155554697e-03,2.558642533372505528e-04,-4.007669362172688703e-03,-3.072012918268693989e-03,3.302842736418370572e-03,-4.696376905336602930e-03,-1.394661808583935389e-02,1.311367816866104187e-03,-2.828088044338192709e-03,3.079917640286157152e-03,1.827084472625795791e-03,2.683285707244999641e-03,3.859471453192889635e-03,-1.743769243917038742e-03,-1.112918602428975912e-03,8.645605091610747417e-03,7.579226024988830988e-04,1.821686194194570242e-03,-3.478750061845593822e-03,2.428442311683186045e-03,-4.813673185891824587e-03,-1.631397014497943786e-03,-6.521019124148513813e-03,2.919568720709022074e-04,6.352913190476325647e-03,-2.933499718619741840e-04,-1.124501503061225115e-02,-9.915795582528317935e-04,-2.973401402417303555e-03,-7.668471464477401908e-04,4.450387634252757178e-03,-1.54002
9170510012410e-03,-8.973475454397835185e-03,-5.610882077568097107e-03,-9.281574004577188355e-03,8.965643959534154309e-04,-5.225570524612157229e-03,-1.158140725104509980e-02,4.096892626044359130e-03,-5.269982840794032149e-04,-2.549628048873074388e-03,4.594011028585514439e-04,8.923213744240265704e-04,6.219188283354307525e-03,1.270766026862616901e-02,-4.027958463876997464e-03,6.249444128694660447e-04,-2.879739789130170689e-04,9.098921766854107390e-03,-5.546613670720429434e-03,-6.462889276498197692e-04,3.809910488361887941e-03,6.383190009051698151e-03,4.870388962205493902e-03,-6.417298120174079121e-03,8.097915313429264114e-03,3.574508014982845372e-03,8.525009551133564192e-03,1.221818021434171444e-03,-2.808491505232458933e-04,-5.064436342266276370e-03,1.245072757846870347e-02,4.725467389232401207e-03,-4.261655167918701990e-03,-7.163505944739193693e-03,5.482761422952931417e-03,7.284006816045961695e-03,-1.410271632192280564e-03,-7.654620921786195985e-03,-3.622263436802081182e-03,-6.166599182817858980e-03,-8.180419830538817813e-03,7.109681156803326040e-03,-1.475877156008857964e-03,-1.918843704374083086e-03,5.873233240303051168e-03,-7.573571822125883942e-04,1.117478713853909365e-02,4.772329514623564750e-03,-8.344486723405306061e-03,-1.138975238259547912e-03,8.791881732808998900e-03,8.497585718930252344e-03,6.499346782822977911e-03,-1.372648404152883870e-03,-4.998428975684764825e-04,-1.444615091341587510e-03,6.386121498564224438e-03,-1.799121105741002206e-03,3.515135909503138562e-03,-2.987384839766999908e-03,4.694909955233021737e-03,3.649599454659649671e-03,-1.851306468968348358e-03,-3.500763823016424920e-04,1.005332987545354967e-02,-1.519273678096060010e-03,7.657138886232243806e-04,4.727494027836974533e-03,1.103076413683700546e-02,-6.493779513284065295e-03,1.021828138534018257e-02,-7.536725718013514291e-03,1.344465060876516918e-03,1.473918936854318792e-03,4.589000666856002907e-03,-4.512499056578566162e-03,5.151766244215185732e-03,2.007674836568432673e-03,-4.538470323673914238e-03,-5.991269503044479791e-03,4.668291589646515402e-03,6.099455934788253035e-03,-8.450985748898646273e-04,-2.798443736797568988e-03,-6.937340861174046310e-03,1.824335149360677534e-03,3.009002819289305439e-03,-1.717650190856666662e-03,6.754233743884717825e-03,-4.047968132849635089e-06,-1.316435200536047101e-03,-6.132868242764972708e-03,4.615021944359177196e-03,8.579461412769405059e-05,1.789106729551550624e-03,-1.109324605175397715e-03,4.291634642385483249e-03,-2.102106468920833873e-03,-3.181036925356801968e-04,-7.867505973664258925e-03,-3.699455865099795116e-03,-5.551437266326750795e-03,7.103265749093083670e-03,1.019582481833216162e-02,-7.574920859430775422e-03,6.948142386516201457e-04,-1.483735785444058883e-03,-2.542153992415244722e-03,-4.339095792041596650e-03,2.933244554763103293e-03,3.174025430034049192e-03,1.498265855496634831e-02,4.326475794337618884e-03,1.166458518708929186e-02,-3.330235670886175464e-04,-5.427820410020192145e-03,1.162373593796874396e-02,1.385742418013111241e-03,1.189528233981862774e-03,1.849422482782597191e-03,2.617748290624483283e-03,6.716327483493177153e-03,-9.227738348646086913e-03,-8.196450844299884436e-03,-2.140585755011297376e-03,5.256711694216188724e-03,-2.599115520950480603e-03,-6.135177467678331296e-03,5.059947110467591207e-03,-2.593414928924414435e-04,-6.415663577130895426e-03,2.873253048188469257e-04,-4.480150246015355241e-03,1.001533820369726602e-02,-1.060848872305956151e-04,-6.172043973059463838e-04,-1.349457810544666412e-03,2.874902174102457204e-03,-2.525511842113733595e-03,-6.98156487409062
8869e-03,3.244026110763395965e-03,7.501750575943829692e-03,-1.137688446947289836e-02,-1.000799173362240495e-02,-2.284131542409372634e-03,-5.799896975729214137e-03,2.293440433073817581e-03,-2.608524483388629557e-03,8.123833049417786604e-05,-7.404097688725546321e-03,6.922938561481684072e-03,4.303545707531369197e-03,3.388026553701360515e-03,2.723766736644396384e-03,1.824367521074652092e-03,-3.113868885802412102e-04,-5.745165195785827648e-03,9.902499515753789006e-03,2.724317449035830625e-03,-7.007944078447064112e-05,8.097918873811327939e-03,7.669482247060713069e-03,8.208319727210032155e-03,-1.055832196968956157e-03,5.595029057636904425e-03,-3.451215037849781567e-03,-1.883836615192935934e-03,2.492328371913537979e-03,-2.692858776287906583e-03,-2.005569627013532689e-03,-5.048546776628895635e-04,-4.894715408280842298e-03,7.616872017955328422e-03,1.207477290461853210e-02,-5.991934120165504692e-04,8.418141904318981322e-03,-3.448219955522201490e-03,4.366948983896059948e-03,-6.825186137509639539e-03,-7.698614017252525255e-03,3.517221812512829650e-03,1.991646324240719038e-03,7.700995778997576924e-03,-1.022260811012632060e-02,-5.638442372017758849e-04,3.824175307360175476e-03,8.946486126710860576e-03,-5.136919160157171586e-03,6.566757944784269371e-03,8.806505338656596751e-03,4.305734923972923643e-03,1.320603208177613440e-03,-3.021202280198355145e-03,4.899979124094508594e-03,-1.172205496169527288e-02,1.710889995754989691e-03,3.599701049825965258e-03,-3.242188752973201166e-03,1.021015485112556839e-02,9.233199739198329377e-04,-1.390525863958930533e-03,-7.729996566680641260e-04,1.154723215307769738e-03,5.257717737418636213e-04,3.598551834305320322e-03,6.327906572469113458e-04,-6.858306502176435195e-04,3.043334831730436230e-03,2.041373953564998311e-04,5.787085591943268680e-03,-1.006654893483478665e-03,-3.120072549434834211e-03,-1.204402957543516299e-03,1.814553180302226057e-03,-8.727359504672139653e-04,7.506342375685513997e-03,8.003257401623594140e-04,5.772880444560791156e-03,3.822628793153451669e-04,1.711871753123533598e-03,-7.236408209819531270e-03,1.330770996783283734e-03,-9.816647705067566357e-03,-1.460291587536600164e-03,1.075953530396053619e-03,1.381162433713925249e-03,-3.835485350806414756e-03,-1.122637951571585748e-03,-4.515155438237093070e-04,-6.637457259027850637e-03,-3.615376058146413038e-03,-1.351927308928411661e-03,-1.933891144933496493e-03,-9.300370670807066656e-03,1.069633397188887204e-02,1.050682955668628613e-02,7.078990009355312814e-04,-5.388636000366804277e-03,-1.265658131684813635e-02,6.605445483727098027e-03,-4.973684111882746041e-03,3.064132367933234020e-03,-1.440217824898561497e-03,6.214723210427085601e-03,-7.006730775743740154e-03,1.350035597370516632e-03,-2.636507707777582810e-03,7.248267395846238678e-03,-2.111506240917253568e-03,4.651042608817602432e-03,1.297692124444052315e-04,4.718187575447356421e-04,-1.570532109430464174e-03,1.076214119170246404e-02,2.251969256652693388e-03,2.478374674423639533e-03,4.231841321207527665e-03,3.301891558061829761e-03,-5.289424553532242647e-03,-7.949817797576131523e-03,1.235024796615570098e-03 
-6.509092262832248330e-03,8.790209420440127808e-04,2.947720199976806843e-03,-4.366726930491095227e-03,4.966461264716465034e-03,-1.815207231738592613e-03,-3.756124487138986601e-04,5.552161610580394306e-03,-5.527763328099747665e-03,-8.498340679308708304e-04,1.289377711612376635e-03,-3.893686282711868608e-03,9.965249464864598880e-03,-2.607779679258155538e-03,-2.329353237189561059e-04,1.305280024936529646e-03,-3.435490782245174263e-03,5.578638830344919038e-03,4.070740361300609865e-03,-1.300652580552090487e-02,-4.769297149902888312e-03,-1.799225488789775798e-03,-7.702937727915217664e-03,4.223808316086250826e-03,-3.376099913757508921e-03,4.438407960263085310e-03,5.655329203568493397e-03,7.051370413897879892e-03,4.898553598612737439e-03,-9.003069670521234068e-04,2.692475297805958768e-04,5.259846898470834503e-03,-8.612500016966560465e-03,1.177504396508501170e-03,-1.580961833636336479e-03,4.322246797917685449e-04,-3.581591157376790274e-04,1.196296687421370816e-03,-7.324883740432859254e-04,-1.193871769001422778e-02,-2.232624336953248183e-03,-7.635147779029191964e-03,-3.890164395675685567e-03,5.887154772487150980e-03,3.582429722070711205e-03,-2.565573217374885698e-03,2.608660437309917070e-03,-2.041214831228242016e-03,9.088420443725110306e-03,5.658702312402908059e-03,2.026141628620799044e-03,1.023420147075046992e-02,-6.618603357546464470e-03,-7.896150058432273994e-03,-8.039260993342633305e-03,-3.026494298498346065e-03,-2.998383501803647978e-03,-4.051471072615099762e-03,-3.119963631187744769e-03,1.107492482438336527e-03,-1.621341995734803523e-03,1.787462262579679433e-03,7.127693856423764638e-04,-9.783096711745556395e-03,6.385963409138305409e-04,3.200942805960960971e-04,-3.548686199655499279e-03,8.845640140286689700e-04,-6.887191022390036846e-03,3.005338502239210917e-03,2.211983882659815295e-03,9.339309022106554933e-03,-6.262141931714600152e-03,-4.008945928416346276e-03,1.147309057333070152e-03,-6.101469316203799245e-03,3.194289334803404080e-03,4.614021386335040616e-03,5.554015536005579151e-03,-3.011497825566198869e-04,1.646621795819293950e-03,-2.428327054906887286e-03,-3.411988667623804677e-03,2.075570863982701460e-03,2.746067714540586027e-03,4.762237793483859133e-04,-7.706597577534429061e-03,3.490168799775137266e-03,-8.254876586659864318e-03,-1.247582949906073276e-02,-4.972796716548022120e-03,8.614531715094655290e-03,6.597022504426142044e-04,-5.446326298854360128e-03,4.182556647363518294e-03,1.939350038446623764e-03,1.236413469250043431e-02,2.869193381445384310e-03,-2.725264582459427289e-03,7.285279354691154459e-04,-1.001472048484019746e-03,-3.226953513909188061e-03,-1.779372366583448911e-03,3.114225693899429215e-03,-2.679461842388315956e-03,8.701568991345243545e-03,2.408804077464007064e-03,1.014481300103333740e-02,-3.143624100923868574e-04,-4.050310520368964173e-04,-3.542804330866973668e-03,5.296476211159278046e-03,3.564932108204487728e-03,1.109350337939677376e-03,-1.351045382227446462e-03,1.093393343533987294e-03,4.835577020917795898e-04,-3.294164336008570393e-03,-1.000135754169864232e-03,-2.185317373207488340e-04,-2.566230007101914182e-03,-6.908851916685026205e-03,2.966939890497810508e-03,1.151538741440441072e-03,-6.422942396594296527e-03,1.561317685672889127e-03,-4.642360901112925486e-03,1.323265767413554436e-02,1.025395527136645224e-02,-9.678725896136298849e-04,-3.370105427897171989e-04,-2.034034658477466666e-03,-6.173488747541913332e-03,-4.381495223056581026e-03,-1.211908428340461230e-03,-2.484902839745724984e-03,-1.740303450868410868e-03,4.381773169087369371e-03,-4.754780127420269766e-03,2.09510
2096774950195e-03,-5.869386631686222952e-04,3.894692866584871516e-03,-8.216770740242066188e-04,-4.709013392954429153e-03,-2.073723764318672561e-03,-7.905541261996979971e-04,-3.516559229401936080e-03,-2.336579455337857783e-03,-6.895110711649783891e-04,1.801782180966285179e-03,9.227271010695967113e-03,3.886904864313952494e-03,7.148766416018809654e-03,1.422107394552083771e-02,-1.982510823129577347e-03,-2.751943913301151037e-03,4.265618568882186901e-04,-3.398344957280162882e-03,-7.027096444596109314e-04,2.915502012205486992e-03,-1.620038443526244237e-03,-4.225335645073083693e-03,6.689471712602208797e-03,8.539639408821044636e-03,-7.130938822425369854e-03,1.763579681532898295e-04,-1.235257509310236964e-04,1.136593805588181211e-02,-1.417374616164088826e-03,-1.140167861175556087e-03,-3.383908530793236762e-03,-2.563488338587252471e-03,-1.647717159476728358e-04,-1.580737606389758312e-03,2.625390293026130285e-03,-5.105458447061848263e-04,1.036397575672359794e-02,4.965476023835940084e-03,-7.702947655801436512e-03,-2.082242019101603319e-03,2.466454622575867898e-03,-7.282571325810918011e-03,-4.189272850480540121e-03,-4.708072267653345445e-04,-6.209148555002006430e-03,-6.574286329587868648e-03,-9.826363546990833121e-04,-8.462346531253028478e-03,-9.671837307539402767e-03,1.166983334139178475e-03,-5.826090136717906008e-03,-5.104863734765425001e-03,-1.715260436615100980e-03,8.945394280846490770e-04,-8.167566472122632024e-03,-1.530348475218506164e-02,-2.364240930071125242e-03,-1.172809613151656969e-03,-4.567826853345355841e-03,-1.328465509158576873e-03,-5.624616726982727306e-03,-2.649301651985001751e-03,6.383136906863603501e-03,-2.429851627015053826e-03,-7.836536985677704020e-04,6.621327770735318217e-03,-5.486874112149666598e-04,-1.935217900898879512e-03,3.507741795145461177e-03,1.715080521728473695e-03,2.778444027319298140e-05,-3.198229559967480286e-03,-2.175321983988517862e-03,2.458364213254207037e-03,1.034244821627784607e-03,-2.662974998153058281e-03,4.553053099801053320e-03,-2.776745846266978537e-03,3.999868379464524754e-04,-2.796850583523833884e-03,-9.864958804146685065e-03,1.338123561306074107e-02,-3.364546937696297722e-03,-3.688881084728160443e-03,2.528540672516624548e-03,-7.980414375426036957e-04,-2.670871392658585777e-03,3.529849093543768919e-03,-5.384948937656377534e-03,-4.848432982451043004e-03,-1.905568188186916633e-03,9.065778853270107379e-03,5.728550880366102722e-03,-1.666341846927871803e-03,-9.692624680677740245e-03,4.005254876514093704e-03,4.010048246998800317e-03,-2.656038389398875880e-03,-3.358346462476628942e-03,-2.661509231789175364e-03,3.283692800984620592e-03,-1.162726756859770163e-03,1.247019006850645520e-03,-4.353633527892932474e-03,-3.870388280486536988e-03,9.370588719501183900e-04,2.397794539902854596e-03,1.111407469960241532e-03,-1.903187391696033442e-03,-2.623501734249163945e-03,-1.851654152369812617e-03,1.341045399274626076e-03,5.260949124221508087e-03,-9.509996879740833619e-05,4.463186475790772295e-03,-4.844291633922380436e-03,-6.578455016727529497e-03,-1.803432883738889788e-03,-7.309578145239754854e-03,-1.701669943199018509e-03,1.180347405284002359e-02,-4.555822819549749415e-03,-4.241602073764992077e-03,-2.307607810226066508e-03,1.769054733941729379e-03,7.696410541726749280e-03,-2.456771444071190365e-03,-7.159680631492310564e-03,-4.808229842184978675e-03,4.630901272583249576e-04,-5.958402900664796879e-03,1.067892917945589652e-02,5.901876265343528744e-04,-9.383018574303157028e-03,2.259736268519197365e-03,-2.566077118801090001e-03,2.107532072127261697e-03,-2.687280745085283409e-04
,6.298099938329528372e-04,-4.429913891008331085e-03,2.891368114758613699e-03,-3.069280274112655936e-03,1.386166694574261782e-02,-2.404162713760181654e-03,-3.788687584353362357e-03,9.890384338383190286e-04,9.181106060964751986e-03,-3.900897987543486180e-03,-5.948436859289327715e-03,1.863142364208185235e-03,5.447884533152884748e-03,-4.440504373849146150e-04,9.395682075872636768e-04,-9.404242080119692287e-03,-2.228242805275416297e-03,1.505792964789574992e-03,-8.014802188128137758e-03,2.793863537613307767e-03,-1.371867777437704413e-03,-1.193188671438074602e-03,-4.060917199286894104e-03,-3.209041137502419323e-03,4.786665576682728324e-03,3.758943966780258113e-03,-6.115900561550717887e-03,-1.590832188370774197e-03,-3.586708364254548483e-03,4.268021255358573143e-03,3.572751238383553227e-03,7.098021524491352624e-03,-4.607121946486113923e-03,2.890245172577458454e-03,2.803021496024253514e-03,1.979679656627190368e-03,-8.392106586933766000e-04,-2.149772604742177223e-03,-3.848726751069256932e-03,-2.517543644233757286e-04,-3.626342334507143186e-03,-6.074969004537866119e-03,5.676322469763486062e-03,7.131266603070158736e-03,2.160446952861008307e-03,5.065316927320915631e-03,-3.622718015618806627e-03,3.860902402451943014e-04,-6.587829421745912406e-03,7.953109993228660649e-03,-2.845301401144318450e-03,-7.568357284257895383e-03,1.289576176566098508e-03,-3.551787418172132164e-03,5.930186370627269594e-04,6.780606160095256933e-04,-9.203451712147704819e-04,2.425101491613806341e-03,2.735843534759560807e-03,1.911757489928311285e-03,-4.245432547826726195e-03,9.531097705865723604e-04,-2.899454140771686936e-03,4.703325499197807556e-05,-1.030655919159879832e-03,4.693515020181496765e-03,1.984731360595584637e-03,-2.911106934913415024e-03,-2.771194627528329908e-03,-8.832901511541363078e-04,1.064734922310998531e-03,2.081946592760457355e-03,9.225532899720527077e-03,7.788559515158337450e-05,6.989976400289273474e-04,1.075091516302881765e-03,-1.405811738414539233e-03,-2.015391389711920398e-04,4.966344124490174386e-03,-1.937318862573953826e-03,7.816922622285215130e-03,1.347067388106619728e-03,3.361284373693169698e-03,3.610699923378877900e-03,-4.364592239518675330e-03,-1.075080224066112325e-02,3.392966430021616270e-03,2.834862994935736721e-03,-1.158471715562569811e-03,6.168044598986418756e-03,-6.014468999086667374e-04,1.257217364383770263e-03,-3.603756063682622061e-04,-9.211492750111764964e-04,6.813853437816960561e-03,3.790031037239936090e-03,6.869584749161987414e-03,8.364514354547854594e-03,-3.854304728956214638e-03,-3.851388268540226808e-03,-5.387897244367503662e-03,2.072939366986440140e-03,-8.064450990787910833e-03,-9.920285395809318521e-03,-9.747546229979090621e-03,-7.775111490940481325e-04,1.735770977244343585e-03,-2.623798506005372254e-03,2.482845461020016017e-03,3.035659519491789443e-03,5.705477023233370554e-03,-1.202605472332351436e-03,-2.761567802993338851e-03,-7.005276368608515623e-03,-1.587936509067795754e-04,-7.742605965296943784e-04,1.052486726229716384e-03,-3.605533824850692265e-04,-3.085184275505192374e-03,-2.519643498649825220e-03,7.109467799259679447e-04,5.254285074925809512e-03 
-2.978508108062070080e-03,4.655085617512045616e-03,-1.726480093259644437e-03,-5.036853073373897226e-03,7.439100666086521582e-04,3.270438138908655923e-03,6.079445266767325405e-03,-3.599945764319313363e-03,2.679976783596416633e-03,2.774756390729420791e-03,-8.488650430716712658e-03,3.276401142050764347e-03,-3.613442216218728381e-03,5.313732860171964736e-03,-3.945373988694384458e-03,-8.822711156030664525e-03,1.006738111009490046e-02,2.238523601774998831e-04,-4.580704235799461232e-03,-5.021123880682195305e-03,-3.086579866698389808e-04,-1.129032060411177402e-02,-1.364615924330151277e-03,-4.699782455435667916e-03,-3.655216221557140883e-03,1.751633752058239088e-03,4.947596664496496308e-03,4.590718783380331341e-04,-3.990995681202202071e-03,-1.160481787733732943e-03,5.942304976083082448e-04,-1.647288036075774753e-03,-1.001609385155376869e-02,-1.439076720636359467e-03,1.217336440717629127e-03,-2.470029893005932977e-03,8.951925322619087161e-03,-1.067706454590432695e-02,-1.213961984437568736e-03,3.720348227354407927e-03,-5.633488949141409428e-03,-9.345587078893330055e-03,-2.983462710669767485e-03,-8.098243046245677621e-04,3.167749174296314012e-03,4.368648793618386435e-03,3.311989279446678897e-03,7.981079117018225269e-03,-5.136414252667138684e-03,-1.553457881443031144e-04,6.466680377450430978e-03,2.584350904641557426e-03,-3.429393968767998312e-03,-4.647669095848772601e-03,-3.679787117772620984e-03,-2.609680828086086320e-03,2.952390520783433354e-03,-4.560713260351451429e-03,-5.780837140170660277e-03,-5.812114246022831183e-04,4.532641516355002606e-04,1.270865925313421300e-04,-3.360371993144077411e-03,-1.626663334584705408e-03,2.736909047383584079e-03,1.269936446176275551e-03,1.792197033137723182e-03,1.382836186174998296e-03,4.863921629815702412e-03,-1.754961828383233514e-03,1.361570254370614514e-03,-4.145708969448399714e-03,-6.531297485635566627e-03,3.301339376882076577e-04,-4.020601958232331540e-03,1.731859736275660538e-03,1.359591968612375908e-03,-7.545677689928272265e-03,9.110042569018815192e-03,-2.670801145992087551e-04,-6.970159043945080074e-04,9.375139530892110870e-03,4.617546285271119377e-03,-4.912286590661333295e-03,-1.734950017995721743e-03,2.329148312595780059e-03,-2.970757488521009321e-03,-6.555283921033945582e-03,2.467222988453987749e-03,-1.248649414868604210e-03,-1.254529531769951745e-03,9.028513456330260851e-04,2.515289984323868921e-03,2.738722732474208315e-03,1.963907165479218848e-03,-2.683920830327738653e-03,3.260248517489098760e-03,4.252280216509266887e-03,6.857854036281245402e-03,2.242835805051397822e-03,-3.217508579996843088e-03,4.210112058796094958e-03,4.391916759819922307e-03,-3.704516799915776992e-03,1.000433621736964004e-03,-1.212891138587185794e-03,-2.917047456681068265e-03,3.047334166253939502e-03,-2.886282736901414452e-03,2.690997707456925167e-03,-3.338410994885744956e-03,-6.296483468127562176e-03,-1.973604415758294442e-03,3.447986528639833403e-03,1.856798979397670385e-03,-4.948846861774084185e-03,2.547010810722798082e-03,3.035102054203986811e-03,2.297194488320454617e-03,4.811037219754914565e-03,-4.625074636051332982e-03,-1.704599323207680022e-03,-2.331520621575855576e-03,5.684212077630971431e-04,1.583996002199631540e-04,4.123149076069585958e-03,1.749434952961950035e-03,1.814717079561043497e-03,1.264358343876855265e-02,4.428079998700929810e-03,-1.239050888222996936e-02,3.563280432232958893e-03,5.864676478502580531e-03,-7.666427425088690557e-04,-9.145720910749645383e-03,3.598535291744238071e-03,-5.587463266103879361e-03,-1.172198800717286476e-03,6.052561128716893619e-03,-1.10996423
7195599362e-02,6.532848754371920787e-03,-6.892535758144428077e-05,7.150410862039236273e-03,-3.942538877824372859e-03,-4.567339504455574656e-03,-1.289641018944523692e-03,-4.474438218872581804e-04,3.340009504074088580e-03,-2.190513463785426999e-03,2.858246003112996456e-03,1.028229769278891318e-03,-1.734116587592216376e-03,1.297700099008251406e-04,-1.519390531688258111e-03,-6.449750947349532776e-03,7.049890557742609538e-03,1.349182862991827074e-03,-5.940479296587808378e-03,-6.743055543206162529e-04,1.033947521455905703e-03,-8.677757595227174325e-03,-1.053567493291090095e-03,-4.666803230990702010e-03,6.898480415789366715e-03,-6.390391801863922090e-03,1.603162867313378658e-03,5.519918850851911890e-03,7.251116907060737443e-03,2.738345835653940245e-03,9.107632417831397140e-04,7.252396451368520180e-03,2.961736712046479907e-03,-4.841740471965895487e-04,-3.013055820832890100e-03,-7.846586649114825271e-03,-7.678502296841642656e-03,3.133640684064775347e-03,6.487875500052940614e-03,5.505718255998122814e-04,-4.290968059226397263e-03,3.078717736294074234e-03,5.512563421107439989e-04,1.150250491358156433e-03,-4.929303062384832006e-03,2.241974090156724844e-03,-2.488672546599745000e-03,1.268714949512719445e-02,-1.755672054668002552e-03,2.511691374564316986e-03,5.190042449068359501e-03,5.218116848836551469e-03,9.882870299201255981e-03,-2.844290031243362250e-03,-3.172171201186045359e-03,8.406242691143183884e-04,1.202360511010738256e-03,-2.487151635109344401e-03,-3.737821727009270931e-04,-1.341597785139742212e-02,-1.089091279003696899e-02,-6.033094799286459198e-03,9.456637659239683618e-04,1.799275309513218794e-03,4.214769803577371574e-04,3.826202855719912799e-03,1.560940117117932784e-03,-1.125002605661476891e-04,-1.028907075173586885e-02,2.428190407009092437e-03,1.139688953550182854e-03,2.860665249602858864e-03,-2.493156821979033294e-03,-5.293140881809051124e-03,-2.787643179686740552e-03,-2.040957081327546621e-03,2.008627425702589307e-03,4.503164335295183583e-03,-3.305163319059410687e-03,-2.943010397320593987e-04,-2.725441319964893010e-03,-9.574373952446122143e-04,-4.098468754430022870e-03,3.002520071963885647e-03,-3.092187310621669592e-03,6.358826275494006662e-03,1.566465080561802196e-03,-3.137369781618035147e-03,-1.089765577981544227e-02,2.267061340341988869e-03,2.162278292917986858e-03,-2.561829628084280608e-03,-2.861059615049658337e-03,-2.591538840734973828e-03,3.608918799409529046e-03,1.518387151276807652e-03,-1.471411088836921052e-03,-4.917980144989186203e-03,-3.122974725684332219e-03,-1.454203159691947866e-03,-6.430560281025319430e-03,2.831272625298537036e-03,4.692819797023556512e-03,1.272953884495985847e-03,1.105872554234047303e-02,2.641821384257004180e-03,2.964302014196781160e-03,2.518777383663853980e-03,5.505705452545808613e-03,-7.882507042560021882e-03,-2.717349656339620653e-03,-3.441047774047711807e-03,-3.681731110660213784e-03,-1.136129117106256812e-03,4.225468831338797734e-03,1.820163618214122331e-03,-1.743298415579731873e-04,4.004307938658280795e-03,-5.730795518705268090e-04,2.724505182092903585e-03,7.471345210764295018e-05,8.255300575813306896e-03,-3.596024259106602242e-03,1.802699131334313054e-03,3.169169286849692329e-03,1.189040995095290952e-02,-3.598943954654035358e-03,3.734463247952231595e-03,-3.259062064404889478e-03,-1.646260305420493668e-03,1.635528272222849322e-03,3.249437991927131997e-03,-7.690528301580085201e-04,2.832545596222093079e-03,5.654251851146750198e-03,3.020249871178643138e-03,4.306590615932153485e-04,-1.429104648180285755e-03,2.361677937727364035e-03,-1.644669659511833245e-03
,2.788739322548801185e-03,-1.065990880305882609e-03,6.253790379341775179e-04,-3.304866914474294520e-03,4.130508388152142121e-03,-4.462913979639347717e-03,-2.395132342203761758e-04,-5.322673103040543967e-03,4.822321750759126164e-04,5.821708274916873862e-04,-2.184137295697355981e-03,-3.942884819849630691e-03,-4.053759104861019702e-03,3.635422830935724006e-03,-3.849248366420203200e-03,-3.041027800994374961e-03,-3.899308748815316704e-04,1.393996774680429608e-03,-6.867915403674922526e-04,-4.641248451703243981e-03,9.127802380380032341e-05,-2.505734061999102379e-03,4.278387451504477718e-03,9.741811426015926922e-03,5.099758308630485126e-03,2.321956778125636427e-03,-2.723191396428387753e-03,4.289749483961126202e-03,3.279651665591530423e-03,8.723082592851328851e-04,2.089871129372920452e-03,-3.872872567890826671e-03,5.050342700961022119e-05,-2.567485087979740673e-03,-8.221210626374993169e-03,5.128594872486242099e-04,-5.061783696358667164e-03,-1.020970185226602149e-02,1.209850455015000882e-03,-3.438220677555277154e-03,-2.651595557501705905e-03,-1.281096070793950360e-02,2.229443651876710387e-03,-1.422345676473082401e-03,-1.209015494356527332e-02,-4.848567138693880081e-04,-9.741350549414308502e-03,-4.939426473916160455e-04,-5.755082490032726976e-03,-7.327460889773054764e-03,1.180895859884598630e-02,-5.948146055273263906e-03,8.789279777153097620e-03,-5.902834652482502305e-04,-1.051818549027336768e-03,3.879971821690748716e-03,-1.865201247999589973e-03,-4.250725270789002669e-03,-2.232113741148709224e-03,-4.859741661524101010e-03,2.848996053204639820e-03,-2.232163550081494360e-03,-1.133035404471064399e-02,4.251824391598236553e-04,4.079659496924300556e-03,-9.117791694050593563e-04,-9.270802108119269910e-04,7.181071872212008156e-03,1.434319897505328609e-03,-3.112221586868239853e-03,-1.204759038430018190e-03,-3.649842410800447842e-03,-7.421919444896389606e-03,2.703780230726744380e-03,-2.721691966583740172e-03,-2.432193199131991307e-03,9.834946724812628979e-03,-1.279710894676995558e-02,4.888690745965324456e-03,3.212741160306873539e-03,-3.674333060904251807e-03,2.666774066633187043e-03,-3.850561737564017848e-03,6.688128563252729829e-03,6.644927166245638691e-04,-2.778651289287268223e-03,-2.594841049145323736e-03,-1.539537914726781627e-03,5.121663336346210689e-03,9.001602908443316923e-04,1.403717227443581185e-03,2.579360286220516198e-03,5.185276795894742854e-04,-5.957222604174821148e-03,-7.345154934104914301e-04,4.025790453419080504e-03,7.579364688879503464e-04,-1.068878948812318963e-03,-2.748699340566147179e-03,4.469618055669389092e-03,8.943826972072408234e-04,1.362468092229727470e-03,9.861713688960030769e-03,-5.765747881024029543e-06,8.839018199855516386e-03,5.324192556531547570e-03,4.280253071443944866e-03,2.960982450413442678e-03,-9.535585212904292354e-03,9.960802515183728514e-03,-4.888046545106787891e-03,6.539925822989058231e-03,2.845419841754253721e-04,-6.088721138149896534e-04,-1.098084442517590102e-03,9.380497107206548987e-03,-6.154731943500266721e-03,-3.565128570489053440e-03,-8.388022611673316351e-03,7.115966949282292141e-03,2.602238460805343309e-03 
-3.096751663103517425e-03,-5.641067253233595161e-03,-2.473926867966860267e-03,1.429710513419880881e-03,5.252335938840693025e-03,2.312394215064613530e-04,2.313881213538077682e-03,-3.044972353873739296e-03,-3.169146133245373791e-03,-9.005624693016085758e-04,3.502367749165775576e-03,7.900408430219362996e-03,-6.104206997544556548e-03,2.376710659683484953e-04,6.572319715928375689e-03,-2.939962017218840243e-04,7.146709149330072834e-03,-3.973890056142539290e-03,-2.536725616701332887e-03,3.246889975745864947e-03,-5.107961437713747464e-03,3.955407848178977767e-03,2.332516975867629516e-03,-2.685139264219713832e-03,-4.470910634848040272e-03,6.236888625806711467e-03,-2.540558908758540368e-04,7.531716519142134377e-04,-4.143322805948436958e-03,9.744858248257952729e-03,-2.575201726225389990e-04,1.014683311498156340e-02,5.821148939035842088e-03,3.152424571398602937e-03,-1.652837700159415571e-03,-9.365992442016194547e-03,4.933700306181624390e-03,-3.339916327524406099e-03,3.539946955445971884e-03,-6.379802758417302223e-04,-7.456523147376118744e-03,3.156011701964174954e-03,-1.264524508165485979e-03,-3.012429628064465514e-03,-5.010846884285920753e-04,1.119333891472212156e-03,3.088910037546425871e-03,4.097644518477489353e-03,-3.017584970738884913e-03,1.061401998104912273e-02,6.207328144542375919e-04,1.052949962985622739e-02,-1.170804581886765667e-03,-2.079055003513845690e-03,1.262035312855422017e-03,-3.116914927099972889e-03,3.851346225570051202e-03,6.206663195190928808e-03,-4.923270511621738454e-03,1.015942754720586602e-04,-8.521646773558579080e-03,6.000412206324121957e-03,4.468161518532614424e-03,-1.946464345482261686e-03,-2.440347161669962836e-03,-8.415561213640668312e-04,-9.012013334221295746e-03,-5.788392672533467127e-03,-8.276153267030555288e-03,6.409938584768439408e-04,5.598904998685980735e-04,-3.164241775514466809e-03,3.511308820991012030e-03,-2.103374224425376047e-04,-4.737880681406854422e-03,1.022331703259924014e-02,1.197109904720009346e-02,-4.661637464419303954e-03,-4.870793041997754230e-03,6.170664104732975080e-03,8.541706896882621377e-04,7.561961134764761490e-04,-6.350003219607235289e-04,-1.130302489434425149e-02,-3.508302695418066483e-03,8.248882461448839992e-03,3.109741121372471169e-03,3.480524139387177179e-03,5.289927631157794906e-04,-5.856912648410608118e-04,7.846688214844421000e-03,-1.775580250871752969e-03,3.227234933856369670e-03,6.785961343878776225e-03,1.372999672780068599e-03,-1.630764657367631074e-03,-1.222964525086485746e-02,7.909508975801682936e-04,2.590411365015326487e-03,-7.786593019653968197e-03,-6.093266937716432024e-04,3.714319601285456969e-03,-1.578466691605797745e-03,2.296433482366053418e-03,3.146434140537538674e-03,6.154130392928645465e-05,-3.102565793427973333e-03,-3.049682915963595944e-03,-1.028434016629750994e-02,-9.730213855569861195e-03,7.499355597337722121e-04,-3.836555464593484083e-03,1.950399939721691967e-03,-7.337454870451260748e-03,-7.614224931816237184e-03,7.005330112385102677e-04,5.573114490212930684e-03,2.935833806560324711e-03,7.636532161176465966e-03,-3.416081508485286928e-03,1.095353067480640645e-02,5.822716116531780235e-03,7.940096486769599457e-04,3.652074589551920682e-03,6.183403628015838628e-03,8.395918164905062740e-04,-4.547094033760561084e-04,-9.114961085698105063e-04,-3.182702051009625704e-03,3.343603072564322286e-03,-8.148883441007833545e-03,1.784737960438282072e-03,8.135086110688655430e-04,2.363009663041008932e-03,-8.210022752244112250e-04,5.242798042374415511e-03,1.010175549058831912e-03,4.011130100029534906e-03,-6.673760741585803985e-03,3.9859221196836
28709e-03,-2.708201759458630457e-03,3.866292734288035663e-03,-2.396653935186092953e-03,1.900483422389218774e-03,-9.557664984398485825e-06,-6.161851041932889760e-03,7.673187612389965710e-03,-8.320747821245403145e-03,-2.751631964981462401e-03,3.582863627297061451e-03,1.343022805024039323e-03,-1.156688818459545610e-02,-6.203233386132495607e-03,-9.800415183602248243e-04,-3.042883293305921784e-03,-6.771795489772535620e-03,-1.495658637634992953e-03,3.379519674931934659e-03,-1.777635100509171147e-03,1.111543548880541922e-03,2.018414547246996475e-03,1.200877037294433534e-03,-2.984105718622497364e-03,1.667186451114781136e-03,-1.223213502858018584e-03,8.078011985713732385e-03,3.928847563798083808e-03,1.778304646045984153e-03,8.216560558039848194e-03,-6.129863766895382420e-03,-1.252309070949390955e-03,-7.785716468320375352e-03,-5.381891242166495599e-04,4.902376079303311270e-03,-4.083796148689019211e-03,3.730656391784867732e-03,-5.999167408371606190e-03,-5.175867137319634624e-03,-1.827858512959645593e-03,2.376432232856398163e-03,4.935790806982523135e-03,-2.783197656885497227e-03,1.516698353089599577e-03,-1.983975169248230683e-03,-1.008314120907932926e-03,-4.382777624027779130e-03,5.633716470471779141e-03,-2.165101611313905317e-03,-1.022186143490080683e-02,6.131217120548497837e-03,7.660933062079904564e-03,1.475550201076983141e-03,6.416883953007013652e-05,-3.556553353650286631e-03,-2.269293199609663352e-03,-4.076363794933883858e-03,2.175747925552238161e-03,-1.055002920757169664e-02,1.268247940206206200e-03,-4.377456763707718648e-03,5.342095391540478688e-03,1.193848418258509599e-03,-3.422852945276105320e-03,1.515864833301103914e-03,-6.277739716885640106e-04,7.776861615590203742e-04,7.145880059636456434e-04,4.858823837570903220e-03,1.308918470066164005e-03,-9.330195532382637436e-03,1.961681525170839817e-03,-1.452920254971434497e-03,6.605284039214855216e-04,9.580290243477614792e-03,5.035665226457811637e-03,-2.021735610007211609e-03,6.332368896402685264e-03,7.718494346022342946e-03,9.012404221686028810e-03,4.248446275749575424e-03,9.707624306620214662e-04,-3.240262808501604037e-03,-6.867016223955003940e-03,5.523424816014047868e-03,3.215787315122893824e-03,7.462644223574729091e-03,-4.257490327341391032e-03,-4.034703827063370642e-03,-3.554110121002262870e-04,-7.365447428479628307e-03,-1.409267912168636269e-03,-1.133452238387571255e-03,-3.747412474041026363e-03,2.938553063004504824e-03,-7.521345658013801026e-03,4.568189421914155800e-03,8.601934573818259717e-04,-9.339511401309846458e-03,2.514104182784111709e-03,-2.225117991486538251e-03,-4.968624967391785212e-03,-3.949949918584873439e-03,-1.108797205774796416e-02,-7.635998897911201733e-03,-3.809975421842374458e-04,-6.540140974556844788e-03,8.665445413452000202e-04,-1.048846216032408608e-02,-1.712182035630467137e-03,-4.248870333730407102e-03,7.162341252575616475e-04,-3.567735527012351991e-03,-4.736201108132069050e-03,4.458209687193863749e-04,-1.687995315724707089e-03,4.698387425179560879e-03,3.372078600901746905e-03,-3.777876137319680257e-03,5.497535254078111640e-04,7.186484825565699064e-03,-7.733571224434385855e-03,-2.380267590210616204e-03,4.142739285187539700e-03,1.028204457743796089e-02,-1.829046241229278493e-03,-7.210699243905687415e-03,3.276895910704517650e-05,-6.065360449154200487e-03,-7.355801565514099444e-03,1.212250220854099031e-02,1.734064757737331219e-03,4.572098958025529701e-03,-2.327485920312689568e-03,5.782636638848545098e-03,3.905814712593461743e-03,8.144401117579299493e-03,-1.111075012971409750e-03,-5.304903621153584216e-04,2.894634076867434389e-
03,5.032410920059728239e-03,-4.953326400411995310e-03,3.692515070812927517e-03,-8.674308079546713332e-03,-6.035736051794408051e-03,1.020101608742471832e-03,2.661762328149793605e-03,1.089188561504946147e-02,1.363237332281592995e-02,1.188930579488751971e-02,1.022374998709342855e-02,-9.788916304440835325e-03,-2.354537275836883145e-03,-2.169896625413337589e-03,6.660085303111173798e-03,9.929073502968847192e-04,-9.418344151114991115e-04,3.136529773017728701e-03,-1.680135810750075717e-03,5.082179158394266923e-04,7.082264705928174864e-03,-6.277888545849902302e-03,-1.182500534299779724e-03,-2.025343388332068520e-03,3.409477454288898333e-03,-7.158443812958082374e-03,-7.200173954755507109e-03,-8.857200539824696198e-03,-4.978951656751916428e-03,-4.095947081822257259e-03,7.633627442410709663e-04,4.023929373137507745e-03,1.505953731590662379e-03,-7.757740958399504672e-04,5.870302397679730277e-03,-5.142734577965658339e-03,5.337326992963018976e-03,1.162453766780375663e-03,7.507134869173053243e-05,-5.498702904491898323e-03,-1.694882848616818295e-03,-1.525713063159517919e-03,1.112738678971899459e-02,-1.003595822591011807e-02,-5.236174192702755352e-03,-5.203339549649139972e-03,-4.474407818477389720e-03,5.799248630646693624e-03,1.791394323691693213e-03,7.914321856180217205e-04,-1.240319178808764163e-03,-7.526160988126154305e-04,3.396474654286656125e-03,7.202186155384913448e-03,1.139545213118672937e-03,6.429050991209764830e-03,2.941592484129056540e-03,2.518919741346459153e-03,3.381128320064678058e-03,2.443451920997325409e-03,-3.867456277991784559e-03,-6.644619658439854769e-03,-5.168273954260376585e-03,1.627988155685504770e-03,9.841411830139831004e-04,4.226751804346301315e-03,-2.732346098973704746e-03,-7.429342964022206917e-03,-1.948718651024091949e-04,6.074289606861769247e-03,3.910115192981697928e-03,-2.635280453522682777e-03,5.780368273541443107e-03,-1.240939165628338883e-03,1.681320310792288643e-03,8.016551660769989304e-03,-1.493396197996340160e-03,5.131914680035531799e-03,-8.121301974358093381e-03,-3.882044855278092566e-03,-2.285625535402328000e-04,7.206553087085751028e-03,7.473618347717698833e-03,-3.030974904508642546e-03,-1.166760057054348635e-03,3.604663709515322501e-03,4.657495150191879110e-03,2.741065304308991629e-03,-2.938009932011938219e-03,-1.242677531055284357e-03,-2.749889727431100318e-03,-2.523880969757404634e-04,-7.803201532475398904e-03,1.402296139289113805e-03,-1.749849684319765937e-03,-3.821863582643492911e-03,-8.150202952206270918e-03,6.020266144976710136e-03,2.525158212258110831e-03,-5.499097209652557133e-03,-9.122769175223104446e-03,-5.729609975194525050e-03,8.095726174666194406e-04,-9.264599577463653382e-03,-2.849733260196500757e-03,-6.113396662807448791e-03,-7.230696802964602359e-04,-4.987591625739420768e-03,-2.900232325028669327e-03,1.170949705248357638e-04,5.790465886844461431e-03,1.495518020636275825e-03,-5.340284590400192974e-03,1.005373766084307827e-02,-7.914268552759932789e-04,8.230299054137383110e-04,7.301873656974473478e-03,-6.581515330928759677e-03,-1.950141605550603872e-03,-1.695558651261528434e-03,4.976303339343933659e-03 
2.205890983094858839e-03,-5.415910135918165953e-03,4.123530752873752307e-03,1.500121604484669478e-05,7.907900773643040948e-03,-4.630143320034986985e-03,-6.392116609974100080e-04,-3.401424799304596583e-03,-4.724471110921962640e-03,-9.077054741609834505e-04,5.016915666436186014e-03,2.339703871659973202e-03,-5.176516756148735569e-03,8.824795701581652010e-03,4.099724698464814535e-03,-5.704228620027096712e-04,1.925392711560893269e-03,-1.090636236874735834e-02,-3.769700821296361175e-03,-2.455528017471563483e-03,3.723246462234441195e-03,-2.594894752128956272e-03,2.489914444188116600e-03,-1.002790718076361758e-03,-2.086439975790415312e-03,-7.652115499104683929e-03,-5.025922374319663965e-04,-1.117040636694903531e-03,-1.000221502891015385e-02,7.280165668597650235e-03,1.048810459662944923e-03,4.830847518412223832e-03,-4.835756437056374681e-03,-9.369247292040909503e-03,8.602594922902382854e-03,-2.128292182457195997e-03,7.961881088722224525e-03,4.676155097535774144e-03,-9.369544367561990117e-05,1.276720410571212079e-03,-2.270701571398236421e-03,-1.998688502969767165e-03,9.961041843160598971e-03,1.369392754916451058e-03,2.275160158095659857e-03,-3.086616089717612171e-03,-1.831472930468643491e-04,2.427005164898630486e-04,4.995609300312430986e-03,6.082648784274675979e-03,5.322133958753124360e-03,4.503897766256188470e-03,-5.754220317302457307e-04,3.653052440645189122e-03,-2.077094523441059409e-03,2.446429176302185489e-03,-2.466521906321645068e-03,-1.122770979834435308e-03,-1.064296499199097502e-02,3.292003374024721294e-03,-8.539965366396366428e-03,-2.995944740202035033e-03,4.956499434087093155e-03,1.123440615909221049e-03,-3.299085688735454297e-03,-1.076682818763875088e-02,-5.296037053906385399e-03,1.925852743919201826e-04,-5.550857037949949143e-04,-7.424214200496687122e-03,7.802183831309507963e-05,9.227860017599118616e-04,3.199481467953747479e-03,-7.072116778601676007e-03,7.314167559381109900e-03,5.089915969379937843e-03,-5.727116709491153305e-04,1.099616579837980543e-02,-1.039301570652011175e-03,-3.168241869773206175e-03,5.432064252431078114e-03,7.171982379708039673e-03,2.035238343273513325e-03,-4.514080657039699733e-03,2.763008228174659029e-04,-2.839084690173316466e-03,9.726169095851194824e-03,7.824567608685942081e-04,6.372292402549578627e-03,-7.770490164232899538e-04,2.618617878367366855e-03,-8.809670713181498580e-03,2.294403234180799407e-04,2.060201935009289397e-03,-9.261984049546471975e-03,1.980662414765318666e-03,-9.515505810533042216e-03,6.112439618863647721e-03,2.970601988822009314e-03,4.903477170164223693e-03,-6.798155917848658575e-04,-8.001556993198629189e-03,-3.862595361838892967e-03,4.281952702892203103e-03,-2.320987900383853029e-03,1.608193840324715143e-03,-1.399448624918553721e-03,4.053580734787074272e-03,5.443257922197746075e-04,-5.448901740111353849e-04,2.812660828784853059e-03,-5.823190837246123783e-03,5.928824819080162926e-04,-5.605784249507250738e-03,-3.327914097532485519e-03,4.555871810185348897e-03,-9.489802996884113759e-03,-1.492615070983730700e-03,-7.965179220922649281e-03,-1.636098062650025956e-04,3.750210997108367685e-04,2.669052508617737653e-03,-6.289575707803361435e-03,6.721144375145300318e-03,-3.451567277045965599e-03,6.530665554343142900e-05,2.679556484824416871e-04,-7.733434485647635789e-03,-5.515169780630236121e-03,-3.776123528008956155e-03,-4.204876112875672103e-03,7.887620713210387368e-04,-1.031918074937033299e-03,4.196450573511697341e-03,-1.739697804370692351e-03,-2.264003609495301994e-03,-5.464195836781910939e-04,6.355777113412271499e-04,3.369842542737092744e-03,-7.781073
410063471356e-03,-7.983146791481052565e-03,-1.047421251737851687e-02,-4.830619515036381137e-03,1.131840537283400989e-03,-8.011661010377542358e-03,-1.382348004596281946e-03,-9.649410180636008513e-03,1.603811495956578015e-03,-3.368091561243185747e-03,1.087600977191336023e-04,2.907297013149827240e-03,-5.022090415940340218e-03,-5.696881409495356020e-03,3.117466262798380548e-04,-1.202009290126374291e-03,-4.534730021754813933e-03,-3.608994148776329823e-04,-8.654893545799892786e-04,3.045045314028368977e-03,7.207203848969561018e-04,-3.599544626547148893e-03,-8.564966390079175804e-03,3.966491880896791215e-03,-4.422573425732877076e-05,-2.583555902661012444e-05,6.186182033329967397e-04,-1.542236387650185170e-03,3.479049615433629956e-03,4.213250913593618273e-03,-3.855236391468978710e-04,7.286550671934074498e-03,1.031317970832041492e-02,-6.923716030060624027e-04,-5.092049043785280490e-03,4.844481522850280740e-03,-5.408954927745939639e-03,-9.627485325413184759e-04,-1.091099602450848992e-02,-4.462881707562887544e-04,-8.119443048179761022e-03,-3.674747968116176110e-03,2.053581235010078741e-03,3.217946425703717243e-03,3.394424923113183059e-04,-9.800313161902843295e-03,-1.450448329428122383e-03,-1.531596069051734251e-03,-4.470753064392948357e-05,-8.759318690407191799e-03,3.477272069770451137e-03,3.886918595586888025e-03,8.107908236955795739e-03,-2.171084429369870921e-04,4.984511334440027168e-04,7.409932095754854717e-03,-4.045830508791714748e-03,4.618057285300056214e-03,-4.041117729562512177e-04,6.537558244321737926e-04,6.648591481190321666e-03,5.473526805613943003e-03,-1.152157946664745386e-02,-6.940384834000374800e-03,-5.605565820500613486e-03,-7.021280506969177979e-03,-6.188223534314189755e-03,8.539971443231562931e-03,-1.539657273246927520e-03,-5.988250264643960909e-03,6.429589576761868611e-03,-8.343555390329810903e-03,-5.764163506145428620e-03,6.498698022697565850e-03,-1.943847934716330878e-03,-1.038243688025575857e-03,-6.159105041932596940e-03,4.909764386309179579e-03,3.096796351952637791e-03,-5.295621398488722410e-03,-1.121275516661336680e-03,1.119282416689432831e-03,-2.255123349306533288e-03,-9.983539108790753791e-03,-6.310378465511974622e-03,8.238546822778351764e-03,-9.739056419891796088e-03,-1.367601641369751812e-02,-6.999009163426768258e-03,7.325198186100575701e-03,7.842976312209220049e-03,7.623738791882336309e-03,3.725959906376386747e-03,-3.058298813809744079e-03,-2.248902294762232090e-03,4.841591479889222234e-03,2.782703396835633017e-03,6.981287616174463098e-04,4.559713445968316924e-03,5.889741726143403640e-03,2.542213609543684388e-03,-2.257405927444312843e-03,6.131759125322007627e-03,-2.669526007781848273e-03,-4.265434773046155974e-03,1.095589512271412380e-02,1.836564616849663528e-03,1.036163894895112021e-02,-2.814147922300875345e-03,-1.699492876016058234e-03,4.995002992713485322e-03,-1.038574562340611497e-02,-1.786021524321699389e-03,-1.817306654254686321e-03,4.169809327657811272e-04,-1.030627105695616210e-03,-2.831929134765607918e-03,-4.858038979966793194e-06,-1.681141484934212506e-03,-1.146133056396043438e-02,-1.741668395062160755e-04,-9.440731856834074995e-03,-4.920642430663099177e-03,-1.858300752789831785e-03,1.266044742138275987e-03,3.020344479013754553e-04,-1.874362767710160927e-03,3.639686260928476792e-03,2.706676431819835405e-03,3.115347614504016763e-03,-4.912871862390848915e-03,-1.059875601320062373e-02,-3.946859567616289519e-03,-1.022725628098948005e-03,-3.202281136101126861e-03,3.820614580801681877e-03,-7.398928634887828003e-03,-1.169954454484782667e-03,4.460091892628394172e-03,4.4163
37078838376146e-03,3.630630132845220209e-03,7.741252247729582189e-03,1.148641121218017987e-03,5.225371832816357899e-03,-6.763454350948681915e-03,-1.077753063278116179e-04,4.243207411687978554e-03,1.249415511026885552e-03,3.935625909306670646e-03,7.953892172852389253e-03,-1.179522797279458938e-03,-4.217492871378391954e-03,2.246121493754777310e-03,-5.092915475464271195e-03,8.107717298065878365e-04,4.721796936230304546e-03,-9.543728979850877106e-03,3.541171185280416953e-03,3.556477645541979843e-03,8.663438808167051594e-04,7.162516752502427623e-04,-1.685659822622040795e-04,-9.259226977893431770e-03,5.713061390965313256e-03,9.191046663133846037e-03,1.344380155800081441e-03,9.724790058613241514e-03,2.757321438187469026e-03,-4.179061815701554453e-03,-1.573515449032543726e-05,-1.257028993634595271e-03,-1.032245916888088668e-03,4.338180502533507503e-03,2.722203854670717307e-03,1.072983831135300783e-02,-1.415203661457063462e-03,-9.509013578174543857e-05,5.130222409204136975e-03,3.432824378690553860e-03,3.571775764692803777e-04,8.470961789079446691e-03,6.166101383054491993e-03,-1.394520162647700434e-03,1.519179587687772073e-02,7.987944794714121140e-05,8.922107699432555578e-03,5.139580178491472834e-03,2.944715709009147059e-03,-2.056844249739593102e-03,-8.053814152046765004e-04,-3.646420756485776408e-03,-5.490129787835049645e-03,2.288137489907162022e-04,-2.385109348215171381e-03,-4.797624678127318520e-03,-3.793896778776573771e-03,5.206871280000416151e-03,-1.095254845853113092e-02,-2.987165394557856832e-03,4.217504720627892684e-05,-8.700282267273970471e-03,7.364047990941236486e-03,4.214558047711796512e-04,4.084954082205084769e-03,6.156105914039883102e-03,9.072460932323819782e-03,3.518895274812600810e-03,3.671823223317164948e-03,1.252021584472247590e-03,2.447024889374202657e-03,-2.394036882929270055e-03,-1.690852204663163003e-03,7.497803481908556396e-03,-1.533935704187893486e-03,1.085555527264286387e-04,5.050988192040375101e-03,-2.222581313234672090e-03,-5.172765957744882709e-03,4.060489289966027343e-03,-2.333559304145607811e-03,3.619046886487721666e-03,5.566063504226627032e-03,-7.300849664804499525e-03,-4.324503647347260372e-03,-5.376750881505135832e-03,6.311647373491827834e-03,4.459135514352586052e-03,2.584534313121585828e-03,-5.329626085375845784e-03,3.629727865075391656e-03,-1.692040128999984213e-03,4.928044596011767198e-03,-7.335158244527977120e-04,-2.774620483081170329e-03,2.101924992715591394e-03,7.325850163777933126e-03,6.833641492773366924e-04,-9.682697148091935074e-03,6.847732645117609977e-03,-6.256416213948833797e-05,-3.321825937090413117e-03,-4.571578624543590967e-04,-2.568491541963467597e-03,1.193760619391458352e-03,-2.486469671770794206e-03,4.205369996188938232e-03,1.002064265091193694e-02,6.445725042980045977e-03,1.945254504345390097e-03,-9.571503233253154941e-03,-4.775912070160396802e-03,1.767142373469766155e-03,5.333008206498451777e-04,-7.288440971962839403e-03,1.999741186098213854e-03,-6.996629467863783175e-03,-2.158378896171932963e-03,7.122392656966298162e-03,-3.388672948582834368e-03,3.871370119771585399e-04,-2.599580579712646149e-03 
4.216726212229470611e-03,-7.674208711530024207e-03,1.918892983275124792e-04,-7.746552224851926656e-04,3.390048713438939543e-03,4.321381695349483105e-05,2.310462202081923350e-03,8.238605807090767022e-03,4.353678048436274671e-03,-2.689924697112241960e-04,-5.446442377120282825e-04,9.780907127214308674e-03,3.222528447951240015e-03,-8.631279718402898316e-03,-2.821114901398310160e-03,-7.184935390249352731e-03,-4.098030158614424448e-03,-5.715783886692842736e-03,-1.635291400932835631e-03,-2.592534141712382785e-03,1.446138713910303511e-03,-6.017008311689073978e-04,7.788988747635353961e-03,2.829639161342426511e-03,3.290813294720710473e-04,-6.558696458780547550e-03,2.268505774528300090e-03,-2.220971711771482868e-03,-3.004215410670565013e-03,-4.805708451207544099e-03,4.181196914944638153e-03,1.177047226877149088e-03,2.101964269870709983e-03,4.393463222074312727e-04,-4.628019858629590748e-03,-4.803599868973903661e-03,4.928887576330679213e-03,4.412539565104030455e-03,-1.650360139258038716e-03,-3.760962175683471315e-03,4.483375330973892771e-03,-2.579647170669322875e-03,-6.394385150025961966e-03,5.766058575851429812e-04,-1.577009933570908389e-03,-7.892389466291957825e-03,1.158128212998823028e-02,2.434238571490085750e-03,-3.841551436127169875e-03,1.451461028340579693e-03,6.411171675583509505e-03,2.200224881572526107e-03,-6.460335530270734650e-03,-4.194555015676768028e-03,4.625727468180565981e-03,8.301158949140768906e-03,-3.338123249757726351e-03,-2.462482194692258188e-03,3.614521765228919607e-03,1.568038734770931496e-03,6.366080534073131729e-03,-1.306768944610734720e-03,-2.600367037628241364e-03,4.213611663909644194e-03,-2.206815764078176156e-03,-3.228494541432611140e-03,-4.485876685944277693e-03,-2.419405188489279646e-03,-6.066426614263660533e-04,-2.758590615771707930e-03,-6.252399443881590836e-03,1.419248145066798758e-03,-5.943837084901851166e-04,-2.908957503270617230e-03,-3.113414360609405296e-03,-1.055043570303052132e-02,-1.950273489979713901e-03,5.701607778141199942e-03,2.131113934950952180e-03,-1.232227720939862515e-03,-4.743519548762367005e-04,2.829576319469990272e-03,-5.929200712692023374e-03,6.509579850052340730e-03,5.091791364926060749e-03,1.212712986506818066e-02,7.707213306921756107e-05,5.305021398807921408e-04,5.087484661640685572e-03,4.783325930738327285e-03,1.253846136714070487e-03,-3.287093198195061537e-03,-6.515077034512209257e-03,1.602924676140210806e-03,-4.449362180332821701e-03,6.648905434248983219e-03,-3.594196446624286188e-03,9.036341221896174863e-03,-3.930077550826196857e-03,4.202939109697509910e-03,-6.503444233757844396e-03,2.278817620376447518e-03,5.750619372455707985e-03,6.280919932541833076e-03,5.671928694540524611e-03,6.427719839125781371e-04,5.401801460113221307e-03,1.544835185283636135e-03,-4.347129087470838656e-03,-7.945532587654900378e-03,-3.957657436257732905e-03,4.101398370484008946e-03,2.684932795598859480e-03,3.999823177870662216e-03,-6.208693166555065250e-03,-1.563914952522006165e-03,-1.039331103103204208e-02,-2.702290181624581139e-04,9.258886766465627971e-03,6.893053888850779118e-03,9.069951658518132093e-03,7.871739852611789323e-03,-9.352595537463065378e-04,2.756152718167783917e-04,7.368793396483533722e-03,-6.705706502897704896e-03,-2.430996050681525755e-03,1.030541800713637645e-04,4.512443630842294731e-03,-2.360671494131360345e-03,-1.942663755975879841e-04,7.559188260821290452e-03,4.964423928540572482e-03,5.949019627222486457e-03,-3.337343157680501353e-04,-6.262517417449329007e-03,2.335958101454684130e-04,-2.805382803064858897e-03,8.922979000319113813e-03,-5.5909399191
32931722e-03,-6.403621788191197171e-03,-1.745365613388127207e-03,-7.835351645666472492e-03,-1.683865386452580975e-03,3.337245085519588341e-03,-1.244460567160220014e-03,-8.665689921776013882e-03,-3.318954511883096610e-04,-3.375069949333159899e-03,3.973356065893826790e-03,4.456708030993975812e-03,4.696591699082207881e-03,2.235370493579434681e-03,1.210551040184459567e-02,7.818265941513649619e-03,9.270589985250552020e-04,6.620252352711254382e-03,1.378958348830036477e-03,4.463597165980154671e-03,-1.148392031894827307e-03,1.160924310228039465e-03,-2.531513514393141789e-03,-2.015795704837996909e-03,1.668018637096207232e-03,-2.613212272629898540e-04,4.795412783351322809e-03,4.572076627618894794e-03,3.064945818791097332e-03,2.270819553191954360e-03,8.665691403989638289e-03,-8.972645528777040830e-03,7.499583901332215231e-03,6.719387621846383690e-03,-1.756619871660195908e-03,1.413310247262912706e-03,2.300735627937681819e-03,-6.160511925499048518e-03,-1.676963088712140346e-03,3.611679317049726334e-03,6.385113037266862759e-03,7.805464396719683819e-03,-2.669609066537672812e-03,6.282683910768253248e-05,-4.063993723203174549e-03,-2.791536950369799194e-03,-9.320406943484623591e-03,-7.461389903492782021e-03,6.771635715953155076e-03,-1.638254125953330894e-03,2.759703004117274516e-04,2.462962169646034163e-03,-2.841250420451204694e-03,1.214371900726554437e-03,-1.754982527069802499e-03,8.468939586345534606e-03,4.069207980950199574e-03,4.985937561071793374e-03,3.865142613333121934e-03,-2.493962239225642943e-03,-8.314692831980658458e-04,-2.545133035612756004e-03,1.199296605979445745e-03,3.105585473527712817e-03,3.532943972631246571e-03,-8.194434669964461085e-03,3.683671932369255060e-03,9.996935959523273532e-03,8.766828886011634667e-03,1.873435422957149890e-03,8.227174876163098963e-03,4.486845113207399774e-03,8.357044592806477405e-04,-5.735263137639251985e-04,-1.625363450991432895e-03,-7.455337657891164538e-03,8.053531690324394927e-04,6.783524329647186261e-03,-6.938272016502258647e-04,-9.146140745609989200e-03,-5.851739721667050236e-04,1.073993662437440705e-03,-8.909097225817260227e-03,4.017443543013778789e-03,-6.741874977993303226e-04,2.493310495942706057e-03,8.468321067471888328e-04,1.150258592823386918e-02,-9.006883689891638858e-03,-2.022069032085273060e-04,-2.116940584849776995e-03,3.135747922417577685e-04,8.993698457504032545e-03,4.201607168494919838e-03,-1.288240492397712622e-03,3.139306208818924086e-03,-9.185801278365830769e-03,-2.909584343770173134e-03,-7.853716668031509836e-04,-6.333558772024067487e-03,-3.051239673498503356e-03,-4.734323130820188366e-03,7.474595121179308452e-03,-1.888704390634544027e-04,-4.312285801611356190e-03,-7.920636710925268954e-04,2.951879324067496931e-03,1.139659918750367609e-02,3.114134590124969029e-03,6.401814531469741075e-03,4.148405666508436849e-04,3.924023777797988784e-03,4.635824481323559329e-03,-2.196541770534223004e-03,6.758050292474807649e-04,-4.443041958366975193e-03,1.244623925758290596e-02,5.818148690789969046e-03,-8.712750847863989095e-03,4.019786190262154141e-03,-5.297067040126861824e-03,-1.130513090881113056e-02,-4.566799013170625907e-03,2.754346594658721968e-03,1.295539398820920798e-03,1.989905776469905012e-03,9.567363630692865724e-04,-4.523515267381556654e-03,4.024552900643626335e-03,6.951342044410113719e-03,-1.313228679017495711e-03,7.956347195141971057e-03,-7.361791194672930028e-03,-2.721239452514311475e-03,3.203041539826547400e-03,-1.540518436154594915e-03,6.603037230138299879e-03,-3.172297496944213781e-03,3.592154611006310831e-03,1.011226268577157007e-02,-6.551
960786459147708e-03,-1.005509014284217649e-02,3.943820347960362571e-04,3.381719838858841809e-03,-7.069058292173108324e-05,1.991066342426709053e-03,5.257137921023189407e-03,6.607030003363082446e-04,-7.412995221018392304e-03,-1.389266240160867433e-03,-3.581571889642041432e-03,2.819601020098185841e-03,-5.034932707907097300e-03,2.278749083212505949e-03,1.291299588192910052e-03,-3.233603142078759173e-03,-3.952777535747394565e-03,4.119797373665331879e-03,-5.208979782524519007e-04,-1.033026676579572572e-03,1.522017102761883062e-03,4.495597905437605365e-04,-1.667792373110236039e-03,9.696434934605941552e-03,1.786024926105054395e-03,-5.989225427261469191e-03,8.406502890743208084e-03,1.917758052751595557e-04,4.982707541762909989e-03,7.904474061517429914e-03,7.010146111587530968e-03,2.353460260048000855e-04,-7.863396152636113745e-03,-1.124035555340583044e-05,-7.890314954781721402e-03,-5.859838622165375668e-03,6.189102858764823287e-03,3.209878520747694308e-03,8.208167001623830131e-03,4.810453946952265858e-03,7.898463191797476871e-03,1.711040622447168071e-03,-1.910728761603361315e-03,1.756605425036573487e-03,-8.126866657286962005e-04,-2.493679012067655416e-03,2.162862812982929879e-03,-1.323166967472632575e-03,2.045068388494880943e-03,-4.953599963280409801e-03,3.392457138909548125e-03,-1.424477382529859299e-03,4.160927436443490751e-03,-9.327034987624790649e-03,9.439944430777202564e-03,-1.400361868861744899e-04,6.104250564818471808e-03,2.168210603184335221e-03,-2.906479004358917314e-03,-3.657552748052246561e-03,6.763298627423354643e-03,-1.282806832104919957e-03,3.996913167244159869e-03,-7.387905799400488029e-04,5.818237369088734810e-03,-4.054012366904904963e-03,-4.776960415615739974e-03,2.994576836627633153e-03,-5.651336206534672904e-03,-2.967381392625015996e-04,-8.442373747132419712e-03,1.025024277583247000e-03,-7.885566976295493879e-05,4.141947181798940475e-03,1.176943031860947213e-03,1.189152769078914229e-03,-6.481118296867832956e-04,-1.719648997094328055e-04,-1.284003940388912281e-03,3.355890855034172632e-03,-2.669834338672090750e-04,1.468869328068926009e-03,-5.093945251761659160e-03,-4.696395400810399537e-03,-7.379354834239091948e-05,9.024378677850901712e-03,3.397640947399640458e-03,-2.598574192748589421e-03,6.740352264559480895e-03,5.443012993549918230e-03,5.179339617548231120e-03,-4.196269240128861974e-03,3.108608481031333915e-04,-3.055050524642496240e-03,6.192196284398964086e-03,5.946919101681345798e-03,3.977852996264848108e-04,7.003769674424779011e-03,1.112350296759847743e-02,-2.656468463143141397e-03,-4.893443102084677442e-03,6.002221968493480150e-03,-6.449426548332114380e-03,1.075887812981207378e-02,-9.717902433595231385e-03,4.451552417754376344e-03,8.244460543395866828e-03,-3.092396436441485676e-03,1.896843888289594061e-03,-7.149433359402824519e-03,-3.038583889768441300e-03,3.297746446601057622e-03,3.644097212329793726e-03,1.890340537490173799e-03,1.009778536625704180e-02,1.072680326215265205e-03,4.205841716469903464e-03,2.394150530142974496e-03,-4.925981388159497015e-03,1.173604453281368309e-02,-8.144995987457663957e-04 
6.123676502241269829e-04,-1.886013142142197395e-04,4.580334357185889514e-03,-9.362475225662270431e-03,-4.038695453602129948e-03,2.822915060234038737e-03,-4.477266505716222950e-03,6.079716855863051854e-04,-5.289852956770158161e-03,4.226486853346073276e-03,4.105221239275767646e-03,3.562862648992934887e-03,-6.210810171786964630e-03,-8.021057878913235434e-03,-7.848417367863311537e-04,-2.298485872240001106e-04,-7.498186336812781225e-03,-4.877153459299547571e-03,2.042209527106402921e-03,5.023994278490860957e-03,2.857100788930489084e-03,2.818938011967145590e-03,1.941143025116830265e-03,8.094088795208164225e-03,6.049991084009497281e-04,-9.189432832990997559e-04,1.413840304294315415e-03,-4.851061495022614307e-03,-2.633515468039565773e-03,1.117716260847432218e-02,-2.833785438559572028e-03,-6.376546820558567572e-04,-1.063195892624708289e-02,3.424158177701316398e-03,7.172622769816314886e-03,-7.970659736356985534e-03,1.132240510600213438e-03,-2.264167268794351883e-03,-5.171415908278709575e-03,1.286296711594928609e-02,-6.857290732686019538e-04,6.235711226621840769e-04,-1.349938585828201884e-03,-3.109211765559019135e-03,7.533353024705133133e-04,-3.370981515457952254e-03,-1.021678579892373092e-03,9.259787258566548607e-03,2.392717662831571248e-03,-8.870816733966746873e-03,5.689201466624814810e-03,-6.276858171117406446e-03,-2.861245282577384343e-03,-3.382875986794567531e-04,-6.849077686314962617e-03,-1.452598545777254160e-03,-1.860880009081777300e-03,-2.378674384185544815e-03,-1.876935590098380031e-03,-1.785794019555473396e-03,-2.574322592774789305e-03,-5.228777428724669905e-03,-4.438031071938270712e-03,-8.120037100903313137e-04,7.443451647097018564e-03,-8.830139206745845140e-04,3.864771592250848976e-03,9.832180462245058896e-04,-4.504874119624587871e-03,-2.061633465548032632e-03,-2.158890287863807992e-03,-1.014654615856165767e-04,-2.578577259624951611e-03,7.809332564379312591e-04,-3.899771670006915627e-03,2.270151699622937475e-03,9.405164702590751782e-03,-3.158182041141626827e-03,5.370918682255824451e-03,-3.372687765120481376e-03,-2.860860051851590955e-03,-8.347875771839940312e-03,6.365284478132051012e-03,9.201409520762504970e-03,3.100122535803669088e-03,-6.862326775494717243e-03,4.877876657647426707e-03,-1.149356262304798140e-03,6.072521881766176563e-03,-1.546247370113041503e-03,-1.043017272466981692e-02,2.866040279678845008e-03,-5.487660571166241147e-03,-2.673273983280771357e-03,3.898612130804281119e-03,8.112937717848191449e-04,-3.127086534221871442e-03,1.271777985097004380e-03,-9.179731931353071056e-03,1.248217398346112205e-03,5.397790633876103049e-03,-3.640554862992689301e-03,-1.489163205039996211e-02,-3.410069413825228047e-04,4.442363831771124587e-03,1.114951583380813238e-02,1.572165277024343936e-03,-5.724695219818241865e-03,5.821353554774490700e-03,-9.630350042503457058e-05,-2.136445644018162421e-03,5.128010606844303661e-03,6.584384455563203897e-03,-2.206309790107655131e-03,9.994530264851577903e-04,-2.075252829608824866e-03,7.474798718420931631e-03,5.490364433782143200e-03,1.102293866914583155e-03,7.574604498423646512e-03,-4.971040664690501562e-04,-5.776889040360370323e-04,3.507842061743470744e-03,6.493463739855368476e-04,-1.917850257503220810e-03,-4.429397657767539069e-03,7.138828220011098590e-03,9.932416292633954730e-04,5.644244425324836632e-03,-4.493587485437328631e-03,3.531997684036146563e-03,3.316511815132360767e-03,-5.867747244828511177e-03,-5.071538276807436999e-04,-2.819217899847112786e-03,-1.674346061071007360e-03,2.650573584224068680e-03,-7.521487445984025196e-03,-4.581842675447158307e-03,3.1
26599444581430135e-03,-9.543336671878929582e-04,2.013842152421072788e-03,-4.200673380985691166e-03,-4.108798334472736277e-03,-1.209276229215926418e-03,7.149254264448880181e-03,7.497524382291758348e-03,3.485562008535534034e-03,-5.087734224494939138e-03,3.277826665599092512e-03,-4.641563718537265737e-03,6.812097581896972044e-03,2.807706677832646439e-03,-1.000157506534615423e-02,-4.967597614820169413e-03,-1.947228525186589379e-03,7.703153627172313993e-04,1.244705471530348738e-02,-1.665700524922560490e-03,-5.211653821770313375e-03,-4.458571224970161138e-03,4.578500213995384885e-03,-1.933233194184179839e-05,1.477734750199858477e-03,-6.927984746464688551e-03,-2.900936757555212363e-03,6.879607395983730438e-03,1.918802087561362942e-03,-9.969320637968863766e-03,-5.487580374101070149e-03,6.294973342336930219e-03,-1.741674515806950234e-03,-1.052847001212852719e-03,9.374174887110017481e-04,-4.067144753040661479e-04,-9.532992061207305398e-03,6.579947911049320239e-03,-8.628514038274253881e-03,3.785626357027007186e-03,-2.183385410509603233e-03,-2.680878647774200419e-03,3.776633449528635830e-03,-1.054623183021762348e-02,-2.467352647140791497e-04,-3.067734579994756561e-03,-4.386560540588900318e-03,6.547336544320174825e-04,5.373852010882095714e-03,-4.747532754008047134e-04,-4.699407146810637495e-04,5.487643136354734309e-03,3.128501737131937632e-03,1.510989041189688938e-03,5.028760049343611836e-03,-4.592057542748865032e-03,3.340853755292341656e-03,-5.144041010623367875e-03,-1.536864047363821831e-03,2.220092750005381917e-03,7.268938271009260134e-03,9.500184718755528028e-04,1.085088237387699887e-03,4.025978316813426557e-03,5.704580561408486433e-03,-6.931763904824499959e-05,-4.576074326675859394e-03,-2.625723597252021223e-03,-4.223427030156066096e-03,6.325887449682051668e-03,2.690912250994519387e-04,5.295231994541475286e-03,4.717666750353192474e-03,-1.732535689713850593e-03,-5.976094902046038930e-03,-4.271373627692749524e-03,2.776359944982900035e-03,-4.366043599479019926e-04,-1.094554050453715828e-03,8.076125040726063117e-03,-6.942596092999134098e-03,-8.162067595391257410e-03,-2.435742339653874326e-03,-1.440533440817696499e-02,-4.575237058908496364e-03,4.314619751470015127e-03,7.288402653939291855e-03,2.316097927013256990e-03,2.426238360398801184e-04,-7.938917067641156761e-03,1.282940604309094077e-03,-7.865328400870955777e-03,-8.522382724704735679e-03,-9.341916444439770924e-05,2.804238348782696214e-04,-5.036133295687144186e-04,-4.399048255110714692e-03,-1.016793517818518310e-02,-9.330117358951462922e-03,-8.805673882723371801e-03,-1.251850777091983525e-03,8.601211606094132447e-03,5.265476972687660288e-03,-4.536164633109570385e-04,1.338915816010867801e-03,5.325692905764286417e-03,-7.447126458919460686e-03,-2.935293804990517664e-03,3.501660014808940721e-03,-3.040264127339595886e-03,6.769261601932084413e-03,6.845665200291947000e-03,-2.401670811167513214e-03,-5.941802982461350489e-03,1.442423002667647973e-03,-8.822187466056093455e-03,-8.138228397955259846e-03,8.937635409063204514e-03,-5.410435720071842978e-04,5.013565287777630161e-06,-3.831441518909243799e-03,9.608323890372033908e-04,-5.537737421582559702e-03,7.272663677182540469e-03,-2.199730390809877571e-04,2.458967620531409513e-03,1.232112978473055669e-02,3.672836038553665972e-03,-5.896109744096995897e-03,1.845001327333584259e-03,1.838379484209307427e-03,-5.270352045795682200e-03,5.767841173852879255e-03,-1.114850105836896754e-02,-3.126218711852623449e-04,2.456665014807898634e-03,4.438088772013363582e-03,-4.926366811615420435e-03,2.159906677628118613e-03,1.2122588
61946512124e-03,-3.501727892527443063e-03,-7.567477921033360228e-03,-1.980734490616074730e-03,3.053237171536073418e-03,-1.768156551891820495e-04,-2.489913294225866609e-03,1.963540463014297639e-03,1.839600234833349535e-04,-1.361265950235659473e-02,-8.021946378379159887e-03,1.075252089561664487e-02,-3.487378120570235182e-04,3.258916953548939184e-03,5.094172267488050916e-03,-1.224783973546001402e-02,-9.855905888742754611e-03,5.516457785396846301e-03,-4.527137978746104650e-03,-2.544892357421434017e-03,6.287275627504291956e-04,4.904089954653677655e-03,-5.893807631863677739e-03,-9.605098292613104601e-04,-1.689932262553336170e-03,-1.460640308379889795e-03,-7.757195783373548904e-03,-2.699422291502636198e-03,6.219785443012549632e-03,-1.167258716323953723e-03,-1.325259417807300895e-02,3.278762828004052753e-04,1.596166612032946233e-03,7.447454896831477190e-03,8.619977047987252214e-03,-4.590438310069395143e-03,2.885049543859245173e-03,1.969379444143503494e-03,-4.863639889383005940e-03,1.687753738065644145e-03,1.014374024679570415e-03,-4.208708736965841891e-03,-5.825380583436477744e-03,2.452053145052278393e-03,-1.306090160174816755e-03,-8.262754081324879749e-03,-3.108259796621831370e-03,-5.866354181468957756e-03,1.494691694305386349e-02,-2.150723518093923016e-03,2.888876085290383987e-04,-3.241551010222498697e-03,-3.325832305310332925e-03,8.226165634346242761e-03,-1.673744595445747626e-03,-6.849271797733072834e-03,6.615053541694069670e-04,4.306837467610452955e-03,-1.448482910179095008e-02,5.904441301946700138e-03,7.338481726588858521e-03,-3.707731212110740564e-03,4.312658044740177413e-04,-4.328846646934497201e-04,-4.477339148601668073e-04,-2.833910036748311890e-04,6.684478695805656789e-03,-4.111727248525021659e-03,4.523028624190817207e-03,4.881042999285307646e-03,-9.304212749981500804e-04,-9.427616804089726217e-04,-1.522086873256087202e-03,-1.623514761974902826e-03,-2.285857906217864718e-03,9.912512626419798471e-04,-1.680525776154472711e-03,-2.114685399903487757e-03,8.643547486001930229e-04,3.617647570249813392e-03,-7.018950047737407705e-04,1.700046453731326578e-03,1.071788340998787795e-02,3.976916247867637484e-03,1.427583392008131315e-03,-7.772995847499702786e-03,1.028204755681968834e-02,-1.989539901429519374e-03,-5.354959488021719671e-03,8.242881694913059901e-03,-7.006464964291493885e-04,2.619494355220210440e-03,4.526866618555446496e-03,-9.193478715100317222e-04,5.457927262526533069e-03,-4.244152901310954858e-03,4.125887359038932223e-03,4.543184920258231188e-03,-3.686898474730988980e-03,7.966403562636961699e-03,-2.103067693932909963e-03,8.982111194563797887e-04,-7.564209948844270205e-03,5.488086742305469355e-03,-3.921696608895745145e-03,-3.960899940468842056e-03,8.809241403749915218e-04,1.895618430726554856e-03,4.085710781532715490e-03,6.311727636766256526e-03,2.185484774670114639e-03,5.835322847793702854e-03,8.622129067835670319e-03,5.628152161317788969e-03,3.461721107807524261e-03,2.285410984907789746e-03,-8.804875970854027147e-04,-4.620030614055456453e-03,-9.974634667139945735e-04,5.911158952624867569e-03,-1.630746768703523336e-03,7.717732149137310725e-03 
3.826070998115870328e-03,5.419755159882757483e-04,2.261786863513839865e-03,6.615984471786805904e-03,-1.839908267519386326e-03,3.605499129850895951e-03,-3.534976686003235991e-03,3.486963674254170333e-03,-5.836204045511117662e-03,8.895565679600663531e-04,3.718317074541579136e-03,-1.653518876810775116e-03,5.529246861517887008e-04,-8.488083036748497040e-03,-8.169659929351254062e-04,2.005446016884538177e-04,-2.919729743582457140e-03,-8.392499554768981501e-04,5.841467755375118719e-03,-1.881001634380016167e-03,-1.330759006889015552e-03,3.237581435818617857e-04,-4.486665754796827395e-03,-2.408045375594454994e-03,-2.453127333157442864e-03,2.719385119344002941e-03,-4.254380410565543683e-03,-7.352315911382016622e-03,-2.465684380795486394e-03,-3.505985452469620890e-03,-2.708167323172384292e-03,-2.446750597156625070e-03,9.895595745611186159e-04,-4.118581238056482144e-03,9.382083660553373547e-03,7.358484678095251162e-03,3.539383846348651431e-03,6.382418485718672693e-03,9.934773445868310196e-03,1.121229444825741408e-03,1.079551918974269373e-02,4.887588775733607560e-03,-1.847784543113313973e-03,-6.693795798872762860e-03,-7.541021724369185476e-03,-6.738702231306114079e-03,7.608186741747770048e-04,-1.494082524977342723e-02,-3.001911121877935597e-03,-1.382533214379363896e-03,2.199597107775921184e-03,3.946213448156715362e-03,4.558165512403184426e-03,-1.807687409970075995e-03,-1.613493169521428277e-03,-4.629745591876374515e-03,-1.078446761022495070e-03,1.738096375538456190e-03,3.722072836043709837e-03,-3.202204907054229197e-03,-3.865886194370326059e-03,1.046053691896872391e-03,-1.273594753573680525e-03,8.789762958458256781e-03,7.259191665490404011e-04,-7.377098007701118725e-03,3.474369704222031105e-03,5.077385841737835914e-03,-4.479249414924461360e-03,1.447526467942738572e-03,4.300168059136098841e-03,-6.107368155829244645e-03,6.051945443497923696e-03,1.630768619232133339e-03,-2.103669473577122247e-03,-1.323080036397446388e-03,-1.620266771647957101e-03,1.195915124303441150e-02,-3.842434137111980664e-03,-3.514913583419949222e-03,3.752426982688442884e-03,6.239750377318938751e-03,1.562448063488071706e-03,3.308632324744201735e-03,3.667774766547611358e-03,-1.110212694412728235e-03,-7.576701538990963762e-04,-5.748673232954908119e-03,4.873907441028307863e-03,5.842572453380584681e-03,5.413707542514228227e-03,5.414839444217118885e-04,7.831613585226498628e-03,7.107136707925321671e-04,-2.979031706299855014e-03,-6.018288174802611731e-03,3.736335037137240682e-03,6.837291811587997876e-03,-1.363008158036257446e-03,-2.181114323142636403e-03,-1.046399599522823687e-02,7.491008570152160248e-03,3.326400727211361198e-03,9.860801465375350003e-03,1.516198992948858404e-03,-5.105049344367158445e-03,2.856696588976783400e-04,-1.427563617959122858e-03,-3.021738929235182666e-03,-7.221290911652441906e-03,1.924581860544765733e-03,-1.258857925268182243e-02,-3.389609719290384224e-03,-3.146596743365844343e-03,3.071260199635821898e-03,1.720754950270400900e-03,5.269505967684740347e-03,5.385631521941408595e-03,6.557309254813792343e-03,-5.749655112337807676e-04,1.944413648068412607e-03,-2.714782372530931236e-03,-5.614522542106763388e-03,5.567586060389742922e-04,1.244355669803271598e-03,-5.722840930884111853e-03,7.143205208211929813e-04,-7.449283181143519792e-04,-6.530794923496569503e-03,-1.288948727658644757e-03,-4.356338478180093142e-03,-1.106674942095731314e-02,-2.074928109919178115e-03,-1.852253447714118919e-03,-4.284474187241691691e-03,7.473667829851140957e-04,5.519839424619060742e-04,-6.766925095919040095e-03,-4.566364389080392093e-04,-2.014015
114412694239e-03,6.508801887019420411e-04,7.569833666954828158e-04,3.078035629546557587e-04,-4.326467370224546839e-03,3.379262511388101092e-04,2.151918082065984276e-03,5.775206795104328474e-03,-4.879601620338101971e-03,1.562592453744592318e-04,-7.572720478703644882e-04,-4.419170514415462526e-03,1.409422178913880041e-03,-3.374066448220998295e-03,1.379817682542964618e-03,6.084447973568487752e-03,6.999582799874782581e-03,-3.510774406468826839e-03,-7.250789974043576587e-04,-1.160962681457406408e-02,7.619627230543933247e-03,9.822921206647308723e-03,6.728352342116308751e-05,-2.228588528086310454e-03,-1.894909400955118459e-03,1.498850039880340055e-03,2.076918127968991820e-03,-4.934035131026848035e-03,6.939713015205386561e-03,-6.338574423153862242e-03,-2.238888126247148228e-03,2.664760177425893380e-03,-1.107997890989704864e-03,7.887638252209856066e-03,1.681822178212067200e-04,6.994519190235310400e-03,-7.341673557233203105e-03,-6.706032176403372604e-03,1.474832015638460183e-05,9.461951879649927247e-03,5.342068497151151139e-03,3.945213906502529656e-03,-2.952319693182594781e-03,1.983853889267885723e-03,-6.217575377414158444e-03,-9.883391551030833427e-04,3.311808825663258319e-04,-4.757534667172363257e-03,4.169614747186680022e-03,1.486379231375830733e-03,4.708340021450115975e-04,-7.274591980112527986e-03,4.149574812495438608e-03,-2.883284478914949645e-03,7.695963449210873469e-03,7.705033046407711896e-03,-9.365447200321975856e-04,-3.418040155856739417e-04,5.023589806729018023e-05,6.542700063334024908e-03,-6.617888489038826517e-03,1.896158410523907567e-04,3.701510540745534591e-03,1.448924410599817907e-03,-5.874607182977760120e-03,-4.741147890470292364e-03,-2.239691250351730483e-03,4.551093183892903488e-03,1.364790919230615945e-03,6.658795365132117589e-03,-3.416237747789257519e-03,8.682829986824435950e-03,4.648046175872888597e-03,4.254543172785611727e-03,-6.155635881008716948e-03,-9.235137824278580321e-03,-1.415942240587058851e-03,7.537972355455117152e-03,1.351329122258440639e-03,-9.090008077124785840e-04,4.752194941577964418e-03,-3.056451762670274117e-03,-1.392258135299891684e-03,6.787301325693320328e-03,-7.670628629313363221e-03,6.649260131579595029e-03,6.381778785011113465e-04,3.316508814009396910e-03,3.058652875483301552e-03,-2.094691715500730366e-03,-8.101726842358592667e-03,-7.836119055582990364e-03,-3.286282492309815055e-03,1.457069277430330734e-03,5.783798672478665786e-04,-3.812778250614455577e-03,-1.203036194370690542e-03,-3.908925596507180182e-03,-7.647130831186704780e-03,-1.092960435904757367e-02,7.923684520458128917e-04,7.103888792039420204e-03,-1.847823346618295655e-03,-5.808575082896779568e-03,8.726836918567178886e-03,-1.703302554845635653e-03,-5.653326939826334996e-03,-4.414350017932644997e-03,-3.088990805857489471e-03,3.978707646426999332e-03,6.339829938868322447e-03,-3.439159648041479212e-03,6.524395301908677419e-03,-4.909110054972575486e-03,2.239633546818467231e-03,7.692686288973903629e-03,-1.463294168111086324e-04,-9.141123765121925637e-05,5.854166950076280726e-03,-1.768847916740808740e-03,1.514001496813276778e-03,9.490551097081361895e-03,3.584136739300249844e-03,-1.804990548765806908e-03,-3.586323486876584390e-03,9.866924903531951521e-03,6.065570611983310545e-03,-5.271764315113196785e-03,-1.824077067596029155e-03,-1.164010824772687908e-03,-1.210848690893298997e-03,-7.788819564823940000e-03,7.723482626407738444e-03,1.169605286369889384e-03,-9.044763914551114461e-03,1.690782247239748168e-02,5.076515856537355858e-05,-3.863844549537870030e-03,-5.618821006145187195e-03,2.283039511064862900e-
03,-1.009826522298264877e-03,6.880656890269246095e-03,4.128864725581660178e-03,-6.600444159369353234e-03,3.352330884826369879e-03,-9.986042581713438365e-04,-4.525088913350384934e-03,-4.291238998991394986e-03,-3.067213471282941549e-03,-2.478951051641518390e-03,1.732529485738838271e-03,2.826688937640130413e-03,-2.440670360274131961e-05,1.104776841650457033e-03,4.302595876490310223e-03,-7.525151551910609984e-04,-5.041450563402143786e-03,-1.724446732145040262e-03,1.175072169263407379e-02,2.486035813433209833e-03,4.605653466667447549e-04,-2.567656178715665288e-03,3.503281155077836440e-03,5.651332766350820706e-03,2.238801599858337380e-03,2.416898148823836670e-04,6.893072273738634279e-03,-3.298983039472895562e-03,-1.636473864181158542e-03,-6.602042874328946072e-04,-4.625151981177304424e-03,-3.273159328594201639e-03,2.976100104489333171e-03,2.166551704871612968e-03,-9.405439114856755864e-03,7.862753034102386816e-04,2.101216336697066863e-03,-4.159738788776594885e-03,1.113618971874693817e-03,5.690510846341401661e-03,2.719737077298194604e-03,5.832301688057328799e-03,-7.566027471378656936e-03,-3.240049414896609744e-03,-1.444844894886862562e-03,-1.141566801645764852e-03,-1.017619328406370316e-02,8.260464492327361736e-04,4.675508499684326996e-03,2.910492456199534946e-03,-2.867996485513105241e-03,-3.262558078774591055e-03,6.087397989470962230e-03,-3.472996184002483718e-03,2.969157221750522938e-03,8.471073533748428239e-03,4.730624154166249325e-03,3.365925062284312770e-03,5.821334987276717142e-05,-7.221310881441451827e-04,5.413984024112357234e-03,-3.203689463314187379e-03,7.152948623167022443e-03,-5.457453730407889683e-03,1.833448355798156021e-03,1.151648779485280744e-02,-8.484953103121738519e-03,-1.995787918413048038e-03,7.457847757174789101e-03,-5.766363203147078197e-03,-4.786922641602789873e-03,-2.057740939124029587e-03,-6.523806131941587913e-03,-7.242621754973662014e-03,3.192601241154752190e-03,-9.979117810137128845e-04,-7.173715802077635859e-03,-3.041593735294345147e-04,-1.734356648696638555e-03,5.988648083324380443e-03,2.284226071072703156e-03,9.811911692020245344e-04,-2.394908144863009455e-03,-1.181255833773615449e-03,5.527223404597303956e-03,-2.633456555458130177e-03,4.010903188928461108e-03,-1.169047387467501324e-03,-1.793356661411754866e-03,-1.672900843601584156e-03,-4.841269529701734339e-03,7.009172588036423519e-03,9.251448297874273865e-04,8.534066581966311170e-04,-1.412103806474985590e-03,-9.142672490349361067e-03,-2.902932964002681929e-03,9.564050930758386318e-03,8.121745773825994478e-04,3.179647965084446241e-03,-1.017761232461374121e-02,5.740685847124658327e-03,-2.052059499348318078e-03,2.985173307580696409e-03,-1.841703408521397139e-03,4.251966183719527691e-03,5.791649131346669060e-03,-4.028349121450095285e-03,3.168934670470126020e-03,-1.016965367229812067e-02,-2.662065361979443299e-03,9.193914378483705394e-03,3.199842249279787835e-03,-2.844533829438855490e-03,-7.354068659448331942e-03,4.770859212378862992e-04,9.126067280475250686e-05,9.082099537822673277e-03,-7.579742990909883217e-03,-4.100355764307037770e-03,-6.170864942848143389e-04 
4.122856776679894274e-03,4.478125436364614048e-03,3.256687289050448804e-03,-2.633801793596834257e-04,1.929814209650764596e-03,2.995863441245279982e-03,-7.677768350992703397e-03,6.104584474165160833e-04,-3.261523604068359574e-04,2.377177705610045069e-03,-3.622822520197864753e-03,1.940227601863246188e-03,8.172942917148866282e-03,-2.196593304952732494e-03,1.605046791490029930e-03,-8.739724039771691946e-03,-1.948932544378932999e-03,-5.992216702677253051e-03,7.827257933117351407e-03,3.040460635109505389e-03,-1.111232195908772686e-03,-3.277818751308601754e-03,-5.323025036024052475e-05,-6.906630306310609230e-03,4.569279270507297860e-03,-2.586780426536763270e-03,7.313316473424150187e-04,-6.234785141967638450e-04,6.404168481394352996e-04,-7.893526233240086634e-03,-9.392038233271225375e-03,3.129835445733716512e-03,-3.438380619750586783e-03,2.468904597785540750e-03,5.477259127629982051e-03,-7.030201174479618932e-03,6.622628058736384007e-03,4.550136732193258045e-03,5.486531828062620429e-03,-1.274940690431013374e-03,-3.883307584512406697e-03,1.279643401799904993e-03,3.746044260530359028e-03,1.017633802225277364e-05,3.203964901274027749e-03,-9.723335125176504196e-03,-3.708658881238694090e-03,-1.012954304676039544e-03,6.830879395126270666e-04,1.637158322843316365e-04,2.208221043744815291e-03,8.547426875390981603e-04,-2.801400935542610725e-03,-2.584618966895910891e-03,-1.786389204712267960e-03,4.599950224083887949e-03,2.791902912749661845e-04,2.103614662644643762e-03,-3.893392728565602239e-03,-7.043307627012503069e-03,4.973896997594078745e-03,-2.039190521730854886e-03,1.128830664065324152e-03,-4.325048255621309450e-03,-3.973962011044310585e-03,-3.365810063197506585e-03,-9.276328884373272448e-04,2.243543081267357613e-03,-1.782180632653862203e-03,-8.563057862253454838e-04,-1.044023130536718630e-03,5.899145207611970754e-03,-3.479373004152187723e-03,3.486360263958397654e-03,-1.421654788404558929e-03,4.036192915801951712e-03,3.938725078936398775e-03,9.235320831946700320e-04,-3.708829980175739477e-03,6.416438795783853963e-04,-9.741449074143511758e-03,4.918316926468459943e-03,5.826132181829486506e-04,5.221502593112556725e-03,-4.459192406547669504e-03,3.355530694748744009e-03,-1.010599594755948096e-03,4.443699328984612729e-03,7.116352054410734866e-03,-2.811679006606521408e-03,-7.645257643424845442e-03,-5.973891474604900789e-03,-4.460342663846554069e-03,-5.922893968843803182e-03,-1.866580086677189360e-03,-6.554917296687187353e-03,3.834316057671783521e-03,-8.378135491564310186e-03,9.428724665210415053e-04,-1.965054975726754968e-03,-4.962130297019420704e-03,1.571516284273239512e-03,3.109437276558613735e-03,-3.214140250619514604e-03,-1.276107894815261233e-03,-3.466471076000896489e-04,-2.010641491837945015e-03,5.265900119208196707e-03,4.141016389324294125e-03,-4.950348597474947369e-03,-4.508654244574423174e-03,5.554452527111390647e-03,-6.251784904520731566e-04,8.912030173958035081e-03,6.449549669110692801e-03,2.271612429058568732e-03,2.988525666505684316e-04,-4.814793323625832233e-03,8.078216451814503388e-03,1.951148070376005359e-03,-2.188847443855665605e-03,-7.702347755958313322e-03,1.816657145020451327e-03,4.203474206716219241e-03,-2.542995183068458981e-03,2.099283219128252401e-03,-2.505701788398802775e-03,-4.378714860935168339e-03,3.096025771627104739e-03,1.369746446965596073e-03,1.590453343962896712e-03,3.388003734515432526e-03,9.323026868300292661e-03,6.588345734381129484e-03,1.435401939923193525e-03,6.810496252189680681e-03,-7.614517990849599752e-03,4.120984985129929300e-03,1.704311576932078303e-03,9.151686063144
461036e-03,-3.888264119776906492e-03,1.221310140738863029e-03,2.175153814335597634e-03,-1.467205716112831649e-03,-4.164175128652945063e-03,2.687402655055991414e-03,2.927139868925446829e-03,5.334351073020341706e-03,6.265102709230823200e-03,2.761442601063375090e-03,-4.516236308398030933e-03,8.774787582968733221e-03,-6.005476821747845161e-03,-5.130515461932126653e-03,6.994517829806902454e-03,-9.138502318670438732e-03,-5.949968080747067810e-03,2.510003996688386571e-03,5.791270091235487319e-03,-4.101896132817016666e-04,-2.866230335938686150e-03,1.801677994761605238e-05,6.975842186937683976e-04,4.558226371287922032e-04,-3.559038982262095491e-03,-2.300550944244879499e-03,-4.987501021623404543e-03,9.691566206903191177e-04,8.575048363743578705e-03,1.044218634851268313e-03,-6.703548238398649925e-03,5.687509826962060776e-03,3.401218026960607915e-03,-3.927949228713269701e-03,3.358375882981720541e-03,2.500212336334575996e-03,4.129518853276977278e-03,4.019114749591314213e-03,1.220550476416511860e-03,8.923937986714213649e-03,6.642398977790535050e-03,-9.793742187191695984e-04,-3.615292500581155505e-03,2.889955529406039014e-03,9.224734937366491777e-03,-5.962013062949057590e-04,3.010846106189554727e-03,9.564820257206679485e-03,5.194614487958231289e-03,-2.436964611504627547e-03,-6.446128243561535277e-03,7.546600527796738268e-03,-6.712580226186271211e-03,7.774083914458575044e-03,6.865592446655604218e-03,1.766194699232996167e-03,1.135876344036192837e-03,-8.671249625010887460e-03,-4.239040671842627880e-03,-4.639767261567962972e-03,6.366388534252857834e-03,4.204446249359532727e-03,5.956861526779031780e-03,-9.354312963803625494e-03,3.534725192236427577e-03,-7.346252856017356121e-03,-1.215515224258706785e-03,-1.471523795600790732e-03,1.089992847854025400e-03,2.408094314477790977e-03,-3.993992010200241721e-03,8.507591180213804061e-04,-2.544762059842164536e-03,6.065436202549026384e-04,6.824512893785201692e-03,8.296311041051078125e-03,-1.135712898951205495e-02,9.965647081504998284e-03,-1.206070497681869559e-02,2.850445431853458004e-03,-6.408222916797692327e-03,7.163476213683878102e-03,9.188801742606333978e-03,5.973272252485507285e-03,-3.255459195754440913e-03,8.519327390823247612e-05,-1.112991725079423826e-03,-1.358887739321729517e-03,5.967258220667912352e-03,-5.174494147069104259e-03,-9.676830572174372966e-03,-6.908135290961752231e-03,6.745670484657909777e-03,2.540734899081873409e-03,-1.354666120542764602e-03,-1.482027460854029057e-04,8.771780452652181218e-03,-4.241248772340827991e-03,-2.539076441254832939e-03,9.275595049095673901e-04,7.741587780722427811e-05,-4.476831958306269101e-03,-3.297727063656411602e-03,4.509751572278086784e-04,-6.704264605930719371e-05,-1.095971455436889168e-02,-7.714762567797068919e-03,-7.940117368316679247e-03,-2.656181806843819267e-03,-6.634998465005620405e-03,8.518048017335062491e-03,9.547332401161243501e-03,1.628416418696599461e-03,-1.533488441975468328e-03,-7.502166322333985241e-03,5.174284517145361605e-03,1.240888217825555874e-03,1.660366486299393908e-03,-4.407637618901474180e-03,-5.530841037708109954e-03,-1.513783253207834356e-02,1.012431556897175344e-02,3.362424570816119774e-03,7.502741257536044628e-04,-1.266499846660093086e-03,-8.646616685224246465e-03,6.801134792205175070e-03,3.327520948641505671e-04,1.466951190918868581e-03,3.422313898681603487e-04,-1.236021735393072300e-02,-8.705977571555015282e-03,-2.956036257229084380e-03,-3.263390082238564610e-03,1.040841151665819128e-02,3.082226986673829421e-03,3.290308511611288959e-04,-2.320548958770171297e-03,-6.500608431821799406e-03,-5.94
8014807946127268e-03,4.105333754803709191e-03,3.947823077210238681e-03,4.813957789695365308e-03,-7.287797179291407614e-03,4.796738288878716130e-03,9.431891013275272781e-03,4.461333754509340041e-03,2.421549155397784972e-04,-8.972051168537510321e-03,-3.794047893192793860e-04,-6.242524555397512866e-03,-7.536155597474249873e-03,3.117319613529846625e-03,3.919257955972426863e-03,-3.254486432173546093e-03,-3.666609568729742915e-03,1.246506897013385318e-03,-9.237430096907767868e-03,-5.757575248634380465e-03,-4.340610394975403900e-03,-6.026420740059506079e-03,-1.110815729761105604e-02,-6.586649769299793983e-03,-2.422286403408263882e-03,9.811511844412999783e-03,6.419733601906407044e-03,4.400612713133348307e-03,-1.060603599136210719e-03,9.093473657877555738e-04,-9.539829607355601463e-04,-2.435715653959433669e-03,-6.880783002671845075e-03,2.368009629151376309e-03,-1.825434311291775037e-03,-2.308001565841395070e-03,5.928152958649303070e-03,-8.219439256854119830e-03,-6.992135794195862737e-03,5.131249204933618739e-03,-3.833726962971862814e-03,-2.681255998907003998e-03,-2.471016311587414648e-03,2.095609660682796246e-03,-3.573274460593757994e-03,1.008534921236100418e-02,1.107021937114745745e-03,-1.784374666634355820e-03,-8.969030818744396297e-03,-1.024210013108079517e-03,-2.295864188212878333e-03,-2.013185276133257120e-03,-1.656198395343361782e-03,-2.682856704010006606e-04,-1.156284982271406185e-03,3.161117295601018236e-03,4.300879224918533091e-03,6.403461433681161344e-03,2.807155397962633097e-04,4.913835240821012515e-03,-1.443885055256955265e-03,-5.574119307574773368e-04,6.701643763397381300e-04,-8.716276060654077479e-03,5.475692942872507386e-03,-7.998756344639174065e-04,1.508311220584361567e-03,5.711304468475177434e-04,6.338108119342088361e-04,-6.233916933756356894e-03,6.825226119571976661e-04,-1.120785986703279639e-03,4.012493443534293283e-03,2.057016488393493235e-03,7.432626206625966674e-03,-1.267424573642961759e-03,-1.800335832493491156e-03,1.952949084153967967e-04,2.212734841888185089e-03,4.526360775990251545e-04,-6.300917437634263527e-03,6.522086235298496688e-03,-3.020559549322143246e-04,7.867578496611382297e-03,-2.915056525088719104e-03,-8.316440978807848605e-03,6.981002017640802054e-03,-1.160342665057671231e-02,8.960992112256985914e-03,-1.878018862016330674e-03,-4.458662199113727631e-03,4.677512358549528598e-03,8.302376081723859054e-05,-1.072346681365335093e-03,3.990012994365358528e-03,-2.017655796166418960e-03,-1.016522962999504530e-03,-4.437086202007671683e-03,-6.172473148715884594e-03,4.896025597654641084e-03,-4.369478689920628539e-04,4.908660236370603665e-04,4.625245932722175587e-04,-7.877345446649385019e-03,9.749207095975152490e-03,-1.282125482312824181e-04,1.269467317065273810e-03,5.870450822549510422e-03,-2.834405005889428179e-03,-6.555143050719171305e-03,-1.441184741520751787e-03,4.994690925790213135e-03,3.780318131008226796e-03,-1.044840538112584011e-02,3.482961809226309789e-03,-2.492542403628977832e-03,6.280453446747036143e-03,-5.769453705449449632e-03,4.923860703622999664e-03,2.243979246772548634e-04,-3.990452185658742087e-03 
-5.905644593057859590e-03,-1.774530153337838811e-03,2.307637931362237303e-03,-1.512488282199517535e-04,8.868173616661203140e-05,-6.065483885215429589e-03,-5.668171848780332103e-03,7.733838163179544353e-03,-5.867675943220330981e-03,-2.376399240146012844e-03,-7.450775747950043673e-03,-5.558706727090718426e-03,-7.233580182374388899e-03,-9.886923756035604499e-03,-3.297797838180182694e-03,-2.206596505117239173e-03,-7.609291051060398979e-04,1.317840768189845529e-03,-1.986506743286069816e-04,-5.192251299092389034e-03,-3.273174534413498486e-03,-1.489164190904241920e-03,-3.813880587217081779e-03,8.388659879887155890e-03,4.404961458847968074e-03,-1.017214932664223205e-03,9.859854472243627811e-04,5.041479250463143900e-03,5.291764413495253679e-03,4.114666065660224983e-03,-7.191659658587386435e-03,-1.276167634244158855e-03,-4.483970206871801256e-03,-2.124766225530313763e-03,-1.827867305728056099e-03,9.458731900217584891e-04,-7.012194620242265773e-03,-6.567958059941023975e-03,-8.649983410357837748e-04,-3.390136745480383344e-03,-8.740307517105868490e-05,8.624527961982143625e-05,5.984101051123817553e-04,1.844054773309288485e-03,-2.058672677485095474e-03,-7.640316075656681735e-03,1.475884812631513519e-03,2.738596509119553199e-03,-2.160335683434267285e-03,-7.758163234208417846e-04,-3.462418837900434893e-03,6.933268342219182055e-03,-8.795698549264241092e-03,-5.460254675440830326e-03,7.596875736550984595e-03,-4.907429079268749161e-03,-6.452530605199659208e-03,-1.457658298652895622e-04,-3.904605247894207994e-03,4.652707890081212984e-05,-1.367850737344929006e-02,-5.845668253420959609e-03,-1.400346760334613208e-02,9.711770203152219868e-04,-6.645567285427639651e-03,3.304948273059941238e-03,4.634466808787591000e-03,-1.354185702755056342e-03,6.101320415264670227e-03,-6.533004772755632295e-03,4.098752247737949723e-03,-6.949027923916801554e-03,-7.499602264283817231e-04,-7.685492887544199958e-03,7.576489034555095384e-03,2.058307170520865328e-03,1.109664533728004984e-02,6.520206180483002637e-04,2.567945635136405572e-05,-5.479089467047048093e-03,-5.564061063466025324e-03,-4.928394841032586460e-03,-1.165121814775286682e-04,-2.590408449966763709e-03,-6.491916070545752752e-03,2.628453742862416325e-03,9.734444740013826342e-03,-5.465140796168311242e-03,2.368091113036649605e-03,4.149407318210142186e-03,1.269022752778949928e-03,-5.742577037320886410e-03,-1.415050567835363310e-03,2.948573938611668654e-04,-6.425548305683795227e-04,-4.371910214269307819e-04,-2.162731635773321886e-04,-2.580440905745929356e-04,-9.078486519696605389e-03,7.478716177099838970e-03,4.099102044280643689e-03,1.228000560488633729e-03,-6.625469489350603591e-03,1.026732349082410772e-04,1.207417720406043756e-03,8.524439854626828482e-04,2.288473317889497781e-03,-1.541709280634304092e-04,9.657194128623133866e-03,5.188221647911084398e-04,-6.348146046625455056e-03,-6.195767211604617103e-03,7.183022054861760161e-04,-4.319270179956989468e-03,-2.883594066847188983e-03,8.807813731681008590e-03,2.970687464246074810e-03,-6.304000163070417988e-03,-4.133245759583610221e-03,4.789023506148456372e-03,-8.523212526108615503e-03,-2.626253679240722096e-03,-4.350581433097031395e-03,7.083793013056147864e-03,-2.783409094281638196e-03,-1.608304339913232397e-03,5.987562559384386127e-03,3.654004822086720505e-03,5.674239871780318266e-03,3.035248715812314526e-04,2.148081986697481134e-03,-1.354227205520311741e-02,-1.310307250874598144e-03,-5.539137287977898005e-03,-3.622696069910012577e-03,-3.320856601639055050e-03,1.102281976969370775e-02,-1.964826972298104688e-03,-8.896924109226871102e
-03,5.843284341389807937e-04,3.597079748597348161e-03,-1.912755588097196082e-03,-7.366229035312200392e-03,1.746647578101003092e-03,1.372321456039333164e-03,4.886821298804373999e-03,-4.189622417538278742e-03,-5.705849487166988858e-03,4.110553751788045959e-04,-1.555386271724781529e-03,-3.929703777488030222e-03,2.175011590562543010e-03,-2.868294401891524799e-03,-2.704608581798020189e-03,5.401827072902078142e-03,-3.800187420728831558e-03,4.122319247610171239e-03,-2.379758318026980232e-03,-7.050317402196267565e-04,-2.160270147150144659e-03,-2.265816721021589580e-03,-3.371314426390836089e-03,-4.370960027698769616e-03,-1.119367486601389409e-03,3.576746388696102653e-03,-5.934085097369906810e-03,8.253778946612877303e-04,2.864039986611004347e-03,-6.560662849243339804e-04,6.720564858116282944e-03,-5.521951602781603264e-04,-5.093746911286204174e-03,2.277842353495174079e-03,-9.660676133181235340e-04,1.765635240659938583e-03,1.228639462775544785e-03,5.281045220231409335e-03,4.345790571889183128e-03,3.078963602975258798e-03,7.945712978576749755e-03,1.994900991167294045e-03,1.386348950033984779e-03,-2.992905965093334373e-03,-1.087743817873676076e-03,2.176106329380439724e-03,-3.283116760755304224e-03,1.665218895149062290e-03,6.382910042280589939e-03,2.885945856591859947e-03,-1.622536557224841082e-03,-3.475460654808737709e-04,4.762274237245811581e-04,-3.813794768216703483e-03,7.821981396493860733e-03,7.199061980387021220e-03,-2.123615466562873977e-04,-1.044348802835190666e-02,7.904545904875532811e-04,-2.898082702366188389e-03,1.792823304819439467e-03,-7.057784219891175687e-03,-1.128000452938790303e-03,-4.498835253526781239e-04,8.323130215046768351e-03,1.495577104788469022e-03,7.729508105449877514e-03,2.095228277198272343e-03,1.431886079095708551e-03,-3.915901407706308228e-03,3.012878481685645881e-03,4.565670213361672052e-03,2.419380528001253885e-03,2.904451922275300569e-03,-5.676854698421698354e-03,6.761562222260732863e-03,-3.076184979103936852e-03,1.580195956038878168e-04,-2.339736692255813249e-03,6.219390936707412578e-04,-4.293527100823653353e-03,-1.887016702110965111e-03,-5.606249788440847981e-03,-4.819748698840562243e-03,6.120106654292562995e-03,-5.025565739835513666e-04,-2.753887610742984399e-03,4.689342529871283992e-03,-3.692688540406685507e-03,-5.002367524843152197e-03,-7.035594872534102311e-03,-8.773286648497937573e-03,2.529163931909618988e-03,8.639651615014445549e-03,-7.204355195586975395e-03,3.297672814436626077e-04,-3.151792527206148577e-03,-4.936842028240866556e-03,1.860833278428749183e-04,8.330675633602015531e-03,7.881828324635914082e-03,-7.917872676689174591e-03,8.221777516685516846e-03,-9.146718982893926701e-04,-7.602321237045844317e-03,7.267946135299110412e-03,1.931522684455000459e-03,-1.160484514707322374e-03,4.795325138166292881e-04,8.447442514233875419e-03,9.039004104287006987e-04,-1.892443946268453732e-03,4.641958402141680914e-03,1.049795311272995642e-02,5.674710490866646936e-04,4.175763862532086315e-04,3.787932924149255316e-03,1.300049102934404226e-02,-2.598806422556219561e-03,-9.669289088756411257e-03,-2.213998515969262115e-03,1.170254869344217880e-02,-2.557109918005401848e-03,-6.959422605533016571e-04,1.214800114264207032e-02,5.421752002682233076e-03,-1.330259260491030784e-02,-1.076904404540173768e-03,-6.519776823996043837e-03,8.234155379950696936e-04,-4.413340582332906802e-04,8.816516008437661964e-03,4.642852843972995920e-03,3.283988255648527359e-03,-4.405389115562857033e-03,5.977373740036559177e-03,-1.863641979410687183e-03,2.203896172149959112e-03,-4.328932193340851754e-03,5.3617539
39360986917e-03,4.182881756316436835e-03,1.367839825739240690e-03,-2.877083587313077409e-03,7.169489764058708907e-03,-2.845755137650488823e-03,4.190017907545895946e-03,-2.686386939569991930e-03,-2.071848316775849307e-03,1.112050149947176048e-03,-6.463610079786416898e-03,3.858268823868207039e-04,2.814641450662972865e-04,-2.825298109278104319e-03,6.319429613396182477e-03,3.152411253558723425e-03,4.331846751006346828e-03,-8.747527700085116348e-03,-1.960768082927165888e-03,-2.027743819603580137e-03,-9.404806425327620709e-04,1.085206660869811150e-02,-2.438368450826737706e-03,1.237769089640473972e-03,2.023213128427301614e-03,6.708164613849488568e-03,3.829834591015505042e-03,-3.878891637037906129e-03,3.686669884440998361e-03,-1.119103790437165355e-03,-2.967177286511194639e-03,4.295392154301766116e-03,-4.458354416067065659e-03,1.191800308783097506e-02,5.652318752257322332e-03,-4.170871299828733821e-04,2.542652232243121715e-03,1.786264668511898046e-03,-8.460697489552502148e-05,6.098303489989692579e-05,2.208858891892196991e-03,-2.036732425751767907e-03,-3.465088025561639026e-03,-2.104334491113167734e-03,-1.026523114028406007e-02,2.301653263688634588e-03,1.725352508974774528e-03,-1.715803760752500394e-03,4.917758150688010185e-03,7.425253926554995637e-03,-2.803985543171602906e-03,-1.256335149546511593e-03,-1.976006355380081469e-03,7.335995235339996291e-03,1.382783852516690764e-03,2.765384318117158988e-03,-1.022981499478885573e-02,1.319687028120772581e-03,9.579337246313868875e-03,-2.579697055873624890e-03,-8.479479510430517133e-03,-5.779664817410822518e-03,-4.715220533915577550e-03,1.480169957173567928e-04,4.511554177957993045e-03,-1.405799870923223581e-02,-2.341207032276549586e-03,-2.917397582393316258e-03,-5.025439644920285179e-03,-4.035303791337908896e-03,5.067635911119130684e-03,1.167281502041239358e-02,3.115774855621542775e-03,2.190745594610818935e-03,1.085887241550348610e-02,-6.143378355100891086e-03,3.224574171250093983e-04,6.334540792731073108e-03,-2.896213138129471966e-04,-1.469313465653382964e-04,6.133664868736617842e-03,-6.889485498428093817e-03,-2.840816411025326536e-03,-9.684462969043053718e-03,9.310205504371992522e-04,5.466613541926931222e-05,-6.234497195071573780e-03,2.196440851099270512e-03,-3.730726975326789131e-03,-7.106396233049531129e-03,9.419659572463958547e-03,2.389306269958260936e-03,-5.374404648720311137e-03,4.807206341811287240e-03,4.069423614703986244e-04,-3.570616461235003296e-03,-1.662963977178472195e-03,-3.692046230883084829e-03,-3.759879186368661214e-03,3.369497691352008571e-03,-8.153095005848277552e-03,1.207447219078616740e-03,-1.536191630068083243e-02,2.048395818299194194e-03,7.958701378925578732e-03,4.313540309336392695e-03,2.749223351128464703e-04,-1.562384102130107968e-03,-1.700883236739864730e-03,5.581545994771643787e-03,-5.896587414530333137e-05,-9.221750251817135632e-03,-6.350760551162982332e-05,-2.505180829828746163e-04,-1.317647416180797422e-03,6.533047861982157686e-03,-2.949825540052360140e-03,-7.361824657761171370e-03,-4.487765368477817924e-05,1.062147338252183992e-02,1.515694822402736995e-03,-3.816115689773298073e-03 
-4.534098078336815628e-03,6.849192188008430479e-03,6.114185724681494780e-03,4.815698188358475719e-03,1.827403775493088378e-03,-1.771960016430009638e-03,-1.811592516743957409e-02,5.647750501191209187e-03,2.566646202828766054e-03,5.901879578861258906e-03,2.710115296173925582e-03,-3.108575110710365061e-03,-1.696628977650063491e-03,1.843114581384351332e-03,-5.409727083767143854e-03,-1.738894082235687821e-03,-2.400020460010292116e-03,-1.126836969328095378e-02,8.250302861224483595e-04,-1.459813897620133513e-03,-1.512895818243252988e-04,-1.137755650105156400e-03,8.514027771214378567e-04,5.573306484869870910e-03,-9.401091131231845144e-03,5.776685082104854319e-03,-8.843813426620995974e-04,-4.064608477850676499e-03,-1.236272312255063234e-03,2.977858161977153372e-03,3.280462002315374069e-03,5.026911518989698666e-03,-1.136436443639708693e-02,5.353692459260977945e-04,2.058056546815754300e-04,-2.614341263601129266e-03,8.735944882896378230e-03,-5.292546526087839494e-03,-8.804874755825467178e-03,-7.429101630708970748e-03,1.280543401779044368e-03,4.882408597154034094e-03,-4.909993960470904716e-03,-5.869653363832966209e-03,1.369973946495601320e-03,-2.309057830870189267e-03,-3.522789812510426256e-03,-6.639931960113974393e-03,-4.765725846069952983e-03,2.528254745886695577e-04,3.650340545506370292e-03,3.012008518528406890e-03,4.761739373730936031e-03,-6.847301847263485147e-03,-3.836433017933991736e-03,1.569732476319915049e-03,-8.651320534230504122e-03,3.062093763288432566e-03,-6.445844021165881600e-03,7.708770228205648160e-03,-1.647269619077288165e-03,1.190248575070159548e-02,5.107348145403569663e-03,2.696635455015054458e-03,7.827166809924601329e-03,-1.474431138526447046e-03,2.658743643968861830e-04,5.597834999014053223e-03,5.566964955232189122e-03,-4.254356309905334838e-03,1.171902051100302865e-03,3.329472544330513376e-03,1.422197256240796638e-03,-7.302857538461847431e-03,-2.726912339111794373e-03,6.273045539291931955e-03,7.187021764819795204e-03,-1.581841205533981391e-03,-4.125904440333747446e-03,-4.923338787512414848e-03,9.770846090444871776e-03,1.354670932570760508e-03,-4.232251170346868797e-03,-3.801518844728360002e-03,1.918618274281040344e-03,-1.629428738348268416e-03,-6.003541826759224709e-03,1.109662523619954662e-02,1.196319387428190382e-03,-7.474914182574502429e-03,-1.149833939956331798e-02,3.058490704114460394e-03,-5.010148358762658329e-03,1.721466717794402704e-02,-3.552093960054846910e-03,-2.521466235954978197e-03,-6.023903314003288371e-03,-3.648418699882635848e-03,6.627024389385941482e-03,-4.787233823035717817e-03,6.139594195327378358e-03,-2.007522736581106738e-03,5.595123993452270558e-03,-9.862064805011283575e-03,-1.107953379598031023e-03,2.015909548335715566e-03,-1.939104811302297350e-03,-9.057098673891405757e-03,-2.921406595492582851e-03,-9.388088016058494106e-03,-4.044194156569699133e-03,4.663650410955027040e-03,6.142916423237078198e-03,-7.533588162827336600e-03,-4.474242925568363942e-03,-9.961241830558074158e-04,-1.479451197500966436e-03,1.801104571573568193e-03,7.721029107892439687e-03,3.545560190394268178e-03,3.452745033252387075e-03,-6.664426063985638498e-05,-5.423608514197026872e-03,1.707174825699111836e-03,1.242231964904676666e-03,7.307866295122360245e-03,3.433862261535288417e-03,-5.485245330766354115e-03,-6.066130753239246456e-03,3.655606992328877102e-03,2.740987666371525952e-03,3.658107347003962308e-03,6.045041706710575606e-03,-2.825307496115747000e-03,-8.350048794458411056e-03,-4.979791724498132833e-04,-3.220052154158961795e-03,-5.312864876774493761e-03,-1.806358236557779320e-03,-4.1416
66104896311700e-04,1.265649678203946979e-03,3.393586043357075394e-03,-1.023246938416866791e-03,-2.568279248880278717e-04,-3.017852205165542662e-03,6.446921170178945143e-03,-4.726335552020700763e-03,-4.901921095849130555e-04,-1.636628188489218155e-03,-4.448619858572353848e-03,4.476121574140124446e-04,-1.722284066759126363e-03,-1.555814851104719019e-03,2.439353884679640316e-03,-3.170385785036762983e-03,-7.466788737137214360e-03,-4.174759268797669995e-03,3.955658819838274443e-03,-9.505058968292522809e-04,7.173180591589201592e-03,-9.757711063102843122e-04,5.817687440886730975e-03,-2.105817237049022617e-03,7.359802341631767504e-03,9.386408389189566590e-03,4.137309175443727727e-03,3.454120592420799676e-03,-5.803605658901976258e-03,7.540968712064246432e-03,4.503030670417019522e-03,7.990737323847717233e-03,4.183701071370860350e-03,1.913328578380110246e-03,-2.915784805496723399e-03,-5.329119644895945890e-03,-6.964574928974240700e-03,4.107683250538720840e-03,1.864454056066975045e-03,-8.151081778323337329e-03,8.132789321788302895e-04,-2.379228441267322320e-03,7.376683044202448324e-03,-8.136129620423548231e-03,-2.345753047292456744e-03,-2.232744915359911181e-03,-1.129172097655533951e-03,-9.300604296648841418e-03,-5.404485909103596761e-03,6.178741789623044649e-03,-8.422364430243823100e-03,-1.274185608716155984e-03,1.045003284953622234e-02,-5.599698636750511001e-03,-1.221699812420134557e-03,1.476220789721073784e-03,-6.040873477462364340e-03,-5.072185096963006089e-03,4.846160764095479995e-03,-4.350469387679113675e-03,-1.134378075179919487e-03,2.013723017125129255e-03,3.879579458481813032e-03,-6.752112688260289257e-04,1.209150039516811246e-03,-4.510022121811243200e-03,6.173369217739715096e-04,-5.958863787452666014e-04,1.734215694807290584e-03,-4.254480001665809571e-03,1.002067949640361938e-03,6.144046377478374309e-03,7.866302037012298104e-03,4.053025055770947786e-03,9.555068502514668443e-04,6.529045090998676290e-03,-2.144321123512262731e-03,4.402668463608658614e-03,2.404911473786102826e-03,1.126994179908410521e-03,1.344711052161507705e-03,-1.395330448271106831e-03,9.325770391424688388e-03,5.815313362369818086e-03,3.189799427686695193e-03,-3.548606851274911395e-03,-3.006696210742793550e-03,-3.792635905921750740e-03,-6.738645539226912826e-03,-6.897334467345075590e-03,-6.220227979852293969e-04,-2.083830358800284246e-03,-4.477621889080072790e-03,6.960949658155943069e-03,-7.812612084520031627e-03,5.965424923822612580e-03,5.298527320823923775e-03,-5.116161698886551208e-03,-2.339333756974940493e-03,5.865396029796272372e-03,6.533511973353810405e-03,-8.555330010336802474e-03,-3.993766894967977488e-03,-6.744862750756112565e-03,-2.792099737314364220e-03,-1.787977175449816190e-03,-3.466762224929666875e-03,2.204456993796364683e-03,-3.683750337041471513e-03,-4.603649835150472672e-03,-8.306777907739032159e-04,-2.213858675118998004e-03,-3.084646237652429769e-03,1.760193356184391184e-03,1.812748002212797151e-04,-4.013327035538985510e-03,3.858232096032022680e-03,4.865837964934042852e-03,1.231266966798979156e-03,5.044024764969820378e-03,-2.878258924248026495e-03,-2.528182855930796204e-03,8.730775710614024736e-04,5.295085930436573864e-03,1.988033373783614464e-03,4.594442071336997738e-03,-2.162697418333681815e-03,5.360275267061809581e-03,8.723259585250246403e-03,2.578171931640863609e-03,-5.702146751589340194e-03,9.008234342270188483e-04,-2.904148680147031203e-03,-8.302267522406359818e-03,3.553901146509305057e-03,5.022002043263257796e-03,2.533184404292014753e-03,-3.837565041931460179e-04,1.000017893856313582e-03,5.3311166456852
39760e-04,-2.767036816198442228e-03,-1.145755491165286865e-03,-5.699426699933391252e-03,8.581669903556590762e-03,5.157404192070129748e-03,-1.185418255071199337e-03,-5.194961499488680995e-03,3.403176362790380580e-03,-3.102963327665393331e-03,-4.196783654173899016e-03,-4.430406758299201982e-04,1.623359634490678096e-03,8.512099557044160115e-04,-8.162313968133702580e-03,1.006456373359978368e-02,-1.237828673094658197e-03,9.089541468033165636e-03,-6.933777146990769726e-03,3.429859367671933210e-03,1.199714515975548257e-03,-4.803731660558604988e-04,-8.737194259059020973e-03,3.880527549898236690e-03,-1.618897988194309152e-03,1.034652878123549190e-02,5.186019249257378238e-04,-8.983044616169322955e-03,-1.310003918343210193e-03,-6.124918375793396430e-03,2.115203554455690090e-03,-1.175255016353015996e-03,1.206029070015112240e-02,-6.797077118695721764e-04,1.752089871161860391e-03,9.249010619344453111e-04,-7.784628282970427506e-03,3.197851576035150821e-03,-3.483736768428755386e-03,1.676242139206522925e-03,-8.185489831073455499e-03,-1.442805000106100804e-03,3.285010593862337010e-04,1.066857729608857233e-02,-7.250435833844154701e-03,4.848514886799944190e-03,-1.098362146528108173e-02,5.720415334932072056e-03,8.233820263701751392e-05,-5.770851904492168248e-03,-2.908117743042934900e-03,7.679872729697504591e-03,1.741266489414753076e-03,-3.868100493724609991e-03,-1.004479069456078654e-03,-7.600748979321356701e-03,-3.383963163065456786e-03,-7.729722205634558495e-03,-2.391850796255346433e-03,6.819530092221043165e-03,-9.189363763494129769e-03,2.786041543308511630e-05,2.451723628247577929e-03,1.403515738623238888e-03,-5.697652158162728022e-04,6.752872165075847130e-03,-1.042649774875073140e-02,5.277172128690883583e-03,-4.822910580543697014e-03,5.708873454620816412e-03,-6.119571585117571447e-03,2.334977393882249098e-03,-1.010276299838848597e-02,6.602910510151032558e-03,4.633462965889827839e-03,2.220888507476668182e-04,-6.437797076966282617e-03,-2.135372575315801474e-03,1.757858977579840375e-03,-4.788656444855194858e-03,5.530954208428754543e-03,-4.623641967066181857e-03,7.641512105844802151e-03,-2.363533077272963329e-03,9.959457371729696189e-04,6.696095615814554296e-04,6.916731121236688838e-03,-4.142331784041363915e-03,-3.049963571720841829e-03,5.778075351886924685e-04,-6.798454053934503347e-03,4.318411175014957686e-03,-3.808385987101055477e-03,-1.213737038399176522e-02,4.665947771995562245e-03,3.166243400316310058e-03,7.677613246571531221e-03,2.905041857552214703e-03,-4.411625792754116440e-03,-2.822152294930886218e-03,8.024231730220635420e-03,1.680362779851988239e-03,-4.713653532350771044e-03,3.154226611146687936e-03,1.851222839057078615e-03,3.608815087146317532e-03,7.248122222725630545e-03,-1.644993222101852727e-03,1.011920665017201557e-02,6.796735741095184932e-05,3.207173326976802633e-04,-4.349783012364921014e-03,3.177199238198968813e-04,1.099417872658150705e-03,-8.689221679031340281e-03,-6.739001487710619825e-03,-1.103004605712425325e-04,-2.404388422225260322e-03,-4.512263344852864011e-03,-3.332757453437055692e-03,8.279990498181507696e-05,-1.875787185755253264e-03 
5.024882392562693770e-03,4.381477062573718717e-03,-2.164062700140503858e-04,2.665949421723010169e-03,-1.314635337294060282e-03,2.468728472027629985e-03,-1.306185521131826913e-02,-1.354971205301058225e-03,-4.776503420828904362e-03,1.639672543919786169e-03,4.121756028904449004e-03,-4.680421464396782895e-03,2.015692564103129143e-03,-5.215978514526398505e-03,-7.972515481757933356e-03,-3.586122934357075399e-03,3.837485703076321409e-03,-3.673825528276443344e-03,2.482006932908380897e-03,-9.398971467140852798e-03,3.528212431661151456e-03,-3.368198641800095906e-03,-6.343041473914905366e-04,-4.536782700376787436e-04,-5.161291660147444664e-03,7.299395387776315370e-04,1.943797091566938640e-03,1.131913968442294603e-02,-4.498832843845040202e-03,8.781861271519758247e-03,7.623295866754040704e-04,1.061615462668252579e-03,3.753075534282158260e-03,9.719677598552953430e-04,-9.938590856315444519e-03,-1.298679761982629403e-02,-9.192958143648398070e-03,4.887584542176881888e-03,-3.138408758541693083e-03,-7.688989706641089633e-03,-7.157832561377235536e-04,3.469676241044441231e-04,-5.064053577373846433e-03,3.142449654721013366e-03,-2.101614667456586007e-03,3.717315529494762279e-04,8.328961265564177596e-03,-4.137191671421026548e-03,6.759298167938648837e-04,2.975623588259366722e-03,6.513183483812038689e-03,6.779617027188833585e-03,-7.873452996687587248e-04,3.907234679686388649e-03,3.530341978919652145e-03,2.387540716965636661e-03,-6.239522504391248336e-03,1.764664685606027669e-03,-1.808587232355696704e-03,5.978521784330512526e-03,1.716714343124713365e-03,-9.185430978264507232e-03,-9.308615461796638374e-03,-4.850155928415734140e-03,8.041509529842248488e-04,-1.406576530383939726e-02,3.379560763519621862e-03,1.982908771219359014e-03,-1.485245989272432343e-02,-5.264159244794182346e-03,-2.960660979712068357e-03,8.542145849893545417e-03,-1.882932691174213921e-03,-9.465560032293917139e-03,5.922944407600215164e-03,2.575609498951632586e-03,-5.644586542880537447e-03,-8.725865123710598373e-03,-9.149596845289334479e-03,4.440514980449731391e-03,1.714767098537933960e-03,7.402669872042471179e-04,2.465948121986063970e-03,-4.928163808333424663e-03,3.158152629366117389e-03,6.824418065550494350e-03,-2.468730413701081781e-03,-7.165348843041965433e-03,-4.331219975063277383e-03,-6.923435035551823123e-03,-8.001753183804432949e-04,-5.816086455849800441e-03,-3.303643152763068025e-03,1.009308511822597319e-03,6.629498813044289174e-03,4.832043349179310741e-03,-6.592413838692529580e-03,5.617836246769671070e-04,-8.580635745546023006e-04,-2.643734153364878537e-03,-3.702231577698172991e-03,-1.639375723187314277e-03,-3.227127282174239910e-03,-3.247696418280713020e-03,-6.436454021862713336e-03,4.142136414535809580e-03,-1.533013240237096663e-03,-5.769276554932054567e-03,-1.021720516935067718e-03,-5.610676523962659056e-03,-1.553880450134659903e-03,1.495056175864194496e-04,-3.610120597029417740e-03,-2.848512706449637596e-03,1.079742762038699358e-03,-6.776439557867816599e-03,8.079275078865884654e-03,3.324018025509065002e-04,-5.804605993675751065e-03,-3.750914152768167632e-03,3.973253378602943642e-03,-4.650676356020099975e-03,-8.486237102710804045e-03,5.505415555627017968e-03,8.282334440092352021e-03,-2.420803187455134332e-03,1.087289569075112518e-03,-5.499870852182523785e-03,-6.967510360490560689e-03,-2.788469183783037091e-04,-2.454699155261739086e-03,-7.924773277109045319e-03,-2.176511772543454452e-03,-2.495526783676037339e-03,-1.001222130738128185e-02,1.142116359571466143e-02,2.070285491709711843e-03,7.766122365253466664e-03,5.794221649886835759e-03,-1.
712136649316389888e-03,2.299395920709827474e-03,1.057451781966529990e-03,3.206109298290267442e-04,5.321667131601974521e-03,-7.075076334351079396e-03,-2.331457832093894976e-03,-5.735056954658895000e-03,3.705455346211782193e-03,-2.456353766216274551e-03,-1.527897443639008842e-03,-1.340203250889166310e-03,-5.700718357879382830e-03,4.668891762119884228e-03,-1.660631110584175757e-02,3.827416988613144191e-03,4.786281034068726875e-03,-7.998059212848487171e-03,1.420273811235534802e-03,-4.243567906399526787e-03,-2.283232859535656249e-04,9.036453038395256005e-04,7.824462645403330516e-03,-8.989523448373604869e-03,-1.672417056139307494e-04,-4.135943669440970104e-03,-5.562676559235786274e-04,-1.500812356361650010e-04,-2.055612646839851711e-03,-2.615638057550994343e-03,-1.120655554822521695e-02,2.083656783089588536e-03,-5.354443502606578281e-03,4.784524841961099000e-03,-4.176877995580459066e-03,-7.469908550421093833e-04,8.252757031433296392e-03,-8.010470569843719524e-03,4.801322834092112447e-03,-2.522524511404256129e-03,3.011424420731614437e-03,7.240188053139762388e-03,6.762667783331241002e-03,9.450137329838623809e-04,-1.370287123446234986e-03,3.572728509789551126e-04,2.675613159762618362e-03,-1.167453458083360200e-02,3.269878338360410371e-03,1.031504026615814862e-02,2.631116233616091499e-03,-2.530544069348849168e-03,-4.669821505119002396e-03,3.452476018958668298e-03,-2.871181550608824587e-03,-2.117101717002464918e-03,7.456410467468008001e-03,2.281676116147056659e-03,-3.328627006578062115e-03,5.008924172327570284e-03,8.328333173162333477e-04,2.937929675952189335e-03,2.060719726027218299e-03,7.872471874857411584e-04,2.065996115545779607e-03,-2.677642452569471958e-03,-4.224398809650093366e-03,1.511373755835233260e-03,-3.750176667120245361e-03,-3.932919122375175096e-03,-6.527313536949056480e-04,7.258240671213717424e-03,1.166847582830177286e-03,5.406154298841111400e-03,3.276100642576242755e-03,2.889723280550448616e-03,1.491381117294365963e-04,3.820304746385444396e-03,-5.203351514592750512e-03,-2.132402429235609551e-03,7.748806689624866337e-03,2.563391787591751732e-03,7.758726864056724251e-03,2.400276225592438625e-03,-1.203755331452854458e-03,-2.384358548357636692e-03,2.738090065324549471e-03,-3.963954343388530720e-03,-1.496391199801238399e-03,8.741535700034344658e-03,-4.028671897545455655e-03,1.599119524681793863e-03,3.875196699105881715e-04,-6.575630731938802813e-03,2.201372706789812925e-03,-2.405282970054817629e-03,-9.500373293629625279e-03,-3.275352900854084943e-03,-3.068736430175587719e-03,3.876687498106415244e-03,-5.568667347323602405e-03,-1.093575883424220165e-02,-8.575717744959004702e-04,-5.510166791840150242e-03,-2.154815700815739671e-03,5.477551241771580828e-03,8.231812716197821345e-03,-2.394279906480533890e-03,-5.598011023648525805e-03,-1.794589969606301145e-03,-7.059818421487547594e-04,1.632193279769597345e-03,-4.427040178242954392e-03,1.555865494147554151e-02,-9.609559037132413837e-04,-1.745010398311278889e-03,8.871455683840208297e-04,1.254288535600259777e-03,-2.649823534320624031e-04,-4.769008469753037611e-03,2.561901351533915801e-03,6.393575086859347488e-04,-2.464041281848387225e-03,1.362508729303788142e-02,-1.224579999403990066e-03,-6.830603363878091049e-03,9.706364333722739208e-04,3.304327777181774797e-03,1.199447969096291422e-02,2.148771435693616768e-04,-4.277535736008893433e-03,-7.033462547337124282e-03,-4.494354096633610719e-03,5.704519878174639687e-03,-9.746670010055124027e-05,2.401462792178909750e-03,-3.971955316775382938e-03,-6.211362150426392482e-03,3.945618082769843625e-03,-4.72002471
1024672047e-03,6.254650240059199352e-04,-4.850150315833086367e-03,1.464575124103116100e-03,3.034971358538749494e-03,-5.880219796016577204e-03,4.589160933045463922e-03,-4.179558328400272138e-03,-1.476500947830169334e-03,-1.124433012835635319e-03,-8.787400046888762736e-03,7.911444051488414776e-04,-1.531507009605589638e-03,-1.275270701814602966e-03,-4.218969719264499020e-03,5.712153197712808994e-03,-3.841733601824361133e-03,4.209270362156485555e-03,3.961556748497254386e-03,4.300775156050894166e-03,-7.685782260353001688e-03,8.560691844847214370e-04,-1.989917887229536268e-03,6.610593655877764971e-03,6.485136387495192903e-03,6.597583420459542580e-04,4.661749666004123133e-03,-5.967294305661200934e-03,-2.819818335517311394e-03,-2.064469219749017360e-03,-1.766203218194237461e-03,-1.160397967819699677e-03,2.214742806989172114e-03,-2.084475349083671821e-03,4.389641001151043352e-03,-6.137485604250520095e-03,4.503919115575388994e-03,-1.170105101361074975e-03,-1.023337887596514548e-03,-5.894397795837998805e-03,-6.471973269615585722e-03,-3.335069830402658930e-03,2.346267203919848859e-03,-2.658977566637895328e-03,4.576325143321804088e-03,-6.267709683799368471e-03,5.559633055318384011e-03,7.116287436591725232e-03,4.447036123630950660e-03,-1.219534709770890265e-03,-2.933719495856987412e-03,3.149791439459659944e-03,4.029620769228229650e-04,-1.688470165312773419e-04,-7.642674789564347405e-03,-4.202047513041856706e-04,-1.033225439729930015e-03,-2.460944270654501480e-03,1.433798918609131921e-03,2.657919292002755215e-03,8.217205849314474647e-03,-4.986175004244293961e-03,-2.466572437754373757e-03,-2.900611712699353406e-04,-4.110046472649148998e-03,-2.889060821007135259e-03,5.899663105825958542e-03,-3.362567580760226701e-03,4.547410219942806149e-03,-7.616400341019618526e-03,-7.287568482061526675e-04,-8.083936726062368981e-04,-1.619842020022956681e-04,6.143825851435277753e-05,5.179119692298822693e-03,1.790710626291314427e-03,1.549761287123326611e-03,-9.174187552202827181e-04,1.341948325829733562e-03,5.811451785672923885e-04,-8.420303558321983557e-05,4.900696540884430519e-03,3.865127509234059320e-03,-4.021561011861736305e-03,-8.758744813439101615e-04,6.664836541342911624e-04,-6.082081185764197893e-03,-5.274438060596525627e-04,-1.884343436927030926e-03,-1.961473823537065835e-03,-1.031663909382743091e-02,-4.112952680071562880e-03,2.202477368648367035e-03,-4.015683809633296963e-03,2.479787159683348365e-03,-2.920664739691571348e-03,-2.459521424917299366e-03,-4.441329294745490104e-03,5.088984778966163663e-03,3.097621744638566032e-04,8.221883095541695358e-03,6.573854426266095841e-03,1.025062619018422437e-03,-2.430508614523594832e-03,-5.307576408488493512e-03,-8.901556314318032244e-04,9.430092736239834668e-04,5.763225912599129371e-03,9.558311775464860618e-04,-1.170904304294403649e-03,-5.417930480032671220e-03,-2.447578407838030550e-04,2.340729459048945717e-03,1.546911459362807059e-04,3.685452995427854379e-03,-8.032263775844036141e-03,3.099789294690743309e-04,-3.383054993175614806e-04,7.892934711383277693e-03,4.675804869904380719e-03,1.246546181517513762e-03,-7.869405313139626532e-03 
2.871822670013306125e-03,1.171731666788380010e-03,1.555487901033019826e-03,4.405882035911779480e-03,5.097294369378938504e-03,2.539563163481889699e-04,-2.302203073263829093e-03,-1.405961875406234757e-03,7.929470412999383919e-03,5.136261200846969736e-03,-1.281586081878126598e-03,-5.424836516272901754e-03,-3.322020469877767085e-03,4.267841830354111507e-03,7.161075339159590856e-04,-3.598584361544829822e-03,7.641788810040191227e-03,2.544445911741982039e-03,-1.361033399963381633e-03,-2.764624042298568975e-03,3.616760959328379436e-03,-1.524342179481511309e-02,1.076456539797547053e-02,-8.822971241546975762e-03,-1.000590630233888154e-02,5.762224411472171258e-03,1.290337853542521509e-03,-1.255367943514489265e-03,4.709118960455969949e-03,-4.932595564020943353e-03,-8.906341960873932415e-03,4.178199401434570109e-03,1.859782925786728457e-03,4.394022579281510812e-03,4.991372298639650332e-03,-1.577174719069844230e-03,4.069842088557622771e-03,-7.925994540811420394e-03,-3.077461637620842495e-03,3.265375072207209444e-03,1.888986532929515239e-03,2.112883356228173557e-03,-5.856860755135084862e-03,-6.586063091262262144e-03,-4.230337400322159783e-03,5.214239197277413876e-03,1.840085901284657134e-03,2.417129454017660339e-03,4.167093536472899168e-04,6.408893224894979083e-03,-4.243609458838875498e-03,6.887945410491998384e-03,2.921585463963924986e-03,-4.494050712973126152e-03,-1.942917189353595472e-03,6.317018696596247464e-03,3.572086130955789106e-03,-1.494320204829361742e-03,1.689522250554024519e-03,2.112945426607476908e-04,8.658593877391524904e-03,-3.695397499474799632e-03,7.486238945027794828e-03,1.021895758861366028e-02,-1.301266148603813207e-03,-5.885319025719910543e-03,-2.125501727617335867e-03,1.880987788347292808e-03,3.403792838406275497e-03,-6.388086508030493501e-05,-5.062596486978710765e-03,-4.318089379551266066e-03,-2.679316126780727789e-04,-1.003865827200243685e-03,3.747017482518643153e-03,-5.316208501007310509e-03,6.094025270058271194e-03,3.652856305188984225e-03,3.518571675045567039e-03,-4.836632879780315003e-03,3.317069824560309101e-03,-8.867397498453742644e-03,1.054323984118616482e-03,-2.497642705156645319e-03,-3.594907273750994146e-03,3.358698670296905026e-03,1.251664983801643698e-02,7.286049869807188252e-03,-5.664964426803257430e-03,-1.151360728621918586e-03,-1.225007938498646814e-03,-3.656290719011736734e-04,-5.681150042169413623e-04,-1.719272209500030376e-03,1.457553212935914014e-03,-3.145675469698445189e-03,9.942832965129210897e-04,-3.413672653715533935e-03,4.054959377235098535e-03,4.390394155540751873e-03,3.407867831216435822e-04,1.233898985319271035e-03,9.543124488628063192e-04,7.278535973053154584e-03,4.313375354210568363e-03,-2.372461302806007019e-03,-4.046145570407488402e-04,1.168048567326529681e-02,-4.706177064826193648e-03,5.415552327342816608e-03,6.110654402892462967e-04,3.352867790415251740e-03,-5.471257820946230470e-03,2.596001303820044768e-03,2.187906430911620067e-03,-6.249554323340272301e-05,-3.229325604993526968e-03,-9.094728706697459072e-03,1.025615709272694256e-02,6.251046068931371961e-05,-9.226142732801896965e-03,9.843367321780403540e-04,-9.173437705477694055e-04,7.348553368342556832e-03,1.039853103505061733e-04,-1.216791792378407299e-03,-2.294718002888820538e-03,-2.951674069793200350e-03,5.126811102992643709e-04,7.824447038993797773e-03,2.963935118452359040e-03,-4.296769541644360852e-04,-8.549873298450834727e-03,-1.730895556017044562e-03,-2.984561393174048598e-03,4.686259473164504884e-03,5.000587290615195885e-03,-3.982745010812113566e-03,1.109579003230323855e-03,3.298973117094968
[Embedded dataset contents elided: the remainder of this bundled CSV file consists of rows of comma-separated double-precision values (magnitudes roughly 1e-6 to 1e-2). The raw dump hard-wraps these rows mid-number, so the values are not readable here; the intact file is available in the mlpack-2.2.5 source archive.]
62783392694569674e-03,1.097739905382341718e-03,-1.528013532156665463e-03,8.033262441163519559e-04,6.454650450466741960e-04,-8.355893138473841422e-03,5.109419773879595415e-03,-1.703038015169296843e-03,2.645983979968334968e-03,5.392067357815540606e-03,-4.251931080655618062e-03,3.085882040949658889e-03,7.264762612089706306e-03,3.432180611957521502e-03,9.584253900429412498e-04,-5.574921265850842252e-03,-2.805196959323625998e-03,-4.151319273063292133e-03,-2.319558351050891257e-03,-3.793391975491735473e-04,-5.026306419254741517e-03,-3.963955176189709433e-03,-4.850485271928040281e-04,1.692530862168222482e-03,1.082153113849593055e-02,-4.787225759699254880e-03,-1.604271741616588014e-03,-4.684270965061907918e-03,-2.212697566706423990e-03,1.094308274699957485e-02,1.182946624392143022e-02,-2.302854551800765954e-04,-1.429995342835695257e-03,2.932553767907674475e-04,-2.746676134858457713e-03,1.582325772765067525e-03,8.551920162832925927e-04,-1.491308369894877098e-03,7.309888665273454877e-03,-5.171377591859326744e-03,-5.484988587197733682e-03,-4.039453306941827998e-03,1.502444831231302238e-03,3.391873522522118132e-03,-2.130448314260603281e-03,1.518040568247093112e-04,-1.426016217453445227e-03,3.401013867546841696e-03,8.614393706426295635e-03,2.567358167663956123e-03,2.841381885694925849e-03,1.235530772651760589e-03,1.012323582190069893e-03,1.251542453116540922e-02,-3.049924983422536411e-03,-2.563558704963501706e-03,8.862039639090142510e-04,5.154501689698282561e-03,-2.672487635457350234e-03,-2.245357507017525962e-03,-6.138529534727380638e-03,-1.066850374722009328e-03,7.684356211023909977e-03,4.634402527601012498e-03,-2.225051540315959534e-03,2.184308010817254033e-03,4.387020130763453173e-03,-1.879628607471319065e-03,-4.272660495657113616e-03,-6.078988029918605367e-03,1.260305866250631748e-03,3.963201347413473387e-03,-4.741377826342899234e-03,1.339832423703237428e-03,-9.590499893474964095e-04,-4.104430080349442435e-03,-7.042581844980694153e-04,3.179903782807130732e-03,-4.492882407357592228e-03,-7.287606064076413738e-03,-2.637205875086983883e-03,5.220632259605191734e-03,8.657126511604468222e-03,-5.071364939510363684e-03,3.189666454794498791e-03,3.099340689328623603e-03,2.059274913056741586e-03,2.353196441074921052e-03,-1.228599541681995189e-03,-1.062746993155256996e-02,-1.600305832339807971e-03,4.651764227376329643e-03,-7.808712625142433467e-04,4.102787995833028119e-04,-2.628791242080018471e-03,8.338538373998902534e-03,3.694485044305411293e-03,-2.111088532336183928e-03,1.970802490906046079e-03,-1.930919618433505703e-03,7.422986479615251501e-03,7.975721983799068755e-04,-1.010369555654904578e-02,1.574248404340931884e-02,-8.711837449121313109e-03,4.322615927830145317e-03,5.050379875081626758e-03,5.415550656576071167e-03,-4.282796321183433320e-03,1.370571414685521714e-03,1.385703593835815872e-03,-1.158889948113019108e-04,2.483125427469622722e-04,4.225452249633023655e-03,5.176678448640176754e-03,1.588331465857483133e-03,-6.133383017668735462e-03,-2.644456318376601448e-03,1.707063778092840576e-03,-4.683484554069245690e-04,2.667589486191218520e-03,6.470996613380975912e-03 
5.971281374902313345e-03,-1.779434769395232948e-03,-9.644622777028377669e-03,-7.330395402368025150e-03,-5.019878243277309075e-03,4.790918580795773367e-03,-3.753232464691446790e-04,-6.316141113351642251e-03,-4.396280118666240702e-03,-9.224478590111490420e-03,-3.205462085460533250e-03,4.717902806331612754e-04,-1.270351867478213470e-03,-7.280143651150083710e-03,-6.562193636731453172e-03,-8.161163958625239009e-03,2.621362743177233206e-03,-9.861655128355769143e-03,5.603964670758523718e-03,5.947288010929109407e-03,1.087787138494074150e-02,-3.692214941214143386e-04,9.809128916891290181e-04,-8.783301256708141158e-03,-4.151443631321683619e-03,-1.254988769485104825e-03,-1.430933640878487116e-03,6.742808423896864099e-04,-2.208703492620203037e-05,8.098025408991934085e-03,2.460198152987531309e-03,9.793705239248972835e-03,-1.026555464686354155e-02,-1.275502578598592405e-03,-5.039505390552466782e-03,-5.758268643960915388e-03,4.806119193184157554e-04,-1.601919830361835974e-03,6.895017491303652088e-03,7.500644197093768928e-04,-6.039400461570400280e-03,-3.690448817354634748e-03,-8.703627661049928577e-03,-4.109253925177463605e-04,-1.724971345552347209e-03,-1.801995890625060059e-03,-2.899521999330071341e-03,1.692967219996892531e-03,-5.311603663353365104e-03,6.299857400655732836e-04,6.539410070529175006e-04,-3.056413652118940023e-03,-1.199436858677048858e-03,-4.412063157930819386e-03,3.492900247453544806e-03,-1.988047331130781271e-04,-9.573097852202640951e-03,3.495658085372914045e-03,-1.265688236897732910e-02,1.308450328062928450e-03,1.006353337689915375e-02,4.708538392337059501e-03,3.795362951518891064e-03,6.329622779103716464e-03,7.152018661943936734e-03,3.280938639406429341e-04,1.145686118954535276e-04,9.000230459811318917e-03,-6.622133787545948259e-03,7.593917952061055368e-03,9.955924054754941577e-03,1.846462657204121088e-03,2.456621255235694971e-03,-2.314943637812169261e-03,-4.159083340233371041e-03,-2.990714938567984836e-03,4.565397764857104419e-03,1.727600423005485166e-03,4.301595996938598586e-03,3.769358388427639008e-03,4.309695631123176829e-03,9.530851336978738222e-03,-6.427370470307478684e-03,-1.947254842982465558e-03,5.177117521287339860e-03,3.728100896424954049e-03,-4.740058501963042431e-03,8.928184525785852824e-03,1.385810044924650625e-02,1.387561366725610862e-03,-2.062214660392298719e-03,-5.537221922799499298e-03,4.155992335631198895e-03,-1.192765550921970515e-02,8.595813518912040832e-03,1.610077588423519234e-03,-3.614951337775264176e-04,1.002773274741865749e-03,8.124346047242280572e-03,-3.276661710547533295e-03,-5.031180836336591675e-03,2.166156002704478379e-03,-5.566983002075387345e-04,5.544598679553166751e-03,-3.457017209747628717e-03,-2.107350137271793346e-03,3.686283426627115414e-03,6.496912102863825617e-03,2.625623604779551631e-03,-5.871999336668670515e-03,2.722196883347011934e-03,-4.660688403023267824e-04,-6.158490963324554646e-03,-9.785173657781351397e-03,-3.172249852504282694e-03,-1.725729886065006588e-03,3.266045071049133129e-03,2.739521674889941645e-03,3.920638764347835620e-03,-4.031010442237482547e-03,-3.107504027239039665e-03,6.574140786991302873e-03,-3.472605555445922466e-03,-1.755132079202182879e-02,1.250308800646112014e-03,-8.160457786401528399e-03,5.038889222371438492e-03,-1.143536516200914570e-05,-6.032173337718667316e-03,-1.122652174525141488e-04,-5.673578503468878001e-03,-2.338071999418268789e-03,-5.369884268751373702e-03,-7.993400712109459405e-03,-2.964234169014610953e-03,-9.949613567007685108e-03,-5.097093813686936863e-03,-9.322065829458948613e-03,3.287219551523911688e-03,-1.
859887809500640737e-03,-1.072562295310716153e-02,4.465429186441139363e-03,7.064491191089226907e-03,5.754003498975535959e-03,1.783335363546360158e-03,-4.803778162736279181e-03,1.651040759115653686e-03,-7.370264874231500195e-03,9.157052785740900673e-04,-3.687129308582914790e-03,5.032719896552232362e-03,-2.382385090802629623e-03,-3.921258232381052283e-03,4.303745982370999097e-04,-9.308550857801009548e-03,2.332430916115190139e-03,1.867585453229731038e-03,-7.696083377820176451e-03,2.993560232530602123e-03,-5.730931899463874782e-03,-4.315049695407195858e-03,2.627399458090048413e-03,5.237074876245249519e-03,-1.323398935613575049e-03,-8.834831995286044140e-03,-2.385951912658242254e-03,-1.436385144116517206e-03,-4.907088443377081999e-03,2.360745374096022691e-04,-9.981296946672704815e-04,-5.199800438120998763e-03,-6.039602716132115882e-04,-3.177675837382550484e-03,9.489863201806626606e-04,-1.958771841103110114e-03,6.822114281273529424e-03,4.920337433121435089e-04,1.697214211914535890e-03,2.563966624657286822e-03,5.303944035031491269e-03,-2.110640394800851298e-03,-1.026359020361451799e-03,7.268186079050838622e-04,-1.178944899212865452e-02,2.903831629550560067e-03,5.022164713364064836e-03,7.430244043498172857e-03,-7.583349680074692387e-03,1.174487053631252968e-03,5.579272666507435752e-03,5.277963201047937945e-03,-4.227483620970893391e-03,1.395398991646294110e-03,-5.613564300016933746e-03,4.917071787624381803e-03,2.981841828877697213e-03,-1.532355170039733597e-03,-4.846985189031128259e-03,1.670025899927855004e-03,-2.376739068093726946e-03,-4.459417023160268216e-03,5.121176890567919610e-04,-2.190212450643561938e-03,9.919497078345125736e-04,-5.897026640232735407e-03,-1.309484929906027046e-03,2.130499612457442075e-03,6.381678559704158024e-03,8.672966561056857926e-03,-3.628932992019552519e-03,8.722898929764651751e-04,-6.888680627272203254e-03,1.613853176658990333e-03,8.952496395092373205e-04,-1.296639287761567434e-03,1.068034267611983265e-02,-5.234185164535663445e-03,-1.660455809861448653e-03,-3.566995348207896866e-03,1.628319940230996826e-03,1.576852729753467491e-02,4.151493260822975220e-03,9.080741582856018471e-03,-4.613912324084511359e-03,1.283870372052267964e-02,4.523605018072311863e-03,-1.417309408513411743e-03,-4.605979846789781153e-03,1.071003018516469233e-03,7.152688110303838040e-03,-6.490488305443490799e-03,1.257049464330078220e-02,-1.289065669262348061e-03,1.149213476925162831e-03,-2.050116873486232828e-03,2.417385762724896022e-03,4.953441318467045537e-04,-2.006092471417955138e-03,-3.409965889186271533e-03,3.102253833797349869e-03,-1.527453648787636909e-03,-8.191377761156139539e-03,6.636093176558985340e-03,5.402241869748723897e-03,2.422331876898490242e-03,7.151412993174727575e-03,5.705550010655543493e-03,1.276742603500306480e-02,-5.823526991254588704e-03,-4.342972073792298292e-03,1.446230583322259374e-03,7.259707270109260055e-04,8.471189716146283397e-03,2.038670210965815802e-03,-4.292342649863227673e-03,-1.051204107416548773e-02,-1.080484636194294556e-03,8.917625383365958974e-03,-2.511146303436144825e-03,-1.872563266556238285e-03,-2.519497533707467860e-03,-2.254732448875213652e-03,3.995450034352083894e-03,-7.895899212041632143e-03,-1.542418944787148733e-03,-1.106649050687915498e-02,4.322734921244061890e-03,5.755567300728732343e-03,-1.881595418381971875e-03,1.547810495864438589e-04,4.675179425052580599e-03,-4.525072072814629101e-03,-6.350990259907286599e-03,2.360305855444890747e-03,9.868990935593330901e-04,-9.467025230781344128e-04,7.238282558570094437e-04,6.692022164318132613e-04,-4.49860516423692
2811e-03,-1.079093819560840353e-03,-1.283319917084680111e-03,7.126630063806798504e-03,7.613191602625883278e-03,-4.907395492700316036e-04,9.601608278383618215e-04,-3.976703714245444742e-03,9.601151333402127563e-03,-3.545101704824236890e-03,-1.901590289277235377e-02,-2.769303428127415333e-03,2.622524074544945946e-03,-1.503200156374873663e-03,-1.619362906777527256e-03,7.798657862544066191e-03,9.916268359526145618e-03,-3.281349433017491900e-03,-1.928340577517366353e-03,-2.033221129851254735e-03,3.097319510687003390e-03,1.087533446271234062e-03,-3.046648708939038875e-03,4.014864260081586408e-03,-3.752080247925637570e-03,-3.435893887846637115e-03,-9.086774680789973593e-03,-7.916335437360670288e-03,-7.884191509312512766e-04,-3.077359539062799170e-03,2.458339483285552560e-03,-5.236888151484367024e-03,3.746223300399806940e-03,3.763731312166197600e-04,4.590266565694125188e-03,6.555217373109652884e-03,-6.359107449523727397e-03,4.353011159009321607e-04,-9.577806236649537980e-03,1.945361948309754559e-03,8.523359491810929972e-03,2.209217555980502387e-03,3.143786121463927000e-03,1.198294094953943761e-02,4.411624309649175770e-05,-3.998001662401543498e-03,-9.313496783646759184e-05,7.306618906105703456e-03,-3.395329381188145167e-03,3.349412306250563066e-03,2.277571894270980120e-03,-3.265091041085580598e-03,8.325550022344229469e-04,-9.346039071119575936e-03,6.133237302167762750e-03,3.519746235141488016e-04,3.275233837775387048e-03,2.094472472724720588e-04,1.718146207885306882e-03,2.902153780083484985e-03,-1.454120563770181299e-03,5.200848674746862436e-04,-2.612937904604718028e-03,1.269669650650707212e-04,5.302401804327181020e-04,-2.119332759317167549e-03,5.240633183663526845e-03,2.859115918682018660e-03,1.147567758548741126e-03,-2.425398947641312802e-04,1.680138552580699024e-02,2.191539319757327981e-03,-6.791459973755822194e-03,-1.920026435787116920e-03,-5.612224693441255664e-03,-4.811250726750948906e-03,2.625884985580920900e-03,3.819247832472858015e-03,4.225523744472570306e-04,5.528494409513076344e-03,6.667749274611679475e-03,5.314563439952150678e-03,-8.573327041781396429e-03,-5.194185128794904897e-03,-1.029745887437739709e-03,-5.261455216017618675e-03,6.047730164718941037e-03,3.735221895263289224e-03,-4.388760677820619308e-03,-1.650074002675223179e-03,4.597309251618632883e-04,-2.197885281927650612e-03,-1.645047993088443253e-03,7.765829533287755236e-03,3.460838490395894834e-03,7.377719025480585988e-03,3.032465713424862836e-03,-2.171475163774794475e-03,-1.037144771176109559e-03,-4.141082509718558083e-03,8.110416399333283954e-03,4.566378749905529444e-03,-1.428034893206167567e-03,8.844250805009737371e-03,2.446477984272514474e-03,2.179484501980503367e-03,3.772690830541680553e-03,-2.203293120850934993e-03,-2.802250592633832253e-03,-6.479105285865596362e-04,1.740960656275297291e-03,4.297372294932738646e-03,8.703831825650662837e-03,7.601553116670213030e-03,4.286450068596819982e-04,7.094779579693888220e-03,9.787232441305603048e-03,-1.068724617126788536e-03,-6.195361666354103980e-04,1.624160937799677467e-03,6.002734320341409670e-03,-5.280753624416708453e-03 
1.768322905220257382e-03,-2.155257425016966332e-03,-4.615613275248724168e-03,8.487932795776444855e-03,6.845869563121445393e-04,2.195924386422436019e-04,5.441046015408048699e-03,1.096985275255745948e-02,2.777218116276933529e-03,-1.342644790752295500e-03,-2.015618463773786138e-03,-2.782360809622169001e-03,-1.015630180156076347e-02,2.567320810393951594e-04,-6.720323520537830990e-03,1.305772507411202864e-03,-2.233411284322594365e-03,-3.142157651200528420e-03,3.372425022249525214e-03,-5.107597230094140101e-03,2.898003162191338826e-04,1.557830526975079958e-03,-1.983512004531445740e-03,2.519905413020374213e-03,-5.288752290270635299e-04,2.372291145839566882e-03,8.301264027809659996e-03,-1.267111998041834742e-03,-3.150833863434904323e-03,-4.722578888077452568e-03,1.276482416512912058e-03,-7.824173656345070546e-03,2.077876670239064104e-03,5.561763075309398424e-03,-2.044412374599046886e-03,-2.190136971406607088e-04,-6.767473223611586040e-04,3.264561330275515729e-03,-9.694996907080279808e-03,4.763250818692775154e-03,-2.585024936502051691e-03,4.483357100550403279e-04,5.810367355572775526e-03,-4.636676892458637539e-03,3.841724213420160804e-03,7.229344923274690223e-05,-5.887154687140037056e-03,2.729791619715032838e-03,-1.136542351544820965e-02,5.333254790264502221e-03,7.249434964867660508e-03,1.120712416459136891e-02,7.483807316468338170e-04,5.462537386139122361e-03,-1.487657354089969236e-03,3.968822826879230339e-03,2.522120090301504212e-03,-1.022816945022371297e-03,-5.553511974876022481e-03,2.442969136445129631e-03,-7.669238733930904862e-03,9.031403778321038175e-03,-2.336116006948877298e-03,-4.673915200201615326e-04,-5.330720760029901374e-04,9.840645815358138506e-04,1.139138731578579443e-03,-4.065501227712098320e-03,-1.722972684616912121e-03,5.323322207142293025e-03,6.501402000334221241e-03,2.483963502719125065e-04,-2.340592633848955374e-03,1.091214193425232935e-03,-1.920560789104594577e-03,-3.505340933596235494e-03,1.575993546597677635e-03,5.622716436587101366e-04,9.490470319017231884e-04,3.877721722009255540e-03,3.990709737909887977e-03,1.140441268348205486e-03,1.214879589871648577e-03,-1.363219065435654007e-02,-4.264678482947937763e-03,-1.260073468788861471e-03,-6.332024229150071647e-03,4.271744075335625061e-04,-4.746472171244640707e-03,-6.230052010648225169e-03,-1.028582254205973436e-03,-6.499023487454381152e-03,-1.734282704004436963e-03,-2.795669382517205603e-03,1.300369302034013472e-03,-5.169415445936366080e-03,-1.261625853393251277e-02,2.403757854170148096e-03,-2.031392501871240232e-03,-1.135415476492397657e-02,5.141476593193188005e-03,6.737684521555614096e-05,-2.079124561462740352e-03,-7.372268792837346520e-03,-3.216848010057728972e-03,-4.862031199911133819e-03,4.438151949411577989e-04,-4.360695796233910772e-03,2.457954598827320238e-03,-3.621851892159961943e-03,4.137729978179063395e-04,4.329821804943346543e-03,4.391028933660545523e-03,4.624849255527438072e-03,-5.293472743500775290e-03,3.205573608796106165e-03,-3.428630639868470061e-03,7.469636620355696050e-04,4.923372034497150440e-03,1.866199464809028315e-03,9.022961222252395006e-04,8.076109400766515612e-04,3.348928654992410284e-03,4.274708707813802688e-03,-2.144465450076869350e-03,1.182958915116433599e-03,4.464949580936644451e-03,2.358188860928731295e-03,-1.024785360768662792e-02,-1.569955290329616950e-03,4.341366519043732622e-03,2.239040816391386218e-03,5.671446032559355512e-03,6.840567120736958497e-04,-5.771585331086070969e-03,-3.081150582509085186e-03,-1.795272724683772943e-03,-1.397756054550699026e-03,-4.297888146653638594e-03,1.711537771037
586596e-03,5.627288493536686600e-03,-3.083845545226198380e-03,1.162414801106265681e-02,1.672749062236560374e-04,8.366366344367586394e-04,-1.343528371174685342e-02,-1.087171575813248268e-02,-3.160900828387850411e-03,-2.127318445522611429e-03,-5.083440308036918723e-03,-4.448662150794471457e-03,7.814587976246459983e-03,8.416520835802800485e-03,-1.685647445318154717e-03,1.685455080016757092e-03,1.108974015702567486e-03,-5.598987384243548368e-04,-2.577718232373535454e-03,-5.288632422731267234e-03,-1.851850495833542564e-03,-3.248757692354560956e-03,-5.522005440769884969e-03,4.229537318110896359e-03,9.215933221839299905e-04,-8.965600537526926400e-03,8.541144897509611415e-04,-1.590314377878946457e-03,-5.644169224961945001e-03,2.061715454990724339e-03,5.625600174878985123e-03,6.571425713886016803e-03,1.894175569865802879e-03,-1.828753140553588102e-03,-9.796882840005973880e-05,-5.059368949004465141e-03,5.481832697559187130e-04,4.318867233491400582e-04,4.834242363819487437e-03,-1.897853303872064140e-03,-1.927318405757723341e-03,-4.937304755438479999e-03,3.526788381830834739e-04,-6.608265259195446946e-05,2.161712716134871551e-03,9.861862362849711100e-03,9.430155963682096540e-04,-5.761959179697176501e-04,-5.238954763839131362e-03,-7.190380073450909756e-04,-2.638533461636642305e-03,-3.070051713222365572e-03,-8.247585614839568133e-03,-2.615723525102183877e-03,4.444791615856560213e-03,-8.650005209205304446e-04,-1.135647695277249708e-02,-5.558756572120086316e-03,1.668474384224687525e-03,7.362469484814009209e-03,-4.982319001638063627e-03,-6.075366599152947321e-03,-1.867957153642951177e-03,1.616135862329402817e-02,1.970623385345386582e-03,-4.509244559107656800e-03,9.394591343246348758e-03,-2.833844804265230229e-03,4.408274898609971376e-03,-3.469452385953760744e-03,3.719554007291024471e-03,-2.192888730801335288e-03,-2.502160905775811361e-03,1.299037137753946769e-03,-1.576292319470801997e-03,-8.894690497769275794e-05,-5.511810283666191031e-03,4.320751795833886917e-03,4.791004523640824894e-03,-7.888770488886892585e-03,2.023276438824191895e-03,-4.466511040705987458e-03,6.866254724353582561e-04,5.370982635066229478e-03,-3.974549626858886811e-03,1.157499687766016909e-02,-4.345597544674172606e-03,-2.302373491040064432e-03,-3.270040901074936120e-03,-4.789365715015168889e-03,1.044230000000984614e-03,-1.133078251378188050e-03,-3.930671274551471132e-04,-4.043919481480619249e-03,-8.157021152901932845e-03,-6.899704645892936784e-03,-1.856749788369252353e-04,-8.937574038555393466e-03,8.396967842286263278e-03,2.999322266213938926e-03,-4.732506233368661023e-04,-4.848195357381667250e-03,-2.593466151578876621e-03,1.059520642121785207e-03,4.510234320104750984e-03,-1.846874008089032567e-03,1.098398761573514751e-03,-2.374412390628963699e-04,-7.828059574299940995e-03,6.912067181428783492e-03,-1.798860291118762722e-04,-9.705214068178856859e-03,1.886552297289454229e-03,7.241046727714577080e-03,7.657102635282560330e-04,3.510106162875550537e-03,-3.589409814384169478e-03,-4.531599505819641251e-03,-1.357781775064205269e-03,4.824552635252228605e-03,8.302492753862585062e-03,5.292525759240758118e-04,7.048098908835282242e-05,-3.729121978905769984e-03,2.582053987960337624e-03,5.935811903956585839e-03,2.934408125839135448e-03,3.375087800084090699e-03,-1.876966883449966434e-04,7.547156971805401085e-03,-6.886037849531120164e-03,-3.969866991778528073e-03,4.399834825686378681e-03,1.219470285415223849e-03,-3.372868967608102370e-03,-1.140797137837894736e-02,-1.047360321095158753e-02,1.882126807647843377e-03,1.822381145747506166e-03,-1.05967541700155
2803e-03,5.599975180663475591e-03,2.443732786246986572e-04,2.579189671298547527e-03,5.823026769070894886e-03,9.297292428954497326e-03,-3.739904035773295408e-03,1.793542888718607895e-04,-3.016476822444495460e-03,-8.235765095683704809e-04,-7.800904584101070460e-03,-6.738316787486330756e-03,1.910492650785818860e-03,-5.715815573898387104e-03,-2.447674098681018161e-03,-6.564943628754758495e-03,1.020589113688567286e-02,4.349989689617512686e-03,-9.075311051255016760e-06,1.216056012906808863e-02,-1.356563654856270638e-03,4.070675422313238360e-03,-6.379215232561466324e-03,3.301592467618290339e-03,2.998033572087321463e-05,-3.155563038798400808e-03,1.703941479572340873e-03,-6.056321533320641916e-03,-3.600484697846336856e-03,-5.487066786002398229e-03,-1.968642732223166696e-03,8.488004654181581466e-04,5.071730752473402153e-03,5.121734138966234536e-03,2.628164420356556962e-03,-4.687003566047807067e-03,-1.967308631144363412e-03,6.735093076782054829e-03,-5.945595654823494607e-03,-2.535993747933385639e-03,-7.486602048892807525e-03,4.978688418483396584e-03,-9.708637862118557962e-04,4.352565536927571060e-03,2.652679082938536048e-03,-4.241441069123033127e-03,1.452948707463390735e-02,2.540267152790357755e-03,4.899636783539594283e-04,5.599780735700847847e-03,5.092746994945178754e-03,-4.313766477748853627e-03,-7.411724945451490928e-04,9.671907602028087308e-04,4.509597380081025418e-03,4.273206241008781388e-03,-8.678199410320188573e-03,7.547894177231431224e-03,-6.239734881811894590e-03,4.278592792605851944e-03,7.898372284166777640e-03,3.990699451957750200e-03,9.741666689164695751e-05,-2.847029293157796268e-03,2.279403395920053844e-03,5.929884344025771262e-03,7.430235400209578463e-04,4.633277206554997814e-03,1.649375072809655619e-04,-1.771730169644569449e-03,-3.254555225155770452e-03,-1.089884482887316887e-03,-8.913622146468560756e-03,-4.656232510220469772e-03,-1.236273454859606810e-02,-6.099405402589631138e-04,1.589348947671493772e-03,1.158072385610905411e-03,-6.745562125980885532e-03,-9.717769170595070422e-04,9.507266636483822572e-03,-3.155728165339367203e-03,-1.416282188577711867e-03,1.615261840716075746e-03,-2.854060019244194623e-03,8.440305918756234663e-03,-3.943878595397202189e-04,-2.823890105739947521e-03,6.580872130583300880e-03,-3.451912416979325323e-03,5.455778785308105540e-03,-1.074386124817088353e-02,-2.699659096725563409e-03,-3.097554141847124083e-03,5.368789021132222387e-03,5.845375495718900700e-03,-1.837646356088068228e-04,-7.776098512919365352e-04,3.926835423871478763e-03,-1.341056676276002621e-03,-6.412791929322377883e-03,9.379098724815543514e-04,2.506571093731828695e-03,3.356950187295012437e-03,4.223187479741592580e-04,3.338921314178632232e-03,1.312284854565322302e-03,6.040379454978964460e-03,-3.751508482769813570e-04,-4.614103039455191325e-03,5.067419415255334549e-03,-2.039597152662146577e-03,-1.870173992892461322e-03,4.396671165303057266e-03,1.126900687358033462e-03,5.455335694428865498e-03,-1.582555281074247240e-03,5.749493920924775292e-03,-1.534883738092653310e-03,-8.265018235284029083e-04,-5.114968347842625482e-03,1.129786257614641236e-03 
1.062506942375319226e-03,4.160231019343099516e-03,-2.793656879898803388e-03,-1.072285596200786909e-02,1.787269907655749044e-05,2.274852414447180084e-03,1.751624143120686329e-03,2.723973428901052020e-03,9.951800802429832879e-04,8.909878297113385743e-04,3.900543167747213061e-03,6.393930109518375197e-03,3.233145472444663888e-03,4.283378441795704199e-03,3.555133456930699258e-03,-1.240608783117145382e-03,1.266116635866851650e-03,-9.446634118117098328e-03,3.553171976033834851e-03,2.473339744763364875e-03,2.899047920285946055e-03,-8.227237392388210557e-03,-3.647601089808349441e-03,8.054555873737332620e-05,-1.278288041428972052e-03,6.842990924048774314e-03,5.541350660506378333e-03,7.355930622886457798e-03,3.779046563095410025e-03,2.488261175689794168e-03,-2.379253175609313993e-03,1.531684256459284752e-03,-5.778447695830553690e-04,5.737465942363407546e-03,1.642871448896614933e-03,8.134782648208939773e-03,1.260512497324437105e-02,7.061774970714545900e-03,-7.071583116020615850e-04,2.786066642813999315e-03,6.439099606463644489e-04,-6.945079269097422349e-04,8.672278959961429853e-04,-3.210163480003156398e-03,-4.501235826056187994e-03,-2.223811247310054830e-03,-7.763098630781316463e-03,-1.344272429687635821e-03,-2.585667307058584549e-03,4.057604663430631857e-03,1.964000044756043090e-03,8.167811588766697370e-03,3.312375570604602845e-03,-3.580073444502509042e-03,-4.767004596294102561e-04,-4.165321563843282435e-03,9.890118134649329273e-04,-6.422466119298544940e-03,3.447283796991307790e-03,-1.287590886449734365e-02,-4.289563760916538689e-03,4.765875295026564089e-03,-2.405867711693498306e-03,-1.966955918249161436e-03,-2.533485664646379742e-04,-1.141356173281969991e-03,-4.809486173285788466e-03,-2.888086553191504969e-03,3.989675768548411362e-03,3.517432821222026169e-03,-7.640462512744905770e-04,1.145580856957109650e-03,1.449076987267793005e-03,6.913999160964670078e-03,-5.479625660039241048e-04,1.888910507987522211e-03,5.287318390331276034e-03,4.543830422152746358e-03,-4.340755639614938448e-04,1.602416019714244730e-03,-5.554115264726059743e-03,5.554959875472155646e-03,-6.325405360112678482e-03,1.519176376264378715e-04,4.846767534295438710e-03,2.086438318683172220e-03,-2.497122514354241547e-03,6.294087788683486867e-03,-4.757711923424236787e-03,-2.897764232841350523e-03,2.303089425838836767e-03,-6.452550478254720587e-04,1.193382991047759476e-03,3.146380999881117626e-04,-3.133911230693140447e-03,-2.609285424940615064e-03,-4.599641959363594441e-03,-3.617708582888031973e-03,-4.491424097707715987e-03,3.109396666474666636e-03,-3.287594733436745818e-03,-2.344438733886720179e-04,-3.064575602238352875e-03,-2.006133202713973867e-03,-1.074292561577787179e-02,5.060627914215323070e-04,3.464838131822147848e-04,4.894767478322137495e-03,6.285287678651524813e-04,4.287740652215222756e-03,-7.786607501693110309e-04,2.381771406535741677e-03,7.336872392582028009e-03,-5.251192100607866668e-03,-8.405805612272124194e-03,-1.295062187248665394e-03,-5.815046275941391195e-03,-3.352146968952335326e-03,-6.424504479702202592e-03,-2.749775815290780509e-03,4.368332762275034704e-03,-6.925000528470661061e-03,-1.766952377191751159e-04,-4.987113273559062171e-03,6.355490884377502751e-03,-6.000620863085154039e-04,-1.043156972050082081e-03,-7.154389794370527404e-03,-1.430646480569507193e-03,4.513818097598490224e-03,-3.548952229567830206e-03,1.389514342596222252e-02,1.399209793819849263e-03,3.970257486444001872e-03,3.079613293035993616e-03,1.200245943485954078e-02,-4.767105845391824033e-03,-1.066430204069435117e-02,2.975649381265043324e-03,-6.94787966830
8956473e-03,-4.771753259011867615e-03,-8.414817353397030172e-03,-3.496266607865099879e-03,-1.964757620269488305e-04,7.762844242292841037e-04,-1.679884873971223928e-03,7.974495358215729496e-03,-2.675168691200467425e-03,1.058174526425952142e-03,1.845279188901779001e-03,-3.146328373509496654e-03,-3.671508753986792249e-04,-3.618745237005337699e-03,8.321179073207469112e-04,-1.158532149955289296e-02,5.421532511574401938e-04,-8.324544794942325202e-03,-2.915962414720570084e-04,5.179146917015649970e-03,-5.511378621728665666e-04,6.158019937198600012e-04,-1.592632181567404037e-04,3.585634300658801978e-03,5.630173281130022508e-03,-3.867659158377218746e-03,-3.631868757748055176e-03,-3.629759340457968005e-03,-3.283955043148524544e-03,6.875378676260395470e-03,9.933626329019302342e-03,-2.629117268866570435e-04,2.710501056852406172e-03,-5.578395267965622880e-04,3.827715848613596691e-03,5.455761553906328734e-03,1.299504505576580017e-02,2.334053542234875486e-03,5.054967081216805470e-03,6.905811353273037974e-03,1.583249219150741939e-03,9.479681547870296224e-03,-1.066680308247133953e-03,-4.656579561180706171e-03,7.789941304118112517e-03,-1.068565989923390622e-02,1.214871525423900965e-03,-1.031326814955320359e-02,6.124417619470250325e-03,-5.251669441353751114e-04,-4.512789614559651122e-03,-3.193739434645850552e-05,1.532537099628204823e-03,-3.729748810690978068e-03,6.568085261873639509e-03,1.149428645889578448e-03,-2.011922847017102046e-03,-1.472568629376594081e-03,2.126152087698553357e-04,-8.077104490385253336e-04,-3.304392852083450852e-03,4.078520378022425404e-03,3.179108361409893266e-03,-5.213183272103519110e-03,-7.701813731783497576e-03,-3.510580967434494618e-03,-2.380922087751393117e-03,9.465496282839873160e-04,-2.193517512977393878e-04,6.082530703977159607e-03,-5.367027244455901177e-04,2.767779572895010236e-03,-7.645205591996256022e-04,7.948439358357258649e-03,-4.555865900065669653e-03,-1.034329777895867490e-02,6.923269700842124910e-03,-3.987254995015062503e-03,4.665646605808749267e-04,-7.508124095883646426e-04,1.114146840448470904e-03,1.132766911910054568e-02,2.576528205622156693e-03,2.222535416195985537e-03,-4.531117206955518131e-03,3.086098329238818293e-03,3.895353054072096463e-03,7.781120387074809798e-04,2.390146511001708940e-03,-3.149871794732260737e-04,-3.686294366026882856e-04,3.810419249010018553e-03,-7.338033026833468489e-03,-3.038968935018666623e-03,2.990634343496132081e-04,-5.688721151372995617e-03,2.586620500606297713e-03,7.353641386663186015e-03,3.778666407874256474e-03,-4.621100305928126963e-03,4.873042639144085052e-03,-6.527439119045504688e-04,-1.436593727916582160e-03,-1.458691700224703765e-03,3.693629076409359231e-03,-8.307443527184478873e-04,-3.981114055206329098e-03,-4.289638147214918339e-03,7.897435512278026815e-03,-3.678363429873456308e-03,5.494653035899193291e-04,2.430447600771939651e-03,1.074788431442999688e-02,-2.893910313792027490e-03,5.687071950185121696e-03,-9.290224195451958142e-03,9.756978275942624138e-04,-2.434362525080138468e-03,3.258820057321351604e-03,2.282015527071784306e-03,-1.173745386728333511e-03,-1.069457738552311975e-04,2.033016699776008310e-03,-2.827271220382578630e-03,4.471409484650437595e-04,-1.544124751208883744e-03,-2.793720608407839270e-03,-4.186304521230675008e-03,-1.242669129796951898e-03,-1.960918940559706804e-03,-5.115144405500641166e-03,5.493470197166804014e-03,3.686956472997443644e-03,-1.682900472937554981e-03,3.454681012459402678e-03,6.557409714050483392e-04,3.874858790093737085e-03,6.292989144987167806e-03,2.482649831904268096e-03,8.548450071792694158e-0
3,-1.354442678967505238e-03,2.539475046038138385e-03,-1.936040224478671311e-03,-9.424843214713428050e-03,8.161999414358476102e-05,-2.924967767150456872e-04,1.916425471711786198e-03,-5.996991068096407980e-03,3.203203040770205741e-03,-9.020088255088940313e-03,2.442704025018939580e-03,2.480789136216337426e-03,-7.130830220033345171e-03,-3.405740107362740209e-03,-3.155416231509431389e-04,1.272119892387920228e-03,-6.353634140562594915e-03,4.592338810795734000e-03,3.581122264995316846e-05,-2.395990931367737026e-03,4.002837166592164084e-03,6.473517472624625881e-03,-3.298389158935379709e-03,7.852631757587314604e-03,-6.071650924352081473e-04,-1.947908603429713897e-03,6.001816949925412731e-03,-1.118288837263980810e-02,7.942255079041533796e-04,-6.405466094075071802e-03,6.171225377518896153e-03,-3.797762865966543798e-03,-5.683174606244139997e-03,-7.327638614719655583e-05,2.064560284280195744e-03,1.057860803707893083e-03,8.099957086148678329e-04,8.049737623072459464e-03,2.087292290050009325e-03,1.280299731527881390e-03,4.655543452123546769e-03,-4.352672920750711057e-03,7.464033699898557856e-04,4.314583986391401157e-03,-1.438572670913643809e-03,1.249554467273236540e-03,-5.148562803246641483e-04,5.113361537815128176e-05,6.839585312091559649e-03,1.357262573726548179e-03,-1.293905678567654465e-03,9.875587793468603229e-03,-2.373416013607489421e-03,2.912256763321666945e-03,6.228326973865968945e-03,5.620067770763606009e-03,-4.714489836734441809e-03,-4.143871217373956001e-03,-5.389792131127031129e-03,-1.478572681677117794e-03,-4.966877719150128087e-03,-1.541713054047693732e-03,-4.143613855693621846e-03,-5.749595504183645928e-03,1.074413229840840344e-04,1.675149714412811587e-03,2.353826263339825042e-03,-1.050931993652407057e-03,-1.511232102522389216e-02,-2.658354902508382623e-03,-1.141672899007667441e-02,-3.052830773306684179e-03,6.450412928321923658e-03,-2.724547319315893078e-03,7.919746112832156456e-04,4.992304988722420268e-03,-2.546308678321281254e-03,-4.504881076064971672e-03,4.965085882850580994e-03,7.166272358762403039e-03,5.198691479702560736e-03,-7.700103217981570970e-03,5.205672538691802267e-04,-1.159518177292449478e-02,1.471792523307259570e-02,-4.572422645584743248e-03,2.862244668729157452e-03,1.464284973980424737e-04,6.267192206724539819e-03,5.205862000143382821e-04,2.095318294633009924e-03,8.410070015040826616e-03,4.983092197327606242e-03,4.729767427494308354e-03,-1.824494548438001899e-03,-3.329858016421435168e-03,-7.521337937150445255e-03,2.119537967128885987e-03,-3.556753365823544372e-03,5.035429056851403046e-03,6.351542560637857271e-03,1.958745334812479841e-03,4.134609118824549186e-03,4.153570228966339470e-03,-3.139491635927822450e-03,-9.584064986947099562e-04,-6.207693897394760303e-03,3.176323657195731422e-03,-6.344754264567820101e-03,2.359146623078147136e-03,2.710202708558818920e-03,1.153911351316500933e-03,-5.091530426058210775e-03,-4.898953775693594588e-03,-4.068709141101896874e-03,7.110784218038227432e-04,9.614568605661115430e-03,-1.713569879090686820e-03,5.762850094131497652e-03,1.360703303797251330e-02,5.832117519470331711e-03 
-7.869804698853453614e-03,7.054907695243944031e-03,-2.538504284877772434e-03,-3.463956261042229739e-03,-4.106797537985858366e-04,5.594257204589010929e-04,4.321798030276047253e-03,-1.276825584397255943e-03,1.185973390117021231e-04,5.078479460608693007e-03,7.513994701655511872e-03,3.900146805236414608e-03,9.344083397470252447e-03,-1.207612558519069668e-04,-6.925142790389634401e-03,-5.137847173310842314e-03,-2.963463996038822766e-04,-6.236984343323260550e-03,2.786825024903017301e-03,-1.048501929085055018e-02,-1.415766474224878276e-03,-3.653019389593472038e-03,-3.066371520180658124e-03,-6.342475385551924784e-03,1.088717312175571531e-03,9.679298932120614443e-03,1.516800623965643266e-03,7.731439531800363184e-04,-2.093882961542679281e-03,5.837465547833713304e-03,-4.470411021455800334e-03,-3.549409115754885175e-03,-3.335677389906270898e-03,-6.375813648300053538e-03,2.763798986797909481e-03,-2.099012144500199493e-03,1.401264610086026504e-04,5.070300790452769886e-03,4.176547443413409361e-03,4.859352072781607940e-03,7.491758387382104964e-03,-6.015800057088085961e-03,-6.758324029850572333e-04,1.278538975588699089e-03,-4.152619264965669332e-05,-7.926868215752530533e-03,-2.003181903836599893e-03,-2.720819546491422183e-03,4.661973126136484749e-03,7.086767132452262159e-04,-4.538624130317104113e-03,5.276066293730749188e-03,6.013407251750110469e-03,-3.843221355111444120e-03,-5.777345843676384533e-03,-1.060406149605149645e-02,5.221797096033142620e-03,2.829072650018385655e-03,2.741327633321322902e-03,-2.374934608786908024e-03,1.475882639359571988e-03,9.765941205313183326e-04,5.284564306173504360e-04,-2.590267304524959333e-03,1.208945970133475274e-02,-1.027572108972561911e-03,5.184542307880886901e-03,7.957780166596610874e-03,-1.112551126229745398e-02,4.138809559122672010e-03,-5.734475723318975190e-03,-1.225860682049558815e-02,-1.614183964152502999e-03,-1.159673632667595567e-03,-1.312383826888297283e-03,1.356884309459767748e-03,-3.367088333376157261e-03,-8.225304368158945292e-03,2.394994758314028006e-03,-6.676902186535331636e-03,4.931383992184603261e-04,-1.774639917745651340e-04,2.070518749251478927e-03,-4.018585581921282728e-03,3.797326734419788612e-03,7.431825365964001360e-07,4.553718783220372132e-04,-5.106251178743101936e-03,-2.010988040826858399e-03,3.086820106256189713e-03,1.241963557188323754e-04,-2.610280086134302360e-03,-7.468555466630669853e-03,-1.056499097571430602e-02,2.363557525804544507e-03,8.569066995301312471e-04,-3.688043240727485912e-03,-7.174688826104414071e-04,-8.030540877949049472e-03,9.753959673020350801e-03,-1.658528006811515153e-03,-4.106554787159075767e-04,1.009752510895162592e-03,-6.473631668051759401e-03,9.374304711232285927e-04,-2.861013032501247711e-03,7.982216426090062850e-03,-1.247335777677858549e-03,7.258673574781141294e-03,-2.011278440433187178e-04,1.154396017672105557e-03,3.048871597456032870e-03,-3.020316216526842349e-03,3.135894242776092380e-03,-5.309726309898839085e-03,-5.647792713234721720e-03,1.845952100129924197e-04,-5.636565607707619101e-04,-3.474464454723929976e-03,1.724177582130236291e-04,-1.677424268225897715e-03,2.237154989982434884e-03,3.664689158914588140e-03,3.768419601650458469e-03,3.797293838808763963e-03,3.570368251069667639e-03,-5.793297016282244853e-03,-4.036600285830293025e-03,-3.041323593932036655e-03,2.000540719095772382e-03,9.145761903342484664e-03,2.014697099627835251e-03,-4.432762611968986584e-03,4.263645509112620052e-03,-4.007748733855661348e-03,1.583550937134897219e-03,4.005469053607622898e-03,-6.587873082565588213e-03,-1.028877841408017786e-03,4.555848
039064983382e-03,-1.109275262795852198e-03,-4.998367690401072337e-03,-2.623660908188318267e-03,-1.462893313446643690e-03,4.729309660523223040e-03,-1.150967920159950801e-02,2.519174410262194769e-03,-2.955365886054096142e-03,-1.263570563654626937e-02,4.286435230389671648e-03,3.876451289699081072e-03,6.336458334409679960e-04,1.501796305893646037e-03,6.009631718283744636e-03,-9.994167140938889013e-03,-4.753669049535715237e-03,2.426264054993789061e-03,1.921565582129800822e-03,-1.118855609914407370e-02,-1.089856253747230330e-02,-4.526548644084115065e-03,-1.923597684920921392e-03,-2.365978938091835033e-03,3.343405789579292822e-03,1.034108521197199060e-03,-1.722941555760202151e-03,2.249113244781863682e-03,-1.731475553853315539e-03,5.865934540205916189e-03,2.448467235561718313e-03,5.601940435139595494e-04,-1.218595863436261993e-02,7.938147149416138748e-03,-2.506618621978780066e-03,2.983343215995016423e-03,-9.834727802034344893e-03,5.045998144433343213e-03,7.537380466273549059e-03,-1.664675572156980006e-03,-4.241882613568056376e-03,9.341173078419362508e-03,-6.619568330334581890e-03,1.173224256256524952e-03,3.975393584566024735e-03,-2.514734836717969022e-03,1.224803285431057915e-03,6.860247037689812082e-03,6.058069628155530323e-03,-5.154009237182911526e-03,2.605295770186645456e-03,-2.041912721634848125e-03,-2.363753349668553782e-03,-2.754944658638025875e-05,1.332719191762323906e-03,8.046098612659842789e-03,5.583140315873195980e-03,3.194073512518267374e-03,-2.855663963630953282e-03,-2.364174624260772985e-03,-5.376660759430439661e-03,2.191302817863278411e-03,6.799475928511516942e-03,5.899398311927821388e-03,-8.306771114413724275e-03,-7.179245162976391828e-04,2.594777162820621937e-03,4.290648365496443212e-03,-1.034020142903852241e-03,-6.449791765968882146e-04,-2.617963094633473861e-03,-4.616346322680021930e-03,5.173374944115698496e-03,1.763145368419722714e-03,5.433743224978224845e-03,9.874578076114188911e-04,7.712185438053400718e-03,9.767859750257003489e-04,8.572900958605780743e-04,-4.217097918699981053e-03,6.879409023518030797e-03,-1.859712949957803086e-03,2.190345417035239467e-03,7.616121357177896636e-03,3.124221235389965275e-03,-5.482954382422478186e-03,2.228898243325014641e-03,8.655933863095823314e-03,-1.438109255134730272e-02,2.859368030468283973e-04,-1.946634910399511892e-03,2.372337042865763860e-03,2.797689514650472751e-03,-7.534620215861537532e-03,-6.650661666063308226e-03,3.903507724286460739e-03,-6.824260917859236060e-03,2.211556429187023099e-03,1.882064909252237112e-04,-5.989301088830163320e-04,4.512992226751547618e-03,-6.291146600864071237e-03,3.607598709200585032e-03,4.701697130671327399e-03,-1.600944106528736331e-03,1.060761898949059284e-02,4.952165116431574143e-03,3.807925882828349685e-03,9.319368101960106123e-04,1.112547090259094658e-03,3.673968601588407674e-03,1.117826298142167787e-03,-6.447057528355433151e-03,9.309109225466462648e-03,3.051306025209946267e-03,1.671920536177591609e-03,1.004642791974946596e-02,-7.424818150498981029e-03,-1.396986518418243464e-03,-4.219973873481013341e-03,4.551561673838033090e-03,-5.537379886336874814e-03,-5.966203684410612603e-04,3.954556871713989750e-03,2.951192120351961087e-03,5.974183183956290402e-04,-5.041967617907156494e-03,-2.560760455027867415e-03,4.507704310082310054e-03,7.200727042701618799e-03,9.555205350575576156e-03,1.770192809333657677e-03,-3.150165243936525836e-03,1.495335141067315578e-03,-1.138630126446369718e-02,2.126522575129074621e-03,-6.598440450328133845e-03,8.784001274129803541e-03,-1.579373296720332784e-03,1.127285363004272697e-03,-3.7
95176435099287260e-03,1.760948972741114898e-03,1.921029767339145879e-03,-4.279327747137991970e-04,-4.063932232926831278e-04,4.551704298868781452e-03,2.169984119658355508e-03,1.261435457027164581e-03,-3.399955065120311565e-03,5.056775974772102777e-03,-4.124576328579320460e-03,-6.500969638487496920e-03,8.210703073851792488e-03,1.933121627895830646e-04,3.006410451295734448e-03,-9.712356269423664916e-04,-3.639480594921832343e-03,-2.536858137101289431e-03,-3.889320940970197524e-03,1.973408011063367950e-03,1.939877279933901804e-03,1.708661397550272206e-03,-1.993003487451440459e-03,1.227627680373116759e-02,4.763268145739791085e-03,-1.504242249822738920e-03,4.449111622005615801e-03,-6.673911403656243232e-04,8.334666991755720206e-03,6.718048901017993624e-03,-3.158779890422195192e-03,-4.386415110188585965e-03,-1.509830861216661336e-03,5.070415163522304629e-04,-6.787810091448945662e-03,3.860017675602942389e-03,-5.251753796801018892e-03,-9.003270308979648140e-03,-4.943520878205742361e-03,-9.358680172365879896e-04,-7.935236934654440674e-03,-2.147917800674314232e-03,5.108566959827818069e-03,-6.589096586280154648e-03,3.443979866358426087e-04,-4.657856506992611237e-03,7.537054112991261809e-04,6.938206801993544205e-03,-9.767938920406931541e-03,-1.535620612173205267e-03,-2.239950267698060868e-03,-1.958632545990600035e-03,-2.363004976592135865e-03,1.042832696544990570e-03,-2.818637715873737164e-03,-7.667590699486496672e-03,-2.052466911700670874e-03,8.548160651679818575e-03,-7.540172288036444022e-03,1.906273489210519312e-03,-1.769267644530601185e-04,-1.813650236913111075e-03,-3.481506777854252895e-03,1.058505602320576611e-02,-6.880363216776930191e-03,-8.540163699448776596e-03,3.027725213773533371e-03,-1.220467575054070067e-02,-3.917275869129265529e-03,7.061079234036858089e-03,3.415948824800039713e-03,4.008388756854920632e-03,7.328087130038796193e-03,2.589513777535093920e-03,2.163387254886315735e-03,3.136269841569032900e-03,5.205094681340501767e-03,2.620300907930719209e-03,5.394574335475038444e-03,-1.480488104425458371e-03,-8.542570038999852374e-04,3.262846471097460323e-03,5.513018933506326665e-03,-1.593569788529309965e-03,-7.511811249081744779e-04,-1.694472254576396240e-03,-7.982539286606532211e-03,7.375490088694806011e-03,-2.382694355204452528e-03,5.751506222732396308e-04,4.009148669832055846e-03,-3.829601512628318886e-03,-2.003574240770226772e-04,-1.707882400210267463e-03,-1.769451962212316778e-03,1.408871179125261060e-02,-2.289109471406434396e-03,-2.004801549852620098e-03,-1.623140158663583053e-03,6.348942725306603078e-05,3.787721020581752852e-03,-9.316167950196733993e-03,9.821537024887994419e-03,-5.723330531041823793e-03,3.875520818049670350e-04,-2.361367420992159603e-03,-8.463463062980302242e-04,-4.825313159712705212e-03,-5.127774748950399836e-03,-7.401467235933107368e-03,2.536092166317427052e-03,-5.719827869551803202e-03,-3.108640864074131011e-03,-6.695147280103451101e-03,4.950341585899669522e-03,-6.132499869775674683e-03,9.668291391524705750e-03,1.028523399334047017e-02,1.971763874715308073e-03,-5.770160309064243037e-04,-4.018249376223550356e-03 
-1.456959695121569105e-03,-4.458552986971737152e-03,-8.841183238521611171e-03,5.810452604877604406e-03,2.750767641767180079e-03,1.208561300583271825e-02,2.975600609900371646e-03,-3.495196601347005504e-03,-3.864982499730901938e-03,2.920826445479779866e-03,5.234215849029117372e-03,-1.522226607443339123e-03,3.144426046362937006e-03,-6.726757219335765788e-04,-3.258832447933146439e-03,-5.405400953125149861e-04,9.638514665026024636e-03,-1.270317177605756465e-02,1.118633564958836785e-02,-1.388133297220702856e-03,1.653196825973476686e-03,5.670665402041458365e-03,-1.302365837212906413e-02,-2.638923950445655389e-03,2.449030183701448194e-03,-1.688817239899542081e-03,-5.766216478322022838e-03,-5.107408562828770482e-03,2.677517673150820159e-03,-1.669400752480128909e-03,-7.495037037641602042e-03,6.529350742510341143e-03,4.351476034643916163e-04,3.626993552416204157e-03,9.425406535157760876e-03,6.756412645558540486e-04,6.199845783972115359e-03,9.723960140338308925e-03,4.426522981445105316e-03,7.939819543409164324e-04,1.072593464563384967e-02,1.631065511819561461e-03,2.928691673970264774e-03,2.643252980445133190e-04,-5.753187344364791992e-04,5.871441956227537286e-04,-4.357331460333605314e-03,2.696177969752672383e-03,1.225297091521923044e-03,4.318716948902991126e-03,6.078498230501713090e-03,-1.125685557643002705e-03,-3.829575849668226829e-03,3.720523630414001703e-03,-5.219030678151025289e-03,2.783338903793116086e-03,2.284988285716494088e-04,-8.889224254299585987e-03,3.920833913735628264e-03,-3.167472472969897358e-03,3.315123577178709022e-03,-2.112235301764189225e-03,-8.010801364189823600e-04,7.947034769534809528e-03,-6.218906357405427909e-03,1.682548487048620691e-03,6.466392426825378432e-05,-5.186993808335581616e-03,2.668620087064611617e-03,-4.342546226610371173e-03,-1.206623609145505505e-03,-2.409395361627879057e-04,-5.731569378336412272e-03,-2.228515224534075338e-04,7.741931389515816810e-03,-9.456951560060291445e-03,-8.107675033176411222e-03,1.076384680908411447e-02,-6.138679337944067445e-04,3.849722686590188649e-03,-7.343447643555540798e-03,-3.555410079702049815e-03,6.125245142343125650e-03,-3.584589525835086415e-03,4.465062886750116589e-03,-8.293752354586700001e-03,-2.738199221927777992e-03,1.128078789380870155e-03,2.440481926281980359e-03,7.459721637400734944e-04,4.132197239305907335e-03,8.766988925770003924e-03,6.247415107730237577e-04,1.212876296653935893e-03,-7.736039700382275723e-03,-8.115729565409316934e-03,-6.852552213325219287e-04,-4.067065137033573641e-04,-1.374789724072499617e-03,-7.162014993010615976e-03,-5.715306192655579716e-03,-4.459787567923312324e-03,-1.018797306503771427e-02,-2.457922128957255002e-03,5.384989718915911792e-03,-6.089718234887441135e-03,-1.137951440644906307e-03,-2.033685891482804218e-04,6.600908900825003299e-04,-4.203994094734452014e-03,-3.776533145161751863e-03,2.250610628249552752e-03,4.019340333026384833e-03,7.454665048790136070e-03,1.593132009556724477e-03,-3.325532911467205498e-03,-2.032819419017869609e-04,-6.117970074763885539e-03,1.136386052243843446e-02,1.077067356333648654e-02,-2.787933904932609567e-03,3.637120046229493520e-03,5.252248991492118602e-03,-1.910439132095106225e-03,-1.176709779630897776e-03,-2.928249107131112748e-04,-5.752925193138650785e-03,3.995304352673251583e-03,7.060600802767201013e-03,-2.058510116590761291e-03,5.832192927480168120e-03,-8.947403444561848745e-03,-1.787830131274213089e-03,-3.682755998381303207e-03,-7.657413011167301525e-03,-2.890794117582523495e-03,-1.070381292438083972e-03,9.843093530168614547e-03,4.620623871458046810e-04,4.528377
[embedded numeric dataset from the mlpack-2.2.5 archive: consecutive rows of comma-separated double-precision values in scientific notation (magnitudes roughly 1e-6 to 1e-2, mixed signs); the raw values are extraction-damaged here (truncated at both ends, individual numbers split across wrapped lines) and are omitted]
513146773479e-03,3.565670099455149362e-03,-2.161693903379243928e-03,-5.667933536070477937e-03,3.519931263311446988e-04,-6.975950042643514447e-03,-3.283757634441174071e-03,-3.488606905579223587e-03,-2.241641656184882852e-03,3.690965587182148434e-03,-8.504422906739813855e-04,4.341855197674323458e-03,8.133693609720545753e-03,-6.048303027297998596e-03,7.725484607599648022e-03,-2.228535640962733973e-04,-5.058477864649093235e-03,5.393799694851213362e-03,-1.716483487941414471e-03,8.805604919946618744e-03,7.846535541240387546e-03,2.658207352984470317e-03,-2.942059965723200208e-04,-2.808335654202621674e-03,-8.573151457073741044e-03,-4.000224074895333132e-03,3.265377081999777924e-03,-2.468490306305213666e-03,1.060820047578089551e-03,-1.573383633521523248e-03,-3.508010301519953310e-03,1.763066710172884349e-04,-1.016628128056624308e-02,1.550712338838822202e-02,-8.589895819466093893e-04,-3.277403818943504937e-03,-1.505358054029740794e-03,2.813874285646946697e-03,2.939948411351627627e-03,6.373035320234278966e-04,-4.375866715119607585e-03,-8.293915664512900229e-04,3.377178560435775689e-03,4.661421485901579833e-04,3.277600556362297250e-03,6.862268011193093688e-03,2.138159991367967038e-03,-3.429137313852857402e-03,-4.536632597242518973e-03,-2.141856846711826228e-03,3.612426275930347760e-03,5.672543957561122785e-03,7.249410094026332912e-04,1.196250602176690938e-03,-3.605888222374072517e-03,7.661556805743847433e-03,4.252915490003914521e-03,1.704687290999804634e-03,-4.208929977557101382e-03,1.070216485813471792e-02,-2.612010178328380821e-03,-6.094125132368287054e-03,-6.601260017542132964e-03,-5.434770638110804006e-03,3.929713786465127007e-03,-3.273549043630876853e-03,1.050092385041722504e-02,4.272018393525246933e-03,-6.850445217619877125e-03,-6.004711445457200890e-03,2.579329766201807293e-03,6.390142199467506228e-04,2.788805318619080875e-03,3.789733741747857137e-03,1.986922171712797967e-03,4.125972783798467422e-03,9.235091747970580398e-03,-2.498291313128236154e-03,2.902400293827301758e-03,4.995183337081128896e-03,-6.568236759734316990e-03,-4.127134140270996392e-03,1.636525511872774638e-03,-7.544951277769003806e-04,3.496280504322183021e-03,5.169349360083325302e-03,1.841997123724210296e-03,-2.181969928833577509e-03,-1.075502605651673226e-03,-4.781822333628135344e-03,-5.134106834082715481e-03,-9.112477019665060282e-03,-6.774262040827115638e-03,-3.683485168902931803e-03,-8.413559688549288040e-04,4.586205194303469572e-04,-4.342619534584899604e-03,-1.465679144717616260e-03,2.320853442671260217e-03,-5.128976438655467353e-03,5.799177040105173744e-05,-3.120321079758407896e-03,5.869152566380917736e-03,3.092641610469499744e-03,-1.418123012639723249e-02,4.431987511280613412e-03,-1.229484083450455532e-02,2.861394202954836707e-03,-9.799567013692802447e-03,7.159005361524225890e-03,1.113807287092796663e-03,-8.897664272664097037e-04,5.694011167767659698e-03,7.619332398214236253e-03,-2.854118709520415183e-03,3.692935345088948357e-03,-3.467479321810996128e-03,-1.009241651791681251e-04,-1.818400761526807723e-03,8.256838089917879417e-03,7.036226920493680002e-03,3.839761182002286635e-03,-3.915667099268777505e-03,9.114900261350375368e-04,5.826765084524717329e-03,-7.477386009188238640e-03,3.125288520341458227e-03,-1.421373891532339570e-03,-6.132080480768171624e-03,-2.469118182832168856e-03,-1.161927809880080849e-03,5.666346199680083288e-03,-3.375626055868438842e-03,-4.584527754220439275e-03,4.851372585348861892e-03,-6.051370405013736450e-03,-1.381388096568978395e-02,-2.235934926180082466e-03,-2.335378766680726650e-03,-1.8435029735772
18745e-03,9.838489246930511370e-03,7.708655120945972876e-03,1.343403007415010872e-05,-6.295840028754360468e-03,-2.583656874136150330e-03,1.423164509997536806e-03,8.978672589452575334e-03,8.396040025165148191e-03,1.478219029680449522e-03,2.215137093418195324e-03,3.193071690101297305e-03,5.706901445243550480e-04,-3.306288329947005265e-04,1.392513722963743079e-03,5.607835766927188889e-04,1.827729713582300396e-03,-3.290219420709836369e-03,-5.418138442250359892e-03,-2.589955845596005231e-03,4.685641709306598637e-03,-2.699356552503584528e-03,-2.478100332777539907e-03,4.563165250161608545e-03,-1.619026056178780350e-03,-3.375377323232752347e-03,-5.261972152306627556e-03,-6.023297771545405195e-03,-8.026505135357905191e-03,1.759614746071796847e-03,-7.966151716486185794e-03,-7.906332294213495870e-03,-5.517303303040559483e-05,-5.030348253582708241e-03,4.361400862901584040e-04,-6.354548374200128143e-04,9.421926018051177479e-03,6.077275699678189922e-06,-4.801939820981812910e-05,-1.300884034493303160e-03,5.605174591885584475e-04,5.699330981746853798e-03,-1.205154521287125826e-03,-9.234354470054752240e-03,4.101860853409296145e-05,1.674135317536314723e-03,3.843141552007757217e-03,2.055836938394790380e-03,-1.352705682212542804e-03,1.520064571448790475e-04,3.918881367242555737e-04,3.288482105043722108e-03,-7.969950265480927123e-03,8.000830762638804980e-03,1.450942695725653073e-03,1.985788428052119407e-03,-4.193041280177777462e-03,-1.930907932136536947e-03,6.656738996949565154e-03,1.029770517711181381e-02,-4.790570787898619683e-03,3.614035053571888062e-03,-3.228609889994595288e-03,2.769812914269689438e-03,1.551407607438609918e-03,-3.789363603270450541e-03,1.391630084737505461e-03,-1.860332082624729672e-04,-7.615014726197038945e-03,4.000907822421998286e-03,-7.979594215627775627e-03,-8.409679528287822975e-03,5.002737137922559814e-03,5.314184719213900683e-03,3.026679728350567956e-05,2.560569429754454632e-03,3.151996153310027658e-03,-1.168596070247024908e-02,3.191666430853445056e-03,5.773103963275134053e-03,1.128633048918309574e-02,-2.615162214999282135e-03,1.545971770878363030e-03,8.563868940712060854e-03,2.267426261383343258e-03,6.179646930966106733e-03,-1.015457410159995152e-03,-3.107765719477147311e-03,6.247745614725178608e-03,2.784960647172748504e-03,1.732643704479664782e-03,-9.155220739566703389e-03,-1.013887576836911865e-02,-2.540366767847109311e-03,1.960068282359707804e-03,-2.429827638383861687e-03,1.882536874330112192e-03,-9.740480681247234984e-03,-5.129239038570915406e-03,5.655737184432864037e-03,9.304264710436950178e-03,-2.419611673409164700e-03,4.950338831137455174e-03,-6.403174514877936485e-03,4.394986305341614871e-03,-4.323901971723104545e-03,-4.246611640393477792e-03,5.436562297771605152e-04,-6.249443141302945832e-04,-6.631164671781391087e-03,2.440479071346138805e-03,6.118532388689533127e-03,9.000090130875322602e-04,2.922239749385112644e-03,5.820289167738741491e-03,-6.097226803244324214e-03,1.212318432377588150e-03,-8.643894771918578593e-03,1.049137483961609402e-03,-1.256347191405660741e-03,9.286866510319910853e-03,4.338332387313848827e-03 
-3.485039566590735806e-03,-8.658718967965915031e-03,-2.579369414413170531e-04,5.954186477167836694e-03,-2.236426523711646224e-03,3.063974448419199467e-03,-5.316380702224359064e-04,4.345799154018808574e-04,-4.439663064827687432e-03,-6.372082512029230046e-03,1.070649682387760961e-02,-4.906862549245557571e-03,-8.959346159238140012e-03,4.951429206763705362e-03,1.377424249650797787e-02,6.024655398009177987e-03,1.235249844370191067e-03,-4.996240994556184686e-03,4.522377019294473305e-03,3.580380815835577241e-03,4.632405850131204178e-03,-2.573526956943806773e-03,-3.645053573927192325e-03,2.159566620569181028e-03,5.683189433713017056e-04,-3.535076133637606900e-03,5.287950678533768697e-03,4.312557081500690594e-03,4.100247243039591497e-03,5.485733848295572257e-04,2.582513439829268748e-03,-3.044328297730172325e-05,7.712766441388342782e-03,1.923961347913784228e-04,-4.546452209472637458e-03,-3.771651382062882630e-03,2.206283358945129172e-04,6.031743706459528030e-03,6.011110699814907476e-03,3.175711669724291548e-03,-1.401111815014785501e-03,-5.229211122621041895e-03,-6.721793171757963967e-03,2.663163989641214105e-03,1.834226173409553859e-03,-9.849988782945639804e-04,4.326871476987743936e-03,-7.556008202623764464e-03,4.220296696449521434e-03,6.821695992753705863e-04,6.105293098258559627e-03,7.521524757645525229e-03,-3.941299215601534560e-03,7.272928902364345256e-04,-3.343892860288673850e-03,2.957869756653862953e-03,-1.514035114167635579e-03,6.540294993636313302e-03,-1.273699799241593579e-03,2.012663347025079284e-03,-7.549023728245649522e-04,3.438097484496235000e-03,3.580640068427124161e-03,4.685797155725032054e-03,3.890912176058479379e-04,7.089903140478744569e-04,-3.214600760310153886e-04,-1.758749727792018915e-03,1.409429684432556224e-03,2.604362181953770016e-03,-8.217694016131480914e-03,1.268565539494754353e-03,4.917734542632888270e-03,-4.566952263402077918e-03,5.439939322270922160e-03,2.334116502106048808e-03,-2.198859903709961521e-03,-4.159019268325685366e-03,-5.513767671041096968e-03,4.609795580516175360e-04,7.574605690670707138e-03,2.073701282090260282e-03,-4.222738909278033262e-03,-1.205282516447622916e-02,-1.168088723439276479e-02,8.597752252506711892e-03,-2.117976166222680298e-03,2.107366354741428346e-03,-5.185751935173453162e-03,9.868188475034731996e-03,1.059083523681637710e-02,5.014990913969356347e-03,5.244225559693312024e-04,9.104761767627318758e-03,7.563257580287659383e-03,5.135244969848855486e-03,9.494935547799831024e-03,3.440914580113712488e-03,-9.818721481698266217e-04,-4.293820164280797574e-03,-1.389795932642375054e-02,2.077014568968960345e-03,-1.082361404100900312e-03,8.614104676793380919e-03,5.162390834979454710e-03,3.441528879709049218e-03,2.706822508847048919e-04,-6.304613623134831639e-03,-8.360066850623348866e-03,5.365528047031649025e-03,-2.158896613612467253e-03,-6.389757144126833981e-03,-8.818838413955618766e-03,-6.369102139138547655e-03,1.648799566677397986e-03,-2.303268780333588353e-03,-5.283844782263230969e-03,-6.938951242355848557e-03,-4.829644394679194833e-03,-1.043550213859629301e-03,9.004370143208848681e-03,1.982056314958027376e-03,-3.403963103931864284e-03,7.263482572411614560e-03,-9.784351952810649983e-03,2.960023545649176878e-03,4.464978610648298943e-03,1.028198068729113505e-02,5.828884575122366936e-04,5.658658867589106897e-03,-6.866871491246444650e-03,-8.495762742973150036e-04,-8.198717018120490248e-04,5.979628864918564574e-03,6.144543409416835860e-04,1.361868644541311185e-03,2.541952949846466013e-04,-1.032189015525572561e-03,-3.408333640139307383e-03,-3.33799106597803142
0e-03,-6.504859590049674040e-03,4.756684286631790957e-03,-7.946425792297982446e-03,-3.257619805720991454e-03,3.120265183262520715e-03,1.200762092725870915e-02,1.175212822457296627e-03,3.146192031663891907e-03,-2.156169565660812342e-03,-2.042253129176748241e-03,-8.999758683596228298e-03,3.123940319797385151e-03,-6.226240556492831490e-04,6.617035495934504678e-03,-1.348246655288936279e-04,7.286737834297059985e-03,-8.087853627008817145e-03,-3.367095416046436088e-03,-7.890612098149524939e-03,-2.550169945511244953e-03,5.001939338626706054e-03,-2.170760930757150483e-03,-2.219805003868112632e-04,-3.524090400885882771e-03,-4.134006092138228436e-03,1.010868404672646357e-03,2.112078804140910091e-04,5.127371124580519003e-03,7.307261977442884235e-03,-1.275220784532447990e-03,4.633525433915397854e-03,-5.676035354835943895e-04,5.698695271847507328e-03,1.503469312251977158e-04,5.892139581746351192e-04,2.414356026756135382e-03,7.022836315577889524e-03,1.391492137267521818e-03,-6.209281609824300954e-03,-5.710069876955532406e-03,4.614710288368211420e-03,-5.093380774961994749e-04,1.894445251391094600e-03,1.185173183125701321e-03,-2.001368317367552708e-03,3.346068926799918280e-03,1.132667419083986660e-03,-5.609584878813894844e-03,-2.634624554308125782e-03,-4.743144198275454568e-03,8.411661258349396528e-03,-6.859134052570190626e-03,2.277708394306882100e-03,1.255628495449763807e-02,2.556506629179217355e-04,-1.814826552911191727e-03,5.683268307046338876e-03,-1.384141423572171885e-03,5.394283761742153663e-04,2.526016182239663316e-03,1.425868558425746890e-02,-3.489599835054435668e-03,2.395108633755755733e-03,-6.577326713846145916e-03,5.016440358542624024e-03,-6.100355329960214547e-03,-2.939819348402945582e-03,-4.137572408992411938e-03,8.554458992861366934e-03,-2.800551772616538104e-03,-5.839061052223027976e-04,-1.215847023632414536e-02,2.283500631721683584e-03,-3.304499534710767222e-03,-5.452848683964590416e-03,2.768547010606122426e-03,-5.991762910722568843e-03,-4.270957902137610847e-03,-3.943087967387030358e-03,5.591271338590444832e-03,4.696337659025328314e-03,3.484067123903638631e-03,-3.101082582686649921e-04,-3.648112777883823449e-04,1.320756215168873531e-02,-7.519301761840025029e-03,-1.159757379638060438e-04,5.656936895451624304e-04,-8.408090024056269138e-03,5.963911491412128780e-04,-2.076952318362192476e-03,-5.952670940702118978e-05,4.494415105674868030e-03,-1.320699629295006127e-03,8.761371862573859998e-03,-2.236713732661092368e-03,3.518736864082680038e-04,-8.697169020942153647e-03,3.349602358423862611e-05,3.538662785438973120e-03,-8.525118232665068567e-04,3.191647230859418968e-03,-1.597088934459904997e-04,7.531247327530749512e-03,4.397354049326960071e-03,-2.315538541114323719e-03,3.985906958238700191e-03,-6.099282793969510938e-03,3.656151172151441810e-07,-7.694359975846434482e-03,3.192150990096230198e-03,6.286823124749846281e-03,2.747331744676738234e-03,2.211707861408571899e-03,3.194832256043793686e-03,-9.565066348242117324e-04,9.393645965217493551e-03,7.455569211163846390e-03,-8.454603591414935893e-03,9.409290703997454845e-05,6.371363351454033524e-04,-5.856770408391304139e-03,-5.464549284015549763e-03,4.705376139604502267e-03,1.573045326055611676e-03,3.519916937456641498e-03,5.171152013495177238e-03,-9.851937097820128580e-03,-7.889470923088760801e-03,1.194107666843308158e-02,4.452612742066314536e-03,5.655444856923838234e-03,-1.084592204960216008e-03,-3.880635963502220183e-03,-2.375187072181163348e-03,4.220185873352448082e-03,-2.893038356311805268e-03,-2.777437695506313896e-04,1.900458241223496308e-03,3.49436
4928142376127e-03,4.006862511895136446e-03,3.116271172354388350e-04,4.415282555414579320e-03,3.119292382605366016e-06,2.027315751683820243e-03,-4.504313312958539532e-03,-1.280113999513273404e-03,-1.599020533505144184e-03,-2.448522524636583160e-03,-3.926069286486440386e-03,7.126956148207598367e-03,-6.942999892769490325e-03,1.816401109393824551e-03,7.985037780251882152e-04,-9.893085735371296222e-04,1.051396165617010485e-02,8.879569235582873005e-03,6.144815205707548737e-03,1.000132241183220028e-02,-1.211345763633021183e-02,7.027000965349801008e-04,-4.922331729856803742e-03,5.911375857875502370e-04,1.436453817701685955e-03,-2.702690050973217181e-03,1.074956300596495570e-03,5.064491240773941541e-03,-7.463972086727325116e-03,-5.601949884988723961e-03,1.088469834058397590e-02,-2.680702110592154876e-03,5.162849330990363327e-03,1.025905995031589635e-02,9.529970522982780801e-03,2.787184854741353059e-03,1.258051475821614633e-04,1.788621534428786169e-03,-5.011620041563257660e-04,4.527006953819531256e-03,7.244845508748673155e-03,-7.528046744448716257e-03,2.518647705118891821e-03,-4.214219890937872907e-03,-5.144280194309513998e-03,1.192903295698626324e-03,-1.913941087195603335e-03,3.001007620244559252e-03,-1.772980888845422350e-03,2.229425478039523054e-03,8.822733732083101743e-03,2.304516627488246636e-03,6.267321287678012665e-03,-9.165098877769381480e-03,1.059710881506212808e-03,1.725214956931024309e-03,-6.586849873540652663e-03,1.189838811052048792e-03,-4.917470499242558059e-03,2.530635264894379698e-03,-7.319233291379115790e-03,-4.227746242973805246e-03,5.778799645935553137e-04,2.890539645338300722e-03,9.388648298735884426e-03,7.334628886374505632e-05,-7.603251982138466581e-04,1.804658727096165828e-03,6.012594847539702060e-03,-1.121043105787545126e-03,-2.942759133043160343e-03,-4.229728673211592682e-03,-1.569228380379018416e-03,7.598736833571831384e-03,3.644156687721809146e-03,6.594428326370803048e-03,-1.234898895960750368e-04,3.679986764508433230e-03,-9.183459010886054147e-05,-1.613224915927465074e-03,1.939563300962319353e-03,-2.958432557680170261e-03,-1.855032248035194979e-03,-6.437215962202666106e-03,4.852795068287881126e-04,1.622066707026119207e-03,-4.568493952230172177e-03,-3.469672381189655395e-03,-2.019964157984682155e-03,4.066740881600791613e-03,-6.159912800580405814e-03,-8.515336127958319373e-03,-3.910992014364070117e-03,-4.062156922955136723e-03,2.029783310959714477e-04,-3.236854756716095547e-03,5.820656332671634604e-03,-6.571402449924101073e-03,1.113944436740998273e-02,-2.065956455820554347e-03,-3.185799774637169089e-03,1.010549338219503442e-02,6.215249342181329226e-03,-3.270568007701320223e-03,5.044753303407938441e-03,-2.016587105188481430e-03,-4.402279376155393553e-03,4.536080016607074174e-04,3.392618995566682501e-03,2.465772295343747911e-03,-7.707833741006093163e-03,6.864662914206160968e-03,1.719940344645569342e-03,5.924628797164931647e-03,9.579305306956025466e-04,-3.222165754424178002e-03,3.944741233823862922e-03,7.551340072479124400e-03,-4.060213317959041925e-03,3.322845773138700682e-03,-4.181504971241587864e-04 
-2.820708888003421993e-03,-9.015266456859302746e-05,-5.421924113024004059e-03,-2.054667575329158918e-03,-5.252010311487120700e-04,4.866952369909525664e-03,-1.238375063211444001e-03,-4.079836082467065174e-03,-8.164687557060856110e-03,-4.112626178312477065e-03,-8.194093782911502505e-04,2.336770573062447055e-03,-3.557129126075704584e-04,-7.721811061073113013e-03,-1.064604825907319516e-02,-1.049703290957990434e-02,9.306446902484040093e-03,-7.951524139930435039e-03,1.363894367461641888e-03,-5.450559567427978522e-03,-4.075717835487729208e-04,-6.531750726528762728e-03,6.675584177413579519e-03,-5.569844572272691051e-03,-7.968435316443976332e-03,7.740132765858264222e-03,-5.621306054591145365e-03,6.829980547942554429e-03,-3.601098204291480927e-03,7.316049668651263514e-03,3.696295830247672744e-03,-5.418723551459329012e-03,1.595088705022350517e-05,5.073997281177290881e-03,6.651729093490036347e-03,5.423114395831828832e-03,7.145814116678782420e-03,-4.170821029513328317e-03,-2.049755257821681866e-03,3.589521057478646440e-03,-1.733713047600912152e-03,-4.227443855953212822e-03,4.004117962100661038e-03,5.747684267984716315e-03,2.790401712312516342e-03,1.200579914710265660e-03,-1.161625461681243697e-03,7.922560534158993445e-03,-3.848942278800208884e-04,3.289426031093993941e-03,-4.098593034465599846e-03,-1.646131379887156351e-03,-4.912820170341171758e-03,7.491989807177538247e-04,-7.347179013444849090e-04,-4.542474596688941650e-03,1.043898295726706713e-03,-1.150875159389749651e-03,7.701849306854325131e-04,-3.565617773494927489e-03,-1.928240798891655639e-03,-2.319428163723640229e-03,-6.266292086225552598e-05,1.869961280728248219e-03,-4.672360602878777684e-03,-1.492361219255868010e-03,-6.737202751274031128e-04,-1.530986803539516414e-03,-2.291848189446933810e-03,-2.733742896314027716e-03,6.201316617653600261e-03,-2.515620114396952510e-03,1.036407771420862906e-02,-2.468447074807211122e-03,7.615019543012310245e-03,-7.678988338257935857e-03,1.032786442896744183e-03,-6.430995985827451036e-03,3.533874530307631358e-03,4.025096708663251054e-03,4.015168418366503282e-03,1.056335322725042696e-03,1.758665503211657424e-03,3.964483529787382892e-04,1.871994648887373392e-03,5.008298371495109418e-03,-3.095637105796324434e-03,-7.919943126035174736e-04,-1.442433484018458036e-03,-6.555808952556863890e-03,5.442083093581564709e-03,3.617051776056933593e-03,-6.994226329366347653e-03,-1.069260641348011680e-02,3.748090449649023890e-04,8.021328330296575992e-03,-1.333012742147209528e-03,2.694198113352436891e-03,-7.029556181385183088e-03,-2.501309624620076319e-03,4.258210561233071421e-03,-8.761904459168440296e-04,-5.513301378239240241e-03,-2.901651894305603413e-03,2.877175543199485437e-03,-5.290837535722584840e-03,-1.027751878172033769e-04,-1.170548218577529545e-03,-7.730430180863873298e-03,-2.301695530841877269e-03,-7.041801101990659067e-04,2.001926653191492440e-03,-3.716936318817019771e-03,4.168519798209788935e-03,2.939305840453924033e-03,6.780824316728862287e-04,-5.987279110318063577e-03,-3.041737422418740048e-04,3.631089946481997916e-03,-8.001257458118374299e-03,1.471762326497964238e-03,-3.944410094458955171e-03,-6.631664428814019671e-03,-4.229226631964085222e-03,1.319456828811285849e-02,6.122261146242856736e-03,6.106722496525488451e-03,-1.863653715294510960e-03,6.688400564763647034e-03,-1.184491304227215224e-03,-6.060235897864172522e-03,-8.295507487167196048e-03,8.360647234703716879e-04,-5.088268894949070755e-04,3.830449146068611293e-03,7.030895411315333035e-03,8.357090823541640442e-03,-5.351786300481002579e-04,-2.106192114610178014e-04
,4.673416442802909472e-03,-5.405474473983228056e-03,1.285935681721186649e-03,5.516008483566012949e-03,1.844776115538231120e-03,-2.952951148933801589e-03,-3.701133452387649590e-03,-3.731278687577152531e-03,3.375797030202741717e-04,1.083193507933186920e-03,1.744548289146442474e-04,-4.902005078963869375e-03,6.348839731684077260e-03,-7.438038677203315241e-03,7.529135160499225189e-03,-4.129076157993816568e-03,5.297631682918686573e-03,3.703881676245244278e-03,-3.883143754026205114e-03,-6.023560159198747786e-03,8.462051239448910300e-03,-8.579973430274737134e-04,2.268422142693242178e-03,-6.028204328555622710e-04,-6.880571970104552501e-03,7.118551412002690516e-03,-2.534541377385234572e-04,7.359007930160496244e-03,9.800653807530416983e-03,-1.032677408972667611e-02,-1.511632860122950406e-04,2.469912702703587915e-03,1.437144257740812208e-03,4.302723663272345518e-03,-3.560398246800825645e-03,6.435619630010245423e-03,7.492550208982269588e-03,1.382696324774424033e-03,1.189823864491435131e-02,6.889514102587990414e-03,-5.336109337741370799e-03,-4.364650309881991028e-03,3.035846273770333691e-03,3.324261397971674074e-03,-5.450766081512243254e-03,1.968379968428714960e-03,-1.928192283255695497e-04,3.427527107160260032e-04,-3.830911235058500328e-04,-9.417853307247054523e-03,7.477975126799274626e-03,8.457041219681171035e-03,5.530397294747551283e-03,5.717916158186254864e-03,8.554568874047994813e-04,1.312801073020222926e-02,-5.279661806845300370e-03,8.118928611171543144e-03,-2.700780160139904223e-03,1.474887712346688393e-03,-1.023058510145841843e-02,1.147305954436785838e-03,1.960472617455275707e-03,3.810605299302910042e-03,5.370536382105960127e-03,-3.989398806394151334e-04,-9.543561339707248761e-04,3.554669834858196781e-03,-4.562213067463265735e-03,-6.260146913247381650e-03,1.018930200282824671e-04,-2.218836617057338024e-03,3.363195829384053002e-03,-6.994963555988328169e-03,3.089638310002525839e-03,-2.603267783837788767e-03,1.233776721303942760e-02,7.905932275214003560e-03,-3.586646049055436988e-03,1.035984490969952621e-03,-1.542962078666365892e-03,2.194577015107160821e-03,5.908844343588161296e-03,-3.256994462685200518e-03,-5.960444229599739900e-03,7.512998978434403709e-03,3.961539412878800626e-03,-5.567484815629932665e-03,-3.405659502207326189e-03,1.176540544925743425e-03,4.101381985293060979e-03,-6.877866179005851649e-03,-9.441222727668846168e-03,7.000496362140672633e-03,-1.826847433685725502e-03,4.916485975255256713e-03,-1.274027111228497045e-02,-4.872419433487894247e-03,5.354725626863600374e-03,-3.084240353393621334e-05,-5.252739015667879513e-04,4.157945024334763068e-03,5.094359834993273002e-03,2.360685983513120725e-03,1.089894927347548684e-03,-5.706285314912943121e-03,6.078218266627273027e-03,-2.727776299778365658e-03,-1.090282173105566396e-02,2.129766162793684733e-03,-1.894992930759004449e-03,5.090029560370900111e-03,2.495393027868292573e-03,8.427531786693762475e-03,1.374136044904736374e-03,-2.602148374357706601e-03,2.679402247438069599e-03,-6.677246647959180631e-03,-6.435093753428973960e-04,3.251749167519426024e-04,1.427436604008207625e-03,-2.157594261838698818e-03,-1.588597016069808214e-03,2.283028029276355959e-03,6.073383018619264891e-03,-1.095167741618973236e-03,-6.398877801717451838e-03,-1.102745875840424952e-03,-3.859122884738349084e-04,-1.758875917471812737e-03,1.986716641695827575e-05,-5.907811604575068619e-03,1.838695102027029728e-04,-1.486883589297041761e-03,5.951338029349580736e-04,-4.423057981194206817e-03,7.227908214312152996e-03,-7.182483254761132252e-03,1.062878323196903875e-03,-3.7136762405314
61241e-04,6.335603513759441655e-03,1.627716239426354149e-03,-1.274400426890716321e-03,-5.569127712076390145e-03,1.247477572360063949e-03,-4.363425953483584391e-03,6.377092510372978258e-03,-3.133029522719232764e-03,-2.651986894484700125e-03,4.079689589881767622e-03,8.207696762785293698e-03,1.554506715912631681e-03,-2.184714121219906002e-03,4.340588220309256340e-03,7.479119136431693418e-03,5.293892229141715713e-03,-4.407786110906071128e-03,-1.266539128852984218e-02,-6.086920119280138939e-04,1.182195754831765486e-03,-5.168612820581294266e-03,-7.111704186429415250e-03,2.683030433037295135e-03,-4.839837742684077911e-03,-3.923095553820720761e-03,-6.905976380887725091e-04,-7.247790656219051803e-03,1.508901792228653765e-03,-1.269014336039495441e-02,-3.951461797261753006e-03,6.412988632520048166e-03,-8.791284841636408040e-03,-1.731681841880616758e-03,-5.812798390573383628e-03,-1.688200182221855604e-03,-8.885725927393964274e-04,9.609453252776884065e-04,-8.784098909818311568e-03,-3.922488544214155527e-03,4.205521475763622334e-03,1.228723354024579932e-02,-4.025156796149470355e-03,1.164939949141727488e-03,-7.734377447958183313e-03,-7.025800567068600858e-03,5.696856961130122680e-03,-7.091923608862484713e-03,3.882556840584296402e-04,-9.820586808442860471e-03,-2.717792827784587528e-03,-4.318877451283463335e-04,3.893365524070424729e-04,-4.708166447896983998e-03,3.775548090956367970e-03,1.666138320566460523e-02,4.077188122299339745e-03,5.398164991939801349e-04,6.516570790650897359e-03,-8.673799105418864008e-03,-3.963194841804372373e-04,2.313368972913548878e-03,3.732784017646189039e-03,1.891958544561079614e-03,-1.007442355124538386e-03,1.081498539618177060e-02,-3.806663076918364642e-04,-4.703314937805234616e-03,-3.729738358315884579e-03,9.033397236547122469e-03,2.963906301934367683e-04,6.005817102008569877e-04,1.125823595193574736e-02,2.830875662373578787e-03,-7.983029178501063750e-03,5.252025527591146561e-03,-5.035422912268058306e-03,1.929195057018092156e-03,2.552973444312251535e-03,1.085867979410548405e-02,2.646599446672067736e-03,1.091370144931002456e-02,-6.385428514899343065e-03,3.215207400345479081e-04,-3.609376173523022851e-03,3.713682324121232644e-05,8.912858977458713861e-03,-6.023535530055238417e-03,1.273447022151857552e-02,-1.360265926195144059e-03,-2.702456971679675305e-03,6.643085253283945862e-03,3.752543241579124871e-03,5.201618667271833141e-03,2.472762165731554225e-03,1.300807848478796082e-03,-1.637499303458035118e-03,2.199863265058412537e-03,-6.225868432903307673e-03,-5.528656936001269395e-03,-4.306096746180402765e-03,2.101750767577252512e-03,7.038806802727098216e-03,-5.920154845739896907e-03,-7.009704803779374938e-03,-3.818712417034348609e-03,-1.027510145302697360e-02,-7.707528302744816361e-04,-3.544840012272825608e-03,9.079376009694979308e-03,1.048063138347974689e-03,-2.525267151165181811e-03,-1.104727519615279242e-02,-2.123906648380193777e-03,5.106239351522478057e-03,5.011622783032274736e-03,4.210121417403267161e-03,3.838716125120631133e-03,-7.760690247462621134e-04,1.018807230017518446e-02,-2.256578325795211958e-03,-1.103185467485155854e-03 
1.707445766024478801e-03,-2.666706184107229920e-03,-4.189664824385152218e-03,-8.819514915215537557e-03,-5.176988457935805150e-03,2.738716819585528331e-03,9.506650223427592136e-04,9.605517168267853475e-04,1.103827878825023277e-02,-4.182773092848841336e-03,1.535967349334695781e-02,5.307433409329287768e-04,-4.181399236903045498e-03,-3.751548422417514111e-03,4.973388896414129151e-03,1.788861983149300566e-03,6.243323278477833950e-03,9.466763697661745216e-04,-5.910079359885306825e-03,4.057203858590132696e-03,-8.056106734358178811e-03,-6.457641153915056123e-03,-8.777991147963349434e-04,7.463867516356278813e-03,6.163342392319946100e-03,5.394431981323585638e-03,-5.216886208145149993e-04,4.278350373477909398e-03,5.732259601993958988e-03,-6.055279047852412354e-03,-3.760206550922428432e-04,1.063619788158427001e-03,-1.104514945809967254e-03,1.919368898807001338e-04,4.779664852171743201e-03,-1.864381101184437155e-03,3.039607155655608896e-04,-4.106399474723083920e-03,1.397439479452369966e-02,-2.506039202783749264e-03,-1.248517149737324479e-02,1.307193216468163928e-04,-4.737772727711102652e-03,1.174648857263621560e-02,-1.828199264337698835e-03,7.094498542247060051e-03,8.025818492634457200e-04,-7.961199893765523363e-04,6.347230186869265867e-04,3.584339217477400426e-03,3.375583443616196871e-03,3.798358700484974113e-04,-2.208622836657108107e-03,2.917054291587133866e-03,2.851276708176319480e-03,-4.965996398153330049e-03,7.238091381944653305e-03,2.224172795411657935e-03,-1.073350039171528693e-02,-1.119125113473497203e-03,-5.685301967478506406e-03,-4.747614046503594205e-03,4.161419900725694954e-03,1.723490546618412257e-03,-2.506769371587114215e-03,-3.526953806035868248e-04,2.521186775338025033e-03,3.630818900924698191e-03,7.283761258552312948e-03,4.690656133696859652e-03,1.214720101596028544e-03,3.683915541994193729e-03,-5.270664471665314869e-03,-4.142428836815359276e-03,-4.338103087606916784e-03,-1.403233275507422342e-02,-1.831342986752570784e-04,1.802336663546073641e-03,-4.527869827374887690e-03,-5.019043871638822910e-04,4.633614682521190048e-03,1.571922384188836896e-03,-9.612477048041769695e-04,-1.257720162130212833e-03,5.606607723267119370e-03,1.090210090036616494e-02,1.467578201350677432e-03,-4.107239191189896214e-04,1.042945397926643009e-02,-1.775899966042058034e-03,3.037021360009705198e-03,-7.807716885706234618e-04,4.016048610009196294e-03,4.119324123056307928e-03,-5.839101500737904453e-03,-1.182966151292990861e-02,1.137334090321952609e-02,-4.060337003884906086e-03,8.653331967914531728e-03,-1.028040896575787774e-02,3.442639538735549603e-03,6.293648107255586134e-03,-3.440510908107134551e-03,1.174780040829829803e-03,-4.277353410315378043e-04,-5.974567465861197564e-04,2.075455973821935772e-03,-8.046430326410860395e-03,-3.050334297154423061e-03,5.799475625556736302e-04,-6.620951500532060163e-03,1.101207897870446709e-03,-1.848946643344273654e-03,-6.069420266177516510e-03,-5.747482757994443192e-03,6.785759700818240611e-04,3.126527415494120919e-03,-2.670354438883568594e-03,3.444868918503683503e-03,-4.146347158567669292e-03,-1.084265900135850488e-04,-8.513119997433748858e-04,-9.839445834978876454e-04,-1.815061102220843147e-04,5.212112734497419131e-03,8.182712395007604722e-03,-9.408557027607215677e-04,-1.362773094729714373e-03,1.865239899802264905e-03,-3.972542188372856289e-03,-1.392895696583304227e-04,1.575278504309407003e-03,-8.754552686846623494e-03,1.115294387849930739e-03,4.186195789670547028e-03,-2.658209630189452190e-03,5.392201616172482073e-03,2.385427727582400004e-03,2.286416564404342155e-03,-7.3303412724
06404709e-04,6.877670856674568964e-04,-2.051184139873627604e-03,1.390384227408951847e-03,1.172548167502963793e-03,-7.811025390868936262e-03,1.384334729569544620e-03,-4.318437574092803301e-03,5.640813883733987628e-03,-3.504363715468315809e-04,-1.113172089187306141e-03,-6.136697622133895601e-03,-1.233307613486895509e-03,2.753060561971726848e-03,-8.433605531749807649e-03,7.586054061578991099e-03,3.541169631071145688e-03,-2.012557171524392667e-03,-3.518402491664720882e-04,8.943163737784349740e-03,6.776555144961125864e-04,-5.413564395558392080e-04,-9.073506066393953115e-04,-5.868527579290421306e-03,8.259423430823149836e-03,1.053983680825743181e-02,-4.302255577995063758e-03,-7.598134934388078507e-04,-7.919255575249157381e-04,9.615859721819844386e-03,-1.594770518205433770e-03,1.767794082417953588e-03,-8.342976876184604249e-03,-1.726970782909487221e-03,-2.544558097521223018e-03,7.235305900338827615e-04,3.941084605240645009e-03,2.061778049499898326e-03,1.922352944062312701e-03,9.758760081890125978e-04,1.805630948787237920e-03,-7.856474765321327589e-03,-4.494659587914283036e-03,6.794555170869278661e-03,-1.183474599178266796e-04,6.907081126148977360e-03,-7.280158799148457593e-03,2.880566448899955171e-03,-3.098884130352289284e-04,6.450546892655931522e-03,1.112334509371680014e-03,3.742400807695286848e-03,-6.880071576708094230e-03,4.005479267191448811e-03,4.774164720185972502e-03,-6.385420196050033137e-03,-4.342293343167582566e-04,-1.084356684650934939e-02,3.689988246149640549e-03,-1.430255217670611855e-03,1.079936932028422412e-02,5.178443450626782872e-03,-7.327408646659863342e-04,-1.572619288567658844e-03,8.523782632236664644e-03,1.167045622846123436e-03,5.844129980306410690e-03,-8.952873307139374778e-03,5.781521247040720748e-03,1.566366130221669845e-03,2.545329876694781262e-03,6.415239583024290836e-03,-1.168808293688748494e-02,1.076816453958773971e-03,2.114625974337967264e-03,-4.039482964720989347e-03,-2.367290547177332716e-03,2.561293297888568923e-03,-6.586964483909388095e-03,-7.367010701170233211e-03,8.620234841290673466e-03,1.227076498802905568e-03,2.842532489675529336e-03,7.338886961214957649e-03,1.438613643729693727e-03,3.673531163839009912e-03,-5.777870183378689458e-04,-6.397020700037377736e-05,-4.323292679903776789e-03,1.133368634501723780e-03,1.554154068507287816e-03,-9.003077218783149538e-03,4.293705668869316795e-03,-1.784442686418379618e-03,2.576254624034628703e-03,2.605197175852734352e-03,2.242442543303432931e-03,-1.894148260084157999e-03,-5.255146886758340838e-03,1.445307404225962006e-03,-7.937840147622149574e-04,-1.095086654716515701e-03,3.679684259515816295e-03,-4.389172979356765168e-03,4.147854817258932701e-03,-8.151445033624537401e-03,7.652703963939581217e-03,9.411673378615649857e-03,1.154761019829938931e-03,6.905514057056425306e-03,-3.774717333483027567e-03,-4.039454805162502257e-03,-4.373688037677632423e-03,-1.003973770203917155e-02,2.964818448411667313e-03,3.825862517354543073e-03,6.486475211605510468e-04,5.364171773840171673e-03,-1.459024240062560280e-03,-6.562346124360475194e-03,-3.059779548864606752e-04,5.477823287428319581e-03,-4.710652648585092819e-03,-8.401571725570719865e-03,3.285226086405761484e-03,-5.246249759770550865e-03,8.711795180968665009e-03,-1.756448600879111088e-03,4.766705719916344841e-03,-3.280644140388776220e-03,-3.078538013675548995e-03,-7.795602299436698093e-03,2.357124366981020597e-03,1.735031205653480429e-02,3.409133652485150934e-03,3.509926684632296186e-03,-2.037690178198204444e-03,9.800873105630686800e-04,-7.003882156944786497e-03,-4.761903634730134394e-03,-
9.469676502889389641e-03,1.435855931604097455e-03,-2.441144642718300149e-03,-5.482645855682566870e-03,-5.614504603049194111e-03,-1.536591983455331926e-02,-7.753056119846707351e-04,-3.168794979413829937e-03,-1.118552291278091665e-02,-3.950512098396837327e-03,-3.616613801563839901e-03,4.610561495361711719e-03,-1.241797324255077212e-03,-5.525759996407103407e-03,4.225968030316159391e-03,-3.286421173574431611e-03,-1.411167887518595399e-02,-1.077435139349200866e-03,2.459058961910179624e-03,-2.239150536257000041e-04,-7.541497677066705090e-03,-1.705619845617087653e-03,-5.563177581818456140e-03,2.186856151043245228e-03,-1.104385463622378598e-02,-7.343602369881519799e-03,-4.715174551296415564e-03,-1.661226898764814547e-03,-2.717306573147699935e-03,4.513123077780135592e-03,-2.096547375097684938e-03,-9.199098190651662163e-03,1.382561148171492470e-03,-2.469068993431081799e-03,2.970818867080967968e-04,-1.274886691747432153e-03,-2.303094900542825962e-03,-1.144622155296211481e-02,-7.610737873576990491e-03,-1.308603009264509278e-03,-2.038524540897388403e-03,-1.568668523566104376e-03,8.713946021565627947e-03,-9.218056266870825526e-06,8.420495250714972284e-04,-3.358498401359823974e-03,1.049606665466664005e-05,3.437504230218230844e-03,-4.531552861706765294e-03,8.700199525736248413e-03,1.243016801226083309e-03,-5.923501197785114095e-03,6.941619386741438788e-03,-1.813290135992274904e-03,-1.873908008288633969e-03,-6.261605643859722431e-03,1.372995698598424236e-03,-7.149621929842751508e-03,3.860842744741582386e-03,6.079328168037327271e-03,-6.725035494987817962e-03,6.537437417674722391e-04,-2.632151759862331585e-03,1.909973057566237290e-03,2.246710076776878978e-03,-1.329071629016663481e-02,-5.292839990875482931e-03,-1.395794913473345040e-03,3.040015594432364521e-03,2.792641607015688369e-03,2.765117482750543629e-03,-1.272018808706372461e-02,2.618448464315874160e-03,-6.607687637967823860e-04,2.348592272427301245e-03,-9.555988683491325672e-03,-1.304540776335372807e-03,5.432174299136100989e-03,-8.547916377746737557e-03,3.004299992730135715e-03,2.944022945988849236e-04,1.415700403400486180e-03,-2.592383085080999291e-03,8.921010262041060776e-03,1.413098635418299576e-03,-4.043769603196271586e-03,2.224571443006420016e-03,-5.307013295611330735e-03,-7.572025705287996865e-03,-2.306554253265201068e-03,-6.975579497493425776e-03,6.795637131839201220e-04,-6.028212059714342652e-03,-6.117687027025635327e-03,-1.217331303528007384e-02,-6.821189616816442612e-03,1.676283157654761624e-03,7.424730622830829491e-03,2.319559115974030843e-03,-1.129404222778409086e-03,-5.493224945899142543e-03,5.534816700966049656e-03,-6.176126465719667356e-03,-4.574333871143925097e-03,5.086357307383874114e-03,-4.390977570277102351e-03,-6.699288218130392512e-03,3.510446218953622993e-03,-7.202397077972881763e-03,6.588057307706719941e-03,-2.944725601643567465e-03,4.701849927194996261e-03,-2.663072157997690287e-03,-2.984388752990081457e-03,-1.367845456977339301e-02,-6.588955914817475751e-03,2.800627792125408647e-03,9.481790216581128389e-03,5.547799605138063750e-03,-2.488940695569871156e-04,-7.129034057889388834e-03 
-2.520771811483217203e-03,-1.113787128125045493e-03,-1.143125182410540883e-02,7.887519669945979722e-03,-5.962642774994816354e-03,4.435470140379983196e-03,3.614815938472361843e-03,1.435663930640542467e-05,4.647359668198134189e-03,7.597964040399187921e-03,-5.552430283005594346e-03,-6.110595002014121424e-03,2.373376539866964893e-03,5.190923625496532239e-03,5.246893552852140673e-03,4.921733627662801258e-03,6.598511771831065470e-04,-5.189470502544115977e-04,6.232343789472022475e-04,9.031312258083744370e-04,-1.183068092116609793e-02,2.731328058667109137e-04,1.297065029324570573e-03,4.565310045549169737e-04,8.037077198357299446e-03,-2.266926292703968105e-03,-5.467114910958586040e-04,7.607089602936428037e-04,3.901427028796771238e-03,7.702105089780655889e-03,7.714280569644352313e-04,3.138059569517924908e-03,2.010103167640936352e-03,9.579090062688935597e-04,-3.434032748966094929e-03,6.354959591028223626e-04,-4.357441597440780533e-04,-2.031659325090757161e-03,-7.173142515307076930e-03,4.541813920792605805e-03,2.957261428747886244e-03,6.951509511875154056e-03,4.459015866283180915e-04,-9.109514785111157489e-03,-5.995424852484990434e-04,-4.780342401165426323e-03,-6.884347233129369838e-03,9.744889851981122439e-03,-5.058000155927835743e-03,-3.661670009860040417e-03,-5.076837142166610792e-03,8.610548213756419669e-03,2.915401088031771733e-03,7.496615058837402708e-03,8.930806670307616171e-03,5.843123798308432360e-04,-3.718740082842000513e-04,-2.144077068066462498e-03,-4.220266599391215775e-03,-6.326262267354865708e-03,1.133332211402295064e-02,-3.659087343728049599e-03,1.261601535534776462e-03,2.994247533971090662e-03,-3.596492236247403763e-03,-1.956744440641770746e-03,5.330171867412579999e-03,1.834362869737474503e-03,1.358934780917753432e-03,4.386694543625221238e-03,-4.728415138811317177e-03,-5.395915967250561156e-03,-1.590101583334575753e-04,-1.929289867391354970e-03,5.654728526562703521e-03,-5.964124816828859010e-04,1.376613906854119143e-04,1.893012902016699955e-03,-7.081525435952857120e-03,6.158368204249601609e-03,5.670903096311563467e-03,5.753924793592038109e-03,4.199700169877218850e-03,-1.637422566123890094e-03,4.268294057001390321e-03,-6.642581689005531281e-03,-2.165034117660320431e-03,-7.085848534110468586e-04,4.893666048815267875e-04,-9.853467307031855971e-03,4.641641229625031832e-03,7.728509573989656746e-03,-2.287560138718915809e-04,2.332048305004252517e-03,3.259720946942938364e-03,3.179619195478639634e-03,2.252481264944785917e-03,-4.678024282857348899e-03,-9.113352416853285308e-04,7.712058090385819423e-03,-2.099686688891104231e-03,3.333117530259125478e-04,-1.364307111406681122e-02,-8.777735248387745048e-05,4.542248889291361502e-03,-2.943411714937495325e-04,-6.497522028913475713e-04,-2.823722613561123213e-03,1.969376827892141673e-03,4.592573281804497240e-03,-2.905725623082971564e-04,8.214719773025127084e-03,2.076933142968404467e-03,3.748233401438108570e-03,-3.149387686304375221e-04,3.288161845307473706e-03,-2.505458203010930236e-04,-1.908497081432435332e-03,-3.998352833698395198e-03,1.019421549115351149e-02,-5.627827486238168829e-03,7.252815233574333616e-03,1.039296635118509859e-04,-4.277638630708186472e-03,-5.204226558980843548e-03,-2.347245385932381512e-03,5.639390031183142207e-03,5.927471003627859210e-04,6.104032707479251953e-03,1.962319719289990112e-03,1.297449932753655182e-04,7.819764582487625723e-03,3.476888257965178759e-03,4.332152245238058731e-03,5.158682169963100389e-03,-1.481731869015652644e-03,-2.779052130661544256e-03,-9.706698002223010336e-04,-5.293680806740630289e-03,-7.1682258440069384
14e-03,4.142991461833002778e-03,7.447274406152069046e-04,3.418242002303995691e-03,3.936064213815796201e-03,2.304708990972052774e-04,-2.528745324949130315e-03,-1.484168337276897329e-02,1.024038218192695020e-02,1.119353185629024536e-03,2.905799531754383290e-03,-6.548985361533861231e-03,7.199382126209371777e-03,-8.390881119010420650e-03,4.471332926378224736e-03,-4.882935937643690695e-03,-3.791986120550843733e-04,3.208163680806147880e-04,3.426798250168115387e-03,-4.324964601387628650e-03,-1.167015377506214853e-03,-3.130121143507029642e-03,2.647403132243610589e-03,3.867573327398653887e-05,8.753645622284462677e-03,6.453297749197874078e-03,1.432358931532832754e-03,1.035804318796761103e-02,4.776912189258983830e-03,-3.710456852701177460e-03,-2.699182957055791879e-03,7.026117697686355863e-03,-3.481376371516947850e-03,-3.195176998883870675e-03,4.619420301787799561e-03,-1.002906318285928019e-03,2.860355522309900814e-03,-2.666248982668951387e-03,-1.807199643049232679e-04,-3.700873301061095031e-04,3.767825981108802961e-03,-9.382763642697005985e-04,1.132396727808326550e-02,-6.078470403124868436e-03,-1.769553037177558873e-03,-2.534845840633681568e-03,-2.519051688755875266e-03,9.532486242949752067e-03,8.487384568148041361e-03,1.280726608318232202e-03,-2.999527798395026403e-03,3.076998537928889426e-03,7.384784876148959946e-03,-3.543361894655689889e-03,7.148146163386747028e-03,-2.368188168366719241e-03,-2.063165864986575906e-03,-5.676725801599853856e-03,5.457959700894176973e-03,-2.867099431436188222e-04,-8.113586291411623019e-03,8.056992126252901162e-03,-3.001812070894665542e-03,-2.172756104363854010e-03,-4.111845083384566869e-03,-2.851523566145226524e-03,-3.430608205444049714e-03,-1.166999973660589652e-04,4.001376475657096241e-03,-1.073119069906978973e-02,-3.930509973469241414e-03,2.115373185467418152e-03,-3.726109121452983539e-03,-2.067034363365812845e-03,-2.135676410099746840e-04,2.677651396611593470e-03,9.165579619111836537e-03,-6.943370177306729119e-03,-9.659998817207421460e-04,-5.223540792445686368e-04,1.886822388783237451e-04,1.773838291401252975e-04,9.321174507205378171e-03,2.899512965668248413e-03,2.597395469763417140e-03,2.238551803034958491e-03,-3.864588047500533852e-03,-2.188620230783458378e-03,-1.088667112501513594e-02,1.551615751904129132e-03,-2.533140785563262574e-03,-7.302362484238975916e-03,-2.318575898645286026e-03,-4.478598345588611414e-04,3.333187830919903238e-03,4.681917474440182303e-03,-1.233475030449095221e-03,-4.089652320476630520e-03,4.783856047625303183e-04,2.942502208763850442e-03,9.542817117464515641e-04,2.635041009098926007e-04,-2.283019815380656504e-03,4.444181964393843168e-04,-4.314020999439792002e-04,-2.704651246750493926e-03,-5.673321995055825330e-03,5.477418022388779589e-03,1.249268782743036266e-02,1.795458228181490406e-03,-7.902049129059038378e-04,5.048071472788406844e-03,-6.639119791723934426e-03,-2.849847842839001687e-03,-1.001780319034549938e-03,-9.158622379216286788e-03,4.349665444934236978e-03,2.580071039540623025e-04,7.415925961685019240e-03,-9.519256394935057125e-03,-3.335340245001419766e-03,-7.316246510960169999e-04,4.863974999788460429e-03,2.282371281477521465e-03,-6.332492367478434296e-03,4.409797767582079192e-04,4.976569579214283438e-04,-6.968254431571748939e-03,3.963080131898756575e-03,4.240904296904299091e-03,-5.482259675128255995e-03,2.504398892084066467e-03,-3.850560497763509931e-03,3.384857401591970412e-03,4.744009470395897134e-03,6.641433335190768776e-03,2.250196285642285746e-03,-7.193933036038617848e-03,7.339165749389448858e-03,-6.376217593127590441e-03,-1.
048271529967730042e-02,1.767018013506559343e-03,3.709700024215381023e-04,8.348209472139695927e-03,-1.243543808252217924e-03,-4.706721039896811197e-03,8.943480773310353432e-03,5.730840433958272161e-03,4.947921825271170936e-03,5.448470979010830700e-04,-5.421872939766886161e-03,-5.069080315439435103e-03,-5.875188276188683705e-05,-2.407182572388794246e-03,4.668660037767627391e-03,6.870819595553290509e-04,-3.044277698270713025e-04,7.287474290063781536e-03,-2.760288846596513010e-03,-5.340408475961967369e-03,-5.160901753899672581e-03,-7.210242809406835346e-03,5.260497728885677772e-03,9.985869980061051765e-05,-4.343826754602278517e-03,-3.848939623422195817e-03,2.026401355075435791e-03,-2.431490628486353799e-03,-4.063960975394559534e-03,6.212425890623843869e-05,2.560577195475052564e-04,1.567540653008173222e-03,2.424223874537864091e-03,-2.375588629154889303e-03,-2.347654003050867952e-03,4.412758442943170122e-03,4.059956504622951046e-03,6.071093830436056354e-03,-3.382620106471435212e-03,-4.029152721697444248e-03,-6.567770661928310459e-04,4.054793736625652949e-03,-5.324091159810945273e-03,9.160583837612279803e-03,4.258765664644005057e-03,2.550658166032011408e-03,3.466115295980575525e-04,-2.033585045517326533e-03,1.193143085572289730e-03,7.973931529213868982e-03,-3.267054244155774730e-03,-9.995537277746564873e-05,-1.157030900034043740e-04,-4.195570850320999405e-03,5.775660372485906004e-03,-2.972176313832557582e-03,1.616166719266575597e-03,1.436053010048254615e-03,-5.492384817392391948e-03,-2.549994103830145200e-04,-3.948538625703534692e-03,-5.287135726895112776e-03,-5.167865213550611875e-03,-7.024234780296813568e-04,8.319278368489365152e-03,-8.141899327528231284e-03,2.838017820278980995e-03,-1.432791700355338926e-03,8.228704379544192829e-03,5.369638507048534598e-03,1.246001016222703355e-03,-1.639334927318389715e-03,-7.247413937118255683e-03,9.141995550653007549e-03,2.807969863402392120e-03,-4.751378267717341897e-03,-1.256689635338192716e-03,-8.306347844203457523e-04,-3.224546115459977959e-03,7.665946015944705497e-03,3.258017727652640604e-03,3.827245361836654775e-03,-1.462116162972580810e-03,2.016218103927684073e-04,4.694299257587076536e-03,3.518051578812799677e-03,-4.502991647324016897e-04,8.151402061002437777e-03,-7.716123470127187259e-03,-2.580036554109494361e-03,4.754736566832041232e-03,8.979830711731669537e-03,1.702866128190678270e-03,-9.343045088645019619e-03,1.340576464462260586e-02,-2.912376962617582807e-03,1.416608427551783013e-03,2.749203735529994960e-03,-5.027231698372726897e-03,5.485941943422917300e-03,-2.077299032293224851e-03,1.961792353215435162e-03,5.555229869413888175e-03,5.607308894919528323e-03,6.074595457839903484e-03,1.645816118822091955e-03,6.901529340431892742e-03,3.538472810722083622e-03,4.228229770594127044e-04,-7.813075283657151045e-05,-1.102447679150901985e-02,5.386391218247797361e-03,3.590060076274750357e-03,1.393270937893592923e-03,-4.384585294649535545e-03,3.295971275051356661e-03,-2.402598759490609005e-03,1.844503690066097087e-03,4.524830800682078449e-03,6.400693152115638279e-04,-3.528108908945809963e-03 
[Embedded data file from the mlpack-2.2.5 archive payload: a large matrix of comma-separated double-precision values (magnitudes roughly 1e-05 to 1e-02), with rows split across fixed-width extraction lines.]
1609243083e-03,-9.058867005417846688e-03,6.183688995573885386e-04,3.691567957591049189e-03,5.026583317814211846e-03,-4.656009073541107247e-03,1.376732388070235104e-03,-1.596806052473704945e-03,-4.137444298110416231e-03,2.971098128492421706e-03,-1.002064829130441164e-03,-1.668340512860825133e-04,1.167796860011958808e-02,-9.554669748582299239e-04,-2.280714408746600135e-04,9.072559915545960665e-04,-2.768974254593865088e-03,1.546550415292096770e-03,-4.993477068203334979e-03,-1.551023787486409053e-02,-4.378269988179951666e-03,-8.453158372760703006e-05,2.934883612426531039e-03,2.787021424428350307e-03,-3.007924400943771601e-03,1.869263442542663236e-03,-2.943232512768965796e-03,5.026501925044581792e-03,-5.840519960311207036e-03,4.478030624309993490e-03,5.317821554263068087e-03,2.583125587195337410e-03,5.685380569083616463e-03,5.228570446985383695e-03,7.270282164160593412e-03,-5.899378937333671661e-04,-3.290259395015974163e-03,9.052232365521925778e-04,-1.325803446897592030e-03,7.332621895143543378e-04,9.590664760895491409e-04,-1.237685094425435353e-02,-4.934859086855157388e-03,-1.102224694820933167e-02,2.019351421384494964e-03,3.491012089245111452e-04,-7.364316626858955460e-03,8.138795243344085217e-03,4.234672629836373249e-03,-3.033402906031150859e-03,7.366749928628251462e-04,7.591777468532502800e-03,5.296723812958129343e-03,-3.768486828946887622e-04,6.419419928410019429e-04,-6.496267485118798011e-04,8.043125618733912990e-04,-2.613641375295042753e-03,-5.926363535289295992e-03,1.303624711844142197e-03,8.395197674374373260e-04,-1.217480964489318007e-03,-1.885499310680117816e-03,3.266163136817162947e-03,-2.654590351638831842e-03,-3.855035156600214712e-03,8.486260664884467605e-03,2.507009231616404588e-03,3.705715145289673247e-04,1.689678047565824556e-04,-3.852928171315000636e-03,-4.827334757074377113e-03,-2.526885607573969416e-03,7.597531281011338580e-03,-8.761474522613207916e-03,7.767107058328122371e-03,-2.673177768477389468e-03,3.545528556276046541e-03,-1.778826989184781890e-03,4.820075625602721509e-05,1.233484460746823996e-02,-3.015457489154905850e-03,5.677067757826154311e-03,1.108679981198203331e-02,9.562417251786314810e-04,4.961065075519017001e-03,5.168103045930084018e-04,-3.577665762105414204e-03,3.198611044541463475e-03,-3.513468173560394415e-03,-3.912707496607719000e-03,-3.945554876534583186e-03,3.327403338309886660e-03,6.242572962440326272e-03,1.490977377631190525e-03,-4.161513249969906250e-03,1.032977937281524643e-02,-3.360697715032690772e-03,3.274860757885821581e-04,-2.947355442880899516e-03,9.022322556845987010e-04,-7.168197103784657367e-03,6.547396076424915473e-03,-4.078802084972307264e-03,5.043377435989925515e-03,5.200335306714814077e-04,-1.670285368978529673e-03,2.317467210722940537e-03,-3.275220414317180518e-03,3.603397025366135344e-03,2.600574467289083324e-03,-4.462783425395588227e-03,3.433269944763327978e-03,-1.179194143450615698e-02,-8.970934130930625569e-03,9.426231421417722092e-03,9.039815233289323279e-03,-4.662808256598208016e-04,-2.814467934371844837e-03,1.205427415537164808e-02,7.658097280296830879e-03,2.413562755617656291e-03,4.349514960916707820e-03,-1.718346576124368200e-03,-7.641415270871977118e-03,6.490070308044985047e-03,-6.949367202756352203e-03,-3.011344243025933145e-03,-6.405324244513620506e-04,4.941256870606806038e-03,-2.865149425143629497e-03,-2.512817154466646594e-03,-3.165521841952162636e-03,9.810652103554516959e-03,7.694184994393677389e-03,3.616974943830516577e-03,-2.433837007479477434e-03,3.056881226594150569e-03,-1.029649376135569581e-02,-1.785986787817239003e-03
,3.342208520694926570e-03,-1.484478807456246843e-03,4.226013310093083841e-03,1.143890300111071248e-03,8.199685701255850223e-03,4.993118918560593856e-04,-8.668328173690077612e-04,2.805252494725720292e-03,-1.098657327905784575e-02,-9.743898593636824562e-03,-2.028813952763571351e-04,-1.896401342197909581e-03,-1.082121703610338086e-03,8.601793406719989224e-03,1.855372602578614707e-03,-1.796003976757406520e-03,-2.496018385655676686e-03,3.582164246455456398e-03,8.791453052936617834e-04,-3.971181962838003858e-05,7.063255134476863727e-03,-2.950078463775360638e-03,2.467378814011714910e-03,-4.663059851043094284e-03,-1.574171053907606754e-03,3.184613598060801170e-03,1.470477889048783359e-03,-1.110539937609520723e-02,-3.722634782523911393e-03,-1.637201748431679716e-03,-3.609754179539627263e-03,-2.488337666709237918e-03,-2.470038438865150088e-04,-2.172112207870539272e-03,-5.670124165733376152e-03,-8.805086560925224834e-03,5.417280186711019054e-03,6.978121744427855620e-03,-5.871529385469369429e-03,1.053293101075774077e-02,6.369271702791743338e-03,-7.811693082483468602e-03,-3.998836644201067894e-03,9.840258659564688620e-03,-1.529221580814681385e-03,-5.952662372911071871e-03,6.872674472520997647e-03,1.161801619082783801e-02,-1.925542746152296555e-03,-6.489827108923904063e-04,9.231038173518784414e-04,-3.192008624595484793e-03,-1.401029612032149820e-03,-5.589118544188230626e-03,-3.831926762641415986e-03,-5.855977120428586431e-03,1.723488962694573164e-03,6.504228670453814781e-03,3.159005893947076839e-03,4.131232106219482302e-03,-7.613449325521516424e-03,3.561065434612551654e-03,-7.374342786565378678e-04,8.916432882919926584e-03,-1.122625077216818194e-02,-2.564842431951669648e-03,1.109212013576542496e-03,5.430946546425944070e-04,-4.341975909668817357e-03,9.520250494885251769e-03,-4.532266552733912532e-03,-7.288145822454528307e-03,1.256136573909620059e-02,-3.009092830851542617e-03,-4.335333022111975253e-04,-2.847855762508054358e-03,1.146223801143428887e-02,3.907172440153983128e-03,-5.053900512491095627e-03,-2.192615151905450647e-03,-8.219688141751165010e-04,3.441687192913576304e-03,-1.066244604164861198e-03,4.619369247936949956e-04,2.725568448147115599e-03,3.993349779848269498e-03,-6.065577360354912524e-03,-1.914484221828157990e-03,-4.236621003629706221e-03,-3.636475673725866452e-03,9.382803309631777139e-03,-5.567371049817085131e-04,-6.235164416875255307e-03,-6.129734100782660393e-03,-1.673742111123410802e-03,4.680252659047450491e-03,-5.790424276442151684e-04,3.457084661570471607e-03,6.545421837063834097e-03,-4.126960350363565247e-03,8.277122370137803692e-03,-4.043171704986677460e-03,-3.309021734855068821e-03,-6.240666482192456718e-03,-1.033777779703983803e-02,-7.534729851147316249e-03,-2.433892511823200142e-03,5.378613348372447180e-03,1.610839876397831888e-03,-8.040877255050239703e-03,-9.318685400684305342e-03,-7.741807138039499825e-03,8.615403611970507249e-04,-2.443458453497609605e-03,-1.508956354078754914e-03,-4.900276797391463473e-04,2.659363764318085585e-04,1.088855686571934406e-02,3.723655358078456403e-03,-6.603003671538130309e-03,-1.361270029144960895e-03 
-2.360253660505350591e-03,-5.513033819313966147e-03,7.328074899410879500e-03,-1.150934368969585729e-03,-5.156830069998019801e-03,2.237636265265096659e-03,2.302154253056422004e-03,-9.387361700773598754e-04,-4.872999552550680812e-03,-2.172503491235028023e-03,-2.505032364093655751e-03,-4.854612178694688723e-03,-2.850650217668305834e-03,5.274566179911050097e-03,9.591384207519201800e-04,-8.749088716489243286e-03,-2.383840292953372415e-03,-7.935931021016908934e-03,6.751441976592259704e-03,-1.951524210825812374e-03,-1.374687868266117078e-03,9.687880712309223166e-03,-1.829202907107408977e-03,2.477671209071790129e-03,9.456741943240155532e-03,2.023743904167144524e-03,-1.042133802301533577e-03,-1.440982447692746270e-02,-1.625320575360342462e-03,-7.466938187847303905e-03,-1.404199211524412641e-03,8.196218588012589915e-03,-1.268337160513177209e-03,3.664965260354286587e-04,8.331435421238551952e-04,7.347654153362355271e-04,-5.607401280823713814e-03,-5.781186935330071431e-04,-2.298702151591231011e-03,-1.226907415173404507e-03,5.310847565427037273e-03,3.577923702326971534e-03,7.893663467528675912e-03,3.163750293087814330e-03,4.855728254691145038e-03,-1.133854968468146502e-03,4.345166939150186571e-03,-3.719143740954049693e-03,2.969325600212652672e-03,5.510911567650648990e-03,7.735307148840899968e-03,-2.278860468232107774e-03,9.312745046253076767e-03,6.468859708001645573e-03,-3.512707407645419486e-03,8.916979377611870816e-03,-3.282992816516084982e-03,5.074302016049429299e-03,1.042339643806067251e-02,2.810332832370696803e-04,-7.601448975350404841e-03,7.885954339827671786e-03,9.717495270842356828e-03,-2.827878572566166603e-03,-5.386548549055216774e-03,1.882283518240038234e-04,8.993210203068540895e-03,-6.638384132737450150e-03,-5.930716938438984447e-03,-7.075591779128908090e-03,-5.948272512863861730e-03,2.549033707209393939e-03,4.650011048943606881e-03,2.778127265513601583e-03,6.367828228317870705e-05,-3.927811624431268647e-03,4.732603482266626096e-03,-7.165753507483461036e-03,-5.470729667213133614e-04,-8.293297089906672206e-03,2.640688855332764336e-04,-7.619495450738799150e-03,-1.105988636173325847e-03,3.552654016931668449e-03,-3.610731786766899296e-03,-4.348664176923027792e-03,1.499448771855165083e-03,1.727000905061681869e-03,7.570706000968605368e-03,-2.423440930620013572e-03,-3.937901972814368719e-03,-5.140086050449272671e-03,-3.418856906749220370e-04,-5.621881419133798428e-03,-3.507497652121284101e-03,3.661061970648626322e-03,-8.086340497476448078e-03,3.444821536293629551e-03,5.524093707982362725e-03,-2.504703374106370065e-03,-2.376994388543241868e-05,5.990181915011572392e-03,4.532365682613406777e-03,1.008879922707618497e-02,-1.426872655875839439e-02,-6.729138614605909528e-03,-1.147212149035316308e-04,1.570245119775653259e-02,-4.030078859855482044e-04,-5.901265494867539464e-03,-5.824562023492382661e-03,-3.767602441791301996e-03,-1.766562797313654936e-03,-6.524720652714791416e-03,1.786856956644808586e-03,-1.644296444367615790e-03,3.741231996771289450e-03,-2.986518658344840087e-03,-2.805675324914872013e-03,-1.076177342347196635e-02,2.278986840797045041e-03,1.991272843788492730e-03,-3.735077540888021280e-03,-3.361927986843852960e-03,3.316756463011541565e-03,2.661185224527387507e-03,4.428871097147788968e-03,-3.617811342646266685e-03,-8.895491346921323428e-03,2.376657599623623225e-03,2.825405380148680781e-03,-9.253216149436881144e-03,1.003955421996079091e-03,3.334656340648869868e-03,5.506145162558171401e-03,6.416343034904316983e-03,8.853140581777470611e-03,2.609433878397493826e-04,6.686352509935647968e-03,2.87757
8097478449785e-03,6.144123549419656938e-03,-4.913218016249575049e-03,-6.157414315293630556e-03,8.346739561722819889e-03,1.249466439202820238e-03,-9.279537084465360949e-03,2.613804714192354949e-03,3.612252440285018771e-03,1.168809479875865427e-03,-3.974048485641916428e-03,-5.393025866122388295e-03,8.238322989030022123e-03,-1.165238201023651755e-02,-6.719615091228551586e-03,-4.468156515537896264e-03,8.694643851025704931e-03,2.774691007175832981e-03,-4.334872398153989843e-03,-1.495961950593809259e-03,2.781787260235485046e-03,-6.067796177898805363e-03,7.699578004409892915e-03,2.021375834709418940e-03,6.025916982890753030e-04,2.228344706836966286e-03,-3.315947744384325959e-04,-2.500676627189751811e-03,1.150815016314165988e-02,3.951610917887960449e-03,-3.336784861395551867e-03,-6.273989802474241352e-03,-3.519138208989820021e-03,-3.119349512011496562e-03,4.809938630189888702e-03,1.518881308672889252e-03,5.904078630652231929e-03,-4.246394007574987202e-03,-3.184434314231325731e-03,2.330659239628496903e-04,-3.245913121663910293e-03,-5.475797780686631790e-03,3.677295305338543208e-03,-2.592062248563998823e-03,8.801084886164919907e-04,-1.207644468588733611e-04,-4.421783806394825357e-03,-2.493734755827709936e-03,-1.219734766591694733e-02,-1.387804883294948763e-02,6.228056423131762311e-03,-2.096886073174303051e-03,2.591986803928108662e-03,3.447565094101320143e-03,8.800796901839822058e-04,-3.022146546942161126e-03,3.614062552949299980e-03,-2.361644029693853696e-03,-2.352870002757168153e-03,-6.584713295022094279e-03,8.066051323178459118e-03,-6.060132525399478456e-03,5.042688420718737657e-03,-1.092650401941416480e-02,2.295063895882589052e-04,3.919638174246985358e-03,-3.191734198486608116e-03,1.396563189713480573e-03,3.544987831753046090e-03,-2.839568790311383824e-03,-4.807641127312551103e-04,-6.520055673405025613e-03,2.111729621672725141e-03,2.862341963062866668e-04,-3.233958074203871665e-03,8.320935391566265313e-03,1.752491570098743505e-03,-2.476892318996283017e-03,4.115288355385909001e-03,-1.702166662087818347e-02,4.817844793590399938e-03,-1.918371303404440339e-03,-4.351971848893489841e-03,1.122015629192087615e-03,3.928082746808460833e-03,5.650915290669257171e-03,-7.012968569692410283e-04,7.329577900184794208e-03,-2.545997972424063512e-03,-2.376233442652296492e-03,7.440651877908073010e-03,3.316527063462489273e-03,-2.374029108247795866e-04,-4.920204950858965420e-03,-4.020706171851807717e-03,5.853364150597811788e-03,8.800820744366768741e-03,4.401240374463055889e-03,-6.086634115181148962e-03,3.972390039518936856e-03,3.175611040979527556e-03,-5.969958690728079803e-03,3.669792823082343013e-03,1.590466664629253124e-03,-8.240041590668012797e-03,-6.350365542373042488e-03,-2.761409402297905318e-03,1.017887598396008614e-03,-4.718744631065875856e-04,6.703453544948484617e-03,2.872484833566216721e-03,-2.011428220850022298e-03,-6.268497712377520299e-03,1.121629939443279794e-02,-1.456833805853033651e-03,-7.067200315458318366e-03,2.164904140206287599e-03,7.677117014299793783e-03,7.207198471502702063e-03,2.119957627187345900e-03,-5.789080760769371568e-05,-1.308418378570075889e-03,-4.518905467740102870e-03,1.194647557608625793e-03,-5.282158745422336175e-03,5.174658685122515310e-04,1.003940372969216966e-03,-4.603011404848837348e-03,-1.508158633422669679e-03,-1.173429853867471659e-03,8.247607594595896289e-04,-4.561175438599362228e-04,3.751501787441495657e-03,3.886187326120392992e-03,-1.708872193473783387e-03,4.284941623217824146e-03,6.777868999604941616e-03,-3.088087110385699056e-03,1.130348504423634793e-03,1.7838598483998411
01e-03,-2.255777246619173042e-03,-7.798544033379637948e-04,-4.676921648550701541e-03,9.372968353997238392e-03,1.254131690530833898e-02,-4.398724883736557581e-03,-1.948361963333878613e-03,-7.879944561354408974e-03,-2.903722374517646570e-03,-6.469138150224359790e-04,2.693604940128905764e-03,1.033786564561461896e-02,4.895645333673097517e-03,2.364394196662550091e-03,-1.958732731845428798e-03,-1.385435548777833904e-03,-1.297222850650905403e-03,6.640509622985448987e-04,2.325813799916540971e-04,6.811742774414100525e-03,3.547315016227544152e-03,-7.415037964699298066e-03,7.872854995204917050e-03,3.108334364706440318e-03,1.122453700771487532e-03,-9.884385935752927274e-03,5.967218687915181244e-03,6.116001995099325239e-03,-1.856820883560923990e-03,-4.236713356334142339e-03,2.758349787918918262e-03,1.280619162027396259e-03,5.821080832583690537e-03,-3.846126180315368164e-03,-2.191639208792860497e-04,5.982203469455766968e-03,7.430133822034847317e-04,-3.595772940260750453e-03,-1.110498594836344805e-02,9.351464970051175107e-03,-3.202543172917432079e-03,-7.634528977601363377e-03,-4.923745387580458384e-03,-8.583250014279153114e-03,-2.686566353170044465e-03,-2.389579102758040974e-03,5.800197597086844369e-03,-1.865719584724447305e-03,5.225012424665600660e-03,-4.727520306238638427e-03,4.020641850029132297e-03,1.675905032050999978e-03,8.715944645923410020e-04,-2.894384179432432733e-03,3.879640309836735433e-03,-4.956707563623096412e-03,4.738108818302712473e-03,3.496225478983592747e-03,2.891878214313854060e-03,6.471663193911656592e-03,2.574804504387408557e-03,-6.679962395069003779e-03,1.204355439265084428e-03,3.132635028896086412e-03,-6.844718250908900063e-04,8.684504202339031172e-04,-6.357179315146933851e-03,-1.075328820185056031e-02,-2.102150757900985573e-03,3.906834423841419948e-03,-2.141913086056067717e-03,-3.449331820954261527e-04,-3.360387165143769589e-03,-2.462009452768706887e-03,9.947342937883739931e-04,-1.973362298586088352e-03,-1.784933329427496615e-03,-5.640356877187165802e-03,-2.028667193086836507e-03,-3.602909183335004247e-04,-5.093356388677779904e-04,8.719963130746407470e-04,-8.071583927463868088e-03,-4.678655189336507723e-03,-2.957335026576856339e-03,-5.144186443438150593e-04,-6.263959536350250923e-06,-5.279397882241421321e-03,3.071649828433466439e-03,1.241253094513319367e-02,-5.441968898308409582e-03,7.175039697459945564e-05,-1.929559985706531550e-03,3.612627220150001726e-03,6.336933882473400450e-04,-4.381120034181854028e-03,8.686517149321107065e-03,1.444645865636115349e-03,6.794781437208616270e-03,-2.514748009231113513e-03,-9.986391122276521218e-03,-7.661476091565516851e-03,4.955957479814844090e-04,-4.760306483777586306e-03,-8.260563616200644610e-03,-3.106444422710791779e-05,3.221675289614942071e-03,-3.465562265706960930e-03,-7.937227641681093501e-03,2.667248354655227163e-03,7.069575797231925475e-04,-6.311276813715391287e-03,-1.055099320815373280e-03,-1.442282741405420156e-03,7.877148318419414800e-04,-9.851788916158165169e-03,-4.696132414861856671e-04,1.619599685665263061e-03,9.316970367133197914e-03,1.443423539921895923e-03,-3.264509067590709579e-03 
-7.000530161763918700e-03,-2.322490749799642051e-03,-3.068462825049001440e-03,-6.789602535806677289e-03,-5.088524003960805293e-03,-9.423461253374215869e-05,-1.530114658579481484e-03,-6.210622767087677076e-03,-1.083274261210800601e-02,-8.835730032554360222e-03,1.534673581245706367e-03,-5.133728480643817444e-04,5.329210052061731685e-03,-1.148409216422748187e-03,1.117100642279973991e-03,3.740444683854063541e-03,-1.240595314430739194e-03,-2.831618260343708119e-03,3.792371241174514063e-03,-7.293988881740580887e-04,2.897319915081160782e-03,4.084565274696863851e-04,2.953127578257001146e-03,-2.432860013911710122e-03,-6.003366072098688996e-04,-3.520519282757106198e-04,-3.093867904470041109e-03,6.347711595788229320e-03,4.072645827952083150e-04,8.864709480850874873e-03,-2.128050577195856957e-04,-2.470959365588189498e-03,-1.029034573045220675e-02,-1.447595502084638493e-03,-3.432537959637417969e-03,-1.431627677195952165e-02,1.680089018493522530e-03,2.036229128406153409e-03,-4.437415555769846053e-03,2.405743387659520921e-03,-7.404243080159399540e-03,-2.981046340985204698e-03,4.760642581874707097e-03,-4.037809276722448618e-04,4.511064958083149776e-04,3.013095458796075056e-04,-6.044648541543814688e-03,5.686969371456724806e-03,2.998226735281329849e-03,-5.101975858475222246e-03,-6.375341956857650920e-03,6.900626324468470303e-04,1.321881804528424577e-03,-9.039639343598059214e-04,4.682596793409798605e-03,-1.709264647016215180e-03,-7.642574112017757985e-03,1.675543710386417917e-03,-2.781811637110515739e-03,-1.712467054377938636e-03,-9.022664371658047772e-03,2.072905297885630979e-03,2.175257600435498082e-03,3.513951446606932826e-03,-4.628962353979581244e-03,-4.242034523220619170e-03,-1.071275672552658015e-02,-8.253133492008447175e-03,-8.303541624510763636e-03,1.798027028707580716e-02,-1.616863223495178812e-03,1.753441853573160835e-03,1.022762662795395280e-02,2.804872010338475819e-05,-4.874385499721702562e-03,-2.799490914237300412e-03,-4.816555494076219857e-04,-5.798769638182297431e-03,-1.596431645376913955e-03,-1.071637040811969539e-03,5.652117129885130334e-03,-2.946580973657305248e-03,-3.356318150643255310e-03,7.027017350002665724e-03,3.221841637141828474e-03,4.971975746451290108e-04,-1.637740411851962632e-03,-9.104764988488191473e-04,5.239485252030873057e-03,-4.617840355500279505e-04,3.336456640138778153e-03,1.087840092684755561e-03,-2.729373307844564726e-04,7.262798446910263567e-04,1.440049195306168650e-02,2.603451806456168454e-03,2.825420094214404435e-03,4.512705319649134954e-03,-2.056526062119959277e-03,7.653883454607680707e-03,6.637217390785705821e-03,-1.128022385570426792e-05,-5.333553696056010131e-03,-7.061841721777106957e-03,-4.397419601343995933e-03,6.306425625231044940e-03,-4.329906966257001722e-04,-6.565342273058671213e-03,-3.258937893357627185e-05,-5.049052514773201550e-03,-3.748318721610879173e-03,-5.213227557387184984e-03,-4.973005429065715209e-03,-3.646731659848588374e-03,-7.076419102905945509e-03,-1.101437432586481859e-03,2.017181336824650471e-03,2.394591528881830425e-03,-1.107967440105867181e-02,-4.898801301114358644e-03,2.257418322057287707e-03,-3.497775330621919442e-03,1.965588668905438301e-03,1.234985551821858899e-03,5.240403116531713347e-03,-5.283047484483297364e-04,1.528329253688115149e-03,-2.881430779617573119e-03,-1.600786014261314227e-03,8.995630837131299946e-04,3.730146053685210079e-03,5.888069718990418533e-03,-3.801279669403728918e-03,-6.192309263380303685e-03,-1.896512095469650135e-03,-3.718682617103701970e-03,-2.025855159503666802e-03,-4.185979920941070194e-03,-2.402171568528173578e
-03,2.395661561221281571e-03,-9.690393301863913476e-04,-6.626854981649511409e-03,1.260232926025161519e-03,-7.527502174674270425e-04,-3.913621315562343557e-04,6.564770089926892475e-03,-4.310909515032607629e-03,-1.183206407630603332e-03,4.499384069183313961e-03,-1.033182243167250335e-03,-1.774858802448344732e-04,-1.599669836776115807e-04,-1.078081403538785378e-02,1.513711348942085129e-04,3.332372162758812616e-03,1.048965593145756913e-03,-8.803100147213947096e-03,-2.098281398787369030e-03,4.250611836366160849e-03,-3.734943931726724103e-03,2.134263174104229690e-03,4.511929671772295369e-03,-7.005245596575637853e-04,1.770006684169482102e-03,-3.270070497618829310e-03,3.659490318864961397e-03,-7.895155473826263870e-04,-1.501386341521731768e-02,-7.027685639299820208e-03,6.426922246288899708e-04,1.039435315809962568e-02,-3.256050084450888907e-03,-3.391013130081691525e-04,-3.217874550166945763e-03,2.779521799189457309e-03,-3.286596117830282125e-03,6.745854503400874717e-03,-2.115497974073195675e-04,2.596227936560420793e-03,-1.140346175782338102e-03,3.144687665492146376e-03,1.653502662835088736e-03,-8.441072877753330508e-04,-3.623416294142703647e-03,6.852721133098585958e-03,3.878716123021490065e-03,-1.472469088444951571e-03,4.509161727882187061e-03,-3.119280949208629982e-03,3.531149205256840199e-04,5.355525445599603092e-03,4.412411974812405040e-03,9.133055518165993786e-03,-2.603453237767842238e-03,8.296223666990246159e-03,-6.040248767803279954e-03,7.186401598263223930e-03,5.457726939541318210e-04,-8.932251911777852865e-04,1.400011378749872011e-03,-5.840851736307490008e-03,1.082328874788145677e-02,8.620412667329468634e-03,-2.170268255417328717e-03,2.802281334558897295e-03,-1.542050278008344532e-03,-1.240216930598768245e-02,5.813219955794719958e-03,-5.634308625338918866e-03,1.514262474633138345e-04,-4.999769928736643408e-03,-3.932472046172503545e-03,-5.514522906526502302e-03,-1.930589577773406400e-04,4.179910089186483345e-05,-9.508248499993500122e-03,3.589002308252177680e-03,-2.400088405363701681e-03,9.379885766586344015e-03,9.525163230107343022e-04,4.394476701624597421e-03,1.171300797122012396e-03,7.959942918655382033e-03,-9.254779599785680627e-03,3.530332666231051405e-03,-1.962287592009320097e-03,-4.666985771672562452e-04,-2.345446626350466014e-03,-1.079101793525521938e-04,2.651109927604756045e-03,-8.130382720012666042e-03,4.013037114452539141e-03,-1.201444547161087135e-03,2.765456576773901113e-03,-1.500257258601822531e-03,-5.570896358949796630e-03,-8.061320056504667339e-04,3.975995296084818519e-03,-5.311764596531324406e-03,8.338821912223776502e-04,1.433560036266240617e-03,-3.531538723665170187e-03,-4.895200038628312079e-03,4.129412178104033097e-03,-3.866094349800014082e-03,2.514959996370967274e-03,6.394147582012428251e-04,-2.965451299107574996e-04,1.151446811190288532e-03,-8.726497056626338761e-03,8.948870715422625050e-03,6.059051022556457763e-03,2.742825112699632997e-03,2.943667888094947047e-03,-6.685586523788529566e-04,-2.629234581456949145e-04,-9.713135344483946054e-03,-6.564939967352408567e-04,5.838021590274363121e-04,3.247672626849698753e-05,-6.114857294534076400e-03,5.415226781204299580e-03,-4.825432289297588648e-03,-4.679265618843195598e-03,7.128700569870876150e-03,1.806674237995237750e-03,-7.943731226328779385e-03,3.466909517084686924e-03,-1.803127853361724021e-03,-2.176377121766097757e-04,1.297485381971558464e-03,6.758533085413504990e-03,-1.539463545394492835e-03,6.698158777360760244e-04,-1.153833915738972614e-02,1.106111540997981773e-03,-3.357587419047714048e-03,1.098523268945755110e-02,2.545
284467495658592e-03,9.627405499844650383e-03,-6.589680410666882114e-03,3.324900938184322755e-03,-1.652074803035975294e-03,-1.603994829475803661e-03,1.187879233908820239e-03,8.862196257067096045e-03,-3.423185409723242777e-03,8.099557884985753367e-03,2.277690797070846141e-03,-2.563172728323383654e-03,9.329967014986713034e-03,8.406517448350329588e-03,-1.095661437585968272e-03,5.104314026945970181e-03,4.143917830721785198e-03,-2.707246369975962151e-03,-9.965087465014886700e-05,-4.452525942287206308e-03,-1.253761274848655058e-03,-1.491774773158327473e-03,2.075226658682588401e-03,-4.635875085243946689e-03,-3.885164098474975657e-04,-9.799039205227418666e-04,-3.672692953157695103e-03,-2.847353287967226837e-03,8.593355333436174942e-03,-3.445067420649806228e-03,-1.208305691595484236e-03,8.825430160977566557e-04,1.741649433881841758e-03,-1.074446373680523975e-03,-6.702821566504772212e-03,6.393478712090434543e-03,4.069780197170940438e-03,4.804691453235964207e-03,-8.703728020974832100e-03,-2.159874351305441764e-03,1.604579550090380006e-03,-2.076640003297499249e-03,1.616585793717385897e-03,1.719349680949402992e-03,-1.838838968091890480e-03,-8.650825847679250368e-04,3.035399482155822903e-03,5.809680335564136600e-03,-3.653721753375505175e-03,-5.343663771789810678e-04,-1.408731081126984560e-03,-1.551956800221247290e-03,4.053314321240474027e-03,-2.133387031557531371e-03,4.014070276737278717e-03,2.786929439683436417e-03,4.631470061458468034e-03,9.493635704867323414e-04,-2.737780886500545269e-03,-3.068738688410386801e-05,6.209122722168617520e-03,5.285656419853845822e-03,-1.291577610681716158e-04,4.368979114755576834e-03,3.670796794767257256e-03,-5.682485549092534602e-03,-3.101241308775871865e-03,-2.656278082079827508e-03,7.101695973341937314e-03,2.720297334363285750e-03,-1.042142208963365083e-03,-5.308056466367358159e-03,-6.222113345040988439e-03,-8.898232722600037808e-03,-1.794505645703289073e-03,-3.624882941212217383e-04,-1.034158510911888833e-03,-6.262371605450306293e-03,4.056467128295450884e-03,-8.393120551398104701e-03,2.600767430329943477e-03,1.786778408838508988e-03,-4.691435347583789751e-03,4.437838485450707593e-05,9.774362069461684649e-03,-6.125903114590459768e-03,6.775709346210466097e-03,3.503260455896483011e-04,-3.001863357096735008e-03,-6.312823941690194911e-05,1.843122959676705497e-03,1.694438945965120189e-03,4.553601941585507591e-04,-4.971210727340218648e-03,1.883766610785536991e-03,6.319424844199700297e-03,8.542521564930681197e-03,-4.851042314904281325e-03,-2.580463452232777989e-03,-3.572338581144758607e-03,-1.710454058928010876e-02,5.336418820758892815e-04,3.752592299128363913e-03,-4.880230962837949285e-03,5.294210761395710084e-03,2.995801337393809698e-03,-4.132494416154104677e-03,4.934271279916175798e-03,-2.433312580927834024e-03,-8.979598998725071488e-03,-7.897185127924475842e-03,1.106019587565586866e-03,-1.486955877009329399e-03,-3.942335722337222959e-03,9.290090481024087295e-04,6.029309938905077874e-03,1.395126851547596981e-03,-4.533830376546346924e-04,3.014316853537208317e-03,5.573542149172850194e-03,1.316712371074599481e-03,6.872294385176280773e-03 
-4.030407130076279348e-03,-5.425812858189012385e-03,1.522205740753066741e-03,-1.717451560639069721e-03,-1.940398366294279493e-03,-9.068035523986378924e-03,1.470867030570182721e-03,-1.109035602815818845e-02,3.552638298079847587e-03,3.765556703900579949e-03,5.152576354853173612e-03,-9.185935109060076181e-03,-4.831187071220564844e-04,-7.852395716673503390e-03,-2.248310446054933767e-03,2.215916962656099053e-04,-8.267588335524420073e-03,2.430338920214319010e-03,8.474317220391714518e-04,-1.268514540524811642e-03,6.837698672981848760e-03,-9.584445431641844260e-03,-4.189346957371352376e-03,9.055956881159765368e-03,7.197069056915876041e-03,7.455648605704404651e-03,-9.322302274630732410e-03,6.467497038800493203e-03,7.530683337717411149e-03,-2.775935330565454991e-03,4.262464121536819334e-03,1.410403633197723139e-06,-1.169146493537949645e-02,-7.285752219571505449e-03,6.224207456213964006e-03,-4.207213052657670337e-03,2.373821051558223320e-03,7.225235522937947710e-04,-1.403154307653337908e-03,4.138635492683481352e-04,8.407278139136345474e-03,2.176071607659554093e-03,-1.877443658188876572e-03,-1.232491258089305408e-03,3.933665959091016059e-03,-2.139141957354719103e-04,-3.709736672786678735e-03,-6.578911607254549383e-04,-4.514436925018654699e-03,1.043660155299007885e-03,-3.778428427688851473e-04,-6.507115127307850841e-03,-9.633458749833192787e-05,-3.599478417749196919e-03,4.388125751816357965e-03,4.342229326973887603e-03,4.617364716447294390e-03,-6.192106233499324215e-03,1.641331730002754368e-04,-3.626431184380391436e-03,-5.246613161691734452e-03,3.239285052489630432e-03,8.933198623083966200e-03,3.887783004060451318e-03,-1.149536005911777192e-03,-3.924259462330029549e-03,-3.949503903499995347e-03,-1.579891183468485130e-02,-1.477780304498592509e-03,7.692458253169451006e-04,-3.050036856434978818e-03,7.400729850171316147e-03,1.646008757133135547e-02,-2.987940979896748506e-03,3.229824443470832168e-03,1.405766583484668701e-03,-1.971860294200185239e-03,3.868260909534667735e-03,3.423864991882596878e-03,1.643444106303607608e-03,-2.985024430515896988e-03,-5.585455078401575617e-03,6.713818443747652858e-04,3.225242720440241878e-03,1.463076850145182066e-04,-4.861782696292721653e-03,8.334175830195219084e-03,-3.738189925895982983e-03,-5.067634561761108665e-03,3.331192699774920916e-03,6.769737720889183914e-03,6.997643579938752842e-04,2.632736193061806663e-03,7.430731980672185266e-03,1.095404125564272647e-03,6.302455265033454594e-03,6.025407511504461168e-03,-3.886989223650890325e-03,1.125969905619545776e-02,-4.440938369966454351e-03,-6.793539024688931803e-03,-3.487282275965580217e-03,-8.641872716955538436e-03,2.025301021851915861e-03,8.403771138875752679e-04,-2.151761526076423987e-04,-5.321749394542520800e-04,1.221336610730191846e-03,2.887642232112516867e-03,-2.470306077151743565e-03,1.459197026674658110e-03,5.153707628972060625e-03,7.296431360148668624e-03,-3.782681620148800345e-03,-2.893829173549607083e-03,-1.452768754408879882e-03,5.555265482484978388e-03,-5.519687514515957301e-03,9.154185864729500258e-03,2.122612783525826101e-03,-2.950786532228119163e-03,-5.288302419295847108e-03,-8.554198102998031164e-04,3.764845686310798294e-03,-3.546165237965252532e-03,-4.073009705715230876e-03,4.973322683824050842e-03,-2.929431093862485845e-04,-7.950246267553694700e-04,-3.571835397562741175e-03,-1.170508343490961594e-03,-8.638914958747251002e-03,5.046025268040090980e-03,-1.142534709606937400e-03,-4.314385210659929179e-03,4.900711802046302072e-03,-3.718223874825131708e-03,1.112405825952915948e-02,1.258102038381014171e-02,6.1406040
93081484388e-03,7.690900351856190531e-03,-6.158948301528857465e-03,1.215625872139073407e-04,-5.887509009774828488e-03,7.174993537499001206e-03,1.742462848371886374e-03,-5.823417905212530587e-04,-7.319674202400575438e-03,-1.150738877324157658e-03,5.072838191467607047e-03,1.315295416045753583e-02,3.360533524914879541e-03,2.221733219997253095e-03,5.413518399778242604e-03,8.468826649440127155e-03,-1.747753645896861279e-03,-7.179560815570386161e-03,2.012307642406376430e-03,5.952067776882704990e-03,6.350466460113691862e-03,-3.847801161525560808e-03,6.360756738552544171e-03,3.373268920827875660e-04,4.642227358833906671e-03,-5.808822446842700372e-04,-7.421638345031611288e-03,-5.069522413677310206e-03,-2.046122322759993256e-03,-6.403111024511966851e-04,6.024995823372737182e-03,1.581661979857700235e-03,4.816552698442579800e-03,4.444908385893890646e-03,-5.928377894468804879e-03,4.598436993007339594e-05,-4.786739889625209361e-03,-1.829008279089828744e-03,2.271849385878651833e-04,-5.359095961897187904e-04,1.026481872791977024e-03,2.597946605644926420e-03,-2.213543532323257630e-03,2.824090581475666682e-03,-1.066569455259174654e-02,-6.487433666880193343e-03,4.345047232235959081e-03,-2.019962749388533579e-03,7.670363165259445806e-04,1.117842345071390700e-03,8.499039340728665234e-04,4.093068151802434392e-03,3.295761473309684764e-03,1.961146648181440796e-03,-5.259820924367882705e-03,-2.487275472389827938e-03,6.681640084024945830e-03,-5.864727379068513692e-03,2.918948920664941188e-03,2.747479716778627058e-03,4.078629571993474569e-03,-5.803134383572296044e-03,-2.607005753280737179e-03,-2.834736133973587025e-03,-9.020222140565383601e-03,3.828131705380129982e-03,3.579389167074786718e-04,-1.154217393615471210e-02,-8.889163796013196603e-03,3.478156842857247797e-04,-2.148758099819715172e-03,2.098175436328348752e-03,-1.559345200102770900e-03,-7.324501961313532872e-04,1.133052527642629606e-02,1.884106092619415368e-03,3.772876856253745085e-03,1.285463952799429776e-03,3.995269760446595528e-03,-1.326956038575296615e-03,-9.008214045011711654e-04,-3.153875585303162702e-03,-1.802649654846051434e-03,3.400135137749309231e-04,4.338331174786978252e-03,3.821811652446180461e-03,-3.523716581170634708e-03,1.985789080034379688e-03,3.156076985046256216e-03,1.529172231640542551e-03,5.156125311334382004e-03,-3.036341390083630454e-03,2.542184038074421937e-03,2.301598169194170578e-03,4.781828121525861543e-03,-2.942656678152416692e-03,-2.065898565083543366e-03,-7.594995094265023956e-03,-5.923951486690663681e-03,-5.149708121637882295e-03,1.975745576676700421e-03,-1.028571369842214492e-02,-7.749665907976002743e-03,2.133488636744027955e-03,1.215687040986938037e-03,-3.868643581959321179e-04,6.758123906556501304e-03,3.343003106550832167e-03,-1.983573335735539823e-03,-1.133634791662054605e-03,5.797812222467811812e-03,-8.053973371231515658e-05,-4.288415789420628108e-04,3.880905225499796819e-03,8.791958871420253285e-05,-1.188257813786982794e-02,2.294719464371604289e-03,6.719651635813203181e-04,-3.068908239281574555e-04,1.345814346372160636e-03,3.060364657898621745e-03,2.252161802829900881e-05,-7.776273954573734723e-04,-3.568832883388778007e-03,3.545911492589500121e-03,1.571792233325765417e-03,-8.108241822549842881e-03,-6.255570707551635565e-03,-1.828000490071781287e-05,3.283625988051350458e-03,9.301140090854679091e-03,4.352050898728463714e-03,-7.338543124523819530e-03,4.019878915543122384e-03,-1.306364540041894673e-02,3.993333465182965884e-03,3.079420598470566321e-03,-1.465542191319108359e-03,1.488535520136263519e-03,-6.072468747318912619e-03,-2
.148865988455902435e-03,-8.107077739623941759e-05,2.253441225167916555e-03,-1.566578766310079088e-03,-1.023436960882263130e-02,2.052903661552903190e-03,8.752643601770832005e-03,-1.323408764209813796e-03,4.443901596305878041e-03,1.905801529579995244e-03,1.717063980398790914e-04,9.448265570918270087e-03,3.237132179919576837e-03,-7.825527791698531910e-03,-1.873649694855493257e-03,-1.619816487215881839e-03,3.856276436143791717e-03,-1.421632898311557232e-03,3.632686999086452012e-03,5.697435657050053080e-03,-7.043459029292835263e-04,1.468288675314712672e-03,-4.919581738842705027e-05,1.035047848280581766e-02,-1.146043252060402126e-02,5.686633532763564133e-03,-3.151693297295320422e-03,-4.018725960966433065e-03,-2.717038064110509615e-03,3.270418445427395685e-03,2.539804782474771282e-03,-4.387309367893404803e-03,-4.909090224613935592e-03,-8.530667802060291904e-03,2.249889093302272734e-03,6.998292675401547765e-03,-3.792470536388418316e-04,7.018427857049677580e-03,5.836971935596226187e-03,5.048931928726565696e-03,-3.865502351883901618e-03,-4.693805753639716284e-03,6.235740645194741416e-03,-2.559293949591066072e-04,3.027994475464451879e-04,-6.711500071921426731e-03,3.455909498457029189e-03,-8.682557510629086359e-03,-3.643792723018532929e-03,-3.916365368535329634e-03,1.005865029121969130e-02,1.567332337119200234e-03,-7.392309407754383763e-03,6.167932377125770818e-03,-2.843860532286482390e-03,4.517470890563073799e-03,2.938348934755362655e-04,1.103258769804037545e-03,-5.337174367266840289e-03,2.942387806593046128e-03,9.329370357455691806e-03,2.671843403311475931e-03,1.310400788329231731e-03,-4.513886940819071027e-03,-5.022456014723230243e-03,2.327032297418298468e-04,3.027588972863738681e-03,3.882769823362788430e-04,-2.716543529154663746e-03,2.976816169250637995e-03,7.547251540659237912e-04,1.509313999708619897e-03,9.610071357641903483e-03,-3.291963139150236219e-03,2.798395046262388414e-03,1.040085186024844238e-03,1.731411952751598510e-03,4.426411577545601942e-04,8.343743831934925933e-04,1.362753383502822956e-02,8.392746595082892225e-04,2.544752668927138892e-03,8.844624057950913748e-03,-1.117008733086350715e-02,1.378362904640061375e-03,-3.038514439953199277e-03,-4.567701585815577003e-03,1.338988871290927856e-04,-9.631521213710020693e-03,-2.089868362788210431e-03,3.204165703943247148e-03,-6.847610627705243320e-03,-1.735211799694804909e-03,2.650569118236031831e-03,5.633300853304181594e-03,5.755641621998939042e-04,2.809506732991044406e-03,-6.319872859535847695e-03,-7.922273286699722632e-04,4.400568117213566645e-03,3.099837540401088280e-03,-3.696790625423836130e-03,4.231217861521335832e-03,5.596051761340858736e-03,-3.850375235111359699e-03,-6.023707986164369456e-03,2.219032601611489047e-03,2.436436748841988142e-03,-2.781462595029496332e-03,5.560372437522905116e-03,5.684471663439056265e-03,8.662188083951428627e-03,-5.741488713392232192e-05,3.758805905965720458e-03,1.472430380462913611e-03,-1.297482053268992274e-03,1.768789693163989960e-03,2.488221673349869881e-03,9.380111785754470174e-03,2.631060478232497609e-03,7.701549900501344741e-03 
-4.392144207259222541e-04,2.263835661732196015e-03,7.258052729773339928e-03,-1.801798649176931683e-03,3.381650429615626428e-03,-2.510986244987935094e-03,9.160848869645424547e-04,7.091947846874327964e-03,4.798080599409652885e-05,8.714227472253286561e-04,1.314074715303442708e-03,-5.248428196439644360e-03,-1.313134478936776612e-03,-5.414735900558007888e-03,1.141628463782540801e-03,4.247689833582284601e-03,2.705372476325129671e-03,1.108700097542759250e-03,3.311455782575790561e-04,-1.440003346121289645e-03,7.445132378813855423e-03,-8.936815935945908312e-03,1.944775923616476852e-03,3.127308963141940852e-03,1.687770331170596700e-03,-3.440482087854991426e-03,1.906336135847638705e-03,-8.763178437730256495e-03,-7.067363973776762467e-03,-2.959940354787142253e-03,-1.713672050422401782e-03,5.885611035520341670e-03,-3.201694540143820017e-03,6.602800028432205190e-04,-5.864816313402453421e-03,3.642955099607615363e-03,4.676097224868417009e-03,-1.135565220755698802e-02,-9.087495370341090381e-03,-6.808510401415434135e-04,-1.753122143721811549e-02,-5.200195438283673581e-03,6.229525287474147702e-05,-5.666487889935126893e-03,-3.395085551515747944e-03,-2.981850888795109424e-03,4.235826274691528899e-03,9.114549448190848585e-04,1.997962545259783767e-03,3.282881186024990089e-04,4.139546683597763489e-03,1.596557741584458739e-03,4.019745895372240675e-03,-5.074306232056173538e-03,-3.994019603823511393e-04,-1.097064921296494674e-02,-2.448652559524268010e-03,9.581562617610979785e-03,3.177429306159077968e-03,7.139955057686248052e-03,-5.159342716560432439e-03,1.329314163681497019e-03,-4.483731461333490854e-03,-3.687678611190008116e-03,1.101803376572061365e-02,3.353164874435574788e-03,-2.749874869463978487e-03,2.549753270723857917e-03,1.390438812490628296e-03,-3.335857404837440279e-03,7.017897813194826084e-03,8.052197583098205658e-03,1.430591251697324161e-03,-9.163597097887649806e-05,-2.542691527298346615e-03,3.606051129456581614e-03,1.392263434667684145e-03,-8.603578136544417013e-03,7.796871662113766949e-03,3.373459462611160843e-03,7.054083512686951107e-03,3.585305211050254098e-03,7.413704026746294109e-04,-2.010821136067538623e-03,6.629415582984141478e-03,5.542899964664660005e-04,-2.560286417932561981e-03,-6.654136265078250748e-03,-6.291992499190610447e-03,2.891329920843462454e-03,-9.623738733839197479e-04,3.359947119531264941e-03,4.502282963999080026e-03,1.150492574244819802e-03,3.292932211680471479e-03,6.594212595370239557e-03,-3.832594875312446479e-04,-6.416702136378975643e-03,4.958357526343062927e-03,1.769954204733974147e-04,7.213413626379073726e-03,-6.127359007169835663e-03,2.191131696593626339e-03,3.274632870058724696e-03,6.703354437309769036e-03,6.768160236085706168e-03,-1.661061355518424730e-03,1.248826559144815057e-03,-1.638463679662895351e-03,1.092110688715076765e-02,-3.271181136478256389e-03,-5.487938781776348016e-03,1.215741045726369089e-03,1.460312406780949444e-02,4.693783763720094961e-03,-1.655024698601544183e-03,-4.684782715568137045e-03,-6.388292070705959592e-03,-2.913158018105827494e-03,1.757411584141437650e-04,2.066373548243968274e-03,-1.263474478569363065e-03,-1.486078288922623508e-03,-1.010572086857825652e-02,6.439731824300002141e-03,5.145155121321424475e-03,4.521713577066524894e-03,1.183264966429216662e-03,-4.649479131107793657e-03,7.979617491136917857e-04,5.972421264920922982e-04,4.480117200628373764e-03,-6.752492444818629543e-03,-5.589480053470917767e-04,-1.026435615928048602e-03,9.912583086144266184e-03,8.250690291368977804e-06,-4.033248938858003188e-03,-1.660501493409482533e-03,-4.27915074285434559
3e-05,-2.938777977324266168e-04,-1.187428489212411170e-03,-7.976717817265258428e-03,3.493562681816731082e-03,-1.646244607964052198e-03,3.493075197786352400e-03,6.762147237336821293e-05,-6.586385610545816100e-04,-2.627347950115381699e-03,3.282489886868762436e-03,3.628494663261257018e-03,-1.479085791127005044e-03,-8.047230860838699637e-03,-2.574565114498356556e-03,3.911227431693896535e-03,-1.586085905984517541e-03,-2.818958362249829840e-03,5.821699588503775717e-03,-2.415565417846667583e-03,2.207239510172214099e-03,2.937186720414851522e-03,2.618534362145249171e-03,9.532378783353389305e-04,1.337210237264047736e-03,-5.832672647694985002e-03,-3.568580293838071946e-03,3.265967314636871940e-03,9.149961318502076784e-04,8.731801475793421127e-03,-3.422802319016812929e-03,2.234279851835305891e-03,4.628655206915611238e-04,-4.492320844328460613e-03,1.044865156581117503e-03,5.995828341091795355e-03,-3.574791784297631954e-03,5.932514549895052079e-04,6.953521022693325080e-06,2.781187302991122278e-03,6.771831604547855644e-04,1.770805285758239849e-03,3.858912707400472117e-04,1.462184040888120417e-03,-6.609778875069407741e-03,2.427682030703431568e-03,-5.947412345699418755e-03,-1.513915907650349854e-03,-1.014055662123605588e-02,1.124605055545484103e-04,6.565347348131493274e-03,-8.520520436841153055e-04,5.178798051240169308e-03,5.939470819885016295e-03,-1.859011426835586664e-03,-1.152913771690127365e-03,3.556357891863966801e-03,-8.417798132210730852e-03,7.421198442471969164e-03,4.968862156383303981e-03,-3.750310044129547644e-04,-3.362215980230004366e-03,8.108629401258789227e-05,-1.827364453443840794e-03,-5.817618931169283268e-03,-8.725414674023788952e-03,-6.742883759377350631e-05,5.223010810456069596e-03,5.614815031529754065e-03,-5.655922602251106266e-05,-5.282984136094662567e-03,-1.141789569219965259e-03,9.786578963276245866e-03,7.906867456935687136e-04,-2.731318434046589843e-03,1.467270358215386509e-03,-6.331814252722730392e-04,-1.256737834694441128e-03,1.892370434130906777e-03,-4.528837346514808133e-03,-9.290816384542200518e-03,1.496491401608340463e-04,-1.475249534972204214e-02,-1.961947543947041399e-03,-7.936994070972171883e-03,5.208256860457266720e-03,-3.723028857733979517e-03,3.343880774667789147e-03,-8.918165398265785401e-03,6.243774817922181919e-03,1.951665433315388993e-03,-1.033398652461181046e-03,1.420817859869570295e-03,2.067536998889229170e-03,1.289769779227443710e-03,1.142704130650512573e-03,-2.880374618015194791e-03,9.287182377099409587e-04,-4.570792936486688281e-03,-7.657000109777569591e-03,-5.923369625208722844e-03,2.680121143326372624e-03,-2.847238991052571805e-03,-3.831529130888796080e-03,-2.124581758499388684e-03,1.712980523074577807e-03,6.300767517107249468e-05,-3.240395312449509055e-03,-3.108750297731352561e-03,1.521454156977874051e-04,-4.213501796357697372e-03,-3.524448861916378017e-03,-4.290296489314881015e-03,-7.252461887120162029e-03,-3.108344694156153862e-03,1.540010085551218096e-03,1.693558652840373513e-03,-3.714640187711271829e-04,-6.591161037830570169e-03,-1.018210863541118248e-02,-6.222286627813472699e-03,-7.059709338137465894e-03,2.205611511515990705e-03,-2.775057319589705872e-03,3.060406510931383463e-03,-6.464165095641761019e-03,1.619200834001496012e-03,1.975835967671916071e-04,-3.220415641517908149e-03,8.373183313104338651e-03,-2.351512661960289581e-03,-4.593906930959583759e-03,2.197682348930058739e-03,5.967654528406643281e-03,1.048253868045288285e-02,-3.250106978922931975e-04,3.103487662796541868e-03,9.181430216311445291e-03,-3.592623436022776996e-03,-1.392483136077484265e-03,1.
017948955934499036e-02,1.099554967202970301e-02,4.188087858727418226e-03,1.239821312324101586e-03,6.541901902028667178e-03,1.143737031168412608e-03,-1.180483567344829936e-03,-1.762708540110091834e-03,-4.435463837622764284e-03,-5.040643422161939988e-03,3.375985945433663832e-03,-1.289898818379989036e-03,2.280858650113832316e-03,5.832213689990190138e-03,3.685622765520687148e-03,-1.698179603697538322e-03,2.350037381428430206e-03,7.606910879041508568e-03,-3.285996121576392846e-03,-1.672130223832380119e-03,-8.217309854707199011e-04,1.903876882051133831e-03,-3.261195544423365050e-03,2.957658994151954931e-03,-1.015349245263753560e-03,-3.297248375643009339e-03,-3.973561618637544640e-04,1.890042369360612606e-03,-8.381556472906101507e-04,5.749749895030971392e-03,5.717858576565536775e-03,-3.025815420172579730e-03,-8.198954161044580066e-03,-4.651973366868266888e-03,-1.722257639779948028e-03,-1.016759348778255127e-02,8.653961214580742009e-03,-5.842667492699133401e-03,-7.103459163807148737e-03,-5.947261436722139916e-03,-7.999057925199894517e-03,-3.483138031547776533e-04,1.131495898014661921e-03,-3.504790736890203186e-03,-5.555543042259441198e-03,8.508795299380186861e-03,-1.806395202663712337e-03,-4.406734530416885585e-03,9.999310956348682073e-04,-6.807345893729329495e-03,1.356908412970839585e-03,1.146859035819499696e-02,5.392534381160084440e-03,-6.968002711352313692e-04,3.869094912614532995e-03,-6.298190960083202561e-03,3.645629680596950816e-03,-6.622828134537525960e-03,8.592651898234751964e-03,2.179703383900681926e-03,1.088667464068046120e-03,4.115110882579472128e-03,-3.952257236853042206e-03,-2.932682278457324297e-03,-4.250158787402539412e-03,-4.931495355063965358e-03,-2.999925372751677255e-03,3.713756094742616445e-03,3.017302703068374271e-03,3.533082112582149222e-03,-3.036478297135305517e-03,-2.224681046330629058e-03,2.887002816386483647e-03,2.087836648038195186e-03,-8.125830092965802276e-03,9.303612642272967182e-04,-1.732383667515843047e-03,9.543571021761140458e-03,2.042968410158314905e-03,1.593685830263002296e-03,-1.302869234840287223e-02,4.136603159897629840e-03,4.567363471934785695e-04,2.135769512824564533e-03,1.271874758648336521e-03,6.461664968423359509e-03,-4.249577859581397422e-03,4.845669037736898173e-03,-4.739774935960751920e-04,-6.783924288415414151e-03,-2.051180082876667330e-04,-2.054911023622523619e-03,6.918656046139863222e-03,-4.426989537736992185e-03,-1.784791205874685987e-03,-4.529217389620126297e-04,6.791144261787300865e-04,8.759979625426603939e-03,-2.940781125029762102e-03,4.784609925520937231e-03,7.575560431959735622e-03,4.425452033092932549e-04,6.686238105472114279e-03,-1.575870682609066108e-03,-2.353923564110753288e-03,2.441309830104929253e-03,2.406091241874401109e-03,-2.622101312746013494e-03,-2.805751629950713980e-03,7.292455501474093792e-03,4.110816092342364433e-03,1.067793930140683092e-02,-4.417363837247284195e-03,-6.864478475901876313e-03,6.848739645732902434e-03,1.661449229618999985e-03,-1.806550212552444478e-03,-5.862410997834180924e-03,-5.381430080033912855e-04,-1.424964612428934524e-03,-8.841191025162657366e-05 
2.277560649209337418e-03,-3.353520429836376153e-04,6.339663220906363236e-03,-3.193938701284809202e-03,-5.610588936895546074e-03,8.468757725727060412e-03,6.151637274689990970e-03,-8.233230237905881429e-04,1.397912585690967366e-03,4.481171540639936404e-03,-2.074296396453428025e-03,3.796794270424172675e-03,-5.444299153503882882e-03,4.801905114959123359e-03,4.071414828836190415e-03,1.266497936331547128e-03,-4.949598231858722691e-03,-6.641909424532782286e-04,2.978454013670945189e-03,2.697409741239930551e-03,7.868535204080246911e-03,1.921077434041620121e-03,-2.624300827380811850e-03,-4.810199192902322833e-03,6.639807205312994428e-04,-3.402219008215193446e-03,-6.710546397496750318e-04,5.836933466210943861e-03,-2.623535201132023553e-03,-7.528709263632807824e-04,-6.648073365526936847e-03,1.523897195359755468e-03,-1.727578893711488725e-03,7.189839567095917053e-03,-2.994012580827865727e-03,1.122755196837945486e-03,-2.482715676866212849e-04,2.989150243671087094e-03,2.764211465014059302e-03,4.031531750762719010e-03,5.213742629822675292e-03,1.393416610838273595e-03,6.000578402870495980e-03,-1.033338379230230517e-03,-2.230706585803689680e-04,-7.496102250774356940e-05,-2.566970095384337163e-03,6.583386073964200098e-04,8.526112540479450011e-03,2.812784067357102999e-03,-9.515223738015257584e-04,-7.619133005937286408e-03,-2.746357900527623443e-03,-3.279465002437559774e-03,4.810009230508788779e-03,1.705588211986978605e-03,-1.194724496974842445e-03,-1.074256943949893254e-06,-4.934771236873731690e-03,2.517362884979913989e-03,-4.310905480540512189e-03,6.221978827013497151e-03,2.366812972029075975e-03,-4.633908314580322679e-03,4.763440064333965100e-03,3.098562356051422738e-03,3.467015992495137665e-03,-5.106980105286178760e-03,-1.260380829188455492e-03,-7.290496438543191821e-03,-4.064155963520397000e-03,2.874936014309234164e-03,4.585142408344046461e-03,-3.525297188444317595e-03,5.751952655569583274e-03,3.519792156571777565e-03,-2.084833768999959543e-03,2.542588054645826090e-04,7.352102478945647147e-03,2.245536547623370929e-03,-3.045011863641130211e-03,1.334451724167899211e-03,2.331253708662208921e-03,2.081623607902466106e-03,-5.390063836410053690e-03,9.537060561266356941e-04,8.937469379217634693e-03,2.463737221960758715e-04,5.563936787826657872e-04,-1.249538126410153465e-04,-5.454993150636676738e-03,-2.201486998125219956e-03,-2.503674426624075914e-03,6.308021942229279436e-03,5.646951298999922791e-03,-5.707327488043304549e-05,5.357430870662625161e-03,-7.281238144359321145e-03,-3.567783408269139134e-03,5.929929403673603674e-03,-8.076877011789398986e-03,-7.353179770157395809e-03,-2.071496436325865557e-03,-5.124248194827306282e-04,-1.598233624521945562e-03,1.164181864113680506e-02,-4.522436485638431716e-03,1.412610335215648116e-03,4.028302702440206794e-03,4.403514043826222031e-03,7.117718703869311886e-04,-5.066590174636006333e-03,6.712050289300180170e-04,1.169005177276995471e-03,5.385643180829893448e-03,-1.838259859557361776e-03,-8.735880335177382931e-04,-6.049283988908061259e-03,-5.131466761558099027e-03,-2.810483215454121463e-03,4.180300389762790260e-04,-2.542187567069759645e-03,4.053229083204654311e-03,4.812246880823473245e-03,-2.427679627871956979e-03,3.683292057473078933e-04,2.937562950001742102e-03,2.910507526425886067e-03,4.532227604107092259e-03,-2.871220921927618948e-04,-7.836094643703057916e-03,8.629973509525966325e-04,2.781827141734678961e-04,-1.956675786171340869e-03,4.483336540246974852e-03,5.943453369306457394e-03,-5.435730280218510450e-03,1.082197794679421420e-02,1.372803746125193227e-03,-4.1899453537878
59726e-03,-3.729747146505183256e-04,5.588318228677047021e-03,-1.278139431678015350e-03,-4.274131513826643150e-03,8.212467705429710510e-03,2.207125085260670371e-04,-2.978733416165279223e-03,8.081898166576086812e-03,-1.973162829686200478e-03,8.668363156086827601e-03,-3.305237876399479938e-04,7.172513789742895798e-04,-5.867320822247903966e-03,-8.336850897439079464e-03,-1.345966860933407987e-03,6.446241385944647892e-03,7.605006566219102947e-03,-3.400367480317195365e-03,-3.405187721120895368e-04,-6.083991680134135174e-03,1.112689453501826192e-03,6.961882194148341846e-05,2.353667878767325659e-03,-7.223766866506648940e-03,-1.147455048607627127e-02,4.852985416325013832e-04,-3.275315877859324293e-03,-3.333611748517952261e-03,6.051233772555475801e-03,1.730739584226048417e-03,-4.280049377117145851e-03,3.741595979439786990e-03,-1.203748345106462215e-03,-5.749724761694984324e-03,-2.350296082095043419e-03,-2.481981436854565400e-03,-3.345866425661941316e-03,-1.851756776815569744e-03,-6.374404239266544259e-03,-4.461099317542784222e-03,7.714396834867336388e-04,-1.572178264383722139e-04,-1.109513299087424611e-02,1.208329044516624588e-02,5.674743494038651456e-03,1.170427124239744937e-03,1.419845873444159322e-03,2.803622538235194667e-03,7.270874982873781582e-03,9.621575394094939023e-04,3.477546272084849964e-04,4.261841971813840058e-03,8.161746624160139477e-03,2.929473430992695672e-03,-2.700344912537738437e-03,-4.934453178337403975e-03,-4.137666033611890094e-03,2.670699888125853145e-03,-5.617644027017155853e-04,-1.297754437949973300e-03,-1.575589605093162060e-03,-1.934007914507870646e-03,-2.592533565738442802e-03,-2.505489865011355146e-03,1.271952828720943453e-03,2.819157663394058957e-03,9.665834222864030554e-05,3.557777802959016691e-04,-1.864638558575736071e-03,7.341758338662463836e-03,1.700709881223534746e-04,2.907671903306173044e-03,3.094819531148508811e-03,-3.816304055786250583e-03,4.191814639846248337e-03,-3.303065558357942666e-04,-4.878151404695640447e-04,-3.829066350338701952e-03,-7.719396609723477333e-03,4.697763065571593927e-03,-2.269412142904243725e-03,-2.760272246979981960e-04,2.243279280997149527e-03,-7.035612501599524402e-04,2.652366401704613335e-03,1.894689244586454137e-03,1.795152040644993393e-03,-9.818613619611929488e-05,1.531119817782817649e-03,3.384501575764873975e-03,-6.788811991828342177e-03,1.113090075409402073e-02,1.076535336110666418e-03,9.911247460268847106e-04,-4.738644701899716490e-03,4.527029476718898854e-03,4.689919367572142721e-03,4.640373493373504965e-03,5.227047669090543027e-04,-7.798350694241161854e-03,4.588776810711305519e-03,-1.862574506819265679e-03,-2.325371846349396070e-03,1.119306390442542046e-03,-4.367464021561577765e-03,-2.530848372249600107e-03,5.952144300020293689e-03,-1.579090305935409175e-03,-2.174330252515341462e-03,-9.837360391647302985e-03,4.289055179141144578e-03,2.854949490588859756e-03,-1.910857384618336926e-03,1.151499287673927487e-02,9.195334078636269257e-04,-2.886498269661393404e-03,9.328492026963760078e-03,-4.188398489006046020e-03,-4.487129528957066807e-03,1.776565475343784648e-03,-3.765467282884473865e-03,3.412636933521639472e-03,-9.535492854036962691e-04,-4.362617763722122059e-03,9.984138992439610567e-04,-6.388324104894166399e-04,9.456963197965332149e-03,5.793026010930427084e-04,-8.911300066276347078e-04,-9.848405214374369998e-03,-3.240850280562439147e-03,8.986199922868663703e-03,-6.037728807442866059e-03,-6.925545483789766893e-03,3.379062173511410919e-03,6.749241152254017995e-03,5.841205946251396388e-04,3.386933157047738920e-03,6.029527853523559792e-03,-
3.424581373084201446e-03,-4.578084366863736931e-03,-6.378546142412871989e-03,1.603043809599601945e-03,-5.410071659196783117e-04,5.534586417274714974e-03,-8.785090744704219037e-03,-6.568419741766701701e-03,4.510158207845220424e-03,-6.656093042318980864e-04,-2.686321019075192602e-05,-3.577854174307470037e-03,-4.765242049904889511e-03,-2.564870719664645929e-03,7.043741856994699228e-04,-1.095475760603272338e-04,1.680779327817437125e-03,3.997730163184624354e-03,-5.822622392879999538e-04,5.106344525698120959e-03,-3.961456467063557453e-03,8.939781507699777127e-03,1.520071627428443845e-03,-9.862269382095564720e-04,6.787824308250624272e-03,3.461301895008038997e-03,5.356653163403310262e-03,7.989689521284291246e-03,-7.396745548816151955e-03,4.597018578617270874e-03,-7.541917299666427502e-03,-7.492220409446557344e-03,2.698300907024921581e-03,1.253709656815823288e-03,-8.283132285378201880e-04,-9.115397334332715068e-03,-1.002341052075101085e-03,-7.421745341680664222e-03,6.146936045092917661e-03,-4.742420064427417190e-03,-1.584384965739111606e-03,-7.507792477977189190e-03,1.064233468763343092e-02,7.852416466394960062e-03,-1.810810462977215747e-03,-3.412325346506486171e-03,2.548326362075368449e-03,-2.006384437746967092e-03,-1.614008137711325742e-03,7.365573284149132000e-04,8.951329089691128713e-03,4.217487899900511546e-03,2.266378804099434922e-03,-1.496255303364904532e-03,1.180644230276730417e-03,1.004051792195633126e-02,1.211335309154016205e-03,-6.246037997789381790e-03,8.608402744257212524e-03,-3.306608874403137947e-03,1.996556357012144699e-03,-5.496472122032145316e-03,-3.302915997711837210e-03,-2.993713030915416554e-04,-9.215084019837069609e-03,-4.968493829902873894e-03,2.428449874910399396e-03,4.788926537479908443e-03,-9.934412937470228286e-03,-1.680105519766766182e-03,-2.282674851339837763e-03,-2.577156045597775707e-03,5.695631312787041538e-03,-9.110635964849319104e-03,-2.021108246283914896e-03,6.348660565888702111e-03,1.599248432909129497e-03,-4.415861599082210472e-04,3.245858370231480142e-04,1.029627266779854257e-02,2.713735262075656855e-03,-1.813630383636815561e-03,2.909289451079101387e-03,-2.633457567821607514e-03,1.766843775825235364e-03,-4.216519793955960174e-03,-2.451318542932420883e-03,-4.772956440267047719e-03,-2.157355880774641712e-03,1.208681173623277075e-03,4.675979225143177785e-03,1.786948341399923314e-03,-1.480017310059539596e-03,-1.466938600742409907e-03,-3.332214328921900034e-04,1.080654554247438906e-02,-4.627379580013261473e-05,-5.736929371500194133e-06,6.777065367219221696e-03,-2.571339898852955785e-03,1.579017785364235068e-03,-7.727970090316125043e-03,1.710433573906879404e-03,5.521085964201529611e-03,7.609342055064518759e-03,9.577403214999800907e-03,2.255856994253100083e-03,-3.633833351688074004e-03,-2.706069867577297295e-04,-6.059858152980220070e-03,-6.635176097445898147e-03,-4.950814216671594291e-03,-3.012765404272659334e-03,-1.054466435723961336e-02,2.533269874133212314e-03,-3.171954057499190571e-04,-6.319640035209739271e-03,-4.394024932416136281e-05,-2.321032747122024186e-03,9.545433155743931943e-04,-4.391869413434594353e-03 
-1.640019654324926211e-03,1.954438056854401144e-03,5.136775716784823011e-03,-5.141309258552794999e-04,7.716368482325946035e-03,1.636828685422388446e-03,6.440074520842929175e-03,-2.996259360129467295e-03,3.378630564850961665e-03,-1.259415663569569141e-03,-5.700347472015568915e-04,2.257573191145147585e-03,-6.134388885173140682e-03,5.698344759226344923e-04,-1.346765841389598387e-03,-1.002989666299072304e-02,-8.042339825673799208e-03,7.807806847854517748e-04,1.072643582832650232e-02,3.729060705935455244e-03,4.715913407915064970e-03,2.046599012668388114e-03,-3.327288374336896844e-04,-7.347824594555242041e-03,-1.886914991441356049e-03,3.102901007110291445e-03,6.229155430087989853e-04,3.022097528579952008e-03,-9.398675523564605297e-05,-1.394752159900027435e-03,1.700341594589011398e-04,1.767602212108964135e-03,6.848147805219952956e-03,5.106174900520808746e-03,-1.958028577237285427e-03,-9.193561537278856222e-03,4.749053715376406158e-03,-6.503389718126159277e-04,8.183971508730698147e-03,-5.532551496122104472e-03,7.584353684170591116e-03,-3.309247262150802772e-03,-8.010921251418523861e-03,1.387682326083809758e-02,1.947721749658795458e-03,4.256894515329461177e-03,1.378755754114385475e-03,2.807395752503326000e-03,-6.423880985481945916e-03,2.564918784339148550e-03,-2.156454856133080151e-03,-2.021712702997627720e-03,2.335689917568582047e-03,7.971502806903462551e-03,-2.225580786485244125e-03,-3.421822928696340894e-03,-4.446180941443337990e-04,-6.753969804950446867e-03,3.035763052029448808e-03,1.448455186158934373e-03,-9.568038808693706682e-03,4.887201595142198415e-03,-1.097383944702585426e-03,-5.385986107082423383e-03,-2.029534975033063342e-03,-1.367682107708707677e-03,-6.471251645939392761e-05,-1.997845173361696355e-03,-7.033668700255492345e-03,6.034198383133736708e-03,-6.966540004273959023e-03,8.395140167388437033e-03,-5.014870787912445622e-03,-6.083508365649013296e-03,-9.060746471567113431e-04,-1.367965571559426807e-03,2.787726288056322638e-03,8.137446061791798677e-03,-1.537354223024725098e-03,-6.002815215833294700e-03,3.628323778508633132e-03,2.424390649731974611e-03,3.933971809571629001e-03,-4.748682701959126840e-03,4.585459627293616419e-03,-1.889632495260188793e-03,-1.153536148141510141e-02,2.998755862022301714e-03,1.216081776316610538e-03,-3.017400881337397103e-03,5.553536851321248281e-03,-9.734358835337174679e-04,-1.112850624333325614e-02,1.801505154951340290e-03,2.687713982871238819e-03,1.973374731352114247e-03,1.227933056899865307e-03,3.444867996444363114e-03,1.913223163533962334e-03,1.374840729709947635e-03,-1.879534487976379552e-03,1.826800234828314948e-03,-9.418418238174302778e-03,7.805732124326650659e-03,-2.185704492872380614e-03,-1.266002333016302264e-03,-8.182524218458843944e-04,-1.474867989894693490e-03,-3.541036653904554646e-03,-5.934328425380316907e-03,-2.448516508191399309e-04,-1.287766566263880922e-03,5.532166798329587153e-03,4.954098518517788263e-03,8.730705104721981150e-03,-6.579134543644595857e-03,-2.763084205347865734e-03,-8.012618302776899903e-03,9.830906110864344113e-03,4.115567174387164094e-03,7.477273056905685451e-03,6.350958503838752267e-03,-6.507696842762953487e-03,4.805405347802372412e-03,-2.676848609802731540e-04,3.781637888140067152e-03,-4.099268187750583839e-03,-1.677416099435910285e-04,5.972791002176179890e-04,3.432003216704305418e-03,-3.368164224351156023e-03,-1.751992375506470495e-03,5.944618214054126458e-03,-9.693266293753071696e-05,9.956241286758107500e-04,9.106070293970701043e-03,-4.484762681350795005e-03,2.464442673578605934e-03,3.857535586426794321e-03,-1.93694806
5408933880e-03,-6.179163721251162414e-04,8.650280983177858346e-03,-3.448329167339013717e-03,7.019316523521924867e-04,-2.140492076248870414e-03,5.952156166433348987e-03,-2.896553047421028971e-03,-4.092879018007773879e-03,4.174311647204645773e-03,-4.474889890640936993e-03,-1.275187387419811878e-02,5.036428551277723036e-03,1.560673886649311098e-03,-2.251505855418011164e-03,1.001179329180721701e-02,-3.109612068150459981e-05,4.219835863644106656e-03,-8.194948225442031439e-04,4.024273244838065706e-03,2.244572731767711167e-04,-2.364838249458716020e-03,2.721768567972423711e-03,-3.866660575870299859e-03,4.539968940534357990e-04,-6.694282627322144935e-03,-2.822152031554711182e-03,-1.770362119523801814e-03,-1.237022533770550927e-03,2.612859010067292848e-03,-2.649535836742959380e-03,-9.423148591605787910e-03,-2.058503635387406040e-03,3.429718150205691637e-03,-4.811346547796678010e-03,-2.922793111530514343e-03,-3.910424899857635798e-04,2.096652419031461937e-03,-1.081602379454523009e-03,5.277301853917690765e-03,7.910097018114627468e-03,8.229232498015518968e-03,1.054500362234777298e-02,-2.540310570935837720e-03,-1.214516398010833179e-03,-1.085192394301594861e-03,-7.943427285895643397e-03,4.061998558253526563e-03,5.629783976266095058e-03,-4.386624286504539269e-04,-3.017579990169199505e-03,1.621409177071476927e-03,-4.489056378076736534e-03,-8.672332351602543565e-03,8.411215607507093340e-04,1.697922370768033609e-02,-8.987713666606734694e-03,-6.821239165993279660e-03,4.989892811512215177e-03,5.462288466701614893e-03,4.213953672270153057e-03,-7.026461229140577511e-03,-2.837778344827378459e-03,-1.927824824783442139e-03,9.176844257750135445e-03,-2.435487467349871653e-04,9.526312309203015943e-03,-2.702422671529134172e-04,1.968595290854969516e-03,8.452519824626013659e-03,-5.736887277952064119e-04,-3.548759775380500546e-03,7.014131357165348350e-03,4.364859479185545857e-03,-1.701853847405904478e-03,-7.312862747652130145e-04,5.211153012503789723e-04,-1.184864208911688585e-04,8.953331265898555047e-03,4.906860536081024057e-03,-4.100647184395594404e-04,8.493807033278030641e-04,-4.968124377969691981e-03,-1.844643501698574588e-03,4.968664686949739520e-03,3.169992670560141650e-03,6.982452275578997225e-04,-1.291583675285941759e-03,-5.847986040258082788e-03,3.653678478627785022e-03,1.226552342161393459e-04,-1.639631353840898323e-03,-6.676347045795627717e-03,-2.866296050434880179e-03,-2.108206130061350200e-03,-8.111909474794915019e-04,-7.248307011764110793e-04,2.026434360796737673e-03,-7.821240738616931643e-03,4.265544996783150397e-03,6.169444713196710513e-03,-1.465689582157262989e-03,6.867396861703155449e-03,3.325357860870147768e-04,-4.520328273417902358e-04,5.353732523420297491e-03,3.972089098727372460e-03,-2.086105064455057601e-03,-3.271785906877713911e-04,9.124842598093503305e-04,4.600078936482069475e-03,-8.763340501947382635e-03,-1.229734703893865192e-03,-2.421076076976535693e-03,-4.899947815551445804e-03,-5.503170924320188902e-03,5.594593671192220861e-03,-5.917036338948619689e-03,-2.377843630491875866e-03,-4.181751876472801499e-03,-2.018028327306962185e-03,7.035975378523672344e-03,5.090987372611661087e-03,5.068664435741068296e-04,-4.901679791969520495e-04,2.478157749442824135e-03,4.293790851467185407e-03,3.075122927225672556e-03,6.769959769565229123e-03,3.303864631486786579e-03,1.853999701137640623e-03,-5.631938447259687798e-04,-2.173284993432585913e-03,3.950240311655514591e-03,-8.021095644491316873e-03,-8.674241442970026181e-03,7.720450498655181612e-03,-5.855099377703412060e-05,2.380415304752762397e-03,8.75448100031506
1066e-04,-2.210784239894714696e-03,-2.924066384077181422e-03,7.865201968164338897e-04,4.077852559634021282e-03,1.150698165080094254e-03,-7.140678760343001363e-03,-4.759158098116864771e-03,3.667725101181931908e-03,-3.630985815848044164e-05,-2.775303428397135272e-03,-2.044300348997408860e-03,7.010945962241241745e-03,-4.984891760174916045e-03,-2.314117184212542873e-03,-1.406728432203583571e-03,5.898980821758471015e-03,6.706094797566637851e-03,1.670141720795088967e-03,-3.735966497865588886e-03,-5.021105682690528774e-03,-2.242160775613067248e-03,2.612941859244914938e-03,4.681534061283813310e-03,-3.595612952219515352e-04,-2.478626560165727438e-03,2.955614304164925522e-04,-3.559246523348689756e-03,-6.219857898249033824e-03,2.713800669340044481e-03,3.129141071367243926e-03,1.153422976330402677e-03,6.389457318862478384e-03,-5.099300278995792801e-03,-2.068836786743827425e-04,-3.503908138312725445e-03,-6.380089316510144729e-03,-2.165711067438375916e-03,5.733101061514613711e-03,-8.222213375198635099e-03,-1.035665795398263943e-02,1.022775609493988122e-03,1.748887967287595963e-04,-6.552810926314961860e-04,-9.014848729919085028e-04,-2.411862868333904247e-05,2.767585198661815220e-03,2.500567782855753294e-03,4.376022391053354006e-03,6.060488009176259738e-03,2.768685663751000673e-03,-4.473168914000599621e-03,-3.041874769152379124e-04,-7.322488117040048920e-03,-3.931057793883459793e-03,8.608031852230198233e-03,4.774563890578804176e-03,-2.971715917230126260e-03,-1.356731554693378985e-02,-3.448479160967226047e-03,5.265531715942508781e-03,-2.874450461619426026e-03,-4.555376356017664871e-03,-2.691298386558298346e-03,3.183092334675605584e-03,3.258072756588758319e-04,4.006489403870157383e-04,1.997899739571608844e-03,9.126003638720649224e-04,4.959983647636855009e-03,4.017671109859308177e-04,4.093199127171026803e-03,7.511952443983025383e-03,-3.378096867456505635e-03,-1.653662503613945005e-03,-3.084132303883624273e-03,5.156586365913364174e-03,-5.625312888914713039e-03,-2.716110504309554065e-03,-2.589444863663262642e-03,-6.807610046840629862e-03,-3.156805822570839075e-03,-5.219895595359133091e-03,-4.018166378286804757e-03,6.288272349514989999e-03,-6.729694565927474628e-03,-4.617953175268294831e-03,3.946386383388239636e-04,-6.043260953656037771e-03,-2.414774868994273307e-04,4.861063305705884754e-03,-1.205336056827408225e-03,-2.936434980357607513e-03,-4.221523943955390011e-03,4.748674691834374982e-03,5.326941534301780738e-04,-4.077194698772570319e-03,-8.312266512554745837e-04,1.260192218484832913e-03,3.580833439467317417e-03,-3.959297612341898184e-03,5.321780953634819896e-04,4.193315728048183891e-04,-4.277060859983129043e-03,-8.806662233027227546e-03,7.165838847701050743e-03,-2.961274559569342536e-03,-4.281203347395111727e-03,2.402777851619443893e-04,-2.548697388257628246e-03,-2.353992870311446238e-03,3.418049348972678706e-03,6.280777038212447504e-03,-1.100906726362898668e-03,4.129157434175226640e-03,2.278652412261006517e-03,3.818457298254516606e-04,3.112511801789042375e-03,-7.773981174882180906e-03,4.965133457104788962e-03,5.821838846976841279e-03,-1.097832243702132581e-02 
-1.741089841612598974e-03,7.273075765763207558e-03,2.258575208695328025e-03,6.437476062134789637e-03,-2.345728410611569209e-04,-3.380971058577666132e-03,-4.552599852635355604e-03,-1.868121383161959990e-03,1.484665980401901836e-03,-6.043281374113974348e-03,4.156473810446075640e-03,2.656357010870266742e-03,-3.179355705584459811e-03,2.607402387164848342e-03,3.639753836707585507e-03,-3.774093526601892800e-03,3.084150096317862452e-03,4.253710050227112356e-03,3.606431135110589603e-03,2.815171689150306692e-03,-8.812674794455306604e-03,-3.197328676798326733e-03,3.332992192467789001e-03,1.026478889853239644e-02,1.203465518662479201e-03,-9.734164728654652685e-04,2.697737508311134468e-03,9.603390982477505444e-03,-2.130256408521617219e-03,-5.092562290415582338e-03,4.490836761321544655e-03,4.863275109042491419e-03,-1.293070488484511201e-03,4.508763951652998425e-03,-4.345185393581799745e-04,-3.787430831493769287e-03,-5.528493727619553794e-03,-1.124812834185140616e-02,-1.252742671065948518e-03,7.152883897303849783e-03,8.316653095761924672e-04,-5.591077157116007557e-03,-2.443201380522923814e-04,-1.174439831459985102e-02,-7.553170658999558537e-05,-6.013635615926131960e-03,-2.744059585562756351e-03,-2.983372339135364950e-03,-3.375047781919563328e-03,3.031379694542432092e-03,3.099420085150912663e-03,2.256366675731809313e-04,9.081751651256454033e-03,2.272278774364121842e-03,-2.379165901385670457e-03,-6.576845386918343285e-03,-4.023284035063407274e-03,-2.093913282701492188e-03,2.672425166679307049e-03,-1.541825913616243346e-03,-1.030800071508207027e-03,-4.133465297371821841e-03,-3.684952208377537900e-03,5.701032418310486918e-03,-7.444819293276908057e-03,-3.169050909925738135e-03,4.570969343578343974e-03,-4.759398651239921986e-03,-2.771819214207459948e-03,3.161533327177582561e-03,6.108771083621415167e-03,-3.123070489485473781e-03,7.205544368157222948e-03,-9.440528041084412028e-03,-2.672746936454742026e-03,8.362212408217658405e-03,8.321435457269902788e-03,6.704290841652916534e-03,-5.344571251659994277e-03,-5.458353058253795874e-03,-3.053396465182472291e-03,6.362163042433921934e-04,-7.936938297461971006e-03,-6.654081408966977529e-03,3.751867978106244745e-03,-2.389144655927516340e-06,1.010705033979654791e-03,5.768853723534350490e-03,9.266633683450952981e-04,-4.326978095002658072e-03,-2.302303187974482669e-03,8.279790864243387772e-04,-1.208834887947209807e-02,-2.184199031121963246e-04,4.954741551457129446e-03,5.108336990326471649e-03,4.918129314513417297e-03,-1.181757952167424713e-02,5.257310446430719930e-03,3.631662084737313865e-03,-2.483332171261177970e-03,-8.327765374795943407e-03,3.945518877563137031e-03,-3.009842977568412924e-03,-2.571092544437989132e-03,9.755676847356689607e-03,3.903711334151317865e-04,3.028005754668499847e-03,-4.688248524841307462e-03,-5.988326941206309484e-03,4.884538013028609342e-03,-1.682122849231649283e-03,-6.837609045510811834e-03,-6.684720513240232980e-04,4.165068370581349530e-03,-5.469961523496428245e-03,2.183359532789302861e-03,-6.143651316558434820e-03,-3.108012650396719829e-03,6.014312613450313073e-03,8.455424449053295863e-03,-5.902033893993053086e-04,-5.634344871269065039e-03,-6.953908961411067909e-04,-2.553825051531510414e-04,2.168256972701413689e-03,2.187161244676041980e-03,5.017269307183946231e-03,1.288752930241995409e-03,3.046428549966901343e-03,-1.551838449073938237e-03,6.864527736952278324e-03,3.383151219970942977e-03,-7.672658475688141390e-03,5.077888436169812093e-03,1.163964228063434428e-03,-4.365183646569218215e-03,-2.704505992919269651e-03,-2.733835348259343236e-03,-3.549
199267980902503e-03,-8.865360904555089116e-04,3.457843680238058964e-03,1.062949411027871646e-02,3.256097699437013413e-03,-1.697048957533985958e-03,1.579322204565885906e-03,-9.266786191064909067e-04,8.788717532741265742e-03,-9.407707056091148615e-03,-9.085387637279576656e-03,6.561940880456287569e-04,4.722061491623973789e-03,-5.556834445566140486e-03,-1.244544243792403788e-03,-4.493574857068426825e-03,-5.584085590517834201e-04,5.102617991624014285e-04,-8.559279103734414654e-03,-4.598986210638538949e-03,5.711324512931991414e-03,7.696335090178936696e-03,2.332525707080257849e-03,-2.809099785035482164e-03,1.329110581914118338e-03,2.456725580214275378e-03,-6.938157093672313314e-03,5.071423918345318443e-03,-5.632859324435141772e-04,6.628695396265833935e-03,-1.158671721660704292e-03,-1.430283433955582481e-03,4.850210420364267465e-03,-4.824543587663900220e-04,-3.962517872261115369e-03,-6.460386834682061569e-03,5.640959985958647269e-03,-7.627760549520577402e-03,-5.660793627109813632e-03,-7.277178750947203324e-05,-1.720933579257662247e-03,-8.519185195454179182e-03,-4.411967246641591844e-03,-5.153719062068132933e-03,2.534001864355235525e-03,-5.984696878485514654e-03,-8.744625784417319955e-03,-5.768377698454433275e-04,-4.701584374140510211e-03,1.313209933494939065e-02,9.893620873341558899e-04,2.411124090622138866e-03,1.237184267325845688e-02,9.204785032997063465e-03,-2.753412303281258400e-03,3.591340055229094025e-03,2.986065278788228167e-03,2.755933944641677721e-03,1.499311750059809751e-03,-4.235045074019406705e-03,8.447848682890377048e-03,-1.112790604514308189e-02,1.947928198508678058e-04,7.304285699697238505e-04,2.905223454982710975e-03,-2.922534969523818936e-03,1.074244457125447111e-02,6.512692179697517764e-03,-1.350747308964769583e-03,-4.515980325221494000e-03,1.310257495954528532e-03,4.482625853579881754e-03,2.479884677861918414e-03,-2.326523481559864947e-03,8.920878285431997623e-03,9.826143736314995811e-04,4.213858135552176268e-03,-3.496038698041810909e-03,3.767517648091103961e-03,2.938650167626070692e-03,2.775233863255603065e-03,8.533599412180114044e-03,6.854698386414038135e-03,-1.572948812098479837e-03,3.328710167039614261e-03,6.708240808040112114e-03,-2.522765286438232672e-03,9.547564934031001552e-03,7.498384271627449944e-05,5.309382435213947857e-03,5.913826591793703950e-03,8.592329714593281579e-04,2.839598684478825217e-03,-1.018452952826811794e-03,-4.621223573906926146e-03,-8.049187461249556960e-03,-2.056189440236178174e-03,-4.375019565456750824e-03,-6.215690312686216606e-03,8.252516021832615248e-03,-3.560503332970264227e-03,4.397322225855593292e-03,3.811496479675084096e-03,1.002574244892222386e-03,2.310354821082428317e-03,-3.526273557116246272e-03,4.095428992456995950e-04,-3.506670532131795585e-03,-2.233283784486748535e-03,-5.122557038181901022e-03,9.836325535055144217e-03,-3.029707805415448717e-03,5.692435634815699169e-03,-5.250264092886116479e-03,1.717722244672030118e-03,-4.493476082172440447e-03,-9.755869215033389878e-04,3.286585456883400243e-05,-2.646188613195569247e-03,-2.597758351355893523e-03,-4.513085586685185689e-03,-1.512297925989163072e-03,8.389535883112123407e-03,-1.072291807365908459e-03,1.373758518929766659e-03,1.416783735963121973e-02,-1.861591496040289036e-03,4.624811956639049600e-03,5.490817958695952752e-03,2.799813883287592870e-04,-4.975425263061365559e-03,1.921629673205550119e-03,5.675703719174977208e-03,1.093860068168896194e-02,1.375065287699891656e-03,6.895945549687796695e-03,3.989552688781750870e-04,-5.624447009820307194e-03,3.193165809457419908e-03,6.742126498644873851e-
03,-7.224865687239240795e-03,-5.733748728342054519e-03,-6.423831262260647082e-04,6.924736424225261602e-03,1.255503639909768231e-03,7.400019247014420938e-03,-3.573729186852231052e-03,1.010795961651832480e-03,1.032523598521638622e-02,2.840699466117882488e-03,-4.248972432645104803e-03,-1.340406745865467888e-02,1.440218459156880279e-02,5.323833041289568156e-04,-1.127797754584162170e-02,-1.867835100567529980e-03,4.434689191219760851e-03,2.209883815958764691e-03,7.247084199700134892e-03,-5.691086858171648909e-03,2.876488449687195862e-03,-5.650765441235586564e-03,-3.683809786155629006e-03,-1.996480838352558511e-03,2.230261180219999390e-03,8.012772581038404493e-03,-2.702461281299873772e-03,-1.347685404485939259e-03,5.259028259461925696e-03,3.215948484763383130e-04,-7.184449243414504568e-03,6.441139121572347047e-03,5.079539408855101787e-03,-1.057454023791855498e-02,8.225295674138705451e-03,-6.542321488661681516e-05,8.422010466949404446e-04,-2.496521149345537482e-03,-5.058221438019249241e-03,-6.727882146190335144e-03,-2.754711564090199863e-03,-7.268351980137789875e-04,-2.158207559296363657e-03,-8.576861033613530222e-03,1.072791979322503669e-02,5.016187083554441220e-03,-1.222229616412411589e-03,7.106146571430771441e-03,-6.689208027786996101e-03,-1.327199259242009847e-03,5.701420729757443410e-06,7.842375032090753402e-04,-1.137560433419409437e-03,3.243269132989451929e-03,4.721486692114779983e-03,9.680548874733664816e-04,4.915326575631462878e-03,-4.797718521560089683e-03,1.521084089906225064e-03,-3.797642309070403330e-03,2.992120623889783608e-03,-1.470812613724528390e-03,-1.316606659166977842e-03,-4.054527716121314636e-03,-6.205394970115072836e-03,-1.773701500170836096e-03,3.033648797463467991e-03,1.149839084205437310e-02,3.204627657315314848e-03,5.924064477814798731e-03,9.450781320032811128e-03,3.130853181206978957e-03,8.655618964602962298e-04,1.312485918827744312e-03,-6.909793152531844805e-03,2.305719995576814042e-03,3.427897093250301640e-03,-2.336269829754841743e-04,6.122815233409386639e-03,1.972140523536571131e-03,1.515625982620153042e-03,3.744757075890248112e-03,8.852890362420305984e-03,-7.582939023094747814e-03,-1.862417559600905220e-03,-6.003994156697465244e-03,3.300586253060878127e-03,-9.711674712826703915e-03,4.634265763769094236e-03,-5.712218803138130463e-03,5.935679015498764727e-03,-4.292138265066406354e-03,-1.216413563430799151e-03,-1.087936149075249392e-03,3.154600495994770238e-04,2.338534419297988443e-03,-1.337456853754185942e-03,7.862065536123141003e-04,8.389100421245843792e-03,3.385169907162150459e-03,-6.284711241297536756e-03,-6.416144044664843799e-04,-1.182547138843368165e-02,4.388800284749968389e-03,-7.504505620089816156e-03,5.104826768985711721e-03,-4.059150448146242422e-03,5.233774341297236198e-03,-9.990015477705702345e-04,-9.632083742849730998e-03,1.221601061632536164e-02,-4.413998264278871063e-03,7.154939918077712077e-04,1.267875888698199571e-03,8.099879800521174456e-03,-6.143415645746506124e-04,9.909418154126915790e-03,3.915237009302569363e-03,1.127430821690038722e-03,-3.765231990426639425e-04,2.672013751051626699e-03 
-2.020254323076803565e-03,2.689145762306810981e-03,-1.046370631432788677e-03,-5.722197443679997303e-03,3.273469051519643791e-03,8.772096822009349537e-03,-4.823677759683863732e-04,-3.078923142424432502e-04,2.454937072122267711e-03,-1.226613993827728542e-03,1.471854778257640495e-02,-4.669648218591108089e-03,-6.767324520705555839e-03,-2.118810814391264979e-03,4.946829611560878137e-03,1.057319311508772910e-03,-1.569829040878769136e-03,5.242656327855916946e-03,2.702729714363054023e-03,8.802876387752079854e-03,-1.219872678489160138e-03,1.552636710108272131e-02,2.093188942743893968e-03,-9.262132197016985369e-03,3.363360709116406354e-03,-1.213880906358059866e-03,5.140141678095158997e-03,-7.227442719186605823e-03,6.912864619627298590e-03,2.359069391426911813e-03,6.704871478715750469e-03,-3.903383905440559187e-03,-1.040378471346260422e-03,-7.471483393610502165e-04,1.872007114255163675e-03,-1.652286589912989323e-04,-1.203432458030545375e-02,1.438903200262389194e-03,4.115424620207443901e-03,-4.333468365111726485e-03,-1.322924977258969339e-03,3.718012790601493266e-03,-7.822474269602235769e-03,-8.745802998249589330e-03,3.908284908708406849e-03,-3.114395421888322291e-03,-6.502625785015810761e-03,2.239897889156734594e-03,1.449658489722889998e-03,6.378432432519686585e-03,2.667986305765025073e-03,-1.379032921367522946e-03,4.155729604671624770e-03,2.190667272155177175e-03,-3.380119452736825465e-03,-4.343725136463086160e-03,-9.469467752839348662e-04,1.683067997022528323e-02,5.435834501654217797e-03,1.956341986902993685e-03,-4.811601298271453962e-03,-4.264884134852438280e-03,-6.090636998372820146e-03,5.232141961961471466e-03,6.604351922567427459e-03,-1.743633002619415610e-03,2.802859621263594815e-03,-2.119062498557521771e-03,-1.930899411934786710e-04,1.643044869312263595e-02,-2.520076112260341214e-03,-1.666246115736513822e-03,7.080063234050915089e-03,-3.141842325941016090e-03,-2.077949824433255113e-03,-9.099567349142818201e-03,3.416314684896276134e-03,7.178997966296110297e-03,-6.296598563835241989e-03,8.544099381467022344e-03,4.369047782514802122e-03,-3.233962385399342162e-03,9.494664051493051024e-03,-1.023964541484825115e-03,1.579569819391581130e-03,4.884334007616040552e-04,-7.175277345854416157e-03,-3.626015535366238982e-03,-7.395735738892775593e-03,1.959482272314226527e-03,4.832344742796598237e-03,4.552945550477717311e-03,-1.746082275551186615e-03,5.531236071415274657e-04,2.265561457835534812e-03,-1.883532939886717328e-03,-4.159957933589870059e-03,-9.729906297157132206e-03,-4.664912393964807183e-03,8.412299971322288045e-03,5.463467971750743954e-04,-5.575457855560581391e-03,6.074313447987260590e-03,-3.329946372920813882e-03,-4.497757551071525015e-03,-7.080482156799952284e-03,4.763883220911574630e-03,-2.073714460846019271e-03,2.414485103863339872e-03,7.392109581785056768e-03,1.004233476710881569e-02,-8.424390633980479576e-03,-3.664520123047428253e-03,-5.098343740918940756e-03,-1.020068438299070275e-03,4.906933986918628893e-03,2.527338289269296409e-04,2.055239176563796073e-03,2.121763504773925817e-03,7.525548957574994952e-03,4.271774148639048002e-03,4.879243810015598121e-03,6.062316609233391052e-03,8.160426007585146625e-04,-3.060590566069718629e-03,-1.863696950621809324e-03,1.290167104091027514e-03,1.864993176133784326e-03,1.352084792328943320e-04,5.107474063465696976e-03,-1.757409113994998478e-03,-3.508168241904880118e-03,6.014916226365151592e-04,-1.063098797256145298e-02,-8.860985312553099763e-03,-5.063613966824555106e-03,4.565789454045420513e-04,4.609124098242719549e-03,6.201510228069429150e-03,-5.0728322391
83854355e-03,-4.690898796580368277e-03,3.923626039858713055e-03,3.997548949036641014e-03,-1.482420653933926025e-03,-3.423309164511569979e-04,1.105069244361993721e-03,-4.926894383828117532e-04,-1.109720133895711813e-03,1.393808767092874533e-03,3.958526379459365377e-03,-9.548347322035560109e-03,3.163102712117193006e-03,4.331300693040569978e-03,-1.045429653009285510e-02,8.022982543470072231e-04,-2.471393156638187778e-03,5.782202804762797375e-03,-1.022736817349429415e-02,3.383425115370321170e-03,8.806242587481349958e-04,-3.625402959677797524e-03,2.915910463123303018e-03,3.465053440783896418e-03,-5.621312722425422272e-03,1.049021867313540933e-02,-6.863972734152554156e-03,-1.690794709964769070e-03,-6.769756449645562791e-04,1.705072702837873372e-03,5.678946233757842764e-03,9.051799977382170986e-03,-1.893181289538716210e-03,-1.095256742733971946e-03,4.062851404204452163e-03,-7.518914592112992124e-03,-7.921957520090114726e-04,-5.743663338301722120e-03,1.984934645771474753e-03,-7.700985069857070250e-03,1.376292239000751578e-03,-1.153855743423338326e-02,2.398460285372928675e-03,2.765851001948251182e-03,7.896317620361234524e-03,5.064979706164259715e-03,2.248877302596895712e-03,6.334354585491819463e-03,3.377409222209758893e-03,-9.443801321787174963e-03,1.410070869758385461e-04,-2.434727130997456617e-03,-5.213199379810101730e-03,-2.174212041684130559e-03,1.009652161677099865e-03,-4.569234851289723969e-04,-7.635351750748494478e-04,-4.326733540470335294e-03,-6.279497108868932844e-05,-1.045961841309960916e-03,-6.338526546617797609e-03,4.774996271474021316e-03,4.760280947862569230e-03,1.139394012203204980e-03,1.854414807965149622e-03,-3.390164762601021592e-03,-2.077303764712147300e-03,4.462090944506508748e-03,7.922567817864843709e-03,7.330842389521360219e-03,1.222647346469833826e-03,6.986661493587167579e-03,6.318275080141699633e-03,-7.154719009390553145e-04,3.780829070382940669e-03,9.320558091742708537e-03,3.317399908536561911e-03,6.808840876668369874e-03,-2.013214308487418919e-03,9.743497914142284274e-04,-2.869209109120006602e-03,-1.250646018207348178e-03,1.203432426017743808e-03,-4.633695746217131664e-03,7.529500786593708523e-04,-6.619758108867260585e-03,1.520564953295643887e-03,-8.313172721882082505e-03,-1.602856554538047151e-03,4.452573796762806374e-03,-4.135804622408465782e-03,1.472622686444670717e-02,2.267103296360544959e-03,-5.042588639606021116e-04,2.319444783133394004e-03,5.943711419324855180e-03,-4.045635525939920638e-03,2.538859915829323266e-04,-9.768387876277517031e-04,4.058992076039036161e-03,1.319598413822068404e-02,1.802552131207056738e-03,5.673698151465920124e-04,-3.868555832615433714e-03,1.671338473140860986e-03,-5.882999702445657064e-03,4.827438674352303900e-03,-3.671538567880895318e-04,2.562378936125681017e-03,2.325158957793574833e-03,-5.830232031549928331e-04,-1.260486499263264743e-03,-6.153032984132687788e-03,-8.539832460026083871e-03,4.304239547950197003e-03,3.259720133734068329e-03,-3.599527922677650798e-03,4.037902208456603005e-03,-2.182156558825705094e-03,4.616160451361987944e-04,-1.743844467260212492e-04,-6.958617381176071016e-03,2.262643099928557562e-03,-7.746276643228819890e-03,5.514094093531186035e-03,-5.941516425405976055e-03,-4.424055281175073330e-03,-6.588016350937613529e-03,2.279653798939988360e-03,3.402058493261334027e-03,-3.544881735167669957e-03,1.561752068052342209e-03,-4.706091656325086213e-03,2.925800138755645097e-03,3.641721291196785976e-03,7.320543981592816246e-03,-6.578828437089452412e-03,2.241009428477495208e-03,-4.641994139507942506e-03,3.726301885195713176e-03,7.4
80694125375346199e-05,-2.470531909125064495e-03,-4.000065825749383498e-03,-1.472931730569480586e-03,1.700831368159377854e-03,2.002908666838474922e-03,4.713969926950028707e-04,7.609135336184996018e-03,2.699721634221290915e-03,4.407568342219939615e-03,3.902448664744681810e-03,-1.976025677449391900e-03,2.842480986211344453e-04,-1.033692472687331235e-03,-5.337334156371898938e-03,-1.770221449549953027e-03,1.248224931532602826e-03,-5.779403558863419749e-03,3.841351012495985160e-04,9.397663976248304962e-04,7.589174365313605501e-03,6.722261153969843160e-03,-5.327445016730754032e-03,7.970691179168596860e-03,-5.052414315668918819e-03,6.298885910164661248e-03,1.008509457074293876e-03,1.212117598390625821e-02,-3.610156917621790834e-03,-1.803640031664188867e-03,-7.262809386774831191e-04,-5.500771168620144543e-03,5.289114902903225201e-04,-7.717891931982249348e-03,-4.276060184144832885e-04,7.408333563369668209e-03,1.759014511199086467e-03,9.553141251351418214e-03,2.085229695458876327e-03,2.668541592238509071e-03,-5.248534985949280564e-03,7.062866007260645319e-03,-4.360802115755002366e-03,-1.633106075485889295e-03,1.402492220986356142e-03,2.504314953620335581e-03,5.257514979775669041e-03,1.411471664326122144e-02,1.072139317351921816e-03,1.109869810564575402e-03,2.530711836116567200e-03,5.170155453800699338e-03,-1.345350264010486364e-03,4.485272991349754240e-03,-6.260833117637988263e-03,2.281148128016255786e-03,-4.055314265789105102e-03,-5.842167588102829129e-03,1.688625522148224202e-03,-5.616079281663312849e-03,-4.647968531282125107e-04,4.207839075404293597e-03,1.869417158181599066e-03,4.099012272931258291e-03,-2.096255476928510133e-03,-2.019466726102988046e-04,3.597894280672407977e-03,7.092939928530140291e-03,4.894939867659765277e-03,-1.330498673135776556e-03,-1.821139707091849747e-03,-1.040979709583828017e-02,-1.230143928803410867e-03,-1.436658607178444784e-04,-6.993207268445041785e-03,1.642122712006194654e-03,2.982046954559072760e-03,3.416767724079934816e-04,6.911279594098315021e-03,3.714102722333972526e-03,-5.388410587949444067e-03,1.411030382273068852e-03,3.376553397904884083e-04,7.661066410293633500e-03,1.467387018216168167e-04,7.764029360556390716e-04,-8.424339240700708803e-03,1.722456425947545223e-04,-1.145104069938858394e-03,8.993302976337495119e-04,7.471440209227141888e-03,1.764773668322326163e-03,-3.737898618458763284e-03,2.735850984527657057e-03,7.993682061809748510e-03,2.882846217399289432e-04,-8.712874251572942392e-04,-4.796634519670116975e-03,4.276999693037852011e-03,-5.798736528823103011e-03,2.983562057860052619e-03,1.340592767773440483e-02,3.647407865425339978e-03,3.971142544966916633e-03,3.774069864090953425e-03,1.171316115575236167e-03,-1.031533977672322890e-02,-5.628704951390815954e-03,8.668516755048424199e-03,1.483959047489620499e-03,-6.621373501710152432e-03,-1.585965527554270395e-03,2.355167752438316973e-03,-6.255166797233768089e-03,-2.269407319696077098e-03,-7.428190775978086354e-04,4.859774205427785758e-03,6.552238725461251127e-03,-5.785288919432081227e-04,-4.365541902706856874e-03,-4.247249150239201104e-03 
-8.094236135902287133e-03,9.793994415915349781e-04,-7.335818967152669609e-03,5.579905738691753186e-03,-3.144987431253646205e-03,1.158062201197866681e-02,-2.866195623022081609e-03,1.447629465225186906e-03,-2.885436259021368927e-03,-1.573880260328717291e-03,3.882145703579759164e-03,2.531587258009676197e-03,-3.801973972385756708e-03,-1.885829935587881852e-04,-6.694535644031653250e-03,4.763546421715857127e-03,-1.361094727743253005e-03,-4.877890558646369792e-03,9.019056700629416584e-03,-4.592303110157302387e-03,7.551758561034781561e-03,1.224502313428255801e-02,-1.390876972630020235e-03,-2.877234118860485038e-03,2.098081885280338090e-03,-5.688730545900128402e-03,2.726145127558882424e-03,6.335550469715776163e-04,-2.303787519263972381e-03,-2.804175294662543828e-03,-4.022656546475305434e-03,7.005984544642778616e-03,1.336944195089621607e-03,3.219107635576709016e-03,2.024085322914436196e-03,-4.067150966290458518e-03,1.021888309540568184e-03,6.554411899459162971e-03,-5.347718314495516871e-03,-8.428440546388662330e-03,-3.758621542482367805e-03,2.867168419576299719e-03,5.108042430664090248e-04,-8.819049062336398875e-03,-2.733701550893239286e-03,-1.154601120104044656e-03,-1.688121091269575123e-03,-3.694340072393135273e-03,-3.063140675297044632e-03,-1.060090116422861013e-03,2.395504097270542004e-03,4.287938994807290183e-03,1.916022924774016343e-03,5.366205454699330031e-03,1.458052640683735277e-03,1.317681028419984916e-03,-2.683487359765887245e-03,9.074829925710063694e-03,-9.216208968068144132e-04,-1.739290414465231123e-03,-5.873770512902767513e-03,1.095685023609383781e-02,-4.856703830564625297e-03,-7.440461762067246757e-03,-1.006844008364920313e-03,6.563641326412283754e-03,3.423704594957859525e-03,-1.235230914284220737e-03,-3.714452105122387823e-03,-2.178742453512966335e-03,5.011595357201944871e-03,7.549778754138878126e-03,6.171947095818800487e-03,1.077741021214429851e-03,-1.003652692145029154e-02,3.474346557942112335e-03,6.086148477060002171e-03,1.389828295644230738e-03,-1.407710750536888926e-02,-5.065941647074682051e-03,-3.052324468190366173e-03,-4.680066753535946943e-04,1.947418885007310948e-03,3.647765976849530814e-03,1.228832556239339542e-03,1.255742125903369774e-03,-7.763303620999370894e-03,-4.972025043921296864e-03,1.360213836637439973e-03,2.081142716569362675e-03,-6.259688202586476471e-03,-2.084750580176293524e-03,-3.146218373889581726e-03,-4.447876980573134756e-03,2.517161482963472998e-03,-3.143059638625905327e-03,-2.801154005677397498e-03,8.796839018190031820e-03,-8.806904534168371809e-03,6.266302382299404443e-03,-3.465391716044123258e-03,1.440660737983903445e-03,-8.328698369823086265e-04,-4.365394828949491386e-03,2.135791084860079632e-03,-4.027868679619438404e-03,7.304680899486707367e-03,-4.988150740881870833e-03,5.490549848207456696e-04,6.858091781878408559e-03,-4.466126034148197241e-03,-5.597516631511083307e-03,-3.930208077588401988e-03,1.368029891364957095e-03,1.708254824316821595e-03,-1.059094254349684378e-02,4.347677632307835743e-03,-1.549457602087230511e-03,-5.035527907714621061e-03,-3.367491374755066623e-03,-2.777362436568723680e-03,-5.106237442332054247e-04,-3.120866260302273577e-03,-4.443997613488791763e-03,5.521704144005866802e-03,3.823516494670022286e-03,1.998245682333845026e-04,-6.441096274815728655e-03,3.218302096199401451e-03,3.250452976234573677e-03,6.549269523313912744e-04,5.693361143744399711e-04,-2.159108721272785716e-03,2.369796278163107534e-03,6.465499847119979618e-03,-2.656910678415899587e-04,2.247759214765887581e-03,3.159700488328044329e-03,-5.016070371452866807e-03,2.46094
9015292294376e-03,1.580898725467174467e-03,-7.809704623618783995e-03,-6.125319338027607370e-03,-4.949004151354127051e-03,-1.001125920260846321e-03,3.967843716439696175e-03,-6.738828641961533937e-03,-4.792949821997629159e-03,9.737115318714274770e-04,4.827719697740917408e-03,-3.080350181324565100e-03,-1.378149951155726834e-03,7.008805070110395684e-03,-1.581371815217922933e-03,-4.824001168802648145e-04,-4.893226893303927010e-03,3.496296513928177875e-04,4.009912144354962037e-03,9.152043117005355388e-03,-5.911107201683951304e-04,-6.990377208234452126e-03,2.704267745229477302e-03,-3.812025879767411401e-03,1.065513079058036557e-02,-7.402896987206269812e-03,-3.635871608955700690e-03,4.151317048003865494e-03,-1.240207663675357407e-03,3.345335297712451975e-03,-2.064885337703315643e-03,2.299080860184753074e-03,-1.817415091065219615e-04,5.550504453154287039e-03,1.255028145809312413e-02,4.616025580137518049e-03,-1.379768778611330541e-03,2.581718835743130724e-03,5.160012159242224720e-03,-7.450923417807696347e-03,3.490097626702061070e-03,1.806132331831182764e-03,-1.711207450951143794e-03,-6.586067162868244201e-03,3.505603514504579605e-04,1.008379832799861063e-02,3.418283016840663237e-03,-7.672815408428842326e-04,-1.048166209446141015e-02,8.796062423999124330e-03,-1.802667034871549860e-03,-6.573598635668577612e-03,2.316622061385522485e-03,2.781817437206633541e-04,1.416187853532872810e-03,6.589362272458979507e-03,-3.067785531689597688e-03,-9.127681197290460346e-03,3.975774357029734397e-03,7.427610960368709325e-03,6.193269501915933889e-04,1.904395646062321396e-03,-1.786031325448533347e-03,4.689206922449251115e-03,3.072680287614825128e-03,1.273213822814344906e-03,1.080691837533001749e-02,2.515288237669828335e-03,-1.408132107886190306e-03,2.070248696437641724e-03,4.936844576905469054e-04,-3.866839201239390122e-03,1.493148145632805426e-04,-1.534501353964896637e-03,-4.783728247003591846e-04,4.118377376712142705e-03,-1.366794740489309795e-02,4.678609419310238680e-03,4.393832889493432328e-03,1.263495676588986642e-02,8.505038198042540187e-03,1.350583257992690397e-03,-3.751882658242344966e-03,-4.001898850510528585e-03,2.114785393423416343e-03,5.723830382598145937e-03,2.901309793182973648e-03,-5.674428628954088054e-04,-2.997136289965182578e-03,9.985759068558933138e-03,8.232522763122961454e-03,3.219342520651081747e-03,8.885091268796893940e-03,-1.378210121675637169e-03,-4.813146572954101507e-04,-1.106279108968932778e-03,-1.418438714031646126e-02,5.855604548272051388e-03,3.031455975970223215e-03,-4.050715308510547709e-03,-4.390430296858199334e-03,6.413938638422006337e-03,-6.764593789988164463e-03,4.931521978837719554e-03,-1.068961496490274891e-03,1.429009869790981175e-04,-4.380169313598753465e-03,-9.851767265165423896e-04,7.577041539699112512e-04,2.814833497556473978e-03,-4.271481038926354910e-03,1.018466627560259130e-02,7.894067279302802512e-03,2.376515864421120238e-03,-7.019972703019210854e-05,-4.489382940294499476e-04,3.053652951581305522e-03,-5.098908224034483759e-03,-5.479946937721892619e-03,-1.763666318307271054e-03,-7.339735298081385105e-03,2.447229884759755895e-03,6.261202042496594351e-03,9.281645066841014266e-05,-6.434424500676055089e-04,-1.508748836119576430e-03,-4.755538582554784052e-03,1.327876918275368301e-02,2.416007010698656443e-04,2.504365970914639201e-03,7.529732256930544466e-03,-3.791195389053273346e-03,4.924813556440300845e-04,-5.683102735859339380e-03,-5.769577348394567227e-03,-4.136880614047370140e-03,1.544853831064216254e-03,-1.205694525156749243e-03,-4.063301487487282378e-03,2.553640574874500684e-
03,-4.528090422705285077e-03,-3.240596254584147701e-03,-3.450685170056275695e-03,-2.829742657367142165e-03,1.179243826528711576e-02,1.060195216622823299e-03,1.214690054625715233e-03,4.799371152956480140e-03,1.728135282894369823e-03,-3.515689453023877753e-03,-7.367272635717689658e-04,6.102465534630664885e-03,3.725960589963540209e-04,-5.681550019718927501e-03,-2.120946402973101358e-03,-2.638762147334621291e-03,1.079941107911512425e-02,-8.789513472552334603e-03,3.284900666675884089e-03,-4.414566235835727550e-04,-7.400012738449525569e-03,-9.525044360273141053e-03,-7.785670667145051620e-03,-3.870787410665738060e-03,1.299135771573435919e-02,1.290886943233217051e-04,-1.192719059461602434e-04,5.028764547550425160e-04,7.192784263938762149e-03,-3.362250162210984883e-03,8.381995827082615010e-04,-1.047629715231131187e-02,3.410603176495918907e-03,1.652916111080101431e-03,-7.574134356353192342e-04,-4.944276907502604367e-03,7.096365097031962638e-03,4.204990830123861607e-03,3.410270782705668909e-03,-6.678513336264224436e-03,2.762988528820182598e-03,-2.454234368789954212e-04,4.605326838666941314e-03,1.367242375242766180e-03,-1.590487143588550575e-03,2.976965225353666537e-03,5.984966652756736234e-03,4.286758965919429398e-03,-6.173412021845776573e-03,-1.717497647661247019e-03,2.177605055420066531e-03,-1.216853285658957111e-02,-3.499332021165800846e-03,8.586615819785672205e-03,-2.569503633589320657e-03,4.937240521963918993e-03,-9.453678840557208987e-03,6.054353556822001867e-03,-3.366015804306678964e-03,-5.549584173415603693e-03,-7.560822232709368297e-03,-1.528547637366160609e-03,1.527018553142929242e-03,-5.965596227941794873e-03,2.421904834267187348e-03,6.253653816986681942e-03,2.271611051924270208e-03,-2.244248178595913568e-03,3.097968540928182010e-03,5.785088319771087076e-03,-5.611443397268729011e-04,-2.824185639684482357e-03,-1.531690496130191665e-03,-6.814035426494961280e-03,2.394172736955093596e-04,-5.700437022377880837e-03,-2.966281144160259984e-03,-5.368845981548063008e-03,8.321304081070820050e-03,4.356478377056672777e-03,7.389691164756092331e-04,2.551815837344101855e-03,-3.190992447811501990e-03,3.819015378022134252e-03,4.638361329688412554e-03,-3.818111642254807397e-03,-4.978035365438332818e-03,-1.485417297632912184e-03,-2.423907385670974642e-03,3.576256877503141644e-03,-3.729542001251680804e-03,4.201727963405044695e-03,-1.772692888663261876e-03,-5.037764418646704240e-03,4.393168183907066905e-03,1.010936491289217420e-02,-9.145786830107235921e-03,-7.973714998925727094e-04,-4.211307886939556593e-03,8.172104556583884363e-03,1.160913730494243790e-02,5.507635722276210503e-03,-1.285309347033456980e-03,-6.509187901757451525e-04,-2.804872702391194607e-03,-2.444989707647867230e-03,-7.201457995044564624e-03,4.955240689658386467e-03,3.890812258036922561e-03,1.209977845394625065e-03,8.270783780435216753e-03,2.269674914123636766e-03,8.089632783202506351e-04,2.714511376676836496e-04,-5.879504789314233355e-04,3.693164757142940440e-03,-3.064410222829125861e-03,3.449644262701577913e-03,2.616471104427049894e-03,-6.317265349561167198e-03,-4.681397596091462117e-04 
2.365636818710254927e-03,1.874070300417879191e-03,4.352304965446228587e-03,4.395760569338708415e-03,2.641252846929373929e-04,9.989341038888713969e-03,4.089696413672808985e-03,-5.385321029643623698e-03,1.652859994978957093e-03,1.221475678361943519e-03,-2.153074632110691961e-03,-5.709081474594450076e-03,-7.126132728252307933e-04,-8.884713733222205853e-03,5.209050930323251170e-04,-6.346906266827895098e-03,9.977329084632474265e-04,5.997151446789769288e-03,-2.078141575184672823e-03,7.179837750281304644e-03,-5.111152285163027623e-03,-1.037486634258425952e-03,-7.542190832592498195e-05,-3.182302335811575296e-04,-1.828432920205178610e-03,-4.104960427772426368e-03,-2.350261249643284100e-03,4.940107986852771381e-03,1.913780351823854680e-03,-5.233492875899946502e-03,3.414846280172042711e-03,-7.173455098119181639e-03,1.750787996223218896e-03,-1.036086836655752789e-02,4.235413947195686840e-03,3.636282144711266637e-03,1.044062774109693996e-02,-7.601746330106326326e-03,-3.115217114498354360e-03,-7.715173373237640747e-04,4.757163000343934316e-04,-5.725288565624615756e-04,-6.166049016576953187e-03,-4.738206516083195291e-03,-5.063459897758623490e-04,-2.652560119806042244e-03,3.614873809526939624e-03,8.042333665462980327e-03,-3.872844428381173052e-04,4.769773979954990140e-03,-1.227118284810493898e-03,3.290135797501883563e-03,-4.118099079470407750e-03,-1.723357281395600845e-03,6.067645231591515045e-03,3.224318938029510292e-03,-1.813981644280049442e-04,4.368362337014182820e-03,-1.711872959174429198e-04,-5.063974714704015320e-03,-1.439640425489012139e-03,-4.759755322850925743e-03,1.045618819835282889e-03,6.093336104369395483e-03,-2.621568456534035738e-03,-5.938204890001119556e-03,-1.444793776615923902e-03,-1.246226194617097369e-02,6.025577254880163259e-04,-4.350819596901565303e-04,-3.747542271658852071e-03,5.623343775175415442e-03,-1.858233717120515681e-03,7.163739725096297589e-04,-6.792744455851828372e-03,3.065473637145232390e-04,-1.404309542236751879e-03,6.596053853405238780e-04,4.484804312773286658e-03,8.548002610861270983e-05,9.507356730595946815e-04,2.913376767070311965e-04,-2.382772069461639580e-03,4.719451534101157773e-03,2.004184982558082639e-03,8.838577951425920926e-03,-2.994737864401152030e-04,8.836374805508626085e-03,-6.038439881700239341e-03,-7.474309652463804500e-03,-1.826322921261229210e-03,-6.425764510714942167e-03,-8.668033117284181183e-04,1.108172078881127995e-02,3.747328947841424671e-04,-4.764526284099132938e-03,4.855066603947907206e-03,6.445076709230254165e-05,-5.016699081441550016e-03,8.456502250364449080e-03,6.112441065569905983e-03,1.232162044324438134e-04,6.575466320599583636e-03,4.991441103588119092e-04,3.589591110688406406e-03,1.023123554059615159e-03,-1.213264164021314465e-03,1.121885437879782677e-02,3.560373004000352490e-03,-2.421617521337214324e-03,-2.342132950706093519e-03,-1.495498694577463740e-03,-8.186261691634832754e-04,-9.357931574942338418e-03,5.160023219175836895e-03,1.064392282407311541e-02,-1.191106177110958300e-02,2.605758357967990005e-03,3.786347978154097453e-04,1.967713292371211717e-03,8.808383660897936779e-04,6.261334806451262017e-03,6.110082114631834593e-03,1.171447517253899201e-02,-3.569363157379597511e-03,-4.841636592094754376e-04,4.720165537914827389e-03,-5.683043647991800444e-03,-5.471371504995103073e-03,-4.343681801993700027e-03,8.429951719623883684e-04,-2.532093597976333723e-04,-5.508650010759166479e-03,8.889434803795386031e-03,-4.345949976362008596e-03,-1.312831475021995515e-04,4.287903611780416284e-03,-4.433836351605237140e-03,-9.443431074102327100e-03,2.52891408
1812832712e-03,-5.694870928172766824e-04,1.404405949783710077e-03,-6.545925268401604942e-03,-5.291602495024166040e-03,9.615000880239928105e-03,-1.164458938575742633e-02,9.944235798337347672e-04,-5.628757891249614766e-03,3.649587172859916954e-04,1.669792094450161255e-03,-3.898088741088590308e-03,2.281321138885354823e-03,7.992442902756476120e-03,-5.024533740426587528e-03,-4.540803129624430487e-03,-1.927525353274361399e-03,3.544536046346316149e-03,-1.211670175099350145e-03,-6.284343098064962795e-03,-7.863551383194347655e-04,2.981870545020605758e-04,-1.217949616108344102e-03,-5.239104421377158063e-04,4.314546476309772706e-03,8.938195848147963227e-04,-5.475466404005297980e-03,-3.486195672370159208e-03,-1.170404673932697731e-03,2.826692472618191985e-03,2.920228288227517347e-03,-7.327533658608095801e-03,6.093511605171996089e-03,-7.055232956969678887e-04,-2.382292487818887979e-03,-3.114532402626623599e-03,-3.282412259987001180e-04,-1.226235461005844391e-03,6.543247645794797712e-04,3.810325149884946183e-04,2.405568485830669387e-03,2.762776482282922139e-03,-5.288034595920878232e-03,-3.078247380212736396e-03,-6.205650380193511277e-03,-1.036719104196680979e-02,3.237667764611599358e-03,-4.900724862647099118e-03,-2.290095374025779770e-03,-2.205608858021586034e-03,2.222416882944986283e-03,3.621909125784570239e-03,-5.030817678494071618e-03,-8.558006713745829747e-03,-6.497196569559894261e-03,2.398939134563628141e-03,1.201250390083958902e-03,2.843783052380843487e-03,9.050943453315085652e-03,4.018280195813638900e-04,-1.385778895497760200e-03,-9.880511521835882039e-04,3.019601053440365210e-03,8.248590496599055260e-05,-2.847031677608084961e-03,3.666745810059921385e-03,-9.759518718133061710e-03,-8.984850489333943979e-03,-3.715334383833873255e-04,7.808239344508735580e-03,-7.081228732301419982e-04,-1.608308340763226797e-03,-3.845781771830433706e-03,5.351735342427336312e-03,-6.722430561233477819e-03,1.045174805155150077e-03,3.542718321152994645e-03,5.228476867736442726e-03,-3.212982300321794746e-04,-8.928389391559054394e-03,-7.407687893381560825e-04,4.471754484093702284e-03,3.481178644370766084e-03,-5.024475301401629818e-03,-3.319644227468020181e-03,-2.658769399034316937e-03,2.317444030013528222e-03,6.961749545433224559e-04,-7.243497460454021049e-04,2.800577316018519787e-03,-2.943689552481484808e-03,-8.824440905347219954e-04,-1.840034353367174655e-03,5.675914503207530118e-05,-6.084515502606069152e-03,-1.846356589003185153e-03,-4.285951220284840366e-03,9.103197965240492073e-03,-2.150943540294601372e-03,-2.397146879529788149e-03,-1.759891750869419349e-03,-1.752494055950047007e-04,-1.087551016675921680e-03,-5.401303237971662001e-03,-3.326706817415496856e-03,2.986368154084754166e-03,4.036909530340878835e-03,6.656898229272514671e-03,3.618393432911676255e-03,-7.956886297376172118e-03,4.357103000857482679e-03,-3.747862454527605899e-03,-6.174899624848607937e-03,2.254834166118934845e-03,5.579710911003322137e-03,-1.209028083357332117e-03,-2.596735279475143737e-03,5.298212261858583488e-03,3.614155201253820872e-03,-1.708395234440513799e-03,-5.084022058920433310e-03,4.201900455971931819e-03,-4.821186590905569141e-03,-3.176019237154814861e-03,-6.028591213269156486e-03,1.735163580688025512e-03,-2.783953617032585651e-03,-2.155424075704275282e-03,-7.379204835424714101e-03,-3.126922281890061502e-03,2.992231669582264778e-03,-1.531559954674937890e-03,4.270097564287931419e-04,3.144425164610221892e-03,-4.281374847402646218e-04,-7.702548752097684512e-03,-8.871304162838428500e-03,-8.399005070318986063e-03,-4.451744180197206215e-04,-5.0759
02353216644493e-03,-2.268033102779000020e-04,-5.553980082252252780e-03,1.265144006232568662e-03,3.280855339714648829e-03,1.169520812132649433e-02,7.757557942906230519e-03,3.669028259588368555e-03,-1.279755305302727846e-02,-1.653809361523769046e-03,1.831477338690392764e-03,7.716545775962754122e-03,-1.210699936038380897e-03,1.055950440007488072e-03,8.486608498802934372e-03,-1.064745600492253766e-03,-4.099275977519210856e-03,-1.746484622275941033e-04,7.321189777667896741e-03,9.117554727879995796e-03,-6.107297766976176392e-03,1.136497696871516770e-02,3.110344572131818180e-03,2.671625067443650012e-03,1.831808273637470033e-04,-5.172917107988254590e-03,-1.328332330350732440e-03,-1.272214966724113938e-02,-1.078707551291331742e-03,1.749046993512157287e-03,4.071739281160058344e-03,-5.954157353750100246e-03,-3.238010908766915386e-03,6.849025060984639118e-03,-5.103624518146843321e-03,-1.795698978656891506e-03,8.610219654139397716e-03,-8.609192209295019353e-04,-5.615124757915246172e-03,4.324462981933546030e-03,-5.820238452027186662e-03,-3.861368925551794933e-03,-2.896588753028652742e-03,6.975566157953905048e-03,4.974235283455721163e-03,-3.322441434208713885e-03,5.705518211529960566e-04,1.635599211121693212e-03,-2.981908623158261665e-03,2.297206723831791801e-03,-4.647722865251342761e-03,9.778587838594363861e-05,-2.156684730222045016e-03,2.558707722044020402e-03,-4.222488111264501021e-03,5.129740837243331776e-03,1.125825785263101335e-02,6.265648924743295357e-03,-3.692829535786442893e-03,-3.211866612878382446e-03,5.144789639781043554e-03,3.072875244125250916e-03,3.036134500199469313e-03,-6.609924762923868706e-03,4.988621485387705230e-03,5.121632836287861971e-03,2.178431064555912469e-03,5.850896772679587375e-03,5.740372919867881181e-03,-1.358478000628814803e-04,3.756773272841374420e-04,-2.662119189845964587e-03,2.270123530034025543e-03,-1.845767104341721138e-03,-1.758937956466619362e-03,-5.115410160410800884e-03,2.124192515063758578e-03,-1.041753449378122857e-02,3.038991016982087179e-03,-2.397573693848149194e-04,1.432017589884043143e-03,1.142318685331795676e-03,8.631628553992723571e-04,-9.936744679672783725e-04,-3.561806768618146386e-04,4.919511538418153487e-03,2.469626607117213647e-03,-3.780994988437735502e-03,-8.713865460058071175e-04,7.929807613654772701e-04,1.247053466531402839e-03,2.447944494144406458e-03,-2.245815225579841663e-03,2.459571565484742171e-03,-9.600357073554154422e-03,4.655259653022068844e-03,-3.137931872195290437e-03,3.840748428087306654e-03,-4.100911967623507848e-03,-2.262484464922778164e-03,-6.491109662717156441e-03,8.462051399652578557e-04,2.100059693898806076e-03,2.815224166508087551e-03,-5.939523776882214313e-03,7.117020608104317712e-03,-4.003045841568110552e-03,7.162202324876635034e-04,-1.505636955104139524e-03,4.029640875405842565e-03,-1.081454701527249842e-03,-4.306382754309635678e-03,-3.770734048388943004e-03,-3.190702672026241200e-03,-7.977711184988930601e-03,-7.268856672339805113e-03,-4.949988457968331444e-03,-3.386929035578372608e-03,2.358192357453349746e-03,2.016644165442995945e-04,2.721230156757800580e-03,3.107404084896504132e-03 
1.491282784707898944e-03,-3.962169694164791381e-03,6.212146390667005243e-04,9.135204558608956901e-03,2.902587091922358489e-03,1.454823426043192315e-03,-2.159107074648230692e-03,-4.785605537302364126e-03,-3.079783898690272807e-03,-3.143123355801686995e-03,8.461541052586332365e-03,-2.381501710702954230e-03,-5.345677827075431647e-03,-8.702359621089202427e-03,-5.058452613838406060e-03,1.035743033300339996e-02,-9.346775599612417319e-04,3.472846232840100743e-04,-1.669078270678870431e-03,-2.745016078252203624e-03,-9.225140366784758544e-03,-7.456181694435981252e-04,1.542305998411948927e-03,2.359655844252545043e-03,9.695979146208314955e-03,8.937471061560181215e-03,5.901522243637751658e-03,4.007558601035235077e-03,-6.046997049102450540e-03,9.036770453458147509e-04,-1.457386992882598047e-04,9.653057390581613537e-03,2.134026613383538048e-04,3.141062440865882884e-03,2.422724676170617125e-03,1.231065749763650191e-03,-4.412114743914154202e-04,-4.165655949492112417e-03,-1.266961503578604639e-03,-6.585816325564220960e-03,5.694568869199436063e-03,4.222831881175018176e-03,1.735200613490509806e-03,-2.403766236138743431e-03,7.476123126891453391e-03,-7.211041527001888808e-03,3.532074007330313070e-03,3.648735494613256775e-03,6.420560920173568921e-03,-3.657524519244783560e-03,1.552233296971916544e-03,2.918175456662957189e-04,3.914884874794123844e-03,1.128548894076905097e-02,6.095296986617568073e-03,3.571724375515592659e-03,6.826174269410669078e-03,4.798409834889659151e-03,-3.626070130629782933e-03,2.054452175383152460e-03,-7.055729962410566339e-03,-5.012989082684743414e-03,4.303263370917843086e-04,4.362962872775992713e-03,3.180886589277120112e-03,-2.561718435334269222e-03,-2.934408025585386786e-04,5.051957216698310486e-03,5.242400622318756781e-03,1.255848995109229557e-03,-4.542873489488384944e-03,-9.513237476251015963e-04,-3.841958387289278257e-03,1.969009190503947116e-03,-5.745647559359294554e-03,-2.130679417187399984e-03,-2.384551074412448370e-03,-1.283276434996252421e-03,4.489331947592642424e-03,-6.365577003261150765e-03,-2.771366320112207508e-03,1.713949970849610325e-03,4.764255753894616231e-03,3.760173757779630287e-04,6.425351527904913512e-03,6.535534274790861901e-03,-1.727658763937549692e-03,2.750701303442679765e-03,3.706308361186376119e-03,9.408370717934759486e-03,7.572791558196795801e-03,-1.797448993481681138e-04,4.807657891219137454e-03,-2.782598556367800197e-03,-2.047298773175301040e-03,-1.186063057395304659e-03,8.109255804034836215e-03,-1.996892829353552959e-03,5.271948011181026850e-03,1.104780663336717888e-03,-2.739974597839976364e-03,3.412148464677244213e-04,6.694549259860218572e-03,4.668428462743496807e-03,-2.099993032986726717e-03,-1.687610410284684460e-03,-5.317036693592618046e-03,5.230531575910301288e-03,-7.146244485788050792e-03,-5.751167756391780544e-03,9.001099907034961281e-03,-6.807658959978700984e-03,-1.920713548890898986e-03,-1.565301062332666001e-03,2.861911446159549812e-03,-1.298616834957551933e-03,2.408961239107648006e-03,6.634735567931098095e-03,-3.452788593936691533e-03,-1.299590337565525435e-03,1.133606192897579313e-03,-5.456260370351097982e-03,3.984233076128687205e-05,-3.262358942149672439e-03,3.290299260088842338e-05,-7.110814095099182025e-03,-5.018008513869507727e-03,3.735106497617074472e-03,-1.066339181627371825e-02,-1.180822652511572733e-04,4.252777465525193167e-03,3.819630682206147116e-03,-1.771560461317935823e-03,9.600004505823152984e-03,5.812121334149692245e-03,9.014810153032587524e-04,4.051439023818175509e-04,-7.520604772514404186e-03,-3.223185389682501050e-03,4.37674898519400
4252e-03,1.143881013290619711e-03,-3.364268496358534689e-03,-1.596083139714175869e-03,2.206229949413664529e-03,5.370391128697478422e-03,1.839532364156040722e-04,9.796704216231065868e-03,1.562455920116061821e-03,6.066595785846977916e-03,-3.246332338114953781e-03,-2.637692450693910218e-03,2.903932131393733695e-03,3.758316163262074260e-03,7.051566378028600174e-03,6.238019769130354053e-03,5.368702506812648642e-03,3.404329026460101246e-03,2.270029786953433747e-04,1.696540174668880775e-03,1.923242721630783044e-03,1.108428879633698190e-04,-1.821732507598816664e-03,4.597267265580190151e-03,-2.723802578259912604e-03,-5.873287479595820995e-04,-1.006131236129103289e-02,5.650777877940437420e-03,5.665323538424938202e-03,-1.867351524459852667e-03,-3.458309578919416868e-03,5.538161710589278169e-03,1.928523367368661662e-03,1.969412188465693384e-03,4.107351517407774148e-03,-6.119893578474727702e-03,5.463601149589864155e-04,-5.897062472001510583e-03,-6.362988971798192242e-03,-6.444841974802402625e-03,6.191453099220260888e-03,5.537244085221428198e-03,7.914760326472687277e-03,9.338978750303813386e-03,-6.729742676417821423e-03,1.145821183370698999e-02,-2.715235810393199899e-03,-9.709726584849595957e-03,1.725529025815056988e-02,-3.374578320976560908e-03,1.763675168331410364e-03,8.861301026542263426e-03,-1.674460948397968460e-03,8.305587624224397558e-03,-1.140144184362389124e-02,2.476617231176079115e-03,-2.111525876968566723e-03,-1.574423331715582878e-03,-8.529859281233795679e-03,3.170916149390155814e-03,5.670340211752706357e-04,4.140058647120132065e-03,-5.927074265594525597e-03,3.048299704079007310e-03,5.141580621714820012e-03,-6.337693163539831129e-04,-2.197143491558457926e-03,-3.181531608964725463e-03,9.002361909108917010e-03,3.900843491949933673e-03,-7.611460842575290368e-03,3.612431956423383349e-03,-6.414676797144101827e-03,5.250610570301778197e-03,2.492374945297850738e-03,3.644138885418576456e-03,-5.560378180223241214e-03,-3.405899122169103385e-03,2.890269358848849938e-03,-1.070441570364482307e-02,-3.887569421607070443e-03,4.552710851492095326e-03,-7.600491807126260810e-03,-1.659497941628328599e-04,9.935954328866130822e-03,-1.878648951651878517e-03,-1.735624022399251680e-04,-4.946385752024426430e-03,-5.025492214170862795e-03,2.496659251867833478e-03,-2.800277279645409147e-03,-2.567560508640400613e-03,7.316414380750566743e-03,3.626104283972304224e-03,-3.137135540956463749e-03,-1.143427388546294126e-03,-4.061648728020354239e-03,-3.486081918416306796e-03,-2.005150213977153085e-03,1.023844663244831389e-04,7.501045245169404331e-03,-4.138350294925664460e-03,4.991922405730367905e-03,-2.305890889607587263e-05,8.438206599436198611e-05,-2.283602216952825698e-02,-3.732045041186472442e-04,-8.524319889921952789e-03,-2.686400458784392233e-03,-2.277943573821224253e-04,-4.395608683441151179e-04,-3.145244687353662488e-06,-1.126902704757844686e-03,-2.361550561506775125e-03,5.622343811148484451e-03,1.507180518823162492e-04,-2.673478228956685174e-03,-5.786804601812320240e-04,-2.982104479911476189e-03,-1.502729175241007209e-03,-1.088432341309587652e-02,2.817845913550731294e-04,-1.050166842456782233e-02,3.507463172413357623e-03,-4.464180332812261141e-03,2.551447023739133145e-03,2.470192349180788411e-03,-4.368759821552935761e-03,7.547241177167021827e-04,-1.616295681974043441e-03,1.011944836012904254e-02,-1.901975545267586267e-03,4.834158666897407815e-03,-1.760985488373486359e-04,5.967667718056660422e-03,2.940620260194243447e-03,4.205436246449657995e-04,-1.228658952548738852e-02,-3.998138530172682704e-03,6.189452361227541386e-04,-
6.160383179872800860e-03,8.971066818292856848e-03,4.790965490527920694e-03,-4.215646165981979006e-03,3.748453615931456560e-03,6.720012250440949913e-04,-3.276275176949801533e-03,-4.498687576457771620e-03,-6.672814340598373292e-03,1.121340760237079376e-02,-5.493516234228617342e-04,-1.898344202115969737e-03,6.101103726013027556e-04,6.590541788693816960e-03,1.098338103658331837e-02,3.413048937131396045e-03,5.498777462221794525e-03,-7.919740057096251104e-03,8.474511945637961058e-04,-1.371716115733853094e-03,-8.101284266624264596e-03,4.263706240013642561e-03,1.087424408225972847e-02,-2.325065306281149172e-03,1.978314911821673402e-03,-3.126306259860595852e-03,-3.980125172073807616e-03,-7.469172098607233974e-03,3.835439966579495211e-03,7.152188759432936971e-03,-1.059201897545905720e-02,-2.014665915105268591e-03,-2.454061613656565669e-03,-2.430032038031486753e-03,7.196388305230873306e-03,-6.806236244055814850e-03,3.624498238245361676e-03,-7.269972211581498395e-03,4.353320730773426080e-03,-7.460327222758772470e-03,5.138645054132885519e-03,8.036110733975787221e-03,-7.026660693055653342e-03,5.683391419213096839e-03,-4.384667246759095874e-03,7.721519124928278265e-03,1.343965364491747031e-03,7.013132168759555855e-03,5.621069086806367455e-04,-2.305391958234846023e-03,-8.820177841200425514e-03,6.560293935240212558e-03,-9.167013489009027613e-04,2.394835918926870467e-03,-4.825905415833268118e-03,-5.795301198765423489e-03,-3.431925544136200674e-03,-1.158165040129853713e-03,-4.439733751335396203e-03,-1.520812767724481068e-03,-3.593920482243830386e-03,-3.131611305069857579e-04,-3.986924105009956656e-03,-5.195888138104375227e-04,3.648299255794019893e-03,-4.299342315262794437e-03,4.020336953529478688e-03,-1.991216514673576406e-03,-5.728228514500686118e-04,9.389122491207765181e-03,1.664699722460973865e-04,1.038387665250489755e-02,1.415389810115196304e-04,2.936230620543939761e-04,-3.000081751931046870e-03,-3.840135956632378077e-03,-8.049515143003145926e-03,4.826354100611045250e-03,-4.432280738227605162e-03,-2.913865816515367330e-03,9.788765414176286636e-03,4.749606620168066173e-03,1.317147889299139169e-03,-5.629535812954690292e-03,-2.708932441285198320e-03,-7.041064191602061347e-04,5.156727096541550634e-03,1.075227334429304689e-02,2.192391287040327836e-03,7.755264959030972252e-03,3.919306740535399500e-03,-1.329191057616716919e-03,1.497590423093800652e-02,-4.311592181971008950e-03,-6.499965864420990426e-03,5.331135400092445152e-03,2.865976396078717681e-03,9.781168456470982564e-05,6.711172380499566374e-03,3.640398086154390464e-03,6.128039516561826355e-03,-1.386281321648574966e-04,-2.139337581892537029e-03,2.678770080225121274e-03,-4.372163156273850505e-04,2.797214383110380783e-03,3.245670080287870948e-03,-5.307669717257426298e-03,2.033904031329550940e-03,5.394097239874340484e-03,3.870578870930494126e-04,-7.731824332082776197e-03,7.315222476939936894e-03,4.531444222551404312e-03,-5.349672205855120037e-04,2.135337417613941430e-03,-4.919596305462247549e-03,6.557971970969265715e-05,1.619597137489003219e-03,2.017358520512943423e-03,-3.249545960336838648e-03 
2.565472025688392884e-03,4.433981107215632239e-04,2.166589079581733145e-03,2.709484977277191423e-04,2.186762680003837174e-05,7.382222440673737000e-03,1.082679511623546283e-03,-7.412720927890123273e-04,-4.716000557362741603e-03,6.658752346640275932e-03,-4.492209332333511017e-03,1.220649478388395170e-03,1.508520728185223556e-04,3.505501168388013011e-03,-5.917076268068334520e-04,4.806838949648528994e-03,3.610765681180185330e-03,-8.339529976533175976e-04,5.858437943522596184e-03,1.275388954219902442e-02,-2.055056150739783486e-03,-4.574256420895788343e-03,1.445854227291180991e-03,-5.165554276209532829e-03,6.670611407004367610e-03,1.020502748618786967e-02,-4.937844376847591588e-03,5.439660337264336926e-03,-5.955881320731326589e-03,6.773933394578376260e-03,-1.797171248383562167e-03,1.424456466549945972e-03,-7.061010919762823572e-03,-2.163592866506725519e-03,-4.917395836717662790e-03,-2.810950807180493879e-03,4.660545742046964986e-03,2.719931932748199509e-03,8.220475212058467665e-03,-6.595071076900623115e-03,4.506435381299150715e-03,-5.529470063500663800e-03,2.193408513131200280e-03,-4.757796220462931665e-03,5.841991583635494471e-03,-1.564693846951861322e-03,-7.440912870832147325e-04,-3.147602207423314414e-03,-4.444016493692723273e-03,1.044623377998511347e-03,-9.823188673221447398e-03,-2.951025652729797211e-03,6.828196393658102561e-03,2.989014519728938613e-03,5.806199067704088598e-03,5.230545286090357640e-03,-3.324008178836191339e-03,-5.104824614089855349e-03,-3.622140827936494575e-04,5.335592103146782027e-03,-5.815850382484940434e-03,6.169028397846682747e-05,-4.872166022580302902e-03,2.268359614021598931e-03,-5.637954993550735316e-03,1.688554592646674193e-02,-3.086978734693017132e-03,1.071696573924209393e-03,-7.302711209750328088e-05,-4.475526968321479060e-03,-3.878913271418323473e-03,-1.536482687787766503e-04,-2.142599307227718332e-03,-1.863659196869661997e-03,4.154448049459502287e-03,3.325700877046119334e-03,-2.357663327070794353e-03,-2.148077181062067655e-03,7.805208238332123245e-03,5.935841818847633657e-03,-3.334140765944065745e-03,2.120552408025215552e-03,8.220988368261407733e-03,-5.106845618351964732e-03,-1.198027845540235647e-03,9.450021847197768174e-04,1.333729315811530488e-04,-4.745405013010740229e-04,8.369302111735585099e-04,-4.528153213479507548e-03,-1.982975872207421407e-04,-5.881015502762548461e-04,-4.351632375869087833e-03,-1.189574009235847430e-03,-6.749734415745910210e-03,-1.252656472361554814e-02,-7.119041750147086373e-03,6.007826781433106555e-03,-3.728187040307902843e-03,-5.238339358930146732e-03,3.030027125904006212e-03,-1.130053100835864618e-03,-4.743437535493095700e-03,3.176087112165776414e-03,8.486708277687788488e-03,2.404357829190005482e-03,4.827258643500865511e-03,7.412094872205268227e-03,8.615336264963082205e-03,-1.992285556898097405e-03,-7.290443158109412899e-04,3.074402702422334224e-03,-1.963080467912395755e-03,2.186682536407888715e-03,8.463952099683434965e-04,-4.154314489035392140e-03,-4.693222748053717426e-03,6.892888118186346787e-03,2.163637026806839673e-03,-3.699261363241247106e-03,-2.331528739731461228e-03,5.231272375126267207e-03,-3.889062308242790221e-04,-2.553873080751809118e-04,-1.356579092932183074e-02,4.674772625709797125e-03,1.629568835268248523e-03,3.830296872049294174e-04,-8.806344019828519706e-03,3.959091295069383835e-04,1.186969703488527806e-03,7.283271246931554213e-03,9.677907420946740488e-03,3.976444882452145196e-03,-1.008167913326840637e-02,-6.383501677917172460e-04,3.069721940268789126e-03,6.241298497379766004e-03,5.072974486158485924e-03,-2.851884039
445473640e-03,-8.111395552991753494e-03,-5.034280766445408360e-03,8.767926903984762585e-03,-5.727757046791969592e-03,3.408362029800380655e-04,-4.566125051269831134e-03,1.951544097433848685e-03,2.310697776123136257e-03,5.486835115790228770e-03,6.882667864846635578e-03,2.843447251884447359e-04,-3.636085723643849542e-03,9.154647107201288559e-04,-2.178397764240232615e-03,-7.800102146225836725e-03,3.352941635627478510e-04,-2.490443871936104218e-03,7.790207789623186446e-05,2.076511798883833720e-03,6.126560707193109273e-03,7.744215195442702426e-03,1.500577986695443566e-03,1.437457342501612632e-02,1.246711566966497708e-02,-2.069921575276716628e-03,1.523453245223838981e-03,-1.815735090755126249e-03,2.076084944582582900e-03,2.462953049492345049e-03,-5.514215195470221001e-03,1.939517796361430943e-02,-3.690975086443663844e-03,1.339282358864897493e-02,7.201125558255565967e-03,2.975355982010692221e-03,-3.248173745472370498e-03,-6.007706689671626042e-03,-2.529891680796786824e-03,4.664799284384383085e-03,7.904666015314934296e-04,3.516671750059913344e-03,2.401225024549306485e-03,-6.306658945705053039e-03,-6.335202588477016747e-04,8.445313426104130577e-03,-1.282160541031742698e-03,4.246908911600430318e-03,2.640906366724326410e-03,-1.747628203731790538e-03,2.500470845626327505e-03,-2.188318060525803414e-03,2.666853385921665238e-04,-2.190063356998779944e-04,4.042599663241652915e-04,3.279587502568233164e-03,-4.664380053492980099e-03,-4.819978937065456850e-04,1.960646074557383187e-03,4.604728775639303426e-03,-5.210821542837605624e-03,-9.147954931876603468e-04,4.021384592582718381e-03,2.942437211196303071e-03,-1.176710622798117327e-02,5.343282154719577841e-03,1.662649664983971518e-03,-1.227550093692201779e-03,1.748686631473905448e-03,1.810871325211062537e-04,3.817188142685620268e-03,-4.413417299766472270e-03,1.513910218547581763e-03,-7.893310905156974717e-04,4.384630422163759284e-03,-1.242157313885860906e-03,6.051404000620135698e-03,-8.552424408002065603e-03,2.293254394649448750e-03,-4.054976356712219725e-03,1.134642256048848516e-03,6.488431987249868027e-03,5.962427765094605037e-03,3.867215290669925431e-03,-1.047240533926812114e-02,-1.612191704017770094e-03,-2.713913282567400557e-03,7.497996531034186070e-03,9.030480412753012559e-04,2.161698732310702187e-03,1.612658114351111800e-03,6.692579870853445495e-03,-4.517349936749951410e-03,1.223376257015367733e-03,2.083317152828408677e-03,-1.289778632041889417e-03,1.127405888811750054e-03,-5.066156976851219317e-03,1.238109487462674997e-02,1.824956640830007414e-04,-2.565982684454097204e-03,8.602262593602465202e-03,-4.210851234422446562e-03,4.745962772146112944e-03,2.442267365651167466e-03,3.357498071260429841e-03,8.002642574137637521e-03,-3.540220102208360871e-03,1.350119119518402005e-03,-8.133405441240741662e-03,1.085251535834499351e-03,-3.096077526566729052e-03,3.063284548609421774e-04,3.528066596587412992e-03,-3.427073767953623534e-03,-1.833609593217295532e-03,-4.295611236375122127e-03,-2.243067671684359672e-03,9.116145930934292305e-03,-1.275010516830528493e-03,7.104887396383926385e-03,-3.074512362304214139e-03,3.057579575246287935e-04,1.072219752733141913e-02,-1.499001696671874292e-04,1.128243202973026535e-02,-3.815601320334167240e-04,6.603453114242663520e-03,3.461441191661247634e-03,-4.463192894057656408e-03,-1.441718887232451606e-03,-5.541765478292419054e-04,1.514734341175933644e-03,1.225309914488802773e-03,8.864239501871685643e-05,7.638788362421489175e-03,8.741686644581546373e-03,-8.315939716481682634e-03,-8.565578123952851362e-04,1.848737557121420214e-03,1.39941422
8365546391e-03,-1.211720886397118742e-04,-3.479406628966063685e-03,-5.644896314765713869e-04,-8.136938945778340870e-03,-7.601046662251574517e-04,-8.229167390544963462e-03,5.721364164942387490e-03,-6.870991386080755811e-03,4.083235314909525702e-03,-1.780954337847774670e-03,1.098916739769093855e-02,-3.232983739052160653e-03,-5.831007528886351411e-03,-3.368241773369050882e-04,-9.152585781386109870e-03,4.530186475683617778e-03,-6.575449633580386644e-04,2.899343252151570429e-03,6.562914020810391158e-03,6.507651120098466909e-04,2.564495620320306375e-03,-1.078874860928947903e-02,-2.963712730990854943e-03,2.217103357144242348e-03,-1.948742755058432930e-03,-5.683654967273860624e-03,7.171777580760073552e-03,1.137109822622028518e-02,8.534556090749016374e-03,1.137043301766318265e-02,-3.861365588707139527e-04,-2.608405346622520455e-03,2.266544278658701259e-03,3.087676836650649504e-03,9.515845772752336935e-04,-9.606293192129362023e-04,-7.111363450413664530e-05,2.647599796181999282e-03,6.475558908653957814e-03,-8.661589273783091387e-04,9.044814233444706253e-03,7.971685271734349487e-03,-6.647701283704181459e-03,2.179112886676478310e-03,-1.600088671771951595e-03,-9.414065294211900609e-03,-2.849982607089516281e-03,-1.160951943675431978e-04,-5.778337678277614339e-03,-9.841920885968160476e-03,-1.152969332261895123e-02,5.623733505621790647e-03,-7.608237393702717028e-04,-7.137213953562942907e-03,1.054286405104215373e-04,-6.042840136683952194e-03,3.985322243469128882e-03,1.112025499658474744e-03,-1.470381295274177059e-03,1.946340696065842514e-03,-4.815288831262207539e-03,6.477558690620501556e-04,-6.751741857689458641e-04,5.827373439211736049e-04,-5.734941634143996556e-04,-1.504462436773299199e-03,-6.753409026504011156e-04,-4.019896062488661614e-04,-4.254757537928725505e-03,-6.298341749490363704e-03,-1.712322074051533197e-03,-5.918425031501410583e-03,1.773313457220863486e-03,-6.894433267177876336e-03,-7.150879734969568455e-03,4.665596068971103287e-03,7.043548166136967201e-04,2.134147357842364114e-03,-5.419462565982661670e-03,9.519709447278068429e-04,5.084528973486312012e-03,1.218309476171830366e-03,1.894987239094811111e-03,-3.051864632376451603e-03,3.060328896527908515e-03,3.921855758766601766e-03,3.998300400664836149e-03,-4.892818679872708645e-03,-3.192643254479162786e-03,-5.407091851039325971e-03,-1.541637681514482217e-03,2.569878716741949478e-03,4.941709423044873296e-03,3.731057969143207606e-04,1.810691959460806718e-03,-5.484197158840939217e-03,3.107492912500764079e-03,-1.672376246574564743e-03,-2.182194488439174671e-03,-1.040426804834064306e-03,-5.105633582659261163e-03,1.015207567899777640e-02,1.195669848207202534e-04,5.049613424930758654e-03,2.701885740036555263e-03,-1.319830147258133947e-02,2.552215957812143873e-03,-5.600040402928901755e-03,-3.060546752377098917e-03,9.270636700102778335e-03,9.638577158317127372e-04,3.681316135959130254e-03,-2.149340330709132406e-03,1.094232161974352004e-02,2.849800551439781848e-03,3.177591108853457941e-03,2.569950655327826825e-03,-6.367631368161496414e-03,4.065549265537544109e-03,-2.426962571055046522e-03 
7.283093176434309902e-03,-4.532330104039567105e-03,1.854141194256151651e-03,1.846955729264321408e-03,-8.340883837246019367e-03,4.043769378680296331e-03,-9.812604170120072487e-04,3.924779823186311419e-03,1.992945544535106982e-03,-9.589914682824557043e-03,5.607548860812735722e-03,4.799617148504671180e-03,2.201812900932278798e-04,-8.138899400471346843e-03,1.081343088808165483e-03,9.622912812041903513e-04,-9.875859202302223437e-04,4.238313042343754000e-03,-4.173381427910760090e-03,-6.835265074855165560e-03,2.227084707497415089e-03,2.690806572869618510e-03,-8.743236204099863462e-03,-6.096199825548283442e-03,2.137142817840923317e-03,-1.994982746558965868e-03,6.631114623974492320e-03,7.228716673213770802e-03,-4.664817905980689697e-03,3.934742851665160891e-03,-4.143123686627768823e-03,-3.606645432897399254e-03,1.589599672991697313e-02,1.307622829271176939e-03,1.161806115069995669e-03,-5.237543199768200132e-03,5.545014462330642756e-04,5.961680329296400495e-03,-2.378656484293415092e-03,1.406994347364891144e-03,5.266435172279032920e-03,-9.515734224730549792e-03,-6.312452407786419729e-03,-3.995095651853729571e-03,-6.900835029139701197e-03,1.064546837848776364e-02,2.784390549223210616e-03,1.212476566818221357e-02,-5.875714284073871828e-03,1.884359929384307741e-03,-6.564983534204203372e-03,-5.429441921836435439e-03,4.399824214908572804e-03,4.001982904818797426e-03,-4.523083175578982568e-03,5.798450391094995063e-03,1.752989960200989836e-03,1.311025147509953205e-03,-6.381627198324830693e-04,2.796485136779167015e-03,-7.383987579611439415e-03,5.097248568386385237e-03,-6.568164721738076889e-03,2.404763836607993251e-03,1.128171433370519540e-02,-5.046140590008619324e-06,3.343137686049062109e-03,2.114235053779057929e-03,2.987377365365655093e-03,-5.789073075620538092e-03,1.581594776163718118e-03,-4.131249985357908147e-03,-6.182231318448752448e-03,-1.114889630209051422e-02,7.958700033747101443e-03,-1.637281514199390945e-03,9.338007580318113607e-03,-5.454807769198416652e-03,1.267836889025896136e-03,-1.048359070602470711e-03,-9.014784939161385457e-03,7.109840223314035750e-03,1.820164524701722809e-03,-2.954837422483401650e-03,-2.966768968309710028e-03,1.917335147717808951e-03,-1.069925186940311711e-02,2.867739990617623747e-03,-3.206750456084537723e-03,1.204510150628778330e-03,-3.696096944358086799e-03,2.391769347989623415e-03,-8.520358278618868006e-04,6.574309221061782997e-03,1.707355282976775069e-04,-3.352603321316421393e-03,-5.969283859751568257e-03,-5.011943272845480191e-03,-3.682793581456906239e-03,3.642582115177126195e-03,1.035467913841519151e-03,1.694010374563103107e-03,-1.423457202213134192e-03,2.629614688115413778e-03,-2.398016638161812124e-04,-3.211122202406730113e-03,7.806384436665436861e-03,-6.509772296444851780e-03,-1.810449623911308901e-03,-6.452482826012470064e-03,-3.338048962829665532e-03,3.334277303293119860e-03,1.003426526369338061e-03,7.187140034181277448e-03,4.331675384633321829e-03,-8.168816331560612917e-03,1.363777058000482500e-03,4.581977690902360595e-03,-5.674307314197411974e-03,-3.334382472645529803e-03,5.991991832932781238e-03,-7.081359096963224520e-03,5.771444732398480111e-03,-4.680550782043033965e-03,-1.048447544632787401e-02,2.569067558549460555e-03,-3.273604031962388736e-03,-3.651516194595896304e-03,-7.834203258540575754e-03,5.688252373275451759e-03,4.980764856967904129e-03,-1.527401139958821027e-02,-8.216658957147774181e-03,8.244048446565659799e-04,1.821991621406802769e-03,1.443950561703037866e-03,-5.135413173869657517e-03,7.111666020051246248e-03,-7.222917490698857988e-03,3.147332350737
639100e-03,3.995719897919105396e-03,1.723076399001103730e-04,-5.939840394584442428e-03,2.617506491461167248e-03,8.122706142749019573e-03,6.971790154306179224e-03,-1.076970559754253403e-04,-3.715012857508935996e-03,2.258486323688472380e-03,1.042625247214591395e-03,-1.273231303718483365e-02,-2.032015403041743072e-03,2.201944737269646325e-03,-2.443630553654595281e-03,4.066296539396713552e-03,-5.135210434408677728e-03,4.748715586552647305e-03,4.578261485295528237e-05,1.228200537850572711e-02,-1.865265375898720853e-03,2.453236826095358918e-03,-2.353648691124213266e-03,-8.067428054398262122e-04,-2.020859609236789773e-03,-1.547636097639590444e-03,1.129120590318200522e-02,-3.026582813833718615e-03,1.049098071237572526e-03,-7.472360474528118995e-03,2.584249215517221091e-03,-2.138174438127066375e-03,-1.334853577013504896e-03,5.433852766627415495e-03,8.933141543681442226e-04,-7.073844595654687172e-03,-5.577917782328365229e-03,5.222069049027175738e-03,9.191386461468475585e-03,-1.933203762060144680e-03,-1.931821208850469053e-03,-2.651946592313735897e-03,6.021334841799281037e-03,-4.586611352585390558e-03,-2.195418142024510404e-03,-3.077083231370712942e-03,-7.217317127575708681e-03,-7.934090407983629065e-03,3.072421914434007775e-03,-1.188402579246253601e-02,-7.650121479741938194e-03,-8.758016702202593359e-03,9.864128335356405625e-04,-6.786519667889135028e-03,-7.989865003170084495e-03,5.501681123870248713e-03,2.204918853423975796e-04,6.146487710266204533e-04,5.160435999209827401e-03,2.334858881300770628e-03,-3.781038042599986493e-03,-9.387443487350749256e-04,-6.264883759541621175e-03,-5.085484656556042964e-04,-3.537709745246858575e-03,-1.469119810492703401e-03,-4.373003305724211878e-03,-1.406089761363244059e-03,2.050638572366613333e-03,6.810270465809893474e-03,-6.919271543567128216e-03,7.025955873483575795e-03,-2.469475935722432753e-03,9.417302483101509109e-03,3.384418410209813748e-03,8.309052447474204323e-03,2.758397307050514244e-04,1.472489098692082667e-04,6.003129304071817482e-03,-2.801944635203315517e-03,-1.403149399973409013e-03,-8.630788941132097949e-03,3.978696905791548853e-03,1.697669270513409157e-03,-3.383232689617347508e-03,-1.102846458249097513e-03,-7.757315732145916680e-03,4.323411293152914364e-03,-4.257469526498923289e-03,2.839508470360732270e-03,2.111824301662392107e-03,3.944384461088741188e-04,7.327294552589318742e-03,-4.550654865822066283e-03,3.771857896369957847e-03,-8.643909788138932557e-03,3.599431411443333988e-03,2.944355663232096042e-03,-1.304388174389196952e-03,-2.582361375281424024e-03,3.178458251551889995e-04,-5.993114071173545263e-03,4.353522718920550122e-03,9.181387676729322808e-03,2.943268033229992295e-03,-5.504349551393664629e-03,-2.028652189136205975e-03,3.610751047849922960e-03,-6.736738694316474034e-03,-2.448078555208948060e-03,3.532514629553229913e-03,-7.471993713693128591e-03,9.330094093551183224e-04,-6.041473924951654660e-03,-2.471902077118243862e-03,9.852440154116506085e-04,-1.275354663957128888e-03,3.700788946707257721e-03,9.076227487007801353e-03,-1.389237927529906116e-02,-2.758093989377100735e-03,6.461610826129401660e-04,-2.137909332781872274e-03,-5.595418793160288756e-03,-4.382539291869287819e-03,1.690226784056758424e-05,-1.348466865935420145e-03,6.195061901700262171e-03,-2.917612328075410138e-03,-8.567406194114345758e-03,-3.004235967316673007e-03,-7.465480354068930458e-03,2.090463487323672605e-03,-2.251620136099309957e-03,2.020887622285969572e-03,5.131878452744952079e-03,1.219236300506551345e-03,-2.753990103076028179e-03,2.445567320342020758e-03,-5.863228600836149773
e-03,-5.711056918204024911e-03,-2.786053633436991802e-03,1.632893863982517752e-03,5.071940322151765809e-04,-9.598478902427433476e-04,1.026665161926450590e-02,-1.987719542228621149e-03,3.829271792951294447e-03,3.183581254723801945e-03,2.066725626474477488e-03,3.454602049142494087e-03,2.749399912899172516e-03,2.084347283330400657e-03,5.288618033265324661e-03,-6.348376708387779401e-04,-4.806255367481457319e-03,8.865222465618766493e-03,-7.264116568620537090e-03,1.423151689740414843e-03,-7.576279885395417572e-04,6.565279752333062421e-03,-1.525262911787730706e-03,8.034939692922474230e-03,-4.011950513092579079e-03,-4.368174742752323030e-03,8.352452130480000079e-04,-1.692499487463394172e-04,-9.296251707113905791e-03,-7.446331012652244313e-03,-1.551727823796834925e-03,-4.415907357176549787e-03,-2.842387826024765553e-03,-5.327141942024911066e-04,4.371449969692533426e-04,5.119218129021119341e-03,-7.232481567669626389e-03,8.472486375303863934e-03,-2.566289663155903698e-03,-2.219479324908993703e-03,5.030045226641291153e-03,3.543198495757206561e-03,1.822933183009541871e-03,5.714424912362338956e-05,9.954827211869196960e-03,5.184594646715280121e-03,-2.164379904648086172e-03,4.041642587129350150e-03,-5.089878691357149201e-04,1.750137526865287209e-03,-1.381243025471264388e-02,1.081014128022378916e-02,-8.696817165203585970e-03,-1.882010149564019956e-03,4.323773538798098524e-03,-5.709786823411253792e-03,1.091136962334079800e-02,-2.087771781031450490e-04,1.168723461515791366e-02,-7.093619891916408578e-03,1.613888468156461468e-03,-1.325648205102886298e-03,-1.798760993574311309e-03,-4.084438454269342759e-03,-4.917285026519275896e-03,2.598768017493567790e-03,-3.732847261718390958e-03,1.474387817685765024e-02,-1.061773079690602438e-02,-2.932976933065493692e-03,5.452426456820393813e-03,1.257103177607308284e-03,6.876016270818415856e-03,-1.427064405629318135e-03,-1.877025968027836757e-03,6.646875740936258967e-04,5.659365710131933694e-03,-4.071367664337224228e-04,2.654820232890838134e-03,2.628088344753043600e-03,8.364233062350462031e-03,8.950432825986159163e-03,-4.994892529337362913e-03,-2.407703624408201112e-03,3.113740761965161797e-03,2.835595057379805984e-03,5.352118136602401592e-03,-6.519243185071968254e-03,3.015527588035690938e-03,-4.344431843980457730e-03,3.032238753509465469e-04,-5.565849961795580128e-03,4.584798455802922487e-03,9.556780792930673341e-03,-6.217900898653087871e-03,-5.278162608660998643e-03,-5.111758709069541898e-03,-9.967111545424858776e-03,5.287502794767653079e-03,-3.760315569566184332e-03,1.914186554383261749e-03,-9.700959247073618827e-03,1.036117619594908827e-03,7.524138706023910869e-03,-2.863839866573742094e-04,-5.788211230049421924e-03,4.210773779586255046e-03,3.852249793665779229e-03,6.391167604018894478e-03,8.786384749732121819e-04,-4.452817001806745265e-03,3.535857329270100657e-03,2.055073032967373711e-04,2.331721558369401637e-03,-2.763891105141781223e-03,9.229496412221795121e-03,-9.080524219318118762e-03,3.851124861807205934e-03,-1.309193889061598115e-02,-1.051839327756494760e-02,-1.024365546027718003e-03,5.410545018025484211e-04 
-5.671715594651090438e-03,-2.224617238483701884e-03,5.214367238930252453e-03,-3.623508800852558100e-04,-2.354250358978222588e-03,-8.000453886369001885e-03,1.075119824334692335e-03,-5.754840992850212417e-03,6.616058245882552348e-03,6.777068925123157575e-03,5.556370930419814473e-04,3.295610356088581397e-03,-5.395158429110659924e-03,2.298394973774017195e-03,6.774901188363502587e-04,6.041163631019695265e-03,4.877012329576473656e-04,-1.713561192277160557e-04,3.806116960469738044e-03,-6.395244540300722581e-04,-2.948160637162729170e-04,-5.650686708514054100e-03,-7.592761773038910617e-03,3.629169714255181707e-03,4.148116120373911662e-03,-3.307484632669655672e-03,2.728657218899838144e-03,-5.025138063535463523e-03,-1.950079004701208297e-04,-7.694510194794470376e-03,-6.356454595366426395e-03,4.979464595120565093e-03,-3.646178312970761639e-03,-7.481494735655588521e-03,-1.631298559550411857e-03,-5.031738044554156380e-03,-2.606376862620175035e-03,1.491352887108976337e-02,1.927448783567375923e-03,6.974330885584407828e-03,-6.780224676345103681e-03,1.588688975646590854e-02,-1.162873743084210688e-02,-6.281072925153213977e-03,-1.110706876168820589e-03,6.509736023069851926e-03,2.265300453026401588e-03,-2.336446685791426954e-03,3.494552327594468424e-04,-1.343440040845147575e-03,1.566819918715085021e-03,1.730163300569106770e-03,2.128475701416891915e-03,1.981508048497563918e-03,6.245574903341271191e-04,2.383069372418713736e-03,1.362104384754633368e-03,5.995421586131504353e-03,9.480715567543490305e-03,5.347694242483234071e-03,4.985793333539389333e-03,-6.688182304656055573e-03,1.938011633607497749e-03,-6.443220758775078730e-03,-5.198332172091902313e-04,-3.135657843232432258e-03,5.080468976503925774e-03,-6.734033703350674957e-03,4.943583132560151749e-03,3.805685532858630541e-03,-5.573765913098088949e-03,7.867241552322781772e-03,7.926901359036831601e-03,2.031734153878738628e-03,-1.483477118133211085e-03,1.523142435097499811e-03,9.769073965514905519e-03,1.376391111259341830e-03,1.020793841901828508e-02,-1.226434879359173356e-03,-8.691706698661146729e-03,2.378304334107464381e-04,4.217124359562667013e-03,-4.275362843107572905e-03,4.595924209039351188e-04,2.492606672660119954e-03,6.835870474764035344e-03,4.468690410246596231e-03,2.334707094216972867e-03,-4.241240780768615846e-03,-5.158158993152846782e-03,-1.203218070638636689e-03,4.660566159118941648e-03,-4.703156358147746489e-04,-7.896779530591603678e-04,2.536700685318324464e-03,5.493793578492047955e-03,7.868672517327737455e-04,3.179503552978250186e-03,-3.661651856695018453e-03,-1.026372268092875595e-03,6.545493293013328329e-03,5.346319049200362085e-03,-1.563285475412247959e-03,-6.243057063678656739e-03,-3.942544842497582632e-04,2.324826479431855650e-03,1.604737426617877920e-03,-3.060655652845658409e-03,-1.683254866723517089e-03,-2.332004808464134549e-03,2.317330603079614717e-03,4.754245320234072592e-03,4.666530865845842403e-03,-1.383360210912698693e-03,-1.396251253838214166e-03,-3.990509945753166476e-03,-2.481624713767996079e-03,-2.719581454843453261e-03,-1.471741282509073795e-03,3.975087612592114600e-03,1.346107609738710076e-03,-2.331721460712031600e-03,-3.056434293926643241e-03,3.597898147322685283e-03,9.796296653061021403e-03,5.505704416774891662e-03,-4.096717007924854069e-03,2.832146249100567226e-03,2.024503225787932601e-03,-4.752177332911640753e-03,-2.224777278823272895e-03,4.544882602884826352e-03,-5.932988340919305090e-03,-2.734745753192178129e-03,-1.307801553453454219e-03,2.380536387170964930e-03,7.950684576356411778e-03,-3.892362999825254612e-03,6.677823808581
251699e-03,3.338848541206725865e-03,1.276638605981215979e-02,-2.149060591792330797e-03,4.616547962833195476e-03,1.249328589371332624e-03,-6.163968214105255798e-03,-3.757458574514709222e-03,-8.339394171330718683e-03,2.832725278701249925e-03,6.829485244998878546e-03,9.975817562934640413e-04,-9.677025152459318297e-04,-2.756919237755078248e-03,-2.721216201823244466e-04,6.323934684985379855e-03,1.193117100833891893e-02,3.898222116993920860e-03,2.123753109732495151e-03,-2.466821740027742328e-04,-2.024194801542021117e-03,-8.978786241687634201e-03,-4.625550726060167692e-03,-9.360906848333037933e-03,-5.373537302657897287e-03,-9.444304590239302466e-03,-1.713045388285289183e-03,-2.767011883322375524e-03,3.157377820759782461e-03,-6.887482037487186425e-03,3.496258584377234888e-03,3.137719632275964843e-03,-2.631550970244660084e-03,-2.650801182693776278e-03,3.107903763254085227e-04,-3.743387818837353755e-03,4.250466544237411701e-03,9.139603018691934769e-03,1.464458496290727127e-03,8.594882549311097996e-04,-4.873158441832936387e-03,4.944203970853217577e-03,-9.884992543095208126e-04,1.586154075556444340e-03,-3.207420006340626264e-04,-3.326932802114746911e-04,4.254191722010960616e-03,2.706527853439798671e-04,4.530305494200705628e-03,9.778517247789793049e-04,3.373882522835989703e-03,-1.114931142604459226e-02,9.003886978894575649e-03,8.020468313174591510e-04,1.887285462728895830e-03,2.694326664357376763e-03,-4.086344605668448121e-03,-8.175298138327891789e-03,9.591015981769845586e-03,-4.691214005149716565e-03,-1.100673988330037878e-02,3.972769333547895160e-03,6.560551589271899041e-03,3.437680852509212345e-03,1.089112405452199528e-03,-1.051183058797038397e-02,-9.477294820476975246e-03,-5.025894033753801338e-03,4.853357549000808339e-03,-1.214902999691231354e-02,2.368822982748720971e-03,-8.392333900446804010e-03,-6.309253291880629819e-03,9.349963538357328532e-04,9.311691317749016747e-03,4.845824799559127552e-03,-3.334570126660157865e-03,-4.380102683672734178e-03,-5.168834719934679335e-03,-8.590606379323107410e-04,4.727437386121326342e-04,6.976086623403619791e-03,-6.173837861654881722e-04,-4.441431721023326139e-03,7.283391675838292618e-03,-4.237648075049672315e-03,-3.117241206475797593e-03,9.367426815184730143e-04,-9.479213154032797807e-04,-2.803996227977557845e-03,-7.816477880045757411e-06,4.451330931358229125e-03,2.037913731002807035e-03,5.645017778235054962e-03,-1.566894314222723244e-03,2.017772652912293817e-03,-4.814959282500627766e-04,-7.951694112774134252e-03,-6.998295967687459757e-04,4.712735313252715448e-03,-2.636374862535415409e-03,-1.645290448021849644e-03,-7.604489430814923533e-03,2.984853465253001475e-03,-1.819098978418120290e-03,-9.337334939169713004e-04,-2.764466101768836451e-04,-5.092591519099702070e-03,1.127311992893669391e-03,-2.352660043595638292e-03,6.327890994510543439e-03,3.139473063030286983e-03,-9.463377949763510273e-04,-8.873907194123085984e-03,-1.353803801048651233e-03,-2.149301704732546787e-03,-1.866335284437408521e-03,5.922853973292436288e-04,3.764331459411626897e-03,1.953654477462357826e-03,6.468044810764886701e-03,6.407740235917206220e-03,4.086700174171281966e-03,-7.578139579898302350e-03,8.167899259937369538e-03,3.847085985122176630e-03,6.897521525381288161e-03,5.689307865182676588e-03,5.422457619621635569e-03,-3.809927809549877500e-03,-1.076330178498368989e-03,-3.828834131611188671e-03,1.047275437304908893e-02,8.057106921079165707e-03,-3.723519844558681024e-03,1.043074856164438562e-02,3.440130586326296119e-03,8.655756420808954216e-03,1.093565420484607711e-02,-6.463785319752205286e-03,
-8.742430967696038571e-03,3.530680323660764439e-03,2.636448157493497414e-03,-1.203245728058679499e-02,5.171870058796720383e-03,-4.316836322873504085e-03,-7.609122619400271806e-03,-3.936422603296031372e-03,-1.405482604213511254e-03,-7.551647725349473934e-03,-5.423743796499249335e-03,2.538418703674878967e-03,-6.953221410361203232e-03,1.120032504990481521e-02,9.156229536040717126e-03,4.551741731552812892e-03,3.952696779984563383e-03,1.030237855043277667e-02,1.055334285956771416e-02,-6.031106360364801855e-03,1.083320517543148991e-03,4.559182457189268488e-04,-1.085122117130884849e-03,2.547562013881977342e-03,4.900612442636834674e-04,3.664107960346881226e-03,1.739019058702269676e-03,-1.344398664593966406e-04,1.134609626797597297e-02,6.513566930786945930e-03,6.160501005068736748e-03,5.198112134300523351e-03,-7.261599570756401868e-03,1.110548287160379073e-03,4.057880699333891832e-03,6.696257214797171967e-03,-1.161404407163130377e-02,3.386394569521472987e-03,2.609499601212254393e-03,-2.991265314579643047e-03,1.872032517822941680e-03,9.163425602540921019e-03,-8.745478119612273534e-04,1.070316349030908665e-02,6.944681005842397170e-03,-2.081141796024576877e-03,4.819325935470199739e-03,2.790599871881472346e-03,-9.761995583331691635e-04,6.964101026439560929e-03,5.906030788161938105e-03,-4.070974873731356326e-05,2.882605004483867160e-03,8.747174172961105429e-03,5.110798633134572277e-03,1.947038707205610712e-05,-4.383782830612582317e-03,-2.480056320849383968e-03,-1.485904176376389926e-02,-3.573660725946777939e-04,2.641107830489789095e-03,3.151468512370883719e-03,2.341445700668178150e-03,-1.032057736187092650e-02,-5.422862023960895415e-03,-2.338135087845019319e-03,-5.537566278206842904e-05,7.989197661437982476e-03,4.145155979085759709e-03,7.535869940976916773e-03,-3.857129330812924736e-03,-3.239609095179985052e-03,-1.472144596202681292e-03,-6.165447985400083694e-03,8.285312603720529703e-03,-8.174569296479717098e-04,2.270777387858382429e-03,-1.629935002687305564e-03,-2.473785746105727594e-03,-3.744999320495594197e-03,6.799640050210181523e-03,1.806606840125916415e-03,-7.967045633161014143e-04,-7.008394479548464562e-03,8.749446926170459035e-03,1.462467286742345860e-03,-7.971891671806059614e-04,7.845287114291599889e-03,-1.352251685064408690e-03,-8.867416707222994937e-03,2.777240977113629855e-03,1.178192767495567884e-04,-2.770247416227784431e-04,2.967595058028377631e-03,2.777650395653672149e-03,3.917558536187213357e-03,4.081132335742190353e-04,7.483142423170889524e-04,-3.854637181720556305e-03,-2.267897653880067334e-03,-5.230583817205628275e-03,3.902541397982350023e-03,1.008594355511324399e-03,1.424121669400363896e-03,1.678971888529482651e-03,-8.647316482862989553e-03,-1.088620261781122383e-03,1.092189579224037361e-03,9.104139307453767982e-03,-1.387186725053064350e-03,1.997233824029193139e-03,7.488501082837682732e-03,1.688684544799954907e-03,3.209527434447632596e-03,-8.497462381370658041e-03,2.263094795610853024e-03,-1.119428131389242588e-03,-2.181447657241580591e-03,-3.662341846105056191e-03,1.089215909711350669e-03,-9.057886475551408384e-03 
-8.243078898602557852e-03,-8.722116645882647956e-03,-9.575369674766294513e-04,2.422593746887374225e-03,4.328276822801502104e-03,-6.477434645939749401e-03,3.412709472614926676e-03,1.165183496371505287e-03,5.336124386974168236e-03,-4.050778647410812131e-03,4.897682046776920218e-04,-6.469227894775953988e-03,5.060566109417749883e-03,7.806522994028334238e-03,-2.457767795314410274e-03,9.447487520915266723e-04,1.217254446950766190e-03,2.312609336180059623e-04,1.271741413469800559e-03,1.347760395512364109e-03,-7.224141186588064597e-06,5.828090945264591607e-03,-2.837392735329659788e-03,1.955516112516192135e-03,2.542053664070996390e-03,-9.308290247443392479e-03,-3.637219959652635047e-04,-1.958323164764560163e-03,9.720676432344129607e-04,-9.444226520562800829e-05,-2.346618495076750396e-03,2.262899919904930082e-03,6.226917764935950735e-03,5.272951844125370845e-03,-5.931649539881736315e-03,1.325183872132901231e-03,6.054255488636021976e-03,9.761093076131756698e-04,-4.117310651759553339e-03,5.723247185535954538e-03,-1.953541748941320410e-04,-4.034368651013839173e-03,-2.950773240901807468e-03,2.180383161915987709e-03,6.033627495621957278e-03,3.177061713947455530e-03,-8.040414096696632865e-05,6.392111678455664713e-03,-8.523780288062757574e-03,-7.841016995662428504e-03,-4.673029050120130687e-03,-1.610481985355854614e-02,1.133685120758779266e-02,-4.844851673282577972e-03,-3.445803265578262014e-05,-7.147995450290767216e-03,-1.290255110987030248e-03,-4.571440960977542732e-03,9.315014439710216088e-03,-1.284311495186403820e-03,-7.700616877392546975e-06,1.800648338627155141e-03,4.433673797185730577e-03,-1.219925023214675670e-02,3.111628421482282199e-03,4.080227457880726896e-03,-4.816758140180578927e-03,3.159317335303299477e-03,4.940415996195077035e-03,5.575303718129022597e-03,-1.012194101926440319e-02,-3.084896113428832465e-03,-4.216628848681101133e-03,-1.644562904015981234e-03,5.955241261212312204e-03,6.872948712734239284e-04,-1.957523376820459893e-03,4.424488844046924226e-03,-1.254663924665914195e-04,4.780272683074796719e-03,7.518345700599240143e-04,9.443226908955512297e-03,-1.333763517666746020e-03,-3.230152850424921479e-03,-5.309235048395354387e-03,4.763687007754638901e-03,6.157098649492321642e-03,-3.600247530475830251e-04,7.357061423806012662e-03,5.431112034772792377e-03,7.137501179962633426e-03,-8.039011086539432291e-03,-2.487933001431200542e-03,3.688873265418676686e-03,7.975486026786975796e-03,6.835493892220538548e-03,-1.748451338249972778e-03,6.707520641691178294e-03,-4.247658193799127430e-03,-8.771674496805879559e-03,-5.952649078857145601e-03,5.644384867639972575e-03,7.984254570720201657e-03,-1.313674096901186862e-03,3.747163946719542140e-03,4.191729927958366339e-03,-5.983500111667370116e-04,2.638153178620982874e-03,-3.931366239053676534e-03,3.977439926672986757e-03,3.650385716826937566e-03,-2.458987095557323491e-03,-2.588927178336904687e-03,6.863264872966720609e-03,8.361432939187016591e-03,-3.788280021762669130e-03,-6.981365938872854254e-03,-5.958297734474788700e-03,-7.700610312468806132e-03,1.404508162871772985e-03,6.043413390483230035e-03,2.917220080510312275e-03,-1.061913407338967510e-03,4.670647315884210275e-03,6.104864559196089384e-03,-1.420472855192071119e-03,6.624879124150519910e-03,4.985930388300310705e-03,5.549316330658984019e-03,-5.142706115294540768e-03,-5.361489968300166276e-03,2.718900334773560225e-03,2.364902329923932681e-03,-9.333817190487151555e-03,2.674100708435561951e-03,-2.572157666793299224e-04,9.066772198272331782e-04,1.109306085506566678e-03,1.663330479652830015e-03,-2.6246380268542
99051e-03,-2.572096152761366768e-03,-1.609421835027571596e-03,2.960418962293391923e-03,-3.753583605843075909e-04,2.877416030944171532e-03,6.960544972633772934e-03,-1.340945015778540871e-04,3.893077181324189600e-03,1.549603297070435400e-03,4.921757390851210466e-03,-3.063804525479786253e-03,2.066478003475365460e-03,6.960087882008024994e-03,-9.361232035783147859e-03,-7.181625311978507339e-04,-2.661722085473994682e-03,-7.595388583552221962e-03,8.971330072546010448e-03,-6.529314074166685418e-03,3.591966005852629401e-03,-1.923224662699499730e-03,-3.009714920076961807e-03,-3.425545238042657092e-03,-3.578850481949400700e-03,1.450312489650500527e-03,-4.223298136498730183e-03,-3.739358792090427021e-03,-9.691822852790146769e-04,2.056295766672029766e-03,-7.990032892864427569e-03,7.530386268409646963e-03,7.693356286976945832e-03,3.780334696179033470e-03,4.431915745750759569e-03,6.081725795079661984e-06,1.275052211321979273e-03,-9.056987956596719656e-04,2.206890308031276247e-03,1.355718563981471447e-03,2.710295657494752371e-03,-4.903695990634257366e-03,1.471356963347869411e-03,5.659304300234214644e-03,-7.424996146278155934e-03,-4.502528533401128387e-03,-1.224927190474970132e-02,-2.681032142542638143e-03,-2.519934863407593401e-03,5.359347046514920804e-03,-2.446778036574750320e-03,-3.000791916689082763e-03,-8.735135558445367848e-03,7.908256349662507942e-03,1.147176524989726356e-03,3.807894027653918687e-03,-6.861002645625395753e-03,-2.660847179173146844e-03,5.166649572079006283e-03,6.674783669307154943e-03,-5.189891370888631734e-03,8.618051182495805847e-04,-5.724135227862233409e-03,-2.049448153045390318e-03,-7.660609078281374737e-04,-1.829870374616686347e-03,-4.646876938388564207e-03,-6.309171967274751209e-05,4.526687240267120486e-04,-7.071504052430403926e-03,4.427475846174525574e-03,1.058222622047963642e-03,6.704978786938439218e-03,7.354791522086058910e-04,1.797415965443467970e-04,4.073128457849004014e-03,-3.264937555949726233e-03,3.227182874942260016e-03,-9.494603820148126393e-04,-1.526107883352963764e-03,-2.336769563642723032e-03,-1.933489679381224496e-03,1.254030506358576541e-03,6.526089509860843324e-03,9.958067793161904779e-04,7.545207225157620802e-03,-9.500881392109767482e-03,1.015812775713123561e-03,8.522572623824584831e-03,7.181794915193696026e-03,8.588097474468946169e-03,3.825140180657031746e-03,6.494745580334126468e-05,-4.905465331579899300e-03,-2.246293617906442493e-03,7.585073257239721613e-04,-1.332124623693065903e-04,7.530883005987638705e-03,6.952028190718204104e-03,-4.876542873464174456e-03,-3.659942431439887003e-05,1.128016543079945512e-03,1.371258965691090140e-02,-3.667223012836175969e-03,-4.965095577697162775e-03,1.511685829741956852e-02,-5.748472563459178751e-03,1.476376661243125957e-03,5.995895404091794965e-03,5.874074603050294125e-04,2.392883784698168585e-03,-1.455105784687986186e-03,2.941711270655902270e-03,5.250783165396966275e-03,-1.363526527024406859e-03,3.818132721472914087e-03,-3.452409070631622546e-03,-4.613585252084762695e-03,-1.220418054515387670e-03,-5.229758687278115645e-03,-5.905044381628103169e-03,3.773851514443877196e-03,3.311009966454823896e-03,1.038215192645149804e-02,1.726491285718057965e-04,-9.323302446908788080e-03,-5.439492589045433722e-03,-7.532857520397265652e-03,2.018635347217585373e-03,1.050892063284661468e-03,-1.052450870656448240e-02,6.613889620142995515e-03,-7.959861103856391519e-04,-8.176324155164173615e-03,7.040820408339651740e-03,9.075243307283981445e-03,-3.361795491148731815e-03,-3.528359411574687535e-03,-8.698299292844546837e-03,1.141467837808852852e-03,3.
682438651905527074e-03,-1.860293839653535345e-04,-1.958070382492202964e-04,6.400637026812269238e-03,-3.247530049290993370e-03,-5.807959066553543482e-05,5.294640799704758774e-03,-3.818709975762187188e-03,5.833524754591351731e-03,-2.927411550423265549e-03,-3.194391780326855633e-03,-4.599313346098729701e-04,-1.109879825282889584e-03,1.899744205763337227e-03,-4.788019766865626685e-03,1.498352063325725193e-03,-8.460973418281718395e-03,-5.232100021952557772e-04,1.977807453117300888e-03,6.186388738718405153e-03,1.364397490548279954e-02,4.687610984097534406e-03,6.439395992563755680e-03,-1.075093445472449999e-04,-7.961564574795396143e-03,1.471403231860974985e-02,-1.476037494088378640e-03,-1.263451836195393977e-04,-1.610059613106638865e-03,-6.703277967849726035e-04,2.522685858697302271e-03,2.088301244467064086e-03,5.764379810200144201e-04,3.961594301425347929e-03,-6.689090781087511477e-03,5.401791270373184316e-03,-2.324580997168446909e-03,-8.521801703583966117e-03,4.718402473083947030e-03,-3.229927221169610896e-03,-2.878953652797220861e-03,2.318744261260357285e-03,2.238639906215444432e-03,-6.921863440142625142e-04,-7.411376397924713914e-03,8.542171073207599150e-03,3.277931935761684706e-03,1.424267863720409671e-02,-4.182381417079121988e-05,-4.416158455936913113e-03,1.248026765516784030e-03,7.400580969963443251e-03,1.422023153447267386e-03,-3.558646802277734642e-03,-1.938234443313582075e-03,5.636436904630853610e-03,-1.060174747427861238e-03,4.799748036659619950e-04,2.249212977626669722e-03,-2.124177564585863400e-03,2.558583900967820873e-03,-6.490793655077715073e-04,-3.733905864905795458e-03,-1.147470879385545329e-03,2.166620672742926215e-03,6.784210428971644601e-04,-9.565596399592347920e-03,-2.442942143355200570e-03,5.745466959196792595e-03,9.053581205878481796e-04,3.468639272875259638e-03,4.923622117781209159e-03,3.210106643809927576e-03,-1.579657230555852124e-03,5.450772706198448239e-03,-1.111866709575259427e-03,4.988108915992119606e-03,-3.961993059472770917e-03,-1.658812154432399120e-03,5.710071712309032661e-04,4.839714009018676599e-03,-2.150073194267024951e-03,2.184386985959791642e-03,-4.927301908950491513e-03,-6.188566618518170220e-03,-1.164450849018296499e-02,1.096649352356448811e-03,-2.518349469255152986e-03,1.035915228800620043e-03,5.343974109011333630e-04,3.548568391170708446e-03,8.570652538607882037e-03,4.751072813696854209e-04,-2.513730023450459500e-03,-1.079726881525863843e-03,9.818762990173765873e-03,6.844535141874500545e-03,2.164259596276890251e-03,2.144547161361117730e-03,-3.962576230743978384e-03,1.018106748820965914e-03,-3.899374645102410449e-03,2.353165195503736842e-03,8.296475664438538247e-03,8.895110267557786166e-03,8.506173118675217776e-03,-4.812572981977168186e-03,8.723799024592629762e-04,-8.402527359379026450e-03,-2.075109777534329351e-03,-3.468284487134048674e-03,-3.155171177044777293e-03,1.391001321784357840e-03,4.546796036193668372e-03,-5.972849388863360476e-03,-5.767446742933707467e-03,-3.733157345992662007e-03,-4.051411955707472784e-03,6.380925588554776061e-03,1.090994241174303263e-02,-1.278336181259697281e-03 
-6.313265571283814721e-03,-4.019027427053747936e-03,-1.646320630861589042e-03,-1.122160300845410213e-02,-2.672566113980180246e-03,5.239642739083056125e-03,8.420479537978640613e-03,4.493820020674598251e-03,-1.203942369532525185e-03,-3.483473832162354168e-03,2.630297085225013001e-03,2.118990666734285686e-03,6.624996993450445355e-03,-2.763188233303314791e-03,-3.966348129193712768e-03,5.363849917235390466e-03,-2.655875416576423780e-03,-5.042864239742290457e-03,-1.794556418263341165e-03,-2.396632245795200051e-04,4.221110418187894175e-03,5.644510661306448990e-03,-5.213169371260465215e-03,6.097745438843584637e-03,-2.256822768306863351e-03,-1.169671052501228270e-02,-2.528323922115248163e-03,-1.667728264027873832e-03,8.684706083845419205e-05,2.554655249718994041e-03,-3.224469220338583957e-03,-1.387493901692564771e-02,1.628669078880640511e-02,1.005160314263389178e-02,2.281611512262803152e-03,-4.804598244814952367e-03,-6.703890682648232756e-03,-3.523744629696109586e-03,-5.404125469797859543e-03,5.620599987893971049e-03,-3.577207255998778816e-03,2.487627090611995839e-03,-4.484927867769907678e-03,-7.401786381644779407e-04,-1.646288751141443190e-03,1.356688033648199425e-05,1.168569105516663320e-02,-3.897914742824639192e-03,7.054682478983002186e-03,1.105611891032802496e-04,4.687797664300144898e-03,-8.135101899445114360e-03,1.551745754181946756e-04,-1.141446069100217342e-02,7.251167615055485044e-03,3.533622620929390522e-03,7.819048527174242078e-03,5.766684337313314652e-03,3.794061562903486431e-05,-3.877760608310733010e-03,2.228750704708035244e-03,-4.751256749020649033e-03,-7.231098958017374671e-04,-1.280102747724242915e-02,-2.593634841542461628e-03,-7.726056381626750409e-03,1.176692481298682999e-03,-2.033208125795894858e-03,-4.242326853880176891e-04,2.372980924816768099e-03,-5.068810907556156077e-03,-2.438809798508792624e-03,2.191292339583538872e-03,-8.895855674995773940e-03,5.246522189064563475e-03,7.478280803797691945e-03,-4.455320117367964875e-04,1.726782690727527532e-03,-3.243401124098882569e-03,-9.078061125046136756e-03,-1.048803353507296631e-03,3.278974870321041402e-03,6.183262128527322599e-03,-4.864843925762484672e-03,5.705348855229794812e-03,-4.250052566834828897e-03,-9.675788769230262223e-03,-4.039557064869586309e-03,-1.557132975767966778e-03,-8.961536812778820790e-03,3.931923105164952630e-03,3.619540765386244485e-03,7.405651899420666491e-03,7.908566502966812850e-03,-7.151480092536427295e-03,1.851861611296792593e-04,8.865291281117588379e-03,3.136639179235164564e-03,-9.718627667158983088e-03,2.537423687006741448e-03,-1.635984163383302076e-02,2.361530608666604237e-03,-2.214246348146223502e-04,-6.109468262702645071e-03,-5.085040066328661186e-03,7.782195166104529868e-03,3.242845853114417927e-03,-9.076476438143145017e-04,1.115579531375898367e-03,-2.984680971228505144e-03,6.279093309985356308e-03,-5.815500480179086953e-03,-2.212523581913695926e-04,-9.082834607926093008e-05,5.741611011229750400e-03,1.168783090764908454e-03,-2.219296957892112089e-04,4.163084352157263475e-03,9.423871621551905761e-03,-4.030192924710732615e-03,1.170881597513911748e-03,-7.103436777428504684e-03,-5.234654345551949944e-03,-1.368791978119125253e-03,-1.140725933222434645e-02,-7.958813069122097281e-03,-2.949493397225055880e-03,2.893968766963670326e-03,2.950458645233196503e-04,2.619952368489441262e-03,-3.327798389560783238e-03,7.269407895663789095e-03,-6.718585054410066738e-03,2.232896437657087927e-03,4.911628102360019246e-03,1.145469250825170156e-04,9.349877610571068892e-03,-2.546260767623608184e-03,-3.179276907326258374e-03,-2.26
[raw numeric data: several thousand comma-separated floating-point values in scientific notation (magnitudes roughly 1e-5 to 1e-2), the interior of a dataset file embedded in the mlpack-2.2.5 source archive; the span begins and ends mid-number at the extraction boundaries, and the values are omitted here as they carry no editorial content]
9474e-03,5.876917756644792559e-03,6.318463965459706830e-04,-3.006931157026195879e-03,1.321702454064268509e-03,9.070214969753393960e-03,-1.936710553116778631e-03,-3.484924931517010584e-03,1.684751956290255490e-03,-1.170019466640971791e-02,9.609929980172324726e-04,5.251843094716632937e-03,6.788887260450585971e-03,-1.513213031747247641e-03,-1.930967421439771332e-03,9.449763604135567874e-03,-6.017002244061594313e-03,3.778243492598688542e-03,-6.554159365152484144e-03,-1.441241762094740185e-04,6.890375456549891441e-03,-2.422975719784946878e-03,-1.174536963456934355e-03,1.821176923088305534e-03,-9.967383154557843752e-03,9.240895681828662162e-04,-4.852164969794805181e-03,1.386561571841062697e-03,7.161370006604409963e-03,2.323896123370271672e-03,4.970313380710836629e-03,-1.215291949286982859e-03,2.599756785886390628e-03,7.719073746843467185e-03,4.875175358799398867e-03,3.135581252268708659e-03,1.943971110135072136e-03,-2.428134968042703484e-03,3.603095773700022947e-04,-4.251354703012415366e-03,-3.621981907460308965e-03,-4.588064583440991905e-03,3.890447886651254589e-03,-1.467467169520441164e-03,-2.337076387993977655e-03,-4.871733156880022481e-03,1.973478061781522470e-03,-5.394376838433746248e-03,2.385655616429663804e-03,1.079821236013813260e-02,2.463759234916083809e-03,2.512944774926191387e-03,-3.353569274965733163e-03,-1.090402894899619203e-02,9.797884327608223295e-03,-3.541397767179214201e-03,1.605192030978065893e-03,2.597334859654023501e-03,-7.961573259671506520e-04,-4.935014280421061959e-03,3.394976812134125645e-03,3.550913306910307224e-03,-1.851984729162017319e-03,9.059950208466551955e-03,-2.653900919363769845e-03,-6.629005589514185177e-03,-2.960167129537191830e-03,-7.650920117859025781e-03,6.580166907633225662e-03,-1.436912876155455520e-03,-6.054405010225978601e-03,5.245043213773823744e-03,4.547374423636765285e-03,-6.193855256447087371e-03,3.853016328220302406e-03,-2.728731946362230199e-03,1.801296409929731409e-03,1.315364051480240716e-03,4.562275906226551884e-04,6.400822049694010647e-03,-2.465623615695751881e-03,-8.175852147167513365e-03,5.789452787935042352e-03,-5.464394723134373584e-03,-8.121145724869214752e-03,5.424711595369159818e-03,1.432834529326897652e-03,-9.593490158893304420e-03,8.126402952723129139e-03,9.637772011905685818e-03,-3.104297789689183188e-03,-5.612324918466471413e-03,8.050692925679442087e-04,-3.875647626040520065e-03,1.363066755952544503e-03,-5.566082834632607793e-03,-1.211816121855425815e-03,-5.508042216792038634e-03,1.110473871003925483e-04,-6.298339909667173206e-04,6.099527996158302968e-03,5.959698266937807180e-03,-1.827632568733715112e-03,1.301138561866753418e-02,7.271532680154039682e-03,5.824319948675097357e-03,-2.753395863477505596e-04,2.680982041643561586e-03,1.924431396168719893e-03,1.572040096652266511e-03,4.621469676023592585e-03,-1.069206784028699014e-03,3.735720262900644388e-04,-3.753271488072518463e-03,2.973522410350362775e-03,2.548936027166554940e-03,2.968331246473938315e-03,-1.185773576746611399e-03,-1.354055763794205459e-03,5.043752905142426593e-03,-5.452584010305519362e-04,1.339684105436513093e-04,-2.531078030280616963e-03,-2.982576729620583372e-03,-9.314791771511487725e-03,-1.793472376030696763e-03,2.887509232001274004e-03,-1.580466887959973551e-04,1.126809672247028674e-02,-2.253940908565621459e-03,8.486864810212245598e-04,-2.727022532079645487e-03,-6.704225539703328324e-03,-4.544174752291687928e-03,5.089452150729685195e-03,1.516592374063996124e-03,4.420810028231386690e-03,4.964741468404386404e-03,-8.800487305137567190e-03,-2.925952121158202288e-03,8.304
470824970785689e-03,-3.382202518130275615e-03,-2.493151349876065927e-03,7.975133199399814937e-03,3.971691274928326049e-03,2.562605226061991960e-03,-7.253030364902546268e-03,-2.456949642250359604e-03,2.215316328565600668e-03,-1.065284792525023234e-02,-7.322477974140096692e-03,2.066586097291218081e-03,-2.981319314718601943e-03,-8.876961589043273412e-03,-4.421011583219240128e-03,-4.991911647248918876e-04,-3.206128538223238043e-03,-3.467845192598923137e-03,-1.147484100055100971e-03,6.745240144123367974e-04,1.013901886292607659e-03,3.279754102530061440e-03,2.311532164179660141e-03,-1.051913198569574869e-03,7.851518043451788864e-04,6.302694478450185898e-03,-9.286693988576242079e-03,-1.368348581763468262e-03,-1.095149817183140939e-03,1.034820130378414976e-03,-1.072489590930618495e-03,4.387800576008808761e-04,-1.139600314194224098e-02,-6.455676313416675244e-04,3.483211817629151721e-04,1.187474347048283118e-03,8.021422864828747085e-03,-6.914756327468430178e-03,-2.578526682276476890e-03,-7.069365747346891167e-03,1.539494947292404216e-03,-2.500007777414107318e-03,-1.565159842215882099e-04,-8.907133544553523389e-04,-2.373452372459164492e-03,-8.424437938221226302e-03,-9.028661051375754670e-03,-1.979150612623315625e-03,-3.755724619234470339e-03,5.256901710234130996e-05,6.271462113609170032e-03,3.489589409087009899e-03,6.807201460726515886e-04,4.036155176643368807e-03,-4.913576628652515038e-04,1.383999335234990261e-05,2.714888808467571038e-03,-1.365358316045047881e-03,-2.018619404643729257e-03,-4.017079243074562003e-03,6.662549586945045767e-03,-2.754315400068255669e-03,1.076351655123777666e-02,-3.124619371424954949e-03,3.426346610807853767e-03,4.821319475691650258e-03,-3.068041294720233979e-03,-2.018104125485558850e-04,-7.230910648932542149e-04,-1.370240672993348723e-02,4.603976191967116757e-03,3.407204241970651030e-03,-6.798262876234792418e-03,2.862247465977162835e-04,-7.975269608826289838e-03,6.452219998993610414e-03,-4.454088354164837142e-04,3.070132418591761236e-03,-1.987671189479469323e-03,-2.637175958532835852e-03,4.814835251227878726e-03,-7.710059046651267928e-03,5.419068949371339330e-03,-1.874543965613293534e-03,4.956694715682436261e-03,-1.042521827999374866e-02,8.621517865543696160e-04,-1.046369239039705683e-03,-3.880091738764765083e-03,5.892870786913840218e-03,-2.765249028257850926e-03,-6.414556393419651968e-04,-2.331539141314691380e-03,-2.610914274889668089e-03,3.762058078204722628e-03,-5.012003470282245556e-04,-3.780896981854281642e-03,1.994433899819931177e-04,6.635382479696431664e-04,7.695439536250071356e-03,3.103321267342036994e-03,-1.030114340350879280e-02,5.758849854178466586e-03,-3.049660245987782705e-03,-4.211877645665622685e-03,5.498698366417936571e-03,4.772787609825612331e-04,-9.783973342649692970e-03,6.681021805349172428e-03,-1.490581669960560707e-03,2.364395050302830082e-03,2.008507076537832149e-03,-4.558367529332298280e-03,-3.460545101664790069e-03,-5.192631413358675917e-03,3.076749226152645372e-03,2.049217481927159516e-03,-8.098600981648725050e-03,1.790184172115356041e-03,3.008068629325481511e-03,-3.801361853600969051e-03 
2.819191923864684379e-03,5.172586196104090282e-03,-9.788860684162635321e-04,-5.305256774727548988e-03,-6.249876806443632922e-03,-8.169476643663636448e-03,-1.037699513566700885e-02,1.605758920791411099e-03,-3.122111889853907470e-03,5.266866295339824033e-03,3.970546298691050445e-04,4.052722035794095040e-03,1.043467605792135731e-02,-7.862762288641106379e-03,5.782278920809658627e-03,7.851783860606084860e-04,5.195401552525256983e-03,-1.550445714832583005e-03,5.485612836293959384e-03,3.195465056458389810e-03,-4.918990399345748131e-03,-1.602754170094533581e-03,9.811838876862667355e-03,5.290387399392382575e-03,6.549526822444126650e-03,-1.774853528679634640e-03,-1.578074521983431969e-02,-7.482162292049604653e-03,1.297935636317705706e-02,-9.598713993943941078e-03,3.022479749738636416e-03,7.815176540164519084e-03,2.975593872872578372e-03,1.822204601587459330e-03,-6.455476892801847132e-03,5.169999927321576941e-03,1.563110181354548066e-03,3.819215024165963437e-03,-3.513081040509212234e-03,1.310467692815436803e-03,6.153923359170731079e-04,-4.847160836712003382e-04,4.034669648884955592e-03,-3.698610384050309203e-04,-2.108429175893303167e-03,-2.879205919419138828e-04,3.591305359100654526e-03,-1.306146650607911512e-03,1.898446935759124949e-03,5.501821046195349250e-03,8.491319274772993198e-03,-2.036694834452984926e-03,-9.870784578443066951e-04,-9.175058451280627926e-04,-5.771560873234719141e-03,2.063964813255159559e-03,-6.837764690531002686e-04,2.960977296351373616e-03,2.322932678035799156e-03,-9.131225212860617779e-03,-1.841585197734206363e-03,4.726338225461319820e-03,4.011100387991448947e-03,2.369131132590442980e-03,2.583847130626047532e-03,-6.427494812933288136e-03,-1.575504422016161523e-03,-5.989148281701584496e-03,2.263796662179586548e-03,1.073802959396120971e-02,2.129786773113464139e-03,2.193032380027071584e-03,2.914273358518760463e-03,-4.994752135393535665e-03,7.675058164622633895e-04,2.212329066447020921e-03,1.187939352874696659e-02,5.041446430999903510e-04,-1.594315442439254733e-03,-5.946606982737605383e-04,5.108250951449633433e-03,2.377768432927514740e-03,4.428292173525094806e-03,5.853734555448843364e-03,2.600409908459001742e-03,1.460490439822566874e-03,-8.057895827471024675e-03,4.980573417245832794e-03,-2.002851763919130147e-03,6.854589348648622467e-03,1.088526793963478763e-03,1.943632720180948773e-03,1.590616656258882732e-03,-1.183531659398531256e-02,3.313882833655828596e-03,-7.138813481224129051e-03,-3.511218265199168593e-03,9.776992130458650801e-04,-7.198856439869232189e-03,-3.254271876828688202e-03,-9.938644513967380759e-03,-6.933081242559100202e-03,7.901980530252001450e-03,-2.001148693861310758e-03,1.565186032500600613e-03,-1.534965625826400148e-04,-4.133839875701960861e-03,6.975550223367120439e-03,1.143753429445465691e-03,-4.892485268572773632e-03,8.896599711462657711e-03,-5.568938388967928886e-04,1.519472198720831901e-03,6.827812185754011942e-04,-1.082987207122394942e-02,3.005835266985824612e-03,1.910563776783284393e-04,3.970746336549038462e-03,-5.126342342892024548e-03,-2.220413405373173037e-03,7.074230097860698639e-03,-1.520531289173846856e-03,-4.476801463064865837e-03,3.614356633352390491e-03,3.210932975104846695e-04,4.380781242677139926e-03,4.266116637346164912e-03,-2.191061237283604014e-03,-9.582267972245158512e-04,-2.299107671261200157e-03,-8.758534996891736155e-04,1.566291206146133838e-03,-3.225672319131247211e-04,-5.680018150332793309e-03,-1.064814764470657323e-04,-4.740815744517019870e-03,-7.822321638777007119e-03,1.149929216620521055e-02,2.804068376927460002e-03,-2.184000398375626
501e-03,1.960919253784343570e-03,1.430664262424799938e-04,-1.670972447499353931e-03,-1.039009731973021024e-02,-8.996342836680014821e-04,-4.447878316641095016e-03,-5.741309868878084061e-04,-6.630733239314436957e-03,5.336963890173066459e-03,-7.925792308531931854e-04,3.759189972167918236e-04,-2.293928621109064554e-03,-1.190299831266067830e-03,-3.367687368452794601e-03,5.785808405907073501e-03,-8.886442124792675364e-04,7.533953250907279679e-03,-6.799652551310366930e-03,-4.219958912121356959e-03,-8.202381634926564358e-03,-1.048579341443060028e-03,2.838379151009780017e-03,7.662158685291519137e-04,-9.140173334005081973e-03,-5.851934675286187037e-04,5.634134610382911159e-04,-2.164147462982296544e-03,-9.685213670743959098e-04,5.054825292196889717e-03,1.946677749491108672e-03,5.630732378019344397e-03,-5.261950596229646865e-03,-2.757790435342932257e-03,4.919636072210187283e-03,-2.686580221463364618e-03,6.977462910553478568e-04,-5.727895383111620761e-03,2.264364971498473728e-03,-2.538832415972479099e-03,3.912829869288960739e-03,-9.444724535646618901e-03,-3.180457616035854088e-03,4.602194632654128294e-03,6.927689000413675934e-03,-5.607432162529099422e-03,2.147543811837494387e-03,4.842443403629238200e-04,-3.345061302305855563e-03,6.690581139848029123e-03,5.025251700751393220e-03,-7.075745362259119875e-03,1.306826899405174550e-03,4.160687063250968418e-03,8.808656281627826740e-03,-3.761552464471568016e-03,1.474527381777033380e-03,-1.224185935174520535e-03,4.711881086687539320e-03,-5.330997029599208147e-03,-5.794238681731388521e-03,4.373498083781412332e-03,7.756458636246105209e-03,8.908809649891612865e-05,6.437744776021318838e-03,-7.806823913917466072e-03,-2.402476871505076616e-03,2.504045322405991109e-03,5.843402843545520906e-03,9.300177476638422924e-04,6.762769426731526055e-04,-3.026876100757689802e-04,-5.957469616718649415e-04,8.913742173621728646e-03,4.526372171350432835e-03,5.519836627735589391e-03,-9.414342810684667856e-03,-4.025064592389904564e-03,-4.702577851391669724e-04,-1.796440938867232282e-03,1.290107357873607457e-03,-4.010387767545972766e-03,-4.178643153097010010e-03,1.381941023564397221e-05,-5.714913125146944307e-03,-4.980157916265145167e-03,9.692955946351980781e-04,3.488440585053560489e-03,1.715618243764317026e-03,-6.365339648252977579e-03,7.916180712053088025e-04,3.327858045612873232e-04,6.425388908515294005e-03,9.373475479490068834e-05,-8.124007645785857878e-03,9.214224956070684452e-03,-2.015690359900985907e-04,6.892977534912933177e-03,1.384189918549010804e-03,-2.449492896676255110e-03,2.439506111436152546e-03,-6.640617280742151449e-03,5.489642857773367399e-03,-1.249075106302882177e-03,1.278680408455099005e-02,9.328139314022191111e-03,-1.152943814408760245e-02,1.566204033396081767e-03,-2.661873146427668965e-03,8.585499546648716869e-04,-2.741755802729672464e-03,-2.094279374880538176e-03,2.961282738932240026e-03,-7.343757484528931970e-03,-6.101514274323892317e-03,-9.168357154088355165e-03,1.656646319400784800e-03,4.552295746333290781e-03,2.621790124041156831e-03,-6.199322239404473842e-03,2.244554273771154785e-03,6.312826803112319383e-03,3.044549388740197711e-03,-5.214449443862038611e-03,9.682148323847276916e-03,-5.234141894467051956e-03,6.994920356051123438e-03,-1.035228243005033473e-02,-3.274464295583597463e-03,-2.460970577255114627e-04,5.691810029820360429e-04,-1.006349092703553820e-02,1.919884700238662367e-03,-1.478590394123897693e-02,-3.132170785572287869e-03,-4.156333711553471429e-03,1.547421439217892521e-03,2.716445636090367936e-03,-6.424494271795959648e-04,1.822027374352261364e-03,-5.
456275216067948269e-03,2.620783283331242809e-03,2.838077243018813454e-03,-7.143861288726345397e-03,5.435179374427208908e-03,-6.617427400607000208e-03,8.459664662141209903e-04,3.172595087426159036e-03,-9.584731546549351763e-03,2.824783596996374192e-03,-1.498382625730582942e-03,-5.300138924192885660e-03,-4.238935002462316040e-03,-9.133553166670109122e-03,9.579029655890452383e-03,1.190862039472416435e-03,1.218965775348424853e-02,5.523381749969202142e-03,2.085395735541620103e-03,-2.209044608812361053e-03,8.928877414025655768e-03,-4.046556544929926467e-03,-1.064118805686924477e-03,7.937659918988509110e-04,-6.520663230701012440e-03,-4.335223955081251961e-03,-1.027748640696420600e-03,-7.316680118316326673e-04,9.386493850532237615e-03,7.620982900481004102e-03,7.332296649398038248e-03,2.716441630512776099e-03,-8.541160199009368778e-03,-5.552052050799745082e-04,1.149488877466897825e-02,1.950905485624182100e-03,6.080868436114519188e-03,2.823383524698025102e-03,-2.255510445107864262e-03,-5.085198113539642940e-04,2.167684400063714378e-03,-4.411856816949388804e-03,-6.434240741226346719e-03,3.312081254469371929e-03,-4.468167293311788371e-03,1.388235789390019747e-03,-7.838133020945587445e-04,-3.272856524230458291e-03,-8.407331882732339140e-03,2.078626636460968215e-03,-5.261700939092383081e-03,-6.528649841305451604e-03,-3.890972214762468146e-03,-4.521055271706268114e-03,3.306367635385179960e-03,3.624300791702754140e-03,7.267357020052704619e-03,1.657809079046511738e-04,7.753582313069515806e-03,-2.072346829834001945e-03,-3.498955844189378332e-03,-5.205506974671419247e-04,-2.144224852922435934e-03,5.756040205094232858e-03,5.635181904834439537e-03,-8.192553855791376988e-03,3.374574143017500163e-03,1.841257223377047985e-03,-8.939877232373308683e-03,6.278376185095904324e-03,-3.698706159238901968e-03,-5.160909145189522713e-03,-3.910390006011686508e-03,-7.894299304545398252e-04,-2.223105383094338330e-03,-5.535137278237488474e-03,1.191456615354034507e-02,-7.772872394500688308e-03,-3.930842551866957878e-03,4.404451704186330974e-03,-5.051300563887818921e-04,-4.094021140076435465e-03,-3.745107863589550255e-03,4.316085352153360721e-04,1.753096935767138901e-03,-1.683129496853857324e-03,-1.501611077038833564e-03,3.755443131370810299e-03,-7.601654050815530826e-03,7.918513447082019263e-03,8.832259642264932500e-03,1.392379155258504850e-03,-9.267619124177108156e-05,2.560492839518557252e-03,-2.351674744157119384e-03,-2.302740098312752639e-03,8.107253210775466554e-03,-1.801926122515316983e-04,-3.517299201663596185e-04,1.394599730503628130e-03,8.186350914220336811e-03,7.742254959297318294e-03,4.563088796387675290e-03,2.386930529784180282e-03,3.127516199498580306e-03,9.039204743807483389e-04,2.647904347793265077e-03,7.655806220810510744e-03,3.566031858219706452e-04,-3.098616372687628805e-03,2.812497840473346476e-03,8.743609906701184029e-03,-3.277503735860395808e-03,6.123579988421087687e-03,-1.431506471947962731e-04,5.170096060288269929e-03,7.949709051397430698e-03,2.604811097781197481e-03,-2.516773518738068486e-03,2.431353532768864428e-03,-1.129297202199475289e-03 
-1.491782541867415070e-04,-4.751727960592363326e-03,-1.589793023664557445e-03,-4.407492311804116214e-03,-5.673482716328206505e-05,-1.545785684253549316e-04,-1.729475723787328532e-04,1.573303775204287422e-02,-2.907929499643452549e-03,1.147833238220803855e-04,-9.119599768382242316e-03,3.454899420190415873e-03,5.602498162832448073e-03,-3.745564835519817775e-04,-3.282392696401593372e-03,-1.107572385122063303e-02,-4.747706961288569879e-04,-3.302907420631739401e-03,5.036471082092411691e-03,3.149086907484534292e-03,-6.813214308724270703e-03,1.863076101436499359e-03,1.358209067797341288e-03,2.747312374673001843e-03,5.549402560375052071e-03,-4.043117908180063206e-04,2.661863593813987988e-03,5.092542603477268840e-04,4.923170624077913629e-03,-6.314347907238806318e-03,-6.250111636056680450e-03,8.462222165951199110e-03,3.303666185363070620e-03,3.416676955992385949e-03,7.437068481173245149e-03,-1.335628074533895364e-02,-1.171995288564265910e-03,8.793277448682217416e-04,5.393722692372965484e-03,3.309812717741006036e-03,3.624054105749400373e-03,-2.544768215758139857e-03,6.667443327172290456e-03,-3.127149175954062159e-03,-2.465981427422098051e-03,-8.189221228139741474e-03,3.717676188612274073e-04,-1.040463342974688385e-02,5.433418242107364293e-03,-5.714649303195607107e-03,-4.183242985299643300e-03,1.664221101088243912e-03,-3.396473557902740179e-03,1.086376744570670405e-02,-4.187677105362365426e-03,1.272583913485178650e-03,5.998255025051436699e-03,6.938588472525522056e-03,4.437131215109312932e-04,-1.952174710616779276e-03,7.005112228998486811e-03,-1.391924347910873400e-03,-5.524326680376594884e-03,-8.673566651760918414e-03,2.541467330934328930e-03,3.922929825078916126e-03,3.208213042351903358e-03,-5.379122168672096325e-03,-8.288281881866244732e-03,1.969232742256683648e-03,6.843333475027767177e-03,-4.336150066891604656e-03,-3.015538914889179505e-03,3.201287018526128474e-04,-4.003867708654118389e-03,3.866117396065585354e-03,5.823252521121362324e-03,-6.998151844232416811e-03,-5.002786575385752450e-03,-3.177262142558869365e-03,-1.507554990122628776e-03,-6.978227987948853843e-03,-6.863240607564967573e-03,-1.343543450702579245e-02,-3.272664532980077795e-05,-5.465170110788751534e-03,8.037034038622522039e-03,4.587894353060906812e-04,-5.659809468692838900e-03,-3.738771871698780107e-03,-6.994746720201289701e-03,-2.858446858564989233e-03,1.519592261552366085e-03,9.313367149103904050e-03,6.896118322985627519e-03,1.780200295751323400e-03,2.973270554097606853e-03,-3.874165910753060368e-03,-1.151987679372561395e-03,-6.340407319842712372e-03,-2.386336643784158114e-03,5.160879015384046628e-03,1.043164503714181764e-03,4.214949393056564679e-03,4.549181584297300011e-03,7.402864616097003594e-03,-1.862136761804935887e-03,7.433025935357960795e-04,5.236535986507632155e-03,2.185317048136507356e-03,-2.713420090797524255e-03,4.223394417386573327e-03,1.741871866134188764e-03,-4.691932928820584901e-03,-3.604539294677760412e-03,2.618174972244642022e-03,-1.178524202548321669e-03,1.903608168442982862e-03,5.595993137435551282e-03,1.116978985443270195e-03,-3.516835829328232663e-03,4.660414501812721461e-03,1.322344178396860775e-02,2.662601941924675527e-03,-6.191714382189268152e-03,7.154472981483223516e-04,-9.630849678604884445e-03,1.562728640711746886e-03,-7.215804237994967625e-03,5.405855213813240759e-03,2.015961063785031652e-03,3.352081829334861013e-04,8.518203195197795660e-03,1.128538789369856916e-02,1.956945092914815897e-03,-7.699330852901974630e-03,-5.993032629179464305e-03,-8.115105263312032829e-03,1.667863796199038672e-03,-5.1804808440
48667224e-03,2.650599236106661143e-03,-1.105603477238790468e-02,-5.761657919430720684e-03,-1.011160813784343568e-02,1.944454191467700176e-03,7.547430380022018565e-04,1.161290058871247179e-02,1.630979818954476565e-02,6.300493222366508779e-03,-5.995109236898944240e-03,-2.258604854943702740e-04,3.907421919611204358e-03,-6.018434772381724140e-03,-8.910194446010034214e-03,1.327925165457243591e-03,-1.008856926213703422e-03,3.054854877119416671e-03,3.185705893742214398e-03,-2.024706498933411761e-03,5.677893248066940082e-03,-3.371242367878959380e-04,6.648069086281961117e-03,-2.636797796212828060e-03,4.895268501861326296e-03,2.116732989994741642e-03,-4.011891024214966038e-03,4.353835176311194287e-03,3.732729094875189037e-03,3.775792226051158298e-03,1.596936867421504280e-04,4.659904940467554831e-03,-4.531472255227485778e-03,4.380740401003896910e-03,-8.951359629282814104e-03,-2.343955338289309756e-03,-8.699663413633205652e-03,-5.927139198885396539e-03,6.824627433961054368e-03,8.151019550804893682e-03,-7.424381798766785216e-03,-1.343136786620158923e-02,4.494702891359564877e-03,-6.784155587157799858e-03,2.330676328107963383e-03,-1.312743995580431360e-03,6.435032672996160788e-03,-2.186873961417807113e-03,-3.298179831877441973e-03,-5.012896168081374532e-03,1.969854459492772873e-03,-4.222914354677211567e-03,-1.356769913972108790e-02,-1.202804169829441660e-04,-1.582603254716650467e-03,-3.230723126329514681e-04,-3.154263538180333230e-03,8.659851470619253514e-04,4.329891015374861532e-04,4.860637463047938651e-03,-1.804172540807329195e-03,4.564958322839028984e-03,-6.316718952060723473e-03,8.710052209026574940e-03,3.039846154624391977e-03,-1.559377323061860061e-02,9.633029312685682452e-03,7.581573470119162654e-03,2.776457667827831200e-03,7.599756050719407886e-03,6.575333722948360944e-03,1.844403216027862800e-03,-2.520999495088345650e-03,-1.921258987861024009e-03,-4.215353482461983367e-03,2.182287453192070445e-03,1.932248019514732403e-03,7.242994594043205246e-03,-1.699658393217568203e-04,-1.205192572936148940e-03,2.705776930452187101e-03,-9.067115544394288703e-04,-3.674157995160727205e-03,5.258914033499709900e-03,-2.232803624478063662e-03,8.380326745652023912e-05,5.924689323774413874e-03,-6.568659322996618297e-03,-1.091571509398872944e-02,4.283403204663827415e-03,-3.880793205643301869e-03,-5.930741100776892698e-03,1.704722336623352931e-04,-6.045292799524125578e-03,-4.469392977634269529e-03,-8.335629550583735503e-03,-1.544759077461546116e-04,2.274510327393822542e-03,-9.775846357348124543e-03,-2.321133010205183016e-03,7.314949108175350682e-04,1.149518021523666727e-03,-4.881785353310014522e-03,-3.549549518876721904e-03,-2.780989156187521020e-03,-3.017722828877140995e-03,-6.025268904352173299e-03,7.522992089077844974e-03,1.492779371879118929e-03,5.239630922582810824e-03,-1.418463274926997633e-03,-1.249672309403297970e-03,-6.403220767878615126e-03,-3.741284288003729908e-03,5.985748122148004538e-04,2.727820429030157912e-03,7.511450967700866732e-03,-4.342408868350868499e-03,3.944015200036521569e-03,9.556284162552632772e-03,-3.744082666175044521e-03,-7.868263094446664371e-03,4.892947711142859799e-03,-1.555255419030045000e-03,-1.705022812451827434e-03,-9.159632211032573378e-04,-1.452297003866552224e-04,3.247811377363542945e-03,-6.712870004459572590e-03,-5.888045896417235317e-03,5.977588987511414480e-03,-6.290835117920005187e-03,-4.867248338062002020e-05,5.710035165667470651e-03,2.650718806223455876e-03,7.375725025123442032e-03,-2.548950368454527219e-03,-5.829405436675553533e-03,5.105310669428867213e-04,-2.2942415442515204
02e-03,5.910539427450151040e-03,-3.003867122144115911e-03,-3.759280237451063846e-03,2.837375911604557953e-03,7.085875732233210857e-03,1.896165240427308924e-03,2.030688812837600724e-03,7.523205725716944695e-03,-4.375736658422066730e-03,-1.361093021561991193e-03,-3.103897972506460337e-03,3.584690462970474643e-03,-9.886565186805472069e-03,-4.670465722776332221e-03,-5.072038373357504485e-04,-8.593946892848780680e-04,-5.889968095667326041e-04,2.452028872827096816e-03,-1.048220997587087470e-03,1.666762647133471610e-03,-3.405987989515028586e-03,-1.461847670374811494e-03,4.207797604979001321e-05,-7.470453507515049246e-03,6.433256802425605173e-03,5.550428093555177973e-03,4.846354687319813135e-04,1.277175966896838388e-03,-8.091718342869867395e-04,-9.878216307645222224e-03,-5.705112372532439823e-03,4.322443001953454357e-03,8.047027133061071275e-04,1.865307660325139479e-03,-3.350272842359431616e-04,6.395714350706970452e-04,-3.183719808816419441e-03,-2.915853676697575080e-03,1.120776168484695140e-03,3.688576907766327219e-04,-3.086136681507357232e-03,5.269823014616981105e-03,-1.311247772117512915e-03,-7.835842611695231774e-03,-1.230294096810707069e-03,-6.300448235108916002e-04,2.460813573214549674e-03,-6.025774642709709472e-03,2.751527551482690383e-03,6.267103523159590732e-03,-3.451342201747872786e-03,-1.223508994538866892e-04,-4.168114383994544168e-03,-9.792125826537565435e-04,-2.677903208348722097e-03,-2.579117580447949382e-03,3.880993824701585351e-03,3.349778902104293046e-03,7.929367449326546122e-03,4.170140923005677229e-03,-2.162311024801092264e-03,3.524017270451789818e-03,6.968190577824991822e-03,4.659535302204654769e-03,-1.266455184607382647e-03,-1.709008841620946037e-03,-5.558143201145782954e-03,-6.591017502677656674e-03,4.185964532780071280e-03,1.617900144497357896e-03,7.200936257554075952e-03,5.552122268067829700e-03,-1.323225200460446602e-02,3.685120843364015405e-03,-8.192958633822295181e-03,-4.732632844136370684e-03,4.950285105401623425e-04,4.001742134168373649e-04,1.561207863511286783e-03,7.720673519687224623e-03,-1.003182281969617652e-02,1.624840912664436438e-03,-5.777929874430157414e-04,-2.722993628902171601e-03,-2.036687931751854190e-03,-6.594538760708684028e-03,2.898197489129519366e-03,-2.482736230238224897e-03,-8.355283475307008836e-03,3.632974986821781160e-03,-2.307139477142871047e-04,-2.497023673849580426e-03,-7.237984350113176556e-04,-1.823544790416711759e-03,9.229636305862382343e-04,-1.393299299790620466e-03,-5.048255095643645537e-03,-8.926038456457969367e-04,3.655279481480113151e-03,5.003999515112220534e-03,3.503511223411751290e-03,-6.994902540589407330e-03,1.029063255579172088e-02,2.585218657397777512e-03,-1.044526509778879834e-02,1.612300504420827261e-03,1.274089515572517355e-03,-9.456725087601115624e-04,5.128479024155125840e-03,-2.228283387648810794e-03,-2.905062002409508279e-03,-1.101485042187934187e-02,2.014694523930209581e-03,-5.329263219272814198e-03,-2.201715745565518533e-03,-1.791910708489976071e-04,5.622768663374628877e-03,-3.568463731993213998e-04,2.226429783048444484e-03,-4.178021648205149791e-03,5.260099905158177429e-03 
-2.121046948278311473e-03,5.001539476462405929e-04,1.585038654306162838e-03,-2.465747600916181302e-03,-4.467723571238237101e-03,4.090258837784164353e-03,3.272332026531765692e-03,-2.803011162663039831e-04,-2.600309092661127770e-03,1.026171521443406016e-02,-1.323390425477885589e-03,6.135461169992115910e-03,4.817454588647596472e-03,1.469806258455487848e-03,-3.059462173643852107e-03,-5.689608871582041427e-03,-1.416893151850897323e-03,-9.652454559443907414e-03,-6.730635007444136890e-03,1.793969135426770351e-03,-5.585958741482671729e-04,3.099046904740386174e-03,9.470648442307980086e-03,2.973722487059429938e-03,8.187257315033618851e-03,-6.258110982106187929e-03,7.666020207055510060e-04,-2.002084761125282866e-04,-7.698874617945533801e-03,5.728862853211073337e-03,1.770315556844649987e-03,5.721405996283140775e-03,4.837608210404091037e-03,-4.696540095744960487e-03,-1.702185327256243745e-03,-4.606110431396219593e-04,1.066163216472053137e-03,1.037184002491163548e-02,-8.116702637361436143e-04,-1.091728144668649252e-02,7.580604852353094690e-04,-4.561272178588321978e-03,-1.213040269459815220e-03,-7.928019131111038601e-04,-4.946721743491912411e-03,-4.387914508756281273e-04,9.782557355373380781e-03,5.214544995664676351e-03,5.610290111872805706e-03,-3.188607999492877121e-04,3.522515285002666367e-03,3.371280538317182867e-03,-2.088348425233179685e-03,-3.363542862213898116e-03,1.122264670551217024e-04,-3.476989646603729969e-03,-1.853035727357757846e-03,1.283679138409348354e-04,-3.113083107961581077e-03,-1.236747458142964905e-02,2.610807177009453247e-03,-2.540620535941385833e-03,7.861385462188848205e-03,1.017022822260184138e-02,-4.011155811048723596e-03,6.377585243976035406e-03,4.263640693606979433e-04,-3.719690080979124337e-03,7.493566413831967074e-03,7.597420126380573669e-04,-1.627542733997161496e-03,-6.129727412891055509e-04,3.532707982916340715e-03,8.396005316325793472e-03,-1.011909906302121964e-02,-3.876400224534770882e-03,5.082142509408517535e-03,3.979558472554793266e-03,-3.904616846252485698e-03,-1.280457510971525327e-03,-1.605296842981187262e-05,4.030761991619651735e-03,-2.116376361308998019e-03,-2.942921537057757893e-03,-3.526552343748026643e-03,-8.942106075377227042e-04,3.910276300423952932e-03,-9.917867479473005919e-04,-2.961080865357765556e-05,-9.597017033423462570e-04,-4.495771118533693246e-03,-3.010697498854838627e-03,1.804605408536096803e-03,-3.582876622644585660e-03,4.885336017262780287e-03,3.394267276599384747e-03,5.315694570407896412e-03,1.093538206525760878e-02,-7.559747539844552432e-04,2.293990641270489438e-04,1.444242212107501688e-04,-3.008234773141367155e-03,7.037733305801130955e-03,2.958906036636052995e-03,5.667507952064589072e-03,1.296584767885144984e-02,-8.488468493153360477e-04,8.495259171000914614e-03,-4.802515657438841470e-03,-4.493015559048992372e-03,1.106269632365555061e-02,-3.455363863758539444e-03,-2.801528564617387744e-03,-1.690354165611332405e-03,-7.324495022639198336e-04,-3.352026641337246902e-03,5.660958257492092541e-03,-3.209489634333131426e-03,-1.370877028402310831e-03,-3.186461636150687490e-03,2.953190166230326014e-03,-3.272492179074086831e-03,-6.181907866397082923e-03,-6.532155440732291367e-04,-2.632573457699946117e-03,3.532863219816597018e-03,8.674372993771063667e-05,-2.929660289346551656e-03,-2.429938860894192715e-03,8.787164195493885055e-03,7.791701570906122559e-03,2.686354998184485761e-03,-3.397200411808223765e-03,-5.886631790306843864e-04,-4.415366782267445694e-03,3.092838533335649613e-03,5.037085112886160662e-03,-5.511346081934633857e-03,-4.178290939688932004e-03,2.99
0423246855290415e-03,-3.808948671205882475e-03,4.482993055410777750e-04,1.393953185191324287e-02,-1.304923859406210453e-02,1.895548926871837249e-03,-2.159662161462810263e-03,-2.467370078026570861e-03,2.215587335729768837e-03,7.946893011702604176e-03,-2.459204648417477254e-03,-7.067076779411969253e-03,-6.016513729471369837e-03,-1.702917363507655893e-02,4.418847705716436730e-03,5.767572519029846335e-03,4.491151649519526355e-04,5.039337855336679592e-03,1.766362504853458788e-03,-1.571584506485209094e-05,-1.290340668617206803e-03,-2.477573889500302249e-03,-4.167126789569879260e-03,4.092497736828523171e-03,-2.004779454136448001e-03,-3.032506270344085143e-03,-4.301198395614922766e-03,5.765944193466160687e-04,1.454709318108320711e-03,9.230486847862555758e-04,9.268689538397170691e-03,-3.208456261486682289e-03,-2.936638440256178397e-03,-4.132603024279079065e-03,6.282122158724043250e-04,-7.564576944411723243e-03,3.706328222586556444e-03,-3.642545274790544123e-03,3.445497075644069623e-03,5.927310231364244474e-03,8.930476650982470963e-04,-3.571570797274342072e-03,1.604180691195967188e-03,-4.103431870230519855e-03,9.396503883272223651e-04,-6.691637273507619610e-03,-6.401084321790566876e-03,6.603646140253899115e-03,4.608768336843992869e-03,-4.479966468856890968e-03,-9.918119500755778403e-03,7.194215050809238225e-03,3.094957916250378263e-03,7.564691421665836052e-03,2.916060781457462375e-03,-1.495734347419186384e-03,1.255292463199557786e-03,1.748770384388700200e-04,3.286727605689318500e-04,-1.270912358011690949e-03,5.444354805754079117e-03,5.706881624759688014e-03,8.285146882146262465e-03,2.262085669900161002e-03,-4.652096979885353076e-03,-2.022107877154775881e-03,7.609088031977966028e-04,-5.849883324167937451e-03,-2.785853764453019576e-03,-5.625024396472293534e-03,-8.100525234136601513e-03,2.577685650234248955e-03,-6.526881852645106648e-04,-6.078056950131006941e-03,-6.656281608633943594e-03,4.184523568291677494e-03,-5.651666671204902273e-04,7.679992882147434162e-03,5.136514137056090445e-03,3.200482872920645209e-03,4.813535353676350276e-05,2.842687230564491102e-03,-9.155017610246325363e-03,-3.002806881185703081e-04,-8.784658279749951379e-03,4.328327475252058930e-03,2.506449325114964034e-03,-2.379204994213400624e-03,-6.227011949804177326e-03,5.491145924304711122e-03,-9.624666901616339351e-03,-5.556155914454221235e-04,1.094900989304699573e-03,-1.532705006633973519e-04,1.851368915006023361e-03,-4.608592041177229058e-03,-1.616296577046916899e-03,-6.744218320196704829e-03,8.713090589222788557e-04,3.261192128534427023e-03,3.638174258798900808e-03,-3.426242000260736227e-03,-3.264673995744559343e-03,6.027616463497610481e-03,-1.084978187596666657e-02,7.692922529807577625e-03,2.470024424510676095e-03,5.990261125311356974e-03,8.332265955489343403e-04,6.157740876035659033e-03,-9.871790261805849648e-04,2.095261474014195255e-03,8.791366678013637749e-04,-8.021741986744641492e-04,-1.681187898230429654e-04,5.553949334142966633e-03,2.645808207801963931e-03,8.025094188033309386e-03,-1.812320578614919726e-03,4.153440462648619498e-04,-8.333823589829873063e-03,-1.034606153628068580e-03,-9.517911382806111889e-03,3.649292235585592428e-03,-2.211119766243733933e-03,-6.400242239294954942e-04,1.306958654541813536e-03,-1.097240538832970191e-03,2.910175291513422864e-03,-8.003960884402707169e-04,-8.403711265593159785e-03,-2.052261582600485752e-03,4.073673659804620539e-03,3.052004078478260437e-03,-8.149849865252294101e-03,3.669188808532073635e-03,2.934582645510517250e-03,1.010945856754071927e-02,-4.039202274271408086e-03,-3.95108217995404
0242e-03,1.286886897363751555e-03,-7.124977393765674367e-04,1.247525069345407044e-02,-6.031915279244236781e-03,7.070466499571520513e-03,-7.415251942080825116e-03,7.463954452991789987e-03,-5.846967157191998790e-03,-5.502484807418526330e-03,5.496155057869175561e-03,8.697831907212487043e-03,-3.200882349203553173e-03,-7.495941959462779948e-03,1.446516259865446123e-03,1.178367083781929137e-03,3.015905592332124470e-03,2.962195134896648959e-05,5.570769979711405473e-03,1.521342976155780086e-03,1.044266759768504261e-03,2.139231846744498854e-03,6.976724700782170260e-03,-7.527486793942891365e-03,-4.849493792246510442e-03,1.446626385509788273e-03,-1.311015793967211214e-03,-3.740309060860811871e-03,3.234230337292418760e-04,5.021289855255296232e-03,-8.661579490573672810e-03,-2.045590105514322014e-03,-4.414619922344454120e-03,-2.507589897541786725e-03,-3.144347865196801817e-03,4.452960257929650676e-04,2.353945178045824694e-03,-3.941056868291964443e-03,-4.472161805184633916e-03,-3.097517626791070587e-03,9.553128536533459295e-03,7.657147470674477473e-04,3.296208709639974511e-03,3.301976991976855996e-03,1.311706321334002966e-03,-9.211576401202679117e-04,1.439068244653981263e-03,-1.444939183895451503e-03,-1.449503425292926516e-03,4.576221641779397095e-03,6.155084204970619915e-05,-5.977562391804677744e-03,6.425863266839387800e-03,9.134856754643773044e-03,9.791396204421707813e-03,4.318741476364875570e-03,1.396103097324878292e-03,1.328976792382873450e-03,3.333109015856655297e-03,-2.504439083420116965e-03,-3.134308200690705953e-03,4.671694624358081402e-03,2.125180845081557201e-03,3.727595585754733126e-03,2.063363164612816349e-03,-1.710666497067995281e-03,-5.136551935173612644e-03,2.875911574560253633e-03,-9.453592655002998266e-03,6.023694461802025016e-03,3.285033192784352421e-03,-4.741420010497600390e-04,9.050746689792725464e-04,-4.479365534806013620e-03,-4.128236011148735118e-03,4.950361088246499862e-03,-2.733024617261109110e-03,-2.244292881388942028e-03,-3.524220568968181705e-04,-5.773971485449104664e-03,5.116391638403517197e-03,4.444174645813524574e-03,9.864342639930331717e-04,2.844718250710317402e-04,3.283653561103231245e-03,-1.601452784771093549e-03,1.204538992978551777e-02,-6.424479295713214850e-04,-5.762568234929738020e-03,2.705810111719656862e-04,-5.397271603214238468e-03,-7.466574153331882041e-03,9.525193663834594440e-03,-2.496389378138073704e-03,-1.231176489610579568e-03,-1.672918457899881658e-03,-4.233230392533096674e-04,-1.188181879724412417e-04,-4.738377780992767199e-03,1.720697003965226349e-03,-4.215690613601509574e-04,-1.565488845133796141e-03,-1.005520893734133774e-02,8.488208455919239295e-06,5.268995942259105218e-03,-8.449681177906948190e-04,1.512146582359872368e-03,2.132303799551135692e-03,-6.865651791686570860e-03,-3.077670424194162667e-03,4.985332176642739803e-03,-6.482009047800843526e-03,2.174243940987417509e-03,2.409074560666727653e-03,-1.092936978373898254e-03,4.006698712628247588e-03,1.939733493704653240e-03,4.747385679074919360e-03,-2.704196764056577087e-04,-3.587217768906466459e-03,1.433195581270627560e-04,-3.476903469551399003e-03 
-2.531318384200330254e-04,-1.547022706749497144e-03,-5.537269449475116677e-04,1.988795047520823880e-03,-2.385938077013326913e-03,8.404918518021785995e-03,9.653588722413241263e-04,2.642329852887737550e-03,1.762419826492893317e-03,8.302643146748047050e-04,3.655983967138576925e-03,9.694054501373732370e-04,9.195515645426045534e-03,-3.481369743182990625e-03,3.873773756624610248e-03,2.678120066811080519e-03,6.785867669576292963e-03,-3.880966949694720993e-04,7.321499876794336330e-03,-4.959266586137702328e-04,-2.104143540763619372e-03,1.728808459269679226e-03,-9.648639910516225437e-03,1.825139004555265164e-03,4.273231930118729163e-03,1.078191813470260165e-03,-6.544405753951444346e-03,6.835100265159541662e-04,-7.952288332797792861e-03,1.029795971378392495e-02,3.080964466630794606e-03,-4.121411210631782153e-03,-2.615280976668107275e-03,2.509485547625675810e-03,3.141641513274360219e-03,-1.777774699124026227e-03,-4.001078979000572810e-04,1.058240796420220143e-03,2.298898329268761302e-03,1.805803848545175887e-03,-2.876259796147320746e-03,-1.548023281198777955e-03,-1.979557945168380906e-03,-4.039535426483614397e-03,4.626203872658911885e-03,6.030489612726177293e-04,-6.527722188365439257e-03,1.270624542396415341e-02,-1.696014556822111885e-03,-9.278680974909440590e-03,1.808460105088125066e-03,-5.382811284722883657e-03,3.178468993594710246e-03,7.085529263620234282e-03,5.838710200813984792e-03,-6.028940419139836547e-03,-5.024318702843896638e-03,-7.518004390259551894e-04,1.622328470514311461e-03,7.414105928853885995e-03,3.360793583759509096e-04,4.308484284754618768e-03,-1.822569530783265876e-03,-1.193541039324875867e-02,5.490661461717218257e-03,-3.310784349110708650e-04,-2.376993414019316214e-03,1.472397494958582439e-03,1.031657305392110098e-02,4.628903201645163425e-03,2.791387962450854032e-03,9.142698308554429720e-03,-1.744140299301118755e-03,-1.218170357888631464e-03,9.879876061826718334e-04,-2.463012084672775402e-03,-6.667677805830082459e-04,-8.191209882295057837e-03,2.792488924281702912e-03,9.045607563809230853e-03,-4.526320573892754842e-03,4.287877232776803743e-03,1.202924675324554054e-03,-1.786327404745466352e-04,-5.097684201416775515e-03,-3.773455294202703318e-04,-1.137698336287669074e-03,8.536577640497991937e-04,2.369721610990911057e-03,-6.189421410064635531e-03,5.599813961241396736e-03,-5.506474560092336143e-03,1.872314387230248874e-03,2.559291668809509073e-03,-7.721208300112115085e-04,-7.269317419286150037e-04,6.329591161764365655e-03,-6.924547584215329406e-03,-2.931995038421073788e-03,-1.838710709588935264e-03,-1.670324602175273345e-03,9.108206992040902535e-04,5.198078109649585492e-03,-1.432996257397802257e-03,-1.567755276488750618e-03,7.115297649950877434e-03,-1.508692387373644292e-03,5.470508049666113898e-03,-6.621368574176039051e-03,-7.768841239312874493e-03,-4.091883668296319911e-04,-6.778483246185728131e-03,3.116490716297629256e-03,-3.615919500725013960e-03,1.695883235171016367e-03,2.105039239559899217e-03,-5.231939166204139548e-03,2.115735431748108283e-03,1.090209973515721782e-03,4.208927849933537073e-03,-6.912412312262842043e-03,-1.825365026007271462e-06,-5.313623704411954071e-03,5.366058524363904667e-03,7.987299249904562110e-03,-1.282336704811354484e-02,-4.326831324567680624e-03,8.249066556081057064e-04,-6.452957479916612143e-03,8.898706585878157771e-03,-1.997996016744565517e-03,-6.094922204955379938e-03,3.602945245318248547e-03,-6.577907674445005078e-03,-3.103582934576836622e-03,2.649801196307746340e-03,5.004023899340014590e-03,7.238068308578557951e-03,-3.353595041012323325e-03,-2.758977329
242200908e-03,-1.038351523891979482e-02,-3.473219080102083432e-03,-4.038061958621996794e-03,6.012778671838672241e-03,-5.056185307970935579e-03,-7.399656294240462839e-03,-5.437150150052995172e-03,-6.186213167767836221e-05,4.423547156898311079e-03,-9.464623876067258801e-03,-5.401690952611284784e-04,-1.036861170295398996e-02,-1.467339171233279739e-03,-1.064336977157850569e-02,-5.859584433850952706e-04,4.907699428167501196e-03,4.757288700928908806e-03,2.908419346514505219e-03,-1.965194153895202106e-03,3.999882240407478390e-03,-5.058179655625570648e-03,2.730554380925401953e-03,6.472171966302110897e-03,7.222654399045545098e-03,4.431049120810475611e-03,-4.449958294826990464e-03,-6.911333054454862901e-03,-1.949141753337651313e-03,-8.346094452445676493e-03,1.347206554014591245e-02,5.070227908012944415e-03,3.679617346675372481e-03,-2.139269465140766682e-03,-1.027005001091373191e-03,-5.259307966000063318e-03,-3.096243853645799272e-03,1.798895150932028710e-04,2.649254499081452464e-04,5.649277924048068172e-04,3.433851984681734939e-03,4.454415072770075970e-04,-6.396506313006744375e-03,1.713744114663459661e-02,9.377428845869936747e-03,8.652383726248701404e-03,-4.878104380979016858e-03,1.633060622338433155e-03,-1.654745781057151682e-03,-1.099598806001895329e-02,1.052113294600434245e-03,1.156043172699319083e-02,-2.576272671282924323e-03,-5.121184362154941432e-03,3.886910963039397725e-03,2.137602058685149841e-04,1.491014395684652502e-03,-2.674944420707653456e-03,-6.099051819019192175e-03,3.199745394627893664e-03,1.442933212372919790e-03,-2.233148532552298605e-03,2.380558468484357711e-03,-6.633260791254438904e-03,-2.547573866965017163e-03,-2.076768505949037544e-03,-1.918114576508026924e-03,-4.000607309297349659e-03,6.297120230102299428e-04,-2.002013371714580527e-03,1.894353117043880109e-03,1.465670886950972383e-03,5.287982299288538496e-03,3.449956857384250952e-03,9.925006712803562808e-04,-3.463993003288447052e-03,4.453177627258448311e-03,-2.507383348216106738e-03,4.168195390912794729e-03,-1.499931513431277232e-03,-3.203373530707473871e-03,-4.036752614175955975e-03,-4.245073403097077668e-03,-2.819553956060165747e-03,-1.927363471254774216e-03,5.072863245129950564e-03,-6.876670755712001098e-04,8.228166491924000098e-03,-1.204226923648443618e-03,-5.305343185362128983e-03,4.897208167212598702e-04,-2.712286626193337151e-03,3.643408945766234315e-03,-1.755177904692083255e-04,4.585820507628655582e-04,-1.541655568210404358e-03,-4.141096686090025430e-03,3.452754091434012858e-03,1.157002648712075234e-03,-1.700731248362775906e-03,-2.912051444650883524e-03,-2.968611063302497265e-03,-3.327874454715249591e-03,1.541030541024927640e-02,2.218997026514391707e-03,-2.216886753356885065e-03,-1.602544311694613930e-03,-2.375900604755892299e-03,-4.938522529929901921e-03,-3.096675617271119323e-03,-6.419508752605189959e-03,2.860190951566004679e-03,-5.994499899231562831e-03,-5.669377531118477290e-04,-6.921419690235591608e-03,-4.914414823949144785e-03,8.119762456286540656e-03,-4.452850937858407639e-03,2.135627246225455141e-03,1.728483982256347418e-05,-1.732845836692197515e-03,-3.519244241752737226e-04,-5.097344745225864705e-03,-3.478988943042935107e-03,1.464631974770392968e-02,-4.100600044560308820e-03,1.147996994778297366e-03,-3.143973311267316169e-03,6.359321823862233020e-03,3.256905033470197143e-03,-4.684870234173321192e-03,8.352745531775637375e-03,-3.478788584141416246e-03,5.752664692665977741e-03,4.905425577603122166e-03,1.272882961081357213e-03,9.158313415553225034e-03,-1.667086522077911026e-02,-7.675536859068869807e-04,3.41233399666
5934278e-03,5.664027329134235789e-03,-1.543940339422252830e-03,5.240869654034478048e-03,-2.068282574420269877e-03,1.918619387001377439e-03,-3.584992860062296508e-03,-3.671707396533048333e-03,1.220673758862637036e-03,4.009840603426986739e-04,9.420312233869142932e-03,2.504225787645517499e-03,5.421826904920514639e-03,5.792901949616526078e-04,5.101192418719488605e-04,-1.446468986949841357e-02,-5.962240821989016511e-03,-1.763283946044393036e-03,6.130295295575118215e-03,7.640662520968814417e-03,-3.748026766780525147e-03,-1.563064085961206329e-03,6.429854992986502524e-03,1.595367825631989521e-03,-3.143565297915408563e-03,1.510249818028292586e-04,2.564193511661106914e-03,6.713891480028238989e-03,-2.432203339456053947e-03,-7.419009534180518242e-03,-5.729389200516453798e-03,7.272364573667536129e-03,-2.625515413287100430e-03,-1.414035868353925369e-05,-1.468881760713281436e-03,8.514377984821052889e-03,1.717134098180572114e-03,-2.082049565980571317e-03,-1.852364104729192556e-03,3.430947206642756021e-03,2.507498059221657891e-03,6.575481863438379163e-03,9.116601163330173389e-04,1.657027189157478028e-03,2.531437467838077403e-03,6.816604558754556925e-03,7.671295093465268361e-03,-4.007001417148786486e-03,-8.438597466176936401e-03,-1.236491401356987728e-03,3.076586860855291209e-03,6.321826994882364867e-03,-6.179038114811151450e-03,2.446314462842844560e-03,-3.273630340893564274e-03,5.378836025324248461e-03,2.097338861182848797e-03,-3.524228852657007843e-03,2.533626432555549299e-03,2.019447936375001577e-03,-1.417263058397107665e-03,-7.792025943445237031e-03,8.819116983026772938e-03,-2.677304331835098170e-03,-1.628778331355338130e-03,-1.146763863330049239e-02,2.288634397253359984e-04,5.712315900808023587e-03,1.421885381534299065e-03,-4.487232400266110040e-03,-2.128878965962827368e-03,-7.138634325174063495e-03,-7.078758484269573326e-03,-1.966704315849567963e-03,-3.444826811032021281e-03,-4.390167941929748116e-03,3.363750408107916127e-05,-6.592408721597570014e-03,-2.784388289633742825e-03,-6.418451569058684220e-03,3.668989198640286616e-03,-3.722463861310045929e-03,6.876216962045707086e-03,2.272069660246805404e-03,2.123185473633532223e-03,3.922161346096375914e-03,2.511827627256925551e-03,-3.999979274412380700e-03,3.698989418187002137e-03,4.534174584432355999e-03,-4.522859232374417185e-03,1.814860067034116059e-03,-3.854075180023872321e-03,2.047523068323901969e-03,1.275208922297566862e-02,-1.788103296253077930e-03,-1.073296302051423255e-02,3.257468125324922657e-03,-2.210538519015430199e-03,-5.681075060241434944e-03,1.189110248722363402e-03,-5.266700788153317811e-03,1.723895816119252675e-02,-7.117488571337583954e-03,3.386560655449764173e-03,-4.152947784603833008e-03,9.694379339359245496e-04,1.322678136788320779e-03,-6.890674924878570709e-03,-8.720390834987337692e-03,1.169269503728276663e-03,-3.013990289562353941e-03,-6.434799559848510160e-03,-9.911355701033679189e-04,-1.962671074757766471e-04,-4.594951348137087631e-03,-8.975589368612367155e-03,-9.140679192726373567e-03,-5.588178737741183316e-03,-5.294011010950659159e-03,-1.661122396572851852e-05,8.122218401277260828e-04 
-4.290607860218159826e-03,4.263749963442544332e-03,4.836200751557788793e-03,-3.977088732133860632e-03,-1.703386120871721145e-03,1.469607037572131115e-04,2.011633949330347490e-03,1.745596458858036937e-03,-1.623993244201257747e-03,-1.785596746683530082e-03,-1.917163461637271535e-03,-2.334564831853593297e-03,8.928693713174406382e-03,7.894841590119345415e-03,1.882608973627550352e-03,4.724680326448259249e-03,8.914668318584613926e-04,2.924388275131201663e-03,-1.727907616744042436e-03,1.248643468466921036e-04,-2.090597741817498516e-03,2.946212737281907971e-03,4.423328413799366976e-03,7.520653230045620981e-03,-2.050951861003178986e-03,2.495332072297034524e-03,1.531784091299908529e-02,-8.798578206975288946e-03,2.428459041670695106e-03,-3.096667065588232758e-04,-3.922639325294651792e-03,-5.907277070590006771e-03,3.163575168069681504e-03,-6.633616351726670524e-03,5.677669736443000971e-04,-2.206341609540033149e-03,3.005923513235808642e-03,-4.813549571422201523e-03,4.069761813699096682e-03,3.724941030479535947e-04,3.542289471981577200e-03,8.129302127133754621e-03,2.462091877182161128e-03,-4.281304362943105790e-03,1.585443526728070739e-04,6.669064334762185575e-03,6.739134649124471979e-03,7.984508900512426316e-03,-2.038986874162564827e-03,-1.204251853640351047e-03,7.281317118310481895e-03,2.758330891309933655e-03,-1.286836548327319370e-02,-5.268711294423709078e-03,-5.314263106516213878e-03,-1.117043458138296903e-03,9.696580190683130526e-03,4.278989074558687752e-03,-2.894735806455827308e-03,3.698087554183191166e-03,-2.433944047821392630e-03,-2.080561981400532324e-03,2.151326082483588815e-03,-1.939316434679476181e-03,-4.855529685744552210e-04,-3.130225658086442913e-03,1.034677863598512920e-03,-1.896177990199772196e-03,-2.236022072187761247e-03,3.053116413304739641e-03,6.070582584304695705e-04,-9.964125461762339195e-03,3.964887790425941118e-03,9.566471115693767069e-03,-7.363247596415637211e-03,1.723365966646269029e-03,6.970579491070986143e-03,-2.223549028432670726e-03,1.747770039382661247e-03,3.841972550102940118e-03,-3.050109254822705972e-03,-3.137224355209532016e-03,3.543705636377411563e-03,2.484764753113547528e-04,-3.614789021272956249e-03,2.723354845965539252e-03,1.031632470855423003e-03,2.204915419042052360e-03,-1.724213477761313415e-03,1.355937925501474487e-03,-7.052909141984023064e-03,3.594882975417805753e-03,-4.972692531634665888e-03,-5.565258532117592892e-05,-5.724878526594939614e-03,4.264947956443799863e-03,-1.889182971014332409e-03,1.287786273480894667e-02,1.067622968747021029e-03,1.221139919972748580e-03,2.196304571636341612e-03,1.975718356443566996e-04,-8.118968565064484414e-04,-5.518104105601844347e-03,5.964759478620272788e-03,-6.121196041765315787e-03,-3.375485300939738524e-03,6.943212848952474113e-03,-2.398414094272802426e-03,9.203018102392710939e-03,-4.381873154404848604e-04,3.120354816632356452e-03,-4.190235896310348478e-03,1.526555501568716651e-03,-2.793284140422200822e-03,1.713307953712398619e-03,-4.167560829420644854e-03,-1.345029761729025749e-03,-9.888039109651181741e-03,5.242644572627497800e-03,-2.588296054029715510e-03,-3.597932940195404036e-03,4.578177595433892953e-04,1.822307573309036102e-03,-3.060260077204425555e-03,1.863692587966956716e-03,-2.897176356198728612e-04,8.304563716025175832e-03,6.784915418550129321e-03,-8.262737000853045966e-03,2.489640216348602223e-03,-1.040685573380630696e-02,1.021906786479915598e-02,-4.594789491505735582e-03,-7.787694441805806736e-03,-4.724594495203516577e-03,-2.492870871398065145e-03,-4.008835226508637023e-03,-1.244922404589638050e-02,7.03851522479
5269601e-03,-2.802866119128309425e-03,2.567607822976827775e-05,-4.642120222990145893e-03,-4.081681520238904237e-03,-6.122826495722134181e-04,-5.115558552980542951e-04,3.291382966114577910e-03,-1.302929294514579284e-02,-1.706043914530730194e-03,1.217133824396692422e-03,-1.880705531472742862e-03,-3.211247407748519817e-03,-1.864187501571084584e-03,2.079161140569116186e-03,2.163722574082582771e-04,-8.170386035344695454e-04,1.125807506741500748e-04,8.466101678466404767e-03,9.839925463876627015e-04,-3.954331095801224757e-03,4.051777917987224760e-03,-5.783195995004332055e-03,5.688960885378275753e-03,4.027465020556825377e-03,-1.208891175492822446e-03,-2.946378868423651497e-03,-3.608140475800671997e-03,-4.025076527254868065e-03,-6.757526773347421719e-04,1.068539915375794338e-03,1.946630310171108824e-03,2.202353109607530998e-04,-9.459070027656833399e-03,5.621203743348319494e-03,-2.306044611442272397e-03,4.350447320794653986e-04,5.590898248603102041e-03,4.587013963581465514e-04,2.999452342080119113e-03,6.589696906061009128e-03,-7.336327348845470865e-03,-1.532632571673929033e-04,8.380007540822824973e-04,-5.226416242583051090e-03,1.043036440836083139e-03,-1.908193642261711131e-03,-3.630819804207709080e-03,1.297069504889362436e-02,3.296116567079813205e-03,1.549829307311178638e-03,-6.371586359157684759e-03,-8.282101059092845247e-03,1.817176776214530359e-03,2.592405059323628358e-04,2.020635576792874973e-03,-5.230779995277276030e-03,5.978492493051530340e-04,-7.680289151754533285e-03,-5.947115703178294452e-03,-2.755757943163883970e-03,-6.123595869894261565e-03,-8.242587641033281582e-04,-2.491169677686374357e-04,1.587545606007095085e-03,4.331437650723401860e-03,-8.181637308049816409e-04,-3.853030939807044163e-03,4.660717591623060423e-03,8.006913418706556457e-03,8.709021947565506044e-03,-1.230994084773415296e-03,1.551437682768699129e-03,-2.323562514268298022e-03,1.480847743100632222e-03,-1.423546230675026315e-03,-7.516535766631643987e-03,1.617476625375471964e-03,5.044695488204060593e-03,4.149285703229869254e-03,-4.635354198638847074e-03,-4.649944644979254614e-04,4.952283602992391294e-03,1.660568091685766092e-04,6.656414672868505092e-03,-5.174136864744151472e-03,-6.532580516557508427e-04,-9.537405252070805736e-03,1.203717940141582404e-03,1.270700833802961829e-03,4.593631440425788651e-03,-5.018138106575843749e-03,4.909272779120759343e-03,-1.247571381367525286e-03,-6.990126472421385150e-03,2.537957411107630550e-03,1.102768521123403608e-02,6.409366602014448383e-03,-2.104933906408445066e-03,-4.496814337294412905e-03,1.975102506531999785e-03,4.341414625260982350e-04,6.552310603614452250e-03,-1.347890255312740593e-02,6.471730354184856114e-03,-7.565141279629751699e-03,7.098530158348023526e-03,1.365029026805897848e-02,1.561896072507746452e-03,-3.620620241592857873e-03,5.521703319376835076e-03,1.671894168925859786e-03,1.372487493608043167e-02,4.265536799568045998e-04,6.288946475144011061e-03,-3.178915609769941177e-03,-2.649616364686349732e-03,-2.448880198962327150e-03,7.634009674708186650e-03,3.306080269540837436e-03,-2.980132327432639732e-04,-8.765290420261771581e-04,-3.214861474005500496e-03,4.443154341016826263e-03,-1.082283837742250079e-02,-5.037079148232445278e-03,-3.735101849597222655e-03,2.811962671434806074e-03,2.255780852256963551e-03,3.004221083180240968e-03,1.335615861783824923e-02,1.144450408608034542e-02,-4.332065986825233035e-03,2.411965690882115695e-03,2.121828242894858389e-03,-3.489226944113658132e-03,1.582434204087201967e-03,4.458396438767649704e-03,7.851641121615087274e-03,3.926313142182287497e-03,-1.0
30796047531046212e-03,-6.020537514043703721e-03,4.614187141669639190e-03,-7.079782372788659926e-03,6.512412575680339745e-03,-2.917577660625719251e-03,-8.252402302322365754e-03,8.250192153044328888e-04,-5.727601927985048746e-03,1.955092200828387628e-03,7.988374583171456780e-03,1.006680427038567430e-02,-7.190318412931130491e-03,9.077715218546259521e-04,-7.735208518977763036e-03,5.280434864182828245e-03,-9.592146550477086836e-03,5.461244632329264118e-03,1.129351633648494578e-03,3.700863221468150225e-03,-9.850142310022215464e-03,4.486136248527772957e-03,-3.198798730419479333e-03,4.344823043875891029e-03,1.425317027741698859e-03,-4.716295266662829527e-03,-3.671564844159445895e-03,-2.397053819650459168e-03,3.719730586101238808e-03,2.097325138882115154e-03,9.678060571231401205e-03,3.957773137644462168e-03,-3.948107563105402763e-03,5.520068044694298980e-03,2.255444240102619756e-03,-2.822422875645657758e-03,4.621928957876295617e-03,-3.213991150430288261e-03,-3.225005261512336498e-03,5.120081268665083521e-03,-3.696383647817528921e-03,3.937554024966292282e-03,-2.725016953043765244e-03,1.690522653763411537e-03,2.496274010649297131e-03,-8.687039255766359179e-04,-4.711099469602679501e-03,-1.943029203072570025e-03,5.744335719371256727e-03,5.110547736978979586e-03,4.722177349389961986e-04,-5.834334064758699319e-03,6.827267847380441568e-03,-1.276523581675932141e-02,5.226128448904514279e-03,-5.355304911425375243e-03,-7.948659046755731511e-03,4.836672002383064425e-03,-9.134167002082119527e-03,1.633277614932422510e-03,4.250014593481219773e-03,-7.400617299935577202e-03,-4.703739069234202106e-04,-2.402077928801506917e-03,-4.835070256302717237e-03,7.692382897296498601e-03,-1.858286954878130785e-03,4.621228845754355233e-03,2.535695653782889833e-04,4.279387011068998097e-03,4.954731877218501810e-04,-2.702933993240956741e-03,4.800506899241045733e-03,-9.137361529333085488e-04,2.647139915613414690e-03,-1.083226125804725645e-03,3.732123329537789802e-03,3.870856623704427724e-03,6.640951431721093742e-03,2.475562989196976554e-04,1.028331046427920052e-03,-2.315942830741093191e-03,1.197881350694232584e-03,-6.294087996458951235e-03,-1.145224246936086621e-03,1.138621828257687730e-03,-9.368731116591597421e-04,6.216063509600275143e-03,3.647387797726383413e-03,-3.120014169246717282e-03,4.847495600376972914e-04,-4.856571830558863829e-04,1.666034787771991784e-04,8.662029322747139687e-03,1.235803548800783842e-03,-5.731950987163877097e-03,2.441380907300008771e-04,-2.144165378673442660e-04,-3.281512538920285018e-03,3.283736736196175192e-04,1.826135383649912184e-03,-4.119802862033210104e-03,-2.040417298349161447e-03,9.786451964889609449e-04,-1.301661775287272279e-03,-1.644423990917793241e-03,3.322914409273086071e-03,1.137527256004384514e-03,-4.761064734175146640e-03,3.390570962704903511e-03,1.272725469201293862e-03,6.666729086565873971e-03,-4.442836005626108441e-03,1.258758031816500185e-03,8.463907344754353218e-03,6.113342258860146984e-04,-1.589290020018155032e-03,-7.848099087799792484e-03,-3.997028726013180690e-03,1.336379815693982537e-02,-7.226266607437885868e-03 
-3.389142823591415488e-03,-7.105496003818287040e-03,5.020191732420397138e-03,-9.166490249312235147e-03,-2.037561241642180890e-03,3.891823215161039034e-04,-6.311533873019960390e-03,5.426538745410060781e-03,-1.974800525567473790e-04,4.867461486779785761e-03,6.227485827568090930e-03,-5.878734996489516895e-06,9.431845279647012694e-04,-2.787360627808726458e-03,-3.016677441604405006e-04,-5.916656655690116090e-03,8.455895106097958708e-04,-8.002737833505255485e-03,-2.455355854938428423e-03,-3.644256930589324147e-03,-4.869548303891768973e-03,3.304489854873577607e-03,3.004715017186005040e-03,-4.643857709875152083e-03,9.241862429220291247e-03,8.594221395559138374e-03,-1.380909643376626323e-03,-1.789589514108472224e-03,-6.292943800566381043e-03,3.544597495755131417e-03,-5.480340310879156830e-03,-3.262817648155588116e-03,-3.795516901151410084e-03,-8.801733367065169011e-03,-6.029307229745088648e-03,3.047156820297843190e-03,-4.729924115655641248e-03,-2.628214641634574276e-03,-5.137839989857358354e-03,-8.320859443760399848e-03,6.499080147849839320e-04,-1.608478185749728090e-03,-5.712501722731862241e-03,1.241907780602386326e-03,9.667347053985377763e-04,1.019945015003646469e-02,1.100559914537154078e-02,-3.884447113923793649e-03,1.546626402472132608e-03,-1.506121665618461412e-03,3.614919900655183104e-03,9.720105247489235420e-03,-6.396606027570499696e-03,-3.237786084873593706e-03,-8.989561615348527421e-03,-4.348510063852956652e-03,4.546831664537127025e-03,-1.232707964394361665e-03,-8.170482049150393569e-03,-1.015111552383742705e-02,-1.727387042344330883e-03,-2.759924843526268771e-03,1.665610321236852317e-03,8.377566445795051400e-03,6.898291655992078338e-04,-3.882845961273495052e-03,2.821767359326835412e-03,-4.078706966206439161e-03,3.243880052461398274e-03,-5.905965047533667106e-03,6.054376356856759622e-03,3.092641053286417953e-03,-2.306614507178383292e-03,5.111532243184470185e-03,-1.351916362219491696e-03,-1.035255371189732973e-02,-6.223058607811164308e-03,2.475609224835768202e-03,-1.336969017900504781e-03,5.964534214598238120e-03,-1.024432218775296090e-02,4.530420583361695168e-04,5.911712172895787487e-03,-5.871421290062420195e-03,1.008208349963243648e-02,-3.793523538201937478e-03,-7.471182003795859283e-03,-9.810952607235214756e-03,-1.081468001892684806e-02,-1.437675990308156246e-03,5.631545751332059889e-03,-6.445835981901793616e-04,-7.566347846673494512e-03,-1.132274643059811518e-03,-4.532024588618939129e-03,8.959369993456919173e-03,6.834460485327028829e-05,-6.160661626276279536e-03,3.305650369814709926e-03,5.326165951868082511e-03,2.794971974833708131e-05,-8.409024446220561555e-03,-5.289991875146027442e-04,-1.880869489510938151e-03,-6.236706345639365066e-03,-6.960474269878023923e-03,-9.057577708440749673e-04,6.760818301744145331e-03,2.893880823071121825e-03,-6.880864207718637854e-03,-1.764369690989146767e-03,-3.099220066319887269e-03,-5.476790327382027347e-03,3.846628130155464342e-06,1.260174265723735306e-03,-1.330252472393118630e-03,-1.513242316000142865e-03,-5.657699727432251996e-03,8.074205359003126803e-03,1.884195340268866490e-04,6.242142519002898821e-03,-2.964563012407559709e-03,-2.972423694257401213e-03,-3.228484138299089843e-03,-2.749121652792855999e-03,1.209302592015591704e-02,9.035642105034379611e-04,-3.864088472487293979e-03,-3.587156979118782344e-03,-1.941759520879318777e-03,6.075286780239642047e-03,-6.623829692721558415e-03,-1.660350878771718695e-03,-3.823313990118262177e-03,4.776878023314593338e-03,6.809918050022434853e-03,-1.141424319428719497e-02,1.694067079016632919e-03,-4.393869044859700923
e-03,3.094318854653946987e-03,-4.234744081920161114e-03,-3.413760108932005401e-03,3.576645035313390925e-03,-3.138850326437621612e-03,6.226217839942142351e-04,4.255754464189800794e-03,1.199169857671403662e-03,-3.395867055763131422e-03,4.433735257793329498e-03,3.546259076013525292e-03,8.114417561468453832e-03,-2.497197836536119934e-03,5.936185112334940034e-03,-3.984345409606651796e-03,4.131899661141187228e-04,3.777952287274199440e-03,-1.508289711726596826e-03,1.523247362377898881e-03,-2.053999320068813396e-03,4.332539363385054629e-04,-1.076485512037662207e-04,5.303495907164207553e-03,-9.289977017547648708e-03,-3.372751446323059028e-03,-1.802792772410006291e-03,-4.988532065267056899e-03,-1.519601627846384889e-03,-9.966399825766933049e-04,6.728546560664980768e-03,4.405187378297753878e-03,1.088826627896280746e-03,-8.144233357110252004e-06,7.794830266997186934e-04,-5.782629701596765127e-03,-1.300434630607495961e-03,-2.315932782996651604e-03,-1.045173202537295724e-02,4.273274597569320064e-03,4.295791310311454676e-03,-5.612981936617723877e-03,7.253931414027041738e-03,-6.751061622366564575e-03,5.770074369332095976e-04,7.719337489659379447e-05,2.584029572270675999e-04,1.357218389042331476e-02,-5.624046659179578710e-03,-1.510554995915451343e-03,-2.159425206144767895e-03,2.020720282675136546e-04,9.516950162544137587e-03,-3.693793860831997601e-03,4.975191786537045465e-04,6.891973640384137885e-04,2.240391058503285113e-03,5.061724499640503534e-03,1.059628638625091310e-02,-1.001796135216177601e-02,4.111054052620112531e-03,-4.807482692229011166e-03,5.192687727139876606e-03,-1.475243476876132980e-03,8.637381937102850795e-04,4.213373396290513480e-06,-3.514857021168058763e-03,-3.804230884947161148e-03,-2.382709438177885684e-03,8.773997950325092884e-04,5.201238453724233921e-03,-2.903489438952655023e-03,-4.485412335736882992e-03,6.930256371039786659e-03,2.409151599306538424e-03,2.132738611183046789e-03,1.028627235405232820e-02,5.322243904895058129e-03,6.411751966011434729e-05,4.173244105272535913e-03,-6.718340485645093313e-03,2.070655237250676917e-04,4.983004799517824421e-03,-2.975229852749396511e-03,-5.553410654007014504e-04,-6.732140375861879120e-03,-6.777784165865308706e-03,9.352715103340515992e-03,-6.897849757398978772e-04,9.707774385828277924e-03,6.452040103374945572e-04,-8.891848851984116009e-03,2.550180559253281253e-03,-9.102460578037161257e-03,7.339135509378128322e-03,7.687839773533124187e-04,6.187961364583836714e-04,3.594208257984218293e-04,-5.180158364262913914e-03,1.360481560813644195e-03,-2.443352019260279333e-03,-9.209832584011760220e-03,-6.592999499944556235e-04,1.846861300423550392e-03,1.097320452267569902e-03,6.861854570871743671e-03,-4.917524669907553415e-03,-2.269367689905611371e-03,-3.936739353760484311e-03,-6.998643043955649588e-03,1.438197702394698393e-03,5.435348602767341034e-04,2.025682419186200945e-03,2.695623961097451914e-03,2.346130372570005319e-03,3.974470962954751441e-03,2.078351882364471836e-03,1.765752364207080500e-03,-9.090023935694281632e-03,8.411232584135747842e-03,-1.919665367332411537e-03,-3.958287634451013864e-03,5.497590758207782265e-03,-1.989061435711478483e-03,-8.883550150363187653e-03,2.905747731322703216e-04,5.236890179182845766e-03,-9.078926486437514584e-03,9.367257849831906596e-04,3.324436893644694326e-03,-1.021835292370771968e-02,-9.635847961275641888e-03,-7.084738450588315070e-03,-1.725842723558756531e-03,8.166249267314738478e-03,1.506361805384423764e-03,-9.280817961622035706e-03,2.998314017532409306e-03,-2.064408430749042553e-03,3.250049532157719880e-03,6.27883558951
0771767e-03,5.760993483442297322e-03,-1.148898820356481073e-03,-1.704572167118826274e-03,5.219369399479557034e-04,1.788085389838565964e-03,-5.473754550290325406e-03,4.523280836273234871e-03,5.250009179991117422e-03,8.709444302427087710e-04,-8.538202904302577860e-04,-3.571349796104923008e-03,-3.076191896493292183e-03,3.888927981220333639e-03,-3.638252901755689023e-03,-1.866674839723777299e-03,3.463498843612241012e-03,-6.972658243471059787e-03,3.411176089078129452e-04,7.707003662623274423e-03,-2.917256758140416130e-04,-8.374433043120230180e-03,3.342445768588624697e-04,-4.517472088343359345e-03,-7.435048815753475711e-03,-9.880899990227903426e-03,-7.264630009900131932e-04,2.067587969004405432e-03,2.835611847725121471e-03,3.744807873639046693e-03,4.640885410164183843e-03,7.213992309844790250e-03,3.523687103826396872e-03,2.852704511575564639e-03,-3.675357622705430162e-04,1.697694500488533239e-03,-3.379032751171965471e-03,-5.951833431134955117e-03,5.678003464216659947e-03,-7.481114392680933423e-04,8.980575603439682961e-03,-5.551209534957377532e-03,1.984418118803405427e-03,-4.540609979142764402e-03,1.044523916807597831e-03,-9.958407766631162047e-03,-4.394876088243741992e-03,-8.791458822108149393e-04,-2.098027724903298117e-03,1.700597826994499013e-03,7.234041474736968569e-03,-7.772467676716368623e-04,2.513466384725360735e-03,3.088488094155142361e-03,-1.367164603598817986e-03,4.532010377786063878e-04,-1.458017766017504976e-02,-2.728704493192027347e-03,-1.196833789247820756e-03,2.513993491639029127e-05,-9.900335646300426062e-03,-1.263841771308416581e-03,1.731361246120499253e-03,2.522610341273301502e-03,3.327055018259050291e-04,-4.648177170302807311e-03,-6.473151938232591350e-03,6.907437886567185763e-03,5.301747198591470409e-03,1.422226049324989310e-04,-4.541251372404841828e-03,1.672689564585203232e-03,-2.880249559896367207e-03,5.941156478372402192e-04,9.970318679184233743e-03,4.314001246830468858e-03,6.815898811918597380e-04,-3.354045282557589899e-04,-2.059723988132499718e-03,-5.213295802731262273e-03,2.196612687245306800e-03,2.328927632244002111e-03,-1.472482222909502381e-04,-1.594997491330232831e-02,-1.415349436065863964e-03,-9.569440781835230475e-03,-1.507807747558137252e-03,-6.029299202747805640e-03,8.253424958537453060e-03,4.099000992619417430e-03,4.039269167986774510e-03,-2.952260703950116468e-03,1.514122323537073342e-03,-2.857851699225455108e-03,3.410309832190989120e-03,-1.202718620301199267e-03,1.006765563838800046e-02,-6.463542475472514731e-03,1.650505251687415294e-03,-1.358806696786375723e-03,2.032880024057743575e-03,-7.810748031982015080e-04,-2.763489665506310700e-03,-3.698865623837148354e-04,2.390393335459244789e-03,2.944367679394472356e-03,1.062425849338325713e-02,4.973701100767177231e-03,-2.109121478718005600e-03,1.740896898621217934e-04,9.494743061074733745e-03,3.187314381769603897e-03,1.456792273271793154e-03,5.476087611591869548e-05,-4.028138599571403494e-03,-4.886022333589367349e-03,-7.589364241499119666e-03,4.401090030155864648e-03,1.245257661793978390e-03,-5.655291492229112078e-04,-1.701263862770446616e-03,3.195348747754747209e-03 
4.197631013932898517e-03,4.306420853095052105e-03,-9.853148415701335275e-03,2.770022016567648317e-03,4.817479693670416625e-03,-1.526951904949088454e-03,-4.401613851419950568e-03,1.751704910946264146e-03,3.174110272573326709e-03,-1.032132833035729648e-02,8.228480315776754176e-04,-8.586045495761543411e-03,3.866606006955927884e-03,1.862871906074597180e-03,8.657073566428613587e-03,-8.998166985884119501e-04,-1.614513349522456480e-03,2.030170559981313684e-04,-8.445261507589306438e-03,7.977943969288265463e-03,1.642805923057218337e-03,1.854109356473063143e-03,-1.791548756337250654e-03,7.055055034015015811e-03,-9.248977833617831254e-03,-4.735477330915748018e-03,-2.100942666185673206e-03,-7.039000137377351583e-04,-5.239243543730258933e-03,2.234962231166051205e-03,-1.300847209560550798e-02,-9.496669422724656178e-04,-1.078775337365996123e-02,3.195882826239494073e-04,1.233393109440945401e-03,7.904583811262886478e-03,-3.379705996319413899e-03,1.682615565680572198e-03,3.595716099986981037e-03,-6.567201664967513503e-04,2.493400053321890523e-03,-2.632364706867177089e-03,4.220235169300994688e-04,-4.376614224279922784e-03,3.342066348012333668e-03,-6.695322041188027894e-03,-6.499766697897040209e-03,-3.205250180108593604e-03,-1.363506318408543841e-04,3.746198491215988954e-03,-2.870947455620275878e-03,2.817905718520606183e-03,-2.122551629863086466e-03,8.472073407982257456e-03,4.416646959810100956e-03,3.535263757797032017e-03,5.726706160121086773e-04,-2.388667924985446011e-03,1.037062019182714064e-03,-4.904361084577772180e-03,-6.291070386018974767e-03,4.128833820874727722e-03,-1.164602599229122093e-03,-2.186220315746849243e-03,2.860819746348759935e-04,-1.671360329260472989e-04,5.215433592201742727e-03,6.477778184488718147e-04,-1.808325447530716862e-03,-1.999202934147443480e-04,-5.882015820506718978e-03,1.218517990790417803e-02,-1.982465313736709450e-03,-4.315297055930901460e-03,-3.830979442220119879e-03,-3.090707515885634388e-03,-3.758111048311010859e-04,2.948378123518685060e-03,1.681135537996308904e-04,3.459610355219702293e-03,-1.089996449160012329e-02,2.880368413385266894e-03,-4.817625902493584931e-03,-2.659834032726556517e-03,5.225661387081579397e-04,6.290568268138531198e-03,-4.228096964102662052e-03,-3.986954870847755038e-03,9.176540806930481159e-04,-8.563556971500975099e-03,1.059410661091591231e-02,4.527606691437018743e-03,-5.340644959159666048e-03,5.165394390924507410e-03,-1.439081706931742352e-03,-6.121191025171135794e-03,5.485905444399572818e-03,-1.043572033317377232e-02,6.463393303590366824e-03,-4.769607659677680030e-03,-1.649739894924035595e-03,2.083301659778250773e-03,-4.477778373344586116e-03,7.144363548862930728e-03,2.611037156717424708e-03,2.774511041943895635e-04,-9.329690672971006904e-03,1.115530255575475534e-02,-8.853864938979095350e-03,-3.316089516849199436e-04,-9.708936834780650680e-04,5.016213407815001656e-03,-3.179677338784440716e-03,6.079975616722058454e-03,-6.411714991035215479e-03,4.450648524504277441e-03,-3.393444175776809141e-03,-1.356643599910865817e-03,1.400551253200250359e-03,4.957566941889908138e-03,1.234184649912849381e-03,-8.008904495391684880e-05,8.299037687098336337e-04,-6.096363785522658681e-03,6.802706200521113709e-05,6.695568835487367490e-04,-5.702465805020598237e-03,2.744359335603994282e-03,2.787328940388680170e-04,-4.747740667423771925e-03,5.562508786554996344e-03,-6.227998845822102676e-03,-8.208750504527820247e-04,-7.573688827554515661e-03,7.084664096207168166e-03,-6.139279353131942181e-03,7.980010461958972649e-04,-1.978101306684875316e-03,1.784031991393317858e-03,-5.699487
934667591196e-03,-4.866667765512137055e-03,7.021355764218460173e-03,-3.835919639486811295e-03,-2.816223830806618457e-03,2.797072929424240968e-03,-9.806978879940178931e-03,-6.006887417372226874e-04,6.563340653091350070e-03,-9.164742507988736606e-04,2.559408023814223592e-03,-5.651936445829025372e-03,6.416239767151057753e-03,-1.024482719912347354e-03,5.747647969572809452e-04,4.145815196126787297e-03,-2.555091826997714886e-03,6.209792163928786780e-03,-1.515091310920205908e-03,-2.247376220725400778e-03,-5.105354285606997569e-03,-1.000888226587113346e-03,2.941111491095132601e-03,3.017723056385062351e-03,4.276506605015393918e-03,3.800371237596085906e-03,-6.147173540802930783e-03,2.062117651172469708e-03,-1.204818846835015384e-03,7.452669015049842322e-04,-1.856396571641817997e-02,-2.876635324990576333e-03,4.062238709455086826e-03,8.999180711799064855e-03,4.720490319009991095e-03,2.725204228323176072e-03,-4.807870090354359090e-03,3.915859850669604035e-03,-1.342532591924992325e-03,3.919268528302355913e-03,-7.887129927324367353e-04,-3.180341079778939688e-03,3.665284230002957642e-03,-1.043975567822604672e-02,4.046700003568434040e-03,-1.702060508974910643e-03,-5.210237335071503254e-04,2.373019163680379229e-03,4.941914293416773026e-03,8.713281551848922699e-04,4.609934423210291024e-03,6.405532000516509818e-03,-1.649941052519268920e-03,-8.621482724907741230e-03,-4.771214340548222510e-04,-8.691062595631661447e-03,5.014076999681580694e-03,1.313553214790438980e-03,1.171602525456903569e-03,-6.095725151677115934e-03,3.849446998963973032e-03,9.762189635722340331e-03,-7.817392649759717649e-03,-4.824372817962210500e-03,-4.783667516417313120e-04,-1.065799002781197885e-03,-5.417111082664374105e-03,2.107172409584345645e-03,-3.092363625649778396e-03,8.933887123802638011e-03,-1.622498001817744952e-03,-4.548772880968924084e-03,4.296084851551148533e-03,-4.159560917025115212e-03,7.861427010070710583e-04,7.742930078826613070e-03,5.191048618968066878e-04,-3.807290483574960090e-03,6.330366940801817124e-03,7.595662366653525019e-04,5.036510078762534807e-03,6.280927690360227052e-04,2.703568546661657725e-03,-2.518530357357799329e-03,-6.044524649688185966e-03,1.438892670805186000e-02,2.295572782260317567e-04,-1.382985726536484607e-03,5.882067539766763775e-03,-5.401403852288801967e-04,9.530208596132209650e-03,-8.875385175812046737e-03,7.420827515024303002e-04,6.446602923710568166e-03,6.070221940684741033e-04,-6.074870435332500526e-03,-9.310620989056803826e-03,9.397744904737843213e-03,-6.101487233879905082e-03,2.276574415095384239e-03,-4.429297132896119031e-03,4.866323799549784760e-04,7.537427608281706444e-03,3.617130900622473819e-03,-7.287428554045300398e-03,4.431740024818424210e-03,4.632276574100929398e-03,5.420433266048809394e-03,-5.124805648018212513e-03,-4.248803136116505869e-03,-9.005346660182170226e-03,6.841296412073584639e-03,9.218216690110343651e-04,6.112761802303903411e-03,7.339043590364593972e-03,-2.206943406713166456e-03,1.152712015972617290e-03,2.196073728158596996e-03,4.101843127794243919e-03,3.604709961906846487e-03,9.694992467255211452e-03,-3.134855754076789052e-03,-3.500285981886723519e-03,4.288411703688694773e-03,-2.476401656199123504e-04,4.001244857494474887e-03,4.125193773706666532e-03,-1.415976430841674833e-03,9.547895415909082167e-03,-8.101196974709009430e-03,-1.737471484762885630e-03,-7.111321631428902095e-04,-3.873430715689980992e-03,-8.214777508859485078e-04,4.486591015764123357e-03,2.006458318685998624e-03,-4.331403485389709491e-03,4.407165647395324412e-04,1.067754339439030455e-03,-7.099287271781621646e-03
,2.107018285963453870e-03,6.659738356624205109e-03,3.482060092899571645e-03,6.723836994756031199e-04,-1.128208959081297355e-02,4.473114553842580561e-03,6.335649442887249932e-03,8.092552513878055812e-04,-2.641877694497341954e-03,4.905319482956671182e-03,4.347703434594058294e-03,-1.552387520057027515e-03,3.524063519029746721e-03,-2.085764285153355289e-03,-2.617282149998659724e-04,-7.984675652052406532e-04,1.081184487517214757e-02,2.692483130775469736e-03,2.921947207989177356e-03,-4.046101875488504473e-03,1.172006425604542844e-02,6.713285660703689718e-04,-6.423021136498683129e-03,4.313398731228888609e-04,-6.497801851053484510e-04,-1.491772857450265454e-03,7.410344808218919874e-03,-4.891220198797274592e-03,-4.884273172653614235e-03,2.070684428645416623e-03,2.190180833213717111e-03,-6.366815187966124931e-04,1.617302834955714960e-02,3.194149668960354852e-03,-3.625412578727355700e-03,-4.687871401230003787e-03,-4.343663184079350932e-03,-4.376255257640845843e-04,4.849407763653662898e-03,-3.278078230988433484e-03,-4.818464253592123596e-03,9.953170281604561617e-03,7.331782739944084804e-05,-2.300922664710530058e-03,2.956501162622696393e-03,-5.860493135474732600e-03,1.965221157398702647e-03,3.382980941226382265e-03,1.244241657914181391e-02,8.902320756935927157e-03,-1.967876546322829771e-03,4.549791557645183580e-03,4.803136951501350317e-03,3.620676529536742769e-03,1.031887610332552288e-02,-1.198375888808471922e-02,-1.384083404597489901e-02,-3.146718616780798047e-03,4.003497183048167728e-03,3.933610965793064157e-03,-4.110981540442638763e-04,5.091546019374770631e-03,2.405737881198866286e-03,-1.913462455168902241e-03,4.298590980245801984e-04,-3.318757228510306467e-03,-5.914653955542256182e-03,-7.341058360713451428e-03,-3.927697290390154590e-03,1.045832821118219220e-03,2.179968901538139291e-03,-4.704655536757439138e-03,-3.664941956963342238e-04,-2.005269002175701087e-03,1.897649816495711541e-04,-1.195801485706475499e-03,8.641717535581464960e-05,-1.855058764630756422e-03,5.196450368227797240e-03,7.076294220239429025e-04,3.534750423539103768e-04,-3.954456333125762509e-03,3.622138250787118076e-03,1.456230741913526255e-03,7.819038104614707465e-04,2.612364990054611910e-03,4.331882690766021213e-03,-2.013376332343364515e-03,4.632917465693722585e-03,1.982988576817308526e-03,1.531642731970196420e-03,1.642783010173933834e-03,2.108007534215812495e-03,3.274098521834452845e-03,-3.706745651848497494e-03,8.452127242981347099e-03,-2.714028589656642797e-03,5.943396987562517969e-03,2.237748093645916449e-03,1.152437004376846621e-03,-4.771080124540839906e-03,-8.428107170070043772e-03,7.691840650925906193e-03,3.774560351267621915e-03,7.145614137957440395e-03,-4.045076595192654591e-03,4.749168607434490520e-03,-2.544395564994675254e-03,1.972029726619445311e-03,-3.571989917652413932e-03,4.381528757248028157e-03,-3.544696190615350719e-03,-1.507689731416724920e-03,2.978836566410896815e-03,-2.192958414622978372e-03,-2.529573388583418719e-03,1.207250801636423378e-02,-4.094128484015513263e-04,1.599425855273135105e-03,5.686550494403623361e-04,5.470163282524330428e-03 
5.288747351142161247e-03,-3.424379321798941625e-03,-4.589760536526914930e-04,5.285236462750023644e-03,-1.208264768617245714e-03,4.575831604157163356e-03,-3.338540845022677241e-04,6.427069388972175314e-03,1.336907884688942139e-03,9.251963073159969603e-03,4.160992541762101836e-03,3.484067371084816501e-03,1.098022068826498727e-02,-5.557225077808653456e-03,3.391397322132677292e-03,-1.773072071032718655e-03,2.180187236976553929e-03,-2.935892572990235830e-03,-3.974021418335063095e-03,-9.968181892588147061e-03,7.193268945935749926e-03,-3.062885991045643987e-03,-4.971304311552257257e-03,-5.385249245699911744e-04,6.869787975206405458e-04,6.871204086503207922e-03,3.328225073710860960e-03,-7.072291501621978844e-03,2.654176055078556144e-03,2.881326650946784348e-03,-4.134040947354706359e-04,-1.061850991891556337e-03,5.332339236945220565e-04,2.833895108892183802e-03,-1.731989310827789256e-03,1.504607833783386988e-04,8.043453134630770829e-03,9.240367284004400491e-04,-5.429541775705550542e-05,-1.893617247083412107e-03,7.520659602606104624e-03,-6.935567856900580128e-03,-2.112470425194719684e-03,6.344645394779243469e-03,3.086486915631553481e-03,-2.944943745386569087e-03,5.928592891943727100e-03,2.064913310722549071e-03,-3.079519248525248114e-05,6.930727819729499799e-03,-4.332115103212482062e-03,-3.044303688441173800e-03,4.413545437231676700e-03,-2.245519018440875528e-04,-2.289974378073787297e-03,-5.113968364549081012e-03,7.939468222705886591e-03,3.482074610513680992e-04,-1.069875445257031238e-02,-7.678405732003340013e-03,-4.061707243619654302e-03,8.370290612104362704e-03,8.487866861981782987e-03,5.536276384106904834e-03,-1.230695670065192100e-03,-2.665985369999051419e-03,-7.114847533278003018e-03,-4.305501542164411229e-03,1.024017344511027675e-03,-2.234571482436129106e-03,-1.702419519726847434e-03,9.093609381181796958e-03,7.934566015763179492e-03,2.373129945306563359e-03,-7.433109676464325554e-04,-3.044312919239191683e-03,-1.610603184252206051e-04,6.757254288724752521e-04,1.259367820237822276e-03,-4.128893155650420303e-03,-1.914233695870942243e-03,-9.753996706844055313e-04,5.295603746955495142e-03,2.205169653038873290e-03,-4.470265505087642893e-03,-2.799498417154759177e-03,2.852899597513314754e-03,5.000108223276987680e-03,-2.220160881429514114e-03,-2.196516022755562979e-03,-6.208786833659904122e-03,-5.082789200213598188e-03,-2.439499110945878707e-03,-3.176195485296670618e-03,-6.785513616085932231e-04,-2.859360186286933388e-03,7.044828481179963436e-03,2.464927528818283003e-03,5.669160119607392911e-03,-4.904552955835798055e-03,-2.114150092488845546e-03,-2.944600601580544402e-03,4.201331763441682257e-03,8.453381250311822748e-03,-2.394673965628654668e-03,-2.171292747745131018e-03,7.856992742809834618e-03,5.347700490311533064e-03,-1.086018863704084694e-02,-2.186126560100106093e-03,-4.342014965916693794e-03,-1.057756336241934226e-03,3.434573616287725940e-03,3.356571336438707114e-04,1.277130457188176442e-03,-8.394448084968348864e-03,-7.452923656213564577e-03,1.736521109105423899e-03,2.065977056883209172e-03,-9.056578412911671025e-03,7.931201774299290186e-03,-4.801270873874390228e-03,5.833189795149062822e-03,1.466654521920725783e-03,-1.156026475396408756e-03,9.829593264607392605e-03,-7.638646907312323167e-03,4.402744016818103436e-03,-7.267021130753976467e-03,2.466417223639311325e-03,-4.917794095346364990e-03,1.059203426917296534e-03,-4.492015617845686315e-03,2.144877813242288510e-03,-6.741626031944698101e-04,3.590354946569540226e-03,-1.035396856892554021e-02,2.302755629584150084e-03,6.112937200036023047e-03,-3.94550
9216187940635e-03,-3.106052250033741746e-03,1.658638215952814047e-03,-1.043766818653519115e-03,8.157009269180661055e-04,-4.749443532741743842e-03,-1.317155747859055908e-03,-5.812201491260551682e-03,7.270338736962817482e-03,-1.770775544722420422e-03,7.365905514926116351e-03,-7.261664703874755658e-05,8.947399796161206384e-03,1.685192806430101152e-03,-1.572074211431239397e-03,2.682805220613471413e-03,8.564371539302747735e-03,-7.940597620773235093e-04,-1.398908750536185640e-03,-3.285962840577346463e-04,1.597287821644385528e-03,-5.510622643186552372e-03,-1.983320173061857167e-03,-5.127685446093844905e-03,-1.718508509558286075e-03,6.459868031537981115e-03,-2.139148865282392183e-03,-2.659622779346914202e-03,-5.364344914977441092e-03,-1.560456603819032747e-03,4.654184913361381765e-03,9.217229194493590030e-03,-5.487434630070370245e-03,-5.998153489073339110e-03,-3.369578804263481143e-03,6.933912691304064749e-03,3.421201712279576916e-03,3.204691446822172541e-03,1.287368626159787754e-03,1.816570589065542483e-04,-1.801644937727976247e-03,-7.772800697538489491e-03,4.019425504199012483e-03,1.901733783740221229e-03,4.588014891123668414e-03,-5.234573160882939093e-03,-7.407083094949664358e-05,-9.796188412574978466e-03,-1.174197471988817692e-03,6.209364821867032098e-03,-5.076633894581016569e-03,-6.440918210820418752e-03,-8.353098621742454169e-04,1.023266527601366348e-02,1.596698592375034136e-03,7.258792080547034342e-03,-4.353958827400215836e-03,3.926055928601122555e-03,5.486490042487591687e-03,-3.020370401563130490e-03,-3.594295472222794568e-04,-2.390934963084836648e-03,6.259388336170421639e-03,1.725132297352448968e-03,-1.783882162575971004e-03,2.539209826636417908e-03,-5.942982891604060847e-03,-5.662614270452505935e-04,-2.832952677600388252e-03,-7.688709677905468040e-03,8.629379512285187750e-03,-4.484522489204278116e-03,-4.708773963454926381e-03,2.847450905390219028e-03,1.303687488160370188e-03,7.817736547373608721e-03,-9.337866708755398343e-03,8.810355677714456352e-03,-4.438306217868244648e-03,2.474613187886543202e-03,-5.498425561172367616e-03,4.485163108390039909e-04,-6.170104130532110334e-03,8.508562363041191623e-04,3.673980610639702299e-03,-2.056396821114189440e-04,3.600774606570446811e-03,-2.378416686579364926e-03,-1.282433807233068768e-02,4.168983139947159133e-03,-6.477306296129902233e-03,3.210718450599756935e-04,-1.426448739906714322e-02,-3.299073113080935264e-03,2.274894355461879944e-03,-1.616310879372338111e-03,7.109728918621285648e-04,2.842086989577758890e-03,-1.096973168938972405e-03,-2.773197102850299273e-03,-8.805669216143580807e-03,9.871348798618138042e-04,1.128377552777712562e-03,-3.098034721725346965e-03,4.478539633575783115e-03,-6.711633783746537773e-04,-1.845726918520732571e-03,-8.051551564423064817e-04,-2.301263588108301214e-03,-2.595426515321826947e-03,-7.768567701373919540e-03,1.118763316662283250e-03,-1.719231400735225486e-03,-4.801120622932158848e-03,5.779939014944362151e-03,9.938601811536470002e-03,6.834311441562509245e-04,1.870741005432005740e-03,1.391551725357875529e-03,-4.516682964141279996e-03,4.075029551368269595e-03,6.273742779674513007e-03,-1.907198073177157616e-03,9.837805832490702923e-03,7.672857868617669920e-03,-4.135242993971090242e-03,1.802644339762605151e-03,-2.955015817089587903e-03,9.224453247955404335e-04,-1.252057522744966884e-03,6.181934391366330876e-03,-1.661091813297600013e-03,1.932075086079185937e-04,-8.687080409953213539e-03,3.302028990707213570e-03,3.903518578541975963e-03,-5.603991111344104083e-03,8.693812679248510283e-03,7.508741542349514457e-03,7.426372291425
119808e-03,2.033669651978352484e-03,-2.132755919599135189e-03,-5.352999728293279628e-03,8.024891067657245400e-03,1.877214473279029236e-04,-8.133262383026487741e-03,-1.138273523895458458e-03,1.539009723675006892e-03,-5.326893036829109286e-03,-2.563918476041314107e-03,-7.266766381617893560e-04,-7.270703737394418852e-03,-2.864370330930603864e-03,-4.864903375975747346e-03,4.838490305892289667e-04,-1.892746705389504458e-03,-6.396386005188044871e-03,-4.272367379850012560e-03,-3.131618290670712401e-03,2.441370170050652811e-03,1.155415339202877680e-02,2.017981013098978788e-03,5.066728000515982166e-04,-3.789394986226407580e-03,6.117187371756080405e-03,3.286583703032727125e-03,7.117315272702329394e-03,9.781759865974022639e-03,-6.270780729713172803e-03,1.439207950571300782e-03,2.642589667210735321e-03,-2.288237747767929431e-03,6.703885119823430397e-03,2.964798892729305236e-03,-1.338910666578720619e-04,4.288922452114367345e-03,-2.068154927028755842e-03,-6.044176366143432476e-03,4.600786222802167279e-03,-3.873719634174684415e-03,6.579174650685731124e-03,5.026205353830766505e-03,-2.744532999775349362e-03,-1.533570931270296714e-03,-4.221523233110851397e-03,-6.211951915638333228e-04,4.417648902400437075e-03,-7.638676706956701139e-03,6.418122219357759450e-03,8.814718507187395413e-03,-7.358698589278685198e-03,8.971329789034948599e-04,3.694244958865970305e-03,4.564441309138352748e-03,4.577665564866701883e-03,1.551721567838404382e-03,-5.143974919686013919e-03,5.056841517711840699e-03,1.512796493369319636e-03,1.157813255817129790e-03,-3.593105194638641225e-03,4.874324658513475711e-03,9.944581519717717727e-03,3.988130297405756593e-03,8.200575219439574029e-03,-6.591065125754973393e-03,-7.042222251862444854e-03,1.059701149660887722e-02,-3.752798087944650158e-03,-3.514780253410426224e-03,-8.574325338876913338e-03,-1.315319743759310557e-03,3.451074874542311554e-04,-8.425795446126514016e-04,5.881850551094350808e-03,-3.358281036895029853e-03,-7.542800370023580031e-03,1.507715027145737014e-03,-8.268283826088860047e-03,3.798091091691047083e-03,-4.611881376295558915e-03,8.882125430248705850e-03,2.123098424905174759e-03,4.797704071036332056e-03,-5.165627043628863435e-03,2.140571184665110427e-03,1.336065696460280719e-03,3.149168379337697037e-03,-6.022866666863285062e-03,5.477880590144991978e-03,3.472834218249576995e-03,-6.885271261360161044e-03,-6.340440564147820611e-03,-3.036174186651827598e-05,-7.168443773651120635e-03,-8.686045415477584505e-03,6.804608250814595839e-03,-2.303911414926225823e-03,-4.860323979081371260e-03,1.830542894202836232e-03,-2.212626065393923005e-03,1.212151489598371247e-03,-4.215537805589523145e-03,-8.916322023198534202e-03,5.250213713611265667e-03,4.797412590179127821e-03,4.945144855635626863e-03,-3.915484710326490661e-03,2.696064936764778781e-03,-7.609832196217831230e-04,4.983383324361938434e-03,-6.127837423663744405e-03,6.482142117295830799e-03,-9.596575377961815475e-04,-2.179028738866022573e-03,1.351574551527090192e-03,-7.640685185480704938e-03,1.028113393613787167e-02,5.699755004436327967e-03,-1.334216478048082244e-02,2.412979526683709658e-03 
2.418502902792171640e-03,-1.296319174894258221e-02,-6.222278356092101476e-03,2.565190317842332927e-03,5.472944187812878516e-03,-2.747095919736900534e-03,-5.136747548940338165e-03,-1.499627067732273687e-03,-5.739350313718898783e-03,5.595112729371577200e-03,-5.352680317554568863e-03,4.222693211721186175e-03,1.387973091584869360e-02,-7.281255556998894422e-03,6.244839285770548143e-03,-1.054600423714717548e-03,8.784172496669713850e-03,-2.152353536564445731e-03,5.271000114653335081e-03,8.136309063190575591e-03,8.516584702910608845e-03,-4.026726849724188693e-04,-7.285670889877501714e-03,1.127338757926401626e-02,-5.356224102535902219e-03,-4.305173598121165256e-03,-5.347252540485109180e-03,-1.024902935669847782e-02,5.005417432269845639e-03,-3.614254785368576067e-03,5.237629771700255503e-04,-7.471361385735111535e-03,-2.912002299774444757e-03,-1.043859299239239353e-03,-1.317726093062226989e-02,1.491104255205815072e-03,-5.342337839101009116e-03,-4.961828549420693561e-03,-1.001959159988544297e-03,-3.258412675329181492e-03,-6.704623721328557481e-03,-1.949082861729223987e-03,6.100562712118534625e-03,-3.875751087904709641e-04,1.755897582533666023e-03,-2.999085887488498457e-04,-9.839429352859582929e-04,-4.138727933984157406e-03,2.952734187865835563e-03,9.906721476582827299e-03,7.507308265131579532e-04,1.061401675965934106e-02,-8.695819818664172942e-04,-7.581454294835497437e-04,3.230149146646576694e-03,2.476605587462360654e-03,-1.019964935240940535e-02,-5.297901337260125035e-03,6.702411960317498831e-04,3.883913907357332187e-03,-1.625251272485398260e-03,-4.154036848456082900e-04,-1.006308446668675410e-04,-4.086052948514900911e-03,2.633961296631454844e-03,7.293315174819367183e-03,2.410621455557558201e-03,1.779197598587652692e-03,1.674922900443770457e-03,7.485460344260726684e-04,1.630241900741002340e-04,4.226217242944850429e-04,8.533242144335128829e-03,-5.329436336802093489e-04,2.347935152562118533e-03,-6.875801107192896664e-04,-1.031810495857645935e-02,4.284896267962702218e-03,-5.944117600980048342e-03,-4.193793284947036799e-03,2.770397193785031388e-03,-1.966704741210194025e-03,2.987097036619839295e-03,-7.865195864180204999e-03,1.448241451648011810e-02,-6.938293125663375430e-03,-3.921326867836504092e-04,-3.800094481588393684e-03,1.652291472641009840e-03,1.093577021134762260e-02,1.632391098344912689e-03,-1.546431785600556851e-03,2.526810276115315067e-03,-5.855021122274951632e-03,1.103308035793054828e-03,-1.504136208181216725e-03,1.390865252187515905e-03,-2.500329270388066457e-03,-6.998905750729838247e-03,6.475281334147967374e-03,5.754627497516838497e-03,-8.486363863962820470e-05,-1.780445290382109275e-03,1.070651023749068578e-02,-6.501457146329992810e-03,-6.340406231014984522e-04,1.202245832257572760e-03,2.303567483345253433e-04,-7.395082436583182671e-03,1.795807129718035317e-03,-2.555116237061997274e-03,-7.513895419073378899e-03,-1.991153499226896214e-03,3.714057246769241101e-03,9.175042843897364539e-04,2.301976612341156796e-03,1.928628812781354313e-03,8.805060990950615589e-04,-6.187095071401765671e-03,8.473244308544402269e-04,1.338498824852990847e-03,-7.678021813688934742e-04,4.413241211992576310e-03,-4.854932072800209283e-04,-6.334251671013966557e-03,-5.421069239536355742e-03,-2.354311576177610388e-03,3.818380731603390841e-03,-9.600765580802765944e-03,-5.697822808060455310e-03,3.158960634658268313e-05,6.658996057784742628e-03,-6.069473234081177761e-03,-1.370053664251719800e-03,5.585963322095722702e-03,9.760525108748013129e-03,4.205821662293021852e-03,-1.136674729230700625e-03,-1.964270649447094359e-04,-2.165
068206357776922e-04,-1.786030213925817248e-03,-4.482732794636640899e-03,-6.623320989717843520e-03,2.946286590856079884e-04,2.088365502058899242e-03,7.257156819564855935e-03,-5.100397219766128476e-03,9.162975307404265957e-04,-4.217966358745452692e-03,4.146217686299037350e-03,5.683143789587506364e-03,4.222199496827651441e-04,-4.414054879016094876e-03,1.295539547887059138e-03,5.248838188496345513e-04,-3.768048039789364048e-03,2.808508974489131674e-03,4.228031400385319667e-03,4.292828832599213801e-03,6.051568031868698010e-03,7.331859538386074904e-03,-8.125368065887633023e-03,-5.757902721795920414e-03,7.120199882020296009e-03,-7.853826052580973813e-03,-1.558407035475464731e-03,-5.367811154738076629e-03,9.838086382801558040e-04,-1.921212042415779923e-03,4.636206559503081373e-03,-7.526738449289127435e-03,-9.709530857661394232e-03,9.934810052399061300e-03,3.501940353540898207e-03,6.069774474183044178e-04,-4.773492506059219542e-03,-1.329936193580202311e-03,5.208914711896680746e-03,-1.375708317385953899e-03,2.896348173210462878e-03,6.140799317288827761e-03,-7.764376227825373067e-03,1.293595558798277863e-03,-9.439811828766684829e-03,-1.156901180239528490e-03,3.405961993901962786e-03,-3.070827853501703706e-03,-1.873832210560283157e-03,-9.315069397608051124e-03,-1.683736517037407810e-03,-4.368824782153104755e-03,-8.131935656036298060e-03,-5.105629801766230214e-03,-4.073127630753568471e-03,-1.341591978796539530e-03,-3.733710800764017054e-03,9.635701092902421855e-04,-4.936564620910536584e-03,4.650026267641687271e-04,1.068579427063043623e-03,-2.212484552867216223e-03,1.420807497806062056e-03,9.749971670542913150e-05,4.651263562690142843e-03,-9.462361976255716537e-04,-2.489307090691531173e-03,-1.792858703444828684e-03,8.098446977521919776e-03,2.302664583933778774e-03,6.263768927869521955e-03,9.690360793186809062e-04,3.705898311477949774e-03,4.904754428838710087e-03,7.386856199332663027e-03,-2.150516238741951008e-03,-4.955902928560703032e-03,3.621722948473556601e-03,6.279058105924975076e-03,-1.050486591779445757e-03,3.638115570979511427e-03,5.546276527653269325e-03,-3.988759585838257821e-03,-6.841361174377229683e-04,-7.683197914258609908e-03,-9.889127248757390715e-04,6.835706127921800310e-03,-5.807682481505136753e-03,-7.528438508423701721e-03,6.955112422131114408e-04,-5.299691411458622853e-03,1.339724573882625702e-03,5.583034449731123609e-03,-2.183756712730524577e-03,-4.401408559128031951e-03,-6.455771238115878483e-03,-5.833098570547922378e-03,8.110389400411411459e-03,2.011493231281101090e-03,-4.144324088399227619e-03,3.431388307007470721e-03,-8.504117906720019653e-04,-2.214745367783181249e-03,-1.495000822832180231e-03,-2.336673844624608150e-03,9.876386956581793830e-03,5.632830134937537835e-03,-5.097069184937055332e-03,-3.817669780083981449e-03,2.804817456516746528e-03,3.581253223902434524e-04,1.496703328730843705e-04,-5.242889308737353968e-03,-1.072263898363409003e-03,7.003209977226461033e-03,1.980285068924122951e-03,8.438985116258955563e-04,1.595164275302467932e-03,6.155294953234592996e-04,-7.429256089094080605e-03,7.940368714446146073e-03,-2.332387891582031296e-03,-8.192987547687005656e-03,-2.334455332281391675e-03,-5.542375146064604848e-04,-9.834156768150914552e-04,1.603205813503268129e-03,-4.100504440314804190e-03,2.495598332771659534e-03,3.228551090487489107e-03,-1.700262230558182916e-03,3.554797187107370995e-03,6.182332428150458898e-04,-8.916005275264517194e-03,-7.103768352346113112e-03,-5.992471200905721829e-03,8.575729063149956219e-04,2.394434393636796742e-03,3.391424202734030064e-04,1.4480804061637
86550e-04,-1.228282413356981051e-03,-5.612791581414956765e-03,-1.159256102379008484e-03,-1.664545216155089184e-03,4.721325579084331009e-03,-1.692458799238584803e-03,-1.653416454362357363e-04,3.508143170581806084e-03,-6.601468650558960094e-03,-1.346194161422479543e-03,3.097362940956208122e-03,-8.316842372432433489e-03,-2.771466067036287752e-03,3.711174745558216102e-03,1.333318278076755202e-03,7.705150665794463852e-03,-1.474298961406277043e-03,9.335205551202688593e-03,-6.423073666602974706e-03,3.824060696739070737e-03,-2.555439253738522008e-03,3.622235122073987599e-05,-1.001097755022715627e-02,7.889875003305773377e-03,9.627505626297524002e-03,-7.731461454951122150e-03,-1.968357849014445125e-03,4.426838230747582280e-03,-4.100561488388678854e-04,6.106940472009079329e-03,1.284489736909210072e-03,5.005033474956269504e-03,-1.415522165826156630e-03,-3.066010282801663253e-03,1.225141856208722966e-03,-3.354553723316867908e-03,-3.813667730781339273e-03,-1.126526190607069280e-03,4.081876748368156414e-03,1.002232926573863939e-02,7.166210761534336574e-03,3.749344896165126423e-03,-1.240214903269684817e-02,-9.727513914044756682e-03,1.494010252005839308e-04,4.931097742814716592e-03,-3.313540482159726876e-03,-4.874658666018596639e-03,-5.223689633325847605e-03,-3.623106449972796506e-03,5.155124446875128803e-05,7.566312009473908154e-03,-2.854241652478602814e-03,7.719972089786601190e-03,5.832425659448484991e-03,9.453037748663596467e-04,4.530640604768668795e-04,-1.845491336701728016e-04,-6.334776936382501347e-03,-8.205818400488786371e-03,-8.433847369531854293e-03,4.288024195080628390e-03,-1.225775044105199135e-02,7.023953551744514540e-04,3.404850944952452288e-03,1.719181186485383849e-04,4.590337766076593544e-03,-9.652467603968947903e-05,9.472981343202972445e-04,4.288865479891303231e-04,2.437952970823335092e-03,-3.668300556734559702e-03,8.233662441272071933e-03,-3.509810767654985078e-03,-5.058291361321102728e-03,5.373195645159698983e-03,1.058130510557952911e-03,-2.309758134111834348e-03,-2.964548279640028591e-03,-5.024108415066693041e-03,7.460816803830098293e-03,1.230780389911614392e-03,-7.646442340331284677e-04,-1.338062388630237689e-03,4.531042578801248132e-03,1.651123116221024341e-03,-5.482796709127832693e-03,2.489210562010877453e-03,-4.932300482274237612e-03,2.686830199348481593e-03,-7.297302391036578420e-03,7.116097324322634768e-04,-7.438497606785119190e-04,1.650261759349784529e-03,3.701045410570182048e-03,-1.123503225244149722e-02,3.615252830715405541e-03,1.226336241742219738e-03,-3.030774738660820845e-03,-3.490588795320318481e-03,-3.085581860457944914e-03,-1.112302126675929854e-03,-2.877638366562962347e-03,5.022037272556409762e-03,-3.395435807847217106e-03,-2.974457118903062105e-03,-1.313471504183973734e-03,4.537794132583369139e-03,3.160656025383989801e-05,2.847042651070602094e-03,-2.588804167641973799e-05,-3.356429167389210749e-03,6.309268667856444230e-04,-8.857596676926480714e-03,-5.744020087473730371e-03,-1.255021019600069225e-03,2.309811444113418939e-03,-3.748492017077097085e-03,4.910522721528187522e-03,5.347228895927675675e-03,8.316695484573132305e-03 
-5.095786272155952007e-03,1.911949639039111585e-03,-7.498076096505528615e-03,3.016169073199603465e-03,-6.103237291344970790e-03,7.098431739277778416e-03,-4.426221299558045184e-03,5.886741681935203641e-03,-7.387455990548140710e-04,8.724952048282606440e-03,-3.764469484075333959e-03,5.289294635318227654e-03,-4.422786523400690575e-03,6.514884298701606284e-03,5.059248313174147769e-03,2.554676226493101266e-03,1.113539612711426062e-03,-5.667104531077383524e-03,5.059623912526439900e-03,-7.989500051461035160e-03,7.940411987061954391e-03,3.800072744381246463e-04,1.367418957253850424e-03,1.018649098864313381e-02,-6.042571339635827328e-03,1.267445306474311714e-03,4.646537528643589904e-03,-2.169182066915839929e-05,2.737881795137373288e-06,8.448063701129141304e-03,-1.633173631703005143e-03,6.684313460813114116e-03,8.456965799602629216e-03,1.733253240280871726e-03,1.780850568111693811e-03,-6.534916051539737744e-04,-4.316975070784819651e-03,-6.101009149041872713e-03,7.270751900161123368e-03,4.967942886829819146e-03,-8.439380233381497953e-03,3.672753348388737519e-03,1.958212156364127349e-03,-4.312810125391208088e-03,-4.603977740604970129e-03,-1.912298852854038848e-04,-4.473593169041128852e-03,5.284270783765396100e-03,-2.594041556274714281e-03,-2.073402688228087598e-03,-8.165390045832918858e-04,1.240982003503675714e-03,-7.009088817051449191e-03,-2.562211996644032062e-03,4.684500207327644064e-03,-1.342263688355603861e-03,-3.235788169500353282e-03,1.657075874644305374e-03,-8.127124788606862051e-05,-4.442363699200141750e-03,6.068323693927727379e-03,1.625070255440057342e-04,-1.852845607494130837e-03,8.191077188174791454e-03,-1.699003382003141721e-04,-1.270041980960312511e-03,5.903884380316436198e-03,-1.365456324011547545e-03,7.901520417689171863e-03,5.869704577417568514e-03,-8.028556831614367057e-04,2.406639095829454528e-03,2.591709323286897886e-03,-1.896523192431782533e-03,-2.557815676760336315e-03,2.263841461209673261e-03,3.776111636448437319e-03,2.399471166329147313e-03,6.235041249810550912e-03,1.452419893531705589e-03,3.121700506478984442e-03,9.938465203990182878e-03,-3.755253479676155196e-03,-1.538181036911496873e-03,-5.649722731881369996e-03,-9.199560144388252295e-04,-8.147324565561820525e-03,-1.217790727113123202e-02,1.054345634533879368e-02,-1.664175882550119733e-03,1.677184488583188630e-03,-2.137833071365788287e-03,2.376250288525861513e-03,4.316101889444568483e-03,-5.181934893278129436e-03,-3.748860002383458032e-04,9.728907075423742627e-05,8.141899678769094068e-03,2.874764416187973980e-03,-6.538615019493843397e-04,-4.907450884318211156e-04,-1.357608135716704154e-04,8.215015030723429867e-03,1.338430270735873869e-03,-3.616147205455386756e-03,-1.233321463467806564e-02,-5.303192219004771758e-03,-7.623510027805152720e-04,1.534430542775466538e-03,-4.588788670308425847e-03,3.986283131987871341e-03,-4.070027839533708544e-03,4.094023573793763825e-03,-3.023820746364489723e-03,5.271288408984379191e-03,5.521821901371644532e-03,4.133152911435163633e-03,3.292177642781411728e-03,3.470610664420925237e-03,-4.453714008587891793e-03,-1.711875274629675355e-04,4.874980227472795052e-03,9.882525792988608448e-03,-3.335013252495604673e-03,1.090785528725245782e-04,-4.552742774893260048e-03,-5.595851985337567391e-03,4.629905485812910865e-03,5.785068506186167615e-03,6.682265420812230714e-04,5.498790204239211409e-03,2.598920453224790820e-03,-6.033758185414616129e-03,-3.899149902733777245e-03,-3.049824743150346407e-03,1.459544510732823774e-03,-6.233822958116547013e-03,1.267634246739554041e-03,5.465929382825635572e-04,2.1695549270460
32141e-03,1.292997182787089007e-03,1.430499105470622043e-03,-4.607145506739854061e-03,-3.000269845889539256e-03,4.770244396852925070e-04,8.397772099094301087e-04,1.010410556399142902e-04,-6.083322119052811394e-03,2.127760463923164015e-03,4.715034181485237320e-03,2.350384444172517464e-04,-6.540036057577563317e-03,-1.670552108328239289e-04,1.348966185198967977e-03,1.173184204539773519e-02,-3.057174057762565740e-03,1.256002253911824495e-03,-8.094935613036143701e-03,-8.484898044889882442e-04,3.604022237084859902e-04,1.042122005894058519e-02,-5.752277195814259857e-03,1.422430945655405246e-03,-3.093416308717519304e-03,6.464747948536245931e-03,5.063347712573108318e-03,1.136372918887936997e-02,4.281859670601732837e-03,2.763983657155858614e-03,1.156166651624204386e-02,3.229333114686389755e-03,-1.259241870293587116e-03,4.413139427861252455e-04,-9.181955281238327656e-04,-8.820642489272853201e-03,5.045206965966135201e-03,1.230601291715912714e-03,-4.032316182127419248e-03,-3.822457813944692920e-03,5.143179820438182129e-03,-4.833488680654617238e-03,-3.569309991242998817e-03,-6.982780401422899325e-03,2.696035636031624775e-03,2.329648259783924000e-04,-3.726387631236477860e-03,2.194106230008861270e-03,-2.974196298378568837e-04,-4.477001695552937713e-03,4.919022668029832514e-04,-2.789876815408329127e-03,-6.451858720147797717e-03,-3.844970162465601807e-03,8.961322761641353149e-03,-2.213540601130113087e-03,2.587486633892842249e-03,8.401657330380094468e-04,2.562198405056435781e-03,-5.471741405611229084e-03,-1.587952545015206690e-03,-8.275277899794817801e-03,6.280433129555860377e-05,-2.728481914236643470e-03,1.631895002348831419e-02,3.449422399298744083e-04,-4.580039176294480041e-03,1.615238297527746975e-03,3.886830514924881786e-03,-4.293562598054650840e-03,2.905336940398041672e-03,-1.401868874288730296e-03,1.246764386672082189e-03,1.721928744827048953e-03,-4.651118238854751175e-03,2.594642253615431693e-04,-9.131535236110076238e-04,-3.183941023315248631e-03,3.275471774564366707e-04,1.918816174535892954e-03,5.613178827694219504e-03,-4.698074568861639218e-03,4.901737882280013565e-03,-4.762988243315692039e-04,-1.141927943204338030e-03,-2.270200122536634481e-03,-7.424057666335439482e-03,1.454106395370698785e-03,-3.736105636874644149e-03,9.677465670554493554e-03,4.184224037202801880e-03,3.621479007481273778e-03,2.368341845869090419e-03,4.675333558835283870e-03,-2.743132839314759309e-03,9.087799922073282882e-03,-6.816160575999686243e-03,-3.980304455759933746e-04,-3.492084364223204014e-03,-4.170536599484408156e-03,4.748060378992405295e-03,-8.560562773561666838e-04,-2.925823638270550701e-03,-2.279178340766720157e-03,5.004998833552721724e-03,1.085168214709525403e-02,3.459568838370244426e-03,-1.258762537208079962e-03,1.371825955800009926e-02,3.602470701764738312e-03,-4.504940555756934209e-03,-3.697022150251430984e-03,-8.433095365586640649e-03,4.461941538411134771e-03,3.750280498911623319e-03,-1.695781098220466854e-03,-4.596277458981987791e-04,4.659894937019861370e-03,-1.179593106054253751e-03,2.696812753942132938e-03,1.971290519189228176e-03,-5.900317635940499454e-03,5.199905199809101014e-03,-1.678337958440007062e-03,7.131497291174177314e-03,-5.889897365682962051e-03,2.201665751020188098e-03,-3.315957192081622328e-03,2.892980781676090466e-03,5.640759538895459037e-03,3.548190761224827208e-03,2.537373682581599635e-03,-2.355146996666581533e-03,-1.860231023193348397e-04,9.402814457338233914e-03,-2.379702020884880229e-03,-1.352729785483937699e-02,1.436212491245091993e-03,1.212250026620513739e-03,-6.691597268853586612e-03,-5.48
8800991150886033e-04,-3.247918790621263149e-03,-1.931052049335625262e-04,-1.023498591040062494e-02,6.013166823104439986e-03,-1.590113369265133269e-04,2.072630988239366860e-03,3.261875386685263688e-04,-4.587465786995805102e-03,-6.105409809053515382e-04,1.192632511443761584e-03,-3.959736193585527107e-03,-6.723485622551429643e-04,1.490259718152968892e-02,1.062710488173490217e-02,-1.050933706246849619e-02,7.813299007539157778e-03,2.339987559702968838e-03,-1.188505991346359243e-03,1.206988082455765290e-02,2.931852045332172460e-03,5.136623363912022362e-04,2.465161434542228157e-03,1.698583733172461798e-03,-8.299187403595013445e-05,9.051808652262997900e-04,-3.634656221342409677e-03,-6.324967542064624873e-03,1.397611327950617002e-02,2.808157290453131526e-03,-3.083591375999096985e-03,-3.214023227396051955e-03,1.532577135953223100e-03,3.561173560529239395e-04,-3.591156937794895421e-03,-9.690596682696184141e-05,-7.918853438367902745e-03,-5.215861666330466086e-03,1.212291414789247977e-03,-3.053368478560885787e-04,-3.569335053446157091e-03,-6.776411178531242287e-03,8.679163351611851444e-05,3.861077915068307346e-04,1.374446441652130754e-03,-3.564489207168791349e-03,5.843941627363195314e-03,-1.409205523983967073e-03,-1.750737926892275429e-03,-2.522230313722847051e-03,8.591577420402471063e-04,1.184655913148863551e-03,6.202858934489620998e-04,-4.522088777441251629e-03,-2.235021268508094894e-03,-6.239973768996501030e-04,-7.374920281127482491e-03,8.321584351782969867e-04,-4.429870966029087268e-03,8.644528084017357589e-03,-1.031501511517992666e-02,-1.571773354505323584e-03,5.822062646015678695e-03,4.005825956019928337e-03,-7.711901202136152607e-04,2.868141495500536268e-03,2.771519427714429912e-03,1.747155057585788804e-04,4.192478058319184251e-04,2.241829346707222698e-03,-4.930719090611292849e-03,-2.782583200634662895e-03,1.323577233928126316e-04,7.875097044296016911e-06,1.012956231234607256e-02,1.474811237265629370e-03,-7.696572890649350761e-03,-2.851728672731288199e-03,6.761923659664997735e-04,6.087213575745389904e-03,-2.398473282970646405e-03,-2.417785442592188579e-03,-5.430046611243487253e-03,6.733931959541837771e-03,7.073154676111774106e-03,-5.403670855568748835e-03,2.407152915675673382e-03,-3.394267772336583867e-03,-6.204873919551650988e-05,5.867546682387760407e-04,-1.933446203864348757e-04,5.826231828212962045e-03,-4.978361080421957012e-03,-1.113519009444483357e-02,1.367804035021955847e-03,-4.987725892568488015e-03,-1.245326618832616819e-03,-2.201550113586348695e-03,2.834460441187616839e-03,-2.082545997966409480e-03,7.042257577656017321e-03,7.040943705230099173e-03,1.453157834748081481e-03,-2.765895560970145755e-03,5.437181631563241940e-03,-6.162435987913512892e-04,-1.688636562036004846e-03,7.529553213862786448e-03,5.011920982613994081e-03,-8.709984522934529488e-03,6.780018108241804044e-03,1.366360523791051822e-03,-8.235134021095241730e-03,3.330137102639468914e-03,4.492692554584677180e-03,5.555638519361318604e-03,-1.669054417149424922e-03,7.718235518850802106e-03,1.765492077997657611e-03,5.680107921259773540e-03,-3.437261135905559858e-03 
1.046681447767853221e-03,-6.173702939279713782e-04,1.737594801800782527e-03,-1.333830578192101010e-03,2.867131745218432729e-03,3.638435053094800017e-03,-1.683716780479642102e-03,-2.490252858351565263e-03,-8.133623592059468707e-04,1.126942368658591636e-03,-4.353697024772839284e-03,-3.218374926302650010e-03,9.001559749178124174e-04,-5.447077138549649006e-03,-6.941588863909970092e-04,-5.813920386422409894e-03,5.964810849784854541e-03,5.402007362877436769e-03,1.311644952806537763e-04,1.259091846044796924e-02,1.856998186233302763e-03,-1.370685431969341549e-03,-5.164760665531409507e-03,-1.934050447458046402e-03,-1.393125768742935677e-03,-2.776121405634157224e-03,-8.746642377474380531e-03,2.719546232770367420e-03,-8.371510151808534902e-03,-1.384784564718750726e-03,5.250612493987706630e-03,-2.504571787793892761e-03,-1.530766055592563350e-03,-6.477445923820925001e-04,-2.466675444988225292e-03,-3.638173833293971448e-04,-2.161170948672313900e-03,3.529749599281243547e-03,5.279350146769526799e-04,7.862022113166782997e-03,-9.137625435394905807e-04,1.481665415990266604e-03,-3.679773424893123621e-03,-4.224148463109043570e-04,1.150143371634574849e-03,-4.243129587395517641e-03,8.048438405499420056e-04,3.529904512484241617e-03,9.016976145588545891e-03,8.092696930306097700e-03,4.637278206473100764e-04,-6.497602774288444233e-04,-2.644755085116007494e-03,-7.094076118792931596e-03,2.248271739755781408e-03,-2.221722299779968404e-03,-1.146171781216586208e-02,3.149176903465679064e-03,2.399428195784678291e-03,1.978463888824472539e-03,-1.572690023583638366e-03,5.100975105640346077e-03,-4.238968288708511167e-03,2.476805843850795122e-03,6.827005120382001815e-03,2.677335317848141775e-03,-1.042416164175939339e-03,-6.711243152382898383e-03,3.836056317570104776e-03,1.104381006554752383e-03,-3.533263350190200217e-03,1.631484737762046002e-03,-4.429283540099305562e-03,1.916742854769255301e-03,-4.372656544564297240e-03,8.805271084312142268e-03,-6.776391625033037144e-03,2.216403905394887137e-03,-1.915884813588967324e-03,8.491726552074042519e-03,4.436949161622647702e-04,4.730360647865035351e-03,4.644239926863356557e-03,-6.853322585806398415e-03,-4.180785136930469797e-03,-7.401094554978465516e-03,5.850444292598953530e-03,2.319876904528474486e-03,-4.303722589980406819e-03,1.187353398770126661e-03,-4.927637134169333098e-03,-6.479142302107937642e-04,-4.574598941927764117e-03,1.256715229793750263e-02,-7.358794636886242628e-03,-4.721576552026330795e-03,1.572019983796351479e-03,3.180633851693391578e-03,2.333438721593864267e-03,7.447021703291498455e-04,-7.124214635747430635e-03,8.973428078340246056e-03,3.629993063804101629e-04,-1.193206267901402058e-03,6.779006944292448669e-03,-3.669268922363283676e-03,1.292379168220721174e-03,-1.012555196125855842e-03,1.098532556007425671e-02,4.257927375499226223e-03,-7.116597117527711201e-03,1.662072049486805611e-04,3.178999149912830040e-04,-5.993522158243712664e-03,3.867374522541623498e-03,-2.754690525689152542e-03,-3.751524103255564693e-03,-3.275568355087069526e-03,3.651105836022456967e-03,-3.994582229461214780e-03,-3.157834059886932207e-03,1.168731953982004093e-02,-5.285906676648498455e-03,5.858121836956780345e-04,4.333119120430331365e-04,2.102381029669907544e-03,4.930173845041975998e-03,4.234516649289689523e-03,-1.116284407609963517e-03,-6.715685971253174992e-03,8.312412001350261842e-03,7.154039008587313427e-04,5.239536951004764888e-03,2.607550047276778991e-03,8.349427939515438948e-03,2.487789915188083536e-03,8.837711926499137657e-03,-8.287364625961707353e-04,1.073359333744667394e-03,3.016814211166
545662e-03,6.207180374953722227e-03,-1.069775347395139289e-04,-6.474372392545548090e-03,-4.902981609855489725e-03,-2.807691978229686670e-03,1.202687906873243194e-03,1.647191940736839662e-03,-3.594750702826389390e-03,-6.909182979164068227e-03,-5.282502499497033696e-03,6.529703162568824271e-03,3.066302552287229755e-03,3.383486103156069843e-03,-3.888642534468089457e-03,-4.234743341420661722e-03,3.603906147839125635e-04,-2.652837144296343441e-03,2.575855473064315743e-03,-4.552438196117598521e-03,-8.717086579231187946e-03,-5.746630326696096358e-04,3.247440078455864076e-03,-1.581980729772638995e-04,-3.239932705035229038e-03,2.550181019491155540e-03,-2.623756791732087968e-03,4.862038474850368178e-03,6.380083584678554638e-03,-4.722489772421328467e-04,2.559398302095553922e-03,-1.836058068546840517e-03,-1.201085329801021215e-02,-1.539705471375391564e-03,-5.256918797716803385e-03,2.399043720557306105e-03,3.280228450143178858e-03,8.596619368307013148e-03,4.009271907416767609e-03,5.009012015749110536e-03,6.358298023411522795e-03,-6.573651478242957188e-03,-1.703838564058473594e-03,7.505089619629992659e-03,-1.249597684854912225e-03,6.807715975345705016e-03,-7.471366846393823202e-04,8.958651524551370857e-03,2.887309849838260323e-03,-1.022095903305740122e-04,1.350016266190147418e-02,-6.159402361923463007e-03,2.562528302534623895e-06,6.984728940216023037e-03,6.487746952807298736e-04,8.677880472202146103e-03,9.819556999609580991e-03,-3.265450727675573046e-03,-3.190152254726004097e-03,-4.968807464069430017e-03,2.451091700925580024e-03,-1.234120375940444148e-03,-2.143920199324440747e-03,-7.786245263950140189e-03,-4.693393509795796420e-03,1.953382147394841159e-03,-1.487684920315003644e-03,-7.466518990771723457e-03,1.194304453038577121e-03,-8.265141101495515272e-03,-6.337577869510014825e-03,-4.205087225538225830e-03,-7.471187342349788936e-03,-1.482695837281677387e-03,4.028531341496563792e-03,8.488728440175910550e-03,3.914536709343786544e-03,1.626696117435119746e-03,-3.664464134884170762e-03,4.484233068629557561e-03,-2.436687513937722989e-03,7.902387975256280547e-03,-2.390973401897538916e-03,-4.849411716082556618e-03,-1.293881202062763041e-04,1.715843327117311636e-03,3.316350259965201303e-03,2.205952103655935571e-03,1.372982360558098591e-03,-5.221887675133283190e-03,-1.333651302916142971e-02,-5.729104161442523033e-03,-2.170541498264083428e-03,-6.295189259721449722e-03,5.178942170071231151e-03,5.607740224894360026e-03,-8.493260106110509574e-04,4.706076050545131458e-03,2.376279180237415509e-03,-1.526139837174995441e-02,-2.793777726045639301e-03,-2.154030474358102198e-03,4.369318937059419282e-03,1.289880317548288205e-02,4.152915002786552318e-03,3.976381781791033139e-03,-1.653286240778071431e-03,-7.862347281052891462e-03,-4.094436046735961364e-03,2.427235444935143226e-03,-7.880680608367770373e-03,-3.409224525851656251e-03,7.793293570341681617e-03,-8.971277179722720804e-03,4.282868185684325125e-03,-5.048131436013579163e-04,-2.419546661948727018e-03,-9.915029290322508772e-04,3.967174920092630952e-03,-5.531294472114624272e-03,1.773676810936240539e-03,2.113081326244943981e-03,-2.292587549601397389e-03,1.958144989686935906e-03,2.964038213347094334e-03,7.273358816045405462e-03,-4.000147340825338342e-03,-1.706380323148644742e-03,-6.154112091346247183e-04,-7.201681469665353842e-04,-1.063645527190422175e-03,-3.283496540319087546e-03,-9.546073040331375364e-04,-7.979619120229293179e-04,8.574576054251300053e-03,6.779358592528797814e-03,8.546463943053065129e-03,2.095061472658566425e-03,3.681256192744571110e-03,3.525656385179484070
e-03,2.965775643425283461e-03,2.278925190156709556e-04,9.388479600537857211e-03,-3.427556929457784181e-03,1.281614882521253489e-02,-4.736629080081833539e-03,4.327741519179045641e-03,6.247074639920854183e-03,-3.469013422927270853e-03,3.645601096577568909e-03,6.953754996934492782e-03,3.759750828528271669e-03,-1.245245347934277082e-02,-1.144848712605635701e-03,3.692326000802296278e-03,-3.360832918290219268e-03,-1.940595628418271233e-03,7.660244527273118477e-03,-3.880730201854425306e-03,-3.271891482326695372e-03,1.016976114678338518e-02,-4.008482733393080376e-04,-5.017289658713841582e-03,-1.118653705127102809e-03,-8.392062336358363734e-04,2.208595073754872698e-03,-7.090299945195320010e-03,9.907170970538455024e-03,-6.861690303199525533e-03,-2.851994665710893152e-03,-1.382593606055950103e-03,-8.919416279211453760e-03,2.749363489049426283e-03,-2.611579320176957153e-03,2.204551073072595190e-03,-3.249810586188291162e-03,-1.155532383376730015e-03,-8.311142457558371929e-03,8.864762578404646650e-04,1.292668369647119279e-02,7.138547901706665359e-03,3.999081820099621709e-03,4.127448328251785703e-03,-1.414726142100768700e-03,3.145069888123445457e-04,2.855273524347761794e-04,3.378862239553448666e-05,9.276628286164488243e-03,4.697161305930766875e-03,-7.193627639527041314e-04,5.606354076944096085e-03,-8.502674375883142316e-03,3.202046871459141020e-03,-5.826039305915749200e-03,8.173037343172994545e-03,-1.212561565441825761e-03,-2.742396108427659889e-03,-7.323857024061042678e-03,5.673281128926169724e-03,3.246761407636869490e-03,-3.216755055504170232e-03,7.271264743838824242e-03,8.765527279528971538e-03,4.030093620812752117e-03,5.795451402968471166e-03,1.601221799190435946e-03,-3.964240674105830135e-03,4.373510557971529815e-03,2.707296951724935934e-03,-9.920321074787072542e-04,-2.847905131692396539e-03,3.273414908317904930e-03,1.127170677881036802e-02,-1.344513248836417575e-02,8.417081155215442798e-03,-5.977476961498394154e-04,-8.454134213305142514e-04,-2.615305401833550761e-04,-1.545228596731162366e-03,-2.622419154174149541e-03,-5.578562089513882001e-03,-9.536533712197739834e-03,1.825180113603833837e-03,-4.072597461450490515e-04,4.439208975293299289e-03,9.276856154719163037e-04,-2.681061243324042980e-03,-7.573953398775011732e-03,2.024207319524330261e-03,-4.293008602945200779e-03,-6.945032527323623424e-04,-1.210797839912771193e-02,1.120878942875596892e-03,-5.673282142861966158e-03,1.723492713391641196e-03,-5.658073044610570021e-03,-3.762381151975727114e-03,-3.792539791769731095e-03,9.386087871804499674e-03,-4.561873561034552292e-03,7.344645658793487984e-04,-5.829946987361352313e-03,-5.428942604694448996e-03,-1.995448906260154521e-03,6.896137272706555355e-04,3.828681048556082126e-04,-8.842133954619300559e-03,4.684833851331511589e-03,4.751066781388163345e-03,7.269426089140669620e-05,1.504238928716808508e-03,-8.877111934849105058e-04,-2.449208391801743420e-03,-8.954196310879063148e-03,-4.558365116343215430e-03,-3.739048771390988870e-03,1.465903587975578640e-02,1.226472680572794974e-02,-2.805938841802848574e-04,-2.294665772948927722e-04,-5.119473866226475350e-03 
-3.627632657795508159e-03,-4.885365751023813471e-03,-5.420270975680246701e-04,6.310011846635502943e-03,-2.624211986923464385e-03,1.568892821402580401e-03,2.324906865196162527e-05,-4.753997674067076177e-04,-5.245936355994522578e-03,-1.563787031708452563e-03,-2.130251321420263341e-03,-5.540201610926786396e-03,-6.440697663710367483e-03,5.725383373749737444e-03,7.694132048508824789e-03,-1.236334551733741117e-03,-6.511858380805385415e-04,-2.751541980982922641e-03,2.985270987157112067e-03,-5.144552233922184727e-03,-5.000757817578685080e-03,-8.105796477469969560e-04,1.020869384800359965e-03,-1.511679347427182657e-03,-2.271564344317559495e-03,-1.868943902765523440e-04,8.827954262857817596e-04,-2.561852785029130122e-03,-8.658608273653780121e-04,1.041459424570722984e-03,-1.664699902775237331e-03,1.413241652089918931e-03,1.323806274696275997e-03,-2.911790713833694016e-03,4.917202923839624450e-03,9.794637130711561501e-03,2.166580482285133252e-03,-1.839815852205886658e-03,1.455167378982736301e-03,1.028317400894982782e-02,4.432469627569162081e-03,-3.124024307916658735e-03,2.327558879092091627e-03,-3.560180387732046368e-04,7.065744100450239810e-03,-1.805161902004843815e-04,-1.046126587173112736e-03,-4.982442806360712698e-03,1.866408933816934975e-03,-1.378556902215640704e-03,3.213201075331602666e-03,3.181048927126721871e-03,-5.287105592803876793e-04,1.695084373060725888e-03,5.419877746446952020e-03,-2.746264732820194714e-03,4.861023537555039450e-03,2.302973302140842986e-03,5.890567148445317001e-03,-3.613178523891161692e-05,-1.262201731584093340e-03,-7.050665097629378771e-04,-5.226928680451909705e-03,2.649248921928342142e-03,-8.155968110336522456e-03,-2.660954435203193675e-04,1.838272978754275713e-03,5.851372094186603479e-03,-4.385830626976125815e-03,1.411412561397086497e-03,-8.646522480842118094e-03,-4.864444155215179137e-03,4.017176142240415533e-03,2.011246836083147981e-03,-4.302534112974493695e-03,5.139252391412009111e-03,-4.781705729672624215e-03,7.239724965221647733e-03,2.787509262000626136e-03,-8.180882805663772239e-03,-4.496738898719484373e-03,9.579768360637699182e-03,-5.828241260184016757e-04,1.207406514386462739e-03,-6.462669970722275878e-03,-9.105818219372556477e-03,1.065185182812405928e-02,-4.152902165420959898e-03,9.595642014945299919e-03,2.254023294669212606e-03,-4.876694159410422356e-03,-4.993319942381563813e-03,-3.755905073694970836e-04,4.259748019511954385e-03,-1.675293595782472410e-03,4.311632345183622275e-03,3.589814957166739297e-03,5.228657938289383959e-03,-5.468080115655159223e-03,3.437480380324802712e-04,8.027112015991341373e-03,-3.471833294241056123e-03,-3.395289578339821899e-03,7.365852853794545470e-03,-3.872641586598838965e-03,7.559321117403616048e-04,3.440323880133857627e-03,3.875371248000723107e-03,-5.584581455807486293e-03,1.606238912003931209e-03,1.378912543787929115e-03,-9.274947484288396894e-04,-4.907612760953758771e-03,-1.093105876537637860e-03,1.395539560863378291e-02,4.716587276259357787e-03,-4.737392760789176441e-03,6.014656148172746811e-03,-1.235541234998198228e-02,-1.108646625337402977e-02,-1.184704382280914702e-02,-8.932855419611550329e-03,-5.164786741294968104e-04,2.046105742964026124e-03,1.289261185756080102e-03,1.371077971718031837e-03,1.103008060910888079e-02,-5.968770276325162383e-03,5.685060578851665845e-04,2.201525394872532425e-03,1.896176374428971186e-03,-5.590364578004148243e-04,1.981158731539433854e-03,-1.364837176771100183e-03,8.468195951567713056e-03,-3.710053286914288817e-03,-4.056325268301043223e-03,-1.102908914284730598e-04,5.192096882282288238e-03,-2.9629
41474262018298e-03,-8.596426963421350837e-04,6.116426011192642247e-05,3.247102255792295739e-03,-7.093578729454766245e-03,-2.737004103602827618e-03,-8.250738614340417595e-03,-6.065359701595400034e-03,7.738381329574875109e-03,2.826621339859404372e-04,-4.357406951142904321e-03,1.097047133921687928e-02,6.987666296432767238e-03,-4.894495339083949730e-03,5.234196714265290130e-03,-7.355823048488342730e-03,3.302360280620640284e-03,-1.725409345467564292e-03,-2.963305212603532714e-03,1.003087076922799629e-02,7.324410386891553031e-04,2.492409029606130599e-03,4.724003771255862833e-04,3.042303608298320677e-03,1.579569978958472948e-03,3.467708439800772525e-03,1.897907586435611336e-03,-4.727497796442241360e-04,-3.391830449736587139e-03,-3.632080717524023474e-03,9.282684478175906237e-03,-2.271389799113357911e-03,2.241090745272100379e-03,2.676595866399517163e-03,2.294292823729032326e-03,1.797529762991632082e-03,4.558080241994398422e-03,-1.742123832791629212e-03,-8.258582459361317162e-06,1.067274511524485747e-03,5.226198505380588548e-03,-7.076274515148201459e-03,1.948478132115771247e-03,8.536280295146966152e-03,6.294447620964733515e-03,4.791985684436063722e-03,7.301464292997513102e-03,-2.341309815066543090e-03,1.112999251483936039e-03,2.433247452643324225e-03,1.709694080100754039e-03,-2.043910850238942964e-03,-1.611429143670934467e-03,1.676320612740961682e-02,1.206610310041293091e-03,-8.996126565987819235e-03,1.567755253165228901e-03,-3.479094129813643625e-03,8.102912057157329526e-04,-5.583306035535325670e-04,-3.312069301722713171e-03,-2.366597266657487122e-03,2.385052348924446546e-03,-6.084796605677776984e-03,5.386237373565840904e-03,-2.687065040768440907e-03,-5.023211058046241073e-03,3.645226765084515382e-03,1.799083035400201366e-03,8.440841174734819638e-03,-2.918117034726218015e-03,-1.891389144171153145e-03,-9.840301570544279168e-05,-3.331917088168013900e-03,7.087103372726624717e-03,6.801394226288438828e-04,1.330917505149567450e-04,-3.043740284542252247e-03,-2.484162792806063249e-03,-2.935722967771927251e-03,1.623027649832661182e-03,-3.463289929733673334e-03,1.160126255645983154e-03,3.256920905117260284e-04,-3.800424920148742520e-03,-3.599386572557217341e-03,-1.883427886427793381e-03,1.015052550116096568e-02,1.440100569433500934e-03,-6.176900245221166269e-05,-4.028730800503413454e-04,2.801408133669105321e-03,-1.928926965086396727e-03,-2.417624228304551207e-03,7.962142928691816335e-03,-3.510355104126788514e-03,-1.287034943728812596e-03,-2.907664848380208875e-03,2.137692224585094510e-03,2.189872304026167348e-03,1.136691572019570516e-02,-1.999434265261953048e-03,-1.786022636540540286e-03,8.126115197514051716e-03,1.862247412869198120e-04,-1.888303418726693973e-03,8.136684774129846962e-03,9.115165276965894606e-05,-9.832699437689964867e-04,-1.820313076335897508e-03,2.097346534593869539e-03,-1.041574013677496781e-02,-2.649081063812818923e-03,1.962856168909541538e-03,6.908068371926740989e-03,-4.153110971312869333e-03,1.591602139172823777e-03,5.305239398120917098e-03,2.376746712299945916e-03,3.961161799404090168e-03,-4.167257576730372393e-03,6.094061607899233131e-03,2.918133511433680834e-03,8.646726147260971190e-03,-4.547696483643582158e-03,-3.941726420756562835e-03,4.981957049019325456e-03,-3.757501077637546896e-03,-5.868093360764024781e-04,7.888714369408087465e-04,4.935365564269030524e-03,-2.010120278091517051e-03,3.120010119594974866e-03,-7.810603109334460920e-03,-8.619713726451004587e-03,-3.101512800716311894e-04,3.208575167615291407e-03,5.817159440317942612e-04,-8.025297433906139130e-03,2.325727669681930659e-
03,-6.366391615033271598e-03,-4.802230721650101869e-03,4.183201810935368442e-03,-1.095079070402487113e-03,-3.269661411825974893e-03,-2.267646498050413743e-03,-6.675783814719247165e-03,-2.931889537539214986e-03,8.047435930358283333e-05,-1.720589742453401075e-03,-1.970620157053739989e-03,-5.190447001023762615e-03,1.626963602810959720e-03,1.033140960686557315e-02,-1.216680878233479361e-02,-5.759723633507588848e-03,2.210104711144092893e-03,8.090840881645369742e-03,1.910923576047619246e-03,3.333777268227486237e-03,4.656803575024571627e-04,-6.314224583409112045e-03,-4.984503364125453251e-03,-2.792899157513901824e-03,-2.333164701214274564e-03,1.221642187588227717e-03,1.768471095929233499e-03,9.744773184002332445e-03,3.040834243712661235e-03,1.786201728917193813e-03,-2.810516687522200512e-03,-8.315696343821314326e-03,-6.049910229435967977e-03,-1.632251715575585693e-03,-6.112528636002077277e-03,-4.935997861586439474e-03,-1.219930383831783588e-04,-2.172003390185813388e-03,-7.156063867632110956e-03,-1.285801102392558279e-02,-5.422211906239499590e-04,1.346231422844397334e-03,7.025156857066278474e-03,-3.931129815197589340e-03,-5.013276749621747158e-03,2.923790652367885302e-03,-1.746883629327485596e-03,-4.977942706118745030e-03,6.246801840353564256e-03,-3.702155255677620077e-03,-1.640354776644739263e-02,8.081107407489481961e-03,2.370902972358723305e-03,6.788473476253961345e-03,-1.436520480424463390e-03,-4.804687832150814596e-03,8.401616307413969920e-03,5.296491008318495591e-03,7.041259893092306939e-03,-3.288264266175568943e-03,-7.978350940223789605e-04,4.710323000135965658e-03,-1.164467593715816374e-02,-1.481575523177423642e-03,-8.323315549840878957e-03,4.721108068114474032e-03,-5.378773440146400985e-03,-1.261628242450188713e-03,-3.552291845239210610e-03,9.921191906211398667e-03,1.000355545115415561e-03,-7.522631092435005072e-03,-2.741326346786559631e-03,-3.208128154565204147e-03,6.470698366703503922e-03,-3.782702453404433989e-03,5.209160955636209928e-03,9.977640429930539380e-04,6.269918993705676964e-03,-1.168632090642854511e-03,5.328809213622372719e-03,3.327260694268171018e-03,5.092649740366390031e-03,-3.642485898110078921e-03,-1.241250604069178091e-03,1.049070185407542655e-02,2.152062876292420296e-03,-3.214368868701532068e-05,3.006689197242533827e-03,3.493263720037552292e-05,-8.424470245504566204e-03,-6.308538339195668768e-04,-6.121274819102862693e-04,6.158661146667188981e-03,2.521339398243813298e-04,-6.289518981516118400e-03,5.185042843373383233e-03,-6.129487277974548847e-04,-5.988491443422359027e-03,-3.813644149670772452e-03,-6.768347676581122803e-04,1.133014527532153924e-03,4.614828533879168811e-03,-6.504182136278716456e-03,-7.422668855494262558e-03,-6.044715371657329028e-03,-5.415787958238124731e-04,4.849652958703736493e-03,8.404740210237411635e-03,4.234804041624480771e-03,-4.893171982820483830e-03,-7.390424654434880891e-04,2.030786154334295480e-03,3.934628966849615037e-03,7.509699795350314143e-03,3.568572933906938218e-04,-4.980729718896684081e-03,6.200438189392621334e-03,1.898214528396361452e-03,3.835363677359942061e-03,2.033205331377924835e-03 
7.868652391927223599e-03,2.016124587223690461e-03,5.447796374732696671e-03,5.408230103060652876e-03,5.414211262813260457e-04,8.982300655763270428e-03,-1.765046002993299118e-03,-4.514701217782723583e-03,-3.924416736160611652e-03,-4.396416378781383208e-03,-4.676430466363917619e-03,4.367078425316097377e-03,-4.094017239855098599e-03,1.682117400599408147e-03,-1.360334334038325148e-03,2.550433927220567207e-03,-4.276136609969381254e-03,2.332102811021640864e-03,-1.547762623599036177e-03,-2.210632562517068651e-03,-6.410767232711334085e-03,1.068793722422086215e-02,1.377076418401185047e-02,-2.600606355498270243e-03,-1.618225457270357258e-03,-4.477693751053494174e-03,-1.465685184869099404e-03,3.225847937746793414e-04,4.473262402378879658e-04,-3.504304199345201123e-03,-1.826035528199125872e-03,-5.773453537520161633e-03,-2.158371089144305293e-03,1.107029921753051489e-02,-2.936291807963707819e-03,5.524239652864354391e-04,9.904935375243967971e-03,3.005081476086145491e-03,-3.107795279890755540e-03,-8.751333280691145702e-03,-3.463741424349561240e-04,-7.549543608369688533e-03,-1.210281695011834052e-02,-9.862494904108703563e-04,8.162988015214939913e-03,5.019571203791296570e-04,-1.002144691233451679e-02,4.031653518322387240e-03,-7.449246823927954154e-03,-1.203444409339607694e-03,6.317169538818678126e-03,-2.248547589509238779e-03,-3.609914718282442730e-03,-4.595622651307666381e-03,-5.641757467388174019e-03,-2.279006163583726947e-03,-8.167027935406466141e-03,-1.757550341161310224e-03,-2.006570727007936979e-03,-6.020635166584071933e-03,-3.109404321938751339e-03,5.763574702617038159e-04,-1.228920435322737809e-03,-1.385525407615598474e-03,6.740355151417779314e-03,1.839943627128863947e-03,-1.308969209785498322e-03,7.353481535590088158e-03,-1.321319872443264300e-02,-5.147327214423659536e-03,-4.118102447213361345e-06,1.430180502253361160e-03,-4.506243306959108359e-03,2.444980011119273714e-03,-1.257893328106990958e-03,2.024491102692015706e-03,1.800253527935749076e-03,2.532502174724105253e-03,-7.924484678166273874e-03,-6.323022082154630512e-03,-2.851358590657211960e-03,-3.550614185796908354e-03,-4.924412140583623275e-03,-1.208232199366636103e-02,-3.168029486884775276e-03,-2.510769135763636813e-04,4.538209404730647080e-03,9.998948764733768933e-06,-1.562609747141293455e-03,5.464272542975042585e-03,-5.277303927792228143e-03,3.412017417296348671e-04,-2.934196845932353706e-03,1.285369417943174391e-03,1.660822534940790398e-03,-8.893870596011885579e-03,-1.772416953261593325e-03,-1.083524567376229321e-03,-2.515989910074470804e-03,1.416199164458304977e-03,5.539075111335179866e-03,3.573944224607277127e-03,3.907937118247581214e-03,-2.451387665787360323e-03,-3.246276065185587159e-03,3.029150299901774406e-03,-9.095068860252051440e-03,3.750574820288150781e-03,-7.250816353901899311e-03,-4.094442800918043912e-03,1.125928191538082566e-05,5.390137439632401370e-03,1.394821278728821789e-03,4.120264336872753001e-03,5.586772028781470528e-03,-1.122069294767333352e-04,1.434072285639798050e-03,-1.870012715850807557e-03,-4.071282994155637006e-03,-5.233804599777280679e-03,2.128729413564337935e-03,6.352341447734916047e-03,8.428449965212725468e-04,-7.757119081066837958e-03,-5.636026589776962220e-03,-1.159031335307159592e-02,-2.194393992246530378e-03,-8.159666045057904771e-03,8.534502731613066804e-04,-4.572285754606566187e-03,8.064736069399653859e-03,3.216894896323168988e-04,4.007013825281555792e-04,2.489872533618263904e-03,5.585095986666224407e-03,-3.212702410699023840e-03,4.117249717967408525e-03,-9.555318166007050265e-03,-5.286527377521355389e-03,
-5.911065473144100357e-03,-2.677162313513953805e-03,2.455182678740589972e-03,-3.068029322037370624e-03,8.002911904332343090e-03,1.152059872919394302e-02,-1.098789073876799659e-02,-4.246701005512485728e-03,-3.761101576075922843e-03,1.851283376506728581e-03,6.332467353261367468e-04,-5.909452673400990286e-03,7.468706065607015783e-03,-7.055013790607825254e-04,3.353471858140769177e-03,2.981907561724307047e-04,-9.502006719394016920e-03,-2.837307897892413482e-03,-6.065561054165830371e-03,3.829577115072986537e-03,2.468721568635731927e-03,-4.961083188298700229e-03,5.354551441547216904e-04,-5.619799839330761118e-03,-7.236020142743731023e-03,-3.957120435182491340e-03,-4.034050255459100884e-03,-1.623647443997218529e-03,1.641356226213986627e-03,-4.401252771966709769e-03,-7.747899312469008065e-03,3.230317172247241121e-03,1.513303599730755639e-03,2.990048401219227905e-03,-6.559860965490239663e-03,-2.261581408250854484e-03,5.875635394358560276e-03,-3.850000127059931710e-03,-6.351142482906209537e-03,-5.631572831486976981e-03,4.512978271021668604e-03,-5.748109392790336225e-03,4.237808536647121732e-03,4.778990807957647288e-03,6.200191654575818757e-03,-1.168474339300785026e-03,-4.535282041938165577e-04,-4.502373988334112086e-03,6.832679014526107887e-04,5.952116974844033973e-03,2.269143874621280289e-03,1.637902082042753124e-03,1.116715961607684347e-02,2.638984566902983792e-03,-2.456300669543737530e-03,-1.658098499042747875e-03,8.822869218062488750e-03,-2.849862356889227254e-03,5.232117507732404402e-03,4.603950917724466615e-03,-9.007066412594874269e-03,2.904641470136318671e-03,-3.021642848646092366e-03,-1.438447921285228651e-02,-3.802303070980365182e-03,-3.415805776166928854e-04,1.904070296390562161e-03,-5.019624345102495500e-03,2.211210369511382341e-04,6.349628313208746655e-03,4.905412318337977126e-03,-6.673937983281588542e-03,3.734249264588537960e-03,3.799699855943089596e-05,-3.983542410342096746e-03,-3.907450612296091245e-03,-1.189806778048713685e-04,1.768541284027979731e-03,-8.037764121790675394e-05,7.427451021936687554e-03,-6.916459927185627556e-03,3.395099493806269693e-03,3.122913098815623383e-03,-1.963092031860236720e-03,4.771080930099490941e-03,-2.944755160410061391e-03,3.880659574299027016e-03,4.608030887741477490e-03,3.509538354883930383e-03,5.386574824106383488e-04,1.275497607039572691e-03,-1.039274436711567010e-02,-1.038581454256031020e-03,-8.614606819493895182e-04,3.401439325712829562e-03,4.038936372358881491e-03,-1.317098984004981934e-03,-6.111498836239749252e-03,9.839154285392469335e-03,8.290938840277813559e-05,9.457175937243510189e-03,-2.037123479945405092e-03,-1.756929811686369766e-03,8.809901256817371379e-03,-3.767774357209029059e-03,1.228759625353064962e-03,-4.195642260629824441e-04,5.956428465414387308e-03,-2.567587542143375846e-03,-3.036658285945548728e-03,-1.620085879620680172e-03,5.025105905336097085e-03,3.199787948861350113e-03,3.551266081196728999e-03,4.842662297622943833e-03,4.164051506478960545e-03,1.420445056515609247e-03,-3.059769524814054571e-03,-5.107311072966877127e-03,-3.234798571759336916e-04,1.291466977622802818e-02,1.325537728777547103e-03,1.041577537279957631e-03,2.653172303363700996e-03,7.379022330983859949e-03,2.384745480650860873e-03,1.797122339842997232e-03,7.130404194383828387e-03,-5.926693821607273774e-03,1.511630393188389751e-03,-5.198855164106603146e-04,4.530639936547143590e-04,2.949828517240769029e-04,-2.831692438647209070e-03,-5.242866069838592047e-03,-1.994050127891320776e-04,3.676940681751260332e-04,-4.219478079526820490e-03,-1.303296240298498342e-03,1.77767413259
9709562e-03,3.732697103801536909e-03,-8.681851224228792846e-04,-5.075331537449787565e-03,-8.057221584867403684e-04,2.938531746324905825e-03,2.020973475291077998e-04,8.646318577061562439e-04,-4.855968912644401028e-04,-8.127414208526055270e-03,-1.010485610278327555e-02,-4.460450368510900966e-03,3.356954573232262540e-03,-4.578000949935483177e-03,-2.455675872958592380e-05,8.005642893886003508e-04,-4.190421425008659072e-03,2.173620201531132733e-03,1.150630879448709225e-02,-5.214725238460270543e-03,-1.359567848751830430e-03,-6.137315256308141335e-03,2.453818919379984875e-03,4.345814191005498671e-03,2.775537525289431540e-03,-2.759968877837377238e-03,2.314048303118422696e-03,-1.671931827383433882e-03,-2.117806005791259634e-04,-4.963906633502864450e-03,6.641193007893291152e-03,-6.483741597028397537e-03,1.765303970853361864e-03,3.983938475396589518e-03,-1.550828786180638791e-03,-4.304992055888629335e-03,2.794474825805092193e-03,8.283742778175954419e-03,5.071348342870034276e-03,-1.323758529070370379e-03,-6.355264365386526816e-03,3.274161553546596840e-03,-3.745878449319298142e-03,-4.527937649827840083e-03,4.405044722659429688e-04,-6.965893030026193249e-03,-6.935454589319496599e-03,7.716984740444129270e-03,1.033324735607249337e-03,-7.223453637623751013e-03,-5.052609364385415511e-03,7.358238131603541884e-04,-3.553755984323857253e-03,4.937541366531036577e-03,3.514567629517125619e-03,6.021650072345692724e-03,-3.851060620491442740e-04,-3.606692363604924699e-03,2.788680617254742740e-04,1.247229959707956207e-02,-1.095282186961934806e-03,-1.698385265912109561e-03,2.082185781071643033e-04,-3.254356530782861589e-03,3.304252576513149429e-03,-2.333847546080526289e-03,2.197522532493844088e-03,-7.730453229934759578e-03,-8.079843386826524336e-04,-1.663865132439829118e-03,3.777179935322368305e-05,-1.679025589599591594e-03,3.768472031710976000e-03,-3.519235648062686312e-03,-8.680878569016259620e-04,7.686784159786554105e-03,2.353640533659587166e-03,-1.880180558234351281e-03,1.379417887788104502e-03,-2.139546200447813712e-03,-1.292871996888342312e-02,-1.597722680377065222e-03,2.958156589444545237e-03,4.347970699692993261e-03,2.698609013066837359e-03,4.397269875304902001e-03,1.834076337030240761e-03,1.173915295259154567e-03,-1.864003265088331205e-03,-2.426253337833739459e-03,-4.809290247727495554e-03,-1.435149115299681408e-02,7.993138182597605765e-03,2.501085928805759848e-03,-3.629901609663613478e-03,2.688505215624099193e-03,8.096475915075129065e-03,-3.410820199611946935e-03,-4.323920323785733662e-03,5.635861443196206119e-03,-5.458298698585792828e-03,-9.280738052612560324e-03,6.093104639278676231e-03,8.786539916756336971e-03,-4.645246526588840685e-03,-8.222369328614809181e-03,2.579898806794505509e-03,1.067102264789236178e-02,2.022488193615472850e-03,-2.598514489280460017e-03,-9.577956950212409074e-03,-2.105632228314774281e-03,-1.116369909531348325e-03,4.453602332674187758e-03,-4.160423289741420859e-03,2.756697718520144218e-03,-2.886299031135713521e-03,9.762459244695919150e-03,9.760249173167303768e-03,-4.461209454122403578e-03,4.907991422092434358e-03,-5.705756950702705124e-03 
-1.521193173098392935e-03,-4.435698511973571248e-03,7.948765400974300635e-03,-3.489887985475650847e-03,1.256185978194994911e-02,1.139818808451298869e-03,-2.771163375338468304e-03,4.739815162727665197e-03,5.224031284573452171e-05,6.273399437753024617e-03,6.771922780049637394e-04,-3.181177184005173821e-03,-3.552722174096363539e-03,2.593376255148354369e-03,-3.768629879083034134e-03,-5.110266980729840681e-03,-4.170807210131417808e-03,1.427404897217678147e-03,-2.996555735763627463e-04,-2.668534471243550442e-03,3.889462872371745802e-03,-5.808313158219165044e-03,6.476826214726156758e-04,1.089843428685804104e-02,-5.480193932579380429e-03,6.090164611125901094e-03,-5.912149374989152800e-03,-1.043820944634458537e-03,-1.910826454076361815e-03,1.064728896721171825e-02,-6.237934716239285429e-03,6.643794663504014493e-03,2.558878420329543739e-04,-3.346264861857881339e-03,-3.822789953244760085e-03,-1.056717312253180850e-02,-3.002474063378874833e-03,-5.435308240879989372e-03,3.292914567600190891e-03,5.092080955331506308e-03,9.889836036963587512e-03,-4.601295306347520306e-03,9.649357033581525264e-04,-1.911719383204889136e-03,3.258557016379989412e-03,-1.157822807815898740e-02,2.105215205180796089e-03,3.651008708491092908e-03,5.641560455062988484e-03,-5.823808179936142138e-03,1.430447463209672844e-02,-6.536621965291910044e-03,3.284347379922455869e-03,-1.115709035334544463e-02,-1.113827792702053914e-03,-2.583792343109593808e-03,-2.312819732326627103e-03,3.843616805343724220e-03,1.383494284455872572e-03,-1.082530092680824944e-02,3.714361221011408618e-03,3.299067041696419900e-03,-1.151808857963876331e-02,1.340885740305660300e-03,1.296780729613924163e-03,-1.489195243852597201e-03,-2.222761072494649354e-03,-1.526535553210318138e-03,1.223488109154120644e-03,6.861118234725578635e-03,-2.618594997135413431e-03,-4.755656334079325000e-04,9.518723844354023966e-03,6.397373192611998001e-03,-1.155002061649073675e-03,6.129096581517302737e-03,1.060844937099307030e-03,5.813068806204321412e-03,3.011713632269041874e-03,-5.683844795377583636e-03,-1.085875938294833792e-03,4.044642491541515701e-04,-8.407507032672309130e-03,1.029909593874642464e-03,2.353944058316360163e-03,7.831451880533597115e-03,-1.165764522910085147e-03,-2.051305127054648715e-03,-5.766116449172542860e-03,1.313692116217072626e-03,9.362262576993456153e-03,-6.883498850230587371e-03,1.401165234860639840e-02,7.414419649449524921e-05,-4.764154430026917332e-03,3.321754550053387500e-03,-5.952183930750721066e-03,-1.139337128516562311e-02,4.891012092408912257e-03,-7.778180378514264193e-04,-2.733852290630120387e-03,4.386740519204208356e-03,2.948088685167794213e-03,-1.392229998570259069e-03,-3.077501494280120587e-03,-5.521971626188128777e-04,-3.345305716851060956e-03,4.180258509526147900e-03,-6.864268453280773785e-03,6.387403641714508619e-03,3.961942709457494903e-03,-2.142961605814004515e-03,7.940671116570744217e-03,1.209308738785631280e-03,-5.185056769271535748e-03,-2.881236425085215852e-04,-5.581708263427060876e-03,-7.055520187031004420e-03,4.814168382829147469e-03,-1.506141887641556515e-03,-2.098765541355347101e-03,-6.633280877263681048e-03,-2.863486049557959021e-03,5.606474508207960269e-03,-6.755933097178624079e-03,-6.148485275765567040e-03,3.342532644833052461e-03,-3.017513775122100499e-03,6.654228886915910385e-03,-1.784791122908718341e-03,-3.974159817736469362e-03,-2.136493946579612595e-03,4.141380531530990941e-04,-9.173905603377646018e-03,4.042968885011054975e-03,1.895284734589568545e-03,-4.717465910699119962e-03,2.408547391683851617e-03,-8.816906523852703409e-04,2.1197
27364154506532e-03,6.655490206663635874e-03,4.870452275625457295e-03,1.009795159102894451e-03,-1.525684563944617374e-03,6.162089507907462550e-03,8.794437295597871919e-04,9.696633587969601467e-03,-9.189346545250984266e-03,-7.717702774357606578e-03,-8.580628800611847518e-03,-5.466081271091637224e-03,5.299368826192259185e-03,-5.951004367022690207e-03,-9.709000487426353368e-03,-2.623001797405607061e-03,-2.376155853675443724e-03,-4.394980597394780066e-03,4.055422246945034016e-04,9.978524737012025095e-03,-1.327732096590226790e-03,7.357962466659939867e-03,-1.171146408591805001e-03,-8.537589168631965972e-03,1.123477835208245559e-03,1.249858706106120798e-03,2.747397646592138719e-03,3.801710647402522245e-04,6.602712361396463918e-04,3.691001149947513785e-03,-3.416106791045560569e-03,-5.606699299257767854e-05,1.784205116186836707e-03,5.427412155182630149e-03,6.041986363437807685e-03,2.103164632215191905e-03,1.163470114719798764e-03,5.015887823537884711e-03,1.004587627502609264e-02,5.989762760961189064e-03,8.249722826101161784e-04,3.559438794531868196e-04,4.218542710382390537e-03,-4.052332973352882305e-03,-1.341809230220676738e-03,7.080773853268577597e-03,-2.125971882782820305e-03,-1.226565558842031625e-02,-6.372504853080944616e-03,1.018314636481549261e-02,1.208602065749261965e-04,-5.064771031530039026e-03,-4.414240370880668889e-03,-5.675585489130172481e-03,-3.092988882759118156e-04,8.878354455998271888e-03,-5.075505823512667901e-03,6.603160312884080469e-04,4.248590750801609257e-03,2.464105274669509430e-04,3.835918533760843043e-03,7.103496827923319254e-03,4.493301840050193362e-03,-1.749069003397018087e-04,-1.524142959647274221e-03,-9.055634758118453337e-03,2.000057704828959049e-03,-1.424837525766806372e-03,1.446728519678465398e-03,5.596654386084941313e-04,-2.521754286664215552e-03,-1.628750308777887276e-02,9.859755803627103660e-03,2.581536138452578339e-04,4.534539198861852090e-03,3.706795870474590120e-03,-1.116253687363601333e-04,-8.781054985976683921e-03,-4.287777162321403275e-03,-3.450489205561376241e-03,4.104728323132581869e-04,-1.461441011672078407e-03,-2.622764375935979480e-03,-1.749460600418262192e-03,-1.480127849733530853e-03,6.377631951976055419e-04,4.228212917700075943e-03,-2.531217890057294354e-04,-5.616695846661899985e-03,9.378225797426429375e-04,2.519283417136856072e-03,-9.027628140371635537e-03,-3.945178541025735203e-03,-1.033289513660001855e-03,1.534721017816539869e-03,4.787803601384862245e-03,1.528012401072428991e-03,-3.961903609160340352e-03,2.554795619040800721e-03,-1.560040595840550244e-03,4.529554437575650933e-03,-6.306854468185175934e-04,-5.387765238771669192e-03,2.007799862355410115e-04,4.505714409149235398e-03,5.332369335541383738e-03,7.435014256448205759e-03,1.055112037452309547e-02,4.816416161903857955e-03,3.225463093466009699e-03,1.813319580377212520e-03,-8.760394965446299635e-04,1.196943779527178710e-03,2.680705553082087314e-03,-2.223986506508289582e-03,5.395722577234558261e-03,-2.289520469996929580e-03,5.125117925005296098e-04,2.751222262360280418e-03,1.666154269582838243e-03,-7.817373730924606287e-03,7.136718564111690748e-03,-7.697417964416516933e-03,7.208776811032643894e-03,-5.436164263246071916e-03,-3.492724087080985220e-03,-3.477530651671565613e-03,-3.943628920379583548e-03,3.192213539630792043e-03,-2.569510126265688987e-03,-2.836415846197551816e-03,4.411588334377462492e-03,3.473447191265178775e-03,5.311704161339831677e-03,3.947368617873402326e-03,6.068983246057382819e-03,1.114750236992128034e-03,-3.005879867170149853e-03,4.949357459431575831e-03,-6.992190181119685780e-04,
-1.948372059180614050e-03,6.108546067959995558e-03,9.406153475779879466e-03,-1.841443110378605027e-03,-2.454964878149454213e-03,1.123430412314951790e-04,-9.993000828833833744e-04,-2.265940565816898251e-03,5.466495464332686918e-03,4.069858646829602774e-05,1.948218320537088016e-03,6.743248591418374424e-04,-3.088089265301943525e-04,-2.835976426419720424e-04,-1.540810108266099741e-03,-6.227183552633974890e-03,-1.796409021000028002e-03,4.651056021975727428e-03,9.792564073984877382e-04,-1.407477664899000084e-03,9.745138164930623281e-05,-1.390222582672058678e-02,4.433674186605060692e-04,7.252125733134413528e-04,3.057293844672380207e-03,-1.906521232551032464e-03,1.213055816645213641e-02,7.758493146306574481e-03,-8.907175400151008497e-04,-3.899455342307800508e-04,-5.030413480891709338e-03,-2.197598613704337175e-03,-1.613321804024906141e-03,3.285457902995411392e-03,-2.752890773831201832e-03,-3.377785815061558079e-03,-5.476196295686517820e-03,6.761433460903983235e-03,-2.403819264095746321e-03,-2.185717663546047317e-03,-4.729981315375933991e-03,4.033634572381643302e-03,-4.648863802054354830e-03,-3.754037750211495356e-03,-2.632225603175716161e-03,3.567168393443469112e-03,-3.133945016940144993e-03,-7.905525691581979236e-03,-9.698835256441355399e-03,-1.204487021790929266e-03,3.521871202830716835e-03,-6.104955684119348427e-03,-6.911598814871629075e-03,5.975979633569965558e-04,5.561825073729775165e-03,4.810819608365945327e-03,-8.781232680984761563e-03,-3.547733976476925757e-03,-4.038137435174311320e-03,7.382264580219051117e-03,-7.597127924071281693e-03,1.964825824835717428e-03,1.423356139276185173e-03,-8.384214287818332828e-04,-1.909107209016953216e-03,6.929136661056311056e-03,6.042348136475666825e-04,-3.555860969550297349e-03,3.827971986284536571e-03,6.923904030488572427e-03,9.267389762872119650e-04,-3.404609724310235334e-03,1.705344315367864332e-03,3.527163861401455200e-03,-6.272051597641937733e-04,7.381835641923886662e-04,-6.993845566697512537e-04,-2.004347448734241678e-03,-8.355810742125314734e-04,-5.970941245316939081e-03,2.188523389172779016e-03,-7.579085757971816550e-03,7.373059248524157366e-03,-3.844801512919417564e-03,4.840104416659027180e-03,3.748648083975612068e-05,5.194768269031256255e-03,1.008676006501582276e-02,-1.053345852357315607e-03,4.034157248154961878e-03,4.720747670305366732e-03,4.301255797864366945e-04,1.390963419185986972e-04,-6.623836516201011756e-03,-1.179961299622244971e-03,-5.357930748488238734e-03,-1.467105257543934111e-03,9.113496490404644421e-03,-3.318593647362127336e-03,7.845730093911259390e-03,5.701936789808857779e-05,1.652809347226647267e-03,-2.136943690331397170e-03,-4.953393532549637580e-03,3.206151644071909487e-03,-1.009944402736925526e-02,-5.880427991387199467e-03,-6.316901777031739716e-04,-4.845040039388792685e-03,-4.446439263634586331e-03,5.153412984516018633e-03,-2.393515972544942328e-05,-1.745619378403761137e-03,-4.885723154738164792e-03,4.458683495075195860e-03,-1.470034696291838260e-03,-1.738358754108329778e-03,-6.876429364914955983e-04,1.680616319024454651e-03,-6.216570471244822066e-03,2.950168996713680748e-03 
-1.155191274183015396e-05,5.094528291973415005e-05,2.272495857168482705e-03,5.989052273360603168e-03,5.232285745073298779e-03,-2.147385287992767140e-03,3.533412019716204461e-03,3.361051396309936573e-03,-1.080733263407403916e-02,-3.298528674529893102e-03,2.336238627586947718e-03,-5.056785676511770487e-03,2.084594747855139738e-03,8.131262339929662755e-03,6.945874040682077817e-03,4.208292867052641252e-03,-2.662229211841691872e-04,6.880345779644195074e-03,4.081658607692694937e-03,-5.475465417238803943e-03,2.353933640011856340e-03,3.901125987142178408e-03,-9.646999982257636191e-04,-4.436765647794950994e-03,-6.716605143310645960e-03,7.471698562706105377e-03,3.087573097374091136e-03,-2.149023322265883318e-04,7.844273221305177873e-03,4.452410311606095489e-03,-2.393353165804736234e-03,-6.383513550689257597e-03,9.155481959502550365e-03,-5.106379994050953830e-03,2.802948628143491199e-03,2.498835732335047237e-05,-3.190764091377130268e-03,3.547936111108536015e-04,-2.209091946821665283e-04,1.436410250287549310e-03,-5.141247097358573291e-03,-5.549923360561865096e-03,-4.572077363643387883e-03,5.911818133457206331e-03,-1.406624760894291783e-03,-2.869095897231923849e-04,-6.304368628929237113e-03,-2.109652410332999838e-03,-4.307383941403200751e-03,5.038507444218250954e-03,5.510188911712170234e-03,-1.552877773155569548e-03,-1.688600266063768843e-04,5.152943546619629875e-03,-1.184780760649834647e-03,-3.341069380528189584e-03,3.951854019142410183e-03,6.397941103364694279e-03,-2.432225772599857453e-03,2.866054270800841210e-03,8.407426395338410408e-03,6.088869049751576348e-03,3.880471980365568160e-03,-1.051116789772517379e-02,3.859544289170288860e-04,3.364004704089919046e-03,-1.420135719954939357e-05,8.731272308008885099e-04,-3.610922504043081495e-03,3.686023564817739965e-03,-6.251820722089473231e-03,1.027725340867438088e-02,-3.388101799546646038e-03,3.328720593022597030e-03,4.869438908789531714e-03,5.352566137068995485e-04,-2.890882624641010791e-03,6.349046222161275170e-03,7.726960182712738767e-03,1.105595439760556804e-02,6.666942424977068647e-03,-4.079564615152752455e-03,-6.095214180519677172e-03,8.423285017459680377e-03,3.146203062745787059e-03,3.310032470616744004e-03,-9.783725675209516831e-03,1.146236419995657826e-02,2.745826518259173680e-03,4.196619595818438381e-03,-7.201801280778898258e-03,1.895358478588956425e-02,-5.158664545341994090e-03,4.578888218972908958e-03,-8.875886169967523928e-03,2.220141733499713787e-03,-1.175514580301987785e-02,5.846741515880785056e-03,2.056067398518169817e-03,5.453067018367101920e-03,3.846436499812678177e-03,-1.170196831544961878e-03,-7.898894671180996252e-04,1.007903214458202379e-03,-3.872229904424180549e-03,-4.674005582608934360e-03,-3.141837503896750469e-03,-6.731717374262340646e-03,-8.900491188231291623e-03,3.738691850664659956e-03,2.538499127579737190e-03,-6.457649448520935871e-03,-7.081249502219808672e-03,-2.490971369101255446e-03,2.181439563415192833e-03,7.395715162010857392e-04,-2.386123783912014705e-03,-5.804414643054958169e-03,2.471998297816383090e-03,5.225456487045572994e-03,1.452412871242914072e-02,-6.378176604914546563e-03,-2.273256541020489668e-03,1.347175115292245954e-03,2.520439735528256543e-03,-3.059310614046430437e-03,-1.743053477390639873e-03,-5.585063549496389716e-03,-1.694636249963841308e-03,-1.736008431183511678e-03,1.109606702303440244e-04,1.372071901188630979e-03,-2.011786576323966198e-03,8.237047709238186355e-03,5.634265867437921252e-03,8.511623816586232072e-03,4.040685509478041809e-03,6.022157237962837140e-03,5.365631501420615145e-03,8.7645292548235731
20e-04,5.827834303803161066e-03,2.860395948211951617e-03,-1.811956652869620979e-03,-3.190728701852329498e-04,-2.497780154720111081e-03,-8.747110596242455580e-04,6.684573328900721839e-03,3.953661335126372640e-03,-4.188176712501740818e-03,1.395780206955254911e-03,7.088705716991066177e-03,1.311581021160232452e-02,4.392595730640876149e-04,-3.444664607992491481e-03,8.110227599939371854e-03,-7.632850037114684641e-03,1.226757985717636616e-03,-5.864885778179206206e-03,1.335176570702058078e-03,7.736937502722095877e-03,-4.360379164891568446e-03,-2.969221055002436603e-04,-3.961475917673795352e-03,-5.212256477555473645e-03,-6.194337869433345044e-03,8.222100917796508479e-03,-5.393960701756787823e-03,1.063341564665811617e-02,1.174274586461664062e-03,-4.608730074014248841e-03,-1.351696203587906837e-03,2.962847933128987497e-03,6.791289134239372055e-03,-3.379716292984656124e-04,-6.811242672440523740e-04,2.379869388782166351e-03,-1.594149659529994203e-03,-4.242052764043946662e-03,-5.383233368748855524e-03,3.696793788692860887e-03,-3.020174411204930168e-04,-4.390542652164066448e-04,-9.001247848686723743e-03,8.002393988733485716e-03,4.045631753433238595e-03,4.974049242664187707e-03,7.722116930139736289e-03,-1.493930822973605580e-03,-5.552551984714790799e-03,-6.368559567456806569e-03,-3.085608677738445327e-03,2.843890615969269908e-03,-1.781437243395496764e-03,4.828314655879617327e-03,1.239587843651285227e-03,2.939320198161646095e-04,-1.466016201909636253e-03,-1.313006796856549087e-03,-2.151257838867295241e-03,-3.173604607340871613e-03,2.368702554976720106e-04,-4.192551818120970365e-03,4.570755760284452712e-03,4.467014334618237686e-03,1.212428306964208509e-02,-3.002477884189817207e-03,2.110672463529609025e-04,-4.514492812815211996e-03,4.741815949088658277e-03,-1.268840089713174201e-03,8.074806609268504048e-03,2.479089513170875878e-03,1.734940101217313313e-03,-7.121854474759058834e-03,-1.057135943390104423e-02,-2.516427259726882262e-03,1.540594499841281811e-03,-1.210286752734832452e-02,-3.270845756503713061e-03,3.937363623671461844e-03,-6.135014819676077712e-04,1.346768326221350637e-03,-1.582543320971828166e-03,-6.848198895258693353e-04,3.753625470378556435e-03,1.620023361595720546e-03,-5.588070100257575348e-03,-2.820220222503061567e-03,6.640173745074987900e-03,2.602964818608629813e-03,-1.704086336679496187e-03,4.665536625339191529e-03,2.365008076464355721e-03,-7.370877172947232778e-04,-2.737156944192691184e-03,-5.452791030871194291e-04,6.340979832390431815e-03,-3.143077863385574603e-03,-2.858992733212542634e-03,-5.035640223198712849e-04,2.926106076018989647e-03,-7.307537504979451501e-04,-5.473526113468422417e-03,6.726433190403563987e-03,-5.814815742960083300e-03,2.214221370118854126e-03,-9.101045411165459525e-05,3.233898028110959982e-03,-5.866506889936169526e-03,-3.708144595980329347e-03,-1.838986824339859097e-03,-2.884950660085560704e-03,4.579010280365500567e-04,-9.212380200932039284e-03,-5.951751854531658892e-03,5.832650876424223113e-04,-4.039724634341346557e-03,6.131022632048310035e-04,2.129674369053230921e-03,-4.542216585190553095e-03,7.070078507791325087e-03,1.952044208698259196e-03,1.770522904314832504e-04,4.531107898665369914e-03,9.061122193378137735e-03,-1.716768252875766360e-03,-4.204724163674486252e-03,9.878182772726293651e-03,-7.046172480549378787e-03,-5.499911119807952310e-03,-4.246707409677316115e-03,6.697669207405247335e-03,-2.758840105583747725e-03,-5.363591392943245668e-03,-3.690796192062984534e-03,-2.682144096431044161e-03,2.475666180762081583e-03,-4.396657935735501233e-03,3.311998154107653747e-
03,-6.074952159524978079e-03,4.418095466316294961e-03,-4.535359632085021893e-04,-5.836311482024545555e-03,3.158540091074538581e-03,-1.030205484513112106e-03,3.239803994726550562e-03,3.463117348254947814e-03,-5.512700720636393159e-03,5.086849628789555204e-04,7.165808635686276044e-04,-6.123483912758089258e-04,-3.056620606317480508e-03,-7.477077093345911238e-03,3.171485731744586728e-03,-7.001577745252185632e-03,-4.494848337078210035e-03,1.155460035606387104e-02,-1.196711089847301950e-03,-3.545408909572553887e-03,2.469557958305367614e-05,1.023011551861490419e-03,-2.800747171147160812e-04,-7.066749177615946030e-04,-3.330497704782163455e-05,-5.580424982729680246e-03,-3.672021825479754569e-03,-8.383944907429285304e-03,-1.610065278509468026e-03,9.960745540173337499e-04,-6.708809093143644313e-03,-5.981168754920987950e-03,3.079185785109435705e-03,5.905330105717433373e-03,7.294665582732676005e-03,-9.154403204197046617e-03,2.324322403445273477e-03,-8.029918459365206068e-03,-3.404786388869548031e-03,-1.496837135041297366e-03,2.023858846124698858e-03,1.965598642940229093e-03,-2.945033442619510600e-03,4.763200892763059212e-03,-7.506607223052158551e-03,-3.694216461488403214e-03,-4.940314638827875485e-03,-3.979657202593825556e-03,5.808214562951765844e-04,-4.514111827844074338e-04,5.578955611192525435e-03,6.737191059974241948e-04,-2.983328342877123330e-03,-3.756854847690287331e-03,-3.678894621509039590e-04,5.994613499486637145e-03,1.574018573278355981e-02,-8.514284454777751050e-04,8.815381905514819982e-03,-1.629842575117826462e-03,-8.414207492085882395e-03,-3.088832099252168673e-03,1.879406285213165717e-03,3.638260355226688707e-03,3.562258620164013897e-03,-8.639678569694463073e-03,4.287025447688104957e-05,5.331943764444193289e-03,-4.809751678297250962e-03,2.816055170595504337e-03,5.084069370692267768e-04,4.979131028039112186e-03,-5.506977367523448880e-03,2.391971107711601471e-03,-9.832242951636944745e-04,9.481157227498606364e-05,-4.171054403932446079e-03,3.784693785487311477e-03,-1.005056486739147300e-03,-2.857075194664277038e-03,9.807506540125755443e-03,8.301476582801349188e-03,1.572871123555854124e-03,3.510030197411973408e-03,2.137376643546949964e-03,-1.045056125207301711e-03,-3.023730724874018638e-04,-9.434830579106373918e-03,2.018870761532994516e-03,-9.485309407859949735e-03,2.133806960814560970e-03,-1.733301440330611625e-03,3.379900826648100150e-03,-4.179048043753103882e-03,6.126289508786843425e-03,-4.304223544166193259e-03,-4.212637368858671125e-04,7.556175567969767616e-03,-3.747800806143877696e-03,-5.097058065487682782e-03,5.087864115801945392e-03,-4.490693958358867553e-03,1.978331526084576783e-03,-1.978619133245248640e-03,-3.188919614215932150e-03,5.062941421854556985e-03,6.920044304252284152e-03,7.197320103085801070e-04,-1.843001263231088330e-03,-1.347832635543273804e-03,-5.674185617469945826e-03,5.338009925497875716e-03,5.101980399347092604e-03,5.282052480606980183e-03,7.295492830521613063e-03,-8.665009015671269396e-03,4.139921076628559149e-04,1.474324116632524766e-03,6.867551544045999391e-04,-9.054670428698511492e-03,-6.783552461883089052e-03 
-7.889141804363163438e-03,7.780576730371995507e-03,-7.787818972105921105e-03,1.071302272398394137e-02,-3.999342752087884506e-03,1.574441108419949442e-04,2.399956740579481818e-03,9.778429502027088244e-03,4.005788727708965713e-03,-7.176349349880278665e-03,1.356485571733810408e-03,5.189535255712069903e-03,5.432882031601985774e-03,1.054746832574789345e-03,-1.952388599100448935e-03,-1.049313341593090688e-03,-2.000127219529740288e-03,5.179020190044582826e-03,5.117870080403329885e-03,3.356040899904108067e-03,-2.325159963392952039e-03,-1.124224482753458272e-02,4.136329273794053622e-03,-1.034816759311395044e-02,-1.348400766818658733e-03,7.471959473300771211e-03,1.029443157971083374e-03,2.396814520740452210e-03,-2.707311767372978534e-03,-2.550489132573482108e-03,-6.317881066408763055e-03,1.027945050410837384e-02,-9.006524918268528400e-04,6.612204036838791564e-03,6.125677158628075464e-03,1.971001643727557807e-03,1.732067683449895693e-04,-2.506786458703394709e-03,-1.410085661322868500e-02,-1.943301109523830035e-03,-1.061108668474669342e-03,-3.324091696355012220e-03,6.809412766445922284e-03,-7.315770154199596753e-03,1.919245244000762716e-03,-9.979645845590759401e-03,-1.805352569032899228e-03,1.060003550287286385e-02,1.321749516506422585e-03,2.477504415514185290e-03,2.465017286541393380e-03,-3.116965838974367162e-03,1.765359218538817016e-03,3.085484385798518015e-03,4.302585281800583132e-03,5.354337686846399012e-03,2.678311323469656068e-03,-1.802510277881164700e-03,2.319633571518316378e-03,6.968179960034315789e-05,-6.904373909662933628e-03,-7.884293703293066046e-04,1.490142173490434981e-03,3.881867664117698551e-03,-9.151789362761222771e-04,4.603422594529902115e-03,-3.252644649237549854e-03,7.128470516900129936e-03,6.672670202660866724e-03,-5.057089986728458345e-03,-4.082737774405108960e-03,3.280051811570414578e-03,6.771716987620261287e-03,1.130126703401405919e-02,-4.688133231047012829e-03,3.139791134605698832e-03,-2.744184221169702551e-03,-1.851021311336051344e-03,-1.730072142938692817e-03,1.672276194734520060e-03,-6.672849858675483810e-03,3.840434078182802130e-04,-8.415737241629826781e-03,-4.679624800630427690e-03,-2.915533352582357941e-03,-7.131500945466633188e-04,4.212571551837827194e-03,-4.895957194084228116e-03,1.914947867945099866e-03,-8.753847238088041183e-05,1.055157207464999837e-02,5.332313100540074513e-03,-7.207775119739415030e-03,7.133549908424403722e-03,-6.710384633282150363e-05,-2.999017630645455217e-03,2.456607747311881623e-03,4.459568712518397274e-04,-5.623531455972656523e-03,3.510017119460417945e-03,-9.303625074680889340e-04,1.191245176421978002e-03,9.049103160205318810e-03,-4.872739464435909607e-04,1.399555050512385317e-02,3.056094231360470797e-03,3.136451333250165480e-03,4.718524263307944874e-03,-2.517344008663250374e-04,4.755723912382887018e-03,-7.769417252480405323e-03,5.849148158621511953e-03,-7.093405958616328354e-03,-5.145976221201423520e-03,-1.874681051194232234e-03,3.156611319653682569e-03,-5.483389761820698510e-04,2.448877066906886084e-04,1.082434234793804798e-03,3.870288611970261732e-03,1.984135238513532427e-03,-5.546605907811230293e-04,2.621630112842332060e-03,-1.645268224160226266e-03,-9.878062548186704317e-03,-3.080723228899868299e-03,-7.637656122992387804e-04,2.317388988355642207e-03,-4.488557037501048931e-03,2.416787473539963420e-03,3.051311861256489864e-03,-7.542940394843418092e-03,-2.380316715305795007e-03,2.161623084008103962e-03,4.954731788763183653e-03,-5.169471962024765541e-03,8.415892681155881169e-03,-4.408481258781732706e-03,3.391426625451701184e-03,-8.1732576559005
19406e-03,-1.167602029387671339e-03,-6.702749711057877542e-03,-6.227921204302690195e-03,-2.977433890192292525e-03,-1.262357115024569055e-03,1.663315353758766700e-03,3.769856964127476454e-03,5.573390356538881379e-05,2.358375290935741872e-03,5.327312586825415364e-03,-1.637732341448307595e-03,-7.243168656640096542e-05,7.902551457465013633e-04,-3.974030633872917620e-03,-2.718111238011089621e-04,-1.644664451532516997e-03,-5.594734061022810581e-03,5.760686524570396774e-03,-7.118494603337337939e-03,8.543393680332762169e-03,-3.386466325022377804e-03,5.320511092210885619e-03,-5.829709874669129635e-04,2.256825497424610680e-03,-1.169352740192348625e-03,1.020339762982046088e-02,7.018635419954203855e-03,4.051620185130940660e-03,-7.189968944820354473e-03,4.600345417449212826e-03,2.205600690621002495e-03,-4.552080792116048116e-03,2.459496404040386469e-03,6.726569927002008960e-03,-8.427341844723821793e-04,-5.662691354603232273e-03,4.748141697110248860e-03,-2.896136356168244747e-03,3.523816549892607938e-03,2.628551265356930244e-03,-1.162746609969708298e-03,1.950260783510978529e-04,-7.887408997742532957e-03,5.522841820659492307e-04,3.970018324463738500e-03,4.825844103450222704e-04,4.477230131251634139e-03,1.827859565850272709e-03,-1.441951935808230556e-02,3.469937516015041655e-03,1.044476144161845914e-02,-1.858342635442863065e-03,6.286530946053472783e-03,2.221979429599557111e-03,-3.071009523338777674e-03,-1.794991228103210920e-03,4.359755885354376498e-03,4.335447802189572666e-03,3.746542487858552442e-03,-3.715662157235899649e-04,2.273364382418974899e-04,-1.041021048777496759e-02,3.506610058101554666e-03,-2.201469978836793906e-03,-1.339254744723654715e-03,1.689217892619138996e-03,1.934052778344484257e-03,-9.169122850789942006e-03,-7.462430195850716652e-03,1.403116905073185493e-03,3.520519517532988852e-03,6.881847119883516273e-03,-5.737527852969374681e-04,1.323570479108987659e-04,-3.816245721216840383e-03,-1.505720739309165588e-04,3.385160322095743211e-03,-7.600288723432328548e-04,-3.604138726474759476e-03,2.064385519658882510e-03,-3.147976818909066672e-03,-2.976070201955481782e-03,-8.860711418958767266e-03,-8.119172667558982939e-04,-4.305872014277491709e-04,1.911041020512611183e-03,4.195725778797502158e-03,-3.235089036660792478e-03,1.555889217694036162e-03,1.082008686281714718e-02,-1.455460511626455351e-02,9.910965958574761113e-04,-2.750358269004045224e-03,-1.322230254851286752e-03,2.879473114945875750e-03,7.363667539329155944e-03,3.800849757224727607e-03,3.130841777361604867e-03,4.271723491686354665e-03,-5.500417991756966114e-03,4.034776180690306802e-03,2.134905820381126026e-04,-5.575876108177263597e-03,1.712061487005411874e-03,1.519851788432576879e-02,4.054690048830473202e-03,-4.435344259850352959e-03,-4.493270776592343655e-04,-5.230900554260048581e-03,2.823470886997305265e-03,2.555863947627144521e-03,-3.315048591955319524e-03,4.415710224015108321e-03,-3.548429713434382397e-03,1.441732987499315865e-03,2.695946413644981828e-03,-5.657157107573633492e-03,-2.187694070507921126e-03,-1.170159655711281235e-02,3.816597884378563569e-04,-3.601058287692187644e-03,3.004229498266627798e-03,9.771075341084449473e-03,2.508366460604431053e-03,2.502325343520063212e-03,-1.107699230979279637e-02,6.118087025409633083e-03,9.158566360441683374e-04,2.793435436047018734e-03,-1.608837653331788090e-03,-6.357938982565444416e-03,-1.158332294884981302e-02,-1.798577738299993355e-05,2.939564668507958171e-03,4.560574978416021344e-03,-1.304899024345411961e-03,1.969723288969378529e-04,1.462521082261694682e-03,4.373107936526226245e-04,-7.1893
48465599460164e-04,3.783980356806954798e-03,3.544289912156935868e-03,-2.756501070388334227e-03,-6.230764712071157861e-03,-1.678229495885395295e-03,-2.415511022303808771e-04,-5.622746800009880037e-03,-4.496770162879431924e-03,3.936764042262326166e-03,-9.244224576952692106e-04,-4.443828348652110148e-03,-7.816002381145518638e-03,3.313137509369348203e-03,-3.096318758862632269e-03,2.563163689611999262e-03,-9.156034084464881664e-03,3.332473846660354531e-03,7.082935371543779522e-03,-1.791574086237854302e-03,-3.236751753567847396e-03,6.581486059805541702e-03,-2.479686477783129509e-03,-5.755509724603159391e-03,-1.184147791994993653e-03,1.360775596995874659e-03,-1.222080181310899495e-02,8.125830310136371998e-03,-8.452127944822140915e-04,7.978840336591833463e-03,-7.776724440073340232e-03,-7.468067243205873951e-03,-4.937963095701681897e-03,-6.093572403358572462e-03,3.353090550603240062e-03,2.376394971021964647e-03,-7.259958757142769220e-04,-9.949814611346118661e-04,-3.169038027486066264e-03,4.402061119085935205e-03,-2.216191261525196198e-03,-7.923266134348374429e-03,-7.698857096778244195e-03,1.613723528342052250e-03,-3.687513275087381812e-04,1.097382605078059577e-02,-4.764099331763805602e-03,6.081691275631266255e-03,2.534221275056280833e-03,-2.243756913466578816e-03,1.024244689770681003e-03,-1.546047802965711329e-03,-5.102872390170358746e-04,2.688439275803311546e-03,-2.620532166595696943e-03,9.635171062021732902e-03,-5.839240482821131048e-03,6.423489207789068178e-03,3.079525820495657151e-03,8.675158327409941694e-03,-4.354156352346703548e-03,5.011393042562471180e-03,-1.407338216054886387e-03,1.367299999770233386e-03,2.008641243579022215e-03,4.855984295787280666e-04,1.851867962140450958e-03,8.783426730213060585e-03,3.148669788239355938e-03,1.577301379498774556e-03,-3.020358497104504701e-04,4.934448973487667346e-03,-2.637812797291044179e-03,1.143944299661976693e-03,-5.469738377015528202e-03,8.842506011712020220e-03,4.709752134967951955e-03,-1.751320708695447870e-04,-6.869688226775571942e-04,-2.335155800322808676e-03,-9.326950878648382001e-03,-2.736164802580700814e-03,3.628828502061066073e-04,-1.875291623242468335e-03,-2.190475575472966042e-03,-5.831369399322120818e-03,-3.920701040482336627e-03,6.411404308117634376e-04,3.589552576691261562e-04,-2.235998372539168243e-03,2.827897261329043264e-03,5.889934428588668343e-03,5.406270435073668845e-04,-1.281892812901595444e-02,-4.642958510024836581e-03,-1.522517110860267071e-03,-4.147210061584358405e-03,1.315515071263025386e-03,-5.255500477856699278e-03,7.806406137228691193e-03,-9.274235733234517838e-04,-1.123844152283323249e-03,-3.481700728963766853e-03,4.165408809118647734e-03,-5.514108229763572087e-03,-1.408385194419064243e-03,7.391413050399607554e-03,4.172387566362633499e-03,-1.923862576709285069e-03,1.734616884117769903e-04,6.874049963592387871e-04,-1.114311784341990089e-02,3.200468969235925609e-03,2.525332487991312973e-04,5.654229834626791490e-03,-4.362558391369445419e-03,-2.621184143147597002e-03,-5.420135610747178062e-03,3.732662426312646081e-03,-8.963901537831700864e-03,-4.597545243776018226e-03 
-2.761517495482475586e-03,5.623929041336330793e-03,4.523054118995013914e-05,5.174749143692698337e-04,-3.809790227805874823e-03,-6.927153353330165603e-03,8.292496927479761290e-03,6.137315234193375588e-03,-6.681262769804715944e-03,-5.000438962705091271e-03,4.624226986503696384e-04,-5.977068651098893862e-03,4.184828925614813774e-03,2.296596930579597028e-03,-4.857254534017885553e-03,-1.487124015133278569e-03,-1.780261168311184430e-03,-1.676712023161577598e-03,2.528203747463064668e-03,3.322574159384971380e-03,-4.612733418623678403e-07,4.120406211070105906e-03,-7.199723735566440977e-03,1.124249025043708551e-03,3.916760920902309304e-03,5.375409541175132748e-04,4.680225326454943470e-03,-5.193548007118382361e-03,7.124459733888040486e-04,4.459307029051961385e-03,9.961595578938128945e-04,3.723646830526807644e-03,-2.585839529917276659e-03,-2.331197865169250174e-03,-7.463554208262839114e-03,-1.623591989256799924e-03,3.467491129870493762e-03,-4.717402607791717876e-03,5.018020654599242639e-03,5.114127541398545349e-03,-1.665596596539085580e-03,-3.933408413996647077e-03,2.830908184195160462e-03,-6.420379396577801344e-03,5.420103528034312355e-03,5.290369614139867928e-03,-3.516307077151411761e-03,-1.122681999170479020e-02,4.098204384479570833e-03,-4.871907000098008381e-03,-4.506931278574714123e-03,-5.167360255180215682e-04,-8.301934507508245895e-03,1.693091858798611302e-03,-5.237482789716020777e-03,-4.397580237275346832e-03,-5.363827304581998343e-03,-3.893923575776347696e-03,-9.255960423788593336e-03,-6.257139176649160681e-03,6.110047983374960849e-03,-1.997593421445947350e-03,-3.141925538103356418e-03,-1.084536262788470982e-03,-9.283525099576644610e-03,-4.084732904095096304e-03,-1.485377940591060299e-03,-6.567807930746717704e-03,5.543520283078057220e-03,8.220504585696501937e-04,-1.265777365131770570e-03,-4.473686633371601849e-03,5.416984696335331965e-03,4.476479630194879142e-03,8.887327763830105490e-03,3.998405845847737353e-03,8.276861792373508761e-03,1.368844882773008259e-03,1.286766912617218293e-03,2.811989521509225109e-03,5.956779186399783349e-03,2.148373152199373626e-03,-7.260443646831323851e-03,-6.548019935922623665e-03,-5.988756391100334003e-03,-2.803962222203699572e-03,-2.991050471016523833e-03,6.916017892745728035e-03,-1.992034206120584742e-03,-2.363408995970975833e-03,6.255628725529447341e-03,6.244233554176017650e-03,1.328895782289973111e-04,2.573658808243407033e-03,-2.496056722098531980e-03,-2.687646770311240708e-03,1.486457366586468702e-03,1.194224534492914838e-02,6.864151405619635512e-04,-1.473284745447902893e-03,-5.327062881509968782e-03,3.204722927807131597e-03,1.707488106210424197e-03,-4.865549502994262823e-03,3.243606973291114377e-03,3.699698828808000058e-03,-6.212766141134878732e-03,5.939567132029512954e-03,1.026107294452184267e-03,-5.992442572185948529e-03,8.810181429732952610e-03,5.292762824200712354e-03,8.601907250204449823e-03,-2.372367619698204002e-03,8.119344524679592001e-04,-9.881309792256752836e-03,2.084382268784292207e-03,-7.675877687152321376e-03,-3.959697299858200639e-03,-1.262978062051774400e-05,6.542752968738771262e-03,7.446501287276040097e-04,-2.518298498239114006e-03,-4.256261564231351643e-03,-6.569893437163069026e-03,2.029878127716664652e-03,3.936654053048826117e-03,3.616429363581656533e-03,6.765492891861829517e-04,-1.010466800319493458e-02,-9.134347159698739602e-03,2.926416791527871718e-03,4.041943786408688268e-03,7.430027526793736743e-03,7.744836062609186667e-03,-7.640589032654738870e-04,-1.689969534911686709e-03,7.479303889731904117e-03,-2.734821931111782855e-03,3.213427020
535857340e-03,-2.353615933099539783e-03,6.506150107634257884e-03,-2.029928251456953638e-03,4.502374068812315180e-03,-2.004134206735580775e-03,-2.515722094541992908e-03,4.475449116152005599e-04,7.799353347021027553e-03,-2.038635556668739329e-03,-3.652686550962546894e-03,3.367756153057115120e-03,5.805985423058870323e-04,6.548542987763096221e-03,6.075836945725080970e-03,1.876898462632997774e-03,3.504368462259226016e-03,-3.010722297904840083e-03,2.872239393550825130e-03,4.285967184499872747e-03,-3.604633432195733077e-03,-6.821624468855083537e-04,-7.403634330708134354e-04,2.948424524574789259e-03,1.221054893029428030e-03,3.266160426778469059e-03,3.130426806531308708e-03,-8.910854816355709765e-03,-4.278032226554502777e-03,-2.886595774212275933e-03,5.161867785598293554e-03,-3.314158517790008560e-03,5.299915373210260100e-03,5.430774548490949899e-03,-5.215893578384530466e-03,-2.193238573788110473e-03,-6.311648373978510050e-03,6.398155480496268629e-03,-1.070881797066919501e-02,-4.246461565288376858e-03,-2.208961979366092735e-03,-1.898136934686975958e-03,3.521776232800912128e-03,-3.934933355734302959e-03,-1.897615278725158471e-03,-7.541925927532886105e-04,-8.719704732783750342e-03,-6.734534719696043566e-03,-5.180644596637533524e-03,8.780251283764082962e-04,1.303887804258113334e-02,4.368800974129924452e-04,5.545514270481706996e-03,-3.865568090154426675e-03,5.853587267572960690e-03,-2.932267347495259792e-04,4.171277732450085970e-03,3.458349575049992318e-03,-1.248226451327111124e-02,-3.412972885337276266e-04,-1.077852322498596034e-02,2.606760812310885290e-03,-5.809173285125376375e-03,1.437930394716058257e-03,6.178299820642503548e-03,-5.232895206759250340e-04,1.227068115528300897e-03,-4.942601329791158384e-03,1.258562918262210242e-02,-1.740078529873700409e-03,5.710373022302764161e-03,-1.007786829305318526e-03,-7.323481459317280132e-03,2.381833949941653417e-03,2.868457608246169615e-03,-1.012497533828473663e-02,-1.736446727002389709e-03,1.358678505081812454e-02,-7.925449402407754029e-03,-1.735457388845210733e-03,1.155468212940936011e-02,-4.837604282606976572e-03,-4.306865457818804567e-03,1.610275233173806206e-03,-2.612994596359153026e-03,-1.648217937034986494e-02,3.122123938415256612e-03,-9.293533644160575230e-03,1.545459406990714029e-03,-9.714923149330590302e-04,4.057386274580000729e-03,1.011396013519995227e-04,-2.016172927890662846e-03,-6.072087295485302343e-03,5.729945334715978202e-03,4.515134447843172777e-03,-2.376122825530954837e-03,1.105566013807427524e-03,4.012448333704523272e-03,5.945644891356754828e-03,-3.582397406072382502e-03,2.711487165513532405e-03,9.268644360194125756e-03,-1.616456443745943909e-03,3.162788430613145763e-04,-2.287764341376215347e-03,6.590880113457109508e-03,2.893033203506374228e-03,9.427427838975424666e-04,-6.999864154507253085e-03,2.061369289505761340e-03,6.526839051417999624e-03,-6.874104835854977051e-03,4.449874563628270288e-03,-1.150348267169108924e-03,-5.832903926078158996e-03,5.353235053761648370e-03,7.508565928107513833e-04,-2.445895019657175207e-03,-7.038708335867256173e-03,-4.661595180791196398e-03,-2.559930332216343168e-03,5.432220585043489106e-03,2.902972039149619068e-03,-1.118948574578024978e-04,-5.274141017704239535e-03,8.349124197900894759e-03,-1.868325996450708240e-04,2.851956110309061473e-03,4.102545447818242534e-03,-6.063976923268783910e-03,-3.205918398306347849e-03,1.770427801762982376e-03,-3.414367152533301277e-03,3.099320330680719379e-03,3.304809448452668253e-04,3.798940227922554744e-03,-5.701224862644017378e-03,6.204207159605627103e-03,2.053085684131067401e-
03,-2.882799848737749911e-03,4.799048370892797580e-03,5.884679341018919131e-03,-6.947257456094046296e-03,-1.166425294463797760e-03,-6.530167677447453085e-03,3.842893132161050770e-04,-9.196676409555192694e-03,2.528290448875211618e-03,3.415458794459866939e-03,2.964266793247071451e-03,5.723583593785558886e-03,6.350608283359854235e-04,2.681090264241439616e-03,1.516879453631475106e-03,-4.737244085434185598e-03,6.695111018959512897e-03,1.021154255401962758e-02,-4.079816752178042939e-03,4.042513285494498559e-04,1.533829667859566486e-03,2.341134714384913561e-03,3.930687365948522770e-03,5.437868865676771151e-04,1.422577541376739401e-03,-2.741730358166826875e-03,-6.891340027045354810e-03,6.939752519025711719e-03,5.257805254033424448e-03,-8.860976140745959384e-04,-5.420447772017715493e-03,-9.383420025508223961e-04,1.709610804194949846e-06,4.157786291769120703e-04,-3.622019991939515963e-03,4.059451808209761647e-03,-2.723261709716086505e-03,2.748952924572502145e-03,-1.756655249009259154e-03,5.097960313843412535e-03,5.619838986269011169e-03,5.804757922293723831e-03,1.857082571881887249e-03,4.240421288488819933e-03,-1.478249489656126032e-03,-2.966407218225607621e-03,1.412169340812372909e-03,-1.217794948933673517e-03,-5.273576143257891098e-03,4.434316865048534427e-04,4.729041629846108229e-03,-2.317741628036133414e-03,-8.297614998993025487e-03,-4.631239104534377350e-03,8.129210668087942168e-03,-3.027792829881104878e-03,-4.000897197441946260e-03,5.886136435029129185e-03,-3.367507915023242558e-03,-1.346188196443802443e-02,-3.242968958965176951e-03,4.023547253512366195e-03,3.610866539674002004e-03,-1.150829442392156893e-03,-2.421291485461966094e-03,2.335126676993024876e-03,8.340820125241604072e-04,-1.536750204762054890e-03,3.177657636997754855e-03,-2.148596792113593914e-03,6.236804705402794208e-04,-3.762996345106323515e-03,3.930208317481746218e-04,4.869814737229583891e-03,7.893094581048609856e-04,-5.052208007014304808e-03,-1.938216921550609895e-03,-1.551921899780083500e-03,2.209084848561774280e-03,-4.953588883836116362e-03,1.979858951680905227e-03,-4.751814723460700239e-03,6.338269555747328256e-04,-2.398201585191412667e-03,-4.124741617386787004e-03,6.151577659061956138e-03,-1.298495750829252492e-03,-8.337608689472305462e-03,-1.279368224257268830e-02,1.143534799917585226e-05,-2.813128118681017514e-04,5.674415075719258482e-03,-1.398783428008469176e-03,-1.289409931310897366e-03,-5.870996669584852695e-03,1.586220362309463195e-03,-5.202941431551941741e-03,-3.236398127270838539e-03,-3.489488794929510463e-03,-4.806333640661414851e-03,-8.674128891072335409e-03,4.722176133086296995e-03,4.082550209991491104e-03,-3.069045860048695302e-03,-1.168644429588179930e-04,-4.933259377769074437e-03,-2.644470848525442511e-03,-2.266965523417341279e-03,-5.994972670175345228e-03,7.861846354554159061e-03,-1.999170307229793022e-03,-1.162028349730057043e-03,6.034813553665082514e-03,-2.962282542403769144e-03,7.914573540324740947e-03,-1.306266847783351922e-03,-6.199226363852846679e-03,-5.797052666805081224e-03,3.331218012784141740e-03,-6.531982121648366273e-03,1.002482384645158806e-02 
-2.156766974235765476e-03,-5.883627597204191262e-03,2.392049258985094046e-03,4.130864916393412259e-03,-3.898504151301588998e-03,-4.661365292822254938e-04,1.411282749239840340e-03,3.525792350241710449e-03,-7.576925935118314359e-03,-4.730975925347739516e-03,3.542762588348609062e-03,-1.333280236081854642e-03,-7.180237233137867423e-03,5.029346779335246154e-03,3.523955401155439001e-04,1.194112475327488952e-02,6.874190061181542979e-03,-1.434676632942479685e-02,5.399897448657727146e-03,5.461931310861771818e-03,-1.088638256317033361e-04,-2.890730896322062787e-03,1.051600158322648845e-02,-1.633928698342606081e-02,3.508917357282803889e-03,1.205286033756928100e-03,-3.765102866389277367e-03,-2.673240472735597100e-03,-6.116863598027784572e-03,7.695294666421595084e-04,3.469090796990748331e-03,8.744557665350561346e-04,-3.170945702316649382e-03,-2.208508878859514763e-03,2.606645821644485404e-03,1.691042441644052298e-03,-1.519671982538612492e-02,1.025239564086726299e-02,-5.030092432824058608e-03,-5.858204589079011801e-03,1.643447329394933125e-03,5.876496918374700005e-04,3.405054719329526740e-03,-5.142571245834785115e-03,-1.035491442687994958e-02,2.313189666222729111e-03,-5.229095373751739854e-04,-7.046090263047873059e-03,4.402120894326996825e-03,-4.753745377678071302e-03,-8.704816095540099242e-06,1.052730731530532749e-03,2.011765830154836360e-03,5.364073804915361378e-03,-1.334172784306532486e-03,-2.130263263631972017e-03,1.188402378900180190e-03,-1.754928313576965392e-03,9.966286119737979496e-03,4.298417099094125823e-03,9.010582210485630281e-03,9.500606102956561010e-04,-7.916113773296144795e-03,4.111907163162474566e-03,1.341461194573443467e-03,-8.042793056825566292e-03,3.157528389482116421e-03,-2.974558300145736800e-03,-4.574182935736086260e-03,5.838877608198479556e-03,6.124060323133435554e-03,-8.859998018209840108e-04,6.555446951443021086e-03,-6.843229716097990302e-04,-4.141780913500851639e-03,6.642524274757958050e-03,3.090987367233006492e-03,-4.596527424211953859e-04,4.988854699552818482e-03,-7.628128528920169460e-03,5.441498201858691562e-04,6.879863611028685065e-03,5.339354544127912147e-03,-3.359544030081040855e-03,5.035435400213851206e-03,-6.785880694965813406e-03,4.891550018412262751e-03,-1.317226242529480991e-03,1.491903206447342881e-03,-1.044604641529271521e-03,4.519501419332817155e-03,3.844440996591055957e-03,-5.578197614451523532e-03,-5.805998581642132610e-03,-2.607340768792421742e-03,-2.576574848456930893e-03,8.135621658630262307e-03,-1.984947026057344143e-03,2.900220767219134806e-03,2.125614785953406586e-03,-4.684557511315515753e-03,-7.455787426895025584e-03,-2.582648254490074735e-03,-1.032137406808601200e-02,8.979598204236320658e-04,6.244754355088399152e-04,-6.685194136357455047e-03,-1.025303402869343940e-02,-2.180451356134717987e-03,-5.901950227182699293e-03,-5.492117653702698228e-03,-3.904175347796765526e-03,-6.087861432591107867e-03,-2.693070434282434332e-03,8.617731851244030128e-03,-5.857722150314150118e-03,-9.321596156665041101e-04,3.942089957808378800e-03,-1.569225133111317588e-03,-7.768802278248082560e-03,1.185249786790771405e-03,-4.607673606273561366e-03,1.249840028230289266e-02,4.436652521815321118e-03,-7.018070798602702364e-04,-2.828598126527746056e-04,-2.897301641770966469e-04,4.974352351850586454e-03,7.542998048018008383e-03,-2.727504114118151513e-03,8.190581596913520945e-03,-1.950991562333588341e-04,1.379312233485939293e-03,3.958379961918508973e-03,-5.579028076439884569e-03,5.204368916937820609e-04,-7.565653622497352584e-03,2.792003420131961705e-03,-3.565644744616348807e-03,-1.23271
2416845595932e-02,4.791003499196726766e-03,-2.894063214282531060e-03,2.023054568945880250e-03,-7.359355853430297502e-03,7.344909194625436587e-03,-1.502245321286658011e-03,1.180399040025524289e-03,-5.659130781022505413e-03,3.237316879314426892e-03,1.294475149266168142e-02,-2.120278409294817402e-03,-1.023281048403337345e-03,3.240856519495139473e-03,-2.895987188762881234e-03,-7.357645960805278551e-03,4.732308011805472464e-04,3.252317685089844458e-03,6.039926399698024827e-03,2.019322704501318871e-03,-5.831620281156821815e-03,-3.897780679194170518e-03,6.944844872056628446e-04,-4.503114130507157066e-03,-4.531974608607270728e-03,-3.343781103589512065e-03,6.825134983434063413e-03,1.371376344543619001e-03,-1.218050031928899881e-03,7.290539823524340261e-03,7.858645009532552739e-03,4.449072711651851260e-03,-4.756420614281389563e-03,-7.759129680931365139e-03,-6.033397781926002349e-05,4.049489083019230246e-03,5.360023036159049931e-03,4.093270757572604023e-04,-8.704383489181777983e-03,-5.763912995240100777e-03,-7.196735200936115080e-03,-3.187947725391108295e-03,2.130837852368782336e-03,-1.838672908521510590e-03,-4.086445655105562427e-04,9.264071267566868426e-03,3.670629545859409228e-03,-8.612019242057908711e-03,3.563570082815177738e-03,5.243150353346941357e-03,3.201092418130789541e-03,-4.905725414688233074e-03,7.197878149776660260e-03,4.034846458801555694e-03,-7.538529675315817566e-03,2.986005616991262678e-03,2.574280581770941288e-03,3.995471405794871235e-03,-5.264244238278197041e-03,-1.293199600029303455e-03,-1.795707396662963825e-04,-5.271212882045916179e-05,-1.604590984282279736e-03,-5.109015644347555734e-04,2.872538380759690705e-03,-2.926637736777315101e-03,6.236124921491579230e-03,-8.841531147972105185e-04,2.682374530606824927e-03,-3.019585592177023974e-03,8.071156622739586797e-03,-5.931301730281954634e-03,-7.378855236717396689e-03,6.326193865565641651e-03,-1.163381132369960096e-02,-3.653101646211313262e-04,-3.402847907248864219e-03,8.041768487159040024e-03,-1.202417011540343923e-02,-4.011393739839513688e-03,-1.010093027623911593e-02,-3.491317768161956211e-03,5.043946194983226286e-03,5.783681035355528986e-03,-8.364703791047355466e-03,-5.365815621449517188e-03,1.162218445936684906e-02,7.755799593349070684e-04,-6.316076683542361930e-03,2.309594235628800642e-04,4.652657741948239972e-03,-2.223514767716746297e-03,-4.140745826430807072e-04,2.152749026315682081e-03,3.669930326164848235e-03,7.884677729172014093e-03,5.924926094522267681e-04,4.526347164148487130e-04,-5.412133839846225078e-03,7.975628749083716945e-04,2.999912184069043405e-03,7.171077702216347230e-03,1.541620387340007564e-03,2.220208982885679087e-03,4.031432967951779217e-03,-3.111369344627281687e-03,-1.212153743468773843e-03,8.931801909945035617e-04,-2.192969256600926179e-03,4.203685355636200475e-04,-3.890829211870982726e-03,1.287982558662237797e-03,4.636629311190060450e-03,4.787591431609798144e-03,-2.903226653653290419e-03,5.396653297314487653e-03,6.986181405607143520e-03,1.777515624123846049e-03,1.161786008889528592e-02,-1.033741985285125753e-03,-5.469557546031851569e-03,-2.411057487873366142e-03,-4.994520687480953798e-03,-6.387486122331381732e-04,7.200409775182083083e-03,7.221107518963755479e-04,-3.964679722251045028e-03,-3.956719348012335130e-03,-5.448689209847891284e-03,6.547907789538820987e-03,-1.941258526925772722e-03,1.246557173090389985e-02,7.206019758150193589e-04,-6.037711491329126115e-03,-5.364746404662501522e-03,-2.722532176303637651e-03,-4.060695487943723203e-03,-3.161077671418356660e-03,3.052229137950922440e-03,-1.133409647730541
014e-03,-2.632742948743660438e-03,-6.572147742477973985e-05,-1.850750066783497750e-03,4.466958772069511661e-03,-8.573895235934894665e-03,2.878713008818714180e-03,-4.045219238146819177e-03,-2.971690888714959618e-03,-3.807691120138988368e-03,-3.015271294386820400e-03,-5.289257931997610497e-03,3.904549586416131580e-03,3.991883742220951256e-03,7.190417189284601177e-03,2.432067688921906064e-03,5.875947326246154512e-03,-7.123142377411641277e-04,-5.719112133961250233e-03,-3.333109385516286761e-03,4.525091553112995692e-03,5.588909663246773886e-03,4.600416086681071576e-03,-3.280579580986831608e-03,2.168532854218898232e-03,7.517342393854227652e-03,-4.834825516732385890e-04,-1.007934403531376803e-02,-4.865093274598207042e-03,-8.842525742572750636e-04,-3.232167403786227744e-03,4.326958602624709355e-03,2.299016826449042095e-03,-4.824515076741217687e-04,-3.780025639435107519e-03,4.882472788356566340e-03,-3.916026120544821923e-03,-6.617137673218274029e-03,2.767512310022228234e-04,3.538190883617202930e-03,-9.891414685516846696e-04,-4.304478452518511848e-03,-3.755639686369193972e-03,9.615644093037981413e-03,4.465106866881764225e-03,-2.430488152966516640e-04,-1.312956259656961619e-03,-4.996543339988962322e-03,-4.384193331023112337e-03,-8.238665303674973456e-04,9.857573763725155716e-03,3.333155924364929624e-03,2.602167214660301101e-03,-9.546340758754373297e-04,-8.862519250541508672e-03,-1.516769382251588081e-04,6.326893724841021506e-03,3.798469618841583510e-03,-2.747097799584703997e-03,-6.946798902927063535e-03,8.206808303697132661e-04,6.849700759473897850e-04,-2.828685974193848041e-03,-9.386761339974581449e-04,-1.625813161545296929e-03,-5.084588085601471352e-03,-2.595188489438229971e-03,4.788900527890225037e-03,7.026636554067016392e-03,7.378026758383035393e-03,-8.408842483808881552e-03,6.847674418382444783e-03,-1.917615168287226937e-03,5.920690873312566514e-03,-1.749114358273980488e-04,2.084432256000074649e-03,-5.354672392353849487e-03,8.145044242984373880e-04,-1.116767726309004539e-03,1.784539260572378912e-03,-2.343033127608496694e-03,1.744293725786480453e-03,1.553547701256618703e-03,-1.772470129051394953e-03,-1.592058046645187380e-02,-4.778270757344932687e-03,1.201897638232901156e-03,-4.785652492215178820e-03,-5.914342219660572234e-03,5.071089604790003647e-03,-6.262686545150156735e-03,-3.326652379913243047e-03,7.801855990247479403e-03,2.141922282481196517e-03,1.537415815048693343e-03,-4.689756001506293140e-03,1.094476395209127879e-02,-2.772989037424227355e-03,9.784460742648774540e-04,-2.286805561651391022e-03,5.137013112223055671e-03,5.884274406253124992e-03,-5.931942698760115686e-03,-3.750051364875378081e-03,-7.679617304088308047e-04,-9.027367558737435993e-04,6.037142633948976763e-04,-1.356853816060809258e-02,4.675418126382905638e-03,-2.613127935115491796e-03,8.082569618421169694e-03,3.577035493998349715e-03,-1.034501971943148531e-03,3.152233544064101875e-03,4.663121185134026271e-03,7.577047096434324884e-03,-7.702272191531023271e-03,1.314897860315506753e-03,-8.283380009377347766e-03,2.440420257917063143e-03,-3.770027399259382947e-03,1.656490524175382182e-03 
-5.860800761124983563e-03,8.220480795992858927e-03,-1.469547899031805032e-03,-1.009863712564546380e-03,-1.393739684828054033e-03,-2.492251769511426830e-05,5.510061344000305363e-04,4.782601414021886259e-03,-5.876794780492848591e-04,-1.455317396338681626e-04,5.490934867580367189e-03,3.755117049969921086e-03,1.162438914494626739e-03,-1.629538373791952505e-03,-9.966259350182465372e-04,2.399071897997858991e-05,1.018289713447786439e-03,3.413625910344580527e-03,-5.956250318785744936e-03,-7.966079263495250834e-03,-1.942268105890372675e-03,-2.325948562363365068e-03,-2.224046204700180340e-03,-3.615890459393822972e-03,-3.656534590430618372e-03,2.290852416685364070e-03,-9.835068433196236456e-04,7.583557741318829558e-03,-3.961410415844945562e-03,-4.172017953562749681e-03,-6.538627488527510093e-03,3.937279766053520223e-03,3.477783609755002482e-03,-8.990819651702600460e-03,9.165184747831959383e-03,-6.533924799829713727e-03,-1.051647681448322595e-02,-4.494554007740683525e-03,-6.321279565846413089e-03,-3.566838673424063601e-03,-7.404137605438933234e-03,7.756583220221198102e-03,-1.184120829015454903e-03,-3.609343251228629579e-03,-2.521163285410009305e-03,4.653457329763032585e-03,4.275488361658864346e-03,7.701265692197239304e-03,4.653758824758629474e-03,-5.949780048203946910e-03,-9.454894075516360671e-03,8.451423981461838350e-05,1.208510931441210499e-03,1.230165544897139120e-02,-4.464969253295090687e-03,-2.289367410276301265e-03,-8.510869458098818327e-05,5.907483245056014375e-03,-3.965452181421500605e-03,-1.113984647725882113e-02,6.398005706973060834e-04,-1.331210976270638105e-03,1.011699849768609664e-03,-7.487763407485520731e-03,-1.522294053615654225e-03,-3.430317550026003586e-04,-3.136969933474701485e-03,4.742298693049904618e-03,1.280340255760945012e-02,-6.218082214719569158e-03,-2.624841031461800332e-03,3.597042844069352555e-03,6.621986019353320006e-03,1.149953470481100692e-03,-1.002245394267377993e-02,4.935836409386832463e-03,-4.464978524426715251e-03,-1.244973287077216295e-03,-3.418524096579851363e-03,-7.297951871157506813e-03,4.591780793769506021e-03,9.915596050628040434e-04,-1.035584589673200356e-02,3.991455258049036656e-03,-2.620047784085402964e-04,-6.991220078764186392e-03,4.254516656774062072e-03,5.507003843926884003e-03,7.955733441459599334e-03,7.927999555019530145e-03,4.682213001751701388e-03,-3.138820548463993037e-03,2.173154578741849641e-03,4.483847298948774596e-03,6.266166652101962586e-04,8.278353550361951772e-04,-1.234948800100811594e-02,2.267394711745317898e-03,-7.419051670714523275e-04,-2.611843497961642275e-04,-1.850022291445065396e-04,-2.060919280663980804e-03,-3.651800736677616663e-03,5.087494787011045440e-03,-1.220376126127046927e-02,-4.958144893766297349e-03,-1.553851895855959376e-04,-8.756870725246182485e-03,8.911283673908688280e-03,-4.270246912215538461e-03,3.166119830191783599e-03,-4.623320658927313961e-03,-5.543307106719169014e-03,2.868212607425714086e-03,-5.095665541933466075e-03,1.975866165107868517e-03,-1.566397824973736865e-03,5.825655684294997298e-03,3.434920547101617642e-03,-6.876393791865393311e-03,2.391894285960103565e-03,-9.340121727631803608e-03,-3.076210011814080542e-04,9.404322933222158953e-03,-1.224569977443281483e-03,8.928516491004472380e-03,1.063797839566964568e-03,6.846661121420896320e-03,6.949658520935847895e-04,5.150496530988381803e-03,-1.411805958014191982e-03,-2.751976430739818541e-03,-9.332566947639216702e-04,1.377073485028238852e-03,-4.073092335835816133e-03,4.691265340237273269e-03,7.558179893105271534e-04,4.490100497255969381e-03,1.299853721141413371e-02,3.04
2535711811770842e-03,-2.078867193059700906e-03,3.555862049086889413e-03,-1.632245752806152101e-03,-2.853888776830007610e-03,7.274712112919946849e-03,7.235924520312470541e-03,2.697239306957134770e-03,9.218921183936181537e-04,5.433738972544698009e-03,-9.272706343828268291e-03,-1.472572940472554551e-03,-4.727731284787793815e-03,2.197581488083084032e-03,6.605792969575295139e-03,4.559064085702424799e-04,-8.931365064735795421e-04,-1.157254264720753052e-02,1.701933792315479810e-03,-1.366197777098446340e-03,-7.958519245363141967e-03,1.339010164648267746e-03,2.316466093098691181e-04,8.356303293456341513e-03,-1.819181051023784600e-03,4.714732385879584486e-03,-2.681643058282663465e-03,5.136829723858130428e-03,-7.769101531549939092e-03,1.125567767910280043e-02,1.383088909922809825e-03,2.618546258419201210e-03,6.661853921292358073e-03,4.942041597680190378e-03,4.902729185244198221e-03,-3.540209904757195703e-03,8.059775528898660904e-04,-1.021758380057708436e-02,2.486913064754313341e-03,8.826343074558713970e-04,3.647375928863508688e-03,-5.476392159521492218e-04,4.428159569562055593e-03,-9.275711241996457664e-04,4.288800387938113655e-03,1.064650128802353997e-02,-4.059257087857355757e-04,1.949629878444794151e-03,-2.107173456879048428e-03,-5.565200905174582172e-03,-8.762009623271711442e-03,-4.167432069426906492e-03,3.868432595247287432e-03,-6.722185188112593524e-03,-3.387925373664901069e-03,-1.606679368166408347e-03,-4.743340053942942530e-03,7.451956468781419096e-04,-7.107664685304428305e-03,-5.471975450426279190e-04,8.238048066694255572e-03,-9.359714488515753772e-03,1.916761128301746947e-03,-5.619909754357966906e-03,-1.318019307869290613e-02,-2.120308794269005169e-03,1.464062655530524743e-03,-5.365439712304303924e-04,-7.557356810185619431e-03,-8.566020970501055098e-04,3.100061597558566306e-03,-1.081073956315102558e-02,-1.158971639430208178e-03,-3.080057160867446728e-03,-9.175809622959716846e-03,1.265722308138587720e-03,1.095569459733999147e-03,-2.164737165542279448e-03,2.855907247139599127e-03,3.533602617288548672e-03,-2.877318447147300326e-03,5.894198871318429532e-03,6.402800752499643701e-04,-5.156946589172151964e-04,4.388338534071342147e-03,-9.610199960196706692e-03,-7.125960028933971298e-03,-4.057022450504754567e-03,-4.661003653963134014e-03,-8.449037815507511734e-03,8.204515353247225640e-03,-2.657784357421159784e-03,-2.643898056391741531e-03,1.723774998167571307e-02,-4.974452920186242392e-04,-3.051443958251657314e-03,-4.593854461572554129e-03,-1.332534446903969362e-03,1.454823956546130769e-03,-8.599267245907340346e-03,1.062147676548516780e-02,2.848926199807967803e-03,-3.178448626220074010e-03,5.188276006286029335e-03,-4.573221662501178626e-03,-1.772812002510248432e-03,1.618029214569976365e-03,-2.268374871892883064e-03,-2.209265988668060801e-03,1.348009101869856573e-03,6.940817504778587152e-04,5.918358159537307275e-03,8.664326737720195667e-03,2.969345585756472811e-03,-3.528921078717055411e-03,5.886808544235287363e-03,-3.138634508115879188e-03,5.533280063475836362e-03,-8.174514679121291233e-03,5.198563954146935735e-03,6.887821221056654453e-03,6.318853929069557541e-03,1.134610602493220117e-03,-2.799585106840262166e-03,2.225716703507663562e-03,-1.910813200534967893e-03,-1.050265749159649646e-02,6.497876986268336617e-03,5.132776154261049599e-03,8.606669691050367055e-03,2.056838755501277927e-03,-2.481574707076087630e-03,2.847846637683050693e-03,-8.501373867495306061e-03,-2.906470478259399939e-03,4.609233443140637806e-03,1.119232689555300534e-04,7.296714382823600445e-04,4.726244854288123745e-04,7.08867004008604
8237e-03,2.040320512888517662e-03,-2.039873487587747294e-03,1.234976054935529692e-03,-2.914860198678942676e-03,2.067792221798056638e-03,-2.907413394363879736e-04,6.201587800727757262e-03,4.313526742006843482e-04,-1.439620191254110601e-03,-8.420525234302790596e-03,-7.157457303402646638e-03,4.813374231525511908e-03,2.808983001104262654e-03,-1.948155757724423379e-03,-3.232568943299892091e-03,1.766765486275427913e-03,9.140677816998999533e-03,-6.726593004687032341e-03,6.447762367084637104e-03,1.032618062764988423e-02,-1.346822940502622737e-03,-1.069288456056985631e-03,-9.875834190972335169e-03,-1.294957867204214481e-03,-5.955440518298298590e-03,-6.058218805520397543e-03,2.296095581213200999e-03,-8.708080311792027080e-04,1.760022213332593374e-04,-2.477634278262090713e-03,2.903484086863239704e-03,-6.367853862842871512e-03,3.422250968230466234e-03,-8.666663262494289990e-03,-4.314354636764499915e-03,1.374478670404115626e-03,1.162080043105738123e-04,3.435247961512415474e-03,-8.743034064057143492e-03,-9.045386897339776147e-03,-1.287570882065337992e-02,-6.997682689120904298e-04,-8.116558322083120220e-04,8.537741825069109988e-03,2.855144768479633225e-03,4.622345094504510046e-03,7.981904034279106795e-05,5.104044908778441678e-03,5.664357864643310148e-03,-7.915708682355068748e-03,-6.526563232130819757e-03,8.446656222093906503e-04,1.758411051556518908e-03,1.391160847458760511e-03,9.808861015238310496e-04,-1.713678335112458345e-03,3.860710156788823080e-03,1.247853027043474333e-04,3.614641562643286163e-03,1.397772232727501366e-03,-3.830549756747296704e-03,-6.637914877426837263e-05,-8.357751198177928130e-04,-5.229250799087914285e-03,3.330196516898403097e-03,-6.220500110581604104e-03,1.069627082069824452e-02,-6.135164911585011711e-03,-3.962154474400455401e-03,7.835684788663815556e-03,-4.618082948651606838e-03,-6.956311001370098224e-03,-8.289939754706501795e-03,5.446142884135078688e-03,-2.981538148642098353e-03,1.611639353414055830e-03,-1.419179062231963780e-02,-9.820412385060952026e-04,-2.273774743707076933e-03,-2.842728343477334777e-03,1.072646265072733527e-03,1.976474510401085745e-03,-1.079903392264435977e-02,2.075928114704291959e-03,-5.697515557581533246e-03,3.856912127279006879e-03,-5.554547763432648869e-04,-4.849405209665744068e-04,-4.452448365537695942e-03,-1.219969438520454239e-02,3.088084372786799314e-03,3.456057726534803465e-03,-9.910845795853028602e-04,3.869865381855404178e-03,7.762584442555139561e-03,-5.013953473450381143e-03,1.950374783024718360e-03,-3.499166124639535840e-03,-4.962629047175240796e-03,-2.718051264231117863e-03,4.250107629206635726e-04,-5.013858351697222070e-03,4.899682030954991864e-03,-4.440156094206512173e-03,6.937543853148250839e-03,-1.047511771710657284e-02,-8.461713580377784941e-03,1.290974325724290710e-03,2.020587559648702285e-03,1.791985077605323488e-03,-8.279441861420027988e-04,-7.567076760813834513e-03,-3.468480270014899242e-03,-1.811014941428306735e-03,3.841219413169841973e-03,-4.001102493219226182e-03,-9.036760868603033051e-04,-1.133014481840289352e-03,6.643727454355732170e-03,-4.711505619808645760e-03,2.138581865045802860e-03 
5.596260954796879183e-03,3.927124300417863048e-03,4.099610443070862177e-03,9.902354266730684779e-03,2.120895478509628434e-03,-4.305960409989074110e-03,-5.930986582244520029e-03,-5.864977195271951771e-03,-2.657539238970393815e-03,3.916119494794902123e-03,-6.213275546079677011e-04,5.768155141731623470e-03,-4.269354858646832833e-03,3.701877265474537763e-04,-1.125117819436876557e-03,8.211251885827157643e-05,-6.886132417098959208e-03,-5.388921973556060931e-03,1.248232577616239071e-03,6.758790753736265006e-03,6.981950907839558525e-05,-8.828641047007741369e-03,7.987918077390146476e-03,3.739812771713670308e-03,-7.747519942347568488e-03,4.433850750933731460e-03,5.195744552216976543e-03,2.223156054307662104e-03,-1.264806530163614879e-03,4.871665657161746744e-03,-2.759299465415429066e-03,-5.516836265150318157e-03,-3.558329146172713089e-03,-5.698978165480458706e-03,5.781839566713376302e-03,-4.795833271971861912e-03,3.711654450100040567e-03,-1.812170286818229423e-03,-1.771224493616815887e-03,2.844661621314239922e-04,1.934338122374186595e-03,-2.403013775344472504e-03,1.485500886251984402e-04,4.418279288931867797e-03,1.262287708490882673e-04,1.092555004777438603e-02,8.090008001821977560e-03,1.508220069521211924e-03,5.662568905430299081e-03,-5.957473395712576877e-03,7.938458195777717621e-03,-2.674074238612138537e-03,3.186445990284827653e-03,-2.553449878692204671e-04,3.118112830519664466e-03,3.881615216484010557e-03,1.069699613013259942e-02,-6.732376700239422855e-04,-7.414603581589092503e-03,-5.736975261948015423e-03,4.017940626924486509e-03,1.759753164365716535e-03,3.080244194943578418e-03,3.060882331593352181e-03,2.188284328702864382e-03,-3.793374698777343245e-03,3.047778772770790511e-04,7.646721044031226135e-03,7.299311444038055624e-03,1.728404859424081388e-03,5.013468787799283365e-03,1.599376644292143084e-03,3.885702760118269102e-03,5.231991409448899337e-03,1.604557081491359396e-03,2.081339722103006055e-03,-6.744154735807745063e-03,9.902063422179431182e-03,-4.074243536344676099e-03,-8.342351948854047003e-03,-1.542079090075579610e-03,1.035030518082883169e-02,-5.191303111759046005e-03,3.489907139122676479e-03,-5.607942986247347120e-03,-1.319725374701459508e-02,-4.363707698628518407e-03,-4.531039229424405415e-03,4.293288736442068057e-03,1.366195796029989057e-02,-5.682647933569144605e-03,-8.578592533562351755e-04,7.767606383778618943e-03,4.762700433064723758e-03,5.015545449472065195e-03,-7.295663049127815072e-04,5.025202992314067169e-03,5.826379175005116755e-03,1.649924667676943560e-03,4.320538896296697087e-03,-2.877559736081507398e-03,2.317868878266020512e-03,1.760089557307900404e-03,-9.223063342500639056e-03,-2.940886027705586717e-03,7.945744595113605341e-03,-6.342791149890332318e-03,-9.736891733750557831e-03,3.635193800656364663e-03,-6.584623428006189903e-03,2.379669493257155228e-03,-1.376522533029554338e-03,-1.543107109805208705e-03,-4.054169420985130265e-03,-9.848100089721614075e-03,-2.445139141842575048e-04,-4.966059478874441935e-03,-1.693967964672147501e-03,-3.375772660393787372e-03,3.341941485458473950e-05,-1.023747145578049272e-04,2.776988147423355407e-03,5.876134760626181702e-03,3.133763115565642472e-03,-1.850866254989569880e-03,-7.319722438821789530e-03,-1.535541989726539299e-03,9.163619686126915284e-03,4.166270416982991451e-04,7.548837317991933214e-03,1.563274264343125536e-04,-4.591143839143056044e-03,1.114012774897048654e-04,7.261815342050175100e-03,1.020040787353244721e-03,4.097145138596957757e-04,-6.189503830696197438e-03,4.172496402098250026e-03,4.021928844673424222e-03,-3.460305557287423565
e-03,-2.482452613404122644e-03,5.558287137518492550e-03,6.639936810374351753e-03,9.460967792604803653e-03,7.613460484350436340e-03,5.318910215361800919e-03,6.183541757275461820e-03,1.073922586312094953e-02,6.839208199937167783e-03,3.431543971580338324e-03,-8.274831394261562487e-03,-1.432301422329469113e-03,5.209750782207282424e-03,1.799903454424016153e-04,-1.465375074093121059e-03,-5.147592852123265492e-03,9.188065254568546615e-04,-1.375657803737934982e-02,-8.257192692125778352e-04,6.085931040331289731e-05,-4.316306665567464645e-03,-5.578061913326591049e-03,2.002368312752310082e-04,1.020005816033887859e-03,2.264851825656199677e-04,-1.853361703906451557e-03,2.659219814624384434e-03,-3.914176271974879363e-03,3.829815352482682234e-03,7.655815482424959198e-03,2.164988144907077725e-03,2.245664230903689045e-03,-3.232129902576793504e-03,-3.786277432016810999e-03,-8.182785519450088557e-03,-2.147813171782151156e-03,-1.772553980915569753e-03,5.574896260011057147e-03,-1.064000812727669901e-02,6.059065646115714514e-03,1.047139674568976081e-03,8.958753068098016664e-03,-9.932194524480192008e-03,4.151222643885447224e-04,4.302975943417690547e-04,-2.850616426118263332e-03,-2.840492620634805718e-03,2.782721887403867989e-03,7.978649422677824799e-04,1.438533952754397424e-02,-5.521622553366760736e-03,-9.645246547094992037e-04,-3.604420104643451563e-03,-3.547036008363890682e-03,1.473915452901269994e-03,6.711990603190184576e-03,-2.508328948329032696e-03,1.920625366820579143e-03,5.049513764688770903e-03,4.635962577187411997e-03,4.671008935140877859e-03,3.972271837818544885e-03,-5.755203157227236840e-03,-1.354645968094636598e-03,2.815866291901829733e-03,6.234511409485367361e-03,1.955907918265505953e-03,-1.399049701866637591e-03,2.832680320192956964e-03,3.908459143983062864e-03,5.835582833578910507e-03,-5.850392175594142791e-03,9.324987585638484832e-04,1.308143319722003322e-03,4.570939241937044609e-03,5.141342797444717165e-03,-1.198520388883277587e-03,-1.424139114263222629e-03,2.903235189373262835e-03,4.134780721280702817e-04,-2.965432707135533773e-03,6.541710281894186222e-04,2.730315219507745833e-03,-1.075191903373618396e-03,6.096419983904341304e-03,4.241290877042669383e-03,8.948131152822065776e-04,1.418343502954561020e-02,-7.600563401792948155e-03,8.588911692801504821e-03,-7.276542526840485401e-05,-2.615692390602072578e-03,4.886416652423274457e-03,8.979719964428209758e-03,-6.137429332290765885e-03,-2.722323275574198460e-03,-1.320552765449148520e-02,-8.350575623265287309e-03,-4.330968650584218586e-03,1.134292688520689533e-03,6.063470252962929741e-03,1.163415127335267439e-03,-1.160844321401134725e-03,1.135656533590970966e-03,-4.570711982970821798e-03,-1.012055747469381081e-03,-3.488444091224690829e-03,-2.788055877722400299e-04,2.013535933894698444e-03,-2.732398369847813431e-03,1.759650703567261552e-04,-2.018584992107609090e-03,5.026993828048619689e-03,8.460864105518280109e-03,-2.314531587343459339e-03,-1.059463602720710435e-03,5.214010321430059477e-03,-1.517322202533442910e-03,-7.167057435906240906e-03,-1.995394148840116174e-03,1.302029382293100291e-03,2.004418212892389480e-03,1.885192995747939342e-03,4.735640312655232595e-03,-3.837896864953453408e-03,-9.258585995486949549e-03,-4.617675426032002116e-03,8.884120163973252202e-03,3.126216001391890747e-03,-1.961176801266590209e-03,-4.676604394091067943e-03,3.026731401212181052e-03,7.065298379506435196e-03,5.170625375778555716e-03,-8.077707395306464372e-04,-3.167957057617461041e-03,-1.233450049796904749e-03,-7.009565729529425304e-03,-1.592376067751065122e-03,-1.057726478
722686861e-03,7.515785112375442806e-03,1.783541482339759095e-03,-2.337428539123180462e-03,1.442858040408679862e-03,-5.573331479982106956e-03,2.804487423492871452e-03,3.520471342484264208e-04,6.088187951381543971e-03,-8.593675330945167376e-03,5.138010960792162990e-03,6.197235023887044843e-03,8.954122804094738833e-03,-2.200046716017089576e-03,4.647660507306831598e-03,2.918586031716498708e-03,-5.229277757871673461e-03,-9.282529320981627863e-03,-5.119524229423354743e-03,-8.974069662599254860e-04,-3.931941659435797196e-03,-9.018150791241563756e-04,-8.004952892520410951e-03,3.624595813267245635e-03,7.878149965957439993e-03,-3.271347980749926988e-03,-8.696827169598374046e-04,-6.821257881317411090e-05,3.639713320146126339e-03,1.327897595286565459e-03,-4.215302348098380614e-03,5.734373512408138564e-03,-8.341294895129840875e-03,9.524273869784946958e-03,-6.682097673798692962e-04,-4.670113115320223898e-03,6.867610628365116571e-03,-6.310887491115245093e-03,5.057007883962122332e-04,4.966448783535168214e-03,7.239463529550580270e-04,3.011048068496115140e-03,-3.211163685838826329e-04,3.367281847998791810e-04,2.988399510564846567e-03,6.211069621893450417e-03,-2.304567550842704770e-03,-1.170852663743652492e-03,-2.273093508611516954e-03,-2.042332943397570568e-03,-3.287353026628393737e-03,-4.566518677676530656e-03,2.046863128796397487e-03,5.668324598474495848e-04,-9.885202925814214406e-03,-4.141997838254314612e-03,5.765509958055926518e-03,-5.971037991822555728e-03,-9.766080361797557091e-03,-4.606669804474725419e-03,2.777893097654760175e-03,-1.837628218527397571e-03,5.211245911306446738e-03,-3.459648115024794874e-03,6.020361042156371391e-03,-4.498194277094940373e-03,-3.526686302879988247e-03,1.132982212771495995e-03,-4.834210161890980899e-03,4.624263764356776642e-03,-4.616093235983479218e-03,6.961350490646856307e-03,9.882830315246901500e-04,-5.690499178467985086e-03,-4.884786624071723454e-03,2.376000708993020617e-03,5.107077526149293636e-04,-1.043599945361898201e-04,3.181571014039459491e-03,-3.776605521248531036e-03,-1.594884790867570406e-03,2.273600882348477022e-03,-7.615777854722489623e-03,-4.257902336166401180e-03,-2.208062656837454871e-03,-2.072385267495648821e-05,4.720523499806549353e-03,6.746054750065324764e-04,8.216762348661505334e-04,8.016691570907538960e-04,-1.889918692303991241e-03,-8.327330402209750723e-03,-4.513186415765970023e-03,-2.499564272274320403e-03,-6.885321503289924296e-03,1.723804437183075770e-03,1.668402401993460907e-03,4.439258051272891061e-03,-7.852915117331779951e-04,-1.690082718956690926e-03,-2.762916430600796582e-03,2.564676790779073191e-04,-4.082272601331990458e-04,1.019238661327794178e-02,-1.572176584670626908e-04,4.121565836471469124e-03,-2.930529165556397010e-03,1.139906744618848430e-02,2.512732486181491493e-03,6.048076147277696435e-03,7.938515254073550548e-03,2.026492491827629368e-03,2.154167429193077580e-03,-1.381099819855640190e-03,-9.770722954446170395e-05,-3.621758706603472443e-03,5.145632767708148165e-04,2.167269671946069846e-03,4.055304998847748463e-03,-4.741687602085555529e-03,3.662516006552029094e-03 
3.772100069730747888e-04,-8.434136970900875385e-03,3.962783692363577165e-03,1.680729143771008934e-03,-1.083412349319326333e-02,-1.259486742255597029e-03,4.836600638187632875e-03,7.256346593913956533e-03,5.958869245766712855e-03,8.268399566911089314e-04,4.980552535591429783e-03,-8.901888518207183923e-03,8.923512683286977110e-03,5.621825460581216828e-03,4.316366737332037740e-05,2.190898567295251616e-04,-8.745206284312651707e-03,-2.219378420598901343e-03,-9.160420824923755020e-04,-1.307601196885227189e-05,-1.435237948165881761e-03,-1.389133442250680382e-03,1.683932001101082513e-03,4.610565927421600083e-03,-2.770040556980418907e-03,-9.890915895230773750e-04,-9.006425379525190116e-04,8.062196404381253081e-03,-5.592620225136592065e-03,-4.188520674498271736e-03,5.062906535345549838e-03,1.644130140186538316e-03,5.046127494985222321e-05,2.981718843381652665e-03,-1.849180192368609504e-03,-7.014307048439581346e-03,6.502073041392779753e-04,-3.572517674604115939e-03,-2.448935622251762877e-03,1.329166394907164636e-02,-3.447227794260845218e-03,1.030829293667717000e-02,4.964267033959984636e-03,-2.643262464829610522e-03,-1.099110546010842807e-02,2.920139669591131221e-03,1.776211853209587596e-03,1.785053728293189632e-03,-6.725518359598620620e-03,-4.018300479828465846e-03,2.356667604609668110e-03,-6.044744127180482623e-03,-1.943134802590225623e-03,2.904301030893454219e-03,4.447514478758515105e-03,2.235266045211823076e-03,-9.082611003341100817e-03,1.077566509661291236e-02,1.894058926712539068e-03,1.541072850200903054e-03,1.682655797503117279e-02,-1.822843333807922048e-03,-3.046279143905741317e-03,-4.100860454464907122e-03,8.853850168111956562e-03,2.692983653952712650e-03,-7.193147002035160664e-03,-2.023280782692410580e-03,-6.798243442084882411e-03,-2.986379146327889744e-04,-5.994368860223348201e-03,8.761083087408314332e-03,3.370483371724176253e-03,3.315551818791861484e-03,5.666393889930508039e-05,6.102711415871668946e-03,5.964815822638026323e-03,-1.231544015701453807e-03,-2.861106892247609868e-04,3.399517505433273697e-03,-3.650121033765248938e-03,-3.967536809864387164e-03,-9.978431990699107232e-03,-3.453730475297248081e-03,-1.265706021015277965e-03,-5.187697582109669289e-03,-3.261081608007230376e-03,3.992302585491620988e-03,4.689299596697185657e-03,1.753235493337281306e-02,3.889366252062665015e-03,1.113653895684061680e-02,2.927829242250134731e-03,4.405693039562222010e-03,-7.825527905424954286e-03,-6.018667782846270128e-04,5.963805817268387488e-03,9.780255408623803048e-03,-1.895840311870713698e-03,5.493537078803126863e-05,-4.309184618308211680e-03,-5.796076331433832468e-03,-4.921038493607873517e-04,5.738677117809021407e-03,-4.349565576548003465e-03,-1.923064060330263508e-03,1.224474570413341913e-03,-4.883720070046214303e-04,-3.710465335836100713e-03,-2.169873112689212676e-03,1.044463479530891156e-03,-6.384502034698393697e-03,-2.016487509178373910e-03,-6.989784410806144065e-03,-2.206509791238142441e-03,2.839162692365474524e-03,-6.014547263518335053e-03,-1.147675551767589341e-02,-6.316871318977842781e-04,5.698065280745471563e-03,4.191341435700543798e-04,-6.848234261470773554e-04,-5.789245662705348218e-03,7.326948023026388750e-03,5.081593649526125293e-03,-2.295378274044144410e-03,2.925855045189249253e-03,1.254095363845245288e-02,-3.923257647713143735e-04,5.402713321706713312e-03,6.491532455091040751e-03,2.199079985737005961e-03,-5.595741015765783206e-03,6.474518343316643630e-03,-1.325565569189216149e-02,2.502200199759742165e-03,-1.257949823804058696e-02,-9.356094592335745686e-04,1.561182758255602989e-03,-4.0185873
46426877316e-03,-9.503050640741156178e-04,8.672195064023760849e-03,1.658994900449146459e-03,-8.712528904317706421e-03,-3.755748521236046671e-03,2.796420829312693153e-03,-9.055536275699075863e-03,-1.360480708555121543e-04,-4.686703100200394385e-03,-8.209867319326937321e-03,-1.884016061151914988e-03,-9.657477209141278329e-03,-1.902117972086905806e-03,-2.531784690308098908e-03,-1.934267095166634703e-03,-1.904834149693953628e-03,6.380547102011144625e-03,-3.285530057578997824e-04,7.506197903447367308e-03,-8.612437506905329040e-03,-1.189944789240857288e-02,4.188860032641074718e-03,-1.100762478962218766e-02,-2.712265668253100587e-03,1.608410963248775669e-03,-8.714211340674885356e-03,3.142741981839941383e-04,2.160826633804889020e-03,9.069623984094495744e-03,-6.346847194407396510e-03,5.587816272032954112e-03,-8.660682371833846108e-03,3.899266365573141394e-03,-4.848149151371973498e-03,3.668215146607805167e-03,2.542049735646537828e-03,-1.026291702808602036e-03,-3.451640073328285461e-03,-1.169909449763780959e-02,8.785051692532715520e-03,-1.754961978814996481e-03,6.114324684516269162e-03,-7.225696917819763969e-03,4.505208851734827870e-04,5.423607255066975781e-03,-9.409515115862427531e-04,-1.994138829606963755e-03,-2.172328016807012659e-03,3.652928412697797975e-03,-1.283269155951381442e-03,-1.062932270727339965e-02,-5.598695415513341503e-03,4.411615160834293607e-03,3.615135871444648003e-03,5.181763343948165776e-03,1.610043812417959316e-03,-9.091206982124939648e-03,-3.279118886378037511e-03,-5.724597199223681494e-03,-8.984337165021822821e-03,8.452552529199101536e-03,-4.967390368040623498e-03,-1.285056918518125190e-03,-4.796757277012632902e-03,-2.324102620388639965e-03,1.474568527547592085e-03,7.661786386749206302e-04,-6.224343090718294225e-03,5.149924989082794134e-03,1.848165538170541472e-03,4.604784226415954348e-03,-7.218794858757406931e-03,5.992893012370401583e-03,-6.400204561581681814e-03,-2.620590802279806290e-03,-3.618013770274784713e-03,1.405787642438164134e-02,7.837932773291412386e-03,-9.277014832061517002e-03,5.769038681523959433e-03,-4.699666516227155554e-03,-1.469829834475187922e-03,-2.578119551241684054e-03,5.463432641194317530e-04,-4.839136448487771185e-03,-3.269410476214899049e-03,4.796296470381794627e-04,-3.673313276638024653e-03,3.621002350683181180e-03,-6.092649145128616918e-03,1.635237873995636386e-03,-3.638947221340419098e-03,-5.665089882391674374e-04,-8.195017580498878754e-04,-4.472108569107616748e-03,-3.397060757445675983e-03,2.571331463977665367e-03,6.158497364845897173e-03,-1.281579004903041392e-02,-6.790795881538882334e-03,1.690617685145778815e-03,-2.494018350360155140e-03,-1.039497856680034367e-02,5.855851080406170028e-03,1.233742370401011207e-03,1.431147988030297136e-03,3.313849730660981810e-04,6.072518592004098360e-03,2.580882226549025254e-03,-2.758028008082086258e-03,9.184170526551106717e-04,-2.306495528988739517e-03,7.532545252242320646e-03,-4.440218393897059183e-03,2.600280103201254786e-03,-9.475534693973613250e-04,5.799988758506958476e-03,8.895399778098222153e-04,1.843531508132559933e-03,1.088510148130049024e-02,-2.450713396537028670e-03,1.577095862663118603e-02,2.533806544556127397e-04,2.578900552070570538e-03,-3.676144411529445183e-03,1.926055564821043970e-03,-1.210967104506253077e-02,-7.712355310388601705e-03,8.613863427133607378e-03,-2.882032966476958877e-03,1.515997272991218654e-03,-2.590678378350226487e-03,6.995966733472700275e-03,3.679954160839756915e-03,-4.165947319350431222e-03,8.151628165521496855e-03,-3.777610513002830359e-04,-5.616480292686531345e-03,7.77765211359
1092611e-03,2.042180163600079874e-03,4.553268798009206531e-03,2.972298226635169353e-03,3.540692329677014327e-04,3.431114804812501230e-03,-6.851538550877703162e-04,-3.320008192996774105e-03,1.012309398886685700e-03,3.997650966791074442e-03,4.153986431530670709e-03,-2.956035224861758206e-03,7.150049487901375886e-03,-2.004592323736896151e-03,1.886381550619330204e-03,5.816009000897371176e-03,-4.235249174950798598e-03,6.357154436328661846e-03,1.936017482057817221e-03,-5.141411106763074321e-03,6.340145147377649303e-03,7.938544763958402251e-03,-3.118704428198648768e-03,4.348714411451418127e-03,6.952087731670654643e-03,-3.970927824360754102e-03,-1.201439867378731449e-03,-4.027011318967444950e-03,5.980861570733063751e-03,2.655613686822737816e-03,-3.178039632944148606e-03,-3.795365635476040042e-03,-4.111829418522052089e-03,-3.503199031904675759e-03,1.928739696981658864e-03,1.995607621352465076e-05,3.511780213233313321e-03,1.593328086891972542e-03,6.609981354430757329e-03,2.930565600603861343e-03,-2.255367567471309835e-03,2.086229613647235231e-03,-4.319181183878652601e-04,-9.753925630608475256e-03,1.133070177528790841e-03,4.594755758609301959e-03,5.841182121566470750e-03,-1.700517874320971873e-02,6.550675094543649546e-03,1.049063502426457635e-02,-1.467067226037142250e-04,-1.513585756031759783e-03,1.210166742058514135e-02,4.575270805017024709e-04,-1.947990886462171818e-03,-3.883764606358263132e-03,-1.171593766078624652e-03,-2.358525042169731453e-03,-1.278374819068250597e-03,-1.064037682211986871e-02,9.796545273722149312e-06,1.020782095210480411e-02,1.180457805513105904e-02,-3.761740535333645864e-03,2.729833820804888986e-03,-5.301464580642631721e-04,1.681785157367730703e-03,4.327538121830350193e-03,-6.258868051164040783e-03,-1.136027203361093980e-03,1.637298739648664070e-03,5.194432870932636469e-03,-2.511762354956461642e-03,3.342167378734883787e-03,-5.843405218617311620e-04,1.725669967115620648e-03,1.567916507765737370e-03,1.168184788031474772e-03,-1.400059883813874239e-03,9.338505239448950668e-03,2.971212290452969387e-03,5.743494508786051080e-04,-1.139170922531589292e-04,-5.133602430814240315e-03,-2.289336437257405519e-03,6.871879483388599122e-03,-8.254770345488024290e-04,-5.137483809607068866e-03,8.932888423138596963e-05,-5.469335321085673322e-03,4.832268450193086609e-03,-4.713944761272615119e-04,5.646833464359155702e-03,-1.629282816294559417e-03,1.825404486942769616e-03,2.856368584360910977e-03,-7.571918056184095969e-03,-1.746754919089488212e-03,9.207464977163118056e-04,-4.828230019929567740e-03,-5.016587901607947458e-03,-1.807986283515190199e-03,-5.119716588685512514e-03,2.840176765763891779e-03,8.799472870546843836e-03,5.742640441784880624e-03,1.104422568710243871e-02,7.711860869418107022e-03,-4.788598949572288668e-03,5.483776592135834391e-03,1.161424566268210584e-03,4.095559753211802960e-04,-5.247659183886160643e-03,-3.625042194828530457e-04,6.677458389924030829e-04,3.273355315976410329e-03,3.741767924362423420e-03,3.329223637292015245e-03,-9.964235654870277903e-03,2.668786925355038503e-03,7.543808733931308894e-03,-3.377021908457704044e-03 
[numeric test-data payload from the archive: matrix rows of comma-separated double-precision values (magnitudes ~1e-2 to 1e-5), hard-wrapped mid-number by extraction; raw values omitted]
4583497e-04,2.633532568177401889e-03,8.113253188675831644e-04,-1.557684200667063630e-03,-4.776351264274012257e-03,-7.898070916622007553e-03,-4.773128621958363463e-04,-4.365939351322826838e-03,-2.371701306482038111e-05,9.995668557411969968e-03,6.094452019400984494e-03,7.753424218164157175e-04,-2.024127270536183430e-03,-9.184002964684850586e-04,2.966438419573110691e-03,-1.576512674069313570e-03,2.455802786859491611e-03,-1.291958080285516425e-02,-8.776597129666237080e-04,-1.301544569351757198e-02,-4.318434931478770319e-03,5.378562499345339309e-03,1.493472247793103990e-03,-2.662342641559550110e-03,9.434910183333895719e-04,2.076412369547341649e-03,-4.366926937197608723e-03,2.182353368191303124e-03,6.035023385110918452e-03,-9.440392645541086439e-03,-2.733217619769437077e-04,3.624566192220114898e-03,9.410521578488132213e-03,-1.816636031540604709e-03,-2.375771692036499332e-03,1.889560972912271994e-03,4.290442653337574359e-03,-7.679763604920362106e-03,-6.037906675508807747e-03,9.699800498350650418e-03,-1.900638413084770450e-03,-2.944356711824893270e-03,3.702679108734035184e-03,-8.089251381683515950e-04,-8.575213801353977899e-03,5.165903520048565310e-03,5.645200940570153404e-03,-2.505812215444238461e-03,-7.802339064093164953e-04,1.603275817270168133e-03,9.774040666324606934e-04,-2.427251383080244670e-03,9.257619601680080242e-04,8.943840046875631625e-03,-6.873157677063030171e-03,5.541275589973364453e-03,8.144024283063476879e-03,-4.254570386490571371e-03,6.444792747067983182e-04,-4.179364084988061376e-03,-5.505090745431703153e-03,-5.289274648384036624e-03,-1.471369209242780586e-03,7.214133426592439502e-03,1.080774751822128482e-02,5.718000260583111288e-04,-6.155603745502372017e-04,-3.447173579137466273e-03,-2.789057063713650409e-03,-5.364310304964277527e-03,2.198734458810988265e-03,1.350214277377575524e-03,-4.417161358305640544e-03,-2.946482799517047778e-04,4.924310634336510721e-03,9.628278478491143506e-04,-2.512257484485650328e-03,-1.406122025877517894e-03,5.023605934903255937e-03,-1.273664839039970187e-03,-2.578395690045163190e-03,2.549510969661958424e-03,3.353628583267482480e-03,3.187106632878571044e-03,-6.627600902373344434e-03,-5.933562784737409900e-04,-2.865378083692490460e-03,-1.029609199746560995e-03,5.900702743372818887e-03,2.296244179270755595e-03,1.042278700255579865e-02,-9.832998594883307457e-04,-8.153911269419207228e-03,-5.581924763215406066e-03,2.518700174136505581e-04,1.444388768126654522e-03,-5.597161954470004572e-03,3.708750357130698953e-03,-1.484440975000867087e-02,-6.400088090820753653e-03,-1.660016442079501314e-03,1.321731633150691415e-03,2.784057287055846045e-03,1.509885511520103491e-03,-1.652708749311255750e-03,-2.998884978782797705e-03,3.362135631249134598e-03,-1.384460990212878608e-03,5.447306444451262102e-03,1.697169093554630730e-03,-6.822620352420049467e-03,-2.414549129326514550e-03,-7.753323663378180845e-03,-6.614345244855972944e-03,-3.655534224788479711e-03,-7.725845455345522854e-03,-2.909450078824361049e-04,1.305079032812091156e-03,-3.705015718494224491e-03,1.999518997165103027e-04,-5.492507750597934327e-05,-3.182711598646862077e-03,1.423369650019339419e-03,-6.371219398614394978e-03,6.803530916968335805e-03,-2.367111910062325280e-03,-1.394101608328100587e-03,-5.378312328874232211e-04,6.395604056754354734e-03,9.768005388630590993e-04,-4.620986914191677758e-03,-1.069188182491776114e-02,9.836215155840418844e-04,1.480361126905983035e-03,-2.051273180565402632e-03,-1.513729585956316136e-03,-2.761989661075534442e-03,-2.175471385886920076e-03,2.876656859442345421e-03,-6.189642368608
644883e-03,-1.471081891444238473e-03,-3.118021936910281896e-03,2.209416084051069561e-03,-7.767764010601946757e-04,1.030593276184838154e-02,-2.419453930637737241e-03,2.365661514551889184e-03,2.770792393026912884e-03,-3.864631526870997667e-03,-8.529180433529813532e-03,6.499338202505662750e-03,7.043476000455539318e-05,-3.826049960291183147e-03,4.088012809098320703e-03,-6.534827008744921450e-03,1.874469291268805434e-03,6.123376748503852468e-03,3.371313100750168972e-03,-7.049206344357918184e-04,-3.156459150963534990e-03,-2.350577459305413976e-03,-4.929505665604791594e-03,-3.873149383275730157e-04,5.879140845559653258e-03,-2.233058257048079441e-03,-1.081998967250469854e-03,6.644021627211303682e-03,-1.663426545896820336e-03,3.972936095597067417e-03,9.898433467521231682e-03,-7.825267823744010186e-04,-1.218100700372675566e-02,8.913559314940970590e-03,4.940184645997357845e-04,-2.774944050809429255e-03,1.071101297994766317e-03,-4.302830861146005742e-03,2.896911377260123050e-04,-2.006885971695053766e-03,1.076101681078675890e-02,8.690674401132815127e-03,-7.073061823906625495e-03,-2.114776011869718385e-03,3.848470468261581956e-03,-1.222247259549993567e-03,-9.073723247851711929e-03,1.113699742618511637e-04,-1.455549397788369727e-03,-7.998488703913765596e-03,3.363066978556671083e-03,-2.014470601852216586e-04,-7.159682611688960446e-03,4.141400902501562141e-03,5.263763675973438978e-03,-4.366369636416640352e-04,9.832874025761936115e-03,-5.230365114092297082e-03,4.171508367346999160e-03,6.998764065069145585e-03,1.122932888021862955e-03,-1.076115028889484222e-03,6.195579154628741772e-03,1.586583482508765281e-03,-2.818061541987830335e-04,1.070652288306763941e-03,1.789608178709996495e-03,-1.051021136462764122e-03,-2.174462181839310914e-04,4.125430747439856778e-03,-9.314392304596188013e-03,2.181944582807098919e-03,6.151860331593130872e-03,4.714007626044482840e-03,-1.373202738349302938e-03,-2.148814662802190061e-03,1.176952116710394810e-03,-1.749756116983271041e-03,3.646209195463273687e-03,-5.752159411722674795e-03,5.436144017162179189e-03,1.530447449478402732e-03,-6.233195221765432158e-04,-3.784506444821145177e-03,-2.350463621062210892e-03,9.726712715331429712e-03,-1.698556258074967800e-03,2.741087968229887883e-03,1.391709771700629789e-02,-2.358398773676629537e-03,1.904247845540680628e-03,-3.804835550158689340e-03,7.807117281940326474e-04,-2.489167850678387136e-03,-5.501931315228291222e-04,-3.352944513981344528e-03,-6.848089407464407613e-03,6.747776485919803127e-03,1.433955511652989720e-03,7.917494571587534863e-03,-5.758067125584532750e-03,2.945304360908700646e-03,-2.775550732731620496e-03,-5.570429524064305846e-03,1.957818143025609643e-03,-5.284134665366428847e-03,6.753030367694060239e-04,-2.526968014002341049e-03,5.832114906207647555e-04,-1.003095717327892253e-02,6.525633088295097230e-03,-1.846289683895886409e-03,-9.721821873250297659e-04,-6.486037187188027717e-04,-2.785707860861340660e-03,-1.351936560807535763e-02,5.230988442190255550e-03,8.299067203624313840e-03,-1.072046627767340608e-03,7.123739114643534709e-03,2.322649195647650405e-03,6.805929583124388678e-03 
-5.173481700557678448e-03,-1.028609702554938825e-02,2.960064963619541138e-04,6.508039609890392714e-03,1.073370777191402304e-03,-3.724221703452544965e-05,6.304207208973325663e-03,2.513884307765643349e-03,5.598836684465352716e-03,-8.048511611760959979e-04,-4.473798110100263617e-04,-1.340167768899572177e-04,-6.928534740922883892e-03,9.194862440803175472e-04,-1.012568482866351045e-02,-4.758503687956149378e-03,-2.120021655119648778e-03,-7.203169387573692475e-03,1.450153952218247427e-03,-2.095651927193065405e-03,-3.225933732724695861e-03,-4.421525705507811828e-03,-1.008769545081051634e-02,5.542793654491406236e-03,4.407692263838839178e-03,-7.131567812266219678e-04,-2.665655732266531980e-03,-3.342998562674193933e-03,1.968769867341779983e-03,-1.446888424265537756e-03,5.345213664153596403e-03,-7.047439105287760241e-03,-5.420326689858538834e-03,4.096155686162531936e-03,-6.786924227275327633e-03,-3.734993294872233222e-03,6.237158789209238351e-04,-4.402029238152602685e-03,1.622755943891883088e-03,-9.597920113893289350e-04,-4.557088732271168986e-04,-5.489803176035117603e-03,7.327768642546770260e-03,2.194560766479931955e-03,-4.789999059556702098e-03,8.089305324323000507e-03,3.923081342047517184e-03,1.272761635880130147e-04,1.709808671656444816e-03,-7.385886795032745727e-03,7.037798123516313120e-03,3.663527650775806805e-04,5.306335792976023952e-03,-2.764187187728798633e-03,-2.784163900688440586e-03,2.610580017615899990e-03,1.602510530368229718e-03,3.734002607822421455e-03,1.062305963859592551e-03,-3.228813620128553191e-03,5.976368754422435245e-03,3.340691306310600815e-03,1.151552370980891277e-02,-1.536306543454628011e-04,3.335153469203242654e-03,8.699260806704736974e-04,-5.098915785652584066e-05,4.178823229927591636e-03,5.589446528390399661e-03,4.668143404692948022e-03,-1.986010968682841698e-03,1.756231155013313625e-03,-2.002919127157277903e-03,-2.592059097648514564e-03,1.112420572680383399e-03,-5.746053018550890089e-03,-2.356278782306474220e-03,3.474897075710735978e-04,-5.508575224929967594e-04,-9.282002588255283757e-05,-7.525947812461093907e-03,-2.141594284074636997e-04,6.132341754605036797e-03,-6.325305435892740989e-03,-5.642873903395085808e-03,-3.783220444331247150e-04,-1.019266966356288488e-02,3.019382818781890087e-03,3.892238972011972335e-03,-7.512734877496905946e-03,6.913445420543717522e-04,1.874631994824240431e-03,2.821307762204677740e-03,5.034544112272201256e-03,4.476770467113489848e-04,3.928639441444308454e-03,-5.535768230487903875e-03,-5.836733684745146200e-03,-2.275627860743424571e-03,6.308959282024901097e-03,-1.800638792871556531e-06,2.512757460187745994e-03,-3.846051499678795806e-03,4.233621422224386442e-03,5.775926133315808716e-03,-8.413648694476409890e-04,4.649821111206616127e-03,-3.295130718258750934e-03,6.572691473080409129e-04,-1.425157851171982319e-03,-2.929471321796252737e-03,-1.272737271226374390e-03,7.555746722491124319e-06,-1.114989081299930750e-04,2.876090173885525631e-06,1.217532011517202892e-03,5.440455930387315658e-04,-6.354370187538055208e-03,3.129444539881431984e-03,3.932480570695325549e-03,4.957697130607353550e-03,-4.023259120229221927e-03,-7.045831888719501432e-04,8.219788972592304661e-04,3.072057234113989913e-04,-8.569785889441539420e-03,-3.300044577061402810e-03,3.648996428619101853e-03,-2.985292302007983792e-03,-4.586282436691018968e-03,-6.751686405068307688e-03,2.797529438085765569e-03,3.094419792002640283e-03,1.607158963486331540e-03,1.169048415800958383e-02,7.034083312211852727e-03,4.151578600675898297e-03,-4.136592070586862444e-03,-6.039306275389373993e-04,7.756005071
499969851e-03,4.998486626804014843e-03,4.552728467311269352e-03,-7.503520690184251399e-03,-8.156365095394810596e-03,3.682046358215526703e-03,-5.775192418959221655e-03,7.774532349225551919e-03,-9.373961925885517226e-04,-5.960731424744948777e-03,-9.889548435582304342e-04,-4.907052804924848417e-03,9.178674495535913899e-03,-3.585769207978605627e-03,3.457748637112022932e-03,2.211491344579849787e-03,-3.111053406643296635e-03,5.961997981563518095e-03,3.740711699680471410e-04,7.123980761257028144e-03,-2.947114409759810636e-04,7.950298285952873967e-03,-3.611587557977308108e-04,7.464623547984770499e-03,-1.027169318341937358e-02,3.446745926894666245e-03,-1.088926378502026553e-02,-4.934623387916021314e-03,1.680755712426947870e-03,6.922676443454828417e-03,2.220294403514188469e-03,5.144916414415392656e-04,-2.791392090304971145e-03,4.055919708993217278e-03,-1.106718492309003224e-02,-3.787343242401934769e-03,6.167699559105638426e-05,5.449785199127476186e-04,7.227564758748303893e-04,-1.514343171716300276e-03,7.055204903672037460e-05,-3.432698626931626924e-03,-1.124419704354769371e-03,-2.189511164181986904e-03,2.213534496751049657e-03,-2.648374487450386693e-03,-6.727583834212865106e-03,3.213035440029460722e-03,6.602412574936444584e-04,-1.935976922316922332e-03,3.014309706967686821e-03,-1.462113282396918261e-03,5.918486200372211591e-03,2.367259460986452166e-03,4.271464159186832109e-03,5.686132447282212996e-03,-1.942144274805487859e-03,-6.016530148905589216e-03,-1.895468448421569821e-03,4.571032821708260509e-03,3.814797419459582065e-03,1.161774664813914303e-02,9.006362954380169039e-03,-5.583306815456343629e-03,1.470472818376105609e-03,-3.435795960198876307e-03,3.998437633840222147e-03,-5.332871637055829409e-03,-7.699503401085930863e-03,-3.580926323145963014e-03,-5.390853095750520498e-03,1.074641509071792844e-02,6.063773115394467152e-03,8.925418097145384685e-03,-4.765887654882445049e-03,5.026314864472529627e-03,2.459185122228747539e-03,1.662185444432085586e-03,-5.104831520457211828e-03,-1.907328029123947629e-03,-6.396883334451894040e-03,-4.273578453753993679e-03,-5.826230316098008179e-04,6.847115229223797901e-03,-3.239646228081942327e-03,-4.232427768593611909e-03,1.435881116041688157e-03,2.385788141951828562e-03,-3.306740038312141507e-03,1.506144660866978822e-03,-1.098733076139775649e-03,1.721706860017261764e-03,-4.022751146696727440e-03,-2.763606697990780240e-03,2.936421027814221669e-03,7.281046201262551115e-03,-8.173398067864918667e-03,-2.655152023859388023e-03,-6.170643064047071427e-03,7.775987782810624312e-03,3.882341378693646844e-03,9.907274543238088829e-04,3.809243096486438981e-03,-3.185531394957310473e-03,5.890284760632856151e-03,1.482729485976160260e-03,-2.602337443843436753e-04,-3.481334032716294508e-03,3.943210224175630260e-03,9.472872336728946178e-04,1.529554907173955340e-03,-1.257415796632191211e-03,-6.296237801942897606e-04,-6.048170555422231927e-03,-2.020913619769818090e-04,-6.815889139607787459e-03,-2.349057280515799272e-03,1.402711414220712653e-02,-3.743661300534157363e-04,1.814400062282174727e-03,2.599400181813183407e-03,-2.224204540539420107e-03,2.088289944325676800e-03,-4.779089222644685928e-03,3.634803919445329153e-03,4.707310153259552758e-03,1.874920281560396064e-03,-5.702444219204464565e-03,9.650988172361639819e-03,-7.346387391908954304e-03,2.855593782708631435e-03,3.643999044162624275e-03,-4.418285882846787957e-03,-8.279546009666688064e-03,-2.933128731477089527e-03,-4.469343889636093713e-03,-1.263318489798368567e-03,-5.758688868138005582e-03,-4.417900583305609322e-03,4.094747339139828343e
-03,4.434871280710228451e-03,-1.431305731196636779e-03,-6.284431972172867427e-03,1.746599324450093249e-03,-8.124704989680290798e-03,-6.015721657275809216e-03,9.698639402301166557e-03,-2.263158047296754923e-04,-3.117231490068749292e-03,-5.623735249443552985e-03,-1.866840340606982758e-03,-7.536688207102371431e-03,3.028267571059853770e-03,-2.243579380078382715e-03,3.825246512193159441e-03,2.610219190925551838e-03,-2.047741324686437516e-03,-5.917523353397359173e-03,4.589534980464542169e-03,-8.742664816627252616e-03,3.746944864135647903e-03,-5.577148320914240148e-03,2.191137550547147487e-03,-7.224129534604759907e-03,-3.731184611697096424e-03,-2.578297453241513125e-03,-1.112881652632735207e-03,-1.558666342031502839e-04,4.570244081975737178e-03,9.775807536264471270e-03,5.374337298327095530e-03,-6.858659914406870992e-03,-1.418030650004765102e-03,1.325605592322413873e-03,3.806777430523047238e-03,-7.877262781149871893e-04,3.199058210872847679e-03,3.472954713320306353e-03,-2.754328683526053364e-03,-8.183624494121781489e-04,3.491824269338261039e-03,6.329998714125028951e-03,3.049902518107492841e-03,-1.887595406841079864e-04,1.321221308227138718e-03,-4.173110338099182172e-03,4.434971742246045234e-04,1.236764074485459906e-03,1.783154973060107869e-04,-3.860986952817293660e-03,-1.900961357601063618e-03,2.429336237482546525e-04,6.123784326514396421e-03,-7.149526492370572930e-03,3.630156373782689313e-04,3.906876775814619243e-03,2.733362219349355551e-03,-4.382174038856211616e-03,9.220596852115106948e-03,2.605005241827743241e-03,-1.066281257842276869e-02,5.844898287559211966e-03,2.892034739359443819e-03,-5.490095299936782400e-03,-1.130876860205109677e-03,1.125598626917774861e-03,-2.831654385537217891e-03,-4.375299568290857109e-03,-2.345484808478405861e-03,6.920350756641200675e-03,5.761055789679571035e-03,-4.199278147868725809e-03,-2.400734942788470411e-03,5.227208054563882824e-04,-1.462507210516583809e-03,1.029170301695007082e-02,3.251645409218671592e-03,-2.252069003632428765e-03,-6.929612881382714133e-03,-1.403792342917871243e-03,-7.706657507619649877e-03,2.552724651568477371e-04,5.883254396505372026e-03,-1.228901158224185398e-03,1.301477877018208593e-03,-4.208286288156216221e-03,7.264701814785670501e-03,-4.792189134543270272e-03,3.283207528110575462e-03,-2.127775829387380379e-04,-2.940575507459314546e-03,-7.749494314040946893e-03,1.008231628024163981e-03,-1.411739368952143485e-04,-4.944456784281704535e-03,7.241409630934325485e-03,4.105793879633499663e-03,-1.298909107528532260e-02,4.099926205364531201e-03,-3.223478297076977383e-04,2.180520543586394674e-03,1.539656977813686064e-03,-1.708544972803148140e-03,8.625905584736305934e-03,1.802949569763817161e-03,-1.025337356486432337e-02,1.040942339351335309e-02,-9.085530234410249539e-03,5.850287174245875563e-03,-1.060194113681334725e-02,6.003586103644424632e-03,1.096871802047548125e-02,9.208888613232903836e-03,-1.847870444794813116e-03,5.008282084540797291e-03,-3.471088342914354676e-03,6.223429868275802759e-03,-1.997584566247788841e-03,1.472274392851410669e-03,-1.915428355024605253e-03,3.778219047168963809e-03 
-7.954126157085596865e-03,1.751966073241798243e-03,4.440576158767957880e-03,-6.204494768828415063e-04,-4.348319336389501986e-03,-1.287051528203911376e-03,5.905447026541176458e-04,1.970580084208451831e-03,-8.532667748152217334e-04,-3.898766620270350831e-03,-4.552897573635293106e-03,4.418156988632271874e-03,-2.346723669293199633e-03,6.636604841341885749e-05,-3.190102744629935618e-03,-8.778648753670060395e-03,5.573793782419943568e-04,-3.851511492901319323e-03,-1.104822936035719299e-03,3.599624231744436628e-03,-1.185891272979018411e-03,5.257589147673514055e-03,-5.207320722737417747e-04,-1.810331831358538745e-03,-2.618913981662198635e-03,1.690134793894342881e-03,-7.545592003451271658e-03,2.556533657564676477e-04,-1.606951759313126390e-03,5.498115805033901120e-03,-1.331733791875892425e-03,2.283143215263741359e-03,1.098700794242676512e-02,6.355983178428423891e-03,-1.755659645676180833e-03,-5.225318046148554225e-03,-7.382228556370058759e-03,8.877001624511951516e-03,1.520105937326140730e-02,1.350918011442180751e-03,3.619086338951358825e-03,1.034560333288898903e-02,-1.479660108206296183e-02,-1.891878951168020698e-03,1.004777149104450762e-02,-4.849976263550265254e-04,-1.794262934989376314e-03,-1.041717195688625537e-02,5.777058177742524538e-03,3.608311156521062501e-03,1.996575562924493476e-03,1.551072759375168864e-02,-6.549460553180451970e-03,-2.188568708327259913e-04,3.890767918795931535e-03,6.430914728626647113e-03,3.228013578964345712e-03,-3.791679368246910452e-03,-2.819277377520297383e-03,-4.014123931858758210e-03,3.472060515204918995e-03,3.059573590064219601e-03,-2.703009413712527051e-03,1.034481976463519012e-02,-1.066216868638907370e-03,-8.148690165066412028e-03,2.289124773454608540e-03,1.121016522936415811e-03,1.131706110146410774e-03,-1.758104704712233580e-03,-2.606874688195215568e-03,2.026078594584267405e-03,4.707120580048977133e-03,4.043505172669465233e-05,-4.521117327016910790e-03,2.222427032541653743e-03,-2.397220925227969580e-03,-2.941676425559612996e-03,7.789336764052724738e-03,-5.622606553119444388e-04,7.612314340907093586e-04,2.948260787182263674e-03,-5.152581742611563216e-03,-7.110905443174615976e-04,-3.604387066315564714e-03,8.724597325005238602e-03,-3.962017376326577509e-03,2.400163185324657826e-03,6.905695452541189307e-03,-2.817082279911160055e-03,-2.666076429883239111e-03,-1.247679873344006507e-03,-4.480616894640173341e-03,-5.818724197382029069e-03,5.531750131608284375e-03,-2.218659302428319796e-03,2.588629222200205127e-03,6.731405831398383499e-03,8.086041784317222689e-03,7.809222913782287896e-03,-9.261213592830105229e-03,-4.604270890151918906e-03,2.569903300235365767e-03,2.703024956737930685e-03,-5.634127739718424593e-03,7.541879596988777454e-03,3.003600593647909192e-04,-6.335899971133842484e-03,-1.704402855870490192e-03,-1.856407830035958825e-03,5.983111211269458683e-03,9.888273850150694141e-03,-6.401422438539412828e-04,-3.389005269814452954e-03,-2.662348711204580616e-03,-8.191377345059477639e-04,-6.869213540106004941e-03,6.180971790753910650e-03,2.361669955366908682e-03,-1.305673420975715475e-03,-5.397050578432672878e-03,-4.525429459061151155e-03,-2.232864006510171511e-03,-2.873790312664810040e-03,-2.474911055439268839e-03,-8.988982595587075713e-03,5.696676612216458428e-03,-4.458320480625147311e-03,-7.072277989872267565e-03,2.656549042960385486e-03,5.631623773127931130e-03,7.672862470509879233e-03,-1.041568481106025339e-03,-3.968200784148242719e-03,-5.784318538405954467e-03,-8.043167201930592411e-03,-5.055816946236258054e-03,-5.302775278146100576e-04,-1.516642159550230570e-03,-1
.310181571615787882e-02,-1.980524538531387452e-03,-9.359145394586598776e-03,7.747357992663957181e-04,-1.663657007622074439e-03,-9.266886966024432543e-03,-6.743296535470796361e-03,7.527023823513151642e-03,4.333075936316227277e-03,-6.327688024866840073e-03,8.767284548934842214e-03,-1.527845281252558467e-03,-1.772464853962379613e-03,-1.642114893647141386e-03,-8.595854867246880207e-03,-6.370206256027160195e-03,2.019961562339933538e-03,9.747187727113412054e-03,-9.286763057969099969e-03,-3.458473489516075781e-03,1.848120528891117938e-03,4.938143782691178235e-03,4.627031585279253213e-03,-4.814259505665917545e-03,1.773897255610238895e-03,6.924290286488555797e-04,3.981979764252133441e-04,7.277078446134943417e-03,3.882931192755107938e-03,3.513869442071639471e-03,-8.026017800316844353e-04,-3.548189706659146334e-03,7.479645722325833739e-03,6.259699615593304275e-03,1.298996429254169714e-03,1.176085365622257795e-02,7.153853517505399592e-03,4.330712473483473365e-03,-3.881897385216496327e-03,3.582674203906302968e-03,-7.251527337429999540e-03,-1.696559747737416950e-03,1.158091278585239928e-02,-9.835181338285557684e-03,2.988789955504536546e-03,-7.173148960855283210e-03,-3.322094836730815539e-03,-9.964833739960409983e-03,6.627222324870590819e-03,-9.496576080861948710e-03,1.037964045334562220e-02,-6.117911218719077754e-03,1.414925022452354596e-02,6.657848634066622188e-03,4.344324411500727692e-03,-3.627182184826055564e-03,-5.208359536758862263e-03,1.223205808703664577e-02,-7.668002288715229610e-04,4.581768841198007036e-03,-3.714335783337972645e-03,-2.514086009401212025e-03,9.174809133497409223e-04,-5.127342732631961835e-04,1.697150368946732891e-03,-3.149776495885868357e-04,9.146046351033301075e-03,-7.696001207337847469e-03,2.227340680610113275e-05,7.825383657125687190e-03,-4.067153033428892034e-04,1.293251551245739629e-03,1.960048403267413536e-04,-5.571219125992036766e-03,-2.413028528002998316e-03,-4.880852556489750742e-03,-9.993960254334542225e-04,-3.026709063968904215e-03,6.604825143882340002e-03,-2.606918026174992031e-03,1.132292382236873771e-02,-4.266509374522670615e-03,-7.248517913566633261e-03,-1.511681963676919475e-03,-5.704695427591634053e-03,3.401800769140326478e-03,9.186725157797000912e-03,1.090541455925416337e-03,-3.999420494053486548e-03,-9.585585715233737453e-03,1.212878006870148111e-04,5.942893261282574877e-03,3.916923021070923758e-03,-1.518075253620725790e-03,7.802672933900471793e-03,-4.553094620415806988e-03,6.101040922230655840e-03,-4.425334024662740118e-03,-7.176758970089927754e-04,1.357796469238803068e-03,-3.710421723316134190e-03,-7.254169844430506837e-03,-7.098284501686301720e-03,5.904883036659190100e-03,5.551650137759903732e-04,-1.695310862830174065e-03,5.725954541558787728e-04,5.483023680824066458e-04,-1.515397761010112924e-03,3.212430575071468993e-05,-5.076760933112809716e-03,-3.106069950039818013e-03,-5.087027219976578661e-03,-1.655331021656438698e-03,2.095955937942426024e-03,8.120746146034067735e-04,-2.982013303081049779e-03,1.150170663063563789e-02,1.306425301460142603e-03,2.007551086777694619e-03,-5.452367701159595292e-03,-2.320212112952912530e-03,-3.600615506364653485e-03,-2.108426338919790124e-04,2.046568903489919510e-03,1.797862101317276081e-03,-3.804968918943990065e-03,4.177512514114868278e-03,5.430510874144664143e-03,-1.906601972283365537e-03,3.492275705544833908e-03,-2.029141868123000934e-03,-7.599969698198162302e-04,6.616252702969623901e-03,8.788853771306080753e-03,9.830538274161122400e-03,3.233104358675569379e-03,-7.027942656413884749e-03,-3.582117985821640579e-03,3.420055156
637652580e-04,9.351361877465795211e-03,-5.529357344823023802e-03,-2.214543829770639592e-03,2.669603560971223893e-03,3.650710702841807043e-03,-4.879631878422521080e-03,-1.990161629236234138e-03,1.711544951378898794e-03,-2.342913660029462795e-03,-7.428222042451292431e-04,4.193968286529025508e-03,1.457116299194399344e-03,2.460580386501855232e-03,5.785920368469240098e-04,-2.497965830675898070e-04,-2.141946880636924101e-03,-2.163345405059311725e-03,1.846602948073245267e-03,-1.257052319478802176e-03,-2.182210567917102634e-03,5.549719374486778226e-03,-1.156756551374776669e-03,-8.523698378036652021e-03,1.182339018849699712e-03,9.030869053843381539e-03,-6.504895039480594696e-04,-4.180846302358436246e-03,3.836312860100584646e-03,-1.666177527265644276e-02,3.990104414758658367e-03,-9.543112811531182611e-04,-8.743097989240182846e-03,3.427752903743087800e-03,2.530800895192629159e-04,6.600484262659763075e-03,1.906244479552873187e-03,4.831506150470460045e-03,5.425172299003028524e-04,-5.591519992516834001e-03,-9.459181078026308170e-03,-3.451421302743838335e-03,-3.626630373553295112e-03,3.829385194955465220e-03,1.077350894268442986e-02,-4.326849975798846906e-03,-2.097023478199981292e-03,3.978827226840081326e-03,-6.927375964008282427e-03,6.679567460652758963e-04,-1.005634212038917716e-03,4.749771137808120672e-03,-1.019755889972947908e-02,1.278520388309981179e-03,6.050525273276117187e-03,-2.827247468417538011e-03,3.885357797367442671e-04,4.100441822057375105e-03,-5.007118481907358925e-04,-3.308501364037093155e-03,1.136389102255845303e-03,-1.222297185133264415e-03,2.599910374250970473e-03,-6.472614362217280273e-03,3.152873193996401022e-03,3.645299479660110741e-04,-1.626742970381076740e-03,-1.014705848560239730e-03,9.466406997390515790e-03,-2.803914202581970095e-03,5.116962846962501074e-03,1.902912346463627195e-03,-3.220898491243535684e-03,4.173501473428072386e-03,-3.199642054867144186e-03,-5.548932956226017114e-03,2.325842245531857706e-03,9.410723025718832624e-03,-3.841007852648009070e-03,5.181738483667058630e-03,-5.712474592666205546e-03,-1.789923810089430987e-03,1.132125803912463610e-03,-7.902167562976781818e-04,1.108948931090103897e-03,1.425968386809023207e-02,-6.709133486707398279e-03,4.369422369828347176e-03,3.942385706397653553e-03,-7.477723175765494312e-04,-1.996482604228777200e-03,4.268939229567687149e-03,2.562073692675149224e-03,-7.010182192573790466e-03,-2.933345493153117264e-03,-5.405500448291973570e-03,7.426725446448908564e-03,-6.841728377884471156e-03,1.587449026751637472e-03,7.600457366495252248e-03,-2.736023416177475482e-03,1.149527346065063122e-03,-1.066433576650112087e-03,5.394326087306678015e-03,-9.774876987562833644e-03,-1.496623839874370449e-03,1.525050212948034006e-03,-2.209921489625228923e-03,9.527292948604331943e-03,6.468031039972200850e-03,4.902291896919875637e-03,3.201475702658236350e-03,4.528596103595282710e-03,2.593869064494661730e-03,-6.461370982528629554e-04,1.715646681509292500e-04,3.037788559003060888e-03,3.424866374244158493e-04,5.403121694060788321e-03,1.291262685092571701e-02,1.385194432559135882e-04,-6.072933346319378534e-03 
-5.396858710574460420e-03,-1.320340897740760128e-03,2.025086397976749372e-03,-5.757872688514675645e-03,1.585910178989935471e-02,-5.905230343768836342e-04,-3.738761463885658352e-03,-9.922540482129899031e-03,-6.857208531149020653e-03,2.181134111225880440e-03,3.379619697608900707e-03,-9.127347985368778824e-03,-2.594052551707724981e-03,-5.239898732868394221e-04,-3.943326835369269021e-03,-8.132925544136431276e-03,-5.945790450396372384e-03,-7.991726948138981601e-04,-7.108669914045950659e-03,3.687109176878660636e-03,-5.322286656734270122e-03,9.482489670804119045e-03,5.907050154246380920e-03,3.673135924400034065e-03,-8.705225456172235865e-04,3.152327143848691112e-03,-3.633117222646378274e-03,-2.661440085024012296e-03,-4.931656196276391815e-04,4.326974712036635307e-03,4.173428743250142715e-03,-3.505941338045724925e-03,-3.205107682631620943e-03,9.467406608564905759e-04,-3.906392120375592157e-03,-5.874678382361869095e-03,8.582964348529263827e-04,-1.638322754360240722e-03,3.967384573884254369e-03,-1.081116515537287144e-03,-1.667050874998110858e-03,-5.409304294361105998e-03,-5.872841380437210553e-03,4.398429652242660630e-03,1.532298864385852905e-03,-2.779258557605630668e-03,7.207778414828941785e-03,-1.674330295974427770e-04,-1.238580830870685597e-03,5.808942215804126404e-03,-5.175392853510671007e-03,3.316512071158479041e-03,1.972531823171408665e-03,7.650270118085123958e-03,1.998636240970706312e-03,-5.667111906828594921e-04,6.761655363178646974e-03,-4.092922371203837340e-03,-3.658098306696591553e-03,-4.648480231103768040e-03,-3.797558010391340055e-03,1.746107715719268842e-04,7.092449814858734110e-04,4.883441624497603453e-03,1.912450146515019952e-03,1.281862004400815581e-03,-5.867204956437363332e-04,2.627610171470517314e-03,2.129954919493026299e-03,-5.061966057077740312e-03,5.051617222873359091e-03,-6.912866184915782489e-03,3.766828411108613574e-03,-3.429867988218660620e-03,2.648632714213984985e-03,-5.361963500288426977e-03,-1.694385456284359329e-03,1.803263411751086179e-03,-6.858671267541156975e-03,-1.553767599418399417e-03,-1.805605043043237001e-03,6.324525734165736997e-04,-1.831632431375623512e-03,4.336257045725992916e-03,-2.156632450393965519e-03,1.532914977226681975e-03,1.318339275710226320e-04,8.426221256691054928e-03,3.206849710955933779e-03,2.570772236003483063e-03,-6.188459757539586936e-03,7.274894720138834481e-03,1.996014130946791986e-03,-1.013218859161372186e-02,-3.428477542217000986e-03,-1.168388987725396677e-02,6.667090200877727295e-04,5.875916107938429153e-03,-9.200357707616376021e-04,-7.431344831793472092e-03,1.530608198372838784e-02,2.776860615603724599e-03,2.539025783735650039e-03,5.669637690024711789e-03,6.038289645823408276e-03,-1.233832606968298870e-02,5.011271608987574212e-03,-6.438960539196487455e-03,-3.590344807119642042e-03,-4.213368466738314118e-03,1.339174916195859030e-02,-4.435063607659957448e-03,-4.622473781282208406e-04,9.910026195511073208e-03,4.854741493524094079e-03,-2.082932128694797887e-03,3.258378319963418750e-04,3.442441735546465827e-03,9.115531341533023718e-03,-2.469708623808352776e-03,-1.085537581074641662e-03,1.032166796926955359e-03,4.188859879048262638e-03,-7.365081167022518964e-03,1.311638763620770302e-03,3.468593498843972490e-03,3.028448864516414501e-04,-6.467025569009823144e-03,-1.185918356767186321e-03,-2.373052222736395361e-03,-6.021398294340451172e-03,2.981512237752907495e-04,-1.739296458915278159e-03,2.758107688435027394e-03,7.565703703144690921e-03,-3.930787924567517855e-03,4.121270727072900812e-03,-1.316572332696799809e-03,3.044886839441100085e-03,-1.90227
8970264718915e-03,-1.070551867637448140e-02,-3.561749722100486949e-03,1.312217043497506883e-03,-1.535117443563990293e-04,3.277159108702071191e-03,-5.409954230699286735e-03,5.416848484392792519e-03,-2.502071919264880172e-03,-2.086716232656458080e-03,5.114599626025328853e-03,3.092327973850894640e-03,1.623055202215824364e-03,3.137623776004618177e-03,-4.633432680781771063e-03,2.302266743473577702e-03,-1.159464840914062745e-04,1.586682032573739128e-03,-6.671027720303439311e-06,3.101845193130876085e-03,1.730692732515737992e-03,5.313223853821174951e-03,3.436745645110924046e-03,-5.343238688427248305e-04,-1.911855232768587889e-03,3.968384565828170923e-03,-3.043271710276512581e-04,5.009608242911245402e-03,-4.759938514842124678e-03,-2.524647713817027711e-03,8.227020908407358920e-04,3.523053312041949086e-03,-2.205397800228018831e-03,8.213712372319843108e-03,5.442697143439736464e-03,2.031820240845960950e-03,-6.217826019502691211e-03,8.631566179073550985e-05,-1.545993102935998330e-03,6.331195863054227442e-03,-1.228911453698881264e-03,-3.085831912046302448e-03,4.405823801930970098e-03,-1.879794184857834288e-03,9.799213543204522356e-03,-3.820115027340031076e-03,-2.947900460480258784e-03,-2.481681788298085566e-03,7.783721400256705773e-04,5.203774052594655614e-03,-8.688917317829087912e-04,3.304742677696003751e-03,1.718021343413066995e-03,-3.555235877599021680e-04,-7.299049331300483737e-03,4.601493923514027523e-04,7.314840774386757380e-03,7.663882589165610729e-03,-8.047770200738626339e-03,-4.347697499504846533e-03,6.887802867658249228e-03,-2.185054349057762805e-03,1.877444625824879797e-03,4.065819186499917103e-03,-2.981674170812056469e-03,2.612728597207687308e-03,3.321211558328489039e-03,3.509977753248195217e-03,4.135739404859470857e-03,5.960081743659889630e-03,1.684775792859963782e-03,-3.428587820808402450e-03,1.434647837887861139e-03,4.800272227083602315e-03,3.312059954993108908e-03,2.266236167028813555e-03,2.208521154129306722e-03,-5.624494745044898887e-03,-5.675231081684620015e-05,-2.549732972616148945e-03,3.915990638293316041e-03,3.793850096233438406e-03,-4.208285196038896356e-04,-2.289366289918646959e-03,-4.382380467052172067e-03,2.428107778259883717e-03,1.573830713217746168e-03,2.269085969223447706e-03,1.338281809920995047e-03,2.108092621241724996e-04,-1.344250145969655939e-02,8.085482321558808616e-05,3.040451517780747969e-03,-1.184939778244731550e-03,-2.332954032395118636e-03,2.500463247373887945e-03,-4.014899769694347045e-03,-3.867615769706233674e-03,1.646394719047451327e-04,-3.312453551609837593e-03,-1.227875338878035676e-03,-5.970867929562188869e-03,-6.564345798897883253e-04,5.578254768390838308e-03,-1.271065799972808741e-03,-5.097730276138689141e-03,7.445190544828407834e-04,6.265605937743157043e-03,4.090123753081231252e-03,-1.148720631473187535e-03,-4.521501725417071717e-03,8.056011263566984285e-03,-2.867429476808691820e-03,7.463036993115436391e-03,8.047336685701752743e-03,-2.912847846329529554e-03,-5.120418209671438042e-03,-3.990181009416203690e-03,-4.384016050259442569e-03,-6.575304028054598272e-03,-4.224142547131493064e-03,8.250110456919703197e-03,-2.152311790302213063e-03,4.889762006183286276e-03,-2.043571806366787681e-03,2.039278866107285121e-03,-2.410469683806596569e-03,-5.214549093874439002e-04,-9.915935470354667394e-03,1.757932878981476391e-03,-6.379591308307478283e-04,1.987965446505116692e-03,-6.279580961919935082e-03,1.181253057288767427e-03,-1.295547498357298762e-03,-7.709140487536225543e-03,2.370939521455737390e-03,2.512952717098660554e-03,2.362899741718663387e-03,5.974434980935142830e
-03,-4.359668560513093108e-03,7.724923614708037803e-03,8.022070379603967252e-03,-3.940661000720152921e-03,6.593150785112435880e-03,-3.253059139954520940e-04,-5.794982228154113395e-03,-6.512457589216825646e-03,1.000867922010584537e-03,2.662355320087916090e-05,-7.300607459240241945e-03,-1.151434323932907808e-05,6.086062616249871167e-03,5.768016813074050737e-03,4.713181277292075033e-03,6.071672413906244311e-04,3.535471501811161734e-03,-6.653073422273721865e-03,3.287934083974198857e-03,2.968183206801611098e-03,-5.104855600908796837e-03,-6.478722884355826543e-03,-3.771301151620270391e-04,2.207778331551578811e-03,8.450384219353653317e-03,1.422887912526025860e-02,2.295056215515561306e-03,-7.435664087746216575e-04,6.469660904252841349e-03,5.264260372230087238e-04,-2.291575759271544631e-03,2.719613181098159537e-03,-2.977874283703807988e-03,-2.592628276155968352e-03,7.610460678129841658e-03,-4.038659226119699353e-03,3.437145737640823163e-04,7.229615403305474086e-03,-3.757160152427377418e-03,-9.574204203937157747e-04,-1.100627721167317776e-03,-7.710201908812680353e-03,1.968231358831075504e-03,5.871984073065013472e-03,-5.203178517225205517e-03,1.009520229860009577e-03,1.181598863640888038e-03,-9.502126407280130473e-04,-3.942579193358840967e-03,-1.808522847291411256e-04,-3.334132515089910896e-03,-4.531441129542674098e-03,-4.069574454987670217e-03,7.084724331879201347e-04,1.172858618585869659e-04,1.510060055030641420e-03,-1.918699856744060300e-03,6.280546573568066872e-03,5.924075462220735645e-03,2.771917834714354104e-03,4.523703723312243880e-03,2.217367291469776302e-03,4.152004808566534845e-03,-6.028337255112764829e-04,4.477087169117384413e-03,1.759389006101688257e-03,4.926691378130446629e-03,-1.054989599441294619e-02,-2.117118195770858590e-03,-5.519999142781748490e-03,-4.549742783491916159e-03,8.303373214970934460e-03,9.811639063176513104e-04,-6.946205393754676868e-03,2.969404280370578891e-03,8.115400716721846847e-03,-1.759683948584684059e-03,9.976418679718048257e-03,2.274990507841324287e-03,-5.777383214433956407e-05,3.188273132695977619e-03,-3.550407039415069081e-03,-4.199006097007607595e-03,-7.610071264266446769e-03,-6.492315375127104965e-03,-2.063093184489965895e-04,6.701451219210566995e-03,-1.637311801585709719e-03,-1.423621809919963909e-03,-1.204339740829754981e-03,-4.485882610189965460e-03,-3.860250703068288883e-03,-3.335814240723691294e-04,8.596766114343845416e-04,-1.034856955135942183e-02,9.402666750472443366e-03,2.261432306596371841e-03,3.474827440286564796e-03,-7.629907518246205507e-03,-2.083724700601967975e-06,-5.811040865021746028e-03,7.477151110523577726e-03,4.448941204060130432e-03,3.019709458396883147e-03,4.168101045023955495e-03,5.164502892961005988e-03,6.913883509647515170e-03,-1.372486559173295088e-04,-3.601929803007254682e-03,-4.111473346948382387e-03,1.033805326106451141e-03,3.081608342263807143e-03,1.698027552648678296e-03,9.823740903628924367e-03,4.149424479315785494e-03,1.937303917269211353e-03,3.094267632423439076e-03,-3.697195808096426489e-03,-8.337474046247825982e-03,3.228388043488068619e-03,8.109920356558299875e-03 
2.314511804297148846e-03,4.857275243349053889e-03,-4.681219363938123046e-03,8.231289013701547849e-03,3.197928118310481259e-03,-9.271120284846306797e-04,-5.310272342151379707e-03,-3.346556876653038019e-03,-3.110785205945282988e-03,3.974061837605830268e-03,-2.173632139375938736e-03,3.790706707936258250e-03,-2.683612238808528224e-03,6.666574141383417541e-04,4.997288574533663603e-03,6.303417643422662767e-03,-7.305371572362658744e-03,3.865092133113357670e-03,-5.499266247544775527e-03,1.276654811816618600e-03,-1.004022323796202121e-03,8.683042984877292758e-03,-7.147360811473419014e-03,-7.754717863305131756e-03,-7.100159249193670891e-03,-8.595999841207402565e-04,4.874978025550699007e-03,5.019705917660829877e-04,3.028417640298526267e-03,1.392239667802789606e-03,2.995650494542034369e-03,6.385937944772507140e-03,1.026857042691522163e-04,7.664641562694108137e-03,3.256533093928862775e-03,8.764543114359337177e-04,1.044370993172149967e-03,-2.567406440433231602e-04,-5.462130753818246476e-03,3.984021163667845686e-03,-3.408048624470620507e-03,3.523171349078897587e-03,3.009626570225102000e-03,1.168563420981725892e-03,2.955998968645248692e-03,-2.645977051321413143e-03,-3.197834871158800139e-03,4.527263107508201247e-03,-6.250119851781642775e-04,5.504261710104612140e-03,1.318196469444481113e-03,5.255087314881502249e-03,6.358458793602898883e-04,3.765443828510316605e-03,1.104179875129941528e-02,2.744641621036487348e-03,5.838384307969362967e-03,2.177163762792500414e-04,-4.920838019696614427e-03,-5.700365870016635764e-03,3.789885809193723094e-03,-5.136793824734781945e-03,1.045291176665227341e-02,2.829678959264757656e-04,-2.972051628705716254e-03,4.299180666053752618e-03,-5.473847392367966677e-03,-2.262481322893495142e-03,8.054203849311372546e-04,5.642232576347913468e-03,4.085575045572222680e-04,-7.743959355832733850e-03,7.831420442140565874e-03,-2.104589186983250523e-03,-3.407294857429124177e-03,-2.272801027150449498e-03,-6.822160359926649322e-04,-1.498059871293482611e-03,-5.949271383962392162e-03,-4.342174044618195934e-04,5.174183231733765591e-03,-2.849152063804656198e-04,-5.317090966584658678e-03,-2.070688704750359849e-03,3.214840887091447667e-03,6.782022776258857071e-03,2.669991934123460905e-03,-4.745144521448398968e-04,-6.686950053310590850e-03,-1.004271969747003515e-02,-1.259380553558732321e-04,-1.775559550836622097e-03,8.914609253483418488e-04,-2.785567409326781315e-03,2.749414634739566401e-03,-8.115383082776693804e-03,-7.147149048877275701e-03,-6.431312075079030242e-04,-1.979382977092036637e-03,2.087495160304734740e-03,2.733828005711152684e-03,-5.166353998057300156e-05,1.141786697757513590e-02,-2.716182499148788419e-03,-2.736904834229447747e-03,-4.850642739276413151e-03,-3.544045513348059815e-03,-7.274991398594335239e-03,5.392600939007191445e-03,-1.067335618179333662e-03,2.371641614917071542e-03,3.763750950627686113e-03,-6.704197981062158876e-03,4.385444969353402729e-03,9.248595436989331953e-04,-1.467897049322266015e-03,-4.365433944341229507e-03,-3.561966608328747796e-03,7.202894029091412055e-03,-1.428092806272457166e-03,1.178341497716890035e-02,2.200070676265808694e-03,-1.933666881158151878e-04,2.175010390884973051e-03,4.505573879779917017e-03,-7.631490143154252663e-03,-2.716424557688866277e-03,-7.957406778088965422e-04,6.706738686766005458e-03,1.964586169657282517e-03,2.322416244196256422e-03,-1.425882897969701479e-03,-7.655823599084842033e-04,-2.839859804700429688e-03,2.895309047486836502e-03,1.197761817294926926e-03,-4.226935115400883534e-03,1.691933197800046590e-03,1.318488013411933495e-03,4.86671034427
2073309e-03,2.546685960489016801e-03,-6.002646135954915231e-03,-5.297631299490867997e-04,-6.661724673226523952e-04,-6.110820615048905349e-03,9.054665854240932701e-04,-6.497821380299887914e-03,-2.707090203947087621e-04,1.830224600528874111e-03,-8.840742937557321884e-04,-4.466200923760780574e-03,-1.179289825155975524e-02,-5.363399714861296491e-03,-8.478355384424691218e-04,7.708315969749257764e-03,3.103037442102078367e-03,2.151608719924353805e-03,-5.510173765175215878e-03,1.881109312348181774e-03,-1.483696976867530720e-03,2.106631901680768113e-03,-9.956382804973660811e-04,3.085834054496174473e-03,-3.190850843632246810e-03,-1.558847822465257282e-03,-4.183564948029390093e-03,4.266269620363247260e-04,-2.222470622536814894e-03,1.383722762245560498e-03,4.253095252778816247e-03,1.540296100215243248e-03,-5.843506871990711456e-03,-6.984047987082183523e-03,-1.740354320691250628e-03,4.364889291248337348e-03,8.033484648166321421e-03,6.014512859122963595e-04,2.200680734107428381e-04,2.673723123669770850e-03,2.165391065448153858e-03,-4.542030419288183019e-03,-1.100117055025866524e-03,-6.970553316324426714e-04,3.798807761600335713e-03,-4.978660267977647221e-03,-3.746709263895251548e-03,3.852535361645181497e-03,4.873018219270142636e-03,-4.905434021225503309e-03,-1.717216768376700732e-03,1.064374678691912442e-02,-2.571648536255200564e-03,6.320184702447260473e-03,-6.445668688198442561e-03,2.346561983567335263e-03,-3.080823971412234401e-04,1.089609373114776335e-03,3.999221057790285430e-03,4.926232195691736260e-03,1.352746475709712881e-03,-3.332523686068321073e-03,1.099098838254805068e-02,-4.302937608072118433e-03,-3.435142858350681734e-03,7.099274664998056657e-03,-5.708757198265190791e-03,8.450530227538347039e-03,2.972465328098162878e-03,-5.738148464805742713e-03,-3.764973202257896213e-03,-2.319226644462350426e-03,-5.270733311472357990e-03,-1.121912410716011284e-02,1.095724839987170622e-02,-1.046340680584555843e-02,-3.739129812721189283e-03,2.164191975467578445e-03,4.219989595835508048e-03,-2.579339523914369724e-03,8.514660195709566675e-04,2.291440200205154595e-03,-2.997545640844194881e-03,2.812089101972869640e-03,-2.604638788682053199e-04,-4.948580621516456305e-04,3.217555906672259821e-03,8.497966283175624469e-03,1.945227350549700055e-03,3.762445758891539566e-03,-2.343805634752059094e-03,5.245310733266319398e-04,-6.965046708322293091e-03,-1.562237951470467692e-03,-6.160390153322436958e-04,6.545218970281914059e-03,2.878448876058157906e-03,-9.426860478980440827e-03,-1.001340270187307119e-02,3.056457527071848677e-03,-2.584196909687859305e-03,7.999622389364174219e-03,-1.037438053346561011e-03,-1.565852970928578161e-03,-3.784551878218462943e-03,-7.381413348395263890e-03,-1.180797459613799633e-03,-7.194018852573263664e-04,4.040048258104374755e-03,-2.440235475841572542e-03,-4.139079579260721356e-03,5.427869055558439022e-03,-6.570665979442964939e-03,-3.223861460480937843e-03,-1.741617826139586203e-03,-4.958382307516891185e-03,7.090985831663860779e-04,-2.978255626984544543e-03,-1.288514615393262716e-02,1.799257875686316498e-03,-1.454526037440534436e-03,-2.355662160355449092e-03,1.383343775635225521e-03,-4.834261804210320153e-03,-3.081167912630888937e-03,-7.987669626321225111e-03,2.224789745247204246e-03,-1.687207762203567435e-03,1.323894201266803076e-03,1.067614923188713559e-03,-3.457589127814686739e-04,-2.554652020742825998e-03,3.863928336802494295e-03,-1.951561468567678025e-03,-1.110089580707762343e-03,-3.383826058251205443e-03,5.288559629466947284e-03,-1.405074319716444892e-02,1.437635303447321999e-03,-7.25923506043
4771070e-03,-4.870865482997270915e-03,3.450145885076618714e-03,1.817317004944823348e-03,1.304928391799024172e-04,-3.017181316116662650e-03,1.829391252679503930e-03,3.248225190186843688e-03,4.479178402700726390e-03,1.086895565193810899e-02,5.182458947644827223e-03,2.647388665570043174e-03,6.146232073157203990e-03,4.117135768415452861e-03,1.782100981261630331e-03,1.013688748582094751e-03,-3.329490331731301770e-03,-4.532013827697444971e-03,5.605752701817730971e-03,-6.588965234127603168e-03,4.638777840539480210e-03,-4.442878646898548549e-03,-7.537418867635130998e-03,-5.183246127457416010e-03,-1.098608368625623131e-02,2.047522727646862873e-03,5.670296619152651908e-03,-1.133692392412529500e-03,1.098145053324355155e-03,-6.275691247316871318e-05,-1.847238118123861565e-03,5.437660287788116664e-03,3.903203329972110888e-03,7.650917249606080731e-03,-6.385711259855771094e-03,7.775488996721598156e-03,-2.568424138433704745e-03,8.297942378146044851e-03,2.604792004524131808e-03,1.984474423078090750e-03,-1.254096891163124531e-03,7.288472140823564448e-03,6.876866281161367550e-04,-5.247866765862263701e-03,8.887588013953451327e-03,-2.988479151502642511e-03,-9.625475549295440386e-03,6.626334487163352410e-03,6.118234613722319665e-03,-7.269331984477952254e-04,-1.600946179492407709e-03,1.339931600772627761e-03,9.504223006646758676e-03,5.790857888672321444e-03,-1.077045343689396299e-02,-1.364571098601665214e-03,-2.470817220937838923e-03,-1.984549839620348456e-03,8.504400221129021845e-03,7.664136162398005846e-03,-1.232643872361460086e-02,-3.372580145586725423e-03,-8.694626020842966679e-03,-1.716352826650561570e-03,7.695468886795887604e-04,2.214595347861447738e-03,-9.099831447380364591e-03,-6.846669389252893478e-03,-4.711330874646872842e-03,-6.269983041303313007e-03,-7.559306840945239671e-03,-8.551786154130597631e-03,-4.282976685206231135e-03,-9.147423255754617150e-03,4.879071078792213871e-04,8.913939299172645922e-03,2.452351588642052386e-03,6.888935696044640004e-03,5.616142116749376842e-03,9.535158910612173732e-04,-1.842850265950357050e-03,5.053112861892960009e-03,1.021039786645702892e-02,-9.006212509071205964e-04,2.172247577556843910e-03,7.485796509335198440e-03,-8.246480544681473145e-04,7.661916065032605555e-03,-3.320513774277410071e-04,3.616198459801382888e-03,-5.940653245479466348e-04,2.745156854780146302e-03,1.072473606613359373e-02,6.871576520896769411e-04,-9.928458331299487225e-03,6.496581562440670830e-03,-6.997429401486170153e-03,2.144249161966151355e-04,9.883239008620307419e-04,2.078047255355301975e-03,1.501154580340613096e-02,6.191041141093564341e-03,-9.586537875760568728e-05,5.374328387194340874e-04,3.980272731161680656e-03,2.274761444710893688e-03,-7.143226692793563752e-03,6.468872544407414625e-03,-5.219721207126827058e-03,-1.671392503779450117e-02,-3.458108114098575742e-03,-4.517981337829645784e-04,-4.839183668861635098e-05,3.674537553157684010e-03,-8.514180431150859905e-04,-1.944995446578954299e-03,-1.251177166226233100e-03,1.473854932849969282e-04,6.847856284081773985e-03,2.387193961289289183e-03,6.105897604549164089e-04,6.764530327669175148e-03 
-4.369055383995892536e-03,-3.495868432008810735e-03,1.923472588057597047e-03,-2.260195719147227011e-03,-4.083336763443195724e-03,-1.961836007525789675e-03,1.837691554484135494e-03,-7.362003770808808484e-04,3.364877382657664728e-03,-4.640374420619026649e-03,7.300295886646319780e-03,-6.602976342430094681e-03,-6.644265416742573503e-03,-1.070057778909825220e-03,4.165779450106432691e-03,-4.596228572308248155e-04,-3.292567667222484878e-03,-1.658907037704917577e-03,9.947163075043939998e-04,-7.242498315915731934e-03,4.205916075137846981e-03,3.066434980421100034e-03,5.292240300828000669e-03,1.741451055676693921e-03,3.818414930989840134e-04,-6.445777593036701968e-04,3.969535548814978436e-03,2.871373528642623581e-04,4.207992144203990571e-03,1.410815293170716378e-03,5.150861011776712052e-03,6.205410066235520286e-03,-4.977744058968678123e-03,-1.031253400612672406e-02,6.544122881102404886e-03,-1.702581302085581436e-02,2.392831128149526358e-03,5.049041141259268811e-03,-2.385645713994514160e-03,1.237620849282700195e-03,-6.400561219694942718e-03,-4.859429368127003886e-03,-1.339971756890284953e-03,-5.891881171071968078e-03,-1.222323893676366922e-02,-2.824077469321280547e-03,-4.302605467104002324e-03,-6.819090813056117965e-03,-1.548887328919736420e-02,-2.909417656189614444e-03,-9.235161183097854998e-03,5.431894061966706551e-03,-2.373066145641866687e-03,2.037863772831500185e-03,-8.520671665430476840e-03,8.372874401741690897e-03,-1.675102322627335799e-03,-4.710230272300638781e-03,-1.746721237509835491e-03,-1.361727592885881959e-02,-6.806438903755865875e-03,-3.452155731386690948e-03,8.351610790974486193e-03,-5.032427120253571765e-03,9.156588427137490101e-04,6.291021996200806682e-03,3.865088408388301505e-03,1.809266207157477283e-03,-3.363573258433521455e-03,6.504727297342236848e-03,-4.436822143403149586e-03,-2.429272871043196020e-03,-3.439965579871628269e-03,-1.278475998116597023e-03,1.435935182632098192e-03,9.968568659153884820e-03,2.168035622427007952e-03,5.758260648518887317e-03,8.833208575958151343e-03,2.029044506612856513e-03,-5.808774108301307214e-03,-6.026207172211691526e-03,5.259181223530228107e-03,6.109677919762765723e-04,5.484818838018166390e-04,-8.499375343213688358e-03,3.343167904860739476e-03,-4.815990806232564245e-03,5.027146499910677656e-03,-7.124990237037403043e-03,5.249099211766920965e-03,-3.916362222030589715e-03,-2.003715692287794895e-03,-7.171740849782560524e-03,-1.835723281426118562e-03,8.317730898304455736e-03,6.061401010913913118e-03,-2.858842687179817066e-04,1.095819889429522700e-03,2.280768491506848534e-03,5.734165254266446708e-03,-4.540989622394071661e-03,-5.514853885702272057e-03,-6.257772042193917618e-03,-1.165488788192482576e-02,-5.607693795765772253e-03,1.448173065712796657e-02,-6.714769472504832776e-03,-1.001662858092909961e-02,7.547014783791874612e-03,-4.304770401978955628e-03,2.309816022990871919e-03,1.684006224873995988e-04,4.833438579408841015e-03,-1.963822124077359114e-03,-3.214035146386103829e-03,1.097757000985288985e-04,7.996475749924658247e-03,-6.478993012154115987e-03,5.239792989222845409e-04,-5.782828578033693233e-03,-7.092424520766507992e-03,-8.496960870112398381e-04,7.245377043051186641e-03,4.734870770140601863e-03,7.864456937438620357e-03,-8.703692195619072008e-03,4.373419271021512442e-03,-4.795895205887018911e-03,-1.612739194803503376e-03,7.483212678884314586e-03,3.675087051036756043e-03,-5.965633906647891879e-03,4.616458966275906878e-03,8.121084907843096473e-03,1.103280973849950003e-03,5.355916451806926569e-03,-4.628138421204399874e-03,-2.794321134280329261e-03,-2.434
748946343802834e-03,-9.795313757821203296e-04,9.090810161213051159e-05,-1.052919060764955818e-03,1.765506918248100406e-03,-2.033266149182467314e-03,-1.453453706195659842e-03,4.151326182152872100e-03,2.121148494624639641e-03,3.637412886621056111e-03,-2.542185747090929676e-03,2.647239665452288341e-03,3.967257458724569689e-03,2.686987138314003102e-03,-5.418018385516527695e-03,3.564134934963836873e-03,-2.913596928272116793e-03,-5.663364105329765576e-05,-8.456879770260734147e-03,-2.279868958477778589e-04,9.992883673606933834e-04,9.451546122512496581e-03,-4.808952108488379106e-04,3.089136073894575493e-03,-3.244985723210580975e-03,2.710855754496564046e-03,-3.386649851302081602e-03,4.041625117262257275e-03,2.157502573513811906e-03,-1.502428816888007855e-03,-8.974184118677628122e-03,5.127862116439221504e-03,1.042312879776697041e-04,-1.433730062675759361e-03,4.080971127716237837e-03,3.885322013097822821e-03,-5.424946448517400672e-04,-1.915728312967766563e-04,2.058993573044860109e-03,1.643649758415078113e-02,-1.537471383605226033e-03,-6.575667279334727541e-03,1.648732263367327705e-03,8.381244077147687044e-03,-5.592918972033712743e-03,-3.123120091726087726e-03,-1.333826679349121424e-03,2.105745724279735592e-03,-8.288396977255104488e-04,3.036053443371408093e-03,-5.124700928342138667e-03,-2.394632369326409570e-03,2.401127491687225085e-03,-7.997727994048213149e-03,-2.024980913632222403e-03,1.195886859369205526e-03,6.948740772426472735e-03,-3.978190929055973259e-04,-2.672212111548894452e-03,-2.727964725816880379e-04,4.038429211045727399e-03,5.966006102218103383e-04,5.058888352148153619e-03,-5.054795588504592228e-03,-3.177719586163294408e-03,2.048412658270776752e-03,1.342675835493139071e-03,-5.950896106136470062e-03,1.770796996505671862e-03,-6.514498901823991646e-04,-4.711361485727626371e-03,5.067520840655151231e-03,3.481166479593332443e-03,-1.821930987659727040e-04,9.875166889985482338e-03,-6.141437770014508343e-03,3.751905584925209654e-03,4.833655806035340105e-03,-2.007885309542348999e-04,-1.103385811674171581e-03,-1.574222894493384215e-03,-4.260604322809363742e-03,5.550103114415333785e-03,4.367439550510544247e-04,1.986684765238628386e-03,-6.395248872318022582e-03,-2.020967876957357676e-03,3.096696952087615255e-03,7.861844108289762351e-03,-3.668929939234885640e-03,5.519577179871440996e-03,-5.339050606688781377e-03,-5.257160414427186450e-03,3.038416649610132885e-04,3.371566384335363334e-03,4.952773487307209933e-03,-9.816193984976819692e-03,-4.785180097665401797e-03,-1.810530255497692313e-03,1.110497322227829535e-02,-1.657414613619757286e-03,-9.183745340860121915e-04,-3.673719062811092034e-03,-7.838987472694777803e-03,3.326267660302173555e-03,9.696909205016810904e-03,4.151187684920236605e-03,1.072432032307736767e-02,2.978182016715877280e-04,9.637633968815491857e-04,2.709813950219209459e-03,-4.470690082327156631e-03,-3.300574591826531054e-03,4.947261135888568299e-03,4.595194696609259028e-04,5.050330886521683764e-03,-1.759046118668839082e-03,1.126047320659249139e-02,-4.191595174458419282e-03,2.445265112651482178e-03,9.444816244211534000e-04,2.805167254807899944e-03,-1.192329716487620754e-03,1.826809115326302529e-03,5.982214881064104942e-03,2.195687128339910300e-03,-8.856510275525799822e-03,-7.745162185143455134e-03,1.578959763069264031e-03,-1.812293855373319027e-03,6.037788909916507255e-03,6.314013346216866638e-04,9.438721392723893946e-03,-3.429076400478312490e-03,-2.857730547445726070e-03,-1.094873480251086842e-03,-1.102828179702083737e-02,6.502702556538580807e-03,-1.037197500840750762e-02,-7.847085489016226
989e-03,5.561435000093008273e-04,-5.852147030951242201e-05,4.649049902363712114e-03,8.729613073163267761e-03,1.877458818073892517e-03,8.022917193475751982e-03,7.096393185310958737e-03,2.323586549348911727e-03,-3.509276960839368656e-04,6.326158535336100230e-03,-8.226561341050231749e-03,-4.529552135980974802e-03,4.446456285873622190e-03,-2.293687898622188200e-04,-5.141175293783206528e-03,9.415349155801357433e-04,-9.287356180569408468e-03,-2.685933460897780602e-04,3.806588685767512293e-03,-7.505567878249694638e-03,7.813055981467644309e-03,7.968460827304965979e-03,-9.267706991954296819e-04,-1.170508897879053451e-03,4.450084267148849033e-03,5.877978103938199909e-03,-1.664145611437938379e-03,-1.248261650303046470e-02,-6.755392391999229772e-03,-1.833125867446870963e-03,3.991255151378817444e-04,6.144606103117580820e-03,-8.003808820945441244e-03,3.113105442448542773e-03,-4.160901782007194946e-03,-4.630283688482037668e-03,7.617871642778517062e-03,9.909573504306211453e-04,1.163485039299306557e-02,-8.551294482514770409e-03,1.734489187752487902e-03,-1.175234801678489262e-03,-3.657867514693326130e-03,-5.437499404735117968e-03,-6.931061582336053156e-03,4.483833728114478001e-03,6.818661598778109727e-03,-6.023618231050060709e-03,5.029082599613734356e-03,-2.432650897080521155e-03,-4.968104670625783301e-03,-1.699632000946230256e-03,7.097828474666255773e-04,2.095775878050105089e-03,1.930612594400404934e-03,4.220745558083191344e-03,9.656461830428017448e-04,-4.046086601935907539e-04,-5.595501488579610917e-03,1.634406309286704244e-03,-7.538382185889958530e-03,-2.461947116526128944e-03,8.803242400262005149e-03,-1.759287058787174785e-03,9.466635618778676889e-03,4.703897504751673904e-03,-1.488318391767037407e-03,1.947934423925180077e-04,-7.877996090882881039e-04,-3.813615352456693405e-03,1.064727681784236393e-02,1.001233846652122787e-03,6.928984753750674734e-03,2.134108958634865078e-03,9.683150786207926630e-03,9.847221613732111758e-03,-1.840302885950624969e-03,-6.404893160401646870e-03,-2.625042724175017436e-03,-2.939547019623870057e-03,-1.517557686611427977e-03,-7.074697841250934816e-04,-1.055869919440111365e-03,1.783381912987239745e-03,-4.741764877210924434e-04,9.640576264057808265e-03,7.503987974116422231e-03,1.351198049432695203e-03,-3.502207445557907212e-03,-2.585427166751020928e-03,1.278016858738933136e-03,-2.444068836828339891e-03,-3.480924696369141970e-04,-3.127263979093184375e-04,-2.276623812468443406e-03,-3.574115438432929389e-03,9.319617477760156615e-03,6.069347498869543875e-03,-1.157816715327642052e-03,1.614130972759244545e-03,-9.620634050660671274e-03,-6.776179838102583224e-03,3.132993611573676833e-03,3.379929771573883610e-03,1.862432683789586822e-03,-4.721501567597889172e-03,6.236014913605867835e-03,-3.323652194774003856e-04,6.850583683699807856e-04,2.512413616743211422e-03,-8.351674253543833401e-03,-3.589863021765662618e-03,-6.787144546256570522e-03,-6.336017885104845530e-03,-5.940813915993289548e-03,-1.081058528640326151e-03,2.931471376855003964e-03,-7.974712014632356211e-03,3.919493937065240095e-03,2.177565758811822634e-03,-7.079097261477731301e-03 
-2.252923680217649560e-03,-3.070490959137514575e-03,-1.097985409193999755e-03,3.796088539330694234e-03,-1.687277816903674744e-03,-8.833099989198969770e-04,1.197541934002027052e-04,5.350086250462012666e-03,-3.311411111870068980e-03,-3.032144022173955886e-03,-7.953443350325183070e-03,4.854033451247636265e-03,3.843828916234271701e-03,-6.450768647268265849e-03,-3.160563303567457544e-03,-4.506959494214855832e-04,-6.264948671557969175e-03,-2.195458267432630253e-03,1.559152579429618065e-03,1.417179113737386227e-03,-3.645025410510152001e-03,-5.544913941286644388e-04,-4.011254148207786009e-03,3.410278012698141244e-03,-3.753141498053133226e-03,-3.097430669974785260e-03,-2.082459765160777666e-03,-6.946359207562808995e-03,-4.768122172540490214e-03,-2.900910993810652512e-04,1.702988364108664449e-04,4.660018781947725466e-03,-2.413533889248925621e-03,1.237936492099651366e-04,-3.562648266276534099e-03,4.157365727497756153e-03,-3.759982802983096828e-04,-1.918569992451713002e-03,8.423030817972116877e-03,-9.745123515947728618e-03,2.846881924896818095e-04,-2.534998520088768172e-03,5.160650353080892781e-03,-3.957731258386019968e-03,3.136448551260883743e-03,6.534470119799605688e-04,5.374676743797267807e-04,4.419644368556702868e-03,-1.511697192317692584e-03,-2.660639785225904063e-03,6.830239290108022776e-03,-3.628333673027637173e-03,-5.798037728117281554e-03,-8.820328459044315256e-03,-9.453640959593319218e-04,-1.629676422276336118e-03,-2.669742325591199089e-03,-4.540305209238205877e-04,6.301011043435983568e-03,-2.904990465463869794e-03,-3.982155894056835753e-03,-2.197981410238369210e-04,-5.905056908389392026e-03,7.339862156586968167e-03,6.275939183514129584e-03,5.126999775462420111e-03,5.844044821374294379e-04,-7.416629701660055031e-04,-1.650501694903978942e-03,-3.379280225544103056e-03,8.601890086553242928e-03,-9.086620411941788086e-03,-9.682890578132282919e-03,4.625992241769990958e-03,-5.786620297078208519e-03,3.301163291612567614e-03,-2.105214360326067811e-03,1.884567139680766489e-04,6.134452302501245714e-03,-3.193936850032805946e-03,1.704128732465967936e-03,-7.988031402023552940e-03,4.400529952382225413e-03,-5.165414912573856009e-03,1.268681784924990952e-02,-1.906805958510335687e-03,-8.696535887288182193e-03,-2.376134691351143233e-03,-4.778235684167666565e-03,-1.713437537818361407e-03,1.118517566188948452e-02,8.036648905313045418e-03,-4.070124471021688284e-03,1.125333595125028140e-02,2.236517750087971051e-04,8.902178478903249317e-05,-4.211997900194937999e-03,-4.819288031491430688e-04,-4.432902311250334925e-04,4.495561410522738548e-03,5.744238556591833855e-03,-4.807897497900017435e-03,-1.927667671459742504e-03,-3.051643929894272411e-03,9.309498100595050771e-03,1.278107644797506518e-03,2.014994569081692347e-03,-2.503989880296058680e-04,3.756703336132275351e-03,5.220648754391627767e-03,-3.231777738042516772e-03,2.804091726408934181e-03,-6.135644620092238835e-03,-2.379332539799496830e-03,-4.628673348123942682e-03,-2.406601074108585120e-03,3.023117801167221185e-03,8.198398994535985654e-04,-5.397102070036717721e-03,-1.187538917529677197e-02,5.337324787293369448e-03,-5.039133559827966430e-03,2.674605803589772020e-03,3.938298673218436309e-04,-5.327716809328738277e-03,-1.038110632849471686e-02,2.008608822973527702e-03,-1.754252940842452972e-03,-1.997244528000089846e-03,4.787873327448719830e-03,-1.051184870857960326e-03,-3.388098401398683475e-04,6.616985180024081287e-03,8.068154057670790143e-04,6.261950922231003004e-03,-1.527282966430300402e-03,-3.471016134435643986e-03,3.777984685003293527e-03,4.910637739946029610e-03
,-1.095254225926506602e-03,3.736104098048330380e-03,5.071569794968557726e-03,-8.741381906258364684e-03,-3.827317158382876044e-03,-6.222917904896944387e-03,-4.490391658317486288e-03,-7.297669727718082526e-03,-8.425276780467168963e-03,7.810110351022415444e-03,-6.499007382882658959e-03,6.494554519766662475e-03,-3.535292970039393638e-03,-3.636667458539158578e-03,6.341078061919175508e-04,-6.857358891968506898e-03,1.014609722756432695e-03,-6.087513561387238037e-03,7.572610329975608921e-03,2.719373592446532994e-03,1.658469440989329994e-03,6.971775501259359908e-03,1.073756876786860666e-02,-8.990618655775533402e-03,-3.992426524377738588e-03,1.338359553245232298e-03,2.256953097037654298e-03,-9.034026110160046572e-03,-4.651288737771483287e-04,-1.641646008653943091e-03,6.647728556023626764e-03,-9.112560497213208557e-04,-5.343838099914097022e-03,5.190940280371905170e-03,1.132515992471630007e-03,2.101660532477105364e-03,-2.315670138489206789e-03,-1.196505559438373369e-02,-2.417407544366280674e-03,-1.576568626501444236e-03,-5.308336059161027076e-03,8.627542060902158513e-03,1.832356902816858210e-03,-3.427199757261388383e-04,-1.841302679753506043e-03,-1.774674904787325097e-03,9.393559188049660155e-03,-2.624809708873716930e-05,-1.042258927035078810e-03,2.338234792244443458e-03,6.290756507423867459e-03,-2.707416090333698670e-03,6.201768345927670054e-03,-3.210599252233204120e-03,-1.839988803830937877e-03,-5.406933620586125075e-03,9.497541567926716041e-03,-7.635172788256747167e-03,-3.527005933419392718e-03,9.268197414180104864e-04,5.073353778004624981e-04,-4.769459871873110365e-03,6.245696355982027943e-03,3.411916087925354578e-03,5.539087002227164463e-03,-1.287892532208094552e-03,-9.552746001067997234e-03,4.884136770355189221e-03,-2.362919079004231814e-04,-5.442886952768929802e-03,-2.502056545275411093e-03,9.401440306854871864e-03,-3.169354564030315170e-03,3.569226358106883986e-03,6.616676077644207191e-03,-2.494183000564051053e-03,1.765098389123627518e-03,1.470632468957393897e-03,-6.590463426178966647e-03,-1.184309242043213957e-03,-5.810519111460576112e-03,-4.451524005651711843e-03,1.692393836916241756e-03,6.730330748088081579e-03,-7.661615681559180225e-05,-7.851253112919374250e-03,-2.191347363944339249e-03,-8.957488327265028874e-03,-1.396200325229144399e-03,1.008365086693027886e-02,-6.137451946544096289e-04,6.009565023304186404e-03,4.581620970490767400e-03,2.782067989210524738e-03,-5.466919754234081212e-03,4.410917151110711710e-04,-5.061101121737179506e-03,2.867589652043728288e-03,2.990935988336182547e-03,-5.528670709253286596e-03,2.330870205218057035e-03,3.416855822007521404e-03,-6.104045230826421996e-03,2.400121172653796493e-03,-3.710620130526357061e-03,-2.164620416826845955e-03,2.112927435998460980e-03,-5.668593689820532709e-03,7.673830905165681532e-03,6.550124546148035484e-04,4.087277055050976202e-03,7.678355366184712533e-04,3.610008916971296691e-03,-1.269298749524678301e-03,-4.495704695393824661e-03,5.029304469658122355e-03,4.788903252628273072e-03,-2.955007198011899250e-04,-4.174128179357872344e-03,1.727481709841376112e-03,3.456428792546174193e-03,-4.537210341527621901e-04,-1.195857374458552313e-03,3.269028891076872657e-03,-3.270464341265363027e-03,-4.302295617905086615e-04,5.498054886987920227e-03,2.647253157896020930e-03,-5.647515190800186068e-04,-1.065227574531976652e-03,2.260736210488666596e-03,-8.709238384264356017e-04,4.403166789166510810e-03,-1.747228300625822822e-03,1.176011862644821650e-04,-2.264618749050392385e-03,1.794702628899150004e-03,-3.939975011620752178e-03,8.092195441570101608e-03,4.230
828872419290301e-03,-3.257819273592311906e-03,5.775960111324157480e-03,1.766103257227534832e-03,-9.678135982424325738e-04,-3.250174634910312898e-03,-3.971471079117185748e-03,-2.416610150559944312e-03,-2.107024223722408530e-03,4.651642913965576935e-03,8.222778762859109247e-03,9.556746704618446784e-04,7.925875285109407198e-03,-4.514803964532995587e-05,-5.071164623577761009e-03,-8.989288780006231706e-04,1.780267773449184564e-03,4.427481734696097834e-03,4.606744421508558382e-03,-4.995624695551063518e-03,1.141976064883682557e-02,-1.268100939359122692e-03,1.957198755476707505e-03,-7.555006816230842859e-03,-9.465803840924953000e-03,1.228265284562900185e-04,7.138887517635136154e-03,-6.861771231051882070e-04,-3.309411342772305281e-03,3.559620287061832089e-03,-4.044142524047902865e-03,3.604121501058259180e-03,-2.396627268049567917e-03,6.484796469538715337e-03,-6.968378243495034364e-04,-8.605918570952036245e-03,4.788730849717219053e-03,6.176469515648778420e-03,1.144361977365527389e-02,1.469461607800070287e-03,4.675639165678668364e-03,2.707012508742606596e-03,-1.447082294288146729e-03,-1.654489199892472890e-03,-1.269411919075616978e-02,4.304193805603064721e-03,1.190532153816251326e-04,1.637902210732171187e-03,7.514436215400625493e-03,7.665960219456561739e-03,-9.368635555755821728e-03,-6.282291291205352977e-03,-2.324052626637887042e-03,1.868655728167439856e-03,-1.716222781305191654e-03,2.721003903042645486e-03,-3.001487713751199368e-03,6.178739254930207085e-03,5.233467098582741039e-04,1.666968347708372737e-03,-5.587724689728342659e-03,5.043881275493675674e-03,6.673735389865180154e-03,-2.137292223883673157e-03,4.470814801318211801e-04,-2.373748560606929112e-03,1.564718231586306437e-03,3.487922652173428614e-03,9.574933769281278317e-03,-8.677482435329271146e-03,-1.003038968401594308e-03,3.866096744688141303e-03,-8.503634110207790309e-03,1.749329617570103605e-04,3.565637988458323412e-03,7.928329286846084664e-03,6.875351287781455815e-03,-5.213411073430350602e-03,1.683806312943057467e-03,6.668279517762816175e-03,-8.848058952090560913e-04,2.231164360475052042e-03,6.008202335635048647e-04,-1.920963089466014382e-03,-2.658956339594942958e-03,2.548292748535901410e-03,-6.969345666396904726e-03,-1.166412004123167437e-03,-2.611607750515809821e-03,1.030888322788988233e-03,-4.751536583470462806e-04,4.725588502447087701e-04,-2.814383516454876423e-03,-6.655373836872129772e-03,4.515845388240762558e-03,-6.094952460718015439e-04,-6.744980578741723194e-03,1.461705219724548661e-03,-5.708102251557953125e-04,6.545867709882485612e-03,4.349894729144008622e-06,3.916759733859493699e-03,-1.157259498398411626e-03,1.934815279660022239e-03,-9.630913048945221851e-04,-1.876811840970970527e-03,5.296810033558515228e-03,-5.327055636703813615e-03,-8.016224883750069832e-04,-2.928119599147037400e-03,4.650899937482870852e-03,7.349672972826268129e-03,-1.018528575135163594e-02,1.872495603276717272e-03,-1.185128110917377199e-03,-2.549255280159898589e-03,-1.919706776734072650e-04,2.864840809877301027e-05,-8.086201812777270292e-03,-8.584930348171499609e-03,2.693223844770305589e-03,-6.287290423443468723e-03 
-5.329188632523573776e-03,-7.401434354830357942e-03,-5.416555551663466408e-04,7.049244955938437432e-03,5.192933086859637809e-03,3.790584432703569210e-03,5.325120342689094143e-03,9.242723348249365107e-04,-4.390471832924150588e-05,-3.678058929532514003e-03,5.066734062602206226e-03,-9.412074256134430380e-04,-7.397452755811061847e-03,9.704114777281628180e-04,-9.418863421624866505e-03,-6.018536061138815910e-04,5.623085507919020967e-03,-2.184874215870745168e-03,1.549276006661922354e-03,-5.500487509129216301e-03,9.422116046478384854e-03,-5.964571196856826088e-03,1.293423623039625936e-02,6.513663449312125965e-03,-1.451359954323580819e-03,-5.383574083323813039e-04,-2.574043676519610115e-03,-4.605458834145498108e-03,-4.800026834239025722e-03,1.519924117262850660e-03,5.850627988283263990e-03,-9.387148058039652618e-03,-4.854612917193555817e-03,-1.189578345418409100e-03,6.147307229598140271e-03,-5.685294568306325852e-04,-3.169194629727109611e-03,9.875890301308519817e-03,-3.680502793576848758e-03,-5.219230102102914715e-04,-4.133195109758827993e-03,-9.807892702300707519e-03,1.235241515730672998e-03,3.811241428757228874e-03,2.791474258285216083e-03,2.482607966727915774e-04,1.926724844246385467e-04,-2.580125909147559706e-03,5.595730970682195361e-03,-4.529759065582252692e-03,-4.685571165647809191e-03,-4.398560260330760564e-03,-1.444648065796786188e-03,1.927585179735325253e-03,-4.714037249177089467e-03,3.159851210953693295e-03,-2.546464304888071840e-03,-4.543162963060843641e-03,2.144292761572789106e-03,1.504587801808103574e-03,1.288210340544232923e-03,-2.703064007530455999e-04,-8.440828434638369829e-04,2.910603132156118402e-03,4.704151438389827210e-03,1.513509155182592592e-04,3.759987854143026064e-03,1.372123128269752074e-03,-7.491644819067118377e-03,-2.947578771660721610e-03,1.035432489494395730e-03,2.229971855213410802e-03,4.026128126131419890e-03,-9.277652045517977356e-03,9.112295953783095329e-04,-7.121621512089360354e-03,-5.994164913319483153e-03,-4.547672057496011423e-03,1.282237050222703907e-02,-6.253652617538264617e-03,1.614973542267544903e-03,7.574972366806513244e-03,6.211593931826657131e-04,-1.121984347986776896e-03,-2.737380971212255345e-03,1.580759392777436709e-04,8.253583535285989679e-04,5.537301454132070172e-03,3.863992128338659699e-03,-1.346675796818123071e-03,2.527470811126745717e-04,5.241113763420194617e-03,1.796729183542776845e-04,3.547692789497321111e-03,-8.664106694517172483e-03,-4.461797537949874735e-03,-4.278866107949713497e-03,-2.769358426411949129e-03,-5.560229251179370571e-03,-3.783922574609357384e-03,-6.793118163011107984e-03,1.570778143372938626e-03,-1.573288713480078781e-03,1.002020270708556607e-02,4.295333791447913420e-03,-1.006931700884482585e-03,1.218565424561550859e-03,3.931485800867030866e-03,3.890468567299298040e-03,-5.881880714938046185e-03,3.739249654869073944e-03,8.667647023023411848e-03,-5.482163694276918041e-03,-1.757234528073332120e-03,-2.718769074745739441e-04,7.943408855131093432e-03,-6.026120389537072131e-03,-5.157905818432467586e-03,9.788120079334482161e-05,-2.351895661413852377e-05,-3.923998681214462166e-03,-1.484350272014222510e-03,-5.953131710964225387e-03,-3.279931151100968362e-03,5.300393993014266766e-03,6.019528088327673852e-03,3.046753640580779528e-03,4.812921855413838782e-04,4.491646964867646898e-03,6.237639745483021352e-03,-1.978035861297866745e-03,-6.043410600198718778e-03,2.356795538166297187e-03,-3.036776048608815903e-03,-8.221127553866747553e-03,1.920207323314058415e-03,-7.054256208678183308e-03,-2.634792228512587275e-03,2.884685894380383236e-03,4.820321
006873596117e-03,-5.348912447032831696e-03,3.164433679430763303e-03,1.662715959356119658e-03,3.449619699137022270e-03,1.589206536644990940e-03,-1.266027459297382977e-03,1.241705138090296858e-03,6.961152692177293745e-03,5.306846634550934846e-03,6.431417552339553471e-03,3.705074272829525072e-03,-6.094255043780970847e-03,-2.398038305829272728e-04,-2.191576230707305891e-03,-6.703283285974013551e-03,-4.834592952318088506e-03,-1.181207078462801451e-03,-2.319583687019754082e-03,1.682536156180617585e-03,-9.873510564953750721e-03,4.750361826835514806e-03,2.745058540140246205e-03,5.528181091796661485e-03,2.468329434975098050e-03,-6.187047539376146817e-05,-1.380928960331134292e-03,4.934805989078121062e-03,-6.298436681661854055e-04,-1.912850741664752626e-03,4.168592987594525083e-04,2.397447622607300384e-03,1.046016018493454644e-02,2.336881196909764390e-03,-1.056519354310666648e-03,-2.960000682386209562e-03,1.270745326567906258e-02,1.762277651056172543e-03,1.010444641400989711e-02,-6.158787923828616673e-03,2.604097754314029737e-03,-9.617279742385830390e-04,-2.827829445155630151e-04,-2.955555247965682177e-03,-7.607930026762612687e-03,6.213632995417253148e-03,6.163168874578379454e-03,5.446110577812623803e-04,4.506257263244932421e-04,3.107578727100405113e-04,-1.694774255423743862e-04,-1.782988525899934602e-03,-4.554929209912773891e-03,-1.245903567815659862e-04,-5.014521164542500367e-03,9.978663552470607486e-04,-8.469632998876749919e-03,9.995547001769107889e-03,7.126333078112102556e-05,-4.709072070774951782e-03,-2.366622770994150551e-03,-1.864270811599541599e-03,7.217301381584198885e-03,-6.270237192647194757e-03,1.595016250410514688e-03,-2.967618746106058438e-03,3.445677561295973964e-03,-4.060017761176199294e-03,-1.051304948172065903e-03,4.051482876846867164e-03,1.693573562610088646e-04,-7.798264155405578051e-06,1.801376572003845046e-03,-2.194530255270157534e-03,1.302608274360354552e-03,-1.036046018298447929e-03,-1.428751471197056873e-02,-5.914997331980815631e-04,1.267324645791792015e-03,2.851489779844685376e-03,4.619484206763702604e-03,2.033800351300886489e-03,-1.748194570830028868e-03,-1.742856063826172847e-04,1.570323688870593152e-03,-9.695042081685135288e-03,1.022012346634134684e-02,-1.047589792479794706e-03,-2.934233038069080507e-03,-6.650874021279730344e-04,-8.395755116141916802e-03,-3.016401506024511507e-03,-2.805964468432516935e-03,-4.029710513418887849e-03,2.604337949546440146e-03,-2.039959228621837217e-03,5.264152799403600848e-04,-5.612509063040511080e-03,3.450477460267111721e-03,5.210341866861086663e-03,3.323508915874625030e-03,3.959105053649522197e-04,-4.937472657926954278e-03,-1.123889396366435124e-02,2.106042249256496329e-03,2.616469123191462413e-03,-2.703265716403965499e-03,-5.735524860949357470e-03,1.272607753333136792e-03,4.537067549550625155e-03,9.269302516704045316e-04,1.355127980304986213e-02,4.579903523458009329e-03,-1.313185397042910575e-02,1.876809438880155912e-04,-1.525832597384103092e-03,-4.742668169831080853e-03,-3.776943926803061000e-03,-8.356462166802283405e-04,-5.264338084470426664e-03,-4.778158566815133255e-03,-5.758106584531427416e-03,-1.289863633397119597e-02,-2.434003815208276005e-03,-7.945787485030351982e-03,-1.450215178135910569e-04,3.173501988901733951e-03,-7.444463517128581398e-03,3.670159911155108055e-03,-1.412136894547534171e-03,-1.434492399139504753e-03,8.778821588205365511e-04,1.709664079998367585e-03,-5.944042731155139980e-03,-1.885656322664759155e-03,5.333968511221106809e-03,1.795573185294160618e-03,1.661016885429474445e-04,7.456654371867560242e-03,-4.8760355838944
64814e-03,-1.878687684483104072e-03,3.669109059866054214e-03,2.236025048830548537e-03,-8.149930708619813544e-03,-1.296712595749595701e-03,2.344716630128810837e-03,4.598691905809841433e-03,-1.377370270489132464e-02,-8.698303595937489571e-03,-2.521944671583158364e-03,1.551885270665512599e-03,1.292891584015269093e-03,5.735929130517542591e-04,-1.069699232052071696e-03,-1.348545557858186117e-03,-1.228333009863040795e-03,7.314565359052783468e-03,6.296006609743157699e-03,-8.865358987890993234e-03,4.359680990339836505e-03,-3.687413934048085901e-03,1.609898186245553933e-03,-5.592794295183297834e-03,9.200889354119801480e-03,1.884179585724900371e-03,7.700255986619977157e-03,-3.358910677630732558e-05,3.727794396654913328e-03,-1.262764659150481797e-03,4.060086885985488177e-03,3.967594062997441886e-05,-2.804136463641443856e-03,1.200265403147090203e-03,3.683433046871668719e-03,6.597986681172145727e-03,3.800801895350987714e-03,2.198370786546706944e-03,3.009225880719162017e-03,-5.918402719500427678e-03,2.929698049108824757e-03,-3.946573445428971580e-03,1.779294710057686101e-03,-1.205727052071007633e-03,-2.132825949943686298e-03,-1.722441738762968237e-03,-1.768230143851920802e-03,-4.023863843284336377e-03,4.288824716230361586e-03,-2.334585008533574153e-03,3.603170078671434975e-03,-6.297423448826229564e-03,-4.453607303690299522e-03,-8.931721050441973619e-03,2.156152474738704233e-03,8.841303469207539151e-03,-6.293709715033574646e-03,-8.474802113474197733e-04,1.818980548311895876e-03,-9.700546252722427182e-03,-2.025542482561123840e-03,6.692134880885495379e-03,-9.803273677881627936e-04,2.814181975596430265e-03,1.152034541727763040e-03,9.370546092265979793e-04,3.402739844730784998e-03,4.479899279812534982e-03,-5.536564873447342994e-03,-4.406222796562669561e-03,6.567228140526491960e-03,7.973149568931388667e-03,-6.845061082368289864e-04,6.117175648010828894e-03,2.960808118525892329e-03,-8.245577283994519335e-05,6.761692008242817589e-03,5.122859425040882271e-03,-5.854103830578921003e-03,4.004580065916987786e-03,-2.035882240633675455e-03,-9.614029778657363912e-03,-7.548763808067864359e-03,-1.603569891161946105e-03,-1.241819643627678623e-03,-9.613393491463945520e-03,-4.530090179112059891e-03,-4.073157274071239546e-04,5.608442852588787705e-03,3.292175729936836718e-03,-2.328672737990473800e-03,-1.006095493030665809e-03,2.478985388708478555e-03,-8.970039375741138687e-03,-4.679993438327146628e-03,1.119865405516943022e-04,7.954431918642257404e-03,-4.711319775753896287e-03,-6.189869216954468033e-03,-4.502506147389067725e-04,-6.273743881189590942e-03,-1.077968967868061026e-02,5.155372998557389199e-04,-7.810325372067104029e-04,5.432773312372724157e-03,5.918896044952329932e-03,-1.798857827415405782e-03,-2.774404730773474131e-03,1.096623170443649991e-04,-7.944904008084537408e-03,-2.932603737880372027e-03,-8.647095485223221453e-03,-1.017314112564644520e-03,5.238559070318436241e-03,1.566945614707536058e-02,-3.572755521957146716e-03,-1.013966566355771785e-03,-3.718698227417204308e-03,4.881383202889001596e-04,1.158466024441274882e-03,-1.920642464436013268e-03,-2.985140851913207789e-03 
-4.741006907016926709e-03,1.311064629180742902e-03,-2.801693510342351777e-03,1.367525019230693070e-03,6.196393704386860164e-03,-4.874377998117108111e-03,-5.233438511618030906e-03,3.690284000808022324e-03,1.544766968144195606e-03,8.622488221586098846e-04,-6.728972660510063909e-03,-3.777290151206349079e-03,3.237619377286832158e-04,-2.201634519762089761e-03,-2.832606141753768545e-03,1.940399477791023529e-03,-1.824330801006003263e-04,5.764773634295978534e-03,1.057108583290053937e-02,-5.892969597399792657e-05,4.809011159878049356e-03,-2.819494121382853935e-03,3.756871940530041604e-03,-3.726059693302299566e-03,-2.461716011245819891e-03,1.772080127647958706e-03,1.808589858690630848e-04,1.229472382441929154e-03,-7.855521459256029765e-03,2.455371447320422052e-03,1.180496831974272090e-02,-6.632762379533275847e-03,3.750419775224246403e-03,-3.143438230488558929e-03,3.438397270101751883e-03,3.646426036996496933e-03,-1.170101201790189156e-03,2.646264703290397008e-03,1.794710895144521149e-03,3.522831292581467006e-03,-4.190942082588760068e-03,-6.959943748290473541e-03,-8.816990358331307362e-04,5.316312240605919857e-04,2.006211223693370616e-03,3.011639656725783112e-03,4.739201346111527528e-05,-4.161752741898202927e-03,-4.759914489859931228e-04,-9.798612300173302828e-03,5.850258870823279479e-03,-6.665040474074047927e-03,3.775172568537210832e-04,-5.886375427905484381e-04,-2.441320275082460578e-03,-5.208623932644668641e-04,-5.788649069811112498e-03,-6.682224023243768518e-03,-1.286295907071741303e-02,2.958728994856898997e-03,-1.474019524486910090e-03,-1.015839642808452452e-03,2.310294283650101300e-03,-3.351420696218832614e-04,3.016229702413823766e-03,-8.348209897268945440e-03,2.957260479417500782e-04,2.668448906995999487e-03,-8.159099353549349037e-03,1.932866599818221882e-03,-2.179418338360968824e-03,2.708212299050576609e-03,3.545642814021169806e-03,7.429101194065931314e-03,1.158888404518209139e-03,2.017781032322331185e-03,2.731060874613179187e-03,5.067122397102008687e-03,1.387532312690085111e-03,-7.625793497240056923e-03,-2.070624354597463881e-03,1.297451086462656292e-03,-8.062395209039679642e-03,2.904120696327643276e-04,4.652480227614010284e-04,2.025173018132764643e-06,8.575255547297414363e-03,-8.801964291635726648e-03,-6.715990720475703210e-03,-2.813548000800739600e-03,1.577902218052209052e-02,4.490271231114508930e-04,5.197214275655800617e-03,8.467004149505800298e-03,4.751324193294292376e-03,1.479425681119980003e-03,5.659396336467998993e-03,-1.884834795600818895e-03,-1.731857371860326660e-03,-7.045758805429715166e-03,4.408616768945968281e-03,1.925036564231913123e-03,-8.171957244967956169e-03,-3.013867282261021782e-03,-2.970024724907691094e-03,7.998386213372421852e-03,5.688258782085310432e-03,-4.650424958843872580e-03,7.828762401037289709e-03,5.299412634155144936e-03,1.161754179132703981e-03,5.716746329642170311e-03,5.680451214827028986e-03,-2.491791379726871294e-04,-5.125321613214910664e-03,-2.985198082438481060e-03,-7.111025470747174733e-03,-2.080314989195147443e-03,2.478611030653963231e-03,-1.095095968457077693e-03,4.007958505790573320e-03,7.571541030515297963e-03,6.121947100900430351e-03,-3.899262563129180675e-04,3.106999861574686207e-04,-4.302134403255898285e-03,-9.038901921937443932e-04,-1.718161035505605134e-03,2.013421627666023804e-03,3.519585570618018102e-03,3.124414893858255777e-05,-1.328380783575360512e-03,-3.191812325792667184e-03,-5.416391409021103880e-03,6.796051448539550122e-04,2.084652043706493058e-03,-2.238587036447243009e-03,3.896198647672256127e-03,2.584595439869427202e-03,3.095166957208967
144e-04,2.149678781180717688e-03,6.279964943356798786e-03,1.292860840442094884e-03,4.068805067831048061e-03,-8.021553707018572871e-03,2.741495579120631267e-03,-8.116208708272486206e-04,3.383535022849526105e-03,3.364280377927754721e-04,3.302475493744941228e-03,6.299215057603497862e-03,1.638055469629215530e-03,-5.791910092548614546e-03,-4.166359834029478848e-04,3.067386239979631724e-03,5.894938357925322042e-05,-5.302137294736112639e-04,1.683418354244329293e-03,-8.323519641485020845e-04,-1.091011504602754133e-03,-4.320564673016388975e-03,-6.327719468020357590e-03,1.602660888181019292e-03,2.646709511672700431e-03,4.257972387572028674e-04,-1.697200149879574517e-03,-4.449237058128073519e-03,1.968141379814426512e-03,3.745503147295452164e-03,2.293250458260902031e-03,2.794904577962854623e-03,6.426951898211272912e-03,2.277106494501814592e-03,-1.720157896177138511e-03,1.551415345602827388e-03,4.707323149623856944e-03,2.030960869576133813e-03,1.389050411067063815e-03,-5.211762600591900857e-03,-8.923910800203417593e-03,-6.254662253626313119e-04,-1.892487610132100376e-04,-1.899849081113242591e-04,-2.383806501621009086e-03,-8.057936943441148039e-03,1.317011709916356696e-03,-9.522408316547907245e-03,9.792761382471800412e-04,6.217126562353313341e-04,-4.241518880835840576e-03,-3.024901080359699776e-03,-9.459400333251806112e-03,2.164649270119509920e-03,-2.279889384420921152e-04,-3.476127655047571170e-03,-4.111684467053583632e-04,-9.719429277276078966e-04,2.045304522479804606e-03,1.505828671217605483e-02,-1.438210522561058149e-02,2.177338435486957709e-03,-5.347532463812102767e-03,-7.664259120085676301e-03,5.252462257602173376e-03,8.511248095113718021e-03,-5.284712974641377591e-03,2.841919331014111129e-03,-7.220903053479335414e-03,-5.715472259372288274e-04,1.176251740337792763e-05,-9.239914877766416956e-03,4.508257036971701631e-03,-2.980639854925581055e-03,-8.167714531779527928e-03,9.733128762368070327e-03,3.827727528858585024e-04,-1.925895431685245714e-04,4.750203197771787379e-04,1.159793810580895266e-03,8.197881863533572301e-04,1.163947649336283447e-04,5.784717192656643227e-03,1.999706424621209254e-03,-8.362230524488777073e-03,-8.217165357863389112e-04,-2.580573641724297901e-03,4.146349732087388669e-03,3.479156206557977495e-03,-1.244612245993572709e-03,-9.186149270807957012e-03,2.785870184357120034e-03,9.507572214383003045e-04,-3.134942441994095341e-03,-1.508959834936365310e-03,8.730789519556628490e-04,1.828605447845575152e-03,-1.047210337971854650e-02,3.749376416155349341e-03,-5.747046599261848056e-03,-7.018039715356057381e-04,2.061275623558463544e-03,5.701941415857592586e-03,8.610873594211228624e-03,7.501345974109090023e-04,1.569737230174911226e-04,-1.143095206178485791e-02,-2.175805590274558672e-03,2.687088153514395009e-03,-1.131870286670353352e-03,1.680343041585182504e-03,-2.235641071324194614e-04,2.619662272926061557e-04,1.874241327821757688e-02,1.624107884657519617e-03,2.604537491452933989e-03,3.718179430030334968e-03,-1.060823923516497234e-03,-1.888713072264000753e-03,-8.606843079761328924e-03,1.417447425857354870e-03,-1.220262736253659988e-03,-9.145237853780992373e-04,-9.674827336636047589e-03,3.072961558952452821e-03,-5.921582267031204327e-03,4.167874553982640375e-03,6.019747980538099272e-03,-5.356968604399764758e-03,3.239791013201535896e-03,-6.986983830475078892e-03,1.759583605190879292e-03,-7.052951589062703142e-03,5.665526944035287164e-03,5.804658605909534218e-03,-3.796359677222042785e-03,-5.181296850399031741e-03,3.254049741539554765e-04,1.963747569723869272e-03,6.339100825222812249e-03,3.8201184
60187820093e-03,-9.639416858739220618e-04,-2.683269954838087782e-03,5.926422009217266256e-04,5.637121657367682136e-03,-9.023301165185910154e-04,4.856055285056384493e-03,-3.193190432453755796e-03,-4.423700071088708732e-03,9.403272542045314729e-04,-4.609120253546142819e-03,1.309501679728252007e-03,4.766170406362057847e-03,7.616600665540555239e-03,-2.540721432311853108e-03,4.405602676310784935e-03,5.022439832992251201e-04,-7.205402835351351967e-03,7.170419434876025740e-04,6.094166102695098772e-03,2.951760880099291075e-03,8.019430620450008346e-03,4.664327339988946948e-03,-1.144105539951292307e-03,2.837787599112168765e-03,-3.384176605701895782e-03,-3.933153869736452125e-03,-8.620303713737049664e-03,-5.424233985610723771e-03,5.627348273679752479e-05,-9.604999171360624824e-03,-6.811174512983226778e-03,-1.937618964348173474e-03,-1.079468745663048762e-02,3.088819911158452100e-03,-6.220066503158395814e-03,-7.504726793639925238e-03,-4.127237456233058890e-03,-2.723234809484601469e-03,5.574697703096117771e-03,2.466159192022627719e-03,4.225731561485098799e-03,-8.695706946710842628e-03,4.356462526757126814e-03,4.009804300967176154e-03,-1.532875286366369315e-04,-3.353746434428948875e-03,4.348260952112371208e-03,-7.706106175369001332e-03,-1.165412480634584781e-02,3.672079632482280059e-03,1.021994050200735957e-03,2.865305281282615971e-03,-8.766103851148881326e-03,-1.149129723572146571e-04,-1.339671996941934565e-03,9.528084857529301940e-03,-4.500732733087810418e-03,-6.678507453837415758e-03,4.071282870333418034e-03,5.550656244542975445e-03,-2.806598494356477621e-03,-3.526907734652977473e-03,-9.924616514905932815e-04,-7.701526383306035554e-03,-4.283882560474861975e-04,2.083903577248419868e-03,-6.586441889502914204e-03,4.348121741442997207e-03,-4.308204799329452177e-04,-4.596357564113639181e-03,3.455620182157089718e-03,-3.748295376404981415e-03,5.074729252092286327e-03,2.416144713299586775e-03,-4.179268434560088632e-04,-2.557164532449619234e-04,-6.179696502060608512e-03,1.377778592047419932e-02,-3.928632907201586033e-03,8.476339852029690994e-03,-5.603003454308366597e-03,7.453355464104020889e-03,-2.663001974890833335e-03,9.278799037738011835e-04,-3.140088538273305783e-03,-1.865392644884306334e-03,-4.446768835389659606e-03,-2.387657751604773705e-03,-7.974923172413987682e-03,-2.288172541466803028e-03,5.259781622647027302e-04,-3.021557121745359255e-03,-3.805372106663840095e-03,3.889973713832059997e-03,-1.071496487517441080e-02,-2.980365944810519762e-03,-2.614268250526013450e-03,-1.896968926036119709e-03,-3.145740990582607537e-04,2.487502071582704984e-03,-5.350669901403849738e-03,-2.164999554367594715e-03,-7.178075473784196678e-03,-1.780794047001501255e-03,-1.173606292981626805e-03,1.038842927020273456e-02,-1.654490352174436529e-03,-2.328804020607745402e-03,9.549281262325264551e-04,-6.361114963614846948e-03,-8.624784036092278394e-03,5.499285022744610472e-03,6.083376527928322300e-03,1.406360232233463741e-03,-9.770804768117594824e-03,1.415123610508608196e-03,7.717414472081953955e-03,-5.747733878003031920e-03,-6.900542193105827296e-03,-1.001851899950372687e-03 
mlpack-2.2.5/src/mlpack/tests/data/sensing_X.csv000066400000000000000000000234201315013601400215450ustar00rootroot000000000000007.018571107611352167e-01,5.317544186429903341e-01,4.018666410095843577e-01,2.390697680774649669e-01,6.676854435084295103e-01,6.719358190946538301e-01,7.346200380109945138e-01,4.242923576151336751e-01,4.320474294164593498e-01,4.773621295882205517e-01,1.620299839317403956e-01,3.707633014697527263e-01,6.228955777389471038e-01,8.845770910205317250e-01,4.843209709655287298e-01,7.263328406339173426e-01,6.905543827120925737e-01,6.642151815383887659e-01,2.220843513136977199e-01,8.598004453627916543e-01 1.695685336551006284e+00,9.465037806990547198e-01,5.915187650281323872e-01,5.159277940188081724e-01,1.085081614253922577e+00,1.107176885254656185e+00,1.081233246531234649e+00,5.772633091383185810e-01,1.224468114017021714e+00,1.573346174392824093e+00,3.160920234453725719e-01,9.964959906970178860e-01,1.313852389888940264e+00,1.662265513771665315e+00,1.130238273047413422e+00,1.138448429027818953e+00,1.096091686956324196e+00,1.130424737384860823e+00,3.095962828770020270e-01,1.452312325435639151e+00 1.160108640906202915e+00,5.275768700476237694e-01,8.442787282486260425e-01,3.806242663257826675e-01,1.095167468774427944e+00,1.319640909521315297e+00,9.032633875004610902e-01,9.451634936890339622e-01,8.718852187017718247e-01,8.637665155706153453e-01,3.469711973147199258e-01,9.419451347980668521e-01,8.166353478399739352e-01,1.602089681189851511e+00,1.013274706952766335e+00,1.004192174469563437e+00,1.478664187393482043e+00,9.430598991617573645e-01,4.253047401398331595e-01,1.229017062834806939e+00 9.151292196078673058e-01,4.450674192611857038e-01,3.524699873026367891e-01,2.756779999395441161e-01,5.835960294881290533e-01,6.360468873099185405e-01,5.250133291387188983e-01,3.566123691880087043e-01,6.903610784162339531e-01,8.633679893254795346e-01,1.852759967703723476e-01,5.991932680560264624e-01,6.692270357265919234e-01,9.227889701982663340e-01,6.495746371426793786e-01,5.773119045323484855e-01,6.539156307107213406e-01,5.808368838271429357e-01,1.777480302667651946e-01,7.474766638427596810e-01 8.670664387263647965e-01,5.309507288093259225e-01,2.312275200935980934e-01,2.616872646586015372e-01,5.115975951132261113e-01,4.769730384968288162e-01,5.607964361613793081e-01,2.043232502174675558e-01,6.074869922835745006e-01,8.242328486344763228e-01,1.381132687500017386e-01,4.471116205207295025e-01,7.014762161309888722e-01,7.792380446536759653e-01,5.286138520265033502e-01,5.719050149860940291e-01,4.388391903021773288e-01,5.709605660349048595e-01,1.263709417976090488e-01,7.307520757940969558e-01 1.268244058503915861e+00,1.105451980375919074e+00,5.534734174564098774e-01,4.294708619136933248e-01,1.117210342213533103e+00,1.000178365078553844e+00,1.385070266571316377e+00,5.445651611424530136e-01,7.209306754847445431e-01,8.939492935924415740e-01,2.331741593723202499e-01,4.959919818694145666e-01,1.215673734766394620e+00,1.432055015721426017e+00,7.439706510347348090e-01,1.317022895779672353e+00,9.490190994053093743e-01,1.205894721309130668e+00,3.268513506665369217e-01,1.554205202394081997e+00 
7.916586037511946783e-01,4.608847074858395954e-01,5.339465814957851109e-01,2.647299377622928107e-01,7.586987149987332035e-01,8.528484089555199432e-01,7.124353067858164579e-01,5.867006362855200496e-01,5.491958063891864139e-01,5.620046948874970472e-01,2.166545497455619751e-01,5.520182558352033642e-01,6.181661911356868266e-01,1.063556444098505471e+00,6.345911165058197856e-01,7.489718886654139762e-01,9.276326567649324506e-01,6.935769338577862930e-01,2.783751289295686160e-01,9.013998509883752730e-01 1.410634977100313492e+00,9.980853874777946277e-01,6.569023776422310279e-01,4.614434716571473860e-01,1.164342562036468243e+00,1.149799788712510207e+00,1.280540020482989538e+00,6.708698985256924541e-01,9.098257516473147177e-01,1.092244604851047241e+00,2.902958149763540385e-01,7.382699268028587447e-01,1.213814306238300222e+00,1.599767986509796458e+00,9.334242909217012052e-01,1.275738547221290320e+00,1.152274274555701528e+00,1.193779738858731898e+00,3.623141100493105227e-01,1.540557174016160769e+00 1.602066959769349985e+00,1.084401550896807009e+00,8.461844436040875106e-01,5.284975260880165893e-01,1.391686460210427301e+00,1.432360827583532448e+00,1.464306228614305816e+00,8.885850754706495769e-01,1.051445930789926875e+00,1.203701602111872271e+00,3.613149511808345649e-01,9.137801391512124782e-01,1.346871117963855724e+00,1.919119876160554128e+00,1.123025315299212679e+00,1.479370407499707163e+00,1.477452466153587274e+00,1.378254304852041923e+00,4.580078913451893774e-01,1.782431739063479625e+00 8.826603373435565336e-01,2.971033749612658204e-01,4.128202359607265337e-01,2.608713970566244322e-01,5.645387330735576192e-01,6.987604535853095822e-01,3.930607509952001033e-01,4.420281634966737316e-01,7.249393602007283555e-01,8.576117332278510030e-01,2.098841637747239086e-01,7.037402977545710314e-01,5.652458848039312311e-01,9.483609713108460681e-01,7.088246420722235097e-01,4.869302438621870843e-01,7.659060890678366373e-01,5.050089297639762664e-01,1.959709107462509203e-01,6.527604628709311463e-01 2.065794835717876587e+00,1.232123949575444799e+00,1.093983894781149768e+00,6.675033122913496264e-01,1.713126909161035938e+00,1.839010925316980627e+00,1.682397321518632527e+00,1.159505973268866175e+00,1.434680695021070207e+00,1.639278585165574897e+00,4.807695801491689291e-01,1.301754770959977048e+00,1.638094643107057102e+00,2.450066788583369792e+00,1.512277311363142474e+00,1.751102817604832218e+00,1.931082107277404480e+00,1.655557071301588357e+00,5.759866539365747773e-01,2.141997067821839185e+00 9.391853211224697739e-01,7.849948494634020335e-01,5.474000131203753394e-01,3.270248225947309795e-01,9.396917785377717491e-01,9.168253324092114909e-01,1.081740501042554481e+00,5.751786488939570896e-01,5.427107910882017494e-01,5.933891157796316040e-01,2.132402686510043444e-01,4.458336222650663982e-01,8.768380170642441351e-01,1.206576869069057256e+00,6.245560602700412911e-01,1.049526288977798316e+00,9.302905657428195418e-01,9.489928292047586922e-01,3.092174606730173081e-01,1.228326201821780872e+00 2.331938005737645425e-01,1.358308462996479227e-01,1.882667270578899654e-01,8.065852381694049722e-02,2.526688916405231411e-01,2.914426708747606676e-01,2.320495671472929955e-01,2.110825232467308521e-01,1.601247235180169037e-01,1.459916616252300847e-01,7.230955223322613201e-02,1.746650578814866184e-01,1.813507871106633529e-01,3.474731439179533088e-01,2.006167165982971068e-01,2.446465912607785342e-01,3.235216602826047061e-01,2.225919675999596947e-01,9.787210904701262870e-02,2.902555322832652829e-01 
1.123552459589906238e+00,6.009160169180032263e-01,8.736151157749837415e-01,3.811534942286039507e-01,1.158806978944881383e+00,1.356990602124725998e+00,1.027214364995289175e+00,9.789565315752781771e-01,7.987046440531897495e-01,7.531476466786863888e-01,3.437529173101439017e-01,8.678870048277049776e-01,8.428571689225161645e-01,1.628312718114995894e+00,9.720441760381290930e-01,1.101724501950499757e+00,1.511308085036697557e+00,1.013038967808250534e+00,4.492433866937720066e-01,1.320721327963630731e+00 1.683239513793602260e+00,9.442999834262661984e-01,6.718659637375055116e-01,5.198517184747398767e-01,1.159184187252225051e+00,1.209310968560051380e+00,1.139641372483543025e+00,6.773241633874236278e-01,1.208811755887976513e+00,1.505916911510352563e+00,3.365938231476593989e-01,1.018795843394570744e+00,1.304889231370121472e+00,1.744265894036790998e+00,1.157635311709251713e+00,1.200106597252657892e+00,1.224872899900464773e+00,1.175492484436202023e+00,3.511471640816856143e-01,1.513604014073075543e+00 1.252198989290581155e+00,5.111202442875580898e-01,7.191640553550810999e-01,3.892743653432753637e-01,9.718416669936192909e-01,1.170903260585461014e+00,7.649521981266790283e-01,7.864831499463423858e-01,9.788557763488248131e-01,1.084583082063270032e+00,3.267703660591596559e-01,9.845641961916590246e-01,8.516972551713674067e-01,1.507615100897600868e+00,1.030746942266105748e+00,8.796273027586893889e-01,1.291727273648222596e+00,8.615411811280264676e-01,3.550448344270134005e-01,1.117916765540573953e+00 2.114036538067682325e-01,1.250181321931290168e-01,7.102228196938523141e-02,6.468695073387586980e-02,1.362675132915319720e-01,1.349915897050475155e-01,1.416307102938915152e-01,6.817552459302730905e-02,1.494609628740105611e-01,1.941103261557435566e-01,3.806621012431269518e-02,1.180037966007081163e-01,1.680303175260380866e-01,2.054037086600684903e-01,1.370464430733649985e-01,1.465271460106522816e-01,1.313318065865120010e-01,1.445363465709183226e-01,3.788159826671219643e-02,1.855928248177030060e-01 1.250356208522147794e+00,4.295832026797873904e-01,1.007375018921371934e+00,4.067398153306469588e-01,1.201644610601489305e+00,1.538979000307021616e+00,8.691446155095555737e-01,1.147511921019273906e+00,1.000767693764335053e+00,9.440404874218608633e-01,4.124618229958483262e-01,1.155682392156581750e+00,7.952372447448745607e-01,1.810979344076692366e+00,1.187867669292813844e+00,1.024850402051799447e+00,1.769624551312922289e+00,9.705659675045277268e-01,4.948331748868131652e-01,1.269431904151817392e+00 1.984085650907913889e+00,1.146887016592136765e+00,1.095044995110230124e+00,6.418017305501254599e-01,1.668597685005980935e+00,1.821250268150384599e+00,1.601826698802186977e+00,1.170627357455949591e+00,1.392986642695611232e+00,1.566059989285151666e+00,4.770043408717689060e-01,1.294474621883833443e+00,1.550533480363649907e+00,2.395966996023629392e+00,1.485907034375673286e+00,1.681257723365842782e+00,1.931327648759597615e+00,1.589073823542024533e+00,5.723760357553414613e-01,2.057715253156014779e+00 5.054797104927557516e-01,4.468806761232581581e-01,5.276070507677950161e-01,1.981916539487088014e-01,7.372671137785876683e-01,7.975836353978684734e-01,7.789061668972178110e-01,5.958106303492801681e-01,2.683518760092241662e-01,1.595604710851930008e-01,1.765931409071654912e-01,3.130750324044421218e-01,4.805205887403649623e-01,9.104435996160420430e-01,4.300449419405245610e-01,7.683136772981324913e-01,8.756652521497553243e-01,6.659874935385696748e-01,2.880839283559324393e-01,8.698721660651588250e-01 
mlpack-2.2.5/src/mlpack/tests/data/testRes.csv000066400000000000000000000000161315013601400212350ustar00rootroot000000000000000,0,0,1,1,1,1 mlpack-2.2.5/src/mlpack/tests/data/testSet.csv000066400000000000000000000001061315013601400212370ustar00rootroot000000000000002,4,4,2,0 2,5,4,2,0 2,4,4,4,0 8,4,4,2,1 8,4,4,2,1 8,5,4,2,1 8,4,4,4,1 mlpack-2.2.5/src/mlpack/tests/data/test_data_3_1000.csv000077500000000000000000001074531315013601400224560ustar00rootroot000000000000000.339406815,0.843176636,0.472701471 0.212587646,0.351174901,0.81056695 0.605649993,0.45338097,0.623370668 0.269783539,0.482031883,0.36535861 0.725254282,0.477113042,0.042100268 0.529287901,0.776008587,0.303809928 0.098778217,0.318454787,0.983422857 0.685345453,0.837942768,0.540406673 0.503220972,0.268813629,0.41488501 0.160147626,0.255047893,0.04072469 0.564535197,0.943435462,0.597070812 0.663842864,0.276972185,0.02208704 0.077321401,0.032366881,0.826784604 0.794220519,0.319582218,0.266025433 0.466815953,0.864683732,0.684380976 0.680962499,0.009554527,0.484176898 0.257862396,0.875068776,0.326253946 0.695503778,0.695095604,0.955586038 0.569205007,0.662786497,0.036489177 0.604542917,0.250714055,0.232826165 0.928175028,0.871769923,0.327107027 0.362809806,0.270846833,0.917535106 0.567471988,0.09223176,0.018232595 0.30294,0.197331083,0.676067984 0.136497436,0.991079028,0.640906359 0.490191642,0.321877535,0.210121475 0.886240693,0.124811844,0.109638108 0.487537807,0.474289999,0.34183089 0.038698268,0.515865087,0.984297254 0.437309222,0.534489172,0.792665419 0.898099395,0.461121197,0.785225662 0.256850927,0.840446806,0.056158684 0.335408063,0.806637161,0.913551388 0.11329872,0.670392847,0.333361274 0.954403847,0.024104509,0.325578493 0.824152332,0.614355433,0.271931013 0.729647547,0.666093053,0.579723184 0.25675029,0.94037263,0.530553224 0.799877963,0.555666351,0.056606945 0.213120693,0.763046224,0.341926361 0.975873714,0.554796483,0.049489218 0.422782321,0.375502502,0.875514176 0.732474122,0.920181004,0.273895723 0.288070185,0.878065303,0.57017629 0.269706239,0.854626516,0.607709975 0.615118638,0.006748605,0.00278243 0.655373608,0.348029869,0.909502319 0.358287814,0.419322455,0.362741982 0.152473842,0.659459939,0.497284571 0.930791658,0.934132013,0.150924236 0.792977546,0.953203388,0.465884431 0.971953827,0.268751729,0.220474277 0.244730747,0.056636753,0.088649766 0.873554351,0.305649442,0.91790044 0.26662478,0.221646762,0.310857157 0.659541537,0.93018384,0.139339275 0.833616742,0.833734413,0.551027856 0.43405195,0.874582065,0.996443541 0.442896336,0.118977275,0.03127628 0.388886541,0.976070927,0.294801481 0.14757794,0.195944854,0.129778502 0.73209291,0.551685931,0.218866346 0.85393572,0.675733762,0.501776114 0.804291505,0.746786474,0.94053733 0.199998362,0.403471102,0.614783956 0.302029244,0.084831174,0.043490422 0.458371115,0.076068613,0.940418665 0.122287089,0.867270578,0.612001352 0.423331474,0.370629389,0.407480574 0.400056969,0.919523609,0.940521669 0.74852813,0.872400563,0.915423635 0.614934326,0.67621724,0.382278246 0.0184522,0.545825352,0.74127138 0.937453855,0.175662201,0.666301896 0.504358818,0.251308945,0.849159677 0.397410107,0.973000161,0.648601332 0.398342217,0.110698975,0.916968596 0.464980239,0.683124011,0.070633362 0.787030874,0.393777381,0.731973049 0.612845512,0.893440416,0.475331995 0.241219407,0.792282417,0.389574277 0.465756798,0.552685716,0.092092299 0.908028882,0.837528446,0.794160948 0.552741674,0.898614081,0.764312365 0.607116253,0.958698621,0.334887326 0.322583246,0.541387861,0.879874555 
0.124522558,0.229074642,0.510214096 0.049900273,0.471371867,0.367698395 0.261657863,0.105228571,0.748191349 0.216818324,0.700885804,0.34479269 0.896337659,0.634574468,0.203599217 0.961150989,0.920563011,0.795999477 0.120635447,0.744570376,0.107177572 0.696406743,0.788342315,0.173664558 0.577700329,0.493151732,0.989211395 0.270346683,0.586765585,0.208832269 0.171412097,0.116618251,0.53141933 0.596260532,0.819973735,0.531503373 0.120665467,0.556332466,0.643268746 0.546751646,0.563896374,0.079856633 0.637150933,0.126536213,0.823749724 0.637940649,0.951567917,0.397777975 0.344749598,0.517031469,0.48297473 0.296545224,0.419944602,0.99985146 0.707956343,0.985929306,0.942420811 0.24734852,0.001808114,0.489512545 0.29395388,0.751934338,0.924845167 0.306350765,0.676837884,0.614397758 0.387029257,0.668882644,0.316866608 0.166701475,0.220250465,0.70788096 0.666366134,0.343214579,0.063804166 0.970614577,0.514452347,0.643280872 0.084297811,0.906111319,0.590434434 0.163302217,0.226212634,0.074753132 0.20207705,0.197835179,0.217985026 0.413236066,0.640190511,0.520645448 0.807941459,0.463910044,0.996796367 0.208875906,0.182468954,0.876826443 0.743474185,0.840439019,0.143677671 0.266758693,0.103719005,0.920995789 0.030607849,0.973154392,0.814015083 0.237753714,0.374336732,0.44138091 0.83212984,0.547216604,0.371699647 0.302411666,0.58054099,0.303141758 0.949214871,0.783756542,0.457582838 0.32776739,0.133095384,0.351183944 0.673471065,0.432009028,0.761641303 0.120361022,0.494421101,0.954699616 0.049975694,0.857405242,0.133753572 0.314326245,0.599784238,0.698931698 0.10972582,0.994733888,0.603365409 0.246939825,0.79385323,0.576049373 0.420949269,0.55824091,0.684730016 0.085903635,0.678776288,0.759533545 0.221902971,0.606683148,0.183625782 0.934582003,0.263106456,0.195228637 0.276550653,0.563455012,0.477130256 0.939865401,0.683543172,0.98694541 0.090714119,0.155392084,0.183225576 0.546170002,0.226065658,0.757518873 0.635725491,0.259656977,0.803254796 0.768135532,0.329687113,0.784471673 0.67201594,0.69314804,0.216292884 0.731388623,0.632648812,0.298465113 0.030104188,0.531279626,0.68605789 0.404907965,0.617707054,0.646985633 0.38264213,0.522044947,0.606066308 0.850778503,0.771072538,0.780038925 0.333386945,0.62981651,0.838539888 0.144526261,0.90723358,0.672092924 0.803193149,0.545698586,0.740250704 0.144775421,0.073065649,0.81327723 0.800150626,0.077947117,0.498989131 0.805355858,0.282274855,0.111520406 0.432276345,0.614069782,0.04562788 0.119740317,0.122788948,0.68461108 0.188596378,0.666133286,0.753645204 0.143050522,0.393902986,0.609633117 0.754401856,0.84172035,0.387786256 0.97549575,0.97044364,0.621482928 0.735098473,0.96738673,0.239086021 0.825090649,0.153687653,0.520111132 0.720848546,0.211391499,0.513430909 0.572411742,0.56579983,0.313933048 0.766584951,0.704264072,0.103088529 0.933914925,0.70795791,0.378434849 0.232266382,0.864968616,0.664769493 0.180748316,0.792633394,0.983236654 0.320744207,0.073646797,0.915148464 0.058415284,0.478244018,0.171213592 0.613274471,0.423949271,0.899198164 0.83818587,0.622457639,0.496368891 0.547369341,0.5160996,0.318684775 0.489079348,0.504840066,0.174865371 0.133510366,0.873938618,0.95342181 0.355477984,0.610358907,0.32242224 0.32167355,0.132961802,0.381440702 0.660257981,0.59386003,0.570704079 0.519799486,0.220676336,0.85452965 0.097125446,0.037837774,0.581579153 0.801485909,0.741547848,0.06310355 0.413142247,0.303102946,0.30224609 0.07746447,0.555846316,0.750106689 0.593760097,0.256631753,0.179035377 0.819000445,0.86578977,0.797167379 
0.644052663,0.148335877,0.377067692 0.02037784,0.835405997,0.192438566 0.248506314,0.951214215,0.492923258 0.387445752,0.862693509,0.11983047 0.411437123,0.512831692,0.516380652 0.481199694,0.970780992,0.565521666 0.967908564,0.168755985,0.447517833 0.280607962,0.670538365,0.548021587 0.402044213,0.121532495,0.136718448 0.83696286,0.739549154,0.495218329 0.652215616,0.664877651,0.838254198 0.846246408,0.411635906,0.96601722 0.359827733,0.627436225,0.666295882 0.522326573,0.496565812,0.404066784 0.614406114,0.160072022,0.269439305 0.221722954,0.558736063,0.890699947 0.561777087,0.782270647,0.792345194 0.385698506,0.295964873,0.697613223 0.101162968,0.27600378,0.239798872 0.075127486,0.14163579,0.728168103 0.982440842,0.583109151,0.395072917 0.69628067,0.26642599,0.283866713 0.073093261,0.914332418,0.925554624 0.01642578,0.927883834,0.248712685 0.11636724,0.556067816,0.248282085 0.487453151,0.058684617,0.294624957 0.813726551,0.860917181,0.678149491 0.492581545,0.501803813,0.193032429 0.642766795,0.422421802,0.950458987 0.662519175,0.950448071,0.157126432 0.548815699,0.127905654,0.23337741 0.159163516,0.345059322,0.586704542 0.40029112,0.928563882,0.954476476 0.587201396,0.44357769,0.797926632 0.026827624,0.206281621,0.680220462 0.884217164,0.266754666,0.652197582 0.475019281,0.447732834,0.14299077 0.193076354,0.317892868,0.976621856 0.515208981,0.512331237,0.422351595 0.336671812,0.870606258,0.364554196 0.438596677,0.333836845,0.801388791 0.194389409,0.929245672,0.589545825 0.205377525,0.079938747,0.187363423 0.426814991,0.823224852,0.707435026 0.262972959,0.517545732,0.19872636 0.720354434,0.847649202,0.709246477 0.355306192,0.303943053,0.835051265 0.949975427,0.106134411,0.204516092 0.106374426,0.874129261,0.971439223 0.14517828,0.371147898,0.695954142 0.739099753,0.331888701,0.890413781 0.627551297,0.9001009,0.177324543 0.047488938,0.224289129,0.220822902 0.912785118,0.79570392,0.838242455 0.49717293,0.703176431,0.754883589 0.090976094,0.502530756,0.657999889 0.194042479,0.284561692,0.14516165 0.409960603,0.285564554,0.097001811 0.310205693,0.003434942,0.173823303 0.233583043,0.118822434,0.816665508 0.513843271,0.539640669,0.864405207 0.40692643,0.436463418,0.369798489 0.126544008,0.159580886,0.933714485 0.286441339,0.872974675,0.04454198 0.964565622,0.910027403,0.897861798 0.203121728,0.899714292,0.085173744 0.445639841,0.360999337,0.016645134 0.307793993,0.117750087,0.562967352 0.281587526,0.174834541,0.274581395 0.119660773,0.099740072,0.484016211 0.511583585,0.54938211,0.339766424 0.188451695,0.073022292,0.006123739 0.346586219,0.49567248,0.234826476 0.225242461,0.587641331,0.725805817 0.075409614,0.208266437,0.826377328 0.204076002,0.04779427,0.040457828 0.050861901,0.763043363,0.256073344 0.972150662,0.792678045,0.909955027 0.506115605,0.031837525,0.903340416 0.804010111,0.955685921,0.175896939 0.092926989,0.062223289,0.821308211 0.363715968,0.726101463,0.79168981 0.787381816,0.338102828,0.005758252 0.484331698,0.495765424,0.891180155 0.241982415,0.277129738,0.561477087 0.484161267,0.286665154,0.03556541 0.211600046,0.304292614,0.395789513 0.372524976,0.202611617,0.166595985 0.265124748,0.017345601,0.037686194 0.701786714,0.420334817,0.714000487 0.034048463,0.651290563,0.050634716 0.802331316,0.558297752,0.291679579 0.344037056,0.467477672,0.358504649 0.639463582,0.425507582,0.954817361 0.602885138,0.374751922,0.374607167 0.993637385,0.955212301,0.16550343 0.955669008,0.745723993,0.889786752 0.365337348,0.19682491,0.506234866 0.7457291,0.51831627,0.978818087 
0.92625289,0.631584997,0.443128894 0.786168714,0.264993195,0.263960382 0.316681591,0.61079768,0.089349247 0.858371024,0.834969763,0.174819213 0.525393487,0.243792169,0.955241627 0.720242053,0.143419208,0.402799979 0.749292304,0.217311863,0.799688479 0.246462289,0.958943724,0.142358796 0.528138907,0.590786754,0.948225902 0.454799161,0.510565688,0.295103038 0.953069085,0.021533141,0.116332423 0.188120341,0.690529852,0.623168048 0.318359731,0.758493036,0.91843922 0.726077549,0.902046947,0.327147423 0.386752461,0.338547997,0.651921958 0.707225745,0.584329479,0.37703596 0.060288975,0.494620757,0.075518168 0.237652566,0.962903992,0.824801251 0.535945075,0.958493881,0.754701994 0.064404553,0.235151293,0.39448081 0.979476468,0.347342952,0.99138709 0.189166661,0.798328607,0.697048046 0.180560013,0.342106481,0.174983336 0.28337819,0.962425666,0.955845318 0.593924663,0.66654314,0.570147835 0.114749593,0.903677338,0.957687266 0.151925114,0.716482401,0.637800283 0.235669594,0.580788646,0.528893286 0.778117587,0.250968708,0.684104646 0.747849981,0.214563448,0.02984775 0.720813243,0.066656345,0.737883757 0.626964368,0.953760147,0.459809098 0.469018562,0.720549931,0.518332767 0.821461664,0.507041049,0.514946331 0.384160041,0.953174654,0.443907617 0.233220889,0.511502601,0.369065624 0.434955659,0.150497671,0.76574469 0.8958592,0.481635774,0.942994014 0.979260732,0.445148596,0.323549157 0.334878174,0.403760723,0.385124629 0.460214884,0.33828675,0.592783427 0.518346254,0.909618383,0.6009723 0.338370801,0.317375424,0.337490389 0.636668843,0.96449714,0.481975016 0.025064304,0.923419227,0.119203699 0.048318449,0.53489191,0.76133984 0.491930784,0.016568024,0.112619998 0.17743988,0.903969674,0.481918653 0.981634317,0.513179093,0.316557669 0.02560158,0.930375993,0.563316641 0.017997936,0.890571459,0.4580491 0.96277821,0.443025655,0.083145161 0.419576578,0.112060055,0.531294103 0.494454706,0.954168063,0.047922651 0.800000835,0.673332473,0.064026809 0.870702162,0.510095577,0.863030178 0.851121904,0.916229763,0.781903614 0.159726434,0.082081261,0.19548317 0.362450326,0.788524336,0.826141196 0.270846003,0.098989879,0.574494436 0.406889772,0.838173717,0.436699777 0.035503139,0.853255007,0.642800341 0.083155666,0.952721164,0.708076056 0.847697478,0.56519776,0.894660498 0.037841045,0.984301359,0.365909559 0.177721428,0.418447797,0.157612683 0.429370039,0.508723836,0.767724035 0.071851749,0.216253471,0.819600825 0.578083664,0.212360494,0.627380646 0.380746754,0.954034946,0.11483721 0.211278539,0.560080096,0.685450354 0.770737322,0.813954563,0.79322567 0.318759117,0.06983,0.664250133 0.059856737,0.06677071,0.26622355 0.968241527,0.953861837,0.311894576 0.504226431,0.06220937,0.289105117 0.256406511,0.249902695,0.348997399 0.674888311,0.860374,0.605942473 0.246067727,0.048342783,0.343006159 0.830735494,0.783740344,0.677522751 0.99887952,0.341758368,0.229922444 0.731699282,0.940258743,0.10886285 0.541383735,0.910293019,0.381124662 0.750868727,0.848911762,0.265718422 0.425671591,0.626146239,0.622684142 0.214013066,0.091251581,0.864057899 0.545601885,0.310480085,0.046543211 0.517244356,0.115819763,0.248517895 0.872633121,0.50117097,0.12009094 0.255496857,0.472006579,0.796438566 0.468962035,0.26918685,0.131735945 0.742353904,0.528441793,0.565922864 0.85366711,0.2676075,0.914062206 0.447698287,0.149534939,0.670156644 0.445589481,0.6431063,0.225580433 0.357872915,0.788565726,0.814611643 0.580287142,0.506307991,0.527031912 0.500500265,0.365277722,0.04677688 0.141881394,0.926001483,0.86894952 0.221717771,0.366035312,0.125658418 
0.600339909,0.684670388,0.826168927 0.307898392,0.20966968,0.752966481 0.959700077,0.899536378,0.491452813 0.230433688,0.613941888,0.415683508 0.495527265,0.634504412,0.370199526 0.506575734,0.986633413,0.84941237 0.761764339,0.963921599,0.828872018 0.348601654,0.087553061,0.791174897 0.104944192,0.102179531,0.905877926 0.375324247,0.246387607,0.301420991 0.875454272,0.118686164,0.988824311 0.17698346,0.393647261,0.159870783 0.917659703,0.583236755,0.630992101 0.285048123,0.469986869,0.37272766 0.011480822,0.597073945,0.904116141 0.313259229,0.510005423,0.894823085 0.795838324,0.911141124,0.928152818 0.164974957,0.359128099,0.60236716 0.983429159,0.003861397,0.083218217 0.242529745,0.562773547,0.664077813 0.765913188,0.194009625,0.286229668 0.070781352,0.102661854,0.249285398 0.511452125,0.418997177,0.284014634 0.439472205,0.891870259,0.82363463 0.580892549,0.466753672,0.140496383 0.615517449,0.738921356,0.461546367 0.824697707,0.698589656,0.941554339 0.46610398,0.902958283,0.688012984 0.523365471,0.691567649,0.547171487 0.545929937,0.714552317,0.041938604 0.32756288,0.701840615,0.927731162 0.761874356,0.276228477,0.886668834 0.979442228,0.298771691,0.591610911 0.374731022,0.860510449,0.321638525 0.8074911,0.097011746,0.930723417 0.453431338,0.206882669,0.431005917 0.910029309,0.03223923,0.493150704 0.2897017,0.170401689,0.739971322 0.024666309,0.777054677,0.769170439 0.05624039,0.089983601,0.64642539 0.149696037,0.539762835,0.702098143 0.676100319,0.000479419,0.639516981 0.967411256,0.893394783,0.958913773 0.158669993,0.527294695,0.347808355 0.181672491,0.532695548,0.988953142 0.053598946,0.497693858,0.118111495 0.132496571,0.985450674,0.753931807 0.87586561,0.732063591,0.884137731 0.419609591,0.012639269,0.645369169 0.102047486,0.008854525,0.658344391 0.123913855,0.210708056,0.499395878 0.159685659,0.968477268,0.586268979 0.834269522,0.369645239,0.245380904 0.637297781,0.768550638,0.48870442 0.778386961,0.376787501,0.03205647 0.67713794,0.632054697,0.000672655 0.860752189,0.140567399,0.326727043 0.220600271,0.039797462,0.871431738 0.373493897,0.910009286,0.043303147 0.269453424,0.571833998,0.346704152 0.919787568,0.373470212,0.873193468 0.776952353,0.362003265,0.172733797 0.575862615,0.900415576,0.599884308 0.616882997,0.8845633,0.13177173 0.366855251,0.729104299,0.950578149 0.668847681,0.753527405,0.660832331 0.264243456,0.308498641,0.912106098 0.542527865,0.880831766,0.535728949 0.460634645,0.013712653,0.152280892 0.828209711,0.921304334,0.049084108 0.874065663,0.473229025,0.545232499 0.731220357,0.126627169,0.996060848 0.943461868,0.033256065,0.992038738 0.211193798,0.522810965,0.907780013 0.767158364,0.967162642,0.888059793 0.689583275,0.841550923,0.377520241 0.147705388,0.959063909,0.031580823 0.654707489,0.752912445,0.305046055 0.628378168,0.075829853,0.719349441 0.886468112,0.185491156,0.719710557 0.749470564,0.448017109,0.897349202 0.492693185,0.884164268,0.633427171 0.44686733,0.7934547,0.773846432 0.630683325,0.776592453,0.708944434 0.814848973,0.845977344,0.643222219 0.016975156,0.729138989,0.058020996 0.511298247,0.07057554,0.733405098 0.784480806,0.738595698,0.373688534 0.530814843,0.44312087,0.691107945 0.944091316,0.957332961,0.639542386 0.771047017,0.811962024,0.977774991 0.87020688,0.755962661,0.925248114 0.458700988,0.334773333,0.095844508 0.533831151,0.912609619,0.027149015 0.524625598,0.652693277,0.497418106 0.805674264,0.723021478,0.80073208 0.113696528,0.650247192,0.344709776 0.826900827,0.593783006,0.550936366 0.940655423,0.740273144,0.630218018 
0.141520315,0.632429144,0.838610834 0.39673597,0.503240828,0.590691376 0.037602886,0.040815285,0.620639119 0.716116291,0.506754028,0.253596249 0.619782298,7.76626E-05,0.676065593 0.496033457,0.98742451,0.984019601 0.649314148,0.147470427,0.489967654 0.691622038,0.161245902,0.647851723 0.936526892,0.590442875,0.939555093 0.604802621,0.838823011,0.251219058 0.071190531,0.67647138,0.597666328 0.019410183,0.495778133,0.44031324 0.726411874,0.262687025,0.086312948 0.830480537,0.135077568,0.079159787 0.950841893,0.769723105,0.47873095 0.611417896,0.84114966,0.395349789 0.181347141,0.287776713,0.883076078 0.200712222,0.873964629,0.571505353 0.65202277,0.084117342,0.250545655 0.342561024,0.202306216,0.079726003 0.584301932,0.122693153,0.129858724 0.591176502,0.051275102,0.876431468 0.165946295,0.474087103,0.856717365 0.839385948,0.763414504,0.961778868 0.528260865,0.865453126,0.680673095 0.076050301,0.71693581,0.15210816 0.780443967,0.33197709,0.73242445 0.363327494,0.164977224,0.185099911 0.687912867,0.396104619,0.249748592 0.88391393,0.554502064,0.089705278 0.33788714,0.686247878,0.252660937 0.19163616,0.441496434,0.513458703 0.478908993,0.15156254,0.818829745 0.918896553,0.899169945,0.780767514 0.782967436,0.327693122,0.755050753 0.32558364,0.492239506,0.12339517 0.047070459,0.693552034,0.508452959 0.109465204,0.821862145,0.632136838 0.826253828,0.610682399,0.632137891 0.162364171,0.5709024,0.027035072 0.479768494,0.607203769,0.077566143 0.897031412,0.795684932,0.974415558 0.801002173,0.551618649,0.876984199 0.123312093,0.411438516,0.901446561 0.594677287,0.32833558,0.914104796 0.741635419,0.14325589,0.115905361 0.08993896,0.243272135,0.742401503 0.116491314,0.690400792,0.020805328 0.180855336,0.599454312,0.340688071 0.087037755,0.006886539,0.952560809 0.300603611,0.113658264,0.797478049 0.832235841,0.05963984,0.771465426 0.095194013,0.247650851,0.801344581 0.300632189,0.150924198,0.086360387 0.874625368,0.700861247,0.713919826 0.863383564,0.57922769,0.870911826 0.11913471,0.767551415,0.50918181 0.556749667,0.691513618,0.782003681 0.197331319,0.827247513,0.779623914 0.987023902,0.734883462,0.623629089 0.420615082,0.614082171,0.741891207 0.312249031,0.014913149,0.070878868 0.974642188,0.983123549,0.086275706 0.783360774,0.814835668,0.67625897 0.540478752,0.254940938,0.449867885 0.048763621,0.290768213,0.625363258 0.697965851,0.033892112,0.612844092 0.724879255,0.708375839,0.525641059 0.747562377,0.173208535,0.263779612 0.867179342,0.213616814,0.754428508 0.02226162,0.326141353,0.081963664 0.627227744,0.116451144,0.409565408 0.543129433,0.092850944,0.54072763 0.281594806,0.709633472,0.876793176 0.35647452,0.063874296,0.965050871 0.045168661,0.497624359,0.186815072 0.524949861,0.944601324,0.332059785 0.126474627,0.02739514,0.246752374 0.208604998,0.568408651,0.772918262 0.125784169,0.514833609,0.514478954 0.154512957,0.373291441,0.993402025 0.233618131,0.572616698,0.016411005 0.999890963,0.570275565,0.216853317 0.486828361,0.379924401,0.696213866 0.075314427,0.667395497,0.863855433 0.86294927,0.812782874,0.997533964 0.031445186,0.249022328,0.973324576 0.326573891,0.118171329,0.965763005 0.332020059,0.604459411,0.538268842 0.706622108,0.694323961,0.209014536 0.932949763,0.08165582,0.356510191 0.75591714,0.880443277,0.240181713 0.227219665,0.515538046,0.063202431 0.069200681,0.150851636,0.361221939 0.902427408,0.646942656,0.504832272 0.262382978,0.180972368,0.403132445 0.032506623,0.656194,0.257345113 0.959652463,0.776117592,0.653289283 0.778669537,0.171816058,0.383820737 
0.64856927,0.78342696,0.966231461 0.638608998,0.323023815,0.667259556 0.120265759,0.176019011,0.416173717 0.275065523,0.921190579,0.324061946 0.490137925,0.337844445,0.135339916 0.724097632,0.992269402,0.410123181 0.296958503,0.142356399,0.479483213 0.092381103,0.57773093,0.290898447 0.89183933,0.312149005,0.295126666 0.669251799,0.071453982,0.955861716 0.938378225,0.324238979,0.455589077 0.762236627,0.048617283,0.120655973 0.886194063,0.842136906,0.886167779 0.420448588,0.826040453,0.209811195 0.496120113,0.140244984,0.010275807 0.291770734,0.089337397,0.940136172 0.823744617,0.442752205,0.79506829 0.86635257,0.308919721,0.929313191 0.124187371,0.515507145,0.3952627 0.515643261,0.514493405,0.592216269 0.435577703,0.202265522,0.749380396 0.851215206,0.581140662,0.909262689 0.97276388,0.305964393,0.119556192 0.833642983,0.44267292,0.574065373 0.908658096,0.985442117,0.032891222 0.120536868,0.898167052,0.754847347 0.328480689,0.206500348,0.883388839 0.584233318,0.127164736,0.934356548 0.520904286,0.085542266,0.469645136 0.118804512,0.276694477,0.255706174 0.669152609,0.480169645,0.350044668 0.784599588,0.030844507,0.672270616 0.97462202,0.984822685,0.801402402 0.09061512,0.20599842,0.288943446 0.500630874,0.668012143,0.326107661 0.243946646,0.885842685,0.356343047 0.704519934,0.112411764,0.840776533 0.064722176,0.148130565,0.724221405 0.069998846,0.826917642,0.285248236 0.463142105,0.129132053,0.071693121 0.065672617,0.491471158,0.143248345 0.345719852,0.550477283,0.417188691 0.523811405,0.923188335,0.366706095 0.57113315,0.798590349,0.465646081 0.828359309,0.886833757,0.470994632 0.649200809,0.422037446,0.338970547 0.991959241,0.065292471,0.545926733 0.402707667,0.892315167,0.157737898 0.583371677,0.915247643,0.510882162 0.286752954,0.119216908,0.422178531 0.000574842,0.932477989,0.322762631 0.521100182,0.182516345,0.799539149 0.217552185,0.32460329,0.001286413 0.129263953,0.832799191,0.746800354 0.859133069,0.682500693,0.035727655 0.081296267,0.499283963,0.851895509 0.709384988,0.14985208,0.186521894 0.247922963,0.253358356,0.872326832 0.203028631,0.068652472,0.553487984 0.292370767,0.925595124,0.401383438 0.721522222,0.300176493,0.452098604 0.622021123,0.308001842,0.51395483 0.601298816,0.268135963,0.584441602 0.207949629,0.407128704,0.699430418 0.152216375,0.92660356,0.07049208 0.997031345,0.789488864,0.194662825 0.14170589,0.513011324,0.250918681 0.979853004,0.246273698,0.732371057 0.441466086,0.428787477,0.680856737 0.513859379,0.668402062,0.50429415 0.32103853,0.59436219,0.481843963 0.466004374,0.019901121,0.225087815 0.546731744,0.359957666,0.776590304 0.088133727,0.021028123,0.579299556 0.172044151,0.237278834,0.567876411 0.576325796,0.86256513,0.487980769 0.459957415,0.004052068,0.41344615 0.72021758,0.906208873,0.049850195 0.835505139,0.006504875,0.716129577 0.974913096,0.06350265,0.945758998 0.538076764,0.931252476,0.05429443 0.921879308,0.750002283,0.120075272 0.825790117,0.095295707,0.471769578 0.667512779,0.726667248,0.68041055 0.604774928,0.209313615,0.803678279 0.058678158,0.457882119,0.491090679 0.46503574,0.647148555,0.063745514 0.268569925,0.07151649,0.354414339 0.309997568,0.048651773,0.652050824 0.852057231,0.800064591,0.378993288 0.101844132,0.975250128,0.919521375 0.879950774,0.012524944,0.243977924 0.71298613,0.410784591,0.766666426 0.253953963,0.18863912,0.353408633 0.859540187,0.786140568,0.50468592 0.885165537,0.182373738,0.365436093 0.919226953,0.132590959,0.305319302 0.794222067,0.325843691,0.81503301 0.360472386,0.828503699,0.992751302 
0.568328182,0.596642015,0.166689456 0.495797608,0.390533497,0.466894225 0.497383703,0.057721092,0.136501948 0.18770586,0.924785691,0.325442341 0.693138587,0.351786889,0.499636742 0.898980429,0.759285754,0.006488642 0.203362481,0.362873482,0.576750046 0.178651329,0.720602676,0.881219809 0.176525065,0.325805008,0.029694687 0.280908733,0.527522643,0.545345238 0.370750152,0.138599939,0.044930538 0.675097184,0.14761356,0.378589866 0.735023127,0.793326142,0.751658301 0.589712544,0.569527756,0.006401988 0.528971516,0.297342992,0.454367414 0.691477287,0.799565463,0.424110191 0.261622015,0.848996059,0.848455301 0.401014342,0.684428894,0.631646442 0.16646465,0.252704215,0.907185556 0.100875707,0.566947803,0.906685851 0.434813596,0.104021401,0.167032575 0.525475323,0.508926771,0.950312938 0.159164103,0.298161029,0.813651341 0.688364345,0.371765734,0.533450516 0.712069354,0.849924822,0.351626269 0.322500041,0.141195673,0.954104724 0.146595062,0.93264431,0.190821916 0.71991816,0.904994255,0.945180752 0.025505056,0.369278227,0.225567491 0.450884297,0.163076541,0.835655337 0.666130325,0.52707414,0.82767262 0.747584223,0.050899988,0.253442115 0.525074918,0.930938393,0.27765909 0.940041036,0.129750051,0.169526547 0.976328221,0.406056506,0.156213454 0.413206486,0.217043404,0.425652131 0.108491931,0.963192763,0.498477601 0.958709036,0.585116585,0.507265441 0.048428848,0.713725414,0.728970388 0.587791364,0.896305822,0.279922122 0.086686919,0.740059232,0.914875869 0.422027713,0.086096483,0.419750985 0.767716034,0.871663257,0.103971292 0.549835043,0.371430165,0.801009346 0.557408598,0.341725364,0.279171927 0.071240148,0.765613908,0.173767574 0.713230298,0.779720404,0.253165546 0.572322236,0.663937254,0.045664107 0.428432377,0.161070991,0.891029544 0.818292324,0.971164957,0.271696059 0.269446053,0.962766931,0.051526478 0.515277086,0.74833971,0.351491465 0.796419252,0.556278732,0.361314209 0.801556269,0.987424165,0.117197305 0.782772261,0.05866778,0.982749779 0.21806961,0.609256862,0.798461899 0.699205142,0.038761394,0.271238908 0.534754129,0.27476979,0.163606178 0.003518131,0.437675965,0.388250875 0.619198012,0.090710318,0.566559914 0.178576562,0.885793567,0.022734794 0.578539981,0.281190469,0.008260142 0.177713211,0.393560621,0.052236228 0.846158221,0.357695748,0.875170299 0.127568308,0.638314871,0.946658268 0.767138325,0.621405933,0.564104167 0.798451074,0.40443786,0.599831193 0.616223487,0.665752297,0.971012789 0.267441096,0.388352985,0.430687937 0.923867358,0.654582643,0.464037122 0.492137227,0.706258913,0.378247168 0.536642887,0.555595419,0.104998227 0.992969717,0.688862613,0.896407883 0.454975157,0.851727744,0.144297419 0.317976254,0.620102227,0.416793119 0.440632343,0.535615753,0.913356284 0.791010869,0.962116708,0.627040144 0.926826073,0.382456611,0.465806072 0.568904993,0.514101455,0.724489494 0.895517901,0.391005356,0.347893715 0.289875186,0.830981849,0.92116788 0.95185048,0.996829271,0.970163256 0.079055453,0.999386589,0.528208258 0.926932102,0.147799896,0.417138668 0.244651465,0.832349744,0.221104338 0.179560876,0.149581841,0.97827318 0.869778794,0.116050413,0.930858226 0.681347988,0.700100934,0.003010153 0.688804753,0.087819887,0.217246073 0.054919581,0.536206628,0.011960678 0.640496257,0.193125181,0.654595034 0.879605152,0.152112809,0.50946439 0.336877078,0.352944356,0.032651908 0.578287892,0.410740871,0.424981809 0.655610763,0.370342392,0.021605292 0.184746216,0.078627828,0.615262076 0.335250916,0.744164606,0.7834867 0.086006226,0.796624922,0.100735176 0.278674471,0.483655368,0.117132599 
0.994681992,0.915583798,0.682419845 0.077364925,0.488968443,0.762836001 0.460939585,0.226843633,0.262301782 0.998409563,0.464398025,0.918229672 0.221191504,0.605272697,0.236818579 0.305532514,0.107986913,0.285771959 0.429457882,0.021852143,0.417044654 0.4398254,0.904405397,0.587007492 0.472361927,0.615492219,0.311474339 0.4847793,0.830454499,0.692963217 0.525054945,0.760690911,0.176296268 0.117729529,0.425190139,0.763022992 0.435815483,0.901034288,0.68353143 0.310722347,0.711502874,0.050054312 0.692557474,0.756865138,0.823601442 0.748561397,0.302607431,0.404056776 0.370478834,0.749199053,0.220199408 0.686929375,0.172808164,0.22046762 0.037511035,0.299597568,0.543432459 0.513900441,0.892613907,0.740051648 0.389543522,0.806516669,0.891439062 0.053758187,0.367104684,0.356060944 0.450039969,0.18662041,0.022226949 0.481122219,0.376490604,0.455652341 0.97009151,0.252002631,0.121449418 0.322174741,0.359645571,0.785282495 0.904310053,0.730301338,0.994210513 0.450101531,0.92830086,0.086584177 0.456948101,0.90305291,0.216589856 0.430158828,0.574385535,0.812451667 0.958800913,0.229029132,0.004822368 0.641856333,0.757170989,0.097059421 0.442276634,0.278413528,0.877655305 0.036927777,0.425286999,0.92305997 0.996003678,0.902465847,0.265142606 0.306340939,0.260744837,0.528606261 0.098272048,0.162476078,0.354882218 0.658054373,0.890822429,0.9000076 0.087284546,0.695167739,0.026293663 0.667310433,0.902843368,0.248946207 0.451887926,0.995052067,0.181712955 0.721298527,0.006611482,0.727102995 0.180137144,0.38951174,0.678305837 0.420761331,0.419860176,0.010656383 0.788488075,0.180473318,0.708019695 0.662265015,0.757397169,0.348937464 0.22732873,0.663301685,0.39923678 0.716892599,0.552981067,0.089832495 0.177215605,0.465175647,0.887666589 0.4010009,0.597937203,0.09497585 0.259096154,0.591668012,0.145793124 0.7855796,0.541345166,0.383678057 0.201753532,0.613603748,0.879697044 0.825321851,0.452349759,0.192581377 0.171266337,0.782789247,0.848185787 0.989170718,0.575391852,0.643933271 0.224216552,0.128615538,0.261286445 0.355440689,0.629457955,0.902600249 0.72784327,0.282293864,0.605943451 0.210467186,0.748327916,0.269725684 0.703080367,0.411052005,0.029450281 0.611720264,0.653108765,0.115754888 0.625714261,0.426502244,0.253625516 0.080879639,0.231561531,0.000776511 0.580765049,0.214103901,0.655333535 0.411287343,0.079075761,0.794277642 0.710073858,0.646863988,0.71074505 0.335569397,0.900645276,0.683474835 0.967747154,0.579773932,0.534024604 0.766717973,0.582199309,0.533102234 0.383468743,0.426721157,0.027251934 0.490400205,0.117276739,0.92366954 0.526437331,0.70107653,0.671085752 0.889392656,0.764668251,0.594183178 0.638642815,0.578480214,0.97861599 0.87668719,0.16462794,0.216101311 0.42672965,0.578827138,0.263549989 0.811170473,0.093966938,0.225951223 0.099089206,0.263591386,0.882393744 0.38399777,0.327948679,0.494541301 0.183583616,0.008025085,0.345896483 0.584960878,0.5469813,0.968535684 0.361345034,0.854037953,0.527327995 0.984905322,0.997741532,0.876521812 0.074758264,0.39928899,0.847634791 0.78330323,0.392062416,0.024783838 0.467728166,0.712167022,0.024533141 0.587280899,0.398576247,0.573112113 0.964829971,0.025982741,0.969019811 0.9497508,0.659436309,0.204878206 0.657359903,0.347373583,0.193308068 0.186434557,0.521059421,0.070439079 0.870109867,0.062761012,0.710077454 0.217962469,0.288311322,0.190708548 0.955539243,0.022311215,0.71590241 0.625665814,0.76136552,0.988044588 0.597252746,0.710748192,0.314068902 0.516054372,0.327282916,0.54307302 0.271367679,0.738701611,0.304169987 
0.933804469,0.580994455,0.210076964 0.127919156,0.599299518,0.585857959 0.676065679,0.558987708,0.958866142 0.316141871,0.460898294,0.141769324 0.471335921,0.089770919,0.358606362 0.623875078,0.120949677,0.031070096 0.279561054,0.756633154,0.523821594 0.367638452,0.041473293,0.205100917 0.194748444,0.554149226,0.891998106 0.41189445,0.060780804,0.739908884 0.463521747,0.175865472,0.535693142 0.945971006,0.966028962,0.856940254 0.183047078,0.337562524,0.181769865 0.594627884,0.198176957,0.150059332 0.843270928,0.530723522,0.928016742 0.223830394,0.396224789,0.671524797 0.660767374,0.651553136,0.816830801 0.435601302,0.067504838,0.286367496 0.118647364,0.597413606,0.736034901 0.130876628,0.718657894,0.132667782 0.512036173,0.807939768,0.573980493 0.651567779,0.146952948,0.239972065 0.288725439,0.224872447,0.043641949 0.13707238,0.381109232,0.022199238 0.754226814,0.167426623,0.961971718 0.951586322,0.053557001,0.223348551 0.618926676,0.885546611,0.123622882 0.790423531,0.278666859,0.501354777 0.038612914,0.868235102,0.288826116 0.488859959,0.478054033,0.700027159 0.862804894,0.011591559,0.750381881 0.994070885,0.954113216,0.968886216 0.452966461,0.985185262,0.402556559 0.163204173,0.188199516,0.352205827 0.15850908,0.505182571,0.583169832 0.135779826,0.409087768,0.238200196 0.643385144,0.86154063,0.14538336 0.50233965,0.544662955,0.992305772 0.208435385,0.031950832,0.061424365 0.866478253,0.391456921,0.511463088 0.4937369,0.216683838,0.68183869 0.635277683,0.264963125,0.828569956 0.57036797,0.199089208,0.947261901 0.622849636,0.554898686,0.300444481 0.148150252,0.793195105,0.95852649 0.118643776,0.375521816,0.127817104 0.758672306,0.928120507,0.147843091 0.988902496,0.305378105,0.027460368 0.101391422,0.187140233,0.666743757 0.742622491,0.913697728,0.538923383 0.093250323,0.083342814,0.253041857 0.769590781,0.9991462,0.438612548 0.729371479,0.304770086,0.732577389 0.309854988,0.231328158,0.907015378 0.357043464,0.291981607,0.210471606 0.310867898,0.310831132,0.021305479 0.099716251,0.743995352,0.892636908 0.41508308,0.015438634,0.257251295 0.53442204,0.552940574,0.911759333 0.066875817,0.519643391,0.683239895 0.960228558,0.637860456,0.564663828 0.166667197,0.282113595,0.909573438 0.400063729,0.629753113,0.314970443 0.708945745,0.167807931,0.868195558 0.371947838,0.749772529,0.913374887 0.364252703,0.719347038,0.968988396 0.565947998,0.47317603,0.848594323 0.963005103,0.86347636,0.213376655 0.010974265,0.115488107,0.918644935 0.579274525,0.748172658,0.195517101 0.054742886,0.089561473,0.35514667 0.352904397,0.177453817,0.485671073 0.86540568,0.455589491,0.325840682 0.826269285,0.742045207,0.836774969 0.075485913,0.446267336,0.134777488 0.123130773,0.10695964,0.319080831 0.353341713,0.250920125,0.94582804 0.934151416,0.641155987,0.332526901 0.183094596,0.975798892,0.512697523 0.931523642,0.525759501,0.067066893 0.171012136,0.581683693,0.603794825 0.489763176,0.561915728,0.886623062 0.427818728,0.227974683,0.462025302 0.059325421,0.726266371,0.692412984 0.770271664,0.743519141,0.117959307 0.107862896,0.552555172,0.592259145 0.445007388,0.046308389,0.69499137 0.056486616,0.370154602,0.498507879 0.347798483,0.541312622,0.44955603 0.01637411,0.777726654,0.346640124 0.918778501,0.247274577,0.931656904 0.468325578,0.552066653,0.233304727 0.558842714,0.30110019,0.237582706 0.520406065,0.396600845,0.627623904 0.42717615,0.55961213,0.312743984 0.043819454,0.060632818,0.168267929 0.151405047,0.276450913,0.385322692 0.864539894,0.203199707,0.865006307 0.866179018,0.649792248,0.369625823 
0.566181508,0.155001949,0.751738414 0.022193506,0.262524266,0.378478591 0.835870282,0.436869514,0.439857307 0.54507765,0.825712044,0.425012638 0.180124959,0.284189803,0.059324375 0.91303517,0.659662103,0.021990781 0.068890512,0.857174742,0.245915138 0.146299591,0.2282098,0.992357695 0.279495766,0.087424865,0.532747766 0.095737503,0.107245868,0.190786801 0.276947216,0.537071712,0.654100689 0.010738646,0.40673838,0.479608479 0.420307684,0.947352567,0.178277524 0.108124774,0.127227634,0.278086371 0.18958629,0.587262704,0.69187928 0.814773727,0.220263054,0.007250506 0.948149379,0.572617808,0.939774741 0.150492895,0.970045889,0.979230909 0.997567108,0.897085006,0.573132383 0.039773611,0.517659257,0.317936584 0.915778891,0.598912752,0.541405962 0.081857212,0.994515385,0.261260636 mlpack-2.2.5/src/mlpack/tests/data/test_labels_nonlinsep.txt000066400000000000000000000022601315013601400242210ustar00rootroot000000000000000 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 mlpack-2.2.5/src/mlpack/tests/data/test_nonlinsep.txt000066400000000000000000000373021315013601400227040ustar00rootroot000000000000001.9145763337,0.7607994154 -0.4419075344,0.1198130856 -0.168533061,2.2555822352 -0.6292755504,-3.6694019591 -0.8442795788,-1.4355136028 -1.207941707,-1.0728406149 1.9708419847,2.4471467711 0.9070819524,-0.286625082 -0.7858220816,-1.4759815986 0.0413525693,0.2159665965 0.7459962705,0.8566973545 0.0890448536,0.1121096514 1.618344128,1.3554084948 -0.1316666647,1.2597125938 -1.7469597406,1.6179505105 -0.2911663781,0.2534294347 -0.412071394,2.2413727216 3.9343704712,-0.5789921274 -2.012313374,2.6813471056 -0.050517892,-0.7373849287 -1.7894051685,1.5768413975 2.5608928791,-1.4033808438 0.6314632761,0.9709502645 1.9261331162,-0.400646637 2.6122244323,-1.3174757412 2.0024609662,0.2827583046 1.6071201126,-1.0307093622 0.1936486397,-3.7532120214 -1.4486522748,-0.5805915988 2.4529965098,-1.5648403386 -1.4956642495,-0.3540174803 -0.3811058981,-0.5269862412 -1.5073316224,-0.2139659923 0.1315582456,3.2408737947 1.0439979054,1.9024986219 -1.1488756409,2.029347388 -0.0825036477,0.2833298805 -0.5412526715,1.2010611837 0.0176789687,0.0019843876 -2.6102096209,-2.5109935006 -3.4497397765,0.4522262579 -0.9986411141,2.0949490184 0.0654247668,0.0322843932 0.670863648,-0.5796405837 -0.64821427,1.1624420014 1.4706835875,-1.1898569296 0.0438470369,2.2464661045 -0.8461422314,1.061071007 
-2.7270426001,-2.3714200142 -2.2670598765,0.9015861135 -3.0238096685,1.6029761097 -0.0993517267,2.2548203874 -0.5071572168,-0.024749784 0.0202770144,0.0026192657 -1.6892301296,-2.0281274831 0.0609806587,3.2464381307 1.5892195858,1.3950501558 1.9288633814,2.4839440664 -0.2890858283,-0.6156337288 -3.0113905719,-1.9575800186 0.0496901657,0.0170213446 1.898074151,0.8115927479 0.8506381075,2.0141401248 -1.0026769328,3.1430900423 0.9296093656,0.562653379 -0.9405244659,0.9966823559 1.4676822972,-1.1929812383 0.9820187857,-0.087287532 2.2181288668,-1.8514756147 -0.7984928974,3.1906299427 -0.2184068516,-3.7343207385 1.0083243248,0.0668682024 0.9295468663,-0.2394054375 0.0746445574,0.1673200384 0.443583038,-0.7117910366 -3.0950734951,-1.8073227815 -0.3241440251,1.249036782 -1.8828732018,-1.8255126022 0.5776028167,-2.7226771179 1.5930327791,-2.3566954968 1.0123211645,0.1458804215 0.8317911427,-0.4096039922 1.742889216,1.1596685743 -1.837295361,1.5279047274 1.0123650156,0.1636676069 0.0508706321,0.0179322914 -0.274292464,2.2526463915 0.010340366,1.2486392039 -1.2171386508,0.7213685034 -0.0542493452,0.2756405567 -2.4513055212,0.3561218167 -1.1895132963,-2.3978807497 -2.5043257489,-0.218976369 0.1626268946,3.2379316268 -1.8390043482,1.5261055763 -0.0286525176,0.2655642978 1.0229466109,-1.5337838002 0.0367305215,0.0088892469 -2.6744765226,2.0902731918 -0.5105266675,-0.0384860654 -3.4642653829,-0.6831926763 -1.6690495995,-2.0469079251 -2.4529527375,0.3482708395 -0.0282439924,-0.7434199671 -2.0139817947,-1.6596352199 1.0121326264,0.1346616844 2.3625405718,-3.0462013537 -1.0918046959,2.0557618631 0.1520296375,-0.7662366271 -0.1229303001,-1.7343871283 -3.4382659137,-0.8335011436 0.2441118636,-0.7612059828 -3.0166151934,1.6150884244 2.6473744537,-1.2545074252 2.0646276337,-2.004132913 -3.0116754248,-1.9570961359 0.5976999936,-1.7018474976 0.4602977388,-0.7051021668 -1.7166544533,2.8604489536 0.0982016451,3.2436969506 0.3354498841,-0.7452989223 0.257979051,1.1882941556 -0.4119144378,0.1587611591 0.9904172086,-0.0513887563 0.0678666443,0.1803828168 0.1237402085,2.2377898243 0.0032795109,6.76007915180158E-005 -0.7869088493,2.166984038 -3.0959879111,1.4746630484 -0.946418805,-3.5861513639 1.3461597967,1.665923765 0.4780335037,2.1631959616 -0.5156487707,2.2272978903 -0.8902556405,3.1709610251 -0.0843758155,0.283724545 -2.0154804457,1.3174301304 2.8942574776,-0.6444233229 -0.0310403167,0.2666437579 1.0869753222,3.0041450313 1.2358753874,-1.3970811176 -2.8085130307,1.9241132412 -1.4343545118,-0.6275849278 -0.4495033263,2.2368446736 -1.0143833192,-2.4897084647 2.711498641,-2.7660650127 0.6676493876,2.0972402455 -1.2485658342,1.9785804309 0.0085533861,0.000460792 -0.7478869791,-2.5999955353 1.1712401838,1.8133672301 0.0960360647,-0.7638913204 -3.1669127637,1.3343300838 0.0703180279,0.0388520447 1.009820555,0.2294367072 -0.9997650556,0.9498967354 -0.0424531988,0.2713954621 -1.4225517877,-0.6628392332 3.3927633236,-1.9570204152 0.0379382371,2.2469928307 0.521608051,-0.6768280775 -1.4147481761,0.3614452078 -1.0099200949,-3.5658164019 0.2667268848,2.2149238285 -0.2141148072,-3.7347529569 1.8168397798,-2.208223242 -1.7774476294,-3.2103075451 2.7281604225,-1.093922189 0.157569741,2.2332372435 -2.3881172684,-0.9177556247 0.0017814105,1.99412891607258E-005 -1.4873379536,0.091924738 -1.5534279589,2.9424225725 -1.4487159218,-0.5803698233 -1.3837858059,-0.7629979575 -2.2386670463,-1.288781277 1.9124996442,0.7674459927 0.4474868459,1.1011094338 2.6260629271,1.625032889 1.4630367357,-3.5208030024 3.8284028272,-1.0084941366 
-0.2139813049,0.281358428 2.0062664305,0.16960438 -0.6991653855,-2.6166468533 2.2247023981,-1.8444038416 1.6313533358,2.7107523388 -0.9687127209,0.9750828696 0.5962402101,-0.63376073 2.6805384313,-2.7937165313 -0.3905861566,2.2436878374 -2.3923541871,-0.9041543229 -0.5133968997,-0.255672844 1.8018902256,-2.2191146506 2.7565459401,-1.0309827359 -0.3275601291,0.2325802314 -0.3907564934,2.2436702953 0.0698892509,0.1767578878 -2.0868717506,1.2172977462 1.6830974808,2.6752040563 -1.255154843,-1.0041346108 -1.5081652066,-0.1824239334 -1.3412807841,-0.8543484273 0.4466662586,-0.7105888893 -0.539462113,-2.6641332026 0.5983559065,-0.6323882552 1.1378167884,-1.4651717339 -0.3195158415,0.2376642421 0.6656340634,2.0980449452 -0.0172614742,1.2520750376 0.5212843751,-2.7307104174 -1.4304348548,2.9970083153 -1.796653247,2.8160606135 -1.0286562618,-1.2802040514 1.8200515133,1.0078949121 1.9207833592,0.7404427217 -0.9900908433,-1.3166588851 0.8260353377,0.7528302725 1.8585410632,-3.3495517883 0.6959294687,0.9106801543 0.0892922905,0.102253871 -0.7736525571,-3.6352128714 3.6955328614,-1.3756879607 -2.8407175745,-2.2205017247 0.0390956315,0.0101400275 0.0025263911,4.01118093966982E-005 2.1643706042,2.2588446338 -2.7814212653,1.9594621426 1.6202035319,1.3528105415 -0.2457786896,0.2723416728 -0.4716114743,-0.385368216 0.3041137396,-2.7507646249 -3.4234729972,0.584852091 1.9271801468,-3.3142416919 -1.1774102157,-1.1135207438 0.8469135558,-0.3881154982 0.7953785961,2.0413916323 -0.2600036582,0.2672036046 -3.1476019246,1.3741550133 -1.3149132076,0.5735105805 -0.4874036639,0.0348286986 1.5172919848,-1.1394204641 -1.2413543661,0.6883287711 1.1499382043,1.8292574805 2.914191045,0.8904487155 -0.6132733882,2.2095600021 -0.5191733799,-0.0869072303 -1.3759139586,1.9045588647 -0.5216148782,-0.1957868742 0.0866093511,0.1309454876 -3.3097166537,0.9873747784 -0.3171222469,-0.5921850017 1.0024042708,0.2996303839 -3.3926097377,-1.0356128362 -2.4939515008,0.076447716 0.0848803299,0.0705592392 -1.7490409168,-1.9700684017 0.0313691936,-2.7516355178 2.0461926802,2.3776572884 -2.5907186176,2.1840908441 0.0006233344,3.2499694148 -0.1989256732,-1.7202129889 0.7696179344,0.8285856585 0.0836456176,0.1432566133 -2.1080603663,-1.5209682791 1.3286583458,1.6821698693 2.4101799598,-3.0117553508 -2.4253927743,-0.787220713 -1.998309188,-3.0639649749 -2.3624122898,0.6712922419 -1.4613333552,0.2137538265 2.0055389252,0.1035570516 -0.855431854,1.0552403273 -1.4793036586,1.8361692128 -0.0974359542,-0.7221098006 1.9519646637,-3.3010579296 1.1177798936,-1.4779660834 -1.9435021827,1.4084472486 -1.3276158705,-0.8808019664 -1.9917565582,1.3484392583 0.8451175168,-3.6902964245 2.6327900582,1.6127298061 2.9068723502,-2.5768121781 -3.0625216016,1.5357672259 0.6275828682,-0.6124997713 0.4271481824,-0.7179604719 -0.0225510118,0.2626645128 -1.666567087,-3.275492959 1.4827978526,2.8047636573 0.6928263032,0.9138030006 0.7841166864,0.8103620911 -1.5491979284,-2.1508020108 2.9980186388,0.3518307991 2.2228361811,-1.8464163365 1.4692325422,-2.4265094422 2.8007650808,1.2493377111 -0.5083990047,-0.2788258478 -1.8747502677,1.4875938649 -1.3899147708,-3.4166108976 0.2397244721,2.2199741116 0.9094425596,0.6076073459 -0.7043076306,1.1378755808 5.201365628,4.8615735237 -1.428636835,5.0970642636 -7.2533130432,1.7534576313 -3.1834823757,-5.8621922006 9.8279749866,-1.6936469043 -7.2019040628,6.0239916515 -6.7000258879,6.5598645746 -5.4535554959,0.571226035 3.2947105492,6.3727543326 -5.3414074501,-3.876635713 0.6171768117,-5.7339455859 -2.6896186603,8.899147047 
9.5169203895,-2.9129041251 6.788679572,1.8751406554 -5.7529171176,-3.1952177655 -2.907250721,-8.1953568277 2.1475407813,-7.4927389347 7.089769612,-3.5440351357 -9.4745854856,0.5531470956 -5.5327643678,6.2739234272 -6.4238867582,-1.164235962 -6.4213123972,5.3979448308 5.4066021469,6.0774393554 5.7226456437,-3.8648089545 -1.1674547471,6.1702877283 3.7757219491,7.2492257343 -4.9000292479,6.7608519204 -5.8737016051,2.6239015863 -9.4753583446,0.5427975245 -7.8882554377,5.1274641451 5.3806062472,4.6530272697 7.8157445745,1.8748507273 -0.3450729222,5.2491175043 -8.344766491,-1.7849792521 7.9958437173,-0.1438349581 5.9332471339,-0.7468743727 -7.4053790659,1.0383958783 4.5986242788,7.9293601292 3.7191775917,3.5197193444 -6.8025180486,-3.3266855706 -2.8248319017,5.6576960112 -5.3126965297,-1.5929189046 0.3256244376,-8.7498632596 8.0014608527,0.1149899291 9.9649171985,-0.692582422 -5.4221365704,0.7759492386 -2.3680395784,4.7676672442 7.7500640008,-1.8296203255 0.7604934546,6.1839382838 -8.0562596646,-2.8771023664 -4.3931361373,-3.4836010328 2.9576619234,-5.0294015923 -8.7489537564,3.5420738101 -4.2004961177,-6.4026983488 0.723517966,6.189333987 1.1581555891,-6.6776417418 4.8731927243,3.6737275359 1.0552336311,5.1097658566 -5.817586113,4.5638600894 -5.445458826,-0.9484944328 -8.3993466382,1.1541195462 0.0074480279,5.2497689182 1.7849522971,-6.5535924213 -6.2964531491,-5.8866820733 0.9413166846,5.1355927266 5.8581720641,1.4669561026 0.6606213333,5.1879502253 -5.2465623224,6.5061413934 8.5458649,-2.6654058793 -5.0347906614,-4.2863392586 4.2527492623,8.1279247218 -2.9671891725,6.6877780197 2.6639438456,-5.1808191449 5.7619369465,4.1472929819 -7.6247850817,-3.9248665721 7.9946076392,0.4931941655 7.8013774154,-4.3237902547 -0.2982002039,6.2504779903 -6.000395615,-2.6674266718 1.712016391,-5.5391717032 2.5529195702,6.7239152261 1.4946424781,7.0603267278 -4.0040805167,-5.305789993 8.0014848157,0.1986761455 2.6983672963,5.5576310284 -1.4058936972,8.156893713 -5.7519083481,2.8660964014 6.3356579409,3.1450739787 7.9886926909,0.6132053604 8.948213515,-0.8176942007 6.8403256421,-7.1178017618 -0.3680516921,6.2485336183 2.1905370563,7.9092586475 8.6559931171,2.6308312493 5.0024763714,0.1823855534 8.4662367812,-2.8951087481 6.9223757913,-0.8919631216 4.7574473278,-3.4883366744 -5.266220987,6.4908500834 0.8657157659,-8.7228896687 6.0142759684,3.7522308011 -1.9941037359,4.9227012074 -0.8780163602,5.2030439842 2.1539370385,-8.5213952398 2.234337881,4.6722411301 -0.0274990774,9.2504322503 -6.3760508473,-1.4334419518 -6.2915689367,3.9172133029 2.2310302449,6.845468916 -8.4441914705,-1.1448557125 -1.5596590027,8.1319427385 7.3855545641,3.2421094648 0.8572093865,-6.7157186049 5.4816513013,-6.9558740437 -6.1641251305,-2.2304600736 8.5296066125,3.0379228273 -1.4270213277,-6.5625623372 -0.0035290637,-9.749941754 -6.5243622663,3.5359958889 -2.0845517467,8.0241036555 5.5183934,2.5241468958 -4.8538795807,4.1519247982 7.485554956,2.9896020797 2.3100236887,8.9152353868 4.5106168889,5.5363312488 -8.2200517141,2.0084394232 7.2777482515,3.4897013429 3.9498563568,-5.5938969112 -6.3198580032,3.873545224 5.2605029,3.0560193647 6.8311905735,-1.3758592115 -5.2089634866,5.2206038314 -4.4962260293,3.0020144476 0.3455917943,9.237586263 5.9996378478,3.7767972551 -7.2648909475,-2.0307537407 8.9983612216,0.3932854856 2.4650111682,4.5455564443 -6.1589619594,4.1134196029 0.7136456787,-5.725431341 -4.3527859728,4.6504664031 3.072955085,5.3468303452 -7.3812517398,4.0515774766 7.4716573782,5.1886965664 -4.5580147832,-8.5305411137 
0.4151301872,-5.7465024007 -3.4656442853,-5.6907015449 -5.8779819245,-6.3197608511 5.3157486589,-2.6216542585 -7.321725609,4.1536590288 -7.4248919565,-4.3069904013 6.0425487005,5.4187340282 4.9043439336,-3.2905336648 0.341781978,-5.7493001582 6.5451247733,2.64995683 -5.0605614221,-4.2543467899 0.5729405842,6.2089769172 5.8091782472,-1.3487288554 4.114024503,3.0152198099 -3.3960123745,5.3537165441 -7.4346444846,-1.1602863305 4.3416857346,2.6514010799 -9.4987460381,0.062549268 -3.6707618121,8.5606581762 -0.3796403459,-5.7268953763 4.5906774153,2.1512808788 -4.7371761518,-4.6287557733 5.5361537378,-6.914154386 -2.1397025286,-5.270876595 -0.6935061373,6.2291605089 5.6550912879,4.2987060453 -8.0759606715,-5.1729887935 -4.7061141572,2.6836506647 7.8729738733,-4.1975151216 -5.978542458,-2.7193509539 1.3207476292,5.0389894355 -6.4984347752,0.0545593018 3.1432207513,5.3033496386 7.144430994,-3.4380342901 -6.7505439914,-3.4365571736 4.4375838598,6.8460548465 2.1969039847,8.9458673673 -5.5022922797,-0.1688788191 6.4916365548,-6.0598235225 9.9416007276,-0.9313736699 6.4265464047,-6.1263918242 -5.1367796154,5.2886603616 6.5942011305,2.5163769766 -6.5701083873,-3.7866611735 6.7695507849,1.9493297133 -7.7487000827,-5.6681146568 -4.5143934576,-8.5546327832 5.0134651618,6.4185241514 -8.8254564502,3.3564414322 -5.0492061176,2.0228897517 -0.9881027063,8.2098152131 -7.314388419,1.5053877877 -8.6190024109,3.8342532701 -3.4219703407,5.3380656776 -1.0240301034,-7.6607111187 -2.0846853243,-7.4190793642 6.9563287056,0.956210299 5.0022044233,0.1019454025 -8.1413888925,-5.0654392304 4.9324577211,0.9941910206 -0.3079638038,-8.7389689704 -8.1821656838,2.146982218 -1.187257327,-6.6160571747 0.1056889316,-6.751664006 -3.0956610696,-5.9117535335 3.9188301724,-5.6146188717 -8.0821573301,-2.7988455926 -3.8077975364,-4.1494801471 -4.9913455152,-7.0672392037 -4.7268989299,-4.6397669605 1.8907248939,9.0211107413 -4.7792804312,-2.8929062803 5.6149473697,4.353494205 -4.8227672231,4.1862630036 -4.4783983969,7.0358458078 -2.0866060994,-7.4184963226 7.2578003732,5.4946688188 -3.790925799,5.0964763424 2.3167553761,-5.3337966142 -8.0199850154,2.658282666 7.8788678076,-4.1869029419 -0.320102887,-9.7395121312 -5.7207883954,-3.2557422329 -4.8122870865,4.1977177373 -5.0729212396,-5.7051191138 5.7310036034,5.760013194 -8.4463249856,-1.1263838594 5.5869654136,4.391043802 3.9494650005,8.2861108731 4.2013707044,4.4635301771 -7.1544343067,-4.7603888316 4.9130445373,-4.8114938393 -6.4062159926,3.735915219 2.5833054425,7.7813808057 1.4379052006,5.0029206525 -0.2715692908,-5.7360455005 5.627698241,2.2491851414 9.7237099489,-2.1792018393 -6.680630065,3.2471926067 -6.6078545014,-3.7170115441 -7.4557578784,-0.9882974381 -0.2820459765,9.2505523327 -7.9523474732,-5.3679221665 6.704940603,2.1785959208 3.1752311812,5.2831060203 5.5181934582,4.4811909085 -4.9202233733,-4.4238188078 -7.2972877711,-1.8998059759 6.6700707526,-1.9679849187 1.9046090854,6.9512307803 -1.6590440345,6.0698169507 8.7979936286,-1.7424550042 -0.5200513664,-5.7119237212 3.2504673554,-4.856472523 6.8680326649,-1.2020122748 6.3272985785,-6.2252616535 5.1200652976,-2.9647082149 0.6490543605,6.1995142732 1.2800492991,-5.6421241652 4.7661227144,-3.4771228647 6.5227224512,2.7083040325 -4.2272963679,-6.3842243259 -7.2560594875,1.7430412117 5.9654573555,-0.5028728851 -7.983007904,2.7610594549 5.0752782204,-4.6479396536 -7.4549955423,-0.9951317875 -7.0320399002,2.4500775253 -5.1787601672,-2.0212390534 1.6806450259,8.0435122873 -9.4838218043,0.4173539082 2.1438479858,5.8108885381 
-1.306869222,-6.5905207715 -8.5374125479,4.0051245756 -1.5477298249,9.1466547475 6.9452249362,1.0478971966 4.3211376653,-3.9897325904 -7.8462614857,5.1892078291 -3.1421051889,-5.8857612716 -8.2052904321,2.0635256436 -4.4678269808,-4.9017125847 0.716191987,-5.725184472 5.6998536537,2.0425052332 4.5258835247,-8.7218248081 3.7975198038,7.2372842553 -1.0843722449,-6.6362106697 7.7213638152,2.2600193576 -4.0410908308,-7.6738537544 0.1272803217,-9.7512466454 -8.4912022304,0.2589104558 7.5584457726,2.7879963676 -2.2414298966,-9.4507756259 4.859373718,3.6930091909 4.8510454736,-8.552073234 -6.8358325057,-3.2537621875 mlpack-2.2.5/src/mlpack/tests/data/thyroid_test.csv000066400000000000000000007432461315013601400223500ustar00rootroot000000000000000.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0061,0.028,0.111,0.131,0.085,0,1,0 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.019,0.084,0.078,0.107,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.031,0.239,0.1,0.239,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.018,0.087,0.088,0.099,0,0,1 0.22,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0004,0.022,0.134,0.135,0.099,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.02,0.123,0.113,0.109,0,0,1 0.39,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0016,0.036,0.133,0.144,0.093,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.02,0.08,0.096,0.08316,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.014,0.113,0.096,0.11746,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.011,0.104,0.104,0.099,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.061,0.006,0.023,0.087,0.026,1,0,0 0.65,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,3e-05,0.023,0.154,0.09,0.17,0,0,1 0.53,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.022,0.115,0.093,0.124,0,0,1 0.79,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00419,0.011,0.078,0.097,0.081,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.015,0.159,0.116,0.136,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.12,0.1,0.12,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.225,0.083,0.273,0,0,1 0.61,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0007,0.022,0.118,0.096,0.12266,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0201,0.115,0.083,0.138,0,0,1 0.14,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.089,0.074,0.121,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.103,0.092,0.112,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.028,0.122,0.112,0.109,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0077,0.018,0.111,0.107,0.104,0,1,0 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.016,0.123,0.081,0.152,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00089,0.019,0.151,0.097,0.155,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.175,0.103,0.169,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.017,0.074,0.101,0.074,0,1,0 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.019,0.012,0.102,0.095,0.10669,0,1,0 0.61,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0059,0.02,0.082,0.096,0.08523,0,0,1 0.42,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.125,0.112,0.112,0,0,1 0.78,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00859,0.0201,0.124,0.098,0.126,0,0,1 0.64,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0201,0.065,0.082,0.08,0,0,1 0.52,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0201,0.16,0.082,0.195,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.019,0.118,0.102,0.116,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.014,0.109,0.073,0.148,0,0,1 0.75,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0052,0.0201,0.096,0.089,0.108,0,0,1 0.64,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.0201,0.1,0.084,0.119,0,0,1 0.23,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.136,0.067,0.203,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.14,0.137,0.102,0,0,1 
0.53,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.0201,0.047,0.093,0.05,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.016,0.133,0.102,0.131,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.017,0.171,0.146,0.117,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.022,0.141,0.098,0.144,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.025,0.161,0.126,0.128,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.018,0.101,0.092,0.11,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.026,0.148,0.129,0.115,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.019,0.063,0.058,0.107,0,0,1 0.29,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0019,0.0208,0.096,0.102,0.096,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.026,0.105,0.096,0.10914,0,0,1 0.81,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.007,0.069,0.096,0.07172,0,0,1 0.7,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0014,0.014,0.071,0.08,0.089,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.018,0.119,0.089,0.134,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.015,0.116,0.088,0.132,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.02,0.136,0.114,0.119,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.033,0.019,0.065,0.094,0.07,0,1,0 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.015,0.147,0.06,0.247,0,0,1 0.77,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.012,0.017,0.123,0.103,0.12,0,1,0 0.63,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0032,0.0201,0.16,0.098,0.162,0,0,1 0.55,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,6e-05,0.0201,0.206,0.081,0.254,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0208,0.164,0.136,0.121,0,0,1 0.4,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.152,0.15,0.102,0,0,1 0.16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.0201,0.101,0.102,0.099,0,0,1 0.28,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.0201,0.104,0.103,0.101,0,0,1 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.0201,0.135,0.101,0.135,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.121,0.108,0.112,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.0208,0.107,0.095,0.113,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.022,0.137,0.102,0.135,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.024,0.15,0.096,0.155,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.0201,0.163,0.156,0.104,0,0,1 0.61,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.029,0.015,0.061,0.096,0.064,1,0,0 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.0201,0.056,0.076,0.073,0,0,1 0.27,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11,0.11,0.1,0,0,1 0.75,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.032,0.015,0.096,0.098,0.098,0,0,1 0.78,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.0012,0.012,0.082,0.101,0.081,0,0,1 0.69,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00208,0.025,0.131,0.103,0.126,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.116,0.108,0.107,0,0,1 0.63,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.003,0.017,0.093,0.064,0.146,0,0,1 0.62,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.069,0.087,0.079,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.016,0.147,0.089,0.165,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.025,0.102,0.093,0.11,0,0,1 0.57,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00057,0.027,0.189,0.135,0.14,0,0,1 0.16,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0.029,0.019,0.058,0.103,0.056,1,0,0 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.023,0.066,0.107,0.062,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00049,0.02,0.123,0.096,0.12785,0,0,1 0.7,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0006,0.028,0.122,0.109,0.112,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.0201,0.127,0.104,0.121,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.0201,0.12,0.097,0.123,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.023,0.119,0.09,0.132,0,0,1 
0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.006,0.086,0.087,0.1,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00051,0.012,0.076,0.074,0.102,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.114,0.003,0.024,0.061,0.039,1,0,0 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00018,0.015,0.09,0.079,0.115,0,0,1 0.38,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0018,0.016,0.131,0.096,0.13617,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00232,0.0201,0.096,0.095,0.1,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1e-05,0.0469,0.141,0.09,0.156,0,0,1 0.65,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0.00208,0.017,0.164,0.109,0.151,0,0,1 0.78,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0072,0.014,0.131,0.109,0.121,0,0,1 0.42,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00909,0.0201,0.12,0.107,0.112,0,0,1 0.42,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.055,0.0201,0.077,0.097,0.08,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.016,0.089,0.082,0.108,0,0,1 0.26,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.00041,0.0201,0.177,0.149,0.119,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.015,0.077,0.115,0.067,0,0,1 0.26,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.0012,0.0201,0.131,0.153,0.086,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.011,0.113,0.113,0.1,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00034,0.0201,0.115,0.102,0.113,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.048,0.055,0.087,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.093,0.091,0.102,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.033,0.032,0.11329,0.096,0.11776,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.071,0.101,0.07,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.099,0.101,0.098,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.027,0.132,0.139,0.096,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.013,0.071,0.096,0.0738,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00033,0.004,0.083,0.054,0.154,0,0,1 0.64,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00091,0.011,0.17,0.096,0.176,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.025,0.16,0.138,0.116,0,0,1 0.76,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.098,0.112,0.087,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.0201,0.106,0.115,0.093,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00087,0.018,0.107,0.088,0.121,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.015,0.092,0.102,0.09,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.014,0.105,0.101,0.105,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.016,0.139,0.093,0.15,0,0,1 0.79,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.013,0.007,0.124,0.102,0.122,0,0,1 0.43,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00052,0.0005,0.046,0.052,0.086,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.0201,0.081,0.084,0.097,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.017,0.138,0.102,0.136,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0064,0.009,0.102,0.097,0.106,0,1,0 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00036,0.004,0.082,0.058,0.14,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.015,0.039,0.085,0.046,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.094,0.09,0.105,0,0,1 0.5219,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.02,0.113,0.108,0.104,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.123,0.101,0.121,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.159,0.18,0.088,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00066,0.0201,0.16,0.178,0.09,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00072,0.0201,0.131,0.091,0.143,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.007,0.084,0.07,0.121,0,1,0 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.049,0.003,0.005,0.116,0.004,1,0,0 
0.41,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00055,0.017,0.11,0.107,0.102,0,0,1 0.51,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.104,0.104,0.098,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.0201,0.074,0.099,0.075,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00103,0.02,0.098,0.095,0.103,0,0,1 0.82,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00058,0.009,0.111,0.096,0.11538,0,0,1 0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.015,0.103,0.11,0.093,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.0201,0.114,0.131,0.087,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.095,0.095,0.1,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.083,0.076,0.108,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.0201,0.096,0.101,0.095,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.0005,0.091,0.097,0.093,0,1,0 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.01,0.128,0.107,0.119,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.005,0.077,0.11,0.07,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.0201,0.082,0.095,0.086,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.008,0.099,0.108,0.092,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.131,0.094,0.138,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.098,0.076,0.13,0,0,1 0.47,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.123,0.102,0.12,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.125,0.09,0.138,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00829,0.019,0.095,0.116,0.081,0,1,0 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0027,0.014,0.12,0.085,0.141,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00064,0.0201,0.098,0.063,0.156,0,0,1 0.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.014,0.101,0.088,0.115,0,0,1 0.33,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.053,0.278,0.078,0.356,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.087,0.084,0.103,0,0,1 0.63,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,9e-05,0.049,0.221,0.093,0.237,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.123,0.089,0.139,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.107,0.093,0.115,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0201,0.145,0.104,0.137,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.007,0.017,0.116,0.09,0.128,0,1,0 0.85,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.197,0.0096,0.03121,0.102,0.03042,1,0,0 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.096,0.091,0.105,0,0,1 0.25,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.017,0.123,0.104,0.116,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.012,0.058,0.084,0.068,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.197,0.0096,0.014,0.062,0.023,1,0,0 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.112,0.114,0.098,0,0,1 0.49,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0024,0.0201,0.09,0.092,0.098,0,0,1 0.22,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0201,0.159,0.104,0.153,0,0,1 0.76,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00076,0.008,0.096,0.086,0.111,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.096,0.004,0.0029,0.12,0.0024,1,0,0 0.65,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,5e-05,0.0201,0.147,0.114,0.129,0,0,1 0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.008,0.103,0.089,0.116,0,0,1 0.7,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0014,0.01,0.077,0.072,0.107,0,0,1 0.75,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.091,0.079,0.114,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.1,0.086,0.117,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.0201,0.105,0.074,0.143,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.093,0.077,0.121,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.113,0.116,0.096,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.023,0.082,0.089,0.091,0,0,1 
0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.026,0.142,0.096,0.149,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.013,0.097,0.085,0.115,0,0,1 0.56,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.098,0.086,0.115,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0057,0.0201,0.097,0.1,0.096,0,0,1 0.58,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.106,0.113,0.094,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.008,0.061,0.098,0.062,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.02,0.13,0.102,0.128,0,0,1 0.7,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0115,0.01,0.128,0.096,0.133,0,1,0 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00101,0.025,0.125,0.102,0.122,0,0,1 0.75,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0015,0.0201,0.102,0.116,0.087,0,0,1 0.55,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.028,0.105,0.096,0.10914,0,0,1 0.57,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.105,0.094,0.111,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.0201,0.079,0.074,0.107,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.037,0.046,0.092,0.051,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.091,0.088,0.103,0,0,1 0.5,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.0055,0.016,0.044,0.108,0.04,0,0,1 0.82,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00032,0.016,0.098,0.089,0.11,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.081,0.085,0.095,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.027,0.163,0.104,0.156,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.119,0.09,0.132,0,0,1 0.74,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0044,0.024,0.115,0.114,0.101,0,0,1 0.56,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.12,0.098,0.122,0,0,1 0.47,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0201,0.139,0.104,0.134,0,0,1 0.63,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.0201,0.1,0.095,0.106,0,0,1 0.77,1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.0019,0.016,0.114,0.099,0.114,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.098,0.104,0.094,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.11329,0.096,0.11776,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,2e-05,0.0201,0.127,0.095,0.134,0,0,1 0.56,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0.00047,0.0201,0.125,0.104,0.119,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.038,0.146,0.142,0.103,0,0,1 0.18,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00074,0.023,0.143,0.115,0.124,0,0,1 0.35,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0037,0.028,0.19,0.14,0.136,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.116,0.098,0.118,0,0,1 0.81,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.138,0.084,0.165,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.0201,0.108,0.08,0.134,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.02,0.119,0.111,0.107,0,0,1 0.57,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.0201,0.118,0.111,0.106,0,0,1 0.35,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0011,0.028,0.127,0.158,0.08,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.02,0.079,0.096,0.08212,0,0,1 0.5,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0201,0.157,0.12,0.131,0,0,1 0.74,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0029,0.015,0.096,0.084,0.114,0,0,1 0.61,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.001,0.014,0.118,0.102,0.116,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.019,0.162,0.107,0.151,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.025,0.1,0.093,0.107,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.025,0.126,0.113,0.112,0,0,1 0.35,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0025,0.018,0.077,0.096,0.08004,0,0,1 0.66,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0012,0.012,0.093,0.125,0.074,0,0,1 
0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.131,0.003,0.00839,0.101,0.00829,1,0,0 0.16,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0015,0.019,0.106,0.104,0.104,0,0,1 0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0201,0.143,0.096,0.14864,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.025,0.114,0.116,0.097,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.101,0.113,0.09,0,0,1 0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.42,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00889,0.013,0.075,0.074,0.102,0,0,1 0.42,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0014,0.014,0.116,0.095,0.123,0,0,1 0.6,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.069,0.132,0.096,0.13721,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.082,0.101,0.082,0,0,1 0.36,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.03,0.071,0.085,0.084,0,0,1 0.74,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.013,0.108,0.096,0.11226,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.0201,0.133,0.083,0.161,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.0201,0.084,0.104,0.08,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.0201,0.138,0.121,0.114,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.0201,0.091,0.08,0.114,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.02,0.112,0.097,0.115,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.157,0.189,0.083,0,0,1 0.2,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.02,0.092,0.103,0.089,0,0,1 0.2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.007,0.101,0.084,0.12,0,0,1 0.66,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.00469,0.013,0.145,0.096,0.15072,0,0,1 0.28,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.003,0.0208,0.136,0.096,0.14137,0,0,1 0.44,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.172,0.099,0.174,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.014,0.1,0.08,0.123,0,0,1 0.76,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.001,0.019,0.13,0.121,0.107,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.009,0.125,0.104,0.119,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.018,0.106,0.095,0.111,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.012,0.097,0.097,0.1,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0019,0.0208,0.113,0.101,0.112,0,0,1 0.82,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.046,0.088,0.052,0,0,1 0.61,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.0201,0.18,0.093,0.194,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00879,0.0201,0.166,0.128,0.13,0,0,1 0.56,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0.003,0.015,0.103,0.08,0.128,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.104,0.104,0.098,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.025,0.111,0.125,0.088,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.028,0.133,0.089,0.149,0,0,1 0.42,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00083,0.029,0.123,0.095,0.13,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0026,0.022,0.162,0.104,0.156,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.02,0.081,0.076,0.106,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.025,0.081,0.073,0.112,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.01,0.111,0.085,0.131,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.029,0.104,0.095,0.11,0,0,1 0.85,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00055,0.0201,0.024,0.111,0.022,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.065,0.077,0.085,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.5,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.067,0.007,0.08,0.107,0.074,0,0,1 0.64,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.1,0.091,0.11,0,0,1 0.29,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0.02,0.017,0.079,0.116,0.068,0,1,0 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.0201,0.093,0.101,0.093,0,0,1 
0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.023,0.115,0.085,0.136,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.135,0.069,0.196,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0082,0.014,0.127,0.111,0.114,0,1,0 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.116,0.101,0.115,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.025,0.138,0.089,0.154,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0078,0.055,0.166,0.121,0.138,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0016,0.017,0.102,0.089,0.115,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.123,0.093,0.133,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.017,0.123,0.095,0.13,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.075,0.078,0.096,0,0,1 0.42,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.031,0.119,0.08,0.149,0,0,1 0.41,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0.0031,0.012,0.144,0.103,0.139,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.018,0.135,0.094,0.145,0,0,1 0.43,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.029,0.128,0.087,0.148,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0201,0.147,0.097,0.152,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.073,0.077,0.095,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.0201,0.182,0.15,0.121,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.0201,0.103,0.087,0.118,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.139,0.096,0.14449,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.0201,0.107,0.1,0.108,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0.006,0.022,0.111,0.02,1,0,0 0.33,0,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0.0012,0.022,0.117,0.094,0.126,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.062,0.11329,0.096,0.11776,0,0,1 0.58,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.022,0.078,0.097,0.079,0,0,1 0.43,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0028,0.0201,0.117,0.077,0.151,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.101,0.108,0.094,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.118,0.088,0.133,0,0,1 0.79,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.003,0.092,0.044,0.206,0,0,1 0.72,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.01,0.053,0.061,0.087,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.033,0.157,0.128,0.123,0,0,1 0.35,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.115,0.096,0.121,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0052,0.018,0.107,0.102,0.104,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.026,0.119,0.104,0.114,0,0,1 0.85,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.006,0.124,0.096,0.129,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.032,0.136,0.091,0.149,0,0,1 0.81,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.024,0.155,0.113,0.137,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.007,0.076,0.075,0.102,0,0,1 0.39,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0071,0.017,0.106,0.094,0.114,0,1,0 0.58,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.027,0.108,0.072,0.149,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.018,0.098,0.096,0.10187,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.025,0.06,0.091,0.065,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.094,0.077,0.123,0,0,1 0.86,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0012,0.012,0.128,0.094,0.137,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0018,0.01,0.112,0.098,0.113,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.086,0.109,0.079,0,0,1 0.47,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00038,0.034,0.144,0.126,0.114,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.08,0.086,0.093,0,0,1 0.41,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,6e-05,0.0201,0.159,0.094,0.169,0,0,1 
0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.0201,0.147,0.116,0.127,0,0,1 0.7,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0.0095,0.005,0.099,0.058,0.169,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00032,0.017,0.152,0.113,0.135,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.028,0.148,0.149,0.099,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.02,0.142,0.104,0.135,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.029,0.136,0.12,0.114,0,0,1 0.5,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0028,0.015,0.084,0.102,0.082,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0201,0.159,0.104,0.153,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.029,0.024,0.058,0.098,0.059,1,0,0 0.8,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00075,0.0208,0.129,0.104,0.122,0,0,1 0.6,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00085,0.0201,0.196,0.173,0.113,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.013,0.102,0.093,0.11,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.016,0.123,0.102,0.12,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.026,0.108,0.096,0.113,0,0,1 0.53,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0051,0.024,0.105,0.131,0.08,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.0201,0.093,0.099,0.093,0,0,1 0.72,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0011,0.022,0.116,0.103,0.112,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.02,0.108,0.09,0.119,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.026,0.144,0.143,0.101,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.126,0.115,0.11,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.024,0.115,0.121,0.095,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.016,0.095,0.085,0.112,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.108,0.086,0.125,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.016,0.104,0.107,0.097,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.12,0.116,0.102,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.011,0.104,0.113,0.092,0,0,1 0.21,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.011,0.014,0.082,0.099,0.083,0,1,0 0.69,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0014,0.019,0.136,0.085,0.159,0,0,1 0.55,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00066,0.0201,0.131,0.104,0.124,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0053,0.0201,0.098,0.075,0.13,0,0,1 0.72,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00025,0.044,0.192,0.091,0.211,0,0,1 0.57,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.022,0.024,0.112,0.096,0.11642,0,0,1 0.48,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00081,0.0201,0.126,0.096,0.131,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.028,0.112,0.112,0.099,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.014,0.089,0.113,0.079,0,0,1 0.22,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0016,0.0201,0.11,0.103,0.106,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.011,0.118,0.075,0.157,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.011,0.095,0.079,0.12,0,0,1 0.55,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.055,0.214,0.091,0.236,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.156,0.095,0.164,0,0,1 0.68,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.125,0.104,0.119,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.06,0.122,0.05,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.0201,0.125,0.102,0.123,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.006,0.0201,0.091,0.093,0.098,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.019,0.133,0.1,0.133,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.026,0.091,0.067,0.135,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00036,0.019,0.096,0.094,0.102,0,0,1 
0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.119,0.094,0.127,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.031,0.026,0.09663,0.095,0.10107,0,1,0 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.018,0.121,0.096,0.12577,0,0,1 0.16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.085,0.1,0.085,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.174,0.098,0.177,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.102,0.094,0.109,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.029,0.122,0.111,0.11,0,0,1 0.6,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0048,0.028,0.1,0.104,0.094,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0067,0.017,0.087,0.087,0.1,0,1,0 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.023,0.093,0.101,0.092,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.113,0.112,0.101,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0201,0.161,0.103,0.157,0,0,1 0.47,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.0201,0.131,0.099,0.132,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.033,0.119,0.131,0.09,0,0,1 0.3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00061,0.015,0.082,0.119,0.069,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.016,0.118,0.104,0.111,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.023,0.104,0.092,0.113,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.0201,0.107,0.103,0.104,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.025,0.134,0.109,0.123,0,0,1 0.35,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00054,0.023,0.099,0.108,0.092,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.099,0.115,0.086,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.013,0.164,0.1,0.164,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.018,0.117,0.1,0.117,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.113,0.083,0.136,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.017,0.074,0.084,0.088,0,1,0 0.27,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0.001,0.0201,0.138,0.091,0.152,0,0,1 0.73,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00051,0.024,0.192,0.119,0.162,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00101,0.018,0.095,0.096,0.09875,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00066,0.0201,0.117,0.109,0.107,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.0201,0.151,0.096,0.157,0,0,1 0.59,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0018,0.025,0.102,0.11,0.093,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.042,0.002,0.014,0.116,0.012,1,0,0 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.138,0.124,0.111,0,0,1 0.79,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.01,0.0201,0.125,0.099,0.127,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.0201,0.107,0.094,0.114,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.74,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0029,0.0201,0.11,0.104,0.103,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.024,0.106,0.087,0.122,0,0,1 0.19,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0085,0.034,0.134,0.155,0.086,0,1,0 0.35,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0013,0.016,0.087,0.096,0.09043,0,0,1 0.46,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0011,0.018,0.103,0.087,0.118,0,0,1 0.56,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0027,0.0201,0.125,0.104,0.119,0,0,1 0.4,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.023,0.15,0.124,0.121,0,0,1 0.54,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0072,0.017,0.074,0.088,0.084,0,1,0 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.088,0.103,0.085,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.001,0.0201,0.101,0.083,0.121,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.112,0.101,0.111,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.099,0.099,0.099,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.03,0.096,0.081,0.119,0,0,1 
0.23,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0017,0.039,0.12,0.116,0.102,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00044,0.017,0.115,0.087,0.133,0,0,1 0.44,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0201,0.189,0.1,0.19,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.0201,0.118,0.096,0.12266,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0022,0.0208,0.096,0.086,0.112,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.0201,0.124,0.136,0.091,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.127,0.084,0.151,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0057,0.0201,0.098,0.086,0.115,0,0,1 0.67,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0013,0.0201,0.117,0.089,0.131,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.145,0.098,0.147,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00051,0.0201,0.074,0.063,0.116,0,0,1 0.63,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0066,0.022,0.103,0.086,0.12,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0067,0.017,0.117,0.104,0.113,0,1,0 0.41,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0046,0.024,0.129,0.102,0.127,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0061,0.014,0.082,0.085,0.097,0,1,0 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.031,0.124,0.11,0.113,0,0,1 0.53,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.078,0.085,0.091,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.015,0.123,0.096,0.12785,0,0,1 0.32,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.019,0.137,0.08,0.172,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.027,0.116,0.087,0.134,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.07,0.066,0.106,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.126,0.097,0.129,0,0,1 0.6,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0009,0.0201,0.089,0.096,0.092,0,0,1 0.6,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.126,0.116,0.109,0,0,1 0.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.027,0.099,0.1,0.099,0,0,1 0.64,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00013,0.03,0.122,0.095,0.128,0,0,1 0.58,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.8,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.111,0.088,0.127,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.136,0.15,0.09,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.17,0.184,0.092,0,0,1 0.19,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0.0032,0.022,0.118,0.099,0.119,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.0201,0.141,0.104,0.133,0,0,1 0.74,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0044,0.0201,0.058,0.058,0.1,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.018,0.12,0.096,0.124,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.016,0.089,0.074,0.119,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.01,0.067,0.096,0.07,0,0,1 0.6,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0013,0.017,0.148,0.104,0.141,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.143,0.092,0.155,0,0,1 0.59,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0029,0.0201,0.133,0.097,0.137,0,0,1 0.74,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.019,0.1,0.092,0.108,0,0,1 0.72,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.004,0.0201,0.138,0.15,0.092,0,0,1 0.36,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.113,0.101,0.113,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.136,0.098,0.138,0,0,1 0.59,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0076,0.0201,0.106,0.107,0.1,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0201,0.165,0.09,0.182,0,0,1 0.5,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00032,0.015,0.138,0.104,0.131,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0208,0.14,0.114,0.123,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.025,0.175,0.123,0.143,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.009,0.062,0.09,0.069,0,0,1 
0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.177,0.096,0.18399,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.086,0.068,0.126,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.111,0.082,0.137,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.024,0.118,0.101,0.117,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.094,0.079,0.119,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.166,0.135,0.123,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.093,0.069,0.135,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.114,0.109,0.104,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.163,0.126,0.13,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.024,0.054,0.085,0.063,0,0,1 0.04,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,6e-05,0.0201,0.103,0.086,0.12,0,0,1 0.73,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00023,0.0201,0.095,0.116,0.081,0,0,1 0.49,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00208,0.0201,0.114,0.085,0.135,0,0,1 0.63,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.085,0.075,0.113,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.104,0.096,0.108,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0061,0.017,0.139,0.135,0.103,0,1,0 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.085,0.082,0.104,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.105,0.096,0.109,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.077,0.085,0.091,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.089,0.074,0.121,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.0201,0.119,0.098,0.121,0,0,1 0.66,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.02,0.0096,0.042,0.098,0.043,1,0,0 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.0201,0.141,0.173,0.081,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.017,0.066,0.079,0.084,0,0,1 0.65,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0082,0.016,0.096,0.095,0.10041,0,1,0 0.3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0014,0.013,0.117,0.116,0.101,0,0,1 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.019,0.158,0.107,0.148,0,0,1 0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.118,0.128,0.093,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0208,0.11329,0.096,0.123,0,0,1 0.38,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.129,0.116,0.11,0,0,1 0.38,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.018,0.099,0.096,0.10291,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.0208,0.082,0.086,0.095,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.06,0.082,0.072,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0016,0.0201,0.117,0.107,0.109,0,0,1 0.42,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5e-05,0.027,0.111,0.088,0.127,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.019,0.086,0.071,0.12,0,0,1 0.5,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0033,0.019,0.11329,0.116,0.105,0,0,1 0.4,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0016,0.0201,0.155,0.093,0.166,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.068,0.169,0.084,0.201,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.015,0.077,0.096,0.08004,0,0,1 0.23,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0019,0.0201,0.167,0.111,0.15,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0014,0.028,0.087,0.114,0.087,0,0,1 0.72,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00232,0.0201,0.102,0.096,0.102,0,0,1 0.66,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.094,0.078,0.099,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.023,0.087,0.094,0.093,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.022,0.097,0.1,0.097,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.0201,0.093,0.064,0.146,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.077,0.099,0.078,0,0,1 
0.55,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.02,0.109,0.093,0.117,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.106,0.004,0.01,0.111,0.0087,1,0,0 0.23,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.0201,0.204,0.134,0.152,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.026,0.113,0.113,0.1,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.017,0.091,0.096,0.095,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.016,0.086,0.094,0.092,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.03,0.136,0.1,0.135,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.024,0.107,0.098,0.11,0,0,1 0.68,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00208,0.0201,0.1,0.104,0.096,0,0,1 0.34,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0012,0.0201,0.105,0.107,0.099,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.101,0.104,0.096,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.105,0.093,0.113,0,0,1 0.54,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.088,0.087,0.1,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.023,0.19,0.107,0.177,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00023,0.0201,0.085,0.076,0.111,0,0,1 0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.087,0.084,0.103,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.139,0.124,0.112,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.0201,0.106,0.094,0.113,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.0201,0.14,0.104,0.132,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.0201,0.115,0.087,0.131,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.02,0.104,0.091,0.113,0,0,1 0.74,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00232,0.004,0.11329,0.096,0.11776,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.014,0.111,0.126,0.088,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.143,0.091,0.156,0,0,1 0.64,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00092,0.009,0.039,0.091,0.043,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.015,0.123,0.096,0.12785,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.008,0.015,0.072,0.095,0.07531,0,1,0 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.017,0.117,0.074,0.158,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.017,0.084,0.077,0.109,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.101,0.092,0.11,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.129,0.108,0.12,0,0,1 0.82,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.065,0.064,0.103,0,0,1 0.6,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00065,0.0201,0.114,0.095,0.12,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0208,0.115,0.104,0.111,0,0,1 0.57,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0048,0.0208,0.085,0.097,0.087,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.0201,0.187,0.072,0.262,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.102,0.09,0.114,0,0,1 0.77,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.016,0.109,0.095,0.11401,0,1,0 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.072,0.075,0.096,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.0201,0.116,0.099,0.117,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.019,0.108,0.096,0.11226,0,0,1 0.51,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00073,0.0201,0.11,0.085,0.129,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.014,0.131,0.098,0.133,0,0,1 0.27,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0082,0.014,0.06,0.125,0.048,0,0,1 0.33,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.016,0.013,0.092,0.014,0,0,1 0.38,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.016,0.149,0.097,0.154,0,0,1 0.74,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.015,0.131,0.112,0.117,0,0,1 0.4,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.017,0.129,0.087,0.148,0,0,1 
0.61,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.018,0.083,0.071,0.117,0,0,1 0.56,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0.006,0.0201,0.11,0.103,0.107,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00056,0.024,0.097,0.096,0.10083,0,0,1 0.33,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0062,0.0201,0.051,0.066,0.078,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.032,0.154,0.132,0.117,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.0201,0.102,0.084,0.122,0,0,1 0.51,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.0201,0.085,0.096,0.089,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.105,0.087,0.121,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.013,0.119,0.076,0.157,0,0,1 0.61,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,2e-05,0.0201,0.113,0.093,0.122,0,0,1 0.72,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0016,0.0208,0.114,0.114,0.1,0,0,1 0.67,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0069,0.017,0.082,0.091,0.09,0,1,0 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.0201,0.189,0.147,0.128,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0201,0.178,0.157,0.114,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.02,0.104,0.093,0.112,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.73,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.015,0.102,0.087,0.117,0,0,1 0.73,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,5e-05,0.0201,0.185,0.094,0.197,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.132,0.112,0.118,0,0,1 0.47,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.114,0.111,0.102,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00232,0.0201,0.087,0.099,0.087,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.103,0.099,0.104,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.02,0.099,0.103,0.095,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.025,0.123,0.11,0.111,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.0201,0.104,0.113,0.092,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00078,0.0201,0.11,0.096,0.115,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.106,0.092,0.116,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0077,0.017,0.063,0.081,0.079,0,1,0 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.017,0.076,0.078,0.098,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.019,0.099,0.119,0.083,0,0,1 0.71,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0028,0.0201,0.106,0.088,0.12,0,0,1 0.77,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00042,0.013,0.067,0.108,0.062,0,0,1 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.121,0.094,0.129,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.00071,0.017,0.093,0.088,0.106,0,0,1 0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.017,0.096,0.068,0.142,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.015,0.093,0.097,0.095,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0018,0.0201,0.095,0.098,0.097,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.017,0.101,0.084,0.12,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5e-05,0.011,0.069,0.078,0.088,0,0,1 0.8,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00083,0.019,0.116,0.096,0.12058,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.086,0.086,0.099,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.175,0.108,0.161,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.018,0.115,0.1,0.115,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.037,0.017,0.086,0.082,0.105,0,1,0 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0082,0.017,0.125,0.081,0.153,0,1,0 
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.018,0.101,0.107,0.094,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.068,0.087,0.078,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.015,0.119,0.101,0.118,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.172,0.0096,0.029,0.084,0.035,1,0,0 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.114,0.101,0.113,0,0,1 0.56,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.0201,0.134,0.066,0.204,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00071,0.022,0.091,0.094,0.096,0,0,1 0.75,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0026,0.0201,0.096,0.112,0.086,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00064,0.014,0.11,0.078,0.142,0,0,1 0.51,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.12,0.122,0.099,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.015,0.124,0.088,0.141,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.028,0.124,0.116,0.106,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0208,0.183,0.102,0.179,0,0,1 0.42,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0044,0.019,0.127,0.114,0.111,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.12,0.111,0.108,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.0201,0.097,0.09,0.108,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00095,0.0201,0.085,0.08,0.106,0,0,1 0.16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.1,0.103,0.096,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.015,0.172,0.115,0.149,0,0,1 0.51,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0064,0.0208,0.12,0.109,0.11,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00045,0.0201,0.113,0.104,0.107,0,0,1 0.45,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.0419,0.183,0.078,0.235,0,0,1 0.42,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.114,0.008,0.00406,0.104,0.00384,1,0,0 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.023,0.212,0.134,0.158,0,0,1 0.79,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0064,0.018,0.109,0.097,0.113,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00075,0.017,0.088,0.092,0.096,0,0,1 0.42,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.027,0.172,0.101,0.172,0,0,1 0.42,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.031,0.136,0.1,0.136,0,0,1 0.76,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00019,0.016,0.129,0.099,0.13,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.119,0.112,0.107,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.023,0.11329,0.096,0.11776,0,0,1 0.46,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.017,0.111,0.098,0.113,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.145,0.151,0.096,0,0,1 0.78,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0015,0.0201,0.115,0.108,0.106,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.011,0.091,0.086,0.105,0,0,1 0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.083,0.093,0.089,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.016,0.116,0.104,0.111,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.014,0.135,0.119,0.113,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.111,0.132,0.084,0,0,1 0.75,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0043,0.017,0.142,0.112,0.127,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.081,0.085,0.095,0,0,1 0.25,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0015,0.018,0.093,0.099,0.094,0,0,1 0.25,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.034,0.0096,0.014,0.103,0.013,1,0,0 0.25,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.027,0.159,0.092,0.173,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00095,0.0201,0.115,0.097,0.117,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00066,0.0201,0.062,0.094,0.063,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.134,0.114,0.117,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.121,0.138,0.087,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.105,0.094,0.112,0,0,1 
0.28,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.019,0.175,0.123,0.143,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.017,0.106,0.097,0.11,0,1,0 0.44,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.015,0.064,0.101,0.063,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.087,0.088,0.099,0,0,1 0.57,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0033,0.013,0.067,0.096,0.07,0,0,1 0.58,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.01,0.155,0.113,0.137,0,0,1 0.42,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.008,0.015,0.11,0.096,0.11434,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.075,0.075,0.1,0,0,1 0.31,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.123,0.096,0.12785,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0036,0.015,0.105,0.08,0.131,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.014,0.062,0.064,0.098,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.152,0.151,0.101,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0011,0.018,0.091,0.094,0.097,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.107,0.107,0.1,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.116,0.1,0.117,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.018,0.11329,0.096,0.11776,0,0,1 0.46,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.078,0.093,0.084,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.009,0.1,0.101,0.1,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.76,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.0201,0.103,0.103,0.1,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.125,0.108,0.115,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.029,0.116,0.096,0.12058,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.017,0.138,0.115,0.125,0,0,1 0.67,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.0201,0.081,0.091,0.089,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.0201,0.13,0.104,0.123,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0057,0.0201,0.132,0.126,0.105,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.038,0.2,0.076,0.264,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.029,0.15,0.096,0.15592,0,0,1 0.37,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.1,0.086,0.117,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.0201,0.097,0.092,0.106,0,0,1 0.55,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.0201,0.092,0.058,0.157,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.017,0.185,0.08,0.231,0,0,1 0.66,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.048,0.004,0.012,0.135,0.00909,1,0,0 0.66,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0027,0.01,0.067,0.083,0.08,0,0,1 0.66,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.001,0.02,0.091,0.095,0.095,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.018,0.203,0.091,0.224,0,0,1 0.36,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0028,0.0201,0.155,0.097,0.159,0,0,1 0.65,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.014,0.144,0.13,0.11,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.113,0.097,0.117,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.083,0.081,0.103,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00035,0.017,0.097,0.086,0.113,0,0,1 0.54,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0011,0.0201,0.146,0.108,0.136,0,0,1 0.7,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0019,0.022,0.11329,0.096,0.11776,0,0,1 0.7,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.017,0.124,0.111,0.112,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00103,0.0201,0.09,0.102,0.089,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.124,0.101,0.122,0,0,1 0.19,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.134,0.112,0.109,0,0,1 
0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.017,0.102,0.096,0.10602,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.091,0.104,0.087,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.0201,0.119,0.094,0.128,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.018,0.096,0.086,0.112,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.0201,0.098,0.083,0.118,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.09,0.089,0.102,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00232,0.0201,0.109,0.104,0.104,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.148,0.085,0.174,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.011,0.14,0.09,0.156,0,0,1 0.6,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00208,0.016,0.136,0.096,0.14137,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.0201,0.112,0.133,0.084,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.0201,0.132,0.116,0.114,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00044,0.016,0.122,0.086,0.141,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.149,0.137,0.109,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.088,0.074,0.12,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.017,0.122,0.107,0.114,0,0,1 0.29,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.023,0.099,0.096,0.104,0,0,1 0.19,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.164,0.0096,0.065,0.159,0.041,1,0,0 0.73,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.0032,0.0201,0.121,0.088,0.137,0,0,1 0.19,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0066,0.018,0.122,0.116,0.104,0,0,1 0.39,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.46,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.141,0.098,0.143,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0201,0.08,0.088,0.091,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.104,0.093,0.112,0,0,1 0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.26,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0011,0.014,0.036,0.086,0.042,0,0,1 0.22,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.395,0.107,0.369,0,0,1 0.74,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0072,0.013,0.096,0.099,0.097,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.0201,0.128,0.102,0.125,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.098,0.089,0.11,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.134,0.128,0.105,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.016,0.106,0.098,0.108,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.0201,0.193,0.104,0.184,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.077,0.087,0.088,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00054,0.017,0.113,0.094,0.12,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.019,0.124,0.12,0.103,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.014,0.091,0.104,0.087,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.0201,0.107,0.104,0.101,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.113,0.094,0.12,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,6e-05,0.026,0.11,0.096,0.11434,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.013,0.05,0.067,0.075,0,0,1 0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.086,0.091,0.095,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.017,0.118,0.091,0.13,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.104,0.095,0.109,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.0201,0.074,0.083,0.089,0,0,1 0.52,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00063,0.0201,0.111,0.099,0.113,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.044,0.017,0.079,0.082,0.096,0,1,0 
0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.02,0.085,0.104,0.081,0,0,1 0.33,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.032,0.185,0.091,0.206,0,0,1 0.59,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.148,0.109,0.136,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.133,0.1,0.133,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.005,0.11,0.086,0.127,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.121,0.093,0.131,0,0,1 0.32,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0075,0.0201,0.109,0.113,0.096,0,0,1 0.35,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1e-05,0.019,0.136,0.107,0.127,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00017,0.0201,0.075,0.102,0.073,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.02,0.101,0.09,0.112,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0023,0.0201,0.095,0.091,0.104,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00045,0.014,0.091,0.08,0.114,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.016,0.105,0.086,0.122,0,0,1 0.49,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.068,0.003,0.068,0.088,0.077,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.022,0.13,0.097,0.134,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.018,0.098,0.088,0.112,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.001,0.0201,0.089,0.084,0.106,0,0,1 0.65,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0012,0.0201,0.098,0.078,0.126,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.106,0.113,0.094,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.0201,0.088,0.099,0.089,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.121,0.088,0.137,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.006,0.087,0.087,0.1,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.013,0.098,0.103,0.095,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.0201,0.086,0.095,0.091,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0058,0.027,0.079,0.1,0.079,0,0,1 0.76,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0038,0.027,0.097,0.119,0.081,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0051,0.016,0.091,0.108,0.085,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.095,0.099,0.096,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0075,0.017,0.076,0.095,0.08,0,1,0 0.54,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0033,0.0201,0.115,0.133,0.086,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.107,0.108,0.099,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.011,0.096,0.094,0.102,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.0201,0.104,0.099,0.106,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.019,0.091,0.08,0.114,0,0,1 0.7,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0023,0.028,0.09,0.11,0.082,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.022,0.142,0.096,0.1476,0,0,1 0.52,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0.0034,0.023,0.084,0.072,0.117,0,0,1 0.8,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.018,0.12,0.104,0.116,0,0,1 0.2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.026,0.038,0.11,0.035,0,0,1 0.7,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0052,0.011,0.112,0.078,0.143,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.007,0.105,0.085,0.124,0,0,1 0.5219,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.028,0.112,0.084,0.133,0,0,1 0.28,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.024,0.22,0.148,0.149,0,0,1 0.34,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0.00232,0.026,0.082,0.102,0.08,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,3e-05,0.032,0.123,0.108,0.114,0,0,1 0.34,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3e-05,0.022,0.105,0.085,0.123,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.093,0.103,0.09,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.01,0.065,0.06,0.108,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00049,0.0201,0.111,0.09,0.124,0,0,1 
0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.108,0.099,0.11,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.12,0.103,0.116,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.012,0.023,0.017,0.136,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.02,0.105,0.085,0.125,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.084,0.096,0.088,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,4e-05,0.017,0.102,0.09,0.113,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.014,0.13,0.085,0.153,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.0201,0.08,0.091,0.088,0,0,1 0.51,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.019,0.019,0.092,0.084,0.11,0,1,0 0.64,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00088,0.0201,0.125,0.087,0.145,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.083,0.114,0,0,1 0.69,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,9e-05,0.0201,0.131,0.07,0.186,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.0201,0.11,0.097,0.113,0,0,1 0.56,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.038,0.162,0.104,0.155,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.018,0.13,0.091,0.143,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.131,0.092,0.141,0,0,1 0.6,1,1,0,0,0,0,0,1,0,1,0,0,0,0,0,4e-05,0.023,0.155,0.07,0.221,0,0,1 0.63,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.39,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00889,0.017,0.097,0.088,0.111,0,1,0 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.023,0.166,0.084,0.198,0,0,1 0.46,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.035,0.012,0.016,0.086,0.019,1,0,0 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.008,0.017,0.088,0.099,0.089,0,1,0 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00066,0.0201,0.107,0.086,0.124,0,0,1 0.63,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00015,0.0201,0.173,0.112,0.154,0,0,1 0.63,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0024,0.022,0.113,0.096,0.11746,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.017,0.081,0.095,0.086,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.018,0.067,0.089,0.072,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.014,0.199,0.098,0.204,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0062,0.007,0.055,0.058,0.096,0,1,0 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.014,0.161,0.088,0.183,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.023,0.11329,0.096,0.11776,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.107,0.101,0.106,0,0,1 0.89,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.014,0.0096,0.03121,0.102,0.03042,1,0,0 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.11329,0.096,0.11776,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00208,0.017,0.116,0.101,0.115,0,0,1 0.72,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0045,0.0201,0.102,0.094,0.108,0,0,1 0.61,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0068,0.009,0.096,0.101,0.095,0,0,1 0.39,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00208,0.031,0.153,0.16,0.096,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00018,0.015,0.103,0.096,0.10706,0,0,1 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.024,0.037,0.081,0.045,0,0,1 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.106,0.091,0.117,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.11,0.103,0.107,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11,0.108,0.102,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.123,0.085,0.145,0,0,1 0.67,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.044,0.0096,0.031,0.104,0.029,1,0,0 0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.01,0.103,0.072,0.142,0,0,1 0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.0201,0.076,0.098,0.077,0,0,1 0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.106,0.107,0.099,0,0,1 0.73,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0023,0.016,0.077,0.104,0.075,0,0,1 
0.77,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.36,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,7e-05,0.041,0.153,0.086,0.178,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0024,0.016,0.11,0.104,0.105,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.053,0.054,0.098,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.03,0.139,0.098,0.141,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0066,0.013,0.146,0.104,0.137,0,1,0 0.73,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0033,0.009,0.09,0.088,0.102,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0208,0.132,0.092,0.143,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.0201,0.135,0.102,0.133,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.013,0.121,0.08,0.152,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.019,0.124,0.108,0.115,0,0,1 0.86,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.116,0.093,0.125,0,0,1 0.86,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.022,0.084,0.077,0.11,0,0,1 0.76,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0065,0.0201,0.084,0.086,0.097,0,0,1 0.6,1,1,0,0,0,0,1,0,0,1,0,0,0,0,0,7e-05,0.013,0.131,0.093,0.142,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.016,0.077,0.085,0.09,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00043,0.0201,0.093,0.093,0.1,0,0,1 0.72,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00045,0.0201,0.119,0.115,0.104,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.017,0.108,0.08,0.135,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.176,0.093,0.189,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.147,0.134,0.11,0,0,1 0.35,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.1,0.098,0.102,0,0,1 0.26,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.00088,0.0201,0.119,0.152,0.078,0,0,1 0.46,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,7e-05,0.038,0.148,0.096,0.15384,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.015,0.151,0.096,0.15696,0,0,1 0.61,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.62,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0019,0.0208,0.116,0.112,0.103,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.016,0.131,0.095,0.138,0,0,1 0.28,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,7e-05,0.0201,0.172,0.171,0.101,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.018,0.082,0.092,0.089,0,0,1 0.66,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.048,0.005,0.00419,0.136,0.0031,1,0,0 0.74,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.009,0.104,0.078,0.133,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.123,0.116,0.104,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0044,0.0201,0.136,0.1,0.136,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00829,0.017,0.115,0.086,0.134,0,1,0 0.25,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0.011,0.087,0.094,0.093,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.088,0.078,0.113,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.157,0.104,0.148,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.141,0.136,0.104,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.016,0.065,0.076,0.086,0,0,1 0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0208,0.184,0.141,0.13,0,0,1 0.42,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0014,0.023,0.095,0.098,0.097,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.01,0.079,0.086,0.092,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.019,0.101,0.096,0.105,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.11,0.081,0.137,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.0201,0.057,0.065,0.087,0,0,1 0.74,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.112,0.096,0.116,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00208,0.013,0.052,0.083,0.063,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0017,0.0201,0.127,0.094,0.136,0,0,1 
0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.014,0.095,0.09,0.105,0,0,1 0.38,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0011,0.018,0.081,0.087,0.093,0,0,1 0.63,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0041,0.018,0.1,0.121,0.083,0,0,1 0.57,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.0201,0.142,0.087,0.163,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.02,0.209,0.096,0.218,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0.017,0.085,0.101,0.084,0,1,0 0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00048,0.0201,0.161,0.087,0.185,0,0,1 0.13,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.0201,0.139,0.089,0.156,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.09,0.103,0.087,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0.017,0.086,0.102,0.084,0,1,0 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.025,0.0096,0.033,0.096,0.034,1,0,0 0.52,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0208,0.144,0.125,0.115,0,0,1 0.68,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00016,0.0201,0.1,0.091,0.11,0,0,1 0.24,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0025,0.031,0.119,0.123,0.097,0,0,1 0.22,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5e-05,0.062,0.386,0.102,0.378,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.138,0.148,0.093,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.018,0.1,0.091,0.11,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.095,0.063,0.151,0,0,1 0.84,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00232,0.029,0.139,0.093,0.15,0,0,1 0.76,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.083,0.096,0.087,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.024,0.133,0.109,0.122,0,0,1 0.79,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.0201,0.127,0.093,0.137,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.0201,0.1,0.081,0.122,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0058,0.013,0.057,0.096,0.05925,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.011,0.0208,0.075,0.101,0.074,0,1,0 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00079,0.013,0.108,0.076,0.141,0,0,1 0.33,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00042,0.0201,0.125,0.086,0.145,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.149,0.104,0.14,0,0,1 0.33,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00062,0.0201,0.132,0.187,0.071,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.149,0.129,0.116,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.076,0.071,0.107,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.141,0.0005,0.003,0.104,0.003,1,0,0 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.085,0.079,0.107,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.01,0.126,0.108,0.117,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.005,0.11329,0.096,0.11776,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00045,0.006,0.079,0.054,0.148,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.119,0.101,0.119,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.009,0.112,0.094,0.119,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.016,0.081,0.11,0.074,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.015,0.082,0.113,0.073,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.125,0.109,0.114,0,0,1 0.3,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.0201,0.103,0.079,0.13,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.015,0.141,0.104,0.134,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0005,0.02,0.065,0.031,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.0201,0.123,0.096,0.128,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.011,0.105,0.081,0.129,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.162,0.119,0.136,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.007,0.017,0.077,0.098,0.078,0,1,0 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.08,0.072,0.11,0,0,1 
0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00023,0.0201,0.118,0.087,0.136,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.021,0.014,0.088,0.077,0.115,0,1,0 0.53,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00029,0.0201,0.075,0.047,0.16,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.152,0.099,0.154,0,0,1 0.73,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0024,0.013,0.043,0.09,0.048,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00069,0.0201,0.123,0.097,0.128,0,0,1 0.37,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.142,0.123,0.115,0,0,1 0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.248,0.143,0.173,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.053,0.116,0.097,0.119,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.038,0.148,0.096,0.15384,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.013,0.127,0.111,0.115,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.014,0.14,0.09,0.156,0,0,1 0.3,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00099,0.019,0.119,0.083,0.143,0,0,1 0.3,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.019,0.107,0.085,0.126,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0201,0.055,0.09,0.062,0,0,1 0.74,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00036,0.013,0.096,0.075,0.129,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.111,0.102,0.109,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.017,0.148,0.095,0.155,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0028,0.026,0.111,0.087,0.127,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.025,0.173,0.108,0.161,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.023,0.143,0.095,0.151,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0208,0.1,0.077,0.13,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.015,0.099,0.09,0.11,0,1,0 0.77,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0017,0.019,0.08,0.09,0.09,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.021,0.016,0.124,0.109,0.114,0,0,1 0.74,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00019,0.019,0.109,0.097,0.112,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00104,0.025,0.116,0.089,0.131,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.012,0.053,0.079,0.067,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.063,0.089,0.071,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00075,0.0201,0.098,0.072,0.135,0,0,1 0.84,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.092,0.063,0.146,0,0,1 0.84,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.004,0.024,0.123,0.096,0.12785,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.011,0.126,0.089,0.142,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.022,0.126,0.096,0.13097,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.005,0.134,0.073,0.183,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.011,0.073,0.079,0.093,0,0,1 0.42,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.002,0.018,0.126,0.091,0.138,0,0,1 0.56,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.109,0.089,0.122,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.016,0.093,0.096,0.096,0,0,1 0.32,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.023,0.157,0.104,0.15,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0037,0.0208,0.124,0.095,0.131,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.09,0.079,0.114,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0201,0.163,0.089,0.182,0,0,1 0.32,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0012,0.0201,0.104,0.096,0.108,0,0,1 0.48,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0029,0.017,0.075,0.097,0.077,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.0201,0.074,0.136,0.054,0,0,1 0.17,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.153,0.131,0.116,0,0,1 0.68,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0016,0.0201,0.096,0.096,0.1,0,0,1 
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.017,0.122,0.091,0.135,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.0201,0.114,0.096,0.118,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.088,0.082,0.108,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0036,0.018,0.139,0.111,0.125,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.111,0.092,0.12,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0208,0.115,0.09,0.129,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.025,0.144,0.119,0.121,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.023,0.12,0.109,0.11,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.156,0.102,0.152,0,0,1 0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.029,0.101,0.096,0.106,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.029,0.102,0.088,0.116,0,0,1 0.58,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.118,0.104,0.113,0,0,1 0.35,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00039,0.0201,0.196,0.17,0.115,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0015,0.0201,0.103,0.112,0.092,0,0,1 0.2,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0.002,0.018,0.13,0.101,0.128,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.142,0.12,0.118,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00087,0.018,0.11,0.089,0.124,0,0,1 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00016,0.0201,0.14,0.074,0.189,0,0,1 0.73,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0012,0.018,0.121,0.091,0.134,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00085,0.0208,0.093,0.096,0.09667,0,0,1 0.81,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00067,0.0201,0.112,0.095,0.118,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.022,0.077,0.078,0.1,0,0,1 0.18,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00048,0.015,0.087,0.087,0.1,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.017,0.101,0.08,0.126,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.007,0.0201,0.148,0.087,0.17,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00095,0.0201,0.12,0.108,0.11,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.003,0.023,0.085,0.09,0.094,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.009,0.097,0.093,0.104,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00039,0.0201,0.139,0.078,0.179,0,0,1 0.76,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0015,0.019,0.095,0.091,0.118,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.007,0.089,0.072,0.123,0,0,1 0.4,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00044,0.019,0.146,0.085,0.172,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00232,0.0208,0.1,0.096,0.104,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.016,0.099,0.071,0.14,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00031,0.0201,0.177,0.091,0.194,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.024,0.091,0.079,0.115,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.117,0.099,0.118,0,0,1 0.33,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00879,0.02,0.091,0.082,0.111,0,0,1 0.49,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.183,0.102,0.179,0,0,1 0.72,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0.00013,0.022,0.112,0.095,0.118,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,9e-05,0.0201,0.172,0.096,0.18,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.022,0.131,0.104,0.123,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.009,0.093,0.085,0.109,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.025,0.123,0.1,0.123,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.029,0.132,0.104,0.124,0,0,1 0.38,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00013,0.0201,0.146,0.141,0.104,0,0,1 
0.33,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00208,0.039,0.115,0.104,0.11,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.023,0.14,0.11,0.127,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.015,0.113,0.093,0.122,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00099,0.026,0.152,0.113,0.135,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.032,0.134,0.098,0.137,0,0,1 0.47,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00049,0.017,0.101,0.075,0.134,0,0,1 0.38,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0025,0.02,0.128,0.112,0.115,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.009,0.105,0.072,0.145,0,0,1 0.27,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00036,0.024,0.114,0.1,0.115,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00091,0.018,0.125,0.074,0.169,0,0,1 0.57,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00051,0.019,0.136,0.076,0.18,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.026,0.214,0.099,0.216,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.02,0.119,0.096,0.1237,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.024,0.13,0.095,0.137,0,0,1 0.45,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,4e-05,0.0201,0.238,0.109,0.218,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.012,0.108,0.073,0.148,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0009,0.02,0.101,0.108,0.094,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.0201,0.099,0.089,0.111,0,0,1 0.76,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0.0018,0.025,0.136,0.104,0.129,0,0,1 0.69,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00018,0.014,0.104,0.068,0.153,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00079,0.017,0.118,0.099,0.119,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0208,0.118,0.093,0.127,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00074,0.022,0.095,0.084,0.113,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.098,0.081,0.12,0,0,1 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.169,0.089,0.189,0,0,1 0.43,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.045,0.161,0.108,0.149,0,0,1 0.57,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.011,0.0208,0.134,0.095,0.14016,0,1,0 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.025,0.099,0.115,0.086,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.082,0.095,0.086,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.024,0.189,0.104,0.179,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.013,0.113,0.091,0.124,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.13,0.091,0.143,0,0,1 0.3,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00024,0.018,0.188,0.096,0.19542,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.183,0.16,0.114,0,0,1 0.56,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.136,0.088,0.155,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.003,0.057,0.11,0.052,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.029,0.022,0.103,0.09,0.114,0,1,0 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.105,0.087,0.121,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00096,0.014,0.106,0.09,0.118,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.028,0.124,0.108,0.115,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.141,0.098,0.145,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.081,0.086,0.094,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.016,0.071,0.099,0.072,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.019,0.15,0.103,0.146,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.17,0.166,0.102,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.022,0.11329,0.096,0.11776,0,0,1 0.61,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0.036,0.012,0.109,0.104,0.103,0,0,1 0.2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.012,0.027,0.121,0.096,0.12577,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.13,0.087,0.149,0,0,1 
0.37,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,8e-05,0.0201,0.168,0.104,0.162,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0012,0.0201,0.105,0.098,0.107,0,0,1 0.6,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.104,0.099,0.105,0,0,1 0.36,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,8e-05,0.045,0.198,0.115,0.172,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.0201,0.091,0.077,0.118,0,0,1 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0063,0.0201,0.108,0.103,0.105,0,0,1 0.62,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0019,0.02,0.146,0.095,0.154,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.003,0.061,0.116,0.052,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0009,0.01,0.141,0.102,0.138,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.015,0.108,0.107,0.101,0,0,1 0.46,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0017,0.01,0.105,0.1,0.105,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.0201,0.18,0.146,0.123,0,0,1 0.64,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.015,0.131,0.108,0.121,0,0,1 0.38,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00047,0.014,0.127,0.096,0.132,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0075,0.017,0.127,0.097,0.131,0,1,0 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.112,0.104,0.106,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.004,0.106,0.068,0.156,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.007,0.082,0.095,0.086,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.007,0.099,0.094,0.105,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.015,0.111,0.073,0.152,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0053,0.011,0.098,0.092,0.107,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.005,0.093,0.075,0.124,0,0,1 0.45,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,8e-05,0.0201,0.171,0.088,0.194,0,0,1 0.33,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.0201,0.069,0.052,0.13,0,0,1 0.48,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0.00072,0.017,0.144,0.108,0.133,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,8e-05,0.0208,0.173,0.093,0.186,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.031,0.102,0.075,0.136,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.012,0.068,0.037,0.184,0,0,1 0.46,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.04,0.131,0.1,0.131,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00042,0.0208,0.145,0.102,0.142,0,0,1 0.64,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00098,0.015,0.106,0.087,0.122,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.023,0.124,0.082,0.151,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00087,0.019,0.12,0.096,0.125,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.104,0.121,0.087,0,0,1 0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.03,0.109,0.104,0.103,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.132,0.11,0.12,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.019,0.079,0.107,0.074,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.0208,0.182,0.103,0.177,0,0,1 0.74,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.019,0.11329,0.096,0.11776,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.016,0.128,0.101,0.127,0,1,0 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0058,0.014,0.096,0.099,0.097,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00028,0.0201,0.179,0.079,0.227,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.0201,0.151,0.114,0.133,0,0,1 0.46,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,6e-05,0.024,0.103,0.104,0.097,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.023,0.096,0.065,0.148,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00208,0.019,0.142,0.161,0.088,0,0,1 0.19,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,0.0041,0.026,0.124,0.13,0.095,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.018,0.125,0.074,0.169,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.141,0.075,0.188,0,0,1 
0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.114,0.095,0.12,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.014,0.085,0.093,0.091,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.022,0.116,0.114,0.102,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.011,0.131,0.104,0.126,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.016,0.099,0.094,0.105,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.008,0.048,0.07,0.069,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.012,0.108,0.09,0.12,0,1,0 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00086,0.017,0.095,0.096,0.099,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.0201,0.12,0.093,0.129,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.023,0.11,0.116,0.095,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.155,0.133,0.117,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.006,0.091,0.068,0.134,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.018,0.096,0.091,0.106,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.023,0.118,0.097,0.12,0,0,1 0.42,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0067,0.016,0.086,0.084,0.102,0,0,1 0.45,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0.003,0.0201,0.102,0.088,0.116,0,0,1 0.33,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0016,0.0201,0.085,0.102,0.083,0,0,1 0.54,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.01,0.022,0.081,0.109,0.074,0,1,0 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0208,0.095,0.097,0.098,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.092,0.086,0.107,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.02,0.104,0.094,0.111,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.107,0.111,0.096,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00083,0.014,0.076,0.074,0.103,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00021,0.0201,0.104,0.095,0.11,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.42,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,7e-05,0.02,0.099,0.086,0.115,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.027,0.132,0.17,0.078,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.028,0.092,0.122,0.075,0,0,1 0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.009,0.106,0.114,0.093,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.014,0.116,0.081,0.143,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.024,0.102,0.071,0.144,0,0,1 0.73,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0064,0.017,0.097,0.087,0.112,0,1,0 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.03,0.095,0.134,0.071,0,0,1 0.69,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,8e-05,0.024,0.132,0.095,0.139,0,0,1 0.42,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,7e-05,0.022,0.138,0.085,0.162,0,0,1 0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.016,0.091,0.096,0.095,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.0208,0.087,0.083,0.105,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0077,0.026,0.057,0.094,0.061,1,0,0 0.52,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00074,0.039,0.148,0.153,0.097,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0015,0.0201,0.15,0.16,0.094,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00022,0.008,0.073,0.075,0.097,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0208,0.088,0.096,0.092,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00021,0.009,0.093,0.087,0.107,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.032,0.148,0.114,0.13,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.026,0.114,0.095,0.12,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.0096,0.06,0.095,0.063,1,0,0 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00021,0.02,0.142,0.103,0.138,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.0201,0.123,0.115,0.107,0,0,1 
0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.016,0.192,0.116,0.164,0,0,1 0.34,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.6,0.11,0.546,0,0,1 0.46,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0.0053,0.016,0.093,0.091,0.102,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0.017,0.123,0.107,0.115,0,1,0 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.125,0.116,0.108,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.019,0.11,0.093,0.118,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.038,0.18,0.157,0.115,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00078,0.029,0.137,0.141,0.097,0,0,1 0.78,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0044,0.0201,0.12,0.076,0.158,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.187,0.104,0.178,0,0,1 0.83,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0.0015,0.0201,0.159,0.098,0.162,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.16,0.098,0.162,0,0,1 0.6,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0015,0.0201,0.137,0.082,0.167,0,0,1 0.63,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0067,0.015,0.101,0.065,0.155,0,1,0 0.68,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.103,0.097,0.106,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.027,0.09,0.074,0.122,0,1,0 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.106,0.1,0.106,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.039,0.007,0.099,0.089,0.111,0,1,0 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00929,0.015,0.057,0.069,0.083,0,1,0 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.017,0.098,0.084,0.117,0,1,0 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.017,0.13,0.102,0.128,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.022,0.091,0.104,0.088,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.0201,0.182,0.131,0.14,0,0,1 0.67,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,9e-05,0.019,0.16,0.08,0.2,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0023,0.022,0.123,0.109,0.113,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.023,0.069,0.091,0.076,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00028,0.0201,0.114,0.094,0.121,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.039,0.186,0.094,0.198,0,0,1 0.76,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.026,0.157,0.109,0.144,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00012,0.007,0.053,0.096,0.05509,0,0,1 0.4,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.138,0.095,0.145,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.013,0.122,0.099,0.123,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00045,0.012,0.133,0.094,0.142,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.0201,0.063,0.088,0.072,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.024,0.127,0.12,0.106,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0208,0.09,0.089,0.101,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.014,0.093,0.077,0.121,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.028,0.181,0.126,0.144,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.023,0.076,0.086,0.088,0,0,1 0.75,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00073,0.022,0.106,0.074,0.143,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00208,0.034,0.135,0.104,0.129,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0009,0.012,0.097,0.091,0.107,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00085,0.018,0.122,0.066,0.185,0,0,1 0.38,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.002,0.023,0.122,0.096,0.12681,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.103,0.12,0.086,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.66,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,8e-05,0.01,0.141,0.075,0.188,0,0,1 0.84,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0019,0.025,0.137,0.11,0.125,0,0,1 0.84,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.023,0.101,0.081,0.125,0,0,1 
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00087,0.028,0.113,0.116,0.096,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.001,0.037,0.052,0.071,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.011,0.105,0.087,0.121,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0027,0.023,0.091,0.099,0.092,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.023,0.126,0.1,0.126,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0032,0.025,0.088,0.102,0.086,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,8e-05,0.022,0.13,0.091,0.143,0,0,1 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.016,0.142,0.101,0.141,0,0,1 0.65,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0013,0.033,0.13,0.121,0.107,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.017,0.081,0.096,0.084,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.017,0.109,0.082,0.133,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.029,0.165,0.112,0.147,0,0,1 0.64,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0022,0.023,0.113,0.069,0.164,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0019,0.022,0.126,0.097,0.13,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.016,0.144,0.109,0.132,0,0,1 0.2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.002,0.031,0.135,0.116,0.115,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.066,0.078,0.085,0,0,1 0.41,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.02,0.086,0.086,0.1,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.022,0.135,0.107,0.126,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.012,0.089,0.09,0.099,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.011,0.092,0.084,0.11,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,8e-05,0.011,0.138,0.08,0.173,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.028,0.11329,0.096,0.11776,0,0,1 0.66,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,7e-05,0.029,0.145,0.125,0.116,0,0,1 0.51,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.012,0.029,0.044,0.113,0.039,0,0,1 0.47,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0012,0.027,0.082,0.104,0.078,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00011,0.026,0.118,0.093,0.127,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.023,0.102,0.103,0.099,0,0,1 0.17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.078,0.076,0.103,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.026,0.122,0.103,0.118,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.023,0.134,0.1,0.134,0,0,1 0.02,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,5e-05,0.076,0.257,0.096,0.268,0,0,1 0.79,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0027,0.012,0.1,0.093,0.108,0,0,1 0.85,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.114,0.061,0.187,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.09,0.091,0.099,0,0,1 0.49,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0037,0.02,0.053,0.071,0.075,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0025,0.015,0.069,0.09,0.077,0,0,1 0.42,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.0208,0.111,0.089,0.125,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.025,0.106,0.098,0.108,0,0,1 0.73,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.017,0.017,0.112,0.116,0.097,0,1,0 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.108,0.071,0.152,0,0,1 0.46,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.002,0.022,0.092,0.071,0.13,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.0201,0.098,0.099,0.099,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.01,0.117,0.119,0.098,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00087,0.0201,0.091,0.099,0.092,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.029,0.123,0.103,0.119,0,0,1 0.71,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0043,0.013,0.055,0.083,0.066,0,0,1 0.45,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.022,0.0096,0.065,0.115,0.057,1,0,0 
0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.035,0.12,0.116,0.102,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0006,0.019,0.104,0.101,0.103,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0071,0.023,0.09,0.098,0.092,0,1,0 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.022,0.132,0.093,0.142,0,0,1 0.61,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0066,0.022,0.105,0.088,0.119,0,0,1 0.74,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00012,0.023,0.106,0.092,0.115,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00063,0.024,0.094,0.093,0.101,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00014,0.0201,0.119,0.057,0.209,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.025,0.131,0.116,0.11,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.105,0.1,0.105,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.025,0.157,0.097,0.162,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.09,0.09,0.1,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.109,0.138,0.079,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.012,0.106,0.123,0.086,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.02,0.087,0.115,0.076,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.018,0.127,0.128,0.099,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00087,0.026,0.129,0.142,0.091,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.003,0.033,0.064,0.052,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00097,0.027,0.069,0.052,0.133,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.098,0.128,0.077,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.026,0.063,0.074,0.085,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.019,0.104,0.097,0.107,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.085,0.083,0.102,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.012,0.108,0.094,0.115,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.025,0.131,0.111,0.118,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0052,0.017,0.168,0.114,0.147,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.014,0.099,0.06,0.165,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.011,0.112,0.068,0.165,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.013,0.108,0.094,0.115,0,0,1 0.39,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0057,0.0201,0.163,0.123,0.133,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.073,0.023,0.087,0.112,0.078,0,1,0 0.47,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0201,0.113,0.091,0.125,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0001,0.031,0.03,0.089,0.034,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00035,0.008,0.07,0.083,0.084,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.012,0.015,0.086,0.1,0.086,0,1,0 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.022,0.133,0.092,0.145,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0035,0.013,0.07,0.092,0.076,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.015,0.112,0.089,0.126,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.006,0.103,0.101,0.102,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.019,0.148,0.113,0.131,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0208,0.114,0.09,0.126,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.0201,0.14,0.092,0.152,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.436,0.0096,0.012,0.101,0.012,1,0,0 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0201,0.108,0.084,0.129,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0208,0.122,0.114,0.107,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.023,0.135,0.098,0.138,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.023,0.11329,0.096,0.11776,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.033,0.153,0.119,0.129,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.014,0.115,0.091,0.126,0,0,1 
0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.015,0.095,0.091,0.104,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0053,0.02,0.142,0.115,0.123,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.017,0.104,0.095,0.10878,0,1,0 0.55,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.02,0.108,0.097,0.111,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0016,0.017,0.087,0.089,0.098,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.02,0.075,0.096,0.07796,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.004,0.08,0.057,0.14,0,0,1 0.67,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0031,0.0201,0.118,0.101,0.117,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.006,0.152,0.075,0.203,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.056,0.049,0.037,0.078,0.047,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.015,0.09,0.097,0.093,0,0,1 0.34,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00038,0.019,0.137,0.082,0.167,0,0,1 0.61,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0048,0.017,0.105,0.107,0.098,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00232,0.0201,0.102,0.104,0.096,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.018,0.12,0.088,0.136,0,0,1 0.73,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.156,0.099,0.158,0,0,1 0.24,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.019,0.068,0.103,0.066,0,0,1 0.17,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0016,0.0201,0.118,0.062,0.19,0,0,1 0.28,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.024,0.15,0.116,0.127,0,0,1 0.78,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0019,0.017,0.107,0.108,0.099,0,0,1 0.35,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0015,0.022,0.098,0.114,0.086,0,0,1 0.66,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.0024,0.0201,0.174,0.116,0.15,0,0,1 0.76,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.033,0.024,0.064,0.116,0.055,1,0,0 0.26,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0065,0.023,0.101,0.114,0.089,0,1,0 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.012,0.101,0.107,0.094,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00047,0.024,0.092,0.072,0.128,0,0,1 0.29,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00027,0.0208,0.092,0.104,0.088,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.018,0.081,0.089,0.091,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.006,0.007,0.058,0.067,0.087,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.094,0.08,0.118,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.014,0.09,0.08,0.113,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.023,0.08,0.064,0.125,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.011,0.11,0.084,0.131,0,1,0 0.81,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.097,0.096,0.10083,0,0,1 0.72,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0041,0.019,0.085,0.099,0.085,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00051,0.0201,0.105,0.104,0.101,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,9e-05,0.037,0.201,0.097,0.207,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00067,0.0201,0.108,0.099,0.109,0,0,1 0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.089,0.11,0.081,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.025,0.082,0.116,0.07,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.008,0.017,0.076,0.095,0.08,0,1,0 0.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.039,0.11329,0.096,0.11776,0,0,1 0.03,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.077,0.076,0.101,0,0,1 0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.022,0.089,0.082,0.109,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.029,0.0096,0.057,0.108,0.053,1,0,0 0.87,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.015,0.096,0.08,0.12,0,0,1 0.83,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0027,0.013,0.1,0.083,0.121,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.013,0.113,0.084,0.135,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.08,0.1,0.08,0,0,1 
0.39,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.077,0.065,0.118,0,0,1 0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.019,0.156,0.096,0.16216,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00019,0.016,0.091,0.104,0.088,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.017,0.123,0.111,0.111,0,1,0 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.017,0.111,0.085,0.131,0,1,0 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.015,0.063,0.057,0.111,0,0,1 0.21,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.115,0.071,0.162,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.02,0.125,0.112,0.112,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00099,0.011,0.129,0.08,0.162,0,0,1 0.77,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0062,0.007,0.084,0.073,0.115,0,1,0 0.64,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0033,0.008,0.088,0.067,0.131,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.1,0.104,0.094,0,0,1 0.13,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0029,0.0201,0.104,0.087,0.12,0,0,1 0.16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.025,0.103,0.086,0.12,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.015,0.128,0.089,0.144,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.025,0.095,0.098,0.097,0,0,1 0.49,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.003,0.022,0.091,0.086,0.105,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.007,0.123,0.073,0.168,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.1,0.089,0.112,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.0201,0.132,0.076,0.174,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.0208,0.127,0.101,0.126,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.111,0.104,0.107,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.018,0.097,0.085,0.114,0,0,1 0.61,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.0074,0.012,0.086,0.096,0.09,0,0,1 0.42,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00012,0.013,0.093,0.067,0.139,0,0,1 0.29,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00019,0.017,0.147,0.096,0.1528,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.026,0.136,0.115,0.118,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.033,0.082,0.089,0.092,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0208,0.116,0.067,0.173,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.09,0.087,0.103,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.084,0.078,0.108,0,0,1 0.59,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.011,0.016,0.12,0.095,0.12552,0,1,0 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.017,0.104,0.09,0.116,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.014,0.077,0.07,0.11,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.022,0.119,0.092,0.129,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.078,0.065,0.121,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.077,0.063,0.122,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00048,0.015,0.092,0.085,0.108,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.019,0.122,0.056,0.218,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.016,0.098,0.088,0.111,0,0,1 0.15,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0031,0.0201,0.088,0.074,0.119,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.014,0.137,0.081,0.169,0,0,1 0.47,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0052,0.0201,0.098,0.074,0.132,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.0201,0.08,0.096,0.08316,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.01,0.084,0.052,0.162,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.024,0.137,0.119,0.115,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00095,0.009,0.116,0.045,0.258,0,0,1 0.54,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0065,0.019,0.088,0.102,0.086,0,1,0 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.0201,0.081,0.094,0.086,0,0,1 
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00071,0.025,0.094,0.094,0.1,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.012,0.107,0.086,0.122,0,0,1 0.7,1,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0.0019,0.013,0.136,0.08,0.17,0,0,1 0.47,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.017,0.109,0.089,0.123,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.073,0.01,0.025,0.099,0.025,1,0,0 0.58,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00063,0.016,0.107,0.093,0.115,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0017,0.011,0.064,0.076,0.084,0,0,1 0.69,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.001,0.054,0.071,0.076,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.017,0.124,0.049,0.253,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.028,0.118,0.097,0.122,0,0,1 0.72,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0049,0.009,0.099,0.078,0.127,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.019,0.098,0.095,0.103,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0032,0.017,0.087,0.075,0.116,0,0,1 0.36,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0.0029,0.02,0.096,0.091,0.106,0,0,1 0.63,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,8e-05,0.019,0.186,0.096,0.19334,0,0,1 0.45,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.0201,0.137,0.074,0.185,0,0,1 0.45,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0019,0.018,0.129,0.063,0.205,0,0,1 0.49,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.114,0.072,0.158,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.003,0.1,0.087,0.115,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.007,0.131,0.074,0.177,0,0,1 0.59,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0078,0.017,0.089,0.085,0.105,0,1,0 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.019,0.115,0.119,0.097,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.02,0.099,0.083,0.119,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.019,0.125,0.097,0.129,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.151,0.102,0.148,0,0,1 0.26,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00039,0.018,0.147,0.079,0.185,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.025,0.089,0.095,0.094,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0065,0.012,0.09,0.086,0.105,0,1,0 0.33,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.0208,0.108,0.092,0.117,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0064,0.023,0.085,0.104,0.08,0,1,0 0.34,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.061,0.182,0.1,0.182,0,0,1 0.54,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.0201,0.184,0.109,0.169,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.027,0.129,0.095,0.136,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00809,0.0208,0.118,0.104,0.112,0,1,0 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.015,0.073,0.096,0.07587,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.013,0.093,0.082,0.113,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.024,0.1,0.096,0.10395,0,0,1 0.6,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0017,0.026,0.113,0.09,0.126,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.082,0.074,0.111,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.031,0.023,0.236,0.102,0.231,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0011,0.0201,0.111,0.096,0.116,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.15,0.104,0.142,0,0,1 0.3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.222,0.173,0.128,0,0,1 0.27,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00026,0.0201,0.142,0.134,0.106,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.016,0.089,0.069,0.129,0,1,0 0.31,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.00014,0.01,0.103,0.066,0.156,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0201,0.098,0.09,0.109,0,0,1 
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00018,0.01,0.151,0.07,0.216,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.015,0.102,0.08,0.128,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.015,0.094,0.094,0.1,0,1,0 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.091,0.084,0.108,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.0201,0.081,0.057,0.142,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.009,0.083,0.065,0.128,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.032,0.146,0.12,0.122,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.024,0.088,0.062,0.142,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.025,0.093,0.078,0.119,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.019,0.107,0.093,0.115,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.026,0.054,0.102,0.053,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.015,0.097,0.085,0.114,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.034,0.133,0.052,0.251,0,0,1 0.76,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0013,0.024,0.097,0.099,0.098,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.024,0.135,0.101,0.135,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.016,0.122,0.079,0.154,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.02,0.12,0.096,0.125,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.1,0.087,0.115,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.014,0.072,0.047,0.153,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.017,0.093,0.073,0.127,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.0208,0.087,0.104,0.083,0,0,1 0.35,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.0201,0.127,0.111,0.114,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.095,0.091,0.104,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.0201,0.108,0.095,0.114,0,0,1 0.72,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0015,0.018,0.097,0.1,0.097,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.018,0.082,0.084,0.099,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.022,0.086,0.069,0.126,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.0201,0.116,0.088,0.132,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.00839,0.017,0.094,0.084,0.112,0,1,0 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.024,0.092,0.085,0.108,0,0,1 0.27,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00078,0.023,0.116,0.073,0.159,0,0,1 0.68,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.054,0.012,0.019,0.104,0.018,0,0,1 0.47,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.018,0.134,0.097,0.138,0,0,1 0.54,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,7e-05,0.031,0.156,0.081,0.193,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00021,0.024,0.232,0.098,0.237,0,0,1 0.71,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0033,0.019,0.116,0.091,0.128,0,0,1 0.7,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0032,0.01,0.085,0.058,0.144,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.056,0.292,0.098,0.298,0,0,1 0.65,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.002,0.027,0.121,0.097,0.125,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.027,0.161,0.092,0.175,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0069,0.015,0.091,0.095,0.096,0,1,0 0.49,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.032,0.0208,0.082,0.073,0.112,0,1,0 0.35,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.013,0.03,0.141,0.127,0.111,0,1,0 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.012,0.1,0.07,0.143,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0028,0.0201,0.114,0.094,0.122,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.01,0.09,0.077,0.117,0,0,1 0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.025,0.11,0.091,0.121,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.031,0.11,0.082,0.134,0,0,1 
0.38,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.064,0.256,0.077,0.332,0,0,1 0.38,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00056,0.029,0.158,0.075,0.211,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0022,0.024,0.085,0.055,0.155,0,0,1 0.01,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.03,0.196,0.096,0.204,0,0,1 0.6,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.012,0.031,0.093,0.096,0.09667,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00023,0.037,0.144,0.091,0.158,0,0,1 0.75,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0018,0.019,0.184,0.096,0.19126,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.029,0.108,0.1,0.108,0,0,1 0.74,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.098,0.035,0.013,0.09,0.014,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.008,0.084,0.049,0.171,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.019,0.144,0.088,0.164,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.012,0.075,0.057,0.132,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.073,0.064,0.114,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00091,0.0201,0.118,0.085,0.139,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0017,0.026,0.089,0.06,0.148,0,0,1 0.33,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0027,0.032,0.143,0.084,0.17,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.019,0.086,0.094,0.092,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.024,0.091,0.104,0.087,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00099,0.03,0.149,0.096,0.15488,0,0,1 0.42,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,6e-05,0.031,0.156,0.094,0.166,0,0,1 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0075,0.019,0.099,0.103,0.096,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,6e-05,0.051,0.138,0.113,0.122,0,0,1 0.56,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0014,0.023,0.115,0.107,0.107,0,0,1 0.56,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.013,0.025,0.077,0.095,0.081,0,1,0 0.17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.102,0.093,0.11,0,0,1 0.36,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.028,0.131,0.104,0.125,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.019,0.105,0.09,0.117,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.0208,0.096,0.087,0.11,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0119,0.026,0.079,0.09,0.088,0,1,0 0.52,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0024,0.028,0.118,0.101,0.117,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.017,0.074,0.073,0.101,0,0,1 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.015,0.084,0.089,0.094,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.027,0.111,0.091,0.122,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.026,0.101,0.103,0.098,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.02,0.107,0.084,0.127,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.018,0.099,0.076,0.13,0,0,1 0.83,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.02,0.116,0.071,0.163,0,0,1 0.76,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0019,0.014,0.067,0.065,0.103,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.02,0.097,0.087,0.112,0,0,1 0.77,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.018,0.114,0.081,0.141,0,0,1 0.61,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,6e-05,0.053,0.21,0.087,0.241,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.097,0.081,0.12,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.123,0.091,0.135,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0015,0.029,0.089,0.1,0.089,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.033,0.095,0.091,0.104,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.022,0.155,0.094,0.165,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.019,0.112,0.087,0.129,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.029,0.069,0.101,0.068,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0036,0.025,0.06,0.094,0.064,0,0,1 
0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.066,0.0096,0.028,0.107,0.026,1,0,0 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.107,0.099,0.108,0,0,1 0.57,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00208,0.025,0.134,0.094,0.142,0,0,1 0.77,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.003,0.026,0.114,0.088,0.13,0,0,1 0.36,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00208,0.017,0.083,0.058,0.143,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.012,0.078,0.073,0.107,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.02,0.075,0.082,0.092,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.00035,0.026,0.063,0.092,0.069,0,0,1 0.72,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0016,0.018,0.099,0.104,0.094,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.013,0.062,0.074,0.084,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.011,0.079,0.073,0.108,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00048,0.004,0.064,0.032,0.2,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.011,0.064,0.067,0.095,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0063,0.014,0.066,0.064,0.103,0,1,0 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.008,0.084,0.082,0.102,0,0,1 0.21,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.017,0.094,0.096,0.09771,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.0201,0.109,0.101,0.108,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00929,0.025,0.092,0.095,0.09623,0,1,0 0.71,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.002,0.03,0.097,0.114,0.085,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00049,0.012,0.088,0.063,0.14,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.017,0.094,0.085,0.111,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.0201,0.142,0.116,0.121,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.024,0.109,0.097,0.112,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.111,0.097,0.114,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.018,0.103,0.072,0.143,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.026,0.117,0.096,0.12162,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00034,0.007,0.066,0.116,0.056,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0016,0.024,0.13,0.108,0.12,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.019,0.079,0.062,0.127,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00074,0.0208,0.127,0.107,0.119,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.025,0.091,0.087,0.105,0,1,0 0.39,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.118,0.071,0.166,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00066,0.022,0.152,0.104,0.145,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.017,0.123,0.075,0.164,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.494,0.005,0.012,0.139,0.009,1,0,0 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.017,0.09,0.057,0.158,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00208,0.0208,0.107,0.093,0.115,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.151,0.073,0.207,0,0,1 0.54,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0.0019,0.023,0.161,0.104,0.155,0,0,1 0.23,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,6e-05,0.18,0.43,0.067,0.642,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.017,0.107,0.084,0.127,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.024,0.129,0.094,0.137,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0.026,0.151,0.148,0.102,0,0,1 0.67,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0023,0.027,0.124,0.101,0.123,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.027,0.131,0.099,0.132,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.02,0.08,0.108,0.081,0,0,1 0.79,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0019,0.02,0.189,0.104,0.18,0,0,1 0.21,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.0071,0.029,0.048,0.101,0.048,0,0,1 
0.62,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.071,0.072,0.099,0,0,1 0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.019,0.165,0.099,0.167,0,0,1 0.48,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0012,0.0201,0.105,0.072,0.146,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.0201,0.152,0.08,0.19,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.019,0.133,0.099,0.134,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.025,0.147,0.089,0.165,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.025,0.161,0.127,0.127,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.011,0.1,0.082,0.122,0,1,0 0.45,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00039,0.025,0.12,0.104,0.115,0,0,1 0.76,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,7e-05,0.033,0.183,0.1,0.183,0,0,1 0.24,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.086,0.103,0.083,0,0,1 0.49,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,8e-05,0.027,0.157,0.104,0.148,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.126,0.116,0.109,0,0,1 0.34,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,6e-05,0.039,0.148,0.101,0.147,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.148,0.114,0.13,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.157,0.124,0.127,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.015,0.12,0.076,0.158,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0019,0.023,0.112,0.087,0.129,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00085,0.018,0.093,0.073,0.127,0,0,1 0.51,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.025,0.117,0.083,0.141,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.113,0.092,0.123,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.024,0.116,0.08,0.145,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.016,0.017,0.107,0.135,0.079,0,1,0 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.123,0.104,0.116,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.0201,0.167,0.093,0.18,0,0,1 0.49,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.039,0.224,0.124,0.181,0,0,1 0.29,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.0201,0.164,0.11,0.149,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.129,0.096,0.13409,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.019,0.123,0.074,0.166,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.015,0.098,0.072,0.136,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.023,0.18,0.104,0.173,0,0,1 0.74,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.022,0.02,0.089,0.11,0.081,0,1,0 0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.024,0.165,0.097,0.17,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.017,0.127,0.092,0.138,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.007,0.097,0.085,0.114,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.0419,0.179,0.167,0.107,0,0,1 0.52,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0.062,0.0096,0.0025,0.119,0.0025,1,0,0 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0208,0.146,0.12,0.121,0,0,1 0.49,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00023,0.0201,0.126,0.084,0.15,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.007,0.105,0.071,0.148,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.074,0.069,0.107,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.022,0.126,0.116,0.109,0,0,1 0.24,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0.00094,0.037,0.19,0.195,0.097,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.004,0.042,0.116,0.036,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.01,0.085,0.079,0.107,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0066,0.014,0.115,0.094,0.122,0,1,0 0.21,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00035,0.009,0.068,0.096,0.07068,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00047,0.02,0.078,0.096,0.08108,0,0,1 
0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.004,0.048,0.051,0.094,0,0,1 0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00048,0.014,0.12,0.096,0.12474,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.01,0.05,0.082,0.061,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.012,0.11,0.08,0.138,0,0,1 0.74,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0041,0.012,0.123,0.094,0.131,0,0,1 0.51,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.137,0.084,0.163,0,0,1 0.33,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.138,0.064,0.217,0,0,1 0.48,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0066,0.015,0.111,0.086,0.129,0,1,0 0.72,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0036,0.0201,0.087,0.098,0.089,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.033,0.11,0.119,0.092,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.02,0.118,0.114,0.103,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00075,0.017,0.147,0.089,0.165,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.028,0.099,0.067,0.148,0,0,1 0.53,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.0201,0.096,0.083,0.116,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.01,0.139,0.08,0.174,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.101,0.09,0.112,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.013,0.08,0.074,0.108,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0208,0.104,0.078,0.133,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.0201,0.106,0.095,0.112,0,0,1 0.19,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.048,0.0201,0.139,0.116,0.118,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.011,0.1,0.064,0.156,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.102,0.096,0.106,0,0,1 0.37,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,9e-05,0.019,0.108,0.077,0.14,0,0,1 0.76,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0049,0.01,0.122,0.11,0.111,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.02,0.141,0.078,0.181,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.018,0.111,0.081,0.137,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.017,0.101,0.086,0.117,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.017,0.106,0.072,0.147,0,0,1 0.56,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0012,0.0201,0.116,0.103,0.113,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.028,0.086,0.089,0.097,0,0,1 0.27,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00074,0.02,0.108,0.06,0.18,0,0,1 0.46,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0.0034,0.018,0.123,0.09,0.137,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.393,0.0096,0.015,0.116,0.013,1,0,0 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.02,0.08,0.083,0.096,0,1,0 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.015,0.128,0.096,0.13305,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.006,0.011,0.121,0.074,0.164,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0012,0.013,0.109,0.092,0.118,0,0,1 0.68,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.0201,0.095,0.098,0.097,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.02,0.101,0.088,0.115,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0043,0.037,0.124,0.146,0.085,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.02,0.106,0.09,0.118,0,0,1 0.46,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0015,0.023,0.096,0.087,0.11,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.02,0.047,0.075,0.063,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.058,0.0096,0.036,0.101,0.036,1,0,0 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0208,0.07,0.076,0.092,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.018,0.129,0.098,0.132,0,0,1 0.7,1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.037,0.013,0.094,0.1,0.094,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.012,0.128,0.092,0.139,0,0,1 
0.53,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0011,0.019,0.076,0.067,0.113,0,0,1 0.57,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.079,0.081,0.098,0,0,1 0.25,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.0019,0.0208,0.095,0.094,0.101,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.011,0.06,0.09,0.067,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.01,0.169,0.096,0.17567,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.012,0.107,0.095,0.112,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0068,0.019,0.09,0.098,0.092,0,1,0 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0079,0.018,0.093,0.095,0.09728,0,1,0 0.41,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.017,0.076,0.078,0.097,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00929,0.017,0.107,0.113,0.095,0,1,0 0.53,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0038,0.019,0.107,0.101,0.106,0,0,1 0.22,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00066,0.022,0.12,0.122,0.098,0,0,1 0.68,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.086,0.093,0.092,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.068,0.109,0.096,0.113,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.024,0.146,0.108,0.135,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.02,0.09,0.09,0.1,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.034,0.093,0.107,0.087,0,0,1 0.45,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.003,0.0208,0.107,0.116,0.092,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.02,0.102,0.097,0.105,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.017,0.073,0.078,0.094,0,1,0 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.098,0.1,0.098,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.0201,0.102,0.096,0.106,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.025,0.121,0.102,0.119,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0208,0.113,0.09,0.126,0,0,1 0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.005,0.101,0.064,0.158,0,0,1 0.97,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.018,0.093,0.089,0.104,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.024,0.25,0.104,0.238,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.079,0.096,0.082,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.027,0.11329,0.096,0.11776,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.024,0.112,0.102,0.11,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00064,0.024,0.086,0.092,0.094,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.022,0.133,0.095,0.14,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.031,0.061,0.093,0.066,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0036,0.034,0.099,0.107,0.093,0,0,1 0.49,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.02,0.0096,0.005,0.1,0.005,1,0,0 0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.018,0.098,0.113,0.087,0,0,1 0.28,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0.0001,0.095,0.09,0.112,0.08,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.032,0.108,0.104,0.102,0,0,1 0.55,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0.0026,0.024,0.074,0.097,0.076,0,0,1 0.6,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00232,0.027,0.071,0.085,0.084,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.028,0.161,0.112,0.144,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.082,0.091,0.09,0,0,1 0.43,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0072,0.023,0.081,0.099,0.082,0,1,0 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.023,0.096,0.078,0.123,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.029,0.133,0.1,0.133,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.009,0.057,0.061,0.093,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.119,0.104,0.114,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.009,0.05,0.062,0.081,0,0,1 0.76,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0031,0.014,0.096,0.102,0.094,0,0,1 
0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.009,0.058,0.128,0.045,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.016,0.08,0.064,0.125,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.024,0.093,0.11,0.085,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00027,0.016,0.075,0.077,0.097,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.012,0.055,0.074,0.074,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0208,0.11,0.087,0.126,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.02,0.103,0.072,0.143,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.023,0.103,0.095,0.108,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.03,0.154,0.114,0.135,0,0,1 0.41,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00089,0.023,0.098,0.098,0.1,0,0,1 0.42,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00013,0.0208,0.084,0.075,0.112,0,0,1 0.47,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.023,0.075,0.089,0.084,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.0208,0.095,0.107,0.089,0,1,0 0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.0201,0.146,0.082,0.178,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.0201,0.078,0.098,0.08,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.026,0.095,0.104,0.09,0,0,1 0.57,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.38,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.117,0.092,0.127,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.025,0.075,0.096,0.07796,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.009,0.069,0.074,0.093,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.013,0.084,0.063,0.133,0,0,1 0.81,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0054,0.0208,0.102,0.11,0.093,0,0,1 0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.018,0.098,0.094,0.104,0,0,1 0.86,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.022,0.075,0.082,0.092,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.00058,0.023,0.099,0.096,0.103,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.018,0.101,0.103,0.098,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00039,0.0208,0.111,0.091,0.122,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.006,0.065,0.066,0.098,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.036,0.161,0.069,0.233,0,0,1 0.47,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.024,0.15,0.11,0.136,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0012,0.0208,0.105,0.089,0.118,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.027,0.128,0.096,0.13305,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.01,0.022,0.086,0.091,0.095,0,1,0 0.47,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0011,0.0208,0.061,0.089,0.069,0,0,1 0.48,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00021,0.038,0.168,0.098,0.171,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.013,0.077,0.094,0.082,0,1,0 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.022,0.083,0.096,0.08627,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.019,0.099,0.083,0.119,0,0,1 0.67,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0007,0.0201,0.099,0.094,0.105,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.038,0.014,0.074,0.098,0.076,0,1,0 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.013,0.073,0.076,0.096,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0087,0.033,0.138,0.153,0.09,0,1,0 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.023,0.112,0.104,0.108,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.01,0.081,0.082,0.099,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.015,0.108,0.081,0.133,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.01,0.09,0.067,0.134,0,0,1 0.62,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0065,0.017,0.11,0.141,0.078,0,1,0 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0208,0.093,0.104,0.088,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.074,0.127,0.058,0,0,1 
0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.017,0.111,0.082,0.135,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.013,0.16,0.099,0.162,0,0,1 0.16,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.252,0.017,0.005,0.108,0.005,1,0,0 0.61,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0012,0.023,0.096,0.072,0.133,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.026,0.157,0.116,0.135,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.023,0.09,0.094,0.096,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00019,0.03,0.108,0.113,0.096,0,0,1 0.28,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.029,0.119,0.141,0.084,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.017,0.107,0.101,0.106,0,0,1 0.6,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0027,0.013,0.105,0.101,0.104,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.018,0.097,0.101,0.096,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.014,0.057,0.072,0.079,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.02,0.137,0.115,0.119,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.082,0.108,0.076,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0079,0.024,0.151,0.163,0.093,0,0,1 0.6,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0011,0.031,0.062,0.094,0.066,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.023,0.104,0.094,0.11,0,0,1 0.65,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0028,0.027,0.105,0.116,0.091,0,0,1 0.66,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,9e-05,0.034,0.153,0.233,0.066,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0419,0.126,0.11,0.115,0,0,1 0.53,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0208,0.075,0.086,0.087,0,0,1 0.48,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0067,0.0201,0.161,0.162,0.099,0,0,1 0.61,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0016,0.0201,0.115,0.108,0.106,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0022,0.0201,0.099,0.103,0.095,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.057,0.041,0.14,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0016,0.017,0.125,0.096,0.13,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0078,0.012,0.083,0.069,0.121,0,1,0 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.031,0.132,0.104,0.125,0,0,1 0.59,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.087,0.096,0.091,0,0,1 0.72,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0011,0.0208,0.104,0.091,0.114,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.014,0.089,0.076,0.117,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.024,0.136,0.107,0.127,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.033,0.144,0.116,0.122,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.128,0.102,0.125,0,0,1 0.23,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00022,0.065,0.245,0.076,0.325,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.059,0.1,0.059,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.024,0.082,0.081,0.102,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.02,0.08,0.077,0.104,0,0,1 0.84,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0009,0.017,0.099,0.079,0.125,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.011,0.129,0.086,0.15,0,0,1 0.84,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.022,0.106,0.096,0.11018,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.026,0.099,0.093,0.106,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.018,0.093,0.069,0.135,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.024,0.11,0.085,0.129,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00088,0.029,0.068,0.096,0.07068,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.02,0.088,0.091,0.097,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.014,0.067,0.071,0.094,0,1,0 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.009,0.094,0.074,0.127,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.011,0.06,0.064,0.094,0,0,1 
0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.015,0.095,0.067,0.142,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.01,0.079,0.111,0.071,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.011,0.094,0.079,0.119,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0052,0.007,0.061,0.046,0.133,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.012,0.1,0.087,0.115,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.018,0.11329,0.096,0.079,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.023,0.154,0.114,0.135,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.0201,0.127,0.089,0.143,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.089,0.116,0.076,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.081,0.088,0.092,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.109,0.097,0.112,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.024,0.117,0.104,0.113,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.0208,0.09,0.107,0.084,0,0,1 0.66,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,9e-05,0.022,0.198,0.097,0.204,0,0,1 0.59,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.015,0.014,0.155,0.104,0.148,0,0,1 0.01,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.016,0.027,0.15,0.109,0.138,0,1,0 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.003,0.023,0.051,0.045,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.079,0.113,0.07,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.025,0.208,0.086,0.242,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.024,0.075,0.098,0.077,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.022,0.098,0.097,0.101,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00839,0.013,0.068,0.09,0.076,0,1,0 0.3,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,5e-05,0.019,0.112,0.077,0.145,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0082,0.0208,0.079,0.089,0.089,0,1,0 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.011,0.106,0.081,0.131,0,0,1 0.47,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0025,0.018,0.094,0.1,0.094,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.011,0.092,0.081,0.114,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.027,0.166,0.149,0.111,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.016,0.101,0.087,0.116,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.032,0.138,0.108,0.128,0,0,1 0.7,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.0001,0.0208,0.126,0.083,0.152,0,0,1 0.55,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.0201,0.109,0.097,0.112,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.0208,0.088,0.107,0.082,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.078,0.08,0.098,0,0,1 0.44,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0016,0.017,0.118,0.11,0.107,0,0,1 0.62,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00041,0.031,0.108,0.107,0.101,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.016,0.11329,0.096,0.11776,0,0,1 0.5,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.062,0.009,0.087,0.094,0.092,0,1,0 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.027,0.114,0.093,0.123,0,0,1 0.31,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0.0027,0.023,0.087,0.116,0.074,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0053,0.014,0.056,0.088,0.064,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0024,0.027,0.08,0.094,0.085,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.019,0.092,0.089,0.103,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.092,0.096,0.096,0,0,1 0.24,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.017,0.076,0.102,0.075,0,0,1 0.21,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.008,0.077,0.096,0.08004,0,0,1 0.81,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0006,0.006,0.123,0.091,0.135,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.019,0.107,0.085,0.126,0,0,1 
0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0072,0.016,0.085,0.091,0.093,0,1,0 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.025,0.093,0.109,0.085,0,0,1 0.62,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.002,0.015,0.077,0.096,0.08004,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.024,0.104,0.074,0.141,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.023,0.093,0.089,0.104,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.023,0.125,0.095,0.132,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.024,0.158,0.116,0.135,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.007,0.023,0.095,0.11,0.086,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00078,0.0208,0.165,0.1,0.165,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.023,0.152,0.093,0.163,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0027,0.023,0.094,0.094,0.1,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.013,0.063,0.104,0.061,0,0,1 0.53,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.025,0.166,0.098,0.169,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00017,0.024,0.165,0.1,0.165,0,0,1 0.29,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.026,0.157,0.131,0.12,0,0,1 0.31,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.0201,0.128,0.087,0.147,0,0,1 0.51,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00046,0.0201,0.186,0.115,0.162,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.024,0.046,0.11,0.042,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.016,0.125,0.095,0.132,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.0201,0.169,0.144,0.117,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.145,0.1,0.145,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0014,0.03,0.132,0.135,0.098,0,0,1 0.34,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.025,0.186,0.133,0.14,0,0,1 0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.023,0.084,0.098,0.086,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.018,0.121,0.125,0.097,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0015,0.019,0.138,0.098,0.141,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.016,0.086,0.072,0.119,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.008,0.108,0.082,0.132,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.038,0.177,0.175,0.101,0,0,1 0.22,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.024,0.181,0.109,0.166,0,0,1 0.76,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,7e-05,0.0208,0.094,0.1,0.094,0,0,1 0.81,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0011,0.013,0.112,0.08,0.14,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.012,0.1,0.07,0.143,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00078,0.008,0.102,0.08,0.128,0,0,1 0.19,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00018,0.0201,0.224,0.134,0.167,0,0,1 0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.022,0.111,0.101,0.11,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.025,0.194,0.108,0.18,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.013,0.106,0.088,0.12,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00047,0.0201,0.111,0.084,0.132,0,0,1 0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0074,0.019,0.101,0.083,0.122,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.02,0.083,0.063,0.132,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00033,0.022,0.134,0.095,0.141,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2e-05,0.044,0.235,0.092,0.255,0,0,1 0.57,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00094,0.026,0.115,0.111,0.104,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.095,0.09,0.106,0,0,1 0.57,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.006,0.023,0.115,0.096,0.11954,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.025,0.111,0.097,0.114,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.114,0.088,0.13,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.009,0.119,0.091,0.131,0,0,1 
0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.017,0.113,0.093,0.122,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.005,0.1,0.096,0.104,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.108,0.093,0.116,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.008,0.081,0.085,0.095,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.007,0.078,0.058,0.132,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.007,0.108,0.069,0.156,0,0,1 0.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.012,0.094,0.058,0.162,0,0,1 0.24,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,8e-05,0.004,0.058,0.076,0.076,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.015,0.105,0.076,0.138,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.003,0.065,0.042,0.155,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.006,0.058,0.073,0.079,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.019,0.094,0.086,0.109,0,1,0 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.022,0.107,0.086,0.124,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.023,0.091,0.096,0.095,0,1,0 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00087,0.013,0.094,0.102,0.092,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.009,0.11,0.088,0.125,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0052,0.025,0.094,0.098,0.096,0,0,1 0.54,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.027,0.112,0.1,0.112,0,0,1 0.75,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00037,0.017,0.101,0.077,0.131,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00018,0.026,0.083,0.08,0.104,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.128,0.111,0.115,0,0,1 0.82,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.105,0.11,0.095,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.022,0.106,0.075,0.141,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.095,0.087,0.109,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.016,0.145,0.097,0.149,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.015,0.133,0.096,0.139,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.005,0.076,0.058,0.129,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00056,0.013,0.111,0.086,0.128,0,0,1 0.13,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00036,0.022,0.161,0.124,0.13,0,0,1 0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.029,0.107,0.087,0.123,0,0,1 0.53,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.001,0.0201,0.101,0.102,0.099,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00088,0.0208,0.088,0.085,0.104,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00041,0.0201,0.096,0.087,0.11,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0073,0.017,0.095,0.095,0.09937,0,1,0 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.025,0.18,0.123,0.146,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.024,0.119,0.17,0.07,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00099,0.025,0.101,0.098,0.103,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.023,0.082,0.102,0.08,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00071,0.025,0.141,0.139,0.101,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.022,0.078,0.094,0.083,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.018,0.078,0.101,0.077,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.026,0.106,0.097,0.109,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.11329,0.096,0.11776,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0026,0.0208,0.095,0.097,0.097,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0053,0.027,0.103,0.129,0.08,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.019,0.159,0.116,0.137,0,0,1 
0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.0201,0.107,0.108,0.099,0,0,1 0.11,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.028,0.092,0.097,0.095,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.003,0.07,0.061,0.115,0,0,1 0.28,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0025,0.007,0.087,0.108,0.081,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.089,0.097,0.092,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00099,0.019,0.091,0.095,0.096,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.022,0.102,0.09,0.113,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.027,0.123,0.129,0.095,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.025,0.111,0.13,0.085,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.022,0.164,0.135,0.122,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.072,0.104,0.069,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.016,0.058,0.113,0.051,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.019,0.11329,0.096,0.11776,0,0,1 0.4,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0.00232,0.0201,0.085,0.112,0.076,0,0,1 0.57,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.024,0.116,0.089,0.13,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.027,0.112,0.103,0.109,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0057,0.022,0.108,0.115,0.094,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.019,0.096,0.093,0.103,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.014,0.097,0.082,0.118,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.016,0.08,0.099,0.081,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.012,0.095,0.084,0.113,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.013,0.1,0.082,0.122,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.084,0.104,0.081,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.022,0.148,0.122,0.121,0,0,1 0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.03,0.114,0.1,0.114,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.015,0.139,0.097,0.143,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.019,0.083,0.101,0.082,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.018,0.099,0.08,0.124,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.101,0.09,0.112,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00066,0.024,0.087,0.082,0.106,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.00208,0.024,0.106,0.112,0.095,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00018,0.014,0.104,0.083,0.125,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.028,0.06,0.095,0.063,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.188,0.181,0.104,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.017,0.128,0.093,0.138,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00889,0.012,0.109,0.093,0.117,0,1,0 0.73,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0013,0.017,0.059,0.087,0.068,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.012,0.127,0.089,0.143,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.015,0.1,0.087,0.115,0,1,0 0.7,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.006,0.115,0.073,0.158,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.006,0.078,0.084,0.093,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00031,0.003,0.08,0.072,0.111,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.028,0.135,0.12,0.113,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.106,0.003,0.005,0.111,0.0045,1,0,0 0.54,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0014,0.025,0.056,0.09,0.062,0,0,1 0.84,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.0419,0.151,0.088,0.172,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0208,0.103,0.104,0.1,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.025,0.082,0.092,0.089,0,0,1 
0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.019,0.1,0.098,0.102,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.022,0.101,0.104,0.095,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.017,0.092,0.099,0.093,0,0,1 0.84,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.01,0.081,0.073,0.111,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.035,0.146,0.11,0.133,0,0,1 0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.028,0.158,0.096,0.165,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.023,0.14,0.11,0.127,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.131,0.095,0.138,0,0,1 0.86,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0065,0.004,0.105,0.08,0.131,0,1,0 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00028,0.005,0.109,0.077,0.142,0,0,1 0.54,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.014,0.02,0.062,0.09,0.069,0,1,0 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.031,0.223,0.132,0.169,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.027,0.129,0.11,0.117,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.027,0.176,0.098,0.18,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.031,0.152,0.115,0.132,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.019,0.115,0.086,0.134,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.0201,0.101,0.091,0.111,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.098,0.089,0.11,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.031,0.109,0.109,0.1,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.1329,0.41,0.079,0.519,0,0,1 0.28,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0022,0.011,0.089,0.093,0.096,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0208,0.11,0.114,0.096,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.03,0.125,0.124,0.101,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0201,0.11329,0.096,0.11776,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0201,0.109,0.08,0.136,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.096,0.103,0.093,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.083,0.08,0.104,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.0201,0.11,0.101,0.109,0,0,1 0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00049,0.009,0.11,0.087,0.126,0,0,1 0.66,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00089,0.017,0.125,0.098,0.128,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.02,0.144,0.116,0.124,0,0,1 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.016,0.079,0.066,0.12,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0026,0.03,0.138,0.124,0.111,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.025,0.117,0.104,0.113,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00057,0.0201,0.088,0.094,0.094,0,0,1 0.38,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0031,0.0201,0.134,0.107,0.125,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.003,0.085,0.061,0.139,0,0,1 0.72,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0018,0.017,0.114,0.103,0.111,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.115,0.098,0.117,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00939,0.008,0.08,0.135,0.059,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.02,0.095,0.083,0.115,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.08,0.094,0.085,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.018,0.019,0.097,0.093,0.104,0,1,0 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.017,0.092,0.103,0.09,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.015,0.125,0.104,0.118,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.074,0.095,0.078,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.044,0.016,0.028,0.097,0.029,1,0,0 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.022,0.1,0.087,0.115,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.026,0.108,0.107,0.101,0,0,1 
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.064,0.191,0.097,0.197,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.0208,0.117,0.096,0.122,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.013,0.11329,0.096,0.11776,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.017,0.005,0.12,0.00415,0,0,1 0.17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.033,0.084,0.096,0.08731,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0023,0.03,0.093,0.095,0.098,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0029,0.02,0.068,0.088,0.077,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.013,0.139,0.098,0.142,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.132,0.113,0.117,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.0201,0.076,0.1,0.076,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.012,0.139,0.109,0.128,0,1,0 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.013,0.128,0.104,0.121,0,0,1 0.36,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.04,0.178,0.114,0.156,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.087,0.095,0.092,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.005,0.079,0.063,0.126,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.005,0.06,0.062,0.097,0,1,0 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.013,0.072,0.075,0.096,0,0,1 0.34,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.028,0.155,0.1,0.155,0,0,1 0.24,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.038,0.096,0.081,0.119,0,0,1 0.24,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0039,0.0201,0.09,0.102,0.088,0,0,1 0.69,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00011,0.0201,0.141,0.09,0.157,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.005,0.104,0.104,0.099,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.0201,0.178,0.113,0.158,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00094,0.011,0.133,0.083,0.16,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.033,0.0201,0.234,0.215,0.109,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.059,0.019,0.038,0.113,0.034,1,0,0 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.0201,0.084,0.061,0.139,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0077,0.0201,0.05,0.066,0.076,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.143,0.111,0.129,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0012,0.0201,0.072,0.084,0.086,0,0,1 0.73,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0201,0.138,0.094,0.146,0,0,1 0.58,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0026,0.029,0.119,0.112,0.106,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00022,0.02,0.134,0.066,0.203,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.116,0.104,0.112,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0077,0.014,0.079,0.092,0.086,0,1,0 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00909,0.008,0.081,0.066,0.123,0,1,0 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.018,0.118,0.091,0.13,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00032,0.007,0.087,0.091,0.096,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.013,0.086,0.102,0.084,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.014,0.184,0.062,0.297,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.016,0.099,0.082,0.121,0,0,1 0.61,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.118,0.0096,0.018,0.104,0.017,1,0,0 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.007,0.069,0.093,0.074,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00929,0.024,0.117,0.107,0.108,0,1,0 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.015,0.118,0.075,0.157,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.01,0.09,0.077,0.117,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0022,0.025,0.085,0.092,0.092,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00031,0.029,0.143,0.139,0.103,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.022,0.078,0.104,0.075,0,0,1 
0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.028,0.142,0.104,0.135,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.014,0.091,0.081,0.112,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.0201,0.165,0.164,0.101,0,0,1 0.79,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0039,0.009,0.09,0.062,0.145,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.0201,0.134,0.102,0.131,0,0,1 0.91,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.011,0.017,0.082,0.088,0.093,0,1,0 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.011,0.064,0.055,0.116,0,0,1 0.87,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.176,0.0096,0.052,0.123,0.042,1,0,0 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.078,0.096,0.081,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.0201,0.112,0.089,0.126,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.213,0.176,0.121,0,0,1 0.49,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.0019,0.0201,0.122,0.078,0.156,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.022,0.083,0.08,0.104,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.091,0.081,0.112,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.016,0.016,0.062,0.088,0.071,0,1,0 0.88,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.14,0.114,0.123,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.0201,0.105,0.062,0.169,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.047,0.068,0.069,0,0,1 0.21,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.093,0.073,0.127,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.0201,0.097,0.098,0.099,0,0,1 0.42,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0048,0.0201,0.057,0.08,0.071,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00085,0.026,0.112,0.101,0.111,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.016,0.069,0.099,0.07,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.017,0.111,0.083,0.134,0,0,1 0.48,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.129,0.128,0.101,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00089,0.02,0.08,0.07,0.114,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00099,0.023,0.13,0.101,0.129,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5e-05,0.025,0.13,0.086,0.151,0,0,1 0.54,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.031,0.231,0.096,0.241,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.141,0.097,0.145,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.111,0.111,0.1,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0077,0.011,0.156,0.101,0.155,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.009,0.092,0.087,0.106,0,0,1 0.28,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.09,0.111,0.081,0,0,1 0.8,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0053,0.011,0.097,0.087,0.112,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00045,0.025,0.126,0.108,0.117,0,0,1 0.21,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.007,0.024,0.045,0.111,0.041,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.009,0.091,0.054,0.169,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00032,0.008,0.082,0.063,0.13,0,0,1 0.57,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00023,0.024,0.108,0.103,0.105,0,0,1 0.68,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0.00048,0.03,0.097,0.104,0.092,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.102,0.107,0.095,0,0,1 0.53,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.117,0.095,0.123,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.12,0.141,0.085,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.02,0.122,0.11,0.111,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.024,0.133,0.129,0.103,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.029,0.138,0.122,0.113,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.104,0.132,0.079,0,0,1 
0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.016,0.186,0.097,0.191,0,0,1 0.8,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0027,0.02,0.13,0.104,0.124,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.013,0.057,0.069,0.083,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.16,0.115,0.139,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.09,0.087,0.103,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.03,0.161,0.174,0.093,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.024,0.111,0.091,0.122,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.121,0.089,0.136,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0085,0.01,0.109,0.088,0.124,0,1,0 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.0201,0.197,0.089,0.221,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.017,0.107,0.085,0.126,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0078,0.017,0.072,0.093,0.077,0,1,0 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.109,0.09,0.121,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.023,0.12,0.116,0.103,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.097,0.091,0.107,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00017,0.024,0.122,0.1,0.122,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00014,0.02,0.175,0.104,0.165,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.017,0.072,0.096,0.075,0,1,0 0.2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00046,0.02,0.119,0.11,0.108,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00043,0.0201,0.107,0.098,0.109,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00114,0.0201,0.092,0.093,0.099,0,0,1 0.59,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00075,0.0201,0.129,0.103,0.125,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.098,0.095,0.103,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00112,0.026,0.122,0.095,0.127,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.091,0.089,0.102,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.031,0.017,0.067,0.092,0.073,0,1,0 0.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00022,0.0201,0.162,0.116,0.137,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.191,0.008,0.005,0.103,0.00485,1,0,0 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.094,0.092,0.102,0,0,1 0.57,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.171,0.092,0.186,0,0,1 0.69,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.086,0.075,0.115,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00029,0.0201,0.061,0.051,0.12,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.0201,0.085,0.071,0.12,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.024,0.056,0.043,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.089,0.087,0.102,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.0201,0.115,0.108,0.106,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.117,0.086,0.136,0,0,1 0.4,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.006,0.02,0.125,0.104,0.12,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.073,0.0096,0.043,0.087,0.049,1,0,0 0.69,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0013,0.0201,0.098,0.084,0.117,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.025,0.118,0.121,0.098,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.02,0.088,0.084,0.105,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.11,0.099,0.111,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.095,0.092,0.103,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.0201,0.094,0.097,0.097,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.019,0.102,0.1,0.102,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00056,0.0201,0.109,0.101,0.108,0,0,1 0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.114,0.103,0.111,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.016,0.113,0.088,0.128,0,0,1 
0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00095,0.02,0.136,0.101,0.134,0,0,1 0.29,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00232,0.023,0.102,0.096,0.10602,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.069,0.081,0.085,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.024,0.059,0.088,0.067,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.02,0.104,0.095,0.109,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.017,0.073,0.087,0.084,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.092,0.083,0.111,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0036,0.0208,0.106,0.104,0.101,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00232,0.0201,0.064,0.076,0.084,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.019,0.087,0.112,0.078,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.107,0.116,0.091,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.015,0.124,0.108,0.115,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.0201,0.124,0.086,0.144,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0037,0.014,0.083,0.081,0.102,0,0,1 0.66,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0018,0.0201,0.105,0.108,0.097,0,0,1 0.75,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.091,0.084,0.108,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.156,0.146,0.107,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.25,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.18,0.189,0.095,0,0,1 0.58,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0208,0.13,0.087,0.149,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0035,0.024,0.095,0.108,0.088,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.009,0.121,0.096,0.126,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.04,0.239,0.09,0.266,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.029,0.119,0.107,0.111,0,0,1 0.48,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.031,0.096,0.092,0.104,0,0,1 0.48,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,7e-05,0.017,0.104,0.096,0.1081,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.117,0.098,0.119,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.013,0.107,0.096,0.11122,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.022,0.096,0.095,0.10041,0,1,0 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.02,0.126,0.102,0.124,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0012,0.013,0.095,0.102,0.093,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00022,0.028,0.095,0.11,0.086,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.025,0.115,0.112,0.103,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.0201,0.094,0.11,0.086,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.014,0.074,0.073,0.101,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00079,0.0201,0.096,0.099,0.097,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.024,0.126,0.113,0.112,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.024,0.146,0.092,0.159,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0011,0.026,0.134,0.096,0.119,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0011,0.012,0.07,0.098,0.071,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.099,0.085,0.117,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.018,0.12,0.101,0.119,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0208,0.1,0.095,0.105,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.009,0.122,0.09,0.136,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.126,0.109,0.116,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.012,0.119,0.096,0.1237,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.109,0.11,0.099,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00043,0.015,0.168,0.111,0.151,0,0,1 
0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00075,0.022,0.106,0.088,0.12,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.006,0.02,0.091,0.107,0.085,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0058,0.011,0.047,0.099,0.048,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.115,0.112,0.103,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.084,0.083,0.101,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.178,0.202,0.088,0,0,1 0.32,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.0016,0.0201,0.17,0.173,0.098,0,0,1 0.29,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00066,0.0208,0.109,0.114,0.096,0,0,1 0.48,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.034,0.219,0.151,0.145,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.0201,0.098,0.081,0.121,0,0,1 0.49,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.0201,0.148,0.107,0.138,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.092,0.103,0.089,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.0201,0.069,0.09,0.077,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.087,0.08,0.109,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.003,0.018,0.115,0.101,0.114,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00048,0.0201,0.074,0.058,0.128,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.072,0.062,0.116,0,0,1 0.37,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00048,0.0201,0.04,0.052,0.077,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.109,0.067,0.163,0,0,1 0.59,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0208,0.099,0.073,0.136,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.113,0.092,0.123,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0014,0.018,0.097,0.096,0.10083,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.024,0.122,0.104,0.117,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.026,0.106,0.109,0.097,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.02,0.1,0.092,0.109,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.0201,0.11,0.081,0.136,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.031,0.105,0.102,0.103,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.009,0.099,0.099,0.1,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.0201,0.095,0.084,0.113,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.0201,0.123,0.102,0.121,0,0,1 0.44,1,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.0201,0.148,0.095,0.156,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.027,0.163,0.114,0.143,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.033,0.139,0.1,0.139,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.161,0.088,0.183,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.126,0.103,0.122,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.065,0.0096,0.051,0.104,0.049,1,0,0 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.096,0.096,0.1,0,0,1 0.56,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.101,0.079,0.128,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0201,0.158,0.091,0.174,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.034,0.126,0.122,0.103,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.097,0.113,0.086,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.0201,0.114,0.045,0.253,0,0,1 0.39,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.028,0.148,0.109,0.136,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.0201,0.093,0.077,0.121,0,0,1 0.81,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.016,0.112,0.091,0.123,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.018,0.126,0.081,0.156,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0.0096,0.029,0.103,0.028,1,0,0 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0201,0.107,0.1,0.107,0,0,1 
0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.101,0.09,0.112,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.03,0.105,0.099,0.106,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.118,0.1,0.118,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.033,0.13,0.12,0.108,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.024,0.113,0.108,0.105,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.106,0.074,0.143,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.0201,0.244,0.216,0.113,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.084,0.1,0.084,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.01,0.101,0.116,0.087,0,0,1 0.71,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00989,0.018,0.089,0.099,0.09,0,1,0 0.68,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00033,0.031,0.103,0.11,0.094,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.074,0.087,0.085,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.03,0.148,0.129,0.115,0,0,1 0.74,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0043,0.012,0.027,0.103,0.026,0,0,1 0.53,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.108,0.082,0.132,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.0201,0.144,0.089,0.162,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.007,0.087,0.081,0.107,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.086,0.068,0.126,0,0,1 0.78,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00057,0.0201,0.138,0.076,0.182,0,0,1 0.68,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.009,0.076,0.063,0.121,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.016,0.125,0.095,0.132,0,0,1 0.32,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.077,0.073,0.105,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0026,0.022,0.109,0.096,0.1133,0,0,1 0.23,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00031,0.017,0.147,0.104,0.141,0,0,1 0.62,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00016,0.0208,0.077,0.09,0.086,0,0,1 0.5,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0015,0.0201,0.092,0.087,0.106,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.015,0.107,0.086,0.124,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0208,0.081,0.092,0.088,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0208,0.108,0.104,0.104,0,0,1 0.6,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0.00232,0.026,0.11329,0.096,0.11776,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00074,0.0201,0.134,0.102,0.131,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.112,0.097,0.116,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.095,0.088,0.108,0,0,1 0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.137,0.104,0.131,0,0,1 0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.0201,0.083,0.087,0.095,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.011,0.145,0.087,0.167,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.022,0.108,0.112,0.096,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.015,0.126,0.095,0.133,0,1,0 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.112,0.073,0.153,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.078,0.055,0.142,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.024,0.098,0.093,0.105,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.109,0.1,0.109,0,0,1 0.76,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00045,0.0201,0.184,0.121,0.152,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.0201,0.095,0.082,0.116,0,0,1 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.013,0.094,0.089,0.106,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.011,0.132,0.093,0.142,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.005,0.035,0.085,0.041,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.002,0.044,0.064,0.069,0,0,1 0.21,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.015,0.107,0.091,0.118,0,1,0 
0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.011,0.143,0.091,0.157,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.023,0.12,0.094,0.128,0,0,1 0.72,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0038,0.012,0.122,0.116,0.104,0,0,1 0.75,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00041,0.017,0.134,0.084,0.16,0,0,1 0.75,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0017,0.031,0.09,0.096,0.094,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00075,0.011,0.078,0.071,0.11,0,0,1 0.67,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00077,0.0201,0.095,0.102,0.093,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0031,0.026,0.095,0.089,0.107,0,0,1 0.81,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.023,0.089,0.114,0.078,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.075,0.091,0.082,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.089,0.097,0.092,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.051,0.175,0.091,0.192,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00809,0.012,0.092,0.096,0.096,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.0201,0.147,0.095,0.155,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.012,0.109,0.086,0.127,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0034,0.0201,0.094,0.083,0.113,0,0,1 0.47,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0014,0.0201,0.135,0.104,0.127,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.077,0.09,0.086,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.0201,0.134,0.086,0.156,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00093,0.0201,0.114,0.102,0.112,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0025,0.0201,0.108,0.098,0.11,0,0,1 0.01,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0065,0.017,0.133,0.126,0.106,0,1,0 0.46,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0026,0.008,0.097,0.089,0.109,0,0,1 0.33,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.041,0.017,0.095,0.102,0.093,0,1,0 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.0201,0.184,0.15,0.123,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.0201,0.083,0.086,0.097,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.0201,0.098,0.101,0.097,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.0201,0.091,0.08,0.114,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.134,0.103,0.13,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.097,0.104,0.092,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.084,0.082,0.102,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.0201,0.118,0.099,0.119,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.099,0.094,0.105,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0014,0.023,0.122,0.11,0.111,0,0,1 0.25,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0.0018,0.0201,0.158,0.158,0.1,0,0,1 0.74,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.022,0.191,0.097,0.197,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.016,0.083,0.101,0.082,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.02,0.144,0.078,0.185,0,0,1 0.67,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0077,0.014,0.083,0.104,0.08,0,0,1 0.57,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,7e-05,0.016,0.075,0.109,0.069,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.157,0.104,0.15,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.189,0.177,0.107,0,0,1 0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.001,0.028,0.079,0.035,0,0,1 0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00014,0.009,0.092,0.091,0.101,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.022,0.134,0.097,0.138,0,0,1 0.22,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,9e-05,0.022,0.189,0.102,0.185,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.012,0.151,0.114,0.132,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0067,0.017,0.09,0.102,0.088,0,1,0 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00024,0.027,0.117,0.108,0.108,0,0,1 
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.024,0.096,0.098,0.098,0,0,1 0.49,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,7e-05,0.033,0.239,0.128,0.187,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.097,0.071,0.137,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.075,0.1,0.075,0,0,1 0.54,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,7e-05,0.05,0.216,0.086,0.251,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.0201,0.164,0.104,0.158,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00047,0.0201,0.092,0.091,0.101,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0034,0.019,0.126,0.116,0.107,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.0201,0.09,0.077,0.117,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.061,0.077,0.079,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.0208,0.093,0.096,0.097,0,0,1 0.73,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0073,0.0208,0.139,0.115,0.121,0,1,0 0.49,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.031,0.139,0.114,0.122,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00232,0.0201,0.098,0.1,0.098,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.016,0.109,0.083,0.131,0,0,1 0.45,1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0017,0.018,0.094,0.086,0.109,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0023,0.022,0.098,0.093,0.105,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.004,0.047,0.088,0.053,0,0,1 0.76,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00026,0.001,0.034,0.061,0.056,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0011,0.018,0.111,0.102,0.109,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.04,0.221,0.109,0.203,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.034,0.147,0.135,0.109,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00032,0.0201,0.099,0.101,0.098,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.162,0.097,0.167,0,0,1 0.39,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.041,0.131,0.095,0.138,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0064,0.017,0.117,0.104,0.113,0,1,0 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.0201,0.131,0.115,0.114,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.016,0.099,0.087,0.114,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00027,0.0201,0.117,0.108,0.108,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.0201,0.116,0.098,0.118,0,0,1 0.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.117,0.097,0.121,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0072,0.017,0.106,0.083,0.128,0,1,0 0.33,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0045,0.0208,0.083,0.095,0.087,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00879,0.006,0.04,0.083,0.048,1,0,0 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.02,0.177,0.116,0.15,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00889,0.017,0.119,0.119,0.1,0,1,0 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0085,0.016,0.112,0.091,0.123,0,1,0 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0078,0.017,0.077,0.076,0.101,0,1,0 0.45,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00021,0.0201,0.179,0.099,0.181,0,0,1 0.5219,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.132,0.102,0.129,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0032,0.019,0.114,0.1,0.114,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0064,0.0005,0.04,0.068,0.059,1,0,0 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0201,0.153,0.067,0.228,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.019,0.112,0.095,0.118,0,0,1 0.31,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.166,0.006,0.032,0.104,0.031,1,0,0 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.013,0.098,0.091,0.108,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.122,0.112,0.109,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.02,0.073,0.1,0.073,0,1,0 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.144,0.17,0.085,0,0,1 
0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.0201,0.146,0.132,0.111,0,0,1 0.24,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,6e-05,0.026,0.176,0.107,0.164,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.156,0.136,0.115,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0019,0.018,0.094,0.089,0.106,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.0201,0.075,0.082,0.091,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.0201,0.106,0.083,0.128,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.023,0.13,0.103,0.126,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,4e-05,0.038,0.292,0.091,0.321,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.022,0.193,0.122,0.158,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.016,0.109,0.093,0.117,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.088,0.073,0.121,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.028,0.118,0.101,0.117,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.099,0.086,0.115,0,0,1 0.38,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0043,0.015,0.142,0.097,0.146,0,0,1 0.35,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00052,0.026,0.091,0.097,0.094,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.52,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0201,0.176,0.149,0.118,0,0,1 0.42,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0.0038,0.023,0.127,0.097,0.131,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.025,0.202,0.1,0.202,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0073,0.017,0.142,0.126,0.113,0,1,0 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.101,0.096,0.105,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.106,0.093,0.114,0,0,1 0.16,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00021,0.024,0.126,0.093,0.135,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00091,0.022,0.096,0.088,0.109,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0022,0.018,0.123,0.104,0.118,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.019,0.098,0.101,0.097,0,0,1 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.019,0.109,0.092,0.118,0,0,1 0.56,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.03,0.104,0.115,0.09,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.167,0.154,0.108,0,0,1 0.74,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0208,0.166,0.089,0.186,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0201,0.188,0.083,0.226,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.022,0.142,0.131,0.108,0,0,1 0.48,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00063,0.0201,0.132,0.107,0.123,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,4e-05,0.03,0.129,0.097,0.133,0,0,1 0.71,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.103,0.099,0.104,0,0,1 0.69,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.014,0.163,0.082,0.199,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00085,0.013,0.102,0.091,0.113,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.024,0.122,0.096,0.12681,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.097,0.112,0.087,0,0,1 0.95,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.004,0.054,0.062,0.087,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.0201,0.137,0.111,0.123,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0025,0.025,0.108,0.123,0.088,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0037,0.024,0.107,0.096,0.112,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0026,0.0201,0.083,0.107,0.078,0,0,1 0.53,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00072,0.0201,0.12,0.088,0.139,0,0,1 0.59,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.027,0.074,0.112,0.066,0,0,1 0.61,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0025,0.02,0.101,0.085,0.119,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.112,0.095,0.118,0,0,1 
0.48,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.02,0.028,0.074,0.095,0.0774,0,1,0 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00057,0.0201,0.135,0.088,0.153,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.026,0.089,0.089,0.1,0,0,1 0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.0208,0.135,0.108,0.125,0,0,1 0.36,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0201,0.169,0.098,0.172,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.024,0.132,0.095,0.139,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0014,0.0201,0.093,0.083,0.112,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.152,0.103,0.148,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.078,0.083,0.094,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.137,0.115,0.119,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.017,0.08,0.095,0.084,0,0,1 0.16,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.026,0.109,0.099,0.11,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.027,0.09,0.099,0.091,0,0,1 0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.02,0.091,0.085,0.107,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.086,0.077,0.112,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.022,0.091,0.085,0.107,0,0,1 0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.228,0.165,0.138,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0.005,0.022,0.138,0.016,1,0,0 0.26,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0016,0.0201,0.136,0.107,0.128,0,0,1 0.13,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.023,0.105,0.085,0.124,0,0,1 0.17,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.018,0.132,0.089,0.148,0,0,1 0.77,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0013,0.012,0.123,0.098,0.126,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0041,0.013,0.101,0.078,0.13,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.023,0.065,0.087,0.075,0,0,1 0.72,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00232,0.01,0.016,0.11,0.015,0,0,1 0.72,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.192,0.0096,0.03121,0.102,0.03042,1,0,0 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.024,0.135,0.092,0.147,0,0,1 0.39,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0043,0.018,0.092,0.096,0.09563,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.111,0.108,0.103,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.077,0.071,0.109,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.022,0.128,0.112,0.114,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.093,0.081,0.115,0,0,1 0.74,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.08,0.095,0.084,0,0,1 0.23,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.0201,0.127,0.093,0.137,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.0201,0.079,0.083,0.095,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0026,0.024,0.105,0.099,0.106,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.029,0.127,0.1,0.127,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.0201,0.11,0.111,0.099,0,0,1 0.84,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.004,0.017,0.064,0.096,0.067,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.08,0.094,0.085,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.134,0.12,0.112,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00051,0.0201,0.203,0.195,0.104,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.025,0.107,0.1,0.105,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.058,0.065,0.089,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.077,0.06,0.128,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.017,0.089,0.101,0.088,0,1,0 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.026,0.117,0.108,0.108,0,0,1 0.51,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,3e-05,0.043,0.167,0.102,0.164,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.076,0.076,0.1,0,0,1 
0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.02,0.11329,0.096,0.11776,0,0,1 0.57,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.031,0.041,0.23,0.2,0.115,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.019,0.091,0.096,0.095,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.136,0.075,0.181,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00028,0.015,0.138,0.099,0.139,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.019,0.138,0.095,0.145,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.022,0.107,0.09,0.119,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.002,0.043,0.063,0.068,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.026,0.144,0.101,0.143,0,0,1 0.86,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0016,0.004,0.076,0.104,0.072,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00033,0.023,0.077,0.083,0.093,0,0,1 0.79,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0071,0.026,0.153,0.123,0.124,0,0,1 0.44,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0075,0.0201,0.086,0.142,0.061,0,0,1 0.81,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.0201,0.13,0.116,0.11,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.131,0.113,0.116,0,0,1 0.75,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0014,0.016,0.086,0.089,0.097,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.024,0.103,0.102,0.101,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0011,0.0201,0.103,0.099,0.104,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.0201,0.061,0.099,0.062,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.018,0.13,0.112,0.116,0,0,1 0.56,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.027,0.212,0.115,0.184,0,0,1 0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.014,0.13,0.116,0.112,0,0,1 0.63,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,3e-05,0.015,0.147,0.093,0.158,0,0,1 0.63,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.056,0.1,0.056,0,0,1 0.61,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.109,0.013,0.044,0.098,0.045,1,0,0 0.22,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.031,0.157,0.092,0.171,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.023,0.098,0.093,0.105,0,0,1 0.45,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00011,0.0201,0.129,0.085,0.152,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.103,0.081,0.127,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.076,0.236,0.091,0.259,0,0,1 0.29,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.027,0.201,0.084,0.239,0,0,1 0.6,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.099,0.103,0.096,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00078,0.0201,0.179,0.081,0.221,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.0208,0.133,0.097,0.137,0,1,0 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.016,0.173,0.09,0.192,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.081,0.078,0.104,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00034,0.0201,0.142,0.099,0.143,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.018,0.148,0.125,0.118,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.017,0.081,0.089,0.091,0,0,1 0.02,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.136,0.0201,0.053,0.077,0.069,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.11329,0.096,0.143,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0036,0.0201,0.097,0.107,0.091,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00074,0.0201,0.097,0.101,0.096,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.114,0.08,0.1425,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00232,0.0201,0.164,0.161,0.102,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 
0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.105,0.101,0.104,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.017,0.069,0.096,0.072,0,1,0 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.106,0.111,0.096,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.0201,0.13,0.107,0.122,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.093,0.088,0.106,0,0,1 0.36,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.1,0.091,0.11,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00232,0.006,0.11329,0.096,0.11776,0,0,1 0.48,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.0201,0.108,0.103,0.105,0,0,1 0.48,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.014,0.129,0.09,0.143,0,0,1 0.46,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.002,0.009,0.102,0.094,0.109,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.02,0.14,0.136,0.103,0,0,1 0.64,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.099,0.092,0.108,0,0,1 0.22,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.0015,0.02,0.11329,0.096,0.107,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0064,0.003,0.013,0.094,0.014,1,0,0 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0201,0.046,0.058,0.079,0,0,1 0.53,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00024,0.0201,0.153,0.095,0.161,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.024,0.131,0.108,0.121,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.01,0.092,0.09,0.102,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.017,0.115,0.111,0.104,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.096,0.089,0.108,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0044,0.0201,0.127,0.135,0.094,0,0,1 0.24,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.011,0.019,0.092,0.084,0.11,0,1,0 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.013,0.146,0.091,0.16,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.049,0.249,0.086,0.29,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00079,0.018,0.081,0.086,0.094,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0061,0.017,0.116,0.094,0.123,0,1,0 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.166,0.177,0.094,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.102,0.094,0.109,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.01,0.061,0.077,0.079,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.01,0.06,0.074,0.081,0,0,1 0.37,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00094,0.019,0.099,0.086,0.115,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.018,0.161,0.081,0.199,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.0201,0.134,0.091,0.147,0,0,1 0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.0201,0.088,0.066,0.133,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.016,0.123,0.096,0.128,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.011,0.091,0.079,0.115,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.019,0.074,0.079,0.094,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.024,0.107,0.083,0.129,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.025,0.16,0.116,0.136,0,0,1 0.71,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.011,0.123,0.094,0.131,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00028,0.013,0.091,0.102,0.089,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.097,0.079,0.123,0,0,1 0.66,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0.002,0.018,0.115,0.097,0.119,0,0,1 
0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.111,0.093,0.119,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.7,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0208,0.204,0.093,0.219,0,0,1 0.1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.098,0.099,0.099,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.164,0.134,0.122,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.095,0.087,0.109,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.108,0.094,0.115,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0201,0.113,0.076,0.149,0,0,1 0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.104,0.098,0.106,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.085,0.07,0.121,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.013,0.114,0.084,0.136,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.013,0.081,0.091,0.089,0,1,0 0.43,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.03,0.13,0.084,0.155,0,0,1 0.3,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.143,0.097,0.147,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.056,0.278,0.078,0.356,0,0,1 0.6,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0201,0.155,0.078,0.199,0,0,1 0.62,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00066,0.0208,0.15,0.119,0.126,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.001,0.032,0.117,0.111,0.105,0,0,1 0.47,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.023,0.114,0.104,0.11,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00029,0.0201,0.131,0.099,0.132,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.027,0.116,0.113,0.103,0,0,1 0.24,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.077,0.095,0.081,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.0201,0.146,0.178,0.082,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.118,0.128,0.092,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.0201,0.14,0.15,0.093,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.122,0.103,0.119,0,0,1 0.45,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0.0035,0.0201,0.107,0.098,0.109,0,0,1 0.53,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.002,0.015,0.089,0.103,0.086,0,0,1 0.7,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0078,0.019,0.052,0.077,0.068,0,0,1 0.49,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.037,0.178,0.096,0.18503,0,0,1 0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.008,0.11329,0.096,0.11776,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.103,0.115,0.09,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.0201,0.062,0.078,0.08,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00034,0.009,0.15,0.09,0.167,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00027,0.007,0.117,0.084,0.139,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.047,0.0096,0.019,0.094,0.02,1,0,0 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.075,0.057,0.132,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.015,0.115,0.084,0.137,0,0,1 0.83,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00083,0.0201,0.125,0.094,0.133,0,0,1 0.49,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00027,0.018,0.098,0.108,0.091,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0025,0.025,0.1,0.103,0.097,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.086,0.08,0.108,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.117,0.11,0.106,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.0201,0.126,0.082,0.154,0,0,1 0.69,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,3e-05,0.014,0.158,0.096,0.16424,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.157,0.152,0.103,0,0,1 0.24,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.114,0.111,0.103,0,0,1 0.54,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.04,0.019,0.048,0.079,0.061,1,0,0 
0.81,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0039,0.014,0.102,0.102,0.1,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.127,0.108,0.118,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.0201,0.08,0.066,0.121,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0201,0.148,0.103,0.144,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.086,0.086,0.1,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.0201,0.114,0.09,0.127,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.017,0.083,0.079,0.105,0,0,1 0.32,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.177,0.0201,0.072,0.1,0.072,0,0,1 0.42,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00232,0.019,0.11329,0.096,0.11776,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00083,0.023,0.119,0.111,0.107,0,0,1 0.5,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,5e-05,0.0201,0.129,0.104,0.124,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.025,0.109,0.107,0.102,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.102,0.11,0.093,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.017,0.12,0.091,0.132,0,0,1 0.78,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.068,0.1,0.068,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.0208,0.134,0.107,0.125,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.017,0.167,0.096,0.17359,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.0201,0.08,0.088,0.091,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.016,0.112,0.114,0.098,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.016,0.08,0.091,0.088,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.118,0.091,0.13,0,0,1 0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.012,0.144,0.088,0.164,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.024,0.152,0.07,0.217,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00809,0.017,0.11,0.091,0.121,0,1,0 0.79,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.137,0.104,0.131,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.226,0.093,0.243,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.022,0.204,0.146,0.14,0,0,1 0.6,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00081,0.016,0.099,0.096,0.10291,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.102,0.077,0.133,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0201,0.097,0.102,0.095,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.108,0.109,0.099,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.143,0.1,0.143,0,0,1 0.85,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.013,0.017,0.12,0.088,0.136,0,1,0 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.017,0.102,0.098,0.104,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.0201,0.057,0.083,0.069,0,0,1 0.82,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00469,0.014,0.098,0.091,0.108,0,0,1 0.68,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0008,0.018,0.069,0.085,0.082,0,0,1 0.47,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.013,0.092,0.078,0.118,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.14,0.096,0.146,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0065,0.012,0.169,0.099,0.171,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0083,0.104,0.077,0.135,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0076,0.017,0.107,0.102,0.105,0,1,0 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.0201,0.155,0.15,0.103,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.023,0.138,0.124,0.111,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.161,0.102,0.158,0,0,1 0.48,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0069,0.115,0.08,0.144,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.019,0.098,0.065,0.151,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.014,0.066,0.078,0.085,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0093,0.089,0.066,0.135,0,0,1 
0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.086,0.101,0.085,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.097,0.104,0.093,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.025,0.129,0.104,0.124,0,0,1 0.44,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0208,0.163,0.12,0.136,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.101,0.104,0.097,0,0,1 0.45,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.106,0.096,0.11,0,0,1 0.67,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0025,0.0201,0.103,0.074,0.139,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0032,0.022,0.107,0.1,0.107,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.013,0.079,0.076,0.104,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.065,0.096,0.068,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.012,0.106,0.11,0.096,0,0,1 0.38,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0208,0.144,0.104,0.136,0,0,1 0.18,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0013,0.033,0.163,0.153,0.107,0,0,1 0.63,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,2e-05,0.016,0.183,0.103,0.178,0,0,1 0.57,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.11,0.094,0.117,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00025,0.0201,0.102,0.076,0.134,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.019,0.088,0.104,0.085,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.115,0.07,0.164,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.083,0.083,0.1,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.0201,0.117,0.1,0.117,0,0,1 0.28,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,2e-05,0.0201,0.106,0.123,0.086,0,0,1 0.5,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0005,0.008,0.123,0.116,0.105,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00041,0.0201,0.197,0.176,0.112,0,0,1 0.59,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00058,0.019,0.143,0.096,0.14864,0,0,1 0.34,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.0025,0.023,0.113,0.104,0.107,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.0201,0.135,0.116,0.116,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.131,0.097,0.135,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0066,0.017,0.114,0.093,0.123,0,1,0 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00829,0.017,0.094,0.095,0.099,0,1,0 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.09,0.084,0.107,0,0,1 0.35,1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0017,0.0201,0.083,0.112,0.074,0,0,1 0.17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.015,0.103,0.088,0.117,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.0208,0.142,0.128,0.111,0,0,1 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.011,0.084,0.098,0.086,0,0,1 0.54,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.085,0.089,0.096,0,0,1 0.27,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1e-05,0.032,0.163,0.11,0.148,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.023,0.102,0.094,0.109,0,0,1 0.27,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.16,0.112,0.143,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.0201,0.098,0.075,0.131,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.015,0.081,0.101,0.08,0,0,1 0.17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.0201,0.077,0.091,0.085,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.074,0.073,0.101,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.0201,0.088,0.101,0.087,0,0,1 0.3,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.0201,0.176,0.157,0.112,0,0,1 0.46,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0201,0.161,0.099,0.163,0,0,1 0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.099,0.104,0.094,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.01,0.099,0.092,0.108,0,1,0 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00056,0.0201,0.082,0.058,0.139,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.091,0.104,0.088,0,0,1 
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.0201,0.114,0.092,0.124,0,0,1 0.42,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.0201,0.095,0.086,0.11,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.091,0.096,0.095,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.108,0.093,0.116,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.035,0.207,0.096,0.21517,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0024,0.025,0.093,0.107,0.087,0,0,1 0.56,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.065,0.0096,0.049,0.095,0.052,1,0,0 0.75,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0.0011,0.017,0.103,0.077,0.134,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0013,0.0201,0.125,0.094,0.133,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.119,0.104,0.114,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.114,0.11,0.104,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.018,0.131,0.122,0.107,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.025,0.147,0.089,0.165,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.0201,0.139,0.095,0.146,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.0201,0.085,0.094,0.09,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.007,0.017,0.096,0.093,0.103,0,1,0 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.005,0.053,0.125,0.042,0,0,1 0.44,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0026,0.014,0.088,0.079,0.111,0,0,1 0.71,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0015,0.02,0.141,0.103,0.137,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00086,0.015,0.15,0.093,0.161,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0067,0.016,0.121,0.088,0.138,0,1,0 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0208,0.064,0.07,0.091,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0027,0.024,0.139,0.098,0.142,0,0,1 0.48,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0.0029,0.028,0.122,0.119,0.103,0,0,1 0.65,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.023,0.017,0.078,0.092,0.085,0,1,0 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0208,0.084,0.09,0.093,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.004,0.099,0.08,0.124,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.024,0.116,0.097,0.12,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,6e-05,0.024,0.152,0.116,0.131,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0201,0.124,0.11,0.113,0,0,1 0.64,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0018,0.0208,0.097,0.104,0.093,0,0,1 0.7,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.004,0.018,0.109,0.082,0.133,0,0,1 0.67,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,3e-05,0.024,0.137,0.085,0.161,0,0,1 0.74,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.0201,0.131,0.084,0.156,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.025,0.099,0.097,0.102,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.018,0.084,0.124,0.068,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.017,0.105,0.091,0.115,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00017,0.0201,0.152,0.13,0.117,0,0,1 0.45,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.131,0.104,0.126,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00056,0.0201,0.094,0.061,0.154,0,0,1 0.69,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00095,0.022,0.134,0.09,0.149,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0014,0.0201,0.148,0.107,0.138,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.0201,0.125,0.104,0.119,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.173,0.162,0.107,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0201,0.097,0.095,0.102,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.027,0.137,0.114,0.12,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.091,0.104,0.087,0,0,1 0.22,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0.0019,0.0201,0.182,0.195,0.093,0,0,1 0.02,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0041,0.0201,0.155,0.104,0.149,0,0,1 
0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.16,0.104,0.152,0,0,1 0.42,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.023,0.11329,0.096,0.11776,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0201,0.178,0.071,0.251,0,0,1 0.42,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0035,0.015,0.083,0.092,0.09,0,0,1 0.26,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.198,0.0096,0.005,0.095,0.0055,1,0,0 0.26,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00232,0.018,0.11329,0.096,0.11776,0,0,1 0.81,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0082,0.009,0.128,0.091,0.141,0,1,0 0.64,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,9e-05,0.023,0.114,0.093,0.123,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.121,0.129,0.094,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.49,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.006,0.026,0.128,0.125,0.102,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.014,0.129,0.1,0.129,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.101,0.083,0.122,0,0,1 0.49,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00232,0.039,0.24,0.125,0.192,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.0201,0.102,0.093,0.11,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.123,0.097,0.127,0,0,1 0.64,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.038,0.012,0.06,0.103,0.058,1,0,0 0.84,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0028,0.002,0.094,0.081,0.116,0,0,1 0.79,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0048,0.007,0.077,0.096,0.08004,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.14,0.116,0.12,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0095,0.0208,0.14,0.149,0.094,0,1,0 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.016,0.063,0.085,0.074,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.118,0.088,0.134,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.0201,0.134,0.099,0.135,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.019,0.142,0.098,0.145,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.121,0.103,0.117,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.0201,0.126,0.116,0.109,0,0,1 0.5,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0035,0.0208,0.177,0.1,0.177,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.018,0.072,0.09,0.08,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.207,0.166,0.125,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.111,0.099,0.112,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.11,0.1,0.11,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.0201,0.078,0.075,0.104,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.014,0.096,0.087,0.11,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00208,0.0201,0.117,0.088,0.133,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0024,0.0201,0.127,0.109,0.117,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.075,0.068,0.11,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00047,0.0201,0.185,0.17,0.109,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.032,0.198,0.155,0.128,0,0,1 0.18,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00033,0.0201,0.154,0.086,0.179,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0073,0.017,0.127,0.09,0.141,0,1,0 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.096,0.086,0.112,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.081,0.074,0.109,0,0,1 0.69,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.024,0.017,0.075,0.084,0.089,0,1,0 0.62,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.137,0.089,0.154,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0033,0.0201,0.149,0.133,0.112,0,0,1 0.46,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0018,0.0201,0.116,0.094,0.123,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.014,0.102,0.084,0.121,0,0,1 
0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.0201,0.101,0.101,0.1,0,0,1 0.58,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0.001,0.018,0.112,0.072,0.156,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0031,0.018,0.095,0.091,0.104,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.096,0.066,0.145,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00031,0.036,0.175,0.099,0.177,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.093,0.08,0.116,0,0,1 0.76,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00035,0.014,0.139,0.092,0.151,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00049,0.016,0.097,0.079,0.123,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.0201,0.131,0.101,0.13,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.012,0.123,0.09,0.137,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.015,0.114,0.07,0.163,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.015,0.059,0.079,0.075,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.0201,0.093,0.085,0.109,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.173,0.132,0.131,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.0201,0.153,0.099,0.155,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.154,0.131,0.118,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.102,0.066,0.155,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.02,0.078,0.096,0.08108,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.025,0.134,0.088,0.152,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.014,0.085,0.087,0.098,0,0,1 0.42,1,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0.00082,0.0208,0.121,0.077,0.157,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.113,0.093,0.122,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.073,0.0096,0.043,0.085,0.051,1,0,0 0.52,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.105,0.083,0.127,0,0,1 0.21,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.023,0.119,0.079,0.151,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.1,0.076,0.132,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.095,0.083,0.114,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.1,0.087,0.115,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0053,0.0201,0.12,0.095,0.126,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.0201,0.06,0.072,0.083,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.089,0.095,0.094,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2e-05,0.0201,0.149,0.098,0.152,0,0,1 0.85,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.0201,0.084,0.08,0.105,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.02,0.152,0.095,0.16,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00048,0.008,0.062,0.076,0.082,0,0,1 0.33,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.024,0.139,0.096,0.145,0,0,1 0.45,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.015,0.135,0.104,0.127,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.107,0.113,0.095,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.0201,0.104,0.068,0.153,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.022,0.132,0.088,0.15,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.104,0.101,0.103,0,0,1 0.3,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.031,0.013,0.066,0.088,0.075,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.015,0.108,0.094,0.115,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.006,0.121,0.119,0.102,0,0,1 0.56,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0024,0.0201,0.096,0.087,0.11,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.022,0.094,0.081,0.116,0,0,1 0.74,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.003,0.013,0.096,0.101,0.095,0,0,1 
0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.0201,0.138,0.099,0.139,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.0201,0.08,0.093,0.086,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.138,0.093,0.148,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.1,0.096,0.10395,0,0,1 0.57,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.001,0.056,0.078,0.072,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.017,0.115,0.11,0.105,0,0,1 0.24,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.023,0.042,0.092,0.046,0,0,1 0.27,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.106,0.079,0.134,0,0,1 0.6,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.023,0.116,0.094,0.123,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0075,0.017,0.081,0.103,0.079,0,1,0 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.01,0.146,0.098,0.149,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.0201,0.105,0.092,0.114,0,0,1 0.75,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0015,0.012,0.091,0.097,0.094,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.0201,0.104,0.087,0.12,0,0,1 0.57,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.116,0.12,0.097,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.067,0.083,0.081,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.07,0.077,0.091,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.0201,0.118,0.122,0.097,0,0,1 0.91,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00101,0.0201,0.104,0.089,0.117,0,0,1 0.62,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00064,0.014,0.13,0.091,0.143,0,0,1 0.76,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.0201,0.065,0.091,0.071,0,0,1 0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.0201,0.148,0.098,0.151,0,0,1 0.63,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.022,0.017,0.088,0.11,0.08,0,1,0 0.64,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0.00084,0.0201,0.121,0.089,0.136,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.08,0.088,0.091,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.0201,0.133,0.104,0.125,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.13,0.087,0.149,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.111,0.097,0.114,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.096,0.1,0.096,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00049,0.018,0.109,0.082,0.133,0,0,1 0.18,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.063,0.078,0.081,0,0,1 0.18,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0011,0.0201,0.071,0.061,0.116,0,0,1 0.78,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0016,0.01,0.086,0.079,0.109,0,0,1 0.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.012,0.084,0.091,0.092,0,0,1 0.45,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0013,0.013,0.073,0.104,0.07,0,0,1 0.39,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00083,0.0201,0.09,0.102,0.088,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.0201,0.08,0.082,0.098,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.126,0.116,0.107,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0011,0.018,0.077,0.062,0.124,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00018,0.002,0.094,0.079,0.119,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.139,0.091,0.153,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.02,0.123,0.096,0.12785,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.0201,0.093,0.098,0.095,0,0,1 0.54,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,6e-05,0.014,0.108,0.097,0.111,0,0,1 0.22,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.155,0.097,0.16,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.017,0.064,0.097,0.066,0,1,0 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.104,0.093,0.112,0,0,1 0.62,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0.0011,0.014,0.103,0.089,0.115,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.09,0.087,0.103,0,0,1 
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.0201,0.081,0.086,0.094,0,0,1 0.75,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0.0007,0.0201,0.146,0.096,0.152,0,0,1 0.5,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.108,0.088,0.123,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.0201,0.102,0.101,0.101,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00094,0.0201,0.094,0.095,0.099,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.147,0.109,0.135,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00232,0.009,0.063,0.065,0.097,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00099,0.0201,0.111,0.076,0.146,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.0201,0.083,0.084,0.099,0,0,1 0.34,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0079,0.0201,0.088,0.083,0.106,0,0,1 0.63,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.013,0.096,0.092,0.104,0,0,1 0.6,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.158,0.104,0.152,0,0,1 0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.133,0.1,0.133,0,0,1 0.24,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.0201,0.149,0.084,0.177,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.018,0.13,0.086,0.151,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.015,0.073,0.082,0.089,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.091,0.083,0.11,0,0,1 0.7,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0029,0.011,0.094,0.062,0.152,0,0,1 0.33,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0011,0.016,0.104,0.079,0.132,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.012,0.092,0.069,0.133,0,0,1 0.63,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0029,0.0201,0.082,0.067,0.122,0,0,1 0.47,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00075,0.01,0.141,0.095,0.148,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00064,0.014,0.101,0.084,0.12,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0027,0.0201,0.139,0.108,0.129,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.072,0.096,0.07483,0,0,1 0.72,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.032,0.239,0.083,0.288,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.05,0.16,0.087,0.184,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.019,0.1,0.092,0.109,0,0,1 0.21,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.129,0.096,0.13409,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0201,0.094,0.086,0.109,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.094,0.088,0.107,0,0,1 0.81,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2e-05,0.022,0.153,0.109,0.14,0,0,1 0.72,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0137,0.017,0.067,0.075,0.089,0,1,0 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.015,0.089,0.097,0.092,0,0,1 0.64,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00056,0.018,0.09,0.079,0.114,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.0201,0.102,0.085,0.12,0,0,1 0.7,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00057,0.0208,0.101,0.078,0.129,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00086,0.0201,0.097,0.101,0.096,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.025,0.058,0.107,0.054,1,0,0 0.21,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.06,0.098,0.061,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.0201,0.153,0.107,0.143,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.083,0.084,0.099,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.087,0.073,0.119,0,0,1 0.78,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00021,0.0201,0.214,0.123,0.174,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.0201,0.115,0.125,0.092,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.0208,0.16,0.098,0.163,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.018,0.02,0.076,0.108,0.07,0,1,0 0.43,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00232,0.019,0.092,0.076,0.121,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.0201,0.08,0.086,0.093,0,0,1 
0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.096,0.09,0.107,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.0201,0.108,0.087,0.124,0,0,1 0.72,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00041,0.019,0.108,0.072,0.15,0,0,1 0.19,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,2e-05,0.0201,0.171,0.097,0.176,0,0,1 0.77,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.026,0.03,0.061,0.088,0.069,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.02,0.126,0.085,0.148,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.064,0.052,0.121,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0208,0.097,0.072,0.135,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.013,0.078,0.076,0.103,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.015,0.087,0.089,0.098,0,0,1 0.67,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0208,0.11329,0.096,0.11776,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.029,0.1,0.097,0.103,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.0201,0.097,0.094,0.103,0,0,1 0.37,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.044,0.064,0.069,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.09,0.077,0.117,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,5e-05,0.0201,0.117,0.078,0.15,0,0,1 0.31,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.049,0.0096,0.087,0.135,0.064,1,0,0 0.4,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00064,0.02,0.057,0.068,0.084,0,0,1 0.54,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.022,0.015,0.052,0.084,0.062,1,0,0 0.54,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0035,0.017,0.113,0.096,0.11746,0,0,1 0.39,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0015,0.016,0.066,0.115,0.057,0,0,1 0.3,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0.0033,0.0201,0.087,0.116,0.074,0,0,1 0.76,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0015,0.0201,0.101,0.082,0.123,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0023,0.023,0.093,0.127,0.073,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.088,0.077,0.114,0,0,1 0.74,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0.0011,0.019,0.106,0.116,0.091,0,0,1 0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.0201,0.102,0.08,0.128,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.017,0.07,0.09,0.078,0,0,1 0.05,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.206,0.141,0.146,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.29,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.057,0.172,0.086,0.2,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.017,0.102,0.09,0.113,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.091,0.124,0.073,0,0,1 0.54,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.077,0.09,0.086,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.011,0.082,0.094,0.087,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.016,0.13,0.1,0.13,0,0,1 0.4,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.017,0.093,0.091,0.102,0,0,1 0.76,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0014,0.0208,0.08,0.113,0.071,0,0,1 0.53,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0.00072,0.016,0.065,0.065,0.1,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.059,0.183,0.082,0.223,0,0,1 0.43,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.029,0.13,0.08,0.163,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.064,0.066,0.097,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.0201,0.082,0.067,0.122,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.0201,0.122,0.084,0.145,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.0201,0.118,0.09,0.131,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.086,0.096,0.08939,0,0,1 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00232,0.0201,0.059,0.077,0.077,0,0,1 0.52,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.121,0.088,0.138,0,0,1 0.2,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0.0004,0.023,0.132,0.083,0.159,0,0,1 
0.2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00054,0.0201,0.075,0.071,0.106,0,0,1 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.017,0.15,0.087,0.172,0,0,1 0.43,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.02,0.183,0.104,0.174,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.009,0.102,0.071,0.144,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00939,0.0096,0.034,0.063,0.054,1,0,0 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.017,0.09,0.079,0.114,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.064,0.089,0.072,0,0,1 0.3,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.017,0.106,0.092,0.115,0,0,1 0.13,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.082,0.095,0.086,0,0,1 0.35,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.12,0.08,0.15,0,0,1 0.44,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.154,0.104,0.148,0,0,1 0.47,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.015,0.091,0.092,0.099,0,0,1 0.3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0026,0.022,0.14,0.133,0.105,0,0,1 0.14,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.025,0.119,0.073,0.163,0,0,1 0.17,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.016,0.138,0.084,0.164,0,0,1 0.79,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0029,0.008,0.074,0.072,0.103,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.0201,0.102,0.08,0.128,0,0,1 0.57,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.01679,0.017,0.034,0.097,0.035,0,0,1 0.65,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.024,0.137,0.099,0.138,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.134,0.101,0.133,0,0,1 0.66,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.015,0.108,0.089,0.121,0,0,1 0.78,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.001,0.01,0.102,0.069,0.148,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.0201,0.091,0.104,0.088,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00035,0.0201,0.132,0.104,0.127,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.077,0.089,0.087,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.115,0.097,0.119,0,0,1 0.48,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0029,0.0201,0.109,0.099,0.11,0,0,1 0.6,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0055,0.0201,0.105,0.081,0.13,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.0201,0.138,0.08,0.173,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.022,0.071,0.078,0.091,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.074,0.005,0.005,0.093,0.0055,1,0,0 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.0201,0.128,0.103,0.124,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.153,0.156,0.098,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.116,0.173,0.067,0,0,1 0.79,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0033,0.008,0.083,0.067,0.124,0,0,1 0.82,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00093,0.011,0.077,0.096,0.08004,0,0,1 0.76,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0013,0.036,0.204,0.179,0.114,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.097,0.083,0.117,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.097,0.066,0.147,0,0,1 0.09,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.032,0.125,0.085,0.147,0,0,1 0.66,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.005,0.02,0.106,0.104,0.101,0,0,1 0.35,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00047,0.014,0.042,0.073,0.058,0,0,1 0.33,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.114,0.09,0.127,0,0,1 0.32,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.113,0.104,0.109,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.068,0.093,0.073,0,0,1 0.27,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00097,0.027,0.096,0.08,0.12,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.019,0.084,0.067,0.125,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.022,0.098,0.077,0.127,0,0,1 
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.025,0.18,0.125,0.144,0,0,1 0.57,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.0201,0.165,0.099,0.167,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.075,0.095,0.079,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.1,0.11,0.091,0,0,1 0.85,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.032,0.0096,0.055,0.089,0.062,1,0,0 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.018,0.145,0.092,0.158,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0201,0.064,0.08,0.08,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.0201,0.098,0.088,0.111,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.0201,0.094,0.052,0.177,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.117,0.109,0.107,0,0,1 0.6,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.003,0.016,0.104,0.098,0.106,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.02,0.154,0.1,0.154,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0017,0.018,0.072,0.069,0.104,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.13,0.103,0.126,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.12,0.096,0.125,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.059,0.08,0.074,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.053,0.016,0.113,0.129,0.088,0,1,0 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.008,0.044,0.115,0.038,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.024,0.132,0.121,0.109,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.067,0.099,0.068,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0201,0.139,0.107,0.13,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.129,0.1,0.129,0,0,1 0.58,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0004,0.0201,0.077,0.081,0.095,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00089,0.0201,0.118,0.091,0.13,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0052,0.0201,0.065,0.107,0.061,0,0,1 0.59,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0.00419,0.025,0.09,0.102,0.088,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0208,0.087,0.089,0.098,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0201,0.119,0.088,0.135,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.057,0.078,0.073,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.145,0.113,0.128,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.0201,0.08,0.075,0.107,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.099,0.086,0.115,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.00095,0.0201,0.088,0.085,0.104,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0031,0.0201,0.064,0.09,0.071,0,0,1 0.49,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.024,0.18,0.125,0.144,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.016,0.108,0.083,0.13,0,0,1 0.51,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.0208,0.082,0.104,0.077,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0.011,0.045,0.091,0.049,1,0,0 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0201,0.105,0.096,0.109,0,0,1 0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.017,0.115,0.087,0.132,0,0,1 0.59,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.043,0.081,0.053,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.003,0.012,0.069,0.099,0.07,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,9e-05,0.0208,0.116,0.098,0.118,0,0,1 0.57,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0208,0.093,0.084,0.111,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.089,0.09,0.099,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.006,0.0201,0.135,0.096,0.141,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.083,0.065,0.128,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.08,0.077,0.104,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.0201,0.11,0.104,0.106,0,0,1 
0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.017,0.051,0.056,0.091,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.071,0.076,0.093,0,0,1 0.26,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00232,0.03,0.189,0.121,0.156,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.022,0.091,0.076,0.12,0,0,1 0.2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.182,0.143,0.127,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.0201,0.179,0.098,0.183,0,0,1 0.37,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.024,0.016,0.095,0.086,0.11,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.083,0.075,0.111,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.085,0.085,0.1,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.116,0.092,0.126,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0063,0.0096,0.059,0.098,0.06,1,0,0 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.137,0.096,0.143,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.085,0.069,0.123,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0201,0.087,0.097,0.09,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.073,0.087,0.084,0,0,1 0.31,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00469,0.026,0.073,0.097,0.075,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.136,0.158,0.086,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.0096,0.039,0.1,0.039,1,0,0 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0.005,0.005,0.119,0.004,1,0,0 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.085,0.073,0.116,0,0,1 0.6,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,2e-05,0.014,0.09,0.081,0.111,0,0,1 0.08,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00929,0.024,0.126,0.096,0.131,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00232,0.024,0.182,0.101,0.18,0,0,1 0.77,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0036,0.018,0.134,0.102,0.131,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00909,0.017,0.082,0.084,0.098,0,1,0 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.11329,0.096,0.11776,0,0,1 0.66,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0067,0.0201,0.056,0.073,0.077,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0069,0.017,0.095,0.104,0.09,0,1,0 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.066,0.093,0.071,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.022,0.09,0.084,0.107,0,0,1 0.56,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0024,0.0201,0.067,0.071,0.094,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.097,0.123,0.079,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.014,0.063,0.096,0.06548,0,0,1 0.65,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0052,0.0201,0.103,0.146,0.071,0,0,1 0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.095,0.077,0.123,0,0,1 0.51,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0027,0.02,0.115,0.09,0.128,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0072,0.017,0.081,0.09,0.09,0,1,0 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0072,0.0201,0.234,0.07,0.334,0,0,1 0.25,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.0201,0.162,0.142,0.114,0,0,1 0.35,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.093,0.076,0.122,0,0,1 0.28,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0201,0.157,0.091,0.173,0,0,1 0.69,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.109,0.093,0.117,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.006,0.011,0.048,0.062,0.077,0,0,1 0.59,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.018,0.106,0.116,0.091,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.0201,0.121,0.111,0.109,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.097,0.096,0.101,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0045,0.0201,0.097,0.085,0.114,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.0201,0.072,0.092,0.078,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0201,0.09,0.093,0.097,0,0,1 
0.26,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00056,0.0208,0.137,0.091,0.151,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.104,0.08,0.13,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0024,0.02,0.106,0.085,0.125,0,0,1 0.84,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00041,0.011,0.148,0.109,0.136,0,0,1 0.84,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.026,0.128,0.114,0.112,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.067,0.28,0.085,0.329,0,0,1 0.57,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,2e-05,0.0208,0.193,0.096,0.20062,0,0,1 0.72,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.084,0.094,0.089,0,0,1 0.65,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00064,0.0201,0.144,0.115,0.125,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.0201,0.093,0.088,0.106,0,0,1 0.62,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00062,0.0201,0.154,0.175,0.088,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00054,0.02,0.112,0.111,0.101,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00959,0.012,0.126,0.09,0.14,0,1,0 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0201,0.091,0.09,0.101,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0201,0.143,0.096,0.14864,0,0,1 0.8,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00055,0.004,0.09,0.071,0.127,0,0,1 0.74,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00099,0.019,0.121,0.109,0.111,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.17,0.15,0.113,0,0,1 0.38,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.022,0.145,0.101,0.144,0,0,1 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.024,0.122,0.092,0.133,0,0,1 0.63,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.018,0.094,0.088,0.107,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.02,0.099,0.11,0.09,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00094,0.018,0.083,0.082,0.101,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.094,0.109,0.086,0,0,1 0.66,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0036,0.019,0.082,0.113,0.073,0,0,1 0.77,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.039,0.012,0.048,0.114,0.042,0,0,1 0.35,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0.0025,0.0201,0.144,0.152,0.095,0,0,1 0.71,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.023,0.018,0.087,0.096,0.091,0,1,0 0.31,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.016,0.124,0.096,0.129,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.69,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0022,0.0201,0.087,0.08,0.109,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.027,0.101,0.11,0.092,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00099,0.0201,0.07,0.067,0.104,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0201,0.133,0.107,0.124,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.012,0.088,0.069,0.128,0,0,1 0.42,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00232,0.0208,0.11329,0.096,0.11776,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.0201,0.069,0.102,0.068,0,0,1 0.56,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.002,0.02,0.094,0.09,0.104,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.041,0.115,0.096,0.12,0,0,1 0.46,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.004,0.022,0.123,0.09,0.137,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.0201,0.123,0.099,0.124,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.121,0.121,0.1,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.026,0.117,0.104,0.111,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0201,0.066,0.077,0.086,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.02,0.09,0.08,0.113,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.024,0.164,0.112,0.146,0,0,1 0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.013,0.11,0.082,0.134,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.022,0.092,0.083,0.111,0,0,1 
0.82,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.0201,0.116,0.074,0.157,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.158,0.155,0.102,0,0,1 0.17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.074,0.077,0.096,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00232,0.0201,0.11329,0.096,0.11776,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.074,0.058,0.125,0,0,1 0.74,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0051,0.025,0.132,0.097,0.136,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00041,0.0201,0.117,0.114,0.103,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.0201,0.09,0.068,0.132,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0029,0.0201,0.143,0.091,0.157,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.011,0.119,0.088,0.135,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.02,0.093,0.098,0.095,0,0,1 0.82,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00058,0.009,0.096,0.076,0.126,0,0,1 0.57,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.025,0.066,0.091,0.073,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.09,0.082,0.11,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0005,0.018,0.111,0.083,0.134,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0201,0.08,0.075,0.107,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0201,0.078,0.07,0.111,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0037,0.0201,0.105,0.083,0.127,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.017,0.098,0.096,0.10187,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.024,0.102,0.102,0.1,0,0,1 0.74,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0036,0.02,0.084,0.078,0.108,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0201,0.098,0.093,0.105,0,0,1 0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0201,0.098,0.091,0.108,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.015,0.097,0.071,0.136,0,0,1 0.45,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0201,0.114,0.081,0.141,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.028,0.118,0.116,0.101,0,0,1 0.65,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,3e-05,0.022,0.146,0.104,0.138,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0008,0.022,0.091,0.079,0.115,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.0201,0.101,0.077,0.131,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0469,0.172,0.087,0.198,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.123,0.097,0.127,0,0,1 0.6,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.0201,0.11,0.087,0.126,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.125,0.078,0.16,0,0,1 0.46,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0017,0.0201,0.135,0.104,0.129,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.0201,0.117,0.09,0.13,0,0,1 0.81,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.013,0.107,0.085,0.126,0,0,1 0.59,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0018,0.0201,0.121,0.099,0.122,0,0,1 0.82,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.014,0.115,0.074,0.155,0,0,1 0.56,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00208,0.0201,0.13,0.112,0.116,0,0,1 0.72,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0.001,0.014,0.12,0.094,0.128,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.021,0.0096,0.06,0.116,0.051,1,0,0 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0201,0.077,0.083,0.093,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.023,0.083,0.081,0.102,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0069,0.017,0.109,0.103,0.106,0,1,0 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.0201,0.056,0.086,0.065,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.0201,0.127,0.094,0.135,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0201,0.096,0.079,0.122,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.0201,0.106,0.093,0.114,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0033,0.0201,0.113,0.093,0.122,0,0,1 
0.69,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00044,0.011,0.066,0.1,0.066,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0201,0.086,0.094,0.091,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.0201,0.096,0.079,0.122,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.018,0.082,0.093,0.088,0,0,1 0.56,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.00046,0.019,0.103,0.104,0.097,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.0201,0.11329,0.096,0.11776,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.015,0.104,0.095,0.109,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.0201,0.077,0.082,0.094,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0025,0.0208,0.079,0.099,0.08,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.106,0.006,0.005,0.089,0.0055,1,0,0 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00076,0.0201,0.09,0.067,0.134,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0201,0.09,0.089,0.101,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00056,0.0201,0.081,0.09,0.09,0,0,1 mlpack-2.2.5/src/mlpack/tests/data/thyroid_train.csv000066400000000000000000010225071315013601400224750ustar00rootroot000000000000000.73,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0006,0.015,0.12,0.082,0.146,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.03,0.143,0.133,0.108,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.024,0.102,0.131,0.078,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.017,0.077,0.09,0.085,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.026,0.139,0.09,0.153,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.016,0.086,0.07,0.123,0,0,1 0.85,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.023,0.128,0.104,0.121,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.02,0.086,0.078,0.11,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.024,0.087,0.109,0.08,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.029,0.124,0.128,0.097,0,0,1 0.62,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.011,0.008,0.073,0.074,0.098,0,1,0 0.18,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0001,0.023,0.098,0.085,0.115,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.023,0.094,0.099,0.09475,0,0,1 0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.023,0.113,0.102,0.111,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0023,0.02,0.063,0.095,0.066,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.018,0.09,0.071,0.126,0,0,1 0.39,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0006,0.02,0.114,0.1,0.114,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.018,0.078,0.092,0.085,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.032,0.014,0.085,0.116,0.071,0,0,1 0.5,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.061,0.0096,0.013,0.116,0.011,1,0,0 0.76,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0208,0.098,0.101,0.097,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.0206,0.086,0.091,0.095,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0006,0.026,0.136,0.119,0.114,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0208,0.087,0.093,0.094,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.024,0.1,0.08,0.124,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.014,0.124,0.12,0.103,0,0,1 0.78,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.025,0.009,0.05,0.084,0.06,1,0,0 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.013,0.108,0.079,0.137,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.28,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.006,0.12,0.104,0.113,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.017,0.083,0.086,0.097,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.025,0.11,0.1,0.11,0,0,1 
0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.022,0.09,0.098,0.091,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.018,0.119,0.098,0.12,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.032,0.207,0.157,0.132,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.028,0.122,0.11,0.111,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00959,0.048,0.168,0.028,0.612,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0079,0.0469,0.123,0.028,0.445,0,0,1 0.2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.72,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0001,0.025,0.18,0.083,0.217,0,0,1 0.78,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0001,0.006,0.07,0.09,0.177,0,0,1 0.61,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00979,0.004,0.081,0.064,0.126,0,1,0 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.015,0.106,0.079,0.134,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.014,0.095,0.081,0.118,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.019,0.104,0.093,0.112,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.018,0.108,0.071,0.151,0,0,1 0.53,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0206,0.129,0.108,0.119,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.0206,0.12,0.08,0.15,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.018,0.086,0.082,0.105,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.018,0.103,0.091,0.113,0,0,1 0.31,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.00189,0.03,0.158,0.101,0.156,0,0,1 0.54,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0004,0.0206,0.06,0.087,0.069,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0001,0.019,0.102,0.093,0.11,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.022,0.091,0.087,0.105,0,0,1 0.49,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.013,0.082,0.1,0.082,0,0,1 0.28,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0001,0.018,0.085,0.078,0.109,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.028,0.189,0.166,0.114,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.028,0.103,0.1,0.103,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.016,0.091,0.088,0.103,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.008,0.074,0.085,0.087,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,5e-05,0.036,0.094,0.102,0.092,0,0,1 0.67,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00208,0.02,0.083,0.096,0.087,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.02,0.079,0.062,0.126,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.017,0.093,0.078,0.119,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.013,0.087,0.089,0.097,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.022,0.107,0.112,0.096,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.032,0.11118,0.099,0.11207,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.015,0.104,0.099,0.10483,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0208,0.079,0.089,0.09,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.039,0.097,0.084,0.115,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.026,0.134,0.099,0.136,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.026,0.08,0.084,0.095,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.023,0.124,0.104,0.118,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.018,0.122,0.085,0.144,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0208,0.122,0.082,0.15,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.022,0.079,0.07,0.114,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0208,0.099,0.085,0.117,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.022,0.096,0.086,0.111,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.019,0.123,0.099,0.124,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.011,0.069,0.06,0.114,0,0,1 0.16,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5e-05,0.062,0.255,0.104,0.244,0,0,1 
0.28,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0025,0.026,0.179,0.155,0.115,0,0,1 0.56,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0082,0.02,0.066,0.094,0.07,0,1,0 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.026,0.113,0.091,0.123,0,0,1 0.55,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0003,0.0206,0.076,0.101,0.076,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0003,0.025,0.107,0.092,0.116,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.02,0.129,0.102,0.126,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.109,0.103,0.106,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.022,0.089,0.099,0.09,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.036,0.301,0.175,0.172,0,0,1 0.55,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0076,0.01,0.086,0.084,0.102,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.027,0.103,0.099,0.10383,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0048,0.024,0.114,0.084,0.135,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.016,0.095,0.099,0.096,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.019,0.146,0.119,0.123,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.02,0.121,0.104,0.116,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.014,0.119,0.098,0.121,0,0,1 0.67,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0002,0.009,0.075,0.073,0.102,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0208,0.106,0.094,0.113,0,0,1 0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.05,0.164,0.126,0.13,0,0,1 0.32,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00025,0.025,0.105,0.104,0.1,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.112,0.098,0.115,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.047,0.011,0.052,0.09,0.058,1,0,0 0.78,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0007,0.016,0.102,0.123,0.083,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.006,0.061,0.058,0.105,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.02,0.13,0.099,0.13103,0,0,1 0.74,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0013,0.012,0.089,0.094,0.095,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.02,0.094,0.097,0.098,0,0,1 0.69,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0002,0.025,0.124,0.115,0.108,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.018,0.157,0.09,0.175,0,0,1 0.38,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0039,0.022,0.061,0.076,0.08,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.144,0.094,0.154,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0.005,0.0029,0.104,0.0028,1,0,0 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.031,0.008,0.013,0.119,0.011,1,0,0 0.7,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0037,0.0206,0.069,0.083,0.083,0,0,1 0.7,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0009,0.0206,0.07,0.08,0.087,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.019,0.086,0.09,0.096,0,0,1 0.28,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.02,0.115,0.092,0.124,0,0,1 0.58,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0002,0.0206,0.256,0.09,0.283,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.019,0.134,0.098,0.137,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.019,0.098,0.089,0.11,0,0,1 0.63,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0.0002,0.009,0.11,0.082,0.134,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.0208,0.153,0.122,0.126,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.019,0.118,0.099,0.11895,0,0,1 0.36,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.024,0.179,0.136,0.131,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.016,0.122,0.081,0.152,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.197,0.108,0.182,0,0,1 0.43,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0026,0.018,0.137,0.102,0.134,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.018,0.123,0.097,0.126,0,0,1 
0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.011,0.093,0.073,0.127,0,0,1
0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.008,0.075,0.077,0.098,0,0,1
0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.024,0.087,0.094,0.093,0,0,1
0.61,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0002,0.038,0.252,0.113,0.222,0,0,1
0.37,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.116,0.09,0.128,0,0,1
0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.02,0.087,0.099,0.0877,0,0,1
0.26,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0.011,0.0206,0.089,0.104,0.086,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.051,0.187,0.116,0.16,0,0,1
0.84,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.007,0.137,0.084,0.164,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.012,0.059,0.083,0.071,0,0,1
0.62,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0002,0.008,0.124,0.09,0.137,0,0,1
0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.019,0.109,0.103,0.106,0,0,1
0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.022,0.105,0.091,0.115,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0063,0.011,0.056,0.086,0.065,0,1,0
0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.013,0.111,0.094,0.118,0,0,1
0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.025,0.004,0.039,0.061,0.064,1,0,0
0.49,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.001,0.016,0.093,0.094,0.099,0,0,1
0.01,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.033,0.156,0.167,0.093,0,0,1
0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.028,0.142,0.099,0.143,0,0,1
0.56,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.0022,0.014,0.088,0.077,0.114,0,0,1
0.18,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0208,0.12,0.112,0.107,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.023,0.112,0.108,0.104,0,0,1
0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0068,0.022,0.077,0.1,0.07692,0,1,0
0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.027,0.114,0.131,0.087,0,0,1
0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.0208,0.108,0.091,0.119,0,0,1
0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.025,0.142,0.125,0.113,0,0,1
0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.016,0.137,0.095,0.144,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.023,0.072,0.104,0.068,0,0,1
0.63,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.052,0.085,0.061,0,0,1
0.49,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0.0002,0.024,0.137,0.099,0.1381,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.68,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0016,0.025,0.099,0.109,0.09,0,0,1
0.73,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.003,0.019,0.106,0.099,0.10685,0,0,1
0.57,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0001,0.024,0.081,0.089,0.091,0,0,1
0.29,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.009,0.074,0.071,0.104,0,0,1
0.29,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0002,0.009,0.054,0.076,0.072,0,0,1
0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.016,0.084,0.087,0.097,0,0,1
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.024,0.129,0.098,0.131,0,0,1
0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.017,0.105,0.102,0.103,0,0,1
0.35,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.022,0.081,0.096,0.084,0,0,1
0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0206,0.062,0.094,0.066,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.024,0.102,0.112,0.092,0,0,1
0.48,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0.0001,0.0206,0.119,0.091,0.131,0,0,1
0.35,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.024,0.13,0.111,0.117,0,0,1
0.78,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.63,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0026,0.013,0.128,0.107,0.12,0,0,1
0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.027,0.159,0.119,0.134,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.018,0.08,0.077,0.104,0,0,1
0.24,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0033,0.012,0.07,0.087,0.08,0,0,1
0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.014,0.115,0.102,0.113,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.022,0.099,0.086,0.115,0,0,1
0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.017,0.111,0.094,0.118,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.016,0.129,0.107,0.121,0,0,1
0.57,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.001,0.016,0.073,0.097,0.075,0,0,1
0.57,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.022,0.15,0.101,0.149,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.02,0.074,0.069,0.107,0,0,1
0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.031,0.129,0.099,0.13003,0,0,1
0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.023,0.107,0.094,0.114,0,0,1
0.31,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.00189,0.029,0.159,0.143,0.111,0,0,1
0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.095,0.076,0.125,0,0,1
0.72,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.0206,0.186,0.124,0.15,0,0,1
0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.025,0.109,0.108,0.1,0,0,1
0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.014,0.12,0.087,0.138,0,0,1
0.34,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.039,0.2,0.136,0.147,0,0,1
0.29,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0001,0.031,0.142,0.177,0.08,0,0,1
0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.024,0.073,0.097,0.075,0,0,1
0.69,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,5e-05,0.026,0.093,0.088,0.106,0,0,1
0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.02,0.085,0.089,0.096,0,0,1
0.32,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0.0005,0.03,0.167,0.182,0.092,0,0,1
0.32,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0001,0.019,0.126,0.076,0.163,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.016,0.076,0.081,0.094,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.018,0.137,0.098,0.14,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.02,0.192,0.102,0.188,0,0,1
0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.022,0.085,0.104,0.08,0,0,1
0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.004,0.003,0.099,0.003,1,0,0
0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.152,0.113,0.135,0,0,1
0.25,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0049,0.0206,0.074,0.092,0.08,0,0,1
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0206,0.059,0.05,0.119,0,0,1
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.014,0.139,0.088,0.157,0,0,1
0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.023,0.1,0.088,0.113,0,0,1
0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.087,0.086,0.101,0,0,1
0.33,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.016,0.017,0.075,0.1,0.07492,0,1,0
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0208,0.077,0.072,0.107,0,0,1
0.78,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.001,0.023,0.096,0.104,0.092,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.028,0.13,0.116,0.111,0,0,1
0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.012,0.061,0.08,0.076,0,0,1
0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.017,0.076,0.065,0.116,0,0,1
0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.02,0.149,0.109,0.136,0,0,1
0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.022,0.085,0.107,0.079,0,0,1
0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.02,0.093,0.101,0.093,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0208,0.113,0.094,0.12,0,0,1
0.26,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.025,0.132,0.124,0.107,0,0,1
0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.018,0.105,0.09,0.117,0,0,1
0.92,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0003,0.0206,0.11118,0.099,0.11207,0,0,1
0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.022,0.097,0.101,0.097,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.018,0.094,0.082,0.115,0,0,1
0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.017,0.056,0.09,0.062,0,0,1
0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.023,0.146,0.1,0.146,0,0,1
0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.025,0.133,0.116,0.115,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0208,0.082,0.088,0.093,0,0,1
0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.015,0.121,0.08,0.153,0,0,1
0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.012,0.105,0.091,0.115,0,0,1
0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.029,0.125,0.102,0.122,0,0,1
0.7,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0003,0.018,0.098,0.093,0.105,0,0,1
0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.02,0.084,0.09,0.093,0,0,1
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.026,0.108,0.104,0.102,0,0,1
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0208,0.117,0.095,0.123,0,0,1
0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.025,0.135,0.102,0.132,0,0,1
0.41,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0003,0.023,0.111,0.083,0.134,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.018,0.093,0.119,0.078,0,0,1
0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.019,0.116,0.089,0.13,0,0,1
0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.017,0.089,0.104,0.086,0,0,1
0.32,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0.0003,0.03,0.101,0.114,0.089,0,0,1
0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.023,0.12,0.08,0.149,0,0,1
0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0208,0.119,0.103,0.116,0,0,1
0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.02,0.089,0.089,0.099,0,0,1
0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.023,0.134,0.082,0.163,0,0,1
0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.025,0.205,0.12,0.17,0,0,1
0.54,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.0206,0.096,0.128,0.075,0,0,1
0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0206,0.129,0.019,0.142,0,0,1
0.45,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.023,0.138,0.101,0.126,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.018,0.114,0.1,0.114,0,0,1
0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01479,0.015,0.061,0.085,0.072,0,1,0
0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.02,0.082,0.094,0.087,0,0,1
0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.007,0.0206,0.095,0.098,0.098,0,0,1
0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.031,0.11118,0.099,0.11207,0,0,1
0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.018,0.084,0.075,0.113,0,0,1
0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.026,0.106,0.104,0.101,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0208,0.123,0.103,0.119,0,0,1
0.58,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0013,0.019,0.066,0.096,0.068,0,0,1
0.57,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.32,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0018,0.022,0.101,0.113,0.09,0,0,1
0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.119,0.092,0.129,0,0,1
0.6,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0.0059,0.0206,0.126,0.113,0.111,0,0,1
0.77,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.0206,0.131,0.074,0.176,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.024,0.091,0.099,0.09173,0,0,1
0.73,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0008,0.019,0.121,0.101,0.119,0,0,1
0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.014,0.111,0.097,0.115,0,0,1
0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.022,0.108,0.089,0.121,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.019,0.102,0.087,0.118,0,0,1
0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.023,0.144,0.095,0.152,0,0,1
0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.024,0.093,0.092,0.101,0,0,1
0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.029,0.115,0.107,0.107,0,0,1
0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.026,0.11118,0.099,0.11207,0,0,1
0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.023,0.135,0.107,0.127,0,0,1
0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0206,0.109,0.111,0.098,0,0,1
0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.151,0.104,0.142,0,0,1
0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.118,0.095,0.124,0,0,1
0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.026,0.003,0.006,0.08,0.0076,1,0,0
0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0206,0.123,0.093,0.132,0,0,1
0.36,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00189,0.018,0.082,0.099,0.08266,0,0,1
0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.015,0.09,0.09,0.1,0,0,1
0.61,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00189,0.02,0.147,0.099,0.14817,0,0,1
0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.027,0.093,0.099,0.094,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.02,0.091,0.095,0.096,0,0,1
0.62,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.0206,0.084,0.09,0.094,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.129,0.104,0.123,0,0,1
0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.023,0.104,0.104,0.1,0,0,1
0.45,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0002,0.0208,0.101,0.084,0.121,0,0,1
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.083,0.104,0.079,0,0,1
0.45,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.02,0.133,0.098,0.136,0,0,1
0.38,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.02,0.078,0.085,0.092,0,0,1
0.73,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0002,0.025,0.136,0.089,0.153,0,0,1
0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.112,0.089,0.126,0,0,1
0.37,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.027,0.017,0.087,0.099,0.0877,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.019,0.095,0.11,0.087,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.089,0.099,0.08971,0,0,1
0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.01,0.068,0.088,0.078,0,0,1
0.81,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.02,0.097,0.12,0.081,0,0,1
0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.022,0.133,0.078,0.171,0,0,1
0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.013,0.11,0.112,0.098,0,0,1
0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.019,0.077,0.08,0.097,0,0,1
0.62,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.026,0.142,0.099,0.14314,0,0,1
0.27,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.026,0.1,0.099,0.1008,0,0,1
0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.043,0.187,0.096,0.194,0,0,1
0.51,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.071,0.087,0.082,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0016,0.018,0.107,0.111,0.097,0,0,1
0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.105,0.097,0.108,0,0,1
0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.018,0.095,0.109,0.087,0,0,1
0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.031,0.169,0.149,0.114,0,0,1
0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0208,0.103,0.103,0.1,0,0,1
0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.02,0.073,0.104,0.07,0,0,1
0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.019,0.152,0.081,0.188,0,0,1
0.25,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0.0001,0.017,0.092,0.089,0.103,0,0,1
0.49,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.027,0.141,0.116,0.12,0,0,1
0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0208,0.097,0.099,0.09778,0,0,1
0.73,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0016,0.02,0.105,0.116,0.089,0,0,1
0.12,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.027,0.093,0.099,0.09375,0,0,1
0.63,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0206,0.117,0.098,0.12,0,0,1
0.21,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.02,0.101,0.12,0.084,0,0,1
0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.02,0.066,0.093,0.071,0,0,1
0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.015,0.087,0.08,0.109,0,0,1
0.7,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.018,0.17,0.099,0.171,0,0,1
0.57,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0012,0.014,0.118,0.102,0.116,0,0,1
0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.012,0.078,0.072,0.108,0,0,1
0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.018,0.015,0.098,0.088,0.112,0,1,0
0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.09,0.088,0.102,0,0,1
0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.003,0.032,0.052,0.06,0,0,1
0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.103,0.109,0.095,0,0,1
0.21,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0004,0.034,0.189,0.135,0.14,0,0,1
0.19,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0419,0.128,0.098,0.13,0,0,1
0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.031,0.141,0.095,0.149,0,0,1
0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.09,0.1,0.09,0,0,1
0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.026,0.084,0.098,0.086,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0206,0.137,0.163,0.084,0,0,1
0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.023,0.094,0.09,0.105,0,0,1
0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.0206,0.115,0.09,0.128,0,0,1
0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0206,0.184,0.121,0.153,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.018,0.082,0.077,0.107,0,0,1
0.58,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0005,0.023,0.115,0.1,0.115,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.02,0.069,0.096,0.071,0,1,0
0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.024,0.12,0.098,0.123,0,0,1
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.02,0.144,0.1,0.145,0,0,1
0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.133,0.116,0.115,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.024,0.077,0.099,0.078,0,0,1
0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0208,0.074,0.09,0.083,0,0,1
0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.024,0.101,0.116,0.087,0,0,1
0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.012,0.146,0.093,0.157,0,0,1
0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.02,0.105,0.104,0.1,0,0,1
0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.024,0.112,0.103,0.109,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.0206,0.23,0.104,0.217,0,0,1
0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.025,0.151,0.121,0.125,0,0,1
0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.003,0.073,0.085,0.086,0,0,1
0.67,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.106,0.092,0.115,0,0,1
0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.023,0.104,0.099,0.10483,0,0,1
0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.018,0.128,0.1,0.128,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.018,0.085,0.092,0.092,0,0,1
0.74,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.003,0.012,0.096,0.093,0.103,0,0,1
0.66,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0001,0.006,0.121,0.085,0.143,0,0,1
0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.019,0.092,0.092,0.101,0,0,1
0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.015,0.12,0.092,0.13,0,0,1
0.39,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0.004,0.017,0.063,0.099,0.0635,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.028,0.094,0.088,0.106,0,0,1
0.61,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0004,0.0206,0.113,0.104,0.106,0,0,1
0.41,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0004,0.0206,0.091,0.096,0.095,0,0,1
0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.025,0.098,0.101,0.096,0,0,1
0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.019,0.104,0.101,0.104,0,0,1
0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.0208,0.098,0.099,0.1,0,0,1
0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.042,0.002,0.0048,0.09,0.0054,1,0,0
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.016,0.082,0.08,0.104,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.024,0.142,0.112,0.127,0,0,1
0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.034,0.21,0.104,0.199,0,0,1
0.56,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,5e-05,0.019,0.137,0.109,0.126,0,0,1
0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,5e-05,0.023,0.095,0.089,0.106,0,0,1
0.85,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.012,0.093,0.079,0.117,0,0,1
0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.011,0.095,0.073,0.129,0,0,1
0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.026,0.119,0.122,0.097,0,0,1
0.16,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0005,0.032,0.137,0.116,0.119,0,0,1
0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.015,0.077,0.079,0.097,0,0,1
0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.011,0.108,0.066,0.165,0,0,1
0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.023,0.112,0.1,0.112,0,0,1
0.54,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,5e-05,0.012,0.112,0.077,0.145,0,0,1
0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.0206,0.11118,0.099,0.11207,0,0,1
0.84,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0206,0.154,0.097,0.159,0,0,1
0.28,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.029,0.171,0.12,0.143,0,0,1
0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00809,0.018,0.059,0.096,0.061,1,0,0
0.57,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0009,0.015,0.091,0.078,0.116,0,0,1
0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.024,0.061,0.082,0.075,0,0,1
0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.041,0.158,0.142,0.111,0,0,1
0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.027,0.108,0.104,0.103,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.028,0.123,0.123,0.1,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.022,0.088,0.096,0.092,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.025,0.088,0.095,0.093,0,0,1
0.64,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.145,0.093,0.155,0,0,1
0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.016,0.12,0.091,0.132,0,0,1
0.57,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00189,0.0206,0.088,0.086,0.102,0,0,1
0.69,1,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0.0003,0.026,0.089,0.11,0.081,0,0,1
0.68,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.017,0.144,0.096,0.15,0,0,1
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.015,0.091,0.079,0.115,0,0,1
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.015,0.085,0.094,0.09,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.011,0.171,0.109,0.157,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.022,0.138,0.124,0.111,0,0,1
0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.196,0.094,0.209,0,0,1
0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.024,0.133,0.1,0.132,0,0,1
0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.018,0.093,0.079,0.117,0,0,1
0.49,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.012,0.089,0.109,0.082,0,0,1
0.59,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0006,0.015,0.133,0.119,0.112,0,0,1
0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.01,0.051,0.084,0.061,1,0,0
0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.008,0.087,0.089,0.098,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.016,0.091,0.104,0.086,0,0,1
0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.0206,0.175,0.112,0.156,0,0,1
0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.019,0.099,0.112,0.088,0,0,1
0.4,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.32,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00015,0.024,0.135,0.115,0.118,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.027,0.1,0.104,0.096,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.023,0.081,0.092,0.088,0,0,1
0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.02,0.143,0.128,0.112,0,0,1
0.57,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.032,0.149,0.116,0.127,0,0,1
0.68,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.015,0.074,0.072,0.103,0,0,1
0.89,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.006,0.101,0.073,0.138,0,0,1
0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.4,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00015,0.03,0.21,0.148,0.142,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.026,0.092,0.09,0.102,0,0,1
0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.017,0.107,0.094,0.113,0,0,1
0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.02,0.113,0.097,0.116,0,0,1
0.51,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0019,0.022,0.074,0.091,0.081,0,0,1
0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.023,0.101,0.099,0.102,0,0,1
0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.022,0.085,0.113,0.075,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.0206,0.117,0.111,0.105,0,0,1
0.12,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.03,0.176,0.089,0.197,0,0,1
0.15,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.023,0.183,0.089,0.205,0,0,1
0.58,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0032,0.0208,0.075,0.104,0.072,0,0,1
0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.019,0.072,0.084,0.086,0,0,1
0.2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0009,0.024,0.123,0.099,0.125,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.009,0.083,0.078,0.106,0,0,1
0.62,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.012,0.015,0.105,0.086,0.122,0,1,0
0.74,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0006,0.016,0.095,0.107,0.089,0,0,1
0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0208,0.096,0.101,0.095,0,0,1
0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.007,0.069,0.077,0.089,0,0,1
0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0003,0.019,0.117,0.108,0.108,0,0,1
0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.029,0.152,0.116,0.13,0,0,1
0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.022,0.107,0.094,0.113,0,0,1
0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.022,0.082,0.094,0.087,0,0,1
0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.026,0.12,0.124,0.096,0,0,1
0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.024,0.107,0.104,0.101,0,0,1
0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.017,0.081,0.087,0.094,0,0,1
0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0206,0.123,0.074,0.166,0,0,1
0.57,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.018,0.071,0.072,0.099,0,0,1
0.84,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.026,0.0206,0.073,0.103,0.072,0,0,1
0.68,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0011,0.018,0.089,0.083,0.105,0,0,1
0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.017,0.104,0.101,0.103,0,0,1
0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.079,0.079,0.1,0,0,1
0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.025,0.098,0.092,0.107,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.029,0.087,0.11,0.079,0,0,1
0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.022,0.096,0.094,0.102,0,0,1
0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.019,0.089,0.088,0.102,0,0,1
0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.009,0.031,0.131,0.123,0.106,0,1,0
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.024,0.103,0.114,0.09,0,0,1
0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0208,0.069,0.081,0.086,0,0,1
0.6,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.001,0.0208,0.097,0.11,0.09,0,0,1
0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.007,0.071,0.074,0.096,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.016,0.11,0.098,0.112,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.006,0.113,0.089,0.127,0,0,1
0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.025,0.134,0.113,0.119,0,0,1
0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.028,0.103,0.11,0.094,0,0,1
0.77,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.012,0.0174,0.077,0.104,0.073,0,1,0
0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.022,0.1,0.097,0.104,0,0,1
0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.029,0.113,0.113,0.1,0,0,1
0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.019,0.099,0.079,0.125,0,0,1
0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.0208,0.056,0.096,0.058,0,0,1
0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.0208,0.091,0.098,0.092,0,0,1
0.56,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0014,0.023,0.111,0.097,0.115,0,0,1
0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.024,0.111,0.112,0.099,0,0,1
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.013,0.119,0.09,0.133,0,0,1
0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.016,0.09,0.097,0.093,0,0,1
0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.071,0.093,0.076,0,0,1
0.75,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.0002,0.034,0.103,0.132,0.078,0,0,1
0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.018,0.076,0.08,0.095,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.022,0.108,0.088,0.122,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.012,0.084,0.078,0.107,0,0,1
0.17,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0006,0.026,0.102,0.097,0.106,0,0,1
0.22,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0002,0.023,0.147,0.101,0.145,0,0,1
0.26,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0022,0.029,0.085,0.108,0.079,0,0,1
0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.0206,0.258,0.114,0.227,0,0,1
0.18,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00959,0.024,0.22,0.168,0.131,0,0,1
0.48,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.098,0.019,0.029,0.111,0.026,1,0,0
0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.024,0.115,0.111,0.104,0,0,1
0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.021,0.008,0.053,0.086,0.061,1,0,0
0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.015,0.13,0.086,0.151,0,0,1
0.26,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0002,0.039,0.122,0.128,0.096,0,0,1
0.33,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.023,0.11,0.101,0.109,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.011,0.07,0.092,0.076,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.017,0.076,0.086,0.088,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.024,0.109,0.099,0.10987,0,0,1
0.3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0003,0.018,0.117,0.097,0.121,0,0,1
0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.017,0.086,0.091,0.094,0,0,1
0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.053,0.181,0.104,0.174,0,0,1
0.39,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.028,0.131,0.126,0.104,0,0,1
0.59,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.019,0.144,0.093,0.156,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.012,0.07,0.071,0.099,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.014,0.083,0.086,0.096,0,0,1
0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0003,0.0208,0.1,0.104,0.096,0,0,1
0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.015,0.09,0.114,0.079,0,0,1
0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.023,0.072,0.084,0.085,0,0,1
0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.022,0.084,0.084,0.1,0,0,1
0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0009,0.017,0.095,0.095,0.1,0,0,1
0.59,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0003,0.016,0.112,0.075,0.148,0,0,1
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.099,0.103,0.097,0,0,1
0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0208,0.093,0.096,0.097,0,0,1
0.66,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0006,0.022,0.123,0.093,0.132,0,0,1
0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.023,0.089,0.094,0.095,0,0,1
0.28,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0206,0.209,0.108,0.194,0,0,1
0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.029,0.016,0.047,0.097,0.048,1,0,0
0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.024,0.117,0.102,0.115,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.083,0.099,0.084,0,0,1
0.17,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0009,0.014,0.111,0.102,0.109,0,0,1
0.17,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0003,0.015,0.101,0.081,0.125,0,0,1
0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0208,0.112,0.099,0.1129,0,0,1
0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.03,0.143,0.132,0.108,0,0,1
0.74,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.021,0.018,0.126,0.116,0.107,0,0,1
0.34,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00189,0.0206,0.103,0.138,0.075,0,0,1
0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.082,0.093,0.088,0,0,1
0.46,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.022,0.187,0.11,0.17,0,0,1
0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0064,0.023,0.064,0.098,0.065,0,1,0
0.36,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0206,0.11,0.094,0.117,0,0,1
0.7,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.0206,0.162,0.112,0.146,0,0,1
0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.026,0.119,0.12,0.1,0,0,1
0.59,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.025,0.09,0.085,0.106,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.0208,0.086,0.081,0.107,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.022,0.121,0.101,0.121,0,0,1
0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.017,0.065,0.068,0.097,0,1,0
0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023,0.005,0.033,0.067,0.049,1,0,0
0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.018,0.003,0.044,0.056,0.077,0,1,0
0.85,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0073,0.024,0.066,0.109,0.061,0,0,1
0.41,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.02,0.098,0.067,0.131,0,0,1
0.15,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.027,0.096,0.104,0.091,0,0,1
0.35,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0003,0.02,0.075,0.121,0.062,0,0,1
0.73,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00208,0.028,0.109,0.132,0.083,0,0,1
0.65,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.035,0.217,0.098,0.221,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.051,0.167,0.1,0.168,0,0,1
0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.024,0.149,0.116,0.126,0,0,1
0.81,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0005,0.022,0.102,0.085,0.119,0,0,1
0.9,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.017,0.118,0.082,0.144,0,0,1
0.47,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.043,0.189,0.107,0.176,0,0,1
0.63,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.0206,0.079,0.081,0.097,0,0,1
0.32,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.001,0.024,0.121,0.12,0.101,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.02,0.088,0.099,0.0887,0,0,1
0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.027,0.119,0.112,0.106,0,0,1
0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0025,0.029,0.086,0.119,0.072,0,0,1
0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.017,0.094,0.099,0.09475,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.028,0.084,0.099,0.086,0,0,1
0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00015,0.016,0.094,0.087,0.108,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.0206,0.11118,0.099,0.11207,0,0,1
0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.016,0.086,0.074,0.111,0,0,1
0.54,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0006,0.018,0.1,0.077,0.13,0,0,1
0.39,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.063,0.087,0.072,0,0,1
0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.065,0.007,0.028,0.1,0.028,1,0,0
0.24,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.027,0.116,0.082,0.143,0,0,1
0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.022,0.099,0.082,0.12,0,0,1
0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.03,0.161,0.093,0.174,0,0,1
0.55,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.01,0.0208,0.065,0.1,0.06493,0,1,0
0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.025,0.109,0.075,0.145,0,0,1
0.22,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.001,0.023,0.087,0.089,0.098,0,0,1
0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.023,0.081,0.101,0.08,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.019,0.065,0.1,0.06493,0,1,0
0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.028,0.199,0.146,0.137,0,0,1
0.29,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00015,0.036,0.144,0.176,0.082,0,0,1
0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.51,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.026,0.11118,0.099,0.11207,0,0,1
0.59,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0001,0.025,0.1,0.098,0.103,0,0,1
0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.006,0.019,0.114,0.1,0.114,0,0,1
0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.014,0.075,0.074,0.101,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.016,0.074,0.089,0.083,0,0,1
0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00015,0.029,0.132,0.114,0.116,0,0,1
0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.008,0.09,0.077,0.117,0,0,1
0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.0206,0.248,0.15,0.165,0,0,1
0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.022,0.151,0.091,0.166,0,0,1
0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.0208,0.105,0.102,0.1,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.043,0.144,0.162,0.089,0,0,1
0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.13,0.124,0.105,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.023,0.12,0.103,0.117,0,0,1
0.28,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0206,0.138,0.121,0.115,0,0,1
0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.067,0.093,0.073,0,0,1
0.89,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.024,0.105,0.095,0.11,0,0,1
0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.022,0.104,0.102,0.102,0,0,1
0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.017,0.162,0.087,0.186,0,0,1
0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.019,0.121,0.104,0.116,0,0,1
0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.0208,0.1,0.104,0.095,0,0,1
0.3,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.018,0.096,0.093,0.103,0,0,1
0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.035,0.12,0.122,0.098,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0002,0.027,0.079,0.098,0.081,0,0,1
0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.01,0.088,0.094,0.094,0,0,1
0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.066,0.071,0.093,0,0,1
0.6,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.012,0.022,0.071,0.14,0.051,1,0,0
0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.007,0.082,0.088,0.093,0,0,1
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.016,0.134,0.12,0.112,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.009,0.072,0.077,0.094,0,0,1
0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.022,0.14,0.114,0.123,0,0,1
0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.007,0.091,0.083,0.11,0,0,1
0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0067,0.02,0.11,0.116,0.094,0,0,1
0.21,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.0002,0.025,0.108,0.113,0.096,0,0,1
0.54,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.101,0.064,0.158,0,0,1
0.21,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.095,0.096,0.099,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0208,0.086,0.092,0.093,0,0,1
0.66,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0038,0.0208,0.079,0.107,0.074,0,0,1
0.57,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.016,0.141,0.089,0.158,0,0,1
0.57,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.121,0.079,0.153,0,0,1
0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.026,0.197,0.101,0.195,0,0,1
0.57,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.022,0.104,0.107,0.097,0,0,1
0.28,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.001,0.023,0.116,0.113,0.103,0,0,1
0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.091,0.101,0.09,0,0,1
0.21,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.021,0.012,0.072,0.1,0.07192,0,1,0
0.43,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0.0035,0.017,0.099,0.114,0.087,0,0,1
0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.019,0.101,0.108,0.094,0,0,1
0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.14,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.091,0.093,0.098,0,0,1
0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.031,0.134,0.1,0.134,0,0,1
0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.013,0.119,0.111,0.107,0,0,1
0.65,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0005,0.0206,0.09,0.109,0.083,0,0,1
0.53,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00025,0.0208,0.145,0.103,0.141,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00025,0.017,0.083,0.092,0.09,0,0,1
0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.02,0.077,0.075,0.103,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.023,0.077,0.099,0.07762,0,0,1
0.34,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00025,0.067,0.23,0.173,0.133,0,0,1
0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.02,0.134,0.079,0.17,0,0,1
0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.004,0.038,0.108,0.035,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.022,0.089,0.099,0.08971,0,0,1
0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.133,0.098,0.136,0,0,1
0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.016,0.103,0.09,0.114,0,0,1
0.34,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0006,0.023,0.076,0.104,0.073,0,0,1
0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.023,0.123,0.115,0.106,0,0,1
0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.023,0.083,0.097,0.085,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.02,0.083,0.087,0.096,0,0,1
0.63,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0028,0.02,0.093,0.104,0.089,0,0,1
0.75,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.013,0.088,0.085,0.103,0,0,1
0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0.004,0.01,0.1,0.01,1,0,0
0.54,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.019,0.095,0.088,0.109,0,0,1
0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.019,0.076,0.083,0.092,0,0,1
0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.018,0.093,0.09,0.103,0,0,1
0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.035,0.014,0.078,0.098,0.08,0,1,0
0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.019,0.094,0.093,0.101,0,0,1
0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.013,0.12,0.143,0.083,0,0,1
0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.018,0.134,0.102,0.131,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.012,0.081,0.099,0.082,0,0,1
0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.012,0.086,0.085,0.101,0,1,0
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.082,0.073,0.112,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.0208,0.078,0.082,0.095,0,0,1
0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.023,0.087,0.094,0.093,0,0,1
0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.014,0.115,0.097,0.118,0,0,1
0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.02,0.106,0.104,0.101,0,0,1
0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0206,0.09,0.116,0.077,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.02,0.116,0.099,0.117,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.0206,0.098,0.102,0.096,0,0,1
0.41,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0.0002,0.0206,0.43,0.109,0.395,0,0,1
0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.015,0.061,0.091,0.067,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0208,0.139,0.1,0.139,0,0,1
0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.015,0.048,0.071,0.067,0,0,1
0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.023,0.103,0.108,0.095,0,0,1
0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.0208,0.102,0.093,0.11,0,0,1
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.018,0.099,0.103,0.096,0,0,1
0.82,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0002,0.014,0.113,0.085,0.132,0,0,1
0.2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0002,0.03,0.135,0.129,0.105,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.089,0.089,0.1,0,0,1
0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.094,0.091,0.103,0,0,1
0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.61,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00015,0.0208,0.097,0.103,0.095,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.018,0.093,0.095,0.098,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.101,0.101,0.1,0,0,1
0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.001,0.05,0.031,0.161,0,0,1
0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.022,0.072,0.116,0.062,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.02,0.109,0.109,0.1,0,0,1
0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.016,0.101,0.109,0.092,0,0,1
0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.103,0.114,0.091,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.015,0.113,0.104,0.106,0,0,1
0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.019,0.083,0.067,0.124,0,0,1
0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0208,0.055,0.085,0.065,0,0,1
0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.031,0.107,0.104,0.103,0,0,1
0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.023,0.15,0.142,0.106,0,0,1
0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0001,0.028,0.138,0.113,0.122,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.026,0.02,0.069,0.097,0.071,0,1,0
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.014,0.089,0.099,0.09,0,0,1
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.022,0.12,0.103,0.117,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.025,0.087,0.079,0.109,0,0,1
0.73,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0026,0.016,0.089,0.094,0.095,0,0,1
0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.022,0.098,0.103,0.096,0,0,1
0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.01,0.073,0.082,0.089,0,0,1
0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.026,0.098,0.095,0.103,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.019,0.106,0.104,0.1,0,0,1
0.82,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.014,0.133,0.1,0.134,0,0,1
0.41,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.073,0.43,0.109,0.395,0,0,1
0.2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.025,0.093,0.1,0.093,0,0,1
0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.019,0.069,0.099,0.06955,0,0,1
0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0184,0.02,0.105,0.1,0.10489,0,1,0
0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.02,0.113,0.084,0.134,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.024,0.093,0.104,0.089,0,0,1
0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.025,0.087,0.098,0.088,0,0,1
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.025,0.141,0.128,0.11,0,0,1
0.16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.017,0.088,0.089,0.099,0,0,1
0.87,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0002,0.015,0.132,0.086,0.153,0,0,1
0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.016,0.094,0.093,0.101,0,0,1
0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.016,0.07,0.093,0.075,0,0,1
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.007,0.078,0.082,0.095,0,0,1
0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.018,0.102,0.099,0.103,0,0,1
0.64,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.00189,0.061,0.214,0.094,0.228,0,0,1
0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.019,0.072,0.104,0.068,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.031,0.151,0.14,0.108,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.027,0.12,0.097,0.123,0,0,1
0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.065,0.093,0.07,0,0,1
0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.052,0.104,0.049,0,0,1
0.41,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0002,0.038,0.253,0.124,0.204,0,0,1
0.34,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.028,0.16,0.151,0.106,0,0,1
0.63,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0.0002,0.011,0.187,0.095,0.197,0,0,1
0.42,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0062,0.017,0.072,0.089,0.081,0,1,0
0.48,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.003,0.0206,0.09,0.114,0.079,0,0,1
0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.029,0.09,0.102,0.088,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.022,0.099,0.107,0.092,0,0,1
0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.08,0.091,0.088,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.012,0.119,0.098,0.121,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.015,0.089,0.112,0.079,0,1,0
0.56,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0.0002,0.025,0.138,0.115,0.119,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.031,0.11,0.108,0.102,0,0,1
0.66,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0.0002,0.018,0.091,0.112,0.081,0,0,1
0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.0206,0.127,0.119,0.107,0,0,1
0.37,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.027,0.009,0.087,0.084,0.105,0,1,0
0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.029,0.107,0.101,0.106,0,0,1
0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.027,0.109,0.11,0.099,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.017,0.067,0.087,0.077,0,1,0
0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.02,0.101,0.109,0.092,0,0,1
0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.018,0.06,0.075,0.08,0,0,1
0.33,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0002,0.015,0.157,0.091,0.172,0,0,1
0.26,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0003,0.0206,0.151,0.197,0.076,0,0,1
0.72,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0063,0.015,0.094,0.092,0.102,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0075,0.016,0.082,0.108,0.077,0,1,0
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.026,0.121,0.104,0.115,0,0,1
0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.019,0.062,0.077,0.081,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.036,0.006,0.023,0.068,0.034,1,0,0
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.023,0.085,0.081,0.105,0,0,1
0.84,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.015,0.096,0.082,0.117,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.024,0.095,0.095,0.1,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0006,0.025,0.122,0.104,0.116,0,0,1
0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.014,0.121,0.081,0.149,0,0,1
0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.012,0.07,0.104,0.066,0,0,1
0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.006,0.11,0.083,0.133,0,0,1
0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.064,0.078,0.082,0,0,1
0.55,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0009,0.023,0.117,0.095,0.123,0,0,1
0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.097,0.095,0.102,0,0,1
0.29,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.00189,0.016,0.086,0.078,0.111,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.014,0.129,0.107,0.12,0,0,1
0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.015,0.133,0.084,0.159,0,0,1
0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.022,0.138,0.097,0.143,0,0,1
0.58,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.074,0.082,0.09,0,0,1
0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.015,0.107,0.079,0.135,0,0,1
0.75,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00025,0.034,0.151,0.113,0.134,0,0,1
0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.029,0.101,0.1,0.1,0,0,1
0.54,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0023,0.02,0.078,0.104,0.075,0,0,1
0.76,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00025,0.008,0.078,0.075,0.104,0,0,1
0.7,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0018,0.015,0.07,0.086,0.081,0,0,1
0.33,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.69,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.16,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.03,0.012,0.03,0.145,0.021,0,0,1
0.72,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.075,0.101,0.074,0,0,1
0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0208,0.172,0.101,0.171,0,0,1
0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0082,0.0208,0.08,0.102,0.078,0,1,0
0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.184,0.086,0.214,0,0,1
0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.29,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.02,0.127,0.092,0.138,0,0,1
0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.013,0.12,0.084,0.143,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.014,0.092,0.092,0.101,0,0,1
0.36,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.018,0.124,0.075,0.166,0,0,1
0.23,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,5e-05,0.033,0.232,0.134,0.174,0,0,1
0.34,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0004,0.0206,0.109,0.091,0.12,0,0,1
0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.026,0.122,0.116,0.105,0,0,1
0.71,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0011,0.0206,0.108,0.104,0.104,0,0,1
0.13,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.067,0.093,0.072,0,0,1
0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.023,0.088,0.084,0.106,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.11,0.096,0.114,0,0,1
0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.016,0.086,0.093,0.092,0,0,1
0.15,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.01,0.084,0.139,0.06,1,0,0
0.19,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0.0004,0.024,0.103,0.091,0.114,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.02,0.114,0.085,0.134,0,0,1
0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.016,0.09,0.097,0.093,0,0,1
0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.008,0.08,0.077,0.104,0,0,1
0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.004,0.098,0.073,0.133,0,0,1
0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.018,0.095,0.095,0.101,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.019,0.076,0.088,0.086,0,0,1
0.5,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.018,0.109,0.077,0.141,0,0,1
0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.0206,0.105,0.092,0.114,0,0,1
0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0002,0.035,0.172,0.099,0.173,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.02,0.118,0.092,0.129,0,0,1
0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.016,0.128,0.099,0.129,0,0,1
0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.0208,0.095,0.089,0.107,0,0,1
0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0005,0.022,0.089,0.104,0.084,0,0,1
0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.028,0.117,0.098,0.114,0,0,1
0.59,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0003,0.024,0.086,0.09,0.096,0,0,1
0.24,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.0206,0.166,0.159,0.104,0,0,1
0.34,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,5e-05,0.0419,0.213,0.194,0.11,0,0,1
0.27,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.002,0.025,0.09,0.104,0.085,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.031,0.169,0.121,0.14,0,0,1
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.031,0.206,0.159,0.129,0,0,1
0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.109,0.002,0.003,0.102,0.003,1,0,0
0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0206,0.087,0.091,0.095,0,0,1
0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.037,0.152,0.093,0.164,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.013,0.101,0.099,0.10181,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.022,0.117,0.086,0.136,0,0,1
0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.02,0.142,0.1,0.142,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.024,0.103,0.097,0.107,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0075,0.041,0.139,0.165,0.084,0,1,0
0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.025,0.131,0.091,0.144,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.029,0.116,0.116,0.099,0,0,1
0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.025,0.101,0.09,0.112,0,0,1
0.66,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5e-05,0.018,0.133,0.102,0.13,0,0,1
0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.029,0.1,0.102,0.098,0,0,1
0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.03,0.09,0.096,0.094,0,0,1
0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.026,0.09,0.099,0.091,0,0,1
0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.156,0.123,0.126,0,0,1
0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.087,0.079,0.109,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.106,0.128,0.084,0,0,1
0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.086,0.093,0.092,0,0,1
0.34,0,0,0,0,0,1,0,0,0,1,0,0,1,0,0,0.00025,0.039,0.139,0.127,0.11,0,0,1
0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.026,0.089,0.098,0.091,0,0,1
0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.016,0.135,0.086,0.157,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.026,0.132,0.103,0.128,0,0,1
0.37,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00025,0.02,0.083,0.114,0.072,0,0,1
0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.023,0.134,0.084,0.16,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0208,0.103,0.091,0.113,0,0,1
0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0206,0.102,0.103,0.093,0,0,1
0.6,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.04,0.068,0.1,0.067,0,0,1
0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.018,0.089,0.089,0.1,0,0,1
0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.12,0.098,0.122,0,0,1
0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00025,0.02,0.114,0.09,0.128,0,0,1
0.85,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.011,0.132,0.085,0.154,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.024,0.118,0.088,0.135,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00859,0.037,0.131,0.165,0.079,0,1,0
0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.0208,0.093,0.09,0.104,0,0,1
0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.02,0.091,0.078,0.117,0,0,1
0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.023,0.103,0.104,0.098,0,0,1
0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.016,0.107,0.104,0.102,0,0,1
0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.019,0.091,0.103,0.089,0,0,1
0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.019,0.08,0.085,0.094,0,0,1
0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.106,0.102,0.105,0,0,1
0.19,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0006,0.023,0.1,0.094,0.106,0,0,1
0.35,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.02,0.121,0.081,0.148,0,0,1
0.59,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00025,0.024,0.109,0.097,0.113,0,0,1
0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.023,0.147,0.109,0.135,0,0,1
0.83,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0022,0.009,0.069,0.074,0.093,0,0,1
0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0074,0.014,0.108,0.108,0.1,0,1,0
0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0061,0.013,0.083,0.088,0.095,0,1,0
0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.022,0.103,0.095,0.109,0,0,1
0.84,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.0208,0.11,0.104,0.106,0,0,1
0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.006,0.139,0.086,0.161,0,0,1
0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.023,0.139,0.104,0.131,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.0206,0.135,0.066,0.206,0,0,1
0.28,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.019,0.075,0.112,0.067,0,0,1
0.21,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.028,0.143,0.125,0.114,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.019,0.159,0.104,0.153,0,0,1
0.36,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.0206,0.101,0.086,0.117,0,0,1
0.26,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0.0012,0.04,0.168,0.157,0.106,0,0,1
0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00879,0.023,0.106,0.125,0.085,0,1,0
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.023,0.18,0.13,0.139,0,0,1
0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.019,0.087,0.087,0.1,0,0,1
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.023,0.084,0.092,0.092,0,0,1
0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.02,0.097,0.099,0.098,0,0,1
0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.025,0.125,0.122,0.102,0,0,1
0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.017,0.132,0.093,0.141,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.034,0.091,0.097,0.094,0,0,1
0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.015,0.1,0.086,0.116,0,0,1
0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.022,0.124,0.081,0.152,0,0,1
0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.029,0.099,0.114,0.088,0,0,1
0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.015,0.146,0.116,0.124,0,0,1
0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.024,0.137,0.116,0.116,0,0,1
0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.017,0.095,0.079,0.121,0,0,1
0.6,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0038,0.022,0.076,0.13,0.058,0,0,1
0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.016,0.066,0.1,0.065,0,1,0
0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.134,0.103,0.131,0,0,1
0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.023,0.077,0.08,0.096,0,0,1
0.55,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.018,0.052,0.095,0.055,0,0,1
0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.045,0.097,0.099,0.09778,0,0,1
0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.0206,0.116,0.11,0.105,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0062,0.0174,0.086,0.09,0.095,0,1,0
0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.022,0.093,0.108,0.087,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.018,0.084,0.098,0.086,0,0,1
0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.015,0.1,0.12,0.083,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.02,0.107,0.095,0.113,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.159,0.107,0.149,0,0,1
0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.026,0.092,0.089,0.103,0,0,1
0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.02,0.119,0.086,0.138,0,0,1
0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0208,0.091,0.099,0.092,0,0,1
0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.01,0.092,0.056,0.164,0,0,1
0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.023,0.086,0.108,0.08,0,0,1
0.51,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.001,0.022,0.069,0.104,0.066,0,0,1
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.0005,0.106,0.114,0.093,0,0,1
0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00959,0.019,0.079,0.1,0.08,0,1,0
0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.025,0.098,0.104,0.092,0,0,1
0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.028,0.141,0.111,0.127,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.032,0.187,0.174,0.107,0,0,1
0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.033,0.132,0.121,0.11,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.03,0.125,0.074,0.169,0,0,1
0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.009,0.073,0.099,0.07358,0,0,1
0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.126,0.013,0.039,0.108,0.037,1,0,0
0.58,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0017,0.024,0.133,0.115,0.116,0,0,1
0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.019,0.093,0.084,0.111,0,0,1
0.8,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0002,0.018,0.111,0.097,0.114,0,0,1
0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.014,0.048,0.104,0.046,0,0,1
0.56,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0002,0.0208,0.136,0.099,0.13709,0,0,1
0.71,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.0002,0.045,0.125,0.082,0.152,0,0,1
0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.103,0.104,0.098,0,0,1
0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.018,0.094,0.084,0.111,0,0,1
0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.019,0.109,0.101,0.108,0,0,1
0.59,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0.0002,0.043,0.14,0.168,0.083,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.0206,0.088,0.082,0.107,0,0,1
0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.027,0.087,0.107,0.081,0,0,1
0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.023,0.099,0.104,0.095,0,0,1
0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.018,0.104,0.111,0.094,0,0,1
0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.025,0.091,0.116,0.078,0,0,1
0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.01,0.046,0.066,0.07,0,0,1
0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.012,0.097,0.09,0.107,0,0,1
0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.014,0.08,0.088,0.091,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.028,0.087,0.1,0.087,0,0,1
0.57,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.025,0.099,0.112,0.089,0,0,1
0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.025,0.104,0.12,0.086,0,0,1
0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.0469,0.172,0.183,0.094,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.019,0.083,0.089,0.093,0,0,1
0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.003,0.073,0.078,0.094,0,0,1
0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.029,0.128,0.112,0.114,0,0,1
0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.025,0.084,0.104,0.079,0,0,1
0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.015,0.063,0.068,0.092,0,0,1
0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.022,0.07,0.085,0.082,0,0,1
0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.023,0.091,0.093,0.098,0,0,1
0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.007,0.11,0.093,0.118,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.008,0.044,0.084,0.052,0,0,1
0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.024,0.101,0.116,0.086,0,0,1
0.6,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0002,0.0208,0.105,0.109,0.097,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.029,0.09,0.1,0.09,0,0,1
0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.032,0.123,0.129,0.096,0,0,1
0.39,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0002,0.022,0.106,0.075,0.141,0,0,1
0.23,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0006,0.0206,0.078,0.107,0.073,0,0,1
0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.098,0.072,0.136,0,0,1
0.21,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.037,0.162,0.147,0.11,0,0,1
0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.18,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.024,0.086,0.101,0.085,0,0,1
0.11,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.191,0.123,0.156,0,0,1
0.36,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0206,0.187,0.169,0.111,0,0,1
0.32,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0041,0.0206,0.117,0.151,0.078,0,0,1
0.32,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0002,0.017,0.085,0.084,0.101,0,0,1
0.57,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0002,0.02,0.091,0.116,0.077,0,0,1
0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.023,0.07,0.099,0.071,0,0,1
0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.023,0.092,0.104,0.088,0,0,1
0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.028,0.146,0.11,0.133,0,0,1
0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0206,0.134,0.103,0.13,0,0,1
0.76,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0009,0.022,0.105,0.109,0.096,0,0,1
0.73,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.016,0.143,0.104,0.134,0,0,1
0.87,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0002,0.034,0.164,0.082,0.2,0,0,1
0.29,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.015,0.095,0.09,0.106,0,0,1
0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.024,0.099,0.121,0.082,0,0,1
0.61,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0.0037,0.024,0.077,0.124,0.062,0,0,1
0.1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.04,0.09,0.111,0.081,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0009,0.028,0.126,0.129,0.098,0,0,1
0.11,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.0208,0.116,0.104,0.109,0,0,1
0.34,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0.0002,0.05,0.223,0.169,0.131,0,0,1
0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.024,0.126,0.092,0.136,0,0,1
0.52,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.023,0.116,0.094,0.123,0,0,1
0.72,1,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.00189,0.028,0.12,0.116,0.102,0,0,1
0.31,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.036,0.176,0.143,0.123,0,0,1
0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.086,0.098,0.088,0,0,1
0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.013,0.077,0.065,0.118,0,0,1
0.52,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0005,0.016,0.08,0.082,0.098,0,0,1
0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.014,0.092,0.088,0.105,0,0,1
0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.023,0.112,0.101,0.111,0,0,1
0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0208,0.077,0.091,0.084,0,0,1
0.63,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0008,0.019,0.098,0.089,0.111,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.015,0.092,0.108,0.085,0,0,1
0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.014,0.078,0.082,0.096,0,0,1
0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.043,0.003,0.014,0.103,0.01347,1,0,0
0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.022,0.117,0.099,0.11794,0,0,1
0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.082,0.002,0.0058,0.103,0.00558,1,0,0
0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.02,0.112,0.101,0.111,0,0,1
0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.025,0.111,0.097,0.114,0,0,1
0.25,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0.00025,0.034,0.101,0.099,0.10181,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.024,0.102,0.077,0.134,0,0,1
0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.011,0.046,0.099,0.104,0,0,1
0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.02,0.099,0.08,0.124,0,0,1
0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.014,0.086,0.075,0.114,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.023,0.102,0.1,0.102,0,0,1
0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.022,0.075,0.09,0.084,0,0,1
0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.013,0.062,0.075,0.083,0,0,1
0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.025,0.102,0.096,0.106,0,0,1
0.7,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0006,0.026,0.144,0.12,0.12,0,0,1
0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.018,0.102,0.082,0.124,0,0,1
0.49,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.081,0.114,0.071,0,0,1
0.46,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0097,0.005,0.037,0.072,0.051,1,0,0
0.64,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0006,0.018,0.112,0.104,0.106,0,0,1
0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0206,0.104,0.09,0.116,0,0,1
0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.28,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.028,0.1,0.099,0.1008,0,0,1
0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.135,0.11,0.123,0,0,1
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0208,0.11118,0.099,0.11207,0,0,1
0.76,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0008,0.011,0.094,0.093,0.101,0,0,1
0.64,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.014,0.117,0.097,0.121,0,0,1
0.17,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.117,0.092,0.126,0,0,1
0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.015,0.114,0.099,0.115,0,0,1
0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.023,0.124,0.116,0.106,0,0,1
0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.008,0.103,0.073,0.141,0,0,1
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.014,0.079,0.077,0.103,0,0,1
0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.015,0.088,0.087,0.101,0,0,1
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.022,0.081,0.09,0.09,0,0,1
0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.015,0.085,0.073,0.116,0,0,1
0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.015,0.088,0.084,0.105,0,0,1
0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.024,0.1,0.1,0.1,0,0,1
0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.027,0.094,0.114,0.082,0,0,1
0.21,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0.00025,0.013,0.054,0.107,0.051,0,0,1
0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.081,0.094,0.086,0,0,1
0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.092,0.091,0.101,0,0,1
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0076,0.0419,0.164,0.155,0.106,0,0,1
0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.013,0.093,0.101,0.091,0,0,1
0.61,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0006,0.01,0.096,0.093,0.104,0,0,1
0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.012,0.113,0.101,0.111,0,0,1
0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.018,0.114,0.092,0.124,0,0,1
0.59,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0009,0.036,0.112,0.116,0.096,0,0,1
0.59,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.03,0.119,0.103,0.115,0,0,1
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.022,0.12,0.085,0.141,0,0,1
0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.028,0.082,0.089,0.093,0,0,1
0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.027,0.107,0.086,0.125,0,0,1
0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.017,0.103,0.096,0.107,0,0,1
0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.019,0.099,0.072,0.139,0,0,1
0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.031,0.16,0.09,0.177,0,0,1
0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.025,0.095,0.1,0.095,0,0,1
0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.045,0.161,0.165,0.097,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.024,0.08,0.098,0.081,0,0,1
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.02,0.106,0.087,0.121,0,0,1
0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.072,0.069,0.104,0,0,1
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.024,0.09,0.092,0.098,0,0,1
0.73,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.018,0.198,0.1,0.198,0,0,1
0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.017,0.072,0.107,0.068,0,1,0
0.23,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0009,0.029,0.136,0.138,0.099,0,0,1
0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.027,0.101,0.096,0.105,0,0,1
0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.019,0.165,0.095,0.174,0,0,1
0.52,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.035,0.0208,0.077,0.11,0.07,0,1,0
0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.029,0.125,0.096,0.13,0,0,1 0.63,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.0206,0.161,0.107,0.151,0,0,1 0.77,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.017,0.113,0.094,0.12,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.076,0.12,0.104,0.116,0,0,1 0.76,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0066,0.01,0.091,0.104,0.088,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.037,0.142,0.113,0.126,0,0,1 0.52,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.027,0.133,0.104,0.127,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.009,0.092,0.071,0.129,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.098,0.097,0.101,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.003,0.079,0.08,0.098,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.167,0.143,0.117,0,0,1 0.36,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,4e-05,0.0206,0.149,0.165,0.09,0,0,1 0.74,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0014,0.0206,0.109,0.096,0.114,0,0,1 0.7,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.0206,0.124,0.096,0.129,0,0,1 0.51,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.077,0.077,0.099,0,0,1 0.84,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0206,0.213,0.099,0.216,0,0,1 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0069,0.016,0.125,0.102,0.123,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00025,0.023,0.099,0.095,0.104,0,0,1 0.55,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.00859,0.003,0.073,0.076,0.096,0,1,0 0.41,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.15,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.014,0.103,0.085,0.12,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.025,0.123,0.115,0.107,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.012,0.103,0.079,0.131,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.016,0.06,0.065,0.092,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.124,0.108,0.115,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.018,0.136,0.092,0.149,0,0,1 0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.097,0.088,0.11,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.34,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,4e-05,0.029,0.144,0.158,0.091,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0.003,0.0095,0.103,0.00914,1,0,0 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.02,0.139,0.102,0.137,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.026,0.086,0.088,0.098,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.022,0.098,0.129,0.076,0,1,0 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.006,0.016,0.099,0.095,0.104,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.022,0.097,0.104,0.094,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.031,0.136,0.107,0.127,0,0,1 0.7,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00016,0.019,0.175,0.099,0.17641,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.0208,0.094,0.095,0.099,0,0,1 0.7,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.00208,0.017,0.131,0.1,0.13,0,0,1 0.7,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0013,0.018,0.094,0.086,0.11,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.009,0.084,0.087,0.096,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.011,0.089,0.075,0.118,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.003,0.046,0.058,0.079,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.02,0.119,0.092,0.129,0,0,1 
0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.016,0.097,0.078,0.125,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00026,0.01,0.057,0.089,0.064,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.029,0.096,0.103,0.093,0,0,1 0.73,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,2e-05,0.015,0.077,0.094,0.082,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00039,0.023,0.102,0.099,0.10282,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.019,0.117,0.1,0.118,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.0208,0.123,0.091,0.135,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0079,0.022,0.076,0.101,0.075,0,1,0 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.019,0.022,0.083,0.103,0.081,0,1,0 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0208,0.079,0.087,0.091,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.017,0.104,0.076,0.137,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.028,0.131,0.095,0.138,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0208,0.08,0.089,0.091,0,0,1 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.0206,0.118,0.094,0.126,0,0,1 0.5,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0.04,0.022,0.075,0.101,0.074,0,1,0 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00023,0.034,0.183,0.098,0.186,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.024,0.096,0.098,0.099,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.141,0.133,0.106,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.016,0.102,0.094,0.109,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.0206,0.149,0.104,0.143,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.108,0.103,0.105,0,0,1 0.81,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.013,0.091,0.087,0.104,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.013,0.136,0.097,0.14,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.032,0.129,0.077,0.168,0,0,1 0.17,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.028,0.02,0.086,0.099,0.087,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.02,0.098,0.088,0.112,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.024,0.111,0.1,0.111,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.017,0.097,0.079,0.122,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.023,0.092,0.095,0.097,0,0,1 0.71,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0019,0.015,0.135,0.104,0.127,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0061,0.023,0.088,0.091,0.097,0,1,0 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0092,0.025,0.106,0.113,0.094,0,1,0 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.019,0.141,0.109,0.129,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.17,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.02,0.08,0.072,0.11,0,0,1 0.16,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0206,0.113,0.095,0.119,0,0,1 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.0206,0.103,0.112,0.093,0,0,1 0.32,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,2e-05,0.037,0.139,0.134,0.104,0,0,1 0.43,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00095,0.019,0.101,0.081,0.125,0,0,1 0.78,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00099,0.017,0.114,0.096,0.119,0,0,1 0.62,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0.013,0.011,0.087,0.081,0.107,0,0,1 0.62,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0065,0.018,0.097,0.129,0.076,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.011,0.069,0.064,0.107,0,0,1 0.55,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.024,0.095,0.104,0.09,0,0,1 0.55,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0019,0.023,0.098,0.103,0.095,0,0,1 
0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.011,0.095,0.097,0.098,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.014,0.098,0.097,0.11,0,0,1 0.08,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.35,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.031,0.026,0.046,0.1,0.046,1,0,0 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.073,0.077,0.095,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00093,0.028,0.155,0.101,0.153,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151,0.01,0.032,0.116,0.028,1,0,0 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.016,0.105,0.08,0.131,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.022,0.092,0.096,0.096,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.019,0.113,0.103,0.11,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.0208,0.083,0.119,0.069,0,0,1 0.58,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1e-05,0.033,0.129,0.123,0.105,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.0208,0.159,0.113,0.14,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.018,0.114,0.098,0.12,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.017,0.14,0.096,0.147,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0058,0.013,0.092,0.094,0.098,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00043,0.029,0.118,0.142,0.083,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0066,0.022,0.099,0.116,0.085,0,1,0 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.028,0.141,0.099,0.142,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.098,0.004,0.0058,0.08,0.007,1,0,0 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.098,0.083,0.119,0,0,1 0.19,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0057,0.019,0.104,0.11,0.094,0,0,1 0.43,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.0206,0.16,0.103,0.156,0,0,1 0.76,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00065,0.015,0.111,0.096,0.116,0,0,1 0.54,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00208,0.025,0.128,0.112,0.114,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0063,0.017,0.146,0.122,0.119,0,1,0 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.028,0.225,0.168,0.134,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00025,0.02,0.035,0.099,0.03528,0,0,1 0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.023,0.181,0.111,0.164,0,0,1 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00042,0.019,0.153,0.104,0.147,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.025,0.167,0.119,0.14,0,0,1 0.22,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.151,0.104,0.144,0,0,1 0.39,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.002,0.023,0.102,0.116,0.088,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.022,0.104,0.111,0.093,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.017,0.109,0.093,0.117,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.161,0.09,0.18,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.02,0.101,0.081,0.124,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.03,0.068,0.085,0.08,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.015,0.093,0.087,0.107,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.024,0.127,0.084,0.152,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.018,0.095,0.082,0.116,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.0206,0.115,0.12,0.096,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0206,0.113,0.098,0.116,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0058,0.018,0.098,0.087,0.112,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.015,0.078,0.089,0.088,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.017,0.101,0.093,0.109,0,0,1 0.46,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0017,0.0206,0.139,0.11,0.127,0,0,1 
0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00094,0.02,0.102,0.086,0.119,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0031,0.025,0.149,0.109,0.137,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00071,0.043,0.129,0.162,0.079,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.06,0.25,0.08,0.312,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.017,0.16,0.111,0.149,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00079,0.015,0.11,0.088,0.125,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.022,0.068,0.099,0.06854,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.018,0.088,0.099,0.0887,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.012,0.098,0.099,0.09879,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.016,0.121,0.113,0.107,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0037,0.023,0.1,0.111,0.09,0,0,1 0.73,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.008,0.061,0.049,0.124,0,0,1 0.67,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00052,0.014,0.129,0.096,0.134,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.051,0.004,0.012,0.126,0.01,1,0,0 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.045,0.014,0.039,0.116,0.033,1,0,0 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.034,0.136,0.073,0.185,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.112,0.097,0.116,0,0,1 0.53,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.043,0.25,0.101,0.247,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.018,0.173,0.1,0.173,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.02,0.094,0.096,0.098,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.139,0.009,0.036,0.09,0.04,1,0,0 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.015,0.096,0.08,0.121,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00042,0.019,0.109,0.077,0.138,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.019,0.092,0.087,0.105,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.015,0.102,0.095,0.108,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0208,0.069,0.074,0.093,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.01,0.12,0.056,0.213,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.018,0.143,0.087,0.164,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00085,0.023,0.141,0.114,0.125,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.023,0.1,0.111,0.09,0,0,1 0.81,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0019,0.015,0.071,0.078,0.091,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0012,0.0206,0.115,0.097,0.114,0,0,1 0.73,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0039,0.009,0.062,0.054,0.115,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.02,0.096,0.095,0.101,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.017,0.141,0.115,0.123,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.02,0.067,0.073,0.092,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00035,0.02,0.093,0.103,0.09,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.022,0.099,0.104,0.096,0,0,1 0.67,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00042,0.014,0.14,0.098,0.143,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.0206,0.094,0.097,0.097,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0.025,0.09159,0.1,0.0915,0,1,0 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.073,0.091,0.081,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.023,0.101,0.095,0.106,0,0,1 0.25,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0014,0.016,0.078,0.088,0.089,0,0,1 0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.0206,0.181,0.121,0.15,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.019,0.103,0.099,0.104,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.023,0.136,0.107,0.127,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.017,0.111,0.102,0.108,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.008,0.091,0.086,0.106,0,0,1 
0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.016,0.114,0.093,0.123,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.019,0.108,0.104,0.103,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.017,0.106,0.1,0.106,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.024,0.106,0.097,0.109,0,0,1 0.48,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0.00077,0.011,0.081,0.086,0.095,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0041,0.04,0.159,0.162,0.098,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.017,0.098,0.108,0.09,0,0,1 0.18,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.00083,0.029,0.068,0.116,0.058,0,0,1 0.3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00071,0.031,0.125,0.139,0.09,0,0,1 0.29,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.0014,0.034,0.147,0.149,0.099,0,0,1 0.62,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0033,0.02,0.116,0.104,0.11,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.01,0.089,0.091,0.098,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0023,0.004,0.073,0.06,0.122,0,0,1 0.73,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0032,0.015,0.069,0.08,0.086,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.026,0.086,0.091,0.094,0,0,1 0.59,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0028,0.0206,0.134,0.104,0.128,0,0,1 0.53,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1e-05,0.0206,0.176,0.102,0.171,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.019,0.135,0.102,0.132,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.111,0.095,0.117,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.118,0.104,0.112,0,0,1 0.43,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0.0065,0.018,0.06,0.089,0.067,0,1,0 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00064,0.017,0.102,0.076,0.134,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.016,0.125,0.104,0.118,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.024,0.094,0.096,0.098,0,0,1 0.63,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0018,0.02,0.152,0.116,0.129,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.022,0.118,0.089,0.132,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.019,0.11118,0.099,0.11207,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.002,0.087,0.054,0.161,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.032,0.16,0.153,0.105,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.019,0.088,0.08,0.11,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.023,0.125,0.104,0.117,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.027,0.122,0.083,0.147,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.183,0.014,0.014,0.131,0.011,1,0,0 0.53,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.016,0.115,0.09,0.128,0,0,1 0.53,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00051,0.024,0.124,0.096,0.129,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.017,0.093,0.098,0.094,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00054,0.018,0.16,0.11,0.146,0,0,1 0.61,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.061,0.078,0.081,0,0,1 0.4,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.142,0.091,0.156,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.022,0.164,0.101,0.162,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.048,0.032,0.099,0.03225,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.028,0.11,0.099,0.111,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00074,0.01,0.082,0.086,0.095,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.034,0.133,0.084,0.159,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.091,0.08,0.113,0,0,1 0.55,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1e-05,0.055,0.219,0.08,0.274,0,0,1 0.46,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.019,0.116,0.09,0.129,0,0,1 0.16,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.0206,0.065,0.073,0.09,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.02,0.095,0.108,0.088,0,0,1 
0.46,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.017,0.132,0.102,0.13,0,0,1 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.018,0.107,0.11,0.098,0,0,1 0.74,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0069,0.0174,0.097,0.087,0.112,0,1,0 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00033,0.023,0.099,0.098,0.101,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.023,0.122,0.08,0.152,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0208,0.149,0.102,0.147,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0076,0.023,0.131,0.115,0.113,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0061,0.022,0.074,0.1,0.07392,0,1,0 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.018,0.143,0.091,0.157,0,0,1 0.67,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0006,0.0206,0.076,0.086,0.088,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.023,0.105,0.097,0.109,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.019,0.139,0.096,0.145,0,0,1 0.68,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0037,0.006,0.065,0.088,0.074,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.34,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1e-05,0.038,0.205,0.184,0.111,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.019,0.118,0.107,0.11,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00028,0.0208,0.116,0.114,0.102,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.022,0.172,0.11,0.157,0,0,1 0.47,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.0419,0.214,0.145,0.148,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.029,0.123,0.131,0.094,0,0,1 0.48,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.00042,0.025,0.093,0.114,0.082,0,0,1 0.68,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.022,0.142,0.099,0.143,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0026,0.033,0.121,0.108,0.112,0,0,1 0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.048,0.022,0.099,0.02217,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.031,0.11,0.104,0.104,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.017,0.14,0.077,0.182,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.016,0.083,0.089,0.093,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.016,0.124,0.101,0.122,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00019,0.014,0.097,0.087,0.111,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.008,0.013,0.101,0.093,0.108,0,1,0 0.76,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.002,0.014,0.107,0.077,0.139,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.011,0.124,0.085,0.145,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.026,0.13,0.113,0.115,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00034,0.0206,0.095,0.099,0.095,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.0208,0.098,0.112,0.087,0,0,1 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.031,0.168,0.098,0.171,0,0,1 0.49,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1e-05,0.0206,0.126,0.083,0.152,0,0,1 0.29,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0016,0.022,0.087,0.095,0.092,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.013,0.157,0.091,0.172,0,0,1 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.02,0.156,0.116,0.134,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0029,0.019,0.09,0.097,0.093,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00077,0.028,0.11,0.091,0.121,0,0,1 0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0206,0.16,0.109,0.147,0,0,1 0.26,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.025,0.114,0.109,0.104,0,0,1 0.61,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.002,0.017,0.107,0.099,0.107,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0208,0.076,0.079,0.096,0,0,1 0.72,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.01,0.101,0.08,0.127,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00054,0.017,0.09,0.083,0.109,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00025,0.018,0.12,0.085,0.14,0,0,1 
0.74,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.002,0.022,0.109,0.091,0.119,0,0,1 0.49,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1e-05,0.0206,0.137,0.08,0.17,0,0,1 0.65,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1e-05,0.019,0.088,0.081,0.109,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.02,0.078,0.1,0.07792,0,1,0 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00014,0.053,0.129,0.099,0.13003,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.017,0.06,0.077,0.078,0,0,1 0.21,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.0024,0.035,0.171,0.149,0.115,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.018,0.131,0.098,0.133,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0208,0.142,0.107,0.133,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00078,0.019,0.085,0.11,0.077,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00095,0.02,0.109,0.115,0.094,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00017,0.012,0.075,0.088,0.085,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00044,0.025,0.089,0.097,0.092,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0.018,0.072,0.087,0.082,0,1,0 0.82,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0013,0.014,0.131,0.108,0.121,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.008,0.099,0.079,0.126,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00086,0.019,0.078,0.085,0.091,0,0,1 0.62,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0022,0.028,0.122,0.113,0.109,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0097,0.011,0.077,0.079,0.097,0,1,0 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.114,0.11,0.104,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.013,0.067,0.088,0.076,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.028,0.091,0.099,0.09173,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.024,0.11,0.099,0.111,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.023,0.135,0.107,0.126,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0044,0.026,0.108,0.112,0.096,0,0,1 0.74,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0033,0.012,0.091,0.092,0.099,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0071,0.0208,0.079,0.096,0.083,0,1,0 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.09,0.099,0.091,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0208,0.187,0.089,0.21,0,0,1 0.41,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1e-05,0.025,0.022,0.099,0.02217,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.027,0.099,0.088,0.113,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.00064,0.0208,0.092,0.099,0.093,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.013,0.146,0.089,0.165,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.119,0.095,0.125,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.0208,0.084,0.086,0.098,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.022,0.088,0.09,0.098,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.022,0.128,0.075,0.172,0,0,1 0.25,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0007,0.0206,0.183,0.141,0.13,0,0,1 0.62,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.02,0.103,0.095,0.109,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00049,0.0206,0.172,0.115,0.15,0,0,1 0.65,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0.0013,0.0208,0.162,0.119,0.137,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00029,0.0206,0.098,0.103,0.095,0,0,1 0.71,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00016,0.024,0.165,0.135,0.123,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.025,0.087,0.111,0.078,0,0,1 0.78,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00064,0.017,0.113,0.099,0.115,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.023,0.093,0.082,0.113,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.65,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.002,0.0208,0.154,0.099,0.156,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.02,0.141,0.104,0.134,0,0,1 
0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0206,0.121,0.096,0.127,0,0,1 0.7,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0.00015,0.015,0.085,0.091,0.094,0,0,1 0.3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0005,0.022,0.124,0.131,0.095,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.023,0.133,0.11,0.121,0,0,1 0.53,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.078,0.0206,0.078,0.116,0.067,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.023,0.15,0.125,0.12,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0208,0.084,0.087,0.096,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.018,0.142,0.104,0.135,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.022,0.106,0.091,0.117,0,0,1 0.72,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00082,0.015,0.083,0.076,0.117,0,0,1 0.82,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0032,0.014,0.076,0.086,0.088,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0075,0.0208,0.089,0.114,0.078,0,1,0 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0.158,0.087,0.183,0,0,1 0.73,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00039,0.0206,0.104,0.097,0.107,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.055,0.006,0.015,0.089,0.017,1,0,0 0.68,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00096,0.0206,0.104,0.1,0.104,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.018,0.147,0.111,0.132,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.008,0.051,0.088,0.058,1,0,0 0.61,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0064,0.024,0.141,0.126,0.112,0,1,0 0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.012,0.103,0.088,0.118,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.02,0.095,0.085,0.111,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.029,0.111,0.092,0.121,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0206,0.12,0.102,0.118,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.098,0.084,0.117,0,0,1 0.63,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0014,0.014,0.09,0.091,0.1,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.018,0.094,0.076,0.124,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.014,0.088,0.086,0.102,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.022,0.12,0.096,0.126,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.017,0.144,0.094,0.153,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.014,0.091,0.097,0.094,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.016,0.1,0.088,0.113,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.019,0.093,0.097,0.096,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.025,0.114,0.111,0.103,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00057,0.018,0.128,0.114,0.112,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.022,0.078,0.077,0.102,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.018,0.103,0.089,0.115,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.032,0.084,0.084,0.1,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.025,0.097,0.091,0.107,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0208,0.132,0.113,0.116,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.022,0.1,0.098,0.102,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.025,0.105,0.101,0.104,0,0,1 0.28,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.0024,0.0208,0.125,0.099,0.126,0,0,1 0.72,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.35,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.018,0.1,0.081,0.123,0,0,1 0.56,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0013,0.018,0.104,0.099,0.10483,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.011,0.122,0.113,0.108,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.023,0.135,0.097,0.14,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.023,0.132,0.094,0.14,0,0,1 0.39,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00025,0.029,0.136,0.144,0.094,0,0,1 
0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151,0.014,0.042,0.104,0.039,1,0,0 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.11118,0.099,0.11207,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.012,0.087,0.072,0.121,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.032,0.108,0.107,0.1,0,0,1 0.58,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00058,0.015,0.083,0.086,0.097,0,0,1 0.54,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00046,0.03,0.116,0.104,0.11,0,0,1 0.72,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0054,0.018,0.087,0.091,0.095,0,0,1 0.2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.013,0.077,0.099,0.07762,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00809,0.025,0.09,0.095,0.095,0,1,0 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.096,0.078,0.124,0,0,1 0.23,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.03,0.174,0.135,0.129,0,0,1 0.23,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1e-05,0.04,0.204,0.084,0.242,0,0,1 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.027,0.113,0.11,0.102,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.024,0.084,0.124,0.068,0,1,0 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.034,0.091,0.11,0.083,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.02,0.125,0.101,0.124,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00075,0.009,0.082,0.062,0.133,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.03,0.117,0.097,0.12,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.0174,0.101,0.108,0.094,0,1,0 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.022,0.178,0.126,0.142,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.02,0.151,0.104,0.144,0,0,1 0.24,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00078,0.025,0.148,0.123,0.12,0,0,1 0.7,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0085,0.0206,0.127,0.104,0.121,0,0,1 0.83,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00829,0.0208,0.104,0.104,0.097,0,1,0 0.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.472,0.017,0.034,0.116,0.029,1,0,0 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.007,0.054,0.085,0.063,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.015,0.088,0.094,0.094,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.022,0.095,0.1,0.095,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.008,0.093,0.058,0.16,0,0,1 0.63,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00074,0.0206,0.074,0.077,0.097,0,0,1 0.5,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0014,0.018,0.126,0.107,0.117,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.009,0.093,0.097,0.097,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.024,0.123,0.114,0.108,0,0,1 0.63,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.023,0.111,0.104,0.105,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.014,0.089,0.099,0.089,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00051,0.0206,0.09,0.081,0.111,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.119,0.099,0.12,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.016,0.082,0.082,0.1,0,1,0 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.022,0.165,0.089,0.186,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.024,0.122,0.112,0.109,0,0,1 0.6,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00889,0.0174,0.075,0.097,0.077,0,1,0 0.53,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0039,0.0206,0.114,0.116,0.097,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.026,0.125,0.099,0.126,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.103,0.073,0.142,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.016,0.083,0.07,0.118,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.016,0.094,0.08,0.119,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0029,0.025,0.074,0.099,0.075,0,0,1 0.67,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00084,0.033,0.222,0.182,0.122,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.017,0.071,0.072,0.099,0,0,1 
0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.022,0.086,0.109,0.079,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.027,0.084,0.103,0.081,0,0,1 0.6,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0017,0.017,0.109,0.084,0.13,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.0208,0.129,0.108,0.12,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.019,0.14,0.096,0.145,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.018,0.105,0.092,0.114,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.012,0.089,0.074,0.121,0,1,0 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.027,0.136,0.127,0.107,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.071,0.223,0.1,0.222,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0023,0.016,0.075,0.093,0.08,0,0,1 0.67,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.022,0.121,0.102,0.118,0,0,1 0.59,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.004,0.013,0.12,0.094,0.128,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0053,0.01,0.097,0.065,0.15,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.019,0.092,0.093,0.098,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.025,0.165,0.09,0.184,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.018,0.095,0.09,0.106,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.0206,0.111,0.1,0.112,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.02,0.103,0.09,0.114,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.14,0.104,0.132,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.023,0.09,0.081,0.112,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.063,0.066,0.096,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.009,0.087,0.078,0.112,0,0,1 0.45,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.009,0.028,0.237,0.116,0.203,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.023,0.082,0.088,0.093,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0078,0.02,0.095,0.099,0.096,0,1,0 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.026,0.098,0.093,0.105,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.02,0.113,0.103,0.11,0,0,1 0.74,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00028,0.013,0.138,0.083,0.166,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.02,0.087,0.061,0.143,0,0,1 0.41,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.026,0.151,0.116,0.13,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.017,0.076,0.081,0.093,0,0,1 0.54,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.0206,0.12,0.101,0.118,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.026,0.091,0.1,0.0909,0,1,0 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.019,0.089,0.085,0.104,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.016,0.09,0.089,0.101,0,1,0 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.023,0.083,0.089,0.094,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.024,0.081,0.075,0.109,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.018,0.07,0.086,0.081,0,0,1 0.23,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0041,0.019,0.066,0.085,0.077,0,0,1 0.5,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.018,0.113,0.101,0.112,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0206,0.112,0.092,0.121,0,0,1 0.7,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0023,0.027,0.095,0.107,0.088,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.025,0.231,0.092,0.251,0,0,1 0.62,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0208,0.13,0.099,0.131,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.019,0.115,0.098,0.118,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.013,0.086,0.088,0.099,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.113,0.11,0.103,0,0,1 0.47,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,2e-05,0.019,0.111,0.093,0.119,0,0,1 
0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0025,0.023,0.09,0.091,0.099,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.019,0.063,0.081,0.078,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.028,0.109,0.09,0.121,0,0,1 0.51,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.058,0.0174,0.077,0.111,0.069,0,1,0 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.02,0.088,0.095,0.092,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.02,0.093,0.091,0.102,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.018,0.082,0.099,0.08266,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0018,0.018,0.092,0.099,0.09274,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.018,0.124,0.099,0.125,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.023,0.118,0.13,0.091,0,1,0 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.025,0.089,0.099,0.089,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.02,0.087,0.084,0.103,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.09,0.085,0.106,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.023,0.085,0.079,0.107,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.024,0.074,0.116,0.063,0,0,1 0.25,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0011,0.026,0.212,0.175,0.121,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.027,0.099,0.1,0.099,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.016,0.102,0.107,0.096,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.015,0.111,0.098,0.114,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.014,0.117,0.102,0.114,0,0,1 0.84,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0036,0.006,0.082,0.074,0.11,0,0,1 0.35,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.23,0.015,0.036,0.134,0.027,1,0,0 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.09,0.089,0.101,0,0,1 0.02,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0206,0.145,0.093,0.155,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.0206,0.073,0.093,0.079,0,0,1 0.84,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.088,0.087,0.1,0,0,1 0.69,1,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0.00071,0.0206,0.091,0.094,0.098,0,0,1 0.59,1,1,0,1,0,0,0,1,0,1,0,0,0,0,0,8e-05,0.0206,0.125,0.078,0.161,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.009,0.0174,0.095,0.087,0.11,0,1,0 0.62,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0206,0.126,0.124,0.101,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.025,0.147,0.113,0.129,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.0208,0.103,0.092,0.112,0,0,1 0.51,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.002,0.0206,0.109,0.091,0.12,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.0206,0.097,0.069,0.141,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.002,0.004,0.101,0.004,1,0,0 0.59,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0097,0.0174,0.081,0.096,0.084,0,1,0 0.29,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.007,0.0206,0.098,0.085,0.114,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00031,0.02,0.123,0.115,0.107,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.022,0.182,0.137,0.133,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.012,0.061,0.087,0.07,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.019,0.109,0.078,0.139,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.105,0.087,0.12,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.026,0.138,0.158,0.088,0,0,1 0.16,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.015,0.14,0.089,0.157,0,0,1 0.12,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.023,0.152,0.08,0.19,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.02,0.097,0.088,0.11,0,0,1 0.57,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.0206,0.142,0.093,0.153,0,0,1 
0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0001,0.016,0.182,0.099,0.18346,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.019,0.111,0.108,0.102,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0023,0.025,0.114,0.107,0.106,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00057,0.013,0.156,0.139,0.112,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.017,0.076,0.099,0.07661,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.023,0.103,0.102,0.1,0,0,1 0.68,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0.0026,0.02,0.152,0.141,0.108,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.025,0.113,0.076,0.149,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.019,0.067,0.095,0.072,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.015,0.102,0.096,0.106,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0208,0.127,0.124,0.102,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.013,0.119,0.088,0.135,0,0,1 0.29,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.024,0.0096,0.063,0.12,0.053,1,0,0 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.014,0.124,0.086,0.144,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.013,0.111,0.092,0.12,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.019,0.165,0.139,0.118,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.015,0.085,0.084,0.102,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.0208,0.113,0.102,0.111,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.027,0.102,0.094,0.109,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.02,0.124,0.096,0.13,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.025,0.09,0.103,0.088,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.024,0.11118,0.099,0.11207,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.022,0.11118,0.099,0.11207,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00086,0.022,0.11118,0.099,0.11207,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.015,0.11118,0.099,0.11207,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.015,0.11118,0.099,0.11207,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.017,0.074,0.107,0.069,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.028,0.099,0.083,0.119,0,0,1 0.79,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.02,0.107,0.104,0.104,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01209,0.034,0.094,0.092,0.102,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.016,0.143,0.09,0.159,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.02,0.068,0.087,0.078,0,0,1 0.2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.026,0.106,0.091,0.116,0,0,1 0.31,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0208,0.212,0.116,0.181,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.024,0.105,0.083,0.125,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00056,0.013,0.07,0.093,0.075,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.018,0.102,0.092,0.111,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.104,0.104,0.098,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.012,0.109,0.078,0.14,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.016,0.107,0.115,0.093,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.02,0.143,0.131,0.109,0,0,1 0.74,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0005,0.024,0.1,0.107,0.093,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0057,0.0208,0.073,0.074,0.098,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.02,0.084,0.099,0.08467,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.024,0.106,0.1,0.106,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.022,0.083,0.099,0.08366,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.019,0.083,0.108,0.077,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.034,0.165,0.152,0.108,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.014,0.11118,0.099,0.11207,0,0,1 
0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.141,0.116,0.12,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.017,0.081,0.099,0.08165,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.019,0.123,0.116,0.106,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.015,0.083,0.078,0.106,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.025,0.157,0.107,0.147,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.0206,0.131,0.091,0.144,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.017,0.075,0.081,0.093,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11,0.096,0.114,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.0208,0.107,0.099,0.108,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0.007,0.014,0.111,0.013,1,0,0 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.047,0.002,0.002,0.084,0.002,1,0,0 0.52,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00093,0.013,0.086,0.064,0.134,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.019,0.061,0.082,0.074,0,1,0 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.02,0.092,0.095,0.097,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.019,0.083,0.09,0.092,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.018,0.142,0.099,0.144,0,0,1 0.52,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.0208,0.087,0.095,0.091,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00083,0.022,0.079,0.08,0.099,0,0,1 0.71,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.018,0.143,0.087,0.164,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0024,0.012,0.095,0.08,0.119,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0014,0.025,0.101,0.103,0.096,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.00208,0.031,0.116,0.12,0.096,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.02,0.169,0.099,0.17036,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0208,0.087,0.075,0.115,0,0,1 0.6,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00035,0.016,0.098,0.075,0.131,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.018,0.167,0.088,0.189,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.017,0.112,0.096,0.117,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.015,0.095,0.078,0.122,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.004,0.052,0.075,0.069,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0028,0.024,0.111,0.08,0.139,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.02,0.117,0.08,0.145,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0072,0.023,0.111,0.099,0.112,0,1,0 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.019,0.09,0.098,0.092,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.002,0.099,0.072,0.138,0,0,1 0.33,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0018,0.024,0.12,0.099,0.121,0,0,1 0.44,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0019,0.029,0.164,0.138,0.119,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.054,0.148,0.148,0.1,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.41,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.018,0.098,0.083,0.118,0,0,1 0.72,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.003,0.023,0.121,0.101,0.12,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.022,0.138,0.09,0.154,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.005,0.095,0.083,0.114,0,1,0 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.016,0.14,0.11,0.127,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.019,0.108,0.093,0.116,0,0,1 0.26,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0092,0.016,0.104,0.112,0.093,0,1,0 0.79,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0024,0.0206,0.11118,0.099,0.11207,0,0,1 0.34,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,6e-05,0.0469,0.157,0.168,0.094,0,0,1 
0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00094,0.029,0.092,0.093,0.1,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.085,0.244,0.098,0.249,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.029,0.135,0.081,0.167,0,0,1 0.27,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0.00189,0.04,0.187,0.188,0.1,0,0,1 0.76,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0025,0.0206,0.118,0.1,0.118,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0014,0.022,0.122,0.11,0.111,0,0,1 0.74,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.022,0.099,0.104,0.096,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.02,0.141,0.113,0.125,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.112,0.09,0.124,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.016,0.11118,0.099,0.11207,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.121,0.109,0.111,0,0,1 0.3,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.014,0.113,0.083,0.136,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.015,0.093,0.071,0.132,0,0,1 0.76,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0014,0.024,0.118,0.091,0.13,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.02,0.096,0.086,0.112,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.025,0.121,0.107,0.114,0,0,1 0.07,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.0174,0.137,0.103,0.133,0,1,0 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0206,0.137,0.104,0.129,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.022,0.134,0.116,0.114,0,0,1 0.13,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.113,0.099,0.114,0,0,1 0.56,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0011,0.022,0.103,0.09,0.114,0,0,1 0.14,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.027,0.08,0.083,0.097,0,0,1 0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.008,0.121,0.095,0.127,0,0,1 0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0206,0.102,0.097,0.105,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.019,0.138,0.091,0.155,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0036,0.013,0.153,0.1,0.152,0,0,1 0.68,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.019,0.0174,0.093,0.12,0.077,0,1,0 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.018,0.099,0.099,0.09979,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.017,0.083,0.097,0.086,0,0,1 0.55,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0025,0.019,0.109,0.104,0.104,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0022,0.017,0.115,0.098,0.118,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0068,0.012,0.044,0.087,0.051,1,0,0 0.42,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0072,0.013,0.093,0.09,0.104,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.016,0.139,0.104,0.134,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00041,0.008,0.105,0.082,0.127,0,0,1 0.44,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00101,0.018,0.109,0.1,0.109,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.009,0.096,0.075,0.127,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.049,0.192,0.109,0.176,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.026,0.111,0.1,0.111,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.003,0.014,0.103,0.088,0.118,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.02,0.099,0.102,0.097,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.015,0.053,0.076,0.07,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00045,0.0206,0.124,0.102,0.122,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.023,0.152,0.096,0.158,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.108,0.004,0.014,0.098,0.014,1,0,0 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.032,0.11118,0.099,0.11207,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.162,0.081,0.2,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.155,0.101,0.154,0,0,1 
0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.02,0.128,0.102,0.126,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.12,0.098,0.122,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.018,0.088,0.114,0.076,0,0,1 0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.012,0.097,0.093,0.104,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00089,0.018,0.103,0.098,0.105,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.0206,0.109,0.091,0.12,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.116,0.104,0.109,0,0,1 0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.02,0.134,0.096,0.14,0,0,1 0.07,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00094,0.02,0.093,0.084,0.11,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.025,0.121,0.129,0.094,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0092,0.018,0.087,0.104,0.084,0,1,0 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.024,0.121,0.123,0.098,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.024,0.139,0.115,0.121,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.02,0.076,0.103,0.073,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.188,0.013,0.063,0.119,0.053,1,0,0 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.018,0.087,0.089,0.098,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0206,0.138,0.083,0.166,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.016,0.075,0.076,0.099,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.015,0.07,0.077,0.091,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.02,0.109,0.085,0.129,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.006,0.017,0.121,0.112,0.108,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.023,0.101,0.094,0.107,0,0,1 0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.09,0.098,0.092,0,0,1 0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.024,0.11,0.092,0.119,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.0206,0.125,0.084,0.149,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.016,0.084,0.091,0.092,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.07,0.058,0.122,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.02,0.09,0.092,0.098,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0144,0.011,0.051,0.068,0.075,0,1,0 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.017,0.094,0.1,0.094,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.016,0.072,0.085,0.085,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.014,0.109,0.095,0.116,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.02,0.113,0.103,0.11,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.0206,0.1,0.079,0.127,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.019,0.092,0.104,0.088,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.076,0.094,0.081,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.023,0.11,0.112,0.098,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.008,0.0174,0.066,0.066,0.1,0,1,0 0.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0073,0.016,0.064,0.097,0.066,0,1,0 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.025,0.161,0.083,0.195,0,0,1 0.6,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0305,0.01,0.032,0.063,0.051,1,0,0 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.0208,0.11,0.095,0.116,0,0,1 0.23,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.038,0.131,0.104,0.125,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0208,0.106,0.092,0.115,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0206,0.103,0.116,0.088,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.097,0.089,0.109,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.0208,0.082,0.076,0.108,0,0,1 
0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.025,0.091,0.102,0.09,0,0,1 0.61,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0011,0.014,0.11,0.099,0.11088,0,0,1 0.5,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.0206,0.09,0.092,0.098,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.0206,0.088,0.104,0.084,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.023,0.068,0.094,0.072,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.118,0.102,0.115,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0206,0.127,0.101,0.126,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0206,0.14,0.163,0.086,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.022,0.11118,0.099,0.11207,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.02,0.123,0.091,0.135,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.046,0.01,0.048,0.098,0.049,1,0,0 0.6,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0027,0.0206,0.088,0.107,0.082,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.015,0.102,0.099,0.10282,0,0,1 0.73,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.015,0.12,0.094,0.127,0,0,1 0.73,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0022,0.025,0.11,0.128,0.085,0,0,1 0.61,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0003,0.02,0.093,0.099,0.093,0,0,1 0.47,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.02,0.11,0.102,0.108,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.028,0.096,0.094,0.103,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1e-05,0.0206,0.176,0.111,0.158,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.118,0.111,0.107,0,0,1 0.13,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.028,0.11,0.11,0.1,0,0,1 0.31,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,4e-05,0.027,0.163,0.094,0.172,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.00208,0.0206,0.101,0.1,0.101,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0206,0.139,0.107,0.129,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.0096,0.035,0.083,0.042,1,0,0 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.026,0.143,0.107,0.134,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0206,0.14,0.141,0.099,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.02,0.083,0.131,0.063,0,0,1 0.87,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.012,0.103,0.087,0.118,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.016,0.071,0.104,0.067,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.014,0.091,0.085,0.108,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.035,0.166,0.135,0.123,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.025,0.08,0.088,0.091,0,0,1 0.72,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0007,0.014,0.11,0.088,0.124,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.023,0.099,0.104,0.091,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.0206,0.114,0.098,0.117,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.015,0.075,0.09,0.083,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.012,0.067,0.096,0.069,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.1059,0.226,0.065,0.349,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.468,0.007,0.021,0.11,0.019,1,0,0 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.028,0.144,0.116,0.123,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.024,0.134,0.092,0.145,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0022,0.023,0.099,0.079,0.125,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.027,0.15,0.122,0.124,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.025,0.134,0.112,0.12,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.022,0.103,0.092,0.112,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.0206,0.093,0.084,0.11,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0206,0.15,0.099,0.151,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.017,0.126,0.124,0.101,0,0,1 
0.63,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.016,0.124,0.089,0.139,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.096,0.076,0.127,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0206,0.119,0.09,0.131,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.022,0.097,0.087,0.111,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.018,0.062,0.085,0.073,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.023,0.12,0.096,0.126,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0114,0.0174,0.099,0.097,0.102,0,1,0 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.0206,0.077,0.074,0.104,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.026,0.105,0.102,0.103,0,0,1 0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.024,0.073,0.107,0.069,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.026,0.131,0.116,0.111,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.029,0.198,0.091,0.217,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.028,0.143,0.13,0.11,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.016,0.103,0.113,0.091,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.103,0.013,0.031,0.103,0.02983,1,0,0 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.017,0.079,0.095,0.083,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0206,0.092,0.087,0.106,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00051,0.0206,0.118,0.115,0.103,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00014,0.034,0.105,0.099,0.10584,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.008,0.099,0.097,0.102,0,1,0 0.04,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.032,0.246,0.104,0.232,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.107,0.094,0.114,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.022,0.101,0.103,0.097,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.0096,0.075,0.135,0.055,1,0,0 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.017,0.087,0.087,0.1,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.0206,0.133,0.084,0.158,0,0,1 0.72,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0017,0.0206,0.113,0.085,0.132,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.018,0.107,0.085,0.126,0,0,1 0.59,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.027,0.117,0.074,0.157,0,0,1 0.75,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00083,0.019,0.116,0.099,0.117,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.016,0.111,0.077,0.145,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.09,0.092,0.098,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0077,0.0206,0.111,0.112,0.099,0,0,1 0.75,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00031,0.022,0.085,0.119,0.071,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.018,0.104,0.089,0.117,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.02,0.131,0.095,0.138,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.017,0.079,0.078,0.101,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00044,0.0206,0.075,0.06,0.124,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.027,0.113,0.104,0.107,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0206,0.11118,0.099,0.11207,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0206,0.163,0.095,0.172,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0092,0.022,0.072,0.108,0.066,0,1,0 0.46,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.053,0.233,0.092,0.253,0,0,1 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.117,0.108,0.108,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.024,0.128,0.111,0.115,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.052,0.012,0.065,0.092,0.071,0,1,0 0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.026,0.094,0.094,0.1,0,0,1 0.16,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0049,0.018,0.143,0.097,0.148,0,0,1 
0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0053,0.0208,0.092,0.104,0.088,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.0174,0.087,0.092,0.094,0,1,0 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.0206,0.083,0.104,0.08,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.019,0.1,0.104,0.093,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00074,0.005,0.085,0.077,0.11,0,0,1 0.68,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0067,0.014,0.101,0.1,0.10089,0,1,0 0.68,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.008,0.098,0.082,0.119,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0008,0.022,0.097,0.11,0.088,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.009,0.061,0.099,0.061,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.013,0.107,0.112,0.095,0,0,1 0.68,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0026,0.018,0.104,0.11,0.095,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00031,0.0206,0.118,0.083,0.142,0,0,1 0.39,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0015,0.028,0.099,0.095,0.104,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.016,0.106,0.14,0.076,0,0,1 0.42,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,4e-05,0.019,0.117,0.077,0.153,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0206,0.11,0.102,0.108,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0206,0.093,0.094,0.098,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00029,0.015,0.119,0.09,0.132,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0206,0.088,0.081,0.109,0,0,1 0.56,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1e-05,0.041,0.191,0.1,0.191,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.016,0.1,0.101,0.099,0,0,1 0.26,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0206,0.083,0.07,0.117,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1e-05,0.0206,0.135,0.082,0.166,0,0,1 0.37,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.025,0.0174,0.071,0.104,0.068,0,1,0 0.29,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0051,0.0206,0.072,0.08,0.09,0,0,1 0.91,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0206,0.112,0.099,0.113,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.011,0.062,0.07,0.089,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.087,0.092,0.095,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.017,0.085,0.099,0.08568,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.017,0.093,0.084,0.111,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.014,0.09,0.097,0.092,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.014,0.084,0.096,0.088,0,0,1 0.65,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00036,0.024,0.128,0.101,0.127,0,0,1 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.017,0.116,0.107,0.108,0,0,1 0.5,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.003,0.0206,0.106,0.107,0.099,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.018,0.103,0.084,0.124,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0001,0.018,0.093,0.099,0.09375,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.022,0.11118,0.099,0.11207,0,0,1 0.08,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.077,0.085,0.091,0,0,1 0.42,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0029,0.0206,0.141,0.108,0.131,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0206,0.113,0.082,0.138,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.024,0.128,0.113,0.114,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0206,0.056,0.112,0.05,0,0,1 0.41,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00039,0.019,0.12,0.103,0.117,0,0,1 0.71,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00939,0.014,0.077,0.089,0.087,0,1,0 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0206,0.127,0.092,0.138,0,0,1 0.54,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.016,0.015,0.06,0.078,0.077,0,1,0 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0033,0.007,0.076,0.072,0.105,0,0,1 
0.64,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0001,0.0206,0.113,0.091,0.125,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0015,0.023,0.116,0.108,0.107,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.015,0.106,0.091,0.117,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0206,0.112,0.095,0.117,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.017,0.092,0.099,0.09274,0,0,1 0.27,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00208,0.0206,0.07,0.048,0.146,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.0206,0.082,0.081,0.101,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.013,0.109,0.104,0.105,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0206,0.092,0.093,0.099,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.0206,0.088,0.072,0.122,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.014,0.111,0.078,0.142,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.007,0.041,0.06,0.069,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.0206,0.11118,0.072,0.118,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.024,0.105,0.116,0.091,0,0,1 0.65,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.012,0.0174,0.099,0.114,0.087,0,1,0 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.014,0.107,0.092,0.116,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0206,0.088,0.093,0.095,0,0,1 0.14,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.031,0.112,0.104,0.106,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.017,0.089,0.086,0.104,0,0,1 0.27,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00189,0.019,0.11118,0.099,0.11207,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.022,0.124,0.099,0.125,0,0,1 0.31,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0031,0.018,0.12,0.122,0.099,0,0,1 0.34,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0.0024,0.032,0.137,0.171,0.08,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.178,0.1,0.178,0,0,1 0.42,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.017,0.079,0.088,0.09,0,0,1 0.21,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.17,0.101,0.167,0,0,1 0.39,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0022,0.019,0.098,0.088,0.11,0,0,1 0.7,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.015,0.16,0.094,0.171,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.01,0.145,0.099,0.14616,0,0,1 0.83,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0025,0.0206,0.118,0.097,0.121,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.124,0.115,0.108,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.018,0.109,0.104,0.104,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00024,0.024,0.119,0.11,0.109,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.009,0.082,0.11,0.075,0,0,1 0.76,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.026,0.122,0.125,0.098,0,0,1 0.69,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0.0071,0.0206,0.093,0.11,0.085,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.019,0.135,0.096,0.141,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00043,0.0206,0.116,0.078,0.149,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0206,0.085,0.065,0.131,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0206,0.079,0.095,0.083,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.017,0.138,0.103,0.134,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.018,0.151,0.094,0.161,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0065,0.0206,0.045,0.047,0.096,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.09,0.107,0.084,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00043,0.0206,0.096,0.088,0.109,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.022,0.11118,0.099,0.11207,0,0,1 0.28,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.0206,0.133,0.099,0.135,0,0,1 0.18,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.093,0.104,0.089,0,0,1 0.18,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.095,0.093,0.102,0,0,1 
0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0264,0.023,0.073,0.109,0.067,0,1,0 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.019,0.089,0.099,0.08971,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.007,0.0174,0.071,0.078,0.09,0,1,0 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0206,0.12,0.129,0.092,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.015,0.091,0.081,0.113,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.095,0.086,0.111,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.019,0.0174,0.148,0.111,0.134,0,1,0 0.47,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.094,0.097,0.097,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.112,0.098,0.114,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00064,0.01,0.103,0.099,0.10383,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.016,0.11118,0.099,0.11207,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0005,0.0208,0.138,0.092,0.15,0,0,1 0.37,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00065,0.013,0.136,0.119,0.115,0,0,1 0.8,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0053,0.02,0.086,0.102,0.083,0,0,1 0.44,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0.021,0.0206,0.076,0.09,0.084,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0208,0.111,0.099,0.112,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.022,0.084,0.085,0.099,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0071,0.017,0.096,0.089,0.108,0,1,0 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00047,0.022,0.161,0.104,0.153,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.019,0.11,0.073,0.15,0,0,1 0.44,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00026,0.0206,0.127,0.103,0.124,0,0,1 0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00021,0.0206,0.156,0.084,0.187,0,0,1 0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.02,0.122,0.094,0.122,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.017,0.105,0.103,0.101,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.002,0.08,0.08,0.099,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.025,0.103,0.102,0.101,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.026,0.079,0.084,0.093,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.022,0.094,0.089,0.106,0,0,1 0.72,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0011,0.0206,0.131,0.05,0.265,0,0,1 0.81,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.017,0.071,0.092,0.077,0,0,1 0.39,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.066,0.248,0.089,0.28,0,0,1 0.49,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0206,0.131,0.083,0.158,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0011,0.028,0.107,0.109,0.099,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.014,0.12,0.082,0.146,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.024,0.118,0.112,0.106,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.055,0.007,0.015,0.108,0.014,1,0,0 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0206,0.166,0.146,0.115,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.018,0.135,0.1,0.134,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.098,0.099,0.099,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0206,0.088,0.104,0.083,0,0,1 0.73,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0025,0.02,0.134,0.116,0.115,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.016,0.089,0.096,0.093,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.012,0.077,0.085,0.091,0,0,1 0.65,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00032,0.027,0.096,0.14,0.069,0,0,1 0.36,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,5e-05,0.022,0.162,0.115,0.14,0,0,1 0.47,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0017,0.017,0.091,0.096,0.094,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.027,0.124,0.12,0.104,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.019,0.094,0.104,0.089,0,0,1 
0.55,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0045,0.017,0.091,0.109,0.084,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.023,0.143,0.102,0.141,0,0,1 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00066,0.023,0.114,0.088,0.129,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.019,0.118,0.09,0.131,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0206,0.125,0.111,0.113,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.022,0.198,0.123,0.16,0,0,1 0.24,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.0206,0.126,0.086,0.146,0,0,1 0.14,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.025,0.08,0.089,0.09,0,0,1 0.31,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0.00044,0.015,0.075,0.092,0.081,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0066,0.014,0.137,0.112,0.123,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.011,0.102,0.081,0.125,0,0,1 0.32,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,5e-05,0.039,0.289,0.175,0.165,0,0,1 0.22,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.118,0.096,0.123,0,0,1 0.42,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.026,0.203,0.101,0.2,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0206,0.112,0.115,0.097,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.01,0.092,0.099,0.093,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.012,0.0208,0.081,0.109,0.074,0,1,0 0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.0206,0.074,0.087,0.085,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0206,0.175,0.104,0.168,0,0,1 0.39,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.024,0.148,0.107,0.138,0,0,1 0.39,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.02,0.102,0.093,0.11,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.019,0.112,0.094,0.119,0,0,1 0.52,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.098,0.071,0.138,0,0,1 0.53,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,8e-05,0.0206,0.127,0.09,0.141,0,0,1 0.47,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.012,0.072,0.082,0.088,0,0,1 0.13,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0208,0.082,0.088,0.093,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.018,0.148,0.119,0.125,0,0,1 0.74,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0054,0.014,0.109,0.083,0.131,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.012,0.095,0.098,0.096,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.016,0.126,0.093,0.135,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.022,0.101,0.079,0.129,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.007,0.0174,0.112,0.108,0.103,0,1,0 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0027,0.024,0.157,0.12,0.131,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00036,0.03,0.131,0.104,0.126,0,0,1 0.64,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00082,0.0206,0.125,0.101,0.124,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.0206,0.075,0.084,0.089,0,0,1 0.52,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0013,0.02,0.102,0.099,0.10282,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.087,0.12,0.073,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00075,0.014,0.112,0.086,0.13,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.01,0.06,0.088,0.068,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.019,0.101,0.099,0.102,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.016,0.094,0.092,0.102,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002,0.019,0.151,0.098,0.154,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.012,0.071,0.111,0.064,1,0,0 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.037,0.169,0.114,0.148,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.0206,0.142,0.12,0.118,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.02,0.087,0.095,0.091,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0206,0.113,0.085,0.133,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.236,0.002,0.016,0.094,0.017,1,0,0 
0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.048,0.257,0.12,0.214,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.022,0.11118,0.099,0.095,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.043,0.184,0.119,0.155,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.033,0.015,0.128,0.1,0.12787,0,1,0 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.101,0.109,0.093,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.018,0.094,0.1,0.094,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.034,0.129,0.153,0.084,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.063,0.088,0.072,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.006,0.079,0.091,0.087,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0.017,0.078,0.09,0.087,0,1,0 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.099,0.099,0.1,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.016,0.155,0.072,0.216,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.022,0.11118,0.099,0.11207,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0067,0.0208,0.16,0.121,0.132,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00026,0.027,0.12,0.127,0.095,0,0,1 0.55,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00099,0.013,0.093,0.086,0.108,0,0,1 0.12,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0024,0.0206,0.166,0.12,0.138,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0206,0.101,0.09,0.113,0,0,1 0.68,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00047,0.0206,0.117,0.102,0.115,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0061,0.022,0.136,0.124,0.11,0,1,0 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.023,0.16,0.104,0.151,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.005,0.078,0.095,0.082,0,0,1 0.63,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0206,0.166,0.097,0.17,0,0,1 0.34,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.027,0.044,0.273,0.122,0.222,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.015,0.117,0.086,0.136,0,0,1 0.74,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0206,0.175,0.115,0.153,0,0,1 0.8,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0079,0.015,0.106,0.094,0.113,0,0,1 0.53,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.0206,0.171,0.104,0.162,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.017,0.097,0.091,0.107,0,0,1 0.59,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0017,0.012,0.101,0.09,0.112,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.01,0.154,0.103,0.149,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.014,0.076,0.083,0.091,0,0,1 0.79,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.024,0.099,0.073,0.135,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0068,0.0174,0.144,0.129,0.111,0,1,0 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.143,0.007,0.073,0.101,0.072,0,1,0 0.25,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.00189,0.022,0.113,0.108,0.105,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00075,0.023,0.119,0.094,0.126,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.027,0.155,0.143,0.109,0,0,1 0.23,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0.0079,0.022,0.104,0.107,0.097,0,1,0 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.122,0.11,0.111,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0206,0.131,0.095,0.137,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0022,0.02,0.108,0.108,0.1,0,0,1 0.44,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.002,0.013,0.136,0.094,0.145,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0208,0.086,0.099,0.086,0,0,1 0.31,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0016,0.0206,0.111,0.1,0.111,0,0,1 0.75,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,2e-05,0.0206,0.15,0.088,0.171,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0208,0.098,0.09,0.109,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.023,0.139,0.111,0.125,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.053,0.272,0.097,0.281,0,0,1 
0.77,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0013,0.018,0.104,0.104,0.1,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0206,0.098,0.114,0.086,0,0,1 0.46,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.045,0.156,0.076,0.205,0,0,1 0.42,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0.00067,0.019,0.074,0.079,0.093,0,0,1 0.5,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.013,0.013,0.086,0.087,0.099,0,1,0 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.016,0.094,0.104,0.089,0,0,1 0.19,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.147,0.09,0.163,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.53,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00032,0.0206,0.145,0.096,0.152,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.015,0.083,0.099,0.08366,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.019,0.086,0.099,0.08669,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.0206,0.192,0.144,0.134,0,0,1 0.19,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0037,0.011,0.126,0.091,0.138,0,0,1 0.32,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.029,0.138,0.115,0.12,0,0,1 0.53,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0024,0.037,0.172,0.167,0.103,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.023,0.177,0.151,0.117,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.023,0.082,0.088,0.092,0,0,1 0.21,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.016,0.099,0.087,0.114,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0206,0.139,0.113,0.123,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0208,0.113,0.088,0.128,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.008,0.086,0.066,0.13,0,1,0 0.76,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.005,0.057,0.099,0.05745,0,0,1 0.76,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0012,0.0208,0.081,0.094,0.086,0,0,1 0.18,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.478,0.0096,0.045,0.13,0.034,1,0,0 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.033,0.2,0.1,0.2,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0206,0.098,0.104,0.095,0,0,1 0.36,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.0206,0.125,0.093,0.135,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0046,0.0206,0.157,0.171,0.092,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.0206,0.129,0.114,0.114,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.024,0.099,0.107,0.093,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00929,0.019,0.085,0.107,0.08,0,1,0 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0208,0.121,0.112,0.108,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0.0016,0.018,0.089,0.08,0.112,0,0,1 0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.018,0.086,0.104,0.083,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.0206,0.096,0.069,0.14,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.029,0.094,0.115,0.082,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.028,0.096,0.082,0.118,0,0,1 0.75,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0018,0.02,0.102,0.083,0.124,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.026,0.087,0.113,0.078,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.019,0.114,0.099,0.11491,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.059,0.058,0.099,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.017,0.101,0.099,0.10181,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.023,0.114,0.099,0.11491,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.0206,0.147,0.107,0.138,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00094,0.024,0.08,0.104,0.076,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.025,0.109,0.103,0.105,0,0,1 0.41,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0028,0.0206,0.149,0.1,0.148,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0206,0.103,0.108,0.095,0,0,1 0.52,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.099,0.092,0.107,0,0,1 
0.74,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0014,0.0206,0.123,0.157,0.079,0,0,1 0.63,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.042,0.016,0.06,0.093,0.064,1,0,0 0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0206,0.078,0.078,0.1,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.017,0.107,0.081,0.132,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.015,0.095,0.091,0.104,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0069,0.0174,0.085,0.089,0.095,0,1,0 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.018,0.12,0.109,0.11,0,0,1 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00078,0.02,0.102,0.097,0.106,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00055,0.0206,0.18,0.104,0.172,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.017,0.103,0.089,0.116,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00098,0.025,0.12,0.104,0.113,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.016,0.11,0.085,0.129,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.016,0.09,0.079,0.114,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.025,0.066,0.089,0.075,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0206,0.134,0.098,0.137,0,0,1 0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0206,0.154,0.134,0.115,0,0,1 0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.011,0.052,0.036,0.145,0,0,1 0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.03,0.128,0.111,0.116,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.025,0.085,0.081,0.105,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.02,0.073,0.074,0.099,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00066,0.017,0.114,0.089,0.128,0,0,1 0.61,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.063,0.071,0.089,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00082,0.014,0.129,0.088,0.147,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.0206,0.098,0.079,0.125,0,0,1 0.59,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0208,0.098,0.09,0.108,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.03,0.106,0.112,0.096,0,0,1 0.63,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0065,0.007,0.086,0.072,0.12,0,1,0 0.74,1,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0.0052,0.0206,0.11118,0.099,0.11207,0,0,1 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.019,0.139,0.084,0.165,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.018,0.145,0.104,0.14,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00088,0.016,0.108,0.085,0.127,0,0,1 0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.0206,0.157,0.102,0.154,0,0,1 0.5,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.034,0.131,0.102,0.129,0,0,1 0.18,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0206,0.183,0.13,0.141,0,0,1 0.18,1,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0.025,0.011,0.136,0.096,0.143,0,0,1 0.22,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0023,0.036,0.148,0.142,0.104,0,0,1 0.53,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.054,0.162,0.073,0.222,0,0,1 0.38,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0022,0.0206,0.093,0.115,0.081,0,0,1 0.69,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0013,0.019,0.101,0.099,0.10181,0,0,1 0.29,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.019,0.1,0.093,0.107,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0004,0.0206,0.135,0.084,0.16,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.0206,0.093,0.075,0.125,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0039,0.015,0.058,0.058,0.099,0,0,1 0.73,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,3e-05,0.017,0.127,0.089,0.143,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.055,0.199,0.104,0.19,0,0,1 0.41,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0051,0.018,0.091,0.112,0.082,0,0,1 0.64,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.00073,0.02,0.152,0.115,0.131,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0206,0.152,0.099,0.153,0,0,1 0.29,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00033,0.022,0.142,0.093,0.153,0,0,1 
0.35,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00014,0.019,0.073,0.116,0.063,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0206,0.078,0.102,0.077,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0206,0.097,0.086,0.112,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.016,0.13,0.104,0.123,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0206,0.1,0.1,0.1,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0013,0.0208,0.081,0.09,0.09,0,0,1 0.57,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00062,0.015,0.074,0.078,0.096,0,0,1 0.54,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00058,0.026,0.1,0.101,0.099,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0058,0.0206,0.261,0.232,0.113,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00034,0.014,0.1,0.08,0.124,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.009,0.056,0.067,0.084,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00017,0.009,0.122,0.099,0.12298,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.003,0.0206,0.115,0.107,0.107,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.108,0.102,0.106,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.044,0.017,0.059,0.104,0.057,1,0,0 0.38,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.016,0.131,0.13,0.101,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0208,0.109,0.102,0.107,0,0,1 0.37,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0016,0.022,0.104,0.099,0.105,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.017,0.104,0.096,0.108,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.028,0.11118,0.099,0.11207,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.0206,0.105,0.09,0.116,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0206,0.095,0.102,0.093,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00031,0.0206,0.146,0.104,0.141,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.024,0.13,0.124,0.105,0,0,1 0.58,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00098,0.0206,0.099,0.108,0.092,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.116,0.099,0.11693,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.014,0.096,0.099,0.09677,0,0,1 0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.011,0.096,0.087,0.111,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0052,0.0206,0.088,0.095,0.092,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0206,0.1,0.082,0.122,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.017,0.084,0.099,0.085,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0206,0.11118,0.099,0.11207,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.078,0.087,0.09,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00035,0.014,0.108,0.102,0.105,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0208,0.116,0.104,0.11,0,0,1 0.02,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00419,0.0208,0.152,0.093,0.163,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.017,0.066,0.075,0.088,0,0,1 0.21,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,0.0013,0.019,0.112,0.104,0.106,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.031,0.164,0.081,0.201,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.09,0.098,0.092,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.018,0.163,0.104,0.155,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.0206,0.077,0.067,0.115,0,0,1 0.37,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,0.00057,0.026,0.159,0.139,0.114,0,0,1 0.66,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,4e-05,0.003,0.072,0.061,0.119,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.0174,0.068,0.062,0.111,0,1,0 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.1,0.083,0.121,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.058,0.0174,0.074,0.09,0.082,0,1,0 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0206,0.124,0.104,0.119,0,0,1 
0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.013,0.08,0.102,0.079,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.031,0.119,0.108,0.109,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.02,0.076,0.103,0.073,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.0206,0.087,0.089,0.098,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.016,0.075,0.076,0.099,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.02,0.099,0.096,0.104,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.026,0.02,0.089,0.15,0.059,1,0,0 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00017,0.017,0.12,0.098,0.122,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.0206,0.109,0.085,0.128,0,0,1 0.3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0026,0.017,0.082,0.103,0.079,0,0,1 0.33,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0206,0.112,0.083,0.134,0,0,1 0.62,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0.013,0.008,0.083,0.081,0.103,0,1,0 0.55,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.044,0.195,0.082,0.237,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.0206,0.105,0.085,0.124,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.068,0.064,0.106,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.015,0.096,0.088,0.109,0,0,1 0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0206,0.131,0.101,0.131,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.081,0.085,0.095,0,0,1 0.72,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0058,0.014,0.107,0.107,0.1,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.113,0.097,0.117,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.091,0.071,0.128,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00066,0.0206,0.127,0.097,0.131,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0.0096,0.063,0.119,0.053,1,0,0 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.057,0.085,0.065,0,0,1 0.19,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0.00045,0.032,0.13,0.183,0.071,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.022,0.115,0.102,0.114,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00045,0.0208,0.107,0.09,0.119,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0206,0.11118,0.099,0.11207,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0208,0.188,0.097,0.194,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.123,0.116,0.107,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.015,0.116,0.093,0.124,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.014,0.113,0.085,0.132,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.022,0.129,0.097,0.133,0,0,1 0.78,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1e-05,0.0208,0.214,0.087,0.245,0,0,1 0.77,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0035,0.008,0.068,0.097,0.07,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.023,0.138,0.104,0.133,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.013,0.105,0.094,0.112,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00089,0.0206,0.04,0.096,0.042,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.022,0.103,0.12,0.086,0,0,1 0.47,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00208,0.0206,0.117,0.109,0.107,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.055,0.009,0.045,0.104,0.043,1,0,0 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00057,0.014,0.075,0.072,0.104,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.019,0.115,0.077,0.149,0,0,1 0.55,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0.00058,0.0206,0.118,0.103,0.114,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00189,0.0206,0.089,0.096,0.093,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.128,0.098,0.131,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.02,0.093,0.097,0.096,0,0,1 
0.52,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.044,0.025,0.066,0.122,0.054,1,0,0 0.83,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.0144,0.123,0.099,0.124,0,0,1 0.7,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0023,0.012,0.107,0.101,0.105,0,0,1 0.39,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1e-05,0.015,0.084,0.099,0.085,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.015,0.1,0.074,0.135,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00042,0.018,0.12,0.094,0.128,0,0,1 0.65,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.099,0.052,0.188,0,0,1 0.52,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0012,0.017,0.092,0.076,0.121,0,0,1 0.47,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.02,0.104,0.087,0.12,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.35,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00044,0.018,0.094,0.086,0.11,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.019,0.115,0.107,0.107,0,0,1 0.33,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1e-05,0.05,0.244,0.067,0.362,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.012,0.174,0.099,0.1754,0,0,1 0.65,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.012,0.118,0.09,0.13,0,0,1 0.78,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0082,0.0206,0.11118,0.099,0.11207,0,0,1 0.16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.0206,0.11118,0.099,0.11207,0,0,1 0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0206,0.13,0.086,0.151,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.018,0.114,0.092,0.125,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.019,0.083,0.103,0.081,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.011,0.128,0.08,0.16,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.019,0.119,0.1,0.119,0,0,1 0.515,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.118,0.113,0.104,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.025,0.101,0.104,0.097,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0058,0.02,0.102,0.116,0.089,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.023,0.097,0.104,0.093,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.022,0.073,0.099,0.074,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.019,0.17,0.123,0.138,0,0,1 0.61,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00041,0.012,0.116,0.087,0.134,0,0,1 0.51,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.022,0.164,0.109,0.151,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.078,0.09,0.087,0,0,1 0.77,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.036,0.0174,0.082,0.102,0.081,0,1,0 0.57,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,6e-05,0.0419,0.235,0.11,0.214,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,6e-05,0.02,0.153,0.099,0.154,0,0,1 0.63,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0029,0.016,0.101,0.099,0.10181,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.025,0.105,0.101,0.103,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.034,0.129,0.095,0.136,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00065,0.03,0.092,0.095,0.092,0,0,1 0.56,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0.0052,0.0206,0.137,0.128,0.107,0,0,1 0.39,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0.0036,0.0206,0.092,0.109,0.084,0,0,1 0.85,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.008,0.011,0.125,0.089,0.142,0,1,0 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.153,0.119,0.129,0,0,1 0.58,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4e-05,0.026,0.117,0.102,0.114,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.093,0.087,0.107,0,0,1 0.7,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,7e-05,0.009,0.104,0.083,0.124,0,0,1 0.29,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00015,0.055,0.161,0.161,0.1,0,0,1 0.61,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025,0.025,0.101,0.1,0.101,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.025,0.084,0.104,0.08,0,0,1 0.7,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00078,0.019,0.136,0.09,0.152,0,0,1 
0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.025,0.13,0.108,0.121,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.02,0.135,0.121,0.111,0,0,1 0.7,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0.035,0.194,0.09,0.215,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.025,0.102,0.092,0.111,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.028,0.11,0.114,0.097,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.017,0.064,0.069,0.093,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.027,0.11,0.109,0.101,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.018,0.091,0.09,0.101,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.012,0.09,0.082,0.11,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.067,0.09,0.075,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.028,0.106,0.132,0.081,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.026,0.111,0.104,0.105,0,0,1 0.72,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.026,0.082,0.089,0.093,0,0,1 0.6,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.183,0.011,0.045,0.097,0.046,1,0,0 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.018,0.096,0.103,0.094,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00016,0.0206,0.11118,0.099,0.11207,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.023,0.077,0.104,0.074,0,0,1 0.47,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0.03,0.145,0.123,0.117,0,0,1 0.64,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0024,0.02,0.101,0.094,0.107,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0208,0.112,0.111,0.101,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.018,0.084,0.091,0.093,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00909,0.02,0.072,0.09,0.08,0,1,0 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00032,0.025,0.169,0.114,0.149,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.026,0.18,0.142,0.126,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.015,0.088,0.098,0.09,0,1,0 0.64,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00889,0.012,0.067,0.075,0.089,0,1,0 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.03,0.114,0.093,0.122,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.013,0.025,0.096,0.133,0.073,0,1,0 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.092,0.104,0.087,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.024,0.1,0.099,0.1008,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00087,0.019,0.116,0.099,0.11693,0,0,1 0.67,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0.0013,0.022,0.104,0.092,0.114,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.018,0.098,0.099,0.099,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.018,0.094,0.088,0.107,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.018,0.109,0.091,0.119,0,0,1 0.4,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00047,0.0206,0.089,0.089,0.1,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.0206,0.102,0.084,0.121,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.105,0.09,0.116,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0206,0.098,0.085,0.115,0,0,1 0.18,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0015,0.0206,0.116,0.104,0.111,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00095,0.025,0.119,0.104,0.114,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.016,0.126,0.102,0.123,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.022,0.075,0.085,0.088,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.016,0.14,0.101,0.138,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.025,0.106,0.099,0.10685,0,0,1 0.63,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.0206,0.08,0.084,0.095,0,0,1 
0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.024,0.073,0.089,0.082,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.025,0.108,0.096,0.113,0,0,1 0.93,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0027,0.004,0.077,0.086,0.09,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.023,0.113,0.096,0.118,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.015,0.113,0.074,0.153,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.023,0.074,0.099,0.07458,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00043,0.041,0.123,0.148,0.083,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.023,0.15,0.12,0.125,0,0,1 0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.016,0.108,0.092,0.118,0,0,1 0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.022,0.105,0.094,0.112,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00086,0.027,0.094,0.099,0.09475,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.013,0.1,0.086,0.117,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.003,0.061,0.054,0.113,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.016,0.086,0.08,0.107,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.65,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00089,0.026,0.095,0.101,0.095,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.025,0.081,0.091,0.089,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.027,0.087,0.085,0.102,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.038,0.263,0.17,0.155,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.0208,0.066,0.066,0.099,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0.02,0.01,0.112,0.0085,1,0,0 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.018,0.087,0.116,0.075,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00035,0.003,0.044,0.038,0.118,0,0,1 0.88,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.022,0.101,0.1,0.101,0,0,1 0.62,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0011,0.024,0.081,0.097,0.084,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.016,0.162,0.094,0.173,0,0,1 0.34,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.027,0.104,0.101,0.103,0,0,1 0.52,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1e-05,0.024,0.108,0.07,0.154,0,0,1 0.17,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.145,0.012,0.036,0.15,0.024,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0068,0.024,0.096,0.104,0.09,0,1,0 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.023,0.149,0.088,0.169,0,0,1 0.74,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.045,0.171,0.116,0.148,0,0,1 0.58,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.018,0.0208,0.068,0.108,0.063,1,0,0 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.0208,0.1,0.099,0.1008,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.026,0.087,0.099,0.0877,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.026,0.129,0.116,0.109,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.024,0.137,0.135,0.101,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.002,0.059,0.073,0.082,0,0,1 0.67,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0017,0.007,0.079,0.077,0.102,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.025,0.097,0.1,0.085,0,0,1 0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.015,0.089,0.094,0.095,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.023,0.087,0.093,0.094,0,0,1 0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.0419,0.188,0.139,0.135,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.027,0.138,0.134,0.103,0,0,1 0.53,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,3e-05,0.0206,0.052,0.109,0.14,0,0,1 0.21,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.016,0.088,0.101,0.087,0,0,1 
0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.023,0.129,0.129,0.1,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.02,0.082,0.099,0.08266,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.027,0.137,0.119,0.115,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.023,0.083,0.119,0.069,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.012,0.124,0.112,0.111,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.078,0.084,0.093,0,0,1 0.69,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.018,0.163,0.109,0.149,0,0,1 0.69,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0013,0.017,0.116,0.095,0.122,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0073,0.016,0.109,0.104,0.105,0,0,1 0.31,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.015,0.101,0.089,0.113,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.0208,0.119,0.108,0.11,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.023,0.129,0.124,0.103,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.024,0.061,0.099,0.06149,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.018,0.093,0.104,0.089,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.068,0.079,0.086,0,0,1 0.18,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.016,0.013,0.075,0.111,0.068,0,0,1 0.59,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00065,0.0206,0.126,0.131,0.096,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.001,0.065,0.048,0.137,0,0,1 0.17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.005,0.043,0.041,0.105,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.016,0.106,0.088,0.12,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.026,0.093,0.116,0.079,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.024,0.099,0.113,0.088,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.116,0.095,0.122,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.02,0.09159,0.1,0.0915,0,1,0 0.53,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00062,0.017,0.116,0.1,0.116,0,0,1 0.82,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00208,0.017,0.099,0.091,0.108,0,0,1 0.82,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.004,0.058,0.068,0.085,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.027,0.119,0.136,0.088,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.028,0.11118,0.099,0.11207,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.022,0.113,0.116,0.097,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.013,0.066,0.116,0.056,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.018,0.076,0.102,0.075,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.019,0.081,0.083,0.098,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.022,0.103,0.11,0.093,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.74,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.028,0.0206,0.074,0.103,0.072,0,0,1 0.57,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1e-05,0.029,0.098,0.087,0.113,0,0,1 0.41,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.027,0.101,0.084,0.121,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00074,0.029,0.1,0.104,0.095,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00859,0.011,0.084,0.074,0.114,0,1,0 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00046,0.014,0.085,0.112,0.077,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.023,0.124,0.125,0.099,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.018,0.109,0.092,0.118,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0208,0.141,0.085,0.167,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.022,0.105,0.101,0.105,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.014,0.062,0.072,0.085,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012,0.015,0.117,0.094,0.125,0,0,1 
0.07,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.023,0.054,0.086,0.063,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.024,0.112,0.099,0.1129,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.031,0.135,0.099,0.13608,0,0,1 0.73,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.002,0.016,0.116,0.093,0.125,0,0,1 0.56,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0014,0.01,0.099,0.07,0.142,0,0,1 0.73,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0085,0.0208,0.104,0.113,0.092,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.014,0.096,0.088,0.109,0,0,1 0.41,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0.022,0.064,0.094,0.069,0,0,1 0.51,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0.132,0.095,0.139,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.0208,0.083,0.124,0.067,0,1,0 0.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0067,0.0096,0.03375,0.103,0.03249,1,0,0 0.61,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.025,0.114,0.093,0.122,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.017,0.096,0.083,0.116,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.017,0.082,0.1,0.081,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.019,0.11,0.102,0.108,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.011,0.066,0.071,0.093,0,0,1 0.83,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0022,0.007,0.071,0.076,0.093,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.015,0.11,0.08,0.137,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.019,0.096,0.112,0.085,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.024,0.113,0.134,0.084,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.019,0.101,0.099,0.10181,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.016,0.096,0.079,0.122,0,0,1 0.36,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0206,0.108,0.11,0.099,0,0,1 0.48,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.178,0.01,0.063,0.108,0.059,1,0,0 0.42,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,2e-05,0.04,0.2,0.156,0.128,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.023,0.122,0.166,0.073,0,0,1 0.7,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.023,0.12,0.101,0.119,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.02,0.109,0.099,0.10987,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0208,0.115,0.097,0.119,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.101,0.093,0.109,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.013,0.109,0.089,0.122,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.014,0.144,0.099,0.14516,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.117,0.109,0.107,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.02,0.101,0.094,0.108,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.024,0.125,0.109,0.115,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.03,0.123,0.122,0.101,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.023,0.111,0.114,0.097,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.023,0.128,0.114,0.112,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.01,0.101,0.082,0.123,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.022,0.152,0.078,0.196,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.005,0.13,0.085,0.153,0,1,0 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.164,0.114,0.145,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.017,0.113,0.099,0.11391,0,0,1 
0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0208,0.157,0.116,0.135,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.165,0.004,0.017,0.119,0.014,1,0,0 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.018,0.15,0.104,0.145,0,0,1 0.31,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.012,0.146,0.121,0.121,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.054,0.0208,0.05,0.11,0.046,1,0,0 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.014,0.064,0.079,0.081,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.009,0.106,0.092,0.115,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.01,0.102,0.091,0.112,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.014,0.101,0.098,0.103,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.011,0.24,0.107,0.223,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.016,0.103,0.093,0.111,0,0,1 0.61,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0.0018,0.019,0.102,0.104,0.097,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.016,0.097,0.112,0.087,0,0,1 0.68,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.015,0.112,0.099,0.114,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0082,0.0208,0.08,0.101,0.079,0,1,0 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.014,0.106,0.092,0.115,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00189,0.018,0.089,0.104,0.085,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.026,0.118,0.11,0.107,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.035,0.023,0.042,0.101,0.041,1,0,0 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.024,0.102,0.108,0.095,0,0,1 0.64,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00085,0.011,0.099,0.111,0.09,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.014,0.116,0.096,0.121,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.013,0.091,0.102,0.089,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.033,0.116,0.099,0.11693,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0052,0.014,0.058,0.083,0.069,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00017,0.018,0.119,0.091,0.13,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00083,0.022,0.132,0.099,0.133,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.014,0.152,0.104,0.145,0,0,1 0.71,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.0016,0.018,0.136,0.116,0.117,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0005,0.038,0.052,0.073,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,3e-05,0.026,0.128,0.103,0.124,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.119,0.104,0.113,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.01,0.143,0.083,0.173,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00035,0.016,0.12,0.111,0.108,0,0,1 0.73,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.036,0.115,0.123,0.093,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.01,0.103,0.085,0.122,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.031,0.142,0.147,0.097,0,0,1 0.29,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0206,0.141,0.125,0.113,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.025,0.13,0.101,0.129,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.024,0.095,0.121,0.078,0,0,1 0.59,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0071,0.019,0.098,0.111,0.088,0,1,0 0.22,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.017,0.103,0.1,0.103,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.012,0.107,0.11,0.097,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00032,0.016,0.164,0.111,0.148,0,0,1 0.25,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0024,0.022,0.152,0.136,0.112,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.017,0.167,0.121,0.137,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.019,0.1,0.111,0.09,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.013,0.124,0.09,0.138,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.097,0.099,0.099,0,0,1 
0.69,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0025,0.015,0.082,0.066,0.124,0,0,1 0.71,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0078,0.014,0.111,0.096,0.116,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.02,0.098,0.101,0.097,0,0,1 0.78,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0041,0.019,0.116,0.104,0.11,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.026,0.171,0.112,0.153,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.015,0.126,0.086,0.147,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.007,0.05,0.025,0.205,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.018,0.124,0.111,0.112,0,1,0 0.61,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00879,0.022,0.109,0.097,0.113,0,1,0 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.023,0.106,0.104,0.1,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00989,0.007,0.111,0.092,0.12,0,1,0 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.022,0.145,0.134,0.108,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.02,0.087,0.099,0.0877,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.014,0.076,0.057,0.132,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.017,0.094,0.097,0.097,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.101,0.116,0.086,0,0,1 0.76,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00042,0.01,0.115,0.111,0.104,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.015,0.094,0.102,0.092,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00028,0.009,0.079,0.07,0.112,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.007,0.134,0.112,0.12,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0.004,0.011,0.124,0.00889,1,0,0 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.025,0.003,0.01,0.116,0.00839,1,0,0 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.045,0.204,0.203,0.1,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0208,0.108,0.104,0.105,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.038,0.193,0.193,0.1,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.016,0.105,0.099,0.10584,0,0,1 0.69,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0013,0.02,0.121,0.107,0.113,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.027,0.156,0.125,0.125,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.016,0.113,0.099,0.114,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.011,0.121,0.111,0.109,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.025,0.096,0.091,0.105,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.019,0.133,0.099,0.134,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.022,0.091,0.084,0.108,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.023,0.141,0.12,0.117,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.026,0.162,0.121,0.134,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.72,1,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0.0015,0.018,0.11118,0.093,0.112,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0208,0.109,0.099,0.10987,0,0,1 0.67,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.0206,0.082,0.089,0.092,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.02,0.106,0.101,0.105,0,0,1 0.84,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.012,0.132,0.092,0.145,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.019,0.09,0.087,0.104,0,0,1 
0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.016,0.129,0.122,0.106,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.008,0.101,0.099,0.103,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00208,0.026,0.121,0.094,0.13,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.014,0.159,0.095,0.166,0,0,1 0.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.043,0.0096,0.03375,0.103,0.03249,1,0,0 0.01,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.018,0.119,0.104,0.113,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.01,0.13,0.088,0.148,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.008,0.105,0.088,0.12,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.017,0.13,0.12,0.109,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.02,0.121,0.119,0.102,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.017,0.092,0.098,0.094,0,0,1 0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.016,0.094,0.094,0.1,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.02,0.078,0.094,0.087,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.022,0.096,0.093,0.103,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.009,0.14,0.099,0.14112,0,0,1 0.69,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.016,0.099,0.092,0.108,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.02,0.106,0.101,0.104,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.015,0.097,0.094,0.104,0,0,1 0.53,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0.201,0.108,0.186,0,0,1 0.53,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.028,0.13,0.101,0.128,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.004,0.011,0.115,0.09,0.128,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.015,0.076,0.081,0.093,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.017,0.094,0.085,0.11,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0059,0.02,0.126,0.126,0.1,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.023,0.089,0.122,0.072,0,0,1 0.34,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0.026,0.037,0.107,0.121,0.088,0,0,1 0.55,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0.016,0.115,0.084,0.138,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0007,0.02,0.095,0.099,0.096,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.022,0.127,0.104,0.121,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.025,0.07,0.093,0.075,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.007,0.082,0.099,0.08266,0,0,1 0.46,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0.0017,0.011,0.094,0.091,0.103,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.023,0.092,0.099,0.09274,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.013,0.108,0.083,0.13,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.018,0.123,0.098,0.125,0,0,1 0.56,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0015,0.0206,0.103,0.107,0.097,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.017,0.086,0.091,0.095,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.018,0.107,0.104,0.101,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.012,0.088,0.085,0.104,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.0208,0.088,0.07,0.125,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.014,0.14,0.113,0.124,0,0,1 0.7,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0012,0.0206,0.109,0.108,0.1,0,0,1 0.59,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0052,0.0206,0.119,0.093,0.127,0,0,1 0.61,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.023,0.11,0.101,0.109,0,0,1 0.46,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.005,0.02,0.107,0.114,0.094,0,0,1 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.028,0.183,0.123,0.149,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.019,0.104,0.113,0.092,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.031,0.097,0.11,0.086,0,0,1 
0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.013,0.105,0.104,0.101,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0.005,0.024,0.132,0.018,1,0,0 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.016,0.101,0.093,0.108,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.017,0.077,0.087,0.089,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.016,0.103,0.092,0.112,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.02,0.101,0.104,0.097,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00027,0.0206,0.11118,0.099,0.11207,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00063,0.018,0.12,0.091,0.132,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.016,0.102,0.082,0.128,0,0,1 0.77,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.00189,0.018,0.111,0.096,0.115,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.013,0.097,0.09,0.108,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.037,0.184,0.18,0.102,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.02,0.109,0.1,0.109,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.022,0.087,0.116,0.074,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.018,0.162,0.099,0.164,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.018,0.142,0.113,0.125,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.023,0.132,0.116,0.114,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.016,0.126,0.087,0.145,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.015,0.14,0.088,0.16,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.02,0.114,0.133,0.086,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0206,0.08,0.086,0.093,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.013,0.116,0.097,0.119,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.023,0.117,0.111,0.105,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0208,0.116,0.099,0.11693,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00043,0.004,0.092,0.075,0.123,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.02,0.071,0.089,0.08,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00071,0.005,0.105,0.082,0.129,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.018,0.098,0.114,0.085,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00057,0.019,0.09,0.088,0.102,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.016,0.14,0.095,0.148,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.011,0.063,0.08,0.078,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.007,0.116,0.085,0.137,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.009,0.137,0.095,0.144,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.014,0.113,0.103,0.11,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.017,0.112,0.104,0.107,0,0,1 0.81,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.003,0.102,0.096,0.106,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.02,0.088,0.099,0.0887,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00809,0.024,0.132,0.128,0.103,0,1,0 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.018,0.105,0.099,0.10584,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0206,0.084,0.09,0.093,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.009,0.105,0.082,0.129,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.029,0.151,0.135,0.111,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.02,0.127,0.113,0.113,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.013,0.072,0.09,0.08,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.023,0.192,0.147,0.13,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.02,0.111,0.091,0.123,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.018,0.081,0.091,0.088,0,0,1 
0.28,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0032,0.013,0.096,0.09,0.107,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.022,0.116,0.103,0.112,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.02,0.104,0.104,0.098,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.016,0.088,0.089,0.1,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.093,0.099,0.095,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.0206,0.162,0.107,0.152,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.011,0.112,0.082,0.137,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.019,0.103,0.095,0.109,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.008,0.108,0.081,0.133,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.023,0.197,0.092,0.213,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.018,0.094,0.099,0.09475,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.0208,0.089,0.086,0.103,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00071,0.017,0.102,0.112,0.092,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.013,0.138,0.107,0.129,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.026,0.16,0.088,0.18,0,0,1 0.7,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.017,0.071,0.108,0.066,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.003,0.064,0.066,0.096,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.02,0.103,0.103,0.1,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0063,0.02,0.119,0.104,0.114,0,1,0 0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0206,0.087,0.1,0.087,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.0206,0.085,0.095,0.09,0,0,1 0.33,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0.0039,0.022,0.12,0.141,0.085,0,0,1 0.35,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00072,0.038,0.14,0.116,0.121,0,0,1 0.81,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.012,0.147,0.104,0.138,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0208,0.124,0.108,0.114,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.022,0.103,0.135,0.076,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.0208,0.094,0.079,0.119,0,0,1 0.74,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0005,0.008,0.151,0.101,0.15,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00049,0.017,0.146,0.089,0.165,0,0,1 0.19,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.009,0.022,0.117,0.1,0.11688,0,1,0 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0044,0.027,0.131,0.173,0.076,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0023,0.018,0.078,0.104,0.076,0,0,1 0.7,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.01,0.112,0.102,0.11,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.019,0.113,0.114,0.099,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.017,0.067,0.115,0.059,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.02,0.054,0.166,0.033,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.016,0.061,0.109,0.056,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.012,0.039,0.095,0.041,0,0,1 0.8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.013,0.093,0.086,0.108,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.002,0.078,0.056,0.139,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.017,0.104,0.099,0.10483,0,0,1 0.82,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.01,0.068,0.077,0.088,0,0,1 0.47,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0015,0.015,0.098,0.086,0.113,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.019,0.081,0.1,0.082,0,0,1 0.62,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0034,0.013,0.092,0.089,0.104,0,0,1 0.8,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.017,0.083,0.111,0.076,0,0,1 
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.015,0.087,0.094,0.093,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.015,0.076,0.085,0.089,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.016,0.096,0.099,0.09677,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.014,0.071,0.072,0.099,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0045,0.013,0.089,0.073,0.121,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.005,0.083,0.077,0.108,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.002,0.115,0.092,0.126,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.024,0.129,0.116,0.109,0,0,1 0.37,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0.011,0.012,0.074,0.077,0.096,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.013,0.092,0.092,0.1,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0.00094,0.013,0.078,0.112,0.07,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.12,0.097,0.124,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.006,0.08,0.072,0.11,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.015,0.106,0.092,0.115,0,1,0 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.012,0.117,0.096,0.121,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00056,0.006,0.049,0.054,0.09,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.095,0.079,0.12,0,0,1 0.41,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.015,0.1,0.101,0.099,0,0,1 0.27,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0.025,0.145,0.145,0.1,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.01,0.072,0.07,0.103,0,0,1 0.65,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,3e-05,0.019,0.151,0.089,0.169,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.016,0.114,0.102,0.112,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.033,0.122,0.146,0.083,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.018,0.114,0.102,0.112,0,0,1 0.62,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00859,0.0174,0.086,0.084,0.101,0,1,0 0.64,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0029,0.0206,0.121,0.1,0.121,0,0,1 0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00054,0.025,0.17,0.14,0.121,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.012,0.105,0.084,0.125,0,1,0 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.018,0.091,0.092,0.099,0,0,1 0.78,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.018,0.122,0.107,0.114,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.009,0.126,0.085,0.148,0,0,1 0.71,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,3e-05,0.038,0.171,0.113,0.151,0,0,1 0.66,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0.0017,0.014,0.089,0.087,0.103,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.017,0.081,0.099,0.08165,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00079,0.015,0.145,0.088,0.165,0,0,1 0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.045,0.239,0.21,0.113,0,0,1 0.61,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0038,0.013,0.093,0.084,0.111,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039,0.017,0.104,0.104,0.1,0,0,1 0.47,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0012,0.0208,0.13,0.099,0.131,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004,0.0206,0.11118,0.099,0.11207,0,0,1 0.35,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.017,0.114,0.098,0.115,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.65,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.128,0.122,0.105,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.016,0.103,0.092,0.111,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0019,0.018,0.122,0.092,0.113,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00469,0.02,0.019,0.112,0.017,0,0,1 
0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.016,0.108,0.114,0.095,0,0,1 0.4,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0092,0.015,0.113,0.103,0.109,0,1,0 0.71,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,3e-05,0.015,0.104,0.104,0.1,0,0,1 0.17,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.016,0.151,0.139,0.109,0,0,1 0.5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0206,0.124,0.099,0.13,0,0,1 0.41,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0032,0.013,0.078,0.097,0.08,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.01,0.054,0.078,0.07,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.014,0.085,0.093,0.092,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.016,0.096,0.097,0.099,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.013,0.102,0.095,0.108,0,0,1 0.59,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0068,0.007,0.084,0.078,0.107,0,1,0 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0.005,0.011,0.119,0.00909,1,0,0 0.62,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00054,0.0206,0.085,0.094,0.091,0,0,1 0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.1,0.109,0.091,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.32,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0028,0.013,0.086,0.089,0.096,0,0,1 0.28,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.103,0.0208,0.065,0.116,0.056,1,0,0 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0085,0.0208,0.091,0.116,0.079,0,1,0 0.2,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,3e-05,0.027,0.107,0.107,0.1,0,0,1 0.86,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.002,0.038,0.076,0.05,0,0,1 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0074,0.02,0.12,0.102,0.117,0,1,0 0.67,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00189,0.0206,0.101,0.101,0.1,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00075,0.016,0.1,0.093,0.107,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.023,0.104,0.099,0.10483,0,0,1 0.74,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.023,0.002,0.05,0.085,0.058,1,0,0 0.38,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0208,0.102,0.097,0.105,0,0,1 0.32,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0022,0.02,0.102,0.086,0.119,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0208,0.084,0.1,0.083,0,0,1 0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.107,0.109,0.099,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.146,0.134,0.108,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.019,0.092,0.1,0.092,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.017,0.129,0.11,0.116,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.018,0.083,0.119,0.069,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.019,0.097,0.103,0.094,0,0,1 0.58,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0206,0.137,0.091,0.151,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.023,0.104,0.108,0.096,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.121,0.094,0.129,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.023,0.127,0.1,0.127,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.019,0.122,0.095,0.129,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.014,0.074,0.052,0.143,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.016,0.101,0.11,0.092,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.041,0.16,0.078,0.204,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.02,0.164,0.092,0.178,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0206,0.075,0.087,0.086,0,0,1 0.54,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.00086,0.017,0.098,0.096,0.102,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00573,0.018,0.109,0.104,0.104,0,0,1 
0.34,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,3e-05,0.025,0.119,0.155,0.076,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.02,0.09,0.093,0.097,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.118,0.099,0.119,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.011,0.137,0.086,0.16,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.017,0.084,0.063,0.133,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.022,0.116,0.114,0.102,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.017,0.098,0.074,0.132,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,8e-05,0.0206,0.126,0.1,0.126,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.006,0.015,0.097,0.088,0.11,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.022,0.118,0.101,0.117,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.017,0.091,0.098,0.093,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.019,0.108,0.104,0.102,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.013,0.08,0.088,0.091,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.02,0.092,0.1,0.092,0,0,1 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.091,0.068,0.133,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.67,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.026,0.113,0.109,0.104,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.022,0.114,0.093,0.123,0,0,1 0.48,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0015,0.025,0.101,0.097,0.104,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.031,0.099,0.101,0.098,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.026,0.117,0.131,0.09,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0072,0.0208,0.123,0.116,0.104,0,1,0 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.01,0.061,0.077,0.08,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.013,0.092,0.089,0.103,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.015,0.125,0.077,0.163,0,0,1 0.33,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0082,0.015,0.074,0.09,0.083,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.024,0.107,0.11,0.092,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.024,0.098,0.108,0.091,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.022,0.139,0.102,0.136,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0208,0.091,0.093,0.098,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.019,0.094,0.107,0.088,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.022,0.09,0.104,0.087,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.016,0.123,0.109,0.113,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0027,0.015,0.076,0.088,0.086,0,0,1 0.33,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.01029,0.02,0.068,0.115,0.059,1,0,0 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.026,0.118,0.119,0.099,0,0,1 0.33,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0.003,0.025,0.121,0.138,0.088,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.024,0.136,0.092,0.148,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01109,0.013,0.088,0.104,0.083,0,1,0 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.17,0.11,0.155,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.018,0.115,0.094,0.123,0,0,1 0.77,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00989,0.0206,0.121,0.102,0.118,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.0208,0.083,0.082,0.101,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0078,0.02,0.159,0.151,0.105,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.016,0.085,0.087,0.098,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.015,0.133,0.108,0.124,0,0,1 
0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.025,0.106,0.081,0.13,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.017,0.101,0.085,0.119,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0052,0.02,0.088,0.104,0.085,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.023,0.093,0.086,0.108,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.016,0.105,0.083,0.127,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.012,0.122,0.094,0.131,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.026,0.166,0.08,0.207,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.014,0.079,0.079,0.101,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0208,0.102,0.112,0.091,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.023,0.098,0.107,0.092,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.014,0.086,0.098,0.088,0,0,1 0.15,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0034,0.0206,0.112,0.13,0.086,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00051,0.013,0.121,0.078,0.155,0,0,1 0.26,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,4e-05,0.016,0.097,0.1,0.095,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0019,0.0208,0.118,0.12,0.098,0,0,1 0.33,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.0206,0.132,0.087,0.152,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.015,0.16,0.087,0.185,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.013,0.089,0.09,0.098,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.68,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.012,0.014,0.111,0.12,0.093,0,1,0 0.68,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00097,0.013,0.085,0.087,0.098,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.015,0.126,0.096,0.131,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.009,0.086,0.085,0.102,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.013,0.101,0.086,0.117,0,0,1 0.83,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0017,0.015,0.097,0.098,0.099,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00095,0.026,0.152,0.14,0.11,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0095,0.0206,0.115,0.131,0.088,0,0,1 0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00076,0.0206,0.107,0.091,0.118,0,0,1 0.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00015,0.013,0.126,0.096,0.132,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.025,0.11,0.116,0.094,0,1,0 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.017,0.093,0.096,0.098,0,0,1 0.38,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00037,0.022,0.086,0.075,0.115,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.025,0.094,0.104,0.09,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.016,0.063,0.1,0.063,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.013,0.09,0.104,0.086,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.024,0.115,0.114,0.1,0,0,1 0.64,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.001,0.017,0.096,0.09,0.107,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.012,0.083,0.082,0.101,0,1,0 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.016,0.093,0.081,0.114,0,0,1 0.61,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.083,0.083,0.101,0,0,1 0.53,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0038,0.025,0.105,0.104,0.101,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.009,0.111,0.104,0.107,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.017,0.105,0.099,0.10584,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.024,0.105,0.116,0.088,0,0,1 0.36,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,2e-05,0.029,0.194,0.163,0.119,0,0,1 0.63,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00809,0.0206,0.082,0.069,0.119,0,0,1 0.77,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0019,0.0206,0.11118,0.099,0.11207,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.024,0.096,0.098,0.098,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.054,0.152,0.15,0.102,0,0,1 
0.25,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.036,0.045,0.162,0.115,0.142,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.023,0.092,0.08,0.114,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0026,0.03,0.125,0.113,0.111,0,0,1 0.15,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0023,0.034,0.115,0.096,0.12,0,0,1 0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.015,0.135,0.099,0.13608,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0077,0.019,0.089,0.104,0.085,0,1,0 0.58,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.039,0.181,0.096,0.188,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.016,0.069,0.082,0.083,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.086,0.005,0.027,0.084,0.032,1,0,0 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.015,0.08,0.112,0.071,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.013,0.104,0.104,0.099,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.011,0.11,0.108,0.102,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.008,0.161,0.097,0.166,0,0,1 0.77,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.005,0.054,0.086,0.062,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.025,0.094,0.115,0.082,0,0,1 0.93,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.003,0.009,0.092,0.081,0.112,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.017,0.131,0.149,0.088,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,6e-05,0.025,0.121,0.125,0.097,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.017,0.119,0.099,0.11995,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0048,0.029,0.132,0.127,0.104,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.02,0.105,0.128,0.083,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.025,0.133,0.108,0.123,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.02,0.116,0.126,0.092,0,0,1 0.19,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0027,0.027,0.121,0.116,0.104,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.026,0.12,0.12,0.1,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.031,0.133,0.153,0.087,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.016,0.112,0.113,0.1,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.022,0.091,0.111,0.082,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.016,0.068,0.102,0.066,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.017,0.092,0.101,0.091,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0072,0.035,0.126,0.138,0.092,0,1,0 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0208,0.112,0.125,0.09,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.011,0.111,0.102,0.109,0,0,1 0.55,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0017,0.019,0.132,0.115,0.114,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0208,0.128,0.094,0.136,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.018,0.102,0.097,0.106,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.02,0.115,0.11,0.104,0,0,1 0.53,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0007,0.0206,0.046,0.087,0.052,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.016,0.091,0.099,0.091,0,0,1 0.54,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.02,0.11,0.099,0.111,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.025,0.139,0.112,0.124,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.134,0.12,0.112,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.024,0.111,0.152,0.073,0,0,1 0.64,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0013,0.026,0.118,0.111,0.106,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00085,0.02,0.103,0.093,0.111,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.014,0.103,0.11,0.094,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0006,0.0206,0.087,0.1,0.088,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.026,0.117,0.124,0.094,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00037,0.012,0.091,0.095,0.096,0,0,1 
0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0208,0.112,0.099,0.1129,0,0,1 0.67,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00081,0.0206,0.084,0.083,0.101,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2e-05,0.022,0.108,0.085,0.126,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.027,0.091,0.092,0.098,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.035,0.158,0.159,0.1,0,0,1 0.59,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.023,0.098,0.108,0.091,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.057,0.372,0.127,0.291,0,0,1 0.72,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0023,0.008,0.09,0.08,0.112,0,0,1 0.72,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.013,0.013,0.106,0.131,0.082,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00089,0.013,0.118,0.087,0.136,0,0,1 0.64,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00068,0.014,0.092,0.081,0.114,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.011,0.13,0.103,0.127,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.004,0.056,0.068,0.083,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.018,0.092,0.099,0.09274,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.023,0.087,0.096,0.09,0,0,1 0.7,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,2e-05,0.017,0.141,0.088,0.16,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.012,0.095,0.111,0.085,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.026,0.102,0.113,0.091,0,0,1 0.48,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0016,0.01,0.071,0.081,0.088,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00064,0.023,0.107,0.104,0.101,0,0,1 0.34,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,2e-05,0.037,0.182,0.175,0.104,0,0,1 0.35,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00035,0.025,0.099,0.104,0.096,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.018,0.099,0.098,0.101,0,0,1 0.9,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0057,0.016,0.089,0.091,0.098,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.02,0.107,0.097,0.11,0,0,1 0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.006,0.02,0.103,0.099,0.10383,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.024,0.138,0.116,0.118,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.103,0.109,0.095,0,0,1 0.5,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.015,0.02,0.071,0.108,0.066,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.018,0.097,0.097,0.101,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.016,0.082,0.088,0.093,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.199,0.006,0.01,0.103,0.01,1,0,0 0.21,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.017,0.079,0.077,0.102,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.115,0.108,0.107,0,0,1 0.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00879,0.019,0.113,0.086,0.131,0,1,0 0.01,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.014,0.082,0.074,0.111,0,0,1 0.01,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0066,0.017,0.068,0.1,0.06793,0,1,0 0.51,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.026,0.118,0.091,0.13,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.016,0.115,0.099,0.11592,0,0,1 0.74,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.009,0.115,0.075,0.153,0,0,1 0.74,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0034,0.01,0.102,0.09,0.114,0,0,1 0.61,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.017,0.022,0.131,0.122,0.108,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.022,0.08,0.107,0.075,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.011,0.01,0.101,0.079,0.128,0,1,0 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.016,0.119,0.092,0.13,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.023,0.122,0.086,0.142,0,0,1 0.38,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,3e-05,0.018,0.13,0.087,0.15,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00078,0.02,0.07,0.076,0.092,0,0,1 0.6,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.099,0.008,0.012,0.127,0.01,1,0,0 
0.61,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.002,0.0206,0.085,0.1,0.085,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.014,0.071,0.097,0.073,0,1,0 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00079,0.019,0.107,0.099,0.108,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.02,0.089,0.114,0.079,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.018,0.1,0.099,0.1008,0,0,1 0.75,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.147,0.08,0.183,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.009,0.065,0.088,0.074,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.003,0.087,0.069,0.126,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.027,0.13,0.099,0.13103,0,0,1 0.74,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.023,0.007,0.069,0.104,0.066,0,1,0 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.018,0.072,0.086,0.084,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.018,0.015,0.066,0.1,0.066,0,1,0 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.013,0.106,0.084,0.125,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.123,0.104,0.118,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.019,0.094,0.091,0.102,0,0,1 0.27,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0.0022,0.041,0.157,0.201,0.078,0,0,1 0.74,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0011,0.024,0.095,0.109,0.086,0,0,1 0.65,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,2e-05,0.039,0.133,0.123,0.108,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00077,0.032,0.175,0.147,0.119,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.015,0.086,0.065,0.132,0,0,1 0.8,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.018,0.112,0.099,0.113,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.62,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.022,0.183,0.103,0.178,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00189,0.0206,0.089,0.11,0.081,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.018,0.092,0.102,0.09,0,0,1 0.6,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.017,0.075,0.103,0.072,0,0,1 0.6,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.039,0.123,0.098,0.126,0,0,1 0.66,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.015,0.129,0.102,0.127,0,0,1 0.58,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.017,0.123,0.096,0.129,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.0208,0.092,0.085,0.109,0,0,1 0.56,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0027,0.015,0.103,0.095,0.109,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0079,0.012,0.073,0.08,0.091,0,1,0 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.013,0.093,0.087,0.107,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0208,0.133,0.099,0.13407,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.017,0.097,0.076,0.129,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.01,0.084,0.069,0.121,0,0,1 0.74,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0063,0.012,0.08,0.092,0.087,0,0,1 0.14,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.028,0.093,0.086,0.109,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0071,0.014,0.088,0.093,0.095,0,1,0 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.0206,0.125,0.113,0.11,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.015,0.144,0.124,0.116,0,1,0 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.016,0.0174,0.087,0.091,0.095,0,1,0 0.25,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.002,0.022,0.123,0.152,0.081,0,0,1 0.25,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.003,0.0208,0.093,0.101,0.092,0,0,1 0.54,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,8e-05,0.0208,0.133,0.114,0.116,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00052,0.018,0.107,0.093,0.115,0,0,1 0.71,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0005,0.0206,0.126,0.102,0.124,0,0,1 0.65,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.031,0.114,0.091,0.126,0,0,1 0.34,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,2e-05,0.037,0.196,0.165,0.119,0,0,1 
0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.016,0.095,0.093,0.102,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00042,0.024,0.134,0.114,0.117,0,0,1 0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.029,0.116,0.12,0.097,0,0,1 0.43,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.02,0.054,0.093,0.058,0,0,1 0.57,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0009,0.02,0.097,0.071,0.135,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00084,0.008,0.094,0.086,0.109,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.008,0.099,0.088,0.113,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0.007,0.052,0.111,0.047,1,0,0 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.018,0.065,0.099,0.066,0,1,0 0.16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.0208,0.07,0.083,0.084,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.02,0.085,0.104,0.08,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.014,0.107,0.086,0.124,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.023,0.113,0.084,0.134,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.019,0.089,0.085,0.105,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.015,0.113,0.097,0.117,0,0,1 0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.02,0.123,0.116,0.107,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.022,0.122,0.115,0.106,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.026,0.076,0.085,0.089,0,0,1 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.024,0.094,0.116,0.081,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00087,0.02,0.095,0.091,0.105,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.019,0.097,0.1,0.097,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.02,0.118,0.116,0.102,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.014,0.096,0.088,0.109,0,0,1 0.72,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.018,0.09,0.084,0.108,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0067,0.019,0.091,0.103,0.089,0,1,0 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.015,0.094,0.088,0.107,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.01,0.065,0.063,0.105,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0208,0.121,0.102,0.118,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.008,0.101,0.081,0.125,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.012,0.022,0.094,0.096,0.098,0,1,0 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.015,0.112,0.098,0.114,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0208,0.113,0.109,0.115,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.006,0.08,0.07,0.115,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.022,0.113,0.109,0.103,0,0,1 0.55,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.002,0.017,0.115,0.104,0.105,0,0,1 0.64,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0206,0.089,0.095,0.094,0,0,1 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.02,0.121,0.102,0.118,0,0,1 0.44,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0044,0.019,0.082,0.09,0.09,0,0,1 0.68,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.02,0.025,0.111,0.157,0.071,0,1,0 0.67,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,7e-05,0.017,0.132,0.102,0.13,0,0,1 0.72,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0057,0.014,0.092,0.099,0.093,0,0,1 0.72,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.01,0.075,0.082,0.092,0,0,1 0.39,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2e-05,0.04,0.209,0.104,0.197,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.012,0.095,0.083,0.114,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.015,0.101,0.111,0.092,0,0,1 0.49,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00829,0.009,0.103,0.111,0.093,0,1,0 0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0007,0.018,0.096,0.094,0.101,0,0,1 
0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.016,0.014,0.068,0.101,0.067,0,1,0 0.7,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0011,0.017,0.095,0.068,0.14,0,0,1 0.66,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00208,0.02,0.128,0.104,0.123,0,0,1 0.59,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0043,0.011,0.07,0.1,0.07,0,0,1 0.53,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.02,0.131,0.093,0.142,0,0,1 0.55,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0019,0.018,0.097,0.091,0.106,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0044,0.023,0.118,0.116,0.101,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00093,0.0208,0.142,0.109,0.129,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.02,0.092,0.095,0.097,0,0,1 0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.023,0.135,0.154,0.088,0,1,0 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.058,0.024,0.025,0.121,0.02,1,0,0 0.15,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.145,0.017,0.019,0.113,0.017,1,0,0 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.029,0.118,0.152,0.078,0,0,1 0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00839,0.015,0.123,0.096,0.129,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.022,0.093,0.116,0.08,0,0,1 0.25,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,9e-05,0.022,0.181,0.123,0.147,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.007,0.122,0.113,0.108,0,1,0 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.003,0.027,0.11,0.087,0.126,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.019,0.095,0.104,0.09,0,0,1 0.79,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0013,0.009,0.102,0.095,0.107,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.019,0.091,0.079,0.115,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.02,0.121,0.081,0.149,0,0,1 0.66,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0208,0.108,0.096,0.112,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.027,0.155,0.098,0.159,0,0,1 0.58,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.0208,0.094,0.101,0.093,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00075,0.018,0.114,0.101,0.113,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.086,0.104,0.083,0,0,1 0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00089,0.02,0.094,0.099,0.09475,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,2e-05,0.0206,0.127,0.104,0.119,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00026,0.0206,0.12,0.104,0.114,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.02,0.139,0.082,0.17,0,0,1 0.58,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00022,0.026,0.156,0.111,0.14,0,0,1 0.35,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.0206,0.129,0.101,0.128,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.016,0.127,0.1,0.126,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.018,0.092,0.084,0.11,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0031,0.0208,0.134,0.1,0.135,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.018,0.093,0.093,0.1,0,0,1 0.24,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.066,0.01,0.112,0.101,0.11,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.008,0.083,0.099,0.084,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.093,0.095,0.098,0,0,1 0.28,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00939,0.026,0.089,0.102,0.087,0,1,0 0.05,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.02,0.074,0.095,0.078,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.041,0.0174,0.118,0.113,0.105,0,1,0 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.022,0.082,0.078,0.105,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.024,0.082,0.091,0.09,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0208,0.085,0.099,0.08568,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00092,0.009,0.086,0.076,0.113,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00068,0.022,0.105,0.099,0.10584,0,0,1 
0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.022,0.112,0.097,0.115,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.012,0.099,0.088,0.112,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1e-05,0.029,0.205,0.087,0.235,0,0,1 0.48,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0.0054,0.019,0.087,0.1,0.087,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.031,0.162,0.078,0.209,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.02,0.125,0.097,0.128,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0206,0.134,0.114,0.118,0,0,1 0.68,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00058,0.0206,0.15,0.103,0.146,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.134,0.129,0.104,0,0,1 0.32,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00096,0.0206,0.122,0.107,0.114,0,0,1 0.37,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0022,0.0206,0.1,0.114,0.087,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.016,0.073,0.08,0.092,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.02,0.102,0.099,0.10282,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00019,0.013,0.1,0.085,0.118,0,0,1 0.23,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0037,0.013,0.083,0.138,0.06,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.023,0.128,0.085,0.151,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.019,0.087,0.099,0.0877,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.022,0.071,0.1,0.071,0,0,1 0.32,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1e-05,0.023,0.147,0.075,0.198,0,0,1 0.61,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.103,0.088,0.118,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.02,0.101,0.113,0.088,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.02,0.139,0.104,0.134,0,0,1 0.64,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00024,0.014,0.065,0.085,0.077,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.019,0.119,0.089,0.134,0,0,1 0.3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.019,0.11,0.089,0.125,0,0,1 0.6,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.002,0.022,0.128,0.124,0.103,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.018,0.081,0.116,0.07,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00019,0.018,0.124,0.112,0.11,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.02,0.096,0.11,0.088,0,0,1 0.61,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.011,0.012,0.083,0.101,0.082,0,1,0 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.013,0.092,0.1,0.092,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0014,0.0206,0.078,0.086,0.09,0,0,1 0.43,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0011,0.023,0.14,0.12,0.116,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.022,0.083,0.1,0.082,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.008,0.074,0.075,0.099,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.002,0.018,0.088,0.086,0.102,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.019,0.098,0.082,0.118,0,0,1 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.019,0.077,0.079,0.097,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0032,0.024,0.102,0.088,0.116,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.027,0.181,0.165,0.109,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.015,0.081,0.092,0.088,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.016,0.084,0.096,0.087,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.0023,0.024,0.091,0.086,0.106,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.008,0.077,0.082,0.093,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.025,0.165,0.103,0.161,0,0,1 0.55,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00055,0.0208,0.075,0.101,0.074,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.017,0.085,0.092,0.092,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0031,0.018,0.087,0.116,0.075,0,0,1 
0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.024,0.102,0.104,0.096,0,0,1 0.35,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.023,0.098,0.092,0.106,0,0,1 0.29,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00062,0.038,0.211,0.188,0.112,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.025,0.108,0.109,0.099,0,0,1 0.34,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0.0063,0.027,0.153,0.17,0.09,0,0,1 0.94,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.013,0.119,0.104,0.114,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0011,0.022,0.115,0.095,0.121,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.107,0.107,0.1,0,0,1 0.21,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.29,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0011,0.015,0.093,0.075,0.124,0,0,1 0.25,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.00889,0.017,0.062,0.075,0.083,0,1,0 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0208,0.135,0.092,0.147,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.023,0.134,0.124,0.109,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.025,0.079,0.086,0.092,0,0,1 0.74,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0051,0.018,0.112,0.107,0.105,0,0,1 0.14,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0058,0.028,0.091,0.103,0.088,0,0,1 0.75,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.036,0.184,0.114,0.162,0,0,1 0.52,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,4e-05,0.019,0.086,0.077,0.112,0,0,1 0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.018,0.109,0.104,0.102,0,1,0 0.22,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.002,0.019,0.101,0.092,0.109,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00018,0.0208,0.132,0.11,0.12,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00022,0.024,0.129,0.114,0.113,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.022,0.146,0.148,0.098,0,0,1 0.78,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0026,0.003,0.087,0.095,0.091,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.02,0.092,0.103,0.089,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0003,0.012,0.101,0.09,0.112,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0067,0.025,0.098,0.088,0.111,0,1,0 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.036,0.144,0.084,0.171,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.01,0.101,0.086,0.117,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0206,0.111,0.11,0.101,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.067,0.098,0.068,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.092,0.083,0.111,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00052,0.024,0.158,0.116,0.134,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.019,0.092,0.076,0.121,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.029,0.133,0.116,0.113,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.019,0.086,0.104,0.081,0,0,1 0.28,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0.0015,0.015,0.12,0.101,0.118,0,0,1 0.45,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.0012,0.027,0.12,0.113,0.106,0,0,1 0.45,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.025,0.114,0.101,0.113,0,0,1 0.77,1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0.00093,0.019,0.121,0.136,0.089,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00072,0.0208,0.114,0.111,0.103,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.029,0.097,0.103,0.094,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0206,0.1,0.115,0.087,0,0,1 0.42,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0.039,0.205,0.17,0.121,0,0,1 0.34,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.035,0.198,0.179,0.111,0,0,1 0.56,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0206,0.17,0.096,0.177,0,0,1 0.78,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0063,0.013,0.12,0.111,0.109,0,0,1 0.11,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.84,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00067,0.009,0.07,0.078,0.09,0,0,1 
0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.002,0.059,0.079,0.074,0,0,1 0.6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.132,0.095,0.138,0,0,1 0.37,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.13,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.56,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.027,0.12,0.127,0.095,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00095,0.025,0.138,0.116,0.118,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.017,0.092,0.096,0.096,0,0,1 0.29,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.038,0.127,0.114,0.112,0,0,1 0.65,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0031,0.027,0.131,0.103,0.126,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.014,0.027,0.063,0.116,0.054,0,0,1 0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00085,0.03,0.121,0.125,0.097,0,0,1 0.3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0011,0.036,0.159,0.165,0.096,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.016,0.057,0.104,0.054,0,0,1 0.76,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0.039,0.0174,0.083,0.116,0.07,0,1,0 0.69,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.014,0.098,0.094,0.105,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00082,0.024,0.096,0.091,0.105,0,0,1 0.62,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00043,0.022,0.122,0.116,0.104,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0.002,0.033,0.07,0.047,1,0,0 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.028,0.134,0.126,0.107,0,0,1 0.24,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.028,0.122,0.113,0.109,0,0,1 0.38,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0071,0.0206,0.13,0.098,0.132,0,0,1 0.62,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0034,0.013,0.134,0.119,0.113,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00045,0.016,0.101,0.089,0.114,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.02,0.081,0.101,0.081,0,1,0 0.6,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0082,0.017,0.065,0.096,0.068,0,1,0 0.44,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,4e-05,0.0206,0.141,0.103,0.137,0,0,1 0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.033,0.113,0.123,0.092,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.019,0.0206,0.063,0.097,0.065,0,0,1 0.23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.01,0.115,0.092,0.125,0,0,1 0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0033,0.022,0.109,0.107,0.101,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00021,0.009,0.122,0.104,0.117,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.015,0.103,0.107,0.096,0,0,1 0.75,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00046,0.029,0.139,0.153,0.091,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00048,0.017,0.097,0.104,0.093,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.015,0.139,0.103,0.135,0,0,1 0.87,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.016,0.134,0.079,0.17,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0206,0.069,0.076,0.091,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.022,0.096,0.103,0.093,0,0,1 0.43,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.0206,0.171,0.098,0.174,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.016,0.118,0.101,0.117,0,0,1 0.27,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00087,0.018,0.098,0.1,0.098,0,0,1 0.25,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0.0015,0.027,0.12,0.131,0.091,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.029,0.158,0.103,0.154,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00091,0.024,0.124,0.104,0.118,0,0,1 0.31,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0051,0.0206,0.093,0.097,0.095,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00081,0.023,0.109,0.116,0.093,0,0,1 0.67,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.018,0.08,0.086,0.093,0,0,1 
0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.019,0.089,0.092,0.096,0,0,1 0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00102,0.03,0.13,0.099,0.13103,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0013,0.023,0.097,0.109,0.088,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.022,0.107,0.095,0.113,0,0,1 0.78,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00208,0.018,0.08,0.072,0.111,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.012,0.122,0.1,0.122,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.022,0.11,0.121,0.091,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.018,0.106,0.116,0.091,0,0,1 0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036,0.07,0.141,0.177,0.08,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0006,0.0208,0.148,0.119,0.124,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.025,0.118,0.099,0.11895,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.089,0.083,0.107,0,0,1 0.57,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023,0.213,0.109,0.195,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.026,0.109,0.104,0.103,0,1,0 0.41,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.044,0.138,0.064,0.218,0,0,1 0.5,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.022,0.095,0.11,0.086,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00879,0.027,0.108,0.111,0.097,0,1,0 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0.014,0.041,0.114,0.036,1,0,0 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.072,0.092,0.078,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056,0.022,0.099,0.102,0.097,0,0,1 0.22,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.023,0.111,0.102,0.108,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.019,0.094,0.089,0.105,0,0,1 0.34,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0.00829,0.031,0.172,0.18,0.096,0,0,1 0.63,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0.21,0.111,0.188,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0015,0.0208,0.13,0.104,0.124,0,0,1 0.27,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0.00208,0.0208,0.114,0.11,0.104,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.018,0.094,0.094,0.1,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.022,0.116,0.104,0.111,0,0,1 0.52,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.018,0.086,0.086,0.101,0,0,1 0.65,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00047,0.023,0.113,0.091,0.124,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.025,0.102,0.116,0.087,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00044,0.0206,0.145,0.108,0.135,0,0,1 0.55,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0206,0.161,0.09,0.18,0,0,1 0.79,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0.151,0.074,0.203,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0208,0.1,0.095,0.106,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.025,0.109,0.111,0.098,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.036,0.139,0.153,0.091,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.022,0.09,0.116,0.078,0,0,1 0.83,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.0208,0.09,0.088,0.102,0,0,1 0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0018,0.024,0.1,0.102,0.098,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.02,0.131,0.107,0.122,0,0,1 0.14,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00078,0.022,0.078,0.088,0.089,0,0,1 0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.014,0.095,0.111,0.086,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00083,0.0206,0.193,0.133,0.144,0,0,1 0.22,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00056,0.027,0.179,0.125,0.143,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.024,0.112,0.101,0.111,0,0,1 0.44,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00208,0.033,0.14,0.165,0.086,0,0,1 0.46,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2e-05,0.027,0.107,0.077,0.139,0,0,1 0.74,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0015,0.0208,0.07,0.074,0.094,0,0,1 
0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.013,0.084,0.091,0.097,0,0,1 0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022,0.02,0.099,0.096,0.103,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.022,0.057,0.092,0.063,0,0,1 0.72,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0.006,0.024,0.118,0.126,0.094,0,0,1 0.68,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.042,0.002,0.018,0.109,0.016,1,0,0 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.08,0.098,0.082,0,0,1 0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.019,0.083,0.074,0.112,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.025,0.106,0.097,0.11,0,0,1 0.61,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00099,0.015,0.063,0.056,0.113,0,0,1 0.61,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0079,0.029,0.116,0.102,0.114,0,1,0 0.71,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.025,0.003,0.031,0.068,0.046,1,0,0 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.027,0.089,0.091,0.099,0,0,1 0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.4,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0.0013,0.026,0.135,0.109,0.124,0,0,1 0.32,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00045,0.025,0.1,0.116,0.086,0,0,1 0.47,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.00469,0.024,0.102,0.112,0.092,0,0,1 0.72,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.02,0.164,0.099,0.16532,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.022,0.143,0.092,0.154,0,0,1 0.61,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0018,0.023,0.089,0.085,0.105,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.022,0.1,0.093,0.108,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00034,0.025,0.163,0.111,0.148,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.02,0.13,0.093,0.139,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.133,0.108,0.123,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.038,0.157,0.152,0.103,0,0,1 0.54,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1e-05,0.036,0.142,0.123,0.115,0,0,1 0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.02,0.087,0.073,0.118,0,0,1 0.49,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049,0.007,0.082,0.079,0.104,0,0,1 0.34,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,3e-05,0.0419,0.134,0.165,0.081,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.015,0.11,0.089,0.124,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.035,0.11118,0.099,0.11207,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.175,0.086,0.204,0,0,1 0.24,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0014,0.023,0.073,0.109,0.067,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00033,0.018,0.134,0.097,0.138,0,0,1 0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0008,0.034,0.046,0.099,0.04637,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0045,0.02,0.139,0.086,0.161,0,0,1 0.63,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3e-05,0.033,0.12,0.096,0.125,0,0,1 0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0007,0.02,0.045,0.099,0.04536,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.02,0.096,0.104,0.092,0,0,1 0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0012,0.0208,0.134,0.104,0.127,0,0,1 0.71,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0032,0.018,0.086,0.081,0.106,0,0,1 0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.019,0.126,0.09,0.141,0,0,1 0.9,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.019,0.203,0.09,0.224,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.117,0.004,0.017,0.114,0.015,1,0,0 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.013,0.084,0.082,0.102,0,1,0 0.63,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.0206,0.162,0.099,0.163,0,0,1 0.66,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0011,0.018,0.135,0.109,0.124,0,0,1 0.66,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0.0012,0.023,0.133,0.116,0.113,0,0,1 0.44,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0023,0.023,0.118,0.101,0.117,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00073,0.024,0.135,0.104,0.129,0,0,1 
0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00088,0.03,0.069,0.067,0.102,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0012,0.026,0.121,0.097,0.125,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.00063,0.026,0.16,0.12,0.134,0,0,1 0.49,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.024,0.111,0.107,0.104,0,0,1 0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.0208,0.183,0.102,0.179,0,0,1 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.089,0.002,0.01,0.113,0.009,1,0,0 0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0076,0.017,0.128,0.108,0.118,0,1,0 0.68,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0015,0.023,0.117,0.104,0.113,0,0,1 0.71,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.038,0.004,0.037,0.074,0.05,1,0,0 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.006,0.082,0.046,0.179,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3e-05,0.012,0.098,0.083,0.118,0,0,1 0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00061,0.016,0.116,0.091,0.127,0,0,1 0.5,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.076,0.005,0.022,0.112,0.019,0,0,1 0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.043,0.016,0.063,0.084,0.075,0,1,0 0.18,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.0208,0.079,0.11,0.072,0,0,1 0.64,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00024,0.0206,0.083,0.09,0.093,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00069,0.019,0.129,0.102,0.126,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.016,0.119,0.109,0.11,0,0,1 0.5,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0048,0.006,0.098,0.08,0.122,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0062,0.019,0.089,0.094,0.095,0,1,0 0.63,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0058,0.019,0.102,0.116,0.087,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00048,0.017,0.088,0.083,0.105,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.02,0.127,0.104,0.122,0,0,1 0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.019,0.111,0.099,0.112,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.017,0.153,0.104,0.144,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.019,0.125,0.101,0.124,0,0,1 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.003,0.073,0.09,0.081,0,0,1 0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.029,0.122,0.102,0.119,0,0,1 0.48,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0024,0.02,0.066,0.074,0.089,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0208,0.096,0.085,0.112,0,0,1 0.59,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0001,0.02,0.102,0.082,0.125,0,0,1 0.61,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00038,0.0206,0.121,0.095,0.128,0,0,1 0.44,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.041,0.015,0.048,0.116,0.041,1,0,0 0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.076,0.093,0.082,0,0,1 0.16,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.019,0.112,0.126,0.089,0,0,1 0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0054,0.013,0.073,0.072,0.101,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0045,0.011,0.132,0.101,0.131,0,0,1 0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0.015,0.088,0.104,0.083,0,1,0 0.34,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00083,0.024,0.114,0.102,0.112,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00909,0.0174,0.101,0.112,0.09,0,1,0 0.33,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.001,0.014,0.088,0.078,0.113,0,0,1 0.76,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0.0018,0.012,0.065,0.076,0.086,0,0,1 0.67,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.002,0.025,0.133,0.119,0.112,0,0,1 0.46,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0.0014,0.022,0.122,0.103,0.119,0,0,1 0.55,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00098,0.0206,0.107,0.099,0.107,0,0,1 0.54,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0009,0.0206,0.113,0.09,0.125,0,0,1 0.75,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0206,0.074,0.104,0.07,0,0,1 0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.013,0.118,0.112,0.105,0,0,1 0.34,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.019,0.039,0.146,0.116,0.125,0,1,0 
0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.018,0.18,0.082,0.219,0,0,1 0.18,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0011,0.0206,0.147,0.095,0.154,0,0,1 0.41,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.017,0.119,0.099,0.12,0,0,1 0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0206,0.095,0.108,0.088,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0206,0.072,0.075,0.096,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002,0.0206,0.114,0.089,0.129,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0046,0.012,0.048,0.089,0.054,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037,0.016,0.076,0.081,0.095,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.00082,0.0208,0.124,0.103,0.121,0,0,1 0.5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00067,0.026,0.085,0.089,0.096,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011,0.038,0.161,0.174,0.093,0,0,1 0.42,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.011,0.093,0.078,0.119,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0023,0.009,0.105,0.061,0.173,0,0,1 0.58,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0011,0.023,0.06,0.089,0.068,0,0,1 0.29,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.015,0.0206,0.113,0.113,0.1,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.012,0.084,0.067,0.125,0,0,1 0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.01,0.077,0.086,0.09,0,0,1 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016,0.0206,0.067,0.071,0.095,0,0,1 0.37,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.018,0.109,0.097,0.112,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0.003,0.013,0.099,0.013,1,0,0 0.57,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0026,0.007,0.115,0.099,0.11592,0,0,1 0.59,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0055,0.007,0.077,0.074,0.104,0,0,1 0.73,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.0206,0.102,0.089,0.115,0,0,1 0.47,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0034,0.022,0.102,0.102,0.101,0,0,1 0.21,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00189,0.0206,0.132,0.087,0.152,0,0,1 0.34,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0206,0.166,0.096,0.174,0,0,1 0.31,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0.005,0.0206,0.141,0.109,0.13,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0206,0.103,0.065,0.158,0,0,1 0.26,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.02,0.111,0.08,0.138,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.039,0.141,0.112,0.126,0,0,1 0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.51,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.022,0.095,0.096,0.1,0,0,1 0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0041,0.0206,0.085,0.092,0.092,0,0,1 0.46,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0006,0.0206,0.098,0.094,0.104,0,0,1 0.76,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0206,0.143,0.076,0.189,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0017,0.02,0.102,0.096,0.106,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.004,0.102,0.1,0.10189,0,1,0 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0206,0.07,0.09,0.078,0,0,1 0.47,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0206,0.073,0.079,0.092,0,0,1 0.58,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.016,0.071,0.082,0.086,0,0,1 0.25,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029,0.0206,0.088,0.087,0.1,0,0,1 0.6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.071,0.184,0.103,0.177,0,0,1 0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.119,0.112,0.106,0,0,1 0.26,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00939,0.0174,0.07,0.095,0.074,0,1,0 0.39,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.024,0.135,0.151,0.089,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.036,0.119,0.09,0.133,0,0,1 0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00042,0.03,0.091,0.115,0.079,0,0,1 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.019,0.098,0.104,0.092,0,0,1 0.4,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0014,0.018,0.092,0.085,0.108,0,0,1 
0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0017,0.0206,0.117,0.121,0.097,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00096,0.0206,0.129,0.135,0.096,0,0,1 0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.001,0.0206,0.103,0.101,0.102,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00027,0.016,0.104,0.078,0.134,0,0,1 0.75,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,4e-05,0.02,0.146,0.104,0.139,0,0,1 0.25,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.0027,0.0206,0.073,0.097,0.076,0,0,1 0.44,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.5,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1e-05,0.028,0.074,0.114,0.065,0,0,1 0.53,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.024,0.104,0.081,0.128,0,0,1 0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.085,0.079,0.108,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098,0.0206,0.158,0.131,0.12,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00419,0.0206,0.078,0.077,0.102,0,0,1 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.0174,0.142,0.137,0.104,0,1,0 0.18,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0206,0.091,0.098,0.093,0,0,1 0.44,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0.0001,0.006,0.061,0.086,0.071,0,0,1 0.28,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0031,0.027,0.1,0.093,0.108,0,0,1 0.73,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0039,0.0208,0.13,0.111,0.116,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0206,0.107,0.115,0.092,0,0,1 0.45,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.101,0.095,0.106,0,0,1 0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00097,0.011,0.077,0.086,0.089,0,0,1 0.45,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0.01,0.003,0.067,0.1,0.06693,0,1,0 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0026,0.019,0.115,0.102,0.114,0,0,1 0.3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0.0027,0.0206,0.125,0.143,0.087,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.041,0.139,0.071,0.196,0,0,1 0.42,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.052,0.008,0.028,0.086,0.033,1,0,0 0.55,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.019,0.107,0.074,0.144,0,0,1 0.71,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.013,0.093,0.081,0.115,0,0,1 0.45,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00189,0.013,0.11118,0.099,0.11207,0,0,1 0.56,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.022,0.074,0.086,0.086,0,0,1 0.42,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,6e-05,0.02,0.079,0.081,0.098,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00079,0.012,0.117,0.085,0.138,0,0,1 0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.128,0.094,0.136,0,0,1 0.73,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.007,0.063,0.088,0.072,0,0,1 0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00058,0.0206,0.08,0.082,0.098,0,0,1 0.57,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0.05,0.013,0.064,0.099,0.06451,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0008,0.0206,0.082,0.099,0.083,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.0206,0.161,0.133,0.121,0,0,1 0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.0206,0.105,0.104,0.101,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00034,0.014,0.123,0.096,0.128,0,0,1 0.25,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0.0011,0.0206,0.101,0.107,0.094,0,0,1 0.25,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0.014,0.0206,0.081,0.09,0.09,0,0,1 0.71,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1e-05,0.0206,0.146,0.093,0.156,0,0,1 0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0206,0.104,0.112,0.092,0,0,1 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019,0.0206,0.096,0.087,0.11,0,0,1 0.58,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0018,0.0206,0.063,0.086,0.073,0,0,1 0.57,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.082,0.107,0.077,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0039,0.0206,0.083,0.085,0.097,0,0,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.018,0.102,0.1,0.102,0,0,1 0.35,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0208,0.071,0.11,0.064,0,0,1 
0.6,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.11118,0.099,0.11207,0,0,1 0.38,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.0206,0.081,0.086,0.094,0,0,1 0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.006,0.094,0.099,0.09475,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0025,0.015,0.081,0.086,0.094,0,0,1 0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.091,0.091,0.1,0,0,1 0.45,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0.00208,0.0206,0.093,0.116,0.08,0,0,1 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0013,0.024,0.111,0.115,0.097,0,0,1 0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0043,0.0206,0.114,0.104,0.108,0,0,1 0.57,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.00208,0.0206,0.076,0.1,0.075,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013,0.01,0.102,0.082,0.124,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.0206,0.084,0.095,0.088,0,0,1 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.023,0.104,0.103,0.101,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0048,0.0206,0.087,0.093,0.093,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0.032,0.106,0.087,0.122,0,0,1 0.39,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0016,0.012,0.095,0.093,0.102,0,0,1 0.45,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,6e-05,0.046,0.21,0.111,0.19,0,0,1 0.45,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0.00044,0.028,0.153,0.091,0.168,0,0,1 0.54,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0014,0.0206,0.098,0.101,0.097,0,0,1 0.46,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5e-05,0.022,0.097,0.077,0.125,0,0,1 0.33,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015,0.0208,0.104,0.085,0.122,0,0,1 0.7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0027,0.0206,0.155,0.104,0.148,0,0,1 0.07,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0024,0.0206,0.155,0.094,0.165,0,0,1 0.38,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,6e-05,0.018,0.139,0.099,0.14012,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0031,0.016,0.086,0.104,0.083,0,0,1 0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.0174,0.087,0.085,0.103,0,1,0 0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0055,0.0208,0.082,0.092,0.089,0,0,1 0.58,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0026,0.0206,0.164,0.11,0.149,0,0,1 0.49,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.116,0.007,0.072,0.095,0.076,0,0,1 0.82,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0066,0.016,0.092,0.1,0.091,0,1,0 0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00073,0.0206,0.194,0.097,0.2,0,0,1 0.33,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.00016,0.052,0.183,0.083,0.22,0,0,1 0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4e-05,0.0208,0.159,0.101,0.157,0,0,1 0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028,0.0206,0.11118,0.099,0.11207,0,0,1 0.55,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0208,0.11118,0.099,0.11207,0,0,1 0.72,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.114,0.111,0.102,0,0,1 0.77,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0035,0.0206,0.114,0.1,0.115,0,0,1 0.62,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.017,0.096,0.099,0.09677,0,0,1 0.53,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,5e-05,0.022,0.089,0.083,0.107,0,0,1 0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.123,0.091,0.136,0,0,1 0.76,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0206,0.077,0.067,0.115,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.098,0.099,0.09879,0,0,1 0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.0011,0.022,0.145,0.123,0.118,0,0,1 0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.003,0.0206,0.096,0.082,0.118,0,0,1 0.68,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.0019,0.0206,0.093,0.086,0.108,0,0,1 0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034,0.0206,0.104,0.094,0.112,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0052,0.0206,0.095,0.111,0.085,0,0,1 0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0018,0.017,0.118,0.11,0.107,0,0,1 0.54,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00075,0.023,0.109,0.114,0.095,0,0,1 
0.43,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0.0017,0.0206,0.104,0.091,0.115,0,0,1 0.16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.025,0.071,0.08,0.088,0,0,1 0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0097,0.0174,0.098,0.13,0.076,0,1,0 0.16,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0062,0.0174,0.108,0.114,0.094,0,1,0 0.45,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0.0206,0.132,0.09,0.147,0,0,1 0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0014,0.0206,0.108,0.115,0.094,0,0,1 0.36,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00074,0.015,0.072,0.05,0.145,0,0,1 0.6,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.014,0.057,0.062,0.092,0,0,1 0.37,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00208,0.024,0.177,0.146,0.122,0,0,1 0.26,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,8e-05,0.017,0.174,0.123,0.142,0,0,1 0.3,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,8e-05,0.019,0.147,0.103,0.143,0,0,1 0.5,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0.0033,0.0208,0.107,0.107,0.1,0,0,1 0.59,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1e-05,0.02,0.113,0.086,0.131,0,0,1 0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.005,0.0206,0.101,0.088,0.115,0,0,1 0.65,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00047,0.008,0.154,0.075,0.204,0,0,1 0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6e-05,0.0206,0.125,0.107,0.117,0,0,1 0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013,0.025,0.125,0.114,0.109,0,0,1 0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.0174,0.123,0.099,0.124,0,1,0 0.64,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00078,0.0206,0.106,0.088,0.121,0,0,1 0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00189,0.0206,0.093,0.091,0.102,0,0,1 mlpack-2.2.5/src/mlpack/tests/data/trainRes.csv000066400000000000000000000002111315013601400213700ustar00rootroot000000000000002.75, 4.0000, 3.6875, 2.3750, 0.3333, 0.8000, 0.6292, 0.3833, 0.5161 8.3333, 4.6667, 3.6667, 2.4, 0.8095, 3.3810, 0.6667, 0.4000, 0.4839 mlpack-2.2.5/src/mlpack/tests/data/trainSet.csv000066400000000000000000000004661315013601400214060ustar00rootroot000000000000003,3,3,3,0 3,4,4,3,0 3,4,4,3,0 3,3,4,3,0 3,6,4,3,0 2,4,4,3,0 2,4,4,1,0 3,3,3,2,0 3,4,4,2,0 3,4,4,2,0 3,3,4,2,0 3,6,4,2,0 2,4,4,2,0 2,4,4,3,0 2,4,1,2,0 4,4,4,2,0 9,3,3,3,1 9,4,4,3,1 9,4,4,3,1 9,3,4,3,1 9,6,4,3,1 9,4,4,3,1 9,4,4,1,1 9,3,3,2,1 8,4,4,2,1 8,4,4,2,1 8,3,4,2,1 8,6,4,2,1 7,8,4,3,1 6,9,1,2,1 8,5,4,2,1 mlpack-2.2.5/src/mlpack/tests/data/train_labels_nonlinsep.txt000066400000000000000000000045401315013601400243620ustar00rootroot000000000000000 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 mlpack-2.2.5/src/mlpack/tests/data/train_nonlinsep.txt000066400000000000000000000766271315013601400230570ustar00rootroot000000000000000.5776630436,2.1309597023 -3.0669472019,-1.8597441654 1.0845304365,3.0051687315 0.4471632283,1.1012942797 1.7826952134,1.0852161162 1.924794268,-0.4051826671 0.0595111492,2.2449934136 -1.2502170484,0.6757151927 -0.9302175422,-3.591138588 1.5153385502,-3.5011496408 -3.1153526356,-1.7681782711 3.7785490381,-1.1599861909 2.7890564547,-0.9535823926 1.8706829785,-2.1677433947 -3.3145581648,-1.2972431409 -2.4968198958,0.043852875 0.7980325741,-0.4532930623 -0.4034906084,-0.4990962366 -0.2828657948,-0.6204905575 -0.4909594424,-1.6311860259 0.9262462824,3.0664773964 -0.4840191527,2.2321101352 -0.0479286621,-0.7381256532 -1.6523690992,-2.0621376615 1.9623633032,-0.2577322605 0.4740336876,-1.7287810156 -0.490574126,0.0268222193 0.9675209951,0.4577167161 3.4961129544,-1.7838478595 2.7758733559,-2.706610093 0.0825822986,0.063245482 0.8726450319,-1.6075716916 -0.3459855851,-0.5650438096 0.0105108776,-0.7521505173 1.946923494,0.6447151607 -0.4686211154,-0.3917953956 -1.6832757965,-2.0337097638 1.9224337657,-0.4130836777 0.4150605207,-1.7384385464 -0.9386278609,-1.361815927 -0.029786023,0.2660805296 0.9720216293,0.4424539026 0.7238578343,-2.6962680716 -0.9924402129,0.9559742458 3.7023874892,-1.3592205425 -1.3093259741,-2.3250555539 -2.1067050351,-1.5231093154 -1.6044639163,-2.1044574828 2.93286974,0.8110139065 0.564178443,-0.6535047111 0.2442915185,-1.7550914518 2.7657507904,-1.0096868578 -1.3681395292,1.9093902119 2.9985937917,-0.0245942786 0.0054335805,-1.750485589 -1.5494813932,2.9442675306 1.4582382218,-2.4323201924 1.4354010958,-1.2256998389 0.8573290562,-3.6880142451 -0.4918132286,1.215462411 -2.1095599051,-1.5185940312 -1.5241494709,1.8039834928 0.9351877837,-1.5789346668 -2.867566281,-2.1823209869 0.8028009792,3.1077500183 0.1849234821,1.2118720917 -0.2443369227,0.2728236765 2.7909542469,1.2744729499 -0.3221818838,-0.5876618818 0.63657528,2.1093926673 1.1553404732,2.9745430359 2.0927873254,2.3323235211 -2.412295237,-0.8361671642 -1.101221123,2.0515322441 2.3497793252,-1.700107971 0.498297816,1.0703482909 0.5028548299,1.067414398 0.6253945748,-1.6945480789 2.8347515431,1.1566951045 -0.3869530544,-0.5200087347 -0.2865202296,2.2519872569 -0.1046652488,1.2588586532 -2.3856109149,-0.9256780873 -1.4954286781,-0.3558217139 2.7240791262,-2.7546569062 
0.9010699213,0.6247940429 2.8153673985,1.2106583226 1.5180836739,-3.5000938662 2.5617284017,1.7364903658 0.9543025735,0.4983648152 -0.7055075034,-3.6521087021 1.9105794661,0.773522301 1.7736370029,-0.7726229789 -3.3528744871,-1.1779290739 1.7413741883,-2.2617188655 -0.4061121391,1.2355106279 0.7375374657,0.8663251821 -1.400890873,1.8887498865 1.3084002254,2.9011790079 0.0546515831,-0.7594692874 0.6707198572,0.9353700483 0.6924137072,-1.674909136 -0.2971029055,0.2503941861 -1.0386940503,-1.2703177036 -0.2506531193,3.252597002 1.9103691116,-0.45170879 -0.4310988001,-0.459509245 0.0773963182,0.1610863715 -0.3498409868,-0.5611573303 -2.0871460266,-2.9983012803 -2.1780420819,-1.4035537391 0.3924412932,-1.7416061843 0.576035244,-2.7229166636 -0.5811936729,-1.5914690356 1.6207312029,1.3520717612 1.94972268,-0.3129135644 1.9838884899,0.4582734133 -0.250900848,1.25656217 -1.5176994744,-2.1760945289 -2.3478193688,0.7112385734 -0.2732065734,-1.7029048466 -1.4480435937,-0.5827062795 1.0084998608,0.0688352123 -0.8847225332,1.0361539617 0.7324333987,-2.694467972 0.7130454076,0.8930096325 1.8170635481,1.0143910093 0.0685197236,-1.7548739306 -1.4999326317,1.8215615735 -2.9714948814,-2.0237017721 2.9009867578,0.941515369 0.0970756917,-0.7639726759 3.7430952905,-1.2567643353 -1.4404715511,0.2867198306 -0.5222411894,-0.1185682716 -1.4589972462,0.2226885897 1.8446464582,-0.6258937485 -2.3156507532,0.7925314389 0.2309439566,-0.7625985377 -0.5127959005,-0.0488575518 -1.5057006539,-0.2478829846 1.9214609647,0.7381736653 -1.0925293277,-1.2142230876 -1.148036248,3.1007307319 -1.2782834611,1.9622579206 -1.9392740698,-1.7575132815 -0.1570846054,1.2599860178 -0.4410843492,0.1210138424 2.0033941573,0.2666112218 0.0360392236,0.2214047726 0.2154168553,-1.7562625408 -0.5229694498,-0.1325236194 -3.4995046162,-0.328650281 -0.1530021881,-0.6991664678 0.9465230001,-1.5734358438 -0.8972678808,-2.5423082115 1.6308445815,-0.9990463134 -0.3684961785,-0.5413710886 -1.5521074418,-2.1484255523 2.0057114117,0.2071165238 -0.1826765389,0.2870074967 0.5423637653,1.0406875742 -3.1593668102,1.3500483513 -0.5214215333,-0.1078155057 -1.4156807883,0.3589507547 3.9802815321,-0.2680573483 2.3745045447,2.0113025593 1.0619099602,3.0145273599 0.2015369778,-0.7648973884 0.22996415,-0.7626931677 1.0015487554,-1.5453475801 -1.4051512678,-0.7103379975 0.1474011054,2.234661015 -1.4805475801,1.8352977706 -1.2167639481,0.7218642188 0.3294651017,-2.7493114544 -0.7591657142,2.1747559916 -2.3750684123,-2.7540734816 0.0893012215,0.1037027901 -0.7033001894,1.1383442843 1.7423231164,-3.40543217 -1.4882791159,-3.369788232 -1.1633697456,-1.1313651536 -1.3768928272,1.9039475684 -1.7314513646,-3.2379842665 1.6619999486,1.2920971389 0.3655801394,1.1437577955 -0.3560214359,-0.5547867717 -2.4870377187,0.1406267385 2.8755213142,-2.6090134855 1.0849712493,-1.4981446581 -1.0996809904,2.0522274748 -3.1345207029,1.400411483 -0.5231681291,-0.1670317512 0.9833533036,-2.6290809713 0.9395962754,0.5381145865 0.0855697532,0.0731579299 1.7080173256,-2.284216665 -1.1437826884,-1.1554245366 -0.3247672061,0.2343774864 -2.3947892909,-0.8962138326 0.0617290689,0.0280047009 0.7485770149,3.1241384883 1.6706883959,-0.9425866937 -2.4464995282,-0.6985096236 0.0638769072,0.187005161 -2.4937968726,0.0780761336 0.3502859369,1.150847525 -0.120491148,0.2887818099 -3.4011215684,0.680665792 -2.8146923813,-2.2565339914 3.9341778343,-0.5800144613 -1.4366966185,0.2985584173 -0.4948457921,-0.3264135001 0.1714284071,-0.7660909387 -0.4430408668,0.1181457828 -1.121622567,0.8347716568 
1.3975201862,-2.463313168 -0.2131591534,-2.7292837667 1.440649703,1.5714843613 3.004020948,0.1251543952 -0.8893947283,-2.5456060302 -2.491109789,0.1047536758 -2.957659633,1.710297768 0.0076969364,0.0003729619 1.2728336486,2.9191284265 -2.4468034635,0.3769631832 -0.7130064929,-1.5214694256 -0.4584156709,-2.684215701 -2.433961201,-0.7528857066 0.3512094483,2.1968733012 0.4009451148,1.1263363857 0.0865556004,0.13121742 1.1297808912,2.985832576 1.9886164446,2.4311624249 0.9457819625,3.0594296403 -3.340086695,0.89636132 1.0084185084,0.2470297686 2.9826304655,-0.2001282618 -2.0231570274,1.3071685284 2.5128954161,-1.4780999453 0.0098501107,0.2433624474 0.398989021,-0.7276146453 -1.4811233047,0.1261046068 -2.426322638,-0.7835908362 -0.37458021,-0.5345394707 -0.1338298431,0.2894822863 0.7658032321,3.1190452583 -0.1933091256,0.2854435098 1.6422031743,-2.3266395048 -0.8723141364,-1.4147512715 0.0371877336,0.0091235882 -2.0065038439,-3.0580802805 0.0865623763,0.1311832861 -0.4972368391,-0.3190527338 -0.2961004262,0.2509163369 -2.4758125153,-0.5410529081 0.795388408,3.1100521712 -3.4565575666,-0.7319871656 1.6059592437,-1.0322240106 -0.3379371212,1.2471404293 -2.6323191276,-2.4856558215 1.4902288094,-1.1691531174 0.0183200628,0.2370156012 0.8621110172,-0.3651175898 -1.0196897613,2.0865113582 0.2533661794,2.2174657934 0.9961269079,-0.0226355528 -2.493591388,-0.3983798231 -2.2652185025,0.9054518548 3.6939854935,-1.3793764834 0.1890520203,-0.7655348569 0.0872781531,0.1273048693 1.4292024979,-1.2318025647 -3.0942888123,-1.8088140247 1.603194124,1.376271197 -1.3850413708,-3.4188386264 -0.2930495722,-0.6124759719 0.661588652,-3.7196367111 -3.1963093829,1.2710493291 3.6073335497,-1.5711213494 -1.4988903136,0.0101286724 1.626049622,-1.0055571759 0.9542872834,0.498408804 0.9600344794,1.954246648 -2.9267963763,-2.0941707486 -3.1515054141,-1.6952927841 -0.6692846023,-1.5463668479 -0.7485918172,-1.4998897984 -0.3529159311,2.2472489705 -0.3589554353,-0.5517000681 3.9980093496,-0.0439415733 -2.4756242464,-2.6556362088 -1.3019322111,-0.927483295 -2.063106043,2.6461791335 0.2529482292,-0.7601452738 -0.8434333532,-3.6164836126 -0.6386692043,2.2042236298 0.0758972863,0.0480521654 1.9639288123,0.5699198597 0.6472176731,-1.6884622176 2.2423494,2.1729876671 0.9734783254,-0.118701934 0.8717176299,3.0853968767 2.9992127249,0.332244097 -0.0988363296,-0.7215982852 -0.0850183486,0.2838568552 -3.4665108338,0.3489704065 1.2069471468,-1.4181508014 1.9036307156,0.794984426 -0.297203432,0.2503416092 1.9903050984,0.4118909345 -2.4671001954,-0.5940154241 -0.0982668449,-3.7445351238 0.3260150783,-2.7495229531 1.6721290039,-2.3076673158 2.9778351212,-0.2378176316 -0.9237837135,1.008969641 -0.9387065492,0.9980357732 -1.0729371948,-1.2352618467 0.7259085155,0.8792094101 -0.2437390181,-1.7101869286 -0.3959726256,0.176146544 0.051054462,1.2424149899 -0.5105186281,-0.0384511305 -0.430579715,0.1356254551 1.9409457737,0.6682950081 -0.5546688757,-1.6037993525 0.0757116869,0.1649830248 -2.1549883872,-1.4438126859 0.6224412225,0.9786738406 0.0222952097,0.0031759774 -0.3820981592,0.1897786521 0.4715978271,-0.700338716 1.1435942702,-1.4614147398 1.0119605929,-2.6200233737 0.0892114344,0.1087079168 0.4851461573,-0.6943657887 -0.096442258,1.2584828912 -0.4476818615,0.1111407845 -2.3775437372,0.6274178093 -0.5132382635,-0.2564884193 0.0719501663,0.1728424462 0.9249856301,-0.2495576064 -0.9155116037,1.014897496 -0.4416378637,0.1202073913 0.7966721074,-0.4549432768 -1.3606484998,0.4872741583 -2.7001355633,2.0600808672 -1.4056163813,-0.7091277216 
-0.4100606975,0.1608864764 0.5757725727,-0.646589385 2.9138978072,0.8916237436 -1.7336460653,1.6304316998 -1.9040337003,-1.8005457242 -0.2750844386,0.2609805964 -2.0812063913,2.6333082306 -0.5414602979,-3.6872507231 -0.1323213171,3.253779342 -3.5020179479,-0.264684098 0.0069591009,-2.7503938921 0.821738767,-1.6288181125 0.3393330768,-0.7443770444 -3.4389719263,0.5099233796 -1.3321937465,0.5425707892 0.0697471076,0.0380240108 0.0690726645,0.0370685219 1.6726076276,-2.3073596179 0.2039471026,-2.7542230703 0.0734438141,-0.7617650067 1.8370958794,-2.1932271668 -1.6168186503,-2.0937398174 -3.4162625454,-0.9377841343 -1.2038193551,-2.389632995 1.5741622001,-1.072529537 -0.4834470969,2.2321929921 0.9750757664,-0.1131305699 2.9340933066,-0.4850916729 1.2354972449,2.9373754188 2.5046618031,-1.4904294165 3.9423319,-0.5353732862 -3.2177149944,-1.5496571512 0.5420978553,1.0408754451 -1.1464004748,3.1012479409 -0.2259474108,3.2531991845 3.1347284352,-2.3179848221 0.1828700216,-2.7544871082 2.4201773127,-3.0043850794 -0.4356357434,-0.4523195211 0.4117475843,-0.7233821487 2.3398785492,-1.7122551313 -3.3955504267,0.7027916238 1.0055861973,0.0404569628 3.8716263616,-0.8569496402 -0.5205503552,-0.2073752644 0.0845129323,0.1400858813 1.9620494879,-0.2591958977 0.9907053681,-0.0500385024 -1.5593505363,1.7775795777 -1.5093159286,-2.1826943645 -2.9240268471,-2.098420403 -0.1935634979,-1.7213289678 -0.0524206535,0.2750247138 2.975008262,-0.2584129812 0.4721610109,1.086610028 1.9309435241,-0.3840049187 -3.3266657947,0.93762473 -0.0090910882,0.2554989295 2.1573972636,2.2662176382 -3.0999954621,-1.7979297923 -2.476232071,2.3017694486 0.8758012785,0.6725179464 3.437678298,-1.8842352082 -0.0076135821,0.2546433627 -1.9059432416,-1.7982612699 -1.507646357,-0.1119721467 1.9174915062,0.7513328382 -1.2591685532,0.6626709447 -1.546794817,-3.340212929 -0.9155202226,1.0148913683 2.1055928433,-1.9656014814 0.2100592899,1.2043348182 1.6920401518,-2.2947519953 -1.3573656098,-0.8215421346 0.2758984413,3.224634491 -0.1627096798,-2.7357108019 0.8452921254,-0.3904831122 -2.388322709,-0.9171022377 -2.3369738495,0.7396090717 -2.836659515,1.8863448943 -0.4401229291,-0.4449831189 -0.3281415542,0.2322017259 1.0788954533,-2.5975018203 -1.1102366344,2.0474354484 -0.4966950847,0.0100012173 1.4720694305,1.5373058215 -2.4394892324,0.4091074405 -0.2797779411,0.2588762526 2.0791984727,2.3457400013 2.0060504854,0.1272615523 -2.0705385547,-3.0108947589 -0.5195041081,-0.0895362423 -0.6965544671,-2.6175100149 -3.3723178211,-1.1111422528 -0.5355107566,1.2028428004 -2.5292302333,2.2487123484 -1.0938885314,-1.2127355663 0.0349439888,0.0080070028 2.9805905163,0.5353340803 -1.0440523592,-1.2649697944 0.3742454069,2.191351098 -0.7191326278,-3.648839686 -1.2123585261,1.9977181643 0.3562320916,2.1956914262 -0.4354186996,3.2421396847 1.3075717123,2.9016035151 2.3234695912,2.0763893096 0.6517847795,0.9529455482 -1.2247324941,-2.3773625526 -0.0727701093,-1.7418352891 2.3782670708,2.0063601162 0.0605851149,3.2464649488 1.9525062684,2.4633818793 2.7133655062,-1.1252394155 0.3636624298,-0.738134734 1.0054863175,0.0396076525 2.6599447442,-1.2310678841 -0.4786144966,-0.3694636031 -2.2094267683,-2.9008077663 -0.3964958609,0.1756065052 0.97036264,-2.6330831566 -0.5233581789,-0.1511942596 -2.3895360697,-0.9132306836 1.836649765,0.9706827751 -0.3777928785,0.1937535575 -2.1275236361,1.1550364778 0.7872641524,-1.6421913047 -0.4646256609,-1.6416178402 1.5288002266,-1.1263835086 2.9265279066,-2.5562378545 -0.0672181213,0.2795847163 0.354451325,-0.7405931465 
0.2586741087,-2.7527848558 1.4242544688,2.8387331732 -0.332343958,2.248926653 -3.2876878561,1.0485957772 0.0127958329,0.2412264069 -2.5002940279,-0.3131848401 -0.4716855599,-3.6998729922 -0.4778969803,-0.3711532799 1.7249870209,1.1909459895 0.0730217065,0.043036665 1.0166918283,3.0326309949 3.2143581569,-2.215567632 -0.6054273974,-1.5797091089 -0.5203797788,-0.0971145629 0.0721816373,0.1723871164 -0.4474036521,-3.7039448403 -0.699561074,-1.5293117235 -0.1483159914,-0.701318896 -0.4852925243,0.0399340264 1.0061922369,0.0457521614 0.0888697108,0.0931579427 -0.4396896166,-0.4457018086 -1.1642882524,3.0955412126 2.2027059649,2.2174369895 2.9920017786,-0.1114228922 0.8578562555,-0.3717084329 0.9080208159,-3.6780956585 -0.9792341127,-1.3265047025 -0.1679019051,-3.739090152 -3.1337452221,1.4019505213 -1.8065538052,1.5596401317 3.6602920008,-1.4572207043 0.0785551405,0.0533603244 0.899846151,-0.3005177953 1.8918239532,0.8297351077 0.522020775,1.0547430951 3.7857770422,-1.1392448808 -3.5001881296,-0.3138936277 -0.599827591,1.1812097217 0.9538463101,-0.1789406711 2.7455026751,-2.7349962953 2.0159768611,-2.0479881767 -1.2613304969,-0.9945523205 -2.3245991006,0.7707542091 0.3324598117,1.1587831294 1.9782135989,-0.1751074284 -0.068414483,3.2526276865 -0.515652813,-0.2432753543 1.427028262,-3.5338283042 0.0948025103,-3.7528224193 0.9238689817,0.5760213879 -0.6439166373,-1.560030409 0.7299104137,2.0712266326 3.9418523169,-0.5380809331 -0.7255760803,-1.5139851404 -0.7109450705,3.2068610082 0.2343595517,1.1964788924 -0.2929038251,-1.6977275165 -1.5080929229,-0.1300041472 2.5427345709,-2.9098652373 1.6801332196,1.2642656975 -2.5029802623,-0.0571329461 -2.043582255,1.2792969607 0.2601357002,-2.7527315441 -0.3144187586,-2.7134846307 -0.7194394945,-1.5176576454 -1.1411171381,-2.424920888 -2.4958416535,-0.3735942364 2.4232729053,1.9455963596 -1.4466012054,-2.2303737433 1.9475033437,-0.3219080431 -0.07966472,0.2827052363 1.9748477485,-0.1943469598 0.7137173099,-0.5425896359 -1.3188327986,-0.897186923 0.0647747122,0.1855698632 -1.1500245108,0.8035399756 0.9216054655,-1.5853972107 0.0564044877,0.0226540809 -1.5547933717,1.7810559957 -2.2942579628,0.8423244326 0.0818725568,0.1490503514 -1.3010598501,1.9493639931 0.8015564125,-1.6367450821 1.8864133543,-3.3354242313 -0.0766861564,0.2820155778 -2.9335957539,1.7472449437 1.3980106682,-3.5440282472 1.3596402881,-1.2966027846 1.7356416908,-0.8400726153 0.2371672573,-3.7525582756 1.8182439211,1.0118318161 -1.500825782,-0.309111758 -1.6538043123,2.8933452561 -2.4890080454,-0.4421991645 -1.3914307965,-3.4159161439 0.3569049426,-0.7399496535 -0.6883077687,-3.6561579024 0.0796327969,0.0557483762 -0.7494340841,-1.4993642211 -0.7042379982,-1.5266028102 1.7341794241,-0.842546276 -0.9430731085,-3.5871879175 -0.9587984413,0.9828129858 -0.3961669257,0.1759462337 2.0045281158,0.242920563 -3.0418616665,-1.9048076018 -1.5008659797,-0.008387474 2.318269274,2.0828253604 2.9131338577,-0.5735585945 -2.436424442,0.4220165342 -0.2123944331,-0.6680580735 0.8274797671,-1.6265122622 0.8687474573,-0.3545853519 -1.4892557529,0.0803479215 -0.6785896569,1.149522673 -2.3998317381,-0.8794697985 -3.487352953,0.1779049075 -0.5904400684,7.2389131294 -4.9731877044,-7.0805343276 -5.500332028,-0.3062936346 -1.4048370471,-5.5349395885 -4.3791247456,-4.9850026158 5.761023267,5.7287851756 8.8816295203,-1.3037108941 -6.0879319428,-2.4456320977 3.8552793355,8.3323539547 -6.4175750635,-4.051815995 -4.5670507263,2.8999507518 5.8166172689,-1.319873779 6.1055430964,-3.2611870412 0.3695611418,-7.7487770995 
5.3657761798,-4.3253222665 -4.9054094579,2.3273127912 3.1499596417,6.4495752 7.9063330356,1.3907006993 -6.4588342028,0.5880563886 -3.1222847351,7.7025277927 0.1189186863,5.2450442414 -7.3704017881,5.823775582 8.0378567843,-5.7808350488 7.8906353928,1.4877164974 -6.6925577387,6.5672294649 8.2063642072,3.8627464845 1.7217482513,9.0579193784 4.3310336284,-3.9795554282 -1.0998134215,5.1676355329 -2.0459082338,8.0332568932 -4.5089539979,-8.557616416 -4.8595951601,-8.355777929 6.9679056041,0.847566589 -9.2179022886,-2.4641214261 2.1680549648,8.9534328484 -4.7977769437,4.2134853684 -5.0897850416,1.9271095099 -3.8144660865,-4.1429936044 -6.2744026889,-1.8660346554 -2.358928527,-8.3806726827 4.1275101928,8.194993051 -2.2070855081,7.9937814702 6.913900315,1.2658132091 -7.4542205449,-1.0020222067 7.035835476,-5.4425919499 -7.5608102137,3.7218029048 5.507684043,2.5491269282 8.7439472677,2.2981171 -5.1166181239,-6.973508809 -9.3839206927,1.3292876823 4.9335872634,0.9874738434 5.2616929586,-2.7218759324 5.7161417969,-5.4228544942 -0.3280986794,5.2496911784 3.4242809074,3.8249275901 6.8228148955,1.7335418127 -4.0292361519,3.5719432531 5.960071061,-0.5496624885 -6.2629637435,1.5855082727 -7.1883427583,-4.7068887727 -9.1792809498,2.2918214856 -4.4422239477,3.0766030555 -2.3922582263,7.9441727221 8.8766141076,-1.3337987576 4.1454689824,-4.1625552307 -2.5607478782,-6.177757387 6.8203619614,-1.4231567472 -8.493091927,-0.5369544256 5.7265006679,1.9594914294 2.8219190604,-7.2811298726 6.8236860742,-5.6970735424 -4.5613090557,-3.2460695489 -2.1165406656,6.9831297893 1.8570439957,-6.5353409278 -1.9175533252,7.0357184726 5.0825725136,7.6161710845 1.6743808012,5.9764036478 7.6063232785,-2.321979352 -5.4550662505,-0.8789671931 -1.5844798114,-8.5757743528 -2.7186651144,7.8453922421 5.8651287251,-6.6475240454 -4.4892658071,3.0117795925 2.6057447443,7.7734078681 5.6466187205,-3.9698677204 -3.3444363703,6.5174437354 6.3081853364,3.2030069591 -5.416855454,0.8059975287 9.0674561827,-4.0554900224 -2.0168106573,8.040020173 -6.9004050854,-5.1357476588 -2.0627181358,6.9979478736 -8.2188249648,-4.9338847918 2.1270967607,-5.4062552709 3.0615008489,6.4944627277 5.2481782693,3.0784601175 -6.0669243752,-4.5822849406 6.8421544723,1.6470214321 -5.5022304646,-0.1869476267 5.8403167614,-6.6685574795 2.472101624,4.5414166018 2.4726678395,8.8684656662 -5.1997163783,3.732896487 -5.5807266882,7.5040741705 -2.5173693106,4.6961469713 6.4249994876,6.4805485168 6.9003896818,1.3472962887 -5.6878576732,-7.7951591521 8.9699885442,-0.5921034802 2.5681603796,7.7867205703 4.8401706077,6.5555584169 -7.3201252254,4.1563586374 6.001663041,0.2323094023 5.9418978319,3.8715651927 3.9694553103,-4.3219938533 2.5626546718,4.4872009509 -6.0104230632,2.3166914063 -0.7414951733,-8.7049521806 -6.2638771905,1.5822372123 2.9808739753,6.5340498954 8.658169487,-4.83945254 6.6402096823,4.6334025532 -6.7748612285,3.0567865778 -6.3155249618,1.3851409555 -6.6068325879,-3.7189202873 1.9776525271,-7.5356613998 -7.3331857473,1.4207239654 -6.8309259441,-3.2646317379 -6.495900273,-0.4395132965 6.8712864319,-1.1855277332 -7.4904822086,0.2506783692 5.9206814778,-0.8254935593 -8.361657787,-1.6955835681 6.8280265806,-1.3898365816 2.717398976,-6.2500951868 4.8521830246,-4.8701038194 -7.2571254892,-6.3055753986 6.9928947086,0.5123742632 5.9912646045,-0.201452936 -0.6040639987,-6.7085664784 5.9593907802,-6.5661709023 -1.7898715327,-5.4107978421 2.8954445915,6.574642026 1.9884635397,5.8703891962 3.9116069767,8.3048590208 1.2603902941,5.0564010628 8.0028710179,-3.9552981172 
-4.8993614866,6.7613166927 -8.1311210391,2.3204908391 -5.4478436095,0.6125992789 -2.8130958312,5.6631766353 -8.3557694209,1.4072604842 -5.5022832429,-0.1730634587 -5.2649880719,-5.5206947927 -2.9463064887,-7.100379951 5.4328597918,-2.3869951432 -9.4669877784,0.6478109487 -0.9727270888,-9.6853446975 -7.3740212971,-1.5381171453 5.4921426802,-4.1712991866 -2.6792211602,-8.2773715591 -4.41293575,4.5962325298 -6.5657303959,3.4624566459 -3.731674983,-5.5106389597 8.9903089977,0.6060390454 -4.7246851693,-2.9870391712 8.3876017072,3.4298671572 -2.9925900209,7.7509958848 -6.7145431401,-5.3856787413 5.1738392436,3.2092931872 -9.2176421968,2.1436923484 -9.5000630568,-0.0038094961 -4.3305735802,7.1239415869 4.5938129439,-6.3648660312 -9.4780037173,0.506147449 7.004721697,5.8252414128 -1.9525256562,7.0269101119 5.0642097828,-6.0129377621 8.2520079693,-5.4814878763 -3.0932204231,-8.1230824005 9.0419330717,-4.1097591428 8.3553276051,3.5117462984 2.9786790642,-5.0177447796 -4.5436616867,8.1499486856 2.4818821311,-5.2644085446 -8.484269459,-0.7000636976 8.9807857975,-0.4496911934 7.0763009809,5.7348791926 -4.0370027535,-3.9134929494 5.3833916109,-2.4893061266 -5.4429995058,-0.965312329 -8.5032050062,4.0742358633 4.1214290298,3.0043965561 6.4062943592,-4.6235640815 -9.1566425995,-2.6970678425 -6.7823287065,4.9550933429 -3.6145089347,-5.5922539585 0.466329004,-9.7464595139 4.9913083352,0.4940881819 3.7111207007,4.9001216373 -2.8183520987,-7.1552297982 -1.5418718867,5.0676349121 8.9814286389,-0.4401389442 -6.4622053992,0.5583569147 8.1735471943,-5.5939107749 4.166062435,7.0219301858 -5.9867228259,5.8590535167 4.1350495568,-7.7992109283 -1.8569397435,-7.4843722023 -7.1498245345,-4.767595175 -2.732226183,-6.0989341228 -7.8956593263,-3.3148160064 -0.0276985069,-5.7491665015 5.8125677487,7.0540275582 -7.5009841488,-0.056364697 5.6398347395,7.1978503955 -0.1150685546,-6.746304368 -3.2921499457,-4.59301678 1.4854134595,9.1038725118 -5.206873132,6.5367283313 5.9162475674,1.1714604902 9.7569603674,-2.0366101076 -2.2075719075,-8.4247860416 -4.6819782824,-3.057849508 -6.8072892215,2.9879296625 -7.1420345974,2.1332040499 4.2850581851,-5.3537430256 6.9225813343,-0.8906108795 4.2728523944,-8.843484734 -1.2371389074,6.1585206534 -3.0813516606,4.3707563064 4.9922474923,0.4797741042 7.4794057637,5.1770922959 5.7761884248,1.7923678933 4.6623374427,-6.3169499631 0.5070268762,-6.7429091902 2.9352534788,-7.2388440718 -3.3585742793,-5.7580381586 2.3534576733,-8.4723226722 2.3873652155,4.5899130453 -1.3317825016,7.1564180075 1.3321029399,6.0712011639 -4.287190588,-5.068178528 6.9602733811,-0.6020927876 -3.0239510677,-8.1505778275 -5.810198512,6.0277068909 -7.164666462,-2.384798922 -6.7238048779,3.1616483376 -7.9469518116,-3.1828176904 -7.3985360755,1.0798994022 -6.0457991558,2.2294640687 2.7089614712,-6.2535251184 5.0211184246,-4.703765511 -5.2461591292,7.7333280858 -2.884481311,-4.875623664 -0.7349628325,-7.6998571954 -2.2242500903,5.9021592795 -7.3736612131,1.2199839492 -6.3769527911,5.4483183479 7.7636703997,-1.7759621328 -2.181138798,-5.2524924261 -5.439097671,-0.9912842571 3.005837847,-5.0025132151 -6.8398848137,2.9168103719 2.8810314727,-6.1809494527 3.2643734963,-8.1833473691 -5.4533983691,-0.8915190258 -5.5405565792,3.2350964659 8.2942819728,3.6605942682 -8.0560898115,-2.8776073976 -4.0416820823,7.2845057345 -7.8418605373,3.1199890317 4.6087873918,-6.3544877006 4.0862513279,5.8712223803 0.7575783542,7.1935185007 5.7797131737,4.1212670766 -5.6873639541,6.1394382523 -0.213115352,-7.7426879945 9.5507696347,-2.8062766957 
-6.7394026277,-5.3533253302 6.7397080968,-1.7366695662 6.1492379341,6.7518988646 -5.2664561861,7.7200202482 4.345910783,6.9071512007 -7.3617536992,-1.6023023932 -1.5784976642,-7.554055045 -4.8877605435,2.3615599246 -3.5596763418,-4.3764394038 -8.3658049357,-1.6728007921 -2.7657388744,-7.1769931161 4.7611077089,6.6155910062 4.2236681348,-8.8661088889 8.0015765924,0.1689037642 -0.1008976394,6.2517542482 -0.4923912884,7.2440863845 5.0020582653,0.0903311841 6.98223377,0.6824574348 2.5309018918,7.7997152901 -8.4939425131,0.1989668194 6.5068048787,-4.4870257461 -1.6051958893,9.1377576077 -1.6486867221,5.0374045892 6.7030352815,6.1820388485 6.9241366188,1.1997301117 -6.2013212935,5.6400844242 -3.9591362881,6.1814709737 -6.2972277459,5.5368498339 5.8353535659,-5.2995157851 -6.3341225502,-1.628211523 -6.4652570673,-0.8491688578 -5.5779783224,6.2352773876 -1.735677756,-6.4799187318 -7.3416975736,-1.701296991 -6.4100550404,-1.2491327839 2.3081286777,-6.4016856601 -8.5003924343,-0.0225584996 -0.8155904542,6.217493816 -3.0306877688,5.5567163863 -4.2378937177,-3.6817773705 -5.3274105533,-6.8075634019 -2.7907386297,-4.9334328883 4.1472971763,5.8261898015 5.8432379034,-7.9309486138 3.6593896841,-4.5736187482 6.3663986612,3.0785890036 -5.2776992212,-3.9673809947 7.7402905924,-6.1611200113 5.2089518436,-8.3467806864 -6.3547901998,1.2151654748 -8.344218712,1.4675621293 -9.355573949,-1.8178293161 6.4319536629,2.9303825157 -6.1690794103,1.8918369093 -6.428714948,-1.1328174365 -5.277356614,-8.0886564762 -6.4821379884,5.3275286729 2.4065739351,-5.2967965254 4.8603777825,1.3446735055 4.1700589814,4.4943527238 2.8636297087,8.7425776371 7.6034467025,2.6543841355 -6.0834695385,2.1323118748 -4.9271649768,2.2842550989 -0.5739125245,-6.7119323869 2.7937438862,-5.1165001625 3.1523355096,7.5558781184 -8.4862909141,0.3489029172 -3.535143919,-4.3974483457 4.9255230811,7.7225775304 1.7659028887,4.8854379976 -8.3816570954,1.2624946488 4.5172752771,-7.59333484 4.043240337,-8.9463189496 -7.3670607239,-4.4094263635 4.6952091891,-5.0148432008 5.9854309536,0.6063872202 5.5089750828,-5.6250065822 -6.2309426865,5.6085698614 -1.6880359904,5.0256555012 -2.4343334329,4.7366205626 3.2738106051,8.5892855495 4.0781640967,3.0668085891 -8.3343911715,-4.7281195869 -0.596129073,-5.7023467057 -6.3112332182,1.4025352678 4.5354286265,5.51508111 -3.1951630459,5.4692214103 4.5888607367,2.155491767 -8.4759445223,-4.4585475941 -0.7441234598,-6.6911226826 4.5996325569,2.1303688121 7.941796637,1.1358351277 -4.9744521773,5.4352237399 5.6840272661,-1.7660132993 -7.2759887943,-1.9870147882 -3.3957374461,8.66800797 3.0207771439,5.3782840991 -3.5483044173,8.6096987934 1.3515934316,5.0297855319 8.8669423908,-1.3901228996 -2.5993392551,-8.3044194417 7.199535576,-3.3270261453 -1.0729634813,6.1849593511 4.2041322246,4.4607904993 3.1058331777,-8.2415627844 -4.3529672589,5.9234183019 0.7030106379,6.1922289812 4.0602320886,4.5990652007 -2.3223671501,7.9634363227 -4.3982459622,-3.4767565313 -1.6721331986,-6.4982257827 -0.9197331969,8.2164252239 3.2547765328,-4.8537526254 -6.2093674658,-5.9819036726 4.5983735239,4.0301984952 8.2227012436,-3.4984135526 -5.7697658529,-3.1628569372 -1.4335658951,8.1526223978 -9.4997005335,-0.3352534504 2.0540748816,-7.516856069 4.3158047004,-7.7048716898 0.3410290072,-6.749424693 7.0129575275,-5.470979019 7.5820478511,5.019613438 -4.0517630592,3.5478491319 -8.2312350829,1.9656838538 8.7615895875,-1.9033855845 7.0227527144,-3.668904624 -6.0755476746,-7.4864007498 0.4389609922,-9.7472834546 1.5699769747,6.0075614107 
-4.7114823198,2.6748292078 -5.6348762474,-3.4105404338 -5.7133619094,-3.269518832 -5.0192128891,-4.3054821964 -0.5065154442,9.2448458951 -8.4912440156,0.2580612395 6.2488916218,3.3236404285 -4.2770071673,3.2894250802 -5.7276947811,-5.0173730569 -7.18634439,1.9907649026 -4.2048953915,6.0246968226 -0.2802887734,-6.737555196 4.9665441713,-3.2012894807 2.6645651936,4.4231166889 -5.3945236995,-8.0081663493 3.5877793014,-5.8210688986 -8.4134405163,-1.380064727 -3.6676169027,7.4709288767 4.3103992781,2.705650608 8.99764401,0.4193969356 -0.3496603277,7.2492428747 -2.7134288365,-6.1078624283 2.9607922378,8.7082692697 2.0525201264,7.9490692113 -1.8027560643,9.1043455961 -2.5359934244,6.8521851992 -0.9210692484,5.1969266328 -4.7368687648,2.6325901943 5.0008882072,0.2872729811 -8.9584276346,3.0034979568 8.9239266457,-4.3502801083 -1.1762828691,5.1531705585 -4.7760211749,-2.8986462636 -5.346229626,-1.4618158821 -2.4151619655,-6.2401533518 -6.1512736608,4.1243911221 -3.8186216888,-4.1389405561 4.5590268316,6.7623180266 -5.4187446522,0.795360764 1.011552282,-7.7046566284 5.4183272834,2.7465083086 -6.0610912313,-4.5903426252 -2.4018808511,4.7519576577 8.457599283,-2.9188676859 -0.119687931,-8.747003545 2.2459915845,4.6662028297 -6.8536060637,-3.2140002917 5.7550192565,1.8657407158 -2.1675934424,4.8549192233 -7.5704230294,3.7031137773 -5.9741658178,4.3660907868 -7.4543976537,-1.000452583 -3.526978916,-4.4043868501 -5.196810543,-1.9699832525 4.7633221753,-7.4475695621 0.9556941182,-9.7187241934 -5.4797451366,0.3383979269 -6.5918184306,-3.7468141004 1.8126283381,-7.5734932129 -1.2776451825,6.1513091466 -2.7000309097,5.7144903538 4.1093541416,-5.4834295249 -5.3800653322,0.9930695535 5.7632127103,-7.9873397602 8.0003336838,0.3004985458 -2.8990046271,7.7844508264 7.8014271682,1.9389605717 -4.9957331481,-5.7757123312 -7.369890468,1.2399545604 0.6533398076,6.1989531848 -4.6322235036,-4.7388962267 2.5125283102,4.5175227621 1.1445351818,5.0876312615 -8.9429911216,3.0467552954 -5.3799273478,-3.820152329 1.2660703671,-9.6883062096 7.2038829127,5.5677671561 -5.4962383424,-0.4173721983 0.8865075176,-6.7126123309 4.9624215157,0.7917079809 3.6249959336,3.6227794004 3.6593145816,4.9410561297 -5.7114183989,-5.0366912566 -2.8555129728,-8.2145963685 9.6825938949,-2.3436871112 -6.392673819,3.75794594 7.846959761,-6.0291563737 -1.0202506775,6.1925048825 3.1865661251,6.4305468774 -2.0313323362,8.0366586526 -9.48776291,0.3484139917 8.7332907599,-2.0198551827 7.9743868548,0.8184533377 -5.4859403553,-0.5832447042 -1.41199878,6.1254208459 -2.526742033,5.7880253921 0.4671215487,7.2246383364 1.1159572251,-5.6721819298 3.1768140711,-7.1419369396 3.6485832186,4.9494231372 5.4289432251,2.7240114513 -7.4272339581,0.8946376534 -2.6648631231,4.6197724378 1.3545813837,-5.6268471759 8.8660625134,-1.3951425181 -5.9129390079,4.4450818884 5.4300445198,-8.2097100343 6.8318638178,-1.3728677211 -6.0615144542,2.1894988798 3.3326541919,6.3519095092 5.3681624339,2.8497242016 -2.1109531838,5.9404205533 -4.2955158626,-7.5281924809 -4.4204478091,-6.2459910519 4.7671306158,3.817810742 -5.2455064853,6.506960009 -5.4803150201,4.9473940354 -3.5824627503,-4.3567059803 -4.6362987438,-3.1310630711 -8.4254424604,0.9749640621 5.0011089882,0.2783461567 -2.1368012673,-6.3483577687 4.4583469814,-7.6266706193 -2.1484424376,6.974137152 -5.1016607148,1.8980781758 -2.5346735358,6.8526411209 -3.9925168283,4.9481953129 -1.0994455021,-5.6125408876 6.9325566709,1.1419738343 5.0669140199,5.0078653893 1.2838067275,8.1242471643 2.1004366638,7.9355441344 
-6.947031606,2.6677423988 -4.8049901268,-4.5548173885 -6.753903036,-5.3343061538 -3.2269947468,-8.0680198701 0.2665430733,-5.7511976225 1.6792842629,-8.6185504868 -2.6827987551,5.7220771063 -0.0744839791,-6.7478322136 -5.4374202407,4.9925153199 5.4318714419,-2.3890891916 2.1857243721,8.9488110027 6.4948602165,-6.0564901447 -3.7399929986,-7.8319305121 6.6176130412,-7.3188024338 0.0011581051,8.2499775771 -6.3736094194,-1.4456271238 6.841974628,1.6478502688 -7.5667538995,-4.0407175243 -8.7714842814,3.4885559942 -5.6192854132,-3.437613924 -3.3759620659,-9.0865047982 2.9131120657,5.4409981587 -7.4059918811,1.0346066457 -9.1072816275,-2.8692706778 9.8210165011,-1.7301290633 1.1772393938,7.1273784088 -6.344984181,-1.5804318385 2.1891050894,-9.5379132897 -2.2052993247,-6.3230517444 -6.6173675387,-6.9954384506 -7.9545265348,-5.3645740681 -4.9556563051,-8.2970066385 -4.9694871353,-2.526507908 5.8527805862,-1.1700837927 -6.0384470495,7.1537592027 -6.7416914863,-6.8716423259 -1.7029015301,-5.4414929647 4.0996494066,-4.2053193583 -9.4695346103,0.6173747127 -8.9889198104,2.9160294283 4.4972110253,-5.1851348474 0.657254419,8.2110244071 7.9410546178,-0.8228053174 -2.3213271568,-6.2782311768 6.916429297,5.9335523165 -5.8685918826,-2.9635620262 -4.9552523278,2.2272136272 4.0578575711,-5.5198292349 5.6397114685,2.2163846 -0.2593944135,-6.7388863224 7.9410013193,1.1422882954 -4.4296833528,8.2099594531 -4.0316677073,-3.9193092584 -6.1711439238,-2.2093662442 4.8799198754,5.1984309001 -8.0882173866,2.4566468146 3.0184634937,6.5157494809 mlpack-2.2.5/src/mlpack/tests/data/vc2.csv000066400000000000000000000161231315013601400203040ustar00rootroot0000000000000063.03,22.55,39.61,40.48,98.67,-0.25 39.06,10.06,25.02,29,114.41,4.56 68.83,22.22,50.09,46.61,105.99,-3.53 69.3,24.65,44.31,44.64,101.87,11.21 49.71,9.65,28.32,40.06,108.17,7.92 40.25,13.92,25.12,26.33,130.33,2.23 53.43,15.86,37.17,37.57,120.57,5.99 45.37,10.76,29.04,34.61,117.27,-10.68 43.79,13.53,42.69,30.26,125,13.29 36.69,5.01,41.95,31.68,84.24,0.66 49.71,13.04,31.33,36.67,108.65,-7.83 31.23,17.72,15.5,13.52,120.06,0.5 48.92,19.96,40.26,28.95,119.32,8.03 53.57,20.46,33.1,33.11,110.97,7.04 57.3,24.19,47,33.11,116.81,5.77 44.32,12.54,36.1,31.78,124.12,5.42 63.83,20.36,54.55,43.47,112.31,-0.62 31.28,3.14,32.56,28.13,129.01,3.62 38.7,13.44,31,25.25,123.16,1.43 41.73,12.25,30.12,29.48,116.59,-1.24 43.92,14.18,37.83,29.74,134.46,6.45 54.92,21.06,42.2,33.86,125.21,2.43 63.07,24.41,54,38.66,106.42,15.78 45.54,13.07,30.3,32.47,117.98,-4.99 36.13,22.76,29,13.37,115.58,-3.24 54.12,26.65,35.33,27.47,121.45,1.57 26.15,10.76,14,15.39,125.2,-10.09 43.58,16.51,47,27.07,109.27,8.99 44.55,21.93,26.79,22.62,111.07,2.65 66.88,24.89,49.28,41.99,113.48,-2.01 50.82,15.4,42.53,35.42,112.19,10.87 46.39,11.08,32.14,35.31,98.77,6.39 44.94,17.44,27.78,27.49,117.98,5.57 38.66,12.99,40,25.68,124.91,2.7 59.6,32,46.56,27.6,119.33,1.47 31.48,7.83,24.28,23.66,113.83,4.39 32.09,6.99,36,25.1,132.26,6.41 35.7,19.44,20.7,16.26,137.54,-0.26 55.84,28.85,47.69,27,123.31,2.81 52.42,19.01,35.87,33.41,116.56,1.69 74.38,32.05,78.77,42.32,143.56,56.13 89.68,32.7,83.13,56.98,129.96,92.03 44.53,9.43,52,35.1,134.71,29.11 77.69,21.38,64.43,56.31,114.82,26.93 76.15,21.94,82.96,54.21,123.93,10.43 83.93,41.29,62,42.65,115.01,26.59 78.49,22.18,60,56.31,118.53,27.38 75.65,19.34,64.15,56.31,95.9,69.55 72.08,18.95,51,53.13,114.21,1.01 58.6,-0.26,51.5,58.86,102.04,28.06 72.56,17.39,52,55.18,119.19,32.11 86.9,32.93,47.79,53.97,135.08,101.72 84.97,33.02,60.86,51.95,125.66,74.33 55.51,20.1,44,35.42,122.65,34.55 
72.22,23.08,91,49.14,137.74,56.8 70.22,39.82,68.12,30.4,148.53,145.38 86.75,36.04,69.22,50.71,139.41,110.86 58.78,7.67,53.34,51.12,98.5,51.58 67.41,17.44,60.14,49.97,111.12,33.16 47.74,12.09,39,35.66,117.51,21.68 77.11,30.47,69.48,46.64,112.15,70.76 74.01,21.12,57.38,52.88,120.21,74.56 88.62,29.09,47.56,59.53,121.76,51.81 81.1,24.79,77.89,56.31,151.84,65.21 76.33,42.4,57.2,33.93,124.27,50.13 45.44,9.91,45,35.54,163.07,20.32 59.79,17.88,59.21,41.91,119.32,22.12 44.91,10.22,44.63,34.7,130.08,37.36 56.61,16.8,42,39.81,127.29,24.02 71.19,23.9,43.7,47.29,119.86,27.28 81.66,28.75,58.23,52.91,114.77,30.61 70.95,20.16,62.86,50.79,116.18,32.52 85.35,15.84,71.67,69.51,124.42,76.02 58.1,14.84,79.65,43.26,113.59,50.24 94.17,15.38,67.71,78.79,114.89,53.26 57.52,33.65,50.91,23.88,140.98,148.75 96.66,19.46,90.21,77.2,120.67,64.08 74.72,19.76,82.74,54.96,109.36,33.31 77.66,22.43,93.89,55.22,123.06,61.21 58.52,13.92,41.47,44.6,115.51,30.39 84.59,30.36,65.48,54.22,108.01,25.12 79.94,18.77,63.31,61.16,114.79,38.54 70.4,13.47,61.2,56.93,102.34,25.54 49.78,6.47,53,43.32,110.86,25.34 77.41,29.4,63.23,48.01,118.45,93.56 65.01,27.6,50.95,37.41,116.58,7.02 65.01,9.84,57.74,55.18,94.74,49.7 78.43,33.43,76.28,45,138.55,77.16 63.17,6.33,63,56.84,110.64,42.61 68.61,15.08,63.01,53.53,123.43,39.5 63.9,13.71,62.12,50.19,114.13,41.42 85,29.61,83.35,55.39,126.91,71.32 42.02,-6.55,67.9,48.58,111.59,27.34 69.76,19.28,48.5,50.48,96.49,51.17 80.99,36.84,86.96,44.14,141.09,85.87 129.83,8.4,48.38,121.43,107.69,418.54 70.48,12.49,62.42,57.99,114.19,56.9 86.04,38.75,47.87,47.29,122.09,61.99 65.54,24.16,45.78,41.38,136.44,16.38 60.75,15.75,43.2,45,113.05,31.69 54.74,12.1,41,42.65,117.64,40.38 83.88,23.08,87.14,60.8,124.65,80.56 80.07,48.07,52.4,32.01,110.71,67.73 65.67,10.54,56.49,55.12,109.16,53.93 74.72,14.32,32.5,60.4,107.18,37.02 48.06,5.69,57.06,42.37,95.44,32.84 70.68,21.7,59.18,48.97,103.01,27.81 80.43,17,66.54,63.43,116.44,57.78 90.51,28.27,69.81,62.24,100.89,58.82 77.24,16.74,49.78,60.5,110.69,39.79 50.07,9.12,32.17,40.95,99.71,26.77 69.78,13.78,58,56,118.93,17.91 69.63,21.12,52.77,48.5,116.8,54.82 81.75,20.12,70.56,61.63,119.43,55.51 52.2,17.21,78.09,34.99,136.97,54.94 77.12,30.35,77.48,46.77,110.61,82.09 88.02,39.84,81.77,48.18,116.6,56.77 83.4,34.31,78.42,49.09,110.47,49.67 72.05,24.7,79.87,47.35,107.17,56.43 85.1,21.07,91.73,64.03,109.06,38.03 69.56,15.4,74.44,54.16,105.07,29.7 89.5,48.9,72,40.6,134.63,118.35 85.29,18.28,100.74,67.01,110.66,58.88 60.63,20.6,64.54,40.03,117.23,104.86 60.04,14.31,58.04,45.73,105.13,30.41 85.64,42.69,78.75,42.95,105.14,42.89 85.58,30.46,78.23,55.12,114.87,68.38 55.08,-3.76,56,58.84,109.92,31.77 65.76,9.83,50.82,55.92,104.39,39.31 79.25,23.94,40.8,55.3,98.62,36.71 81.11,20.69,60.69,60.42,94.02,40.51 48.03,3.97,58.34,44.06,125.35,35 63.4,14.12,48.14,49.29,111.92,31.78 57.29,15.15,64,42.14,116.74,30.34 41.19,5.79,42.87,35.39,103.35,27.66 66.8,14.55,72.08,52.25,82.46,41.69 79.48,26.73,70.65,52.74,118.59,61.7 44.22,1.51,46.11,42.71,108.63,42.81 57.04,0.35,49.2,56.69,103.05,52.17 64.27,12.51,68.7,51.77,95.25,39.41 38.51,16.96,35.11,21.54,127.63,7.99 54.92,18.97,51.6,35.95,125.85,2 44.36,8.95,46.9,35.42,129.22,4.99 48.32,17.45,48,30.87,128.98,-0.91 45.7,10.66,42.58,35.04,130.18,-3.39 30.74,13.35,35.9,17.39,142.41,-2.01 50.91,6.68,30.9,44.24,118.15,-1.06 38.13,6.56,50.45,31.57,132.11,6.34 51.62,15.97,35,35.66,129.39,1.01 64.31,26.33,50.96,37.98,106.18,3.12 44.49,21.79,31.47,22.7,113.78,-0.28 54.95,5.87,53,49.09,126.97,-0.63 56.1,13.11,62.64,43,116.23,31.17 69.4,18.9,75.97,50.5,103.58,-0.44 
89.83,22.64,90.56,67.2,100.5,3.04 59.73,7.72,55.34,52,125.17,3.24 63.96,16.06,63.12,47.9,142.36,6.3 61.54,19.68,52.89,41.86,118.69,4.82 38.05,8.3,26.24,29.74,123.8,3.89 43.44,10.1,36.03,33.34,137.44,-3.11 65.61,23.14,62.58,42.47,124.13,-4.08 53.91,12.94,39,40.97,118.19,5.07 43.12,13.82,40.35,29.3,128.52,0.97 40.68,9.15,31.02,31.53,139.12,-2.51 37.73,9.39,42,28.35,135.74,13.68 63.93,19.97,40.18,43.96,113.07,-11.06 61.82,13.6,64,48.22,121.78,1.3 62.14,13.96,58,48.18,133.28,4.96 69,13.29,55.57,55.71,126.61,10.83 56.45,19.44,43.58,37,139.19,-1.86 41.65,8.84,36.03,32.81,116.56,-6.05 51.53,13.52,35,38.01,126.72,13.93 39.09,5.54,26.93,33.55,131.58,-0.76 34.65,7.51,43,27.14,123.99,-4.08 63.03,27.34,51.61,35.69,114.51,7.44 47.81,10.69,54,37.12,125.39,-0.4 46.64,15.85,40,30.78,119.38,9.06 49.83,16.74,28,33.09,121.44,1.91 47.32,8.57,35.56,38.75,120.58,1.63 50.75,20.24,37,30.52,122.34,2.29 36.16,-0.81,33.63,36.97,135.94,-2.09 40.75,1.84,50,38.91,139.25,0.67 42.92,-5.85,58,48.76,121.61,-3.36 63.79,21.35,66,42.45,119.55,12.38 72.96,19.58,61.01,53.38,111.23,0.81 67.54,14.66,58,52.88,123.63,25.97 54.75,9.75,48,45,123.04,8.24 50.16,-2.97,42,53.13,131.8,-8.29 40.35,10.19,37.97,30.15,128.01,0.46 63.62,16.93,49.35,46.68,117.09,-0.36 54.14,11.94,43,42.21,122.21,0.15 74.98,14.92,53.73,60.05,105.65,1.59 42.52,14.38,25.32,28.14,128.91,0.76 33.79,3.68,25.5,30.11,128.33,-1.78 54.5,6.82,47,47.68,111.79,-4.41 48.17,9.59,39.71,38.58,135.62,5.36 46.37,10.22,42.7,36.16,121.25,-0.54 52.86,9.41,46.99,43.45,123.09,1.86 57.15,16.49,42.84,40.66,113.81,5.02 37.14,16.48,24,20.66,125.01,7.37 51.31,8.88,57,42.44,126.47,-2.14 42.52,16.54,42,25.97,120.63,7.88 39.36,7.01,37,32.35,117.82,1.9 35.88,1.11,43.46,34.77,126.92,-1.63 43.19,9.98,28.94,33.22,123.47,1.74 67.29,16.72,51,50.57,137.59,4.96 51.33,13.63,33.26,37.69,131.31,1.79mlpack-2.2.5/src/mlpack/tests/data/vc2_labels.txt000066400000000000000000000006351315013601400216530ustar00rootroot000000000000000 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2mlpack-2.2.5/src/mlpack/tests/data/vc2_test.csv000066400000000000000000000044641315013601400213500ustar00rootroot0000000000000074.43,41.56,27.7,32.88,107.95,5 50.21,29.76,36.1,20.45,128.29,5.74 30.15,11.92,34,18.23,112.68,11.46 41.17,17.32,33.47,23.85,116.38,-9.57 47.66,13.28,36.68,34.38,98.25,6.27 43.35,7.47,28.07,35.88,112.78,5.75 46.86,15.35,38,31.5,116.25,1.66 43.2,19.66,35,23.54,124.85,-2.92 48.11,14.93,35.56,33.18,124.06,7.95 92.03,35.39,77.42,56.63,115.72,58.06 67.03,13.28,66.15,53.75,100.72,33.99 80.82,19.24,61.64,61.58,89.47,44.17 80.65,26.34,60.9,54.31,120.1,52.47 68.72,49.43,68.06,19.29,125.02,54.69 37.9,4.48,24.71,33.42,157.85,33.61 64.62,15.23,67.63,49.4,90.3,31.33 75.44,31.54,89.6,43.9,106.83,54.97 71,37.52,84.54,33.49,125.16,67.77 81.06,20.8,91.78,60.26,125.43,38.18 91.47,24.51,84.62,66.96,117.31,52.62 81.08,21.26,78.77,59.83,90.07,49.16 60.42,5.27,59.81,55.15,109.03,30.27 85.68,38.65,82.68,47.03,120.84,61.96 82.41,29.28,77.05,53.13,117.04,62.77 43.72,9.81,52,33.91,88.43,40.88 86.47,40.3,61.14,46.17,97.4,55.75 74.47,33.28,66.94,41.19,146.47,124.98 70.25,10.34,76.37,59.91,119.24,32.67 72.64,18.93,68,53.71,116.96,25.38 71.24,5.27,86,65.97,110.7,38.26 
63.77,12.76,65.36,51.01,89.82,56
58.83,37.58,125.74,21.25,135.63,117.31
74.85,13.91,62.69,60.95,115.21,33.17
75.3,16.67,61.3,58.63,118.88,31.58
63.36,20.02,67.5,43.34,131,37.56
67.51,33.28,96.28,34.24,145.6,88.3
76.31,41.93,93.28,34.38,132.27,101.22
73.64,9.71,63,63.92,98.73,26.98
56.54,14.38,44.99,42.16,101.72,25.77
80.11,33.94,85.1,46.17,125.59,100.29
95.48,46.55,59,48.93,96.68,77.28
74.09,18.82,76.03,55.27,128.41,73.39
87.68,20.37,93.82,67.31,120.94,76.73
48.26,16.42,36.33,31.84,94.88,28.34
65.76,13.21,44,52.55,129.39,-1.98
40.41,-1.33,30.98,41.74,119.34,-6.17
48.8,18.02,52,30.78,139.15,10.44
50.09,13.43,34.46,36.66,119.13,3.09
64.26,14.5,43.9,49.76,115.39,5.95
53.68,13.45,41.58,40.24,113.91,2.74
49,13.11,51.87,35.88,126.4,0.54
59.17,14.56,43.2,44.6,121.04,2.83
67.8,16.55,43.26,51.25,119.69,4.87
61.73,17.11,46.9,44.62,120.92,3.09
33.04,-0.32,19.07,33.37,120.39,9.35
74.57,15.72,58.62,58.84,105.42,0.6
44.43,14.17,32.24,30.26,131.72,-3.6
36.42,13.88,20.24,22.54,126.08,0.18
51.08,14.21,35.95,36.87,115.8,6.91
34.76,2.63,29.5,32.12,127.14,-0.46
48.9,5.59,55.5,43.32,137.11,19.85
46.24,10.06,37,36.17,128.06,-5.1
46.43,6.62,48.1,39.81,130.35,2.45
39.66,16.21,36.67,23.45,131.92,-4.97
45.58,18.76,33.77,26.82,116.8,3.13
66.51,20.9,31.73,45.61,128.9,1.52
82.91,29.89,58.25,53.01,110.71,6.08
mlpack-2.2.5/src/mlpack/tests/data/vc2_test_labels.txt000066400000000000000000000002051315013601400227030ustar00rootroot00000000000000
0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
mlpack-2.2.5/src/mlpack/tests/dbscan_test.cpp000066400000000000000000000170651315013601400211660ustar00rootroot00000000000000
/**
 * @file dbscan_test.cpp
 * @author Ryan Curtin
 *
 * Test the DBSCAN implementation.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/dbscan/dbscan.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::dbscan;
using namespace mlpack::distribution;

BOOST_AUTO_TEST_SUITE(DBSCANTest);

BOOST_AUTO_TEST_CASE(OneClusterTest)
{
  // Make sure that if we have points in the unit box, and if we set epsilon
  // large enough, all points end up in one cluster.
  arma::mat points(10, 200, arma::fill::randu);

  DBSCAN<> d(2.0, 2);

  arma::Row<size_t> assignments;
  const size_t clusters = d.Cluster(points, assignments);

  BOOST_REQUIRE_EQUAL(clusters, 1);
  BOOST_REQUIRE_EQUAL(assignments.n_elem, points.n_cols);
  for (size_t i = 0; i < assignments.n_elem; ++i)
    BOOST_REQUIRE_EQUAL(assignments[i], 0);
}

/**
 * When epsilon is small enough, every point returned should be noise.
 */
BOOST_AUTO_TEST_CASE(TinyEpsilonTest)
{
  arma::mat points(10, 200, arma::fill::randu);

  DBSCAN<> d(1e-50, 2);

  arma::Row<size_t> assignments;
  const size_t clusters = d.Cluster(points, assignments);

  BOOST_REQUIRE_EQUAL(clusters, 0);
  BOOST_REQUIRE_EQUAL(assignments.n_elem, points.n_cols);
  for (size_t i = 0; i < assignments.n_elem; ++i)
    BOOST_REQUIRE_EQUAL(assignments[i], SIZE_MAX);
}
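/**
 * Editor's sketch (not one of the original mlpack test cases): a minimal
 * illustration of the Cluster() overload that also returns centroids, as used
 * in GaussiansTest below.  It assumes only behavior already exercised in this
 * file: with a large epsilon the unit-box data forms a single cluster, so the
 * centroid matrix should hold one column per cluster, in the data's
 * dimensionality.
 */
BOOST_AUTO_TEST_CASE(CentroidOverloadSketch)
{
  arma::mat points(10, 200, arma::fill::randu);

  DBSCAN<> d(2.0, 2);

  arma::Row<size_t> assignments;
  arma::mat centroids;
  const size_t clusters = d.Cluster(points, assignments, centroids);

  // One cluster, so one centroid column with the data's dimensionality.
  BOOST_REQUIRE_EQUAL(clusters, 1);
  BOOST_REQUIRE_EQUAL(centroids.n_cols, clusters);
  BOOST_REQUIRE_EQUAL(centroids.n_rows, points.n_rows);
}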
/**
 * Check that outliers are properly labeled as noise.
 */
BOOST_AUTO_TEST_CASE(OutlierTest)
{
  arma::mat points(2, 200, arma::fill::randu);

  // Add 3 outliers.
  points.col(15) = arma::vec("10.3 1.6");
  points.col(45) = arma::vec("-100 0.0");
  points.col(101) = arma::vec("1.5 1.5");

  DBSCAN<> d(0.1, 3);

  arma::Row<size_t> assignments;
  const size_t clusters = d.Cluster(points, assignments);

  BOOST_REQUIRE_GT(clusters, 0);
  BOOST_REQUIRE_EQUAL(assignments.n_elem, points.n_cols);
  BOOST_REQUIRE_EQUAL(assignments[15], SIZE_MAX);
  BOOST_REQUIRE_EQUAL(assignments[45], SIZE_MAX);
  BOOST_REQUIRE_EQUAL(assignments[101], SIZE_MAX);
}

/**
 * Check that the Gaussian clusters are correctly found.
 */
BOOST_AUTO_TEST_CASE(GaussiansTest)
{
  arma::mat points(3, 300);

  GaussianDistribution g1(3), g2(3), g3(3);
  g1.Mean() = arma::vec("0.0 0.0 0.0");
  g2.Mean() = arma::vec("6.0 6.0 8.0");
  g3.Mean() = arma::vec("-6.0 1.0 -7.0");

  for (size_t i = 0; i < 100; ++i)
    points.col(i) = g1.Random();
  for (size_t i = 100; i < 200; ++i)
    points.col(i) = g2.Random();
  for (size_t i = 200; i < 300; ++i)
    points.col(i) = g3.Random();

  DBSCAN<> d(2.0, 3);

  arma::Row<size_t> assignments;
  arma::mat centroids;
  const size_t clusters = d.Cluster(points, assignments, centroids);

  BOOST_REQUIRE_EQUAL(clusters, 3);

  // Our centroids should be close to one of our Gaussians.
  arma::Row<size_t> matches(3);
  matches.fill(3);
  for (size_t j = 0; j < 3; ++j)
  {
    if (arma::norm(g1.Mean() - centroids.col(j)) < 3.0)
      matches(0) = j;
    else if (arma::norm(g2.Mean() - centroids.col(j)) < 3.0)
      matches(1) = j;
    else if (arma::norm(g3.Mean() - centroids.col(j)) < 3.0)
      matches(2) = j;
  }

  BOOST_REQUIRE_NE(matches(0), matches(1));
  BOOST_REQUIRE_NE(matches(1), matches(2));
  BOOST_REQUIRE_NE(matches(2), matches(0));

  BOOST_REQUIRE_NE(matches(0), 3);
  BOOST_REQUIRE_NE(matches(1), 3);
  BOOST_REQUIRE_NE(matches(2), 3);

  for (size_t i = 0; i < 100; ++i)
  {
    // Each point should either be noise or in cluster matches(0).
    BOOST_REQUIRE_NE(assignments(i), matches(1));
    BOOST_REQUIRE_NE(assignments(i), matches(2));
  }

  for (size_t i = 100; i < 200; ++i)
  {
    BOOST_REQUIRE_NE(assignments(i), matches(0));
    BOOST_REQUIRE_NE(assignments(i), matches(2));
  }

  for (size_t i = 200; i < 300; ++i)
  {
    BOOST_REQUIRE_NE(assignments(i), matches(0));
    BOOST_REQUIRE_NE(assignments(i), matches(1));
  }
}

BOOST_AUTO_TEST_CASE(OneClusterSingleModeTest)
{
  // Make sure that if we have points in the unit box, and if we set epsilon
  // large enough, all points end up in one cluster.
  arma::mat points(10, 200, arma::fill::randu);

  DBSCAN<> d(2.0, 2, false);

  arma::Row<size_t> assignments;
  const size_t clusters = d.Cluster(points, assignments);

  BOOST_REQUIRE_EQUAL(clusters, 1);
  BOOST_REQUIRE_EQUAL(assignments.n_elem, points.n_cols);
  for (size_t i = 0; i < assignments.n_elem; ++i)
    BOOST_REQUIRE_EQUAL(assignments[i], 0);
}

/**
 * When epsilon is small enough, every point returned should be noise.
 */
BOOST_AUTO_TEST_CASE(TinyEpsilonSingleModeTest)
{
  arma::mat points(10, 200, arma::fill::randu);

  DBSCAN<> d(1e-50, 2, false);

  arma::Row<size_t> assignments;
  const size_t clusters = d.Cluster(points, assignments);

  BOOST_REQUIRE_EQUAL(clusters, 0);
  BOOST_REQUIRE_EQUAL(assignments.n_elem, points.n_cols);
  for (size_t i = 0; i < assignments.n_elem; ++i)
    BOOST_REQUIRE_EQUAL(assignments[i], SIZE_MAX);
}

/**
 * Check that outliers are properly labeled as noise.
 */
BOOST_AUTO_TEST_CASE(OutlierSingleModeTest)
{
  arma::mat points(2, 200, arma::fill::randu);

  // Add 3 outliers.
  points.col(15) = arma::vec("10.3 1.6");
  points.col(45) = arma::vec("-100 0.0");
  points.col(101) = arma::vec("1.5 1.5");

  DBSCAN<> d(0.1, 3, false);

  arma::Row<size_t> assignments;
  const size_t clusters = d.Cluster(points, assignments);

  BOOST_REQUIRE_GT(clusters, 0);
  BOOST_REQUIRE_EQUAL(assignments.n_elem, points.n_cols);
  BOOST_REQUIRE_EQUAL(assignments[15], SIZE_MAX);
  BOOST_REQUIRE_EQUAL(assignments[45], SIZE_MAX);
  BOOST_REQUIRE_EQUAL(assignments[101], SIZE_MAX);
}

/**
 * Check that the Gaussian clusters are correctly found.
 */
BOOST_AUTO_TEST_CASE(GaussiansSingleModeTest)
{
  arma::mat points(3, 300);

  GaussianDistribution g1(3), g2(3), g3(3);
  g1.Mean() = arma::vec("0.0 0.0 0.0");
  g2.Mean() = arma::vec("6.0 6.0 8.0");
  g3.Mean() = arma::vec("-6.0 1.0 -7.0");

  for (size_t i = 0; i < 100; ++i)
    points.col(i) = g1.Random();
  for (size_t i = 100; i < 200; ++i)
    points.col(i) = g2.Random();
  for (size_t i = 200; i < 300; ++i)
    points.col(i) = g3.Random();

  DBSCAN<> d(2.0, 3);

  arma::Row<size_t> assignments;
  arma::mat centroids;
  const size_t clusters = d.Cluster(points, assignments, centroids);

  BOOST_REQUIRE_EQUAL(clusters, 3);

  // Our centroids should be close to one of our Gaussians.
  arma::Row<size_t> matches(3);
  matches.fill(3);
  for (size_t j = 0; j < 3; ++j)
  {
    if (arma::norm(g1.Mean() - centroids.col(j)) < 3.0)
      matches(0) = j;
    else if (arma::norm(g2.Mean() - centroids.col(j)) < 3.0)
      matches(1) = j;
    else if (arma::norm(g3.Mean() - centroids.col(j)) < 3.0)
      matches(2) = j;
  }

  BOOST_REQUIRE_NE(matches(0), matches(1));
  BOOST_REQUIRE_NE(matches(1), matches(2));
  BOOST_REQUIRE_NE(matches(2), matches(0));

  BOOST_REQUIRE_NE(matches(0), 3);
  BOOST_REQUIRE_NE(matches(1), 3);
  BOOST_REQUIRE_NE(matches(2), 3);

  for (size_t i = 0; i < 100; ++i)
  {
    // Each point should either be noise or in cluster matches(0).
    BOOST_REQUIRE_NE(assignments(i), matches(1));
    BOOST_REQUIRE_NE(assignments(i), matches(2));
  }

  for (size_t i = 100; i < 200; ++i)
  {
    BOOST_REQUIRE_NE(assignments(i), matches(0));
    BOOST_REQUIRE_NE(assignments(i), matches(2));
  }

  for (size_t i = 200; i < 300; ++i)
  {
    BOOST_REQUIRE_NE(assignments(i), matches(0));
    BOOST_REQUIRE_NE(assignments(i), matches(1));
  }
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/decision_stump_test.cpp000066400000000000000000000266551315013601400227660ustar00rootroot00000000000000
/**
 * @file decision_stump_test.cpp
 * @author Udit Saxena
 *
 * Tests for DecisionStump class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/decision_stump/decision_stump.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::decision_stump;
using namespace arma;
using namespace mlpack::distribution;

BOOST_AUTO_TEST_SUITE(DecisionStumpTest);

/**
 * This test handles the case wherein only one class exists in the input
 * labels.  It checks whether the only class supplied was the only class
 * predicted.
 */
BOOST_AUTO_TEST_CASE(OneClass)
{
  const size_t numClasses = 2;
  const size_t inpBucketSize = 6;

  mat trainingData;
  trainingData << 2.4 << 3.8 << 3.8 << endr
               << 1   << 1   << 2   << endr
               << 1.3 << 1.9 << 1.3 << endr;

  // No need to normalize labels here.
  Mat<size_t> labelsIn;
  labelsIn << 1 << 1 << 1;

  mat testingData;
  testingData << 2.4 << 2.5 << 2.6;

  DecisionStump<> ds(trainingData, labelsIn.row(0), numClasses, inpBucketSize);

  Row<size_t> predictedLabels;
  ds.Classify(testingData, predictedLabels);

  for (size_t i = 0; i < predictedLabels.size(); i++)
    BOOST_CHECK_EQUAL(predictedLabels(i), 1);
}

/**
 * This tests whether the entropy is being correctly calculated by checking the
 * correct value of the splitting column value.  This test is for an
 * inpBucketSize of 4 and the correct value of the splitting dimension is 0.
 */
BOOST_AUTO_TEST_CASE(CorrectDimensionChosen)
{
  const size_t numClasses = 2;
  const size_t inpBucketSize = 4;

  // This dataset comes from Chapter 6 of the book "Data Mining: Concepts,
  // Models, Methods, and Algorithms" (2nd Edition) by Mehmed Kantardzic.  It
  // is found on page 176 (and a description of the correct splitting
  // dimension is given below that).
  mat trainingData;
  trainingData << 0  << 0  << 0  << 0  << 0  << 1  << 1  << 1  << 1
               << 2  << 2  << 2  << 2  << 2  << endr
               << 70 << 90 << 85 << 95 << 70 << 90 << 78 << 65 << 75
               << 80 << 70 << 80 << 80 << 96 << endr
               << 1  << 1  << 0  << 0  << 0  << 1  << 0  << 1  << 0
               << 1  << 1  << 0  << 0  << 0  << endr;

  // No need to normalize labels here.
  Mat<size_t> labelsIn;
  labelsIn << 0 << 1 << 1 << 1 << 0 << 0 << 0 << 0 << 0 << 1 << 1 << 0 << 0
           << 0;

  DecisionStump<> ds(trainingData, labelsIn.row(0), numClasses, inpBucketSize);

  // Only need to check the value of the splitting column, no need of
  // classification.
  BOOST_CHECK_EQUAL(ds.SplitDimension(), 0);
}

/**
 * This tests for the classification:
 *   if testinput < 0 - class 0
 *   if testinput > 0 - class 1
 * An almost perfect split on zero.
 */
BOOST_AUTO_TEST_CASE(PerfectSplitOnZero)
{
  const size_t numClasses = 2;
  const size_t inpBucketSize = 2;

  mat trainingData;
  trainingData << -1 << 1 << -2 << 2 << -3 << 3;

  // No need to normalize labels here.
  Mat<size_t> labelsIn;
  labelsIn << 0 << 1 << 0 << 1 << 0 << 1;

  mat testingData;
  testingData << -4 << 7 << -7 << -5 << 6;

  DecisionStump<> ds(trainingData, labelsIn.row(0), numClasses, inpBucketSize);

  Row<size_t> predictedLabels;
  ds.Classify(testingData, predictedLabels);

  BOOST_CHECK_EQUAL(predictedLabels(0, 0), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 1), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 2), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 3), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 4), 1);
}

/**
 * This tests the binning function for the case when a dataset with cardinality
 * of input < inpBucketSize is provided.
 */
BOOST_AUTO_TEST_CASE(BinningTesting)
{
  const size_t numClasses = 2;
  const size_t inpBucketSize = 10;

  mat trainingData;
  trainingData << -1 << 1 << -2 << 2 << -3 << 3 << -4;

  // No need to normalize labels here.
  Mat<size_t> labelsIn;
  labelsIn << 0 << 1 << 0 << 1 << 0 << 1 << 0;

  mat testingData;
  testingData << 5;

  DecisionStump<> ds(trainingData, labelsIn.row(0), numClasses, inpBucketSize);

  Row<size_t> predictedLabels;
  ds.Classify(testingData, predictedLabels);

  BOOST_CHECK_EQUAL(predictedLabels(0, 0), 0);
}
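/**
 * Editor's sketch (not an original mlpack test case): a worked instance of
 * the entropy arithmetic behind the dimension choice tested above.  Binary
 * entropy is -p*log2(p) - (1 - p)*log2(1 - p); it is 1 bit for an even split
 * and lower for purer splits, which is why the stump prefers dimensions whose
 * buckets are close to pure.  Only std::log2() from <cmath> is assumed.
 */
BOOST_AUTO_TEST_CASE(EntropyArithmeticSketch)
{
  // An even 50/50 split has entropy of exactly 1 bit.
  const double even = -(0.5 * std::log2(0.5) + 0.5 * std::log2(0.5));
  BOOST_CHECK_CLOSE(even, 1.0, 1e-10);

  // A 75/25 split is purer than an even one, so its entropy is lower.
  const double skewed = -(0.75 * std::log2(0.75) + 0.25 * std::log2(0.25));
  BOOST_CHECK_LT(skewed, even);
}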
/**
 * This is a test for the case when non-overlapping, multiple classes are
 * provided.  It tests for a perfect split due to the non-overlapping nature of
 * the input classes.
 */
BOOST_AUTO_TEST_CASE(PerfectMultiClassSplit)
{
  const size_t numClasses = 4;
  const size_t inpBucketSize = 3;

  mat trainingData;
  trainingData << -8 << -7 << -6 << -5 << -4 << -3 << -2 << -1 << 0 << 1
               << 2 << 3 << 4 << 5 << 6 << 7;

  // No need to normalize labels here.
  Mat<size_t> labelsIn;
  labelsIn << 0 << 0 << 0 << 0 << 1 << 1 << 1 << 1
           << 2 << 2 << 2 << 2 << 3 << 3 << 3 << 3;

  mat testingData;
  testingData << -6.1 << -2.1 << 1.1 << 5.1;

  DecisionStump<> ds(trainingData, labelsIn.row(0), numClasses, inpBucketSize);

  Row<size_t> predictedLabels;
  ds.Classify(testingData, predictedLabels);

  BOOST_CHECK_EQUAL(predictedLabels(0, 0), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 1), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 2), 2);
  BOOST_CHECK_EQUAL(predictedLabels(0, 3), 3);
}

/**
 * This test is for the case when reasonably overlapping, multiple classes are
 * provided in the input label set.  It tests whether classification takes
 * place with a reasonable amount of error due to the overlapping nature of
 * the input classes.
 */
BOOST_AUTO_TEST_CASE(MultiClassSplit)
{
  const size_t numClasses = 3;
  const size_t inpBucketSize = 3;

  mat trainingData;
  trainingData << -7 << -6 << -5 << -4 << -3 << -2 << -1 << 0 << 1 << 2
               << 3 << 4 << 5 << 6 << 7 << 8 << 9 << 10;

  // No need to normalize labels here.
  Mat<size_t> labelsIn;
  labelsIn << 0 << 0 << 0 << 0 << 1 << 1 << 0 << 0 << 1 << 1
           << 1 << 2 << 1 << 2 << 2 << 2 << 2 << 2;

  mat testingData;
  testingData << -6.1 << -5.9 << -2.1 << -0.7 << 2.5 << 4.7 << 7.2 << 9.1;

  DecisionStump<> ds(trainingData, labelsIn.row(0), numClasses, inpBucketSize);

  Row<size_t> predictedLabels;
  ds.Classify(testingData, predictedLabels);

  BOOST_CHECK_EQUAL(predictedLabels(0, 0), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 1), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 2), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 3), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 4), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 5), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 6), 2);
  BOOST_CHECK_EQUAL(predictedLabels(0, 7), 2);
}

/**
 * This tests that the decision stump can learn a good split on a dataset with
 * four dimensions that have progressing levels of separation.
 */
BOOST_AUTO_TEST_CASE(DimensionSelectionTest)
{
  const size_t numClasses = 2;
  const size_t inpBucketSize = 2500;

  arma::mat dataset(4, 5000);

  // The most separable dimension.
  GaussianDistribution g1("-5", "1");
  GaussianDistribution g2("5", "1");

  for (size_t i = 0; i < 2500; ++i)
  {
    arma::vec tmp = g1.Random();
    dataset(1, i) = tmp[0];
  }
  for (size_t i = 2500; i < 5000; ++i)
  {
    arma::vec tmp = g2.Random();
    dataset(1, i) = tmp[0];
  }

  g1 = GaussianDistribution("-3", "1");
  g2 = GaussianDistribution("3", "1");

  for (size_t i = 0; i < 2500; ++i)
  {
    arma::vec tmp = g1.Random();
    dataset(3, i) = tmp[0];
  }
  for (size_t i = 2500; i < 5000; ++i)
  {
    arma::vec tmp = g2.Random();
    dataset(3, i) = tmp[0];
  }

  g1 = GaussianDistribution("-1", "1");
  g2 = GaussianDistribution("1", "1");

  for (size_t i = 0; i < 2500; ++i)
  {
    arma::vec tmp = g1.Random();
    dataset(0, i) = tmp[0];
  }
  for (size_t i = 2500; i < 5000; ++i)
  {
    arma::vec tmp = g2.Random();
    dataset(0, i) = tmp[0];
  }

  // Not separable at all.
  g1 = GaussianDistribution("0", "1");
  g2 = GaussianDistribution("0", "1");

  for (size_t i = 0; i < 2500; ++i)
  {
    arma::vec tmp = g1.Random();
    dataset(2, i) = tmp[0];
  }
  for (size_t i = 2500; i < 5000; ++i)
  {
    arma::vec tmp = g2.Random();
    dataset(2, i) = tmp[0];
  }

  // Generate the labels.
  arma::Row<size_t> labels(5000);
  for (size_t i = 0; i < 2500; ++i)
    labels[i] = 0;
  for (size_t i = 2500; i < 5000; ++i)
    labels[i] = 1;

  // Now create a decision stump.
  DecisionStump<> ds(dataset, labels, numClasses, inpBucketSize);

  // Make sure it split on the dimension that is most separable.
  BOOST_CHECK_EQUAL(ds.SplitDimension(), 1);

  // Make sure every bin below -3 classifies as label 0, and every bin above 3
  // classifies as label 1 (what happens in [-3, 3] isn't that big a deal).
  for (size_t i = 0; i < ds.Split().n_elem; ++i)
  {
    if (ds.Split()[i] <= -3.0)
      BOOST_CHECK_EQUAL(ds.BinLabels()[i], 0);
    else if (ds.Split()[i] >= 3.0)
      BOOST_CHECK_EQUAL(ds.BinLabels()[i], 1);
  }
}

/**
 * Ensure that the default constructor works and that it classifies things as 0
 * always.
 */
BOOST_AUTO_TEST_CASE(EmptyConstructorTest)
{
  DecisionStump<> d;

  arma::mat data = arma::randu<arma::mat>(3, 10);
  arma::Row<size_t> labels;
  d.Classify(data, labels);

  for (size_t i = 0; i < 10; ++i)
    BOOST_REQUIRE_EQUAL(labels[i], 0);

  // Now train on another dataset and make sure something kind of makes sense.
  mat trainingData;
  trainingData << -7 << -6 << -5 << -4 << -3 << -2 << -1 << 0 << 1 << 2
               << 3 << 4 << 5 << 6 << 7 << 8 << 9 << 10;

  // No need to normalize labels here.
  Mat<size_t> labelsIn;
  labelsIn << 0 << 0 << 0 << 0 << 1 << 1 << 0 << 0 << 1 << 1
           << 1 << 2 << 1 << 2 << 2 << 2 << 2 << 2;

  mat testingData;
  testingData << -6.1 << -5.9 << -2.1 << -0.7 << 2.5 << 4.7 << 7.2 << 9.1;

  DecisionStump<> ds(trainingData, labelsIn.row(0), 4, 3);

  Row<size_t> predictedLabels(testingData.n_cols);
  ds.Classify(testingData, predictedLabels);

  BOOST_CHECK_EQUAL(predictedLabels(0, 0), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 1), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 2), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 3), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 4), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 5), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 6), 2);
  BOOST_CHECK_EQUAL(predictedLabels(0, 7), 2);
}

/**
 * Ensure that a matrix holding ints can be trained.  The bigger issue here is
 * just compilation.
 */
BOOST_AUTO_TEST_CASE(IntTest)
{
  // Train on a dataset and make sure something kind of makes sense.
  imat trainingData;
  trainingData << -7 << -6 << -5 << -4 << -3 << -2 << -1 << 0 << 1 << 2
               << 3 << 4 << 5 << 6 << 7 << 8 << 9 << 10;

  // No need to normalize labels here.
  Mat<size_t> labelsIn;
  labelsIn << 0 << 0 << 0 << 0 << 1 << 1 << 0 << 0 << 1 << 1
           << 1 << 2 << 1 << 2 << 2 << 2 << 2 << 2;

  DecisionStump<arma::imat> ds(trainingData, labelsIn.row(0), 4, 3);

  imat testingData;
  testingData << -6 << -6 << -2 << -1 << 3 << 5 << 7 << 9;

  arma::Row<size_t> predictedLabels;
  ds.Classify(testingData, predictedLabels);

  BOOST_CHECK_EQUAL(predictedLabels(0, 0), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 1), 0);
  BOOST_CHECK_EQUAL(predictedLabels(0, 2), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 3), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 4), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 5), 1);
  BOOST_CHECK_EQUAL(predictedLabels(0, 6), 2);
  BOOST_CHECK_EQUAL(predictedLabels(0, 7), 2);
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/decision_tree_test.cpp000066400000000000000000000422201315013601400225400ustar00rootroot00000000000000
/**
 * @file decision_tree_test.cpp
 * @author Ryan Curtin
 *
 * Tests for the DecisionTree class and related classes.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/decision_tree/decision_tree.hpp>
#include <mlpack/methods/decision_tree/information_gain.hpp>
#include <mlpack/methods/decision_tree/gini_gain.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"
#include "serialization.hpp"

using namespace mlpack;
using namespace mlpack::tree;
using namespace mlpack::distribution;

BOOST_AUTO_TEST_SUITE(DecisionTreeTest);
/**
 * Make sure the Gini gain is zero when the labels are perfect.
 */
BOOST_AUTO_TEST_CASE(GiniGainPerfectTest)
{
  arma::Row<size_t> labels;
  labels.zeros(10);

  // Test that it's perfect regardless of number of classes.
  for (size_t c = 1; c < 10; ++c)
    BOOST_REQUIRE_SMALL(GiniGain::Evaluate(labels, c), 1e-5);
}

/**
 * Make sure the Gini gain is -0.5 when the class split between two classes
 * is even.
 */
BOOST_AUTO_TEST_CASE(GiniGainEvenSplitTest)
{
  arma::Row<size_t> labels(10);
  for (size_t i = 0; i < 5; ++i)
    labels[i] = 0;
  for (size_t i = 5; i < 10; ++i)
    labels[i] = 1;

  // Test that it's -0.5 regardless of the number of classes.
  for (size_t c = 2; c < 10; ++c)
    BOOST_REQUIRE_CLOSE(GiniGain::Evaluate(labels, c), -0.5, 1e-5);
}

/**
 * The Gini gain of an empty vector is 0.
 */
BOOST_AUTO_TEST_CASE(GiniGainEmptyTest)
{
  // Test across some numbers of classes.
  arma::Row<size_t> labels;
  for (size_t c = 1; c < 10; ++c)
    BOOST_REQUIRE_SMALL(GiniGain::Evaluate(labels, c), 1e-5);
}

/**
 * The Gini gain is -(1 - 1/k) for k classes evenly split.
 */
BOOST_AUTO_TEST_CASE(GiniGainEvenSplitManyClassTest)
{
  // Try with many different classes.
  for (size_t c = 2; c < 30; ++c)
  {
    arma::Row<size_t> labels(c);
    for (size_t i = 0; i < c; ++i)
      labels[i] = i;

    // Calculate Gini gain and make sure it is correct.
    BOOST_REQUIRE_CLOSE(GiniGain::Evaluate(labels, c), -(1.0 - 1.0 / c), 1e-5);
  }
}

/**
 * The Gini gain should not be sensitive to the number of points.
 */
BOOST_AUTO_TEST_CASE(GiniGainManyPoints)
{
  for (size_t i = 1; i < 20; ++i)
  {
    const size_t numPoints = 100 * i;
    arma::Row<size_t> labels(numPoints);
    for (size_t j = 0; j < numPoints / 2; ++j)
      labels[j] = 0;
    for (size_t j = numPoints / 2; j < numPoints; ++j)
      labels[j] = 1;

    BOOST_REQUIRE_CLOSE(GiniGain::Evaluate(labels, 2), -0.5, 1e-5);
  }
}
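/**
 * Editor's worked example (not an original mlpack test case): a concrete
 * instance of the Gini gain formula exercised above.  For class proportions
 * p and (1 - p), the gain is -(1 - p^2 - (1 - p)^2); with three points of
 * class 0 and one of class 1, that is -(1 - 0.75^2 - 0.25^2) = -0.375.  Only
 * GiniGain::Evaluate(), already used above, is assumed.
 */
BOOST_AUTO_TEST_CASE(GiniGainWorkedExample)
{
  arma::Row<size_t> labels("0 0 0 1");
  BOOST_REQUIRE_CLOSE(GiniGain::Evaluate(labels, 2), -0.375, 1e-5);
}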
/**
 * The information gain should be zero when the labels are perfect.
 */
BOOST_AUTO_TEST_CASE(InformationGainPerfectTest)
{
  arma::Row<size_t> labels;
  labels.zeros(10);

  // Test that it's perfect regardless of number of classes.
  for (size_t c = 1; c < 10; ++c)
    BOOST_REQUIRE_SMALL(InformationGain::Evaluate(labels, c), 1e-5);
}

/**
 * If we have an even split, the information gain should be -1.
 */
BOOST_AUTO_TEST_CASE(InformationGainEvenSplitTest)
{
  arma::Row<size_t> labels(10);
  for (size_t i = 0; i < 5; ++i)
    labels[i] = 0;
  for (size_t i = 5; i < 10; ++i)
    labels[i] = 1;

  // Test that it's -1 regardless of the number of classes.
  for (size_t c = 2; c < 10; ++c)
    BOOST_REQUIRE_CLOSE(InformationGain::Evaluate(labels, c), -1.0, 1e-5);
}

/**
 * The information gain of an empty vector is 0.
 */
BOOST_AUTO_TEST_CASE(InformationGainEmptyTest)
{
  arma::Row<size_t> labels;
  for (size_t c = 1; c < 10; ++c)
    BOOST_REQUIRE_SMALL(InformationGain::Evaluate(labels, c), 1e-5);
}

/**
 * The information gain is log2(1/k) when splitting k equal classes.
 */
BOOST_AUTO_TEST_CASE(InformationGainEvenSplitManyClassTest)
{
  // Try with many different numbers of classes.
  for (size_t c = 2; c < 30; ++c)
  {
    arma::Row<size_t> labels(c);
    for (size_t i = 0; i < c; ++i)
      labels[i] = i;

    // Calculate information gain and make sure it is correct.
    BOOST_REQUIRE_CLOSE(InformationGain::Evaluate(labels, c),
        std::log2(1.0 / c), 1e-5);
  }
}

/**
 * The information gain should not be sensitive to the number of points.
 */
BOOST_AUTO_TEST_CASE(InformationGainManyPoints)
{
  for (size_t i = 1; i < 20; ++i)
  {
    const size_t numPoints = 100 * i;
    arma::Row<size_t> labels(numPoints);
    for (size_t j = 0; j < numPoints / 2; ++j)
      labels[j] = 0;
    for (size_t j = numPoints / 2; j < numPoints; ++j)
      labels[j] = 1;

    BOOST_REQUIRE_CLOSE(InformationGain::Evaluate(labels, 2), -1.0, 1e-5);
  }
}
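/**
 * Editor's worked example (not an original mlpack test case): the information
 * gain is the negative entropy of the labels.  For three points of class 0
 * and one of class 1, that is 0.75*log2(0.75) + 0.25*log2(0.25), roughly
 * -0.8113.  Only InformationGain::Evaluate(), already used above, is assumed.
 */
BOOST_AUTO_TEST_CASE(InformationGainWorkedExample)
{
  arma::Row<size_t> labels("0 0 0 1");
  BOOST_REQUIRE_CLOSE(InformationGain::Evaluate(labels, 2),
      -0.81127812445913283, 1e-5);
}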
/**
 * Check that the BestBinaryNumericSplit will split on an obviously splittable
 * dimension.
 */
BOOST_AUTO_TEST_CASE(BestBinaryNumericSplitSimpleSplitTest)
{
  arma::vec values("0.0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0");
  arma::Row<size_t> labels("0 0 0 0 0 1 1 1 1 1 1");

  arma::vec classProbabilities;
  BestBinaryNumericSplit<GiniGain>::template AuxiliarySplitInfo<double> aux;

  // Call the method to do the splitting.
  const double bestGain = GiniGain::Evaluate(labels, 2);
  const double gain = BestBinaryNumericSplit<GiniGain>::SplitIfBetter(bestGain,
      values, labels, 2, 3, classProbabilities, aux);

  // Make sure that a split was made.
  BOOST_REQUIRE_GT(gain, bestGain);

  // The split is perfect, so we should be able to accomplish a gain of 0.
  BOOST_REQUIRE_SMALL(gain, 1e-5);

  // The class probabilities, for this split, hold the splitting point, which
  // should be between 0.4 and 0.5.
  BOOST_REQUIRE_EQUAL(classProbabilities.n_elem, 1);
  BOOST_REQUIRE_GT(classProbabilities[0], 0.4);
  BOOST_REQUIRE_LT(classProbabilities[0], 0.5);
}

/**
 * Check that the BestBinaryNumericSplit won't split if not enough points are
 * given.
 */
BOOST_AUTO_TEST_CASE(BestBinaryNumericSplitMinSamplesTest)
{
  arma::vec values("0.0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0");
  arma::Row<size_t> labels("0 0 0 0 0 1 1 1 1 1 1");

  arma::vec classProbabilities;
  BestBinaryNumericSplit<GiniGain>::template AuxiliarySplitInfo<double> aux;

  // Call the method to do the splitting.
  const double bestGain = GiniGain::Evaluate(labels, 2);
  const double gain = BestBinaryNumericSplit<GiniGain>::SplitIfBetter(bestGain,
      values, labels, 2, 8, classProbabilities, aux);

  // Make sure that no split was made.
  BOOST_REQUIRE_EQUAL(gain, bestGain);
  BOOST_REQUIRE_EQUAL(classProbabilities.n_elem, 0);
}

/**
 * Check that the BestBinaryNumericSplit doesn't split a dimension that gives
 * no gain.
 */
BOOST_AUTO_TEST_CASE(BestBinaryNumericSplitNoGainTest)
{
  arma::vec values(100);
  arma::Row<size_t> labels(100);
  for (size_t i = 0; i < 100; i += 2)
  {
    values[i] = i;
    labels[i] = 0;
    values[i + 1] = i;
    labels[i + 1] = 1;
  }

  arma::vec classProbabilities;
  BestBinaryNumericSplit<GiniGain>::template AuxiliarySplitInfo<double> aux;

  // Call the method to do the splitting.
  const double bestGain = GiniGain::Evaluate(labels, 2);
  const double gain = BestBinaryNumericSplit<GiniGain>::SplitIfBetter(bestGain,
      values, labels, 2, 10, classProbabilities, aux);

  // Make sure there was no split.
  BOOST_REQUIRE_EQUAL(gain, bestGain);
  BOOST_REQUIRE_EQUAL(classProbabilities.n_elem, 0);
}

/**
 * Check that the AllCategoricalSplit will split when the split is obviously
 * better.
 */
BOOST_AUTO_TEST_CASE(AllCategoricalSplitSimpleSplitTest)
{
  arma::vec values("0 0 0 1 1 1 2 2 2 3 3 3");
  arma::Row<size_t> labels("0 0 0 2 2 2 1 1 1 2 2 2");

  arma::vec classProbabilities;
  AllCategoricalSplit<GiniGain>::template AuxiliarySplitInfo<double> aux;

  // Call the method to do the splitting.
  const double bestGain = GiniGain::Evaluate(labels, 3);
  const double gain = AllCategoricalSplit<GiniGain>::SplitIfBetter(bestGain,
      values, 4, labels, 3, 3, classProbabilities, aux);

  // Make sure that a split was made.
  BOOST_REQUIRE_GT(gain, bestGain);

  // Since the split is perfect, make sure the new gain is 0.
  BOOST_REQUIRE_SMALL(gain, 1e-5);

  // Make sure the class probabilities now hold the number of children.
  BOOST_REQUIRE_EQUAL(classProbabilities.n_elem, 1);
  BOOST_REQUIRE_EQUAL((size_t) classProbabilities[0], 4);
}

/**
 * Make sure that AllCategoricalSplit respects the minimum number of samples
 * required to split.
 */
BOOST_AUTO_TEST_CASE(AllCategoricalSplitMinSamplesTest)
{
  arma::vec values("0 0 0 1 1 1 2 2 2 3 3 3");
  arma::Row<size_t> labels("0 0 0 2 2 2 1 1 1 2 2 2");

  arma::vec classProbabilities;
  AllCategoricalSplit<GiniGain>::template AuxiliarySplitInfo<double> aux;

  // Call the method to do the splitting.
  const double bestGain = GiniGain::Evaluate(labels, 3);
  const double gain = AllCategoricalSplit<GiniGain>::SplitIfBetter(bestGain,
      values, 4, labels, 3, 4, classProbabilities, aux);

  // Make sure it's not split.
  BOOST_REQUIRE_EQUAL(gain, bestGain);
  BOOST_REQUIRE_EQUAL(classProbabilities.n_elem, 0);
}

/**
 * Check that no split is made when it doesn't get us anything.
 */
BOOST_AUTO_TEST_CASE(AllCategoricalSplitNoGainTest)
{
  arma::vec values(300);
  arma::Row<size_t> labels(300);
  for (size_t i = 0; i < 300; i += 3)
  {
    values[i] = (i / 3) % 10;
    labels[i] = 0;
    values[i + 1] = (i / 3) % 10;
    labels[i + 1] = 1;
    values[i + 2] = (i / 3) % 10;
    labels[i + 2] = 2;
  }

  arma::vec classProbabilities;
  AllCategoricalSplit<GiniGain>::template AuxiliarySplitInfo<double> aux;

  // Call the method to do the splitting.
  const double bestGain = GiniGain::Evaluate(labels, 3);
  const double gain = AllCategoricalSplit<GiniGain>::SplitIfBetter(bestGain,
      values, 10, labels, 3, 10, classProbabilities, aux);

  // Make sure that there was no split.
  BOOST_REQUIRE_EQUAL(gain, bestGain);
  BOOST_REQUIRE_EQUAL(classProbabilities.n_elem, 0);
}

/**
 * A basic construction of the decision tree---ensure that we can create the
 * tree and that it split at least once.
 */
BOOST_AUTO_TEST_CASE(BasicConstructionTest)
{
  arma::mat dataset(10, 1000, arma::fill::randu);
  arma::Row<size_t> labels(1000);
  for (size_t i = 0; i < 1000; ++i)
    labels[i] = i % 3; // 3 classes.

  // Use default parameters.
  DecisionTree<> d(dataset, labels, 3, 50);

  // Now require that we have some children.
  BOOST_REQUIRE_GT(d.NumChildren(), 0);
}

/**
 * Construct the decision tree on numeric data only and see that we can fit it
 * exactly and achieve perfect performance on the training set.
 */
BOOST_AUTO_TEST_CASE(PerfectTrainingSet)
{
  // Completely random dataset with no structure.
  arma::mat dataset(10, 1000, arma::fill::randu);
  arma::Row<size_t> labels(1000);
  for (size_t i = 0; i < 1000; ++i)
    labels[i] = i % 3; // 3 classes.

  DecisionTree<> d(dataset, labels, 3, 1); // Minimum leaf size of 1.

  // Make sure that we can get perfect accuracy on the training set.
  for (size_t i = 0; i < 1000; ++i)
  {
    size_t prediction;
    arma::vec probabilities;
    d.Classify(dataset.col(i), prediction, probabilities);

    BOOST_REQUIRE_EQUAL(prediction, labels[i]);
    BOOST_REQUIRE_EQUAL(probabilities.n_elem, 3);
    for (size_t j = 0; j < 3; ++j)
    {
      if (labels[i] == j)
        BOOST_REQUIRE_CLOSE(probabilities[j], 1.0, 1e-5);
      else
        BOOST_REQUIRE_SMALL(probabilities[j], 1e-5);
    }
  }
}
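/**
 * Editor's sketch (not an original mlpack test case): the class probabilities
 * returned by Classify() form a distribution over classes, so they should sum
 * to 1.  Only the constructor and the Classify() overload used above are
 * assumed.
 */
BOOST_AUTO_TEST_CASE(ClassProbabilitySumTest)
{
  arma::mat dataset(10, 100, arma::fill::randu);
  arma::Row<size_t> labels(100);
  for (size_t i = 0; i < 100; ++i)
    labels[i] = i % 2; // 2 classes.

  DecisionTree<> d(dataset, labels, 2, 5);

  size_t prediction;
  arma::vec probabilities;
  d.Classify(dataset.col(0), prediction, probabilities);

  BOOST_REQUIRE_CLOSE(arma::accu(probabilities), 1.0, 1e-5);
}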
/**
 * Make sure class probabilities are computed correctly in the root node.
 */
BOOST_AUTO_TEST_CASE(ClassProbabilityTest)
{
  arma::mat dataset(5, 100, arma::fill::randu);
  arma::Row<size_t> labels(100);
  for (size_t i = 0; i < 100; i += 2)
  {
    labels[i] = 0;
    labels[i + 1] = 1;
  }

  // Create a decision tree that can't split.
  DecisionTree<> d(dataset, labels, 2, 1000);
  BOOST_REQUIRE_EQUAL(d.NumChildren(), 0);

  // Estimate a point's probabilities.
  arma::vec probabilities;
  size_t prediction;
  d.Classify(dataset.col(0), prediction, probabilities);

  BOOST_REQUIRE_EQUAL(probabilities.n_elem, 2);
  BOOST_REQUIRE_CLOSE(probabilities[0], 0.5, 1e-5);
  BOOST_REQUIRE_CLOSE(probabilities[1], 0.5, 1e-5);
}

/**
 * Test that the decision tree generalizes reasonably.
 */
BOOST_AUTO_TEST_CASE(SimpleGeneralizationTest)
{
  arma::mat inputData;
  if (!data::Load("vc2.csv", inputData))
    BOOST_FAIL("Cannot load test dataset vc2.csv!");

  arma::Mat<size_t> labels;
  if (!data::Load("vc2_labels.txt", labels))
    BOOST_FAIL("Cannot load labels for vc2_labels.txt");

  // Build decision tree.
  DecisionTree<> d(inputData, labels, 3, 10); // Leaf size of 10.

  // Load testing data.
  arma::mat testData;
  if (!data::Load("vc2_test.csv", testData))
    BOOST_FAIL("Cannot load test dataset vc2_test.csv!");

  arma::Mat<size_t> trueTestLabels;
  if (!data::Load("vc2_test_labels.txt", trueTestLabels))
    BOOST_FAIL("Cannot load labels for vc2_test_labels.txt");

  // Get the predicted test labels.
  arma::Row<size_t> predictions;
  d.Classify(testData, predictions);

  BOOST_REQUIRE_EQUAL(predictions.n_elem, testData.n_cols);

  // Figure out the accuracy.
  double correct = 0.0;
  for (size_t i = 0; i < predictions.n_elem; ++i)
    if (predictions[i] == trueTestLabels[i])
      ++correct;
  correct /= predictions.n_elem;

  BOOST_REQUIRE_GT(correct, 0.75);
}

/**
 * Test that we can build a decision tree on a simple categorical dataset.
 */
BOOST_AUTO_TEST_CASE(CategoricalBuildTest)
{
  // We'll build a spiral dataset plus two noisy categorical features.  We
  // need to build the distributions for the categorical features (they'll be
  // discrete distributions).
  DiscreteDistribution c1[5];
  // The distribution will be automatically normalized.
  for (size_t i = 0; i < 5; ++i)
  {
    std::vector<arma::vec> probs;
    probs.push_back(arma::vec(4, arma::fill::randu));
    c1[i] = DiscreteDistribution(probs);
  }

  DiscreteDistribution c2[5];
  for (size_t i = 0; i < 5; ++i)
  {
    std::vector<arma::vec> probs;
    probs.push_back(arma::vec(2, arma::fill::randu));
    c2[i] = DiscreteDistribution(probs);
  }

  arma::mat spiralDataset(4, 10000);
  arma::Row<size_t> labels(10000);
  for (size_t i = 0; i < 10000; ++i)
  {
    // One circle every 2000 samples, plus some noise.
    const double magnitude = 2.0 + (double(i) / 2000.0) +
        0.5 * mlpack::math::Random();
    const double angle = (i % 2000) * (2 * M_PI) + mlpack::math::Random();

    const double x = magnitude * cos(angle);
    const double y = magnitude * sin(angle);

    spiralDataset(0, i) = x;
    spiralDataset(1, i) = y;

    // Set categorical features c1 and c2.
    if (i < 2000)
    {
      spiralDataset(2, i) = c1[1].Random()[0];
      spiralDataset(3, i) = c2[1].Random()[0];
      labels[i] = 1;
    }
    else if (i < 4000)
    {
      spiralDataset(2, i) = c1[3].Random()[0];
      spiralDataset(3, i) = c2[3].Random()[0];
      labels[i] = 3;
    }
    else if (i < 6000)
    {
      spiralDataset(2, i) = c1[2].Random()[0];
      spiralDataset(3, i) = c2[2].Random()[0];
      labels[i] = 2;
    }
    else if (i < 8000)
    {
      spiralDataset(2, i) = c1[0].Random()[0];
      spiralDataset(3, i) = c2[0].Random()[0];
      labels[i] = 0;
    }
    else
    {
      spiralDataset(2, i) = c1[4].Random()[0];
      spiralDataset(3, i) = c2[4].Random()[0];
      labels[i] = 4;
    }
  }

  // Now create the dataset info.
  data::DatasetInfo di(4);
  di.Type(2) = data::Datatype::categorical;
  di.Type(3) = data::Datatype::categorical;
  // Set mappings.
  di.MapString("0", 2);
  di.MapString("1", 2);
  di.MapString("2", 2);
  di.MapString("3", 2);
  di.MapString("0", 3);
  di.MapString("1", 3);

  // Now shuffle the dataset.
  arma::uvec indices = arma::shuffle(arma::linspace<arma::uvec>(0, 9999,
      10000));
  arma::mat d(4, 10000);
  arma::Row<size_t> l(10000);
  for (size_t i = 0; i < 10000; ++i)
  {
    d.col(i) = spiralDataset.col(indices[i]);
    l[i] = labels[indices[i]];
  }

  // Split into a training set and a test set.
  arma::mat trainingData = d.cols(0, 4999);
  arma::mat testData = d.cols(5000, 9999);
  arma::Row<size_t> trainingLabels = l.subvec(0, 4999);
  arma::Row<size_t> testLabels = l.subvec(5000, 9999);

  // Build the tree.
  DecisionTree<> tree(trainingData, di, trainingLabels, 5, 10);

  // Now evaluate the accuracy of the tree.
  arma::Row<size_t> predictions;
  tree.Classify(testData, predictions);

  BOOST_REQUIRE_EQUAL(predictions.n_elem, testData.n_cols);
  size_t correct = 0;
  for (size_t i = 0; i < testData.n_cols; ++i)
    if (testLabels[i] == predictions[i])
      ++correct;

  // Make sure we got at least 70% accuracy.
  const double correctPct = double(correct) / double(testData.n_cols);
  BOOST_REQUIRE_GT(correctPct, 0.70);
}

/**
 * Make sure that when we ask for a decision stump, we get one.
 */
BOOST_AUTO_TEST_CASE(DecisionStumpTest)
{
  // Use a random dataset.
  arma::mat dataset(10, 1000, arma::fill::randu);
  arma::Row<size_t> labels(1000);
  for (size_t i = 0; i < 1000; ++i)
    labels[i] = i % 3; // 3 classes.

  // Build a decision stump.
  DecisionTree<GiniGain, BestBinaryNumericSplit, AllCategoricalSplit, double,
      true> stump(dataset, labels, 3, 1);

  // Check that it has children.
  BOOST_REQUIRE_EQUAL(stump.NumChildren(), 2);
  // Check that its children don't have children.
  BOOST_REQUIRE_EQUAL(stump.Child(0).NumChildren(), 0);
  BOOST_REQUIRE_EQUAL(stump.Child(1).NumChildren(), 0);
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/det_test.cpp000066400000000000000000000246571315013601400205140ustar00rootroot00000000000000
/**
 * @file det_test.cpp
 * @author Parikshit Ram (pram@cc.gatech.edu)
 *
 * Unit tests for the functions of the class DTree and the utility functions
 * using this class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

// This trick does not work on Windows.  We will have to comment out the tests
// that depend on it.
#ifndef _WIN32
  #define protected public
  #define private public
#endif

#include <mlpack/methods/det/dtree.hpp>
#include <mlpack/methods/det/dt_utils.hpp>

#ifndef _WIN32
  #undef protected
  #undef private
#endif

using namespace mlpack;
using namespace mlpack::det;
using namespace std;

BOOST_AUTO_TEST_SUITE(DETTest);

// Tests for the private functions.  We cannot perform these if we are on
// Windows because we cannot make private functions accessible using the macro
// trick above.
#ifndef _WIN32
BOOST_AUTO_TEST_CASE(TestGetMaxMinVals)
{
  arma::mat testData(3, 5);

  testData << 4 << 5 << 7 << 3 << 5 << arma::endr
           << 5 << 0 << 1 << 7 << 1 << arma::endr
           << 5 << 6 << 7 << 1 << 8 << arma::endr;

  DTree tree(testData);

  BOOST_REQUIRE_EQUAL(tree.maxVals[0], 7);
  BOOST_REQUIRE_EQUAL(tree.minVals[0], 3);
  BOOST_REQUIRE_EQUAL(tree.maxVals[1], 7);
  BOOST_REQUIRE_EQUAL(tree.minVals[1], 0);
  BOOST_REQUIRE_EQUAL(tree.maxVals[2], 8);
  BOOST_REQUIRE_EQUAL(tree.minVals[2], 1);
}

BOOST_AUTO_TEST_CASE(TestComputeNodeError)
{
  arma::vec maxVals("7 7 8");
  arma::vec minVals("3 0 1");

  DTree testDTree(maxVals, minVals, 5);
  double trueNodeError = -log(4.0) - log(7.0) - log(7.0);

  BOOST_REQUIRE_CLOSE((double) testDTree.logNegError, trueNodeError, 1e-10);

  testDTree.start = 3;
  testDTree.end = 5;

  double nodeError = testDTree.LogNegativeError(5);
  trueNodeError = 2 * log(2.0 / 5.0) - log(4.0) - log(7.0) - log(7.0);
  BOOST_REQUIRE_CLOSE(nodeError, trueNodeError, 1e-10);
}

BOOST_AUTO_TEST_CASE(TestWithinRange)
{
  arma::vec maxVals("7 7 8");
  arma::vec minVals("3 0 1");

  DTree testDTree(maxVals, minVals, 5);

  arma::vec testQuery(3);
  testQuery << 4.5 << 2.5 << 2;

  BOOST_REQUIRE_EQUAL(testDTree.WithinRange(testQuery), true);

  testQuery << 8.5 << 2.5 << 2;

  BOOST_REQUIRE_EQUAL(testDTree.WithinRange(testQuery), false);
}

BOOST_AUTO_TEST_CASE(TestFindSplit)
{
  arma::mat testData(3, 5);

  testData << 4 << 5 << 7 << 3 << 5 << arma::endr
           << 5 << 0 << 1 << 7 << 1 << arma::endr
           << 5 << 6 << 7 << 1 << 8 << arma::endr;

  DTree testDTree(testData);

  size_t obDim, trueDim;
  double trueLeftError, obLeftError, trueRightError, obRightError, obSplit,
      trueSplit;

  trueDim = 2;
  trueSplit = 5.5;
  trueLeftError = 2 * log(2.0 / 5.0) - (log(7.0) + log(4.0) + log(4.5));
  trueRightError = 2 * log(3.0 / 5.0) - (log(7.0) + log(4.0) + log(2.5));

  testDTree.logVolume = log(7.0) + log(4.0) + log(7.0);
  BOOST_REQUIRE(testDTree.FindSplit(testData, obDim, obSplit, obLeftError,
      obRightError, 1));

  BOOST_REQUIRE(trueDim == obDim);
  BOOST_REQUIRE_CLOSE(trueSplit, obSplit, 1e-10);

  BOOST_REQUIRE_CLOSE(trueLeftError, obLeftError, 1e-10);
  BOOST_REQUIRE_CLOSE(trueRightError, obRightError, 1e-10);
}

BOOST_AUTO_TEST_CASE(TestSplitData)
{
  arma::mat testData(3, 5);

  testData << 4 << 5 << 7 << 3 << 5 << arma::endr
           << 5 << 0 << 1 << 7 << 1 << arma::endr
           << 5 << 6 << 7 << 1 << 8 << arma::endr;

  DTree testDTree(testData);

  arma::Col<size_t> oTest(5);
  oTest << 1 << 2 << 3 << 4 << 5;

  size_t splitDim = 2;
  double trueSplitVal = 5.5;

  size_t splitInd = testDTree.SplitData(testData, splitDim, trueSplitVal,
      oTest);

  BOOST_REQUIRE_EQUAL(splitInd, 2); // 2 points on left side.

  BOOST_REQUIRE_EQUAL(oTest[0], 1);
  BOOST_REQUIRE_EQUAL(oTest[1], 4);
  BOOST_REQUIRE_EQUAL(oTest[2], 3);
  BOOST_REQUIRE_EQUAL(oTest[3], 2);
  BOOST_REQUIRE_EQUAL(oTest[4], 5);
}
#endif

// Tests for the public functions.
BOOST_AUTO_TEST_CASE(TestGrow)
{
  arma::mat testData(3, 5);

  testData << 4 << 5 << 7 << 3 << 5 << arma::endr
           << 5 << 0 << 1 << 7 << 1 << arma::endr
           << 5 << 6 << 7 << 1 << 8 << arma::endr;

  arma::Col<size_t> oTest(5);
  oTest << 0 << 1 << 2 << 3 << 4;

  double rootError, lError, rError, rlError, rrError;

  rootError = -log(4.0) - log(7.0) - log(7.0);

  lError = 2 * log(2.0 / 5.0) - (log(7.0) + log(4.0) + log(4.5));
  rError = 2 * log(3.0 / 5.0) - (log(7.0) + log(4.0) + log(2.5));

  rlError = 2 * log(1.0 / 5.0) - (log(0.5) + log(4.0) + log(2.5));
  rrError = 2 * log(2.0 / 5.0) - (log(6.5) + log(4.0) + log(2.5));

  DTree testDTree(testData);
  double alpha = testDTree.Grow(testData, oTest, false, 2, 1);

  BOOST_REQUIRE_EQUAL(oTest[0], 0);
  BOOST_REQUIRE_EQUAL(oTest[1], 3);
  BOOST_REQUIRE_EQUAL(oTest[2], 1);
  BOOST_REQUIRE_EQUAL(oTest[3], 2);
  BOOST_REQUIRE_EQUAL(oTest[4], 4);

  // Test the structure of the tree.
  BOOST_REQUIRE(testDTree.Left()->Left() == NULL);
  BOOST_REQUIRE(testDTree.Left()->Right() == NULL);
  BOOST_REQUIRE(testDTree.Right()->Left()->Left() == NULL);
  BOOST_REQUIRE(testDTree.Right()->Left()->Right() == NULL);
  BOOST_REQUIRE(testDTree.Right()->Right()->Left() == NULL);
  BOOST_REQUIRE(testDTree.Right()->Right()->Right() == NULL);

  BOOST_REQUIRE(testDTree.SubtreeLeaves() == 3);

  BOOST_REQUIRE(testDTree.SplitDim() == 2);
  BOOST_REQUIRE_CLOSE(testDTree.SplitValue(), 5.5, 1e-5);
  BOOST_REQUIRE(testDTree.Right()->SplitDim() == 1);
  BOOST_REQUIRE_CLOSE(testDTree.Right()->SplitValue(), 0.5, 1e-5);

  // Test node errors for every node (these are private functions).
#ifndef _WIN32
  BOOST_REQUIRE_CLOSE(testDTree.logNegError, rootError, 1e-10);
  BOOST_REQUIRE_CLOSE(testDTree.Left()->logNegError, lError, 1e-10);
  BOOST_REQUIRE_CLOSE(testDTree.Right()->logNegError, rError, 1e-10);
  BOOST_REQUIRE_CLOSE(testDTree.Right()->Left()->logNegError, rlError, 1e-10);
  BOOST_REQUIRE_CLOSE(testDTree.Right()->Right()->logNegError, rrError, 1e-10);
#endif

  // Test alpha.
  double rootAlpha, rAlpha;
  rootAlpha = std::log(-((std::exp(rootError) - (std::exp(lError) +
      std::exp(rlError) + std::exp(rrError))) / 2));
  rAlpha = std::log(-(std::exp(rError) - (std::exp(rlError) +
      std::exp(rrError))));

  BOOST_REQUIRE_CLOSE(alpha, min(rootAlpha, rAlpha), 1e-10);
}

BOOST_AUTO_TEST_CASE(TestPruneAndUpdate)
{
  arma::mat testData(3, 5);

  testData << 4 << 5 << 7 << 3 << 5 << arma::endr
           << 5 << 0 << 1 << 7 << 1 << arma::endr
           << 5 << 6 << 7 << 1 << 8 << arma::endr;

  arma::Col<size_t> oTest(5);
  oTest << 0 << 1 << 2 << 3 << 4;

  DTree testDTree(testData);
  double alpha = testDTree.Grow(testData, oTest, false, 2, 1);
  alpha = testDTree.PruneAndUpdate(alpha, testData.n_cols, false);

  BOOST_REQUIRE_CLOSE(alpha, numeric_limits<double>::max(), 1e-10);
  BOOST_REQUIRE(testDTree.SubtreeLeaves() == 1);

  double rootError = -log(4.0) - log(7.0) - log(7.0);

  BOOST_REQUIRE_CLOSE(testDTree.LogNegError(), rootError, 1e-10);
  BOOST_REQUIRE_CLOSE(testDTree.SubtreeLeavesLogNegError(), rootError, 1e-10);
  BOOST_REQUIRE(testDTree.Left() == NULL);
  BOOST_REQUIRE(testDTree.Right() == NULL);
}

BOOST_AUTO_TEST_CASE(TestComputeValue)
{
  arma::mat testData(3, 5);

  testData << 4 << 5 << 7 << 3 << 5 << arma::endr
           << 5 << 0 << 1 << 7 << 1 << arma::endr
           << 5 << 6 << 7 << 1 << 8 << arma::endr;

  arma::vec q1(3), q2(3), q3(3), q4(3);

  q1 << 4 << 2 << 2;
  q2 << 5 << 0.25 << 6;
  q3 << 5 << 3 << 7;
  q4 << 2 << 3 << 3;

  arma::Col<size_t> oTest(5);
  oTest << 0 << 1 << 2 << 3 << 4;

  DTree testDTree(testData);
  double alpha = testDTree.Grow(testData, oTest, false, 2, 1);

  double d1 = (2.0 / 5.0) / exp(log(4.0) + log(7.0) + log(4.5));
  double d2 = (1.0 / 5.0) / exp(log(4.0) + log(0.5) + log(2.5));
  double d3 = (2.0 / 5.0) / exp(log(4.0) + log(6.5) + log(2.5));

  BOOST_REQUIRE_CLOSE(d1, testDTree.ComputeValue(q1), 1e-10);
  BOOST_REQUIRE_CLOSE(d2, testDTree.ComputeValue(q2), 1e-10);
  BOOST_REQUIRE_CLOSE(d3, testDTree.ComputeValue(q3), 1e-10);
  BOOST_REQUIRE_CLOSE(0.0, testDTree.ComputeValue(q4), 1e-10);

  alpha = testDTree.PruneAndUpdate(alpha, testData.n_cols, false);

  double d = 1.0 / exp(log(4.0) + log(7.0) + log(7.0));

  BOOST_REQUIRE_CLOSE(d, testDTree.ComputeValue(q1), 1e-10);
  BOOST_REQUIRE_CLOSE(d, testDTree.ComputeValue(q2), 1e-10);
  BOOST_REQUIRE_CLOSE(d, testDTree.ComputeValue(q3), 1e-10);
  BOOST_REQUIRE_CLOSE(0.0, testDTree.ComputeValue(q4), 1e-10);
}

BOOST_AUTO_TEST_CASE(TestVariableImportance)
{
  arma::mat testData(3, 5);

  testData << 4 << 5 << 7 << 3 << 5 << arma::endr
           << 5 << 0 << 1 << 7 << 1 << arma::endr
           << 5 << 6 << 7 << 1 << 8 << arma::endr;

  double rootError, lError, rError, rlError, rrError;

  rootError = -1.0 * exp(-log(4.0) - log(7.0) - log(7.0));

  lError = -1.0 * exp(2 * log(2.0 / 5.0) - (log(7.0) + log(4.0) + log(4.5)));
  rError = -1.0 * exp(2 * log(3.0 / 5.0) - (log(7.0) + log(4.0) + log(2.5)));

  rlError = -1.0 * exp(2 * log(1.0 / 5.0) - (log(0.5) + log(4.0) + log(2.5)));
  rrError = -1.0 * exp(2 * log(2.0 / 5.0) - (log(6.5) + log(4.0) + log(2.5)));

  arma::Col<size_t> oTest(5);
  oTest << 0 << 1 << 2 << 3 << 4;

  DTree testDTree(testData);
  testDTree.Grow(testData, oTest, false, 2, 1);

  arma::vec imps;
  testDTree.ComputeVariableImportance(imps);

  BOOST_REQUIRE_CLOSE((double) 0.0, imps[0], 1e-10);
  BOOST_REQUIRE_CLOSE((double) (rError - (rlError + rrError)), imps[1], 1e-10);
  BOOST_REQUIRE_CLOSE((double) (rootError - (lError + rError)), imps[2],
      1e-10);
}
/**
 * These are not yet implemented.
 *
BOOST_AUTO_TEST_CASE(TestTagTree)
{
  MatType testData(3, 5);

  testData << 4 << 5 << 7 << 3 << 5 << arma::endr
           << 5 << 0 << 1 << 7 << 1 << arma::endr
           << 5 << 6 << 7 << 1 << 8 << arma::endr;

  DTree<>* testDTree = new DTree<>(&testData);

  delete testDTree;
}

BOOST_AUTO_TEST_CASE(TestFindBucket)
{
  MatType testData(3, 5);

  testData << 4 << 5 << 7 << 3 << 5 << arma::endr
           << 5 << 0 << 1 << 7 << 1 << arma::endr
           << 5 << 6 << 7 << 1 << 8 << arma::endr;

  DTree<>* testDTree = new DTree<>(&testData);

  delete testDTree;
}

// Test functions in dt_utils.hpp

BOOST_AUTO_TEST_CASE(TestTrainer)
{
}

BOOST_AUTO_TEST_CASE(TestPrintVariableImportance)
{
}

BOOST_AUTO_TEST_CASE(TestPrintLeafMembership)
{
}
*/

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/distribution_test.cpp000066400000000000000000000700121315013601400224430ustar00rootroot00000000000000
/**
 * @file distribution_test.cpp
 * @author Ryan Curtin
 * @author Yannis Mentekidis
 *
 * Tests for the classes:
 *  * mlpack::distribution::DiscreteDistribution
 *  * mlpack::distribution::GaussianDistribution
 *  * mlpack::distribution::GammaDistribution
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::distribution;
using namespace mlpack::math;

BOOST_AUTO_TEST_SUITE(DistributionTest);

/*********************************/
/** Discrete Distribution Tests **/
/*********************************/

/**
 * Make sure we initialize correctly.
 */
BOOST_AUTO_TEST_CASE(DiscreteDistributionConstructorTest)
{
  DiscreteDistribution d(5);

  BOOST_REQUIRE_EQUAL(d.Probabilities().n_elem, 5);
  BOOST_REQUIRE_CLOSE(d.Probability("0"), 0.2, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("1"), 0.2, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("2"), 0.2, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("3"), 0.2, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("4"), 0.2, 1e-5);
}

/**
 * Make sure we get the probabilities of observations right.
 */
BOOST_AUTO_TEST_CASE(DiscreteDistributionProbabilityTest)
{
  DiscreteDistribution d(5);

  d.Probabilities() = "0.2 0.4 0.1 0.1 0.2";

  BOOST_REQUIRE_CLOSE(d.Probability("0"), 0.2, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("1"), 0.4, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("2"), 0.1, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("3"), 0.1, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("4"), 0.2, 1e-5);
}

/**
 * Make sure we get random observations correct.
 */
BOOST_AUTO_TEST_CASE(DiscreteDistributionRandomTest)
{
  DiscreteDistribution d(arma::Col<size_t>("3"));

  d.Probabilities() = "0.3 0.6 0.1";

  arma::vec actualProb(3);

  actualProb.zeros();

  for (size_t i = 0; i < 50000; i++)
    actualProb((size_t) (d.Random()[0] + 0.5))++;

  // Normalize.
  actualProb /= accu(actualProb);

  // 8% tolerance, because this can be a noisy process.
  BOOST_REQUIRE_CLOSE(actualProb(0), 0.3, 8.0);
  BOOST_REQUIRE_CLOSE(actualProb(1), 0.6, 8.0);
  BOOST_REQUIRE_CLOSE(actualProb(2), 0.1, 8.0);
}
/**
 * Make sure we can estimate from observations correctly.
 */
BOOST_AUTO_TEST_CASE(DiscreteDistributionTrainTest)
{
  DiscreteDistribution d(4);

  arma::mat obs("0 0 1 1 2 2 2 3");

  d.Train(obs);

  BOOST_REQUIRE_CLOSE(d.Probability("0"), 0.25, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("1"), 0.25, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("2"), 0.375, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("3"), 0.125, 1e-5);
}

/**
 * Estimate from observations with probabilities.
 */
BOOST_AUTO_TEST_CASE(DiscreteDistributionTrainProbTest)
{
  DiscreteDistribution d(3);

  arma::mat obs("0 0 1 2");

  arma::vec prob("0.25 0.25 0.5 1.0");

  d.Train(obs, prob);

  BOOST_REQUIRE_CLOSE(d.Probability("0"), 0.25, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("1"), 0.25, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("2"), 0.5, 1e-5);
}

/**
 * Estimate a multidimensional discrete distribution from observations.
 */
BOOST_AUTO_TEST_CASE(MultiDiscreteDistributionTrainProbTest)
{
  DiscreteDistribution d("10 10 10");

  arma::mat obs("0 1 1 1 2 2 2 2 2 2;"
                "0 0 0 1 1 1 2 2 2 2;"
                "0 0 0 1 1 2 2 2 2 2;");

  d.Train(obs);

  BOOST_REQUIRE_CLOSE(d.Probability("0 0 0"), 0.009, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("0 1 2"), 0.015, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("2 1 0"), 0.054, 1e-5);
}

/**
 * Make sure we initialize a multidimensional probability distribution
 * correctly.
 */
BOOST_AUTO_TEST_CASE(MultiDiscreteDistributionConstructorTest)
{
  DiscreteDistribution d("4 4 4 4");

  BOOST_REQUIRE_EQUAL(d.Probabilities(0).size(), 4);
  BOOST_REQUIRE_EQUAL(d.Dimensionality(), 4);

  BOOST_REQUIRE_CLOSE(d.Probability("0 0 0 0"), 0.00390625, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("0 1 2 3"), 0.00390625, 1e-5);
}

/**
 * Construct a multidimensional discrete distribution from given
 * probabilities.
 */
BOOST_AUTO_TEST_CASE(MultiDiscreteDistributionTrainTest)
{
  std::vector<arma::vec> pro;
  pro.push_back(arma::vec("0.1, 0.3, 0.6"));
  pro.push_back(arma::vec("0.3, 0.3, 0.3"));
  pro.push_back(arma::vec("0.25, 0.25, 0.5"));

  DiscreteDistribution d(pro);

  BOOST_REQUIRE_CLOSE(d.Probability("0 0 0"), 0.0083333, 1e-3);
  BOOST_REQUIRE_CLOSE(d.Probability("0 1 2"), 0.0166666, 1e-3);
  BOOST_REQUIRE_CLOSE(d.Probability("2 1 0"), 0.05, 1e-5);
}

/**
 * Estimate a multidimensional discrete distribution from observations with
 * probabilities.
 */
BOOST_AUTO_TEST_CASE(MultiDiscreteDistributionTrainProTest)
{
  DiscreteDistribution d("5 5 5");

  arma::mat obs("0 0 1 1 2;"
                "0 1 1 2 2;"
                "0 1 1 2 2");

  arma::vec prob("0.25 0.25 0.25 0.25 1");

  d.Train(obs, prob);

  BOOST_REQUIRE_CLOSE(d.Probability("0 0 0"), 0.00390625, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("1 0 1"), 0.0078125, 1e-5);
  BOOST_REQUIRE_CLOSE(d.Probability("2 1 0"), 0.015625, 1e-5);
}
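/**
 * Editor's sketch (not an original mlpack test case): the dimensions of a
 * DiscreteDistribution are treated as independent, so a joint probability is
 * the product of the per-dimension probabilities.  Only the constructor and
 * Probability() overload used above are assumed; the second dimension's
 * unnormalized weights "1 1 1" normalize to 1/3 each.
 */
BOOST_AUTO_TEST_CASE(MultiDiscreteDistributionIndependenceSketch)
{
  std::vector<arma::vec> pro;
  pro.push_back(arma::vec("0.1 0.3 0.6"));
  pro.push_back(arma::vec("1 1 1"));

  DiscreteDistribution d(pro);

  // P("2 1") = 0.6 * (1 / 3) = 0.2.
  BOOST_REQUIRE_CLOSE(d.Probability("2 1"), 0.2, 1e-3);
}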
/*********************************/
/** Gaussian Distribution Tests **/
/*********************************/

/**
 * Make sure Gaussian distributions are initialized correctly.
 */
BOOST_AUTO_TEST_CASE(GaussianDistributionEmptyConstructor)
{
  GaussianDistribution d;

  BOOST_REQUIRE_EQUAL(d.Mean().n_elem, 0);
  BOOST_REQUIRE_EQUAL(d.Covariance().n_elem, 0);
}

/**
 * Make sure Gaussian distributions are initialized to the correct
 * dimensionality.
 */
BOOST_AUTO_TEST_CASE(GaussianDistributionDimensionalityConstructor)
{
  GaussianDistribution d(4);

  BOOST_REQUIRE_EQUAL(d.Mean().n_elem, 4);
  BOOST_REQUIRE_EQUAL(d.Covariance().n_rows, 4);
  BOOST_REQUIRE_EQUAL(d.Covariance().n_cols, 4);
}

/**
 * Make sure Gaussian distributions are initialized correctly when we give a
 * mean and covariance.
 */
BOOST_AUTO_TEST_CASE(GaussianDistributionDistributionConstructor)
{
  arma::vec mean(3);
  arma::mat covariance(3, 3);

  mean.randu();
  covariance.randu();
  covariance *= covariance.t();
  covariance += arma::eye<arma::mat>(3, 3);

  GaussianDistribution d(mean, covariance);

  for (size_t i = 0; i < 3; i++)
    BOOST_REQUIRE_CLOSE(d.Mean()[i], mean[i], 1e-5);

  for (size_t i = 0; i < 3; i++)
    for (size_t j = 0; j < 3; j++)
      BOOST_REQUIRE_CLOSE(d.Covariance()(i, j), covariance(i, j), 1e-5);
}

/**
 * Make sure the probability of observations is correct.
 */
BOOST_AUTO_TEST_CASE(GaussianDistributionProbabilityTest)
{
  arma::vec mean("5 6 3 3 2");
  arma::mat cov("6 1 1 1 2;"
                "1 7 1 0 0;"
                "1 1 4 1 1;"
                "1 0 1 7 0;"
                "2 0 1 0 6");

  GaussianDistribution d(mean, cov);

  BOOST_REQUIRE_CLOSE(d.LogProbability("0 1 2 3 4"), -13.432076798791542,
      1e-5);
  BOOST_REQUIRE_CLOSE(d.LogProbability("3 2 3 7 8"), -15.814880322345738,
      1e-5);
  BOOST_REQUIRE_CLOSE(d.LogProbability("2 2 0 8 1"), -13.754462857772776,
      1e-5);
  BOOST_REQUIRE_CLOSE(d.LogProbability("2 1 5 0 1"), -13.283283233107898,
      1e-5);
  BOOST_REQUIRE_CLOSE(d.LogProbability("3 0 5 1 0"), -13.800326511545279,
      1e-5);
  BOOST_REQUIRE_CLOSE(d.LogProbability("4 0 6 1 0"), -14.900192463287908,
      1e-5);
}

/**
 * Test GaussianDistribution::Probability() in the univariate case.
 */
BOOST_AUTO_TEST_CASE(GaussianUnivariateProbabilityTest)
{
  GaussianDistribution g(arma::vec("0.0"), arma::mat("1.0"));

  // Simple case.
  BOOST_REQUIRE_CLOSE(g.Probability(arma::vec("0.0")), 0.398942280401433,
      1e-5);
  BOOST_REQUIRE_CLOSE(g.Probability(arma::vec("1.0")), 0.241970724519143,
      1e-5);
  BOOST_REQUIRE_CLOSE(g.Probability(arma::vec("-1.0")), 0.241970724519143,
      1e-5);

  // A few more cases...
  arma::mat covariance;

  covariance = 2.0;
  g.Covariance(std::move(covariance));
  BOOST_REQUIRE_CLOSE(g.Probability(arma::vec("0.0")), 0.282094791773878,
      1e-5);
  BOOST_REQUIRE_CLOSE(g.Probability(arma::vec("1.0")), 0.219695644733861,
      1e-5);
  BOOST_REQUIRE_CLOSE(g.Probability(arma::vec("-1.0")), 0.219695644733861,
      1e-5);

  g.Mean().fill(1.0);
  covariance = 1.0;
  g.Covariance(std::move(covariance));
  BOOST_REQUIRE_CLOSE(g.Probability(arma::vec("1.0")), 0.398942280401433,
      1e-5);

  covariance = 2.0;
  g.Covariance(std::move(covariance));
  BOOST_REQUIRE_CLOSE(g.Probability(arma::vec("-1.0")), 0.103776874355149,
      1e-5);
}
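/**
 * Editor's worked example (not an original mlpack test case): check
 * Probability() against the closed-form univariate density
 * f(x) = exp(-0.5 * (x - m)^2 / s^2) / sqrt(2 * pi * s^2).  For m = 0,
 * s^2 = 4, and x = 2, that is exp(-0.5) / (2 * sqrt(2 * pi)), roughly
 * 0.1209854.  Only the constructor and Probability() used above are assumed.
 */
BOOST_AUTO_TEST_CASE(GaussianUnivariateClosedFormExample)
{
  GaussianDistribution g(arma::vec("0.0"), arma::mat("4.0"));

  BOOST_REQUIRE_CLOSE(g.Probability(arma::vec("2.0")), 0.120985362259572,
      1e-5);
}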
x = "0 1 2 3 4"; g.Mean() = "5 6 3 3 2"; covariance = "6 1 1 1 2;" "1 7 1 0 0;" "1 1 4 1 1;" "1 0 1 7 0;" "2 0 1 0 6"; g.Covariance(std::move(covariance)); BOOST_REQUIRE_CLOSE(g.Probability(x), 1.4673143531128877e-06, 1e-5); BOOST_REQUIRE_CLOSE(g.Probability(-x), 7.7404143494891786e-09, 1e-8); g.Mean() *= -1; BOOST_REQUIRE_CLOSE(g.Probability(-x), 1.4673143531128877e-06, 1e-5); BOOST_REQUIRE_CLOSE(g.Probability(x), 7.7404143494891786e-09, 1e-8); } /** * Test the phi() function, for multiple points in the multivariate Gaussian * case. */ BOOST_AUTO_TEST_CASE(GaussianMultipointMultivariateProbabilityTest) { // Same case as before. arma::vec mean = "5 6 3 3 2"; arma::mat cov("6 1 1 1 2;" "1 7 1 0 0;" "1 1 4 1 1;" "1 0 1 7 0;" "2 0 1 0 6"); arma::mat points = "0 3 2 2 3 4;" "1 2 2 1 0 0;" "2 3 0 5 5 6;" "3 7 8 0 1 1;" "4 8 1 1 0 0;"; arma::vec phis; GaussianDistribution g(mean, cov); g.LogProbability(points, phis); BOOST_REQUIRE_EQUAL(phis.n_elem, 6); BOOST_REQUIRE_CLOSE(phis(0), -13.432076798791542, 1e-5); BOOST_REQUIRE_CLOSE(phis(1), -15.814880322345738, 1e-5); BOOST_REQUIRE_CLOSE(phis(2), -13.754462857772776, 1e-5); BOOST_REQUIRE_CLOSE(phis(3), -13.283283233107898, 1e-5); BOOST_REQUIRE_CLOSE(phis(4), -13.800326511545279, 1e-5); BOOST_REQUIRE_CLOSE(phis(5), -14.900192463287908, 1e-5); } /** * Make sure random observations follow the probability distribution correctly. */ BOOST_AUTO_TEST_CASE(GaussianDistributionRandomTest) { arma::vec mean("1.0 2.25"); arma::mat cov("0.85 0.60;" "0.60 1.45"); GaussianDistribution d(mean, cov); arma::mat obs(2, 5000); for (size_t i = 0; i < 5000; i++) obs.col(i) = d.Random(); // Now make sure that reflects the actual distribution. arma::vec obsMean = arma::mean(obs, 1); arma::mat obsCov = ccov(obs); // 10% tolerance because this can be noisy. BOOST_REQUIRE_CLOSE(obsMean[0], mean[0], 10.0); BOOST_REQUIRE_CLOSE(obsMean[1], mean[1], 10.0); BOOST_REQUIRE_CLOSE(obsCov(0, 0), cov(0, 0), 10.0); BOOST_REQUIRE_CLOSE(obsCov(0, 1), cov(0, 1), 10.0); BOOST_REQUIRE_CLOSE(obsCov(1, 0), cov(1, 0), 10.0); BOOST_REQUIRE_CLOSE(obsCov(1, 1), cov(1, 1), 10.0); } /** * Make sure that we can properly estimate from given observations. */ BOOST_AUTO_TEST_CASE(GaussianDistributionTrainTest) { arma::vec mean("1.0 3.0 0.0 2.5"); arma::mat cov("3.0 0.0 1.0 4.0;" "0.0 2.4 0.5 0.1;" "1.0 0.5 6.3 0.0;" "4.0 0.1 0.0 9.1"); // Now generate the observations. arma::mat observations(4, 10000); arma::mat transChol = trans(chol(cov)); for (size_t i = 0; i < 10000; i++) observations.col(i) = transChol * arma::randn(4) + mean; // Now estimate. GaussianDistribution d; // Find actual mean and covariance of data. arma::vec actualMean = arma::mean(observations, 1); arma::mat actualCov = ccov(observations); d.Train(observations); // Check that everything is estimated right. for (size_t i = 0; i < 4; i++) BOOST_REQUIRE_SMALL(d.Mean()[i] - actualMean[i], 1e-5); for (size_t i = 0; i < 4; i++) for (size_t j = 0; j < 4; j++) BOOST_REQUIRE_SMALL(d.Covariance()(i, j) - actualCov(i, j), 1e-5); } /** * This test verifies the fitting of GaussianDistribution works properly when probabilities for each sample is given. **/ BOOST_AUTO_TEST_CASE(GaussianDistributionTrainWithProbabilitiesTest) { double mean = 5.0; double stddeviation = 2.0; //Creates a normal distribution generator. 
  // Create a normal distribution generator.
  std::default_random_engine generator;
  generator.seed(std::time(NULL));
  std::normal_distribution<double> dist(mean, stddeviation);

  size_t N = 50000;
  size_t d = 1;

  arma::mat rdata(d, N);
  for (size_t i = 0; i < d; i++)
    for (size_t j = 0; j < N; j++)
      rdata(i, j) = dist(generator);

  // Create a uniform distribution generator for the probabilities.
  std::uniform_real_distribution<double> prob(0, 1);
  arma::vec probabilities(N);
  for (size_t i = 0; i < N; i++)
    probabilities(i) = prob(generator);

  // Fit one result with both data and probabilities.
  GaussianDistribution guDist;
  guDist.Train(rdata, probabilities);

  // Fit another result with only the data.
  GaussianDistribution guDist2;
  guDist2.Train(rdata);

  BOOST_REQUIRE_CLOSE(guDist.Mean()[0], guDist2.Mean()[0], 5);
  BOOST_REQUIRE_CLOSE(guDist.Covariance()[0], guDist2.Covariance()[0], 5);

  BOOST_REQUIRE_CLOSE(guDist.Mean()[0], mean, 5);
  BOOST_REQUIRE_CLOSE(guDist.Covariance()[0], stddeviation * stddeviation, 5);
}

/**
 * This test ensures that the same result is obtained when trained with
 * probabilities all set to 1 and with no probabilities at all.
 */
BOOST_AUTO_TEST_CASE(GaussianDistributionWithProbabilities1Test)
{
  double mean = 5.0;
  double stddeviation = 4.0;

  // Create a normal distribution random generator.
  std::default_random_engine generator;
  generator.seed(std::time(NULL));
  std::normal_distribution<double> dist(mean, stddeviation);

  size_t N = 50000;
  size_t d = 1;

  arma::mat rdata(d, N);
  for (size_t i = 0; i < d; i++)
    for (size_t j = 0; j < N; j++)
      rdata(i, j) = dist(generator);

  arma::vec probabilities(N, arma::fill::ones);

  // Fit one distribution with only the data.
  GaussianDistribution guDist;
  guDist.Train(rdata);

  // Fit another with the data and each probability set to 1.
  GaussianDistribution guDist2;
  guDist2.Train(rdata, probabilities);

  BOOST_REQUIRE_CLOSE(guDist.Mean()[0], guDist2.Mean()[0], 1e-15);
  BOOST_REQUIRE_CLOSE(guDist.Covariance()[0], guDist2.Covariance()[0], 1e-2);
}

/**
 * This test draws points from two different normal distributions, sets the
 * probabilities for points from the first distribution to something large and
 * the probabilities for points from the second to something small. It ensures
 * that the recovered normal distribution has the same parameters as the first
 * distribution, i.e. the one whose points received high probabilities.
 */
BOOST_AUTO_TEST_CASE(GaussianDistributionTrainWithTwoDistProbabilitiesTest)
{
  double mean1 = 5.0;
  double stddeviation1 = 4.0;

  double mean2 = 3.0;
  double stddeviation2 = 1.0;

  // Create two Gaussian distribution random generators.
  std::default_random_engine generator;
  generator.seed(std::time(NULL));
  std::normal_distribution<double> dist1(mean1, stddeviation1);
  std::normal_distribution<double> dist2(mean2, stddeviation2);

  std::uniform_real_distribution<double> lowProb(0, 0.02);
  std::uniform_real_distribution<double> highProb(0.98, 1);

  size_t N = 50000;
  size_t d = 1;

  arma::mat rdata(d, N);
  arma::vec probabilities(N);

  // Draw points alternately from the two different distributions.
  for (size_t i = 0; i < d; i++)
  {
    for (size_t j = 0; j < N; j++)
    {
      if (j % 2 == 0)
        rdata(i, j) = dist1(generator);
      else
        rdata(i, j) = dist2(generator);
    }
  }

  // Points from the first distribution (even indices) get high probabilities.
  for (size_t i = 0; i < N; i++)
  {
    if (i % 2 == 0)
      probabilities(i) = highProb(generator);
    else
      probabilities(i) = lowProb(generator);
  }

  GaussianDistribution guDist;
  guDist.Train(rdata, probabilities);

  BOOST_REQUIRE_CLOSE(guDist.Mean()[0], mean1, 5);
  BOOST_REQUIRE_CLOSE(guDist.Covariance()[0], stddeviation1 * stddeviation1, 5);
}

/******************************/
/** Gamma Distribution Tests **/
/******************************/

/**
 * Make sure that using an object to fit one reference set and then asking
 * to fit another works properly.
*/
BOOST_AUTO_TEST_CASE(GammaDistributionTrainTest)
{
  // Create a gamma distribution random generator.
  double alphaReal = 5.3;
  double betaReal = 1.5;
  std::gamma_distribution<double> dist(alphaReal, betaReal);

  // Create N x d gamma-distributed data and fit the results.
  size_t N = 200;
  size_t d = 2;
  arma::mat rdata(d, N);

  // Random generation of gamma-like points.
  for (size_t j = 0; j < d; ++j)
    for (size_t i = 0; i < N; ++i)
      rdata(j, i) = dist(math::randGen);

  // Create Gamma object and call Train() on reference set.
  GammaDistribution gDist;
  gDist.Train(rdata);

  // Training must estimate d pairs of alpha and beta parameters.
  BOOST_REQUIRE_EQUAL(gDist.Dimensionality(), d);

  // Create an N' x d' gamma distribution and fit the results, reusing the old
  // object rather than creating a new one.
  size_t N2 = 350;
  size_t d2 = 4;
  arma::mat rdata2(d2, N2);

  // Random generation of gamma-like points.
  for (size_t j = 0; j < d2; ++j)
    for (size_t i = 0; i < N2; ++i)
      rdata2(j, i) = dist(math::randGen);

  // Fit results using the old object.
  gDist.Train(rdata2);

  // Training must estimate d' pairs of alpha and beta parameters.
  BOOST_REQUIRE_EQUAL(gDist.Dimensionality(), d2);
}

/**
 * This test verifies that the fitting procedure for GammaDistribution works
 * properly when probabilities for each sample are given.
 */
BOOST_AUTO_TEST_CASE(GammaDistributionTrainWithProbabilitiesTest)
{
  double alphaReal = 5.4;
  double betaReal = 6.7;

  // Create a gamma distribution random generator.
  std::gamma_distribution<double> dist(alphaReal, betaReal);

  size_t N = 50000;
  size_t d = 2;
  arma::mat rdata(d, N);

  for (size_t j = 0; j < d; j++)
    for (size_t i = 0; i < N; i++)
      rdata(j, i) = dist(math::randGen);

  // Fill the probabilities randomly.
  arma::vec probabilities(N, arma::fill::randu);

  // Fit results with probabilities and data.
  GammaDistribution gDist;
  gDist.Train(rdata, probabilities);

  // Fit results with only data.
  GammaDistribution gDist2;
  gDist2.Train(rdata);

  BOOST_REQUIRE_CLOSE(gDist2.Alpha(0), gDist.Alpha(0), 1.5);
  BOOST_REQUIRE_CLOSE(gDist2.Beta(0), gDist.Beta(0), 1.5);

  BOOST_REQUIRE_CLOSE(gDist2.Alpha(1), gDist.Alpha(1), 1.5);
  BOOST_REQUIRE_CLOSE(gDist2.Beta(1), gDist.Beta(1), 1.5);

  BOOST_REQUIRE_CLOSE(alphaReal, gDist.Alpha(0), 2.5);
  BOOST_REQUIRE_CLOSE(betaReal, gDist.Beta(0), 2.5);

  BOOST_REQUIRE_CLOSE(alphaReal, gDist.Alpha(1), 2.5);
  BOOST_REQUIRE_CLOSE(betaReal, gDist.Beta(1), 2.5);
}

/**
 * This test ensures that the same result is obtained when trained with
 * probabilities all set to 1 and with no probabilities at all.
 */
BOOST_AUTO_TEST_CASE(GammaDistributionTrainAllProbabilities1Test)
{
  double alphaReal = 5.4;
  double betaReal = 6.7;

  // Create a gamma distribution random generator.
  std::gamma_distribution<double> dist(alphaReal, betaReal);

  size_t N = 1000;
  size_t d = 2;
  arma::mat rdata(d, N);

  for (size_t j = 0; j < d; j++)
    for (size_t i = 0; i < N; i++)
      rdata(j, i) = dist(math::randGen);

  // Fit results with only data.
  GammaDistribution gDist;
  gDist.Train(rdata);

  // Fit results with data and each probability as 1.
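  // (With all weights equal to 1 the weighted estimator reduces exactly to the
  // unweighted one, which is why the tolerances below can be tight.)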
  GammaDistribution gDist2;
  arma::vec allProbabilities1(N, arma::fill::ones);
  gDist2.Train(rdata, allProbabilities1);

  BOOST_REQUIRE_CLOSE(gDist2.Alpha(0), gDist.Alpha(0), 1e-5);
  BOOST_REQUIRE_CLOSE(gDist2.Beta(0), gDist.Beta(0), 1e-5);

  BOOST_REQUIRE_CLOSE(gDist2.Alpha(1), gDist.Alpha(1), 1e-5);
  BOOST_REQUIRE_CLOSE(gDist2.Beta(1), gDist.Beta(1), 1e-5);
}

/**
 * This test draws points from two different gamma distributions, sets the
 * probabilities for the points from the first distribution to something small
 * and the probabilities for the second to something large. It ensures that the
 * gamma distribution recovered has the same parameters as the second gamma
 * distribution with high probabilities.
 */
BOOST_AUTO_TEST_CASE(GammaDistributionTrainTwoDistProbabilities1Test)
{
  double alphaReal = 5.4;
  double betaReal = 6.7;

  double alphaReal2 = 1.9;
  double betaReal2 = 8.4;

  // Create two gamma distribution random generators.
  std::gamma_distribution<double> dist(alphaReal, betaReal);
  std::gamma_distribution<double> dist2(alphaReal2, betaReal2);

  size_t N = 50000;
  size_t d = 2;
  arma::mat rdata(d, N);
  arma::vec probabilities(N);

  // Draw points alternately from the two different distributions.
  for (size_t j = 0; j < d; j++)
  {
    for (size_t i = 0; i < N; i++)
    {
      if (i % 2 == 0)
        rdata(j, i) = dist(math::randGen);
      else
        rdata(j, i) = dist2(math::randGen);
    }
  }

  // Points from the second distribution (odd indices) get high probabilities.
  for (size_t i = 0; i < N; i++)
  {
    if (i % 2 == 0)
      probabilities(i) = 0.02 * math::Random();
    else
      probabilities(i) = 0.98 + 0.02 * math::Random();
  }

  GammaDistribution gDist;
  gDist.Train(rdata, probabilities);

  BOOST_REQUIRE_CLOSE(alphaReal2, gDist.Alpha(0), 5);
  BOOST_REQUIRE_CLOSE(betaReal2, gDist.Beta(0), 5);

  BOOST_REQUIRE_CLOSE(alphaReal2, gDist.Alpha(1), 5);
  BOOST_REQUIRE_CLOSE(betaReal2, gDist.Beta(1), 5);
}

/**
 * This test verifies that the fitting procedure for GammaDistribution works
 * properly and converges near the actual gamma parameters. We do this twice
 * with different alpha/beta parameters so we make sure we don't have some
 * weird bug that always converges to the same number.
 */
BOOST_AUTO_TEST_CASE(GammaDistributionFittingTest)
{
  // Offset from the actual alpha/beta. 10% is quite a relaxed tolerance since
  // the random points we generate are few (for test speed) and might be fitted
  // better by a similar distribution.
  double errorTolerance = 10;

  size_t N = 5000;
  size_t d = 1; // Only 1 dimension is required for this.

  /** Iteration 1 (first parameter set) **/

  // Create a gamma-random generator and data.
  double alphaReal = 5.3;
  double betaReal = 1.5;
  std::gamma_distribution<double> dist(alphaReal, betaReal);

  // Random generation of gamma-like points.
  arma::mat rdata(d, N);
  for (size_t j = 0; j < d; ++j)
    for (size_t i = 0; i < N; ++i)
      rdata(j, i) = dist(math::randGen);

  // Create Gamma object and call Train() on reference set.
  GammaDistribution gDist;
  gDist.Train(rdata);

  // Estimated parameters must be close to the real ones.
  BOOST_REQUIRE_CLOSE(gDist.Alpha(0), alphaReal, errorTolerance);
  BOOST_REQUIRE_CLOSE(gDist.Beta(0), betaReal, errorTolerance);

  /** Iteration 2 (different parameter set) **/

  // Create a gamma-random generator and data.
  double alphaReal2 = 7.2;
  double betaReal2 = 0.9;
  std::gamma_distribution<double> dist2(alphaReal2, betaReal2);

  // Random generation of gamma-like points.
  arma::mat rdata2(d, N);
  for (size_t j = 0; j < d; ++j)
    for (size_t i = 0; i < N; ++i)
      rdata2(j, i) = dist2(math::randGen);

  // Create Gamma object and call Train() on reference set.
  GammaDistribution gDist2;
  gDist2.Train(rdata2);

  // Estimated parameters must be close to the real ones.
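  // (For reference, gamma fitting is typically done from the sufficient
  // statistic s = log(mean(x)) - mean(log(x)), solving iteratively for alpha
  // and then setting beta = mean(x) / alpha; this is a sketch of the standard
  // approach, not a statement about mlpack's exact internals.)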
BOOST_REQUIRE_CLOSE(gDist2.Alpha(0), alphaReal2, errorTolerance); BOOST_REQUIRE_CLOSE(gDist2.Beta(0), betaReal2, errorTolerance); } /** * Test that Train() and the constructor that takes data give the same resulting * distribution. */ BOOST_AUTO_TEST_CASE(GammaDistributionTrainConstructorTest) { const arma::mat data = arma::randu(10, 500); GammaDistribution d1(data); GammaDistribution d2; d2.Train(data); for (size_t i = 0; i < 10; ++i) { BOOST_REQUIRE_CLOSE(d1.Alpha(i), d2.Alpha(i), 1e-5); BOOST_REQUIRE_CLOSE(d1.Beta(i), d2.Beta(i), 1e-5); } } /** * Test that Train() with a dataset and Train() with dataset statistics return * the same results. */ BOOST_AUTO_TEST_CASE(GammaDistributionTrainStatisticsTest) { const arma::mat data = arma::randu(1, 500); // Train object d1 with the data. GammaDistribution d1(data); // Train object d2 with the data's statistics. GammaDistribution d2; const arma::vec meanLogx = arma::mean(arma::log(data), 1); const arma::vec meanx = arma::mean(data, 1); const arma::vec logMeanx = arma::log(meanx); d2.Train(logMeanx, meanLogx, meanx); BOOST_REQUIRE_CLOSE(d1.Alpha(0), d2.Alpha(0), 1e-5); BOOST_REQUIRE_CLOSE(d1.Beta(0), d2.Beta(0), 1e-5); } /** * Tests that Random() generates points that can be reasonably well fit by the * distribution that generated them. */ BOOST_AUTO_TEST_CASE(GammaDistributionRandomTest) { const arma::vec a("2.0 2.5 3.0"), b("0.4 0.6 1.3"); const size_t numPoints = 2000; // Distribution to generate points. GammaDistribution d1(a, b); arma::mat data(3, numPoints); // 3-d points. for (size_t i = 0; i < numPoints; ++i) data.col(i) = d1.Random(); // Distribution to fit points. GammaDistribution d2(data); for (size_t i = 0; i < 3; ++i) { BOOST_REQUIRE_CLOSE(d2.Alpha(i), a(i), 10); // Within 10% BOOST_REQUIRE_CLOSE(d2.Beta(i), b(i), 10); } } BOOST_AUTO_TEST_CASE(GammaDistributionProbabilityTest) { // Train two 1-dimensional distributions. const arma::vec a1("2.0"), b1("0.9"), a2("3.1"), b2("1.4"); arma::mat x1("2.0"), x2("2.94"); arma::vec prob1, prob2; // Evaluated at wolfram|alpha GammaDistribution d1(a1, b1); d1.Probability(x1, prob1); BOOST_REQUIRE_CLOSE(prob1(0), 0.267575, 1e-3); // Evaluated at wolfram|alpha GammaDistribution d2(a2, b2); d2.Probability(x2, prob2); BOOST_REQUIRE_CLOSE(prob2(0), 0.189043, 1e-3); // Check that the overload that returns the probability for 1 dimension // agrees. BOOST_REQUIRE_CLOSE(prob2(0), d2.Probability(2.94, 0), 1e-5); // Combine into one 2-dimensional distribution. const arma::vec a3("2.0 3.1"), b3("0.9 1.4"); arma::mat x3(2, 2); x3 << 2.0 << 2.94 << arma::endr << 2.0 << 2.94; arma::vec prob3; // Expect that the 2-dimensional distribution returns the product of the // 1-dimensional distributions (evaluated at wolfram|alpha). GammaDistribution d3(a3, b3); d3.Probability(x3, prob3); BOOST_REQUIRE_CLOSE(prob3(0), 0.04408, 1e-2); BOOST_REQUIRE_CLOSE(prob3(1), 0.026165, 1e-2); } BOOST_AUTO_TEST_CASE(GammaDistributionLogProbabilityTest) { // Train two 1-dimensional distributions. const arma::vec a1("2.0"), b1("0.9"), a2("3.1"), b2("1.4"); arma::mat x1("2.0"), x2("2.94"); arma::vec prob1, prob2; // Evaluated at wolfram|alpha GammaDistribution d1(a1, b1); d1.LogProbability(x1, prob1); BOOST_REQUIRE_CLOSE(prob1(0), std::log(0.267575), 1e-3); // Evaluated at wolfram|alpha GammaDistribution d2(a2, b2); d2.LogProbability(x2, prob2); BOOST_REQUIRE_CLOSE(prob2(0), std::log(0.189043), 1e-3); // Combine into one 2-dimensional distribution. 
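  // (Each dimension of a GammaDistribution is independent, so the joint
  // density of a 2-dimensional point is the product of the two marginal
  // densities; the reference products below were computed that way.)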
const arma::vec a3("2.0 3.1"), b3("0.9 1.4"); arma::mat x3(2, 2); x3 << 2.0 << 2.94 << arma::endr << 2.0 << 2.94; arma::vec prob3; // Expect that the 2-dimensional distribution returns the product of the // 1-dimensional distributions (evaluated at wolfram|alpha). GammaDistribution d3(a3, b3); d3.LogProbability(x3, prob3); BOOST_REQUIRE_CLOSE(prob3(0), std::log(0.04408), 1e-3); BOOST_REQUIRE_CLOSE(prob3(1), std::log(0.026165), 1e-3); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/drusilla_select_test.cpp000066400000000000000000000133061315013601400231050ustar00rootroot00000000000000/** * @file drusilla_select_test.cpp * @author Ryan Curtin * * Test for DrusillaSelect. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" #include "serialization.hpp" using namespace mlpack; using namespace mlpack::neighbor; BOOST_AUTO_TEST_SUITE(DrusillaSelectTest); // If we have a dataset with an extreme outlier, then every point (except that // one) should end up with that point as the furthest neighbor candidate. BOOST_AUTO_TEST_CASE(DrusillaSelectExtremeOutlierTest) { arma::mat dataset = arma::randu(5, 100); dataset.col(99) += 100; // Make last column very large. // Construct with some reasonable parameters. DrusillaSelect<> ds(dataset, 5, 5); // Query with every point except the extreme point. arma::mat distances; arma::Mat neighbors; ds.Search(dataset.cols(0, 98), 1, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 99); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 1); BOOST_REQUIRE_EQUAL(distances.n_cols, 99); BOOST_REQUIRE_EQUAL(distances.n_rows, 1); for (size_t i = 0; i < 99; ++i) { BOOST_REQUIRE_EQUAL(neighbors[i], 99); } } // If we use only one projection with the number of points equal to what is in // the dataset, we should end up with the exact result. BOOST_AUTO_TEST_CASE(DrusillaSelectExhaustiveExactTest) { arma::mat dataset = arma::randu(5, 100); // Construct with one projection and 100 points in that projection. DrusillaSelect<> ds(dataset, 100, 1); arma::mat distances, distancesTrue; arma::Mat neighbors, neighborsTrue; ds.Search(dataset, 5, neighbors, distances); AllkFN kfn(dataset); kfn.Search(dataset, 5, neighborsTrue, distancesTrue); BOOST_REQUIRE_EQUAL(neighborsTrue.n_cols, neighbors.n_cols); BOOST_REQUIRE_EQUAL(neighborsTrue.n_rows, neighbors.n_rows); BOOST_REQUIRE_EQUAL(distancesTrue.n_cols, distances.n_cols); BOOST_REQUIRE_EQUAL(distancesTrue.n_rows, distances.n_rows); for (size_t i = 0; i < distances.n_elem; ++i) { BOOST_REQUIRE_EQUAL(neighbors[i], neighborsTrue[i]); BOOST_REQUIRE_CLOSE(distances[i], distancesTrue[i], 1e-5); } } // Test that we can call Train() after calling the constructor. BOOST_AUTO_TEST_CASE(RetrainTest) { arma::mat firstDataset = arma::randu(3, 10); arma::mat dataset = arma::randu(3, 200); DrusillaSelect<> ds(firstDataset, 3, 3); ds.Train(std::move(dataset), 2, 2); arma::mat distances; arma::Mat neighbors; ds.Search(dataset, 1, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 200); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 1); BOOST_REQUIRE_EQUAL(distances.n_cols, 200); BOOST_REQUIRE_EQUAL(distances.n_rows, 1); } // Test serialization. BOOST_AUTO_TEST_CASE(SerializationTest) { // Create a random dataset. 
arma::mat dataset = arma::randu(3, 100); DrusillaSelect<> ds(dataset, 3, 3); arma::mat fakeDataset1 = arma::randu(2, 15); arma::mat fakeDataset2 = arma::randu(10, 18); DrusillaSelect<> dsXml(fakeDataset1, 5, 3); DrusillaSelect<> dsText(2, 2); DrusillaSelect<> dsBinary(5, 2); dsBinary.Train(fakeDataset2); // Now do the serialization. SerializeObjectAll(ds, dsXml, dsText, dsBinary); // Now do a search and make sure all the results are the same. arma::Mat neighbors, neighborsXml, neighborsText, neighborsBinary; arma::mat distances, distancesXml, distancesText, distancesBinary; ds.Search(dataset, 3, neighbors, distances); dsXml.Search(dataset, 3, neighborsXml, distancesXml); dsText.Search(dataset, 3, neighborsText, distancesText); dsBinary.Search(dataset, 3, neighborsBinary, distancesBinary); BOOST_REQUIRE_EQUAL(neighbors.n_rows, neighborsXml.n_rows); BOOST_REQUIRE_EQUAL(neighbors.n_cols, neighborsXml.n_cols); BOOST_REQUIRE_EQUAL(neighbors.n_rows, neighborsText.n_rows); BOOST_REQUIRE_EQUAL(neighbors.n_cols, neighborsText.n_cols); BOOST_REQUIRE_EQUAL(neighbors.n_rows, neighborsBinary.n_rows); BOOST_REQUIRE_EQUAL(neighbors.n_cols, neighborsBinary.n_cols); BOOST_REQUIRE_EQUAL(distances.n_rows, distancesXml.n_rows); BOOST_REQUIRE_EQUAL(distances.n_cols, distancesXml.n_cols); BOOST_REQUIRE_EQUAL(distances.n_rows, distancesText.n_rows); BOOST_REQUIRE_EQUAL(distances.n_cols, distancesText.n_cols); BOOST_REQUIRE_EQUAL(distances.n_rows, distancesBinary.n_rows); BOOST_REQUIRE_EQUAL(distances.n_cols, distancesBinary.n_cols); for (size_t i = 0; i < neighbors.n_elem; ++i) { BOOST_REQUIRE_EQUAL(neighbors[i], neighborsXml[i]); BOOST_REQUIRE_EQUAL(neighbors[i], neighborsText[i]); BOOST_REQUIRE_EQUAL(neighbors[i], neighborsBinary[i]); BOOST_REQUIRE_CLOSE(distances[i], distancesXml[i], 1e-5); BOOST_REQUIRE_CLOSE(distances[i], distancesText[i], 1e-5); BOOST_REQUIRE_CLOSE(distances[i], distancesBinary[i], 1e-5); } } // Make sure we can create the object with a sparse matrix. BOOST_AUTO_TEST_CASE(SparseTest) { arma::sp_mat dataset; dataset.sprandu(50, 1000, 0.3); DrusillaSelect ds(dataset, 5, 10); // Run a search. arma::mat distances; arma::Mat neighbors; ds.Search(dataset, 3, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 1000); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 3); BOOST_REQUIRE_EQUAL(distances.n_cols, 1000); BOOST_REQUIRE_EQUAL(distances.n_rows, 3); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/emst_test.cpp000066400000000000000000000171771315013601400207110ustar00rootroot00000000000000/** * @file emst_test.cpp * * Test file for EMST methods. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" #include using namespace mlpack; using namespace mlpack::emst; using namespace mlpack::tree; using namespace mlpack::bound; using namespace mlpack::metric; BOOST_AUTO_TEST_SUITE(EMSTTest); /** * Simple emst test with small, synthetic dataset. This is an * exhaustive test, which checks that each method for performing the calculation * (dual-tree, naive) produces the correct results. The dataset is in one * dimension for simplicity -- the correct functionality of distance functions * is not tested here. */ BOOST_AUTO_TEST_CASE(ExhaustiveSyntheticTest) { // Set up our data. 
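  // For one-dimensional data the minimum spanning tree is easy to verify by
  // hand: after sorting the points, every MST edge connects a pair of points
  // adjacent in sorted order, and the output lists the edges in order of
  // increasing length. The expected pairs and lengths asserted below were
  // derived that way.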
arma::mat data(1, 11); data[0] = 0.05; // Row addressing is unnecessary (they are all 0). data[1] = 0.37; data[2] = 0.15; data[3] = 1.25; data[4] = 5.05; data[5] = -0.22; data[6] = -2.00; data[7] = -1.30; data[8] = 0.45; data[9] = 0.91; data[10] = 1.00; arma::mat results; // Build the tree by hand to get a leaf size of 1. typedef KDTree TreeType; std::vector oldFromNew; std::vector newFromOld; TreeType tree(data, oldFromNew, newFromOld, 1); // Create the DTB object and run the calculation. DualTreeBoruvka<> dtb(&tree); dtb.ComputeMST(results); // Now the exhaustive check for correctness. if (newFromOld[1] < newFromOld[8]) { BOOST_REQUIRE_EQUAL(results(0, 0), newFromOld[1]); BOOST_REQUIRE_EQUAL(results(1, 0), newFromOld[8]); } else { BOOST_REQUIRE_EQUAL(results(1, 0), newFromOld[1]); BOOST_REQUIRE_EQUAL(results(0, 0), newFromOld[8]); } BOOST_REQUIRE_CLOSE(results(2, 0), 0.08, 1e-5); if (newFromOld[9] < newFromOld[10]) { BOOST_REQUIRE_EQUAL(results(0, 1), newFromOld[9]); BOOST_REQUIRE_EQUAL(results(1, 1), newFromOld[10]); } else { BOOST_REQUIRE_EQUAL(results(1, 1), newFromOld[9]); BOOST_REQUIRE_EQUAL(results(0, 1), newFromOld[10]); } BOOST_REQUIRE_CLOSE(results(2, 1), 0.09, 1e-5); if (newFromOld[0] < newFromOld[2]) { BOOST_REQUIRE_EQUAL(results(0, 2), newFromOld[0]); BOOST_REQUIRE_EQUAL(results(1, 2), newFromOld[2]); } else { BOOST_REQUIRE_EQUAL(results(1, 2), newFromOld[0]); BOOST_REQUIRE_EQUAL(results(0, 2), newFromOld[2]); } BOOST_REQUIRE_CLOSE(results(2, 2), 0.1, 1e-5); if (newFromOld[1] < newFromOld[2]) { BOOST_REQUIRE_EQUAL(results(0, 3), newFromOld[1]); BOOST_REQUIRE_EQUAL(results(1, 3), newFromOld[2]); } else { BOOST_REQUIRE_EQUAL(results(1, 3), newFromOld[1]); BOOST_REQUIRE_EQUAL(results(0, 3), newFromOld[2]); } BOOST_REQUIRE_CLOSE(results(2, 3), 0.22, 1e-5); if (newFromOld[3] < newFromOld[10]) { BOOST_REQUIRE_EQUAL(results(0, 4), newFromOld[3]); BOOST_REQUIRE_EQUAL(results(1, 4), newFromOld[10]); } else { BOOST_REQUIRE_EQUAL(results(1, 4), newFromOld[3]); BOOST_REQUIRE_EQUAL(results(0, 4), newFromOld[10]); } BOOST_REQUIRE_CLOSE(results(2, 4), 0.25, 1e-5); if (newFromOld[0] < newFromOld[5]) { BOOST_REQUIRE_EQUAL(results(0, 5), newFromOld[0]); BOOST_REQUIRE_EQUAL(results(1, 5), newFromOld[5]); } else { BOOST_REQUIRE_EQUAL(results(1, 5), newFromOld[0]); BOOST_REQUIRE_EQUAL(results(0, 5), newFromOld[5]); } BOOST_REQUIRE_CLOSE(results(2, 5), 0.27, 1e-5); if (newFromOld[8] < newFromOld[9]) { BOOST_REQUIRE_EQUAL(results(0, 6), newFromOld[8]); BOOST_REQUIRE_EQUAL(results(1, 6), newFromOld[9]); } else { BOOST_REQUIRE_EQUAL(results(1, 6), newFromOld[8]); BOOST_REQUIRE_EQUAL(results(0, 6), newFromOld[9]); } BOOST_REQUIRE_CLOSE(results(2, 6), 0.46, 1e-5); if (newFromOld[6] < newFromOld[7]) { BOOST_REQUIRE_EQUAL(results(0, 7), newFromOld[6]); BOOST_REQUIRE_EQUAL(results(1, 7), newFromOld[7]); } else { BOOST_REQUIRE_EQUAL(results(1, 7), newFromOld[6]); BOOST_REQUIRE_EQUAL(results(0, 7), newFromOld[7]); } BOOST_REQUIRE_CLOSE(results(2, 7), 0.7, 1e-5); if (newFromOld[5] < newFromOld[7]) { BOOST_REQUIRE_EQUAL(results(0, 8), newFromOld[5]); BOOST_REQUIRE_EQUAL(results(1, 8), newFromOld[7]); } else { BOOST_REQUIRE_EQUAL(results(1, 8), newFromOld[5]); BOOST_REQUIRE_EQUAL(results(0, 8), newFromOld[7]); } BOOST_REQUIRE_CLOSE(results(2, 8), 1.08, 1e-5); if (newFromOld[3] < newFromOld[4]) { BOOST_REQUIRE_EQUAL(results(0, 9), newFromOld[3]); BOOST_REQUIRE_EQUAL(results(1, 9), newFromOld[4]); } else { BOOST_REQUIRE_EQUAL(results(1, 9), newFromOld[3]); BOOST_REQUIRE_EQUAL(results(0, 9), newFromOld[4]); } 
BOOST_REQUIRE_CLOSE(results(2, 9), 3.8, 1e-5); } /** * Test the dual tree method against the naive computation. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(DualTreeVsNaive) { arma::mat inputData; // Hard-coded filename: bad! // Code duplication: also bad! if (!data::Load("test_data_3_1000.csv", inputData)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); // Set up matrices to work with. arma::mat dualData = inputData; arma::mat naiveData = inputData; // Reset parameters from last test. DualTreeBoruvka<> dtb(dualData); arma::mat dualResults; dtb.ComputeMST(dualResults); // Set naive mode. DualTreeBoruvka<> dtbNaive(naiveData, true); arma::mat naiveResults; dtbNaive.ComputeMST(naiveResults); BOOST_REQUIRE_EQUAL(dualResults.n_cols, naiveResults.n_cols); BOOST_REQUIRE_EQUAL(dualResults.n_rows, naiveResults.n_rows); for (size_t i = 0; i < dualResults.n_cols; i++) { BOOST_REQUIRE_EQUAL(dualResults(0, i), naiveResults(0, i)); BOOST_REQUIRE_EQUAL(dualResults(1, i), naiveResults(1, i)); BOOST_REQUIRE_CLOSE(dualResults(2, i), naiveResults(2, i), 1e-5); } } /** * Make sure the cover tree works fine. */ BOOST_AUTO_TEST_CASE(CoverTreeTest) { arma::mat inputData; if (!data::Load("test_data_3_1000.csv", inputData)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); DualTreeBoruvka<> bst(inputData); DualTreeBoruvka ct(inputData); arma::mat bstResults; arma::mat coverResults; // Run the algorithms. bst.ComputeMST(bstResults); ct.ComputeMST(coverResults); for (size_t i = 0; i < bstResults.n_cols; i++) { BOOST_REQUIRE_EQUAL(bstResults(0, i), coverResults(0, i)); BOOST_REQUIRE_EQUAL(bstResults(1, i), coverResults(1, i)); BOOST_REQUIRE_CLOSE(bstResults(2, i), coverResults(2, i), 1e-5); } } /** * Test BinarySpaceTree with Ball Bound. */ BOOST_AUTO_TEST_CASE(BallTreeTest) { arma::mat inputData; if (!data::Load("test_data_3_1000.csv", inputData)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); // naive mode. DualTreeBoruvka<> bst(inputData, true); // Ball tree. DualTreeBoruvka ballt(inputData); arma::mat bstResults; arma::mat ballResults; // Run the algorithms. bst.ComputeMST(bstResults); ballt.ComputeMST(ballResults); for (size_t i = 0; i < bstResults.n_cols; i++) { BOOST_REQUIRE_EQUAL(bstResults(0, i), ballResults(0, i)); BOOST_REQUIRE_EQUAL(bstResults(1, i), ballResults(1, i)); BOOST_REQUIRE_CLOSE(bstResults(2, i), ballResults(2, i), 1e-5); } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/fastmks_test.cpp000066400000000000000000001002411315013601400213720ustar00rootroot00000000000000/** * @file fastmks_test.cpp * @author Ryan Curtin * * Ensure that fast max-kernel search is correct. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "test_tools.hpp" #include "serialization.hpp" using namespace mlpack; using namespace mlpack::tree; using namespace mlpack::fastmks; using namespace mlpack::kernel; using namespace mlpack::metric; BOOST_AUTO_TEST_SUITE(FastMKSTest); /** * Compare single-tree and naive. */ BOOST_AUTO_TEST_CASE(SingleTreeVsNaive) { // First create a random dataset. arma::mat data; data.randn(5, 1000); LinearKernel lk; // Now run FastMKS naively. 
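  // (A reading of the constructor call below, for reference: after the dataset
  // and kernel, the two boolean flags select single-tree mode and naive mode
  // respectively, so this instance performs an exhaustive scan and serves as
  // the ground truth for the tree-based searches.)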
FastMKS naive(data, lk, false, true); arma::Mat naiveIndices; arma::mat naiveProducts; naive.Search(10, naiveIndices, naiveProducts); // Now run it in single-tree mode. FastMKS single(data, lk, true); arma::Mat singleIndices; arma::mat singleProducts; single.Search(10, singleIndices, singleProducts); // Compare the results. for (size_t q = 0; q < singleIndices.n_cols; ++q) { for (size_t r = 0; r < singleIndices.n_rows; ++r) { BOOST_REQUIRE_EQUAL(singleIndices(r, q), naiveIndices(r, q)); BOOST_REQUIRE_CLOSE(singleProducts(r, q), naiveProducts(r, q), 1e-5); } } } /** * Compare dual-tree and naive. */ BOOST_AUTO_TEST_CASE(DualTreeVsNaive) { // First create a random dataset. arma::mat data; data.randn(10, 5000); LinearKernel lk; // Now run FastMKS naively. FastMKS naive(data, lk, false, true); arma::Mat naiveIndices; arma::mat naiveProducts; naive.Search(10, naiveIndices, naiveProducts); // Now run it in dual-tree mode. FastMKS tree(data, lk); arma::Mat treeIndices; arma::mat treeProducts; tree.Search(10, treeIndices, treeProducts); for (size_t q = 0; q < treeIndices.n_cols; ++q) { for (size_t r = 0; r < treeIndices.n_rows; ++r) { BOOST_REQUIRE_EQUAL(treeIndices(r, q), naiveIndices(r, q)); BOOST_REQUIRE_CLOSE(treeProducts(r, q), naiveProducts(r, q), 1e-5); } } } /** * Compare dual-tree and single-tree on a larger dataset. */ BOOST_AUTO_TEST_CASE(DualTreeVsSingleTree) { // First create a random dataset. arma::mat data; data.randu(8, 5000); PolynomialKernel pk(5.0, 2.5); FastMKS single(data, pk, true); arma::Mat singleIndices; arma::mat singleProducts; single.Search(10, singleIndices, singleProducts); // Now run it in dual-tree mode. FastMKS tree(data, pk); arma::Mat treeIndices; arma::mat treeProducts; tree.Search(10, treeIndices, treeProducts); for (size_t q = 0; q < treeIndices.n_cols; ++q) { for (size_t r = 0; r < treeIndices.n_rows; ++r) { BOOST_REQUIRE_EQUAL(treeIndices(r, q), singleIndices(r, q)); BOOST_REQUIRE_CLOSE(treeProducts(r, q), singleProducts(r, q), 1e-5); } } } /** * Test sparse FastMKS (how useful is this, I'm not sure). */ BOOST_AUTO_TEST_CASE(SparseFastMKSTest) { // First create a random sparse dataset. arma::sp_mat dataset; dataset.sprandu(10, 100, 0.3); FastMKS sparsemks(dataset); arma::mat denseset(dataset); FastMKS densemks(denseset); // Store the results in these. arma::Mat sparseIndices, denseIndices; arma::mat sparseKernels, denseKernels; // Do the searches. sparsemks.Search(3, sparseIndices, sparseKernels); densemks.Search(3, denseIndices, denseKernels); // Make sure the results are the same. for (size_t i = 0; i < sparseIndices.n_cols; ++i) { for (size_t j = 0; j < sparseIndices.n_rows; ++j) { if (std::abs(sparseKernels(j, i)) > 1e-15) BOOST_REQUIRE_CLOSE(sparseKernels(j, i), denseKernels(j, i), 1e-5); else BOOST_REQUIRE_SMALL(denseKernels(j, i), 1e-15); BOOST_REQUIRE_EQUAL(sparseIndices(j, i), denseIndices(j, i)); } } } BOOST_AUTO_TEST_CASE(SparsePolynomialFastMKSTest) { // Do it again with the polynomial kernel, just to be sure. arma::sp_mat dataset; dataset.sprandu(10, 100, 0.3); arma::mat denseset(dataset); PolynomialKernel pk(3); for (size_t i = 0; i < 100; ++i) for (size_t j = 0; j < 100; ++j) if (std::abs(pk.Evaluate(dataset.col(i), dataset.col(j))) < 1e-10) BOOST_REQUIRE_SMALL(pk.Evaluate(denseset.col(i), denseset.col(j)), 1e-10); else BOOST_REQUIRE_CLOSE(pk.Evaluate(dataset.col(i), dataset.col(j)), pk.Evaluate(denseset.col(i), denseset.col(j)), 1e-5); FastMKS sparsepoly(dataset); FastMKS densepoly(denseset); // Store the results in these. 
arma::Mat sparseIndices, denseIndices; arma::mat sparseKernels, denseKernels; // Do the searches. sparsepoly.Search(3, sparseIndices, sparseKernels); densepoly.Search(3, denseIndices, denseKernels); // Make sure the results are the same. for (size_t i = 0; i < sparseIndices.n_cols; ++i) { for (size_t j = 0; j < sparseIndices.n_rows; ++j) { if (std::abs(sparseKernels(j, i)) > 1e-15) BOOST_REQUIRE_CLOSE(sparseKernels(j, i), denseKernels(j, i), 1e-5); else BOOST_REQUIRE_SMALL(denseKernels(j, i), 1e-15); BOOST_REQUIRE_EQUAL(sparseIndices(j, i), denseIndices(j, i)); } } } // Make sure the empty constructor works. BOOST_AUTO_TEST_CASE(EmptyConstructorTest) { FastMKS f; arma::mat queryData = arma::randu(5, 100); arma::Mat indices; arma::mat products; BOOST_REQUIRE_THROW(f.Search(queryData, 3, indices, products), std::invalid_argument); } // Make sure the simplest overload of Train() works. BOOST_AUTO_TEST_CASE(SimpleTrainTest) { arma::mat referenceSet = arma::randu(5, 100); FastMKS f(referenceSet); FastMKS f2; f2.Train(referenceSet); arma::Mat indices, indices2; arma::mat products, products2; arma::mat querySet = arma::randu(5, 20); f.Search(querySet, 3, indices, products); f2.Search(querySet, 3, indices2, products2); BOOST_REQUIRE_EQUAL(indices.n_rows, indices2.n_rows); BOOST_REQUIRE_EQUAL(products.n_rows, products2.n_rows); BOOST_REQUIRE_EQUAL(indices.n_cols, indices2.n_cols); BOOST_REQUIRE_EQUAL(products.n_cols, products2.n_cols); for (size_t i = 0; i < products.n_elem; ++i) { if (std::abs(products[i]) < 1e-5) BOOST_REQUIRE_SMALL(products2[i], 1e-5); else BOOST_REQUIRE_CLOSE(products[i], products2[i], 1e-5); BOOST_REQUIRE_EQUAL(indices[i], indices2[i]); } } // Test the Train() overload that takes a kernel too. BOOST_AUTO_TEST_CASE(SimpleTrainKernelTest) { arma::mat referenceSet = arma::randu(5, 100); GaussianKernel gk(2.0); FastMKS f(referenceSet, gk); FastMKS f2; f2.Train(referenceSet, gk); arma::Mat indices, indices2; arma::mat products, products2; arma::mat querySet = arma::randu(5, 20); f.Search(querySet, 3, indices, products); f2.Search(querySet, 3, indices2, products2); BOOST_REQUIRE_EQUAL(indices.n_rows, indices2.n_rows); BOOST_REQUIRE_EQUAL(products.n_rows, products2.n_rows); BOOST_REQUIRE_EQUAL(indices.n_cols, indices2.n_cols); BOOST_REQUIRE_EQUAL(products.n_cols, products2.n_cols); for (size_t i = 0; i < products.n_elem; ++i) { if (std::abs(products[i]) < 1e-5) BOOST_REQUIRE_SMALL(products2[i], 1e-5); else BOOST_REQUIRE_CLOSE(products[i], products2[i], 1e-5); BOOST_REQUIRE_EQUAL(indices[i], indices2[i]); } } BOOST_AUTO_TEST_CASE(SerializationTest) { arma::mat dataset = arma::randu(5, 200); FastMKS f(dataset); FastMKS fXml, fText, fBinary; arma::mat otherDataset = arma::randu(3, 10); fBinary.Train(otherDataset); SerializeObjectAll(f, fXml, fText, fBinary); arma::mat kernels, xmlKernels, textKernels, binaryKernels; arma::Mat indices, xmlIndices, textIndices, binaryIndices; arma::mat querySet = arma::randu(5, 100); f.Search(querySet, 5, indices, kernels); fXml.Search(querySet, 5, xmlIndices, xmlKernels); fText.Search(querySet, 5, textIndices, textKernels); fBinary.Search(querySet, 5, binaryIndices, binaryKernels); CheckMatrices(indices, xmlIndices, textIndices, binaryIndices); CheckMatrices(kernels, xmlKernels, textKernels, binaryKernels); } // Make sure that we get an exception if we try to build the wrong FastMKSModel. 
BOOST_AUTO_TEST_CASE(FastMKSModelWrongModelTest) { PolynomialKernel pk(2.0); arma::mat data = arma::randu(5, 5); FastMKSModel m(FastMKSModel::LINEAR_KERNEL); BOOST_REQUIRE_THROW(m.BuildModel(data, pk, false, false, 2.0), std::invalid_argument); } // Test the linear kernel mode of the FastMKSModel. BOOST_AUTO_TEST_CASE(FastMKSModelLinearTest) { LinearKernel lk; arma::mat referenceData = arma::randu(10, 100); FastMKS f(referenceData, lk); FastMKSModel m(FastMKSModel::LINEAR_KERNEL); FastMKSModel mNaive(FastMKSModel::LINEAR_KERNEL); FastMKSModel mSingle(FastMKSModel::LINEAR_KERNEL); m.BuildModel(referenceData, lk, false, false, 2.0); mNaive.BuildModel(referenceData, lk, false, true, 2.0); mSingle.BuildModel(referenceData, lk, true, false, 2.0); // Now search, first monochromatically. arma::Mat indices, mIndices, mNaiveIndices, mSingleIndices; arma::mat kernels, mKernels, mNaiveKernels, mSingleKernels; f.Search(3, indices, kernels); m.Search(3, mIndices, mKernels); mNaive.Search(3, mNaiveIndices, mNaiveKernels); mSingle.Search(3, mSingleIndices, mSingleKernels); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } // Now test with a different query set. 
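  // (The monochromatic searches above used the reference set as the query set;
  // the bichromatic case below must agree with the plain FastMKS object in
  // exactly the same way on a fresh random query set.)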
arma::mat querySet = arma::randu(10, 50); f.Search(querySet, 3, indices, kernels); m.Search(querySet, 3, mIndices, mKernels, 2.0); mNaive.Search(querySet, 3, mNaiveIndices, mNaiveKernels, 2.0); mSingle.Search(querySet, 3, mSingleIndices, mSingleKernels, 2.0); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } } // Test the polynomial kernel mode of the FastMKSModel. BOOST_AUTO_TEST_CASE(FastMKSModelPolynomialTest) { PolynomialKernel pk(2.0); arma::mat referenceData = arma::randu(10, 100); FastMKS f(referenceData, pk); FastMKSModel m(FastMKSModel::POLYNOMIAL_KERNEL); FastMKSModel mNaive(FastMKSModel::POLYNOMIAL_KERNEL); FastMKSModel mSingle(FastMKSModel::POLYNOMIAL_KERNEL); m.BuildModel(referenceData, pk, false, false, 2.0); mNaive.BuildModel(referenceData, pk, false, true, 2.0); mSingle.BuildModel(referenceData, pk, true, false, 2.0); // Now search, first monochromatically. 
arma::Mat indices, mIndices, mNaiveIndices, mSingleIndices; arma::mat kernels, mKernels, mNaiveKernels, mSingleKernels; f.Search(3, indices, kernels); m.Search(3, mIndices, mKernels); mNaive.Search(3, mNaiveIndices, mNaiveKernels); mSingle.Search(3, mSingleIndices, mSingleKernels); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } // Now test with a different query set. arma::mat querySet = arma::randu(10, 50); f.Search(querySet, 3, indices, kernels); m.Search(querySet, 3, mIndices, mKernels, 2.0); mNaive.Search(querySet, 3, mNaiveIndices, mNaiveKernels, 2.0); mSingle.Search(querySet, 3, mSingleIndices, mSingleKernels, 2.0); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } } // Test the cosine distance mode of the FastMKSModel. 
BOOST_AUTO_TEST_CASE(FastMKSModelCosineTest) { CosineDistance ck; arma::mat referenceData = arma::randu(10, 100); FastMKS f(referenceData, ck); FastMKSModel m(FastMKSModel::COSINE_DISTANCE); FastMKSModel mNaive(FastMKSModel::COSINE_DISTANCE); FastMKSModel mSingle(FastMKSModel::COSINE_DISTANCE); m.BuildModel(referenceData, ck, false, false, 2.0); mNaive.BuildModel(referenceData, ck, false, true, 2.0); mSingle.BuildModel(referenceData, ck, true, false, 2.0); // Now search, first monochromatically. arma::Mat indices, mIndices, mNaiveIndices, mSingleIndices; arma::mat kernels, mKernels, mNaiveKernels, mSingleKernels; f.Search(3, indices, kernels); m.Search(3, mIndices, mKernels); mNaive.Search(3, mNaiveIndices, mNaiveKernels); mSingle.Search(3, mSingleIndices, mSingleKernels); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } // Now test with a different query set. 
arma::mat querySet = arma::randu(10, 50); f.Search(querySet, 3, indices, kernels); m.Search(querySet, 3, mIndices, mKernels, 2.0); mNaive.Search(querySet, 3, mNaiveIndices, mNaiveKernels, 2.0); mSingle.Search(querySet, 3, mSingleIndices, mSingleKernels, 2.0); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } } // Test the Gaussian kernel mode of the FastMKSModel. BOOST_AUTO_TEST_CASE(FastMKSModelGaussianTest) { GaussianKernel gk(1.5); arma::mat referenceData = arma::randu(10, 100); FastMKS f(referenceData, gk); FastMKSModel m(FastMKSModel::GAUSSIAN_KERNEL); FastMKSModel mNaive(FastMKSModel::GAUSSIAN_KERNEL); FastMKSModel mSingle(FastMKSModel::GAUSSIAN_KERNEL); m.BuildModel(referenceData, gk, false, false, 2.0); mNaive.BuildModel(referenceData, gk, false, true, 2.0); mSingle.BuildModel(referenceData, gk, true, false, 2.0); // Now search, first monochromatically. 
arma::Mat indices, mIndices, mNaiveIndices, mSingleIndices; arma::mat kernels, mKernels, mNaiveKernels, mSingleKernels; f.Search(3, indices, kernels); m.Search(3, mIndices, mKernels); mNaive.Search(3, mNaiveIndices, mNaiveKernels); mSingle.Search(3, mSingleIndices, mSingleKernels); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } // Now test with a different query set. arma::mat querySet = arma::randu(10, 50); f.Search(querySet, 3, indices, kernels); m.Search(querySet, 3, mIndices, mKernels, 2.0); mNaive.Search(querySet, 3, mNaiveIndices, mNaiveKernels, 2.0); mSingle.Search(querySet, 3, mSingleIndices, mSingleKernels, 2.0); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } } // Test the Epanechnikov kernel mode of the FastMKSModel. 
BOOST_AUTO_TEST_CASE(FastMKSModelEpanTest) { EpanechnikovKernel ek(2.5); arma::mat referenceData = arma::randu(10, 100); FastMKS f(referenceData, ek); FastMKSModel m(FastMKSModel::EPANECHNIKOV_KERNEL); FastMKSModel mNaive(FastMKSModel::EPANECHNIKOV_KERNEL); FastMKSModel mSingle(FastMKSModel::EPANECHNIKOV_KERNEL); m.BuildModel(referenceData, ek, false, false, 2.0); mNaive.BuildModel(referenceData, ek, false, true, 2.0); mSingle.BuildModel(referenceData, ek, true, false, 2.0); // Now search, first monochromatically. arma::Mat indices, mIndices, mNaiveIndices, mSingleIndices; arma::mat kernels, mKernels, mNaiveKernels, mSingleKernels; f.Search(3, indices, kernels); m.Search(3, mIndices, mKernels); mNaive.Search(3, mNaiveIndices, mNaiveKernels); mSingle.Search(3, mSingleIndices, mSingleKernels); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } // Now test with a different query set. 
arma::mat querySet = arma::randu(10, 50); f.Search(querySet, 3, indices, kernels); m.Search(querySet, 3, mIndices, mKernels, 2.0); mNaive.Search(querySet, 3, mNaiveIndices, mNaiveKernels, 2.0); mSingle.Search(querySet, 3, mSingleIndices, mSingleKernels, 2.0); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } } // Test the triangular kernel mode of the FastMKSModel. BOOST_AUTO_TEST_CASE(FastMKSModelTriangularTest) { TriangularKernel tk(2.0); arma::mat referenceData = arma::randu(10, 100); FastMKS f(referenceData, tk); FastMKSModel m(FastMKSModel::TRIANGULAR_KERNEL); FastMKSModel mNaive(FastMKSModel::TRIANGULAR_KERNEL); FastMKSModel mSingle(FastMKSModel::TRIANGULAR_KERNEL); m.BuildModel(referenceData, tk, false, false, 2.0); mNaive.BuildModel(referenceData, tk, false, true, 2.0); mSingle.BuildModel(referenceData, tk, true, false, 2.0); // Now search, first monochromatically. 
arma::Mat indices, mIndices, mNaiveIndices, mSingleIndices; arma::mat kernels, mKernels, mNaiveKernels, mSingleKernels; f.Search(3, indices, kernels); m.Search(3, mIndices, mKernels); mNaive.Search(3, mNaiveIndices, mNaiveKernels); mSingle.Search(3, mSingleIndices, mSingleKernels); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } // Now test with a different query set. arma::mat querySet = arma::randu(10, 50); f.Search(querySet, 3, indices, kernels); m.Search(querySet, 3, mIndices, mKernels, 2.0); mNaive.Search(querySet, 3, mNaiveIndices, mNaiveKernels, 2.0); mSingle.Search(querySet, 3, mSingleIndices, mSingleKernels, 2.0); BOOST_REQUIRE_EQUAL(indices.n_cols, mIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mNaiveIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_cols, mSingleIndices.n_cols); BOOST_REQUIRE_EQUAL(indices.n_rows, mIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mNaiveIndices.n_rows); BOOST_REQUIRE_EQUAL(indices.n_rows, mSingleIndices.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_cols, mKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mNaiveKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_cols, mSingleKernels.n_cols); BOOST_REQUIRE_EQUAL(kernels.n_rows, mKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mNaiveKernels.n_rows); BOOST_REQUIRE_EQUAL(kernels.n_rows, mSingleKernels.n_rows); for (size_t i = 0; i < indices.n_elem; ++i) { BOOST_REQUIRE_EQUAL(indices[i], mIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mNaiveIndices[i]); BOOST_REQUIRE_EQUAL(indices[i], mSingleIndices[i]); if (std::abs(kernels[i]) < 1e-5) { BOOST_REQUIRE_SMALL(mKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mNaiveKernels[i], 1e-5); BOOST_REQUIRE_SMALL(mSingleKernels[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(kernels[i], mKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mNaiveKernels[i], 1e-5); BOOST_REQUIRE_CLOSE(kernels[i], mSingleKernels[i], 1e-5); } } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/gmm_test.cpp000066400000000000000000000613161315013601400205130ustar00rootroot00000000000000/** * @file gmm_test.cpp * @author Ryan Curtin * @author Michael Fox * * Test for the Gaussian Mixture Model class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. 
If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>

#include <mlpack/methods/gmm/gmm.hpp>
#include <mlpack/methods/gmm/diagonal_constraint.hpp>
#include <mlpack/methods/gmm/eigenvalue_ratio_constraint.hpp>
#include <mlpack/methods/gmm/no_constraint.hpp>
#include <mlpack/methods/gmm/positive_definite_constraint.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::gmm;

BOOST_AUTO_TEST_SUITE(GMMTest);

/**
 * Test GMM::Probability() for a single observation for a few cases.
 */
BOOST_AUTO_TEST_CASE(GMMProbabilityTest)
{
  // Create a GMM.
  GMM gmm(2, 2);
  gmm.Component(0) = distribution::GaussianDistribution("0 0", "1 0; 0 1");
  gmm.Component(1) = distribution::GaussianDistribution("3 3", "2 1; 1 2");
  gmm.Weights() = "0.3 0.7";

  // Now test a couple observations.  These comparisons are calculated by hand.
  BOOST_REQUIRE_CLOSE(gmm.Probability("0 0"), 0.05094887202, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("1 1"), 0.03451996667, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("2 2"), 0.04696302254, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("3 3"), 0.06432759685, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("-1 5.3"), 2.503171278804e-6, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("1.4 0"), 0.024676682176, 1e-5);
}

/**
 * Test GMM::Probability() for a single observation being from a particular
 * component.
 */
BOOST_AUTO_TEST_CASE(GMMProbabilityComponentTest)
{
  // Create a GMM (same as the last test).
  GMM gmm(2, 2);
  gmm.Component(0) = distribution::GaussianDistribution("0 0", "1 0; 0 1");
  gmm.Component(1) = distribution::GaussianDistribution("3 3", "2 1; 1 2");
  gmm.Weights() = "0.3 0.7";

  // Now test a couple observations.  These comparisons are calculated by hand.
  BOOST_REQUIRE_CLOSE(gmm.Probability("0 0", 0), 0.0477464829276, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("0 0", 1), 0.0032023890978, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("1 1", 0), 0.0175649494573, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("1 1", 1), 0.0169550172159, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("2 2", 0), 8.7450733951e-4, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("2 2", 1), 0.0460885151993, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("3 3", 0), 5.8923841039e-6, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("3 3", 1), 0.0643217044658, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("-1 5.3", 0), 2.30212100302e-8, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("-1 5.3", 1), 2.48015006877e-6, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("1.4 0", 0), 0.0179197849738, 1e-5);
  BOOST_REQUIRE_CLOSE(gmm.Probability("1.4 0", 1), 0.0067568972024, 1e-5);
}

/**
 * Test training a model on only one Gaussian (randomly generated) in two
 * dimensions.  We will vary the dataset size from small to large.  The EM
 * algorithm is used for training the GMM.
 */
BOOST_AUTO_TEST_CASE(GMMTrainEMOneGaussian)
{
  for (size_t iterations = 0; iterations < 4; iterations++)
  {
    // Determine random covariance and mean.
    arma::vec mean;
    mean.randu(2);
    arma::vec covar;
    covar.randu(2);

    arma::mat data;
    data.randn(2 /* dimension */, 150 * pow(10, (iterations / 3.0)));

    // Now apply mean and covariance.
    data.row(0) *= covar(0);
    data.row(1) *= covar(1);

    data.row(0) += mean(0);
    data.row(1) += mean(1);

    // Now, train the model.
    GMM gmm(1, 2);
    gmm.Train(data, 10);

    arma::vec actualMean = arma::mean(data, 1);
    arma::mat actualCovar = ccov(data, 1 /* biased estimator */);

    // Check the model to see that it is correct.
    CheckMatrices(gmm.Component(0).Mean(), actualMean);
    CheckMatrices(gmm.Component(0).Covariance(), actualCovar);

    BOOST_REQUIRE_CLOSE(gmm.Weights()[0], 1.0, 1e-5);
  }
}

/**
 * Test training a model on multiple Gaussians in higher dimensionality than
 * two.  We will hold the dataset size constant at 500 points.
The EM algorithm * is used for training the GMM. */ BOOST_AUTO_TEST_CASE(GMMTrainEMMultipleGaussians) { // Higher dimensionality gives us a greater chance of having separated // Gaussians. size_t dims = 8; size_t gaussians = 3; // Generate dataset. arma::mat data; data.zeros(dims, 500); std::vector means(gaussians); std::vector covars(gaussians); arma::vec weights(gaussians); arma::Col counts(gaussians); // Choose weights randomly. weights.zeros(); while (weights.min() < 0.02) { weights.randu(gaussians); weights /= accu(weights); } for (size_t i = 0; i < gaussians; i++) counts[i] = round(weights[i] * (data.n_cols - gaussians)); // Ensure one point minimum in each. counts += 1; // Account for rounding errors (possibly necessary). counts[gaussians - 1] += (data.n_cols - arma::accu(counts)); // Build each Gaussian individually. size_t point = 0; for (size_t i = 0; i < gaussians; i++) { arma::mat gaussian; gaussian.randn(dims, counts[i]); // Randomly generate mean and covariance. means[i].randu(dims); means[i] -= 0.5; means[i] *= 50; // We need to make sure the covariance is positive definite. We will take a // random matrix C and then set our covariance to 4 * C * C', which will be // positive semidefinite. covars[i].randu(dims, dims); covars[i] *= 4 * trans(covars[i]); data.cols(point, point + counts[i] - 1) = (covars[i] * gaussian + means[i] * arma::ones(counts[i])); // Calculate the actual means and covariances because they will probably // be different (this is easier to do before we shuffle the points). means[i] = arma::mean(data.cols(point, point + counts[i] - 1), 1); covars[i] = ccov(data.cols(point, point + counts[i] - 1), 1 /* biased */); point += counts[i]; } // Calculate actual weights. for (size_t i = 0; i < gaussians; i++) weights[i] = (double) counts[i] / data.n_cols; // Now train the model. GMM gmm(gaussians, dims); gmm.Train(data, 10); arma::uvec sortRef = sort_index(weights); arma::uvec sortTry = sort_index(gmm.Weights()); // Check the model to see that it is correct. for (size_t i = 0; i < gaussians; i++) { // Check the mean. CheckMatrices(gmm.Component(sortTry[i]).Mean(), means[sortRef[i]], 1e-3); // Check the covariance. CheckMatrices(gmm.Component(sortTry[i]).Covariance(), covars[sortRef[i]], 0.05); // Check the weight. BOOST_REQUIRE_CLOSE(gmm.Weights()[sortTry[i]], weights[sortRef[i]], 0.001); } } /** * Train a single-gaussian mixture, but using the overload of Train() where * probabilities of the observation are given. */ BOOST_AUTO_TEST_CASE(GMMTrainEMSingleGaussianWithProbability) { // Generate observations from a Gaussian distribution. distribution::GaussianDistribution d("0.5 1.0", "1.0 0.3; 0.3 1.0"); // 10000 observations, each with random probability. arma::mat observations(2, 20000); for (size_t i = 0; i < 20000; i++) observations.col(i) = d.Random(); arma::vec probabilities; probabilities.randu(20000); // Random probabilities. // Now train the model. GMM g(1, 2); g.Train(observations, probabilities, 10); // Check that it is trained correctly. 5% tolerance because of random error // present in observations. BOOST_REQUIRE_CLOSE(g.Component(0).Mean()[0], 0.5, 5.0); BOOST_REQUIRE_CLOSE(g.Component(0).Mean()[1], 1.0, 5.0); // 6% tolerance on the large numbers, 10% on the smaller numbers. 
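  // (Note that BOOST_REQUIRE_CLOSE takes its tolerance as a percentage, so
  // the 6.0 and 10.0 below require agreement within 6% and 10% respectively,
  // not within an absolute margin of 6 or 10.)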
BOOST_REQUIRE_CLOSE(g.Component(0).Covariance()(0, 0), 1.0, 6.0); BOOST_REQUIRE_CLOSE(g.Component(0).Covariance()(0, 1), 0.3, 10.0); BOOST_REQUIRE_CLOSE(g.Component(0).Covariance()(1, 0), 0.3, 10.0); BOOST_REQUIRE_CLOSE(g.Component(0).Covariance()(1, 1), 1.0, 6.0); BOOST_REQUIRE_CLOSE(g.Weights()[0], 1.0, 1e-5); } /** * Train a multi-Gaussian mixture, using the overload of Train() where * probabilities of the observation are given. */ BOOST_AUTO_TEST_CASE(GMMTrainEMMultipleGaussiansWithProbability) { // We'll have three Gaussian distributions from this mixture, and one Gaussian // not from this mixture (but we'll put some observations from it in). distribution::GaussianDistribution d1("0.0 1.0 0.0", "1.0 0.0 0.5;" "0.0 0.8 0.1;" "0.5 0.1 1.0"); distribution::GaussianDistribution d2("2.0 -1.0 5.0", "3.0 0.0 0.5;" "0.0 1.2 0.2;" "0.5 0.2 1.3"); distribution::GaussianDistribution d3("0.0 5.0 -3.0", "2.0 0.0 0.0;" "0.0 0.3 0.0;" "0.0 0.0 1.0"); distribution::GaussianDistribution d4("4.0 2.0 2.0", "1.5 0.6 0.5;" "0.6 1.1 0.1;" "0.5 0.1 1.0"); // Now we'll generate points and probabilities. 1500 points. Slower than I // would like... arma::mat points(3, 5000); arma::vec probabilities(5000); for (size_t i = 0; i < 5000; i++) { double randValue = math::Random(); if (randValue <= 0.20) // p(d1) = 0.20 points.col(i) = d1.Random(); else if (randValue <= 0.50) // p(d2) = 0.30 points.col(i) = d2.Random(); else if (randValue <= 0.90) // p(d3) = 0.40 points.col(i) = d3.Random(); else // p(d4) = 0.10 points.col(i) = d4.Random(); // Set the probability right. If it came from this mixture, it should be // 0.97 plus or minus a little bit of noise. If not, then it should be 0.03 // plus or minus a little bit of noise. The base probability (minus the // noise) is parameterizable for easy modification of the test. double confidence = 0.998; double perturbation = math::Random(-0.002, 0.002); if (randValue <= 0.90) probabilities(i) = confidence + perturbation; else probabilities(i) = (1 - confidence) + perturbation; } // Now train the model. GMM g(3, 3); // 3 dimensions, 3 components (the fourth component is fake). g.Train(points, probabilities, 8); // Now check the results. We need to order by weights so that when we do the // checking, things will be correct. arma::uvec sortedIndices = sort_index(g.Weights()); // The tolerances in our checks are quite large, but it is good to remember // that we introduced a fair amount of random noise into this whole process. // We don't need to look for the fourth Gaussian since that is not supposed to // be a part of this mixture. // First Gaussian (d1). BOOST_REQUIRE_SMALL(g.Weights()[sortedIndices[0]] - 0.2, 0.1); for (size_t i = 0; i < 3; i++) BOOST_REQUIRE_SMALL((g.Component(sortedIndices[0]).Mean()[i] - d1.Mean()[i]), 0.4); for (size_t row = 0; row < 3; row++) for (size_t col = 0; col < 3; col++) BOOST_REQUIRE_SMALL((g.Component(sortedIndices[0]).Covariance()(row, col) - d1.Covariance()(row, col)), 0.7); // Big tolerance! Lots of noise. // Second Gaussian (d2). BOOST_REQUIRE_SMALL(g.Weights()[sortedIndices[1]] - 0.3, 0.1); for (size_t i = 0; i < 3; i++) BOOST_REQUIRE_SMALL((g.Component(sortedIndices[1]).Mean()[i] - d2.Mean()[i]), 0.4); for (size_t row = 0; row < 3; row++) for (size_t col = 0; col < 3; col++) BOOST_REQUIRE_SMALL((g.Component(sortedIndices[1]).Covariance()(row, col) - d2.Covariance()(row, col)), 0.7); // Big tolerance! Lots of noise. // Third Gaussian (d3). 
BOOST_REQUIRE_SMALL(g.Weights()[sortedIndices[2]] - 0.4, 0.1); for (size_t i = 0; i < 3; ++i) BOOST_REQUIRE_SMALL((g.Component(sortedIndices[2]).Mean()[i] - d3.Mean()[i]), 0.4); for (size_t row = 0; row < 3; ++row) for (size_t col = 0; col < 3; ++col) BOOST_REQUIRE_SMALL((g.Component(sortedIndices[2]).Covariance()(row, col) - d3.Covariance()(row, col)), 0.7); } /** * Make sure generating observations randomly works. We'll do this by * generating a bunch of random observations and then re-training on them, and * hope that our model is the same. */ BOOST_AUTO_TEST_CASE(GMMRandomTest) { // Simple GMM distribution. GMM gmm(2, 2); gmm.Weights() = arma::vec("0.40 0.60"); // N([2.25 3.10], [1.00 0.20; 0.20 0.89]) gmm.Component(0) = distribution::GaussianDistribution("2.25 3.10", "1.00 0.60; 0.60 0.89"); // N([4.10 1.01], [1.00 0.00; 0.00 1.01]) gmm.Component(1) = distribution::GaussianDistribution("4.10 1.01", "1.00 0.70; 0.70 1.01"); // Now generate a bunch of observations. arma::mat observations(2, 4000); for (size_t i = 0; i < 4000; i++) observations.col(i) = gmm.Random(); // A new one which we'll train. GMM gmm2(2, 2); gmm2.Train(observations, 10); // Now check the results. We need to order by weights so that when we do the // checking, things will be correct. arma::uvec sortedIndices = sort_index(gmm2.Weights()); // Now check that the parameters are the same. Tolerances are kind of big // because we only used 2000 observations. BOOST_REQUIRE_CLOSE(gmm.Weights()[0], gmm2.Weights()[sortedIndices[0]], 7.0); BOOST_REQUIRE_CLOSE(gmm.Weights()[1], gmm2.Weights()[sortedIndices[1]], 7.0); BOOST_REQUIRE_CLOSE(gmm.Component(0).Mean()[0], gmm2.Component(sortedIndices[0]).Mean()[0], 7.5); BOOST_REQUIRE_CLOSE(gmm.Component(0).Mean()[1], gmm2.Component(sortedIndices[0]).Mean()[1], 7.5); BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(0, 0), gmm2.Component(sortedIndices[0]).Covariance()(0, 0), 13.0); BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(0, 1), gmm2.Component(sortedIndices[0]).Covariance()(0, 1), 22.0); BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(1, 0), gmm2.Component(sortedIndices[0]).Covariance()(1, 0), 22.0); BOOST_REQUIRE_CLOSE(gmm.Component(0).Covariance()(1, 1), gmm2.Component(sortedIndices[0]).Covariance()(1, 1), 13.0); BOOST_REQUIRE_CLOSE(gmm.Component(1).Mean()[0], gmm2.Component(sortedIndices[1]).Mean()[0], 7.5); BOOST_REQUIRE_CLOSE(gmm.Component(1).Mean()[1], gmm2.Component(sortedIndices[1]).Mean()[1], 7.5); BOOST_REQUIRE_CLOSE(gmm.Component(1).Covariance()(0, 0), gmm2.Component(sortedIndices[1]).Covariance()(0, 0), 13.0); BOOST_REQUIRE_CLOSE(gmm.Component(1).Covariance()(0, 1), gmm2.Component(sortedIndices[1]).Covariance()(0, 1), 22.0); BOOST_REQUIRE_CLOSE(gmm.Component(1).Covariance()(1, 0), gmm2.Component(sortedIndices[1]).Covariance()(1, 0), 22.0); BOOST_REQUIRE_CLOSE(gmm.Component(1).Covariance()(1, 1), gmm2.Component(sortedIndices[1]).Covariance()(1, 1), 13.0); } /** * Test classification of observations by component. */ BOOST_AUTO_TEST_CASE(GMMClassifyTest) { // First create a Gaussian with a few components. 
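  // Classify() assigns each point to the component c that maximizes the
  // weighted density w_c * N(x; mu_c, Sigma_c), i.e. the maximum a posteriori
  // component; the expected labels checked below were computed by comparing
  // those weighted densities by hand.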
GMM gmm(3, 2); gmm.Component(0) = distribution::GaussianDistribution("0 0", "1 0; 0 1"); gmm.Component(1) = distribution::GaussianDistribution("1 3", "3 2; 2 3"); gmm.Component(2) = distribution::GaussianDistribution("-2 -2", "2.2 1.4; 1.4 5.1"); gmm.Weights() = "0.6 0.25 0.15"; arma::mat observations = arma::trans(arma::mat( " 0 0;" " 0 1;" " 0 2;" " 1 -2;" " 2 -2;" "-2 0;" " 5 5;" "-2 -2;" " 3 3;" "25 25;" "-1 -1;" "-3 -3;" "-5 1")); arma::Row classes; gmm.Classify(observations, classes); // Test classification of points. Classifications produced by hand. BOOST_REQUIRE_EQUAL(classes[ 0], 0); BOOST_REQUIRE_EQUAL(classes[ 1], 0); BOOST_REQUIRE_EQUAL(classes[ 2], 1); BOOST_REQUIRE_EQUAL(classes[ 3], 0); BOOST_REQUIRE_EQUAL(classes[ 4], 0); BOOST_REQUIRE_EQUAL(classes[ 5], 0); BOOST_REQUIRE_EQUAL(classes[ 6], 1); BOOST_REQUIRE_EQUAL(classes[ 7], 2); BOOST_REQUIRE_EQUAL(classes[ 8], 1); BOOST_REQUIRE_EQUAL(classes[ 9], 1); BOOST_REQUIRE_EQUAL(classes[10], 0); BOOST_REQUIRE_EQUAL(classes[11], 2); BOOST_REQUIRE_EQUAL(classes[12], 2); } BOOST_AUTO_TEST_CASE(GMMLoadSaveTest) { // Create a GMM, save it, and load it. GMM gmm(10, 4); gmm.Weights().randu(); for (size_t i = 0; i < gmm.Gaussians(); ++i) { gmm.Component(i).Mean().randu(); arma::mat covariance = arma::randu( gmm.Component(i).Covariance().n_rows, gmm.Component(i).Covariance().n_cols); covariance *= covariance.t(); covariance += arma::eye(covariance.n_rows, covariance.n_cols); gmm.Component(i).Covariance(std::move(covariance)); } // Save the GMM. { std::ofstream ofs("test-gmm-save.xml"); boost::archive::xml_oarchive ar(ofs); ar << data::CreateNVP(gmm, "gmm"); } // Load the GMM. GMM gmm2; { std::ifstream ifs("test-gmm-save.xml"); boost::archive::xml_iarchive ar(ifs); ar >> data::CreateNVP(gmm2, "gmm"); } // Remove clutter. //remove("test-gmm-save.xml"); BOOST_REQUIRE_EQUAL(gmm.Gaussians(), gmm2.Gaussians()); BOOST_REQUIRE_EQUAL(gmm.Dimensionality(), gmm2.Dimensionality()); for (size_t i = 0; i < gmm.Dimensionality(); ++i) BOOST_REQUIRE_CLOSE(gmm.Weights()[i], gmm2.Weights()[i], 1e-3); for (size_t i = 0; i < gmm.Gaussians(); ++i) { for (size_t j = 0; j < gmm.Dimensionality(); ++j) BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j], gmm2.Component(i).Mean()[j], 1e-3); for (size_t j = 0; j < gmm.Dimensionality(); ++j) { for (size_t k = 0; k < gmm.Dimensionality(); ++k) { BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k), gmm2.Component(i).Covariance()(j, k), 1e-3); } } } } BOOST_AUTO_TEST_CASE(NoConstraintTest) { // Generate random matrices and make sure they end up the same. for (size_t i = 0; i < 30; ++i) { const size_t rows = 5 + math::RandInt(100); const size_t cols = 5 + math::RandInt(100); arma::mat cov(rows, cols); cov.randu(); arma::mat newcov(cov); NoConstraint::ApplyConstraint(newcov); for (size_t j = 0; j < cov.n_elem; ++j) BOOST_REQUIRE_CLOSE(newcov(j), cov(j), 1e-20); } } BOOST_AUTO_TEST_CASE(PositiveDefiniteConstraintTest) { // Make sure matrices are made to be positive definite, or more specifically, // that they can be Cholesky decomposed. for (size_t i = 0; i < 30; ++i) { const size_t elem = 5 + math::RandInt(50); arma::mat cov(elem, elem); cov.randu(); PositiveDefiniteConstraint::ApplyConstraint(cov); arma::mat c; #if (ARMA_VERSION_MAJOR < 4) || \ ((ARMA_VERSION_MAJOR == 4) && (ARMA_VERSION_MINOR < 500)) BOOST_REQUIRE(arma::chol(c, cov)); #else BOOST_REQUIRE(arma::chol(c, cov, "lower")); #endif } } BOOST_AUTO_TEST_CASE(DiagonalConstraintTest) { // Make sure matrices are made to be positive definite. 
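  // (A minimal sanity sketch first, under the assumption that
  // DiagonalConstraint::ApplyConstraint() simply zeroes the off-diagonal
  // entries of its argument: applying it to [1 2; 3 4] should yield
  // [1 0; 0 4].)
  {
    arma::mat demo("1 2; 3 4");
    DiagonalConstraint::ApplyConstraint(demo);
    BOOST_REQUIRE_CLOSE(demo(0, 0), 1.0, 1e-10);
    BOOST_REQUIRE_SMALL(demo(0, 1), 1e-50);
    BOOST_REQUIRE_SMALL(demo(1, 0), 1e-50);
    BOOST_REQUIRE_CLOSE(demo(1, 1), 4.0, 1e-10);
  }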
for (size_t i = 0; i < 30; ++i) { const size_t elem = 5 + math::RandInt(50); arma::mat cov(elem, elem); cov.randu(); DiagonalConstraint::ApplyConstraint(cov); for (size_t j = 0; j < elem; ++j) for (size_t k = 0; k < elem; ++k) if (j != k) BOOST_REQUIRE_SMALL(cov(j, k), 1e-50); } } BOOST_AUTO_TEST_CASE(EigenvalueRatioConstraintTest) { // Generate a list of eigenvalue ratios. arma::vec ratios("1.0 0.7 0.4 0.2 0.1 0.1 0.05 0.01"); EigenvalueRatioConstraint erc(ratios); // Now make some random matrices and see if the constraint works. for (size_t i = 0; i < 30; ++i) { arma::mat cov(8, 8); cov.randu(); erc.ApplyConstraint(cov); // Decompose the matrix and make sure things are right. arma::vec eigenvalues = arma::eig_sym(cov); for (size_t i = 0; i < eigenvalues.n_elem; ++i) BOOST_REQUIRE_CLOSE(eigenvalues[i] / eigenvalues[0], ratios[i], 1e-5); } } BOOST_AUTO_TEST_CASE(UseExistingModelTest) { // If we run a GMM and it converges, then if we run it again using the // converged results as the starting point, then it should terminate after one // iteration and give basically the same results. // Higher dimensionality gives us a greater chance of having separated // Gaussians. size_t dims = 8; size_t gaussians = 3; // Generate dataset. arma::mat data; data.zeros(dims, 500); std::vector means(gaussians); std::vector covars(gaussians); arma::vec weights(gaussians); arma::Col counts(gaussians); // Choose weights randomly. weights.zeros(); while (weights.min() < 0.02) { weights.randu(gaussians); weights /= accu(weights); } for (size_t i = 0; i < gaussians; i++) counts[i] = round(weights[i] * (data.n_cols - gaussians)); // Ensure one point minimum in each. counts += 1; // Account for rounding errors (possibly necessary). counts[gaussians - 1] += (data.n_cols - arma::accu(counts)); // Build each Gaussian individually. size_t point = 0; for (size_t i = 0; i < gaussians; i++) { arma::mat gaussian; gaussian.randn(dims, counts[i]); // Randomly generate mean and covariance. means[i].randu(dims); means[i] -= 0.5; means[i] *= 50; // We need to make sure the covariance is positive definite. We will take a // random matrix C and then set our covariance to 4 * C * C', which will be // positive semidefinite. covars[i].randu(dims, dims); covars[i] *= 4 * trans(covars[i]); data.cols(point, point + counts[i] - 1) = (covars[i] * gaussian + means[i] * arma::ones(counts[i])); // Calculate the actual means and covariances because they will probably // be different (this is easier to do before we shuffle the points). means[i] = arma::mean(data.cols(point, point + counts[i] - 1), 1); covars[i] = ccov(data.cols(point, point + counts[i] - 1), 1 /* biased */); point += counts[i]; } // Calculate actual weights. for (size_t i = 0; i < gaussians; i++) weights[i] = (double) counts[i] / data.n_cols; // Now train the model. GMM gmm(gaussians, dims); gmm.Train(data, 10); GMM oldgmm(gmm); // Retrain the model with the existing model as the starting point. gmm.Train(data, 1, true); // Check for similarity. for (size_t i = 0; i < gmm.Gaussians(); ++i) { BOOST_REQUIRE_CLOSE(gmm.Weights()[i], oldgmm.Weights()[i], 1e-4); for (size_t j = 0; j < gmm.Dimensionality(); ++j) { BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j], oldgmm.Component(i).Mean()[j], 1e-3); for (size_t k = 0; k < gmm.Dimensionality(); ++k) BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k), oldgmm.Component(i).Covariance()(j, k), 1e-3); } } // Do it again, with a larger number of trials. gmm = oldgmm; // Retrain the model with the existing model as the starting point. 
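  // (The final 'true' argument is useExistingModel: Train() then skips its
  // usual initialization and starts EM from the current parameters.  Since
  // those parameters have already converged, another pass should barely move
  // them.)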
gmm.Train(data, 10, true);

  // Check for similarity.
  for (size_t i = 0; i < gmm.Gaussians(); ++i)
  {
    BOOST_REQUIRE_CLOSE(gmm.Weights()[i], oldgmm.Weights()[i], 1e-4);

    for (size_t j = 0; j < gmm.Dimensionality(); ++j)
    {
      BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j],
          oldgmm.Component(i).Mean()[j], 1e-3);

      for (size_t k = 0; k < gmm.Dimensionality(); ++k)
        BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k),
            oldgmm.Component(i).Covariance()(j, k), 1e-3);
    }
  }

  // Do it again, but using the overload of Train() that takes probabilities
  // into account.
  arma::vec probabilities(data.n_cols);
  probabilities.ones(); // Fill with ones.

  gmm = oldgmm;
  gmm.Train(data, probabilities, 1, true);

  // Check for similarity.
  for (size_t i = 0; i < gmm.Gaussians(); ++i)
  {
    BOOST_REQUIRE_CLOSE(gmm.Weights()[i], oldgmm.Weights()[i], 1e-4);

    for (size_t j = 0; j < gmm.Dimensionality(); ++j)
    {
      BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j],
          oldgmm.Component(i).Mean()[j], 1e-3);

      for (size_t k = 0; k < gmm.Dimensionality(); ++k)
        BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k),
            oldgmm.Component(i).Covariance()(j, k), 1e-3);
    }
  }

  // One more time, with multiple trials.
  gmm = oldgmm;
  gmm.Train(data, probabilities, 10, true);

  // Check for similarity.
  for (size_t i = 0; i < gmm.Gaussians(); ++i)
  {
    BOOST_REQUIRE_CLOSE(gmm.Weights()[i], oldgmm.Weights()[i], 1e-4);

    for (size_t j = 0; j < gmm.Dimensionality(); ++j)
    {
      BOOST_REQUIRE_CLOSE(gmm.Component(i).Mean()[j],
          oldgmm.Component(i).Mean()[j], 1e-3);

      for (size_t k = 0; k < gmm.Dimensionality(); ++k)
        BOOST_REQUIRE_CLOSE(gmm.Component(i).Covariance()(j, k),
            oldgmm.Component(i).Covariance()(j, k), 1e-3);
    }
  }
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/gradient_descent_test.cpp
/**
 * @file gradient_descent_test.cpp
 * @author Sumedh Ghaisas
 *
 * Test file for Gradient Descent optimizer.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/gradient_descent/gradient_descent.hpp>
#include <mlpack/core/optimizers/gradient_descent/test_function.hpp>
#include <mlpack/core/optimizers/lbfgs/test_functions.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace std;
using namespace arma;
using namespace mlpack;
using namespace mlpack::optimization;
using namespace mlpack::optimization::test;

BOOST_AUTO_TEST_SUITE(GradientDescentTest);

BOOST_AUTO_TEST_CASE(SimpleGDTestFunction)
{
  GDTestFunction f;
  GradientDescent s(f, 0.01, 5000000, 1e-9);

  arma::vec coordinates = f.GetInitialPoint();
  double result = s.Optimize(coordinates);

  BOOST_REQUIRE_SMALL(result, 1e-4);
  BOOST_REQUIRE_SMALL(coordinates[0], 1e-2);
  BOOST_REQUIRE_SMALL(coordinates[1], 1e-2);
  BOOST_REQUIRE_SMALL(coordinates[2], 1e-2);
}

BOOST_AUTO_TEST_CASE(RosenbrockTest)
{
  // Create the Rosenbrock function.
  RosenbrockFunction f;
  GradientDescent s(f, 0.001, 0, 1e-15);

  arma::mat coordinates = f.GetInitialPoint();
  double result = s.Optimize(coordinates);

  BOOST_REQUIRE_SMALL(result, 1e-10);
  for (size_t j = 0; j < 2; ++j)
    BOOST_REQUIRE_CLOSE(coordinates[j], (double) 1.0, 1e-3);
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/hmm_test.cpp
/**
 * @file hmm_test.cpp
 *
 * Test file for HMMs.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.
You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/hmm/hmm.hpp>
#include <mlpack/methods/gmm/gmm.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::hmm;
using namespace mlpack::distribution;
using namespace mlpack::gmm;

BOOST_AUTO_TEST_SUITE(HMMTest);

/**
 * We will use the simple case proposed by Russell and Norvig in Artificial
 * Intelligence: A Modern Approach, 2nd Edition, around p.549.
 */
BOOST_AUTO_TEST_CASE(SimpleDiscreteHMMTestViterbi)
{
  // We have two hidden states: rain/dry.  Two emission states: umbrella/no
  // umbrella.
  // In this example, the transition matrix is
  //  rain  dry
  // [[0.7  0.3]  rain
  //  [0.3  0.7]] dry
  // and the emission probability is
  //  rain dry
  // [[0.9 0.2]  umbrella
  //  [0.1 0.8]] no umbrella
  arma::vec initial("1 0"); // Default MATLAB initial states.
  arma::mat transition("0.7 0.3; 0.3 0.7");
  std::vector<DiscreteDistribution> emission(2);
  emission[0] = DiscreteDistribution(std::vector<arma::vec>{"0.9 0.1"});
  emission[1] = DiscreteDistribution(std::vector<arma::vec>{"0.2 0.8"});

  HMM<DiscreteDistribution> hmm(initial, transition, emission);

  // Now let's take a sequence and find what the most likely state is.
  // We'll use the sequence [U U N U U] (U = umbrella, N = no umbrella) like on
  // p. 547.
  arma::mat observation = "0 0 1 0 0";
  arma::Row<size_t> states;
  hmm.Predict(observation, states);

  // Check each state.
  BOOST_REQUIRE_EQUAL(states[0], 0); // Rain.
  BOOST_REQUIRE_EQUAL(states[1], 0); // Rain.
  BOOST_REQUIRE_EQUAL(states[2], 1); // No rain.
  BOOST_REQUIRE_EQUAL(states[3], 0); // Rain.
  BOOST_REQUIRE_EQUAL(states[4], 0); // Rain.
}

/**
 * This example is from Borodovsky & Ekisheva, p. 80-81.  It is just slightly
 * more complex.
 */
BOOST_AUTO_TEST_CASE(BorodovskyHMMTestViterbi)
{
  // Equally probable initial states.
  arma::vec initial(3);
  initial.fill(1.0 / 3.0);

  // Two hidden states: H (high GC content) and L (low GC content), as well as
  // a start state.
  arma::mat transition("0.0 0.0 0.0;"
                       "0.5 0.5 0.4;"
                       "0.5 0.5 0.6");
  // Four emission states: A, C, G, T.  Start state doesn't emit...
  std::vector<DiscreteDistribution> emission(3);
  emission[0] = DiscreteDistribution(
      std::vector<arma::vec>{"0.25 0.25 0.25 0.25"});
  emission[1] = DiscreteDistribution(
      std::vector<arma::vec>{"0.20 0.30 0.30 0.20"});
  emission[2] = DiscreteDistribution(
      std::vector<arma::vec>{"0.30 0.20 0.20 0.30"});

  HMM<DiscreteDistribution> hmm(initial, transition, emission);

  // GGCACTGAA.
  arma::mat observation("2 2 1 0 1 3 2 0 0");
  arma::Row<size_t> states;
  hmm.Predict(observation, states);

  // Most probable path is HHHLLLLLL.
  BOOST_REQUIRE_EQUAL(states[0], 1);
  BOOST_REQUIRE_EQUAL(states[1], 1);
  BOOST_REQUIRE_EQUAL(states[2], 1);
  BOOST_REQUIRE_EQUAL(states[3], 2);
  // This could actually be one of two states (equal probability).
  BOOST_REQUIRE((states[4] == 1) || (states[4] == 2));
  BOOST_REQUIRE_EQUAL(states[5], 2);
  // This could also be one of two states.
  BOOST_REQUIRE((states[6] == 1) || (states[6] == 2));
  BOOST_REQUIRE_EQUAL(states[7], 2);
  BOOST_REQUIRE_EQUAL(states[8], 2);
}

/**
 * Ensure that the forward-backward algorithm is correct.
 */
BOOST_AUTO_TEST_CASE(ForwardBackwardTwoState)
{
  arma::mat obs("3 3 2 1 1 1 1 3 3 1");

  // The values used for the initial distribution here don't entirely make
  // sense.  I am not sure how the output came from hmmdecode(), and the
  // documentation below doesn't completely say.  It seems like maybe the
  // transition matrix needs to be transposed and the results recalculated,
  // but I am not certain.
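  // In mlpack the transition matrix is column-stochastic: transition(i, j) is
  // the probability of moving to state i given that the previous state was j.
  // That is the transpose of MATLAB's row-stochastic convention, which is the
  // most likely source of the discrepancy described above.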
arma::vec initial("0.1 0.4"); arma::mat transition("0.1 0.9; 0.4 0.6"); std::vector emis(2); emis[0] = DiscreteDistribution(std::vector{"0.85 0.15 0.00 0.00"}); emis[1] = DiscreteDistribution(std::vector{"0.00 0.00 0.50 0.50"}); HMM hmm(initial, transition, emis); // Now check we are getting the same results as MATLAB for this sequence. arma::mat stateProb; arma::mat forwardProb; arma::mat backwardProb; arma::vec scales; const double log = hmm.Estimate(obs, stateProb, forwardProb, backwardProb, scales); // All values obtained from MATLAB hmmdecode(). BOOST_REQUIRE_CLOSE(log, -23.4349, 1e-3); BOOST_REQUIRE_SMALL(stateProb(0, 0), 1e-5); BOOST_REQUIRE_CLOSE(stateProb(1, 0), 1.0, 1e-5); BOOST_REQUIRE_SMALL(stateProb(0, 1), 1e-5); BOOST_REQUIRE_CLOSE(stateProb(1, 1), 1.0, 1e-5); BOOST_REQUIRE_SMALL(stateProb(0, 2), 1e-5); BOOST_REQUIRE_CLOSE(stateProb(1, 2), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(stateProb(0, 3), 1.0, 1e-5); BOOST_REQUIRE_SMALL(stateProb(1, 3), 1e-5); BOOST_REQUIRE_CLOSE(stateProb(0, 4), 1.0, 1e-5); BOOST_REQUIRE_SMALL(stateProb(1, 4), 1e-5); BOOST_REQUIRE_CLOSE(stateProb(0, 5), 1.0, 1e-5); BOOST_REQUIRE_SMALL(stateProb(1, 5), 1e-5); BOOST_REQUIRE_CLOSE(stateProb(0, 6), 1.0, 1e-5); BOOST_REQUIRE_SMALL(stateProb(1, 6), 1e-5); BOOST_REQUIRE_SMALL(stateProb(0, 7), 1e-5); BOOST_REQUIRE_CLOSE(stateProb(1, 7), 1.0, 1e-5); BOOST_REQUIRE_SMALL(stateProb(0, 8), 1e-5); BOOST_REQUIRE_CLOSE(stateProb(1, 8), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(stateProb(0, 9), 1.0, 1e-5); BOOST_REQUIRE_SMALL(stateProb(1, 9), 1e-5); } /** * In this example we try to estimate the transmission and emission matrices * based on some observations. We use the simplest possible model. */ BOOST_AUTO_TEST_CASE(SimplestBaumWelchDiscreteHMM) { // Don't yet require a useful distribution. 1 state, 1 emission. HMM hmm(1, DiscreteDistribution(1)); std::vector observations; // Different lengths for each observation sequence. observations.push_back("0 0 0 0 0 0 0 0"); // 8 zeros. observations.push_back("0 0 0 0 0 0 0"); // 7 zeros. observations.push_back("0 0 0 0 0 0 0 0 0 0 0 0"); // 12 zeros. observations.push_back("0 0 0 0 0 0 0 0 0 0"); // 10 zeros. hmm.Train(observations); BOOST_REQUIRE_CLOSE(hmm.Initial()[0], 1.0, 1e-5); BOOST_REQUIRE_CLOSE(hmm.Emission()[0].Probability("0"), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(hmm.Transition()(0, 0), 1.0, 1e-5); } /** * A slightly more complex model to estimate. */ BOOST_AUTO_TEST_CASE(SimpleBaumWelchDiscreteHMM) { HMM hmm(1, 2); // 1 state, 2 emissions. // Randomize the emission matrix. hmm.Emission()[0].Probabilities() = arma::randu(2); hmm.Emission()[0].Probabilities() /= accu(hmm.Emission()[0].Probabilities()); // P(each emission) = 0.5. // I've been careful to make P(first emission = 0) = P(first emission = 1). 
std::vector observations; observations.push_back("0 1 0 1 0 1 0 1 0 1 0 1"); observations.push_back("0 0 0 0 0 0 1 1 1 1 1 1"); observations.push_back("1 1 1 1 1 1 0 0 0 0 0 0"); observations.push_back("1 1 1 0 0 0 1 1 1 0 0 0"); observations.push_back("0 0 1 1 0 0 0 0 1 1 1 1"); observations.push_back("1 1 1 0 0 0 1 1 1 0 0 0"); observations.push_back("0 1 0 1 0 1 0 1 0 1 0 1"); observations.push_back("0 0 0 0 0 0 1 1 1 1 1 1"); observations.push_back("1 1 1 1 1 1 0 0 0 0 0 0"); observations.push_back("1 1 1 0 0 0 1 1 1 0 0 0"); observations.push_back("0 0 1 1 0 0 0 0 1 1 1 1"); observations.push_back("1 1 1 0 0 0 1 1 1 0 0 0"); hmm.Train(observations); BOOST_REQUIRE_CLOSE(hmm.Emission()[0].Probability("0"), 0.5, 1e-5); BOOST_REQUIRE_CLOSE(hmm.Emission()[0].Probability("1"), 0.5, 1e-5); BOOST_REQUIRE_CLOSE(hmm.Transition()(0, 0), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(hmm.Initial()[0], 1.0, 1e-5); } /** * Increasing complexity, but still simple; 4 emissions, 2 states; the state can * be determined directly by the emission. */ BOOST_AUTO_TEST_CASE(SimpleBaumWelchDiscreteHMM_2) { HMM hmm(2, DiscreteDistribution(4)); // A little bit of obfuscation to the solution. hmm.Transition() = arma::mat("0.1 0.4; 0.9 0.6"); hmm.Emission()[0].Probabilities() = "0.85 0.15 0.00 0.00"; hmm.Emission()[1].Probabilities() = "0.00 0.00 0.50 0.50"; // True emission matrix: // [[0.4 0 ] // [0.6 0 ] // [0 0.2] // [0 0.8]] // True transmission matrix: // [[0.5 0.5] // [0.5 0.5]] // Generate observations randomly by hand. This is kinda ugly, but it works. std::vector observations; size_t obsNum = 250; // Number of observations. size_t obsLen = 500; // Number of elements in each observation. size_t stateZeroStarts = 0; // Number of times we start in state 0. for (size_t i = 0; i < obsNum; i++) { arma::mat observation(1, obsLen); size_t state = 0; size_t emission = 0; for (size_t obs = 0; obs < obsLen; obs++) { // See if state changed. double r = math::Random(); if (r <= 0.5) { if (obs == 0) ++stateZeroStarts; state = 0; } else { state = 1; } // Now set the observation. r = math::Random(); switch (state) { // case 0 is not possible. case 0: if (r <= 0.4) emission = 0; else emission = 1; break; case 1: if (r <= 0.2) emission = 2; else emission = 3; break; } observation(0, obs) = emission; } observations.push_back(observation); } hmm.Train(observations); // Calculate true probability of class 0 at the start. double prob = double(stateZeroStarts) / observations.size(); // Only require 2.5% tolerance, because this is a little fuzzier. BOOST_REQUIRE_CLOSE(hmm.Initial()[0], prob, 2.5); BOOST_REQUIRE_CLOSE(hmm.Initial()[1], 1.0 - prob, 2.5); BOOST_REQUIRE_CLOSE(hmm.Transition()(0, 0), 0.5, 2.5); BOOST_REQUIRE_CLOSE(hmm.Transition()(1, 0), 0.5, 2.5); BOOST_REQUIRE_CLOSE(hmm.Transition()(0, 1), 0.5, 2.5); BOOST_REQUIRE_CLOSE(hmm.Transition()(1, 1), 0.5, 2.5); BOOST_REQUIRE_CLOSE(hmm.Emission()[0].Probability("0"), 0.4, 3.0); BOOST_REQUIRE_CLOSE(hmm.Emission()[0].Probability("1"), 0.6, 3.0); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Probability("2"), 2.5); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Probability("3"), 2.5); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Probability("0"), 2.5); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Probability("1"), 2.5); BOOST_REQUIRE_CLOSE(hmm.Emission()[1].Probability("2"), 0.2, 3.0); BOOST_REQUIRE_CLOSE(hmm.Emission()[1].Probability("3"), 0.8, 3.0); } BOOST_AUTO_TEST_CASE(DiscreteHMMLabeledTrainTest) { // Generate a random Markov model with 3 hidden states and 6 observations. 
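  // The sampling loop below uses inverse transform sampling for the state
  // transitions: draw u ~ U(0, 1), then walk down the current state's column
  // of the transition matrix until the cumulative probability reaches u.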
arma::mat transition;
  std::vector<DiscreteDistribution> emission(3);
  transition.randu(3, 3);
  emission[0].Probabilities() = arma::randu<arma::vec>(6);
  emission[0].Probabilities() /= accu(emission[0].Probabilities());
  emission[1].Probabilities() = arma::randu<arma::vec>(6);
  emission[1].Probabilities() /= accu(emission[1].Probabilities());
  emission[2].Probabilities() = arma::randu<arma::vec>(6);
  emission[2].Probabilities() /= accu(emission[2].Probabilities());

  // Normalize so that we have a correct transition matrix.
  for (size_t col = 0; col < 3; col++)
    transition.col(col) /= accu(transition.col(col));

  // Now generate sequences.
  size_t obsNum = 250;
  size_t obsLen = 800;

  std::vector<arma::mat> observations(obsNum);
  std::vector<arma::Row<size_t> > states(obsNum);

  for (size_t n = 0; n < obsNum; n++)
  {
    observations[n].set_size(1, obsLen);
    states[n].set_size(obsLen);

    // Random starting state.
    states[n][0] = math::RandInt(3);

    // Random starting observation.
    observations[n].col(0) = emission[states[n][0]].Random();

    // Now the rest of the observations.
    for (size_t t = 1; t < obsLen; t++)
    {
      // Choose random number for state transition.
      double state = math::Random();

      // Decide next state.
      double sumProb = 0;
      for (size_t st = 0; st < 3; st++)
      {
        sumProb += transition(st, states[n][t - 1]);
        if (sumProb >= state)
        {
          states[n][t] = st;
          break;
        }
      }

      // Decide observation.
      observations[n].col(t) = emission[states[n][t]].Random();
    }
  }

  // Now that our data is generated, we give the HMM the labeled data to train
  // on.
  HMM<DiscreteDistribution> hmm(3, DiscreteDistribution(6));

  hmm.Train(observations, states);

  // Make sure the initial weights are fine.  They should be equal (or close).
  for (size_t row = 0; row < hmm.Transition().n_rows; ++row)
    BOOST_REQUIRE_SMALL(hmm.Initial()[row] - 1.0 / 3.0, 0.1);

  // We can't use % tolerance here because percent error increases as the
  // actual value gets very small.  So, instead, we just ensure that every
  // value is no more than 0.025 away from the actual value.
  for (size_t row = 0; row < hmm.Transition().n_rows; row++)
    for (size_t col = 0; col < hmm.Transition().n_cols; col++)
      BOOST_REQUIRE_SMALL(hmm.Transition()(row, col) - transition(row, col),
          0.025);

  for (size_t col = 0; col < hmm.Emission().size(); col++)
  {
    for (size_t row = 0; row < hmm.Emission()[col].Probabilities().n_elem;
        row++)
    {
      arma::vec obs(1);
      obs[0] = row;
      BOOST_REQUIRE_SMALL(hmm.Emission()[col].Probability(obs) -
          emission[col].Probability(obs), 0.07);
    }
  }
}

/**
 * Make sure the Generate() function works for a uniformly distributed HMM;
 * we'll take many samples just to make sure.
 */
BOOST_AUTO_TEST_CASE(DiscreteHMMSimpleGenerateTest)
{
  // Very simple HMM.  4 emissions with equal probability and 2 states with
  // equal probability.
  HMM<DiscreteDistribution> hmm(2, DiscreteDistribution(4));
  hmm.Initial() = arma::ones<arma::vec>(2) / 2.0;
  hmm.Transition() = arma::ones<arma::mat>(2, 2) / 2.0;

  // Now generate a really, really long sequence.
  arma::mat dataSeq;
  arma::Row<size_t> stateSeq;
  hmm.Generate(100000, dataSeq, stateSeq);

  // Now find the empirical probabilities of each state.
  arma::vec emissionProb(4);
  arma::vec stateProb(2);
  emissionProb.zeros();
  stateProb.zeros();
  for (size_t i = 0; i < 100000; i++)
  {
    // Round to the nearest emission symbol before indexing.
    emissionProb[(size_t) (dataSeq.col(i)[0] + 0.5)]++;
    stateProb[stateSeq[i]]++;
  }

  // Normalize so these are probabilities.
  emissionProb /= accu(emissionProb);
  stateProb /= accu(stateProb);

  // Now check that the probabilities are right.  2% tolerance.
BOOST_REQUIRE_CLOSE(emissionProb[0], 0.25, 2.0); BOOST_REQUIRE_CLOSE(emissionProb[1], 0.25, 2.0); BOOST_REQUIRE_CLOSE(emissionProb[2], 0.25, 2.0); BOOST_REQUIRE_CLOSE(emissionProb[3], 0.25, 2.0); BOOST_REQUIRE_CLOSE(stateProb[0], 0.50, 2.0); BOOST_REQUIRE_CLOSE(stateProb[1], 0.50, 2.0); } /** * More complex test for Generate(). */ BOOST_AUTO_TEST_CASE(DiscreteHMMGenerateTest) { // 6 emissions, 4 states. Random transition and emission probability. arma::vec initial("1 0 0 0"); arma::mat transition(4, 4); std::vector emission(4); emission[0].Probabilities() = arma::randu(6); emission[0].Probabilities() /= accu(emission[0].Probabilities()); emission[1].Probabilities() = arma::randu(6); emission[1].Probabilities() /= accu(emission[1].Probabilities()); emission[2].Probabilities() = arma::randu(6); emission[2].Probabilities() /= accu(emission[2].Probabilities()); emission[3].Probabilities() = arma::randu(6); emission[3].Probabilities() /= accu(emission[3].Probabilities()); transition.randu(); // Normalize matrix. for (size_t col = 0; col < 4; col++) transition.col(col) /= accu(transition.col(col)); // Create HMM object. HMM hmm(initial, transition, emission); // We'll create a bunch of sequences. int numSeq = 400; int numObs = 3000; std::vector sequences(numSeq); std::vector > states(numSeq); for (int i = 0; i < numSeq; i++) { // Random starting state. size_t startState = math::RandInt(4); hmm.Generate(numObs, sequences[i], states[i], startState); } // Now we will calculate the full probabilities. HMM hmm2(4, 6); hmm2.Train(sequences, states); // Check that training gives the same result. Exact tolerance of 0.005. for (size_t row = 0; row < 4; row++) for (size_t col = 0; col < 4; col++) BOOST_REQUIRE_SMALL(hmm.Transition()(row, col) - hmm2.Transition()(row, col), 0.005); for (size_t row = 0; row < 6; row++) { arma::vec obs(1); obs[0] = row; for (size_t col = 0; col < 4; col++) { BOOST_REQUIRE_SMALL(hmm.Emission()[col].Probability(obs) - hmm2.Emission()[col].Probability(obs), 0.005); } } } BOOST_AUTO_TEST_CASE(DiscreteHMMLogLikelihoodTest) { // Create a simple HMM with three states and four emissions. arma::vec initial("0.5 0.2 0.3"); // Default MATLAB initial states. arma::mat transition("0.5 0.0 0.1;" "0.2 0.6 0.2;" "0.3 0.4 0.7"); std::vector emission(3); emission[0].Probabilities() = "0.75 0.25 0.00 0.00"; emission[1].Probabilities() = "0.00 0.25 0.25 0.50"; emission[2].Probabilities() = "0.10 0.40 0.40 0.10"; HMM hmm(initial, transition, emission); // Now generate some sequences and check that the log-likelihood is the same // as MATLAB gives for this HMM. BOOST_REQUIRE_CLOSE(hmm.LogLikelihood("0 1 2 3"), -4.9887223949, 1e-5); BOOST_REQUIRE_CLOSE(hmm.LogLikelihood("1 2 0 0"), -6.0288487077, 1e-5); BOOST_REQUIRE_CLOSE(hmm.LogLikelihood("3 3 3 3"), -5.5544000018, 1e-5); BOOST_REQUIRE_CLOSE(hmm.LogLikelihood("0 2 2 1 2 3 0 0 1 3 1 0 0 3 1 2 2"), -24.51556128368, 1e-5); } /** * A simple test to make sure HMMs with Gaussian output distributions work. */ BOOST_AUTO_TEST_CASE(GaussianHMMSimpleTest) { // We'll have two Gaussians, far away from each other, one corresponding to // each state. // E(0) ~ N([ 5.0 5.0], eye(2)). // E(1) ~ N([-5.0 -5.0], eye(2)). // The transition matrix is simple: // T = [[0.75 0.25] // [0.25 0.75]] GaussianDistribution g1("5.0 5.0", "1.0 0.0; 0.0 1.0"); GaussianDistribution g2("-5.0 -5.0", "1.0 0.0; 0.0 1.0"); arma::vec initial("1 0"); // Default MATLAB initial states. 
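  // The two means are separated by ||[5, 5] - [-5, -5]|| = 10 * sqrt(2), or
  // roughly 14 standard deviations under unit covariance, so the emission
  // likelihoods dominate the transition probabilities and prediction should
  // recover the true state sequence essentially without error.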
arma::mat transition("0.75 0.25; 0.25 0.75"); std::vector emission; emission.push_back(g1); emission.push_back(g2); HMM hmm(initial, transition, emission); // Now, generate some sequences. arma::mat observations(2, 1000); arma::Row classes(1000); // 1000-observations sequence. classes[0] = 0; observations.col(0) = g1.Random(); for (size_t i = 1; i < 1000; i++) { double randValue = math::Random(); if (randValue > 0.75) // Then we change state. classes[i] = (classes[i - 1] + 1) % 2; else classes[i] = classes[i - 1]; if (classes[i] == 0) observations.col(i) = g1.Random(); else observations.col(i) = g2.Random(); } // Now predict the sequence. arma::Row predictedClasses; arma::mat stateProb; hmm.Predict(observations, predictedClasses); hmm.Estimate(observations, stateProb); // Check that each prediction is right. for (size_t i = 0; i < 1000; i++) { BOOST_REQUIRE_EQUAL(predictedClasses[i], classes[i]); // The probability of the wrong class should be infinitesimal. BOOST_REQUIRE_SMALL(stateProb((classes[i] + 1) % 2, i), 0.001); } } /** * Ensure that Gaussian HMMs can be trained properly, for the labeled training * case and also for the unlabeled training case. */ BOOST_AUTO_TEST_CASE(GaussianHMMTrainTest) { // Four emission Gaussians and three internal states. The goal is to estimate // the transition matrix correctly, and each distribution correctly. std::vector emission; emission.push_back(GaussianDistribution("0.0 0.0 0.0", "1.0 0.2 0.2;" "0.2 1.5 0.0;" "0.2 0.0 1.1")); emission.push_back(GaussianDistribution("2.0 1.0 5.0", "0.7 0.3 0.0;" "0.3 2.6 0.0;" "0.0 0.0 1.0")); emission.push_back(GaussianDistribution("5.0 0.0 0.5", "1.0 0.0 0.0;" "0.0 1.0 0.0;" "0.0 0.0 1.0")); arma::mat transition("0.3 0.5 0.7;" "0.3 0.4 0.1;" "0.4 0.1 0.2"); // Now generate observations. std::vector observations(100); std::vector > states(100); for (size_t obs = 0; obs < 100; obs++) { observations[obs].set_size(3, 1000); states[obs].set_size(1000); // Always start in state zero. states[obs][0] = 0; observations[obs].col(0) = emission[0].Random(); for (size_t t = 1; t < 1000; t++) { // Choose the state. double randValue = math::Random(); double probSum = 0; for (size_t state = 0; state < 3; state++) { probSum += transition(state, states[obs][t - 1]); if (probSum >= randValue) { states[obs][t] = state; break; } } // Now choose the emission. observations[obs].col(t) = emission[states[obs][t]].Random(); } } // Now that the data is generated, train the HMM. HMM hmm(3, GaussianDistribution(3)); hmm.Train(observations, states); // Check initial weights. BOOST_REQUIRE_CLOSE(hmm.Initial()[0], 1.0, 1e-5); BOOST_REQUIRE_SMALL(hmm.Initial()[1], 1e-3); BOOST_REQUIRE_SMALL(hmm.Initial()[2], 1e-3); // We use an absolute tolerance of 0.01 for the transition matrices. // Check that the transition matrix is correct. for (size_t row = 0; row < 3; row++) for (size_t col = 0; col < 3; col++) BOOST_REQUIRE_SMALL(transition(row, col) - hmm.Transition()(row, col), 0.01); // Check that each distribution is correct. for (size_t dist = 0; dist < 3; dist++) { // Check that the mean is correct. Absolute tolerance of 0.04. for (size_t dim = 0; dim < 3; dim++) BOOST_REQUIRE_SMALL(hmm.Emission()[dist].Mean()(dim) - emission[dist].Mean()(dim), 0.04); // Check that the covariance is correct. Absolute tolerance of 0.075. 
for (size_t row = 0; row < 3; row++)
      for (size_t col = 0; col < 3; col++)
        BOOST_REQUIRE_SMALL(hmm.Emission()[dist].Covariance()(row, col) -
            emission[dist].Covariance()(row, col), 0.075);
  }

  // Now let's try it all again, but this time, unlabeled.  Everything will
  // fail if we don't have a decent guess at the Gaussians, so we'll take a
  // "poor" guess at it ourselves.  I won't use K-Means because we can't
  // afford to add the instability of that to our test.  We'll leave the
  // covariances as the identity.
  HMM<GaussianDistribution> hmm2(3, GaussianDistribution(3));
  hmm2.Emission()[0].Mean() = "0.3 -0.2 0.1"; // Actual: [0 0 0].
  hmm2.Emission()[1].Mean() = "1.0 1.4 3.2";  // Actual: [2 1 5].
  hmm2.Emission()[2].Mean() = "3.1 -0.2 6.1"; // Actual: [5 0 0.5].

  // We'll only use 20 observation sequences to try and keep training time
  // shorter.
  observations.resize(20);

  hmm2.Train(observations);

  BOOST_REQUIRE_CLOSE(hmm2.Initial()[0], 1.0, 0.1);
  BOOST_REQUIRE_SMALL(hmm2.Initial()[1], 0.05);
  BOOST_REQUIRE_SMALL(hmm2.Initial()[2], 0.05);

  // The tolerances are increased because there is more error in unlabeled
  // training; we use an absolute tolerance of 0.03 for the transition
  // matrices.
  // Check that the transition matrix is correct.
  for (size_t row = 0; row < 3; row++)
    for (size_t col = 0; col < 3; col++)
      BOOST_REQUIRE_SMALL(transition(row, col) - hmm2.Transition()(row, col),
          0.03);

  // Check that each distribution is correct.
  for (size_t dist = 0; dist < 3; dist++)
  {
    // Check that the mean is correct.  Absolute tolerance of 0.09.
    for (size_t dim = 0; dim < 3; dim++)
      BOOST_REQUIRE_SMALL(hmm2.Emission()[dist].Mean()(dim) -
          emission[dist].Mean()(dim), 0.09);

    // Check that the covariance is correct.  Absolute tolerance of 0.14.
    for (size_t row = 0; row < 3; row++)
      for (size_t col = 0; col < 3; col++)
        BOOST_REQUIRE_SMALL(hmm2.Emission()[dist].Covariance()(row, col) -
            emission[dist].Covariance()(row, col), 0.14);
  }
}

/**
 * Make sure that a random sequence generated by a Gaussian HMM fits the
 * distribution correctly.
 */
BOOST_AUTO_TEST_CASE(GaussianHMMGenerateTest)
{
  // Our distribution will have three two-dimensional output Gaussians.
  HMM<GaussianDistribution> hmm(3, GaussianDistribution(2));
  hmm.Transition() = arma::mat("0.4 0.6 0.8; 0.2 0.2 0.1; 0.4 0.2 0.1");
  hmm.Emission()[0] = GaussianDistribution("0.0 0.0", "1.0 0.0; 0.0 1.0");
  hmm.Emission()[1] = GaussianDistribution("2.0 2.0", "1.0 0.5; 0.5 1.2");
  hmm.Emission()[2] = GaussianDistribution("-2.0 1.0", "2.0 0.1; 0.1 1.0");

  // Now we will generate a long sequence.
  std::vector<arma::mat> observations(1);
  std::vector<arma::Row<size_t> > states(1);

  // Start in state 1 (no reason).
  hmm.Generate(10000, observations[0], states[0], 1);
  HMM<GaussianDistribution> hmm2(3, GaussianDistribution(2));

  // Now estimate the HMM from the generated sequence.
  hmm2.Train(observations, states);

  // Check that the estimated matrices are the same.
  for (size_t row = 0; row < 3; row++)
    for (size_t col = 0; col < 3; col++)
      BOOST_REQUIRE_SMALL(hmm.Transition()(row, col) -
          hmm2.Transition()(row, col), 0.04);

  // Check that each Gaussian is the same.
  for (size_t em = 0; em < 3; em++)
  {
    // Check that the mean is the same.
    BOOST_REQUIRE_SMALL(hmm.Emission()[em].Mean()(0) -
        hmm2.Emission()[em].Mean()(0), 0.1);
    BOOST_REQUIRE_SMALL(hmm.Emission()[em].Mean()(1) -
        hmm2.Emission()[em].Mean()(1), 0.1);

    // Check that the covariances are the same.
BOOST_REQUIRE_SMALL(hmm.Emission()[em].Covariance()(0, 0) - hmm2.Emission()[em].Covariance()(0, 0), 0.2); BOOST_REQUIRE_SMALL(hmm.Emission()[em].Covariance()(0, 1) - hmm2.Emission()[em].Covariance()(0, 1), 0.2); BOOST_REQUIRE_SMALL(hmm.Emission()[em].Covariance()(1, 0) - hmm2.Emission()[em].Covariance()(1, 0), 0.2); BOOST_REQUIRE_SMALL(hmm.Emission()[em].Covariance()(1, 1) - hmm2.Emission()[em].Covariance()(1, 1), 0.2); } } /** * Test that HMMs work with Gaussian mixture models. We'll try putting in a * simple model by hand and making sure that prediction of observation sequences * works correctly. */ BOOST_AUTO_TEST_CASE(GMMHMMPredictTest) { // We will use two GMMs; one with two components and one with three. std::vector gmms(2); gmms[0] = GMM(2, 2); gmms[0].Weights() = arma::vec("0.75 0.25"); // N([2.25 3.10], [1.00 0.20; 0.20 0.89]) gmms[0].Component(0) = GaussianDistribution("4.25 3.10", "1.00 0.20; 0.20 0.89"); // N([4.10 1.01], [1.00 0.00; 0.00 1.01]) gmms[0].Component(1) = GaussianDistribution("7.10 5.01", "1.00 0.00; 0.00 1.01"); gmms[1] = GMM(3, 2); gmms[1].Weights() = arma::vec("0.4 0.2 0.4"); gmms[1].Component(0) = GaussianDistribution("-3.00 -6.12", "1.00 0.00; 0.00 1.00"); gmms[1].Component(1) = GaussianDistribution("-4.25 -7.12", "1.50 0.60; 0.60 1.20"); gmms[1].Component(2) = GaussianDistribution("-6.15 -2.00", "1.00 0.80; 0.80 1.00"); // Default MATLAB initial probabilities. arma::vec initial("1 0"); // Transition matrix. arma::mat trans("0.30 0.50;" "0.70 0.50"); // Now build the model. HMM hmm(initial, trans, gmms); // Make a sequence of observations. arma::mat observations(2, 1000); arma::Row states(1000); states[0] = 0; observations.col(0) = gmms[0].Random(); for (size_t i = 1; i < 1000; i++) { double randValue = math::Random(); if (randValue <= trans(0, states[i - 1])) states[i] = 0; else states[i] = 1; observations.col(i) = gmms[states[i]].Random(); } // Run the prediction. arma::Row predictions; hmm.Predict(observations, predictions); // Check that the predictions were correct. for (size_t i = 0; i < 1000; i++) BOOST_REQUIRE_EQUAL(predictions[i], states[i]); } /** * Test that GMM-based HMMs can train on models correctly using labeled training * data. */ BOOST_AUTO_TEST_CASE(GMMHMMLabeledTrainingTest) { srand(time(NULL)); // We will use two GMMs; one with two components and one with three. std::vector gmms(2, GMM(2, 2)); gmms[0].Weights() = arma::vec("0.3 0.7"); // N([2.25 3.10], [1.00 0.20; 0.20 0.89]) gmms[0].Component(0) = GaussianDistribution("4.25 3.10", "1.00 0.20; 0.20 0.89"); // N([4.10 1.01], [1.00 0.00; 0.00 1.01]) gmms[0].Component(1) = GaussianDistribution("7.10 5.01", "1.00 0.00; 0.00 1.01"); gmms[1].Weights() = arma::vec("0.20 0.80"); gmms[1].Component(0) = GaussianDistribution("-3.00 -6.12", "1.00 0.00; 0.00 1.00"); gmms[1].Component(1) = GaussianDistribution("-4.25 -2.12", "1.50 0.60; 0.60 1.20"); // Transition matrix. arma::mat transMat("0.40 0.60;" "0.60 0.40"); // Make a sequence of observations. std::vector observations(5, arma::mat(2, 2500)); std::vector > states(5, arma::Row(2500)); for (size_t obs = 0; obs < 5; obs++) { states[obs][0] = 0; observations[obs].col(0) = gmms[0].Random(); for (size_t i = 1; i < 2500; i++) { double randValue = (double) rand() / (double) RAND_MAX; if (randValue <= transMat(0, states[obs][i - 1])) states[obs][i] = 0; else states[obs][i] = 1; observations[obs].col(i) = gmms[states[obs][i]].Random(); } } // Set up the GMM for training. HMM hmm(2, GMM(2, 2)); // Train the HMM. 
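  // (With labeled state sequences no Baum-Welch iteration is necessary: the
  // transition matrix is estimated from empirical state-transition counts,
  // and each state's GMM is fit only to the observations that carry that
  // state's label.)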
hmm.Train(observations, states); // Check the initial weights. The dataset was generated with 100% probability // of a sequence starting in state 0. BOOST_REQUIRE_CLOSE(hmm.Initial()[0], 1.0, 0.01); BOOST_REQUIRE_SMALL(hmm.Initial()[1], 0.01); // Check the results. Use absolute tolerances instead of percentages. BOOST_REQUIRE_SMALL(hmm.Transition()(0, 0) - transMat(0, 0), 0.03); BOOST_REQUIRE_SMALL(hmm.Transition()(0, 1) - transMat(0, 1), 0.03); BOOST_REQUIRE_SMALL(hmm.Transition()(1, 0) - transMat(1, 0), 0.03); BOOST_REQUIRE_SMALL(hmm.Transition()(1, 1) - transMat(1, 1), 0.03); // Now the emission probabilities (the GMMs). // We have to sort each GMM for comparison. arma::uvec sortedIndices = sort_index(hmm.Emission()[0].Weights()); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Weights()[sortedIndices[0]] - gmms[0].Weights()[0], 0.08); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Weights()[sortedIndices[1]] - gmms[0].Weights()[1], 0.08); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[0]).Mean()[0] - gmms[0].Component(0).Mean()[0], 0.15); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[0]).Mean()[1] - gmms[0].Component(0).Mean()[1], 0.15); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[1]).Mean()[0] - gmms[0].Component(1).Mean()[0], 0.15); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[1]).Mean()[1] - gmms[0].Component(1).Mean()[1], 0.15); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[0]). Covariance()(0, 0) - gmms[0].Component(0).Covariance()(0, 0), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[0]). Covariance()(0, 1) - gmms[0].Component(0).Covariance()(0, 1), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[0]). Covariance()(1, 0) - gmms[0].Component(0).Covariance()(1, 0), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[0]). Covariance()(1, 1) - gmms[0].Component(0).Covariance()(1, 1), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[1]). Covariance()(0, 0) - gmms[0].Component(1).Covariance()(0, 0), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[1]). Covariance()(0, 1) - gmms[0].Component(1).Covariance()(0, 1), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[1]). Covariance()(1, 0) - gmms[0].Component(1).Covariance()(1, 0), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[0].Component(sortedIndices[1]). Covariance()(1, 1) - gmms[0].Component(1).Covariance()(1, 1), 0.3); // Sort the GMM. sortedIndices = sort_index(hmm.Emission()[1].Weights()); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Weights()[sortedIndices[0]] - gmms[1].Weights()[0], 0.08); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Weights()[sortedIndices[1]] - gmms[1].Weights()[1], 0.08); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[0]).Mean()[0] - gmms[1].Component(0).Mean()[0], 0.15); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[0]).Mean()[1] - gmms[1].Component(0).Mean()[1], 0.15); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[1]).Mean()[0] - gmms[1].Component(1).Mean()[0], 0.15); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[1]).Mean()[1] - gmms[1].Component(1).Mean()[1], 0.15); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[0]). Covariance()(0, 0) - gmms[1].Component(0).Covariance()(0, 0), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[0]). 
Covariance()(0, 1) - gmms[1].Component(0).Covariance()(0, 1), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[0]). Covariance()(1, 0) - gmms[1].Component(0).Covariance()(1, 0), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[0]). Covariance()(1, 1) - gmms[1].Component(0).Covariance()(1, 1), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[1]). Covariance()(0, 0) - gmms[1].Component(1).Covariance()(0, 0), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[1]). Covariance()(0, 1) - gmms[1].Component(1).Covariance()(0, 1), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[1]). Covariance()(1, 0) - gmms[1].Component(1).Covariance()(1, 0), 0.3); BOOST_REQUIRE_SMALL(hmm.Emission()[1].Component(sortedIndices[1]). Covariance()(1, 1) - gmms[1].Component(1).Covariance()(1, 1), 0.3); } /** * Test saving and loading of GMM HMMs */ BOOST_AUTO_TEST_CASE(GMMHMMLoadSaveTest) { // Create a GMM HMM, save it, and load it. HMM hmm(3, GMM(4, 3)); for(size_t j = 0; j < hmm.Emission().size(); ++j) { hmm.Emission()[j].Weights().randu(); for (size_t i = 0; i < hmm.Emission()[j].Gaussians(); ++i) { hmm.Emission()[j].Component(i).Mean().randu(); arma::mat covariance = arma::randu( hmm.Emission()[j].Component(i).Covariance().n_rows, hmm.Emission()[j].Component(i).Covariance().n_cols); covariance *= covariance.t(); covariance += arma::eye(covariance.n_rows, covariance.n_cols); hmm.Emission()[j].Component(i).Covariance(std::move(covariance)); } } // Save the HMM. { std::ofstream ofs("test-hmm-save.xml"); boost::archive::xml_oarchive ar(ofs); ar << data::CreateNVP(hmm, "hmm"); } // Load the HMM. HMM hmm2(3, GMM(4, 3)); { std::ifstream ifs("test-hmm-save.xml"); boost::archive::xml_iarchive ar(ifs); ar >> data::CreateNVP(hmm2, "hmm"); } // Remove clutter. remove("test-hmm-save.xml"); for (size_t j = 0; j < hmm.Emission().size(); ++j) { BOOST_REQUIRE_EQUAL(hmm.Emission()[j].Gaussians(), hmm2.Emission()[j].Gaussians()); BOOST_REQUIRE_EQUAL(hmm.Emission()[j].Dimensionality(), hmm2.Emission()[j].Dimensionality()); for (size_t i = 0; i < hmm.Emission()[j].Dimensionality(); ++i) BOOST_REQUIRE_CLOSE(hmm.Emission()[j].Weights()[i], hmm2.Emission()[j].Weights()[i], 1e-3); for (size_t i = 0; i < hmm.Emission()[j].Gaussians(); ++i) { for (size_t l = 0; l < hmm.Emission()[j].Dimensionality(); ++l) { BOOST_REQUIRE_CLOSE(hmm.Emission()[j].Component(i).Mean()[l], hmm2.Emission()[j].Component(i).Mean()[l], 1e-3); for (size_t k = 0; k < hmm.Emission()[j].Dimensionality(); ++k) { BOOST_REQUIRE_CLOSE(hmm.Emission()[j].Component(i).Covariance()(l,k), hmm2.Emission()[j].Component(i).Covariance()(l, k), 1e-3); } } } } } /** * Test saving and loading of Gaussian HMMs */ BOOST_AUTO_TEST_CASE(GaussianHMMLoadSaveTest) { // Create a Gaussian HMM, save it, and load it. HMM hmm(3, GaussianDistribution(2)); for(size_t j = 0; j < hmm.Emission().size(); ++j) { hmm.Emission()[j].Mean().randu(); arma::mat covariance = arma::randu( hmm.Emission()[j].Covariance().n_rows, hmm.Emission()[j].Covariance().n_cols); covariance *= covariance.t(); covariance += arma::eye(covariance.n_rows, covariance.n_cols); hmm.Emission()[j].Covariance(std::move(covariance)); } // Save the HMM. { std::ofstream ofs("test-hmm-save.xml"); boost::archive::xml_oarchive ar(ofs); ar << data::CreateNVP(hmm, "hmm"); } // Load the HMM. 
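  // (The archive objects live in their own scopes so that each stream is
  // flushed and closed before the file is reopened; boost::serialization
  // archives finish writing on destruction.  data::CreateNVP wraps the model
  // in a name-value pair so it can be stored and looked up by name in the
  // XML archive.)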
HMM hmm2(3, GaussianDistribution(2)); { std::ifstream ifs("test-hmm-save.xml"); boost::archive::xml_iarchive ar(ifs); ar >> data::CreateNVP(hmm2, "hmm"); } // Remove clutter. remove("test-hmm-save.xml"); for (size_t j = 0; j < hmm.Emission().size(); ++j) { BOOST_REQUIRE_EQUAL(hmm.Emission()[j].Dimensionality(), hmm2.Emission()[j].Dimensionality()); for (size_t i = 0; i < hmm.Emission()[j].Dimensionality(); ++i) { BOOST_REQUIRE_CLOSE(hmm.Emission()[j].Mean()[i], hmm2.Emission()[j].Mean()[i], 1e-3); for (size_t k = 0; k < hmm.Emission()[j].Dimensionality(); ++k) { BOOST_REQUIRE_CLOSE(hmm.Emission()[j].Covariance()(i,k), hmm2.Emission()[j].Covariance()(i, k), 1e-3); } } } } /** * Test saving and loading of Discrete HMMs */ BOOST_AUTO_TEST_CASE(DiscreteHMMLoadSaveTest) { // Create a Discrete HMM, save it, and load it. std::vector emission(4); emission[0].Probabilities() = arma::randu(6); emission[0].Probabilities() /= accu(emission[0].Probabilities()); emission[1].Probabilities() = arma::randu(6); emission[1].Probabilities() /= accu(emission[1].Probabilities()); emission[2].Probabilities() = arma::randu(6); emission[2].Probabilities() /= accu(emission[2].Probabilities()); emission[3].Probabilities() = arma::randu(6); emission[3].Probabilities() /= accu(emission[3].Probabilities()); // Create HMM object. HMM hmm(3, DiscreteDistribution(3)); for(size_t j = 0; j < hmm.Emission().size(); ++j) { hmm.Emission()[j].Probabilities() = arma::randu(3); hmm.Emission()[j].Probabilities() /= accu(emission[j].Probabilities()); } // Save the HMM. { std::ofstream ofs("test-hmm-save.xml"); boost::archive::xml_oarchive ar(ofs); ar << data::CreateNVP(hmm, "hmm"); } // Load the HMM. HMM hmm2(3, DiscreteDistribution(3)); { std::ifstream ifs("test-hmm-save.xml"); boost::archive::xml_iarchive ar(ifs); ar >> data::CreateNVP(hmm2, "hmm"); } // Remove clutter. remove("test-hmm-save.xml"); for (size_t j = 0; j < hmm.Emission().size(); ++j) for (size_t i = 0; i < hmm.Emission()[j].Probabilities().n_elem; ++i) BOOST_REQUIRE_CLOSE(hmm.Emission()[j].Probabilities()[i], hmm2.Emission()[j].Probabilities()[i], 1e-3); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/hoeffding_tree_test.cpp000066400000000000000000001077321315013601400227060ustar00rootroot00000000000000/** * @file hoeffding_tree_test.cpp * @author Ryan Curtin * * Test file for Hoeffding trees. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include "test_tools.hpp" #include "serialization.hpp" #include using namespace std; using namespace arma; using namespace mlpack; using namespace mlpack::math; using namespace mlpack::data; using namespace mlpack::tree; BOOST_AUTO_TEST_SUITE(HoeffdingTreeTest); BOOST_AUTO_TEST_CASE(GiniImpurityPerfectSimpleTest) { // Make a simple test for Gini impurity with one class. In this case it // should always be 0. We'll assemble the count matrix by hand. arma::Mat counts(2, 2); // 2 categories, 2 classes. counts(0, 0) = 10; // 10 points in category 0 with class 0. counts(0, 1) = 0; // 0 points in category 0 with class 1. counts(1, 0) = 12; // 12 points in category 1 with class 0. counts(1, 1) = 0; // 0 points in category 1 with class 1. // Since the split gets us nothing, there should be no gain. 
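  // As a cross-check of what Evaluate() computes, the Gini gain can be worked
  // out by hand from the counts matrix:
  //   gain = I(parent) - sum_k (n_k / n) * I(child_k), with I = 1 - sum_i p_i^2.
  // A minimal sketch of that computation follows; it is illustrative only,
  // not part of the original test, and it assumes rows index classes and
  // columns index the candidate split's categories.
  {
    const arma::mat c = arma::conv_to<arma::mat>::from(counts);
    const double n = arma::accu(c);
    const arma::vec classTotals = arma::sum(c, 1);  // Points per class.
    const arma::rowvec catTotals = arma::sum(c, 0); // Points per category.
    double gain = 1.0 - arma::accu(arma::square(classTotals / n));
    for (size_t k = 0; k < c.n_cols; ++k)
    {
      if (catTotals[k] > 0)
      {
        gain -= (catTotals[k] / n) *
            (1.0 - arma::accu(arma::square(c.col(k) / catTotals[k])));
      }
    }
    // The children have the same class mixture as the parent here, so the
    // gain is exactly 0, matching the assertion below.
    (void) gain;
  }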
BOOST_REQUIRE_SMALL(GiniImpurity::Evaluate(counts), 1e-10); } BOOST_AUTO_TEST_CASE(GiniImpurityImperfectSimpleTest) { // Make a simple test where a split will give us perfect classification. arma::Mat counts(2, 2); // 2 categories, 2 classes. counts(0, 0) = 10; // 10 points in category 0 with class 0. counts(1, 0) = 0; // 0 points in category 0 with class 1. counts(0, 1) = 0; // 0 points in category 1 with class 0. counts(1, 1) = 10; // 10 points in category 1 with class 1. // The impurity before the split should be 0.5^2 + 0.5^2 = 0.5. // The impurity after the split should be 0. // So the gain should be 0.5. BOOST_REQUIRE_CLOSE(GiniImpurity::Evaluate(counts), 0.5, 1e-5); } BOOST_AUTO_TEST_CASE(GiniImpurityBadSplitTest) { // Make a simple test where a split gets us nothing. arma::Mat counts(2, 2); counts(0, 0) = 10; counts(0, 1) = 10; counts(1, 0) = 5; counts(1, 1) = 5; BOOST_REQUIRE_SMALL(GiniImpurity::Evaluate(counts), 1e-10); } /** * A hand-crafted more difficult test for the Gini impurity, where four * categories and three classes are available. */ BOOST_AUTO_TEST_CASE(GiniImpurityThreeClassTest) { arma::Mat counts(3, 4); counts(0, 0) = 0; counts(1, 0) = 0; counts(2, 0) = 10; counts(0, 1) = 5; counts(1, 1) = 5; counts(2, 1) = 0; counts(0, 2) = 4; counts(1, 2) = 4; counts(2, 2) = 4; counts(0, 3) = 8; counts(1, 3) = 1; counts(2, 3) = 1; // The Gini impurity of the whole thing is: // (overall sum) 0.65193 - // (category 0) 0.40476 * 0 - // (category 1) 0.23810 * 0.5 - // (category 2) 0.28571 * 0.66667 - // (category 2) 0.23810 * 0.34 // = 0.26145 BOOST_REQUIRE_CLOSE(GiniImpurity::Evaluate(counts), 0.26145, 1e-3); } BOOST_AUTO_TEST_CASE(GiniImpurityZeroTest) { // When nothing has been seen, the gini impurity should be zero. arma::Mat counts = arma::zeros>(10, 10); BOOST_REQUIRE_SMALL(GiniImpurity::Evaluate(counts), 1e-10); } /** * Test that the range of Gini impurities is correct for a handful of class * sizes. */ BOOST_AUTO_TEST_CASE(GiniImpurityRangeTest) { BOOST_REQUIRE_CLOSE(GiniImpurity::Range(1), 0, 1e-5); BOOST_REQUIRE_CLOSE(GiniImpurity::Range(2), 0.5, 1e-5); BOOST_REQUIRE_CLOSE(GiniImpurity::Range(3), 0.66666667, 1e-5); BOOST_REQUIRE_CLOSE(GiniImpurity::Range(4), 0.75, 1e-5); BOOST_REQUIRE_CLOSE(GiniImpurity::Range(5), 0.8, 1e-5); BOOST_REQUIRE_CLOSE(GiniImpurity::Range(10), 0.9, 1e-5); BOOST_REQUIRE_CLOSE(GiniImpurity::Range(100), 0.99, 1e-5); BOOST_REQUIRE_CLOSE(GiniImpurity::Range(1000), 0.999, 1e-5); } BOOST_AUTO_TEST_CASE(InformationGainPerfectSimpleTest) { // Make a simple test for Gini impurity with one class. In this case it // should always be 0. We'll assemble the count matrix by hand. arma::Mat counts(2, 2); // 2 categories, 2 classes. counts(0, 0) = 10; // 10 points in category 0 with class 0. counts(0, 1) = 0; // 0 points in category 0 with class 1. counts(1, 0) = 12; // 12 points in category 1 with class 0. counts(1, 1) = 0; // 0 points in category 1 with class 1. // Since the split gets us nothing, there should be no gain. BOOST_REQUIRE_SMALL(InformationGain::Evaluate(counts), 1e-10); } BOOST_AUTO_TEST_CASE(InformationGainImperfectSimpleTest) { // Make a simple test where a split will give us perfect classification. arma::Mat counts(2, 2); // 2 categories, 2 classes. counts(0, 0) = 10; // 10 points in category 0 with class 0. counts(1, 0) = 0; // 0 points in category 0 with class 1. counts(0, 1) = 0; // 0 points in category 1 with class 0. counts(1, 1) = 10; // 10 points in category 1 with class 1. 
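  // Note the sign convention used by these checks: the "impurity" here is
  // sum_i p_i * log2(p_i), i.e. the negative of the entropy, so it is always
  // <= 0 and a pure node scores 0.  A quick hand computation of the parent
  // term for this table (a sketch for illustration only):
  {
    const double p0 = 10.0 / 20.0;
    const double p1 = 10.0 / 20.0;
    const double parentTerm = p0 * std::log2(p0) + p1 * std::log2(p1); // -1.
    // Both children are pure (term 0), so the expected gain is 0 - (-1) = 1.
    (void) parentTerm;
  }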
// The impurity before the split should be 0.5 log2(0.5) + 0.5 log2(0.5) = -1. // The impurity after the split should be 0. // So the gain should be 1. BOOST_REQUIRE_CLOSE(InformationGain::Evaluate(counts), 1.0, 1e-5); } BOOST_AUTO_TEST_CASE(InformationGainBadSplitTest) { // Make a simple test where a split gets us nothing. arma::Mat counts(2, 2); counts(0, 0) = 10; counts(0, 1) = 10; counts(1, 0) = 5; counts(1, 1) = 5; BOOST_REQUIRE_SMALL(InformationGain::Evaluate(counts), 1e-10); } /** * A hand-crafted more difficult test for the Gini impurity, where four * categories and three classes are available. */ BOOST_AUTO_TEST_CASE(InformationGainThreeClassTest) { arma::Mat counts(3, 4); counts(0, 0) = 0; counts(1, 0) = 0; counts(2, 0) = 10; counts(0, 1) = 5; counts(1, 1) = 5; counts(2, 1) = 0; counts(0, 2) = 4; counts(1, 2) = 4; counts(2, 2) = 4; counts(0, 3) = 8; counts(1, 3) = 1; counts(2, 3) = 1; // The Gini impurity of the whole thing is: // (overall sum) -1.5516 + // (category 0) 0.40476 * 0 - // (category 1) 0.23810 * -1 - // (category 2) 0.28571 * -1.5850 - // (category 3) 0.23810 * -0.92193 // = 0.64116649 BOOST_REQUIRE_CLOSE(InformationGain::Evaluate(counts), 0.64116649, 1e-5); } BOOST_AUTO_TEST_CASE(InformationGainZeroTest) { // When nothing has been seen, the information gain should be zero. arma::Mat counts = arma::zeros>(10, 10); BOOST_REQUIRE_SMALL(InformationGain::Evaluate(counts), 1e-10); } /** * Test that the range of information gains is correct for a handful of class * sizes. */ BOOST_AUTO_TEST_CASE(InformationGainRangeTest) { BOOST_REQUIRE_CLOSE(InformationGain::Range(1), 0, 1e-5); BOOST_REQUIRE_CLOSE(InformationGain::Range(2), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(InformationGain::Range(3), 1.5849625, 1e-5); BOOST_REQUIRE_CLOSE(InformationGain::Range(4), 2, 1e-5); BOOST_REQUIRE_CLOSE(InformationGain::Range(5), 2.32192809, 1e-5); BOOST_REQUIRE_CLOSE(InformationGain::Range(10), 3.32192809, 1e-5); BOOST_REQUIRE_CLOSE(InformationGain::Range(100), 6.64385619, 1e-5); BOOST_REQUIRE_CLOSE(InformationGain::Range(1000), 9.96578428, 1e-5); } /** * Feed the HoeffdingCategoricalSplit class many examples, all from the same * class, and verify that the majority class is correct. */ BOOST_AUTO_TEST_CASE(HoeffdingCategoricalSplitMajorityClassTest) { // Ten categories, three classes. HoeffdingCategoricalSplit split(10, 3); for (size_t i = 0; i < 500; ++i) { split.Train(mlpack::math::RandInt(0, 10), 1); BOOST_REQUIRE_EQUAL(split.MajorityClass(), 1); } } /** * A harder majority class example. */ BOOST_AUTO_TEST_CASE(HoeffdingCategoricalSplitHarderMajorityClassTest) { // Ten categories, three classes. HoeffdingCategoricalSplit split(10, 3); split.Train(mlpack::math::RandInt(0, 10), 1); for (size_t i = 0; i < 250; ++i) { split.Train(mlpack::math::RandInt(0, 10), 1); split.Train(mlpack::math::RandInt(0, 10), 2); BOOST_REQUIRE_EQUAL(split.MajorityClass(), 1); } } /** * Ensure that the fitness function is positive when we pass some data that * would result in an improvement if it was split. 
*/ BOOST_AUTO_TEST_CASE(HoeffdingCategoricalSplitEasyFitnessCheck) { HoeffdingCategoricalSplit split(5, 3); for (size_t i = 0; i < 100; ++i) split.Train(0, 0); for (size_t i = 0; i < 100; ++i) split.Train(1, 1); for (size_t i = 0; i < 100; ++i) split.Train(2, 1); for (size_t i = 0; i < 100; ++i) split.Train(3, 2); for (size_t i = 0; i < 100; ++i) split.Train(4, 2); double bestGain, secondBestGain; split.EvaluateFitnessFunction(bestGain, secondBestGain); BOOST_REQUIRE_GT(bestGain, 0.0); BOOST_REQUIRE_SMALL(secondBestGain, 1e-10); } /** * Ensure that the fitness function returns 0 (no improvement) when a split * would not get us any improvement. */ BOOST_AUTO_TEST_CASE(HoeffdingCategoricalSplitNoImprovementFitnessTest) { HoeffdingCategoricalSplit split(2, 2); // No training has yet happened, so a split would get us nothing. double bestGain, secondBestGain; split.EvaluateFitnessFunction(bestGain, secondBestGain); BOOST_REQUIRE_SMALL(bestGain, 1e-10); BOOST_REQUIRE_SMALL(secondBestGain, 1e-10); split.Train(0, 0); split.Train(1, 0); split.Train(0, 1); split.Train(1, 1); // Now, a split still gets us only 50% accuracy in each split bin. split.EvaluateFitnessFunction(bestGain, secondBestGain); BOOST_REQUIRE_SMALL(bestGain, 1e-10); BOOST_REQUIRE_SMALL(secondBestGain, 1e-10); } /** * Test that when we do split, we get reasonable split information. */ BOOST_AUTO_TEST_CASE(HoeffdingCategoricalSplitSplitTest) { HoeffdingCategoricalSplit split(3, 3); // 3 categories. // No training is necessary because we can just call CreateChildren(). data::DatasetInfo info(3); info.MapString("hello", 0); // Make dimension 0 categorical. HoeffdingCategoricalSplit::SplitInfo splitInfo(3); // Create the children. arma::Col childMajorities; split.Split(childMajorities, splitInfo); BOOST_REQUIRE_EQUAL(childMajorities.n_elem, 3); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(0), 0); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(1), 1); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(2), 2); } /** * If we feed the HoeffdingTree a ton of points of the same class, it should * not suggest that we split. */ BOOST_AUTO_TEST_CASE(HoeffdingTreeNoSplitTest) { // Make all dimensions categorical. data::DatasetInfo info(3); info.MapString("cat1", 0); info.MapString("cat2", 0); info.MapString("cat3", 0); info.MapString("cat4", 0); info.MapString("cat1", 1); info.MapString("cat2", 1); info.MapString("cat3", 1); info.MapString("cat1", 2); info.MapString("cat2", 2); HoeffdingTree<> split(info, 2, 0.95, 5000, 1); // Feed it samples. for (size_t i = 0; i < 1000; ++i) { // Create the test point. arma::Col testPoint(3); testPoint(0) = mlpack::math::RandInt(0, 4); testPoint(1) = mlpack::math::RandInt(0, 3); testPoint(2) = mlpack::math::RandInt(0, 2); split.Train(testPoint, 0); // Always label 0. BOOST_REQUIRE_EQUAL(split.SplitCheck(), 0); } } /** * If we feed the HoeffdingTree a ton of points of two different classes, it * should very clearly suggest that we split (eventually). */ BOOST_AUTO_TEST_CASE(HoeffdingTreeEasySplitTest) { // It'll be a two-dimensional dataset with two categories each. In the first // dimension, category 0 will only receive points with class 0, and category 1 // will only receive points with class 1. In the second dimension, all points // will have category 0 (so it is useless). data::DatasetInfo info(2); info.MapString("cat0", 0); info.MapString("cat1", 0); info.MapString("cat0", 1); HoeffdingTree<> tree(info, 2, 0.95, 5000, 5000 /* never check for splits */); // Feed samples from each class. 
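  // For reference, the split decision is governed by the Hoeffding bound: the
  // tree splits once bestGain - secondBestGain > epsilon, with
  //   epsilon = sqrt(R^2 * ln(1 / delta) / (2 * n)),
  // where R is the range of the gain (0.5 for Gini impurity with two
  // classes), delta = 1 - successProbability, and n is the number of samples
  // seen.  A quick sketch of the bound for this test's parameters
  // (illustrative only, not part of the original test):
  {
    const double R = 0.5;      // GiniImpurity::Range(2).
    const double delta = 0.05; // 1 - 0.95, from the constructor above.
    const double n = 1000.0;   // 500 samples per class are fed below.
    const double epsilon =
        std::sqrt(R * R * std::log(1.0 / delta) / (2.0 * n));
    // A perfect split has a gain gap of 0.5, far above epsilon (~0.019), so
    // SplitCheck() is expected to succeed.
    (void) epsilon;
  }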
for (size_t i = 0; i < 500; ++i) { tree.Train(arma::Col("0 0"), 0); tree.Train(arma::Col("1 0"), 1); } // Now it should be ready to split. BOOST_REQUIRE_EQUAL(tree.SplitCheck(), 2); BOOST_REQUIRE_EQUAL(tree.SplitDimension(), 0); } /** * If we force a success probability of 1, it should never split. */ BOOST_AUTO_TEST_CASE(HoeffdingTreeProbability1SplitTest) { // It'll be a two-dimensional dataset with two categories each. In the first // dimension, category 0 will only receive points with class 0, and category 1 // will only receive points with class 1. In the second dimension, all points // will have category 0 (so it is useless). data::DatasetInfo info(2); info.MapString("cat0", 0); info.MapString("cat1", 0); info.MapString("cat0", 1); HoeffdingTree<> split(info, 2, 1.0, 12000, 1 /* always check for splits */); // Feed samples from each class. for (size_t i = 0; i < 5000; ++i) { split.Train(arma::Col("0 0"), 0); split.Train(arma::Col("1 0"), 1); } // But because the success probability is 1, it should never split. BOOST_REQUIRE_EQUAL(split.SplitCheck(), 0); BOOST_REQUIRE_EQUAL(split.SplitDimension(), size_t(-1)); } /** * A slightly harder splitting problem: there are two features; one gives * perfect classification, another gives almost perfect classification (with 10% * error). Splits should occur after many samples. */ BOOST_AUTO_TEST_CASE(HoeffdingTreeAlmostPerfectSplit) { // Two categories and two dimensions. data::DatasetInfo info(2); info.MapString("cat0", 0); info.MapString("cat1", 0); info.MapString("cat0", 1); info.MapString("cat1", 1); HoeffdingTree<> split(info, 2, 0.95, 5000, 5000 /* never check for splits */); // Feed samples. for (size_t i = 0; i < 500; ++i) { if (mlpack::math::Random() <= 0.9) split.Train(arma::Col("0 0"), 0); else split.Train(arma::Col("1 0"), 0); if (mlpack::math::Random() <= 0.9) split.Train(arma::Col("1 1"), 1); else split.Train(arma::Col("0 1"), 1); } // Ensure that splitting should happen. BOOST_REQUIRE_EQUAL(split.SplitCheck(), 2); // Make sure that it's split on the correct dimension. BOOST_REQUIRE_EQUAL(split.SplitDimension(), 1); } /** * Test that the HoeffdingTree class will not split if the two features are * equally good. */ BOOST_AUTO_TEST_CASE(HoeffdingTreeEqualSplitTest) { // Two categories and two dimensions. data::DatasetInfo info(2); info.MapString("cat0", 0); info.MapString("cat1", 0); info.MapString("cat0", 1); info.MapString("cat1", 1); HoeffdingTree<> split(info, 2, 0.95, 5000, 1); // Feed samples. for (size_t i = 0; i < 500; ++i) { split.Train(arma::Col("0 0"), 0); split.Train(arma::Col("1 1"), 1); } // Ensure that splitting should not happen. BOOST_REQUIRE_EQUAL(split.SplitCheck(), 0); } // This is used in the next test. template using HoeffdingSizeTNumericSplit = HoeffdingNumericSplit; /** * Build a decision tree on a dataset with two meaningless dimensions and ensure * that it can properly classify all of the training points. (The dataset is * perfectly separable.) */ BOOST_AUTO_TEST_CASE(HoeffdingTreeSimpleDatasetTest) { DatasetInfo info(3); info.MapString("cat0", 0); info.MapString("cat1", 0); info.MapString("cat2", 0); info.MapString("cat3", 0); info.MapString("cat4", 0); info.MapString("cat5", 0); info.MapString("cat6", 0); info.MapString("cat0", 1); info.MapString("cat1", 1); info.MapString("cat2", 1); info.MapString("cat0", 2); info.MapString("cat1", 2); // Now generate data. 
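  // (Layout of the generated data: within each triple of columns, dimension 1
  // takes the fixed category 0, 2, or 1 and fully determines the label, while
  // dimensions 0 and 2 are random categories.  A correct tree should
  // therefore split exactly once, on dimension 1, which is asserted further
  // down.)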
arma::Mat dataset(3, 9000); arma::Row labels(9000); for (size_t i = 0; i < 9000; i += 3) { dataset(0, i) = mlpack::math::RandInt(7); dataset(1, i) = 0; dataset(2, i) = mlpack::math::RandInt(2); labels(i) = 0; dataset(0, i + 1) = mlpack::math::RandInt(7); dataset(1, i + 1) = 2; dataset(2, i + 1) = mlpack::math::RandInt(2); labels(i + 1) = 1; dataset(0, i + 2) = mlpack::math::RandInt(7); dataset(1, i + 2) = 1; dataset(2, i + 2) = mlpack::math::RandInt(2); labels(i + 2) = 2; } // Now train two streaming decision trees; one on the whole dataset, and one // on streaming data. typedef HoeffdingTree TreeType; TreeType batchTree(dataset, info, labels, 3, false); TreeType streamTree(info, 3); for (size_t i = 0; i < 9000; ++i) streamTree.Train(dataset.col(i), labels[i]); // Each tree should have a single split. BOOST_REQUIRE_EQUAL(batchTree.NumChildren(), 3); BOOST_REQUIRE_EQUAL(streamTree.NumChildren(), 3); BOOST_REQUIRE_EQUAL(batchTree.SplitDimension(), 1); BOOST_REQUIRE_EQUAL(streamTree.SplitDimension(), 1); // Now, classify all the points in the dataset. arma::Row batchLabels(9000); arma::Row streamLabels(9000); streamTree.Classify(dataset, batchLabels); for (size_t i = 0; i < 9000; ++i) streamLabels[i] = batchTree.Classify(dataset.col(i)); for (size_t i = 0; i < 9000; ++i) { BOOST_REQUIRE_EQUAL(labels[i], streamLabels[i]); BOOST_REQUIRE_EQUAL(labels[i], batchLabels[i]); } } /** * Test that the HoeffdingNumericSplit class has a fitness function value of 0 * before it's seen enough points. */ BOOST_AUTO_TEST_CASE(HoeffdingNumericSplitFitnessFunctionTest) { HoeffdingNumericSplit split(5, 10, 100); // The first 99 iterations should not calculate anything. The 100th is where // the counting starts. for (size_t i = 0; i < 99; ++i) { split.Train(mlpack::math::Random(), mlpack::math::RandInt(5)); double bestGain, secondBestGain; split.EvaluateFitnessFunction(bestGain, secondBestGain); BOOST_REQUIRE_SMALL(bestGain, 1e-10); BOOST_REQUIRE_SMALL(secondBestGain, 1e-10); } } /** * Make sure the majority class is correct in the samples before binning. */ BOOST_AUTO_TEST_CASE(HoeffdingNumericSplitPreBinningMajorityClassTest) { HoeffdingNumericSplit split(3, 10, 100); for (size_t i = 0; i < 100; ++i) { split.Train(mlpack::math::Random(), 1); BOOST_REQUIRE_EQUAL(split.MajorityClass(), 1); } } /** * Use a numeric feature that is bimodal (with a margin), and make sure that the * HoeffdingNumericSplit bins it reasonably into two bins and returns sensible * Gini impurity numbers. */ BOOST_AUTO_TEST_CASE(HoeffdingNumericSplitBimodalTest) { // 2 classes, 2 bins, 200 samples before binning. HoeffdingNumericSplit split(2, 2, 200); for (size_t i = 0; i < 100; ++i) { split.Train(mlpack::math::Random() + 0.3, 0); split.Train(-mlpack::math::Random() - 0.3, 1); } // Push the majority class to 1. split.Train(-mlpack::math::Random() - 0.3, 1); BOOST_REQUIRE_EQUAL(split.MajorityClass(), 1); // Push the majority class back to 0. split.Train(mlpack::math::Random() + 0.3, 0); split.Train(mlpack::math::Random() + 0.3, 0); BOOST_REQUIRE_EQUAL(split.MajorityClass(), 0); // Now the binning should be complete, and so the impurity should be // (0.5 * (1 - 0.5)) * 2 = 0.50 (it will be 0 in the two created children). double bestGain, secondBestGain; split.EvaluateFitnessFunction(bestGain, secondBestGain); BOOST_REQUIRE_CLOSE(bestGain, 0.50, 0.03); BOOST_REQUIRE_SMALL(secondBestGain, 1e-10); // Make sure that if we do create children, that the correct number of // children is created, and that the bins end up in the right place. 
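  // (For context: HoeffdingNumericSplit buffers raw observations until it has
  // seen the configured number of samples before binning -- 200 in this test
  // -- and only then discretizes the feature into the requested two bins, so
  // the fitness value checked above only becomes nonzero once that buffer has
  // been filled.)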
NumericSplitInfo<> info; arma::Col childMajorities; split.Split(childMajorities, info); BOOST_REQUIRE_EQUAL(childMajorities.n_elem, 2); // Now check the split info. for (size_t i = 0; i < 10; ++i) { BOOST_REQUIRE_NE(info.CalculateDirection(mlpack::math::Random() + 0.3), info.CalculateDirection(-mlpack::math::Random() - 0.3)); } } /** * Create a BinaryNumericSplit object, feed it a bunch of samples where anything * less than 1.0 is class 0 and anything greater is class 1. Then make sure it * can perform a perfect split. */ BOOST_AUTO_TEST_CASE(BinaryNumericSplitSimpleSplitTest) { BinaryNumericSplit split(2); // 2 classes. // Feed it samples. for (size_t i = 0; i < 500; ++i) { split.Train(mlpack::math::Random(), 0); split.Train(mlpack::math::Random() + 1.0, 1); // Now ensure the fitness function gives good gain. // The Gini impurity for the unsplit node is 2 * (0.5^2) = 0.5, and the Gini // impurity for the children is 0. double bestGain, secondBestGain; split.EvaluateFitnessFunction(bestGain, secondBestGain); BOOST_REQUIRE_CLOSE(bestGain, 0.5, 1e-5); BOOST_REQUIRE_GT(bestGain, secondBestGain); } // Now, when we ask it to split, ensure that the split value is reasonable. arma::Col childMajorities; BinaryNumericSplitInfo<> splitInfo; split.Split(childMajorities, splitInfo); BOOST_REQUIRE_EQUAL(childMajorities[0], 0); BOOST_REQUIRE_EQUAL(childMajorities[1], 1); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(0.5), 0); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(1.5), 1); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(0.0), 0); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(-1.0), 0); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(0.9), 0); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(1.1), 1); } /** * Create a BinaryNumericSplit object, feed it samples in the same way as * before, but with four classes. */ BOOST_AUTO_TEST_CASE(BinaryNumericSplitSimpleFourClassSplitTest) { BinaryNumericSplit split(4); // 4 classes. // Feed it samples. for (size_t i = 0; i < 250; ++i) { split.Train(mlpack::math::Random(), 0); split.Train(mlpack::math::Random() + 2.0, 1); split.Train(mlpack::math::Random() - 1.0, 2); split.Train(mlpack::math::Random() + 1.0, 3); // The same as the previous test, but with four classes: 4 * (0.25 * 0.75) = // 0.75. We can only split in one place, though, which will give one // perfect child, giving a gain of 0.75 - 3 * (1/3 * 2/3) = 0.25. double bestGain, secondBestGain; split.EvaluateFitnessFunction(bestGain, secondBestGain); BOOST_REQUIRE_CLOSE(bestGain, 0.25, 1e-5); BOOST_REQUIRE_GE(bestGain, secondBestGain); } // Now, when we ask it to split, ensure that the split value is reasonable. arma::Col childMajorities; BinaryNumericSplitInfo<> splitInfo; split.Split(childMajorities, splitInfo); // We don't really care where it splits -- it can split anywhere. But it has // to split in only two directions. BOOST_REQUIRE_EQUAL(childMajorities.n_elem, 2); } /** * Create a HoeffdingTree that uses the HoeffdingNumericSplit and make sure it * can split meaningfully on the correct dimension. */ BOOST_AUTO_TEST_CASE(NumericHoeffdingTreeTest) { // Generate data. arma::mat dataset(3, 9000); arma::Row labels(9000); data::DatasetInfo info(3); // All features are numeric. 
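  // (In the data generated below, dimension 1 separates the classes exactly:
  // class 0 falls in [0, 1), class 2 in [-1, 0), and class 1 in [1, 2).
  // Dimension 2 only partially separates them and dimension 0 is pure noise,
  // so the correct first split is on dimension 1, as asserted afterwards.)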
for (size_t i = 0; i < 9000; i += 3) { dataset(0, i) = mlpack::math::Random(); dataset(1, i) = mlpack::math::Random(); dataset(2, i) = mlpack::math::Random(); labels[i] = 0; dataset(0, i + 1) = mlpack::math::Random(); dataset(1, i + 1) = mlpack::math::Random() - 1.0; dataset(2, i + 1) = mlpack::math::Random() + 0.5; labels[i + 1] = 2; dataset(0, i + 2) = mlpack::math::Random(); dataset(1, i + 2) = mlpack::math::Random() + 1.0; dataset(2, i + 2) = mlpack::math::Random() + 0.8; labels[i + 2] = 1; } // Now train two streaming decision trees; one on the whole dataset, and one // on streaming data. typedef HoeffdingTree TreeType; TreeType batchTree(dataset, info, labels, 3, false); TreeType streamTree(info, 3); for (size_t i = 0; i < 9000; ++i) streamTree.Train(dataset.col(i), labels[i]); // Each tree should have at least one split. BOOST_REQUIRE_GT(batchTree.NumChildren(), 0); BOOST_REQUIRE_GT(streamTree.NumChildren(), 0); BOOST_REQUIRE_EQUAL(batchTree.SplitDimension(), 1); BOOST_REQUIRE_EQUAL(streamTree.SplitDimension(), 1); // Now, classify all the points in the dataset. arma::Row batchLabels(9000); arma::Row streamLabels(9000); streamTree.Classify(dataset, batchLabels); for (size_t i = 0; i < 9000; ++i) streamLabels[i] = batchTree.Classify(dataset.col(i)); size_t streamCorrect = 0; size_t batchCorrect = 0; for (size_t i = 0; i < 9000; ++i) { if (labels[i] == streamLabels[i]) ++streamCorrect; if (labels[i] == batchLabels[i]) ++batchCorrect; } // 66% accuracy shouldn't be too much to ask... BOOST_REQUIRE_GT(streamCorrect, 6000); BOOST_REQUIRE_GT(batchCorrect, 6000); } /** * The same as the previous test, but with the numeric binary split, and with a * categorical feature. */ BOOST_AUTO_TEST_CASE(BinaryNumericHoeffdingTreeTest) { // Generate data. arma::mat dataset(4, 9000); arma::Row labels(9000); data::DatasetInfo info(4); // All features are numeric, except the fourth. info.MapString("0", 3); for (size_t i = 0; i < 9000; i += 3) { dataset(0, i) = mlpack::math::Random(); dataset(1, i) = mlpack::math::Random(); dataset(2, i) = mlpack::math::Random(); dataset(3, i) = 0.0; labels[i] = 0; dataset(0, i + 1) = mlpack::math::Random(); dataset(1, i + 1) = mlpack::math::Random() - 1.0; dataset(2, i + 1) = mlpack::math::Random() + 0.5; dataset(3, i + 1) = 0.0; labels[i + 1] = 2; dataset(0, i + 2) = mlpack::math::Random(); dataset(1, i + 2) = mlpack::math::Random() + 1.0; dataset(2, i + 2) = mlpack::math::Random() + 0.8; dataset(3, i + 2) = 0.0; labels[i + 2] = 1; } // Now train two streaming decision trees; one on the whole dataset, and one // on streaming data. typedef HoeffdingTree TreeType; TreeType batchTree(dataset, info, labels, 3, false); TreeType streamTree(info, 3); for (size_t i = 0; i < 9000; ++i) streamTree.Train(dataset.col(i), labels[i]); // Each tree should have at least one split. BOOST_REQUIRE_GT(batchTree.NumChildren(), 0); BOOST_REQUIRE_GT(streamTree.NumChildren(), 0); BOOST_REQUIRE_EQUAL(batchTree.SplitDimension(), 1); BOOST_REQUIRE_EQUAL(streamTree.SplitDimension(), 1); // Now, classify all the points in the dataset. arma::Row batchLabels(9000); arma::Row streamLabels(9000); streamTree.Classify(dataset, batchLabels); for (size_t i = 0; i < 9000; ++i) streamLabels[i] = batchTree.Classify(dataset.col(i)); size_t streamCorrect = 0; size_t batchCorrect = 0; for (size_t i = 0; i < 9000; ++i) { if (labels[i] == streamLabels[i]) ++streamCorrect; if (labels[i] == batchLabels[i]) ++batchCorrect; } // Require a pretty high accuracy: 95%. 
  BOOST_REQUIRE_GT(streamCorrect, 8550);
  BOOST_REQUIRE_GT(batchCorrect, 8550);
}

/**
 * Test majority probabilities.
 */
BOOST_AUTO_TEST_CASE(MajorityProbabilityTest)
{
  data::DatasetInfo info(1);
  HoeffdingTree<> tree(info, 3);

  // Feed the tree a few samples.
  tree.Train(arma::vec("1"), 0);
  tree.Train(arma::vec("2"), 0);
  tree.Train(arma::vec("3"), 0);

  size_t prediction;
  double probability;
  tree.Classify(arma::vec("1"), prediction, probability);

  BOOST_REQUIRE_EQUAL(prediction, 0);
  BOOST_REQUIRE_CLOSE(probability, 1.0, 1e-5);

  // Make it impure.
  tree.Train(arma::vec("4"), 1);
  tree.Classify(arma::vec("3"), prediction, probability);

  BOOST_REQUIRE_EQUAL(prediction, 0);
  BOOST_REQUIRE_CLOSE(probability, 0.75, 1e-5);

  // Flip the majority class.
  tree.Train(arma::vec("4"), 1);
  tree.Train(arma::vec("4"), 1);
  tree.Train(arma::vec("4"), 1);
  tree.Train(arma::vec("4"), 1);
  tree.Classify(arma::vec("3"), prediction, probability);

  BOOST_REQUIRE_EQUAL(prediction, 1);
  BOOST_REQUIRE_CLOSE(probability, 0.625, 1e-5);
}

/**
 * Make sure that batch training mode outperforms non-batch mode.
 */
BOOST_AUTO_TEST_CASE(BatchTrainingTest)
{
  // We need to create a dataset with some amount of complexity that must be
  // split in a handful of ways to accurately classify the data.  An expanding
  // spiral should do the trick here.  We'll make the spiral in two dimensions.
  // The label will change as the index increases.
  arma::mat spiralDataset(2, 10000);
  for (size_t i = 0; i < 10000; ++i)
  {
    // One circle every 20000 samples.  Plus some noise.
    const double magnitude = 2.0 + (double(i) / 20000.0) +
        0.5 * mlpack::math::Random();
    const double angle = (i % 20000) * (2 * M_PI) + mlpack::math::Random();

    const double x = magnitude * cos(angle);
    const double y = magnitude * sin(angle);

    spiralDataset(0, i) = x;
    spiralDataset(1, i) = y;
  }

  arma::Row<size_t> labels(10000);
  for (size_t i = 0; i < 2000; ++i)
    labels[i] = 1;
  for (size_t i = 2000; i < 4000; ++i)
    labels[i] = 3;
  for (size_t i = 4000; i < 6000; ++i)
    labels[i] = 2;
  for (size_t i = 6000; i < 8000; ++i)
    labels[i] = 0;
  for (size_t i = 8000; i < 10000; ++i)
    labels[i] = 4;

  // Now shuffle the dataset.
  arma::uvec indices = arma::shuffle(arma::linspace<arma::uvec>(0, 9999,
      10000));
  arma::mat d(2, 10000);
  arma::Row<size_t> l(10000);
  for (size_t i = 0; i < 10000; ++i)
  {
    d.col(i) = spiralDataset.col(indices[i]);
    l[i] = labels[indices[i]];
  }

  // Split into a training set and a test set.
  arma::mat trainingData = d.cols(0, 4999);
  arma::mat testData = d.cols(5000, 9999);
  arma::Row<size_t> trainingLabels = l.subvec(0, 4999);
  arma::Row<size_t> testLabels = l.subvec(5000, 9999);

  data::DatasetInfo info(2);

  // Now build two decision trees; one in batch mode, and one in streaming
  // mode.  We need to set the confidence pretty high so that the streaming
  // tree isn't able to have enough samples to build to the same leaves.
  HoeffdingTree<> batchTree(trainingData, info, trainingLabels, 5, true,
      0.99999999);
  HoeffdingTree<> streamTree(trainingData, info, trainingLabels, 5, false,
      0.99999999);

  // Ensure that the performance of the batch tree is better.
  size_t batchCorrect = 0;
  size_t streamCorrect = 0;
  for (size_t i = 0; i < 5000; ++i)
  {
    size_t streamLabel = streamTree.Classify(testData.col(i));
    size_t batchLabel = batchTree.Classify(testData.col(i));

    if (streamLabel == testLabels[i])
      ++streamCorrect;
    if (batchLabel == testLabels[i])
      ++batchCorrect;
  }

  // The batch tree must be a bit better than the stream tree.  But not too
  // much, since the accuracy is already going to be very high.
BOOST_REQUIRE_GE(batchCorrect, streamCorrect); } // Make sure that changing the confidence properly propagates to all leaves. BOOST_AUTO_TEST_CASE(ConfidenceChangeTest) { // Generate data. arma::mat dataset(4, 9000); arma::Row labels(9000); data::DatasetInfo info(4); // All features are numeric, except the fourth. info.MapString("0", 3); for (size_t i = 0; i < 9000; i += 3) { dataset(0, i) = mlpack::math::Random(); dataset(1, i) = mlpack::math::Random(); dataset(2, i) = mlpack::math::Random(); dataset(3, i) = 0.0; labels[i] = 0; dataset(0, i + 1) = mlpack::math::Random(); dataset(1, i + 1) = mlpack::math::Random() - 1.0; dataset(2, i + 1) = mlpack::math::Random() + 0.5; dataset(3, i + 1) = 0.0; labels[i + 1] = 2; dataset(0, i + 2) = mlpack::math::Random(); dataset(1, i + 2) = mlpack::math::Random() + 1.0; dataset(2, i + 2) = mlpack::math::Random() + 0.8; dataset(3, i + 2) = 0.0; labels[i + 2] = 1; } HoeffdingTree<> tree(info, 3, 0.5); // Low success probability. size_t i = 0; while ((tree.NumChildren() == 0) && (i < 9000)) { tree.Train(dataset.col(i), labels[i]); i++; } BOOST_REQUIRE_LT(i, 9000); // Now we have split the root node, but we need to make sure we can feed // through the rest of the points while requiring a confidence of 1.0, and // make sure no splits happen. tree.SuccessProbability(1.0); tree.MaxSamples(0); i = 0; while ((tree.NumChildren() == 0) && (i < 90000)) { tree.Train(dataset.col(i % 9000), labels[i % 9000]); i++; } for (size_t c = 0; c < tree.NumChildren(); ++c) BOOST_REQUIRE_EQUAL(tree.Child(c).NumChildren(), 0); } //! Make sure parameter changes are propagated to children. BOOST_AUTO_TEST_CASE(ParameterChangeTest) { // Generate data. arma::mat dataset(4, 9000); arma::Row labels(9000); data::DatasetInfo info(4); // All features are numeric, except the fourth. info.MapString("0", 3); for (size_t i = 0; i < 9000; i += 3) { dataset(0, i) = mlpack::math::Random(); dataset(1, i) = mlpack::math::Random(); dataset(2, i) = mlpack::math::Random(); dataset(3, i) = 0.0; labels[i] = 0; dataset(0, i + 1) = mlpack::math::Random(); dataset(1, i + 1) = mlpack::math::Random() - 1.0; dataset(2, i + 1) = mlpack::math::Random() + 0.5; dataset(3, i + 1) = 0.0; labels[i + 1] = 2; dataset(0, i + 2) = mlpack::math::Random(); dataset(1, i + 2) = mlpack::math::Random() + 1.0; dataset(2, i + 2) = mlpack::math::Random() + 0.8; dataset(3, i + 2) = 0.0; labels[i + 2] = 1; } HoeffdingTree<> tree(dataset, info, labels, 3, true); // Batch training. // Now change parameters... tree.SuccessProbability(0.7); tree.MinSamples(17); tree.MaxSamples(192); tree.CheckInterval(3); std::stack*> stack; stack.push(&tree); while (!stack.empty()) { HoeffdingTree<>* node = stack.top(); stack.pop(); BOOST_REQUIRE_CLOSE(node->SuccessProbability(), 0.7, 1e-5); BOOST_REQUIRE_EQUAL(node->MinSamples(), 17); BOOST_REQUIRE_EQUAL(node->MaxSamples(), 192); BOOST_REQUIRE_EQUAL(node->CheckInterval(), 3); for (size_t i = 0; i < node->NumChildren(); ++i) stack.push(&node->Child(i)); } } BOOST_AUTO_TEST_CASE(MultipleSerializationTest) { // Generate data. arma::mat dataset(4, 9000); arma::Row labels(9000); data::DatasetInfo info(4); // All features are numeric, except the fourth. 
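  // (The point of the test below: deserializing the small streaming-trained
  // tree into the larger batch-trained tree must fully replace the existing
  // children rather than merge with them, so the two trees are required to
  // produce identical predictions afterwards.)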
info.MapString("0", 3); for (size_t i = 0; i < 9000; i += 3) { dataset(0, i) = mlpack::math::Random(); dataset(1, i) = mlpack::math::Random(); dataset(2, i) = mlpack::math::Random(); dataset(3, i) = 0.0; labels[i] = 0; dataset(0, i + 1) = mlpack::math::Random(); dataset(1, i + 1) = mlpack::math::Random() - 1.0; dataset(2, i + 1) = mlpack::math::Random() + 0.5; dataset(3, i + 1) = 0.0; labels[i + 1] = 2; dataset(0, i + 2) = mlpack::math::Random(); dataset(1, i + 2) = mlpack::math::Random() + 1.0; dataset(2, i + 2) = mlpack::math::Random() + 0.8; dataset(3, i + 2) = 0.0; labels[i + 2] = 1; } // Batch training will give a tree with many labels. HoeffdingTree<> deepTree(dataset, info, labels, 3, true); // Streaming training will not. HoeffdingTree<> shallowTree(dataset, info, labels, 3, false); // Now serialize the shallow tree into the deep tree. std::ostringstream oss; { boost::archive::binary_oarchive boa(oss); boa << data::CreateNVP(shallowTree, "streamingDecisionTree"); } std::istringstream iss(oss.str()); { boost::archive::binary_iarchive bia(iss); bia >> data::CreateNVP(deepTree, "streamingDecisionTree"); } // Now do some classification and make sure the results are the same. arma::Row deepPredictions, shallowPredictions; shallowTree.Classify(dataset, shallowPredictions); deepTree.Classify(dataset, deepPredictions); for (size_t i = 0; i < deepPredictions.n_elem; ++i) { BOOST_REQUIRE_EQUAL(shallowPredictions[i], deepPredictions[i]); } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/hyperplane_test.cpp000066400000000000000000000077641315013601400221110ustar00rootroot00000000000000/** * @file hyperplane_test.cpp * * Tests for Hyperplane and ProjVector implementations. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::math; using namespace mlpack::tree; using namespace mlpack::metric; using namespace mlpack::bound; BOOST_AUTO_TEST_SUITE(HyperplaneTest); /** * Ensure that a hyperplane, by default, consider all points to the left. */ BOOST_AUTO_TEST_CASE(HyperplaneEmptyConstructor) { Hyperplane h1; AxisOrthogonalHyperplane h2; arma::mat dataset; dataset.randu(3, 20); // 20 points in 3 dimensions. for (size_t i = 0; i < dataset.n_cols; i++) { BOOST_REQUIRE(h1.Left(dataset.col(i))); BOOST_REQUIRE(h2.Left(dataset.col(i))); BOOST_REQUIRE(!h1.Right(dataset.col(i))); BOOST_REQUIRE(!h2.Right(dataset.col(i))); } } /** * Ensure that we get the correct hyperplane given the projection vector. */ BOOST_AUTO_TEST_CASE(ProjectionTest) { // General hyperplane. 
ProjVector projVect1(arma::vec("1 1")); Hyperplane h1(projVect1, 0); BOOST_REQUIRE_EQUAL(h1.Project(arma::vec("1 -1")), 0); BOOST_REQUIRE(h1.Left(arma::vec("1 -1"))); BOOST_REQUIRE(!h1.Right(arma::vec("1 -1"))); BOOST_REQUIRE_EQUAL(h1.Project(arma::vec("-1 1")), 0); BOOST_REQUIRE(h1.Left(arma::vec("-1 1"))); BOOST_REQUIRE(!h1.Right(arma::vec("-1 1"))); BOOST_REQUIRE_EQUAL(h1.Project(arma::vec("1 0")), h1.Project(arma::vec("0 1"))); BOOST_REQUIRE(h1.Right(arma::vec("1 0"))); BOOST_REQUIRE(!h1.Left(arma::vec("1 0"))); BOOST_REQUIRE_EQUAL(h1.Project(arma::vec("-1 -1")), h1.Project(arma::vec("-2 0"))); BOOST_REQUIRE(h1.Left(arma::vec("-1 -1"))); BOOST_REQUIRE(!h1.Right(arma::vec("-1 -1"))); // A simple 2-dimensional bound. BallBound b1(2); b1.Center() = arma::vec("-1 -1"); b1.Radius() = 1.41; BOOST_REQUIRE(h1.Left(b1)); BOOST_REQUIRE(!h1.Right(b1)); b1.Center() = arma::vec("1 1"); b1.Radius() = 1.41; BOOST_REQUIRE(h1.Right(b1)); BOOST_REQUIRE(!h1.Left(b1)); b1.Center() = arma::vec("0 0"); b1.Radius() = 1.41; BOOST_REQUIRE(!h1.Right(b1)); BOOST_REQUIRE(!h1.Left(b1)); } /** * Ensure that we get the correct AxisOrthogonalHyperplane given the * AxisParallelProjVector. */ BOOST_AUTO_TEST_CASE(AxisOrthogonalProjectionTest) { // AxisParallel hyperplane. AxisParallelProjVector projVect2(1); AxisOrthogonalHyperplane h2(projVect2, 1); BOOST_REQUIRE_EQUAL(h2.Project(arma::vec("0 0")), -1); BOOST_REQUIRE(h2.Left(arma::vec("0 0"))); BOOST_REQUIRE(!h2.Right(arma::vec("0 0"))); BOOST_REQUIRE_EQUAL(h2.Project(arma::vec("0 1")), 0); BOOST_REQUIRE(h2.Left(arma::vec("0 1"))); BOOST_REQUIRE(!h2.Right(arma::vec("0 1"))); BOOST_REQUIRE_EQUAL(h2.Project(arma::vec("0 2")), 1); BOOST_REQUIRE(h2.Right(arma::vec("0 2"))); BOOST_REQUIRE(!h2.Left(arma::vec("0 2"))); BOOST_REQUIRE_EQUAL(h2.Project(arma::vec("1 2")), 1); BOOST_REQUIRE(h2.Right(arma::vec("1 2"))); BOOST_REQUIRE(!h2.Left(arma::vec("1 2"))); BOOST_REQUIRE_EQUAL(h2.Project(arma::vec("1 0")), -1); BOOST_REQUIRE(h2.Left(arma::vec("1 0"))); BOOST_REQUIRE(!h2.Right(arma::vec("1 0"))); // A simple 2-dimensional bound. HRectBound b2(2); b2[0] = Range(-1.0, 1.0); b2[1] = Range(-1.0, 1.0); BOOST_REQUIRE(h2.Left(b2)); BOOST_REQUIRE(!h2.Right(b2)); b2[0] = Range(-1.0, 1.0); b2[1] = Range(1.001, 2.0); BOOST_REQUIRE(h2.Right(b2)); BOOST_REQUIRE(!h2.Left(b2)); b2[0] = Range(-1.0, 1.0); b2[1] = Range(0, 2.0); BOOST_REQUIRE(!h2.Right(b2)); BOOST_REQUIRE(!h2.Left(b2)); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/imputation_test.cpp000066400000000000000000000235651315013601400221300ustar00rootroot00000000000000/** * @file imputation_test.cpp * @author Keon Kim * * Tests for data::Imputer class * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::data; using namespace std; BOOST_AUTO_TEST_SUITE(ImputationTest); /** * 1. Make sure a CSV is loaded correctly with mappings using MissingPolicy. * 2. Try Imputer object with CustomImputation method to impute data "a". * (It is ok to test on one method since the other ones will be covered in the * next cases). 
*/ BOOST_AUTO_TEST_CASE(DatasetMapperImputerTest) { fstream f; f.open("test_file.csv", fstream::out); f << "a, 2, 3" << endl; f << "5, 6, a" << endl; f << "8, 9, 10" << endl; f.close(); arma::mat input; MissingPolicy policy({"a"}); DatasetMapper info(policy); BOOST_REQUIRE(data::Load("test_file.csv", input, info) == true); // row and column test. BOOST_REQUIRE_EQUAL(input.n_rows, 3); BOOST_REQUIRE_EQUAL(input.n_cols, 3); // Load check // MissingPolicy should convert strings to nans. BOOST_REQUIRE(std::isnan(input(0, 0)) == true); BOOST_REQUIRE_CLOSE(input(0, 1), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(input(0, 2), 8.0, 1e-5); BOOST_REQUIRE_CLOSE(input(1, 0), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(input(1, 1), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(input(1, 2), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(input(2, 0), 3.0, 1e-5); BOOST_REQUIRE(std::isnan(input(2, 1)) == true); BOOST_REQUIRE_CLOSE(input(2, 2), 10.0, 1e-5); // convert missing vals to 99. CustomImputation customStrategy(99); Imputer, CustomImputation> imputer(info, customStrategy); // convert a or nan to 99 for dimension 0. imputer.Impute(input, "a", 0); // Custom imputation result check. BOOST_REQUIRE_CLOSE(input(0, 0), 99.0, 1e-5); BOOST_REQUIRE_CLOSE(input(0, 1), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(input(0, 2), 8.0, 1e-5); BOOST_REQUIRE_CLOSE(input(1, 0), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(input(1, 1), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(input(1, 2), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(input(2, 0), 3.0, 1e-5); BOOST_REQUIRE(std::isnan(input(2, 1)) == true); // remains as NaN BOOST_REQUIRE_CLOSE(input(2, 2), 10.0, 1e-5); // Remove the file. remove("test_file.csv"); } /** * Make sure CustomImputation method replaces data 0 to 99. */ BOOST_AUTO_TEST_CASE(CustomImputationTest) { arma::mat columnWiseInput("3.0 0.0 2.0 0.0;" "5.0 6.0 0.0 6.0;" "9.0 8.0 4.0 8.0;"); arma::mat rowWiseInput(columnWiseInput); double customValue = 99; double mappedValue = 0.0; CustomImputation imputer(customValue); // column wise imputer.Impute(columnWiseInput, mappedValue, 0/*dimension*/, true); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 0), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 1), 99.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 2), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 3), 99.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 0), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 1), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 2), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 3), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 0), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 1), 8.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 2), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 3), 8.0, 1e-5); // row wise imputer.Impute(rowWiseInput, mappedValue, 1, false); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 0), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 1), 99.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 2), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 3), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 0), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 1), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 2), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 3), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 0), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 1), 8.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 2), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 3), 8.0, 1e-5); } /** * Make sure MeanImputation method replaces data 0 to mean value of each * dimensions. 
*/ BOOST_AUTO_TEST_CASE(MeanImputationTest) { arma::mat columnWiseInput("3.0 0.0 2.0 0.0;" "5.0 6.0 0.0 6.0;" "9.0 8.0 4.0 8.0;"); arma::mat rowWiseInput(columnWiseInput); double mappedValue = 0.0; MeanImputation imputer; // column wise imputer.Impute(columnWiseInput, mappedValue, 0, true); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 0), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 1), 2.5, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 2), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 3), 2.5, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 0), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 1), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 2), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 3), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 0), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 1), 8.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 2), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 3), 8.0, 1e-5); // row wise imputer.Impute(rowWiseInput, mappedValue, 1, false); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 0), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 1), 7.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 2), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 3), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 0), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 1), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 2), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 3), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 0), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 1), 8.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 2), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 3), 8.0, 1e-5); } /** * Make sure MeanImputation method replaces data 0 to median value of each * dimensions. */ BOOST_AUTO_TEST_CASE(MedianImputationTest) { arma::mat columnWiseInput("3.0 0.0 2.0 0.0;" "5.0 6.0 0.0 6.0;" "9.0 8.0 4.0 8.0;"); arma::mat rowWiseInput(columnWiseInput); double mappedValue = 0.0; MedianImputation imputer; // column wise imputer.Impute(columnWiseInput, mappedValue, 1, true); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 0), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 1), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 2), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 3), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 0), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 1), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 2), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 3), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 0), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 1), 8.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 2), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 3), 8.0, 1e-5); // row wise imputer.Impute(rowWiseInput, mappedValue, 1, false); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 0), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 1), 7.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 2), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 3), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 0), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 1), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 2), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 3), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 0), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 1), 8.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(2, 2), 4.0, 1e-5); } /** * Make sure ListwiseDeletion method deletes the whole column (if column wise) * or the row (if row wise) containing value of 0. 
*/ BOOST_AUTO_TEST_CASE(ListwiseDeletionTest) { arma::mat columnWiseInput("3.0 0.0 2.0 0.0;" "5.0 6.0 0.0 6.0;" "9.0 8.0 4.0 8.0;"); arma::mat rowWiseInput(columnWiseInput); double mappedValue = 0.0; ListwiseDeletion imputer; // column wise imputer.Impute(columnWiseInput, mappedValue, 0, true); // column wise BOOST_REQUIRE_CLOSE(columnWiseInput(0, 0), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(0, 1), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 0), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(1, 1), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 0), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(columnWiseInput(2, 1), 4.0, 1e-5); // row wise imputer.Impute(rowWiseInput, mappedValue, 1, false); // row wise BOOST_REQUIRE_CLOSE(rowWiseInput(0, 0), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 1), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 2), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(0, 3), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 0), 9.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 1), 8.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 2), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(rowWiseInput(1, 3), 8.0, 1e-5); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/ind2sub_test.cpp000066400000000000000000000020221315013601400212660ustar00rootroot00000000000000/** * @file ind2sub_test.cpp * @author Nilay Jain * * Test the backported Armadillo ind2sub() and sub2ind() functions. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include "test_tools.hpp" BOOST_AUTO_TEST_SUITE(ind2subTest); /** * This test checks whether ind2sub and sub2ind are * compiled successfully and that they function properly. */ BOOST_AUTO_TEST_CASE(ind2sub_test) { arma::mat A = arma::randu(4,5); size_t index = 13; arma::uvec u = arma::ind2sub(arma::size(A), index); BOOST_REQUIRE_EQUAL(u(0), index % A.n_rows); BOOST_REQUIRE_EQUAL(u(1), index / A.n_rows); index = arma::sub2ind(arma::size(A), u(0), u(1)); BOOST_REQUIRE_EQUAL(index, u(0) + u(1) * A.n_rows); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/kernel_pca_test.cpp000066400000000000000000000121271315013601400220320ustar00rootroot00000000000000/** * @file kernel_pca_test.cpp * @author Ryan Curtin * * Test file for Kernel PCA. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include "test_tools.hpp" BOOST_AUTO_TEST_SUITE(KernelPCATest); using namespace mlpack; using namespace mlpack::math; using namespace mlpack::kpca; using namespace mlpack::kernel; using namespace std; using namespace arma; /** * If KernelPCA is working right, then it should turn a circle dataset into a * linearly separable dataset in one dimension (which is easy to check). */ BOOST_AUTO_TEST_CASE(CircleTransformationTestNaive) { // The dataset, which will have three concentric rings in three dimensions. arma::mat dataset; // Now, there are 750 points centered at the origin with unit variance. dataset.randn(3, 750); dataset *= 0.05; // Take the second 250 points and spread them away from the origin. 
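  // (How the push below works: adding r * (x / ||x||) to x moves the point
  // radially outward, from norm ||x|| to exactly ||x|| + r.  The base points
  // have norms around 0.05 * sqrt(3), so the second and third groups become
  // thin shells at radius about 2 and 5 while the first group stays in a
  // small ball at the origin -- three classes that are not linearly separable
  // in the input space, which is what makes this a useful kernel PCA test.)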
  for (size_t i = 250; i < 500; ++i)
  {
    // Push the point away from the origin by 2.
    const double pointNorm = norm(dataset.col(i), 2);

    dataset(0, i) += 2.0 * (dataset(0, i) / pointNorm);
    dataset(1, i) += 2.0 * (dataset(1, i) / pointNorm);
    dataset(2, i) += 2.0 * (dataset(2, i) / pointNorm);
  }

  // Take the third 250 points and spread them away from the origin.
  for (size_t i = 500; i < 750; ++i)
  {
    // Push the point away from the origin by 5.
    const double pointNorm = norm(dataset.col(i), 2);

    dataset(0, i) += 5.0 * (dataset(0, i) / pointNorm);
    dataset(1, i) += 5.0 * (dataset(1, i) / pointNorm);
    dataset(2, i) += 5.0 * (dataset(2, i) / pointNorm);
  }

  // Now we have a dataset; we will use the GaussianKernel to perform KernelPCA
  // using the naive method to take it down to one dimension.
  KernelPCA<GaussianKernel> p;
  p.Apply(dataset, 1);

  // Get the ranges of each "class".  These are all initialized as empty ranges
  // containing no points.
  Range ranges[3];
  ranges[0] = Range();
  ranges[1] = Range();
  ranges[2] = Range();

  // Expand the ranges to hold all of the points in the class.
  for (size_t i = 0; i < 250; ++i)
    ranges[0] |= dataset(0, i);
  for (size_t i = 250; i < 500; ++i)
    ranges[1] |= dataset(0, i);
  for (size_t i = 500; i < 750; ++i)
    ranges[2] |= dataset(0, i);

  // None of these ranges should overlap -- the classes should be linearly
  // separable.
  BOOST_REQUIRE_EQUAL(ranges[0].Contains(ranges[1]), false);
  BOOST_REQUIRE_EQUAL(ranges[0].Contains(ranges[2]), false);
  BOOST_REQUIRE_EQUAL(ranges[1].Contains(ranges[2]), false);
}

/**
 * If KernelPCA is working right, then it should turn a circle dataset into a
 * linearly separable dataset in one dimension (which is easy to check).
 */
BOOST_AUTO_TEST_CASE(CircleTransformationTestNystroem)
{
  // The dataset, which will have three concentric rings in three dimensions.
  arma::mat dataset;

  // Now, there are 750 points centered at the origin with unit variance.
  dataset.randn(3, 750);
  dataset *= 0.05;

  // Take the second 250 points and spread them away from the origin.
  for (size_t i = 250; i < 500; ++i)
  {
    // Push the point away from the origin by 2.
    const double pointNorm = norm(dataset.col(i), 2);

    dataset(0, i) += 2.0 * (dataset(0, i) / pointNorm);
    dataset(1, i) += 2.0 * (dataset(1, i) / pointNorm);
    dataset(2, i) += 2.0 * (dataset(2, i) / pointNorm);
  }

  // Take the third 250 points and spread them away from the origin.
  for (size_t i = 500; i < 750; ++i)
  {
    // Push the point away from the origin by 5.
    const double pointNorm = norm(dataset.col(i), 2);

    dataset(0, i) += 5.0 * (dataset(0, i) / pointNorm);
    dataset(1, i) += 5.0 * (dataset(1, i) / pointNorm);
    dataset(2, i) += 5.0 * (dataset(2, i) / pointNorm);
  }

  // Now we have a dataset; we will use the GaussianKernel to perform KernelPCA
  // using the Nystroem method to take it down to one dimension.
  KernelPCA<GaussianKernel, NystroemKernelRule<GaussianKernel> > p;
  p.Apply(dataset, 1);

  // Get the ranges of each "class".  These are all initialized as empty ranges
  // containing no points.
  Range ranges[3];
  ranges[0] = Range();
  ranges[1] = Range();
  ranges[2] = Range();

  // Expand the ranges to hold all of the points in the class.
  for (size_t i = 0; i < 250; ++i)
    ranges[0] |= dataset(0, i);
  for (size_t i = 250; i < 500; ++i)
    ranges[1] |= dataset(0, i);
  for (size_t i = 500; i < 750; ++i)
    ranges[2] |= dataset(0, i);

  // None of these ranges should overlap -- the classes should be linearly
  // separable.
BOOST_REQUIRE_EQUAL(ranges[0].Contains(ranges[1]), false); BOOST_REQUIRE_EQUAL(ranges[0].Contains(ranges[2]), false); BOOST_REQUIRE_EQUAL(ranges[1].Contains(ranges[2]), false); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/kernel_test.cpp000066400000000000000000000514511315013601400212120ustar00rootroot00000000000000/** * @file kernel_test.cpp * @author Ryan Curtin * @author Ajinkya Kale * * Tests for the various kernel classes. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include #include #include #include #include #include #include "test_tools.hpp" #include "serialization.hpp" using namespace mlpack; using namespace mlpack::kernel; using namespace mlpack::metric; BOOST_AUTO_TEST_SUITE(KernelTest); /** * Basic test of the Manhattan distance. */ BOOST_AUTO_TEST_CASE(ManhattanDistanceTest) { // A couple quick tests. arma::vec a = "1.0 3.0 4.0"; arma::vec b = "3.0 3.0 5.0"; BOOST_REQUIRE_CLOSE(ManhattanDistance::Evaluate(a, b), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(ManhattanDistance::Evaluate(b, a), 3.0, 1e-5); // Check also for when the root is taken (should be the same). BOOST_REQUIRE_CLOSE((LMetric<1, true>::Evaluate(a, b)), 3.0, 1e-5); BOOST_REQUIRE_CLOSE((LMetric<1, true>::Evaluate(b, a)), 3.0, 1e-5); } /** * Basic test of squared Euclidean distance. */ BOOST_AUTO_TEST_CASE(SquaredEuclideanDistanceTest) { // Sample 2-dimensional vectors. arma::vec a = "1.0 2.0"; arma::vec b = "0.0 -2.0"; BOOST_REQUIRE_CLOSE(SquaredEuclideanDistance::Evaluate(a, b), 17.0, 1e-5); BOOST_REQUIRE_CLOSE(SquaredEuclideanDistance::Evaluate(b, a), 17.0, 1e-5); } /** * Basic test of Euclidean distance. */ BOOST_AUTO_TEST_CASE(EuclideanDistanceTest) { arma::vec a = "1.0 3.0 5.0 7.0"; arma::vec b = "4.0 0.0 2.0 0.0"; BOOST_REQUIRE_CLOSE(EuclideanDistance::Evaluate(a, b), sqrt(76.0), 1e-5); BOOST_REQUIRE_CLOSE(EuclideanDistance::Evaluate(b, a), sqrt(76.0), 1e-5); } /** * Arbitrary test case for coverage. */ BOOST_AUTO_TEST_CASE(ArbitraryCaseTest) { arma::vec a = "3.0 5.0 6.0 7.0"; arma::vec b = "1.0 2.0 1.0 0.0"; BOOST_REQUIRE_CLOSE((LMetric<3, false>::Evaluate(a, b)), 503.0, 1e-5); BOOST_REQUIRE_CLOSE((LMetric<3, false>::Evaluate(b, a)), 503.0, 1e-5); BOOST_REQUIRE_CLOSE((LMetric<3, true>::Evaluate(a, b)), 7.95284762, 1e-5); BOOST_REQUIRE_CLOSE((LMetric<3, true>::Evaluate(b, a)), 7.95284762, 1e-5); } /** * Make sure two vectors of all zeros return zero distance, for a few different * powers. */ BOOST_AUTO_TEST_CASE(LMetricZerosTest) { arma::vec a(250); a.fill(0.0); // We cannot use a loop because compilers seem to be unable to unroll the loop // and realize the variable actually is knowable at compile-time. 
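  // (Reference note: LMetric<Power, TakeRoot>::Evaluate computes
  // sum_i |x_i - y_i|^Power, and additionally takes the 1/Power root of that
  // sum when TakeRoot is true; compare ArbitraryCaseTest above, where the
  // same pair of vectors yields 503 and 503^(1/3) ~= 7.95285.  For identical
  // vectors both variants are exactly zero, which each pair of checks below
  // asserts for a fixed Power.)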
  BOOST_REQUIRE((LMetric<1, false>::Evaluate(a, a)) == 0);
  BOOST_REQUIRE((LMetric<1, true>::Evaluate(a, a)) == 0);
  BOOST_REQUIRE((LMetric<2, false>::Evaluate(a, a)) == 0);
  BOOST_REQUIRE((LMetric<2, true>::Evaluate(a, a)) == 0);
  BOOST_REQUIRE((LMetric<3, false>::Evaluate(a, a)) == 0);
  BOOST_REQUIRE((LMetric<3, true>::Evaluate(a, a)) == 0);
  BOOST_REQUIRE((LMetric<4, false>::Evaluate(a, a)) == 0);
  BOOST_REQUIRE((LMetric<4, true>::Evaluate(a, a)) == 0);
  BOOST_REQUIRE((LMetric<5, false>::Evaluate(a, a)) == 0);
  BOOST_REQUIRE((LMetric<5, true>::Evaluate(a, a)) == 0);
}

/**
 * Simple test of Mahalanobis distance with unset covariance matrix in
 * constructor.
 */
BOOST_AUTO_TEST_CASE(MDUnsetCovarianceTest)
{
  MahalanobisDistance<false> md;
  md.Covariance() = arma::eye<arma::mat>(4, 4);
  arma::vec a = "1.0 2.0 2.0 3.0";
  arma::vec b = "0.0 0.0 1.0 3.0";

  BOOST_REQUIRE_CLOSE(md.Evaluate(a, b), 6.0, 1e-5);
  BOOST_REQUIRE_CLOSE(md.Evaluate(b, a), 6.0, 1e-5);
}

/**
 * Simple test of Mahalanobis distance with unset covariance matrix in
 * constructor and TakeRoot set to true.
 */
BOOST_AUTO_TEST_CASE(MDRootUnsetCovarianceTest)
{
  MahalanobisDistance<true> md;
  md.Covariance() = arma::eye<arma::mat>(4, 4);
  arma::vec a = "1.0 2.0 2.5 5.0";
  arma::vec b = "0.0 2.0 0.5 8.0";

  BOOST_REQUIRE_CLOSE(md.Evaluate(a, b), sqrt(14.0), 1e-5);
  BOOST_REQUIRE_CLOSE(md.Evaluate(b, a), sqrt(14.0), 1e-5);
}

/**
 * Simple test of Mahalanobis distance setting identity covariance in
 * constructor.
 */
BOOST_AUTO_TEST_CASE(MDEyeCovarianceTest)
{
  MahalanobisDistance<false> md(4);
  arma::vec a = "1.0 2.0 2.0 3.0";
  arma::vec b = "0.0 0.0 1.0 3.0";

  BOOST_REQUIRE_CLOSE(md.Evaluate(a, b), 6.0, 1e-5);
  BOOST_REQUIRE_CLOSE(md.Evaluate(b, a), 6.0, 1e-5);
}

/**
 * Simple test of Mahalanobis distance setting identity covariance in
 * constructor and TakeRoot set to true.
 */
BOOST_AUTO_TEST_CASE(MDRootEyeCovarianceTest)
{
  MahalanobisDistance<true> md(4);
  arma::vec a = "1.0 2.0 2.5 5.0";
  arma::vec b = "0.0 2.0 0.5 8.0";

  BOOST_REQUIRE_CLOSE(md.Evaluate(a, b), sqrt(14.0), 1e-5);
  BOOST_REQUIRE_CLOSE(md.Evaluate(b, a), sqrt(14.0), 1e-5);
}

/**
 * Simple test with diagonal covariance matrix.
 */
BOOST_AUTO_TEST_CASE(MDDiagonalCovarianceTest)
{
  arma::mat cov = arma::eye<arma::mat>(5, 5);
  cov(0, 0) = 2.0;
  cov(1, 1) = 0.5;
  cov(2, 2) = 3.0;
  cov(3, 3) = 1.0;
  cov(4, 4) = 1.5;
  MahalanobisDistance<false> md(cov);

  arma::vec a = "1.0 2.0 2.0 4.0 5.0";
  arma::vec b = "2.0 3.0 1.0 1.0 0.0";

  BOOST_REQUIRE_CLOSE(md.Evaluate(a, b), 52.0, 1e-5);
  BOOST_REQUIRE_CLOSE(md.Evaluate(b, a), 52.0, 1e-5);
}

/**
 * More specific case with more difficult covariance matrix.
 */
BOOST_AUTO_TEST_CASE(MDFullCovarianceTest)
{
  arma::mat cov = "1.0 2.0 3.0 4.0;"
                  "0.5 0.6 0.7 0.1;"
                  "3.4 4.3 5.0 6.1;"
                  "1.0 2.0 4.0 1.0;";
  MahalanobisDistance<false> md(cov);

  arma::vec a = "1.0 2.0 2.0 4.0";
  arma::vec b = "2.0 3.0 1.0 1.0";

  BOOST_REQUIRE_CLOSE(md.Evaluate(a, b), 15.7, 1e-5);
  BOOST_REQUIRE_CLOSE(md.Evaluate(b, a), 15.7, 1e-5);
}

/**
 * Simple test case for the cosine distance.
 */
BOOST_AUTO_TEST_CASE(CosineDistanceSameAngleTest)
{
  arma::vec a = "1.0 2.0 3.0";
  arma::vec b = "2.0 4.0 6.0";

  BOOST_REQUIRE_CLOSE(CosineDistance::Evaluate(a, b), 1.0, 1e-5);
  BOOST_REQUIRE_CLOSE(CosineDistance::Evaluate(b, a), 1.0, 1e-5);
}

/**
 * Now let's have them be orthogonal.
 */
BOOST_AUTO_TEST_CASE(CosineDistanceOrthogonalTest)
{
  arma::vec a = "0.0 1.0";
  arma::vec b = "1.0 0.0";

  BOOST_REQUIRE_SMALL(CosineDistance::Evaluate(a, b), 1e-5);
  BOOST_REQUIRE_SMALL(CosineDistance::Evaluate(b, a), 1e-5);
}

/**
 * Some random angle test.
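 *
 * As a hand check (added note): the value asserted below equals
 * dot(a, b) / (||a|| * ||b||) = 0.19 / (sqrt(0.55) * sqrt(3.42)),
 * which is approximately 0.1385349024.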
*/ BOOST_AUTO_TEST_CASE(CosineDistanceRandomTest) { arma::vec a = "0.1 0.2 0.3 0.4 0.5"; arma::vec b = "1.2 1.0 0.8 -0.3 -0.5"; BOOST_REQUIRE_CLOSE(CosineDistance::Evaluate(a, b), 0.1385349024, 1e-5); BOOST_REQUIRE_CLOSE(CosineDistance::Evaluate(b, a), 0.1385349024, 1e-5); } /** * Linear Kernel test. */ BOOST_AUTO_TEST_CASE(LinearKernelTest) { arma::vec a = ".2 .3 .4 .1"; arma::vec b = ".56 .21 .623 .82"; LinearKernel lk; BOOST_REQUIRE_CLOSE(lk.Evaluate(a,b), .5062, 1e-5); BOOST_REQUIRE_CLOSE(lk.Evaluate(b,a), .5062, 1e-5); } /** * Linear Kernel test, orthogonal vectors. */ BOOST_AUTO_TEST_CASE(LinearKernelOrthogonalTest) { arma::vec a = "1 0 0"; arma::vec b = "0 0 1"; LinearKernel lk; BOOST_REQUIRE_SMALL(lk.Evaluate(a,b), 1e-5); BOOST_REQUIRE_SMALL(lk.Evaluate(b,a), 1e-5); } BOOST_AUTO_TEST_CASE(GaussianKernelTest) { arma::vec a = "1 0 0"; arma::vec b = "0 1 0"; arma::vec c = "0 0 1"; GaussianKernel gk(.5); BOOST_REQUIRE_CLOSE(gk.Evaluate(a, b), .018315638888734, 1e-5); BOOST_REQUIRE_CLOSE(gk.Evaluate(b, a), .018315638888734, 1e-5); BOOST_REQUIRE_CLOSE(gk.Evaluate(a, c), .018315638888734, 1e-5); BOOST_REQUIRE_CLOSE(gk.Evaluate(c, a), .018315638888734, 1e-5); BOOST_REQUIRE_CLOSE(gk.Evaluate(b, c), .018315638888734, 1e-5); BOOST_REQUIRE_CLOSE(gk.Evaluate(c, b), .018315638888734, 1e-5); /* check the single dimension evaluate function */ BOOST_REQUIRE_CLOSE(gk.Evaluate(1.0), 0.1353352832366127, 1e-5); BOOST_REQUIRE_CLOSE(gk.Evaluate(2.0), 0.00033546262790251185, 1e-5); BOOST_REQUIRE_CLOSE(gk.Evaluate(3.0), 1.5229979744712629e-08, 1e-5); /* check the normalization constant */ BOOST_REQUIRE_CLOSE(gk.Normalizer(1), 1.2533141373155001, 1e-5); BOOST_REQUIRE_CLOSE(gk.Normalizer(2), 1.5707963267948963, 1e-5); BOOST_REQUIRE_CLOSE(gk.Normalizer(3), 1.9687012432153019, 1e-5); BOOST_REQUIRE_CLOSE(gk.Normalizer(4), 2.4674011002723386, 1e-5); /* check the convolution integral */ BOOST_REQUIRE_CLOSE(gk.ConvolutionIntegral(a,b), 0.024304474038457577, 1e-5); BOOST_REQUIRE_CLOSE(gk.ConvolutionIntegral(a,c), 0.024304474038457577, 1e-5); BOOST_REQUIRE_CLOSE(gk.ConvolutionIntegral(b,c), 0.024304474038457577, 1e-5); } BOOST_AUTO_TEST_CASE(GaussianKernelSerializationTest) { GaussianKernel gk(0.5); GaussianKernel xmlGk(1.5), textGk, binaryGk(15.0); // Serialize the kernels. 
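  // SerializeObjectAll() is a helper from the tests' serialization.hpp; it
  // should round-trip the first object through XML, text, and binary
  // boost::serialization archives, loading the results into the other three
  // arguments.  Afterwards all four kernels are expected to report the
  // original bandwidth of 0.5, which is what the checks below verify.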
SerializeObjectAll(gk, xmlGk, textGk, binaryGk); BOOST_REQUIRE_CLOSE(gk.Bandwidth(), 0.5, 1e-5); BOOST_REQUIRE_CLOSE(xmlGk.Bandwidth(), 0.5, 1e-5); BOOST_REQUIRE_CLOSE(textGk.Bandwidth(), 0.5, 1e-5); BOOST_REQUIRE_CLOSE(binaryGk.Bandwidth(), 0.5, 1e-5); } BOOST_AUTO_TEST_CASE(SphericalKernelTest) { arma::vec a = "1.0 0.0"; arma::vec b = "0.0 1.0"; arma::vec c = "0.2 0.9"; SphericalKernel sk(.5); BOOST_REQUIRE_CLOSE(sk.Evaluate(a, b), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(sk.Evaluate(a, c), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(sk.Evaluate(b, c), 1.0, 1e-5); /* check the single dimension evaluate function */ BOOST_REQUIRE_CLOSE(sk.Evaluate(0.10), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(sk.Evaluate(0.25), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(sk.Evaluate(0.50), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(sk.Evaluate(1.00), 0.0, 1e-5); /* check the normalization constant */ BOOST_REQUIRE_CLOSE(sk.Normalizer(1), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(sk.Normalizer(2), 0.78539816339744828, 1e-5); BOOST_REQUIRE_CLOSE(sk.Normalizer(3), 0.52359877559829893, 1e-5); BOOST_REQUIRE_CLOSE(sk.Normalizer(4), 0.30842513753404244, 1e-5); /* check the convolution integral */ BOOST_REQUIRE_CLOSE(sk.ConvolutionIntegral(a,b), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(sk.ConvolutionIntegral(a,c), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(sk.ConvolutionIntegral(b,c), 1.0021155029652784, 1e-5); } BOOST_AUTO_TEST_CASE(EpanechnikovKernelTest) { arma::vec a = "1.0 0.0"; arma::vec b = "0.0 1.0"; arma::vec c = "0.1 0.9"; EpanechnikovKernel ek(.5); BOOST_REQUIRE_CLOSE(ek.Evaluate(a, b), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(ek.Evaluate(b, c), 0.92, 1e-5); BOOST_REQUIRE_CLOSE(ek.Evaluate(a, c), 0.0, 1e-5); /* check the single dimension evaluate function */ BOOST_REQUIRE_CLOSE(ek.Evaluate(0.10), 0.96, 1e-5); BOOST_REQUIRE_CLOSE(ek.Evaluate(0.25), 0.75, 1e-5); BOOST_REQUIRE_CLOSE(ek.Evaluate(0.50), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(ek.Evaluate(1.00), 0.0, 1e-5); /* check the normalization constant */ BOOST_REQUIRE_CLOSE(ek.Normalizer(1), 0.666666666666666, 1e-5); BOOST_REQUIRE_CLOSE(ek.Normalizer(2), 0.39269908169872414, 1e-5); BOOST_REQUIRE_CLOSE(ek.Normalizer(3), 0.20943951023931956, 1e-5); BOOST_REQUIRE_CLOSE(ek.Normalizer(4), 0.10280837917801415, 1e-5); /* check the convolution integral */ BOOST_REQUIRE_CLOSE(ek.ConvolutionIntegral(a,b), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(ek.ConvolutionIntegral(a,c), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(ek.ConvolutionIntegral(b,c), 1.5263455690698258, 1e-5); } BOOST_AUTO_TEST_CASE(PolynomialKernelTest) { arma::vec a = "0 0 1"; arma::vec b = "0 1 0"; PolynomialKernel pk(5.0, 5.0); BOOST_REQUIRE_CLOSE(pk.Evaluate(a, b), 3125.0, 0); BOOST_REQUIRE_CLOSE(pk.Evaluate(b, a), 3125.0, 0); } BOOST_AUTO_TEST_CASE(HyperbolicTangentKernelTest) { arma::vec a = "0 0 1"; arma::vec b = "0 1 0"; HyperbolicTangentKernel tk(5.0, 5.0); BOOST_REQUIRE_CLOSE(tk.Evaluate(a, b), 0.9999092, 1e-5); BOOST_REQUIRE_CLOSE(tk.Evaluate(b, a), 0.9999092, 1e-5); } BOOST_AUTO_TEST_CASE(LaplacianKernelTest) { arma::vec a = "0 0 1"; arma::vec b = "0 1 0"; LaplacianKernel lk(1.0); BOOST_REQUIRE_CLOSE(lk.Evaluate(a, b), 0.243116734, 5e-5); BOOST_REQUIRE_CLOSE(lk.Evaluate(b, a), 0.243116734, 5e-5); } // Ensure that the p-spectrum kernel successfully extracts all length-p // substrings from the data. 
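// For reference: with p = 3, the p-spectrum kernel represents a string by the
// multiset of its length-3 substrings.  For example, "herpgle" yields
// { her, erp, rpg, pgl, gle }, and the kernel value between two strings is
// the dot product of their substring-count vectors.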
BOOST_AUTO_TEST_CASE(PSpectrumSubstringExtractionTest) { std::vector > datasets; datasets.push_back(std::vector()); datasets[0].push_back("herpgle"); datasets[0].push_back("herpagkle"); datasets[0].push_back("klunktor"); datasets[0].push_back("flibbynopple"); datasets.push_back(std::vector()); datasets[1].push_back("floggy3245"); datasets[1].push_back("flippydopflip"); datasets[1].push_back("stupid fricking cat"); datasets[1].push_back("food time isn't until later"); datasets[1].push_back("leave me alone until 6:00"); datasets[1].push_back("only after that do you get any food."); datasets[1].push_back("obloblobloblobloblobloblob"); PSpectrumStringKernel p(datasets, 3); // Ensure the sizes are correct. BOOST_REQUIRE_EQUAL(p.Counts().size(), 2); BOOST_REQUIRE_EQUAL(p.Counts()[0].size(), 4); BOOST_REQUIRE_EQUAL(p.Counts()[1].size(), 7); // herpgle: her, erp, rpg, pgl, gle BOOST_REQUIRE_EQUAL(p.Counts()[0][0].size(), 5); BOOST_REQUIRE_EQUAL(p.Counts()[0][0]["her"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][0]["erp"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][0]["rpg"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][0]["pgl"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][0]["gle"], 1); // herpagkle: her, erp, rpa, pag, agk, gkl, kle BOOST_REQUIRE_EQUAL(p.Counts()[0][1].size(), 7); BOOST_REQUIRE_EQUAL(p.Counts()[0][1]["her"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][1]["erp"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][1]["rpa"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][1]["pag"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][1]["agk"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][1]["gkl"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][1]["kle"], 1); // klunktor: klu, lun, unk, nkt, kto, tor BOOST_REQUIRE_EQUAL(p.Counts()[0][2].size(), 6); BOOST_REQUIRE_EQUAL(p.Counts()[0][2]["klu"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][2]["lun"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][2]["unk"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][2]["nkt"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][2]["kto"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][2]["tor"], 1); // flibbynopple: fli lib ibb bby byn yno nop opp ppl ple BOOST_REQUIRE_EQUAL(p.Counts()[0][3].size(), 10); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["fli"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["lib"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["ibb"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["bby"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["byn"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["yno"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["nop"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["opp"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["ppl"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[0][3]["ple"], 1); // floggy3245: flo log ogg ggy gy3 y32 324 245 BOOST_REQUIRE_EQUAL(p.Counts()[1][0].size(), 8); BOOST_REQUIRE_EQUAL(p.Counts()[1][0]["flo"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][0]["log"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][0]["ogg"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][0]["ggy"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][0]["gy3"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][0]["y32"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][0]["324"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][0]["245"], 1); // flippydopflip: fli lip ipp ppy pyd ydo dop opf pfl fli lip // fli(2) lip(2) ipp ppy pyd ydo dop opf pfl BOOST_REQUIRE_EQUAL(p.Counts()[1][1].size(), 9); BOOST_REQUIRE_EQUAL(p.Counts()[1][1]["fli"], 2); BOOST_REQUIRE_EQUAL(p.Counts()[1][1]["lip"], 2); BOOST_REQUIRE_EQUAL(p.Counts()[1][1]["ipp"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][1]["ppy"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][1]["pyd"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][1]["ydo"], 1); 
BOOST_REQUIRE_EQUAL(p.Counts()[1][1]["dop"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][1]["opf"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][1]["pfl"], 1); // stupid fricking cat: stu tup upi pid fri ric ick cki kin ing cat BOOST_REQUIRE_EQUAL(p.Counts()[1][2].size(), 11); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["stu"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["tup"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["upi"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["pid"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["fri"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["ric"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["ick"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["cki"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["kin"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["ing"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][2]["cat"], 1); // food time isn't until later: foo ood tim ime isn unt nti til lat ate ter BOOST_REQUIRE_EQUAL(p.Counts()[1][3].size(), 11); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["foo"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["ood"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["tim"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["ime"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["isn"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["unt"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["nti"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["til"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["lat"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["ate"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][3]["ter"], 1); // leave me alone until 6:00: lea eav ave alo lon one unt nti til BOOST_REQUIRE_EQUAL(p.Counts()[1][4].size(), 9); BOOST_REQUIRE_EQUAL(p.Counts()[1][4]["lea"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][4]["eav"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][4]["ave"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][4]["alo"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][4]["lon"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][4]["one"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][4]["unt"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][4]["nti"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][4]["til"], 1); // only after that do you get any food.: // onl nly aft fte ter tha hat you get any foo ood BOOST_REQUIRE_EQUAL(p.Counts()[1][5].size(), 12); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["onl"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["nly"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["aft"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["fte"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["ter"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["tha"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["hat"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["you"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["get"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["any"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["foo"], 1); BOOST_REQUIRE_EQUAL(p.Counts()[1][5]["ood"], 1); // obloblobloblobloblobloblob: obl(8) blo(8) lob(8) BOOST_REQUIRE_EQUAL(p.Counts()[1][6].size(), 3); BOOST_REQUIRE_EQUAL(p.Counts()[1][6]["obl"], 8); BOOST_REQUIRE_EQUAL(p.Counts()[1][6]["blo"], 8); BOOST_REQUIRE_EQUAL(p.Counts()[1][6]["lob"], 8); } BOOST_AUTO_TEST_CASE(PSpectrumStringEvaluateTest) { // Construct simple dataset. 
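  // Note on the "points" used below: for PSpectrumStringKernel, each vector
  // handed to Evaluate() is a (dataset index, string index) pair rather than
  // actual data.  So a = "0 0" refers to "hello" and b = "0 1" to "jello";
  // their kernel value is the number of shared trigram occurrences -- here
  // "ell" and "llo", giving 2, as asserted below.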
std::vector > dataset; dataset.push_back(std::vector()); dataset[0].push_back("hello"); dataset[0].push_back("jello"); dataset[0].push_back("mellow"); dataset[0].push_back("mellow jello"); PSpectrumStringKernel p(dataset, 3); arma::vec a("0 0"); arma::vec b("0 0"); BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 3.0, 1e-5); b = "0 1"; BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 2.0, 1e-5); b = "0 2"; BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 2.0, 1e-5); b = "0 3"; BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 4.0, 1e-5); a = "0 1"; b = "0 1"; BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 3.0, 1e-5); b = "0 2"; BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 2.0, 1e-5); b = "0 3"; BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 5.0, 1e-5); a = "0 2"; b = "0 2"; BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 4.0, 1e-5); b = "0 3"; BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 6.0, 1e-5); a = "0 3"; BOOST_REQUIRE_CLOSE(p.Evaluate(a, b), 11.0, 1e-5); BOOST_REQUIRE_CLOSE(p.Evaluate(b, a), 11.0, 1e-5); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/kernel_traits_test.cpp000066400000000000000000000051501315013601400225730ustar00rootroot00000000000000/** * @file kernel_traits_test.cpp * @author Ryan Curtin * * Test the KernelTraits class. Because all of the values are known at compile * time, this test is meant to ensure that uses of KernelTraits still compile * okay and react as expected. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::kernel; BOOST_AUTO_TEST_SUITE(KernelTraitsTest); BOOST_AUTO_TEST_CASE(IsNormalizedTest) { // Reason number ten billion why macros are bad: // // The Boost unit test framework is built on macros. When I write // BOOST_REQUIRE_EQUAL(KernelTraits::IsNormalized, false), what actually // happens (in gcc at least) is that the 'false' gets implicitly converted to // an int; then, the compiler goes looking for an int IsNormalized variable in // KernelTraits. But this doesn't exist, so we get this error at linker time: // // kernel_traits_test.cpp:(.text+0xb86): undefined reference to // `mlpack::kernel::KernelTraits::IsNormalized' // // and this actually tells us nothing about the error. When you dig deep // enough or get frustrated enough, you end up realizing it's a macro problem // and now have to *explicitly* cast to bool. Yes, really; try it if you // don't believe me. // Test each kernel individually. // If the type is not a valid kernel, it should be false (default value). BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, false); // Normalized kernels. 
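  // A kernel is "normalized" when K(x, x) == 1 for every point x; the
  // Gaussian kernel, for instance, evaluates to exp(0) = 1 whenever the two
  // arguments coincide.  Each kernel checked here has that property.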
BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, true); BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, true); BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, true); BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, true); BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, true); BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, true); // Unnormalized kernels. BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, false); BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, false); BOOST_REQUIRE_EQUAL((bool) KernelTraits::IsNormalized, false); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/kfn_test.cpp000066400000000000000000000603661315013601400205150ustar00rootroot00000000000000/** * @file kfn_test.cpp * * Tests for KFN (k-furthest-neighbors). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::neighbor; using namespace mlpack::tree; using namespace mlpack::metric; using namespace mlpack::bound; BOOST_AUTO_TEST_SUITE(KFNTest); /** * Simple furthest-neighbors test with small, synthetic dataset. This is an * exhaustive test, which checks that each method for performing the calculation * (dual-tree, single-tree, naive) produces the correct results. An * eleven-point dataset and the ten furthest neighbors are taken. The dataset * is in one dimension for simplicity -- the correct functionality of distance * functions is not tested here. */ BOOST_AUTO_TEST_CASE(ExhaustiveSyntheticTest) { // Set up our data. arma::mat data(1, 11); data[0] = 0.05; // Row addressing is unnecessary (they are all 0). data[1] = 0.35; data[2] = 0.15; data[3] = 1.25; data[4] = 5.05; data[5] = -0.22; data[6] = -2.00; data[7] = -1.30; data[8] = 0.45; data[9] = 0.90; data[10] = 1.00; typedef BinarySpaceTree, arma::mat> TreeType; // We will loop through three times, one for each method of performing the // calculation. We'll always use 10 neighbors, so set that parameter. std::vector oldFromNew; std::vector newFromOld; TreeType tree(data, oldFromNew, newFromOld, 1); KFN kfn(std::move(tree)); for (int i = 0; i < 3; i++) { switch (i) { case 0: // Use the dual-tree method. kfn.Naive() = false; kfn.SingleMode() = false; break; case 1: // Use the single-tree method. kfn.Naive() = false; kfn.SingleMode() = true; break; case 2: // Use the naive method. kfn.Naive() = true; break; } // Now perform the actual calculation. arma::Mat neighbors; arma::mat distances; kfn.Search(10, neighbors, distances); // Now the exhaustive check for correctness. This will be long. We must // also remember that the distances returned are squared distances. As a // result, distance comparisons are written out as (distance * distance) for // readability. // Neighbors of point 0. 
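    // How to read these blocks (added note): each column of 'neighbors' holds
    // the ten furthest neighbors of one query point, with row 0 the furthest
    // and row 9 the nearest of the ten.  The expected values are plain L2
    // distances in this one-dimensional dataset; e.g., for point 0 (at 0.05)
    // the tenth-furthest neighbor is point 2 (at 0.15), at distance
    // |0.05 - 0.15| = 0.10.  The newFromOld mapping translates original point
    // indices into the positions the tree build moved them to.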
BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[0]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[0]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[0]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[0]), 0.27, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[0]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[0]), 0.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[0]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[0]), 0.40, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[0]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[0]), 0.85, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[0]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[0]), 0.95, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[0]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[0]), 1.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[0]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[0]), 1.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[0]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[0]), 2.05, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[0]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[0]), 5.00, 1e-5); // Neighbors of point 1. BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[1]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[1]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[1]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[1]), 0.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[1]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[1]), 0.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[1]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[1]), 0.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[1]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[1]), 0.57, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[1]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[1]), 0.65, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[1]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[1]), 0.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[1]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[1]), 1.65, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[1]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[1]), 2.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[1]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[1]), 4.70, 1e-5); // Neighbors of point 2. 
BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[2]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[2]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[2]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[2]), 0.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[2]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[2]), 0.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[2]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[2]), 0.37, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[2]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[2]), 0.75, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[2]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[2]), 0.85, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[2]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[2]), 1.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[2]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[2]), 1.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[2]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[2]), 2.15, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[2]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[2]), 4.90, 1e-5); // Neighbors of point 3. BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[3]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[3]), 0.25, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[3]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[3]), 0.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[3]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[3]), 0.80, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[3]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[3]), 0.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[3]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[3]), 1.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[3]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[3]), 1.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[3]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[3]), 1.47, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[3]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[3]), 2.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[3]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[3]), 3.25, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[3]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[3]), 3.80, 1e-5); // Neighbors of point 4. 
BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[4]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[4]), 3.80, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[4]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[4]), 4.05, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[4]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[4]), 4.15, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[4]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[4]), 4.60, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[4]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[4]), 4.70, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[4]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[4]), 4.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[4]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[4]), 5.00, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[4]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[4]), 5.27, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[4]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[4]), 6.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[4]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[4]), 7.05, 1e-5); // Neighbors of point 5. BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[5]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[5]), 0.27, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[5]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[5]), 0.37, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[5]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[5]), 0.57, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[5]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[5]), 0.67, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[5]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[5]), 1.08, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[5]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[5]), 1.12, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[5]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[5]), 1.22, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[5]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[5]), 1.47, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[5]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[5]), 1.78, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[5]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[5]), 5.27, 1e-5); // Neighbors of point 6. 
BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[6]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[6]), 0.70, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[6]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[6]), 1.78, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[6]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[6]), 2.05, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[6]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[6]), 2.15, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[6]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[6]), 2.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[6]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[6]), 2.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[6]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[6]), 2.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[6]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[6]), 3.00, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[6]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[6]), 3.25, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[6]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[6]), 7.05, 1e-5); // Neighbors of point 7. BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[7]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[7]), 0.70, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[7]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[7]), 1.08, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[7]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[7]), 1.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[7]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[7]), 1.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[7]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[7]), 1.65, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[7]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[7]), 1.75, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[7]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[7]), 2.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[7]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[7]), 2.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[7]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[7]), 2.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[7]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[7]), 6.35, 1e-5); // Neighbors of point 8. 
BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[8]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[8]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[8]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[8]), 0.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[8]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[8]), 0.40, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[8]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[8]), 0.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[8]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[8]), 0.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[8]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[8]), 0.67, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[8]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[8]), 0.80, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[8]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[8]), 1.75, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[8]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[8]), 2.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[8]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[8]), 4.60, 1e-5); // Neighbors of point 9. BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[9]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[9]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[9]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[9]), 0.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[9]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[9]), 0.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[9]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[9]), 0.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[9]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[9]), 0.75, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[9]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[9]), 0.85, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[9]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[9]), 1.12, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[9]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[9]), 2.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[9]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[9]), 2.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[9]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[9]), 4.15, 1e-5); // Neighbors of point 10. 
BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[10]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[10]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[10]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[10]), 0.25, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[10]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[10]), 0.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[10]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[10]), 0.65, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[10]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[10]), 0.85, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[10]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[10]), 0.95, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[10]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[10]), 1.22, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[10]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[10]), 2.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[10]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[10]), 3.00, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[10]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[10]), 4.05, 1e-5); } } /** * Test the dual-tree furthest-neighbors method with the naive method. This * uses both a query and reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(DualTreeVsNaive1) { arma::mat dataset; // Hard-coded filename: bad? if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KFN kfn(dataset); KFN naive(dataset, NAIVE_MODE); arma::Mat neighborsTree; arma::mat distancesTree; kfn.Search(dataset, 15, neighborsTree, distancesTree); arma::Mat neighborsNaive; arma::mat distancesNaive; naive.Search(dataset, 15, neighborsNaive, distancesNaive); for (size_t i = 0; i < neighborsTree.n_elem; i++) { BOOST_REQUIRE(neighborsTree[i] == neighborsNaive[i]); BOOST_REQUIRE_CLOSE(distancesTree[i], distancesNaive[i], 1e-5); } } /** * Test the dual-tree furthest-neighbors method with the naive method. This * uses only a reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(DualTreeVsNaive2) { arma::mat dataset; // Hard-coded filename: bad? // Code duplication: also bad! if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KFN kfn(dataset); KFN naive(dataset, NAIVE_MODE); arma::Mat neighborsTree; arma::mat distancesTree; kfn.Search(15, neighborsTree, distancesTree); arma::Mat neighborsNaive; arma::mat distancesNaive; naive.Search(15, neighborsNaive, distancesNaive); for (size_t i = 0; i < neighborsTree.n_elem; i++) { BOOST_REQUIRE_EQUAL(neighborsTree[i], neighborsNaive[i]); BOOST_REQUIRE_CLOSE(distancesTree[i], distancesNaive[i], 1e-5); } } /** * Test the single-tree furthest-neighbors method with the naive method. This * uses only a reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(SingleTreeVsNaive) { arma::mat dataset; // Hard-coded filename: bad! // Code duplication: also bad! 
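  // (Hedged refactoring sketch, not part of the original tests.)  The
  // load-or-fail pattern repeated across these cases could be factored into a
  // hypothetical helper such as:
  //
  //   arma::mat LoadOrFail(const std::string& name)
  //   {
  //     arma::mat m;
  //     if (!data::Load(name, m))
  //       BOOST_FAIL("Cannot load test dataset " + name + "!");
  //     return m;
  //   }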
if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KFN kfn(dataset, SINGLE_TREE_MODE); KFN naive(dataset, NAIVE_MODE); arma::Mat neighborsTree; arma::mat distancesTree; kfn.Search(15, neighborsTree, distancesTree); arma::Mat neighborsNaive; arma::mat distancesNaive; naive.Search(15, neighborsNaive, distancesNaive); for (size_t i = 0; i < neighborsTree.n_elem; i++) { BOOST_REQUIRE_EQUAL(neighborsTree[i], neighborsNaive[i]); BOOST_REQUIRE_CLOSE(distancesTree[i], distancesNaive[i], 1e-5); } } /** * Test the cover tree single-tree furthest-neighbors method against the naive * method. This uses only a random reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(SingleCoverTreeTest) { arma::mat data; data.randu(75, 1000); // 75 dimensional, 1000 points. // This depends on the cover tree not mapping points. CoverTree, NeighborSearchStat, arma::mat, FirstPointIsRoot> tree(data); NeighborSearch, arma::mat, StandardCoverTree> coverTreeSearch(std::move(tree), SINGLE_TREE_MODE); KFN naive(data, NAIVE_MODE); arma::Mat coverTreeNeighbors; arma::mat coverTreeDistances; coverTreeSearch.Search(data, 15, coverTreeNeighbors, coverTreeDistances); arma::Mat naiveNeighbors; arma::mat naiveDistances; naive.Search(data, 15, naiveNeighbors, naiveDistances); for (size_t i = 0; i < coverTreeNeighbors.n_elem; ++i) { BOOST_REQUIRE_EQUAL(coverTreeNeighbors[i], naiveNeighbors[i]); BOOST_REQUIRE_CLOSE(coverTreeDistances[i], naiveDistances[i], 1e-5); } } /** * Test the cover tree dual-tree furthest neighbors method against the naive * method. */ BOOST_AUTO_TEST_CASE(DualCoverTreeTest) { arma::mat dataset; data::Load("test_data_3_1000.csv", dataset); KFN tree(dataset); arma::Mat kdNeighbors; arma::mat kdDistances; tree.Search(dataset, 5, kdNeighbors, kdDistances); typedef CoverTree, NeighborSearchStat, arma::mat, FirstPointIsRoot> TreeType; TreeType referenceTree(dataset); NeighborSearch, arma::mat, StandardCoverTree> coverTreeSearch(std::move(referenceTree)); arma::Mat coverNeighbors; arma::mat coverDistances; coverTreeSearch.Search(dataset, 5, coverNeighbors, coverDistances); for (size_t i = 0; i < coverNeighbors.n_elem; ++i) { BOOST_REQUIRE_EQUAL(coverNeighbors(i), kdNeighbors(i)); BOOST_REQUIRE_CLOSE(coverDistances(i), kdDistances(i), 1e-5); } } /** * Test the ball tree single-tree furthest-neighbors method against the naive * method. This uses only a random reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(SingleBallTreeTest) { arma::mat data; data.randu(75, 1000); // 75 dimensional, 1000 points. typedef BallTree, arma::mat> TreeType; TreeType tree(data); KFN naive(tree.Dataset(), NAIVE_MODE); // BinarySpaceTree modifies data. Use modified data to maintain the // correspondence between points in the dataset for both methods. The order of // query points in both methods should be same. 
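  // Concretely: building the ball tree above permuted the columns of 'data',
  // and tree.Dataset() is that permuted copy.  Constructing the naive
  // searcher from tree.Dataset() rather than from the original matrix keeps
  // point indices comparable between the two result sets.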
NeighborSearch, arma::mat, BallTree> ballTreeSearch(std::move(tree), SINGLE_TREE_MODE); arma::Mat ballTreeNeighbors; arma::mat ballTreeDistances; ballTreeSearch.Search(15, ballTreeNeighbors, ballTreeDistances); arma::Mat naiveNeighbors; arma::mat naiveDistances; naive.Search(15, naiveNeighbors, naiveDistances); for (size_t i = 0; i < ballTreeNeighbors.n_elem; ++i) { BOOST_REQUIRE_EQUAL(ballTreeNeighbors[i], naiveNeighbors[i]); BOOST_REQUIRE_CLOSE(ballTreeDistances[i], naiveDistances[i], 1e-5); } } /** * Test the ball tree dual-tree furthest neighbors method against the naive * method. */ BOOST_AUTO_TEST_CASE(DualBallTreeTest) { arma::mat dataset; data::Load("test_data_3_1000.csv", dataset); KFN tree(dataset); arma::Mat kdNeighbors; arma::mat kdDistances; tree.Search(5, kdNeighbors, kdDistances); NeighborSearch, arma::mat, BallTree> ballTreeSearch(dataset); arma::Mat ballNeighbors; arma::mat ballDistances; ballTreeSearch.Search(5, ballNeighbors, ballDistances); for (size_t i = 0; i < ballNeighbors.n_elem; ++i) { BOOST_REQUIRE_EQUAL(ballNeighbors(i), kdNeighbors(i)); BOOST_REQUIRE_CLOSE(ballDistances(i), kdDistances(i), 1e-5); } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/kmeans_test.cpp000066400000000000000000000535101315013601400212060ustar00rootroot00000000000000/** * @file kmeans_test.cpp * @author Ryan Curtin * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include #include #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::kmeans; using namespace mlpack::metric; using namespace mlpack::tree; using namespace mlpack::neighbor; BOOST_AUTO_TEST_SUITE(KMeansTest); // Generate dataset; written transposed because it's easier to read. arma::mat kMeansData(" 0.0 0.0;" // Class 1. " 0.3 0.4;" " 0.1 0.0;" " 0.1 0.3;" " -0.2 -0.2;" " -0.1 0.3;" " -0.4 0.1;" " 0.2 -0.1;" " 0.3 0.0;" " -0.3 -0.3;" " 0.1 -0.1;" " 0.2 -0.3;" " -0.3 0.2;" " 10.0 10.0;" // Class 2. " 10.1 9.9;" " 9.9 10.0;" " 10.2 9.7;" " 10.2 9.8;" " 9.7 10.3;" " 9.9 10.1;" "-10.0 5.0;" // Class 3. " -9.8 5.1;" " -9.9 4.9;" "-10.0 4.9;" "-10.2 5.2;" "-10.1 5.1;" "-10.3 5.3;" "-10.0 4.8;" " -9.6 5.0;" " -9.8 5.1;"); /** * 30-point 3-class test case for K-Means. */ BOOST_AUTO_TEST_CASE(KMeansSimpleTest) { // This test was originally written to use RandomPartition, and is left that // way because RandomPartition gives better initializations here. KMeans kmeans; arma::Row assignments; kmeans.Cluster((arma::mat) trans(kMeansData), 3, assignments); // Now make sure we got it all right. There is no restriction on how the // clusters are ordered, so we have to be careful about that. size_t firstClass = assignments(0); for (size_t i = 1; i < 13; i++) BOOST_REQUIRE_EQUAL(assignments(i), firstClass); size_t secondClass = assignments(13); // To ensure that class 1 != class 2. BOOST_REQUIRE_NE(firstClass, secondClass); for (size_t i = 13; i < 20; i++) BOOST_REQUIRE_EQUAL(assignments(i), secondClass); size_t thirdClass = assignments(20); // To ensure that this is the third class which we haven't seen yet. 
BOOST_REQUIRE_NE(firstClass, thirdClass); BOOST_REQUIRE_NE(secondClass, thirdClass); for (size_t i = 20; i < 30; i++) BOOST_REQUIRE_EQUAL(assignments(i), thirdClass); } /** * Make sure the empty cluster policy class does nothing. */ BOOST_AUTO_TEST_CASE(AllowEmptyClusterTest) { arma::Row assignments; assignments.randu(30); arma::Row assignmentsOld = assignments; arma::mat centroids; centroids.randu(30, 3); // This doesn't matter. arma::Col counts(3); counts[0] = accu(assignments == 0); counts[1] = accu(assignments == 1); counts[2] = 0; arma::Col countsOld = counts; // Make sure the method doesn't modify any points. metric::LMetric<2, true> metric; BOOST_REQUIRE_EQUAL(AllowEmptyClusters::EmptyCluster(kMeansData, 2, centroids, centroids, counts, metric, 0), 0); // Make sure no assignments were changed. for (size_t i = 0; i < assignments.n_elem; i++) BOOST_REQUIRE_EQUAL(assignments[i], assignmentsOld[i]); // Make sure no counts were changed. for (size_t i = 0; i < 3; i++) BOOST_REQUIRE_EQUAL(counts[i], countsOld[i]); } /** * Make sure the max variance method finds the correct point. */ BOOST_AUTO_TEST_CASE(MaxVarianceNewClusterTest) { // Five points. arma::mat data("0.4 1.0 5.0 -2.0 -2.5;" "1.0 0.8 0.7 5.1 5.2;"); // Point 2 is the mis-clustered point we're looking for to be moved. arma::Row assignments("0 0 0 1 1"); arma::mat centroids(2, 3); centroids.col(0) = (1.0 / 3.0) * (data.col(0) + data.col(1) + data.col(2)); centroids.col(1) = 0.5 * (data.col(3) + data.col(4)); centroids(0, 2) = DBL_MAX; centroids(1, 2) = DBL_MAX; arma::Col counts("3 2 0"); metric::LMetric<2, true> metric; // This should only change one point. MaxVarianceNewCluster mvnc; BOOST_REQUIRE_EQUAL(mvnc.EmptyCluster(data, 2, centroids, centroids, counts, metric, 0), 1); // Add the variance of each point's distance away from the cluster. I think // this is the sensible thing to do. for (size_t i = 0; i < data.n_cols; ++i) { // Find the closest centroid to this point. double minDistance = std::numeric_limits::infinity(); size_t closestCluster = centroids.n_cols; // Invalid value. for (size_t j = 0; j < centroids.n_cols; j++) { const double distance = metric.Evaluate(data.col(i), centroids.col(j)); if (distance < minDistance) { minDistance = distance; closestCluster = j; } } assignments[i] = closestCluster; } BOOST_REQUIRE_EQUAL(assignments[0], 0); BOOST_REQUIRE_EQUAL(assignments[1], 0); BOOST_REQUIRE_EQUAL(assignments[2], 2); BOOST_REQUIRE_EQUAL(assignments[3], 1); BOOST_REQUIRE_EQUAL(assignments[4], 1); // Ensure that the counts are right. BOOST_REQUIRE_EQUAL(counts[0], 2); BOOST_REQUIRE_EQUAL(counts[1], 2); BOOST_REQUIRE_EQUAL(counts[2], 1); } /** * Make sure the random partitioner seems to return valid results. */ BOOST_AUTO_TEST_CASE(RandomPartitionTest) { arma::mat data; data.randu(2, 1000); // One thousand points. arma::Row assignments; // We'll ask for 18 clusters (arbitrary). RandomPartition::Cluster(data, 18, assignments); // Ensure that the right number of assignments were given. BOOST_REQUIRE_EQUAL(assignments.n_elem, 1000); // Ensure that no value is greater than 17 (the maximum valid cluster). for (size_t i = 0; i < 1000; i++) BOOST_REQUIRE_LT(assignments[i], 18); } /** * Make sure that random initialization fails for a corner case dataset. */ BOOST_AUTO_TEST_CASE(RandomInitialAssignmentFailureTest) { // This is a very synthetic dataset. It is one Gaussian with a huge number of // points combined with one faraway Gaussian with very few points. 
Normally, // k-means should not get the correct result -- which is one cluster at each // Gaussian. This is because the random partitioning scheme has very low // (virtually zero) likelihood of separating the two Gaussians properly, and // then the algorithm will never converge to that result. // // So we will set the initial assignments appropriately. Remember, once the // initial assignments are done, k-means is deterministic. arma::mat dataset(2, 10002); dataset.randn(); // Now move the second Gaussian far away. for (size_t i = 0; i < 2; ++i) dataset.col(10000 + i) += arma::vec("50 50"); // Ensure that k-means fails when run with random initialization. This isn't // strictly a necessary test, but it does help let us know that this is a good // test. size_t successes = 0; for (size_t run = 0; run < 15; ++run) { arma::mat centroids; arma::Row assignments; KMeans<> kmeans; kmeans.Cluster(dataset, 2, assignments, centroids); // Inspect centroids. See if one is close to the second Gaussian. if ((centroids(0, 0) >= 30.0 && centroids(1, 0) >= 30.0) || (centroids(0, 1) >= 30.0 && centroids(1, 1) >= 30.0)) ++successes; } // Only one success allowed. The probability of two successes should be // infinitesimal. BOOST_REQUIRE_LT(successes, 2); } /** * Make sure that specifying initial assignments is successful for a corner case * dataset which doesn't usually converge otherwise. */ BOOST_AUTO_TEST_CASE(InitialAssignmentTest) { // For a better description of this dataset, see // RandomInitialAssignmentFailureTest. arma::mat dataset(2, 10002); dataset.randn(); // Now move the second Gaussian far away. for (size_t i = 0; i < 2; ++i) dataset.col(10000 + i) += arma::vec("50 50"); // Now, if we specify initial assignments, the algorithm should converge (with // zero iterations, actually, because this is the solution). arma::Row assignments(10002); assignments.fill(0); assignments[10000] = 1; assignments[10001] = 1; KMeans<> kmeans; kmeans.Cluster(dataset, 2, assignments, true); // Check results. for (size_t i = 0; i < 10000; ++i) BOOST_REQUIRE_EQUAL(assignments[i], 0); for (size_t i = 10000; i < 10002; ++i) BOOST_REQUIRE_EQUAL(assignments[i], 1); // Now, slightly harder. Give it one incorrect assignment in each cluster. // The wrong assignment should be quickly fixed. assignments[9999] = 1; assignments[10000] = 0; kmeans.Cluster(dataset, 2, assignments, true); // Check results. for (size_t i = 0; i < 10000; ++i) BOOST_REQUIRE_EQUAL(assignments[i], 0); for (size_t i = 10000; i < 10002; ++i) BOOST_REQUIRE_EQUAL(assignments[i], 1); } /** * Make sure specifying initial centroids is successful for a corner case which * doesn't usually converge otherwise. */ BOOST_AUTO_TEST_CASE(InitialCentroidTest) { // For a better description of this dataset, see // RandomInitialAssignmentFailureTest. arma::mat dataset(2, 10002); dataset.randn(); // Now move the second Gaussian far away. for (size_t i = 0; i < 2; ++i) dataset.col(10000 + i) += arma::vec("50 50"); arma::Row assignments; arma::mat centroids(2, 2); centroids.col(0) = arma::vec("0 0"); centroids.col(1) = arma::vec("50 50"); // This should converge correctly. KMeans<> k; k.Cluster(dataset, 2, assignments, centroids, false, true); // Check results. for (size_t i = 0; i < 10000; ++i) BOOST_REQUIRE_EQUAL(assignments[i], 0); for (size_t i = 10000; i < 10002; ++i) BOOST_REQUIRE_EQUAL(assignments[i], 1); // Now add a little noise to the initial centroids. 
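  // Even perturbed this far, the induced partition is unchanged: points of
  // the origin Gaussian are closer to (3, 4) than to (25, 10), and the two
  // outliers near (50, 50) are closer to (25, 10), so the first assignment
  // step already recovers the correct clustering.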
  centroids.col(0) = arma::vec("3 4");
  centroids.col(1) = arma::vec("25 10");

  k.Cluster(dataset, 2, assignments, centroids, false, true);

  // Check results.
  for (size_t i = 0; i < 10000; ++i)
    BOOST_REQUIRE_EQUAL(assignments[i], 0);
  for (size_t i = 10000; i < 10002; ++i)
    BOOST_REQUIRE_EQUAL(assignments[i], 1);
}

/**
 * Ensure that initial assignments override initial centroids.
 */
BOOST_AUTO_TEST_CASE(InitialAssignmentOverrideTest)
{
  // For a better description of this dataset, see
  // RandomInitialAssignmentFailureTest.
  arma::mat dataset(2, 10002);
  dataset.randn();

  // Now move the second Gaussian far away.
  for (size_t i = 0; i < 2; ++i)
    dataset.col(10000 + i) += arma::vec("50 50");

  arma::Row<size_t> assignments(10002);
  assignments.fill(0);
  assignments[10000] = 1;
  assignments[10001] = 1;

  // Note that this initial centroid guess is the opposite of the assignments
  // guess!
  arma::mat centroids(2, 2);
  centroids.col(0) = arma::vec("50 50");
  centroids.col(1) = arma::vec("0 0");

  KMeans<> k;
  k.Cluster(dataset, 2, assignments, centroids, true, true);

  // Because the initial assignments guess should take priority, we should get
  // those same results back.
  for (size_t i = 0; i < 10000; ++i)
    BOOST_REQUIRE_EQUAL(assignments[i], 0);
  for (size_t i = 10000; i < 10002; ++i)
    BOOST_REQUIRE_EQUAL(assignments[i], 1);

  // Make sure the centroids are about right too.
  BOOST_REQUIRE_LT(centroids(0, 0), 10.0);
  BOOST_REQUIRE_LT(centroids(1, 0), 10.0);
  BOOST_REQUIRE_GT(centroids(0, 1), 40.0);
  BOOST_REQUIRE_GT(centroids(1, 1), 40.0);
}

/**
 * Test that the refined starting policy returns decent initial cluster
 * estimates.
 */
BOOST_AUTO_TEST_CASE(RefinedStartTest)
{
  // Our dataset will be five Gaussians of largely varying numbers of points
  // and we expect that the refined starting policy should return good guesses
  // at what these Gaussians are.
  arma::mat data(3, 3000);
  data.randn();

  // First Gaussian: 1000 points, centered at (0, 0, 0).
  // Second Gaussian: 200 points, centered at (5, 0, -2).
  // Third Gaussian: 500 points, centered at (-2, -2, -2).
  // Fourth Gaussian: 100 points, centered at (-6, 8, 8).
  // Fifth Gaussian: 1200 points, centered at (1, 6, 1).
  arma::mat centroids(" 0 5 -2 -6 1;"
                      " 0 0 -2 8 6;"
                      " 0 -2 -2 8 1");

  for (size_t i = 1000; i < 1200; ++i)
    data.col(i) += centroids.col(1);
  for (size_t i = 1200; i < 1700; ++i)
    data.col(i) += centroids.col(2);
  for (size_t i = 1700; i < 1800; ++i)
    data.col(i) += centroids.col(3);
  for (size_t i = 1800; i < 3000; ++i)
    data.col(i) += centroids.col(4);

  // Now run the RefinedStart algorithm and make sure it doesn't deviate too
  // much from the actual solution.
  RefinedStart rs;
  arma::Row<size_t> assignments;
  arma::mat resultingCentroids;
  rs.Cluster(data, 5, assignments);

  // Calculate resulting centroids.
  resultingCentroids.zeros(3, 5);
  arma::Col<size_t> counts(5);
  counts.zeros();
  for (size_t i = 0; i < 3000; ++i)
  {
    resultingCentroids.col(assignments[i]) += data.col(i);
    ++counts[assignments[i]];
  }

  // Normalize centroids (the division must be per-column, not over the whole
  // matrix).
  for (size_t i = 0; i < 5; ++i)
    if (counts[i] != 0)
      resultingCentroids.col(i) /= counts[i];

  // Calculate sum of distances from centroid means.
  double distortion = 0;
  for (size_t i = 0; i < 3000; ++i)
    distortion += metric::EuclideanDistance::Evaluate(data.col(i),
        resultingCentroids.col(assignments[i]));

  // Using the refined start, the distance for this dataset is usually around
  // 13500.  Regular k-means is between 10000 and 30000 (I think the 10000
  // figure is a corner case which actually does not give good clusters), and
  // random initial starts give distortion around 22000.
So we'll require that // our distortion is less than 14000. BOOST_REQUIRE_LT(distortion, 14000.0); } #ifdef ARMA_HAS_SPMAT // Can't do this test on Armadillo 3.4; var(SpBase) is not implemented. #if !((ARMA_VERSION_MAJOR == 3) && (ARMA_VERSION_MINOR == 4)) /** * Make sure sparse k-means works okay. */ BOOST_AUTO_TEST_CASE(SparseKMeansTest) { // Huge dimensionality, few points. arma::SpMat data(5000, 12); data(14, 0) = 6.4; data(14, 1) = 6.3; data(14, 2) = 6.5; data(14, 3) = 6.2; data(14, 4) = 6.1; data(14, 5) = 6.6; data(1402, 6) = -3.2; data(1402, 7) = -3.3; data(1402, 8) = -3.1; data(1402, 9) = -3.4; data(1402, 10) = -3.5; data(1402, 11) = -3.0; arma::Row assignments; KMeans kmeans; // Default options. kmeans.Cluster(data, 2, assignments); size_t clusterOne = assignments[0]; size_t clusterTwo = assignments[6]; BOOST_REQUIRE_EQUAL(assignments[0], clusterOne); BOOST_REQUIRE_EQUAL(assignments[1], clusterOne); BOOST_REQUIRE_EQUAL(assignments[2], clusterOne); BOOST_REQUIRE_EQUAL(assignments[3], clusterOne); BOOST_REQUIRE_EQUAL(assignments[4], clusterOne); BOOST_REQUIRE_EQUAL(assignments[5], clusterOne); BOOST_REQUIRE_EQUAL(assignments[6], clusterTwo); BOOST_REQUIRE_EQUAL(assignments[7], clusterTwo); BOOST_REQUIRE_EQUAL(assignments[8], clusterTwo); BOOST_REQUIRE_EQUAL(assignments[9], clusterTwo); BOOST_REQUIRE_EQUAL(assignments[10], clusterTwo); BOOST_REQUIRE_EQUAL(assignments[11], clusterTwo); } #endif // Exclude Armadillo 3.4. #endif // ARMA_HAS_SPMAT BOOST_AUTO_TEST_CASE(ElkanTest) { const size_t trials = 5; for (size_t t = 0; t < trials; ++t) { arma::mat dataset(10, 1000); dataset.randu(); const size_t k = 5 * (t + 1); arma::mat centroids(10, k); centroids.randu(); // Make sure Elkan's algorithm and the naive method return the same // clusters. arma::mat naiveCentroids(centroids); KMeans<> km; arma::Row assignments; km.Cluster(dataset, k, assignments, naiveCentroids, false, true); KMeans elkan; arma::Row elkanAssignments; arma::mat elkanCentroids(centroids); elkan.Cluster(dataset, k, elkanAssignments, elkanCentroids, false, true); for (size_t i = 0; i < dataset.n_cols; ++i) BOOST_REQUIRE_EQUAL(assignments[i], elkanAssignments[i]); for (size_t i = 0; i < centroids.n_elem; ++i) BOOST_REQUIRE_CLOSE(naiveCentroids[i], elkanCentroids[i], 1e-5); } } BOOST_AUTO_TEST_CASE(HamerlyTest) { const size_t trials = 5; for (size_t t = 0; t < trials; ++t) { arma::mat dataset(10, 1000); dataset.randu(); const size_t k = 5 * (t + 1); arma::mat centroids(10, k); centroids.randu(); // Make sure Hamerly's algorithm and the naive method return the same // clusters. arma::mat naiveCentroids(centroids); KMeans<> km; arma::Row assignments; km.Cluster(dataset, k, assignments, naiveCentroids, false, true); KMeans hamerly; arma::Row hamerlyAssignments; arma::mat hamerlyCentroids(centroids); hamerly.Cluster(dataset, k, hamerlyAssignments, hamerlyCentroids, false, true); for (size_t i = 0; i < dataset.n_cols; ++i) BOOST_REQUIRE_EQUAL(assignments[i], hamerlyAssignments[i]); for (size_t i = 0; i < centroids.n_elem; ++i) BOOST_REQUIRE_CLOSE(naiveCentroids[i], hamerlyCentroids[i], 1e-5); } } BOOST_AUTO_TEST_CASE(PellegMooreTest) { const size_t trials = 5; for (size_t t = 0; t < trials; ++t) { arma::mat dataset(10, 1000); dataset.randu(); const size_t k = 5 * (t + 1); arma::mat centroids(10, k); centroids.randu(); // Make sure the Pelleg-Moore algorithm and the naive method return the same // clusters. 
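    // (Background note) The Pelleg-Moore "blacklist" algorithm builds a
    // kd-tree on the points; at each iteration, whole tree nodes are pruned
    // once all centroids but one can be ruled out for the node, and that
    // surviving centroid is assigned to every point in the node at once.  The
    // clustering itself should be identical to the naive implementation.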
BOOST_AUTO_TEST_CASE(PellegMooreTest)
{
  const size_t trials = 5;
  for (size_t t = 0; t < trials; ++t)
  {
    arma::mat dataset(10, 1000);
    dataset.randu();
    const size_t k = 5 * (t + 1);

    arma::mat centroids(10, k);
    centroids.randu();

    // Make sure the Pelleg-Moore algorithm and the naive method return the
    // same clusters.
    arma::mat naiveCentroids(centroids);
    KMeans<> km;
    arma::Row<size_t> assignments;
    km.Cluster(dataset, k, assignments, naiveCentroids, false, true);

    KMeans<metric::EuclideanDistance, SampleInitialization,
        MaxVarianceNewCluster, PellegMooreKMeans> pellegMoore;
    arma::Row<size_t> pmAssignments;
    arma::mat pmCentroids(centroids);
    pellegMoore.Cluster(dataset, k, pmAssignments, pmCentroids, false, true);

    for (size_t i = 0; i < dataset.n_cols; ++i)
      BOOST_REQUIRE_EQUAL(assignments[i], pmAssignments[i]);
    for (size_t i = 0; i < centroids.n_elem; ++i)
      BOOST_REQUIRE_CLOSE(naiveCentroids[i], pmCentroids[i], 1e-5);
  }
}

BOOST_AUTO_TEST_CASE(DTNNTest)
{
  const size_t trials = 5;
  for (size_t t = 0; t < trials; ++t)
  {
    arma::mat dataset(10, 1000);
    dataset.randu();
    const size_t k = 5 * (t + 1);

    arma::mat centroids(10, k);
    centroids.randu();

    arma::mat naiveCentroids(centroids);
    KMeans<> km;
    arma::Row<size_t> assignments;
    km.Cluster(dataset, k, assignments, naiveCentroids, false, true);

    KMeans<metric::EuclideanDistance, SampleInitialization,
        MaxVarianceNewCluster, DefaultDualTreeKMeans> dtnn;
    arma::Row<size_t> dtnnAssignments;
    arma::mat dtnnCentroids(centroids);
    dtnn.Cluster(dataset, k, dtnnAssignments, dtnnCentroids, false, true);

    for (size_t i = 0; i < dataset.n_cols; ++i)
      BOOST_REQUIRE_EQUAL(assignments[i], dtnnAssignments[i]);
    for (size_t i = 0; i < centroids.n_elem; ++i)
      BOOST_REQUIRE_CLOSE(naiveCentroids[i], dtnnCentroids[i], 1e-5);
  }
}

BOOST_AUTO_TEST_CASE(DTNNCoverTreeTest)
{
  const size_t trials = 5;
  for (size_t t = 0; t < trials; ++t)
  {
    arma::mat dataset(10, 1000);
    dataset.randu();
    const size_t k = 5;

    arma::mat centroids(10, k);
    centroids.randu();

    arma::mat naiveCentroids(centroids);
    KMeans<> km;
    arma::Row<size_t> assignments;
    km.Cluster(dataset, k, assignments, naiveCentroids, false, true);

    KMeans<metric::EuclideanDistance, SampleInitialization,
        MaxVarianceNewCluster, CoverTreeDualTreeKMeans> dtnn;
    arma::Row<size_t> dtnnAssignments;
    arma::mat dtnnCentroids(centroids);
    dtnn.Cluster(dataset, k, dtnnAssignments, dtnnCentroids, false, true);

    for (size_t i = 0; i < dataset.n_cols; ++i)
      BOOST_REQUIRE_EQUAL(assignments[i], dtnnAssignments[i]);
    for (size_t i = 0; i < centroids.n_elem; ++i)
      BOOST_REQUIRE_CLOSE(naiveCentroids[i], dtnnCentroids[i], 1e-5);
  }
}

/**
 * Make sure that the sample initialization strategy successfully samples
 * points from the dataset.
 */
BOOST_AUTO_TEST_CASE(SampleInitializationTest)
{
  arma::mat dataset = arma::randu<arma::mat>(5, 100);
  const size_t clusters = 10;

  arma::mat centroids;
  SampleInitialization::Cluster(dataset, clusters, centroids);

  // Check that the size of the matrix is correct.
  BOOST_REQUIRE_EQUAL(centroids.n_cols, 10);
  BOOST_REQUIRE_EQUAL(centroids.n_rows, 5);

  // Check that each entry in the matrix is some sample from the dataset.
  for (size_t i = 0; i < clusters; ++i)
  {
    // If the inner loop runs to completion without breaking, j will be equal
    // to dataset.n_cols and no matching point was found; if it breaks early,
    // centroid i matches some point in the dataset.
    size_t j;
    for (j = 0; j < dataset.n_cols; ++j)
    {
      const double distance = metric::EuclideanDistance::Evaluate(
          centroids.col(i), dataset.col(j));
      if (distance < 1e-10)
        break;
    }

    BOOST_REQUIRE_LT(j, dataset.n_cols);
  }
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/knn_test.cpp000066400000000000000000001376361315013601400205260ustar00rootroot00000000000000/**
 * @file knn_test.cpp
 *
 * Test file for the KNN class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/tree/cover_tree.hpp>
#include <mlpack/core/tree/example_tree.hpp>
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>
#include <mlpack/methods/neighbor_search/unmap.hpp>
#include <mlpack/methods/neighbor_search/ns_model.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::neighbor;
using namespace mlpack::tree;
using namespace mlpack::metric;
using namespace mlpack::bound;

BOOST_AUTO_TEST_SUITE(KNNTest);

/**
 * Test that Unmap() works in the dual-tree case (see unmap.hpp).
 */
BOOST_AUTO_TEST_CASE(DualTreeUnmapTest)
{
  std::vector<size_t> refMap;
  refMap.push_back(3);
  refMap.push_back(4);
  refMap.push_back(1);
  refMap.push_back(2);
  refMap.push_back(0);

  std::vector<size_t> queryMap;
  queryMap.push_back(2);
  queryMap.push_back(0);
  queryMap.push_back(4);
  queryMap.push_back(3);
  queryMap.push_back(1);
  queryMap.push_back(5);

  // Now generate some results.  6 queries, 5 references.
  arma::Mat<size_t> neighbors("3 1 2 0 4;"
                              "1 0 2 3 4;"
                              "0 1 2 3 4;"
                              "4 1 0 3 2;"
                              "3 0 4 1 2;"
                              "3 0 4 1 2;");
  neighbors = neighbors.t();

  // Integer distances will work fine here.
  arma::mat distances("3 1 2 0 4;"
                      "1 0 2 3 4;"
                      "0 1 2 3 4;"
                      "4 1 0 3 2;"
                      "3 0 4 1 2;"
                      "3 0 4 1 2;");
  distances = distances.t();

  // This is what the results should be when they are unmapped.
  arma::Mat<size_t> correctNeighbors("4 3 1 2 0;"
                                     "2 3 0 4 1;"
                                     "2 4 1 3 0;"
                                     "0 4 3 2 1;"
                                     "3 4 1 2 0;"
                                     "2 3 0 4 1;");
  correctNeighbors = correctNeighbors.t();

  arma::mat correctDistances("1 0 2 3 4;"
                             "3 0 4 1 2;"
                             "3 1 2 0 4;"
                             "4 1 0 3 2;"
                             "0 1 2 3 4;"
                             "3 0 4 1 2;");
  correctDistances = correctDistances.t();

  // Perform the unmapping.
  arma::Mat<size_t> neighborsOut;
  arma::mat distancesOut;

  Unmap(neighbors, distances, refMap, queryMap, neighborsOut, distancesOut);

  for (size_t i = 0; i < correctNeighbors.n_elem; ++i)
  {
    BOOST_REQUIRE_EQUAL(neighborsOut[i], correctNeighbors[i]);
    BOOST_REQUIRE_CLOSE(distancesOut[i], correctDistances[i], 1e-5);
  }

  // Now try taking the square root.
  Unmap(neighbors, distances, refMap, queryMap, neighborsOut, distancesOut,
      true);

  for (size_t i = 0; i < correctNeighbors.n_elem; ++i)
  {
    BOOST_REQUIRE_EQUAL(neighborsOut[i], correctNeighbors[i]);
    BOOST_REQUIRE_CLOSE(distancesOut[i], sqrt(correctDistances[i]), 1e-5);
  }
}

/**
 * Check that Unmap() works in the single-tree case.
 */
BOOST_AUTO_TEST_CASE(SingleTreeUnmapTest)
{
  std::vector<size_t> refMap;
  refMap.push_back(3);
  refMap.push_back(4);
  refMap.push_back(1);
  refMap.push_back(2);
  refMap.push_back(0);

  // Now generate some results.  6 queries, 5 references.
  arma::Mat<size_t> neighbors("3 1 2 0 4;"
                              "1 0 2 3 4;"
                              "0 1 2 3 4;"
                              "4 1 0 3 2;"
                              "3 0 4 1 2;"
                              "3 0 4 1 2;");
  neighbors = neighbors.t();

  // Integer distances will work fine here.
  arma::mat distances("3 1 2 0 4;"
                      "1 0 2 3 4;"
                      "0 1 2 3 4;"
                      "4 1 0 3 2;"
                      "3 0 4 1 2;"
                      "3 0 4 1 2;");
  distances = distances.t();

  // This is what the results should be when they are unmapped.
  arma::Mat<size_t> correctNeighbors("2 4 1 3 0;"
                                     "4 3 1 2 0;"
                                     "3 4 1 2 0;"
                                     "0 4 3 2 1;"
                                     "2 3 0 4 1;"
                                     "2 3 0 4 1;");
  correctNeighbors = correctNeighbors.t();

  arma::mat correctDistances = distances;

  // Perform the unmapping.
  arma::Mat<size_t> neighborsOut;
  arma::mat distancesOut;

  Unmap(neighbors, distances, refMap, neighborsOut, distancesOut);

  for (size_t i = 0; i < correctNeighbors.n_elem; ++i)
  {
    BOOST_REQUIRE_EQUAL(neighborsOut[i], correctNeighbors[i]);
    BOOST_REQUIRE_CLOSE(distancesOut[i], correctDistances[i], 1e-5);
  }

  // Now try taking the square root.
  Unmap(neighbors, distances, refMap, neighborsOut, distancesOut, true);

  for (size_t i = 0; i < correctNeighbors.n_elem; ++i)
  {
    BOOST_REQUIRE_EQUAL(neighborsOut[i], correctNeighbors[i]);
    BOOST_REQUIRE_CLOSE(distancesOut[i], sqrt(correctDistances[i]), 1e-5);
  }
}

/**
 * Test that an empty KNN object will throw exceptions when Search() is
 * called.
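 *
 * All three Search() overloads must refuse to run before a reference set has
 * been given via Train(); a sketch of the expectation (mirroring the test
 * body below):
 * @code
 * KNN empty;
 * // Throws std::invalid_argument -- no reference set:
 * // empty.Search(5, neighbors, distances);
 * @endcode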
*/ BOOST_AUTO_TEST_CASE(EmptySearchTest) { KNN empty; arma::mat dataset = arma::randu(5, 100); KNN::Tree queryTree(dataset); arma::Mat neighbors; arma::mat distances; BOOST_REQUIRE_THROW(empty.Search(dataset, 5, neighbors, distances), std::invalid_argument); BOOST_REQUIRE_THROW(empty.Search(5, neighbors, distances), std::invalid_argument); BOOST_REQUIRE_THROW(empty.Search(queryTree, 5, neighbors, distances), std::invalid_argument); } /** * Test that when training is performed, the results are the same. */ BOOST_AUTO_TEST_CASE(TrainTest) { KNN empty; arma::mat dataset = arma::randu(5, 100); KNN baseline(dataset); arma::Mat neighbors, baselineNeighbors; arma::mat distances, baselineDistances; empty.Train(dataset); empty.Search(5, neighbors, distances); baseline.Search(5, baselineNeighbors, baselineDistances); BOOST_REQUIRE_EQUAL(neighbors.n_rows, baselineNeighbors.n_rows); BOOST_REQUIRE_EQUAL(neighbors.n_cols, baselineNeighbors.n_cols); BOOST_REQUIRE_EQUAL(distances.n_rows, baselineDistances.n_rows); BOOST_REQUIRE_EQUAL(distances.n_cols, baselineDistances.n_cols); for (size_t i = 0; i < distances.n_elem; ++i) { if (std::abs(baselineDistances[i]) < 1e-5) BOOST_REQUIRE_SMALL(distances[i], 1e-5); else BOOST_REQUIRE_CLOSE(distances[i], baselineDistances[i], 1e-5); BOOST_REQUIRE_EQUAL(neighbors[i], baselineNeighbors[i]); } } /** * Test that when training is performed with a tree, the results are the same. */ BOOST_AUTO_TEST_CASE(TrainTreeTest) { KNN empty; arma::mat dataset = arma::randu(5, 100); KNN baseline(dataset); arma::Mat neighbors, baselineNeighbors; arma::mat distances, baselineDistances; std::vector oldFromNewReferences; KNN::Tree tree(dataset, oldFromNewReferences); empty.Train(std::move(tree)); empty.Search(5, neighbors, distances); baseline.Search(5, baselineNeighbors, baselineDistances); BOOST_REQUIRE_EQUAL(neighbors.n_rows, baselineNeighbors.n_rows); BOOST_REQUIRE_EQUAL(neighbors.n_cols, baselineNeighbors.n_cols); BOOST_REQUIRE_EQUAL(distances.n_rows, baselineDistances.n_rows); BOOST_REQUIRE_EQUAL(distances.n_cols, baselineDistances.n_cols); // We have to unmap the results. arma::mat tmpDistances(distances.n_rows, distances.n_cols); arma::Mat tmpNeighbors(neighbors.n_rows, neighbors.n_cols); for (size_t i = 0; i < distances.n_cols; ++i) { tmpDistances.col(oldFromNewReferences[i]) = distances.col(i); for (size_t j = 0; j < distances.n_rows; ++j) { tmpNeighbors(j, oldFromNewReferences[i]) = oldFromNewReferences[neighbors(j, i)]; } } for (size_t i = 0; i < distances.n_elem; ++i) { if (std::abs(baselineDistances[i]) < 1e-5) BOOST_REQUIRE_SMALL(tmpDistances[i], 1e-5); else BOOST_REQUIRE_CLOSE(tmpDistances[i], baselineDistances[i], 1e-5); BOOST_REQUIRE_EQUAL(tmpNeighbors[i], baselineNeighbors[i]); } } /** * Test that training with a tree throws an exception when in naive mode. */ BOOST_AUTO_TEST_CASE(NaiveTrainTreeTest) { KNN empty(NAIVE_MODE); arma::mat dataset = arma::randu(5, 100); KNN::Tree tree(dataset); BOOST_REQUIRE_THROW(empty.Train(std::move(tree)), std::invalid_argument); } /** * Test that the rvalue reference move constructor works. 
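 *
 * In brief (a sketch of the contract, mirroring the test body below):
 * @code
 * arma::mat copy(dataset);       // dataset is 3 x 200.
 * KNN moveknn(std::move(copy));  // Steals the matrix; copy.n_elem becomes 0.
 * // moveknn.ReferenceSet() now holds the original 3 x 200 data.
 * @endcode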
*/ BOOST_AUTO_TEST_CASE(MoveConstructorTest) { arma::mat dataset = arma::randu(3, 200); arma::mat copy(dataset); KNN moveknn(std::move(copy)); KNN knn(dataset); BOOST_REQUIRE_EQUAL(copy.n_elem, 0); BOOST_REQUIRE_EQUAL(moveknn.ReferenceSet().n_rows, 3); BOOST_REQUIRE_EQUAL(moveknn.ReferenceSet().n_cols, 200); arma::mat moveDistances, distances; arma::Mat moveNeighbors, neighbors; moveknn.Search(1, moveNeighbors, moveDistances); knn.Search(1, neighbors, distances); BOOST_REQUIRE_EQUAL(moveNeighbors.n_rows, neighbors.n_rows); BOOST_REQUIRE_EQUAL(moveNeighbors.n_cols, neighbors.n_cols); BOOST_REQUIRE_EQUAL(moveDistances.n_rows, distances.n_rows); BOOST_REQUIRE_EQUAL(moveDistances.n_cols, distances.n_cols); for (size_t i = 0; i < moveDistances.n_elem; ++i) { BOOST_REQUIRE_EQUAL(moveNeighbors[i], neighbors[i]); if (std::abs(distances[i]) < 1e-5) BOOST_REQUIRE_SMALL(moveDistances[i], 1e-5); else BOOST_REQUIRE_CLOSE(moveDistances[i], distances[i], 1e-5); } } /** * Test that the dataset can be retrained with the move Train() function. */ BOOST_AUTO_TEST_CASE(MoveTrainTest) { arma::mat dataset = arma::randu(3, 200); // Do it in tree mode, and in naive mode. KNN knn; knn.Train(std::move(dataset)); arma::mat distances; arma::Mat neighbors; knn.Search(1, neighbors, distances); BOOST_REQUIRE_EQUAL(dataset.n_elem, 0); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 200); BOOST_REQUIRE_EQUAL(distances.n_cols, 200); dataset = arma::randu(3, 300); knn.Naive() = true; knn.Train(std::move(dataset)); knn.Search(1, neighbors, distances); BOOST_REQUIRE_EQUAL(dataset.n_elem, 0); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 300); BOOST_REQUIRE_EQUAL(distances.n_cols, 300); } /** * Simple nearest-neighbors test with small, synthetic dataset. This is an * exhaustive test, which checks that each method for performing the calculation * (dual-tree, single-tree, naive) produces the correct results. An * eleven-point dataset and the ten nearest neighbors are taken. The dataset is * in one dimension for simplicity -- the correct functionality of distance * functions is not tested here. */ BOOST_AUTO_TEST_CASE(ExhaustiveSyntheticTest) { // Set up our data. arma::mat data(1, 11); data[0] = 0.05; // Row addressing is unnecessary (they are all 0). data[1] = 0.35; data[2] = 0.15; data[3] = 1.25; data[4] = 5.05; data[5] = -0.22; data[6] = -2.00; data[7] = -1.30; data[8] = 0.45; data[9] = 0.90; data[10] = 1.00; typedef KDTree, arma::mat> TreeType; // We will loop through three times, one for each method of performing the // calculation. std::vector oldFromNew; std::vector newFromOld; TreeType tree(data, oldFromNew, newFromOld, 1); KNN knn(std::move(tree)); for (int i = 0; i < 3; i++) { switch (i) { case 0: // Use the dual-tree method. knn.Naive() = false; knn.SingleMode() = false; break; case 1: // Use the single-tree method. knn.Naive() = false; knn.SingleMode() = true; break; case 2: // Use the naive method. knn.Naive() = true; break; } // Now perform the actual calculation. arma::Mat neighbors; arma::mat distances; knn.Search(10, neighbors, distances); // Now the exhaustive check for correctness. This will be long. We must // also remember that the distances returned are squared distances. As a // result, distance comparisons are written out as (distance * distance) for // readability. // Neighbors of point 0. 
BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[0]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[0]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[0]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[0]), 0.27, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[0]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[0]), 0.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[0]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[0]), 0.40, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[0]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[0]), 0.85, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[0]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[0]), 0.95, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[0]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[0]), 1.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[0]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[0]), 1.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[0]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[0]), 2.05, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[0]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[0]), 5.00, 1e-5); // Neighbors of point 1. BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[1]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[1]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[1]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[1]), 0.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[1]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[1]), 0.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[1]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[1]), 0.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[1]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[1]), 0.57, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[1]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[1]), 0.65, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[1]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[1]), 0.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[1]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[1]), 1.65, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[1]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[1]), 2.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[1]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[1]), 4.70, 1e-5); // Neighbors of point 2. 
BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[2]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[2]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[2]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[2]), 0.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[2]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[2]), 0.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[2]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[2]), 0.37, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[2]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[2]), 0.75, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[2]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[2]), 0.85, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[2]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[2]), 1.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[2]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[2]), 1.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[2]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[2]), 2.15, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[2]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[2]), 4.90, 1e-5); // Neighbors of point 3. BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[3]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[3]), 0.25, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[3]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[3]), 0.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[3]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[3]), 0.80, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[3]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[3]), 0.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[3]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[3]), 1.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[3]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[3]), 1.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[3]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[3]), 1.47, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[3]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[3]), 2.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[3]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[3]), 3.25, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[3]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[3]), 3.80, 1e-5); // Neighbors of point 4. 
BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[4]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[4]), 3.80, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[4]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[4]), 4.05, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[4]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[4]), 4.15, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[4]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[4]), 4.60, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[4]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[4]), 4.70, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[4]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[4]), 4.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[4]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[4]), 5.00, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[4]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[4]), 5.27, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[4]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[4]), 6.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[4]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[4]), 7.05, 1e-5); // Neighbors of point 5. BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[5]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[5]), 0.27, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[5]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[5]), 0.37, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[5]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[5]), 0.57, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[5]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[5]), 0.67, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[5]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[5]), 1.08, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[5]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[5]), 1.12, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[5]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[5]), 1.22, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[5]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[5]), 1.47, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[5]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[5]), 1.78, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[5]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[5]), 5.27, 1e-5); // Neighbors of point 6. 
BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[6]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[6]), 0.70, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[6]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[6]), 1.78, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[6]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[6]), 2.05, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[6]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[6]), 2.15, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[6]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[6]), 2.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[6]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[6]), 2.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[6]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[6]), 2.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[6]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[6]), 3.00, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[6]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[6]), 3.25, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[6]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[6]), 7.05, 1e-5); // Neighbors of point 7. BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[7]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[7]), 0.70, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[7]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[7]), 1.08, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[7]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[7]), 1.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[7]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[7]), 1.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[7]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[7]), 1.65, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[7]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[7]), 1.75, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[7]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[7]), 2.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[7]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[7]), 2.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[7]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[7]), 2.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[7]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[7]), 6.35, 1e-5); // Neighbors of point 8. 
BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[8]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[8]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[8]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[8]), 0.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[8]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[8]), 0.40, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[8]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[8]), 0.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[8]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[8]), 0.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[8]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[8]), 0.67, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[8]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[8]), 0.80, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[8]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[8]), 1.75, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[8]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[8]), 2.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[8]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[8]), 4.60, 1e-5); // Neighbors of point 9. BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[9]), newFromOld[10]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[9]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[9]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[9]), 0.35, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[9]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[9]), 0.45, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[9]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[9]), 0.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[9]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[9]), 0.75, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[9]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[9]), 0.85, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[9]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[9]), 1.12, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[9]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[9]), 2.20, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[9]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[9]), 2.90, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[9]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[9]), 4.15, 1e-5); // Neighbors of point 10. 
BOOST_REQUIRE_EQUAL(neighbors(0, newFromOld[10]), newFromOld[9]); BOOST_REQUIRE_CLOSE(distances(0, newFromOld[10]), 0.10, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(1, newFromOld[10]), newFromOld[3]); BOOST_REQUIRE_CLOSE(distances(1, newFromOld[10]), 0.25, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(2, newFromOld[10]), newFromOld[8]); BOOST_REQUIRE_CLOSE(distances(2, newFromOld[10]), 0.55, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(3, newFromOld[10]), newFromOld[1]); BOOST_REQUIRE_CLOSE(distances(3, newFromOld[10]), 0.65, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(4, newFromOld[10]), newFromOld[2]); BOOST_REQUIRE_CLOSE(distances(4, newFromOld[10]), 0.85, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(5, newFromOld[10]), newFromOld[0]); BOOST_REQUIRE_CLOSE(distances(5, newFromOld[10]), 0.95, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(6, newFromOld[10]), newFromOld[5]); BOOST_REQUIRE_CLOSE(distances(6, newFromOld[10]), 1.22, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(7, newFromOld[10]), newFromOld[7]); BOOST_REQUIRE_CLOSE(distances(7, newFromOld[10]), 2.30, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(8, newFromOld[10]), newFromOld[6]); BOOST_REQUIRE_CLOSE(distances(8, newFromOld[10]), 3.00, 1e-5); BOOST_REQUIRE_EQUAL(neighbors(9, newFromOld[10]), newFromOld[4]); BOOST_REQUIRE_CLOSE(distances(9, newFromOld[10]), 4.05, 1e-5); } } /** * Test the dual-tree nearest-neighbors method with the naive method. This * uses both a query and reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(DualTreeVsNaive1) { arma::mat dataset; // Hard-coded filename: bad? if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KNN knn(dataset); KNN naive(dataset, NAIVE_MODE); arma::Mat neighborsTree; arma::mat distancesTree; knn.Search(dataset, 15, neighborsTree, distancesTree); arma::Mat neighborsNaive; arma::mat distancesNaive; naive.Search(dataset, 15, neighborsNaive, distancesNaive); for (size_t i = 0; i < neighborsTree.n_elem; i++) { BOOST_REQUIRE_EQUAL(neighborsTree(i), neighborsNaive(i)); BOOST_REQUIRE_CLOSE(distancesTree(i), distancesNaive(i), 1e-5); } } /** * Test the dual-tree nearest-neighbors method with the naive method. This uses * only a reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(DualTreeVsNaive2) { arma::mat dataset; // Hard-coded filename: bad? // Code duplication: also bad! if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KNN knn(dataset); // Set naive mode. KNN naive(dataset, NAIVE_MODE); arma::Mat neighborsTree; arma::mat distancesTree; knn.Search(15, neighborsTree, distancesTree); arma::Mat neighborsNaive; arma::mat distancesNaive; naive.Search(15, neighborsNaive, distancesNaive); for (size_t i = 0; i < neighborsTree.n_elem; i++) { BOOST_REQUIRE_EQUAL(neighborsTree[i], neighborsNaive[i]); BOOST_REQUIRE_CLOSE(distancesTree[i], distancesNaive[i], 1e-5); } } /** * Test the single-tree nearest-neighbors method with the naive method. This * uses only a reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(SingleTreeVsNaive) { arma::mat dataset; // Hard-coded filename: bad? // Code duplication: also bad! if (!data::Load("test_data_3_1000.csv", dataset)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); KNN knn(dataset, SINGLE_TREE_MODE); // Set up computation for naive mode. 
KNN naive(dataset, NAIVE_MODE); arma::Mat neighborsTree; arma::mat distancesTree; knn.Search(15, neighborsTree, distancesTree); arma::Mat neighborsNaive; arma::mat distancesNaive; naive.Search(15, neighborsNaive, distancesNaive); for (size_t i = 0; i < neighborsTree.n_elem; i++) { BOOST_REQUIRE_EQUAL(neighborsTree[i], neighborsNaive[i]); BOOST_REQUIRE_CLOSE(distancesTree[i], distancesNaive[i], 1e-5); } } /** * Test the cover tree single-tree nearest-neighbors method against the naive * method. This uses only a random reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(SingleCoverTreeTest) { arma::mat data; data.randu(75, 1000); // 75 dimensional, 1000 points. StandardCoverTree, arma::mat> tree(data); NeighborSearch, arma::mat, StandardCoverTree> coverTreeSearch(std::move(tree), SINGLE_TREE_MODE); KNN naive(data, NAIVE_MODE); arma::Mat coverTreeNeighbors; arma::mat coverTreeDistances; coverTreeSearch.Search(15, coverTreeNeighbors, coverTreeDistances); arma::Mat naiveNeighbors; arma::mat naiveDistances; naive.Search(15, naiveNeighbors, naiveDistances); for (size_t i = 0; i < coverTreeNeighbors.n_elem; ++i) { BOOST_REQUIRE_EQUAL(coverTreeNeighbors[i], naiveNeighbors[i]); BOOST_REQUIRE_CLOSE(coverTreeDistances[i], naiveDistances[i], 1e-5); } } /** * Test the cover tree dual-tree nearest neighbors method against the naive * method. */ BOOST_AUTO_TEST_CASE(DualCoverTreeTest) { arma::mat dataset; data::Load("test_data_3_1000.csv", dataset); KNN tree(dataset); arma::Mat kdNeighbors; arma::mat kdDistances; tree.Search(dataset, 5, kdNeighbors, kdDistances); StandardCoverTree, arma::mat> referenceTree(dataset); NeighborSearch coverTreeSearch(std::move(referenceTree)); arma::Mat coverNeighbors; arma::mat coverDistances; coverTreeSearch.Search(dataset, 5, coverNeighbors, coverDistances); for (size_t i = 0; i < coverNeighbors.n_elem; ++i) { BOOST_REQUIRE_EQUAL(coverNeighbors(i), kdNeighbors(i)); BOOST_REQUIRE_CLOSE(coverDistances(i), kdDistances(i), 1e-5); } } /** * Test the ball tree single-tree nearest-neighbors method against the naive * method. This uses only a random reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(SingleBallTreeTest) { arma::mat data; data.randu(50, 300); // 50 dimensional, 300 points. typedef BallTree, arma::mat> TreeType; TreeType tree(data); KNN naive(tree.Dataset(), NAIVE_MODE); // BinarySpaceTree modifies data. Use modified data to maintain the // correspondance between points in the dataset for both methods. The order of // query points in both methods should be same. NeighborSearch ballTreeSearch(std::move(tree), SINGLE_TREE_MODE); arma::Mat ballTreeNeighbors; arma::mat ballTreeDistances; ballTreeSearch.Search(2, ballTreeNeighbors, ballTreeDistances); arma::Mat naiveNeighbors; arma::mat naiveDistances; naive.Search(2, naiveNeighbors, naiveDistances); for (size_t i = 0; i < ballTreeNeighbors.n_elem; ++i) { BOOST_REQUIRE_EQUAL(ballTreeNeighbors[i], naiveNeighbors[i]); BOOST_REQUIRE_CLOSE(ballTreeDistances[i], naiveDistances[i], 1e-5); } } /** * Test the ball tree dual-tree nearest neighbors method against the naive * method. 
*/ BOOST_AUTO_TEST_CASE(DualBallTreeTest) { arma::mat dataset; data::Load("test_data_3_1000.csv", dataset); KNN tree(dataset); arma::Mat kdNeighbors; arma::mat kdDistances; tree.Search(5, kdNeighbors, kdDistances); NeighborSearch ballTreeSearch(dataset); arma::Mat ballNeighbors; arma::mat ballDistances; ballTreeSearch.Search(5, ballNeighbors, ballDistances); for (size_t i = 0; i < ballNeighbors.n_elem; ++i) { BOOST_REQUIRE_EQUAL(ballNeighbors(i), kdNeighbors(i)); BOOST_REQUIRE_CLOSE(ballDistances(i), kdDistances(i), 1e-5); } } /** * Test the spill tree hybrid sp-tree search (defeatist search on overlapping * nodes, and backtracking in non-overlapping nodes) against the naive method. * This uses only a random reference dataset. */ BOOST_AUTO_TEST_CASE(HybridSpillSearchTest) { arma::mat dataset; dataset.randu(50, 300); // 50 dimensional, 300 points. const size_t k = 3; KNN naive(dataset); arma::Mat neighborsNaive; arma::mat distancesNaive; naive.Search(dataset, k, neighborsNaive, distancesNaive); double maxDist = 0; for (size_t i = 0; i < neighborsNaive.n_cols; ++i) if (distancesNaive(k - 1, i) > maxDist) maxDist = distancesNaive(k - 1, i); // If we are sure that tau is a valid strict upper bound of the kth nearest // neighbor of the query points, then we can be sure that we will get an exact // solution. SpillKNN::Tree referenceTree(dataset, maxDist * 1.01 /* tau parameter */); SpillKNN spTreeSearch(std::move(referenceTree)); for (size_t mode = 0; mode < 2; mode++) { spTreeSearch.SingleMode() = (mode == 0); arma::Mat neighborsSPTree; arma::mat distancesSPTree; spTreeSearch.Search(dataset, k, neighborsSPTree, distancesSPTree); for (size_t i = 0; i < neighborsSPTree.n_elem; ++i) { BOOST_REQUIRE_EQUAL(neighborsSPTree(i), neighborsNaive(i)); BOOST_REQUIRE_CLOSE(distancesSPTree(i), distancesNaive(i), 1e-5); } } } /** * Test hybrid sp-tree search doesn't repeat points. * This uses only a random reference dataset. */ BOOST_AUTO_TEST_CASE(DuplicatedSpillSearchTest) { arma::mat dataset; dataset.randu(50, 300); // 50 dimensional, 300 points. const size_t k = 15; for (size_t test = 0; test < 2; test++) { double tau = test * 0.1; SpillKNN::Tree referenceTree(dataset, tau); SpillKNN spTreeSearch(std::move(referenceTree)); arma::Mat neighborsSPTree; arma::mat distancesSPTree; for (size_t mode = 0; mode < 2; mode++) { spTreeSearch.SingleMode() = (mode == 0); spTreeSearch.Search(dataset, k, neighborsSPTree, distancesSPTree); for (size_t i = 0; i < neighborsSPTree.n_cols; ++i) { // Test that at least one point was found. BOOST_REQUIRE(distancesSPTree(0, i) != DBL_MAX); for (size_t j = 0; j < neighborsSPTree.n_rows; ++j) { if (distancesSPTree(j, i) == DBL_MAX) break; // All candidates with same distances must be different points. for (size_t k = j + 1; k < neighborsSPTree.n_rows && distancesSPTree(k, i) == distancesSPTree(j, i); ++k) BOOST_REQUIRE(neighborsSPTree(k, i) != neighborsSPTree(j, i)); } } } } } /** * Make sure sparse nearest neighbors works with kd trees. */ BOOST_AUTO_TEST_CASE(SparseKNNKDTreeTest) { // The dimensionality of these datasets must be high so that the probability // of a completely empty point is very low. In this case, with dimensionality // 70, the probability of all 70 dimensions being zero is 0.8^70 = 1.65e-7 in // the reference set and 0.9^70 = 6.27e-4 in the query set. 
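  // (To spell those figures out: sprandu() entries are independently nonzero
  // with the given density, so a 70-dimensional point (column) is entirely
  // zero with probability (1 - density)^70.  Note that the two numbers in the
  // comment above are swapped relative to the code below: the 0.2-density
  // matrix is the *query* set, giving 0.8^70 ~= 1.65e-7 per point, and the
  // 0.1-density matrix is the *reference* set, giving 0.9^70 ~= 6.27e-4 per
  // point.  Over 200 query and 500 reference points, the expected number of
  // empty points is therefore about 3.3e-5 and 0.31, respectively.)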
arma::sp_mat queryDataset; queryDataset.sprandu(70, 200, 0.2); arma::sp_mat referenceDataset; referenceDataset.sprandu(70, 500, 0.1); arma::mat denseQuery(queryDataset); arma::mat denseReference(referenceDataset); typedef NeighborSearch SparseKNN; SparseKNN a(referenceDataset); KNN naive(denseReference, NAIVE_MODE); arma::mat sparseDistances; arma::Mat sparseNeighbors; a.Search(queryDataset, 10, sparseNeighbors, sparseDistances); arma::mat naiveDistances; arma::Mat naiveNeighbors; naive.Search(denseQuery, 10, naiveNeighbors, naiveDistances); for (size_t i = 0; i < naiveNeighbors.n_cols; ++i) { for (size_t j = 0; j < naiveNeighbors.n_rows; ++j) { BOOST_REQUIRE_EQUAL(naiveNeighbors(j, i), sparseNeighbors(j, i)); BOOST_REQUIRE_CLOSE(naiveDistances(j, i), sparseDistances(j, i), 1e-5); } } } /* BOOST_AUTO_TEST_CASE(SparseKNNCoverTreeTest) { typedef CoverTree, FirstPointIsRoot, NeighborSearchStat, arma::sp_mat> SparseCoverTree; // The dimensionality of these datasets must be high so that the probability // of a completely empty point is very low. In this case, with dimensionality // 70, the probability of all 70 dimensions being zero is 0.8^70 = 1.65e-7 in // the reference set and 0.9^70 = 6.27e-4 in the query set. arma::sp_mat queryDataset; queryDataset.sprandu(50, 5000, 0.2); arma::sp_mat referenceDataset; referenceDataset.sprandu(50, 8000, 0.1); arma::mat denseQuery(queryDataset); arma::mat denseReference(referenceDataset); typedef NeighborSearch SparseKNN; arma::mat sparseDistances; arma::Mat sparseNeighbors; a.Search(10, sparseNeighbors, sparseDistances); arma::mat naiveDistances; arma::Mat naiveNeighbors; naive.Search(10, naiveNeighbors, naiveDistances); for (size_t i = 0; i < naiveNeighbors.n_cols; ++i) { for (size_t j = 0; j < naiveNeighbors.n_rows; ++j) { BOOST_REQUIRE_EQUAL(naiveNeighbors(j, i), sparseNeighbors(j, i)); BOOST_REQUIRE_CLOSE(naiveDistances(j, i), sparseDistances(j, i), 1e-5); } } } */ BOOST_AUTO_TEST_CASE(KNNModelTest) { // Ensure that we can build an NSModel and get correct // results. typedef NSModel KNNModel; arma::mat queryData = arma::randu(10, 50); arma::mat referenceData = arma::randu(10, 200); // Build all the possible models. 
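  // (There are 28 of them: 14 tree types, each constructed both with and
  // without the random-basis option -- the boolean second argument to the
  // NSModel constructor.)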
KNNModel models[28]; models[0] = KNNModel(KNNModel::TreeTypes::KD_TREE, true); models[1] = KNNModel(KNNModel::TreeTypes::KD_TREE, false); models[2] = KNNModel(KNNModel::TreeTypes::COVER_TREE, true); models[3] = KNNModel(KNNModel::TreeTypes::COVER_TREE, false); models[4] = KNNModel(KNNModel::TreeTypes::R_TREE, true); models[5] = KNNModel(KNNModel::TreeTypes::R_TREE, false); models[6] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, true); models[7] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, false); models[8] = KNNModel(KNNModel::TreeTypes::X_TREE, true); models[9] = KNNModel(KNNModel::TreeTypes::X_TREE, false); models[10] = KNNModel(KNNModel::TreeTypes::BALL_TREE, true); models[11] = KNNModel(KNNModel::TreeTypes::BALL_TREE, false); models[12] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, true); models[13] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, false); models[14] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, true); models[15] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, false); models[16] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, true); models[17] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, false); models[18] = KNNModel(KNNModel::TreeTypes::VP_TREE, true); models[19] = KNNModel(KNNModel::TreeTypes::VP_TREE, false); models[20] = KNNModel(KNNModel::TreeTypes::RP_TREE, true); models[21] = KNNModel(KNNModel::TreeTypes::RP_TREE, false); models[22] = KNNModel(KNNModel::TreeTypes::MAX_RP_TREE, true); models[23] = KNNModel(KNNModel::TreeTypes::MAX_RP_TREE, false); models[24] = KNNModel(KNNModel::TreeTypes::UB_TREE, true); models[25] = KNNModel(KNNModel::TreeTypes::UB_TREE, false); models[26] = KNNModel(KNNModel::TreeTypes::OCTREE, true); models[27] = KNNModel(KNNModel::TreeTypes::OCTREE, false); for (size_t j = 0; j < 2; ++j) { // Get a baseline. KNN knn(referenceData); arma::Mat baselineNeighbors; arma::mat baselineDistances; knn.Search(queryData, 3, baselineNeighbors, baselineDistances); for (size_t i = 0; i < 28; ++i) { // We only have std::move() constructors so make a copy of our data. arma::mat referenceCopy(referenceData); arma::mat queryCopy(queryData); if (j == 0) models[i].BuildModel(std::move(referenceCopy), 20, DUAL_TREE_MODE); if (j == 1) models[i].BuildModel(std::move(referenceCopy), 20, SINGLE_TREE_MODE); if (j == 2) models[i].BuildModel(std::move(referenceCopy), 20, NAIVE_MODE); arma::Mat neighbors; arma::mat distances; models[i].Search(std::move(queryCopy), 3, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_rows, baselineNeighbors.n_rows); BOOST_REQUIRE_EQUAL(neighbors.n_cols, baselineNeighbors.n_cols); BOOST_REQUIRE_EQUAL(neighbors.n_elem, baselineNeighbors.n_elem); BOOST_REQUIRE_EQUAL(distances.n_rows, baselineDistances.n_rows); BOOST_REQUIRE_EQUAL(distances.n_cols, baselineDistances.n_cols); BOOST_REQUIRE_EQUAL(distances.n_elem, baselineDistances.n_elem); for (size_t k = 0; k < distances.n_elem; ++k) { BOOST_REQUIRE_EQUAL(neighbors[k], baselineNeighbors[k]); if (std::abs(baselineDistances[k]) < 1e-5) BOOST_REQUIRE_SMALL(distances[k], 1e-5); else BOOST_REQUIRE_CLOSE(distances[k], baselineDistances[k], 1e-5); } } } } BOOST_AUTO_TEST_CASE(KNNModelMonochromaticTest) { // Ensure that we can build an NSModel and get correct // results, in the case where the reference set is the same as the query set. typedef NSModel KNNModel; arma::mat referenceData = arma::randu(10, 200); // Build all the possible models. 
KNNModel models[28]; models[0] = KNNModel(KNNModel::TreeTypes::KD_TREE, true); models[1] = KNNModel(KNNModel::TreeTypes::KD_TREE, false); models[2] = KNNModel(KNNModel::TreeTypes::COVER_TREE, true); models[3] = KNNModel(KNNModel::TreeTypes::COVER_TREE, false); models[4] = KNNModel(KNNModel::TreeTypes::R_TREE, true); models[5] = KNNModel(KNNModel::TreeTypes::R_TREE, false); models[6] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, true); models[7] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, false); models[8] = KNNModel(KNNModel::TreeTypes::X_TREE, true); models[9] = KNNModel(KNNModel::TreeTypes::X_TREE, false); models[10] = KNNModel(KNNModel::TreeTypes::BALL_TREE, true); models[11] = KNNModel(KNNModel::TreeTypes::BALL_TREE, false); models[12] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, true); models[13] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, false); models[14] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, true); models[15] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, false); models[16] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, true); models[17] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, false); models[18] = KNNModel(KNNModel::TreeTypes::VP_TREE, true); models[19] = KNNModel(KNNModel::TreeTypes::VP_TREE, false); models[20] = KNNModel(KNNModel::TreeTypes::RP_TREE, true); models[21] = KNNModel(KNNModel::TreeTypes::RP_TREE, false); models[22] = KNNModel(KNNModel::TreeTypes::MAX_RP_TREE, true); models[23] = KNNModel(KNNModel::TreeTypes::MAX_RP_TREE, false); models[24] = KNNModel(KNNModel::TreeTypes::UB_TREE, true); models[25] = KNNModel(KNNModel::TreeTypes::UB_TREE, false); models[26] = KNNModel(KNNModel::TreeTypes::OCTREE, true); models[27] = KNNModel(KNNModel::TreeTypes::OCTREE, false); for (size_t j = 0; j < 2; ++j) { // Get a baseline. KNN knn(referenceData); arma::Mat baselineNeighbors; arma::mat baselineDistances; knn.Search(3, baselineNeighbors, baselineDistances); for (size_t i = 0; i < 28; ++i) { // We only have a std::move() constructor... so copy the data. arma::mat referenceCopy(referenceData); if (j == 0) models[i].BuildModel(std::move(referenceCopy), 20, DUAL_TREE_MODE); if (j == 1) models[i].BuildModel(std::move(referenceCopy), 20, SINGLE_TREE_MODE); if (j == 2) models[i].BuildModel(std::move(referenceCopy), 20, NAIVE_MODE); arma::Mat neighbors; arma::mat distances; models[i].Search(3, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_rows, baselineNeighbors.n_rows); BOOST_REQUIRE_EQUAL(neighbors.n_cols, baselineNeighbors.n_cols); BOOST_REQUIRE_EQUAL(neighbors.n_elem, baselineNeighbors.n_elem); BOOST_REQUIRE_EQUAL(distances.n_rows, baselineDistances.n_rows); BOOST_REQUIRE_EQUAL(distances.n_cols, baselineDistances.n_cols); BOOST_REQUIRE_EQUAL(distances.n_elem, baselineDistances.n_elem); for (size_t k = 0; k < distances.n_elem; ++k) { BOOST_REQUIRE_EQUAL(neighbors[k], baselineNeighbors[k]); if (std::abs(baselineDistances[k]) < 1e-5) BOOST_REQUIRE_SMALL(distances[k], 1e-5); else BOOST_REQUIRE_CLOSE(distances[k], baselineDistances[k], 1e-5); } } } } /** * If we search twice with the same reference tree, the bounds need to be reset * before the second search. This test ensures that that happens, by making * sure the number of scores and base cases are equivalent for each search. 
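 *
 * (If the bounds cached in the tree's statistics were not reset, the second
 * search would begin with the first search's pruning bounds already in place,
 * so it would perform fewer base cases and scores than a fresh search -- and
 * the two counts checked below would differ.)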
 */
BOOST_AUTO_TEST_CASE(DoubleReferenceSearchTest)
{
  arma::mat dataset = arma::randu<arma::mat>(5, 500);
  KNN knn(std::move(dataset));

  arma::mat distances, secondDistances;
  arma::Mat<size_t> neighbors, secondNeighbors;
  knn.Search(3, neighbors, distances);

  size_t baseCases = knn.BaseCases();
  size_t scores = knn.Scores();

  knn.Search(3, secondNeighbors, secondDistances);

  BOOST_REQUIRE_EQUAL(knn.BaseCases(), baseCases);
  BOOST_REQUIRE_EQUAL(knn.Scores(), scores);
}

/**
 * Make sure that the neighborPtr matrix isn't accidentally deleted.
 * See issue #478.
 */
BOOST_AUTO_TEST_CASE(NeighborPtrDeleteTest)
{
  arma::mat dataset = arma::randu<arma::mat>(5, 100);

  // Build the tree ourselves.
  std::vector<size_t> oldFromNewReferences;
  KNN::Tree tree(dataset);
  KNN knn(std::move(tree));

  // Now make a query set.
  arma::mat queryset = arma::randu<arma::mat>(5, 50);
  arma::mat distances;
  arma::Mat<size_t> neighbors;
  knn.Search(queryset, 3, neighbors, distances);

  // These will (hopefully) fail if either the neighbors or the distances
  // matrix has been accidentally deleted.
  BOOST_REQUIRE_EQUAL(neighbors.n_cols, 50);
  BOOST_REQUIRE_EQUAL(neighbors.n_rows, 3);
  BOOST_REQUIRE_EQUAL(distances.n_cols, 50);
  BOOST_REQUIRE_EQUAL(distances.n_rows, 3);
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/krann_search_test.cpp000066400000000000000000000541461315013601400223720ustar00rootroot00000000000000/**
 * @file krann_search_test.cpp
 *
 * Unit tests for the 'RASearch' class and consequently the
 * 'RASearchRules' class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <time.h>
#include <mlpack/core.hpp>
#include <mlpack/core/metrics/lmetric.hpp>
#include <mlpack/core/tree/cover_tree.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"
#include <mlpack/methods/rann/ra_search.hpp>
#include <mlpack/methods/rann/ra_model.hpp>

using namespace std;
using namespace mlpack;
using namespace mlpack::neighbor;
using namespace mlpack::tree;
using namespace mlpack::metric;
using namespace mlpack::bound;

BOOST_AUTO_TEST_SUITE(KRANNTest);

// Test the correctness and guarantees of KRANN when in naive mode.
BOOST_AUTO_TEST_CASE(NaiveGuaranteeTest)
{
  arma::Mat<size_t> neighbors;
  arma::mat distances;

  arma::mat refData;
  arma::mat queryData;

  data::Load("rann_test_r_3_900.csv", refData, true);
  data::Load("rann_test_q_3_100.csv", queryData, true);

  RASearch<> rsRann(refData, true, false, 1.0);

  arma::mat qrRanks;
  data::Load("rann_test_qr_ranks.csv", qrRanks, true, false); // No transpose.

  size_t numRounds = 1000;
  arma::Col<size_t> numSuccessRounds(queryData.n_cols);
  numSuccessRounds.fill(0);

  // 1% of 900 is 9, so the rank is expected to be less than 10.
  size_t expectedRankErrorUB = 10;

  for (size_t rounds = 0; rounds < numRounds; rounds++)
  {
    rsRann.Search(queryData, 1, neighbors, distances);

    for (size_t i = 0; i < queryData.n_cols; i++)
      if (qrRanks(i, neighbors(0, i)) < expectedRankErrorUB)
        numSuccessRounds[i]++;

    neighbors.reset();
    distances.reset();
  }

  // Find the 95%-tile threshold so that 95% of the queries should pass this
  // threshold.
  size_t threshold = floor(numRounds * (0.95
      - (1.96 * sqrt(0.95 * 0.05 / numRounds))));
  size_t numQueriesFail = 0;
  for (size_t i = 0; i < queryData.n_cols; i++)
    if (numSuccessRounds[i] < threshold)
      numQueriesFail++;

  Log::Warn << "RANN-RS: RANN guarantee fails on " << numQueriesFail
      << " queries." << endl;

  // Assert that at most 5% of the queries fall out of this threshold.
  // 5% of 100 queries is 5.
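  // (Two notes on the numbers used here and in the other guarantee tests in
  // this file.  First, the threshold above is the lower end of an approximate
  // 95% confidence interval for a Binomial(numRounds, 0.95) success count:
  // the mean 0.95 * numRounds minus 1.96 standard deviations, where
  // sqrt(0.95 * 0.05 / numRounds) is the standard deviation of the success
  // *proportion*.  Second, maxNumQueriesFail is 6 rather than 5 because
  // BOOST_REQUIRE_LT checks a strict inequality: "fewer than 6" is exactly
  // "at most 5", i.e. at most 5% of the 100 queries.)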
size_t maxNumQueriesFail = 6; BOOST_REQUIRE_LT(numQueriesFail, maxNumQueriesFail); } // Test single-tree rank-approximate search (harder to test because of // the randomness involved). BOOST_AUTO_TEST_CASE(SingleTreeSearch) { arma::mat refData; arma::mat queryData; data::Load("rann_test_r_3_900.csv", refData, true); data::Load("rann_test_q_3_100.csv", queryData, true); // Search for 1 rank-approximate nearest-neighbors in the top 30% of the point // (rank error of 3). arma::Mat neighbors; arma::mat distances; RASearch<> tssRann(refData, false, true, 1.0, 0.95, false, false); // The relative ranks for the given query reference pair arma::Mat qrRanks; data::Load("rann_test_qr_ranks.csv", qrRanks, true, false); // No transpose. size_t numRounds = 1000; arma::Col numSuccessRounds(queryData.n_cols); numSuccessRounds.fill(0); // 1% of 900 is 9, so the rank is expected to be less than 10. size_t expectedRankErrorUB = 10; for (size_t rounds = 0; rounds < numRounds; rounds++) { tssRann.Search(queryData, 1, neighbors, distances); for (size_t i = 0; i < queryData.n_cols; i++) if (qrRanks(i, neighbors(0, i)) < expectedRankErrorUB) numSuccessRounds[i]++; neighbors.reset(); distances.reset(); } // Find the 95%-tile threshold so that 95% of the queries should pass this // threshold. size_t threshold = floor(numRounds * (0.95 - (1.96 * sqrt(0.95 * 0.05 / numRounds)))); size_t numQueriesFail = 0; for (size_t i = 0; i < queryData.n_cols; i++) if (numSuccessRounds[i] < threshold) numQueriesFail++; Log::Warn << "RANN-TSS: RANN guarantee fails on " << numQueriesFail << " queries." << endl; // Assert that at most 5% of the queries fall out of this threshold. // 5% of 100 queries is 5. size_t maxNumQueriesFail = 6; BOOST_REQUIRE_LT(numQueriesFail, maxNumQueriesFail); } // Test dual-tree rank-approximate search (harder to test because of the // randomness involved). BOOST_AUTO_TEST_CASE(DualTreeSearch) { arma::mat refData; arma::mat queryData; data::Load("rann_test_r_3_900.csv", refData, true); data::Load("rann_test_q_3_100.csv", queryData, true); // Search for 1 rank-approximate nearest-neighbors in the top 30% of the point // (rank error of 3). arma::Mat neighbors; arma::mat distances; RASearch<> tsdRann(refData, false, false, 1.0, 0.95, false, false, 5); arma::Mat qrRanks; data::Load("rann_test_qr_ranks.csv", qrRanks, true, false); // No transpose. size_t numRounds = 1000; arma::Col numSuccessRounds(queryData.n_cols); numSuccessRounds.fill(0); // 1% of 900 is 9, so the rank is expected to be less than 10. size_t expectedRankErrorUB = 10; // Build query tree by hand. typedef KDTree, arma::mat> TreeType; std::vector oldFromNewQueries; TreeType queryTree(queryData, oldFromNewQueries); for (size_t rounds = 0; rounds < numRounds; rounds++) { tsdRann.Search(&queryTree, 1, neighbors, distances); for (size_t i = 0; i < queryData.n_cols; i++) { const size_t oldIndex = oldFromNewQueries[i]; if (qrRanks(oldIndex, neighbors(0, i)) < expectedRankErrorUB) numSuccessRounds[i]++; } neighbors.reset(); distances.reset(); tsdRann.ResetQueryTree(&queryTree); } // Find the 95%-tile threshold so that 95% of the queries should pass this // threshold. size_t threshold = floor(numRounds * (0.95 - (1.96 * sqrt(0.95 * 0.05 / numRounds)))); size_t numQueriesFail = 0; for (size_t i = 0; i < queryData.n_cols; i++) if (numSuccessRounds[i] < threshold) numQueriesFail++; Log::Warn << "RANN-TSD: RANN guarantee fails on " << numQueriesFail << " queries." 
<< endl; // assert that at most 5% of the queries fall out of this threshold // 5% of 100 queries is 5. size_t maxNumQueriesFail = 6; BOOST_REQUIRE_LT(numQueriesFail, maxNumQueriesFail); } // Test rank-approximate search with just a single dataset. These tests just // ensure that the method runs okay. BOOST_AUTO_TEST_CASE(SingleDatasetNaiveSearch) { arma::mat dataset(5, 2500); dataset.randn(); arma::Mat neighbors; arma::mat distances; RASearch<> naive(dataset, true); naive.Search(1, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 1); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 2500); BOOST_REQUIRE_EQUAL(distances.n_rows, 1); BOOST_REQUIRE_EQUAL(distances.n_cols, 2500); } // Test rank-approximate search with just a single dataset in single-tree mode. // These tests just ensure that the method runs okay. BOOST_AUTO_TEST_CASE(SingleDatasetSingleSearch) { arma::mat dataset(5, 2500); dataset.randn(); arma::Mat neighbors; arma::mat distances; RASearch<> single(dataset, false, true); single.Search(1, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 1); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 2500); BOOST_REQUIRE_EQUAL(distances.n_rows, 1); BOOST_REQUIRE_EQUAL(distances.n_cols, 2500); } // Test rank-approximate search with just a single dataset in dual-tree mode. // These tests just ensure that the method runs okay. BOOST_AUTO_TEST_CASE(SingleDatasetSearch) { arma::mat dataset(5, 2500); dataset.randn(); arma::Mat neighbors; arma::mat distances; RASearch<> allkrann(dataset); allkrann.Search(1, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 1); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 2500); BOOST_REQUIRE_EQUAL(distances.n_rows, 1); BOOST_REQUIRE_EQUAL(distances.n_cols, 2500); } // Test single-tree rank-approximate search with cover trees. BOOST_AUTO_TEST_CASE(SingleCoverTreeTest) { arma::mat refData; arma::mat queryData; data::Load("rann_test_r_3_900.csv", refData, true); data::Load("rann_test_q_3_100.csv", queryData, true); // Search for 1 rank-approximate nearest-neighbors in the top 30% of the point // (rank error of 3). arma::Mat neighbors; arma::mat distances; typedef RASearch RACoverTreeSearch; RACoverTreeSearch tssRann(refData, false, true, 1.0, 0.95, false, false, 5); // The relative ranks for the given query reference pair. arma::Mat qrRanks; data::Load("rann_test_qr_ranks.csv", qrRanks, true, false); // No transpose. size_t numRounds = 1000; arma::Col numSuccessRounds(queryData.n_cols); numSuccessRounds.fill(0); // 1% of 900 is 9, so the rank is expected to be less than 10. size_t expectedRankErrorUB = 10; for (size_t rounds = 0; rounds < numRounds; rounds++) { tssRann.Search(queryData, 1, neighbors, distances); for (size_t i = 0; i < queryData.n_cols; i++) if (qrRanks(i, neighbors(0, i)) < expectedRankErrorUB) numSuccessRounds[i]++; neighbors.reset(); distances.reset(); } // Find the 95%-tile threshold so that 95% of the queries should pass this // threshold. size_t threshold = floor(numRounds * (0.95 - (1.96 * sqrt(0.95 * 0.05 / numRounds)))); size_t numQueriesFail = 0; for (size_t i = 0; i < queryData.n_cols; i++) if (numSuccessRounds[i] < threshold) numQueriesFail++; Log::Warn << "RANN-TSS (cover tree): RANN guarantee fails on " << numQueriesFail << " queries." << endl; // Assert that at most 5% of the queries fall out of this threshold. // 5% of 100 queries is 5. size_t maxNumQueriesFail = 6; BOOST_REQUIRE_LT(numQueriesFail, maxNumQueriesFail); } // Test dual-tree rank-approximate search with cover trees. 
BOOST_AUTO_TEST_CASE(DualCoverTreeTest) { arma::mat refData; arma::mat queryData; data::Load("rann_test_r_3_900.csv", refData, true); data::Load("rann_test_q_3_100.csv", queryData, true); // Search for 1 rank-approximate nearest-neighbors in the top 30% of the point // (rank error of 3). arma::Mat neighbors; arma::mat distances; typedef StandardCoverTree, arma::mat> TreeType; typedef RASearch RACoverTreeSearch; TreeType refTree(refData); TreeType queryTree(queryData); RACoverTreeSearch tsdRann(&refTree, false, 1.0, 0.95, false, false, 5); arma::Mat qrRanks; data::Load("rann_test_qr_ranks.csv", qrRanks, true, false); // No transpose. size_t numRounds = 1000; arma::Col numSuccessRounds(queryData.n_cols); numSuccessRounds.fill(0); // 1% of 900 is 9, so the rank is expected to be less than 10. size_t expectedRankErrorUB = 10; for (size_t rounds = 0; rounds < numRounds; rounds++) { tsdRann.Search(&queryTree, 1, neighbors, distances); for (size_t i = 0; i < queryData.n_cols; i++) if (qrRanks(i, neighbors(0, i)) < expectedRankErrorUB) numSuccessRounds[i]++; neighbors.reset(); distances.reset(); tsdRann.ResetQueryTree(&queryTree); } // Find the 95%-tile threshold so that 95% of the queries should pass this // threshold. size_t threshold = floor(numRounds * (0.95 - (1.96 * sqrt(0.95 * 0.05 / numRounds)))); size_t numQueriesFail = 0; for (size_t i = 0; i < queryData.n_cols; i++) if (numSuccessRounds[i] < threshold) numQueriesFail++; Log::Warn << "RANN-TSD (cover tree): RANN guarantee fails on " << numQueriesFail << " queries." << endl; // assert that at most 5% of the queries fall out of this threshold // 5% of 100 queries is 5. size_t maxNumQueriesFail = 6; BOOST_REQUIRE_LT(numQueriesFail, maxNumQueriesFail); } // Test single-tree rank-approximate search with ball trees. // This is known to not work right now. /* BOOST_AUTO_TEST_CASE(SingleBallTreeTest) { arma::mat refData; arma::mat queryData; data::Load("rann_test_r_3_900.csv", refData, true); data::Load("rann_test_q_3_100.csv", queryData, true); // Search for 1 rank-approximate nearest-neighbors in the top 30% of the point // (rank error of 3). arma::Mat neighbors; arma::mat distances; typedef BinarySpaceTree, RAQueryStat > TreeType; typedef RASearch RABallTreeSearch; RABallTreeSearch tssRann(refData, queryData, false, true); // The relative ranks for the given query reference pair. arma::Mat qrRanks; data::Load("rann_test_qr_ranks.csv", qrRanks, true, false); // No transpose. size_t numRounds = 30; arma::Col numSuccessRounds(queryData.n_cols); numSuccessRounds.fill(0); // 1% of 900 is 9, so the rank is expected to be less than 10. size_t expectedRankErrorUB = 10; for (size_t rounds = 0; rounds < numRounds; rounds++) { tssRann.Search(1, neighbors, distances, 1.0, 0.95, false, false, 5); for (size_t i = 0; i < queryData.n_cols; i++) if (qrRanks(i, neighbors(0, i)) < expectedRankErrorUB) numSuccessRounds[i]++; neighbors.reset(); distances.reset(); } // Find the 95%-tile threshold so that 95% of the queries should pass this // threshold. size_t threshold = floor(numRounds * (0.95 - (1.96 * sqrt(0.95 * 0.05 / numRounds)))); size_t numQueriesFail = 0; for (size_t i = 0; i < queryData.n_cols; i++) if (numSuccessRounds[i] < threshold) numQueriesFail++; Log::Warn << "RANN-TSS (ball tree): RANN guarantee fails on " << numQueriesFail << " queries." << endl; // Assert that at most 5% of the queries fall out of this threshold. // 5% of 100 queries is 5. 
size_t maxNumQueriesFail = 6; BOOST_REQUIRE_LT(numQueriesFail, maxNumQueriesFail); } // Test dual-tree rank-approximate search with ball trees. BOOST_AUTO_TEST_CASE(DualBallTreeTest) { arma::mat refData; arma::mat queryData; data::Load("rann_test_r_3_900.csv", refData, true); data::Load("rann_test_q_3_100.csv", queryData, true); // Search for 1 rank-approximate nearest neighbor in the top 30% of the points // (rank error of 3). arma::Mat<size_t> neighbors; arma::mat distances; typedef BinarySpaceTree, RAQueryStat > TreeType; typedef RASearch RABallTreeSearch; TreeType refTree(refData); TreeType queryTree(queryData); RABallTreeSearch tsdRann(&refTree, &queryTree, refData, queryData, false); arma::Mat<size_t> qrRanks; data::Load("rann_test_qr_ranks.csv", qrRanks, true, false); // No transpose. size_t numRounds = 1000; arma::Col<size_t> numSuccessRounds(queryData.n_cols); numSuccessRounds.fill(0); // 1% of 900 is 9, so the rank is expected to be less than 10. size_t expectedRankErrorUB = 10; for (size_t rounds = 0; rounds < numRounds; rounds++) { tsdRann.Search(1, neighbors, distances, 1.0, 0.95, false, false, 5); for (size_t i = 0; i < queryData.n_cols; i++) if (qrRanks(i, neighbors(0, i)) < expectedRankErrorUB) numSuccessRounds[i]++; neighbors.reset(); distances.reset(); tsdRann.ResetQueryTree(); } // Find the 95%-tile threshold so that 95% of the queries should pass this // threshold. size_t threshold = floor(numRounds * (0.95 - (1.96 * sqrt(0.95 * 0.05 / numRounds)))); size_t numQueriesFail = 0; for (size_t i = 0; i < queryData.n_cols; i++) if (numSuccessRounds[i] < threshold) numQueriesFail++; Log::Warn << "RANN-TSD (Ball tree): RANN guarantee fails on " << numQueriesFail << " queries." << endl; // Assert that at most 5% of the queries fall out of this threshold. // 5% of 100 queries is 5. size_t maxNumQueriesFail = 6; BOOST_REQUIRE_LT(numQueriesFail, maxNumQueriesFail); } */ /** * Make sure that the neighborPtr matrix isn't accidentally deleted. * See issue #478. */ BOOST_AUTO_TEST_CASE(NeighborPtrDeleteTest) { arma::mat dataset = arma::randu(5, 100); // Build the tree ourselves. std::vector<size_t> oldFromNewReferences; RASearch<>::Tree tree(dataset); RASearch<> allkrann(&tree); // Now make a query set. arma::mat queryset = arma::randu(5, 50); arma::mat distances; arma::Mat<size_t> neighbors; allkrann.Search(queryset, 3, neighbors, distances); // These will (hopefully) fail if either the neighbors or the distances matrix // has been accidentally deleted. BOOST_REQUIRE_EQUAL(neighbors.n_cols, 50); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 3); BOOST_REQUIRE_EQUAL(distances.n_cols, 50); BOOST_REQUIRE_EQUAL(distances.n_rows, 3); } /** * Test that the rvalue reference move constructor works. */ BOOST_AUTO_TEST_CASE(MoveConstructorTest) { arma::mat dataset = arma::randu(3, 200); arma::mat copy(dataset); KRANN moveknn(std::move(copy)); KRANN knn(dataset); BOOST_REQUIRE_EQUAL(copy.n_elem, 0); BOOST_REQUIRE_EQUAL(moveknn.ReferenceSet().n_rows, 3); BOOST_REQUIRE_EQUAL(moveknn.ReferenceSet().n_cols, 200); arma::mat moveDistances, distances; arma::Mat<size_t> moveNeighbors, neighbors; moveknn.Search(1, moveNeighbors, moveDistances); knn.Search(1, neighbors, distances); BOOST_REQUIRE_EQUAL(moveNeighbors.n_rows, neighbors.n_rows); BOOST_REQUIRE_EQUAL(moveNeighbors.n_cols, neighbors.n_cols); BOOST_REQUIRE_EQUAL(moveDistances.n_rows, distances.n_rows); BOOST_REQUIRE_EQUAL(moveDistances.n_cols, distances.n_cols); } /** * Test that the dataset can be retrained with the move Train() function.
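 * (A note on the semantics these checks rely on: Train(std::move(m)) takes * ownership of the matrix and leaves the moved-from object empty, which is * what the n_elem == 0 assertions below verify.)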
*/ BOOST_AUTO_TEST_CASE(MoveTrainTest) { arma::mat dataset = arma::randu(3, 200); // Do it in tree mode, and in naive mode. KRANN knn; knn.Train(std::move(dataset)); arma::mat distances; arma::Mat neighbors; knn.Search(1, neighbors, distances); BOOST_REQUIRE_EQUAL(dataset.n_elem, 0); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 200); BOOST_REQUIRE_EQUAL(distances.n_cols, 200); dataset = arma::randu(3, 300); knn.Naive() = true; knn.Train(std::move(dataset)); knn.Search(1, neighbors, distances); BOOST_REQUIRE_EQUAL(dataset.n_elem, 0); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 300); BOOST_REQUIRE_EQUAL(distances.n_cols, 300); } /** * Make sure the RAModel class works. */ BOOST_AUTO_TEST_CASE(RAModelTest) { // Ensure that we can build an RAModel and get correct // results. typedef RAModel KNNModel; arma::mat queryData, referenceData; data::Load("rann_test_r_3_900.csv", referenceData, true); data::Load("rann_test_q_3_100.csv", queryData, true); // Build all the possible models. KNNModel models[20]; models[0] = KNNModel(KNNModel::TreeTypes::KD_TREE, false); models[1] = KNNModel(KNNModel::TreeTypes::KD_TREE, true); models[2] = KNNModel(KNNModel::TreeTypes::COVER_TREE, false); models[3] = KNNModel(KNNModel::TreeTypes::COVER_TREE, true); models[4] = KNNModel(KNNModel::TreeTypes::R_TREE, false); models[5] = KNNModel(KNNModel::TreeTypes::R_TREE, true); models[6] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, false); models[7] = KNNModel(KNNModel::TreeTypes::R_STAR_TREE, true); models[8] = KNNModel(KNNModel::TreeTypes::X_TREE, false); models[9] = KNNModel(KNNModel::TreeTypes::X_TREE, true); models[10] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, false); models[11] = KNNModel(KNNModel::TreeTypes::HILBERT_R_TREE, true); models[12] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, false); models[13] = KNNModel(KNNModel::TreeTypes::R_PLUS_TREE, true); models[14] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, false); models[15] = KNNModel(KNNModel::TreeTypes::R_PLUS_PLUS_TREE, true); models[16] = KNNModel(KNNModel::TreeTypes::UB_TREE, false); models[17] = KNNModel(KNNModel::TreeTypes::UB_TREE, true); models[18] = KNNModel(KNNModel::TreeTypes::OCTREE, false); models[19] = KNNModel(KNNModel::TreeTypes::OCTREE, true); arma::Mat qrRanks; data::Load("rann_test_qr_ranks.csv", qrRanks, true, false); // No transpose. for (size_t j = 0; j < 3; ++j) { for (size_t i = 0; i < 20; ++i) { // We only have std::move() constructors so make a copy of our data. arma::mat referenceCopy(referenceData); if (j == 0) models[i].BuildModel(std::move(referenceCopy), 20, false, false); if (j == 1) models[i].BuildModel(std::move(referenceCopy), 20, false, true); if (j == 2) models[i].BuildModel(std::move(referenceCopy), 20, true, false); // Set the search parameters. models[i].Tau() = 1.0; models[i].Alpha() = 0.95; models[i].SampleAtLeaves() = false; models[i].FirstLeafExact() = false; models[i].SingleSampleLimit() = 5; arma::Mat neighbors; arma::mat distances; arma::Col numSuccessRounds(queryData.n_cols); numSuccessRounds.fill(0); // 1% of 900 is 9, so the rank is expected to be less than 10. size_t expectedRankErrorUB = 10; size_t numRounds = 100; for (size_t round = 0; round < numRounds; round++) { arma::mat queryCopy(queryData); models[i].Search(std::move(queryCopy), 1, neighbors, distances); for (size_t k = 0; k < queryData.n_cols; k++) if (qrRanks(k, neighbors(0, k)) < expectedRankErrorUB) numSuccessRounds[k]++; neighbors.reset(); distances.reset(); } // Find the 95%-tile threshold so that 95% of the queries should pass this // threshold. 
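// (A sketch of where this formula comes from, assuming each round is an // independent Bernoulli(0.95) trial per query: the per-query success count is // then Binomial(numRounds, 0.95), with mean 0.95 * numRounds and standard // deviation sqrt(numRounds * 0.95 * 0.05). The expression below is the // normal-approximation lower bound mean - 1.96 * sd; for numRounds = 100 it // gives floor(100 * (0.95 - 1.96 * sqrt(0.95 * 0.05 / 100))) = 90.)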
size_t threshold = floor(numRounds * (0.95 - (1.96 * sqrt(0.95 * 0.05 / numRounds)))); size_t numQueriesFail = 0; for (size_t k = 0; k < queryData.n_cols; k++) if (numSuccessRounds[k] < threshold) numQueriesFail++; // Assert that at most 5% of the queries fall out of this threshold. // (5% of 100 queries is 5, but the limit is relaxed well above that here.) size_t maxNumQueriesFail = 50; // See #734 for why this is so high. BOOST_REQUIRE_LT(numQueriesFail, maxNumQueriesFail); } } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/lars_test.cpp000066400000000000000000000154211315013601400206700ustar00rootroot00000000000000/** * @file lars_test.cpp * @author Nishant Mehta * * Test for LARS. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ // Note: We don't use BOOST_REQUIRE_CLOSE in the code below because we need // to use FPC_WEAK, and it's not at all intuitive how to do that. #include <mlpack/core.hpp> #include <mlpack/methods/lars/lars.hpp> #include <boost/test/unit_test.hpp> #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::regression; BOOST_AUTO_TEST_SUITE(LARSTest); void GenerateProblem(arma::mat& X, arma::vec& y, size_t nPoints, size_t nDims) { X = arma::randn(nDims, nPoints); arma::vec beta = arma::randn(nDims, 1); y = trans(X) * beta; } void LARSVerifyCorrectness(arma::vec beta, arma::vec errCorr, double lambda) { size_t nDims = beta.n_elem; const double tol = 1e-10; for (size_t j = 0; j < nDims; j++) { if (beta(j) == 0) { // Make sure that |errCorr(j)| <= lambda. BOOST_REQUIRE_SMALL(std::max(fabs(errCorr(j)) - lambda, 0.0), tol); } else if (beta(j) < 0) { // Make sure that errCorr(j) == lambda. BOOST_REQUIRE_SMALL(errCorr(j) - lambda, tol); } else // beta(j) > 0 { // Make sure that errCorr(j) == -lambda. BOOST_REQUIRE_SMALL(errCorr(j) + lambda, tol); } } } void LassoTest(size_t nPoints, size_t nDims, bool elasticNet, bool useCholesky) { arma::mat X; arma::vec y; for (size_t i = 0; i < 100; i++) { GenerateProblem(X, y, nPoints, nDims); // Armadillo's median is broken, so... arma::vec sortedAbsCorr = sort(abs(X * y)); double lambda1 = sortedAbsCorr(nDims / 2); double lambda2; if (elasticNet) lambda2 = lambda1 / 2; else lambda2 = 0; LARS lars(useCholesky, lambda1, lambda2); arma::vec betaOpt; lars.Train(X, y, betaOpt); arma::vec errCorr = (X * trans(X) + lambda2 * arma::eye(nDims, nDims)) * betaOpt - X * y; LARSVerifyCorrectness(betaOpt, errCorr, lambda1); } } BOOST_AUTO_TEST_CASE(LARSTestLassoCholesky) { LassoTest(100, 10, false, true); } BOOST_AUTO_TEST_CASE(LARSTestLassoGram) { LassoTest(100, 10, false, false); } BOOST_AUTO_TEST_CASE(LARSTestElasticNetCholesky) { LassoTest(100, 10, true, true); } BOOST_AUTO_TEST_CASE(LARSTestElasticNetGram) { LassoTest(100, 10, true, false); } // Ensure that LARS doesn't crash when the data has linearly dependent features // (meaning that there is a singularity). This test uses the Cholesky // factorization. BOOST_AUTO_TEST_CASE(CholeskySingularityTest) { arma::mat X; arma::mat Y; data::Load("lars_dependent_x.csv", X); data::Load("lars_dependent_y.csv", Y); arma::vec y = Y.row(0).t(); // Test for a couple values of lambda1.
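// (A sketch of why LARSVerifyCorrectness() asserts what it does: for the // elastic-net objective 0.5 * ||y - X^T beta||^2 + 0.5 * lambda2 * ||beta||^2 // + lambda1 * ||beta||_1, the gradient of the smooth part is exactly // errCorr = (X X^T + lambda2 I) beta - X y, and subgradient optimality // requires errCorr(j) = -lambda1 * sign(beta(j)) for nonzero coefficients and // |errCorr(j)| <= lambda1 for zero ones; those are the three cases checked.)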
for (double lambda1 = 0.0; lambda1 < 1.0; lambda1 += 0.1) { LARS lars(true, lambda1, 0.0); arma::vec betaOpt; lars.Train(X, y, betaOpt); arma::vec errCorr = (X * X.t()) * betaOpt - X * y; LARSVerifyCorrectness(betaOpt, errCorr, lambda1); } } // Same as the above test but with no cholesky factorization. BOOST_AUTO_TEST_CASE(NoCholeskySingularityTest) { arma::mat X; arma::mat Y; data::Load("lars_dependent_x.csv", X); data::Load("lars_dependent_y.csv", Y); arma::vec y = Y.row(0).t(); // Test for a couple values of lambda1. for (double lambda1 = 0.0; lambda1 < 1.0; lambda1 += 0.1) { LARS lars(false, lambda1, 0.0); arma::vec betaOpt; lars.Train(X, y, betaOpt); arma::vec errCorr = (X * X.t()) * betaOpt - X * y; // #373: this test fails on i386 only sometimes. // LARSVerifyCorrectness(betaOpt, errCorr, lambda1); } } // Make sure that Predict() provides reasonable enough solutions. BOOST_AUTO_TEST_CASE(PredictTest) { for (size_t i = 0; i < 2; ++i) { // Run with both true and false. bool useCholesky = bool(i); arma::mat X; arma::vec y; GenerateProblem(X, y, 1000, 100); for (double lambda1 = 0.0; lambda1 < 1.0; lambda1 += 0.2) { for (double lambda2 = 0.0; lambda2 < 1.0; lambda2 += 0.2) { LARS lars(useCholesky, lambda1, lambda2); arma::vec betaOpt; lars.Train(X, y, betaOpt); // Calculate what the actual error should be with these regression // parameters. arma::vec betaOptPred = (X * X.t()) * betaOpt; arma::vec predictions; lars.Predict(X, predictions); arma::vec adjPred = X * predictions; BOOST_REQUIRE_EQUAL(predictions.n_elem, 1000); for (size_t i = 0; i < betaOptPred.n_elem; ++i) { if (std::abs(betaOptPred[i]) < 1e-5) BOOST_REQUIRE_SMALL(adjPred[i], 1e-5); else BOOST_REQUIRE_CLOSE(adjPred[i], betaOptPred[i], 1e-5); } } } } } BOOST_AUTO_TEST_CASE(PredictRowMajorTest) { arma::mat X; arma::vec y; GenerateProblem(X, y, 1000, 100); // Set lambdas to 0. LARS lars(false, 0, 0); arma::vec betaOpt; lars.Train(X, y, betaOpt); // Get both row-major and column-major predictions. Make sure they are the // same. arma::vec rowMajorPred, colMajorPred; lars.Predict(X, colMajorPred); lars.Predict(X.t(), rowMajorPred, true); BOOST_REQUIRE_EQUAL(colMajorPred.n_elem, rowMajorPred.n_elem); for (size_t i = 0; i < colMajorPred.n_elem; ++i) { if (std::abs(colMajorPred[i]) < 1e-5) BOOST_REQUIRE_SMALL(rowMajorPred[i], 1e-5); else BOOST_REQUIRE_CLOSE(colMajorPred[i], rowMajorPred[i], 1e-5); } } /** * Make sure that if we train twice, there is no issue. */ BOOST_AUTO_TEST_CASE(RetrainTest) { arma::mat origX; arma::vec origY; GenerateProblem(origX, origY, 1000, 50); arma::mat newX; arma::vec newY; GenerateProblem(newX, newY, 750, 75); LARS lars(false, 0.1, 0.1); arma::vec betaOpt; lars.Train(origX, origY, betaOpt); // Now train on new data. lars.Train(newX, newY, betaOpt); arma::vec errCorr = (newX * trans(newX) + 0.1 * arma::eye(75, 75)) * betaOpt - newX * newY; LARSVerifyCorrectness(betaOpt, errCorr, 0.1); } /** * Make sure if we train twice using the Cholesky decomposition, there is no * issue. */ BOOST_AUTO_TEST_CASE(RetrainCholeskyTest) { arma::mat origX; arma::vec origY; GenerateProblem(origX, origY, 1000, 50); arma::mat newX; arma::vec newY; GenerateProblem(newX, newY, 750, 75); LARS lars(true, 0.1, 0.1); arma::vec betaOpt; lars.Train(origX, origY, betaOpt); // Now train on new data. 
lars.Train(newX, newY, betaOpt); arma::vec errCorr = (newX * trans(newX) + 0.1 * arma::eye(75, 75)) * betaOpt - newX * newY; LARSVerifyCorrectness(betaOpt, errCorr, 0.1); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/lbfgs_test.cpp000066400000000000000000000065241315013601400210300ustar00rootroot00000000000000/** * @file lbfgs_test.cpp * * Tests the L-BFGS optimizer on a couple test functions. * * @author Ryan Curtin (gth671b@mail.gatech.edu) * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "test_tools.hpp" using namespace mlpack::optimization; using namespace mlpack::optimization::test; BOOST_AUTO_TEST_SUITE(LBFGSTest); /** * Tests the L-BFGS optimizer using the Rosenbrock Function. */ BOOST_AUTO_TEST_CASE(RosenbrockFunctionTest) { RosenbrockFunction f; L_BFGS lbfgs(f); lbfgs.MaxIterations() = 10000; arma::vec coords = f.GetInitialPoint(); if (!lbfgs.Optimize(coords)) BOOST_FAIL("L-BFGS optimization reported failure."); double finalValue = f.Evaluate(coords); BOOST_REQUIRE_SMALL(finalValue, 1e-5); BOOST_REQUIRE_CLOSE(coords[0], 1.0, 1e-5); BOOST_REQUIRE_CLOSE(coords[1], 1.0, 1e-5); } /** * Tests the L-BFGS optimizer using the Wood Function. */ BOOST_AUTO_TEST_CASE(WoodFunctionTest) { WoodFunction f; L_BFGS lbfgs(f); lbfgs.MaxIterations() = 10000; arma::vec coords = f.GetInitialPoint(); if (!lbfgs.Optimize(coords)) BOOST_FAIL("L-BFGS optimization reported failure."); double finalValue = f.Evaluate(coords); BOOST_REQUIRE_SMALL(finalValue, 1e-5); BOOST_REQUIRE_CLOSE(coords[0], 1.0, 1e-5); BOOST_REQUIRE_CLOSE(coords[1], 1.0, 1e-5); BOOST_REQUIRE_CLOSE(coords[2], 1.0, 1e-5); BOOST_REQUIRE_CLOSE(coords[3], 1.0, 1e-5); } /** * Tests the L-BFGS optimizer using the generalized Rosenbrock function. This * is actually multiple tests, increasing the dimension by powers of 2, from 4 * dimensions to 1024 dimensions. */ BOOST_AUTO_TEST_CASE(GeneralizedRosenbrockFunctionTest) { for (int i = 2; i < 10; i++) { // Dimension: powers of 2 int dim = std::pow(2.0, i); GeneralizedRosenbrockFunction f(dim); L_BFGS lbfgs(f, 20); lbfgs.MaxIterations() = 10000; arma::vec coords = f.GetInitialPoint(); if (!lbfgs.Optimize(coords)) BOOST_FAIL("L-BFGS optimization reported failure."); double finalValue = f.Evaluate(coords); // Test the output to make sure it is correct. BOOST_REQUIRE_SMALL(finalValue, 1e-5); for (int j = 0; j < dim; j++) BOOST_REQUIRE_CLOSE(coords[j], 1.0, 1e-5); } } /** * Tests the L-BFGS optimizer using the Rosenbrock-Wood combined function. This * is a test on optimizing a matrix of coordinates. 
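 * (For reference: the generalized Rosenbrock function is * f(x) = sum_i [100 (x_{i+1} - x_i^2)^2 + (1 - x_i)^2], and both it and the * Wood function attain their global minimum value of 0 at the all-ones * point, which is why the assertions below compare every coordinate to 1.0 * and the final function value to 0.)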
*/ BOOST_AUTO_TEST_CASE(RosenbrockWoodFunctionTest) { RosenbrockWoodFunction f; L_BFGS lbfgs(f); lbfgs.MaxIterations() = 10000; arma::mat coords = f.GetInitialPoint(); if (!lbfgs.Optimize(coords)) BOOST_FAIL("L-BFGS optimization reported failure."); double finalValue = f.Evaluate(coords); BOOST_REQUIRE_SMALL(finalValue, 1e-5); for (int row = 0; row < 4; row++) { BOOST_REQUIRE_CLOSE((coords(row, 0)), 1.0, 1e-5); BOOST_REQUIRE_CLOSE((coords(row, 1)), 1.0, 1e-5); } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/lin_alg_test.cpp000066400000000000000000000162161315013601400213370ustar00rootroot00000000000000/** * @file lin_alg_test.cpp * @author Ryan Curtin * * Simple tests for things in the linalg__private namespace. * Partly so I can be sure that my changes are working. * Move to boost unit testing framework at some point. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" using namespace arma; using namespace mlpack; using namespace mlpack::math; BOOST_AUTO_TEST_SUITE(LinAlgTest); /** * Test for linalg__private::Center(). There are no edge cases here, so we'll * just try it once for now. */ BOOST_AUTO_TEST_CASE(TestCenterA) { mat tmp(5, 5); // [[0 0 0 0 0] // [1 2 3 4 5] // [2 4 6 8 10] // [3 6 9 12 15] // [4 8 12 16 20]] for (int row = 0; row < 5; row++) for (int col = 0; col < 5; col++) tmp(row, col) = row * (col + 1); mat tmp_out; Center(tmp, tmp_out); // average should be // [[0 3 6 9 12]]' // so result should be // [[ 0 0 0 0 0] // [-2 -1 0 1 2 ] // [-4 -2 0 2 4 ] // [-6 -3 0 3 6 ] // [-8 -4 0 4 8]] for (int row = 0; row < 5; row++) for (int col = 0; col < 5; col++) BOOST_REQUIRE_CLOSE(tmp_out(row, col), (double) (col - 2) * row, 1e-5); } BOOST_AUTO_TEST_CASE(TestCenterB) { mat tmp(5, 6); for (int row = 0; row < 5; row++) for (int col = 0; col < 6; col++) tmp(row, col) = row * (col + 1); mat tmp_out; Center(tmp, tmp_out); // average should be // [[0 3.5 7 10.5 14]]' // so result should be // [[ 0 0 0 0 0 0 ] // [-2.5 -1.5 -0.5 0.5 1.5 2.5] // [-5 -3 -1 1 3 5 ] // [-7.5 -4.5 -1.5 1.5 1.5 4.5] // [-10 -6 -2 2 6 10 ]] for (int row = 0; row < 5; row++) for (int col = 0; col < 6; col++) BOOST_REQUIRE_CLOSE(tmp_out(row, col), (double) (col - 2.5) * row, 1e-5); } BOOST_AUTO_TEST_CASE(TestWhitenUsingEig) { // After whitening using eigendecomposition, the covariance of // our matrix will be I (or something very close to that). // We are loading a matrix from an external file... bad choice. mat tmp, tmp_centered, whitened, whitening_matrix; data::Load("trainSet.csv", tmp); Center(tmp, tmp_centered); WhitenUsingEig(tmp_centered, whitened, whitening_matrix); mat newcov = ccov(whitened); for (int row = 0; row < 5; row++) { for (int col = 0; col < 5; col++) { if (row == col) { // diagonal will be 0 in the case of any zero-valued eigenvalues // (rank-deficient covariance case) if (std::abs(newcov(row, col)) > 1e-10) BOOST_REQUIRE_CLOSE(newcov(row, col), 1.0, 1e-10); } else { BOOST_REQUIRE_SMALL(newcov(row, col), 1e-10); } } } } BOOST_AUTO_TEST_CASE(TestOrthogonalize) { // Generate a random matrix; then, orthogonalize it and test if it's // orthogonal. 
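// (What the check relies on: if the columns produced by Orthogonalize() are // mutually orthogonal with equal norms, the covariance computed by ccov() // below should be approximately a scalar multiple of the identity, so every // diagonal entry should match test(0, 0) and every off-diagonal entry should // vanish.)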
mat tmp, orth; data::Load("fake.csv", tmp); Orthogonalize(tmp, orth); // test orthogonality mat test = ccov(orth); double ival = test(0, 0); for (size_t row = 0; row < test.n_rows; row++) { for (size_t col = 0; col < test.n_cols; col++) { if (row == col) { if (std::abs(test(row, col)) > 1e-10) BOOST_REQUIRE_CLOSE(test(row, col), ival, 1e-10); } else { BOOST_REQUIRE_SMALL(test(row, col), 1e-10); } } } } // Test RemoveRows(). BOOST_AUTO_TEST_CASE(TestRemoveRows) { // Run this test several times. for (size_t run = 0; run < 10; ++run) { arma::mat input; input.randu(200, 200); // Now pick some random numbers. std::vector rowsToRemove; size_t row = 0; while (row < 200) { row += RandInt(1, (2 * (run + 1) + 1)); if (row < 200) { rowsToRemove.push_back(row); } } // Ensure we're not about to remove every single row. if (rowsToRemove.size() == 10) { rowsToRemove.erase(rowsToRemove.begin() + 4); // Random choice to remove. } arma::mat output; RemoveRows(input, rowsToRemove, output); // Now check that the output is right. size_t outputRow = 0; size_t skipIndex = 0; for (row = 0; row < 200; ++row) { // Was this row supposed to be removed? If so skip it. if ((skipIndex < rowsToRemove.size()) && (rowsToRemove[skipIndex] == row)) { ++skipIndex; } else { // Compare. BOOST_REQUIRE_EQUAL(accu(input.row(row) == output.row(outputRow)), 200); // Increment output row counter. ++outputRow; } } } } BOOST_AUTO_TEST_CASE(TestSvecSmat) { arma::mat X(3, 3); X(0, 0) = 0; X(0, 1) = 1, X(0, 2) = 2; X(1, 0) = 1; X(1, 1) = 3, X(1, 2) = 4; X(2, 0) = 2; X(2, 1) = 4, X(2, 2) = 5; arma::vec sx; Svec(X, sx); BOOST_REQUIRE_CLOSE(sx(0), 0, 1e-7); BOOST_REQUIRE_CLOSE(sx(1), M_SQRT2 * 1., 1e-7); BOOST_REQUIRE_CLOSE(sx(2), M_SQRT2 * 2., 1e-7); BOOST_REQUIRE_CLOSE(sx(3), 3., 1e-7); BOOST_REQUIRE_CLOSE(sx(4), M_SQRT2 * 4., 1e-7); BOOST_REQUIRE_CLOSE(sx(5), 5., 1e-7); arma::mat Xtest; Smat(sx, Xtest); BOOST_REQUIRE_EQUAL(Xtest.n_rows, 3); BOOST_REQUIRE_EQUAL(Xtest.n_cols, 3); for (size_t i = 0; i < 3; i++) for (size_t j = 0; j < 3; j++) BOOST_REQUIRE_CLOSE(X(i, j), Xtest(i, j), 1e-7); } BOOST_AUTO_TEST_CASE(TestSparseSvec) { arma::sp_mat X; X.zeros(3, 3); X(1, 0) = X(0, 1) = 1; arma::sp_vec sx; Svec(X, sx); const double v0 = sx(0); const double v1 = sx(1); const double v2 = sx(2); const double v3 = sx(3); const double v4 = sx(4); const double v5 = sx(5); BOOST_REQUIRE_CLOSE(v0, 0, 1e-7); BOOST_REQUIRE_CLOSE(v1, M_SQRT2 * 1., 1e-7); BOOST_REQUIRE_CLOSE(v2, 0, 1e-7); BOOST_REQUIRE_CLOSE(v3, 0, 1e-7); BOOST_REQUIRE_CLOSE(v4, 0, 1e-7); BOOST_REQUIRE_CLOSE(v5, 0, 1e-7); } BOOST_AUTO_TEST_CASE(TestSymKronIdSimple) { arma::mat A(3, 3); A(0, 0) = 1; A(0, 1) = 2, A(0, 2) = 3; A(1, 0) = 2; A(1, 1) = 4, A(1, 2) = 5; A(2, 0) = 3; A(2, 1) = 5, A(2, 2) = 6; arma::mat Op; SymKronId(A, Op); const arma::mat X = A + arma::ones(3, 3); arma::vec sx; Svec(X, sx); const arma::vec lhs = Op * sx; const arma::mat Rhs = 0.5 * (A * X + X * A); arma::vec rhs; Svec(Rhs, rhs); BOOST_REQUIRE_EQUAL(lhs.n_elem, rhs.n_elem); for (size_t j = 0; j < lhs.n_elem; j++) BOOST_REQUIRE_CLOSE(lhs(j), rhs(j), 1e-5); } BOOST_AUTO_TEST_CASE(TestSymKronId) { const size_t n = 10; arma::mat A = arma::randu(n, n); A += A.t(); arma::mat Op; SymKronId(A, Op); for (size_t i = 0; i < 5; i++) { arma::mat X = arma::randu(n, n); X += X.t(); arma::vec sx; Svec(X, sx); const arma::vec lhs = Op * sx; const arma::mat Rhs = 0.5 * (A * X + X * A); arma::vec rhs; Svec(Rhs, rhs); BOOST_REQUIRE_EQUAL(lhs.n_elem, rhs.n_elem); for (size_t j = 0; j < lhs.n_elem; j++) BOOST_REQUIRE_CLOSE(lhs(j), rhs(j), 
1e-5); } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/linear_regression_test.cpp000066400000000000000000000142761315013601400234500ustar00rootroot00000000000000/** * @file linear_regression_test.cpp * * Test for linear regression. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::regression; BOOST_AUTO_TEST_SUITE(LinearRegressionTest); /** * Creates two 10x3 random matrices and one 10x1 "results" matrix. * Finds B in y=BX with one matrix, then predicts against the other. */ BOOST_AUTO_TEST_CASE(LinearRegressionTestCase) { // Predictors and points are 10x3 matrices. arma::mat predictors(3, 10); arma::mat points(3, 10); // Responses is the "correct" value for each point in predictors and points. arma::vec responses(10); // The values we get back when we predict for points. arma::vec predictions(10); // We'll randomly select some coefficients for the linear response. arma::vec coeffs; coeffs.randu(4); // Now generate each point. for (size_t row = 0; row < 3; row++) predictors.row(row) = arma::linspace(0, 9, 10); points = predictors; // Now add a small amount of noise to each point. for (size_t elem = 0; elem < points.n_elem; elem++) { // Max added noise is 0.02. points[elem] += math::Random() / 50.0; predictors[elem] += math::Random() / 50.0; } // Generate responses. for (size_t elem = 0; elem < responses.n_elem; elem++) responses[elem] = coeffs[0] + dot(coeffs.rows(1, 3), arma::ones(3) * elem); // Initialize and predict. LinearRegression lr(predictors, responses); lr.Predict(points, predictions); // Output result and verify we have less than 5% error from "correct" value // for each point. for (size_t i = 0; i < predictions.n_cols; ++i) BOOST_REQUIRE_SMALL(predictions(i) - responses(i), .05); } /** * Check the functionality of ComputeError(). */ BOOST_AUTO_TEST_CASE(ComputeErrorTest) { arma::mat predictors; predictors << 0 << 1 << 2 << 4 << 8 << 16 << arma::endr << 16 << 8 << 4 << 2 << 1 << 0 << arma::endr; arma::vec responses = "0 2 4 3 8 8"; // http://www.mlpack.org/trac/ticket/298 // This dataset gives a cost of 1.189500337 (as calculated in Octave). LinearRegression lr(predictors, responses); BOOST_REQUIRE_CLOSE(lr.ComputeError(predictors, responses), 1.189500337, 1e-3); } /** * Ensure that the cost is 0 when a perfectly-fitting dataset is given. */ BOOST_AUTO_TEST_CASE(ComputeErrorPerfectFitTest) { // Linear regression should perfectly model this dataset. arma::mat predictors; predictors << 0 << 1 << 2 << 1 << 6 << 2 << arma::endr << 0 << 1 << 2 << 2 << 2 << 6 << arma::endr; arma::vec responses = "0 2 4 3 8 8"; LinearRegression lr(predictors, responses); BOOST_REQUIRE_SMALL(lr.ComputeError(predictors, responses), 1e-25); } /** * Test ridge regression using an empty dataset, which is not invertible. But * the ridge regression part should make it invertible. */ BOOST_AUTO_TEST_CASE(RidgeRegressionTest) { // Create empty dataset. arma::mat data; data.zeros(10, 5000); // 10-dimensional, 5000 points. arma::vec responses; responses.zeros(5000); // 5000 points. // Any lambda greater than 0 works to make the predictors covariance matrix // invertible. 
If ridge regression is not working correctly, then the matrix // will not be invertible and the test should segfault (or something else // ugly). LinearRegression lr(data, responses, 0.0001); // Now just make sure that it predicts some more zeros. arma::vec predictedResponses; lr.Predict(data, predictedResponses); for (size_t i = 0; i < 5000; ++i) BOOST_REQUIRE_SMALL((double) predictedResponses[i], 1e-20); } /** * Creates two 10x3 random matrices and one 10x1 "results" matrix. * Finds B in y=BX with one matrix, then predicts against the other, but uses * ridge regression with an extremely small lambda value. */ BOOST_AUTO_TEST_CASE(RidgeRegressionTestCase) { // Predictors and points are 10x3 matrices. arma::mat predictors(3, 10); arma::mat points(3, 10); // Responses is the "correct" value for each point in predictors and points. arma::vec responses(10); // The values we get back when we predict for points. arma::vec predictions(10); // We'll randomly select some coefficients for the linear response. arma::vec coeffs; coeffs.randu(4); // Now generate each point. for (size_t row = 0; row < 3; row++) predictors.row(row) = arma::linspace(0, 9, 10); points = predictors; // Now add a small amount of noise to each point. for (size_t elem = 0; elem < points.n_elem; elem++) { // Max added noise is 0.02. points[elem] += math::Random() / 50.0; predictors[elem] += math::Random() / 50.0; } // Generate responses. for (size_t elem = 0; elem < responses.n_elem; elem++) responses[elem] = coeffs[0] + dot(coeffs.rows(1, 3), arma::ones(3) * elem); // Initialize and predict with very small lambda. LinearRegression lr(predictors, responses, 0.001); lr.Predict(points, predictions); // Output result and verify we have less than 5% error from "correct" value // for each point. for (size_t i = 0; i < predictions.n_cols; ++i) BOOST_REQUIRE_SMALL(predictions(i) - responses(i), .05); } /** * Test that a LinearRegression model trained in the constructor and trained in * the Train() method give the same model. */ BOOST_AUTO_TEST_CASE(LinearRegressionTrainTest) { // Random dataset. arma::mat dataset = arma::randu(5, 1000); arma::vec responses = arma::randu(1000); LinearRegression lr(dataset, responses, 0.3); LinearRegression lrTrain; lrTrain.Lambda() = 0.3; lrTrain.Train(dataset, responses); BOOST_REQUIRE_EQUAL(lr.Parameters().n_elem, lrTrain.Parameters().n_elem); for (size_t i = 0; i < lr.Parameters().n_elem; ++i) BOOST_REQUIRE_CLOSE(lr.Parameters()[i], lrTrain.Parameters()[i], 1e-5); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/load_save_test.cpp000066400000000000000000001401301315013601400216600ustar00rootroot00000000000000/** * @file load_save_test.cpp * @author Ryan Curtin * * Tests for data::Load() and data::Save(). * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::data; using namespace std; BOOST_AUTO_TEST_SUITE(LoadSaveTest); /** * Make sure failure occurs when no extension given. */ BOOST_AUTO_TEST_CASE(NoExtensionLoad) { arma::mat out; BOOST_REQUIRE(data::Load("noextension", out) == false); } /** * Make sure failure occurs when no extension given. 
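 * (Background these tests rely on: data::Load() and data::Save() dispatch on * the file extension to choose a format, and with the fatal flag left false * they report failure by returning false instead of aborting.)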
*/ BOOST_AUTO_TEST_CASE(NoExtensionSave) { arma::mat out; BOOST_REQUIRE(data::Save("noextension", out) == false); } /** * Make sure load fails if the file does not exist. */ BOOST_AUTO_TEST_CASE(NotExistLoad) { arma::mat out; BOOST_REQUIRE(data::Load("nonexistentfile_______________.csv", out) == false); } /** * Make sure a CSV is loaded correctly. */ BOOST_AUTO_TEST_CASE(LoadCSVTest) { fstream f; f.open("test_file.csv", fstream::out); f << "1, 2, 3, 4" << endl; f << "5, 6, 7, 8" << endl; f.close(); arma::mat test; BOOST_REQUIRE(data::Load("test_file.csv", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.csv"); } /** * Make sure a TSV is loaded correctly. */ BOOST_AUTO_TEST_CASE(LoadTSVTest) { fstream f; f.open("test_file.csv", fstream::out); f << "1\t2\t3\t4" << endl; f << "5\t6\t7\t8" << endl; f.close(); arma::mat test; BOOST_REQUIRE(data::Load("test_file.csv", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.csv"); } /** * Test TSV loading with .tsv extension. */ BOOST_AUTO_TEST_CASE(LoadTSVExtensionTest) { fstream f; f.open("test_file.tsv", fstream::out); f << "1\t2\t3\t4" << endl; f << "5\t6\t7\t8" << endl; f.close(); arma::mat test; BOOST_REQUIRE(data::Load("test_file.tsv", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.tsv"); } /** * Make sure a CSV is saved correctly. */ BOOST_AUTO_TEST_CASE(SaveCSVTest) { arma::mat test = "1 5;" "2 6;" "3 7;" "4 8;"; BOOST_REQUIRE(data::Save("test_file.csv", test) == true); // Load it in and make sure it is the same. arma::mat test2; BOOST_REQUIRE(data::Load("test_file.csv", test2) == true); BOOST_REQUIRE_EQUAL(test2.n_rows, 4); BOOST_REQUIRE_EQUAL(test2.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test2[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.csv"); } /** * Make sure CSVs can be loaded in transposed form. */ BOOST_AUTO_TEST_CASE(LoadTransposedCSVTest) { fstream f; f.open("test_file.csv", fstream::out); f << "1, 2, 3, 4" << endl; f << "5, 6, 7, 8" << endl; f.close(); arma::mat test; BOOST_REQUIRE(data::Load("test_file.csv", test, false, true) == true); BOOST_REQUIRE_EQUAL(test.n_cols, 2); BOOST_REQUIRE_EQUAL(test.n_rows, 4); for (size_t i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.csv"); } /** * The test LoadColVecCSVTest, LoadMatinColVec, LoadRowVecCSVTest need to run in * debug mode only; without debugging symbols, size checks are not performed and * thus the exception will not be thrown. */ /** * Make sure ColVec can be loaded. */ #ifdef DEBUG BOOST_AUTO_TEST_CASE(LoadColVecCSVTest) { fstream f; f.open("test_file.csv", fstream::out); for (int i = 0; i < 8; ++i) f << i << endl; f.close(); arma::colvec test; BOOST_REQUIRE(data::Load("test_file.csv", test, false) == true); BOOST_REQUIRE_EQUAL(test.n_cols, 1); BOOST_REQUIRE_EQUAL(test.n_rows, 8); for (size_t i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i), 1e-5); // Remove the file. 
remove("test_file.csv"); } /** * Make sure Load() throws an exception when trying to load a matrix into a * colvec or rowvec. */ BOOST_AUTO_TEST_CASE(LoadMatinColVec) { fstream f; f.open("test_file.csv", fstream::out); f << "1, 2" << endl; f << "3, 4" << endl; f.close(); arma::colvec coltest; BOOST_REQUIRE_THROW(data::Load("test_file.csv", coltest, false), std::logic_error); Timer::Stop("loading_data"); arma::rowvec rowtest; BOOST_REQUIRE_THROW(data::Load("test_file.csv", rowtest, false), std::logic_error); Timer::Stop("loading_data"); remove("test_file.csv"); } /** * Make sure that rowvecs can be loaded successfully. */ BOOST_AUTO_TEST_CASE(LoadRowVecCSVTest) { fstream f; f.open("test_file.csv", fstream::out); for (int i = 0 ; i < 7; ++i) f << i << ","; f << "7"; f << endl; f.close(); arma::rowvec test; BOOST_REQUIRE(data::Load("test_file.csv", test, false) == true); BOOST_REQUIRE_EQUAL(test.n_cols, 8); BOOST_REQUIRE_EQUAL(test.n_rows, 1); for (size_t i = 0; i < 8 ; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i) , 1e-5); remove("test_file.csv"); } #endif /** * Make sure TSVs can be loaded in transposed form. */ BOOST_AUTO_TEST_CASE(LoadTransposedTSVTest) { fstream f; f.open("test_file.csv", fstream::out); f << "1\t2\t3\t4" << endl; f << "5\t6\t7\t8" << endl; f.close(); arma::mat test; BOOST_REQUIRE(data::Load("test_file.csv", test, false, true) == true); BOOST_REQUIRE_EQUAL(test.n_cols, 2); BOOST_REQUIRE_EQUAL(test.n_rows, 4); for (size_t i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.csv"); } /** * Check TSV loading with .tsv extension. */ BOOST_AUTO_TEST_CASE(LoadTransposedTSVExtensionTest) { fstream f; f.open("test_file.tsv", fstream::out); f << "1\t2\t3\t4" << endl; f << "5\t6\t7\t8" << endl; f.close(); arma::mat test; BOOST_REQUIRE(data::Load("test_file.tsv", test, false, true) == true); BOOST_REQUIRE_EQUAL(test.n_cols, 2); BOOST_REQUIRE_EQUAL(test.n_rows, 4); for (size_t i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.tsv"); } /** * Make sure CSVs can be loaded in non-transposed form. */ BOOST_AUTO_TEST_CASE(LoadNonTransposedCSVTest) { fstream f; f.open("test_file.csv", fstream::out); f << "1, 3, 5, 7" << endl; f << "2, 4, 6, 8" << endl; f.close(); arma::mat test; BOOST_REQUIRE(data::Load("test_file.csv", test, false, false) == true); BOOST_REQUIRE_EQUAL(test.n_cols, 4); BOOST_REQUIRE_EQUAL(test.n_rows, 2); for (size_t i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.csv"); } /** * Make sure CSVs can be saved in non-transposed form. */ BOOST_AUTO_TEST_CASE(SaveNonTransposedCSVTest) { arma::mat test = "1 2;" "3 4;" "5 6;" "7 8;"; BOOST_REQUIRE(data::Save("test_file.csv", test, false, false) == true); // Load it in and make sure it is in the same. arma::mat test2; BOOST_REQUIRE(data::Load("test_file.csv", test2, false, false) == true); BOOST_REQUIRE_EQUAL(test2.n_rows, 4); BOOST_REQUIRE_EQUAL(test2.n_cols, 2); for (size_t i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], test2[i], 1e-5); // Remove the file. remove("test_file.csv"); } /** * Make sure arma_ascii is loaded correctly. 
*/ BOOST_AUTO_TEST_CASE(LoadArmaASCIITest) { arma::mat test = "1 5;" "2 6;" "3 7;" "4 8;"; arma::mat testTrans = trans(test); BOOST_REQUIRE(testTrans.save("test_file.txt", arma::arma_ascii)); BOOST_REQUIRE(data::Load("test_file.txt", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.txt"); } /** * Make sure a CSV is saved correctly. */ BOOST_AUTO_TEST_CASE(SaveArmaASCIITest) { arma::mat test = "1 5;" "2 6;" "3 7;" "4 8;"; BOOST_REQUIRE(data::Save("test_file.txt", test) == true); // Load it in and make sure it is the same. BOOST_REQUIRE(data::Load("test_file.txt", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.txt"); } /** * Make sure raw_ascii is loaded correctly. */ BOOST_AUTO_TEST_CASE(LoadRawASCIITest) { fstream f; f.open("test_file.txt", fstream::out); f << "1 2 3 4" << endl; f << "5 6 7 8" << endl; f.close(); arma::mat test; BOOST_REQUIRE(data::Load("test_file.txt", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.txt"); } /** * Make sure CSV is loaded correctly as .txt. */ BOOST_AUTO_TEST_CASE(LoadCSVTxtTest) { fstream f; f.open("test_file.txt", fstream::out); f << "1, 2, 3, 4" << endl; f << "5, 6, 7, 8" << endl; f.close(); arma::mat test; BOOST_REQUIRE(data::Load("test_file.txt", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.txt"); } /** * Make sure arma_binary is loaded correctly. */ BOOST_AUTO_TEST_CASE(LoadArmaBinaryTest) { arma::mat test = "1 5;" "2 6;" "3 7;" "4 8;"; arma::mat testTrans = trans(test); BOOST_REQUIRE(testTrans.quiet_save("test_file.bin", arma::arma_binary) == true); // Now reload through our interface. BOOST_REQUIRE(data::Load("test_file.bin", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.bin"); } /** * Make sure arma_binary is saved correctly. */ BOOST_AUTO_TEST_CASE(SaveArmaBinaryTest) { arma::mat test = "1 5;" "2 6;" "3 7;" "4 8;"; BOOST_REQUIRE(data::Save("test_file.bin", test) == true); BOOST_REQUIRE(data::Load("test_file.bin", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.bin"); } /** * Make sure raw_binary is loaded correctly. */ BOOST_AUTO_TEST_CASE(LoadRawBinaryTest) { arma::mat test = "1 2;" "3 4;" "5 6;" "7 8;"; arma::mat testTrans = trans(test); BOOST_REQUIRE(testTrans.quiet_save("test_file.bin", arma::raw_binary) == true); // Now reload through our interface. BOOST_REQUIRE(data::Load("test_file.bin", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 1); BOOST_REQUIRE_EQUAL(test.n_cols, 8); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.bin"); } /** * Make sure load as PGM is successful. 
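 * (An assumption behind this round trip: pgm_binary writes an 8-bit grayscale * PGM image, so exact recovery is only expected for small non-negative * integer entries like the 1 through 8 used in these tests.)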
*/ BOOST_AUTO_TEST_CASE(LoadPGMBinaryTest) { arma::mat test = "1 5;" "2 6;" "3 7;" "4 8;"; arma::mat testTrans = trans(test); BOOST_REQUIRE(testTrans.quiet_save("test_file.pgm", arma::pgm_binary) == true); // Now reload through our interface. BOOST_REQUIRE(data::Load("test_file.pgm", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.pgm"); } /** * Make sure save as PGM is successful. */ BOOST_AUTO_TEST_CASE(SavePGMBinaryTest) { arma::mat test = "1 5;" "2 6;" "3 7;" "4 8;"; BOOST_REQUIRE(data::Save("test_file.pgm", test) == true); // Now reload through our interface. BOOST_REQUIRE(data::Load("test_file.pgm", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; i++) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Remove the file. remove("test_file.pgm"); } // Don't perform any HDF5 tests on Armadillo 4.300-4.400 (inclusive). A bug // causes loading to fail. #if ((ARMA_VERSION_MAJOR == 4) && \ (ARMA_VERSION_MINOR < 300 || ARMA_VERSION_MINOR > 400)) || \ (ARMA_VERSION_MAJOR >= 5) #if defined(ARMA_USE_HDF5) /** * Make sure load as HDF5 is successful. */ BOOST_AUTO_TEST_CASE(LoadHDF5Test) { arma::mat test = "1 5;" "2 6;" "3 7;" "4 8;"; arma::mat testTrans = trans(test); BOOST_REQUIRE(testTrans.quiet_save("test_file.h5", arma::hdf5_binary) == true); BOOST_REQUIRE(testTrans.quiet_save("test_file.hdf5", arma::hdf5_binary) == true); BOOST_REQUIRE(testTrans.quiet_save("test_file.hdf", arma::hdf5_binary) == true); BOOST_REQUIRE(testTrans.quiet_save("test_file.he5", arma::hdf5_binary) == true); // Now reload through our interface. BOOST_REQUIRE(data::Load("test_file.h5", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Make sure the other extensions work too. BOOST_REQUIRE(data::Load("test_file.hdf5", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); BOOST_REQUIRE(data::Load("test_file.hdf", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); BOOST_REQUIRE(data::Load("test_file.he5", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); remove("test_file.h5"); remove("test_file.hdf"); remove("test_file.hdf5"); remove("test_file.he5"); } /** * Make sure save as HDF5 is successful. */ BOOST_AUTO_TEST_CASE(SaveHDF5Test) { arma::mat test = "1 5;" "2 6;" "3 7;" "4 8;"; BOOST_REQUIRE(data::Save("test_file.h5", test) == true); BOOST_REQUIRE(data::Save("test_file.hdf5", test) == true); BOOST_REQUIRE(data::Save("test_file.hdf", test) == true); BOOST_REQUIRE(data::Save("test_file.he5", test) == true); // Now load them all and verify they were saved okay. BOOST_REQUIRE(data::Load("test_file.h5", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); // Make sure the other extensions work too. 
BOOST_REQUIRE(data::Load("test_file.hdf5", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); BOOST_REQUIRE(data::Load("test_file.hdf", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); BOOST_REQUIRE(data::Load("test_file.he5", test) == true); BOOST_REQUIRE_EQUAL(test.n_rows, 4); BOOST_REQUIRE_EQUAL(test.n_cols, 2); for (int i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(test[i], (double) (i + 1), 1e-5); remove("test_file.h5"); remove("test_file.hdf"); remove("test_file.hdf5"); remove("test_file.he5"); } #else /** * Ensure saving as HDF5 fails. */ BOOST_AUTO_TEST_CASE(NoHDF5Test) { arma::mat test; test.randu(5, 5); // Stop warnings. BOOST_REQUIRE(data::Save("test_file.h5", test) == false); BOOST_REQUIRE(data::Save("test_file.hdf5", test) == false); BOOST_REQUIRE(data::Save("test_file.hdf", test) == false); BOOST_REQUIRE(data::Save("test_file.he5", test) == false); } #endif #endif /** * Test normalization of labels. */ BOOST_AUTO_TEST_CASE(NormalizeLabelSmallDatasetTest) { arma::irowvec labels("-1 1 1 -1 -1 -1 1 1"); arma::Row newLabels; arma::ivec mappings; data::NormalizeLabels(labels, newLabels, mappings); BOOST_REQUIRE_EQUAL(mappings[0], -1); BOOST_REQUIRE_EQUAL(mappings[1], 1); BOOST_REQUIRE_EQUAL(newLabels[0], 0); BOOST_REQUIRE_EQUAL(newLabels[1], 1); BOOST_REQUIRE_EQUAL(newLabels[2], 1); BOOST_REQUIRE_EQUAL(newLabels[3], 0); BOOST_REQUIRE_EQUAL(newLabels[4], 0); BOOST_REQUIRE_EQUAL(newLabels[5], 0); BOOST_REQUIRE_EQUAL(newLabels[6], 1); BOOST_REQUIRE_EQUAL(newLabels[7], 1); arma::irowvec revertedLabels; data::RevertLabels(newLabels, mappings, revertedLabels); for (size_t i = 0; i < labels.n_elem; ++i) BOOST_REQUIRE_EQUAL(labels[i], revertedLabels[i]); } /** * Harder label normalization test. */ BOOST_AUTO_TEST_CASE(NormalizeLabelTest) { arma::rowvec randLabels(5000); for (size_t i = 0; i < 5000; ++i) randLabels[i] = math::RandInt(-50, 50); randLabels[0] = 0.65; // Hey, doubles work too! arma::Row newLabels; arma::vec mappings; data::NormalizeLabels(randLabels, newLabels, mappings); // Now map them back and ensure they are right. arma::rowvec revertedLabels(5000); data::RevertLabels(newLabels, mappings, revertedLabels); for (size_t i = 0; i < 5000; ++i) BOOST_REQUIRE_EQUAL(randLabels[i], revertedLabels[i]); } // Test structures. class TestInner { public: TestInner(char c, string s) : c(c), s(s) { } template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(c, "char"); ar & data::CreateNVP(s, "string"); } // Public members for testing. char c; string s; }; class Test { public: Test(int x, int y) : x(x), y(y), ina('a', "hello"), inb('b', "goodbye") { } template void Serialize(Archive& ar, const unsigned int /* version */) { ar & data::CreateNVP(x, "x"); ar & data::CreateNVP(y, "y"); ar & data::CreateNVP(ina, "ina"); ar & data::CreateNVP(inb, "inb"); } // Public members for testing. int x; int y; TestInner ina; TestInner inb; }; /** * Make sure we can load and save. */ BOOST_AUTO_TEST_CASE(LoadBinaryTest) { Test x(10, 12); BOOST_REQUIRE_EQUAL(data::Save("test.bin", "x", x, false), true); // Now reload. 
Test y(11, 14); BOOST_REQUIRE_EQUAL(data::Load("test.bin", "x", y, false), true); BOOST_REQUIRE_EQUAL(y.x, x.x); BOOST_REQUIRE_EQUAL(y.y, x.y); BOOST_REQUIRE_EQUAL(y.ina.c, x.ina.c); BOOST_REQUIRE_EQUAL(y.ina.s, x.ina.s); BOOST_REQUIRE_EQUAL(y.inb.c, x.inb.c); BOOST_REQUIRE_EQUAL(y.inb.s, x.inb.s); } /** * Make sure we can load and save. */ BOOST_AUTO_TEST_CASE(LoadXMLTest) { Test x(10, 12); BOOST_REQUIRE_EQUAL(data::Save("test.xml", "x", x, false), true); // Now reload. Test y(11, 14); BOOST_REQUIRE_EQUAL(data::Load("test.xml", "x", y, false), true); BOOST_REQUIRE_EQUAL(y.x, x.x); BOOST_REQUIRE_EQUAL(y.y, x.y); BOOST_REQUIRE_EQUAL(y.ina.c, x.ina.c); BOOST_REQUIRE_EQUAL(y.ina.s, x.ina.s); BOOST_REQUIRE_EQUAL(y.inb.c, x.inb.c); BOOST_REQUIRE_EQUAL(y.inb.s, x.inb.s); } /** * Make sure we can load and save. */ BOOST_AUTO_TEST_CASE(LoadTextTest) { Test x(10, 12); BOOST_REQUIRE_EQUAL(data::Save("test.txt", "x", x, false), true); // Now reload. Test y(11, 14); BOOST_REQUIRE_EQUAL(data::Load("test.txt", "x", y, false), true); BOOST_REQUIRE_EQUAL(y.x, x.x); BOOST_REQUIRE_EQUAL(y.y, x.y); BOOST_REQUIRE_EQUAL(y.ina.c, x.ina.c); BOOST_REQUIRE_EQUAL(y.ina.s, x.ina.s); BOOST_REQUIRE_EQUAL(y.inb.c, x.inb.c); BOOST_REQUIRE_EQUAL(y.inb.s, x.inb.s); } /** * Test DatasetInfo by making a map for a dimension. */ BOOST_AUTO_TEST_CASE(DatasetInfoTest) { DatasetInfo di(100); // Do all types default to numeric? for (size_t i = 0; i < 100; ++i) { BOOST_REQUIRE(di.Type(i) == Datatype::numeric); BOOST_REQUIRE_EQUAL(di.NumMappings(i), 0); } // Okay. Add some mappings for dimension 3. const size_t first = di.MapString("test_mapping_1", 3); const size_t second = di.MapString("test_mapping_2", 3); const size_t third = di.MapString("test_mapping_3", 3); BOOST_REQUIRE_EQUAL(first, 0); BOOST_REQUIRE_EQUAL(second, 1); BOOST_REQUIRE_EQUAL(third, 2); // Now dimension 3 should be categorical. for (size_t i = 0; i < 100; ++i) { if (i == 3) { BOOST_REQUIRE(di.Type(i) == Datatype::categorical); BOOST_REQUIRE_EQUAL(di.NumMappings(i), 3); } else { BOOST_REQUIRE(di.Type(i) == Datatype::numeric); BOOST_REQUIRE_EQUAL(di.NumMappings(i), 0); } } // Get the mappings back. const string& strFirst = di.UnmapString(first, 3); const string& strSecond = di.UnmapString(second, 3); const string& strThird = di.UnmapString(third, 3); BOOST_REQUIRE_EQUAL(strFirst, "test_mapping_1"); BOOST_REQUIRE_EQUAL(strSecond, "test_mapping_2"); BOOST_REQUIRE_EQUAL(strThird, "test_mapping_3"); } /** * Test loading regular CSV with DatasetInfo. Everything should be numeric. */ BOOST_AUTO_TEST_CASE(RegularCSVDatasetInfoLoad) { vector testFiles; testFiles.push_back("fake.csv"); testFiles.push_back("german.csv"); testFiles.push_back("iris.csv"); testFiles.push_back("vc2.csv"); testFiles.push_back("johnson8-4-4.csv"); testFiles.push_back("lars_dependent_y.csv"); testFiles.push_back("vc2_test_labels.txt"); for (size_t i = 0; i < testFiles.size(); ++i) { arma::mat one, two; DatasetInfo info; data::Load(testFiles[i], one); data::Load(testFiles[i], two, info); // Check that the matrices contain the same information. BOOST_REQUIRE_EQUAL(one.n_elem, two.n_elem); BOOST_REQUIRE_EQUAL(one.n_rows, two.n_rows); BOOST_REQUIRE_EQUAL(one.n_cols, two.n_cols); for (size_t i = 0; i < one.n_elem; ++i) { if (std::abs(one[i]) < 1e-8) BOOST_REQUIRE_SMALL(two[i], 1e-8); else BOOST_REQUIRE_CLOSE(one[i], two[i], 1e-8); } // Check that all dimensions are numeric. 
for (size_t i = 0; i < two.n_rows; ++i) BOOST_REQUIRE(info.Type(i) == Datatype::numeric); } } /** * Test non-transposed loading of regular CSVs with DatasetInfo. Everything * should be numeric. */ BOOST_AUTO_TEST_CASE(NontransposedCSVDatasetInfoLoad) { vector testFiles; testFiles.push_back("fake.csv"); testFiles.push_back("german.csv"); testFiles.push_back("iris.csv"); testFiles.push_back("vc2.csv"); testFiles.push_back("johnson8-4-4.csv"); testFiles.push_back("lars_dependent_y.csv"); testFiles.push_back("vc2_test_labels.txt"); for (size_t i = 0; i < testFiles.size(); ++i) { arma::mat one, two; DatasetInfo info; data::Load(testFiles[i], one, true, false); // No transpose. data::Load(testFiles[i], two, info, true, false); // Check that the matrices contain the same information. BOOST_REQUIRE_EQUAL(one.n_elem, two.n_elem); BOOST_REQUIRE_EQUAL(one.n_rows, two.n_rows); BOOST_REQUIRE_EQUAL(one.n_cols, two.n_cols); for (size_t i = 0; i < one.n_elem; ++i) { if (std::abs(one[i]) < 1e-8) BOOST_REQUIRE_SMALL(two[i], 1e-8); else BOOST_REQUIRE_CLOSE(one[i], two[i], 1e-8); } // Check that all dimensions are numeric. for (size_t i = 0; i < two.n_rows; ++i) BOOST_REQUIRE(info.Type(i) == Datatype::numeric); } } /** * Create a file with a categorical string feature, then load it. */ BOOST_AUTO_TEST_CASE(CategoricalCSVLoadTest00) { fstream f; f.open("test.csv", fstream::out); f << "1, 2, hello" << endl; f << "3, 4, goodbye" << endl; f << "5, 6, coffee" << endl; f << "7, 8, confusion" << endl; f << "9, 10, hello" << endl; f << "11, 12, confusion" << endl; f << "13, 14, confusion" << endl; f.close(); // Load the test CSV. arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info); BOOST_REQUIRE_EQUAL(matrix.n_cols, 7); BOOST_REQUIRE_EQUAL(matrix.n_rows, 3); BOOST_REQUIRE_EQUAL(matrix(0, 0), 1); BOOST_REQUIRE_EQUAL(matrix(1, 0), 2); BOOST_REQUIRE_EQUAL(matrix(2, 0), 0); BOOST_REQUIRE_EQUAL(matrix(0, 1), 3); BOOST_REQUIRE_EQUAL(matrix(1, 1), 4); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(0, 2), 5); BOOST_REQUIRE_EQUAL(matrix(1, 2), 6); BOOST_REQUIRE_EQUAL(matrix(2, 2), 2); BOOST_REQUIRE_EQUAL(matrix(0, 3), 7); BOOST_REQUIRE_EQUAL(matrix(1, 3), 8); BOOST_REQUIRE_EQUAL(matrix(2, 3), 3); BOOST_REQUIRE_EQUAL(matrix(0, 4), 9); BOOST_REQUIRE_EQUAL(matrix(1, 4), 10); BOOST_REQUIRE_EQUAL(matrix(2, 4), 0); BOOST_REQUIRE_EQUAL(matrix(0, 5), 11); BOOST_REQUIRE_EQUAL(matrix(1, 5), 12); BOOST_REQUIRE_EQUAL(matrix(2, 5), 3); BOOST_REQUIRE_EQUAL(matrix(0, 6), 13); BOOST_REQUIRE_EQUAL(matrix(1, 6), 14); BOOST_REQUIRE_EQUAL(matrix(2, 6), 3); BOOST_REQUIRE(info.Type(0) == Datatype::numeric); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE(info.Type(2) == Datatype::categorical); BOOST_REQUIRE_EQUAL(info.MapString("hello", 2), 0); BOOST_REQUIRE_EQUAL(info.MapString("goodbye", 2), 1); BOOST_REQUIRE_EQUAL(info.MapString("coffee", 2), 2); BOOST_REQUIRE_EQUAL(info.MapString("confusion", 2), 3); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 2), "hello"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 2), "goodbye"); BOOST_REQUIRE_EQUAL(info.UnmapString(2, 2), "coffee"); BOOST_REQUIRE_EQUAL(info.UnmapString(3, 2), "confusion"); remove("test.csv"); } BOOST_AUTO_TEST_CASE(CategoricalCSVLoadTest01) { fstream f; f.open("test.csv", fstream::out); f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f << " , 1, 1" << endl; f << "1, 1, 1" << endl; f.close(); // Load the test CSV. 
arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info, true); BOOST_REQUIRE_EQUAL(matrix.n_cols, 4); BOOST_REQUIRE_EQUAL(matrix.n_rows, 3); BOOST_REQUIRE_EQUAL(matrix(0, 0), 0); BOOST_REQUIRE_EQUAL(matrix(0, 1), 0); BOOST_REQUIRE_EQUAL(matrix(0, 2), 1); BOOST_REQUIRE_EQUAL(matrix(0, 3), 0); BOOST_REQUIRE_EQUAL(matrix(1, 0), 1); BOOST_REQUIRE_EQUAL(matrix(1, 1), 1); BOOST_REQUIRE_EQUAL(matrix(1, 2), 1); BOOST_REQUIRE_EQUAL(matrix(1, 3), 1); BOOST_REQUIRE_EQUAL(matrix(2, 0), 1); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(2, 2), 1); BOOST_REQUIRE_EQUAL(matrix(2, 3), 1); BOOST_REQUIRE(info.Type(0) == Datatype::categorical); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE(info.Type(2) == Datatype::numeric); BOOST_REQUIRE(info.Type(3) == Datatype::numeric); BOOST_REQUIRE_EQUAL(info.MapString("1", 0), 0); BOOST_REQUIRE_EQUAL(info.MapString("", 0), 1); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 0), "1"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 0), ""); remove("test.csv"); } BOOST_AUTO_TEST_CASE(CategoricalCSVLoadTest02) { fstream f; f.open("test.csv", fstream::out); f << "1, 1, 1" << endl; f << ", 1, 1" << endl; f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f.close(); // Load the test CSV. arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info, true); BOOST_REQUIRE_EQUAL(matrix.n_cols, 4); BOOST_REQUIRE_EQUAL(matrix.n_rows, 3); BOOST_REQUIRE_EQUAL(matrix(0, 0), 0); BOOST_REQUIRE_EQUAL(matrix(0, 1), 1); BOOST_REQUIRE_EQUAL(matrix(0, 2), 0); BOOST_REQUIRE_EQUAL(matrix(0, 3), 0); BOOST_REQUIRE_EQUAL(matrix(1, 0), 1); BOOST_REQUIRE_EQUAL(matrix(1, 1), 1); BOOST_REQUIRE_EQUAL(matrix(1, 2), 1); BOOST_REQUIRE_EQUAL(matrix(1, 3), 1); BOOST_REQUIRE_EQUAL(matrix(2, 0), 1); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(2, 2), 1); BOOST_REQUIRE_EQUAL(matrix(2, 3), 1); BOOST_REQUIRE(info.Type(0) == Datatype::categorical); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE(info.Type(2) == Datatype::numeric); BOOST_REQUIRE_EQUAL(info.MapString("", 0), 1); BOOST_REQUIRE_EQUAL(info.MapString("1", 0), 0); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 0), "1"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 0), ""); remove("test.csv"); } BOOST_AUTO_TEST_CASE(CategoricalCSVLoadTest03) { fstream f; f.open("test.csv", fstream::out); f << ", 1, 1" << endl; f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f.close(); // Load the test CSV. 
arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info, true); BOOST_REQUIRE_EQUAL(matrix.n_cols, 4); BOOST_REQUIRE_EQUAL(matrix.n_rows, 3); BOOST_REQUIRE_EQUAL(matrix(0, 0), 0); BOOST_REQUIRE_EQUAL(matrix(0, 1), 1); BOOST_REQUIRE_EQUAL(matrix(0, 2), 1); BOOST_REQUIRE_EQUAL(matrix(0, 3), 1); BOOST_REQUIRE_EQUAL(matrix(1, 0), 1); BOOST_REQUIRE_EQUAL(matrix(1, 1), 1); BOOST_REQUIRE_EQUAL(matrix(1, 2), 1); BOOST_REQUIRE_EQUAL(matrix(1, 3), 1); BOOST_REQUIRE_EQUAL(matrix(2, 0), 1); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(2, 2), 1); BOOST_REQUIRE_EQUAL(matrix(2, 3), 1); BOOST_REQUIRE(info.Type(0) == Datatype::categorical); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE(info.Type(2) == Datatype::numeric); BOOST_REQUIRE_EQUAL(info.MapString("", 0), 0); BOOST_REQUIRE_EQUAL(info.MapString("1", 0), 1); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 0), ""); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 0), "1"); remove("test.csv"); } BOOST_AUTO_TEST_CASE(CategoricalCSVLoadTest04) { fstream f; f.open("test.csv", fstream::out); f << "200-DM, 1, 1" << endl; f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f.close(); // Load the test CSV. arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info, true); BOOST_REQUIRE_EQUAL(matrix.n_cols, 4); BOOST_REQUIRE_EQUAL(matrix.n_rows, 3); BOOST_REQUIRE_EQUAL(matrix(0, 0), 0); BOOST_REQUIRE_EQUAL(matrix(0, 1), 1); BOOST_REQUIRE_EQUAL(matrix(0, 2), 1); BOOST_REQUIRE_EQUAL(matrix(0, 3), 1); BOOST_REQUIRE_EQUAL(matrix(1, 0), 1); BOOST_REQUIRE_EQUAL(matrix(1, 1), 1); BOOST_REQUIRE_EQUAL(matrix(1, 2), 1); BOOST_REQUIRE_EQUAL(matrix(1, 3), 1); BOOST_REQUIRE_EQUAL(matrix(2, 0), 1); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(2, 2), 1); BOOST_REQUIRE_EQUAL(matrix(2, 3), 1); BOOST_REQUIRE(info.Type(0) == Datatype::categorical); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE(info.Type(2) == Datatype::numeric); BOOST_REQUIRE_EQUAL(info.MapString("200-DM", 0), 0); BOOST_REQUIRE_EQUAL(info.MapString("1", 0), 1); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 0), "200-DM"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 0), "1"); remove("test.csv"); } BOOST_AUTO_TEST_CASE(CategoricalNontransposedCSVLoadTest00) { fstream f; f.open("test.csv", fstream::out); f << "1, 2, hello" << endl; f << "3, 4, goodbye" << endl; f << "5, 6, coffee" << endl; f << "7, 8, confusion" << endl; f << "9, 10, hello" << endl; f << "11, 12, 15" << endl; f << "13, 14, confusion" << endl; f.close(); // Load the test CSV. arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info, true, false); // No transpose. 
BOOST_REQUIRE_EQUAL(matrix.n_cols, 3); BOOST_REQUIRE_EQUAL(matrix.n_rows, 7); BOOST_REQUIRE_EQUAL(matrix(0, 0), 0); BOOST_REQUIRE_EQUAL(matrix(0, 1), 1); BOOST_REQUIRE_EQUAL(matrix(0, 2), 2); BOOST_REQUIRE_EQUAL(matrix(1, 0), 0); BOOST_REQUIRE_EQUAL(matrix(1, 1), 1); BOOST_REQUIRE_EQUAL(matrix(1, 2), 2); BOOST_REQUIRE_EQUAL(matrix(2, 0), 0); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(2, 2), 2); BOOST_REQUIRE_EQUAL(matrix(3, 0), 0); BOOST_REQUIRE_EQUAL(matrix(3, 1), 1); BOOST_REQUIRE_EQUAL(matrix(3, 2), 2); BOOST_REQUIRE_EQUAL(matrix(4, 0), 0); BOOST_REQUIRE_EQUAL(matrix(4, 1), 1); BOOST_REQUIRE_EQUAL(matrix(4, 2), 2); BOOST_REQUIRE_EQUAL(matrix(5, 0), 11); BOOST_REQUIRE_EQUAL(matrix(5, 1), 12); BOOST_REQUIRE_EQUAL(matrix(5, 2), 15); BOOST_REQUIRE_EQUAL(matrix(6, 0), 0); BOOST_REQUIRE_EQUAL(matrix(6, 1), 1); BOOST_REQUIRE_EQUAL(matrix(6, 2), 2); BOOST_REQUIRE(info.Type(0) == Datatype::categorical); BOOST_REQUIRE(info.Type(1) == Datatype::categorical); BOOST_REQUIRE(info.Type(2) == Datatype::categorical); BOOST_REQUIRE(info.Type(3) == Datatype::categorical); BOOST_REQUIRE(info.Type(4) == Datatype::categorical); BOOST_REQUIRE(info.Type(5) == Datatype::numeric); BOOST_REQUIRE(info.Type(6) == Datatype::categorical); BOOST_REQUIRE_EQUAL(info.MapString("1", 0), 0); BOOST_REQUIRE_EQUAL(info.MapString("2", 0), 1); BOOST_REQUIRE_EQUAL(info.MapString("hello", 0), 2); BOOST_REQUIRE_EQUAL(info.MapString("3", 1), 0); BOOST_REQUIRE_EQUAL(info.MapString("4", 1), 1); BOOST_REQUIRE_EQUAL(info.MapString("goodbye", 1), 2); BOOST_REQUIRE_EQUAL(info.MapString("5", 2), 0); BOOST_REQUIRE_EQUAL(info.MapString("6", 2), 1); BOOST_REQUIRE_EQUAL(info.MapString("coffee", 2), 2); BOOST_REQUIRE_EQUAL(info.MapString("7", 3), 0); BOOST_REQUIRE_EQUAL(info.MapString("8", 3), 1); BOOST_REQUIRE_EQUAL(info.MapString("confusion", 3), 2); BOOST_REQUIRE_EQUAL(info.MapString("9", 4), 0); BOOST_REQUIRE_EQUAL(info.MapString("10", 4), 1); BOOST_REQUIRE_EQUAL(info.MapString("hello", 4), 2); BOOST_REQUIRE_EQUAL(info.MapString("13", 6), 0); BOOST_REQUIRE_EQUAL(info.MapString("14", 6), 1); BOOST_REQUIRE_EQUAL(info.MapString("confusion", 6), 2); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 0), "1"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 0), "2"); BOOST_REQUIRE_EQUAL(info.UnmapString(2, 0), "hello"); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 1), "3"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 1), "4"); BOOST_REQUIRE_EQUAL(info.UnmapString(2, 1), "goodbye"); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 2), "5"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 2), "6"); BOOST_REQUIRE_EQUAL(info.UnmapString(2, 2), "coffee"); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 3), "7"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 3), "8"); BOOST_REQUIRE_EQUAL(info.UnmapString(2, 3), "confusion"); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 4), "9"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 4), "10"); BOOST_REQUIRE_EQUAL(info.UnmapString(2, 4), "hello"); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 6), "13"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 6), "14"); BOOST_REQUIRE_EQUAL(info.UnmapString(2, 6), "confusion"); remove("test.csv"); } BOOST_AUTO_TEST_CASE(CategoricalNontransposedCSVLoadTest01) { fstream f; f.open("test.csv", fstream::out); f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f << " , 1, 1" << endl; f << "1, 1, 1" << endl; f.close(); // Load the test CSV. arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info, true, false); // No transpose. 
BOOST_REQUIRE_EQUAL(matrix.n_cols, 3); BOOST_REQUIRE_EQUAL(matrix.n_rows, 4); BOOST_REQUIRE_EQUAL(matrix(0, 0), 1); BOOST_REQUIRE_EQUAL(matrix(0, 1), 1); BOOST_REQUIRE_EQUAL(matrix(0, 2), 1); BOOST_REQUIRE_EQUAL(matrix(1, 0), 1); BOOST_REQUIRE_EQUAL(matrix(1, 1), 1); BOOST_REQUIRE_EQUAL(matrix(1, 2), 1); BOOST_REQUIRE_EQUAL(matrix(2, 0), 0); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(2, 2), 1); BOOST_REQUIRE_EQUAL(matrix(3, 0), 1); BOOST_REQUIRE_EQUAL(matrix(3, 1), 1); BOOST_REQUIRE_EQUAL(matrix(3, 2), 1); BOOST_REQUIRE(info.Type(0) == Datatype::numeric); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE(info.Type(2) == Datatype::categorical); BOOST_REQUIRE(info.Type(3) == Datatype::numeric); BOOST_REQUIRE_EQUAL(info.MapString("", 2), 0); BOOST_REQUIRE_EQUAL(info.MapString("1", 2), 1); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 2), ""); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 2), "1"); remove("test.csv"); } BOOST_AUTO_TEST_CASE(CategoricalNontransposedCSVLoadTest02) { fstream f; f.open("test.csv", fstream::out); f << "1, 1, 1" << endl; f << ", 1, 1" << endl; f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f.close(); // Load the test CSV. arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info, true, false); // No transpose. BOOST_REQUIRE_EQUAL(matrix.n_cols, 3); BOOST_REQUIRE_EQUAL(matrix.n_rows, 4); BOOST_REQUIRE_EQUAL(matrix(0, 0), 1); BOOST_REQUIRE_EQUAL(matrix(0, 1), 1); BOOST_REQUIRE_EQUAL(matrix(0, 2), 1); BOOST_REQUIRE_EQUAL(matrix(1, 0), 0); BOOST_REQUIRE_EQUAL(matrix(1, 1), 1); BOOST_REQUIRE_EQUAL(matrix(1, 2), 1); BOOST_REQUIRE_EQUAL(matrix(2, 0), 1); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(2, 2), 1); BOOST_REQUIRE_EQUAL(matrix(3, 0), 1); BOOST_REQUIRE_EQUAL(matrix(3, 1), 1); BOOST_REQUIRE_EQUAL(matrix(3, 2), 1); BOOST_REQUIRE(info.Type(0) == Datatype::numeric); BOOST_REQUIRE(info.Type(1) == Datatype::categorical); BOOST_REQUIRE(info.Type(2) == Datatype::numeric); BOOST_REQUIRE(info.Type(3) == Datatype::numeric); BOOST_REQUIRE_EQUAL(info.MapString("", 1), 0); BOOST_REQUIRE_EQUAL(info.MapString("1", 1), 1); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 1), ""); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 1), "1"); remove("test.csv"); } BOOST_AUTO_TEST_CASE(CategoricalNontransposedCSVLoadTest03) { fstream f; f.open("test.csv", fstream::out); f << ", 1, 1" << endl; f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f << "1, 1, 1" << endl; f.close(); // Load the test CSV. arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info, true, false); // No transpose. 
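  // With transpose set to false, each CSV *row* becomes one dimension of the
  // loaded matrix, so only dimension 0 (the row with the missing value)
  // should be marked categorical here.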
BOOST_REQUIRE_EQUAL(matrix.n_cols, 3); BOOST_REQUIRE_EQUAL(matrix.n_rows, 4); BOOST_REQUIRE_EQUAL(matrix(0, 0), 0); BOOST_REQUIRE_EQUAL(matrix(0, 1), 1); BOOST_REQUIRE_EQUAL(matrix(0, 2), 1); BOOST_REQUIRE_EQUAL(matrix(1, 0), 1); BOOST_REQUIRE_EQUAL(matrix(1, 1), 1); BOOST_REQUIRE_EQUAL(matrix(1, 2), 1); BOOST_REQUIRE_EQUAL(matrix(2, 0), 1); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(2, 2), 1); BOOST_REQUIRE_EQUAL(matrix(3, 0), 1); BOOST_REQUIRE_EQUAL(matrix(3, 1), 1); BOOST_REQUIRE_EQUAL(matrix(3, 2), 1); BOOST_REQUIRE(info.Type(0) == Datatype::categorical); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE(info.Type(2) == Datatype::numeric); BOOST_REQUIRE(info.Type(3) == Datatype::numeric); BOOST_REQUIRE_EQUAL(info.MapString("", 1), 0); BOOST_REQUIRE_EQUAL(info.MapString("1", 1), 1); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 1), ""); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 1), "1"); remove("test.csv"); } BOOST_AUTO_TEST_CASE(CategoricalNontransposedCSVLoadTest04) { fstream f; f.open("test.csv", fstream::out); f << " 200-DM , 1 , 1 " << endl; f << " 1 , 1 , 1 " << endl; f << " 1 , 1 , 1 " << endl; f << " 1 , 1 , 1 " << endl; f.close(); // Load the test CSV. arma::umat matrix; DatasetInfo info; data::Load("test.csv", matrix, info, true, false); // No transpose. BOOST_REQUIRE_EQUAL(matrix.n_cols, 3); BOOST_REQUIRE_EQUAL(matrix.n_rows, 4); BOOST_REQUIRE(info.Type(0) == Datatype::categorical); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE(info.Type(2) == Datatype::numeric); BOOST_REQUIRE(info.Type(3) == Datatype::numeric); BOOST_REQUIRE_EQUAL(matrix(0, 0), 0); BOOST_REQUIRE_EQUAL(matrix(0, 1), 1); BOOST_REQUIRE_EQUAL(matrix(0, 2), 1); BOOST_REQUIRE_EQUAL(matrix(1, 0), 1); BOOST_REQUIRE_EQUAL(matrix(1, 1), 1); BOOST_REQUIRE_EQUAL(matrix(1, 2), 1); BOOST_REQUIRE_EQUAL(matrix(2, 0), 1); BOOST_REQUIRE_EQUAL(matrix(2, 1), 1); BOOST_REQUIRE_EQUAL(matrix(2, 2), 1); BOOST_REQUIRE_EQUAL(matrix(3, 0), 1); BOOST_REQUIRE_EQUAL(matrix(3, 1), 1); BOOST_REQUIRE_EQUAL(matrix(3, 2), 1); BOOST_REQUIRE_EQUAL(info.MapString("200-DM", 1), 0); BOOST_REQUIRE_EQUAL(info.MapString("1", 1), 1); BOOST_REQUIRE_EQUAL(info.UnmapString(0, 1), "200-DM"); BOOST_REQUIRE_EQUAL(info.UnmapString(1, 1), "1"); remove("test.csv"); } /** * A harder test CSV based on the concerns in #658. */ BOOST_AUTO_TEST_CASE(HarderKeonTest) { fstream f; f.open("test.csv", fstream::out); f << "a,, 13,\t, 0" << endl; f << "b, 3, 14, hello,1" << endl; f << "b, 4, 15, , 2" << endl; f << ", 5, 16, ," << endl; f.close(); // Load transposed. arma::mat dataset; data::DatasetInfo info; data::Load("test.csv", dataset, info, true, true); BOOST_REQUIRE_EQUAL(dataset.n_rows, 5); BOOST_REQUIRE_EQUAL(dataset.n_cols, 4); BOOST_REQUIRE_EQUAL(info.Dimensionality(), 5); BOOST_REQUIRE_EQUAL(info.NumMappings(0), 3); BOOST_REQUIRE_EQUAL(info.NumMappings(1), 4); BOOST_REQUIRE_EQUAL(info.NumMappings(2), 0); BOOST_REQUIRE_EQUAL(info.NumMappings(3), 2); // \t and "" are equivalent. BOOST_REQUIRE_EQUAL(info.NumMappings(4), 4); // Now load non-transposed. data::DatasetInfo ntInfo; data::Load("test.csv", dataset, ntInfo, true, false); BOOST_REQUIRE_EQUAL(dataset.n_rows, 4); BOOST_REQUIRE_EQUAL(dataset.n_cols, 5); BOOST_REQUIRE_EQUAL(ntInfo.Dimensionality(), 4); BOOST_REQUIRE_EQUAL(ntInfo.NumMappings(0), 4); BOOST_REQUIRE_EQUAL(ntInfo.NumMappings(1), 5); BOOST_REQUIRE_EQUAL(ntInfo.NumMappings(2), 5); BOOST_REQUIRE_EQUAL(ntInfo.NumMappings(3), 3); remove("test.csv"); } /** * A simple ARFF load test. 
Two attributes, both numeric. */ BOOST_AUTO_TEST_CASE(SimpleARFFTest) { fstream f; f.open("test.arff", fstream::out); f << "@relation test" << endl; f << endl; f << "@attribute one NUMERIC" << endl; f << "@attribute two NUMERIC" << endl; f << endl; f << "@data" << endl; f << "1, 2" << endl; f << "3, 4" << endl; f << "5, 6" << endl; f << "7, 8" << endl; f.close(); arma::mat dataset; DatasetInfo info; data::Load("test.arff", dataset, info); BOOST_REQUIRE_EQUAL(info.Dimensionality(), 2); BOOST_REQUIRE(info.Type(0) == Datatype::numeric); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE_EQUAL(dataset.n_rows, 2); BOOST_REQUIRE_EQUAL(dataset.n_cols, 4); for (size_t i = 0; i < 8; ++i) BOOST_REQUIRE_CLOSE(dataset[i], double(i + 1), 1e-5); remove("test.arff"); } /** * Another simple ARFF load test. Three attributes, two categorical, one * numeric. */ BOOST_AUTO_TEST_CASE(SimpleARFFCategoricalTest) { fstream f; f.open("test.arff", fstream::out); f << "@relation test" << endl; f << endl; f << "@attribute one STRING" << endl; f << "@attribute two REAL" << endl; f << endl; f << "@attribute three STRING" << endl; f << endl; f << "\% a comment line " << endl; f << endl; f << "@data" << endl; f << "hello, 1, moo" << endl; f << "cheese, 2.34, goodbye" << endl; f << "seven, 1.03e+5, moo" << endl; f << "hello, -1.3, goodbye" << endl; f.close(); arma::mat dataset; DatasetInfo info; data::Load("test.arff", dataset, info); BOOST_REQUIRE_EQUAL(info.Dimensionality(), 3); BOOST_REQUIRE(info.Type(0) == Datatype::categorical); BOOST_REQUIRE_EQUAL(info.NumMappings(0), 3); BOOST_REQUIRE(info.Type(1) == Datatype::numeric); BOOST_REQUIRE(info.Type(2) == Datatype::categorical); BOOST_REQUIRE_EQUAL(info.NumMappings(2), 2); BOOST_REQUIRE_EQUAL(dataset.n_rows, 3); BOOST_REQUIRE_EQUAL(dataset.n_cols, 4); // The first dimension must all be different (except the ones that are the // same). BOOST_REQUIRE_EQUAL(dataset(0, 0), dataset(0, 3)); BOOST_REQUIRE_NE(dataset(0, 0), dataset(0, 1)); BOOST_REQUIRE_NE(dataset(0, 1), dataset(0, 2)); BOOST_REQUIRE_NE(dataset(0, 2), dataset(0, 0)); BOOST_REQUIRE_CLOSE(dataset(1, 0), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(dataset(1, 1), 2.34, 1e-5); BOOST_REQUIRE_CLOSE(dataset(1, 2), 1.03e5, 1e-5); BOOST_REQUIRE_CLOSE(dataset(1, 3), -1.3, 1e-5); BOOST_REQUIRE_EQUAL(dataset(2, 0), dataset(2, 2)); BOOST_REQUIRE_EQUAL(dataset(2, 1), dataset(2, 3)); BOOST_REQUIRE_NE(dataset(2, 0), dataset(2, 1)); remove("test.arff"); } /** * A harder ARFF test, where we have each type of supported value, and some * random whitespace too. 
*/ BOOST_AUTO_TEST_CASE(HarderARFFTest) { fstream f; f.open("test.arff", fstream::out); f << "@relation \t test" << endl; f << endl; f << endl; f << "@attribute @@@@flfl numeric" << endl; f << endl; f << "\% comment" << endl; f << "@attribute \"hello world\" string" << endl; f << "@attribute 12345 integer" << endl; f << "@attribute real real" << endl; f << "@attribute \"blah blah blah \t \" numeric \% comment" << endl; f << "\% comment" << endl; f << "@data" << endl; f << "1, one, 3, 4.5, 6" << endl; f << "2, two, 4, 5.5, 7 \% comment" << endl; f << "3, \"three five, six\", 5, 6.5, 8" << endl; f.close(); arma::mat dataset; DatasetInfo info; data::Load("test.arff", dataset, info); BOOST_REQUIRE_EQUAL(info.Dimensionality(), 5); BOOST_REQUIRE(info.Type(0) == Datatype::numeric); BOOST_REQUIRE(info.Type(1) == Datatype::categorical); BOOST_REQUIRE_EQUAL(info.NumMappings(1), 3); BOOST_REQUIRE(info.Type(2) == Datatype::numeric); BOOST_REQUIRE(info.Type(3) == Datatype::numeric); BOOST_REQUIRE(info.Type(4) == Datatype::numeric); BOOST_REQUIRE_EQUAL(dataset.n_rows, 5); BOOST_REQUIRE_EQUAL(dataset.n_cols, 3); BOOST_REQUIRE_CLOSE(dataset(0, 0), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(dataset(0, 1), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(dataset(0, 2), 3.0, 1e-5); BOOST_REQUIRE_NE(dataset(1, 0), dataset(1, 1)); BOOST_REQUIRE_NE(dataset(1, 1), dataset(1, 2)); BOOST_REQUIRE_NE(dataset(1, 0), dataset(1, 2)); BOOST_REQUIRE_CLOSE(dataset(2, 0), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(dataset(2, 1), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(dataset(2, 2), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(dataset(3, 0), 4.5, 1e-5); BOOST_REQUIRE_CLOSE(dataset(3, 1), 5.5, 1e-5); BOOST_REQUIRE_CLOSE(dataset(3, 2), 6.5, 1e-5); BOOST_REQUIRE_CLOSE(dataset(4, 0), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(dataset(4, 1), 7.0, 1e-5); BOOST_REQUIRE_CLOSE(dataset(4, 2), 8.0, 1e-5); remove("test.arff"); } /** * If we pass a bad DatasetInfo, it should throw. */ BOOST_AUTO_TEST_CASE(BadDatasetInfoARFFTest) { fstream f; f.open("test.arff", fstream::out); f << "@relation \t test" << endl; f << endl; f << endl; f << "@attribute @@@@flfl numeric" << endl; f << endl; f << "\% comment" << endl; f << "@attribute \"hello world\" string" << endl; f << "@attribute 12345 integer" << endl; f << "@attribute real real" << endl; f << "@attribute \"blah blah blah \t \" numeric \% comment" << endl; f << "\% comment" << endl; f << "@data" << endl; f << "1, one, 3, 4.5, 6" << endl; f << "2, two, 4, 5.5, 7 \% comment" << endl; f << "3, \"three five, six\", 5, 6.5, 8" << endl; f.close(); arma::mat dataset; DatasetInfo info(6); BOOST_REQUIRE_THROW(data::LoadARFF("test.arff", dataset, info), std::invalid_argument); remove("test.arff"); } /** * A test to check whether the arff loader is case insensitive to declarations: * @relation, @attribute, @data. */ BOOST_AUTO_TEST_CASE(CaseTest) { arma::mat dataset; DatasetMapper info; LoadARFF("casecheck.arff", dataset, info); BOOST_CHECK_EQUAL(dataset.n_rows, 2); BOOST_CHECK_EQUAL(dataset.n_cols, 3); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/local_coordinate_coding_test.cpp000066400000000000000000000110301315013601400245430ustar00rootroot00000000000000/** * @file local_coordinate_coding_test.cpp * @author Nishant Mehta * * Test for Local Coordinate Coding. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
 */

// Note: We don't use BOOST_REQUIRE_CLOSE in the code below because we need
// to use FPC_WEAK, and it's not at all intuitive how to do that.
#include <mlpack/core.hpp>
#include <mlpack/methods/local_coordinate_coding/lcc.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"
#include "serialization.hpp"

using namespace arma;
using namespace mlpack;
using namespace mlpack::regression;
using namespace mlpack::lcc;

BOOST_AUTO_TEST_SUITE(LocalCoordinateCodingTest);

void VerifyCorrectness(vec beta, vec errCorr, double lambda)
{
  const double tol = 1e-12;
  size_t nDims = beta.n_elem;
  for (size_t j = 0; j < nDims; j++)
  {
    if (beta(j) == 0)
    {
      // Make sure that errCorr(j) <= lambda.
      BOOST_REQUIRE_SMALL(std::max(fabs(errCorr(j)) - lambda, 0.0), tol);
    }
    else if (beta(j) < 0)
    {
      // Make sure that errCorr(j) == lambda.
      BOOST_REQUIRE_SMALL(errCorr(j) - lambda, tol);
    }
    else // beta(j) > 0.
    {
      // Make sure that errCorr(j) == -lambda.
      BOOST_REQUIRE_SMALL(errCorr(j) + lambda, tol);
    }
  }
}

BOOST_AUTO_TEST_CASE(LocalCoordinateCodingTestCodingStep)
{
  double lambda1 = 0.1;
  uword nAtoms = 25;

  mat X;
  X.load("mnist_first250_training_4s_and_9s.arm");
  uword nPoints = X.n_cols;

  // Normalize each point since these are images.
  for (uword i = 0; i < nPoints; i++)
    X.col(i) /= norm(X.col(i), 2);

  mat Z;
  LocalCoordinateCoding lcc(X, nAtoms, lambda1);
  lcc.Encode(X, Z);
  mat D = lcc.Dictionary();

  for (uword i = 0; i < nPoints; i++)
  {
    vec sqDists = vec(nAtoms);
    for (uword j = 0; j < nAtoms; j++)
    {
      vec diff = D.unsafe_col(j) - X.unsafe_col(i);
      sqDists[j] = dot(diff, diff);
    }
    mat Dprime = D * diagmat(1.0 / sqDists);
    mat zPrime = Z.unsafe_col(i) % sqDists;

    vec errCorr = trans(Dprime) * (Dprime * zPrime - X.unsafe_col(i));
    VerifyCorrectness(zPrime, errCorr, 0.5 * lambda1);
  }
}

BOOST_AUTO_TEST_CASE(LocalCoordinateCodingTestDictionaryStep)
{
  const double tol = 1e-12;

  double lambda = 0.1;
  uword nAtoms = 25;

  mat X;
  X.load("mnist_first250_training_4s_and_9s.arm");
  uword nPoints = X.n_cols;

  // Normalize each point since these are images.
  for (uword i = 0; i < nPoints; i++)
    X.col(i) /= norm(X.col(i), 2);

  mat Z;
  LocalCoordinateCoding lcc(X, nAtoms, lambda);
  lcc.Encode(X, Z);
  uvec adjacencies = find(Z);
  lcc.OptimizeDictionary(X, Z, adjacencies);

  mat D = lcc.Dictionary();

  mat grad = zeros<mat>(D.n_rows, D.n_cols);
  for (uword i = 0; i < nPoints; i++)
  {
    grad += (D - repmat(X.unsafe_col(i), 1, nAtoms)) *
        diagmat(abs(Z.unsafe_col(i)));
  }
  grad = lambda * grad + (D * Z - X) * trans(Z);

  BOOST_REQUIRE_SMALL(norm(grad, "fro"), tol);
}

BOOST_AUTO_TEST_CASE(SerializationTest)
{
  mat X = randu<mat>(100, 100);
  size_t nAtoms = 25;

  LocalCoordinateCoding lcc(nAtoms, 0.05);
  lcc.Train(X);

  mat Y = randu<mat>(100, 200);
  mat codes;
  lcc.Encode(Y, codes);

  LocalCoordinateCoding lccXml(50, 0.1), lccText(12, 0.0), lccBinary(0, 0.0);
  SerializeObjectAll(lcc, lccXml, lccText, lccBinary);

  CheckMatrices(lcc.Dictionary(), lccXml.Dictionary(), lccText.Dictionary(),
      lccBinary.Dictionary());

  mat xmlCodes, textCodes, binaryCodes;
  lccXml.Encode(Y, xmlCodes);
  lccText.Encode(Y, textCodes);
  lccBinary.Encode(Y, binaryCodes);

  CheckMatrices(codes, xmlCodes, textCodes, binaryCodes);

  // Check the parameters, too.
  BOOST_REQUIRE_EQUAL(lcc.Atoms(), lccXml.Atoms());
  BOOST_REQUIRE_EQUAL(lcc.Atoms(), lccText.Atoms());
  BOOST_REQUIRE_EQUAL(lcc.Atoms(), lccBinary.Atoms());

  BOOST_REQUIRE_CLOSE(lcc.Tolerance(), lccXml.Tolerance(), 1e-5);
  BOOST_REQUIRE_CLOSE(lcc.Tolerance(), lccText.Tolerance(), 1e-5);
  BOOST_REQUIRE_CLOSE(lcc.Tolerance(), lccBinary.Tolerance(), 1e-5);

  BOOST_REQUIRE_CLOSE(lcc.Lambda(), lccXml.Lambda(), 1e-5);
  BOOST_REQUIRE_CLOSE(lcc.Lambda(), lccText.Lambda(), 1e-5);
  BOOST_REQUIRE_CLOSE(lcc.Lambda(), lccBinary.Lambda(), 1e-5);

  BOOST_REQUIRE_EQUAL(lcc.MaxIterations(), lccXml.MaxIterations());
  BOOST_REQUIRE_EQUAL(lcc.MaxIterations(), lccText.MaxIterations());
  BOOST_REQUIRE_EQUAL(lcc.MaxIterations(), lccBinary.MaxIterations());
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/log_test.cpp000066400000000000000000000024371315013601400205130ustar00rootroot00000000000000/**
 * @file log_test.cpp
 * @author Marcus Edel
 *
 * Test of the mlpack log class.
 **/
#include <mlpack/core.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;

BOOST_AUTO_TEST_SUITE(LogTest);

/**
 * Simple log assert test.  Be careful: the test halts program execution, so
 * run it at the end of all other tests.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
BOOST_AUTO_TEST_CASE(LogAssertConditionTest)
{
  // Only do anything for Assert() if in debugging mode.
#ifdef DEBUG
  // If everything goes well we reach the boost test condition, which is
  // always true for simplicity's sake.
  Log::Assert(true, "test");
  BOOST_REQUIRE_EQUAL(1, 1);

  // The test case should halt the program execution and print a custom
  // error message.  Since the program is halted we should never reach the
  // boost test condition, which is always false for simplicity's sake.
  // Log::Assert(false, "test");
  // BOOST_REQUIRE_EQUAL(1, 0);
#endif
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/logistic_regression_test.cpp000066400000000000000000000756631315013601400240160ustar00rootroot00000000000000/**
 * @file logistic_regression_test.cpp
 * @author Ryan Curtin
 *
 * Test for LogisticFunction and LogisticRegression.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/logistic_regression/logistic_regression.hpp>
#include <mlpack/core/optimizers/sgd/sgd.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::regression;
using namespace mlpack::optimization;
using namespace mlpack::distribution;

BOOST_AUTO_TEST_SUITE(LogisticRegressionTest);

/**
 * Test the LogisticRegressionFunction on a simple set of points.
 */
BOOST_AUTO_TEST_CASE(LogisticRegressionFunctionEvaluate)
{
  // Very simple fake dataset.
  arma::mat data("1 2 3;"
                 "1 2 3");
  arma::Row<size_t> responses("1 1 0");

  // Create a LogisticRegressionFunction.
  LogisticRegressionFunction<> lrf(data, responses,
      0.0 /* no regularization */);

  // These were hand-calculated using Octave.
BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("1 1 1")), 7.0562141665, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("0 0 0")), 2.0794415417, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("-1 -1 -1")), 8.0562141665, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("200 -40 -40")), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("200 -80 0")), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("200 -100 20")), 0.0, 1e-5); } /** * A more complicated test for the LogisticRegressionFunction. */ BOOST_AUTO_TEST_CASE(LogisticRegressionFunctionRandomEvaluate) { const size_t points = 1000; const size_t dimension = 10; const size_t trials = 50; // Create a random dataset. arma::mat data; data.randu(dimension, points); // Create random responses. arma::Row responses(points); for (size_t i = 0; i < points; ++i) responses[i] = math::RandInt(0, 2); LogisticRegressionFunction<> lrf(data, responses, 0.0 /* no regularization */); // Run a bunch of trials. for (size_t i = 0; i < trials; ++i) { // Generate a random set of parameters. arma::vec parameters; parameters.randu(dimension + 1); // Hand-calculate the loss function. double loglikelihood = 0.0; for (size_t j = 0; j < points; ++j) { const double sigmoid = (1.0 / (1.0 + exp(-parameters[0] - arma::dot(data.col(j), parameters.subvec(1, dimension))))); if (responses[j] == 1.0) loglikelihood += log(std::pow(sigmoid, responses[j])); else loglikelihood += log(std::pow(1.0 - sigmoid, 1.0 - responses[j])); } BOOST_REQUIRE_CLOSE(lrf.Evaluate(parameters), -loglikelihood, 1e-5); } } /** * Test regularization for the LogisticRegressionFunction Evaluate() function. */ BOOST_AUTO_TEST_CASE(LogisticRegressionFunctionRegularizationEvaluate) { const size_t points = 5000; const size_t dimension = 25; const size_t trials = 10; // Create a random dataset. arma::mat data; data.randu(dimension, points); // Create random responses. arma::Row responses(points); for (size_t i = 0; i < points; ++i) responses[i] = math::RandInt(0, 2); LogisticRegressionFunction<> lrfNoReg(data, responses, 0.0); LogisticRegressionFunction<> lrfSmallReg(data, responses, 0.5); LogisticRegressionFunction<> lrfBigReg(data, responses, 20.0); for (size_t i = 0; i < trials; ++i) { arma::vec parameters(dimension + 1); parameters.randu(); // Regularization term: 0.5 * lambda * || parameters ||_2^2 (but note that // the first parameters term is ignored). const double smallRegTerm = 0.25 * std::pow(arma::norm(parameters, 2), 2.0) - 0.25 * std::pow(parameters[0], 2.0); const double bigRegTerm = 10.0 * std::pow(arma::norm(parameters, 2), 2.0) - 10.0 * std::pow(parameters[0], 2.0); BOOST_REQUIRE_CLOSE(lrfNoReg.Evaluate(parameters) + smallRegTerm, lrfSmallReg.Evaluate(parameters), 1e-5); BOOST_REQUIRE_CLOSE(lrfNoReg.Evaluate(parameters) + bigRegTerm, lrfBigReg.Evaluate(parameters), 1e-5); } } /** * Test gradient of the LogisticRegressionFunction. */ BOOST_AUTO_TEST_CASE(LogisticRegressionFunctionGradient) { // Very simple fake dataset. arma::mat data("1 2 3;" "1 2 3"); arma::Row responses("1 1 0"); // Create a LogisticRegressionFunction. LogisticRegressionFunction<> lrf(data, responses, 0.0 /* no regularization */); arma::vec gradient; // If the model is at the optimum, then the gradient should be zero. lrf.Gradient(arma::vec("200 -40 -40"), gradient); BOOST_REQUIRE_EQUAL(gradient.n_elem, 3); BOOST_REQUIRE_SMALL(gradient[0], 1e-15); BOOST_REQUIRE_SMALL(gradient[1], 1e-15); BOOST_REQUIRE_SMALL(gradient[2], 1e-15); // Perturb two elements in the wrong way, so they need to become smaller. 
lrf.Gradient(arma::vec("200 -20 -20"), gradient); // The actual values are less important; the gradient just needs to be pointed // the right way. BOOST_REQUIRE_EQUAL(gradient.n_elem, 3); BOOST_REQUIRE_GE(gradient[1], 0.0); BOOST_REQUIRE_GE(gradient[2], 0.0); // Perturb two elements in the wrong way, so they need to become larger. lrf.Gradient(arma::vec("200 -60 -60"), gradient); // The actual values are less important; the gradient just needs to be pointed // the right way. BOOST_REQUIRE_EQUAL(gradient.n_elem, 3); BOOST_REQUIRE_LE(gradient[1], 0.0); BOOST_REQUIRE_LE(gradient[2], 0.0); // Perturb the intercept element. lrf.Gradient(arma::vec("250 -40 -40"), gradient); // The actual values are less important; the gradient just needs to be pointed // the right way. BOOST_REQUIRE_EQUAL(gradient.n_elem, 3); BOOST_REQUIRE_GE(gradient[0], 0.0); } /** * Test individual Evaluate() functions for SGD. */ BOOST_AUTO_TEST_CASE(LogisticRegressionSeparableEvaluate) { // Very simple fake dataset. arma::mat data("1 2 3;" "1 2 3;"); arma::Row responses("1 1 0"); // Create a LogisticRegressionFunction. LogisticRegressionFunction<> lrf(data, responses, 0.0 /* no regularization */); // These were hand-calculated using Octave. BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("1 1 1"), 0), 4.85873516e-2, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("1 1 1"), 1), 6.71534849e-3, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("1 1 1"), 2), 7.00091146645, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("0 0 0"), 0), 0.6931471805, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("0 0 0"), 1), 0.6931471805, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("0 0 0"), 2), 0.6931471805, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("-1 -1 -1"), 0), 3.0485873516, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("-1 -1 -1"), 1), 5.0067153485, 1e-5); BOOST_REQUIRE_CLOSE(lrf.Evaluate(arma::vec("-1 -1 -1"), 2), 9.1146645377e-4, 1e-5); BOOST_REQUIRE_SMALL(lrf.Evaluate(arma::vec("200 -40 -40"), 0), 1e-5); BOOST_REQUIRE_SMALL(lrf.Evaluate(arma::vec("200 -40 -40"), 1), 1e-5); BOOST_REQUIRE_SMALL(lrf.Evaluate(arma::vec("200 -40 -40"), 2), 1e-5); BOOST_REQUIRE_SMALL(lrf.Evaluate(arma::vec("200 -80 0"), 0), 1e-5); BOOST_REQUIRE_SMALL(lrf.Evaluate(arma::vec("200 -80 0"), 1), 1e-5); BOOST_REQUIRE_SMALL(lrf.Evaluate(arma::vec("200 -80 0"), 2), 1e-5); BOOST_REQUIRE_SMALL(lrf.Evaluate(arma::vec("200 -100 20"), 0), 1e-5); BOOST_REQUIRE_SMALL(lrf.Evaluate(arma::vec("200 -100 20"), 1), 1e-5); BOOST_REQUIRE_SMALL(lrf.Evaluate(arma::vec("200 -100 20"), 2), 1e-5); } /** * Test regularization for the separable LogisticRegressionFunction Evaluate() * function. */ BOOST_AUTO_TEST_CASE(LogisticRegressionFunctionRegularizationSeparableEvaluate) { const size_t points = 5000; const size_t dimension = 25; const size_t trials = 10; // Create a random dataset. arma::mat data; data.randu(dimension, points); // Create random responses. arma::Row responses(points); for (size_t i = 0; i < points; ++i) responses[i] = math::RandInt(0, 2); LogisticRegressionFunction<> lrfNoReg(data, responses, 0.0); LogisticRegressionFunction<> lrfSmallReg(data, responses, 0.5); LogisticRegressionFunction<> lrfBigReg(data, responses, 20.0); // Check that the number of functions is correct. 
  BOOST_REQUIRE_EQUAL(lrfNoReg.NumFunctions(), points);
  BOOST_REQUIRE_EQUAL(lrfSmallReg.NumFunctions(), points);
  BOOST_REQUIRE_EQUAL(lrfBigReg.NumFunctions(), points);

  for (size_t i = 0; i < trials; ++i)
  {
    arma::vec parameters(dimension + 1);
    parameters.randu();

    // Regularization term: 0.5 * lambda * || parameters ||_2^2 (but note that
    // the first parameters term is ignored).
    const double smallRegTerm =
        (0.25 * std::pow(arma::norm(parameters, 2), 2.0) -
         0.25 * std::pow(parameters[0], 2.0)) / points;
    const double bigRegTerm =
        (10.0 * std::pow(arma::norm(parameters, 2), 2.0) -
         10.0 * std::pow(parameters[0], 2.0)) / points;

    for (size_t j = 0; j < points; ++j)
    {
      BOOST_REQUIRE_CLOSE(lrfNoReg.Evaluate(parameters, j) + smallRegTerm,
          lrfSmallReg.Evaluate(parameters, j), 1e-5);
      BOOST_REQUIRE_CLOSE(lrfNoReg.Evaluate(parameters, j) + bigRegTerm,
          lrfBigReg.Evaluate(parameters, j), 1e-5);
    }
  }
}

/**
 * Test separable gradient of the LogisticRegressionFunction.
 */
BOOST_AUTO_TEST_CASE(LogisticRegressionFunctionSeparableGradient)
{
  // Very simple fake dataset.
  arma::mat data("1 2 3;"
                 "1 2 3");
  arma::Row<size_t> responses("1 1 0");

  // Create a LogisticRegressionFunction.
  LogisticRegressionFunction<> lrf(data, responses,
      0.0 /* no regularization */);
  arma::vec gradient;

  // If the model is at the optimum, then the gradient should be zero.
  lrf.Gradient(arma::vec("200 -40 -40"), 0, gradient);

  BOOST_REQUIRE_EQUAL(gradient.n_elem, 3);
  BOOST_REQUIRE_SMALL(gradient[0], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[1], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[2], 1e-15);

  lrf.Gradient(arma::vec("200 -40 -40"), 1, gradient);

  BOOST_REQUIRE_EQUAL(gradient.n_elem, 3);
  BOOST_REQUIRE_SMALL(gradient[0], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[1], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[2], 1e-15);

  lrf.Gradient(arma::vec("200 -40 -40"), 2, gradient);

  BOOST_REQUIRE_EQUAL(gradient.n_elem, 3);
  BOOST_REQUIRE_SMALL(gradient[0], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[1], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[2], 1e-15);

  // Perturb two elements in the wrong way, so they need to become smaller.
  // For the first two data points, classification is still correct so the
  // gradient should be zero.
  lrf.Gradient(arma::vec("200 -30 -30"), 0, gradient);
  BOOST_REQUIRE_EQUAL(gradient.n_elem, 3);
  BOOST_REQUIRE_SMALL(gradient[0], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[1], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[2], 1e-15);

  lrf.Gradient(arma::vec("200 -30 -30"), 1, gradient);
  BOOST_REQUIRE_EQUAL(gradient.n_elem, 3);
  BOOST_REQUIRE_SMALL(gradient[0], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[1], 1e-15);
  BOOST_REQUIRE_SMALL(gradient[2], 1e-15);

  lrf.Gradient(arma::vec("200 -30 -30"), 2, gradient);
  BOOST_REQUIRE_EQUAL(gradient.n_elem, 3);
  BOOST_REQUIRE_GE(gradient[1], 0.0);
  BOOST_REQUIRE_GE(gradient[2], 0.0);

  // Perturb two elements the other way, so they need to become larger.  For
  // the first and last data point, classification is still correct so the
  // gradient should be zero.
lrf.Gradient(arma::vec("200 -60 -60"), 0, gradient); BOOST_REQUIRE_EQUAL(gradient.n_elem, 3); BOOST_REQUIRE_SMALL(gradient[0], 1e-15); BOOST_REQUIRE_SMALL(gradient[1], 1e-15); BOOST_REQUIRE_SMALL(gradient[2], 1e-15); lrf.Gradient(arma::vec("200 -30 -30"), 1, gradient); BOOST_REQUIRE_EQUAL(gradient.n_elem, 3); BOOST_REQUIRE_LE(gradient[1], 0.0); BOOST_REQUIRE_LE(gradient[2], 0.0); lrf.Gradient(arma::vec("200 -60 -60"), 2, gradient); BOOST_REQUIRE_EQUAL(gradient.n_elem, 3); BOOST_REQUIRE_SMALL(gradient[0], 1e-15); BOOST_REQUIRE_SMALL(gradient[1], 1e-15); BOOST_REQUIRE_SMALL(gradient[2], 1e-15); } /** * Test Gradient() function when regularization is used. */ BOOST_AUTO_TEST_CASE(LogisticRegressionFunctionRegularizationGradient) { const size_t points = 5000; const size_t dimension = 25; const size_t trials = 10; // Create a random dataset. arma::mat data; data.randu(dimension, points); // Create random responses. arma::Row responses(points); for (size_t i = 0; i < points; ++i) responses[i] = math::RandInt(0, 2); LogisticRegressionFunction<> lrfNoReg(data, responses, 0.0); LogisticRegressionFunction<> lrfSmallReg(data, responses, 0.5); LogisticRegressionFunction<> lrfBigReg(data, responses, 20.0); for (size_t i = 0; i < trials; ++i) { arma::vec parameters(dimension + 1); parameters.randu(); // Regularization term: 0.5 * lambda * || parameters ||_2^2 (but note that // the first parameters term is ignored). Now we take the gradient of this // to obtain // g[i] = lambda * parameters[i] // although g(0) == 0 because we are not regularizing the intercept term of // the model. arma::vec gradient; arma::vec smallRegGradient; arma::vec bigRegGradient; lrfNoReg.Gradient(parameters, gradient); lrfSmallReg.Gradient(parameters, smallRegGradient); lrfBigReg.Gradient(parameters, bigRegGradient); // Check sizes of gradients. BOOST_REQUIRE_EQUAL(gradient.n_elem, parameters.n_elem); BOOST_REQUIRE_EQUAL(smallRegGradient.n_elem, parameters.n_elem); BOOST_REQUIRE_EQUAL(bigRegGradient.n_elem, parameters.n_elem); // Make sure first term has zero regularization. BOOST_REQUIRE_CLOSE(gradient[0], smallRegGradient[0], 1e-5); BOOST_REQUIRE_CLOSE(gradient[0], bigRegGradient[0], 1e-5); // Check other terms. for (size_t j = 1; j < parameters.n_elem; ++j) { const double smallRegTerm = 0.5 * parameters[j]; const double bigRegTerm = 20.0 * parameters[j]; BOOST_REQUIRE_CLOSE(gradient[j] + smallRegTerm, smallRegGradient[j], 1e-5); BOOST_REQUIRE_CLOSE(gradient[j] + bigRegTerm, bigRegGradient[j], 1e-5); } } } /** * Test separable Gradient() function when regularization is used. */ BOOST_AUTO_TEST_CASE(LogisticRegressionFunctionRegularizationSeparableGradient) { const size_t points = 2000; const size_t dimension = 25; const size_t trials = 3; // Create a random dataset. arma::mat data; data.randu(dimension, points); // Create random responses. arma::Row responses(points); for (size_t i = 0; i < points; ++i) responses[i] = math::RandInt(0, 2); LogisticRegressionFunction<> lrfNoReg(data, responses, 0.0); LogisticRegressionFunction<> lrfSmallReg(data, responses, 0.5); LogisticRegressionFunction<> lrfBigReg(data, responses, 20.0); for (size_t i = 0; i < trials; ++i) { arma::vec parameters(dimension + 1); parameters.randu(); // Regularization term: 0.5 * lambda * || parameters ||_2^2 (but note that // the first parameters term is ignored). Now we take the gradient of this // to obtain // g[i] = lambda * parameters[i] // although g(0) == 0 because we are not regularizing the intercept term of // the model. 
    arma::vec gradient;
    arma::vec smallRegGradient;
    arma::vec bigRegGradient;

    // Test the separable gradient for each point.  The regularization will be
    // the same.
    for (size_t k = 0; k < points; ++k)
    {
      lrfNoReg.Gradient(parameters, k, gradient);
      lrfSmallReg.Gradient(parameters, k, smallRegGradient);
      lrfBigReg.Gradient(parameters, k, bigRegGradient);

      // Check sizes of gradients.
      BOOST_REQUIRE_EQUAL(gradient.n_elem, parameters.n_elem);
      BOOST_REQUIRE_EQUAL(smallRegGradient.n_elem, parameters.n_elem);
      BOOST_REQUIRE_EQUAL(bigRegGradient.n_elem, parameters.n_elem);

      // Make sure the first term has zero regularization.
      BOOST_REQUIRE_CLOSE(gradient[0], smallRegGradient[0], 1e-5);
      BOOST_REQUIRE_CLOSE(gradient[0], bigRegGradient[0], 1e-5);

      // Check the other terms.
      for (size_t j = 1; j < parameters.n_elem; ++j)
      {
        const double smallRegTerm = 0.5 * parameters[j] / points;
        const double bigRegTerm = 20.0 * parameters[j] / points;

        BOOST_REQUIRE_CLOSE(gradient[j] + smallRegTerm, smallRegGradient[j],
            1e-5);
        BOOST_REQUIRE_CLOSE(gradient[j] + bigRegTerm, bigRegGradient[j],
            1e-5);
      }
    }
  }
}

// Test training of logistic regression on a simple dataset.
BOOST_AUTO_TEST_CASE(LogisticRegressionLBFGSSimpleTest)
{
  // Very simple fake dataset.
  arma::mat data("1 2 3;"
                 "1 2 3");
  arma::Row<size_t> responses("1 1 0");

  // Create a logistic regression object using L-BFGS (that is the default).
  LogisticRegression<> lr(data, responses);

  // Test the sigmoid function.
  arma::vec sigmoids = 1 / (1 + arma::exp(-lr.Parameters()[0]
      - data.t() * lr.Parameters().subvec(1, lr.Parameters().n_elem - 1)));

  // The large 0.1% error tolerance is because the optimizer may terminate
  // before the predictions converge to 1.
  BOOST_REQUIRE_CLOSE(sigmoids[0], 1.0, 0.1);
  BOOST_REQUIRE_CLOSE(sigmoids[1], 1.0, 5.0);
  BOOST_REQUIRE_SMALL(sigmoids[2], 0.1);
}

// Test training of logistic regression on a simple dataset using SGD.
BOOST_AUTO_TEST_CASE(LogisticRegressionSGDSimpleTest)
{
  // Very simple fake dataset.
  arma::mat data("1 2 3;"
                 "1 2 3");
  arma::Row<size_t> responses("1 1 0");

  // Create a logistic regression object using a custom SGD object with a much
  // smaller tolerance.
  LogisticRegressionFunction<> lrf(data, responses, 0.001);
  SGD<LogisticRegressionFunction<>> sgd(lrf, 0.005, 500000, 1e-10);
  LogisticRegression<> lr(sgd);

  // Test the sigmoid function.
  arma::vec sigmoids = 1 / (1 + arma::exp(-lr.Parameters()[0]
      - data.t() * lr.Parameters().subvec(1, lr.Parameters().n_elem - 1)));

  // The large error tolerance is because the optimizer may terminate before
  // the predictions converge to 1.  The tolerance is wider for SGD because
  // its default convergence tolerance is looser.
  BOOST_REQUIRE_CLOSE(sigmoids[0], 1.0, 3.0);
  BOOST_REQUIRE_CLOSE(sigmoids[1], 1.0, 12.0);
  BOOST_REQUIRE_SMALL(sigmoids[2], 0.1);
}

// Test training of logistic regression on a simple dataset with
// regularization.
BOOST_AUTO_TEST_CASE(LogisticRegressionLBFGSRegularizationSimpleTest)
{
  // Very simple fake dataset.
  arma::mat data("1 2 3;"
                 "1 2 3");
  arma::Row<size_t> responses("1 1 0");

  // Create a logistic regression object using L-BFGS (that is the default).
  LogisticRegression<> lr(data, responses, 0.001);

  // Test the sigmoid function.
  arma::vec sigmoids = 1 / (1 + arma::exp(-lr.Parameters()[0]
      - data.t() * lr.Parameters().subvec(1, lr.Parameters().n_elem - 1)));

  // The large error tolerance is because the optimizer may terminate before
  // the predictions converge to 1.
  BOOST_REQUIRE_CLOSE(sigmoids[0], 1.0, 5.0);
  BOOST_REQUIRE_CLOSE(sigmoids[1], 1.0, 10.0);
  BOOST_REQUIRE_SMALL(sigmoids[2], 0.1);
}

// Test training of logistic regression on a simple dataset using SGD with
// regularization.
BOOST_AUTO_TEST_CASE(LogisticRegressionSGDRegularizationSimpleTest)
{
  // Very simple fake dataset.
  arma::mat data("1 2 3;"
                 "1 2 3");
  arma::Row<size_t> responses("1 1 0");

  // Create a logistic regression object using custom SGD with a much smaller
  // tolerance.
  LogisticRegressionFunction<> lrf(data, responses, 0.001);
  SGD<LogisticRegressionFunction<>> sgd(lrf, 0.005, 500000, 1e-10);
  LogisticRegression<> lr(sgd);

  // Test the sigmoid function.
  arma::vec sigmoids = 1 / (1 + arma::exp(-lr.Parameters()[0]
      - data.t() * lr.Parameters().subvec(1, lr.Parameters().n_elem - 1)));

  // The large error tolerance is because the optimizer may terminate before
  // the predictions converge to 1.  The tolerance is wider for SGD because
  // its default convergence tolerance is looser.
  BOOST_REQUIRE_CLOSE(sigmoids[0], 1.0, 7.0);
  BOOST_REQUIRE_CLOSE(sigmoids[1], 1.0, 14.0);
  BOOST_REQUIRE_SMALL(sigmoids[2], 0.1);
}

// Test training of logistic regression on two Gaussians and ensure it's
// properly separable.
BOOST_AUTO_TEST_CASE(LogisticRegressionLBFGSGaussianTest)
{
  // Generate a two-Gaussian dataset.
  GaussianDistribution g1(arma::vec("1.0 1.0 1.0"), arma::eye(3, 3));
  GaussianDistribution g2(arma::vec("9.0 9.0 9.0"), arma::eye(3, 3));

  arma::mat data(3, 1000);
  arma::Row<size_t> responses(1000);
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  // Now train a logistic regression object on it.
  LogisticRegression<> lr(data.n_rows, 0.5);
  lr.Train(data, responses);

  // Ensure that the error is close to zero.
  const double acc = lr.ComputeAccuracy(data, responses);
  BOOST_REQUIRE_CLOSE(acc, 100.0, 0.3); // 0.3% error tolerance.

  // Create a test set.
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  // Ensure that the error is close to zero.
  const double testAcc = lr.ComputeAccuracy(data, responses);
  BOOST_REQUIRE_CLOSE(testAcc, 100.0, 0.6); // 0.6% error tolerance.
}

// Test training of logistic regression on two Gaussians and ensure it's
// properly separable using SGD.
BOOST_AUTO_TEST_CASE(LogisticRegressionSGDGaussianTest)
{
  // Generate a two-Gaussian dataset.
  GaussianDistribution g1(arma::vec("1.0 1.0 1.0"), arma::eye(3, 3));
  GaussianDistribution g2(arma::vec("9.0 9.0 9.0"), arma::eye(3, 3));

  arma::mat data(3, 1000);
  arma::Row<size_t> responses(1000);
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  // Now train a logistic regression object on it.
  LogisticRegression<> lr(data.n_rows, 0.5);
  lr.Train(data, responses);

  // Ensure that the error is close to zero.
  const double acc = lr.ComputeAccuracy(data, responses);
  BOOST_REQUIRE_CLOSE(acc, 100.0, 0.3); // 0.3% error tolerance.

  // Create a test set.
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  // Ensure that the error is close to zero.
  const double testAcc = lr.ComputeAccuracy(data, responses);
  BOOST_REQUIRE_CLOSE(testAcc, 100.0, 0.6); // 0.6% error tolerance.
}

/**
 * Test constructor that takes an already-instantiated optimizer.
 */
BOOST_AUTO_TEST_CASE(LogisticRegressionInstantiatedOptimizer)
{
  // Very simple fake dataset.
  arma::mat data("1 2 3;"
                 "1 2 3");
  arma::Row<size_t> responses("1 1 0");

  // Create an optimizer and function.
  LogisticRegressionFunction<> lrf(data, responses, 0.0005);
  L_BFGS<LogisticRegressionFunction<>> lbfgsOpt(lrf);
  lbfgsOpt.MinGradientNorm() = 1e-50;
  LogisticRegression<> lr(lbfgsOpt);

  // Test the sigmoid function.
  arma::vec sigmoids = 1 / (1 + arma::exp(-lr.Parameters()[0]
      - data.t() * lr.Parameters().subvec(1, lr.Parameters().n_elem - 1)));

  // The error tolerance is small because we tightened the optimizer
  // tolerance.
  BOOST_REQUIRE_CLOSE(sigmoids[0], 1.0, 0.1);
  BOOST_REQUIRE_CLOSE(sigmoids[1], 1.0, 0.6);
  BOOST_REQUIRE_SMALL(sigmoids[2], 0.1);

  // Now do the same with SGD.
  SGD<LogisticRegressionFunction<>> sgdOpt(lrf);
  sgdOpt.StepSize() = 0.15;
  sgdOpt.Tolerance() = 1e-75;
  LogisticRegression<> lr2(sgdOpt);

  // Test the sigmoid function.
  sigmoids = 1 / (1 + arma::exp(-lr2.Parameters()[0]
      - data.t() * lr2.Parameters().subvec(1, lr2.Parameters().n_elem - 1)));

  // The error tolerance is small because we tightened the optimizer
  // tolerance.
  BOOST_REQUIRE_CLOSE(sigmoids[0], 1.0, 0.1);
  BOOST_REQUIRE_CLOSE(sigmoids[1], 1.0, 0.6);
  BOOST_REQUIRE_SMALL(sigmoids[2], 0.1);
}

/**
 * Test the Train() function and make sure it works the same as if we'd called
 * the constructor by hand, with the L-BFGS optimizer.
 */
BOOST_AUTO_TEST_CASE(LogisticRegressionLBFGSTrainTest)
{
  // Make a random dataset with random labels.
  arma::mat dataset(5, 800);
  dataset.randu();
  arma::Row<size_t> labels(800);
  for (size_t i = 0; i < 800; ++i)
    labels[i] = math::RandInt(0, 2);

  LogisticRegression<> lr(dataset, labels, 0.3);
  LogisticRegression<> lr2(dataset.n_rows, 0.3);
  lr2.Train(dataset, labels);

  BOOST_REQUIRE_EQUAL(lr.Parameters().n_elem, lr2.Parameters().n_elem);
  for (size_t i = 0; i < lr.Parameters().n_elem; ++i)
    BOOST_REQUIRE_CLOSE(lr.Parameters()[i], lr2.Parameters()[i], 0.005);
}

/**
 * Test the Train() function and make sure it works the same as if we'd called
 * the constructor by hand, with the SGD optimizer.
 */
BOOST_AUTO_TEST_CASE(LogisticRegressionSGDTrainTest)
{
  // Make a random dataset with random labels.
  arma::mat dataset(5, 800);
  dataset.randu();
  arma::Row<size_t> labels(800);
  for (size_t i = 0; i < 800; ++i)
    labels[i] = math::RandInt(0, 2);

  LogisticRegressionFunction<> lrf(dataset, labels, 0.3);
  SGD<LogisticRegressionFunction<>> sgd(lrf);
  sgd.Shuffle() = false;
  LogisticRegression<> lr(sgd);

  LogisticRegression<> lr2(dataset.n_rows, 0.3);
  LogisticRegressionFunction<> lrf2(dataset, labels, 0.3);
  SGD<LogisticRegressionFunction<>> sgd2(lrf2);
  sgd2.Shuffle() = false;
  lr2.Train(sgd2);

  BOOST_REQUIRE_EQUAL(lr.Parameters().n_elem, lr2.Parameters().n_elem);
  for (size_t i = 0; i < lr.Parameters().n_elem; ++i)
    BOOST_REQUIRE_CLOSE(lr.Parameters()[i], lr2.Parameters()[i], 1e-5);
}

/**
 * Test sparse and dense logistic regression and make sure they both work the
 * same using the L-BFGS optimizer.
 */
BOOST_AUTO_TEST_CASE(LogisticRegressionSparseLBFGSTest)
{
  // Create a random dataset.
  arma::sp_mat dataset;
  dataset.sprandu(10, 800, 0.3);
  arma::mat denseDataset(dataset);
  arma::Row<size_t> labels(800);
  for (size_t i = 0; i < 800; ++i)
    labels[i] = math::RandInt(0, 2);

  LogisticRegression<> lr(denseDataset, labels, 0.3);
  LogisticRegression<arma::sp_mat> lrSparse(dataset, labels, 0.3);

  BOOST_REQUIRE_EQUAL(lr.Parameters().n_elem, lrSparse.Parameters().n_elem);
  for (size_t i = 0; i < lr.Parameters().n_elem; ++i)
    BOOST_REQUIRE_CLOSE(lr.Parameters()[i], lrSparse.Parameters()[i], 1e-3);
}

/**
 * Test sparse and dense logistic regression and make sure they both work the
 * same using the SGD optimizer.
 */
BOOST_AUTO_TEST_CASE(LogisticRegressionSparseSGDTest)
{
  // Create a random dataset.
  arma::sp_mat dataset;
  dataset.sprandu(10, 800, 0.3);
  arma::mat denseDataset(dataset);
  arma::Row<size_t> labels(800);
  for (size_t i = 0; i < 800; ++i)
    labels[i] = math::RandInt(0, 2);

  LogisticRegression<> lr(10, 0.3);
  LogisticRegressionFunction<> lrf(denseDataset, labels, 0.3);
  SGD<LogisticRegressionFunction<>> sgd(lrf);
  sgd.Shuffle() = false;
  lr.Train(sgd);

  LogisticRegression<arma::sp_mat> lrSparse(10, 0.3);
  LogisticRegressionFunction<arma::sp_mat> lrfSparse(dataset, labels, 0.3);
  SGD<LogisticRegressionFunction<arma::sp_mat>> sgdSparse(lrfSparse);
  sgdSparse.Shuffle() = false;
  lrSparse.Train(sgdSparse);

  BOOST_REQUIRE_EQUAL(lr.Parameters().n_elem, lrSparse.Parameters().n_elem);
  for (size_t i = 0; i < lr.Parameters().n_elem; ++i)
    BOOST_REQUIRE_CLOSE(lr.Parameters()[i], lrSparse.Parameters()[i], 1e-3);
}

/**
 * Test multi-point classification (Classify()).
 */
BOOST_AUTO_TEST_CASE(ClassifyTest)
{
  // Generate a two-Gaussian dataset.
  GaussianDistribution g1(arma::vec("1.0 1.0 1.0"), arma::eye(3, 3));
  GaussianDistribution g2(arma::vec("9.0 9.0 9.0"), arma::eye(3, 3));

  arma::mat data(3, 1000);
  arma::Row<size_t> responses(1000);
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  // Now train a logistic regression object on it.
  LogisticRegression<> lr(data.n_rows, 0.5);
  lr.Train<>(data, responses);

  // Create a test set.
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  arma::Row<size_t> predictions;
  lr.Classify(data, predictions);

  BOOST_REQUIRE_GE((double) arma::accu(predictions == responses), 900);
}

/**
 * Test that single-point classification gives the same results as multi-point
 * classification.
 */
BOOST_AUTO_TEST_CASE(SinglePointClassifyTest)
{
  // Generate a two-Gaussian dataset.
  GaussianDistribution g1(arma::vec("1.0 1.0 1.0"), arma::eye(3, 3));
  GaussianDistribution g2(arma::vec("9.0 9.0 9.0"), arma::eye(3, 3));

  arma::mat data(3, 1000);
  arma::Row<size_t> responses(1000);
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  // Now train a logistic regression object on it.
  LogisticRegression<> lr(data.n_rows, 0.5);
  lr.Train<>(data, responses);

  // Create a test set.
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  arma::Row<size_t> predictions;
  lr.Classify(data, predictions);

  for (size_t i = 0; i < data.n_cols; ++i)
  {
    size_t pred = lr.Classify(data.col(i));
    BOOST_REQUIRE_EQUAL(pred, predictions[i]);
  }
}

/**
 * Test that giving point probabilities works.
 */
BOOST_AUTO_TEST_CASE(ClassifyProbabilitiesTest)
{
  // Generate a two-Gaussian dataset.
  GaussianDistribution g1(arma::vec("1.0 1.0 1.0"), arma::eye(3, 3));
  GaussianDistribution g2(arma::vec("9.0 9.0 9.0"), arma::eye(3, 3));

  arma::mat data(3, 1000);
  arma::Row<size_t> responses(1000);
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  // Now train a logistic regression object on it.
  LogisticRegression<> lr(data.n_rows, 0.5);
  lr.Train<>(data, responses);

  // Create a test set.
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  arma::mat probabilities;
  lr.Classify(data, probabilities);

  BOOST_REQUIRE_EQUAL(probabilities.n_cols, data.n_cols);
  BOOST_REQUIRE_EQUAL(probabilities.n_rows, 2);

  for (size_t i = 0; i < data.n_cols; ++i)
  {
    BOOST_REQUIRE_CLOSE(probabilities(0, i) + probabilities(1, i), 1.0, 1e-5);
    // 10% tolerance on the per-class probabilities.
    if (responses[i] == 0)
      BOOST_REQUIRE_CLOSE(probabilities(0, i), 1.0, 10.0);
    else
      BOOST_REQUIRE_CLOSE(probabilities(1, i), 1.0, 10.0);
  }
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/lrsdp_test.cpp000066400000000000000000000221701315013601400210520ustar00rootroot00000000000000/**
 * @file lrsdp_test.cpp
 * @author Ryan Curtin
 *
 * Tests for LR-SDP (core/optimizers/sdp/).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/sdp/lrsdp.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::optimization;

BOOST_AUTO_TEST_SUITE(LRSDPTest);

/**
 * Create a Lovasz-Theta initial point.
 */
void CreateLovaszThetaInitialPoint(const arma::mat& edges,
                                   arma::mat& coordinates)
{
  // Get the number of vertices in the problem.
  const size_t vertices = max(max(edges)) + 1;

  const size_t m = edges.n_cols + 1;
  float r = 0.5 + sqrt(0.25 + 2 * m);
  if (ceil(r) > vertices)
    r = vertices; // An upper bound on the dimension.

  coordinates.set_size(vertices, ceil(r));

  // Now we set the entries of the initial matrix according to the formula
  // given in Section 4 of Monteiro and Burer.
  for (size_t i = 0; i < vertices; ++i)
  {
    for (size_t j = 0; j < ceil(r); ++j)
    {
      if (i == j)
        coordinates(i, j) = sqrt(1.0 / r) + sqrt(1.0 / (vertices * m));
      else
        coordinates(i, j) = sqrt(1.0 / (vertices * m));
    }
  }
}

/**
 * Prepare an LRSDP object to solve the Lovasz-Theta SDP in the manner detailed
 * in Monteiro + Burer 2004.  The list of edges in the graph must be given;
 * that is all that is necessary to set up the problem.  A matrix which will
 * contain initial point coordinates should be given also.
 */
void SetupLovaszTheta(const arma::mat& edges,
                      LRSDP<SDP<arma::mat>>& lovasz)
{
  // Get the number of vertices in the problem.
  const size_t vertices = max(max(edges)) + 1;

  // C = -(e e^T) = -ones().
  lovasz.SDP().C().ones(vertices, vertices);
  lovasz.SDP().C() *= -1;

  // b_0 = 1; else = 0.
  lovasz.SDP().SparseB().zeros(edges.n_cols + 1);
  lovasz.SDP().SparseB()[0] = 1;

  // A_0 = I_n.
  lovasz.SDP().SparseA()[0].eye(vertices, vertices);

  // A_ij only has ones at (i, j) and (j, i) and 0 elsewhere.
  for (size_t i = 0; i < edges.n_cols; ++i)
  {
    lovasz.SDP().SparseA()[i + 1].zeros(vertices, vertices);
    lovasz.SDP().SparseA()[i + 1](edges(0, i), edges(1, i)) = 1.;
    lovasz.SDP().SparseA()[i + 1](edges(1, i), edges(0, i)) = 1.;
  }

  // Set the Lagrange multipliers right.
  lovasz.AugLag().Lambda().ones(edges.n_cols + 1);
  lovasz.AugLag().Lambda() *= -1;
  lovasz.AugLag().Lambda()[0] = -double(vertices);
}

/**
 * johnson8-4-4.co test case for Lovasz-Theta LRSDP.
 * See Monteiro and Burer 2004.
 */
BOOST_AUTO_TEST_CASE(Johnson844LovaszThetaSDP)
{
  // Load the edges.
  arma::mat edges;
  data::Load("johnson8-4-4.csv", edges, true);

  // The LRSDP itself and the initial point.
  arma::mat coordinates;
  CreateLovaszThetaInitialPoint(edges, coordinates);

  LRSDP<SDP<arma::mat>> lovasz(edges.n_cols + 1, 0, coordinates);
  SetupLovaszTheta(edges, lovasz);

  double finalValue = lovasz.Optimize(coordinates);

  // The final value is taken from Monteiro + Burer 2004.
  BOOST_REQUIRE_CLOSE(finalValue, -14.0, 1e-5);

  // Now ensure that all the constraints are satisfied.
  arma::mat rrt = coordinates * trans(coordinates);
  BOOST_REQUIRE_CLOSE(trace(rrt), 1.0, 1e-5);

  // All those edge constraints...
  for (size_t i = 0; i < edges.n_cols; ++i)
  {
    BOOST_REQUIRE_SMALL(rrt(edges(0, i), edges(1, i)), 1e-5);
    BOOST_REQUIRE_SMALL(rrt(edges(1, i), edges(0, i)), 1e-5);
  }
}

/**
 * Create an unweighted graph Laplacian from the edges.
 */
void CreateSparseGraphLaplacian(const arma::mat& edges,
                                arma::sp_mat& laplacian)
{
  // Get the number of vertices in the problem.
  const size_t vertices = max(max(edges)) + 1;

  laplacian.zeros(vertices, vertices);

  for (size_t i = 0; i < edges.n_cols; ++i)
  {
    laplacian(edges(0, i), edges(1, i)) = -1.0;
    laplacian(edges(1, i), edges(0, i)) = -1.0;
  }

  for (size_t i = 0; i < vertices; ++i)
  {
    laplacian(i, i) = -arma::accu(laplacian.row(i));
  }
}

BOOST_AUTO_TEST_CASE(ErdosRenyiRandomGraphMaxCutSDP)
{
  // Load the edges.
  arma::mat edges;
  data::Load("erdosrenyi-n100.csv", edges, true);

  arma::sp_mat laplacian;
  CreateSparseGraphLaplacian(edges, laplacian);

  float r = 0.5 + sqrt(0.25 + 2 * edges.n_cols);
  if (ceil(r) > laplacian.n_rows)
    r = laplacian.n_rows;

  // Initialize coordinates to a feasible point.
  arma::mat coordinates(laplacian.n_rows, ceil(r));
  coordinates.zeros();
  for (size_t i = 0; i < coordinates.n_rows; ++i)
  {
    coordinates(i, i % coordinates.n_cols) = 1.;
  }

  LRSDP<SDP<arma::sp_mat>> maxcut(laplacian.n_rows, 0, coordinates);
  maxcut.SDP().C() = laplacian;
  maxcut.SDP().C() *= -1.; // We need to minimize the negative.
  maxcut.SDP().SparseB().ones(laplacian.n_rows);
  for (size_t i = 0; i < laplacian.n_rows; ++i)
  {
    maxcut.SDP().SparseA()[i].zeros(laplacian.n_rows, laplacian.n_rows);
    maxcut.SDP().SparseA()[i](i, i) = 1.;
  }

  const double finalValue = maxcut.Optimize(coordinates);

  const arma::mat rrt = coordinates * trans(coordinates);
  for (size_t i = 0; i < laplacian.n_rows; ++i)
  {
    BOOST_REQUIRE_CLOSE(rrt(i, i), 1., 1e-5);
  }

  // The final value is taken by solving with Mosek.
  BOOST_REQUIRE_CLOSE(finalValue, -3672.7, 1e-1);
}

/*
 * Test a nuclear norm minimization SDP.
 *
 * Specifically, fix an unknown m x n matrix X.  Our goal is to recover X from
 * p measurements of X, where the i-th measurement is of the form
 *
 *   b_i = dot(A_i, X)
 *
 * where the A_i's have iid entries from Normal(0, 1/p).  We do this by
 * solving the following semidefinite program
 *
 *   min ||X||_* subj to dot(A_i, X) = b_i, i = 1, ..., p
 *
 * where ||X||_* denotes the nuclear norm (sum of singular values) of X.  The
 * equivalent SDP is
 *
 *   min tr(W1) + tr(W2) : [ W1, X ; X', W2 ] is PSD,
 *                         dot(A_i, X) = b_i, i = 1, ..., p
 *
 * For more details on matrix sensing and nuclear norm minimization, see
 *
 *   Guaranteed Minimum-Rank Solutions of Linear Matrix Equations via Nuclear
 *   Norm Minimization.
 *   Benjamin Recht, Maryam Fazel, Pablo Parrilo.
 *   SIAM Review 2010.
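 *
 * (As a rough guide from that literature, on the order of r * (m + n) random
 * Gaussian measurements suffice to recover a rank-r X exactly, which is the
 * regime this test operates in.)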
 *
 */
BOOST_AUTO_TEST_CASE(GaussianMatrixSensingSDP)
{
  arma::mat Xorig, A;

  // Read the unknown matrix X and the measurement matrices A_i in.
  data::Load("sensing_X.csv", Xorig, true, false);
  data::Load("sensing_A.csv", A, true, false);

  const size_t m = Xorig.n_rows;
  const size_t n = Xorig.n_cols;
  const size_t p = A.n_rows;
  assert(A.n_cols == m * m);

  arma::vec b(p);
  for (size_t i = 0; i < p; ++i)
  {
    const arma::mat Ai = arma::reshape(A.row(i), n, m);
    b(i) = arma::dot(trans(Ai), Xorig);
  }

  float r = 0.5 + sqrt(0.25 + 2 * p);
  if (ceil(r) > m + n)
    r = m + n;
  arma::mat coordinates;
  coordinates.eye(m + n, ceil(r));

  LRSDP<SDP<arma::sp_mat>> sensing(0, p, coordinates);
  sensing.SDP().C().eye(m + n, m + n);
  sensing.SDP().DenseB() = 2. * b;

  const auto block_rows = arma::span(0, m - 1);
  const auto block_cols = arma::span(m, m + n - 1);

  for (size_t i = 0; i < p; ++i)
  {
    const arma::mat Ai = arma::reshape(A.row(i), n, m);
    sensing.SDP().DenseA()[i].zeros(m + n, m + n);
    sensing.SDP().DenseA()[i](block_rows, block_cols) = trans(Ai);
    sensing.SDP().DenseA()[i](block_cols, block_rows) = Ai;
  }

  double finalValue = sensing.Optimize(coordinates);
  BOOST_REQUIRE_CLOSE(finalValue, 44.7550132629, 1e-1);

  const arma::mat rrt = coordinates * trans(coordinates);
  for (size_t i = 0; i < p; ++i)
  {
    const arma::mat Ai = arma::reshape(A.row(i), n, m);
    const double measurement =
        arma::dot(trans(Ai), rrt(block_rows, block_cols));
    BOOST_REQUIRE_CLOSE(measurement, b(i), 1e-3);
  }

  // Check matrix recovery.
  const double err = arma::norm(Xorig - rrt(block_rows, block_cols), "fro") /
      arma::norm(Xorig, "fro");
  BOOST_REQUIRE_SMALL(err, 1e-3);
}

/**
 * keller4.co test case for Lovasz-Theta LRSDP.
 * This is commented out because it takes a long time to run.
 * See Monteiro and Burer 2004.
 *
BOOST_AUTO_TEST_CASE(Keller4LovaszThetaSDP)
{
  // Load the edges.
  arma::mat edges;
  data::Load("keller4.csv", edges, true);

  // The LRSDP itself and the initial point.
  arma::mat coordinates;
  CreateLovaszThetaInitialPoint(edges, coordinates);

  LRSDP<SDP<arma::mat>> lovasz(edges.n_cols, coordinates);
  SetupLovaszTheta(edges, lovasz);

  double finalValue = lovasz.Optimize(coordinates);

  // The final value is taken from Monteiro + Burer 2004.
  BOOST_REQUIRE_CLOSE(finalValue, -14.013, 1e-2); // Not as much precision...

  // The SB method came to -14.013, but M&B's method only came to -14.005.

  // Now ensure that all the constraints are satisfied.
  arma::mat rrt = coordinates * trans(coordinates);
  BOOST_REQUIRE_CLOSE(trace(rrt), 1.0, 1e-5);

  // All those edge constraints...
  for (size_t i = 0; i < edges.n_cols; ++i)
  {
    BOOST_REQUIRE_SMALL(rrt(edges(0, i), edges(1, i)), 1e-3);
    BOOST_REQUIRE_SMALL(rrt(edges(1, i), edges(0, i)), 1e-3);
  }
}*/

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/lsh_test.cpp000066400000000000000000000622161315013601400205210ustar00rootroot00000000000000/**
 * @file lsh_test.cpp
 *
 * Unit tests for the 'LSHSearch' class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/metrics/lmetric.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

#include <mlpack/methods/lsh/lsh_search.hpp>
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>

using namespace std;
using namespace mlpack;
using namespace mlpack::neighbor;

/**
 * Generates a point set of four clusters:
 *  - C1 around (0.5, 3.5),
 *  - C2 around (3.5, 3.5),
 *  - C3 around (0.5, 0.5),
 *  - C4 around (3.5, 0.5).
 *
 * It then merges these clusters into one set, rdata.
*/ void GetPointset(const size_t N, arma::mat& rdata) { const size_t d = 2; // Create four clusters of points. arma::mat c1(d, N / 4, arma::fill::randu); arma::mat c2(d, N / 4, arma::fill::randu); arma::mat c3(d, N / 4, arma::fill::randu); arma::mat c4(d, N / 4, arma::fill::randu); arma::colvec offset1; offset1 << 0 << arma::endr << 3 << arma::endr; arma::colvec offset2; offset2 << 3 << arma::endr << 3 << arma::endr; arma::colvec offset4; offset4 << 3 << arma::endr << 0 << arma::endr; // Spread points in plane. for (size_t p = 0; p < N / 4; ++p) { c1.col(p) += offset1; c2.col(p) += offset2; c4.col(p) += offset4; } rdata.set_size(d, N); rdata.cols(0, (N / 4) - 1) = c1; rdata.cols(N / 4, (N / 2) - 1) = c2; rdata.cols(N / 2, (3 * N / 4) - 1) = c3; rdata.cols(3 * N / 4, N - 1) = c4; } /** * Generates two queries, one around (0.5, 0.5) and one around (3.5, 3.5). */ void GetQueries(arma::mat& qdata) { const size_t d = 2; // Generate two queries inside two of the clusters. // Put query 1 into cluster 3. arma::colvec q1, q2; q1.randu(d, 1); // Offset second query to go into cluster 2. q2.randu(d, 1); q2.row(0) += 3; q2.row(1) += 3; qdata.set_size(d, 2); qdata.col(0) = q1; qdata.col(1) = q2; } BOOST_AUTO_TEST_SUITE(LSHTest); /** * Test: Run LSH with varying number of tables, keeping all other parameters * constant. Compute the recall, i.e. the fraction of reported neighbors that * are real neighbors of the query. * LSH's property is that (with high probability), increasing the number of * tables will increase recall. Epsilon ensures that if noise lightly affects * the projections, the test will not fail. * Because the test is randomized, it can produce false negatives; we therefore * attempt it numTries times and only declare failure if all attempts fail. */ BOOST_AUTO_TEST_CASE(NumTablesTest) { // kNN and LSH parameters (use LSH default parameters). const int k = 4; const int numProj = 10; const double hashWidth = 0; const int secondHashSize = 99901; const int bucketSize = 500; // Test parameters. const double epsilon = 0.1; // Allowed deviation from expected monotonicity. const int numTries = 5; // Tries for each test before declaring failure. // Read iris training and testing data as reference and query sets. const string trainSet = "iris_train.csv"; const string testSet = "iris_test.csv"; arma::mat rdata; arma::mat qdata; data::Load(trainSet, rdata, true); data::Load(testSet, qdata, true); // Run classic knn on reference data. KNN knn(rdata); arma::Mat groundTruth; arma::mat groundDistances; knn.Search(qdata, k, groundTruth, groundDistances); bool fail; for (int t = 0; t < numTries; ++t) { fail = false; const int lSize = 6; // Number of runs. const int lValue[] = {1, 8, 16, 32, 64, 128}; // Number of tables. double lValueRecall[lSize] = {0.0}; // Recall of each LSH run. for (size_t l = 0; l < lSize; ++l) { // Run LSH with only numTables varying (other values are defaults). LSHSearch<> lshTest(rdata, numProj, lValue[l], hashWidth, secondHashSize, bucketSize); arma::Mat lshNeighbors; arma::mat lshDistances; lshTest.Search(qdata, k, lshNeighbors, lshDistances); // Compute recall for each query. lValueRecall[l] = LSHSearch<>::ComputeRecall(lshNeighbors, groundTruth); if (l > 0) { if (lValueRecall[l] < lValueRecall[l - 1] - epsilon) { fail = true; // If test fails at one point, stop and retry. break; } } } if (!fail) break; // If test passes one time, it is sufficient. } BOOST_REQUIRE(fail == false); } /** * Test: Run LSH with varying hash width, keeping all other parameters * constant. Compute the recall, i.e.
the fraction of reported neighbors that * are real neighbors of the query. * LSH's property is that (with high probability), increasing the hash width * will increase recall. Epsilon ensures that if noise lightly affects the * projections, the test will not fail. */ BOOST_AUTO_TEST_CASE(HashWidthTest) { // kNN and LSH parameters (use LSH default parameters). const int k = 4; const int numTables = 30; const int numProj = 10; const int secondHashSize = 99901; const int bucketSize = 500; // Test parameters. const double epsilon = 0.1; // Allowed deviation from expected monotonicity. // Read iris training and testing data as reference and query. const string trainSet = "iris_train.csv"; const string testSet = "iris_test.csv"; arma::mat rdata; arma::mat qdata; data::Load(trainSet, rdata, true); data::Load(testSet, qdata, true); // Run classic knn on reference data. KNN knn(rdata); arma::Mat groundTruth; arma::mat groundDistances; knn.Search(qdata, k, groundTruth, groundDistances); const int hSize = 7; // Number of runs. const double hValue[] = {0.1, 0.5, 1, 5, 10, 50, 500}; // Hash width. double hValueRecall[hSize] = {0.0}; // Recall of each run. for (size_t h = 0; h < hSize; ++h) { // Run LSH with only hashWidth varying (other values are defaults). LSHSearch<> lshTest( rdata, numProj, numTables, hValue[h], secondHashSize, bucketSize); arma::Mat lshNeighbors; arma::mat lshDistances; lshTest.Search(qdata, k, lshNeighbors, lshDistances); // Compute recall for each query. hValueRecall[h] = LSHSearch<>::ComputeRecall(lshNeighbors, groundTruth); if (h > 0) BOOST_REQUIRE_GE(hValueRecall[h], hValueRecall[h - 1] - epsilon); } } /** * Test: Run LSH with varying number of projections, keeping other parameters * constant. Compute the recall, i.e. the fraction of reported neighbors that * are real neighbors of the query. * LSH's property is that (with high probability), increasing the number of * projections per table will decrease recall. Epsilon ensures that if noise * lightly affects the projections, the test will not fail. */ BOOST_AUTO_TEST_CASE(NumProjTest) { // kNN and LSH parameters (use LSH default parameters). const int k = 4; const int numTables = 30; const double hashWidth = 0; const int secondHashSize = 99901; const int bucketSize = 500; // Test parameters. const double epsilon = 0.1; // Allowed deviation from expected monotonicity. // Read iris training and testing data as reference and query sets. const string trainSet = "iris_train.csv"; const string testSet = "iris_test.csv"; arma::mat rdata; arma::mat qdata; data::Load(trainSet, rdata, true); data::Load(testSet, qdata, true); // Run classic knn on reference data. KNN knn(rdata); arma::Mat groundTruth; arma::mat groundDistances; knn.Search(qdata, k, groundTruth, groundDistances); // LSH test parameters for numProj. const int pSize = 5; // Number of runs. const int pValue[] = {1, 10, 20, 50, 100}; // Number of projections. double pValueRecall[pSize] = {0.0}; // Recall of each run. for (size_t p = 0; p < pSize; ++p) { // Run LSH with only numProj varying (other values are defaults). LSHSearch<> lshTest( rdata, pValue[p], numTables, hashWidth, secondHashSize, bucketSize); arma::Mat lshNeighbors; arma::mat lshDistances; lshTest.Search(qdata, k, lshNeighbors, lshDistances); // Compute recall for each query. pValueRecall[p] = LSHSearch<>::ComputeRecall(lshNeighbors, groundTruth); // Don't check the first run; only check that increasing P decreases recall.
if (p > 0) BOOST_REQUIRE_LE(pValueRecall[p] - epsilon, pValueRecall[p - 1]); } } /** * Test: Run two LSH searches: * First, a very expensive LSH search, with a large number of hash tables * and a large hash width. This run should return an acceptable recall. We set * the bar very low (recall >= 50%) to make sure that a test failure means a * bad implementation. * Second, a very cheap LSH search, with parameters that should cause recall * to be very low. Set the threshold very high (recall <= 25%) to make sure * that a test failure means a bad implementation. */ BOOST_AUTO_TEST_CASE(RecallTest) { // kNN and LSH parameters (use LSH default parameters). const int k = 4; const int secondHashSize = 99901; const int bucketSize = 500; // Read iris training and testing data as reference and query sets. const string trainSet = "iris_train.csv"; const string testSet = "iris_test.csv"; arma::mat rdata; arma::mat qdata; data::Load(trainSet, rdata, true); data::Load(testSet, qdata, true); // Run classic knn on reference data. KNN knn(rdata); arma::Mat groundTruth; arma::mat groundDistances; knn.Search(qdata, k, groundTruth, groundDistances); // Expensive LSH run. const int hExp = 10000; // First-level hash width. const int kExp = 1; // Projections per table. const int tExp = 128; // Number of tables. const double recallThreshExp = 0.5; LSHSearch<> lshTestExp( rdata, kExp, tExp, hExp, secondHashSize, bucketSize); arma::Mat lshNeighborsExp; arma::mat lshDistancesExp; lshTestExp.Search(qdata, k, lshNeighborsExp, lshDistancesExp); const double recallExp = LSHSearch<>::ComputeRecall(lshNeighborsExp, groundTruth); // This run should have recall higher than the threshold. BOOST_REQUIRE_GE(recallExp, recallThreshExp); // Cheap LSH run. const int hChp = 1; // Small first-level hash width. const int kChp = 100; // Large number of projections per table. const int tChp = 1; // Only one table. const double recallThreshChp = 0.25; // Recall threshold. LSHSearch<> lshTestChp( rdata, kChp, tChp, hChp, secondHashSize, bucketSize); arma::Mat lshNeighborsChp; arma::mat lshDistancesChp; lshTestChp.Search(qdata, k, lshNeighborsChp, lshDistancesChp); const double recallChp = LSHSearch<>::ComputeRecall(lshNeighborsChp, groundTruth); // This run should have recall lower than the threshold. BOOST_REQUIRE_LE(recallChp, recallThreshChp); } /** * Test: This is a deterministic test that projects 2-d points to a known line * (axis 2). The reference set contains 4 well-separated clusters that will * merge into 2 clusters when projected on that axis. * * We create two queries, each one belonging to one cluster (q1 in cluster 3 * located around (0, 0) and q2 in cluster 2 located around (3, 3)). After the * projection, q1 should have neighbors in C3 and C4 and q2 in C1 and C2. */ BOOST_AUTO_TEST_CASE(DeterministicMerge) { const size_t N = 40; // Must be divisible by 4 to create 4 clusters properly. arma::mat rdata; arma::mat qdata; GetPointset(N, rdata); GetQueries(qdata); const int k = N / 2; const double hashWidth = 1; const int secondHashSize = 99901; const int bucketSize = 500; // 1 table, with one projection to axis 1. arma::cube projections(2, 1, 1); projections(0, 0, 0) = 0; projections(1, 0, 0) = 1; LSHSearch<> lshTest(rdata, projections, hashWidth, secondHashSize, bucketSize); arma::Mat neighbors; arma::mat distances; lshTest.Search(qdata, k, neighbors, distances); // Test query 1. size_t q; for (size_t j = 0; j < k; ++j) // For each neighbor. { // If the neighbor is not found, ignore the point.
if (neighbors(j, 0) == N || neighbors(j, 1) == N) continue; // Query 1 is in cluster 3, which under this projection was merged with // cluster 4. Clusters 3 and 4 have points 20:39, so only neighbors among // those should be found. q = 0; BOOST_REQUIRE_GE(neighbors(j, q), N / 2); // Query 2 is in cluster 2, which under this projection was merged with // cluster 1. Clusters 1 and 2 have points 0:19, so only neighbors among // those should be found. q = 1; BOOST_REQUIRE_LT(neighbors(j, q), N / 2); } } /** * Test: This is a deterministic test that projects 2-d points to the plane. * The reference set contains 4 well-separated clusters that should not merge. * * We create two queries, each one belonging to one cluster (q1 in cluster 3 * located around (0, 0) and q2 in cluster 2 located around (3, 3)). The test is * a success if, after the projection, q1 has neighbors only in C3 and q2 only * in C2. */ BOOST_AUTO_TEST_CASE(DeterministicNoMerge) { const size_t N = 40; arma::mat rdata; arma::mat qdata; GetPointset(N, rdata); GetQueries(qdata); const int k = N / 2; const double hashWidth = 1; const int secondHashSize = 99901; const int bucketSize = 500; // 1 table, with two projections (one onto each axis). arma::cube projections(2, 2, 1); projections(0, 0, 0) = 0; projections(1, 0, 0) = 1; projections(0, 1, 0) = 1; projections(1, 1, 0) = 0; LSHSearch<> lshTest(rdata, projections, hashWidth, secondHashSize, bucketSize); arma::Mat neighbors; arma::mat distances; lshTest.Search(qdata, k, neighbors, distances); // Test query 1. size_t q; for (size_t j = 0; j < k; ++j) // For each neighbor. { // If the neighbor is not found, ignore the point. if (neighbors(j, 0) == N || neighbors(j, 1) == N) continue; // Query 1 is in cluster 3, which is points 20:29. q = 0; BOOST_REQUIRE_LT(neighbors(j, q), 3 * N / 4); BOOST_REQUIRE_GE(neighbors(j, q), N / 2); // Query 2 is in cluster 2, which is points 10:19. q = 1; BOOST_REQUIRE_LT(neighbors(j, q), N / 2); BOOST_REQUIRE_GE(neighbors(j, q), N / 4); } } /** * Test: Create an LSHSearch object and use an increasing number of probes to * search for points. Require that recall for the same object doesn't decrease * with increasing number of probes. Also require that at least a few times * there's some increase in recall. */ BOOST_AUTO_TEST_CASE(MultiprobeTest) { // Test parameters. const double epsilonIncrease = 0.05; const size_t repetitions = 5; // Train five objects. const size_t probeTrials = 5; const size_t numProbes[probeTrials] = {0, 1, 2, 3, 4}; // Algorithm parameters. const int k = 4; const int numTables = 16; const int numProj = 3; const double hashWidth = 0; const int secondHashSize = 99901; const int bucketSize = 500; const string trainSet = "iris_train.csv"; const string testSet = "iris_test.csv"; arma::mat rdata; arma::mat qdata; data::Load(trainSet, rdata, true); data::Load(testSet, qdata, true); // Run classic knn on reference set. KNN knn(rdata); arma::Mat groundTruth; arma::mat groundDistances; knn.Search(qdata, k, groundTruth, groundDistances); bool foundIncrease = false; for (size_t rep = 0; rep < repetitions; ++rep) { // Train a model. LSHSearch<> multiprobeTest(rdata, numProj, numTables, hashWidth, secondHashSize, bucketSize); double prevRecall = 0; // Search with varying number of probes. for (size_t p = 0; p < probeTrials; ++p) { arma::Mat lshNeighbors; arma::mat lshDistances; multiprobeTest.Search(qdata, k, lshNeighbors, lshDistances, 0, numProbes[p]); // Compute recall of this run.
double recall = LSHSearch<>::ComputeRecall(lshNeighbors, groundTruth); if (p > 0) { // More probes should at the very least not lower recall... BOOST_REQUIRE_GE(recall, prevRecall); // ... and should ideally increase it a bit. if (recall > prevRecall + epsilonIncrease) foundIncrease = true; prevRecall = recall; } } } BOOST_REQUIRE(foundIncrease); } /** * Test: This is a deterministic test that verifies multiprobe LSH works * correctly. To do this, we generate two queries, q1 and q2. q1 is hashed * directly under cluster C2, q2 is hashed in C2's center. * We verify that: * 1) q1 should have no neighbors without multiprobe. * 2) q1 should have neighbors only from C2 with 1 additional probe. * 3) q2 should have all neighbors found with 3 additional probes. */ BOOST_AUTO_TEST_CASE(MultiprobeDeterministicTest) { // Generate known deterministic clusters of points. const size_t N = 40; arma::mat rdata; GetPointset(N, rdata); const int k = N / 4; const double hashWidth = 1; const int secondHashSize = 99901; const int bucketSize = 500; // 1 table, projections on orthonormal plane. arma::cube projections(2, 2, 1); projections(0, 0, 0) = 1; projections(1, 0, 0) = 0; projections(0, 1, 0) = 0; projections(1, 1, 0) = 1; // Construct LSH object with given tables. LSHSearch<> lshTest(rdata, projections, hashWidth, secondHashSize, bucketSize); const arma::mat offsets = lshTest.Offsets(); // Construct q1 so it is hashed directly under C2. arma::mat q1; q1 << 3.9 << arma::endr << 2.99; q1 -= offsets; // Construct q2 so it is hashed near the center of C2. arma::mat q2; q2 << 3.6 << arma::endr << 3.6; q2 -= offsets; arma::Mat neighbors; arma::mat distances; // Test that q1 simple search comes up empty. lshTest.Search(q1, k, neighbors, distances); BOOST_REQUIRE(arma::all(neighbors.col(0) == N)); // Test that q1 search with 1 additional probe returns some C2 points. lshTest.Search(q1, k, neighbors, distances, 0, 1); BOOST_REQUIRE(arma::all( (neighbors.col(0) == N) || ((neighbors.col(0) >= N / 4) && (neighbors.col(0) < N / 2)))); // Test that q2 simple search returns some C2 points. lshTest.Search(q2, k, neighbors, distances); BOOST_REQUIRE(arma::all( (neighbors.col(0) == N) || ((neighbors.col(0) >= N / 4) && (neighbors.col(0) < N / 2)))); // Test that q2 with 3 additional probes returns all C2 points. lshTest.Search(q2, k, neighbors, distances, 0, 3); BOOST_REQUIRE(arma::all( (neighbors.col(0) >= N / 4) && (neighbors.col(0) < N / 2))); } BOOST_AUTO_TEST_CASE(LSHTrainTest) { // This is not a very good test; it simply checks that the re-trained LSH // model operates on the correct dimensionality and returns the correct number // of results. arma::mat referenceData = arma::randu(3, 100); arma::mat newReferenceData = arma::randu(10, 400); arma::mat queryData = arma::randu(10, 200); LSHSearch<> lsh(referenceData, 3, 2, 2.0, 11, 3); lsh.Train(newReferenceData, 4, 3, 3.0, 12, 4); arma::Mat neighbors; arma::mat distances; lsh.Search(queryData, 3, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 200); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 3); BOOST_REQUIRE_EQUAL(distances.n_cols, 200); BOOST_REQUIRE_EQUAL(distances.n_rows, 3); } /** * Test: This verifies that ComputeRecall works correctly by providing two * identical vectors and requiring that the recall is equal to 1.
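 * (ComputeRecall returns the fraction of true neighbors that were actually * reported, so identical base and found sets must give exactly 1.)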
*/ BOOST_AUTO_TEST_CASE(RecallTestIdentical) { const size_t k = 5; // 5 nearest neighbors. const size_t numQueries = 1; // base = [1; 2; 3; 4; 5]. arma::Mat base; base.set_size(k, numQueries); base.col(0) = arma::linspace< arma::Col >(1, k, k); // q1 = [1; 2; 3; 4; 5]. Expect recall = 1. arma::Mat q1; q1.set_size(k, numQueries); q1.col(0) = arma::linspace< arma::Col >(1, k, k); BOOST_REQUIRE_EQUAL(LSHSearch<>::ComputeRecall(base, q1), 1); } /** * Test: This verifies that ComputeRecall returns correct values for partially * correct found neighbors. This is important because it is a good example of * how the recall and accuracy metrics differ: accuracy in this case would be * 0, but recall should not be. */ BOOST_AUTO_TEST_CASE(RecallTestPartiallyCorrect) { const size_t k = 5; // 5 nearest neighbors. const size_t numQueries = 1; // base = [1; 2; 3; 4; 5]. arma::Mat base; base.set_size(k, numQueries); base.col(0) = arma::linspace< arma::Col >(1, k, k); // q2 = [2; 3; 4; 6; 7]. Expect recall = 0.6. This is important because this // is a good example of how recall and accuracy differ. Accuracy here would // be 0 but recall should not be. arma::Mat q2; q2.set_size(k, numQueries); q2 << 2 << arma::endr << 3 << arma::endr << 4 << arma::endr << 6 << arma::endr << 7 << arma::endr; BOOST_REQUIRE_CLOSE(LSHSearch<>::ComputeRecall(base, q2), 0.6, 0.0001); } /** * Test: If given a completely wrong vector, ComputeRecall should return 0. */ BOOST_AUTO_TEST_CASE(RecallTestIncorrect) { const size_t k = 5; // 5 nearest neighbors. const size_t numQueries = 1; // base = [1; 2; 3; 4; 5]. arma::Mat base; base.set_size(k, numQueries); base.col(0) = arma::linspace< arma::Col >(1, k, k); // q3 = [6; 7; 8; 9; 10]. Expected recall = 0. arma::Mat q3; q3.set_size(k, numQueries); q3.col(0) = arma::linspace< arma::Col >(k + 1, 2 * k, k); BOOST_REQUIRE_EQUAL(LSHSearch<>::ComputeRecall(base, q3), 0); } /** * Test: If given a vector of wrong shape, ComputeRecall should throw an * exception. */ BOOST_AUTO_TEST_CASE(RecallTestException) { const size_t k = 5; // 5 nearest neighbors. const size_t numQueries = 1; // base = [1; 2; 3; 4; 5]. arma::Mat base; base.set_size(k, numQueries); base.col(0) = arma::linspace< arma::Col >(1, k, k); // Verify that nonsense arguments throw an exception. arma::Mat q4; q4.set_size(2 * k, numQueries); BOOST_REQUIRE_THROW(LSHSearch<>::ComputeRecall(base, q4), std::invalid_argument); } BOOST_AUTO_TEST_CASE(EmptyConstructorTest) { // If we create an empty LSH model and then call Search(), it should throw an // exception. LSHSearch<> lsh; arma::mat dataset = arma::randu(5, 50); arma::mat distances; arma::Mat neighbors; BOOST_REQUIRE_THROW(lsh.Search(dataset, 2, neighbors, distances), std::invalid_argument); // Now, train. lsh.Train(dataset, 4, 3, 3.0, 12, 4); lsh.Search(dataset, 3, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 50); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 3); BOOST_REQUIRE_EQUAL(distances.n_cols, 50); BOOST_REQUIRE_EQUAL(distances.n_rows, 3); } // These two tests are only compiled if the user has specified OpenMP to be // used. #ifdef HAS_OPENMP /** * Test: This test verifies that parallel query processing returns correct * results for the bichromatic search. */ BOOST_AUTO_TEST_CASE(ParallelBichromatic) { // kNN and LSH parameters (use LSH default parameters). const int k = 4; const int numTables = 16; const int numProj = 3; // Read iris training and testing data as reference and query sets.
const string trainSet = "iris_train.csv"; const string testSet = "iris_test.csv"; arma::mat rdata; arma::mat qdata; data::Load(trainSet, rdata, true); data::Load(testSet, qdata, true); // Where to store neighbors and distances arma::Mat sequentialNeighbors; arma::Mat parallelNeighbors; arma::mat distances; // Construct an LSH object. By default, it uses the maximum number of threads LSHSearch<> lshTest(rdata, numProj, numTables); //default parameters lshTest.Search(qdata, k, parallelNeighbors, distances); // Now perform same search but with 1 thread size_t prevNumThreads = omp_get_max_threads(); // Store number of threads used. omp_set_num_threads(1); lshTest.Search(qdata, k, sequentialNeighbors, distances); omp_set_num_threads(prevNumThreads); // Require both have same results double recall = LSHSearch<>::ComputeRecall(sequentialNeighbors, parallelNeighbors); BOOST_REQUIRE_EQUAL(recall, 1); } /** * Test: This test verifies that parallel query processing returns correct * results for the monochromatic search. */ BOOST_AUTO_TEST_CASE(ParallelMonochromatic) { // kNN and LSH parameters. const int k = 4; const int numTables = 16; const int numProj = 3; // Read iris training data as reference and query set. const string trainSet = "iris_train.csv"; arma::mat rdata; data::Load(trainSet, rdata, true); // Where to store neighbors and distances arma::Mat sequentialNeighbors; arma::Mat parallelNeighbors; arma::mat distances; // Construct an LSH object, using maximum number of available threads. LSHSearch<> lshTest(rdata, numProj, numTables); lshTest.Search(k, parallelNeighbors, distances); // Now perform same search but with 1 thread. size_t prevNumThreads = omp_get_max_threads(); // Store number of threads used. omp_set_num_threads(1); lshTest.Search(k, sequentialNeighbors, distances); omp_set_num_threads(prevNumThreads); // Require both have same results. double recall = LSHSearch<>::ComputeRecall(sequentialNeighbors, parallelNeighbors); BOOST_REQUIRE_EQUAL(recall, 1); } #endif BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/math_test.cpp000066400000000000000000000327571315013601400206730ustar00rootroot00000000000000/** * @file math_test.cpp * @author Ryan Curtin * * Tests for everything in the math:: namespace. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace math; BOOST_AUTO_TEST_SUITE(MathTest); /** * Verify that the empty constructor creates an empty range. */ BOOST_AUTO_TEST_CASE(RangeEmptyConstructor) { Range x = Range(); // Just verify that it is empty. BOOST_REQUIRE_GT(x.Lo(), x.Hi()); } /** * Verify that the point constructor correctly creates a range that is just a * point. */ BOOST_AUTO_TEST_CASE(RangePointConstructor) { Range x(10.0); BOOST_REQUIRE_CLOSE(x.Lo(), x.Hi(), 1e-25); BOOST_REQUIRE_SMALL(x.Width(), 1e-5); BOOST_REQUIRE_CLOSE(x.Lo(), 10.0, 1e-25); BOOST_REQUIRE_CLOSE(x.Hi(), 10.0, 1e-25); } /** * Verify that the range constructor correctly creates the range. */ BOOST_AUTO_TEST_CASE(RangeConstructor) { Range x(0.5, 5.5); BOOST_REQUIRE_CLOSE(x.Lo(), 0.5, 1e-25); BOOST_REQUIRE_CLOSE(x.Hi(), 5.5, 1e-25); } /** * Test that we get the width correct. 
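 * Width() should be Hi() - Lo() for a non-empty range and 0 for an empty one * (where Hi() < Lo()); the checks below assume exactly these semantics.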
*/ BOOST_AUTO_TEST_CASE(RangeWidth) { Range x(0.0, 10.0); BOOST_REQUIRE_CLOSE(x.Width(), 10.0, 1e-20); // Make it empty. x.Hi() = 0.0; BOOST_REQUIRE_SMALL(x.Width(), 1e-5); // Make it negative. x.Hi() = -2.0; BOOST_REQUIRE_SMALL(x.Width(), 1e-5); // Just one more test. x.Lo() = -5.2; x.Hi() = 5.2; BOOST_REQUIRE_CLOSE(x.Width(), 10.4, 1e-5); } /** * Test that we get the midpoint correct. */ BOOST_AUTO_TEST_CASE(RangeMidpoint) { Range x(0.0, 10.0); BOOST_REQUIRE_CLOSE(x.Mid(), 5.0, 1e-5); x.Lo() = -5.0; BOOST_REQUIRE_CLOSE(x.Mid(), 2.5, 1e-5); } /** * Test that we can expand to include other ranges correctly. */ BOOST_AUTO_TEST_CASE(RangeIncludeOther) { // We need to test both |= and |. // We have three cases: non-overlapping, overlapping, and equivalent, and then // a couple permutations (switch left with right and make sure it still works). Range x(0.0, 2.0); Range y(3.0, 5.0); Range z(0.0, 2.0); // Used for operator|=(). Range w; z |= y; w = x | y; BOOST_REQUIRE_SMALL(z.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 5.0, 1e-5); BOOST_REQUIRE_SMALL(w.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 5.0, 1e-5); // Switch operand order. z = y; z |= x; w = y | x; BOOST_REQUIRE_SMALL(z.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 5.0, 1e-5); BOOST_REQUIRE_SMALL(w.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 5.0, 1e-5); // Now make them overlapping. x = Range(0.0, 3.5); y = Range(3.0, 4.0); z = x; z |= y; w = x | y; BOOST_REQUIRE_SMALL(z.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 4.0, 1e-5); BOOST_REQUIRE_SMALL(w.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 4.0, 1e-5); // Switch operand order. z = y; z |= x; w = y | x; BOOST_REQUIRE_SMALL(z.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 4.0, 1e-5); BOOST_REQUIRE_SMALL(w.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 4.0, 1e-5); // Now the equivalent case. x = Range(0.0, 2.0); y = Range(0.0, 2.0); z = x; z |= y; w = x | y; BOOST_REQUIRE_SMALL(z.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 2.0, 1e-5); BOOST_REQUIRE_SMALL(w.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 2.0, 1e-5); z = y; z |= x; w = y | x; BOOST_REQUIRE_SMALL(z.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 2.0, 1e-5); BOOST_REQUIRE_SMALL(w.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 2.0, 1e-5); } /** * Test that we can 'and' ranges correctly. */ BOOST_AUTO_TEST_CASE(RangeIntersectOther) { // We need to test both &= and &. // We have three cases: non-overlapping, overlapping, and equivalent, and then // a couple permutations (switch left with right and make sure it still works). Range x(0.0, 2.0); Range y(3.0, 5.0); Range z(0.0, 2.0); Range w; z &= y; w = x & y; BOOST_REQUIRE_SMALL(z.Width(), 1e-5); BOOST_REQUIRE_SMALL(w.Width(), 1e-5); // Reverse operand order. z = y; z &= x; w = y & x; BOOST_REQUIRE_SMALL(z.Width(), 1e-5); BOOST_REQUIRE_SMALL(w.Width(), 1e-5); // Now make them overlapping. x = Range(0.0, 3.5); y = Range(3.0, 4.0); z = x; z &= y; w = x & y; BOOST_REQUIRE_CLOSE(z.Lo(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 3.5, 1e-5); BOOST_REQUIRE_CLOSE(w.Lo(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 3.5, 1e-5); // Reverse operand order. z = y; z &= x; w = y & x; BOOST_REQUIRE_CLOSE(z.Lo(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 3.5, 1e-5); BOOST_REQUIRE_CLOSE(w.Lo(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 3.5, 1e-5); // Now make them equivalent.
x = Range(2.0, 4.0); y = Range(2.0, 4.0); z = x; z &= y; w = x & y; BOOST_REQUIRE_CLOSE(z.Lo(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Lo(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 4.0, 1e-5); } /** * Test multiplication of a range with a double. */ BOOST_AUTO_TEST_CASE(RangeMultiply) { // We need to test both * and *=, as well as both cases of *. // We'll try with a couple of numbers: -1, 0, 2. // And we'll have a couple of cases for bounds: strictly less than zero; // including zero; and strictly greater than zero. // // So, nine total cases. Range x(-5.0, -3.0); Range y(-5.0, -3.0); Range z; Range w; y *= -1.0; z = x * -1.0; w = -1.0 * x; BOOST_REQUIRE_CLOSE(y.Lo(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(y.Hi(), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Lo(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Lo(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 5.0, 1e-5); y = x; y *= 0.0; z = x * 0.0; w = 0.0 * x; BOOST_REQUIRE_SMALL(y.Lo(), 1e-5); BOOST_REQUIRE_SMALL(y.Hi(), 1e-5); BOOST_REQUIRE_SMALL(z.Lo(), 1e-5); BOOST_REQUIRE_SMALL(z.Hi(), 1e-5); BOOST_REQUIRE_SMALL(w.Lo(), 1e-5); BOOST_REQUIRE_SMALL(w.Hi(), 1e-5); y = x; y *= 2.0; z = x * 2.0; w = 2.0 * x; BOOST_REQUIRE_CLOSE(y.Lo(), -10.0, 1e-5); BOOST_REQUIRE_CLOSE(y.Hi(), -6.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Lo(), -10.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), -6.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Lo(), -10.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), -6.0, 1e-5); x = Range(-2.0, 2.0); y = x; y *= -1.0; z = x * -1.0; w = -1.0 * x; BOOST_REQUIRE_CLOSE(y.Lo(), -2.0, 1e-5); BOOST_REQUIRE_CLOSE(y.Hi(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Lo(), -2.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Lo(), -2.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 2.0, 1e-5); y = x; y *= 0.0; z = x * 0.0; w = 0.0 * x; BOOST_REQUIRE_SMALL(y.Lo(), 1e-5); BOOST_REQUIRE_SMALL(y.Hi(), 1e-5); BOOST_REQUIRE_SMALL(z.Lo(), 1e-5); BOOST_REQUIRE_SMALL(z.Hi(), 1e-5); BOOST_REQUIRE_SMALL(w.Lo(), 1e-5); BOOST_REQUIRE_SMALL(w.Hi(), 1e-5); y = x; y *= 2.0; z = x * 2.0; w = 2.0 * x; BOOST_REQUIRE_CLOSE(y.Lo(), -4.0, 1e-5); BOOST_REQUIRE_CLOSE(y.Hi(), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Lo(), -4.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Lo(), -4.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 4.0, 1e-5); x = Range(3.0, 5.0); y = x; y *= -1.0; z = x * -1.0; w = -1.0 * x; BOOST_REQUIRE_CLOSE(y.Lo(), -5.0, 1e-5); BOOST_REQUIRE_CLOSE(y.Hi(), -3.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Lo(), -5.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), -3.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Lo(), -5.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), -3.0, 1e-5); y = x; y *= 0.0; z = x * 0.0; w = 0.0 * x; BOOST_REQUIRE_SMALL(y.Lo(), 1e-5); BOOST_REQUIRE_SMALL(y.Hi(), 1e-5); BOOST_REQUIRE_SMALL(z.Lo(), 1e-5); BOOST_REQUIRE_SMALL(z.Hi(), 1e-5); BOOST_REQUIRE_SMALL(w.Lo(), 1e-5); BOOST_REQUIRE_SMALL(w.Hi(), 1e-5); y = x; y *= 2.0; z = x * 2.0; w = 2.0 * x; BOOST_REQUIRE_CLOSE(y.Lo(), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(y.Hi(), 10.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Lo(), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(z.Hi(), 10.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Lo(), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(w.Hi(), 10.0, 1e-5); } /** * Test equality operator. */ BOOST_AUTO_TEST_CASE(RangeEquality) { // Three cases: non-overlapping, overlapping, equivalent. We should also // consider empty ranges, which are not necessarily equal... Range x(0.0, 2.0); Range y(3.0, 5.0); // These are odd calls, but we don't want to use operator!= here. 
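// (operator!= is exercised separately in the RangeInequality test below.)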
BOOST_REQUIRE_EQUAL((x == y), false); BOOST_REQUIRE_EQUAL((y == x), false); y = Range(1.0, 3.0); BOOST_REQUIRE_EQUAL((x == y), false); BOOST_REQUIRE_EQUAL((y == x), false); y = Range(0.0, 2.0); BOOST_REQUIRE_EQUAL((x == y), true); BOOST_REQUIRE_EQUAL((y == x), true); x = Range(1.0, -1.0); // Empty. y = Range(1.0, -1.0); // Also empty. BOOST_REQUIRE_EQUAL((x == y), true); BOOST_REQUIRE_EQUAL((y == x), true); // No need to test what it does if the empty ranges are different "ranges" // because we are not forcing behavior for that. } /** * Test inequality operator. */ BOOST_AUTO_TEST_CASE(RangeInequality) { // We will use the same three cases as the RangeEquality test. Range x(0.0, 2.0); Range y(3.0, 5.0); // Again, odd calls, but we want to force use of operator!=. BOOST_REQUIRE_EQUAL((x != y), true); BOOST_REQUIRE_EQUAL((y != x), true); y = Range(1.0, 3.0); BOOST_REQUIRE_EQUAL((x != y), true); BOOST_REQUIRE_EQUAL((y != x), true); y = Range(0.0, 2.0); BOOST_REQUIRE_EQUAL((x != y), false); BOOST_REQUIRE_EQUAL((y != x), false); x = Range(1.0, -1.0); // Empty. y = Range(1.0, -1.0); // Also empty. BOOST_REQUIRE_EQUAL((x != y), false); BOOST_REQUIRE_EQUAL((y != x), false); } /** * Test strict less-than operator. */ BOOST_AUTO_TEST_CASE(RangeStrictLessThan) { // Three cases: non-overlapping, overlapping, and equivalent. Range x(0.0, 2.0); Range y(3.0, 5.0); BOOST_REQUIRE_EQUAL((x < y), true); BOOST_REQUIRE_EQUAL((y < x), false); y = Range(1.0, 3.0); BOOST_REQUIRE_EQUAL((x < y), false); BOOST_REQUIRE_EQUAL((y < x), false); y = Range(0.0, 2.0); BOOST_REQUIRE_EQUAL((x < y), false); BOOST_REQUIRE_EQUAL((y < x), false); } /** * Test strict greater-than operator. */ BOOST_AUTO_TEST_CASE(RangeStrictGreaterThan) { // Three cases: non-overlapping, overlapping, and equivalent. Range x(0.0, 2.0); Range y(3.0, 5.0); BOOST_REQUIRE_EQUAL((x > y), false); BOOST_REQUIRE_EQUAL((y > x), true); y = Range(1.0, 3.0); BOOST_REQUIRE_EQUAL((x > y), false); BOOST_REQUIRE_EQUAL((y > x), false); y = Range(0.0, 2.0); BOOST_REQUIRE_EQUAL((x > y), false); BOOST_REQUIRE_EQUAL((y > x), false); } /** * Test the Contains() operator. */ BOOST_AUTO_TEST_CASE(RangeContains) { // We have three Range cases: strictly less than 0; overlapping 0; and // strictly greater than 0. Then the numbers we check can be the same three // cases, including one greater than and one less than the range. This should // be about 15 total cases. Range x(-2.0, -1.0); BOOST_REQUIRE(!x.Contains(-3.0)); BOOST_REQUIRE(x.Contains(-2.0)); BOOST_REQUIRE(x.Contains(-1.5)); BOOST_REQUIRE(x.Contains(-1.0)); BOOST_REQUIRE(!x.Contains(-0.5)); BOOST_REQUIRE(!x.Contains(0.0)); BOOST_REQUIRE(!x.Contains(1.0)); x = Range(-1.0, 1.0); BOOST_REQUIRE(!x.Contains(-2.0)); BOOST_REQUIRE(x.Contains(-1.0)); BOOST_REQUIRE(x.Contains(0.0)); BOOST_REQUIRE(x.Contains(1.0)); BOOST_REQUIRE(!x.Contains(2.0)); x = Range(1.0, 2.0); BOOST_REQUIRE(!x.Contains(-1.0)); BOOST_REQUIRE(!x.Contains(0.0)); BOOST_REQUIRE(!x.Contains(0.5)); BOOST_REQUIRE(x.Contains(1.0)); BOOST_REQUIRE(x.Contains(1.5)); BOOST_REQUIRE(x.Contains(2.0)); BOOST_REQUIRE(!x.Contains(2.5)); // Now let's try it on an empty range. x = Range(); BOOST_REQUIRE(!x.Contains(-10.0)); BOOST_REQUIRE(!x.Contains(0.0)); BOOST_REQUIRE(!x.Contains(10.0)); // And an infinite range. x = Range(-DBL_MAX, DBL_MAX); BOOST_REQUIRE(x.Contains(-10.0)); BOOST_REQUIRE(x.Contains(0.0)); BOOST_REQUIRE(x.Contains(10.0)); } /** * Test that Range::Contains() works on other Ranges. It should return false * unless the ranges overlap at all. 
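 * For example, [-5, 0] and [0, 5] overlap only at the shared endpoint 0; as * the checks below require, each then Contains() the other.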
*/ BOOST_AUTO_TEST_CASE(RangeContainsRange) { // Empty ranges should not contain each other. Range a; Range b; BOOST_REQUIRE_EQUAL(a.Contains(b), false); BOOST_REQUIRE_EQUAL(b.Contains(a), false); // Completely disparate ranges. a = Range(-5.0, -3.0); b = Range(3.0, 5.0); BOOST_REQUIRE_EQUAL(a.Contains(b), false); BOOST_REQUIRE_EQUAL(b.Contains(a), false); // Overlapping at the end-point; this is containment of the end point. a = Range(-5.0, 0.0); b = Range(0.0, 5.0); BOOST_REQUIRE_EQUAL(a.Contains(b), true); BOOST_REQUIRE_EQUAL(b.Contains(a), true); // Partially overlapping. a = Range(-5.0, 2.0); b = Range(-2.0, 5.0); BOOST_REQUIRE_EQUAL(a.Contains(b), true); BOOST_REQUIRE_EQUAL(b.Contains(a), true); // One range encloses the other. a = Range(-5.0, 5.0); b = Range(-3.0, 3.0); BOOST_REQUIRE_EQUAL(a.Contains(b), true); BOOST_REQUIRE_EQUAL(b.Contains(a), true); // Identical ranges. a = Range(-3.0, 3.0); b = Range(-3.0, 3.0); BOOST_REQUIRE_EQUAL(a.Contains(b), true); BOOST_REQUIRE_EQUAL(b.Contains(a), true); // Single-point ranges. a = Range(0.0, 0.0); b = Range(0.0, 0.0); BOOST_REQUIRE_EQUAL(a.Contains(b), true); BOOST_REQUIRE_EQUAL(b.Contains(a), true); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/matrix_completion_test.cpp000066400000000000000000000036641315013601400234700ustar00rootroot00000000000000/** * @file matrix_completion_test.cpp * @author Stephen Tu * * Tests for matrix completion. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::matrix_completion; BOOST_AUTO_TEST_SUITE(MatrixCompletionTest); /** * A matrix completion test. * * The matrix X = F1 F2^T was generated such that the entries of Fi were iid * from the uniform distribution on [0, 1]. Then, enough random samples * (without replacement) were taken from X such that exact recovery was * possible. * * X is stored in the file "completion_X.csv" and the indices are stored in the * file "completion_indices.csv". Recovery was verified by solving the SDP with * Mosek. */ BOOST_AUTO_TEST_CASE(UniformMatrixCompletionSDP) { arma::mat Xorig, values; arma::umat indices; data::Load("completion_X.csv", Xorig, true, false); data::Load("completion_indices.csv", indices, true, false); values.set_size(indices.n_cols); for (size_t i = 0; i < indices.n_cols; ++i) { values(i) = Xorig(indices(0, i), indices(1, i)); } arma::mat recovered; MatrixCompletion mc(Xorig.n_rows, Xorig.n_cols, indices, values); mc.Recover(recovered); const double err = arma::norm(Xorig - recovered, "fro") / arma::norm(Xorig, "fro"); BOOST_REQUIRE_SMALL(err, 1e-5); for (size_t i = 0; i < indices.n_cols; ++i) { BOOST_REQUIRE_CLOSE( recovered(indices(0, i), indices(1, i)), Xorig(indices(0, i), indices(1, i)), 1e-5); } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/maximal_inputs_test.cpp000066400000000000000000000050211315013601400227540ustar00rootroot00000000000000/** * @file maximal_inputs_test.cpp * @author Ngap Wei Tham * * Test the MaximalInputs and ColumnsToBlocks functions. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack.
If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "test_tools.hpp" using namespace mlpack; arma::mat CreateMaximalInput() { arma::mat w1(2, 4); w1 << 0 << 1 << 2 << 3 << arma::endr << 4 << 5 << 6 << 7; arma::mat input(5, 5); input.submat(0, 0, 1, 3) = w1; arma::mat maximalInputs; mlpack::nn::MaximalInputs(input, maximalInputs); return maximalInputs; } void TestResults(const arma::mat& actualResult, const arma::mat& expectResult) { BOOST_REQUIRE_EQUAL(expectResult.n_rows, actualResult.n_rows); BOOST_REQUIRE_EQUAL(expectResult.n_cols, actualResult.n_cols); for (size_t i = 0; i != expectResult.n_elem; ++i) { BOOST_REQUIRE_CLOSE(expectResult[i], actualResult[i], 1e-2); } } BOOST_AUTO_TEST_SUITE(MaximalInputsTest); BOOST_AUTO_TEST_CASE(ColumnToBlocksEvaluate) { arma::mat output; mlpack::math::ColumnsToBlocks ctb(1, 2); ctb.Transform(CreateMaximalInput(), output); arma::mat matlabResults; matlabResults << -1 << -1 << -1 << -1 << -1 << -1 << -1 << arma::endr << -1 << -1 << -0.42857 << -1 << 0.14286 << 0.71429 << -1 << arma::endr << -1 << -0.71429 << -0.14286 << -1 << 0.42857 << 1 << -1 << arma::endr << -1 << -1 << -1 << -1 << -1 << -1 << -1; TestResults(output, matlabResults); } BOOST_AUTO_TEST_CASE(ColumnToBlocksChangeBlockSize) { arma::mat output; mlpack::math::ColumnsToBlocks ctb(1, 2); ctb.BlockWidth(4); ctb.BlockHeight(1); ctb.BufValue(-3); ctb.Transform(CreateMaximalInput(), output); arma::mat matlabResults; matlabResults << -3 << -3 << -3 << -3 << -3 << -3 << -3 << -3 << -3 << -3 << -3 << arma::endr << -3 << -1 << -0.71429 << -0.42857 << -0.14286 << -3 << 0.14286 << 0.42857 << 0.71429 << 1 << -3 << arma::endr << -3 << -3 << -3 << -3 << -3 << -3 << -3 << -3 << -3 << -3 << -3 << arma::endr; TestResults(output, matlabResults); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/mean_shift_test.cpp000066400000000000000000000114441315013601400220450ustar00rootroot00000000000000/** * @file mean_shift_test.cpp * @author Shangtong Zhang * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::meanshift; using namespace mlpack::distribution; BOOST_AUTO_TEST_SUITE(MeanShiftTest); // Generate dataset; written transposed because it's easier to read. arma::mat meanShiftData(" 0.0 0.0;" // Class 1. " 0.3 0.4;" " 0.1 0.0;" " 0.1 0.3;" " -0.2 -0.2;" " -0.1 0.3;" " -0.4 0.1;" " 0.2 -0.1;" " 0.3 0.0;" " -0.3 -0.3;" " 0.1 -0.1;" " 0.2 -0.3;" " -0.3 0.2;" " 10.0 10.0;" // Class 2. " 10.1 9.9;" " 9.9 10.0;" " 10.2 9.7;" " 10.2 9.8;" " 9.7 10.3;" " 9.9 10.1;" "-10.0 5.0;" // Class 3. " -9.8 5.1;" " -9.9 4.9;" "-10.0 4.9;" "-10.2 5.2;" "-10.1 5.1;" "-10.3 5.3;" "-10.0 4.8;" " -9.6 5.0;" " -9.8 5.1;"); /** * 30-point 3-class test case for Mean Shift. */ BOOST_AUTO_TEST_CASE(MeanShiftSimpleTest) { MeanShift<> meanShift; arma::Col assignments; arma::mat centroids; meanShift.Cluster((arma::mat) trans(meanShiftData), assignments, centroids); // Now make sure we got it all right. There is no restriction on how the // clusters are ordered, so we have to be careful about that.
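// Strategy: record the label assigned to the first point of each ground-truth // cluster, require every other point of that cluster to share it, and require // the three recorded labels to be pairwise distinct.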
size_t firstClass = assignments(0); for (size_t i = 1; i < 13; i++) BOOST_REQUIRE_EQUAL(assignments(i), firstClass); size_t secondClass = assignments(13); // To ensure that class 1 != class 2. BOOST_REQUIRE_NE(firstClass, secondClass); for (size_t i = 13; i < 20; i++) BOOST_REQUIRE_EQUAL(assignments(i), secondClass); size_t thirdClass = assignments(20); // To ensure that this is the third class which we haven't seen yet. BOOST_REQUIRE_NE(firstClass, thirdClass); BOOST_REQUIRE_NE(secondClass, thirdClass); for (size_t i = 20; i < 30; i++) BOOST_REQUIRE_EQUAL(assignments(i), thirdClass); } // Generate samples from four Gaussians, and make sure mean shift nearly // recovers those four centers. BOOST_AUTO_TEST_CASE(GaussianClustering) { GaussianDistribution g1("0.0 0.0 0.0", arma::eye(3, 3)); GaussianDistribution g2("5.0 5.0 5.0", 2 * arma::eye(3, 3)); GaussianDistribution g3("-3.0 3.0 -1.0", arma::eye(3, 3)); GaussianDistribution g4("6.0 -2.0 -2.0", 3 * arma::eye(3, 3)); arma::mat dataset(3, 4000); for (size_t i = 0; i < 1000; ++i) dataset.col(i) = g1.Random(); for (size_t i = 1000; i < 2000; ++i) dataset.col(i) = g2.Random(); for (size_t i = 2000; i < 3000; ++i) dataset.col(i) = g3.Random(); for (size_t i = 3000; i < 4000; ++i) dataset.col(i) = g4.Random(); // Now that the dataset is generated, run mean shift. Pre-set radius. MeanShift<> meanShift(2.9); arma::Col assignments; arma::mat centroids; meanShift.Cluster(dataset, assignments, centroids); BOOST_REQUIRE_EQUAL(centroids.n_cols, 4); BOOST_REQUIRE_EQUAL(centroids.n_rows, 3); // Check that each centroid is close to only one mean. arma::vec centroidDistances(4); arma::uvec minIndices(4); for (size_t i = 0; i < 4; ++i) { centroidDistances(0) = metric::EuclideanDistance::Evaluate(g1.Mean(), centroids.col(i)); centroidDistances(1) = metric::EuclideanDistance::Evaluate(g2.Mean(), centroids.col(i)); centroidDistances(2) = metric::EuclideanDistance::Evaluate(g3.Mean(), centroids.col(i)); centroidDistances(3) = metric::EuclideanDistance::Evaluate(g4.Mean(), centroids.col(i)); // Are we near a centroid of a Gaussian? const double minVal = centroidDistances.min(minIndices[i]); BOOST_REQUIRE_SMALL(minVal, 0.65); // A decent amount of tolerance... } // Ensure each centroid corresponds to a different Gaussian. for (size_t i = 0; i < 4; ++i) for (size_t j = i + 1; j < 4; ++j) BOOST_REQUIRE_NE(minIndices[i], minIndices[j]); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/metric_test.cpp000066400000000000000000000041241315013601400212100ustar00rootroot00000000000000/** * @file metric_test.cpp * * Unit tests for the 'LMetric' class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include #include #include #include "test_tools.hpp" using namespace std; using namespace mlpack::metric; BOOST_AUTO_TEST_SUITE(LMetricTest); BOOST_AUTO_TEST_CASE(L1MetricTest) { arma::vec a1(5); a1.randn(); arma::vec b1(5); b1.randn(); arma::Col a2(5); a2 << 1 << 2 << 1 << 0 << 5; arma::Col b2(5); b2 << 2 << 5 << 2 << 0 << 1; ManhattanDistance lMetric; BOOST_REQUIRE_CLOSE((double) arma::accu(arma::abs(a1 - b1)), lMetric.Evaluate(a1, b1), 1e-5); BOOST_REQUIRE_CLOSE((double) arma::accu(arma::abs(a2 - b2)), lMetric.Evaluate(a2, b2), 1e-5); } BOOST_AUTO_TEST_CASE(L2MetricTest) { arma::vec a1(5); a1.randn(); arma::vec b1(5); b1.randn(); arma::vec a2(5); a2 << 1 << 2 << 1 << 0 << 5; arma::vec b2(5); b2 << 2 << 5 << 2 << 0 << 1; EuclideanDistance lMetric; BOOST_REQUIRE_CLOSE((double) sqrt(arma::accu(arma::square(a1 - b1))), lMetric.Evaluate(a1, b1), 1e-5); BOOST_REQUIRE_CLOSE((double) sqrt(arma::accu(arma::square(a2 - b2))), lMetric.Evaluate(a2, b2), 1e-5); } BOOST_AUTO_TEST_CASE(LINFMetricTest) { arma::vec a1(5); a1.randn(); arma::vec b1(5); b1.randn(); arma::Col a2(5); a2 << 1 << 2 << 1 << 0 << 5; arma::Col b2(5); b2 << 2 << 5 << 2 << 0 << 1; ChebyshevDistance lMetric; BOOST_REQUIRE_CLOSE((double) arma::as_scalar(arma::max(arma::abs(a1 - b1))), lMetric.Evaluate(a1, b1), 1e-5); BOOST_REQUIRE_CLOSE((double) arma::as_scalar(arma::max(arma::abs(a2 - b2))), lMetric.Evaluate(a2, b2), 1e-5); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/minibatch_sgd_test.cpp000066400000000000000000000113101315013601400225130ustar00rootroot00000000000000/** * @file minibatch_sgd_test.cpp * @author Ryan Curtin * * Test file for minibatch SGD. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include "test_tools.hpp" using namespace std; using namespace arma; using namespace mlpack; using namespace mlpack::optimization; using namespace mlpack::optimization::test; using namespace mlpack::distribution; using namespace mlpack::regression; BOOST_AUTO_TEST_SUITE(MiniBatchSGDTest); /** * If the batch size is 1, and we aren't shuffling, we should get the exact same * results as regular SGD. */ BOOST_AUTO_TEST_CASE(SGDSimilarityTest) { SGDTestFunction f; SGD s(f, 0.0003, 100000, 1e-4, false); MiniBatchSGD ms(f, 1, 0.0003, 100000, 1e-4, false); arma::mat sCoord = f.GetInitialPoint(); arma::mat msCoord = f.GetInitialPoint(); const double sResult = s.Optimize(sCoord); const double msResult = ms.Optimize(msCoord); BOOST_REQUIRE_CLOSE(sResult, msResult, 1e-2); BOOST_REQUIRE_CLOSE(sCoord[0], msCoord[0], 1e-2); BOOST_REQUIRE_CLOSE(sCoord[1], msCoord[1], 1e-2); BOOST_REQUIRE_CLOSE(sCoord[2], msCoord[2], 1e-2); } /* BOOST_AUTO_TEST_CASE(SimpleSGDTestFunction) { SGDTestFunction f; // Batch size of 3. MiniBatchSGD s(f, 3, 0.0005, 2000000, 1e-9, true); arma::mat coordinates = f.GetInitialPoint(); double result = s.Optimize(coordinates); BOOST_REQUIRE_CLOSE(result, -1.0, 0.05); BOOST_REQUIRE_SMALL(coordinates[0], 1e-3); BOOST_REQUIRE_SMALL(coordinates[1], 1e-7); BOOST_REQUIRE_SMALL(coordinates[2], 1e-7); } */ /** * Run mini-batch SGD on logistic regression and make sure the results are * acceptable. */ BOOST_AUTO_TEST_CASE(LogisticRegressionTest) { // Generate a two-Gaussian dataset. 
GaussianDistribution g1(arma::vec("1.0 1.0 1.0"), arma::eye(3, 3)); GaussianDistribution g2(arma::vec("9.0 9.0 9.0"), arma::eye(3, 3)); arma::mat data(3, 1000); arma::Row responses(1000); for (size_t i = 0; i < 500; ++i) { data.col(i) = g1.Random(); responses[i] = 0; } for (size_t i = 500; i < 1000; ++i) { data.col(i) = g2.Random(); responses[i] = 1; } // Shuffle the dataset. arma::uvec indices = arma::shuffle(arma::linspace(0, data.n_cols - 1, data.n_cols)); arma::mat shuffledData(3, 1000); arma::Row shuffledResponses(1000); for (size_t i = 0; i < data.n_cols; ++i) { shuffledData.col(i) = data.col(indices[i]); shuffledResponses[i] = responses[indices[i]]; } // Create a test set. arma::mat testData(3, 1000); arma::Row testResponses(1000); for (size_t i = 0; i < 500; ++i) { testData.col(i) = g1.Random(); testResponses[i] = 0; } for (size_t i = 500; i < 1000; ++i) { testData.col(i) = g2.Random(); testResponses[i] = 1; } // Now run mini-batch SGD with a couple of batch sizes. for (size_t batchSize = 5; batchSize < 50; batchSize += 5) { LogisticRegression<> lr(shuffledData.n_rows, 0.5); LogisticRegressionFunction<> lrf(shuffledData, shuffledResponses, 0.5); MiniBatchSGD> mbsgd(lrf, batchSize); lr.Train(mbsgd); // Ensure that the error is close to zero. const double acc = lr.ComputeAccuracy(data, responses); BOOST_REQUIRE_CLOSE(acc, 100.0, 0.3); // 0.3% error tolerance. const double testAcc = lr.ComputeAccuracy(testData, testResponses); BOOST_REQUIRE_CLOSE(testAcc, 100.0, 0.6); // 0.6% error tolerance. } } /** * Run mini-batch SGD on a simple test function and make sure the last batch * size is handled correctly. * * When using a batch size that fulfills the constraint * (numFunctions % batchSize) == 1, we have to make sure that the last batch * isn't empty. */ BOOST_AUTO_TEST_CASE(ZeroBatchSizeTest) { // Create the generalized Rosenbrock function. GeneralizedRosenbrockFunction f(10); MiniBatchSGD s( f, f.NumFunctions() - 1, 0.01, 3); arma::mat coordinates = f.GetInitialPoint(); s.Optimize(coordinates); const bool finite = coordinates.is_finite(); BOOST_REQUIRE_EQUAL(finite, true); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/mlpack_test.cpp000066400000000000000000000027641315013601400212040ustar00rootroot00000000000000/** * @file mlpack_test.cpp * * Simple file defining the name of the overall test for mlpack, and setting up * the global test fixture for each test. Each individual test is contained in * its own file. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #define BOOST_TEST_MODULE mlpackTest #include // We only need to do this for old Boost versions. #if BOOST_VERSION < 103600 #define BOOST_AUTO_TEST_MAIN #endif #include #include "test_tools.hpp" #include /** * Provide a global fixture for each test. * * A global fixture is expected to be implemented as a class where the class * constructor serves as a setup method and the class destructor serves as the * teardown method. * * By default, Log objects should have their output redirected, otherwise * the UTF test output would be drowned out by Log::Debug and Log::Warn * messages. * * For more detailed test output, set the CMake flag TEST_VERBOSE=ON.
*/ struct GlobalFixture { GlobalFixture() { #ifndef TEST_VERBOSE #ifdef DEBUG mlpack::Log::Debug.ignoreInput = true; #endif mlpack::Log::Info.ignoreInput = true; mlpack::Log::Warn.ignoreInput = true; #endif } }; BOOST_GLOBAL_FIXTURE(GlobalFixture); mlpack-2.2.5/src/mlpack/tests/nbc_test.cpp000066400000000000000000000227421315013601400204750ustar00rootroot00000000000000/** * @file nbc_test.cpp * * Test for the Naive Bayes classifier. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace naive_bayes; BOOST_AUTO_TEST_SUITE(NBCTest); BOOST_AUTO_TEST_CASE(NaiveBayesClassifierTest) { const char* trainFilename = "trainSet.csv"; const char* testFilename = "testSet.csv"; const char* trainResultFilename = "trainRes.csv"; const char* testResultFilename = "testRes.csv"; size_t classes = 2; arma::mat trainData, trainRes, calcMat; data::Load(trainFilename, trainData, true); data::Load(trainResultFilename, trainRes, true); // Get the labels out. arma::Row labels(trainData.n_cols); for (size_t i = 0; i < trainData.n_cols; ++i) labels[i] = trainData(trainData.n_rows - 1, i); trainData.shed_row(trainData.n_rows - 1); NaiveBayesClassifier<> nbcTest(trainData, labels, classes); size_t dimension = nbcTest.Means().n_rows; calcMat.zeros(2 * dimension + 1, classes); for (size_t i = 0; i < dimension; i++) { for (size_t j = 0; j < classes; j++) { calcMat(i, j) = nbcTest.Means()(i, j); calcMat(i + dimension, j) = nbcTest.Variances()(i, j); } } for (size_t i = 0; i < classes; i++) calcMat(2 * dimension, i) = nbcTest.Probabilities()(i); for (size_t i = 0; i < calcMat.n_rows; i++) for (size_t j = 0; j < classes; j++) BOOST_REQUIRE_CLOSE(trainRes(i, j) + .00001, calcMat(i, j), 0.01); arma::mat testData; arma::Mat testRes; arma::Row calcVec; data::Load(testFilename, testData, true); data::Load(testResultFilename, testRes, true); testData.shed_row(testData.n_rows - 1); // Remove the labels. nbcTest.Classify(testData, calcVec); for (size_t i = 0; i < testData.n_cols; i++) BOOST_REQUIRE_EQUAL(testRes(i), calcVec(i)); } // The same test, but this one uses the incremental algorithm to calculate // variance. BOOST_AUTO_TEST_CASE(NaiveBayesClassifierIncrementalTest) { const char* trainFilename = "trainSet.csv"; const char* testFilename = "testSet.csv"; const char* trainResultFilename = "trainRes.csv"; const char* testResultFilename = "testRes.csv"; size_t classes = 2; arma::mat trainData, trainRes, calcMat; data::Load(trainFilename, trainData, true); data::Load(trainResultFilename, trainRes, true); // Get the labels out. 
arma::Row labels(trainData.n_cols); for (size_t i = 0; i < trainData.n_cols; ++i) labels[i] = trainData(trainData.n_rows - 1, i); trainData.shed_row(trainData.n_rows - 1); NaiveBayesClassifier<> nbcTest(trainData, labels, classes, true); size_t dimension = nbcTest.Means().n_rows; calcMat.zeros(2 * dimension + 1, classes); for (size_t i = 0; i < dimension; i++) { for (size_t j = 0; j < classes; j++) { calcMat(i, j) = nbcTest.Means()(i, j); calcMat(i + dimension, j) = nbcTest.Variances()(i, j); } } for (size_t i = 0; i < classes; i++) calcMat(2 * dimension, i) = nbcTest.Probabilities()(i); for (size_t i = 0; i < calcMat.n_rows; i++) for (size_t j = 0; j < classes; j++) BOOST_REQUIRE_CLOSE(trainRes(i, j) + .00001, calcMat(i, j), 0.01); arma::mat testData; arma::Mat testRes; arma::Row calcVec; data::Load(testFilename, testData, true); data::Load(testResultFilename, testRes, true); testData.shed_row(testData.n_rows - 1); // Remove the labels. nbcTest.Classify(testData, calcVec); for (size_t i = 0; i < testData.n_cols; i++) BOOST_REQUIRE_EQUAL(testRes(i), calcVec(i)); } /** * Ensure that separate training gives the same model. */ BOOST_AUTO_TEST_CASE(SeparateTrainTest) { const char* trainFilename = "trainSet.csv"; const char* trainResultFilename = "trainRes.csv"; size_t classes = 2; arma::mat trainData, trainRes, calcMat; data::Load(trainFilename, trainData, true); data::Load(trainResultFilename, trainRes, true); // Get the labels out. arma::Row labels(trainData.n_cols); for (size_t i = 0; i < trainData.n_cols; ++i) labels[i] = trainData(trainData.n_rows - 1, i); trainData.shed_row(trainData.n_rows - 1); NaiveBayesClassifier<> nbc(trainData, labels, classes, true); NaiveBayesClassifier<> nbcTrain(trainData.n_rows, classes); nbcTrain.Train(trainData, labels, false); BOOST_REQUIRE_EQUAL(nbc.Means().n_rows, nbcTrain.Means().n_rows); BOOST_REQUIRE_EQUAL(nbc.Means().n_cols, nbcTrain.Means().n_cols); BOOST_REQUIRE_EQUAL(nbc.Variances().n_rows, nbcTrain.Variances().n_rows); BOOST_REQUIRE_EQUAL(nbc.Variances().n_cols, nbcTrain.Variances().n_cols); BOOST_REQUIRE_EQUAL(nbc.Probabilities().n_elem, nbcTrain.Probabilities().n_elem); for (size_t i = 0; i < nbc.Means().n_elem; ++i) { if (std::abs(nbc.Means()[i]) < 1e-5) BOOST_REQUIRE_SMALL(nbcTrain.Means()[i], 1e-5); else BOOST_REQUIRE_CLOSE(nbc.Means()[i], nbcTrain.Means()[i], 1e-5); } for (size_t i = 0; i < nbc.Variances().n_elem; ++i) { if (std::abs(nbc.Variances()[i]) < 1e-5) BOOST_REQUIRE_SMALL(nbcTrain.Variances()[i], 1e-5); else BOOST_REQUIRE_CLOSE(nbc.Variances()[i], nbcTrain.Variances()[i], 1e-5); } for (size_t i = 0; i < nbc.Probabilities().n_elem; ++i) { if (std::abs(nbc.Probabilities()[i]) < 1e-5) BOOST_REQUIRE_SMALL(nbcTrain.Probabilities()[i], 1e-5); else BOOST_REQUIRE_CLOSE(nbc.Probabilities()[i], nbcTrain.Probabilities()[i], 1e-5); } } BOOST_AUTO_TEST_CASE(SeparateTrainIncrementalTest) { const char* trainFilename = "trainSet.csv"; const char* trainResultFilename = "trainRes.csv"; size_t classes = 2; arma::mat trainData, trainRes, calcMat; data::Load(trainFilename, trainData, true); data::Load(trainResultFilename, trainRes, true); // Get the labels out. 
arma::Row labels(trainData.n_cols); for (size_t i = 0; i < trainData.n_cols; ++i) labels[i] = trainData(trainData.n_rows - 1, i); trainData.shed_row(trainData.n_rows - 1); NaiveBayesClassifier<> nbc(trainData, labels, classes, true); NaiveBayesClassifier<> nbcTrain(trainData.n_rows, classes); nbcTrain.Train(trainData, labels, true); BOOST_REQUIRE_EQUAL(nbc.Means().n_rows, nbcTrain.Means().n_rows); BOOST_REQUIRE_EQUAL(nbc.Means().n_cols, nbcTrain.Means().n_cols); BOOST_REQUIRE_EQUAL(nbc.Variances().n_rows, nbcTrain.Variances().n_rows); BOOST_REQUIRE_EQUAL(nbc.Variances().n_cols, nbcTrain.Variances().n_cols); BOOST_REQUIRE_EQUAL(nbc.Probabilities().n_elem, nbcTrain.Probabilities().n_elem); for (size_t i = 0; i < nbc.Means().n_elem; ++i) { if (std::abs(nbc.Means()[i]) < 1e-5) BOOST_REQUIRE_SMALL(nbcTrain.Means()[i], 1e-5); else BOOST_REQUIRE_CLOSE(nbc.Means()[i], nbcTrain.Means()[i], 1e-5); } for (size_t i = 0; i < nbc.Variances().n_elem; ++i) { if (std::abs(nbc.Variances()[i]) < 1e-5) BOOST_REQUIRE_SMALL(nbcTrain.Variances()[i], 1e-5); else BOOST_REQUIRE_CLOSE(nbc.Variances()[i], nbcTrain.Variances()[i], 1e-5); } for (size_t i = 0; i < nbc.Probabilities().n_elem; ++i) { if (std::abs(nbc.Probabilities()[i]) < 1e-5) BOOST_REQUIRE_SMALL(nbcTrain.Probabilities()[i], 1e-5); else BOOST_REQUIRE_CLOSE(nbc.Probabilities()[i], nbcTrain.Probabilities()[i], 1e-5); } } BOOST_AUTO_TEST_CASE(SeparateTrainIndividualIncrementalTest) { const char* trainFilename = "trainSet.csv"; const char* trainResultFilename = "trainRes.csv"; size_t classes = 2; arma::mat trainData, trainRes, calcMat; data::Load(trainFilename, trainData, true); data::Load(trainResultFilename, trainRes, true); // Get the labels out. arma::Row labels(trainData.n_cols); for (size_t i = 0; i < trainData.n_cols; ++i) labels[i] = trainData(trainData.n_rows - 1, i); trainData.shed_row(trainData.n_rows - 1); NaiveBayesClassifier<> nbc(trainData, labels, classes, true); NaiveBayesClassifier<> nbcTrain(trainData.n_rows, classes); for (size_t i = 0; i < trainData.n_cols; ++i) nbcTrain.Train(trainData.col(i), labels[i]); BOOST_REQUIRE_EQUAL(nbc.Means().n_rows, nbcTrain.Means().n_rows); BOOST_REQUIRE_EQUAL(nbc.Means().n_cols, nbcTrain.Means().n_cols); BOOST_REQUIRE_EQUAL(nbc.Variances().n_rows, nbcTrain.Variances().n_rows); BOOST_REQUIRE_EQUAL(nbc.Variances().n_cols, nbcTrain.Variances().n_cols); BOOST_REQUIRE_EQUAL(nbc.Probabilities().n_elem, nbcTrain.Probabilities().n_elem); for (size_t i = 0; i < nbc.Means().n_elem; ++i) { if (std::abs(nbc.Means()[i]) < 1e-5) BOOST_REQUIRE_SMALL(nbcTrain.Means()[i], 1e-5); else BOOST_REQUIRE_CLOSE(nbc.Means()[i], nbcTrain.Means()[i], 1e-5); } for (size_t i = 0; i < nbc.Variances().n_elem; ++i) { if (std::abs(nbc.Variances()[i]) < 1e-5) BOOST_REQUIRE_SMALL(nbcTrain.Variances()[i], 1e-5); else BOOST_REQUIRE_CLOSE(nbc.Variances()[i], nbcTrain.Variances()[i], 1e-5); } for (size_t i = 0; i < nbc.Probabilities().n_elem; ++i) { if (std::abs(nbc.Probabilities()[i]) < 1e-5) BOOST_REQUIRE_SMALL(nbcTrain.Probabilities()[i], 1e-5); else BOOST_REQUIRE_CLOSE(nbc.Probabilities()[i], nbcTrain.Probabilities()[i], 1e-5); } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/nca_test.cpp000066400000000000000000000260601315013601400204710ustar00rootroot00000000000000/** * @file nca_test.cpp * @author Ryan Curtin * * Unit tests for Neighborhood Components Analysis and related code (including * the softmax error function). 
* * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::metric; using namespace mlpack::nca; using namespace mlpack::optimization; // // Tests for the SoftmaxErrorFunction // BOOST_AUTO_TEST_SUITE(NCATest); /** * The Softmax error function should return the identity matrix as its initial * point. */ BOOST_AUTO_TEST_CASE(SoftmaxInitialPoint) { // Cheap fake dataset. arma::mat data; data.randu(5, 5); arma::Row labels; labels.zeros(5); SoftmaxErrorFunction sef(data, labels); // Verify the initial point is the identity matrix. arma::mat initialPoint = sef.GetInitialPoint(); for (int row = 0; row < 5; row++) { for (int col = 0; col < 5; col++) { if (row == col) BOOST_REQUIRE_CLOSE(initialPoint(row, col), 1.0, 1e-5); else BOOST_REQUIRE_SMALL(initialPoint(row, col), 1e-5); } } } /*** * On a simple fake dataset, ensure that the initial function evaluation is * correct. */ BOOST_AUTO_TEST_CASE(SoftmaxInitialEvaluation) { // Useful but simple dataset with six points and two classes. arma::mat data = "-0.1 -0.1 -0.1 0.1 0.1 0.1;" " 1.0 0.0 -1.0 1.0 0.0 -1.0 "; arma::Row labels = " 0 0 0 1 1 1 "; SoftmaxErrorFunction sef(data, labels); double objective = sef.Evaluate(arma::eye(2, 2)); // Result painstakingly calculated by hand by rcurtin (recorded forever in his // notebook). As a result of lack of precision of the by-hand result, the // tolerance is fairly high. BOOST_REQUIRE_CLOSE(objective, -1.5115, 0.01); } /** * On a simple fake dataset, ensure that the initial gradient evaluation is * correct. */ BOOST_AUTO_TEST_CASE(SoftmaxInitialGradient) { // Useful but simple dataset with six points and two classes. arma::mat data = "-0.1 -0.1 -0.1 0.1 0.1 0.1;" " 1.0 0.0 -1.0 1.0 0.0 -1.0 "; arma::Row labels = " 0 0 0 1 1 1 "; SoftmaxErrorFunction sef(data, labels); arma::mat gradient; arma::mat coordinates = arma::eye(2, 2); sef.Gradient(coordinates, gradient); // Results painstakingly calculated by hand by rcurtin (recorded forever in // his notebook). As a result of lack of precision of the by-hand result, the // tolerance is fairly high. BOOST_REQUIRE_CLOSE(gradient(0, 0), -0.089766, 0.05); BOOST_REQUIRE_SMALL(gradient(1, 0), 1e-5); BOOST_REQUIRE_SMALL(gradient(0, 1), 1e-5); BOOST_REQUIRE_CLOSE(gradient(1, 1), 1.63823, 0.01); } /** * On optimally separated datasets, ensure that the objective function is * optimal (equal to the negative number of points). */ BOOST_AUTO_TEST_CASE(SoftmaxOptimalEvaluation) { // Simple optimal dataset. arma::mat data = " 500 500 -500 -500;" " 1 0 1 0 "; arma::Row labels = " 0 0 1 1 "; SoftmaxErrorFunction sef(data, labels); double objective = sef.Evaluate(arma::eye(2, 2)); // Use a very close tolerance for optimality; we need to be sure this function // gives optimal results correctly. BOOST_REQUIRE_CLOSE(objective, -4.0, 1e-10); } /** * On optimally separated datasets, ensure that the gradient is zero. */ BOOST_AUTO_TEST_CASE(SoftmaxOptimalGradient) { // Simple optimal dataset. 
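  // Why the optimum is -(number of points): NCA's softmax objective
  // (Goldberger et al., 2004) minimizes f(A) = -sum_i p_i, where
  //   p_ij = exp(-||A x_i - A x_j||^2) / sum_{k != i} exp(-||A x_i - A x_k||^2)
  // and p_i sums p_ij over the other points j in the same class as i. With
  // the two classes separated by roughly 1000 units below, each point's
  // within-class probability mass approaches 1, so f approaches -4.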
arma::mat data = " 500 500 -500 -500;" " 1 0 1 0 "; arma::Row labels = " 0 0 1 1 "; SoftmaxErrorFunction sef(data, labels); arma::mat gradient; sef.Gradient(arma::eye(2, 2), gradient); BOOST_REQUIRE_SMALL(gradient(0, 0), 1e-5); BOOST_REQUIRE_SMALL(gradient(0, 1), 1e-5); BOOST_REQUIRE_SMALL(gradient(1, 0), 1e-5); BOOST_REQUIRE_SMALL(gradient(1, 1), 1e-5); } /** * Ensure the separable objective function is right. */ BOOST_AUTO_TEST_CASE(SoftmaxSeparableObjective) { // Useful but simple dataset with six points and two classes. arma::mat data = "-0.1 -0.1 -0.1 0.1 0.1 0.1;" " 1.0 0.0 -1.0 1.0 0.0 -1.0 "; arma::Row labels = " 0 0 0 1 1 1 "; SoftmaxErrorFunction sef(data, labels); // Results painstakingly calculated by hand by rcurtin (recorded forever in // his notebook). As a result of lack of precision of the by-hand result, the // tolerance is fairly high. arma::mat coordinates = arma::eye(2, 2); BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 0), -0.22480, 0.01); BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 1), -0.30613, 0.01); BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 2), -0.22480, 0.01); BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 3), -0.22480, 0.01); BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 4), -0.30613, 0.01); BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 5), -0.22480, 0.01); } /** * Ensure the optimal separable objective function is right. */ BOOST_AUTO_TEST_CASE(OptimalSoftmaxSeparableObjective) { // Simple optimal dataset. arma::mat data = " 500 500 -500 -500;" " 1 0 1 0 "; arma::Row labels = " 0 0 1 1 "; SoftmaxErrorFunction sef(data, labels); arma::mat coordinates = arma::eye(2, 2); // Use a very close tolerance for optimality; we need to be sure this function // gives optimal results correctly. BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 0), -1.0, 1e-10); BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 1), -1.0, 1e-10); BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 2), -1.0, 1e-10); BOOST_REQUIRE_CLOSE(sef.Evaluate(coordinates, 3), -1.0, 1e-10); } /** * Ensure the separable gradient is right. */ BOOST_AUTO_TEST_CASE(SoftmaxSeparableGradient) { // Useful but simple dataset with six points and two classes. 
arma::mat data = "-0.1 -0.1 -0.1 0.1 0.1 0.1;" " 1.0 0.0 -1.0 1.0 0.0 -1.0 "; arma::Row labels = " 0 0 0 1 1 1 "; SoftmaxErrorFunction sef(data, labels); arma::mat coordinates = arma::eye(2, 2); arma::mat gradient(2, 2); sef.Gradient(coordinates, 0, gradient); BOOST_REQUIRE_CLOSE(gradient(0, 0), -2.0 * 0.0069708, 0.01); BOOST_REQUIRE_CLOSE(gradient(0, 1), -2.0 * -0.0101707, 0.01); BOOST_REQUIRE_CLOSE(gradient(1, 0), -2.0 * -0.0101707, 0.01); BOOST_REQUIRE_CLOSE(gradient(1, 1), -2.0 * -0.14359, 0.01); sef.Gradient(coordinates, 1, gradient); BOOST_REQUIRE_CLOSE(gradient(0, 0), -2.0 * 0.008496, 0.01); BOOST_REQUIRE_SMALL(gradient(0, 1), 1e-5); BOOST_REQUIRE_SMALL(gradient(1, 0), 1e-5); BOOST_REQUIRE_CLOSE(gradient(1, 1), -2.0 * -0.12238, 0.01); sef.Gradient(coordinates, 2, gradient); BOOST_REQUIRE_CLOSE(gradient(0, 0), -2.0 * 0.0069708, 0.01); BOOST_REQUIRE_CLOSE(gradient(0, 1), -2.0 * 0.0101707, 0.01); BOOST_REQUIRE_CLOSE(gradient(1, 0), -2.0 * 0.0101707, 0.01); BOOST_REQUIRE_CLOSE(gradient(1, 1), -2.0 * -0.1435886, 0.01); sef.Gradient(coordinates, 3, gradient); BOOST_REQUIRE_CLOSE(gradient(0, 0), -2.0 * 0.0069708, 0.01); BOOST_REQUIRE_CLOSE(gradient(0, 1), -2.0 * 0.0101707, 0.01); BOOST_REQUIRE_CLOSE(gradient(1, 0), -2.0 * 0.0101707, 0.01); BOOST_REQUIRE_CLOSE(gradient(1, 1), -2.0 * -0.1435886, 0.01); sef.Gradient(coordinates, 4, gradient); BOOST_REQUIRE_CLOSE(gradient(0, 0), -2.0 * 0.008496, 0.01); BOOST_REQUIRE_SMALL(gradient(0, 1), 1e-5); BOOST_REQUIRE_SMALL(gradient(1, 0), 1e-5); BOOST_REQUIRE_CLOSE(gradient(1, 1), -2.0 * -0.12238, 0.01); sef.Gradient(coordinates, 5, gradient); BOOST_REQUIRE_CLOSE(gradient(0, 0), -2.0 * 0.0069708, 0.01); BOOST_REQUIRE_CLOSE(gradient(0, 1), -2.0 * -0.0101707, 0.01); BOOST_REQUIRE_CLOSE(gradient(1, 0), -2.0 * -0.0101707, 0.01); BOOST_REQUIRE_CLOSE(gradient(1, 1), -2.0 * -0.1435886, 0.01); } // // Tests for the NCA algorithm. // /** * On our simple dataset, ensure that the NCA algorithm fully separates the * points. */ BOOST_AUTO_TEST_CASE(NCASGDSimpleDataset) { // Useful but simple dataset with six points and two classes. arma::mat data = "-0.1 -0.1 -0.1 0.1 0.1 0.1;" " 1.0 0.0 -1.0 1.0 0.0 -1.0 "; arma::Row labels = " 0 0 0 1 1 1 "; // Huge learning rate because this is so simple. NCA nca(data, labels); nca.Optimizer().StepSize() = 1.2; nca.Optimizer().MaxIterations() = 300000; nca.Optimizer().Tolerance() = 0; nca.Optimizer().Shuffle() = true; arma::mat outputMatrix; nca.LearnDistance(outputMatrix); // Ensure that the objective function is better now. SoftmaxErrorFunction sef(data, labels); double initObj = sef.Evaluate(arma::eye(2, 2)); double finalObj = sef.Evaluate(outputMatrix); arma::mat finalGradient; sef.Gradient(outputMatrix, finalGradient); // finalObj must be less than initObj. BOOST_REQUIRE_LT(finalObj, initObj); // Verify that final objective is optimal. BOOST_REQUIRE_CLOSE(finalObj, -6.0, 0.005); // The solution is not unique, so the best we can do is ensure the gradient // norm is close to 0. BOOST_REQUIRE_LT(arma::norm(finalGradient, 2), 1e-4); } BOOST_AUTO_TEST_CASE(NCALBFGSSimpleDataset) { // Useful but simple dataset with six points and two classes. arma::mat data = "-0.1 -0.1 -0.1 0.1 0.1 0.1;" " 1.0 0.0 -1.0 1.0 0.0 -1.0 "; arma::Row labels = " 0 0 0 1 1 1 "; // Huge learning rate because this is so simple. NCA nca(data, labels); nca.Optimizer().NumBasis() = 5; arma::mat outputMatrix; nca.LearnDistance(outputMatrix); // Ensure that the objective function is better now. 
  SoftmaxErrorFunction<SquaredEuclideanDistance> sef(data, labels);
  double initObj = sef.Evaluate(arma::eye<arma::mat>(2, 2));
  double finalObj = sef.Evaluate(outputMatrix);
  arma::mat finalGradient;
  sef.Gradient(outputMatrix, finalGradient);

  // finalObj must be less than initObj.
  BOOST_REQUIRE_LT(finalObj, initObj);
  // Verify that final objective is optimal.
  BOOST_REQUIRE_CLOSE(finalObj, -6.0, 1e-5);
  // The solution is not unique, so the best we can do is ensure the gradient
  // norm is close to 0.
  BOOST_REQUIRE_LT(arma::norm(finalGradient, 2), 1e-6);
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/nmf_test.cpp000066400000000000000000000151161315013601400205100ustar00rootroot00000000000000
/**
 * @file nmf_test.cpp
 * @author Mohan Rajendran
 *
 * Test file for NMF class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/amf/amf.hpp>
#include <mlpack/methods/amf/init_rules/random_acol_init.hpp>
#include <mlpack/methods/amf/init_rules/given_init.hpp>
#include <mlpack/methods/amf/update_rules/nmf_mult_dist.hpp>
#include <mlpack/methods/amf/update_rules/nmf_mult_div.hpp>
#include <mlpack/methods/amf/update_rules/nmf_als.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

BOOST_AUTO_TEST_SUITE(NMFTest);

using namespace std;
using namespace arma;
using namespace mlpack;
using namespace mlpack::amf;

/**
 * Check if the product of the calculated factorization is close to the
 * input matrix. Default case.
 */
BOOST_AUTO_TEST_CASE(NMFDefaultTest)
{
  mat w = randu<mat>(20, 12);
  mat h = randu<mat>(12, 20);
  mat v = w * h;
  size_t r = 12;

  AMF<> nmf;
  nmf.Apply(v, r, w, h);

  mat wh = w * h;

  // Make sure reconstruction error is not too high. 5.0% tolerance.
  BOOST_REQUIRE_SMALL(arma::norm(v - wh, "fro") / arma::norm(v, "fro"), 0.05);
}

/**
 * Check if the product of the calculated factorization is close to the
 * input matrix. Random Acol initialization distance minimization update.
 */
BOOST_AUTO_TEST_CASE(NMFAcolDistTest)
{
  mat w = randu<mat>(20, 12);
  mat h = randu<mat>(12, 20);
  mat v = w * h;
  const size_t r = 12;

  SimpleResidueTermination srt(1e-7, 10000);
  AMF<SimpleResidueTermination, RandomAcolInitialization<> > nmf(srt);
  nmf.Apply(v, r, w, h);

  mat wh = w * h;

  BOOST_REQUIRE_SMALL(arma::norm(v - wh, "fro") / arma::norm(v, "fro"),
      0.015);
}

/**
 * Check if the product of the calculated factorization is close to the
 * input matrix. Random initialization divergence minimization update.
 */
BOOST_AUTO_TEST_CASE(NMFRandomDivTest)
{
  mat w = randu<mat>(20, 12);
  mat h = randu<mat>(12, 20);
  mat v = w * h;
  size_t r = 12;

  // Custom tighter tolerance.
  SimpleResidueTermination srt(1e-8, 10000);
  AMF<SimpleResidueTermination,
      RandomInitialization,
      NMFMultiplicativeDivergenceUpdate> nmf(srt);
  nmf.Apply(v, r, w, h);

  mat wh = w * h;

  // Make sure reconstruction error is not too high. 1.5% tolerance.
  BOOST_REQUIRE_SMALL(arma::norm(v - wh, "fro") / arma::norm(v, "fro"),
      0.015);
}

/**
 * Check that the product of the calculated factorization is close to the
 * input matrix. This uses the random initialization and alternating least
 * squares update rule.
 */
BOOST_AUTO_TEST_CASE(NMFALSTest)
{
  mat w = randu<mat>(20, 12);
  mat h = randu<mat>(12, 20);
  mat v = w * h;
  size_t r = 12;

  SimpleResidueTermination srt(1e-12, 50000);
  AMF<SimpleResidueTermination, RandomAcolInitialization<>, NMFALSUpdate>
      nmf(srt);
  nmf.Apply(v, r, w, h);

  const mat wh = w * h;

  // Make sure reconstruction error is not too high. 8% tolerance. It seems
  // like ALS doesn't converge to results that are as good. It also seems to
  // be particularly sensitive to initial conditions.
  BOOST_REQUIRE_SMALL(arma::norm(v - wh, "fro") / arma::norm(v, "fro"),
      0.08);
}

/**
 * Check if the product of the calculated factorization is close to the
 * input matrix, with a sparse input matrix. Random Acol initialization,
 * distance minimization update.
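 *
 * For reference, the "distance minimization" multiplicative update exercised
 * by these tests is, in sketch form, the standard Lee & Seung rule (stated
 * here as a description of the technique, not a verbatim copy of mlpack's
 * update code):
 *
 *   H <- H % (W.t() * V) / (W.t() * W * H)
 *   W <- W % (V * H.t()) / (W * H * H.t())
 *
 * where % and / denote element-wise multiplication and division; both rules
 * keep W and H nonnegative when initialized nonnegative.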
*/ BOOST_AUTO_TEST_CASE(SparseNMFAcolDistTest) { // We have to ensure that the residues aren't NaNs. This can happen when a // matrix is created with all zeros in a column or row. double denseResidue = std::numeric_limits::quiet_NaN(); double sparseResidue = std::numeric_limits::quiet_NaN(); mat vp, dvp; // Resulting matrices. while (sparseResidue != sparseResidue && denseResidue != denseResidue) { mat w, h; sp_mat v; v.sprandu(20, 20, 0.3); // Ensure there is at least one nonzero element in every row and column. for (size_t i = 0; i < 20; ++i) v(i, i) += 1e-5; mat dv(v); // Make a dense copy. mat dw, dh; size_t r = 15; SimpleResidueTermination srt(1e-10, 10000); // Get an initialization. arma::mat iw, ih; RandomAcolInitialization<>::Initialize(v, r, iw, ih); GivenInitialization g(std::move(iw), std::move(ih)); // The GivenInitialization will force the same initialization for both // Apply() calls. AMF nmf(srt, g); nmf.Apply(v, r, w, h); nmf.Apply(dv, r, dw, dh); // Reconstruct matrices. vp = w * h; dvp = dw * dh; denseResidue = arma::norm(v - vp, "fro"); sparseResidue = arma::norm(dv - dvp, "fro"); } // Make sure the results are about equal for the W and H matrices. BOOST_REQUIRE_SMALL(arma::norm(vp - dvp, "fro") / arma::norm(vp, "fro"), 1e-5); } /** * Check that the product of the calculated factorization is close to the * input matrix, with a sparse input matrix. This uses the random * initialization and alternating least squares update rule. */ BOOST_AUTO_TEST_CASE(SparseNMFALSTest) { // We have to ensure that the residues aren't NaNs. This can happen when a // matrix is created with all zeros in a column or row. double denseResidue = std::numeric_limits::quiet_NaN(); double sparseResidue = std::numeric_limits::quiet_NaN(); mat vp, dvp; // Resulting matrices. while (sparseResidue != sparseResidue && denseResidue != denseResidue) { mlpack::math::RandomSeed(std::time(NULL)); mat w, h; sp_mat v; v.sprandu(10, 10, 0.3); // Ensure there is at least one nonzero element in every row and column. for (size_t i = 0; i < 10; ++i) v(i, i) += 1e-5; mat dv(v); // Make a dense copy. mat dw, dh; size_t r = 5; SimpleResidueTermination srt(1e-10, 10000); AMF nmf(srt); const size_t seed = mlpack::math::RandInt(1000000); mlpack::math::RandomSeed(seed); nmf.Apply(v, r, w, h); mlpack::math::RandomSeed(seed); nmf.Apply(dv, r, dw, dh); // Reconstruct matrices. vp = w * h; // In general vp won't be sparse. dvp = dw * dh; denseResidue = arma::norm(v - vp, "fro"); sparseResidue = arma::norm(dv - dvp, "fro"); } // Make sure the results are about equal for the W and H matrices. BOOST_REQUIRE_SMALL(arma::norm(vp - dvp, "fro") / arma::norm(vp, "fro"), 1e-5); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/nystroem_method_test.cpp000066400000000000000000000130471315013601400231510ustar00rootroot00000000000000/** * @file nystroem_method_test.cpp * @author Ryan Curtin * * Test the NystroemMethod class and ensure that the reconstructed kernel matrix * errors are comparable with those in the literature. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include #include #include "test_tools.hpp" #include #include #include #include using namespace mlpack; using namespace mlpack::kernel; BOOST_AUTO_TEST_SUITE(NystroemMethodTest); /** * Make sure that if the rank is the same and we do a full-rank approximation, * the result is virtually identical (a little bit of tolerance for floating * point error). */ BOOST_AUTO_TEST_CASE(FullRankTest) { // Run several trials. for (size_t trial = 0; trial < 5; ++trial) { arma::mat data; data.randu(5, trial * 200); GaussianKernel gk; NystroemMethod nm(data, gk, trial * 200); arma::mat g; nm.Apply(g); // Construct exact kernel matrix. arma::mat kernel(trial * 200, trial * 200); for (size_t i = 0; i < trial * 200; ++i) for (size_t j = 0; j < trial * 200; ++j) kernel(i, j) = gk.Evaluate(data.col(i), data.col(j)); // Reconstruct approximation. arma::mat approximation = g * g.t(); // Check closeness. for (size_t i = 0; i < trial * 200; ++i) { for (size_t j = 0; j < trial * 200; ++j) { if (kernel(i, j) < 1e-5) BOOST_REQUIRE_SMALL(approximation(i, j), 1e-4); else BOOST_REQUIRE_CLOSE(kernel(i, j), approximation(i, j), 1e-5); } } } } /** * Can we accurately represent a rank-10 matrix? */ BOOST_AUTO_TEST_CASE(Rank10Test) { arma::mat data; data.randu(500, 500); // Just so it's square. // Use SVD and only keep the first ten singular vectors. arma::mat U; arma::vec s; arma::mat V; arma::svd(U, s, V, data); // Don't set completely to 0; the hope is that K is still positive definite. s.subvec(0, 9) += 1.0; // Make sure the first 10 singular vectors are large. s.subvec(10, s.n_elem - 1).fill(1e-6); arma::mat dataMod = U * arma::diagmat(s) * V.t(); // Add some noise. dataMod += 1e-5 * arma::randu(dataMod.n_rows, dataMod.n_cols); // Calculate the true kernel matrix. LinearKernel lk; arma::mat kernel = dataMod.t() * dataMod; // Now use the linear kernel to get a Nystroem approximation; try this several // times. double normalizedFroAverage = 0.0; for (size_t trial = 0; trial < 20; ++trial) { LinearKernel lk; NystroemMethod nm(dataMod, lk, 10); arma::mat g; nm.Apply(g); arma::mat approximation = g * g.t(); // Check the normalized Frobenius norm. const double normalizedFro = arma::norm(kernel - approximation, "fro") / arma::norm(kernel, "fro"); normalizedFroAverage += normalizedFro; } normalizedFroAverage /= 20; BOOST_REQUIRE_SMALL(normalizedFroAverage, 1e-3); } /** * Can we reproduce the results in Zhang, Tsang, and Kwok (2008)? * They provide the following test points (approximately) in their experiments * in Section 4.1, for the german dataset: * * rank = 0.02n; approximation error: ~27 * rank = 0.04n; approximation error: ~15 * rank = 0.06n; approximation error: ~10 * rank = 0.08n; approximation error: ~7 * rank = 0.10n; approximation error: ~3 */ BOOST_AUTO_TEST_CASE(GermanTest) { // Load the dataset. arma::mat dataset; data::Load("german.csv", dataset, true); // These are our tolerance bounds. double results[5] = { 32.0, 20.0, 15.0, 12.0, 9.0 }; // The bandwidth of the kernel is selected to be the half the average // distance between each point and the mean of the dataset. This isn't // _exactly_ what the paper says, but I've modified what it said because our // formulation of what the Gaussian kernel is is different. GaussianKernel gk(16.461); // Calculate the true kernel matrix. 
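  // Computing the exact kernel matrix below costs O(n^2) kernel evaluations.
  // The Nystroem method, in general terms, instead samples m << n landmark
  // points and approximates K ~= C W^+ C', where C holds the kernel values
  // between all points and the landmarks and W is the landmark-landmark
  // kernel matrix; the matrix g returned by Apply() satisfies g * g' ~= K,
  // which is how the approximation is reconstructed in these tests.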
arma::mat kernel(dataset.n_cols, dataset.n_cols); for (size_t i = 0; i < dataset.n_cols; ++i) for (size_t j = 0; j < dataset.n_cols; ++j) kernel(i, j) = gk.Evaluate(dataset.col(i), dataset.col(j)); for (size_t trial = 0; trial < 5; ++trial) { // We will repeat each trial 20 times. double avgError = 0.0; for (size_t z = 0; z < 20; ++z) { NystroemMethod > nm(dataset, gk, size_t((double((trial + 1) * 2) / 100.0) * dataset.n_cols)); arma::mat g; nm.Apply(g); // Reconstruct kernel matrix. arma::mat approximation = g * g.t(); const double error = arma::norm(kernel - approximation, "fro"); if (error != error) { // Sometimes K' is singular. Unlucky. --z; continue; } else { Log::Debug << "Trial " << trial << ": error " << error << ".\n"; avgError += arma::norm(kernel - approximation, "fro"); } } avgError /= 20; // Ensure that this is within tolerance, which is at least as good as the // paper's results (plus a little bit for noise). BOOST_REQUIRE_SMALL(avgError, results[trial]); } } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/octree_test.cpp000066400000000000000000000230141315013601400212050ustar00rootroot00000000000000/** * @file octree_test.cpp * @author Ryan Curtin * * Test various properties of the Octree. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" #include "serialization.hpp" using namespace mlpack; using namespace mlpack::math; using namespace mlpack::tree; using namespace mlpack::metric; using namespace mlpack::bound; BOOST_AUTO_TEST_SUITE(OctreeTest); /** * Build a quad-tree (2-d octree) on 4 points, and guarantee four points are * created. */ BOOST_AUTO_TEST_CASE(SimpleQuadtreeTest) { // Four corners of the unit square. arma::mat dataset("0 0 1 1; 0 1 0 1"); Octree<> t(dataset, 1); BOOST_REQUIRE_EQUAL(t.NumChildren(), 4); BOOST_REQUIRE_EQUAL(t.Dataset().n_cols, 4); BOOST_REQUIRE_EQUAL(t.Dataset().n_rows, 2); BOOST_REQUIRE_EQUAL(t.NumDescendants(), 4); BOOST_REQUIRE_EQUAL(t.NumPoints(), 0); for (size_t i = 0; i < 4; ++i) { BOOST_REQUIRE_EQUAL(t.Child(i).NumDescendants(), 1); BOOST_REQUIRE_EQUAL(t.Child(i).NumPoints(), 1); } } /** * Build an octree on 3 points and make sure that only three children are * created. */ BOOST_AUTO_TEST_CASE(OctreeMissingChildTest) { // Only three corners of the unit square. arma::mat dataset("0 0 1; 0 1 1"); Octree<> t(dataset, 1); BOOST_REQUIRE_EQUAL(t.NumChildren(), 3); BOOST_REQUIRE_EQUAL(t.Dataset().n_cols, 3); BOOST_REQUIRE_EQUAL(t.Dataset().n_rows, 2); BOOST_REQUIRE_EQUAL(t.NumDescendants(), 3); BOOST_REQUIRE_EQUAL(t.NumPoints(), 0); for (size_t i = 0; i < 3; ++i) { BOOST_REQUIRE_EQUAL(t.Child(i).NumDescendants(), 1); BOOST_REQUIRE_EQUAL(t.Child(i).NumPoints(), 1); } } /** * Ensure that building an empty octree does not fail. */ BOOST_AUTO_TEST_CASE(EmptyOctreeTest) { arma::mat dataset; Octree<> t(dataset); BOOST_REQUIRE_EQUAL(t.NumChildren(), 0); BOOST_REQUIRE_EQUAL(t.Dataset().n_cols, 0); BOOST_REQUIRE_EQUAL(t.Dataset().n_rows, 0); BOOST_REQUIRE_EQUAL(t.NumDescendants(), 0); BOOST_REQUIRE_EQUAL(t.NumPoints(), 0); } /** * Ensure that maxLeafSize is respected. 
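 * A node holding no more than maxLeafSize points should not be split, so
 * building on 15 points with maxLeafSize = 20 must yield a single leaf node
 * that holds all 15 points directly.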
*/ BOOST_AUTO_TEST_CASE(MaxLeafSizeTest) { arma::mat dataset(5, 15, arma::fill::randu); Octree<> t1(dataset, 20); Octree<> t2(std::move(dataset), 20); BOOST_REQUIRE_EQUAL(t1.NumChildren(), 0); BOOST_REQUIRE_EQUAL(t1.NumDescendants(), 15); BOOST_REQUIRE_EQUAL(t1.NumPoints(), 15); BOOST_REQUIRE_EQUAL(t2.NumChildren(), 0); BOOST_REQUIRE_EQUAL(t2.NumDescendants(), 15); BOOST_REQUIRE_EQUAL(t2.NumPoints(), 15); } /** * Check that the mappings given are correct. */ BOOST_AUTO_TEST_CASE(MappingsTest) { // Test with both constructors. arma::mat dataset(3, 5, arma::fill::randu); arma::mat datacopy(dataset); std::vector oldFromNewCopy, oldFromNewMove; Octree<> t1(dataset, oldFromNewCopy, 1); Octree<> t2(std::move(dataset), oldFromNewMove, 1); for (size_t i = 0; i < oldFromNewCopy.size(); ++i) { BOOST_REQUIRE_SMALL(arma::norm(datacopy.col(oldFromNewCopy[i]) - t1.Dataset().col(i)), 1e-3); BOOST_REQUIRE_SMALL(arma::norm(datacopy.col(oldFromNewMove[i]) - t2.Dataset().col(i)), 1e-3); } } /** * Check that the reverse mappings are correct too. */ BOOST_AUTO_TEST_CASE(ReverseMappingsTest) { // Test with both constructors. arma::mat dataset(3, 300, arma::fill::randu); arma::mat datacopy(dataset); std::vector oldFromNewCopy, oldFromNewMove, newFromOldCopy, newFromOldMove; Octree<> t1(dataset, oldFromNewCopy, newFromOldCopy); Octree<> t2(std::move(dataset), oldFromNewMove, newFromOldMove); for (size_t i = 0; i < oldFromNewCopy.size(); ++i) { BOOST_REQUIRE_SMALL(arma::norm(datacopy.col(oldFromNewCopy[i]) - t1.Dataset().col(i)), 1e-3); BOOST_REQUIRE_SMALL(arma::norm(datacopy.col(oldFromNewMove[i]) - t2.Dataset().col(i)), 1e-3); BOOST_REQUIRE_EQUAL(newFromOldCopy[oldFromNewCopy[i]], i); BOOST_REQUIRE_EQUAL(newFromOldMove[oldFromNewMove[i]], i); } } /** * Make sure no children at the same level are overlapping. */ template void CheckOverlap(TreeType& node) { // Check each combination of children. for (size_t i = 0; i < node.NumChildren(); ++i) for (size_t j = i + 1; j < node.NumChildren(); ++j) BOOST_REQUIRE_EQUAL(node.Child(i).Bound().Overlap(node.Child(j).Bound()), 0.0); // We need exact equality here. for (size_t i = 0; i < node.NumChildren(); ++i) CheckOverlap(node.Child(i)); } BOOST_AUTO_TEST_CASE(OverlapTest) { // Test with both constructors. arma::mat dataset(3, 300, arma::fill::randu); Octree<> t1(dataset); Octree<> t2(std::move(dataset)); CheckOverlap(t1); CheckOverlap(t2); } /** * Make sure no points are further than the furthest point distance, and that no * descendants are further than the furthest descendant distance. */ template void CheckFurthestDistances(TreeType& node) { arma::vec center; node.Center(center); // Compare points held in the node. for (size_t i = 0; i < node.NumPoints(); ++i) { // Handle floating-point inaccuracies. BOOST_REQUIRE_LE(metric::EuclideanDistance::Evaluate(node.Dataset().col(node.Point(i)), center), node.FurthestPointDistance() * (1 + 1e-5)); } // Compare descendants held in the node. for (size_t i = 0; i < node.NumDescendants(); ++i) { // Handle floating-point inaccuracies. BOOST_REQUIRE_LE(metric::EuclideanDistance::Evaluate(node.Dataset().col(node.Descendant(i)), center), node.FurthestDescendantDistance() * (1 + 1e-5)); } for (size_t i = 0; i < node.NumChildren(); ++i) CheckFurthestDistances(node.Child(i)); } BOOST_AUTO_TEST_CASE(FurthestDistanceTest) { // Test with both constructors. 
arma::mat dataset(3, 500, arma::fill::randu); Octree<> t1(dataset); Octree<> t2(std::move(dataset)); CheckFurthestDistances(t1); CheckFurthestDistances(t2); } /** * The maximum number of children a node can have is limited by the * dimensionality. So we test to make sure there are no cases where we have too * many children. */ template void CheckNumChildren(TreeType& node) { BOOST_REQUIRE_LE(node.NumChildren(), std::pow(2, node.Dataset().n_rows)); for (size_t i = 0; i < node.NumChildren(); ++i) CheckNumChildren(node.Child(i)); } BOOST_AUTO_TEST_CASE(MaxNumChildrenTest) { for (size_t d = 1; d < 10; ++d) { arma::mat dataset(d, 1000 * d, arma::fill::randu); Octree<> t(std::move(dataset)); CheckNumChildren(t); } } /** * Test the copy constructor. */ template void CheckSameNode(TreeType& node1, TreeType& node2) { BOOST_REQUIRE_EQUAL(node1.NumChildren(), node2.NumChildren()); BOOST_REQUIRE_NE(&node1.Dataset(), &node2.Dataset()); // Make sure the children actually got copied. for (size_t i = 0; i < node1.NumChildren(); ++i) BOOST_REQUIRE_NE(&node1.Child(i), &node2.Child(i)); // Check that all the points are the same. BOOST_REQUIRE_EQUAL(node1.NumPoints(), node2.NumPoints()); BOOST_REQUIRE_EQUAL(node1.NumDescendants(), node2.NumDescendants()); for (size_t i = 0; i < node1.NumPoints(); ++i) BOOST_REQUIRE_EQUAL(node1.Point(i), node2.Point(i)); for (size_t i = 0; i < node1.NumDescendants(); ++i) BOOST_REQUIRE_EQUAL(node1.Descendant(i), node2.Descendant(i)); // Check that the bound is the same. BOOST_REQUIRE_EQUAL(node1.Bound().Dim(), node2.Bound().Dim()); for (size_t d = 0; d < node1.Bound().Dim(); ++d) { BOOST_REQUIRE_CLOSE(node1.Bound()[d].Lo(), node2.Bound()[d].Lo(), 1e-5); BOOST_REQUIRE_CLOSE(node1.Bound()[d].Hi(), node2.Bound()[d].Hi(), 1e-5); } // Check that the furthest point and descendant distance are the same. BOOST_REQUIRE_CLOSE(node1.FurthestPointDistance(), node2.FurthestPointDistance(), 1e-5); BOOST_REQUIRE_CLOSE(node1.FurthestDescendantDistance(), node2.FurthestDescendantDistance(), 1e-5); } BOOST_AUTO_TEST_CASE(CopyConstructorTest) { // Use a small random dataset. arma::mat dataset(3, 100, arma::fill::randu); Octree<> t(dataset); Octree<> t2(t); CheckSameNode(t, t2); } /** * Test the move constructor. */ BOOST_AUTO_TEST_CASE(MoveConstructorTest) { // Use a small random dataset. arma::mat dataset(3, 100, arma::fill::randu); Octree<> t(std::move(dataset)); Octree<> tcopy(t); // Move the tree. Octree<> t2(std::move(t)); // Make sure the original tree has no data. BOOST_REQUIRE_EQUAL(t.Dataset().n_rows, 0); BOOST_REQUIRE_EQUAL(t.Dataset().n_cols, 0); BOOST_REQUIRE_EQUAL(t.NumChildren(), 0); BOOST_REQUIRE_EQUAL(t.NumPoints(), 0); BOOST_REQUIRE_EQUAL(t.NumDescendants(), 0); BOOST_REQUIRE_SMALL(t.FurthestPointDistance(), 1e-5); BOOST_REQUIRE_SMALL(t.FurthestDescendantDistance(), 1e-5); BOOST_REQUIRE_EQUAL(t.Bound().Dim(), 0); // Check that the new tree is the same as our copy. CheckSameNode(tcopy, t2); } /** * Test serialization. */ BOOST_AUTO_TEST_CASE(SerializationTest) { // Use a small random dataset. 
arma::mat dataset(3, 500, arma::fill::randu); Octree<> t(std::move(dataset)); Octree<>* xmlTree; Octree<>* binaryTree; Octree<>* textTree; SerializePointerObjectAll(&t, xmlTree, binaryTree, textTree); CheckSameNode(t, *xmlTree); CheckSameNode(t, *binaryTree); CheckSameNode(t, *textTree); delete xmlTree; delete binaryTree; delete textTree; } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/pca_test.cpp000066400000000000000000000211051315013601400204660ustar00rootroot00000000000000/** * @file pca_test.cpp * @author Ajinkya Kale * @author Marcus Edel * * Test file for PCA class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include "test_tools.hpp" BOOST_AUTO_TEST_SUITE(PCATest); using namespace arma; using namespace mlpack; using namespace mlpack::pca; using namespace mlpack::distribution; /* * Compare the output of the our PCA implementation with Armadillo's using the * specified decomposition policy. */ template void ArmaComparisonPCA() { arma::mat coeff, coeff1, score, score1; arma::vec eigVal, eigVal1; arma::mat data = arma::randu(3, 1000); PCAType exactPCA; exactPCA.Apply(data, score1, eigVal1, coeff1); princomp(coeff, score, eigVal, trans(data)); // Verify the PCA results based on the eigenvalues. for (size_t i = 0; i < eigVal.n_elem; i++) { if (eigVal[i] == 0.0) BOOST_REQUIRE_SMALL(eigVal1[i], 1e-15); else BOOST_REQUIRE_CLOSE(eigVal[i], eigVal1[i], 0.0001); } } /* * Test that dimensionality reduction with PCA works the same way MATLAB does * (which should be correct!) using the specified decomposition policy. */ template void PCADimensionalityReduction() { // Fake, simple dataset. The results we will compare against are from MATLAB. mat data("1 0 2 3 9;" "5 2 8 4 8;" "6 7 3 1 8"); // Now run PCA to reduce the dimensionality. PCAType p; const double varRetained = p.Apply(data, 2); // Reduce to 2 dimensions. // Compare with correct results. mat correct("-1.53781086 -3.51358020 -0.16139887 -1.87706634 7.08985628;" " 1.29937798 3.45762685 -2.69910005 -3.15620704 1.09830225"); BOOST_REQUIRE_EQUAL(data.n_rows, correct.n_rows); BOOST_REQUIRE_EQUAL(data.n_cols, correct.n_cols); // If the eigenvectors are pointed opposite directions, they will cancel // each other out in this summation. for (size_t i = 0; i < data.n_rows; i++) { if (accu(abs(correct.row(i) + data.row(i))) < 0.001 /* arbitrary */) { // Flip Armadillo coefficients for this column. data.row(i) *= -1; } } for (size_t row = 0; row < 2; row++) for (size_t col = 0; col < 5; col++) BOOST_REQUIRE_CLOSE(data(row, col), correct(row, col), 1e-3); // Check that the amount of variance retained is right. BOOST_REQUIRE_CLOSE(varRetained, 0.904876047045906, 1e-5); } /** * Test that setting the variance retained parameter to perform dimensionality * reduction works using the specified decomposition policy. */ template void PCAVarianceRetained() { // Fake, simple dataset. mat data("1 0 2 3 9;" "5 2 8 4 8;" "6 7 3 1 8"); // The normalized eigenvalues: // 0.616237391936100 // 0.288638655109805 // 0.095123952954094 // So if we keep one dimension, the actual variance retained is // 0.616237391936100 // and if we keep two, the actual variance retained is // 0.904876047045906 // and if we keep three, the actual variance retained is 1. 
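  // In other words, Apply(data, varRetained) keeps the smallest number of
  // leading components whose normalized-eigenvalue sum reaches the requested
  // fraction: asking for 0.7 keeps two components, since
  // 0.616237 + 0.288639 = 0.904876 >= 0.7 but 0.616237 alone is not enough.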
PCAType p; arma::mat origData = data; double varRetained = p.Apply(data, 0.1); BOOST_REQUIRE_EQUAL(data.n_rows, 1); BOOST_REQUIRE_EQUAL(data.n_cols, 5); BOOST_REQUIRE_CLOSE(varRetained, 0.616237391936100, 1e-5); data = origData; varRetained = p.Apply(data, 0.5); BOOST_REQUIRE_EQUAL(data.n_rows, 1); BOOST_REQUIRE_EQUAL(data.n_cols, 5); BOOST_REQUIRE_CLOSE(varRetained, 0.616237391936100, 1e-5); data = origData; varRetained = p.Apply(data, 0.7); BOOST_REQUIRE_EQUAL(data.n_rows, 2); BOOST_REQUIRE_EQUAL(data.n_cols, 5); BOOST_REQUIRE_CLOSE(varRetained, 0.904876047045906, 1e-5); data = origData; varRetained = p.Apply(data, 0.904); BOOST_REQUIRE_EQUAL(data.n_rows, 2); BOOST_REQUIRE_EQUAL(data.n_cols, 5); BOOST_REQUIRE_CLOSE(varRetained, 0.904876047045906, 1e-5); data = origData; varRetained = p.Apply(data, 0.905); BOOST_REQUIRE_EQUAL(data.n_rows, 3); BOOST_REQUIRE_EQUAL(data.n_cols, 5); BOOST_REQUIRE_CLOSE(varRetained, 1.0, 1e-5); data = origData; varRetained = p.Apply(data, 1.0); BOOST_REQUIRE_EQUAL(data.n_rows, 3); BOOST_REQUIRE_EQUAL(data.n_cols, 5); BOOST_REQUIRE_CLOSE(varRetained, 1.0, 1e-5); } /** * Compare the output of our exact PCA implementation with Armadillo's. */ BOOST_AUTO_TEST_CASE(ArmaComparisonExactPCATest) { ArmaComparisonPCA(); } /** * Compare the output of our randomized-SVD PCA implementation with Armadillo's. */ BOOST_AUTO_TEST_CASE(ArmaComparisonRandomizedPCATest) { ArmaComparisonPCA(); } /** * Test that dimensionality reduction with exact-svd PCA works the same way * MATLAB does (which should be correct!). */ BOOST_AUTO_TEST_CASE(ExactPCADimensionalityReductionTest) { PCADimensionalityReduction(); } /** * Test that dimensionality reduction with randomized-svd PCA works the same way * MATLAB does (which should be correct!). */ BOOST_AUTO_TEST_CASE(RandomizedPCADimensionalityReductionTest) { PCADimensionalityReduction(); } /** * Test that dimensionality reduction with QUIC-SVD PCA works the same way * as the Exact-SVD PCA method. */ BOOST_AUTO_TEST_CASE(QUICPCADimensionalityReductionTest) { arma::mat data, data1; data::Load("test_data_3_1000.csv", data); data1 = data; // It isn't guaranteed that the QUIC-SVD will match with the exact SVD method, // starting with random samples. If this works 1 of 5 times, I'm fine with // that. All I want to know is that the QUIC-SVD method is able to solve the // task and is at least as good as the exact method (plus a little bit for // noise). size_t successes = 0; for (size_t trial = 0; trial < 5; ++trial) { PCAType exactPCA; const double varRetainedExact = exactPCA.Apply(data, 1); PCAType quicPCA; const double varRetainedQUIC = quicPCA.Apply(data1, 1); if (std::abs(varRetainedExact - varRetainedQUIC) < 0.2) { ++successes; break; } } BOOST_REQUIRE_GE(successes, 1); BOOST_REQUIRE_EQUAL(data.n_rows, data1.n_rows); BOOST_REQUIRE_EQUAL(data.n_cols, data1.n_cols); } /** * Test that setting the variance retained parameter to perform dimensionality * reduction works using the exact svd PCA method. */ BOOST_AUTO_TEST_CASE(ExactPCAVarianceRetainedTest) { PCAVarianceRetained(); } /** * Test that scaling PCA works. */ BOOST_AUTO_TEST_CASE(PCAScalingTest) { // Generate an artificial dataset in 3 dimensions. arma::mat data(3, 5000); arma::vec mean("1.0 3.0 -12.0"); arma::mat cov("1.0 0.9 0.0;" "0.9 1.0 0.0;" "0.0 0.0 12.0"); GaussianDistribution g(mean, cov); for (size_t i = 0; i < 5000; ++i) data.col(i) = g.Random(); // Now get the principal components when we are scaling. 
PCA p(true); arma::mat transData; arma::vec eigval; arma::mat eigvec; p.Apply(data, transData, eigval, eigvec); // The first two components of the eigenvector with largest eigenvalue should // be somewhere near sqrt(2) / 2. The third component should be close to // zero. There is noise, of course... BOOST_REQUIRE_CLOSE(std::abs(eigvec(0, 0)), sqrt(2) / 2, 0.2); BOOST_REQUIRE_CLOSE(std::abs(eigvec(1, 0)), sqrt(2) / 2, 0.2); BOOST_REQUIRE_SMALL(eigvec(2, 0), 0.08); // Large tolerance for noise. // The second component should be focused almost entirely in the third // dimension. BOOST_REQUIRE_SMALL(eigvec(0, 1), 0.08); BOOST_REQUIRE_SMALL(eigvec(1, 1), 0.08); BOOST_REQUIRE_CLOSE(std::abs(eigvec(2, 1)), 1.0, 0.2); // The third component should have the same absolute value characteristics as // the first. BOOST_REQUIRE_CLOSE(std::abs(eigvec(0, 0)), sqrt(2) / 2, 0.2); // 20% tolerance. BOOST_REQUIRE_CLOSE(std::abs(eigvec(1, 0)), sqrt(2) / 2, 0.2); BOOST_REQUIRE_SMALL(eigvec(2, 0), 0.08); // Large tolerance for noise. // The eigenvalues should sum to three. BOOST_REQUIRE_CLOSE(accu(eigval), 3.0, 0.1); // 10% tolerance. } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/perceptron_test.cpp000066400000000000000000000163411315013601400221120ustar00rootroot00000000000000/** * @file perceptron_test.cpp * @author Udit Saxena * * Tests for perceptron. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace arma; using namespace mlpack::perceptron; using namespace mlpack::distribution; BOOST_AUTO_TEST_SUITE(PerceptronTest); /** * This test tests whether the SimpleWeightUpdate updates weights and biases correctly, * without specifying the instance weight. */ BOOST_AUTO_TEST_CASE(SimpleWeightUpdateWeights) { SimpleWeightUpdate wip; /** * The weights of the incorrectly classified class should decrease while the * weight of the correct class should increase. */ vec trainingPoint("1 2 3 4 5"); mat weights("0 1 6;" "2 3 6;" "4 5 6;" "6 7 6;" "8 9 6"); vec biases("2 5 7"); size_t incorrectClass = 0; size_t correctClass = 2; wip.UpdateWeights(trainingPoint, weights, biases, incorrectClass, correctClass); BOOST_CHECK_EQUAL(weights(0, 0), -1); BOOST_CHECK_EQUAL(weights(1, 0), 0); BOOST_CHECK_EQUAL(weights(2, 0), 1); BOOST_CHECK_EQUAL(weights(3, 0), 2); BOOST_CHECK_EQUAL(weights(4, 0), 3); BOOST_CHECK_EQUAL(weights(0, 2), 7); BOOST_CHECK_EQUAL(weights(1, 2), 8); BOOST_CHECK_EQUAL(weights(2, 2), 9); BOOST_CHECK_EQUAL(weights(3, 2), 10); BOOST_CHECK_EQUAL(weights(4, 2), 11); BOOST_CHECK_EQUAL(biases(0), 1); BOOST_CHECK_EQUAL(biases(2), 8); } /** * This test tests whether the SimpleWeightUpdate updates weights and biases correctly, * and specifies the instance weight. */ BOOST_AUTO_TEST_CASE(SimpleWeightUpdateInstanceWeight) { SimpleWeightUpdate wip; /** * The weights of the incorrectly classified class should decrease * while the weights of the correct class should increase. The decrease and * increase depend on the specified instance weight. 
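 *
 * Concretely, for a training point x with instance weight w, the update the
 * checks below verify (with w = 3) is:
 *
 *   weights.col(correctClass)   += w * x;  biases(correctClass)   += w;
 *   weights.col(incorrectClass) -= w * x;  biases(incorrectClass) -= w;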
*/ vec trainingPoint("1 2 3 4 5"); mat weights("0 1 6;" "2 3 6;" "4 5 6;" "6 7 6;" "8 9 6"); vec biases("2 5 7"); size_t incorrectClass = 0; size_t correctClass = 2; double instanceWeight = 3.0; wip.UpdateWeights(trainingPoint, weights, biases, incorrectClass, correctClass, instanceWeight); BOOST_CHECK_EQUAL(weights(0, 0), -3); BOOST_CHECK_EQUAL(weights(1, 0), -4); BOOST_CHECK_EQUAL(weights(2, 0), -5); BOOST_CHECK_EQUAL(weights(3, 0), -6); BOOST_CHECK_EQUAL(weights(4, 0), -7); BOOST_CHECK_EQUAL(weights(0, 2), 9); BOOST_CHECK_EQUAL(weights(1, 2), 12); BOOST_CHECK_EQUAL(weights(2, 2), 15); BOOST_CHECK_EQUAL(weights(3, 2), 18); BOOST_CHECK_EQUAL(weights(4, 2), 21); BOOST_CHECK_EQUAL(biases(0), -1); BOOST_CHECK_EQUAL(biases(2), 10); } /** * This test tests whether the perceptron converges for the AND gate classifier. */ BOOST_AUTO_TEST_CASE(And) { mat trainData; trainData << 0 << 1 << 1 << 0 << endr << 1 << 0 << 1 << 0 << endr; Mat labels; labels << 0 << 0 << 1 << 0; Perceptron<> p(trainData, labels.row(0), 2, 1000); mat testData; testData << 0 << 1 << 1 << 0 << endr << 1 << 0 << 1 << 0 << endr; Row predictedLabels(testData.n_cols); p.Classify(testData, predictedLabels); BOOST_CHECK_EQUAL(predictedLabels(0, 0), 0); BOOST_CHECK_EQUAL(predictedLabels(0, 1), 0); BOOST_CHECK_EQUAL(predictedLabels(0, 2), 1); BOOST_CHECK_EQUAL(predictedLabels(0, 3), 0); } /** * This test tests whether the perceptron converges for the OR gate classifier. */ BOOST_AUTO_TEST_CASE(Or) { mat trainData; trainData << 0 << 1 << 1 << 0 << endr << 1 << 0 << 1 << 0 << endr; Mat labels; labels << 1 << 1 << 1 << 0; Perceptron<> p(trainData, labels.row(0), 2, 1000); mat testData; testData << 0 << 1 << 1 << 0 << endr << 1 << 0 << 1 << 0 << endr; Row predictedLabels(testData.n_cols); p.Classify(testData, predictedLabels); BOOST_CHECK_EQUAL(predictedLabels(0, 0), 1); BOOST_CHECK_EQUAL(predictedLabels(0, 1), 1); BOOST_CHECK_EQUAL(predictedLabels(0, 2), 1); BOOST_CHECK_EQUAL(predictedLabels(0, 3), 0); } /** * This tests the convergence on a set of linearly separable data with 3 * classes. */ BOOST_AUTO_TEST_CASE(Random3) { mat trainData; trainData << 0 << 1 << 1 << 4 << 5 << 4 << 1 << 2 << 1 << endr << 1 << 0 << 1 << 1 << 1 << 2 << 4 << 5 << 4 << endr; Mat labels; labels << 0 << 0 << 0 << 1 << 1 << 1 << 2 << 2 << 2; Perceptron<> p(trainData, labels.row(0), 3, 1000); mat testData; testData << 0 << 1 << 1 << endr << 1 << 0 << 1 << endr; Row predictedLabels(testData.n_cols); p.Classify(testData, predictedLabels); for (size_t i = 0; i < predictedLabels.n_cols; i++) BOOST_CHECK_EQUAL(predictedLabels(0, i), 0); } /** * This tests the convergence of the perceptron on a dataset which has only TWO * points which belong to different classes. */ BOOST_AUTO_TEST_CASE(TwoPoints) { mat trainData; trainData << 0 << 1 << endr << 1 << 0 << endr; Mat labels; labels << 0 << 1; Perceptron<> p(trainData, labels.row(0), 2, 1000); mat testData; testData << 0 << 1 << endr << 1 << 0 << endr; Row predictedLabels(testData.n_cols); p.Classify(testData, predictedLabels); BOOST_CHECK_EQUAL(predictedLabels(0, 0), 0); BOOST_CHECK_EQUAL(predictedLabels(0, 1), 1); } /** * This tests the convergence of the perceptron on a dataset which has a * non-linearly separable dataset. 
*/ BOOST_AUTO_TEST_CASE(NonLinearlySeparableDataset) { mat trainData; trainData << 1 << 2 << 3 << 4 << 5 << 6 << 7 << 8 << 1 << 2 << 3 << 4 << 5 << 6 << 7 << 8 << endr << 1 << 1 << 1 << 1 << 1 << 1 << 1 << 1 << 2 << 2 << 2 << 2 << 2 << 2 << 2 << 2 << endr; Mat labels; labels << 0 << 0 << 0 << 1 << 0 << 1 << 1 << 1 << 0 << 0 << 0 << 1 << 0 << 1 << 1 << 1; Perceptron<> p(trainData, labels.row(0), 2, 1000); mat testData; testData << 3 << 4 << 5 << 6 << endr << 3 << 2.3 << 1.7 << 1.5 << endr; Row predictedLabels(testData.n_cols); p.Classify(testData, predictedLabels); BOOST_CHECK_EQUAL(predictedLabels(0, 0), 0); BOOST_CHECK_EQUAL(predictedLabels(0, 1), 0); BOOST_CHECK_EQUAL(predictedLabels(0, 2), 1); BOOST_CHECK_EQUAL(predictedLabels(0, 3), 1); } BOOST_AUTO_TEST_CASE(SecondaryConstructor) { mat trainData; trainData << 1 << 2 << 3 << 4 << 5 << 6 << 7 << 8 << 1 << 2 << 3 << 4 << 5 << 6 << 7 << 8 << endr << 1 << 1 << 1 << 1 << 1 << 1 << 1 << 1 << 2 << 2 << 2 << 2 << 2 << 2 << 2 << 2 << endr; Mat labels; labels << 0 << 0 << 0 << 1 << 0 << 1 << 1 << 1 << 0 << 0 << 0 << 1 << 0 << 1 << 1 << 1; Perceptron<> p1(trainData, labels.row(0), 2, 1000); Perceptron<> p2(p1); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/performance_functions_test.cpp000066400000000000000000000032641315013601400243220ustar00rootroot00000000000000/** * @file performance_functions_test.cpp * @author Marcus Edel * * Tests for the various performance functions. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::ann; BOOST_AUTO_TEST_SUITE(PerformanceFunctionsTest); // Test the mean squared error performance function. BOOST_AUTO_TEST_CASE(MeanSquaredErrorTest) { arma::colvec input("1.0 0.0 1.0 0.0 -1.0 0.0 -1.0 0.0"); arma::colvec target = arma::zeros(8); BOOST_REQUIRE_EQUAL(MeanSquaredErrorFunction::Error(input, target), 0.5); } // Test the cross entropy performance function. BOOST_AUTO_TEST_CASE(CrossEntropyErrorTest) { arma::colvec input; input << std::exp(-2.0) << std::exp(-1.0); arma::colvec target = arma::ones(2); BOOST_REQUIRE_EQUAL(CrossEntropyErrorFunction<>::Error(input, target), 3); } // Test the sum squared error performance function. BOOST_AUTO_TEST_CASE(SumSquaredErrorTest) { arma::colvec input("1.0 0.0 1.0 0.0 -1.0 0.0 -1.0 0.0"); arma::colvec target = arma::zeros(8); BOOST_REQUIRE_EQUAL(SumSquaredErrorFunction::Error(input, target), 4); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/prefixedoutstream_test.cpp000066400000000000000000000164211315013601400235020ustar00rootroot00000000000000/** * @file prefixedoutstream_test.cpp * @author Matthew Amidon, Ryan Curtin * * Tests for the PrefixedOutStream class * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include #include #include #include #include "test_tools.hpp" #define BASH_RED "\033[0;31m" #define BASH_GREEN "\033[0;32m" #define BASH_YELLOW "\033[0;33m" #define BASH_CYAN "\033[0;36m" #define BASH_CLEAR "\033[0m" using namespace mlpack; using namespace mlpack::util; BOOST_AUTO_TEST_SUITE(PrefixedOutStreamTest); /** * Test the output of CLI using PrefixedOutStream. We will pass bogus * input to a stringstream so that none of it gets to the screen. */ BOOST_AUTO_TEST_CASE(TestPrefixedOutStreamBasic) { std::stringstream ss; PrefixedOutStream pss(ss, BASH_GREEN "[INFO ] " BASH_CLEAR); pss << "This shouldn't break anything" << std::endl; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR "This shouldn't break anything\n"); ss.str(""); pss << "Test the new lines..."; pss << "shouldn't get 'Info' here." << std::endl; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR "Test the new lines...shouldn't get 'Info' here.\n"); pss << "But now I should." << std::endl << std::endl; pss << ""; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR "Test the new lines...shouldn't get 'Info' here.\n" BASH_GREEN "[INFO ] " BASH_CLEAR "But now I should.\n" BASH_GREEN "[INFO ] " BASH_CLEAR "\n" BASH_GREEN "[INFO ] " BASH_CLEAR ""); } /** * Test that we can correctly output Armadillo objects to PrefixedOutStream * objects. */ BOOST_AUTO_TEST_CASE(TestArmadilloPrefixedOutStream) { // We will test this with both a vector and a matrix. arma::vec test("1.0 1.5 2.0 2.5 3.0 3.5 4.0"); std::stringstream ss; PrefixedOutStream pss(ss, BASH_GREEN "[INFO ] " BASH_CLEAR); pss << test; // This should result in nothing being on the current line (since it clears // it). BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR " 1.0000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 1.5000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 2.0000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 2.5000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 3.0000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 3.5000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 4.0000\n"); ss.str(""); pss << trans(test); // This should result in there being stuff on the line. BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR " 1.0000 1.5000 2.0000 2.5000 3.0000 3.5000 4.0000\n"); arma::mat test2("1.0 1.5 2.0; 2.5 3.0 3.5; 4.0 4.5 4.99999"); ss.str(""); pss << test2; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR " 1.0000 1.5000 2.0000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 2.5000 3.0000 3.5000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 4.0000 4.5000 5.0000\n"); // Try and throw a curveball by not clearing the line before outputting // something else. The PrefixedOutStream should not force Armadillo objects // onto their own lines. ss.str(""); pss << "hello" << test2; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR "hello 1.0000 1.5000 2.0000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 2.5000 3.0000 3.5000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 4.0000 4.5000 5.0000\n"); } /** * Test that we can correctly output things in general. */ BOOST_AUTO_TEST_CASE(TestPrefixedOutStream) { std::stringstream ss; PrefixedOutStream pss(ss, BASH_GREEN "[INFO ] " BASH_CLEAR); pss << "hello world I am "; pss << 7; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR "hello world I am 7"); pss << std::endl; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR "hello world I am 7\n"); ss.str(""); pss << std::endl; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR "\n"); } /** * Test format modifiers. 
*/ BOOST_AUTO_TEST_CASE(TestPrefixedOutStreamModifiers) { std::stringstream ss; PrefixedOutStream pss(ss, BASH_GREEN "[INFO ] " BASH_CLEAR); pss << "I have a precise number which is "; pss << std::setw(6) << std::setfill('0') << (int)156; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR "I have a precise number which is 000156"); } /** * Test formatted floating-point output. */ BOOST_AUTO_TEST_CASE(TestFormattedOutput) { std::stringstream ss; PrefixedOutStream pss(ss, BASH_GREEN "[INFO ]" BASH_CLEAR); const double pi = std::acos(-1.0); pss << std::setprecision(10) << pi; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ]" BASH_CLEAR "3.141592654"); } /** * Test custom precision output of arma objects. */ BOOST_AUTO_TEST_CASE(TestArmaCustomPrecision) { std::stringstream ss; PrefixedOutStream pss(ss, BASH_GREEN "[INFO ] " BASH_CLEAR); // The vector to be tested. arma::vec test("1.0 1.5 2.0 2.5 3.0 3.5 4.0"); // The matrix to be tested. arma::mat test2("1.0 1.5 2.0; 2.5 3.0 3.5; 4.0 4.5 4.99999"); // Try to print armadillo objects with custom precision. ss << std::fixed; ss << std::setprecision(6); ss.str(""); pss << test; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR " 1.000000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 1.500000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 2.000000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 2.500000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 3.000000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 3.500000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 4.000000\n"); ss.str(""); pss << trans(test); BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR " 1.000000 1.500000 2.000000 2.500000" " 3.000000 3.500000 4.000000\n"); // Try printing a matrix, with higher precision. ss << std::setprecision(8); ss.str(""); pss << test2; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR " 1.00000000 1.50000000 2.00000000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 2.50000000 3.00000000 3.50000000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 4.00000000 4.50000000 4.99999000\n"); // Try alignment with larger values. test2.at(2) = 40; ss.str(""); pss << trans(test2); BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR " 1.00000000 2.50000000 40.00000000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 1.50000000 3.00000000 4.50000000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 2.00000000 3.50000000 4.99999000\n"); // Test stream after reset. test2.at(2) = 4; ss << std::setprecision(6); ss.unsetf(std::ios::floatfield); ss.str(""); pss << test2; BOOST_REQUIRE_EQUAL(ss.str(), BASH_GREEN "[INFO ] " BASH_CLEAR " 1.0000 1.5000 2.0000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 2.5000 3.0000 3.5000\n" BASH_GREEN "[INFO ] " BASH_CLEAR " 4.0000 4.5000 5.0000\n"); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/qdafn_test.cpp000066400000000000000000000137221315013601400210220ustar00rootroot00000000000000/** * @file qdafn_test.cpp * @author Ryan Curtin * * Test the QDAFN functionality. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include "test_tools.hpp" #include "serialization.hpp" #include #include #include using namespace std; using namespace arma; using namespace mlpack; using namespace mlpack::neighbor; BOOST_AUTO_TEST_SUITE(QDAFNTest); /** * With one reference point, make sure that is the one that is returned. 
*/ BOOST_AUTO_TEST_CASE(QDAFNTrivialTest) { arma::mat refSet(5, 1); refSet.randu(); // 5 tables, 1 point. QDAFN<> qdafn(refSet, 5, 1); arma::mat querySet(5, 5); querySet.randu(); arma::Mat neighbors; arma::mat distances; qdafn.Search(querySet, 1, neighbors, distances); // Check sizes. BOOST_REQUIRE_EQUAL(neighbors.n_rows, 1); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 5); BOOST_REQUIRE_EQUAL(distances.n_rows, 1); BOOST_REQUIRE_EQUAL(distances.n_cols, 5); for (size_t i = 0; i < 5; ++i) { BOOST_REQUIRE_EQUAL(neighbors[i], 0); const double dist = metric::EuclideanDistance::Evaluate(querySet.col(i), refSet.col(0)); BOOST_REQUIRE_CLOSE(distances[i], dist, 1e-5); } } /** * Given a random uniform reference set, ensure that we get a neighbor and * distance within 10% of the actual true furthest neighbor distance at least * 70% of the time. */ BOOST_AUTO_TEST_CASE(QDAFNUniformSet) { arma::mat uniformSet = arma::randu(25, 1000); QDAFN<> qdafn(uniformSet, 10, 30); // Get the actual neighbors. AllkFN kfn(uniformSet); arma::Mat trueNeighbors; arma::mat trueDistances; kfn.Search(1000, trueNeighbors, trueDistances); arma::Mat qdafnNeighbors; arma::mat qdafnDistances; qdafn.Search(uniformSet, 1, qdafnNeighbors, qdafnDistances); BOOST_REQUIRE_EQUAL(qdafnNeighbors.n_rows, 1); BOOST_REQUIRE_EQUAL(qdafnNeighbors.n_cols, 1000); BOOST_REQUIRE_EQUAL(qdafnDistances.n_rows, 1); BOOST_REQUIRE_EQUAL(qdafnDistances.n_cols, 1000); size_t successes = 0; for (size_t i = 0; i < 1000; ++i) { // Find the true neighbor. size_t trueIndex = 1000; for (size_t j = 0; j < 1000; ++j) { if (trueNeighbors(j, i) == qdafnNeighbors(0, i)) { trueIndex = j; break; } } BOOST_REQUIRE_NE(trueIndex, 1000); if (0.9 * trueDistances(0, i) <= qdafnDistances(0, i)) ++successes; } BOOST_REQUIRE_GE(successes, 700); } /** * Test re-training method. */ BOOST_AUTO_TEST_CASE(RetrainTest) { arma::mat dataset = arma::randu(25, 500); arma::mat newDataset = arma::randu(15, 600); QDAFN<> qdafn(dataset, 20, 60); qdafn.Train(newDataset, 10, 50); BOOST_REQUIRE_EQUAL(qdafn.NumProjections(), 10); for (size_t i = 0; i < 10; ++i) { BOOST_REQUIRE_EQUAL(qdafn.CandidateSet(i).n_rows, 15); BOOST_REQUIRE_EQUAL(qdafn.CandidateSet(i).n_cols, 50); } } /** * Test serialization of QDAFN. */ BOOST_AUTO_TEST_CASE(SerializationTest) { // Use a random dataset. arma::mat dataset = arma::randu(15, 300); QDAFN<> qdafn(dataset, 10, 50); arma::mat fakeDataset1 = arma::randu(10, 200); arma::mat fakeDataset2 = arma::randu(50, 500); QDAFN<> qdafnXml(fakeDataset1, 5, 10); QDAFN<> qdafnText(6, 50); QDAFN<> qdafnBinary(7, 15); qdafnBinary.Train(fakeDataset2); // Serialize the objects. SerializeObjectAll(qdafn, qdafnXml, qdafnText, qdafnBinary); // Check that the tables are all the same. 
BOOST_REQUIRE_EQUAL(qdafnXml.NumProjections(), qdafn.NumProjections()); BOOST_REQUIRE_EQUAL(qdafnText.NumProjections(), qdafn.NumProjections()); BOOST_REQUIRE_EQUAL(qdafnBinary.NumProjections(), qdafn.NumProjections()); for (size_t i = 0; i < qdafn.NumProjections(); ++i) { BOOST_REQUIRE_EQUAL(qdafnXml.CandidateSet(i).n_rows, qdafn.CandidateSet(i).n_rows); BOOST_REQUIRE_EQUAL(qdafnText.CandidateSet(i).n_rows, qdafn.CandidateSet(i).n_rows); BOOST_REQUIRE_EQUAL(qdafnBinary.CandidateSet(i).n_rows, qdafn.CandidateSet(i).n_rows); BOOST_REQUIRE_EQUAL(qdafnXml.CandidateSet(i).n_cols, qdafn.CandidateSet(i).n_cols); BOOST_REQUIRE_EQUAL(qdafnText.CandidateSet(i).n_cols, qdafn.CandidateSet(i).n_cols); BOOST_REQUIRE_EQUAL(qdafnBinary.CandidateSet(i).n_cols, qdafn.CandidateSet(i).n_cols); for (size_t j = 0; j < qdafn.CandidateSet(i).n_elem; ++j) { if (std::abs(qdafn.CandidateSet(i)[j]) < 1e-5) { BOOST_REQUIRE_SMALL(qdafnXml.CandidateSet(i)[j], 1e-5); BOOST_REQUIRE_SMALL(qdafnText.CandidateSet(i)[j], 1e-5); BOOST_REQUIRE_SMALL(qdafnBinary.CandidateSet(i)[j], 1e-5); } else { const double value = qdafn.CandidateSet(i)[j]; BOOST_REQUIRE_CLOSE(qdafnXml.CandidateSet(i)[j], value, 1e-5); BOOST_REQUIRE_CLOSE(qdafnText.CandidateSet(i)[j], value, 1e-5); BOOST_REQUIRE_CLOSE(qdafnBinary.CandidateSet(i)[j], value, 1e-5); } } } } // Make sure QDAFN works with sparse data. BOOST_AUTO_TEST_CASE(SparseTest) { arma::sp_mat dataset; dataset.sprandu(200, 1000, 0.3); // Create a sparse version. QDAFN sparse(dataset, 15, 50); // Make sure the results are of the right shape. It's hard to test anything // more than that because we don't have easy-to-check performance guarantees. arma::Mat neighbors; arma::mat distances; sparse.Search(dataset, 3, neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.n_rows, 3); BOOST_REQUIRE_EQUAL(neighbors.n_cols, 1000); BOOST_REQUIRE_EQUAL(distances.n_rows, 3); BOOST_REQUIRE_EQUAL(distances.n_cols, 1000); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/quic_svd_test.cpp000066400000000000000000000043511315013601400215440ustar00rootroot00000000000000/** * @file quic_svd_test.cpp * @author Siddharth Agrawal * * Test file for QUIC-SVD class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" BOOST_AUTO_TEST_SUITE(QUICSVDTest); using namespace mlpack; /** * The reconstruction error of the obtained SVD should be small. */ BOOST_AUTO_TEST_CASE(QUICSVDReconstructionError) { // Load the dataset. arma::mat dataset; data::Load("test_data_3_1000.csv", dataset); // Obtain the SVD using default parameters. arma::mat u, v, sigma; svd::QUIC_SVD quicsvd(dataset, u, v, sigma); // Reconstruct the matrix using the SVD. arma::mat reconstruct; reconstruct = u * sigma * v.t(); // The relative reconstruction error should be small. double relativeError = arma::norm(dataset - reconstruct, "frob") / arma::norm(dataset, "frob"); BOOST_REQUIRE_SMALL(relativeError, 1e-5); } /** * The singular value error of the obtained SVD should be small. 
 */
BOOST_AUTO_TEST_CASE(QUICSVDSingularValueError)
{
  arma::mat U = arma::randn<arma::mat>(3, 20);
  arma::mat V = arma::randn<arma::mat>(10, 3);

  arma::mat R;
  arma::qr_econ(U, R, U);
  arma::qr_econ(V, R, V);

  arma::mat s = arma::diagmat(arma::vec("1 0.1 0.01"));

  arma::mat data = arma::trans(U * arma::diagmat(s) * V.t());

  arma::vec s1, s3;
  arma::mat U1, U2, V1, V2, s2;

  // Obtain the SVD using default parameters.
  arma::svd_econ(U1, s1, V1, data);
  svd::QUIC_SVD quicsvd(data, U1, V1, s2);

  s3 = arma::diagvec(s2);
  s1 = s1.subvec(0, s3.n_elem - 1);

  // The singular value error should be small.
  double error = arma::norm(s1 - s3);
  BOOST_REQUIRE_SMALL(error, 0.05);
}

BOOST_AUTO_TEST_CASE(QUICSVDSameDimensionTest)
{
  arma::mat dataset = arma::randn<arma::mat>(10, 10);

  // Obtain the SVD using default parameters.
  arma::mat u, v, sigma;
  svd::QUIC_SVD quicsvd(dataset, u, v, sigma);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/radical_test.cpp

/**
 * @file radical_test.cpp
 * @author Nishant Mehta
 *
 * Test for RADICAL.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/radical/radical.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

BOOST_AUTO_TEST_SUITE(RadicalTest);

using namespace mlpack;
using namespace mlpack::radical;
using namespace std;
using namespace arma;

BOOST_AUTO_TEST_CASE(Radical_Test_Radical3D)
{
  mat matX;
  data::Load("data_3d_mixed.txt", matX);

  Radical rad(0.175, 5, 100, matX.n_rows - 1);

  mat matY;
  mat matW;
  rad.DoRadical(matX, matY, matW);

  mat matYT = trans(matY);
  double valEst = 0;

  for (uword i = 0; i < matYT.n_cols; i++)
  {
    vec y = vec(matYT.col(i));
    valEst += rad.Vasicek(y);
  }

  mat matS;
  data::Load("data_3d_ind.txt", matS);
  rad.DoRadical(matS, matY, matW);

  matYT = trans(matY);
  double valBest = 0;

  for (uword i = 0; i < matYT.n_cols; i++)
  {
    vec y = vec(matYT.col(i));
    valBest += rad.Vasicek(y);
  }

  BOOST_REQUIRE_CLOSE(valBest, valEst, 0.25);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/randomized_svd_test.cpp

/**
 * @file randomized_svd_test.cpp
 * @author Marcus Edel
 *
 * Test file for the Randomized SVD class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/randomized_svd/randomized_svd.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

BOOST_AUTO_TEST_SUITE(RandomizedSVDTest);

using namespace mlpack;

/**
 * The reconstruction and singular value error of the obtained SVD should be
 * small.
 */
BOOST_AUTO_TEST_CASE(RandomizedSVDReconstructionError)
{
  arma::mat U = arma::randn<arma::mat>(3, 20);
  arma::mat V = arma::randn<arma::mat>(10, 3);

  arma::mat R;
  arma::qr_econ(U, R, U);
  arma::qr_econ(V, R, V);

  arma::mat s = arma::diagmat(arma::vec("1 0.1 0.01"));

  arma::mat data = arma::trans(U * arma::diagmat(s) * V.t());

  // Center the data into a temporary matrix.
  arma::mat centeredData;
  math::Center(data, centeredData);

  arma::mat U1, U2, V1, V2;
  arma::vec s1, s2, s3;

  arma::svd_econ(U1, s1, V1, centeredData);

  svd::RandomizedSVD rSVD(0, 10);
  rSVD.Apply(data, U2, s2, V2, 3);

  // Use the same amount of data for the comparison (matrix rank).
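  // (svd_econ() computes the full economy-size spectrum, while the randomized
  // SVD was asked only for rank 3, so s1 is truncated below to s2.n_elem
  // entries before the two spectra are compared.)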
  s3 = s1.subvec(0, s2.n_elem - 1);

  // The singular value error should be small.
  double error = arma::norm(s2 - s3, "frob") / arma::norm(s2, "frob");
  BOOST_REQUIRE_SMALL(error, 1e-5);

  arma::mat reconstruct = U2 * arma::diagmat(s2) * V2.t();

  // The relative reconstruction error should be small.
  error = arma::norm(centeredData - reconstruct, "frob") /
      arma::norm(centeredData, "frob");
  BOOST_REQUIRE_SMALL(error, 1e-5);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/range_search_test.cpp

/**
 * @file range_search_test.cpp
 * @author Ryan Curtin
 *
 * Test file for RangeSearch<> class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/tree/cover_tree.hpp>
#include <mlpack/methods/range_search/range_search.hpp>
#include <mlpack/methods/range_search/rs_model.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::range;
using namespace mlpack::math;
using namespace mlpack::tree;
using namespace mlpack::bound;
using namespace mlpack::metric;
using namespace std;

BOOST_AUTO_TEST_SUITE(RangeSearchTest);

// Get our results into a sorted format, so we can actually then test for
// correctness.
void SortResults(const vector<vector<size_t>>& neighbors,
                 const vector<vector<double>>& distances,
                 vector<vector<pair<double, size_t>>>& output)
{
  output.resize(neighbors.size());
  for (size_t i = 0; i < neighbors.size(); i++)
  {
    output[i].resize(neighbors[i].size());
    for (size_t j = 0; j < neighbors[i].size(); j++)
      output[i][j] = make_pair(distances[i][j], neighbors[i][j]);

    // Now that it's constructed, sort it.
    sort(output[i].begin(), output[i].end());
  }
}

// Clean a tree's statistics.
template<typename TreeType>
void CleanTree(TreeType& node)
{
  node.Stat().LastDistance() = 0.0;

  for (size_t i = 0; i < node.NumChildren(); ++i)
    CleanTree(node.Child(i));
}

/**
 * Simple range-search test with small, synthetic dataset. This is an
 * exhaustive test, which checks that each method for performing the
 * calculation (dual-tree, single-tree, naive) produces the correct results.
 * An eleven-point dataset and the points within three ranges are taken. The
 * dataset is in one dimension for simplicity -- the correct functionality of
 * distance functions is not tested here.
 */
BOOST_AUTO_TEST_CASE(ExhaustiveSyntheticTest)
{
  // Set up our data.
  arma::mat data(1, 11);
  data[0] = 0.05; // Row addressing is unnecessary (they are all 0).
  data[1] = 0.35;
  data[2] = 0.15;
  data[3] = 1.25;
  data[4] = 5.05;
  data[5] = -0.22;
  data[6] = -2.00;
  data[7] = -1.30;
  data[8] = 0.45;
  data[9] = 0.90;
  data[10] = 1.00;

  typedef KDTree<EuclideanDistance, RangeSearchStat, arma::mat> TreeType;

  // We will loop through three times, one for each method of performing the
  // calculation.
  std::vector<size_t> oldFromNew;
  std::vector<size_t> newFromOld;
  TreeType* tree = new TreeType(data, oldFromNew, newFromOld, 1);
  for (int i = 0; i < 3; i++)
  {
    RangeSearch<>* rs;

    switch (i)
    {
      case 0: // Use the naive method.
        rs = new RangeSearch<>(tree->Dataset(), true);
        break;
      case 1: // Use the single-tree method.
        rs = new RangeSearch<>(tree, true);
        break;
      case 2: // Use the dual-tree method.
        rs = new RangeSearch<>(tree);
        break;
    }

    // Now perform the first calculation. Points within 0.50.
    vector<vector<size_t>> neighbors;
    vector<vector<double>> distances;
    rs->Search(Range(0.0, sqrt(0.5)), neighbors, distances);

    // Now the exhaustive check for correctness. This will be long.
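    // As a sanity check on the expected values below: point 0 sits at 0.05,
    // so its neighbors within sqrt(0.5) ~= 0.7071 are point 2
    // (|0.05 - 0.15| = 0.10), point 5 (0.27), point 1 (0.30), and point 8
    // (0.40). Every assertion below follows from the coordinates above in the
    // same way.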
vector>> sortedOutput; SortResults(neighbors, distances, sortedOutput); BOOST_REQUIRE(sortedOutput[newFromOld[0]].size() == 4); BOOST_REQUIRE(sortedOutput[newFromOld[0]][0].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][0].first, 0.10, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[0]][1].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][1].first, 0.27, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[0]][2].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][2].first, 0.30, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[0]][3].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][3].first, 0.40, 1e-5); // Neighbors of point 1. BOOST_REQUIRE(sortedOutput[newFromOld[1]].size() == 6); BOOST_REQUIRE(sortedOutput[newFromOld[1]][0].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][0].first, 0.10, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[1]][1].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][1].first, 0.20, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[1]][2].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][2].first, 0.30, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[1]][3].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][3].first, 0.55, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[1]][4].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][4].first, 0.57, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[1]][5].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][5].first, 0.65, 1e-5); // Neighbors of point 2. BOOST_REQUIRE(sortedOutput[newFromOld[2]].size() == 4); BOOST_REQUIRE(sortedOutput[newFromOld[2]][0].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][0].first, 0.10, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[2]][1].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][1].first, 0.20, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[2]][2].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][2].first, 0.30, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[2]][3].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][3].first, 0.37, 1e-5); // Neighbors of point 3. BOOST_REQUIRE(sortedOutput[newFromOld[3]].size() == 2); BOOST_REQUIRE(sortedOutput[newFromOld[3]][0].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][0].first, 0.25, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[3]][1].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][1].first, 0.35, 1e-5); // Neighbors of point 4. BOOST_REQUIRE(sortedOutput[newFromOld[4]].size() == 0); // Neighbors of point 5. BOOST_REQUIRE(sortedOutput[newFromOld[5]].size() == 4); BOOST_REQUIRE(sortedOutput[newFromOld[5]][0].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][0].first, 0.27, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[5]][1].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][1].first, 0.37, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[5]][2].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][2].first, 0.57, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[5]][3].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][3].first, 0.67, 1e-5); // Neighbors of point 6. 
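    // Point 6 sits at -2.00, far to the left of the other points; only point
    // 7 (-1.30) lies within sqrt(0.5) of it, at distance 0.70.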
BOOST_REQUIRE(sortedOutput[newFromOld[6]].size() == 1); BOOST_REQUIRE(sortedOutput[newFromOld[6]][0].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][0].first, 0.70, 1e-5); // Neighbors of point 7. BOOST_REQUIRE(sortedOutput[newFromOld[7]].size() == 1); BOOST_REQUIRE(sortedOutput[newFromOld[7]][0].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][0].first, 0.70, 1e-5); // Neighbors of point 8. BOOST_REQUIRE(sortedOutput[newFromOld[8]].size() == 6); BOOST_REQUIRE(sortedOutput[newFromOld[8]][0].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][0].first, 0.10, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[8]][1].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][1].first, 0.30, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[8]][2].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][2].first, 0.40, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[8]][3].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][3].first, 0.45, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[8]][4].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][4].first, 0.55, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[8]][5].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][5].first, 0.67, 1e-5); // Neighbors of point 9. BOOST_REQUIRE(sortedOutput[newFromOld[9]].size() == 4); BOOST_REQUIRE(sortedOutput[newFromOld[9]][0].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][0].first, 0.10, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[9]][1].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][1].first, 0.35, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[9]][2].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][2].first, 0.45, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[9]][3].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][3].first, 0.55, 1e-5); // Neighbors of point 10. BOOST_REQUIRE(sortedOutput[newFromOld[10]].size() == 4); BOOST_REQUIRE(sortedOutput[newFromOld[10]][0].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][0].first, 0.10, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[10]][1].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][1].first, 0.25, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[10]][2].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][2].first, 0.55, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[10]][3].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][3].first, 0.65, 1e-5); // Now do it again with a different range: [sqrt(0.5) 1.0]. if (rs->ReferenceTree()) CleanTree(*rs->ReferenceTree()); rs->Search(Range(sqrt(0.5), 1.0), neighbors, distances); SortResults(neighbors, distances, sortedOutput); // Neighbors of point 0. BOOST_REQUIRE(sortedOutput[newFromOld[0]].size() == 2); BOOST_REQUIRE(sortedOutput[newFromOld[0]][0].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][0].first, 0.85, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[0]][1].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][1].first, 0.95, 1e-5); // Neighbors of point 1. BOOST_REQUIRE(sortedOutput[newFromOld[1]].size() == 1); BOOST_REQUIRE(sortedOutput[newFromOld[1]][0].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][0].first, 0.90, 1e-5); // Neighbors of point 2. 
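    // For point 2 (0.15), only points 9 (0.90) and 10 (1.00) fall in
    // [sqrt(0.5), 1.0], at distances 0.75 and 0.85.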
BOOST_REQUIRE(sortedOutput[newFromOld[2]].size() == 2); BOOST_REQUIRE(sortedOutput[newFromOld[2]][0].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][0].first, 0.75, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[2]][1].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][1].first, 0.85, 1e-5); // Neighbors of point 3. BOOST_REQUIRE(sortedOutput[newFromOld[3]].size() == 2); BOOST_REQUIRE(sortedOutput[newFromOld[3]][0].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][0].first, 0.80, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[3]][1].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][1].first, 0.90, 1e-5); // Neighbors of point 4. BOOST_REQUIRE(sortedOutput[newFromOld[4]].size() == 0); // Neighbors of point 5. BOOST_REQUIRE(sortedOutput[newFromOld[5]].size() == 0); // Neighbors of point 6. BOOST_REQUIRE(sortedOutput[newFromOld[6]].size() == 0); // Neighbors of point 7. BOOST_REQUIRE(sortedOutput[newFromOld[7]].size() == 0); // Neighbors of point 8. BOOST_REQUIRE(sortedOutput[newFromOld[8]].size() == 1); BOOST_REQUIRE(sortedOutput[newFromOld[8]][0].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][0].first, 0.80, 1e-5); // Neighbors of point 9. BOOST_REQUIRE(sortedOutput[newFromOld[9]].size() == 2); BOOST_REQUIRE(sortedOutput[newFromOld[9]][0].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][0].first, 0.75, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[9]][1].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][1].first, 0.85, 1e-5); // Neighbors of point 10. BOOST_REQUIRE(sortedOutput[newFromOld[10]].size() == 2); BOOST_REQUIRE(sortedOutput[newFromOld[10]][0].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][0].first, 0.85, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[10]][1].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][1].first, 0.95, 1e-5); // Now do it again with a different range: [1.0 inf]. if (rs->ReferenceTree()) CleanTree(*rs->ReferenceTree()); rs->Search(Range(1.0, numeric_limits::infinity()), neighbors, distances); SortResults(neighbors, distances, sortedOutput); // Neighbors of point 0. BOOST_REQUIRE(sortedOutput[newFromOld[0]].size() == 4); BOOST_REQUIRE(sortedOutput[newFromOld[0]][0].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][0].first, 1.20, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[0]][1].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][1].first, 1.35, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[0]][2].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][2].first, 2.05, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[0]][3].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[0]][3].first, 5.00, 1e-5); // Neighbors of point 1. BOOST_REQUIRE(sortedOutput[newFromOld[1]].size() == 3); BOOST_REQUIRE(sortedOutput[newFromOld[1]][0].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][0].first, 1.65, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[1]][1].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][1].first, 2.35, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[1]][2].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[1]][2].first, 4.70, 1e-5); // Neighbors of point 2. 
BOOST_REQUIRE(sortedOutput[newFromOld[2]].size() == 4); BOOST_REQUIRE(sortedOutput[newFromOld[2]][0].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][0].first, 1.10, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[2]][1].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][1].first, 1.45, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[2]][2].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][2].first, 2.15, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[2]][3].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[2]][3].first, 4.90, 1e-5); // Neighbors of point 3. BOOST_REQUIRE(sortedOutput[newFromOld[3]].size() == 6); BOOST_REQUIRE(sortedOutput[newFromOld[3]][0].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][0].first, 1.10, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[3]][1].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][1].first, 1.20, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[3]][2].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][2].first, 1.47, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[3]][3].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][3].first, 2.55, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[3]][4].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][4].first, 3.25, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[3]][5].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[3]][5].first, 3.80, 1e-5); // Neighbors of point 4. BOOST_REQUIRE(sortedOutput[newFromOld[4]].size() == 10); BOOST_REQUIRE(sortedOutput[newFromOld[4]][0].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][0].first, 3.80, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[4]][1].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][1].first, 4.05, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[4]][2].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][2].first, 4.15, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[4]][3].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][3].first, 4.60, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[4]][4].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][4].first, 4.70, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[4]][5].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][5].first, 4.90, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[4]][6].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][6].first, 5.00, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[4]][7].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][7].first, 5.27, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[4]][8].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][8].first, 6.35, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[4]][9].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[4]][9].first, 7.05, 1e-5); // Neighbors of point 5. 
BOOST_REQUIRE(sortedOutput[newFromOld[5]].size() == 6); BOOST_REQUIRE(sortedOutput[newFromOld[5]][0].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][0].first, 1.08, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[5]][1].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][1].first, 1.12, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[5]][2].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][2].first, 1.22, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[5]][3].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][3].first, 1.47, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[5]][4].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][4].first, 1.78, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[5]][5].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[5]][5].first, 5.27, 1e-5); // Neighbors of point 6. BOOST_REQUIRE(sortedOutput[newFromOld[6]].size() == 9); BOOST_REQUIRE(sortedOutput[newFromOld[6]][0].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][0].first, 1.78, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[6]][1].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][1].first, 2.05, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[6]][2].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][2].first, 2.15, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[6]][3].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][3].first, 2.35, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[6]][4].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][4].first, 2.45, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[6]][5].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][5].first, 2.90, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[6]][6].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][6].first, 3.00, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[6]][7].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][7].first, 3.25, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[6]][8].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[6]][8].first, 7.05, 1e-5); // Neighbors of point 7. 
BOOST_REQUIRE(sortedOutput[newFromOld[7]].size() == 9); BOOST_REQUIRE(sortedOutput[newFromOld[7]][0].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][0].first, 1.08, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[7]][1].second == newFromOld[0]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][1].first, 1.35, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[7]][2].second == newFromOld[2]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][2].first, 1.45, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[7]][3].second == newFromOld[1]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][3].first, 1.65, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[7]][4].second == newFromOld[8]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][4].first, 1.75, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[7]][5].second == newFromOld[9]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][5].first, 2.20, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[7]][6].second == newFromOld[10]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][6].first, 2.30, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[7]][7].second == newFromOld[3]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][7].first, 2.55, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[7]][8].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[7]][8].first, 6.35, 1e-5); // Neighbors of point 8. BOOST_REQUIRE(sortedOutput[newFromOld[8]].size() == 3); BOOST_REQUIRE(sortedOutput[newFromOld[8]][0].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][0].first, 1.75, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[8]][1].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][1].first, 2.45, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[8]][2].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[8]][2].first, 4.60, 1e-5); // Neighbors of point 9. BOOST_REQUIRE(sortedOutput[newFromOld[9]].size() == 4); BOOST_REQUIRE(sortedOutput[newFromOld[9]][0].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][0].first, 1.12, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[9]][1].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][1].first, 2.20, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[9]][2].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][2].first, 2.90, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[9]][3].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[9]][3].first, 4.15, 1e-5); // Neighbors of point 10. BOOST_REQUIRE(sortedOutput[newFromOld[10]].size() == 4); BOOST_REQUIRE(sortedOutput[newFromOld[10]][0].second == newFromOld[5]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][0].first, 1.22, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[10]][1].second == newFromOld[7]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][1].first, 2.30, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[10]][2].second == newFromOld[6]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][2].first, 3.00, 1e-5); BOOST_REQUIRE(sortedOutput[newFromOld[10]][3].second == newFromOld[4]); BOOST_REQUIRE_CLOSE(sortedOutput[newFromOld[10]][3].first, 4.05, 1e-5); // Clean the memory. delete rs; } delete tree; } /** * Test the dual-tree range search method with the naive method. This * uses both a query and reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(DualTreeVsNaive1) { arma::mat dataForTree; // Hard-coded filename: bad! 
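  // The naive object below performs an exhaustive O(n^2) pairwise scan, so it
  // serves as ground truth for the dual-tree traversal.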
if (!data::Load("test_data_3_1000.csv", dataForTree)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); // Set up matrices to work with. arma::mat dualQuery(dataForTree); arma::mat dualReferences(dataForTree); arma::mat naiveQuery(dataForTree); arma::mat naiveReferences(dataForTree); RangeSearch<> rs(dualReferences); RangeSearch<> naive(naiveReferences, true); vector> neighborsTree; vector> distancesTree; rs.Search(dualQuery, Range(0.25, 1.05), neighborsTree, distancesTree); vector>> sortedTree; SortResults(neighborsTree, distancesTree, sortedTree); vector> neighborsNaive; vector> distancesNaive; naive.Search(naiveQuery, Range(0.25, 1.05), neighborsNaive, distancesNaive); vector>> sortedNaive; SortResults(neighborsNaive, distancesNaive, sortedNaive); for (size_t i = 0; i < sortedTree.size(); i++) { BOOST_REQUIRE(sortedTree[i].size() == sortedNaive[i].size()); for (size_t j = 0; j < sortedTree[i].size(); j++) { BOOST_REQUIRE(sortedTree[i][j].second == sortedNaive[i][j].second); BOOST_REQUIRE_CLOSE(sortedTree[i][j].first, sortedNaive[i][j].first, 1e-5); } } } /** * Test the dual-tree range search method with the naive method. This uses * only a reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(DualTreeVsNaive2) { arma::mat dataForTree; // Hard-coded filename: bad! // Code duplication: also bad! if (!data::Load("test_data_3_1000.csv", dataForTree)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); // Set up matrices to work with. arma::mat dualQuery(dataForTree); arma::mat naiveQuery(dataForTree); RangeSearch<> rs(dualQuery); // Set naive mode. RangeSearch<> naive(naiveQuery, true); vector> neighborsTree; vector> distancesTree; rs.Search(Range(0.25, 1.05), neighborsTree, distancesTree); vector>> sortedTree; SortResults(neighborsTree, distancesTree, sortedTree); vector> neighborsNaive; vector> distancesNaive; naive.Search(Range(0.25, 1.05), neighborsNaive, distancesNaive); vector>> sortedNaive; SortResults(neighborsNaive, distancesNaive, sortedNaive); for (size_t i = 0; i < sortedTree.size(); i++) { BOOST_REQUIRE(sortedTree[i].size() == sortedNaive[i].size()); for (size_t j = 0; j < sortedTree[i].size(); j++) { BOOST_REQUIRE(sortedTree[i][j].second == sortedNaive[i][j].second); BOOST_REQUIRE_CLOSE(sortedTree[i][j].first, sortedNaive[i][j].first, 1e-5); } } } /** * Test the single-tree range search method with the naive method. This * uses only a reference dataset. * * Errors are produced if the results are not identical. */ BOOST_AUTO_TEST_CASE(SingleTreeVsNaive) { arma::mat dataForTree; // Hard-coded filename: bad! // Code duplication: also bad! if (!data::Load("test_data_3_1000.csv", dataForTree)) BOOST_FAIL("Cannot load test dataset test_data_3_1000.csv!"); // Set up matrices to work with (may not be necessary with no ALIAS_MATRIX?). arma::mat singleQuery(dataForTree); arma::mat naiveQuery(dataForTree); RangeSearch<> single(singleQuery, false, true); // Set up computation for naive mode. 
RangeSearch<> naive(naiveQuery, true); vector> neighborsSingle; vector> distancesSingle; single.Search(Range(0.25, 1.05), neighborsSingle, distancesSingle); vector>> sortedTree; SortResults(neighborsSingle, distancesSingle, sortedTree); vector> neighborsNaive; vector> distancesNaive; naive.Search(Range(0.25, 1.05), neighborsNaive, distancesNaive); vector>> sortedNaive; SortResults(neighborsNaive, distancesNaive, sortedNaive); for (size_t i = 0; i < sortedTree.size(); i++) { BOOST_REQUIRE(sortedTree[i].size() == sortedNaive[i].size()); for (size_t j = 0; j < sortedTree[i].size(); j++) { BOOST_REQUIRE(sortedTree[i][j].second == sortedNaive[i][j].second); BOOST_REQUIRE_CLOSE(sortedTree[i][j].first, sortedNaive[i][j].first, 1e-5); } } } /** * Ensure that dual tree range search with cover trees works by comparing * with the kd-tree implementation. */ BOOST_AUTO_TEST_CASE(CoverTreeTest) { arma::mat data; data.randu(8, 1000); // 1000 points in 8 dimensions. // Set up cover tree range search. RangeSearch coversearch(data); // Four trials with different ranges. for (size_t r = 0; r < 4; ++r) { // Set up kd-tree range search. RangeSearch<> kdsearch(data); Range range; switch (r) { case 0: // Includes zero distance. range = Range(0.0, 0.75); break; case 1: // A bounded range on both sides. range = Range(0.5, 1.5); break; case 2: // A range with no upper bound. range = Range(0.8, DBL_MAX); break; case 3: // A range which should have no results. range = Range(15.6, 15.7); break; } // Results for kd-tree search. vector> kdNeighbors; vector> kdDistances; // Results for cover tree search. vector> coverNeighbors; vector> coverDistances; // Clean the tree statistics. CleanTree(*coversearch.ReferenceTree()); // Run the searches. kdsearch.Search(range, kdNeighbors, kdDistances); coversearch.Search(range, coverNeighbors, coverDistances); // Sort before comparison. vector>> kdSorted; vector>> coverSorted; SortResults(kdNeighbors, kdDistances, kdSorted); SortResults(coverNeighbors, coverDistances, coverSorted); // Now compare the results. for (size_t i = 0; i < kdSorted.size(); ++i) { for (size_t j = 0; j < kdSorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(kdSorted[i][j].second, coverSorted[i][j].second); BOOST_REQUIRE_CLOSE(kdSorted[i][j].first, coverSorted[i][j].first, 1e-5); } BOOST_REQUIRE_EQUAL(kdSorted[i].size(), coverSorted[i].size()); } } } /** * Ensure that dual tree range search with cover trees works when using * two datasets. */ BOOST_AUTO_TEST_CASE(CoverTreeTwoDatasetsTest) { arma::mat data; data.randu(8, 1000); // 1000 points in 8 dimensions. arma::mat queries; queries.randu(8, 350); // 350 points in 8 dimensions. // Set up cover tree range search. RangeSearch coversearch(data); // Four trials with different ranges. for (size_t r = 0; r < 4; ++r) { // Set up kd-tree range search. We don't have an easy way to rebuild the // tree, so we'll just reinstantiate it here each loop time. RangeSearch<> kdsearch(data); Range range; switch (r) { case 0: // Includes zero distance. range = Range(0.0, 0.75); break; case 1: // A bounded range on both sides. range = Range(0.85, 1.05); break; case 2: // A range with no upper bound. range = Range(1.35, DBL_MAX); break; case 3: // A range which should have no results. range = Range(15.6, 15.7); break; } // Results for kd-tree search. vector> kdNeighbors; vector> kdDistances; // Results for cover tree search. vector> coverNeighbors; vector> coverDistances; // Clean the trees. CleanTree(*coversearch.ReferenceTree()); // Run the searches. 
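    // Result ordering is traversal-dependent, so both result sets are sorted
    // by (distance, index) pairs before being compared element-by-element.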
coversearch.Search(queries, range, coverNeighbors, coverDistances); kdsearch.Search(queries, range, kdNeighbors, kdDistances); // Sort before comparison. vector>> kdSorted; vector>> coverSorted; SortResults(kdNeighbors, kdDistances, kdSorted); SortResults(coverNeighbors, coverDistances, coverSorted); // Now compare the results. for (size_t i = 0; i < kdSorted.size(); ++i) { for (size_t j = 0; j < kdSorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(kdSorted[i][j].second, coverSorted[i][j].second); BOOST_REQUIRE_CLOSE(kdSorted[i][j].first, coverSorted[i][j].first, 1e-5); } BOOST_REQUIRE_EQUAL(kdSorted[i].size(), coverSorted[i].size()); } } } /** * Ensure that single-tree cover tree range search works. */ BOOST_AUTO_TEST_CASE(CoverTreeSingleTreeTest) { arma::mat data; data.randu(8, 1000); // 1000 points in 8 dimensions. // Set up cover tree range search. RangeSearch coversearch(data, false, true); // Four trials with different ranges. for (size_t r = 0; r < 4; ++r) { // Set up kd-tree range search. RangeSearch<> kdsearch(data); Range range; switch (r) { case 0: // Includes zero distance. range = Range(0.0, 0.75); break; case 1: // A bounded range on both sides. range = Range(0.5, 1.5); break; case 2: // A range with no upper bound. range = Range(0.8, DBL_MAX); break; case 3: // A range which should have no results. range = Range(15.6, 15.7); break; } // Results for kd-tree search. vector> kdNeighbors; vector> kdDistances; // Results for cover tree search. vector> coverNeighbors; vector> coverDistances; // Clean the tree statistics. CleanTree(*coversearch.ReferenceTree()); // Run the searches. kdsearch.Search(range, kdNeighbors, kdDistances); coversearch.Search(range, coverNeighbors, coverDistances); // Sort before comparison. vector>> kdSorted; vector>> coverSorted; SortResults(kdNeighbors, kdDistances, kdSorted); SortResults(coverNeighbors, coverDistances, coverSorted); // Now compare the results. for (size_t i = 0; i < kdSorted.size(); ++i) { for (size_t j = 0; j < kdSorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(kdSorted[i][j].second, coverSorted[i][j].second); BOOST_REQUIRE_CLOSE(kdSorted[i][j].first, coverSorted[i][j].first, 1e-5); } BOOST_REQUIRE_EQUAL(kdSorted[i].size(), coverSorted[i].size()); } } } /** * Ensure that single-tree ball tree range search works. */ BOOST_AUTO_TEST_CASE(SingleBallTreeTest) { arma::mat data; data.randu(8, 1000); // 1000 points in 8 dimensions. // Set up ball tree range search. RangeSearch ballsearch(data, false, true); // Four trials with different ranges. for (size_t r = 0; r < 4; ++r) { // Set up kd-tree range search. RangeSearch<> kdsearch(data); Range range; switch (r) { case 0: // Includes zero distance. range = Range(0.0, 0.75); break; case 1: // A bounded range on both sides. range = Range(0.5, 1.5); break; case 2: // A range with no upper bound. range = Range(0.8, DBL_MAX); break; case 3: // A range which should have no results. range = Range(15.6, 15.7); break; } // Results for kd-tree search. vector> kdNeighbors; vector> kdDistances; // Results for ball tree search. vector> ballNeighbors; vector> ballDistances; // Clean the tree statistics. CleanTree(*ballsearch.ReferenceTree()); // Run the searches. kdsearch.Search(range, kdNeighbors, kdDistances); ballsearch.Search(range, ballNeighbors, ballDistances); // Sort before comparison. vector>> kdSorted; vector>> ballSorted; SortResults(kdNeighbors, kdDistances, kdSorted); SortResults(ballNeighbors, ballDistances, ballSorted); // Now compare the results. 
for (size_t i = 0; i < kdSorted.size(); ++i) { for (size_t j = 0; j < kdSorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(kdSorted[i][j].second, ballSorted[i][j].second); BOOST_REQUIRE_CLOSE(kdSorted[i][j].first, ballSorted[i][j].first, 1e-5); } BOOST_REQUIRE_EQUAL(kdSorted[i].size(), ballSorted[i].size()); } } } /** * Ensure that dual tree range search with ball trees works by comparing * with the kd-tree implementation. */ BOOST_AUTO_TEST_CASE(DualBallTreeTest) { arma::mat data; data.randu(8, 1000); // 1000 points in 8 dimensions. // Set up ball tree range search. RangeSearch ballsearch(data); // Four trials with different ranges. for (size_t r = 0; r < 4; ++r) { // Set up kd-tree range search. RangeSearch<> kdsearch(data); Range range; switch (r) { case 0: // Includes zero distance. range = Range(0.0, 0.75); break; case 1: // A bounded range on both sides. range = Range(0.5, 1.5); break; case 2: // A range with no upper bound. range = Range(0.8, DBL_MAX); break; case 3: // A range which should have no results. range = Range(15.6, 15.7); break; } // Results for kd-tree search. vector> kdNeighbors; vector> kdDistances; // Results for ball tree search. vector> ballNeighbors; vector> ballDistances; // Clean the tree statistics. CleanTree(*ballsearch.ReferenceTree()); // Run the searches. kdsearch.Search(range, kdNeighbors, kdDistances); ballsearch.Search(range, ballNeighbors, ballDistances); // Sort before comparison. vector>> kdSorted; vector>> ballSorted; SortResults(kdNeighbors, kdDistances, kdSorted); SortResults(ballNeighbors, ballDistances, ballSorted); // Now compare the results. for (size_t i = 0; i < kdSorted.size(); ++i) { for (size_t j = 0; j < kdSorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(kdSorted[i][j].second, ballSorted[i][j].second); BOOST_REQUIRE_CLOSE(kdSorted[i][j].first, ballSorted[i][j].first, 1e-5); } BOOST_REQUIRE_EQUAL(kdSorted[i].size(), ballSorted[i].size()); } } } /** * Ensure that dual tree range search with ball trees works when using * two datasets. */ BOOST_AUTO_TEST_CASE(DualBallTreeTest2) { arma::mat data; data.randu(8, 1000); // 1000 points in 8 dimensions. arma::mat queries; queries.randu(8, 350); // 350 points in 8 dimensions. // Set up ball tree range search. RangeSearch ballsearch(data); // Four trials with different ranges. for (size_t r = 0; r < 4; ++r) { // Set up kd-tree range search. We don't have an easy way to rebuild the // tree, so we'll just reinstantiate it here each loop time. RangeSearch<> kdsearch(data); Range range; switch (r) { case 0: // Includes zero distance. range = Range(0.0, 0.75); break; case 1: // A bounded range on both sides. range = Range(0.85, 1.05); break; case 2: // A range with no upper bound. range = Range(1.35, DBL_MAX); break; case 3: // A range which should have no results. range = Range(15.6, 15.7); break; } // Results for kd-tree search. vector> kdNeighbors; vector> kdDistances; // Results for ball tree search. vector> ballNeighbors; vector> ballDistances; // Clean the trees. CleanTree(*ballsearch.ReferenceTree()); // Run the searches. ballsearch.Search(queries, range, ballNeighbors, ballDistances); kdsearch.Search(queries, range, kdNeighbors, kdDistances); // Sort before comparison. vector>> kdSorted; vector>> ballSorted; SortResults(kdNeighbors, kdDistances, kdSorted); SortResults(ballNeighbors, ballDistances, ballSorted); // Now compare the results. 
for (size_t i = 0; i < kdSorted.size(); ++i) { BOOST_REQUIRE_EQUAL(kdSorted[i].size(), ballSorted[i].size()); for (size_t j = 0; j < kdSorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(kdSorted[i][j].second, ballSorted[i][j].second); BOOST_REQUIRE_CLOSE(kdSorted[i][j].first, ballSorted[i][j].first, 1e-5); } } } } /** * Make sure that no results are returned when we build a range search object * with no reference set. */ BOOST_AUTO_TEST_CASE(EmptySearchTest) { RangeSearch rs; vector> neighbors; vector> distances; rs.Search(math::Range(0.0, 10.0), neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.size(), 0); BOOST_REQUIRE_EQUAL(distances.size(), 0); // Now check with a query set. arma::mat querySet = arma::randu(3, 100); BOOST_REQUIRE_THROW(rs.Search(querySet, math::Range(0.0, 10.0), neighbors, distances), std::invalid_argument); } /** * Make sure things work right after Train() is called. */ BOOST_AUTO_TEST_CASE(TrainTest) { RangeSearch<> empty; arma::mat dataset = arma::randu(5, 100); RangeSearch<> baseline(dataset); vector> neighbors, baselineNeighbors; vector> distances, baselineDistances; empty.Train(dataset); empty.Search(math::Range(0.5, 0.7), neighbors, distances); baseline.Search(math::Range(0.5, 0.7), baselineNeighbors, baselineDistances); BOOST_REQUIRE_EQUAL(neighbors.size(), baselineNeighbors.size()); BOOST_REQUIRE_EQUAL(distances.size(), baselineDistances.size()); // Sort the results before comparing. vector>> sorted; vector>> baselineSorted; SortResults(neighbors, distances, sorted); SortResults(baselineNeighbors, baselineDistances, baselineSorted); for (size_t i = 0; i < sorted.size(); ++i) { BOOST_REQUIRE_EQUAL(sorted[i].size(), baselineSorted[i].size()); for (size_t j = 0; j < sorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(sorted[i][j].second, baselineSorted[i][j].second); BOOST_REQUIRE_CLOSE(sorted[i][j].first, baselineSorted[i][j].first, 1e-5); } } } /** * Test training when a tree is given. */ BOOST_AUTO_TEST_CASE(TrainTreeTest) { // Avoid mappings by using the cover tree. typedef RangeSearch RSType; RSType empty; arma::mat dataset = arma::randu(5, 100); RSType baseline(dataset); vector> neighbors, baselineNeighbors; vector> distances, baselineDistances; RSType::Tree tree(dataset); empty.Train(&tree); empty.Search(math::Range(0.5, 0.7), neighbors, distances); baseline.Search(math::Range(0.5, 0.7), baselineNeighbors, baselineDistances); BOOST_REQUIRE_EQUAL(neighbors.size(), baselineNeighbors.size()); BOOST_REQUIRE_EQUAL(distances.size(), baselineDistances.size()); // Sort the results before comparing. vector>> sorted; vector>> baselineSorted; SortResults(neighbors, distances, sorted); SortResults(baselineNeighbors, baselineDistances, baselineSorted); for (size_t i = 0; i < sorted.size(); ++i) { BOOST_REQUIRE_EQUAL(sorted[i].size(), baselineSorted[i].size()); for (size_t j = 0; j < sorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(sorted[i][j].second, baselineSorted[i][j].second); BOOST_REQUIRE_CLOSE(sorted[i][j].first, baselineSorted[i][j].first, 1e-5); } } } /** * Test that training with a tree throws an exception when in naive mode. */ BOOST_AUTO_TEST_CASE(NaiveTrainTreeTest) { RangeSearch<> empty(true); arma::mat dataset = arma::randu(5, 100); RangeSearch<>::Tree tree(dataset); BOOST_REQUIRE_THROW(empty.Train(&tree), std::invalid_argument); } /** * Test that the move constructor works. 
*/ BOOST_AUTO_TEST_CASE(TreeMoveConstructorTest) { arma::mat dataset = arma::randu(3, 100); arma::mat copy(dataset); RangeSearch<> movers(std::move(copy)); RangeSearch<> rs(dataset); BOOST_REQUIRE_EQUAL(copy.n_elem, 0); BOOST_REQUIRE_EQUAL(movers.ReferenceSet().n_rows, 3); BOOST_REQUIRE_EQUAL(movers.ReferenceSet().n_cols, 100); vector> moveNeighbors, neighbors; vector> moveDistances, distances; movers.Search(math::Range(0.5, 0.7), moveNeighbors, moveDistances); rs.Search(math::Range(0.5, 0.7), neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.size(), moveNeighbors.size()); BOOST_REQUIRE_EQUAL(distances.size(), moveDistances.size()); // Sort the results before comparing. vector>> sorted; vector>> moveSorted; SortResults(neighbors, distances, sorted); SortResults(moveNeighbors, moveDistances, moveSorted); for (size_t i = 0; i < sorted.size(); ++i) { BOOST_REQUIRE_EQUAL(sorted[i].size(), moveSorted[i].size()); for (size_t j = 0; j < sorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(sorted[i][j].second, moveSorted[i][j].second); BOOST_REQUIRE_CLOSE(sorted[i][j].first, moveSorted[i][j].first, 1e-5); } } } /** * Test that the std::move() Train() function works. */ BOOST_AUTO_TEST_CASE(MoveTrainTest) { arma::mat dataset = arma::randu(3, 100); arma::mat copy(dataset); RangeSearch<> movers; movers.Train(std::move(copy)); RangeSearch<> rs(dataset); BOOST_REQUIRE_EQUAL(copy.n_elem, 0); BOOST_REQUIRE_EQUAL(movers.ReferenceSet().n_rows, 3); BOOST_REQUIRE_EQUAL(movers.ReferenceSet().n_cols, 100); vector> moveNeighbors, neighbors; vector> moveDistances, distances; movers.Search(math::Range(0.5, 0.7), moveNeighbors, moveDistances); rs.Search(math::Range(0.5, 0.7), neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.size(), moveNeighbors.size()); BOOST_REQUIRE_EQUAL(distances.size(), moveDistances.size()); // Sort the results before comparing. vector>> sorted; vector>> moveSorted; SortResults(neighbors, distances, sorted); SortResults(moveNeighbors, moveDistances, moveSorted); for (size_t i = 0; i < sorted.size(); ++i) { BOOST_REQUIRE_EQUAL(sorted[i].size(), moveSorted[i].size()); for (size_t j = 0; j < sorted[i].size(); ++j) { BOOST_REQUIRE_EQUAL(sorted[i][j].second, moveSorted[i][j].second); BOOST_REQUIRE_CLOSE(sorted[i][j].first, moveSorted[i][j].first, 1e-5); } } } BOOST_AUTO_TEST_CASE(RSModelTest) { // Ensure that we can build an RSModel and get correct results. arma::mat queryData = arma::randu(10, 50); arma::mat referenceData = arma::randu(10, 200); // Build all the possible models. 
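  // 28 models = 14 tree types, each built both with and without a random
  // projection basis (the boolean constructor argument).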
RSModel models[28]; models[0] = RSModel(RSModel::TreeTypes::KD_TREE, true); models[1] = RSModel(RSModel::TreeTypes::KD_TREE, false); models[2] = RSModel(RSModel::TreeTypes::COVER_TREE, true); models[3] = RSModel(RSModel::TreeTypes::COVER_TREE, false); models[4] = RSModel(RSModel::TreeTypes::R_TREE, true); models[5] = RSModel(RSModel::TreeTypes::R_TREE, false); models[6] = RSModel(RSModel::TreeTypes::R_STAR_TREE, true); models[7] = RSModel(RSModel::TreeTypes::R_STAR_TREE, false); models[8] = RSModel(RSModel::TreeTypes::X_TREE, true); models[9] = RSModel(RSModel::TreeTypes::X_TREE, false); models[10] = RSModel(RSModel::TreeTypes::BALL_TREE, true); models[11] = RSModel(RSModel::TreeTypes::BALL_TREE, false); models[12] = RSModel(RSModel::TreeTypes::HILBERT_R_TREE, true); models[13] = RSModel(RSModel::TreeTypes::HILBERT_R_TREE, false); models[14] = RSModel(RSModel::TreeTypes::R_PLUS_TREE, true); models[15] = RSModel(RSModel::TreeTypes::R_PLUS_TREE, false); models[16] = RSModel(RSModel::TreeTypes::R_PLUS_PLUS_TREE, true); models[17] = RSModel(RSModel::TreeTypes::R_PLUS_PLUS_TREE, false); models[18] = RSModel(RSModel::TreeTypes::VP_TREE, true); models[19] = RSModel(RSModel::TreeTypes::VP_TREE, false); models[20] = RSModel(RSModel::TreeTypes::RP_TREE, true); models[21] = RSModel(RSModel::TreeTypes::RP_TREE, false); models[22] = RSModel(RSModel::TreeTypes::MAX_RP_TREE, true); models[23] = RSModel(RSModel::TreeTypes::MAX_RP_TREE, false); models[24] = RSModel(RSModel::TreeTypes::UB_TREE, true); models[25] = RSModel(RSModel::TreeTypes::UB_TREE, false); models[26] = RSModel(RSModel::TreeTypes::OCTREE, true); models[27] = RSModel(RSModel::TreeTypes::OCTREE, false); for (size_t j = 0; j < 2; ++j) { // Get a baseline. RangeSearch<> rs(referenceData); vector> baselineNeighbors; vector> baselineDistances; rs.Search(queryData, math::Range(0.25, 0.75), baselineNeighbors, baselineDistances); vector>> baselineSorted; SortResults(baselineNeighbors, baselineDistances, baselineSorted); for (size_t i = 0; i < 28; ++i) { // We only have std::move() constructors, so make a copy of our data. arma::mat referenceCopy(referenceData); arma::mat queryCopy(queryData); if (j == 0) models[i].BuildModel(std::move(referenceCopy), 5, false, false); else if (j == 1) models[i].BuildModel(std::move(referenceCopy), 5, false, true); else if (j == 2) models[i].BuildModel(std::move(referenceCopy), 5, true, false); vector> neighbors; vector> distances; models[i].Search(std::move(queryCopy), math::Range(0.25, 0.75), neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.size(), baselineNeighbors.size()); BOOST_REQUIRE_EQUAL(distances.size(), baselineDistances.size()); vector>> sorted; SortResults(neighbors, distances, sorted); for (size_t k = 0; k < sorted.size(); ++k) { BOOST_REQUIRE_EQUAL(sorted[k].size(), baselineSorted[k].size()); for (size_t l = 0; l < sorted[k].size(); ++l) { BOOST_REQUIRE_EQUAL(sorted[k][l].second, baselineSorted[k][l].second); BOOST_REQUIRE_CLOSE(sorted[k][l].first, baselineSorted[k][l].first, 1e-5); } } } } } BOOST_AUTO_TEST_CASE(RSModelMonochromaticTest) { // Ensure that we can build an RSModel and get correct results. arma::mat referenceData = arma::randu(10, 200); // Build all the possible models. 
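  // This is the monochromatic variant of the test above: no separate query
  // set is given, so each model searches the reference set against itself.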
RSModel models[28]; models[0] = RSModel(RSModel::TreeTypes::KD_TREE, true); models[1] = RSModel(RSModel::TreeTypes::KD_TREE, false); models[2] = RSModel(RSModel::TreeTypes::COVER_TREE, true); models[3] = RSModel(RSModel::TreeTypes::COVER_TREE, false); models[4] = RSModel(RSModel::TreeTypes::R_TREE, true); models[5] = RSModel(RSModel::TreeTypes::R_TREE, false); models[6] = RSModel(RSModel::TreeTypes::R_STAR_TREE, true); models[7] = RSModel(RSModel::TreeTypes::R_STAR_TREE, false); models[8] = RSModel(RSModel::TreeTypes::X_TREE, true); models[9] = RSModel(RSModel::TreeTypes::X_TREE, false); models[10] = RSModel(RSModel::TreeTypes::BALL_TREE, true); models[11] = RSModel(RSModel::TreeTypes::BALL_TREE, false); models[12] = RSModel(RSModel::TreeTypes::HILBERT_R_TREE, true); models[13] = RSModel(RSModel::TreeTypes::HILBERT_R_TREE, false); models[14] = RSModel(RSModel::TreeTypes::R_PLUS_TREE, true); models[15] = RSModel(RSModel::TreeTypes::R_PLUS_TREE, false); models[16] = RSModel(RSModel::TreeTypes::R_PLUS_PLUS_TREE, true); models[17] = RSModel(RSModel::TreeTypes::R_PLUS_PLUS_TREE, false); models[18] = RSModel(RSModel::TreeTypes::VP_TREE, true); models[19] = RSModel(RSModel::TreeTypes::VP_TREE, false); models[20] = RSModel(RSModel::TreeTypes::RP_TREE, true); models[21] = RSModel(RSModel::TreeTypes::RP_TREE, false); models[22] = RSModel(RSModel::TreeTypes::MAX_RP_TREE, true); models[23] = RSModel(RSModel::TreeTypes::MAX_RP_TREE, false); models[24] = RSModel(RSModel::TreeTypes::MAX_RP_TREE, true); models[25] = RSModel(RSModel::TreeTypes::MAX_RP_TREE, false); models[26] = RSModel(RSModel::TreeTypes::OCTREE, true); models[27] = RSModel(RSModel::TreeTypes::OCTREE, false); for (size_t j = 0; j < 2; ++j) { // Get a baseline. RangeSearch<> rs(referenceData); vector> baselineNeighbors; vector> baselineDistances; rs.Search(math::Range(0.25, 0.5), baselineNeighbors, baselineDistances); vector>> baselineSorted; SortResults(baselineNeighbors, baselineDistances, baselineSorted); for (size_t i = 0; i < 28; ++i) { // We only have std::move() cosntructors, so make a copy of our data. arma::mat referenceCopy(referenceData); if (j == 0) models[i].BuildModel(std::move(referenceCopy), 5, false, false); else if (j == 1) models[i].BuildModel(std::move(referenceCopy), 5, false, true); else if (j == 2) models[i].BuildModel(std::move(referenceCopy), 5, true, false); vector> neighbors; vector> distances; models[i].Search(math::Range(0.25, 0.5), neighbors, distances); BOOST_REQUIRE_EQUAL(neighbors.size(), baselineNeighbors.size()); BOOST_REQUIRE_EQUAL(distances.size(), baselineDistances.size()); vector>> sorted; SortResults(neighbors, distances, sorted); for (size_t k = 0; k < sorted.size(); ++k) { BOOST_REQUIRE_EQUAL(sorted[k].size(), baselineSorted[k].size()); for (size_t l = 0; l < sorted[k].size(); ++l) { BOOST_REQUIRE_EQUAL(sorted[k][l].second, baselineSorted[k][l].second); BOOST_REQUIRE_CLOSE(sorted[k][l].first, baselineSorted[k][l].first, 1e-5); } } } } } /** * Make sure that the neighborPtr matrix isn't accidentally deleted. * See issue #478. */ BOOST_AUTO_TEST_CASE(NeighborPtrDeleteTest) { arma::mat dataset = arma::randu(5, 100); // Build the tree ourselves. vector oldFromNewReferences; RangeSearch<>::Tree tree(dataset); RangeSearch<> ra(&tree); // Now make a query set. 
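  // Searching with a separate query set exercises the code path from issue
  // #478, where results could be accidentally deleted when the reference tree
  // was built and owned by the caller.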
  arma::mat queryset = arma::randu<arma::mat>(5, 50);

  vector<vector<double>> distances;
  vector<vector<size_t>> neighbors;
  ra.Search(queryset, math::Range(0.2, 0.5), neighbors, distances);

  // These will (hopefully) fail if either the neighbors or the distances
  // matrix has been accidentally deleted.
  BOOST_REQUIRE_EQUAL(neighbors.size(), 50);
  BOOST_REQUIRE_EQUAL(distances.size(), 50);
}

/**
 * Make sure the copy constructor works.
 */
BOOST_AUTO_TEST_CASE(CopyConstructorTest)
{
  arma::mat dataset(5, 100, arma::fill::randu);

  RangeSearch<> r(dataset);
  RangeSearch<> r2(r);

  vector<vector<double>> distances, distances2;
  vector<vector<size_t>> neighbors, neighbors2;
  r.Search(math::Range(0.2, 0.5), neighbors, distances);
  r2.Search(math::Range(0.2, 0.5), neighbors2, distances2);

  BOOST_REQUIRE_EQUAL(neighbors.size(), neighbors2.size());
  BOOST_REQUIRE_EQUAL(distances.size(), distances2.size());

  for (size_t i = 0; i < neighbors.size(); ++i)
  {
    BOOST_REQUIRE_EQUAL(neighbors[i].size(), neighbors2[i].size());
    BOOST_REQUIRE_EQUAL(distances[i].size(), distances2[i].size());
    for (size_t j = 0; j < neighbors[i].size(); ++j)
    {
      BOOST_REQUIRE_EQUAL(neighbors[i][j], neighbors2[i][j]);
      BOOST_REQUIRE_CLOSE(distances[i][j], distances2[i][j], 1e-5);
    }
  }
}

/**
 * Make sure the move constructor works.
 */
BOOST_AUTO_TEST_CASE(MoveConstructorTest)
{
  arma::mat dataset(5, 100, arma::fill::randu);

  RangeSearch<> r(dataset);
  RangeSearch<> rCopy(r);
  RangeSearch<> r2(std::move(rCopy));

  vector<vector<double>> distances, distancesCopy, distances2;
  vector<vector<size_t>> neighbors, neighborsCopy, neighbors2;

  // Search with all three objects. The second should give no results.
  r.Search(math::Range(0.2, 0.5), neighbors, distances);
  rCopy.Search(math::Range(0.2, 0.5), neighborsCopy, distancesCopy);
  r2.Search(math::Range(0.2, 0.5), neighbors2, distances2);

  BOOST_REQUIRE_EQUAL(distancesCopy.size(), 0);
  BOOST_REQUIRE_EQUAL(neighborsCopy.size(), 0);

  BOOST_REQUIRE_EQUAL(distances.size(), distances2.size());
  BOOST_REQUIRE_EQUAL(neighbors.size(), neighbors2.size());

  for (size_t i = 0; i < neighbors.size(); ++i)
  {
    BOOST_REQUIRE_EQUAL(neighbors[i].size(), neighbors2[i].size());
    BOOST_REQUIRE_EQUAL(distances[i].size(), distances2[i].size());
    for (size_t j = 0; j < neighbors[i].size(); ++j)
    {
      BOOST_REQUIRE_EQUAL(neighbors[i][j], neighbors2[i][j]);
      BOOST_REQUIRE_CLOSE(distances[i][j], distances2[i][j], 1e-5);
    }
  }
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/rectangle_tree_test.cpp

/**
 * @file rectangle_tree_test.cpp
 * @author Andrew Wells
 *
 * Tests for the RectangleTree class. This should ensure that the class works
 * correctly and that subsequent changes don't break anything. Because it's
 * only used to test the trees, it is slow.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/tree/tree_traits.hpp>
#include <mlpack/core/tree/rectangle_tree.hpp>
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::neighbor;
using namespace mlpack::tree;
using namespace mlpack::metric;

BOOST_AUTO_TEST_SUITE(RectangleTreeTest);

// Test the traits on RectangleTrees.
BOOST_AUTO_TEST_CASE(RectangleTreeTraitsTest)
{
  // Children may be overlapping.
  bool b = TreeTraits<RTree<EuclideanDistance,
      NeighborSearchStat<NearestNeighborSort>,
      arma::mat>>::HasOverlappingChildren;
  BOOST_REQUIRE_EQUAL(b, true);

  // Points are not contained in multiple levels.
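  // (HasSelfChildren would be true for a structure like the cover tree, where
  // the point of a node also appears in one of its children; rectangle trees
  // store each point in exactly one leaf.)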
  // Points are not contained in multiple levels.
  b = TreeTraits<RTree<EuclideanDistance, EmptyStatistic,
      arma::mat>>::HasSelfChildren;
  BOOST_REQUIRE_EQUAL(b, false);
}

// Test to make sure the tree contains the correct number of points after it is
// constructed.
BOOST_AUTO_TEST_CASE(RectangleTreeConstructionCountTest)
{
  arma::mat dataset;
  dataset.randu(3, 1000); // 1000 points in 3 dimensions.

  typedef RTree<EuclideanDistance, NeighborSearchStat<NearestNeighborSort>,
      arma::mat> TreeType;
  TreeType tree(dataset, 20, 6, 5, 2, 0);
  TreeType tree2 = tree;

  BOOST_REQUIRE_EQUAL(tree.NumDescendants(), 1000);
  BOOST_REQUIRE_EQUAL(tree2.NumDescendants(), 1000);
}

/**
 * A function to return a std::vector containing pointers to each point in the
 * tree.
 *
 * @param tree The tree that we want to extract all of the points from.
 * @return A vector containing pointers to each point in this tree.
 */
template<typename TreeType>
std::vector<arma::vec*> GetAllPointsInTree(const TreeType& tree)
{
  std::vector<arma::vec*> vec;
  if (tree.NumChildren() > 0)
  {
    for (size_t i = 0; i < tree.NumChildren(); i++)
    {
      std::vector<arma::vec*> tmp = GetAllPointsInTree(tree.Child(i));
      vec.insert(vec.begin(), tmp.begin(), tmp.end());
    }
  }
  else
  {
    for (size_t i = 0; i < tree.Count(); i++)
    {
      arma::vec* c = new arma::vec(tree.Dataset().col(tree.Point(i)));
      vec.push_back(c);
    }
  }
  return vec;
}

// Test to ensure that none of the points in the tree are duplicates.  This,
// combined with the above test to see how many points are in the tree, should
// ensure that we inserted all points.
BOOST_AUTO_TEST_CASE(RectangleTreeConstructionRepeatTest)
{
  arma::mat dataset;
  dataset.randu(8, 1000); // 1000 points in 8 dimensions.

  typedef RTree<EuclideanDistance, NeighborSearchStat<NearestNeighborSort>,
      arma::mat> TreeType;
  TreeType tree(dataset, 20, 6, 5, 2, 0);

  std::vector<arma::vec*> allPoints = GetAllPointsInTree(tree);
  for (size_t i = 0; i < allPoints.size(); i++)
  {
    for (size_t j = i + 1; j < allPoints.size(); j++)
    {
      arma::vec v1 = *(allPoints[i]);
      arma::vec v2 = *(allPoints[j]);
      bool same = true;
      for (size_t k = 0; k < v1.n_rows; k++)
        same &= (v1[k] == v2[k]);

      BOOST_REQUIRE_NE(same, true);
    }
  }

  for (size_t i = 0; i < allPoints.size(); i++)
    delete allPoints[i];
}

/**
 * A function to check that each non-leaf node fully encloses its child nodes
 * and that each leaf node encloses its points.  It recurses so that it checks
 * each node under (and including) this one.
 *
 * @param tree The tree to check.
 */
template<typename TreeType>
void CheckContainment(const TreeType& tree)
{
  if (tree.NumChildren() == 0)
  {
    for (size_t i = 0; i < tree.Count(); i++)
      BOOST_REQUIRE(tree.Bound().Contains(
          tree.Dataset().unsafe_col(tree.Point(i))));
  }
  else
  {
    for (size_t i = 0; i < tree.NumChildren(); i++)
    {
      for (size_t j = 0; j < tree.Bound().Dim(); j++)
      {
        // All children should be covered by the parent node.
        // Some children can be empty (only in case of the R++ tree).
        bool success = (tree.Child(i).Bound()[j].Hi() ==
            std::numeric_limits<double>::lowest() &&
            tree.Child(i).Bound()[j].Lo() ==
            std::numeric_limits<double>::max()) ||
            tree.Bound()[j].Contains(tree.Child(i).Bound()[j]);
        BOOST_REQUIRE(success);
      }

      CheckContainment(tree.Child(i));
    }
  }
}

/**
 * A function to check that containment is as tight as possible.
 */
template<typename TreeType>
void CheckExactContainment(const TreeType& tree)
{
  if (tree.NumChildren() == 0)
  {
    for (size_t i = 0; i < tree.Bound().Dim(); i++)
    {
      double min = DBL_MAX;
      double max = -1.0 * DBL_MAX;
      for (size_t j = 0; j < tree.Count(); j++)
      {
        if (tree.Dataset().col(tree.Point(j))[i] < min)
          min = tree.Dataset().col(tree.Point(j))[i];
        if (tree.Dataset().col(tree.Point(j))[i] > max)
          max = tree.Dataset().col(tree.Point(j))[i];
      }

      BOOST_REQUIRE_EQUAL(max, tree.Bound()[i].Hi());
      BOOST_REQUIRE_EQUAL(min, tree.Bound()[i].Lo());
    }
  }
  else
  {
    for (size_t i = 0; i < tree.Bound().Dim(); i++)
    {
      double min = DBL_MAX;
      double max = -1.0 * DBL_MAX;
      for (size_t j = 0; j < tree.NumChildren(); j++)
      {
        if (tree.Child(j).Bound()[i].Lo() < min)
          min = tree.Child(j).Bound()[i].Lo();
        if (tree.Child(j).Bound()[i].Hi() > max)
          max = tree.Child(j).Bound()[i].Hi();
      }

      BOOST_REQUIRE_EQUAL(max, tree.Bound()[i].Hi());
      BOOST_REQUIRE_EQUAL(min, tree.Bound()[i].Lo());
    }

    for (size_t i = 0; i < tree.NumChildren(); i++)
      CheckExactContainment(tree.Child(i));
  }
}

/**
 * A function to check that parents and children are set correctly.
 */
template<typename TreeType>
void CheckHierarchy(const TreeType& tree)
{
  for (size_t i = 0; i < tree.NumChildren(); i++)
  {
    BOOST_REQUIRE_EQUAL(&tree, tree.Child(i).Parent());
    CheckHierarchy(tree.Child(i));
  }
}

// Test to see if the bounds of the tree are correct.  (Cover all bounds and
// points beneath this node of the tree).
BOOST_AUTO_TEST_CASE(RectangleTreeContainmentTest)
{
  arma::mat dataset;
  dataset.randu(8, 1000); // 1000 points in 8 dimensions.

  typedef RTree<EuclideanDistance, NeighborSearchStat<NearestNeighborSort>,
      arma::mat> TreeType;
  TreeType tree(dataset, 20, 6, 5, 2, 0);
  CheckContainment(tree);
  CheckExactContainment(tree);
}

/**
 * A function to check that each of the fill requirements is met.  For a
 * non-leaf node:
 *
 *   MinNumChildren() <= NumChildren() <= MaxNumChildren()
 *
 * For a leaf node:
 *
 *   MinLeafSize() <= Count() <= MaxLeafSize()
 *
 * It recurses so that it checks each node under (and including) this one.
 *
 * @param tree The tree to check.
 */
template<typename TreeType>
void CheckFills(const TreeType& tree)
{
  if (tree.IsLeaf())
  {
    BOOST_REQUIRE(tree.Count() >= tree.MinLeafSize() || tree.Parent() == NULL);
    BOOST_REQUIRE(tree.Count() <= tree.MaxLeafSize());
  }
  else
  {
    for (size_t i = 0; i < tree.NumChildren(); i++)
    {
      BOOST_REQUIRE(tree.NumChildren() >= tree.MinNumChildren() ||
          tree.Parent() == NULL);
      BOOST_REQUIRE(tree.NumChildren() <= tree.MaxNumChildren());
      CheckFills(tree.Child(i));
    }
  }
}

// Test to ensure that the minimum and maximum fills are satisfied.
BOOST_AUTO_TEST_CASE(CheckMinAndMaxFills)
{
  arma::mat dataset;
  dataset.randu(8, 1000); // 1000 points in 8 dimensions.

  typedef RTree<EuclideanDistance, NeighborSearchStat<NearestNeighborSort>,
      arma::mat> TreeType;
  TreeType tree(dataset, 20, 6, 5, 2, 0);
  CheckFills(tree);
}

/**
 * A function to get the height of this tree.  Though it should equal
 * tree.TreeDepth(), we ensure that every leaf node is on the same level by
 * doing it this way.
 *
 * @param tree The tree for which we want the height.
 * @return The height of this tree.
 */
template<typename TreeType>
int GetMaxLevel(const TreeType& tree)
{
  int max = 1;
  if (!tree.IsLeaf())
  {
    int m = 0;
    for (size_t i = 0; i < tree.NumChildren(); i++)
    {
      int n = GetMaxLevel(tree.Child(i));
      if (n > m)
        m = n;
    }
    max += m;
  }

  return max;
}

/**
 * A function to get the "shortest height" of this tree.  Though it should
 * equal tree.TreeDepth(), we ensure that every leaf node is on the same level
 * by doing it this way.
 *
 * @param tree The tree for which we want the height.
 * @return The "shortest height" of the tree.
*/ template int GetMinLevel(const TreeType& tree) { int min = 1; if (!tree.IsLeaf()) { int m = INT_MAX; for (size_t i = 0; i < tree.NumChildren(); i++) { int n = GetMinLevel(tree.Child(i)); if (n < m) m = n; } min += m; } return min; } /** * A function to check that numDescendants values are set correctly. */ template size_t CheckNumDescendants(const TreeType& tree) { if (tree.IsLeaf()) { BOOST_REQUIRE_EQUAL(tree.NumDescendants(), tree.Count()); return tree.Count(); } size_t numDescendants = 0; for (size_t i = 0; i < tree.NumChildren(); i++) numDescendants += CheckNumDescendants(tree.Child(i)); BOOST_REQUIRE_EQUAL(tree.NumDescendants(), numDescendants); return numDescendants; } // A test to ensure that all leaf nodes are stored on the same level of the // tree. BOOST_AUTO_TEST_CASE(TreeBalance) { arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. typedef RTree, arma::mat> TreeType; TreeType tree(dataset, 20, 6, 5, 2, 0); BOOST_REQUIRE_EQUAL(GetMinLevel(tree), GetMaxLevel(tree)); BOOST_REQUIRE_EQUAL(tree.TreeDepth(), GetMinLevel(tree)); } // A test to see if point deletion is working correctly. We build a tree, then // delete numIter points and test that the query gives correct results. It is // remotely possible that this test will give a false negative if it should // happen that two points are the same distance from a third point. BOOST_AUTO_TEST_CASE(PointDeletion) { arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. arma::mat querySet; querySet.randu(8, 500); const int numIter = 50; typedef RTree, arma::mat> TreeType; TreeType tree(dataset, 20, 6, 5, 2, 0); for (int i = 0; i < numIter; i++) tree.DeletePoint(999 - i); // Do a few sanity checks. Ensure each point is unique, the tree has the // correct number of points, the tree has legal containment, and the tree's // data is in sync. std::vector allPoints = GetAllPointsInTree(tree); for (size_t i = 0; i < allPoints.size(); i++) { for (size_t j = i + 1; j < allPoints.size(); j++) { arma::vec v1 = *(allPoints[i]); arma::vec v2 = *(allPoints[j]); bool same = true; for (size_t k = 0; k < v1.n_rows; k++) same &= (v1[k] == v2[k]); BOOST_REQUIRE(!same); } } for (size_t i = 0; i < allPoints.size(); i++) delete allPoints[i]; BOOST_REQUIRE_EQUAL(tree.NumDescendants(), 1000 - numIter); CheckContainment(tree); CheckExactContainment(tree); CheckNumDescendants(tree); // Single-tree search. NeighborSearch, arma::mat, RTree> knn1(std::move(tree), SINGLE_TREE_MODE); arma::Mat neighbors1; arma::mat distances1; knn1.Search(querySet, 5, neighbors1, distances1); arma::mat newDataset; newDataset = dataset; newDataset.resize(8, 1000-numIter); arma::Mat neighbors2; arma::mat distances2; // Nearest neighbor search the naive way. KNN knn2(newDataset, NAIVE_MODE); knn2.Search(querySet, 5, neighbors2, distances2); for (size_t i = 0; i < neighbors1.size(); i++) { BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]); BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]); } } // A test to see if dynamic point insertion is working correctly. // We build a tree, then add numIter points and test that the query gives // correct results. It is remotely possible that this test will give a false // negative if it should happen that two points are the same distance from a // third point. Note that this is extremely inefficient. You should not use // dynamic insertion until a better solution for resizing matrices is available. 
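// The dynamic insertion pattern exercised below, sketched in isolation
// (assuming a tree type whose Dataset() can be resized, as for this R tree;
// the names dims, oldCols, and newPoint are illustrative):
//
//   tree.Dataset().reshape(dims, oldCols + 1); // Grow the matrix first.
//   tree.Dataset().col(oldCols) = newPoint;    // Then write the new column...
//   tree.InsertPoint(oldCols);                 // ...and index it in the tree.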
BOOST_AUTO_TEST_CASE(PointDynamicAdd) { const int numIter = 50; arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. typedef RTree, arma::mat> TreeType; TreeType tree(dataset, 20, 6, 5, 2, 0); // Add numIter new points to the dataset. The tree copies the dataset, so we // must modify both the original dataset and the one that the tree holds. // (This API is clunky. It should be redone sometime.) tree.Dataset().reshape(8, 1000 + numIter); dataset.reshape(8, 1000 + numIter); arma::mat tmpData; tmpData.randu(8, numIter); for (int i = 0; i < numIter; i++) { tree.Dataset().col(1000 + i) = tmpData.col(i); dataset.col(1000 + i) = tmpData.col(i); tree.InsertPoint(1000 + i); } // Do a few sanity checks. Ensure each point is unique, the tree has the // correct number of points, the tree has legal containment, and the tree's // data is in sync. std::vector allPoints = GetAllPointsInTree(tree); for (size_t i = 0; i < allPoints.size(); i++) { for (size_t j = i + 1; j < allPoints.size(); j++) { arma::vec v1 = *(allPoints[i]); arma::vec v2 = *(allPoints[j]); bool same = true; for (size_t k = 0; k < v1.n_rows; k++) same &= (v1[k] == v2[k]); BOOST_REQUIRE(!same); } } for (size_t i = 0; i < allPoints.size(); i++) delete allPoints[i]; BOOST_REQUIRE_EQUAL(tree.NumDescendants(), 1000 + numIter); CheckContainment(tree); CheckExactContainment(tree); CheckNumDescendants(tree); // Now we will compare the output of the R Tree vs the output of a naive // search. arma::Mat neighbors1; arma::mat distances1; arma::Mat neighbors2; arma::mat distances2; // Nearest neighbor search with the R tree. NeighborSearch, arma::mat, RTree> knn1(std::move(tree), SINGLE_TREE_MODE); knn1.Search(5, neighbors1, distances1); // Nearest neighbor search the naive way. KNN knn2(dataset, NAIVE_MODE); knn2.Search(5, neighbors2, distances2); for (size_t i = 0; i < neighbors1.size(); i++) { BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]); BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]); } } // A test to ensure that the SingleTreeTraverser is working correctly by // comparing its results to the results of a naive search. BOOST_AUTO_TEST_CASE(SingleTreeTraverserTest) { arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. arma::Mat neighbors1; arma::mat distances1; arma::Mat neighbors2; arma::mat distances2; typedef RStarTree, arma::mat> TreeType; TreeType rTree(dataset, 20, 6, 5, 2, 0); BOOST_REQUIRE_EQUAL(rTree.NumDescendants(), 1000); CheckContainment(rTree); CheckExactContainment(rTree); CheckHierarchy(rTree); CheckNumDescendants(rTree); // Nearest neighbor search with the R tree. NeighborSearch, arma::mat, RStarTree> knn1(std::move(rTree), SINGLE_TREE_MODE); knn1.Search(5, neighbors1, distances1); // Nearest neighbor search the naive way. KNN knn2(dataset, NAIVE_MODE); knn2.Search(5, neighbors2, distances2); for (size_t i = 0; i < neighbors1.size(); i++) { BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]); BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]); } } // A test to ensure that the SingleTreeTraverser is working correctly by // comparing its results to the results of a naive search. BOOST_AUTO_TEST_CASE(XTreeTraverserTest) { arma::mat dataset; const int numP = 1000; dataset.randu(8, numP); // 1000 points in 8 dimensions. 
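  // (This test repeats the recipe used throughout this suite: build the tree,
  // run single-tree k-NN through it, and require that every neighbor index
  // and distance matches an exhaustive NAIVE_MODE search on the same data.)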
arma::Mat neighbors1; arma::mat distances1; arma::Mat neighbors2; arma::mat distances2; typedef XTree, arma::mat> TreeType; TreeType xTree(dataset, 20, 6, 5, 2, 0); BOOST_REQUIRE_EQUAL(xTree.NumDescendants(), numP); CheckContainment(xTree); CheckExactContainment(xTree); CheckHierarchy(xTree); CheckNumDescendants(xTree); // Nearest neighbor search with the X tree. NeighborSearch, arma::mat, XTree> knn1(std::move(xTree), SINGLE_TREE_MODE); knn1.Search(5, neighbors1, distances1); // Nearest neighbor search the naive way. KNN knn2(dataset, NAIVE_MODE); knn2.Search(5, neighbors2, distances2); for (size_t i = 0; i < neighbors1.size(); i++) { BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]); BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]); } } BOOST_AUTO_TEST_CASE(HilbertRTreeTraverserTest) { arma::mat dataset; const int numP = 1000; dataset.randu(8, numP); // 1000 points in 8 dimensions. arma::Mat neighbors1; arma::mat distances1; arma::Mat neighbors2; arma::mat distances2; typedef HilbertRTree, arma::mat> TreeType; TreeType hilbertRTree(dataset, 20, 6, 5, 2, 0); BOOST_REQUIRE_EQUAL(hilbertRTree.NumDescendants(), numP); CheckContainment(hilbertRTree); CheckExactContainment(hilbertRTree); CheckHierarchy(hilbertRTree); CheckNumDescendants(hilbertRTree); // Nearest neighbor search with the Hilbert R tree. NeighborSearch, arma::mat, HilbertRTree> knn1(std::move(hilbertRTree), SINGLE_TREE_MODE); knn1.Search(5, neighbors1, distances1); // Nearest neighbor search the naive way. KNN knn2(dataset, NAIVE_MODE); knn2.Search(5, neighbors2, distances2); for (size_t i = 0; i < neighbors1.size(); i++) { BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]); BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]); } } template void CheckHilbertOrdering(const TreeType& tree) { if (tree.IsLeaf()) { for (size_t i = 0; i < tree.NumPoints() - 1; i++) BOOST_REQUIRE_LE(tree.AuxiliaryInfo().HilbertValue().ComparePoints( tree.Dataset().col(tree.Point(i)), tree.Dataset().col(tree.Point(i + 1))), 0); BOOST_REQUIRE_EQUAL(tree.AuxiliaryInfo().HilbertValue().CompareWith( tree.Dataset().col(tree.Point(tree.NumPoints() - 1))), 0); } else { for (size_t i = 0; i < tree.NumChildren() - 1; i++) BOOST_REQUIRE_LE(tree.AuxiliaryInfo().HilbertValue().CompareValues( tree.Child(i).AuxiliaryInfo().HilbertValue(), tree.Child(i + 1).AuxiliaryInfo().HilbertValue()), 0); BOOST_REQUIRE_EQUAL(tree.AuxiliaryInfo().HilbertValue().CompareWith( tree.Child(tree.NumChildren() - 1).AuxiliaryInfo().HilbertValue()), 0); for (size_t i = 0; i < tree.NumChildren(); i++) CheckHilbertOrdering(tree.Child(i)); } } BOOST_AUTO_TEST_CASE(HilbertRTreeOrderingTest) { arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. 
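  // CheckHilbertOrdering() above enforces two invariants: entries within a
  // node appear in nondecreasing Hilbert-value order, and a node's own value
  // equals that of its last entry.  A plain-array analogue of the check:
  //
  //   std::vector<int> values = {1, 3, 3, 7};
  //   assert(std::is_sorted(values.begin(), values.end()));
  //   assert(values.back() == 7); // "Node value" == largest entry value.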
typedef HilbertRTree, arma::mat> TreeType; TreeType hilbertRTree(dataset, 20, 6, 5, 2, 0); CheckHilbertOrdering(hilbertRTree); } template void CheckDiscreteHilbertValueSync(const TreeType& tree) { typedef DiscreteHilbertValue HilbertValue; typedef typename HilbertValue::HilbertElemType HilbertElemType; if (tree.IsLeaf()) { const HilbertValue& value = tree.AuxiliaryInfo().HilbertValue(); for (size_t i = 0; i < tree.NumPoints(); i++) { arma::Col pointValue = HilbertValue::CalculateValue(tree.Dataset().col(tree.Point(i))); const int equal = HilbertValue::CompareValues( value.LocalHilbertValues()->col(i), pointValue); BOOST_REQUIRE_EQUAL(equal, 0); } } else for (size_t i = 0; i < tree.NumChildren(); i++) CheckDiscreteHilbertValueSync(tree.Child(i)); } BOOST_AUTO_TEST_CASE(DiscreteHilbertValueSyncTest) { arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. typedef HilbertRTree,arma::mat> TreeType; TreeType hilbertRTree(dataset, 20, 6, 5, 2, 0); CheckDiscreteHilbertValueSync(hilbertRTree); } BOOST_AUTO_TEST_CASE(DiscreteHilbertValueTest) { arma::vec point01(1); arma::vec point02(1); point01[0] = -DBL_MAX; point02[0] = DBL_MAX; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point01, point02), -1); point01[0] = -DBL_MAX; point02[0] = -100; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point01, point02), -1); point01[0] = -100; point02[0] = -1; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point01, point02), -1); point01[0] = -1; point02[0] = -std::numeric_limits::min(); BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point01, point02), -1); point01[0] = -std::numeric_limits::min(); point02[0] = 0; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point01, point02), -1); point01[0] = 0; point02[0] = std::numeric_limits::min(); BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point01, point02), -1); point01[0] = std::numeric_limits::min(); point02[0] = 1; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point01, point02), -1); point01[0] = 1; point02[0] = 100; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point01, point02), -1); point01[0] = 100; point02[0] = DBL_MAX; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point01, point02), -1); arma::vec point1(2); arma::vec point2(2); point1[0] = -DBL_MAX; point1[1] = -DBL_MAX; point2[0] = 0; point2[1] = 0; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point1, point2), -1); point1[0] = -1; point1[1] = -1; point2[0] = 1; point2[1] = -1; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point1, point2), -1); point1[0] = -1; point1[1] = -1; point2[0] = -1; point2[1] = 1; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point1, point2), -1); point1[0] = -DBL_MAX + 1; point1[1] = -DBL_MAX + 1; point2[0] = -1; point2[1] = -1; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point1, point2), -1); point1[0] = DBL_MAX * 0.75; point1[1] = DBL_MAX * 0.75; point2[0] = DBL_MAX * 0.25; point2[1] = DBL_MAX * 0.25; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point1, point2), 1); arma::vec point3(4); arma::vec point4(4); point3[0] = -DBL_MAX; point3[1] = -DBL_MAX; point3[2] = -DBL_MAX; point3[3] = -DBL_MAX; point4[0] = 1.0; point4[1] = 1.0; point4[2] = 1.0; point4[3] = 1.0; BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point3, point4), -1); point3[0] = -DBL_MAX; point3[1] = DBL_MAX; point3[2] = DBL_MAX; point3[3] = DBL_MAX; point4[0] = DBL_MAX; point4[1] = DBL_MAX; point4[2] = DBL_MAX; point4[3] = DBL_MAX; 
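  // point3 and point4 now differ only in the first coordinate (-DBL_MAX vs.
  // DBL_MAX), which is enough to order them along the curve, so
  // ComparePoints() must again return -1 (first point precedes the second).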
BOOST_REQUIRE_EQUAL(DiscreteHilbertValue::ComparePoints(point3, point4), -1); } template void CheckHilbertValue(const TreeType& tree) { typedef DiscreteHilbertValue HilbertValue; const HilbertValue& value = tree.AuxiliaryInfo().HilbertValue(); if (tree.IsLeaf()) { BOOST_REQUIRE_EQUAL(value.OwnsLocalHilbertValues(), true); return; } for (size_t i = 0; i < tree.NumChildren(); i++) { const HilbertValue& childValue = tree.Child(i).AuxiliaryInfo().HilbertValue(); BOOST_REQUIRE_EQUAL(value.ValueToInsert(), childValue.ValueToInsert()); } const HilbertValue& childValue = tree.Child(tree.NumChildren() - 1).AuxiliaryInfo().HilbertValue(); BOOST_REQUIRE_EQUAL(value.LocalHilbertValues(), childValue.LocalHilbertValues()); if (!tree.Parent()) BOOST_REQUIRE_EQUAL(value.OwnsValueToInsert(), true); else BOOST_REQUIRE_EQUAL(value.OwnsValueToInsert(), false); BOOST_REQUIRE_EQUAL(value.OwnsLocalHilbertValues(), false); for (size_t i = 0; i < tree.NumChildren(); i++) CheckHilbertValue(tree.Child(i)); } BOOST_AUTO_TEST_CASE(HilbertRTeeCopyConstructorTest) { typedef HilbertRTree, arma::mat> TreeType; arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. TreeType tree(dataset, 20, 6, 5, 2, 0); TreeType copy(tree); CheckHilbertValue(copy); CheckDiscreteHilbertValueSync(copy); CheckHilbertOrdering(copy); CheckContainment(copy); CheckExactContainment(copy); CheckHierarchy(copy); CheckNumDescendants(copy); } BOOST_AUTO_TEST_CASE(HilbertRTeeMoveConstructorTest) { typedef HilbertRTree, arma::mat> TreeType; arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. TreeType tree(dataset, 20, 6, 5, 2, 0); TreeType copy(std::move(tree)); CheckHilbertValue(copy); CheckDiscreteHilbertValueSync(copy); CheckHilbertOrdering(copy); CheckContainment(copy); CheckExactContainment(copy); CheckHierarchy(copy); CheckNumDescendants(copy); } template void CheckOverlap(const TreeType& tree) { bool success = true; // Check if two nodes overlap each other. for (size_t i = 0; i < tree.NumChildren(); i++) { success = true; for (size_t j = 0; j < tree.NumChildren(); j++) { if (j == i) continue; success = !tree.Child(i).Bound().Contains(tree.Child(j).Bound()); if (!success) break; } if (!success) break; } BOOST_REQUIRE_EQUAL(success, true); for (size_t i = 0; i < tree.NumChildren(); i++) CheckOverlap(tree.Child(i)); } BOOST_AUTO_TEST_CASE(RPlusTreeOverlapTest) { arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. typedef RPlusTree,arma::mat> TreeType; TreeType rPlusTree(dataset, 20, 6, 5, 2, 0); CheckOverlap(rPlusTree); // Children can not be overlapping. bool b = TreeTraits::HasOverlappingChildren; BOOST_REQUIRE_EQUAL(b, false); // Ensure that all leaf nodes are at the same level. BOOST_REQUIRE_EQUAL(GetMinLevel(rPlusTree), GetMaxLevel(rPlusTree)); BOOST_REQUIRE_EQUAL(rPlusTree.TreeDepth(), GetMinLevel(rPlusTree)); } BOOST_AUTO_TEST_CASE(RPlusTreeTraverserTest) { arma::mat dataset; const int numP = 1000; dataset.randu(8, numP); // 1000 points in 8 dimensions. arma::Mat neighbors1; arma::mat distances1; arma::Mat neighbors2; arma::mat distances2; typedef RPlusTree, arma::mat > TreeType; TreeType rPlusTree(dataset, 20, 6, 5, 2, 0); BOOST_REQUIRE_EQUAL(rPlusTree.NumDescendants(), numP); CheckContainment(rPlusTree); CheckExactContainment(rPlusTree); CheckHierarchy(rPlusTree); CheckOverlap(rPlusTree); CheckNumDescendants(rPlusTree); // Nearest neighbor search with the R+ tree. 
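  // (CheckOverlap() above verified the defining R+ property: the bounding
  // rectangles of sibling nodes are pairwise non-overlapping, unlike in the
  // plain R tree, where sibling bounds may intersect.)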
NeighborSearch, arma::mat, RPlusTree > knn1(std::move(rPlusTree), SINGLE_TREE_MODE); knn1.Search(5, neighbors1, distances1); // Nearest neighbor search the naive way. KNN knn2(dataset, NAIVE_MODE); knn2.Search(5, neighbors2, distances2); for (size_t i = 0; i < neighbors1.size(); i++) { BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]); BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]); } } template void CheckRPlusPlusTreeBound(const TreeType& tree) { typedef bound::HRectBound Bound; bool success = true; // Ensure that the maximum bounding rectangle contains all children. for (size_t k = 0; k < tree.Bound().Dim(); k++) { BOOST_REQUIRE_LE(tree.Bound()[k].Hi(), tree.AuxiliaryInfo().OuterBound()[k].Hi()); BOOST_REQUIRE_LE(tree.AuxiliaryInfo().OuterBound()[k].Lo(), tree.Bound()[k].Lo()); } if (tree.IsLeaf()) { // Ensure that the maximum bounding rectangle contains all points. for (size_t i = 0; i < tree.Count(); i++) BOOST_REQUIRE_EQUAL(true, tree.Bound().Contains(tree.Dataset().col(tree.Point(i)))); return; } // Ensure that two children's maximum bounding rectangles do not overlap // each other. for (size_t i = 0; i < tree.NumChildren(); i++) { const Bound& bound1 = tree.Child(i).AuxiliaryInfo().OuterBound(); success = true; for (size_t j = 0; j < tree.NumChildren(); j++) { if (j == i) continue; const Bound& bound2 = tree.Child(j).AuxiliaryInfo().OuterBound(); success = !bound1.Contains(bound2); if (!success) break; } if (!success) break; } BOOST_REQUIRE_EQUAL(success, true); for (size_t i = 0; i < tree.NumChildren(); i++) CheckRPlusPlusTreeBound(tree.Child(i)); } BOOST_AUTO_TEST_CASE(RPlusPlusTreeBoundTest) { arma::mat dataset; dataset.randu(8, 1000); // 1000 points in 8 dimensions. // Check the MinimalCoverageSweep. typedef RPlusPlusTree,arma::mat> TreeType; TreeType rPlusPlusTree(dataset, 20, 6, 5, 2, 0); CheckRPlusPlusTreeBound(rPlusPlusTree); // Children can not be overlapping. bool b = TreeTraits::HasOverlappingChildren; BOOST_REQUIRE_EQUAL(b, false); BOOST_REQUIRE_EQUAL(GetMinLevel(rPlusPlusTree), GetMaxLevel(rPlusPlusTree)); BOOST_REQUIRE_EQUAL(rPlusPlusTree.TreeDepth(), GetMinLevel(rPlusPlusTree)); // Check the MinimalSplitsNumberSweep. typedef RectangleTree, arma::mat, RPlusTreeSplit, RPlusPlusTreeDescentHeuristic, RPlusPlusTreeAuxiliaryInformation> RPlusPlusTreeMinimalSplits; RPlusPlusTreeMinimalSplits rPlusPlusTree2(dataset, 20, 6, 5, 2, 0); CheckRPlusPlusTreeBound(rPlusPlusTree2); BOOST_REQUIRE_EQUAL(GetMinLevel(rPlusPlusTree2), GetMaxLevel(rPlusPlusTree2)); BOOST_REQUIRE_EQUAL(rPlusPlusTree2.TreeDepth(), GetMinLevel(rPlusPlusTree2)); } BOOST_AUTO_TEST_CASE(RPlusPlusTreeTraverserTest) { arma::mat dataset; const int numP = 1000; dataset.randu(8, numP); // 1000 points in 8 dimensions. arma::Mat neighbors1; arma::mat distances1; arma::Mat neighbors2; arma::mat distances2; typedef RPlusPlusTree, arma::mat > TreeType; TreeType rPlusPlusTree(dataset, 20, 6, 5, 2, 0); BOOST_REQUIRE_EQUAL(rPlusPlusTree.NumDescendants(), numP); CheckContainment(rPlusPlusTree); CheckExactContainment(rPlusPlusTree); CheckHierarchy(rPlusPlusTree); CheckRPlusPlusTreeBound(rPlusPlusTree); CheckNumDescendants(rPlusPlusTree); // Nearest neighbor search with the R++ tree. NeighborSearch, arma::mat, RPlusPlusTree > knn1(std::move(rPlusPlusTree), SINGLE_TREE_MODE); knn1.Search(5, neighbors1, distances1); // Nearest neighbor search the naive way. 
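  // (NAIVE_MODE performs an exhaustive scan over all point pairs, so it
  // serves as ground truth here: an exact tree-based search must reproduce
  // its neighbors and distances exactly on the same dataset.)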
KNN knn2(dataset, NAIVE_MODE); knn2.Search(5, neighbors2, distances2); for (size_t i = 0; i < neighbors1.size(); i++) { BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]); BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]); } } // Test the tree splitting. We set MaxLeafSize and MaxNumChildren rather low // to allow us to test by hand without adding hundreds of points. BOOST_AUTO_TEST_CASE(RTreeSplitTest) { arma::mat data = arma::trans(arma::mat("0.0 0.0;" "0.0 1.0;" "1.0 0.1;" "1.0 0.5;" "0.7 0.3;" "0.9 0.9;" "0.5 0.6;" "0.6 0.3;" "0.1 0.5;" "0.3 0.7;")); typedef RTree, arma::mat> TreeType; TreeType rTree(data, 5, 2, 2, 1, 0); // There's technically no reason they have to be in a certain order, so we // use firstChild etc. to arbitrarily name them. BOOST_REQUIRE_EQUAL(rTree.NumChildren(), 2); BOOST_REQUIRE_EQUAL(rTree.NumDescendants(), 10); BOOST_REQUIRE_EQUAL(rTree.TreeDepth(), 3); int firstChild = 0, secondChild = 1; if (rTree.Child(firstChild).NumChildren() == 2) { firstChild = 1; secondChild = 0; } BOOST_REQUIRE_SMALL(rTree.Child(firstChild).Bound()[0].Lo(), 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(firstChild).Bound()[0].Hi(), 0.1, 1e-15); BOOST_REQUIRE_SMALL(rTree.Child(firstChild).Bound()[1].Lo(), 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(firstChild).Bound()[1].Hi(), 1.0, 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(secondChild).Bound()[0].Lo(), 0.3, 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(secondChild).Bound()[0].Hi(), 1.0, 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(secondChild).Bound()[1].Lo(), 0.1, 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(secondChild).Bound()[1].Hi(), 0.9, 1e-15); BOOST_REQUIRE_EQUAL(rTree.Child(firstChild).NumChildren(), 1); BOOST_REQUIRE_SMALL( rTree.Child(firstChild).Child(0).Bound()[0].Lo(), 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(firstChild).Child(0).Bound()[0].Hi(), 0.1, 1e-15); BOOST_REQUIRE_SMALL( rTree.Child(firstChild).Child(0).Bound()[1].Lo(), 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(firstChild).Child(0).Bound()[1].Hi(), 1.0, 1e-15); BOOST_REQUIRE_EQUAL(rTree.Child(firstChild).Child(0).Count(), 3); int firstPrime = 0, secondPrime = 1; if (rTree.Child(secondChild).Child(firstPrime).Count() == 3) { firstPrime = 1; secondPrime = 0; } BOOST_REQUIRE_EQUAL(rTree.Child(secondChild).NumChildren(), 2); BOOST_REQUIRE_EQUAL( rTree.Child(secondChild).Child(firstPrime).Count(), 4); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(firstPrime).Bound()[0].Lo(), 0.3, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(firstPrime).Bound()[0].Hi(), 0.7, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(firstPrime).Bound()[1].Lo(), 0.3, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(firstPrime).Bound()[1].Hi(), 0.7, 1e-15); BOOST_REQUIRE_EQUAL( rTree.Child(secondChild).Child(secondPrime).Count(), 3); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(secondPrime).Bound()[0].Lo(), 0.9, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(secondPrime).Bound()[0].Hi(), 1.0, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(secondPrime).Bound()[1].Lo(), 0.1, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(secondPrime).Bound()[1].Hi(), 0.9, 1e-15); } // Test the tree splitting. We set MaxLeafSize and MaxNumChildren rather low // to allow us to test by hand without adding hundreds of points. 
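// As in RTreeSplitTest above, the fixture is built from a string literal:
// arma::mat's string constructor separates rows with ';', and arma::trans()
// turns the 10 x 2 point-per-row matrix into the 2 x 10 column-per-point
// layout mlpack expects.  A tiny self-contained example of the idiom:
//
//   arma::mat pts = arma::trans(arma::mat("0.0 0.0; 1.0 0.5"));
//   // pts is 2 x 2; pts.col(1) is the point (1.0, 0.5).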
BOOST_AUTO_TEST_CASE(RStarTreeSplitTest) { arma::mat data = arma::trans(arma::mat("0.0 0.0;" "0.0 1.0;" "1.0 0.1;" "1.0 0.5;" "0.7 0.3;" "0.9 0.9;" "0.5 0.6;" "0.6 0.3;" "0.1 0.5;" "0.3 0.7;")); typedef RStarTree, arma::mat> TreeType; TreeType rTree(data, 5, 2, 2, 1, 0); // There's technically no reason they have to be in a certain order, so we // use firstChild etc. to arbitrarily name them. BOOST_REQUIRE_EQUAL(rTree.NumChildren(), 2); BOOST_REQUIRE_EQUAL(rTree.NumDescendants(), 10); BOOST_REQUIRE_EQUAL(rTree.TreeDepth(), 3); int firstChild = 0, secondChild = 1; if (rTree.Child(firstChild).NumChildren() == 2) { firstChild = 1; secondChild = 0; } BOOST_REQUIRE_SMALL(rTree.Child(firstChild).Bound()[0].Lo(), 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(firstChild).Bound()[0].Hi(), 0.1, 1e-15); BOOST_REQUIRE_SMALL(rTree.Child(firstChild).Bound()[1].Lo(), 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(firstChild).Bound()[1].Hi(), 1.0, 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(secondChild).Bound()[0].Lo(), 0.3, 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(secondChild).Bound()[0].Hi(), 1.0, 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(secondChild).Bound()[1].Lo(), 0.1, 1e-15); BOOST_REQUIRE_CLOSE(rTree.Child(secondChild).Bound()[1].Hi(), 0.9, 1e-15); BOOST_REQUIRE_EQUAL(rTree.Child(firstChild).NumChildren(), 1); BOOST_REQUIRE_SMALL( rTree.Child(firstChild).Child(0).Bound()[0].Lo(), 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(firstChild).Child(0).Bound()[0].Hi(), 0.1, 1e-15); BOOST_REQUIRE_SMALL( rTree.Child(firstChild).Child(0).Bound()[1].Lo(), 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(firstChild).Child(0).Bound()[1].Hi(), 1.0, 1e-15); BOOST_REQUIRE_EQUAL(rTree.Child(firstChild).Child(0).Count(), 3); int firstPrime = 0, secondPrime = 1; if (rTree.Child(secondChild).Child(firstPrime).Count() == 3) { firstPrime = 1; secondPrime = 0; } BOOST_REQUIRE_EQUAL(rTree.Child(secondChild).NumChildren(), 2); BOOST_REQUIRE_EQUAL( rTree.Child(secondChild).Child(firstPrime).Count(), 4); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(firstPrime).Bound()[0].Lo(), 0.3, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(firstPrime).Bound()[0].Hi(), 0.7, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(firstPrime).Bound()[1].Lo(), 0.3, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(firstPrime).Bound()[1].Hi(), 0.7, 1e-15); BOOST_REQUIRE_EQUAL( rTree.Child(secondChild).Child(secondPrime).Count(), 3); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(secondPrime).Bound()[0].Lo(), 0.9, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(secondPrime).Bound()[0].Hi(), 1.0, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(secondPrime).Bound()[1].Lo(), 0.1, 1e-15); BOOST_REQUIRE_CLOSE( rTree.Child(secondChild).Child(secondPrime).Bound()[1].Hi(), 0.9, 1e-15); } BOOST_AUTO_TEST_CASE(RectangleTreeMoveDatasetTest) { arma::mat dataset = arma::randu(3, 1000); typedef RTree TreeType; TreeType tree(std::move(dataset)); BOOST_REQUIRE_EQUAL(dataset.n_elem, 0); BOOST_REQUIRE_EQUAL(tree.Dataset().n_rows, 3); BOOST_REQUIRE_EQUAL(tree.Dataset().n_cols, 1000); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/regularized_svd_test.cpp000066400000000000000000000206201315013601400231150ustar00rootroot00000000000000/** * @file regularized_svd_test.cpp * @author Siddharth Agrawal * * Test the RegularizedSVDFunction class. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. 
You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::svd; BOOST_AUTO_TEST_SUITE(RegularizedSVDTest); BOOST_AUTO_TEST_CASE(RegularizedSVDFunctionRandomEvaluate) { // Define useful constants. const size_t numUsers = 100; const size_t numItems = 100; const size_t numRatings = 1000; const size_t maxRating = 5; const size_t rank = 10; const size_t numTrials = 50; // Make a random rating dataset. arma::mat data = arma::randu(3, numRatings); data.row(0) = floor(data.row(0) * numUsers); data.row(1) = floor(data.row(1) * numItems); data.row(2) = floor(data.row(2) * maxRating + 0.5); // Manually set last row to maximum user and maximum item. data(0, numRatings - 1) = numUsers - 1; data(1, numRatings - 1) = numItems - 1; // Make a RegularizedSVDFunction with zero regularization. RegularizedSVDFunction rSVDFunc(data, rank, 0); for (size_t i = 0; i < numTrials; i++) { arma::mat parameters = arma::randu(rank, numUsers + numItems); // Calculate cost by summing up cost of each example. double cost = 0; for (size_t j = 0; j < numRatings; j++) { const size_t user = data(0, j); const size_t item = data(1, j) + numUsers; const double rating = data(2, j); const double ratingError = rating - arma::dot(parameters.col(user), parameters.col(item)); const double ratingErrorSquared = ratingError * ratingError; cost += ratingErrorSquared; } // Compare calculated cost and value obtained using Evaluate(). BOOST_REQUIRE_CLOSE(cost, rSVDFunc.Evaluate(parameters), 1e-5); } } BOOST_AUTO_TEST_CASE(RegularizedSVDFunctionRegularizationEvaluate) { // Define useful constants. const size_t numUsers = 100; const size_t numItems = 100; const size_t numRatings = 1000; const size_t maxRating = 5; const size_t rank = 10; const size_t numTrials = 50; // Make a random rating dataset. arma::mat data = arma::randu(3, numRatings); data.row(0) = floor(data.row(0) * numUsers); data.row(1) = floor(data.row(1) * numItems); data.row(2) = floor(data.row(2) * maxRating + 0.5); // Manually set last row to maximum user and maximum item. data(0, numRatings - 1) = numUsers - 1; data(1, numRatings - 1) = numItems - 1; // Make three RegularizedSVDFunction objects with different amounts of // regularization. RegularizedSVDFunction rSVDFuncNoReg(data, rank, 0); RegularizedSVDFunction rSVDFuncSmallReg(data, rank, 0.5); RegularizedSVDFunction rSVDFuncBigReg(data, rank, 20); for (size_t i = 0; i < numTrials; i++) { arma::mat parameters = arma::randu(rank, numUsers + numItems); // Calculate the regularization contributions of parameters corresponding to // each rating and sum them up. double smallRegTerm = 0; double bigRegTerm = 0; for (size_t j = 0; j < numRatings; j++) { const size_t user = data(0, j); const size_t item = data(1, j) + numUsers; const double userVecNorm = arma::norm(parameters.col(user), 2); const double itemVecNorm = arma::norm(parameters.col(item), 2); smallRegTerm += 0.5 * (userVecNorm * userVecNorm + itemVecNorm * itemVecNorm); bigRegTerm += 20 * (userVecNorm * userVecNorm + itemVecNorm * itemVecNorm); } // Cost with regularization should be close to the sum of cost without // regularization and the regularization terms. 
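    // That is, writing p_u and q_i for the user and item columns of the
    // parameter matrix (notation introduced here for illustration), the
    // objective decomposes as
    //
    //   f_lambda(P) = f_0(P) + lambda * sum over rated (u, i) pairs of
    //       (||p_u||^2 + ||q_i||^2),
    //
    // so the lambda = 0 evaluation plus the precomputed regularization terms
    // must match the regularized evaluations.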
BOOST_REQUIRE_CLOSE(rSVDFuncNoReg.Evaluate(parameters) + smallRegTerm, rSVDFuncSmallReg.Evaluate(parameters), 1e-5); BOOST_REQUIRE_CLOSE(rSVDFuncNoReg.Evaluate(parameters) + bigRegTerm, rSVDFuncBigReg.Evaluate(parameters), 1e-5); } } BOOST_AUTO_TEST_CASE(RegularizedSVDFunctionGradient) { // Define useful constants. const size_t numUsers = 50; const size_t numItems = 50; const size_t numRatings = 100; const size_t maxRating = 5; const size_t rank = 10; // Make a random rating dataset. arma::mat data = arma::randu(3, numRatings); data.row(0) = floor(data.row(0) * numUsers); data.row(1) = floor(data.row(1) * numItems); data.row(2) = floor(data.row(2) * maxRating + 0.5); // Manually set last row to maximum user and maximum item. data(0, numRatings - 1) = numUsers - 1; data(1, numRatings - 1) = numItems - 1; arma::mat parameters = arma::randu(rank, numUsers + numItems); // Make two RegularizedSVDFunction objects, one with regularization and one // without. RegularizedSVDFunction rSVDFunc1(data, rank, 0); RegularizedSVDFunction rSVDFunc2(data, rank, 0.5); // Calculate gradients for both the objects. arma::mat gradient1, gradient2; rSVDFunc1.Gradient(parameters, gradient1); rSVDFunc2.Gradient(parameters, gradient2); // Perturbation constant. const double epsilon = 0.0001; double costPlus1, costMinus1, numGradient1; double costPlus2, costMinus2, numGradient2; for (size_t i = 0; i < rank; i++) { for (size_t j = 0; j < numUsers + numItems; j++) { // Perturb parameter with a positive constant and get costs. parameters(i, j) += epsilon; costPlus1 = rSVDFunc1.Evaluate(parameters); costPlus2 = rSVDFunc2.Evaluate(parameters); // Perturb parameter with a negative constant and get costs. parameters(i, j) -= 2 * epsilon; costMinus1 = rSVDFunc1.Evaluate(parameters); costMinus2 = rSVDFunc2.Evaluate(parameters); // Compute numerical gradients using the costs calculated above. numGradient1 = (costPlus1 - costMinus1) / (2 * epsilon); numGradient2 = (costPlus2 - costMinus2) / (2 * epsilon); // Restore the parameter value. parameters(i, j) += epsilon; // Compare numerical and backpropagation gradient values. if (std::abs(gradient1(i, j)) <= 1e-6) BOOST_REQUIRE_SMALL(numGradient1, 1e-5); else BOOST_REQUIRE_CLOSE(numGradient1, gradient1(i, j), 1e-2); if (std::abs(gradient2(i, j)) <= 1e-6) BOOST_REQUIRE_SMALL(numGradient2, 1e-5); else BOOST_REQUIRE_CLOSE(numGradient2, gradient2(i, j), 1e-2); } } } BOOST_AUTO_TEST_CASE(RegularizedSVDFunctionOptimize) { // Define useful constants. const size_t numUsers = 50; const size_t numItems = 50; const size_t numRatings = 100; const size_t iterations = 30; const size_t rank = 10; const double alpha = 0.01; const double lambda = 0.01; // Initiate random parameters. arma::mat parameters = arma::randu(rank, numUsers + numItems); // Make a random rating dataset. arma::mat data = arma::randu(3, numRatings); data.row(0) = floor(data.row(0) * numUsers); data.row(1) = floor(data.row(1) * numItems); // Manually set last row to maximum user and maximum item. data(0, numRatings - 1) = numUsers - 1; data(1, numRatings - 1) = numItems - 1; // Make rating entries based on the parameters. for (size_t i = 0; i < numRatings; i++) { data(2, i) = arma::dot(parameters.col(data(0, i)), parameters.col(numUsers + data(1, i))); } // Make the Reg SVD function and the optimizer. RegularizedSVDFunction rSVDFunc(data, rank, lambda); mlpack::optimization::SGD optimizer(rSVDFunc, alpha, iterations * numRatings); // Obtain optimized parameters after training. 
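  // (The SGD optimizer refines its argument matrix in place, starting from
  // the random initialization below; its return value, the final objective,
  // is not needed for the relative-error check that follows.)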
  arma::mat optParameters = arma::randu(rank, numUsers + numItems);
  optimizer.Optimize(optParameters);

  // Get predicted ratings from optimized parameters.
  arma::mat predictedData(1, numRatings);
  for (size_t i = 0; i < numRatings; i++)
  {
    predictedData(0, i) = arma::dot(optParameters.col(data(0, i)),
        optParameters.col(numUsers + data(1, i)));
  }

  // Calculate relative error.
  const double relativeError = arma::norm(data.row(2) - predictedData, "frob") /
      arma::norm(data, "frob");

  // Relative error should be small.
  BOOST_REQUIRE_SMALL(relativeError, 1e-2);
}

BOOST_AUTO_TEST_SUITE_END();
mlpack-2.2.5/src/mlpack/tests/sa_test.cpp000066400000000000000000000077251315013601400203340ustar00rootroot00000000000000/*
 * @file sa_test.cpp
 * @author Zhihao Lou
 *
 * Test file for SA (simulated annealing).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/sa/sa.hpp>
#include <mlpack/core/optimizers/sa/exponential_schedule.hpp>
#include <mlpack/core/optimizers/lbfgs/test_functions.hpp>
#include <mlpack/core/metrics/ip_metric.hpp>
#include <mlpack/core/metrics/lmetric.hpp>
#include <mlpack/core/metrics/mahalanobis_distance.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace std;
using namespace arma;
using namespace mlpack;
using namespace mlpack::optimization;
using namespace mlpack::optimization::test;
using namespace mlpack::metric;

BOOST_AUTO_TEST_SUITE(SATest);

BOOST_AUTO_TEST_CASE(GeneralizedRosenbrockTest)
{
  size_t dim = 50;
  GeneralizedRosenbrockFunction f(dim);

  double iteration = 0;
  double result = DBL_MAX;
  arma::mat coordinates;
  while (result > 1e-6)
  {
    ExponentialSchedule schedule(1e-5);
    SA<GeneralizedRosenbrockFunction>
        sa(f, schedule, 10000000, 1000., 1000, 100, 1e-10, 3, 20, 0.3, 0.3);
    coordinates = f.GetInitialPoint();
    result = sa.Optimize(coordinates);
    ++iteration;

    BOOST_REQUIRE_LT(iteration, 4); // No more than three tries.
  }

  // 0.1% tolerance for each coordinate.
  BOOST_REQUIRE_SMALL(result, 1e-6);
  for (size_t j = 0; j < dim; ++j)
    BOOST_REQUIRE_CLOSE(coordinates[j], (double) 1.0, 0.1);
}

// The Rosenbrock function is a simple function to optimize.
BOOST_AUTO_TEST_CASE(RosenbrockTest)
{
  RosenbrockFunction f;
  ExponentialSchedule schedule(1e-5);
  SA<RosenbrockFunction> //sa(f, schedule); // All default parameters.
      sa(f, schedule, 10000000, 1000., 1000, 100, 1e-11, 3, 20, 0.3, 0.3);
  arma::mat coordinates = f.GetInitialPoint();

  const double result = sa.Optimize(coordinates);

  BOOST_REQUIRE_SMALL(result, 1e-6);
  BOOST_REQUIRE_CLOSE(coordinates[0], 1.0, 1e-3);
  BOOST_REQUIRE_CLOSE(coordinates[1], 1.0, 1e-3);
}

/**
 * The Rastrigrin function, a (not very) simple nonconvex function.  It is
 * defined by
 *
 *   f(x) = 10n + \sum_{i = 1}^{n} (x_i^2 - 10 cos(2 \pi x_i)).
 *
 * It has very many local minima, so finding the true global minimum is
 * difficult.  The function is two-dimensional, and has minimum 0 where
 * x = [0 0].  We are only using it for simulated annealing, so there is no
 * need to implement the gradient.
 */
class RastrigrinFunction
{
 public:
  double Evaluate(const arma::mat& coordinates) const
  {
    double objective = 20; // 10 * n, n = 2.
    objective += std::pow(coordinates[0], 2.0) -
        10 * std::cos(2 * M_PI * coordinates[0]);
    objective += std::pow(coordinates[1], 2.0) -
        10 * std::cos(2 * M_PI * coordinates[1]);

    return objective;
  }

  arma::mat GetInitialPoint() const
  {
    return arma::mat("-3 -3");
  }
};

BOOST_AUTO_TEST_CASE(RastrigrinFunctionTest)
{
  // Simulated annealing isn't guaranteed to converge (except in very specific
  // situations).  If this works 1 of 8 times, I'm fine with that.
All I want // to know is that this implementation will escape from local minima. size_t successes = 0; for (size_t trial = 0; trial < 8; ++trial) { RastrigrinFunction f; ExponentialSchedule schedule(3e-6); SA //sa(f, schedule); sa(f, schedule, 20000000, 100, 50, 1000, 1e-12, 2, 0.2, 0.01, 0.1); arma::mat coordinates = f.GetInitialPoint(); const double result = sa.Optimize(coordinates); if ((std::abs(result) < 1e-3) && (std::abs(coordinates[0]) < 1e-3) && (std::abs(coordinates[1]) < 1e-3)) { ++successes; break; // No need to continue. } } BOOST_REQUIRE_GE(successes, 1); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/sdp_primal_dual_test.cpp000066400000000000000000000420671315013601400230740ustar00rootroot00000000000000/** * @file sdp_primal_dual_test.cpp * @author Stephen Tu * * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::optimization; using namespace mlpack::distribution; using namespace mlpack::neighbor; class UndirectedGraph { public: UndirectedGraph() {} size_t NumVertices() const { return numVertices; } size_t NumEdges() const { return edges.n_cols; } const arma::umat& Edges() const { return edges; } const arma::vec& Weights() const { return weights; } void Laplacian(arma::sp_mat& laplacian) const { laplacian.zeros(numVertices, numVertices); for (size_t i = 0; i < edges.n_cols; ++i) { laplacian(edges(0, i), edges(1, i)) = -weights(i); laplacian(edges(1, i), edges(0, i)) = -weights(i); } for (size_t i = 0; i < numVertices; ++i) { laplacian(i, i) = -arma::accu(laplacian.row(i)); } } static void LoadFromEdges(UndirectedGraph& g, const std::string& edgesFilename, bool transposeEdges) { data::Load(edgesFilename, g.edges, true, transposeEdges); if (g.edges.n_rows != 2) Log::Fatal << "Invalid datafile" << std::endl; g.weights.ones(g.edges.n_cols); g.ComputeVertices(); } static void LoadFromEdgesAndWeights(UndirectedGraph& g, const std::string& edgesFilename, bool transposeEdges, const std::string& weightsFilename, bool transposeWeights) { data::Load(edgesFilename, g.edges, true, transposeEdges); if (g.edges.n_rows != 2) Log::Fatal << "Invalid datafile" << std::endl; data::Load(weightsFilename, g.weights, true, transposeWeights); if (g.weights.n_elem != g.edges.n_cols) Log::Fatal << "Size mismatch" << std::endl; g.ComputeVertices(); } static void ErdosRenyiRandomGraph(UndirectedGraph& g, size_t numVertices, double edgeProbability, bool weighted, bool selfLoops = false) { if (edgeProbability < 0. || edgeProbability > 1.) Log::Fatal << "edgeProbability not in [0, 1]" << std::endl; std::vector> edges; std::vector weights; for (size_t i = 0; i < numVertices; i ++) { for (size_t j = (selfLoops ? i : i + 1); j < numVertices; j++) { if (math::Random() > edgeProbability) continue; edges.emplace_back(i, j); weights.push_back(weighted ? 
math::Random() : 1.); } } g.edges.set_size(2, edges.size()); for (size_t i = 0; i < edges.size(); i++) { g.edges(0, i) = edges[i].first; g.edges(1, i) = edges[i].second; } g.weights = arma::vec(weights); g.numVertices = numVertices; } private: void ComputeVertices() { numVertices = max(max(edges)) + 1; } arma::umat edges; arma::vec weights; size_t numVertices; }; static inline SDP ConstructMaxCutSDPFromGraph(const UndirectedGraph& g) { SDP sdp(g.NumVertices(), g.NumVertices(), 0); g.Laplacian(sdp.C()); sdp.C() *= -1; for (size_t i = 0; i < g.NumVertices(); i++) { sdp.SparseA()[i].zeros(g.NumVertices(), g.NumVertices()); sdp.SparseA()[i](i, i) = 1.; } sdp.SparseB().ones(); return sdp; } static inline SDP ConstructLovaszThetaSDPFromGraph(const UndirectedGraph& g) { SDP sdp(g.NumVertices(), g.NumEdges() + 1, 0); sdp.C().ones(); sdp.C() *= -1.; sdp.SparseA()[0].eye(g.NumVertices(), g.NumVertices()); for (size_t i = 0; i < g.NumEdges(); i++) { sdp.SparseA()[i + 1].zeros(g.NumVertices(), g.NumVertices()); sdp.SparseA()[i + 1](g.Edges()(0, i), g.Edges()(1, i)) = 1.; sdp.SparseA()[i + 1](g.Edges()(1, i), g.Edges()(0, i)) = 1.; } sdp.SparseB().zeros(); sdp.SparseB()[0] = 1.; return sdp; } static inline SDP ConstructMaxCutSDPFromLaplacian(const std::string& laplacianFilename) { arma::mat laplacian; data::Load(laplacianFilename, laplacian, true, false); if (laplacian.n_rows != laplacian.n_cols) Log::Fatal << "laplacian not square" << std::endl; SDP sdp(laplacian.n_rows, laplacian.n_rows, 0); sdp.C() = -arma::sp_mat(laplacian); for (size_t i = 0; i < laplacian.n_rows; i++) { sdp.SparseA()[i].zeros(laplacian.n_rows, laplacian.n_rows); sdp.SparseA()[i](i, i) = 1.; } sdp.SparseB().ones(); return sdp; } static void CheckPositiveSemiDefinite(const arma::mat& X) { const auto evals = arma::eig_sym(X); BOOST_REQUIRE_GE(evals(0), 1e-20); } template static void CheckKKT(const SDPType& sdp, const arma::mat& X, const arma::vec& ysparse, const arma::vec& ydense, const arma::mat& Z) { // require that the KKT optimality conditions for sdp are satisfied // by the primal-dual pair (X, y, Z) CheckPositiveSemiDefinite(X); CheckPositiveSemiDefinite(Z); const double normXz = arma::norm(X * Z, "fro"); BOOST_REQUIRE_SMALL(normXz, 1e-5); for (size_t i = 0; i < sdp.NumSparseConstraints(); i++) { BOOST_REQUIRE_SMALL( fabs(arma::dot(sdp.SparseA()[i], X) - sdp.SparseB()[i]), 1e-5); } for (size_t i = 0; i < sdp.NumDenseConstraints(); i++) { BOOST_REQUIRE_SMALL( fabs(arma::dot(sdp.DenseA()[i], X) - sdp.DenseB()[i]), 1e-5); } arma::mat dualCheck = Z - sdp.C(); for (size_t i = 0; i < sdp.NumSparseConstraints(); i++) dualCheck += ysparse(i) * sdp.SparseA()[i]; for (size_t i = 0; i < sdp.NumDenseConstraints(); i++) dualCheck += ydense(i) * sdp.DenseA()[i]; const double dualInfeas = arma::norm(dualCheck, "fro"); BOOST_REQUIRE_SMALL(dualInfeas, 1e-5); } BOOST_AUTO_TEST_SUITE(SdpPrimalDualTest); static void SolveMaxCutFeasibleSDP(const SDP& sdp) { arma::mat X0, Z0; arma::vec ysparse0, ydense0; ydense0.set_size(0); // strictly feasible starting point X0.eye(sdp.N(), sdp.N()); ysparse0 = -1.1 * arma::vec(arma::sum(arma::abs(sdp.C()), 0).t()); Z0 = -arma::diagmat(ysparse0) + sdp.C(); PrimalDualSolver> solver(sdp, X0, ysparse0, ydense0, Z0); arma::mat X, Z; arma::vec ysparse, ydense; solver.Optimize(X, ysparse, ydense, Z); CheckKKT(sdp, X, ysparse, ydense, Z); } static void SolveMaxCutPositiveSDP(const SDP& sdp) { arma::mat X0, Z0; arma::vec ysparse0, ydense0; ydense0.set_size(0); // infeasible, but positive starting point X0 = 
arma::eye(sdp.N(), sdp.N()); ysparse0 = arma::randu(sdp.NumSparseConstraints()); Z0.eye(sdp.N(), sdp.N()); PrimalDualSolver> solver(sdp, X0, ysparse0, ydense0, Z0); arma::mat X, Z; arma::vec ysparse, ydense; solver.Optimize(X, ysparse, ydense, Z); CheckKKT(sdp, X, ysparse, ydense, Z); } BOOST_AUTO_TEST_CASE(SmallMaxCutSdp) { auto sdp = ConstructMaxCutSDPFromLaplacian("r10.txt"); SolveMaxCutFeasibleSDP(sdp); SolveMaxCutPositiveSDP(sdp); UndirectedGraph g; UndirectedGraph::ErdosRenyiRandomGraph(g, 10, 0.3, true); sdp = ConstructMaxCutSDPFromGraph(g); // the following was resulting in non-positive Z0 matrices on some // random instances. //SolveMaxCutFeasibleSDP(sdp); SolveMaxCutPositiveSDP(sdp); } BOOST_AUTO_TEST_CASE(SmallLovaszThetaSdp) { UndirectedGraph g; UndirectedGraph::LoadFromEdges(g, "johnson8-4-4.csv", true); auto sdp = ConstructLovaszThetaSDPFromGraph(g); PrimalDualSolver> solver(sdp); arma::mat X, Z; arma::vec ysparse, ydense; solver.Optimize(X, ysparse, ydense, Z); CheckKKT(sdp, X, ysparse, ydense, Z); } static inline arma::sp_mat RepeatBlockDiag(const arma::sp_mat& block, size_t repeat) { assert(block.n_rows == block.n_cols); arma::sp_mat ret(block.n_rows * repeat, block.n_rows * repeat); ret.zeros(); for (size_t i = 0; i < repeat; i++) ret(arma::span(i * block.n_rows, (i + 1) * block.n_rows - 1), arma::span(i * block.n_rows, (i + 1) * block.n_rows - 1)) = block; return ret; } static inline arma::sp_mat BlockDiag(const std::vector& blocks) { // assumes all blocks are the same size const size_t n = blocks.front().n_rows; assert(blocks.front().n_cols == n); arma::sp_mat ret(n * blocks.size(), n * blocks.size()); ret.zeros(); for (size_t i = 0; i < blocks.size(); i++) ret(arma::span(i * n, (i + 1) * n - 1), arma::span(i * n, (i + 1) * n - 1)) = blocks[i]; return ret; } static inline SDP ConstructLogChebychevApproxSdp(const arma::mat& A, const arma::vec& b) { if (A.n_rows != b.n_elem) Log::Fatal << "A.n_rows != len(b)" << std::endl; const size_t p = A.n_rows; const size_t k = A.n_cols; // [0, 0, 0] // [0, 0, 1] // [0, 1, 0] arma::sp_mat cblock(3, 3); cblock(1, 2) = cblock(2, 1) = 1.; const arma::sp_mat C = RepeatBlockDiag(cblock, p); SDP sdp(C.n_rows, k + 1, 0); sdp.C() = C; sdp.SparseB().zeros(); sdp.SparseB()[0] = -1; // [1, 0, 0] // [0, 0, 0] // [0, 0, 1] arma::sp_mat a0block(3, 3); a0block(0, 0) = a0block(2, 2) = 1.; sdp.SparseA()[0] = RepeatBlockDiag(a0block, p); sdp.SparseA()[0] *= -1.; for (size_t i = 0; i < k; i++) { std::vector blocks; for (size_t j = 0; j < p; j++) { arma::sp_mat block(3, 3); const double f = A(j, i) / b(j); // [ -a_j(i)/b_j 0 0 ] // [ 0 a_j(i)/b_j 0 ] // [ 0 0 0 ] block(0, 0) = -f; block(1, 1) = f; blocks.emplace_back(block); } sdp.SparseA()[i + 1] = BlockDiag(blocks); sdp.SparseA()[i + 1] *= -1; } return sdp; } static inline arma::mat RandomOrthogonalMatrix(size_t rows, size_t cols) { arma::mat Q, R; if (!arma::qr(Q, R, arma::randu(rows, cols))) Log::Fatal << "could not compute QR decomposition" << std::endl; return Q; } static inline arma::mat RandomFullRowRankMatrix(size_t rows, size_t cols) { const arma::mat U = RandomOrthogonalMatrix(rows, rows); const arma::mat V = RandomOrthogonalMatrix(cols, cols); arma::mat S; S.zeros(rows, cols); for (size_t i = 0; i < std::min(rows, cols); i++) { S(i, i) = math::Random() + 1e-3; } return U * S * V; } /** * See the examples section, Eq. 9, of * * Semidefinite Programming. * Lieven Vandenberghe and Stephen Boyd. * SIAM Review. 1996. 
* * The logarithmic Chebychev approximation to Ax = b, A is p x k and b is * length p is given by the SDP: * * min t * s.t. * [ t - dot(a_i, x) 0 0 ] * [ 0 dot(a_i, x) / b_i 1 ] >= 0, i=1,...,p * [ 0 1 t ] * */ BOOST_AUTO_TEST_CASE(LogChebychevApproxSdp) { const size_t p0 = 5; const size_t k0 = 10; const arma::mat A0 = RandomFullRowRankMatrix(p0, k0); const arma::vec b0 = arma::randu(p0); const auto sdp0 = ConstructLogChebychevApproxSdp(A0, b0); PrimalDualSolver> solver0(sdp0); arma::mat X0, Z0; arma::vec ysparse0, ydense0; solver0.Optimize(X0, ysparse0, ydense0, Z0); CheckKKT(sdp0, X0, ysparse0, ydense0, Z0); const size_t p1 = 10; const size_t k1 = 5; const arma::mat A1 = RandomFullRowRankMatrix(p1, k1); const arma::vec b1 = arma::randu(p1); const auto sdp1 = ConstructLogChebychevApproxSdp(A1, b1); PrimalDualSolver> solver1(sdp1); arma::mat X1, Z1; arma::vec ysparse1, ydense1; solver1.Optimize(X1, ysparse1, ydense1, Z1); CheckKKT(sdp1, X1, ysparse1, ydense1, Z1); } /** * Example 1 on the SDP wiki * * min x_13 * s.t. * -0.2 <= x_12 <= -0.1 * 0.4 <= x_23 <= 0.5 * x_11 = x_22 = x_33 = 1 * X >= 0 * */ BOOST_AUTO_TEST_CASE(CorrelationCoeffToySdp) { // The semi-definite constraint looks like: // // [ 1 x_12 x_13 0 0 0 0 ] // [ 1 x_23 0 0 0 0 ] // [ 1 0 0 0 0 ] // [ s1 0 0 0 ] >= 0 // [ s2 0 0 ] // [ s3 0 ] // [ s4 ] // x_11 == 0 arma::sp_mat A0(7, 7); A0.zeros(); A0(0, 0) = 1.; // x_22 == 0 arma::sp_mat A1(7, 7); A1.zeros(); A1(1, 1) = 1.; // x_33 == 0 arma::sp_mat A2(7, 7); A2.zeros(); A2(2, 2) = 1.; // x_12 <= -0.1 <==> x_12 + s1 == -0.1, s1 >= 0 arma::sp_mat A3(7, 7); A3.zeros(); A3(1, 0) = A3(0, 1) = 1.; A3(3, 3) = 2.; // -0.2 <= x_12 <==> x_12 - s2 == -0.2, s2 >= 0 arma::sp_mat A4(7, 7); A4.zeros(); A4(1, 0) = A4(0, 1) = 1.; A4(4, 4) = -2.; // x_23 <= 0.5 <==> x_23 + s3 == 0.5, s3 >= 0 arma::sp_mat A5(7, 7); A5.zeros(); A5(2, 1) = A5(1, 2) = 1.; A5(5, 5) = 2.; // 0.4 <= x_23 <==> x_23 - s4 == 0.4, s4 >= 0 arma::sp_mat A6(7, 7); A6.zeros(); A6(2, 1) = A6(1, 2) = 1.; A6(6, 6) = -2.; std::vector ais({A0, A1, A2, A3, A4, A5, A6}); SDP sdp(7, 7 + 4 + 4 + 4 + 3 + 2 + 1, 0); for (size_t j = 0; j < 3; j++) { // x_j4 == x_j5 == x_j6 == x_j7 == 0 for (size_t i = 0; i < 4; i++) { arma::sp_mat A(7, 7); A.zeros(); A(i + 3, j) = A(j, i + 3) = 1; ais.emplace_back(A); } } // x_45 == x_46 == x_47 == 0 for (size_t i = 0; i < 3; i++) { arma::sp_mat A(7, 7); A.zeros(); A(i + 4, 3) = A(3, i + 4) = 1; ais.emplace_back(A); } // x_56 == x_57 == 0 for (size_t i = 0; i < 2; i++) { arma::sp_mat A(7, 7); A.zeros(); A(i + 5, 4) = A(4, i + 5) = 1; ais.emplace_back(A); } // x_67 == 0 arma::sp_mat A(7, 7); A.zeros(); A(6, 5) = A(5, 6) = 1; ais.emplace_back(A); std::swap(sdp.SparseA(), ais); sdp.SparseB().zeros(); sdp.SparseB()[0] = sdp.SparseB()[1] = sdp.SparseB()[2] = 1.; sdp.SparseB()[3] = -0.2; sdp.SparseB()[4] = -0.4; sdp.SparseB()[5] = 1.; sdp.SparseB()[6] = 0.8; sdp.C().zeros(); sdp.C()(0, 2) = sdp.C()(2, 0) = 1.; PrimalDualSolver> solver(sdp); arma::mat X, Z; arma::vec ysparse, ydense; const double obj = solver.Optimize(X, ysparse, ydense, Z); CheckKKT(sdp, X, ysparse, ydense, Z); BOOST_REQUIRE_CLOSE(obj, 2 * (-0.978), 1e-3); } ///** // * Maximum variance unfolding (MVU) SDP to learn the unrolled gram matrix. For // * the SDP formulation, see: // * // * Unsupervised learning of image manifolds by semidefinite programming. // * Kilian Weinberger and Lawrence Saul. CVPR 04. 
// * http://repository.upenn.edu/cgi/viewcontent.cgi?article=1000&context=cis_papers // * // * @param origData origDim x numPoints // * @param numNeighbors // */ //static inline SDP ConstructMvuSDP(const arma::mat& origData, // size_t numNeighbors) //{ // const size_t numPoints = origData.n_cols; // // assert(numNeighbors <= numPoints); // // arma::Mat neighbors; // arma::mat distances; // KNN knn(origData); // knn.Search(numNeighbors, neighbors, distances); // // SDP sdp(numPoints, numNeighbors * numPoints, 1); // sdp.C().eye(numPoints, numPoints); // sdp.C() *= -1; // sdp.DenseA()[0].ones(numPoints, numPoints); // sdp.DenseB()[0] = 0; // // for (size_t i = 0; i < neighbors.n_cols; ++i) // { // for (size_t j = 0; j < numNeighbors; ++j) // { // // This is the index of the constraint. // const size_t index = (i * numNeighbors) + j; // // arma::sp_mat& aRef = sdp.SparseA()[index]; // aRef.zeros(numPoints, numPoints); // // // A_ij(i, i) = 1. // aRef(i, i) = 1; // // // A_ij(i, j) = -1. // aRef(i, neighbors(j, i)) = -1; // // // A_ij(j, i) = -1. // aRef(neighbors(j, i), i) = -1; // // // A_ij(j, j) = 1. // aRef(neighbors(j, i), neighbors(j, i)) = 1; // // // The constraint b_ij is the distance between these two points. // sdp.SparseB()[index] = distances(j, i); // } // } // // return sdp; //} // ///** // * Maximum variance unfolding // * // * Test doesn't work, because the constraint matrices are not linearly // * independent. // */ //BOOST_AUTO_TEST_CASE(SmallMvuSdp) //{ // const size_t n = 20; // // arma::mat origData(3, n); // // // sample n random points on 3-dim unit sphere // GaussianDistribution gauss(3); // for (size_t i = 0; i < n; i++) // { // // how european of them // origData.col(i) = arma::normalise(gauss.Random()); // } // // auto sdp = ConstructMvuSDP(origData, 5); // // PrimalDualSolver> solver(sdp); // arma::mat X, Z; // arma::vec ysparse, ydense; // const auto p = solver.Optimize(X, ysparse, ydense, Z); // BOOST_REQUIRE(p.first); //} BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/serialization.cpp000066400000000000000000000046521315013601400215510ustar00rootroot00000000000000/** * @file serialization.cpp * @author Ryan Curtin * * Miscellaneous utility functions for serialization tests. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include "serialization.hpp" namespace mlpack { // Utility function to check the equality of two Armadillo matrices. void CheckMatrices(const arma::mat& x, const arma::mat& xmlX, const arma::mat& textX, const arma::mat& binaryX) { // First check dimensions. BOOST_REQUIRE_EQUAL(x.n_rows, xmlX.n_rows); BOOST_REQUIRE_EQUAL(x.n_rows, textX.n_rows); BOOST_REQUIRE_EQUAL(x.n_rows, binaryX.n_rows); BOOST_REQUIRE_EQUAL(x.n_cols, xmlX.n_cols); BOOST_REQUIRE_EQUAL(x.n_cols, textX.n_cols); BOOST_REQUIRE_EQUAL(x.n_cols, binaryX.n_cols); BOOST_REQUIRE_EQUAL(x.n_elem, xmlX.n_elem); BOOST_REQUIRE_EQUAL(x.n_elem, textX.n_elem); BOOST_REQUIRE_EQUAL(x.n_elem, binaryX.n_elem); // Now check elements. 
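// (BOOST_REQUIRE_CLOSE uses a relative, percentage-based tolerance, which is
// meaningless when the expected value is exactly zero; zero entries are
// therefore checked with the absolute-tolerance BOOST_REQUIRE_SMALL.)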
for (size_t i = 0; i < x.n_elem; ++i) { const double val = x[i]; if (val == 0.0) { BOOST_REQUIRE_SMALL(xmlX[i], 1e-6); BOOST_REQUIRE_SMALL(textX[i], 1e-6); BOOST_REQUIRE_SMALL(binaryX[i], 1e-6); } else { BOOST_REQUIRE_CLOSE(val, xmlX[i], 1e-6); BOOST_REQUIRE_CLOSE(val, textX[i], 1e-6); BOOST_REQUIRE_CLOSE(val, binaryX[i], 1e-6); } } } void CheckMatrices(const arma::Mat& x, const arma::Mat& xmlX, const arma::Mat& textX, const arma::Mat& binaryX) { // First check dimensions. BOOST_REQUIRE_EQUAL(x.n_rows, xmlX.n_rows); BOOST_REQUIRE_EQUAL(x.n_rows, textX.n_rows); BOOST_REQUIRE_EQUAL(x.n_rows, binaryX.n_rows); BOOST_REQUIRE_EQUAL(x.n_cols, xmlX.n_cols); BOOST_REQUIRE_EQUAL(x.n_cols, textX.n_cols); BOOST_REQUIRE_EQUAL(x.n_cols, binaryX.n_cols); BOOST_REQUIRE_EQUAL(x.n_elem, xmlX.n_elem); BOOST_REQUIRE_EQUAL(x.n_elem, textX.n_elem); BOOST_REQUIRE_EQUAL(x.n_elem, binaryX.n_elem); // Now check elements. for (size_t i = 0; i < x.n_elem; ++i) { BOOST_REQUIRE_EQUAL(x[i], xmlX[i]); BOOST_REQUIRE_EQUAL(x[i], textX[i]); BOOST_REQUIRE_EQUAL(x[i], binaryX[i]); } } } // namespace mlpack mlpack-2.2.5/src/mlpack/tests/serialization.hpp000066400000000000000000000161451315013601400215560ustar00rootroot00000000000000/** * @file serialization.hpp * @author Ryan Curtin * * Miscellaneous utility functions for serialization tests. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef MLPACK_TESTS_SERIALIZATION_HPP #define MLPACK_TESTS_SERIALIZATION_HPP #include #include #include #include #include #include #include #include #include #include "test_tools.hpp" namespace mlpack { // Test function for loading and saving Armadillo objects. template void TestArmadilloSerialization(arma::Cube& x) { // First save it. std::ofstream ofs("test", std::ios::binary); OArchiveType o(ofs); bool success = true; try { o << BOOST_SERIALIZATION_NVP(x); } catch (boost::archive::archive_exception& e) { success = false; } BOOST_REQUIRE_EQUAL(success, true); ofs.close(); // Now load it. arma::Cube orig(x); success = true; std::ifstream ifs("test", std::ios::binary); IArchiveType i(ifs); try { i >> BOOST_SERIALIZATION_NVP(x); } catch (boost::archive::archive_exception& e) { success = false; } BOOST_REQUIRE_EQUAL(success, true); BOOST_REQUIRE_EQUAL(x.n_rows, orig.n_rows); BOOST_REQUIRE_EQUAL(x.n_cols, orig.n_cols); BOOST_REQUIRE_EQUAL(x.n_elem_slice, orig.n_elem_slice); BOOST_REQUIRE_EQUAL(x.n_slices, orig.n_slices); BOOST_REQUIRE_EQUAL(x.n_elem, orig.n_elem); for(size_t slice = 0; slice != x.n_slices; ++slice){ auto const &orig_slice = orig.slice(slice); auto const &x_slice = x.slice(slice); for (size_t i = 0; i < x.n_cols; ++i){ for (size_t j = 0; j < x.n_rows; ++j){ if (double(orig_slice(j, i)) == 0.0) BOOST_REQUIRE_SMALL(double(x_slice(j, i)), 1e-8); else BOOST_REQUIRE_CLOSE(double(orig_slice(j, i)), double(x_slice(j, i)), 1e-8); } } } remove("test"); } // Test all serialization strategies. template void TestAllArmadilloSerialization(arma::Cube& x) { TestArmadilloSerialization(x); TestArmadilloSerialization(x); TestArmadilloSerialization(x); } // Test function for loading and saving Armadillo objects. template void TestArmadilloSerialization(MatType& x) { // First save it. 
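// Round-trip pattern used by all of these helpers: write the object to a
// temporary file with the output archive type, keep a copy, reload into the
// original variable with the input archive type, and compare element by
// element against the copy.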
std::ofstream ofs("test", std::ios::binary); OArchiveType o(ofs); bool success = true; try { o << BOOST_SERIALIZATION_NVP(x); } catch (boost::archive::archive_exception& e) { success = false; } BOOST_REQUIRE_EQUAL(success, true); ofs.close(); // Now load it. MatType orig(x); success = true; std::ifstream ifs("test", std::ios::binary); IArchiveType i(ifs); try { i >> BOOST_SERIALIZATION_NVP(x); } catch (boost::archive::archive_exception& e) { success = false; } BOOST_REQUIRE_EQUAL(success, true); BOOST_REQUIRE_EQUAL(x.n_rows, orig.n_rows); BOOST_REQUIRE_EQUAL(x.n_cols, orig.n_cols); BOOST_REQUIRE_EQUAL(x.n_elem, orig.n_elem); for (size_t i = 0; i < x.n_cols; ++i) for (size_t j = 0; j < x.n_rows; ++j) if (double(orig(j, i)) == 0.0) BOOST_REQUIRE_SMALL(double(x(j, i)), 1e-8); else BOOST_REQUIRE_CLOSE(double(orig(j, i)), double(x(j, i)), 1e-8); remove("test"); } // Test all serialization strategies. template void TestAllArmadilloSerialization(MatType& x) { TestArmadilloSerialization(x); TestArmadilloSerialization(x); TestArmadilloSerialization(x); } // Save and load an mlpack object. // The re-loaded copy is placed in 'newT'. template void SerializeObject(T& t, T& newT) { std::ofstream ofs("test", std::ios::binary); OArchiveType o(ofs); bool success = true; try { o << data::CreateNVP(t, "t"); } catch (boost::archive::archive_exception& e) { success = false; } ofs.close(); BOOST_REQUIRE_EQUAL(success, true); std::ifstream ifs("test", std::ios::binary); IArchiveType i(ifs); try { i >> data::CreateNVP(newT, "t"); } catch (boost::archive::archive_exception& e) { success = false; } ifs.close(); BOOST_REQUIRE_EQUAL(success, true); } // Test mlpack serialization with all three archive types. template void SerializeObjectAll(T& t, T& xmlT, T& textT, T& binaryT) { SerializeObject(t, textT); SerializeObject(t, binaryT); SerializeObject(t, xmlT); } // Save and load a non-default-constructible mlpack object. template void SerializePointerObject(T* t, T*& newT) { std::ofstream ofs("test", std::ios::binary); OArchiveType o(ofs); bool success = true; try { o << data::CreateNVP(*t, "t"); } catch (boost::archive::archive_exception& e) { success = false; } ofs.close(); BOOST_REQUIRE_EQUAL(success, true); std::ifstream ifs("test", std::ios::binary); IArchiveType i(ifs); try { newT = new T(i); } catch (std::exception& e) { success = false; } ifs.close(); BOOST_REQUIRE_EQUAL(success, true); } template void SerializePointerObjectAll(T* t, T*& xmlT, T*& textT, T*& binaryT) { SerializePointerObject(t, textT); SerializePointerObject(t, binaryT); SerializePointerObject(t, xmlT); } // Utility function to check the equality of two Armadillo matrices. void CheckMatrices(const arma::mat& x, const arma::mat& xmlX, const arma::mat& textX, const arma::mat& binaryX); void CheckMatrices(const arma::Mat& x, const arma::Mat& xmlX, const arma::Mat& textX, const arma::Mat& binaryX); } // namespace mlpack #endif mlpack-2.2.5/src/mlpack/tests/serialization_test.cpp000066400000000000000000001550641315013601400226140ustar00rootroot00000000000000/** * @file serialization_test.cpp * @author Ryan Curtin * * Test serialization of mlpack objects. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. 
*/ #include #include #include "test_tools.hpp" #include "serialization.hpp" #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include using namespace mlpack; using namespace mlpack::distribution; using namespace mlpack::regression; using namespace mlpack::bound; using namespace mlpack::metric; using namespace mlpack::tree; using namespace mlpack::perceptron; using namespace mlpack::regression; using namespace mlpack::naive_bayes; using namespace mlpack::neighbor; using namespace mlpack::decision_stump; using namespace arma; using namespace boost; using namespace boost::archive; using namespace boost::serialization; using namespace std; BOOST_AUTO_TEST_SUITE(SerializationTest); /** * Serialize a random cube. */ BOOST_AUTO_TEST_CASE(CubeSerializeTest) { arma::cube m; m.randu(2, 50, 50); TestAllArmadilloSerialization(m); } /** * Serialize an empty cube. */ BOOST_AUTO_TEST_CASE(EmptyCubeSerializeTest) { arma::cube c; TestAllArmadilloSerialization(c); } /** * Can we load and save an Armadillo matrix? */ BOOST_AUTO_TEST_CASE(MatrixSerializeXMLTest) { arma::mat m; m.randu(50, 50); TestAllArmadilloSerialization(m); } /** * How about columns? */ BOOST_AUTO_TEST_CASE(ColSerializeTest) { arma::vec m; m.randu(50, 1); TestAllArmadilloSerialization(m); } /** * How about rows? */ BOOST_AUTO_TEST_CASE(RowSerializeTest) { arma::rowvec m; m.randu(1, 50); TestAllArmadilloSerialization(m); } // A quick test with an empty matrix. BOOST_AUTO_TEST_CASE(EmptyMatrixSerializeTest) { arma::mat m; TestAllArmadilloSerialization(m); } /** * Can we load and save a sparse Armadillo matrix? */ BOOST_AUTO_TEST_CASE(SparseMatrixSerializeTest) { arma::sp_mat m; m.sprandu(50, 50, 0.3); TestAllArmadilloSerialization(m); } /** * How about columns? */ BOOST_AUTO_TEST_CASE(SparseColSerializeTest) { arma::sp_vec m; m.sprandu(50, 1, 0.3); TestAllArmadilloSerialization(m); } /** * How about rows? */ BOOST_AUTO_TEST_CASE(SparseRowSerializeTest) { arma::sp_rowvec m; m.sprandu(1, 50, 0.3); TestAllArmadilloSerialization(m); } // A quick test with an empty matrix. BOOST_AUTO_TEST_CASE(EmptySparseMatrixSerializeTest) { arma::sp_mat m; TestAllArmadilloSerialization(m); } // Now, test mlpack objects. BOOST_AUTO_TEST_CASE(DiscreteDistributionTest) { // I assume that I am properly saving vectors, so, this should be // straightforward. vec prob; prob.randu(12); std::vector prob_vector = std::vector(1, prob); DiscreteDistribution t(prob_vector); DiscreteDistribution xmlT, textT, binaryT; // Load and save with all serializers. SerializeObjectAll(t, xmlT, textT, binaryT); for (size_t i = 0; i < 12; ++i) { vec obs(1); obs[0] = i; const double prob = t.Probability(obs); if (prob == 0.0) { BOOST_REQUIRE_SMALL(xmlT.Probability(obs), 1e-8); BOOST_REQUIRE_SMALL(textT.Probability(obs), 1e-8); BOOST_REQUIRE_SMALL(binaryT.Probability(obs), 1e-8); } else { BOOST_REQUIRE_CLOSE(prob, xmlT.Probability(obs), 1e-8); BOOST_REQUIRE_CLOSE(prob, textT.Probability(obs), 1e-8); BOOST_REQUIRE_CLOSE(prob, binaryT.Probability(obs), 1e-8); } } } BOOST_AUTO_TEST_CASE(GaussianDistributionTest) { vec mean(10); mean.randu(); // Generate a covariance matrix. 
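// Multiplying a random matrix by its own transpose guarantees a symmetric
// positive semidefinite matrix, i.e. a valid Gaussian covariance.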
mat cov; cov.randu(10, 10); cov = (cov * cov.t()); GaussianDistribution g(mean, cov); GaussianDistribution xmlG, textG, binaryG; SerializeObjectAll(g, xmlG, textG, binaryG); BOOST_REQUIRE_EQUAL(g.Dimensionality(), xmlG.Dimensionality()); BOOST_REQUIRE_EQUAL(g.Dimensionality(), textG.Dimensionality()); BOOST_REQUIRE_EQUAL(g.Dimensionality(), binaryG.Dimensionality()); // First, check the means. CheckMatrices(g.Mean(), xmlG.Mean(), textG.Mean(), binaryG.Mean()); // Now, check the covariance. CheckMatrices(g.Covariance(), xmlG.Covariance(), textG.Covariance(), binaryG.Covariance()); // Lastly, run some observations through and make sure the probability is the // same. This should test anything cached internally. arma::mat randomObs; randomObs.randu(10, 500); for (size_t i = 0; i < 500; ++i) { const double prob = g.Probability(randomObs.unsafe_col(i)); if (prob == 0.0) { BOOST_REQUIRE_SMALL(xmlG.Probability(randomObs.unsafe_col(i)), 1e-8); BOOST_REQUIRE_SMALL(textG.Probability(randomObs.unsafe_col(i)), 1e-8); BOOST_REQUIRE_SMALL(binaryG.Probability(randomObs.unsafe_col(i)), 1e-8); } else { BOOST_REQUIRE_CLOSE(prob, xmlG.Probability(randomObs.unsafe_col(i)), 1e-8); BOOST_REQUIRE_CLOSE(prob, textG.Probability(randomObs.unsafe_col(i)), 1e-8); BOOST_REQUIRE_CLOSE(prob, binaryG.Probability(randomObs.unsafe_col(i)), 1e-8); } } } BOOST_AUTO_TEST_CASE(LaplaceDistributionTest) { vec mean(20); mean.randu(); LaplaceDistribution l(mean, 2.5); LaplaceDistribution xmlL, textL, binaryL; SerializeObjectAll(l, xmlL, textL, binaryL); BOOST_REQUIRE_CLOSE(l.Scale(), xmlL.Scale(), 1e-8); BOOST_REQUIRE_CLOSE(l.Scale(), textL.Scale(), 1e-8); BOOST_REQUIRE_CLOSE(l.Scale(), binaryL.Scale(), 1e-8); CheckMatrices(l.Mean(), xmlL.Mean(), textL.Mean(), binaryL.Mean()); } BOOST_AUTO_TEST_CASE(MahalanobisDistanceTest) { MahalanobisDistance<> d; d.Covariance().randu(50, 50); MahalanobisDistance<> xmlD, textD, binaryD; SerializeObjectAll(d, xmlD, textD, binaryD); // Check the covariance matrices. CheckMatrices(d.Covariance(), xmlD.Covariance(), textD.Covariance(), binaryD.Covariance()); } BOOST_AUTO_TEST_CASE(LinearRegressionTest) { // Generate some random data. mat data; data.randn(15, 800); vec responses; responses.randn(800, 1); LinearRegression lr(data, responses, 0.05); // Train the model. LinearRegression xmlLr, textLr, binaryLr; SerializeObjectAll(lr, xmlLr, textLr, binaryLr); BOOST_REQUIRE_CLOSE(lr.Lambda(), xmlLr.Lambda(), 1e-8); BOOST_REQUIRE_CLOSE(lr.Lambda(), textLr.Lambda(), 1e-8); BOOST_REQUIRE_CLOSE(lr.Lambda(), binaryLr.Lambda(), 1e-8); CheckMatrices(lr.Parameters(), xmlLr.Parameters(), textLr.Parameters(), binaryLr.Parameters()); } BOOST_AUTO_TEST_CASE(RegressionDistributionTest) { // Generate some random data. mat data; data.randn(15, 800); vec responses; responses.randn(800, 1); RegressionDistribution rd(data, responses); RegressionDistribution xmlRd, textRd, binaryRd; // Okay, now save it and load it. SerializeObjectAll(rd, xmlRd, textRd, binaryRd); // Check the gaussian distribution. CheckMatrices(rd.Err().Mean(), xmlRd.Err().Mean(), textRd.Err().Mean(), binaryRd.Err().Mean()); CheckMatrices(rd.Err().Covariance(), xmlRd.Err().Covariance(), textRd.Err().Covariance(), binaryRd.Err().Covariance()); // Check the regression function. 
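// The regression function is a LinearRegression model, so its lambda and
// parameter vector must survive the round trip; lambda can legitimately be
// zero here, hence the absolute/relative tolerance branch below.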
if (rd.Rf().Lambda() == 0.0) { BOOST_REQUIRE_SMALL(xmlRd.Rf().Lambda(), 1e-8); BOOST_REQUIRE_SMALL(textRd.Rf().Lambda(), 1e-8); BOOST_REQUIRE_SMALL(binaryRd.Rf().Lambda(), 1e-8); } else { BOOST_REQUIRE_CLOSE(rd.Rf().Lambda(), xmlRd.Rf().Lambda(), 1e-8); BOOST_REQUIRE_CLOSE(rd.Rf().Lambda(), textRd.Rf().Lambda(), 1e-8); BOOST_REQUIRE_CLOSE(rd.Rf().Lambda(), binaryRd.Rf().Lambda(), 1e-8); } CheckMatrices(rd.Rf().Parameters(), xmlRd.Rf().Parameters(), textRd.Rf().Parameters(), binaryRd.Rf().Parameters()); } BOOST_AUTO_TEST_CASE(BallBoundTest) { BallBound<> b(100); b.Center().randu(); b.Radius() = 14.0; BallBound<> xmlB, textB, binaryB; SerializeObjectAll(b, xmlB, textB, binaryB); // Check the dimensionality. BOOST_REQUIRE_EQUAL(b.Dim(), xmlB.Dim()); BOOST_REQUIRE_EQUAL(b.Dim(), textB.Dim()); BOOST_REQUIRE_EQUAL(b.Dim(), binaryB.Dim()); // Check the radius. BOOST_REQUIRE_CLOSE(b.Radius(), xmlB.Radius(), 1e-8); BOOST_REQUIRE_CLOSE(b.Radius(), textB.Radius(), 1e-8); BOOST_REQUIRE_CLOSE(b.Radius(), binaryB.Radius(), 1e-8); // Now check the vectors. CheckMatrices(b.Center(), xmlB.Center(), textB.Center(), binaryB.Center()); } BOOST_AUTO_TEST_CASE(MahalanobisBallBoundTest) { BallBound, arma::vec> b(100); b.Center().randu(); b.Radius() = 14.0; b.Metric().Covariance().randu(100, 100); BallBound, arma::vec> xmlB, textB, binaryB; SerializeObjectAll(b, xmlB, textB, binaryB); // Check the radius. BOOST_REQUIRE_CLOSE(b.Radius(), xmlB.Radius(), 1e-8); BOOST_REQUIRE_CLOSE(b.Radius(), textB.Radius(), 1e-8); BOOST_REQUIRE_CLOSE(b.Radius(), binaryB.Radius(), 1e-8); // Check the vectors. CheckMatrices(b.Center(), xmlB.Center(), textB.Center(), binaryB.Center()); CheckMatrices(b.Metric().Covariance(), xmlB.Metric().Covariance(), textB.Metric().Covariance(), binaryB.Metric().Covariance()); } BOOST_AUTO_TEST_CASE(HRectBoundTest) { HRectBound<> b(2); arma::mat points("0.0, 1.1; 5.0, 2.2"); points = points.t(); b |= points; // [0.0, 5.0]; [1.1, 2.2]; HRectBound<> xmlB, textB, binaryB; SerializeObjectAll(b, xmlB, textB, binaryB); // Check the dimensionality. BOOST_REQUIRE_EQUAL(b.Dim(), xmlB.Dim()); BOOST_REQUIRE_EQUAL(b.Dim(), textB.Dim()); BOOST_REQUIRE_EQUAL(b.Dim(), binaryB.Dim()); // Check the bounds. for (size_t i = 0; i < b.Dim(); ++i) { BOOST_REQUIRE_CLOSE(b[i].Lo(), xmlB[i].Lo(), 1e-8); BOOST_REQUIRE_CLOSE(b[i].Hi(), xmlB[i].Hi(), 1e-8); BOOST_REQUIRE_CLOSE(b[i].Lo(), textB[i].Lo(), 1e-8); BOOST_REQUIRE_CLOSE(b[i].Hi(), textB[i].Hi(), 1e-8); BOOST_REQUIRE_CLOSE(b[i].Lo(), binaryB[i].Lo(), 1e-8); BOOST_REQUIRE_CLOSE(b[i].Hi(), binaryB[i].Hi(), 1e-8); } // Check the minimum width. BOOST_REQUIRE_CLOSE(b.MinWidth(), xmlB.MinWidth(), 1e-8); BOOST_REQUIRE_CLOSE(b.MinWidth(), textB.MinWidth(), 1e-8); BOOST_REQUIRE_CLOSE(b.MinWidth(), binaryB.MinWidth(), 1e-8); } template void CheckTrees(TreeType& tree, TreeType& xmlTree, TreeType& textTree, TreeType& binaryTree) { const typename TreeType::Mat* dataset = &tree.Dataset(); // Make sure that the data matrices are the same. if (tree.Parent() == NULL) { CheckMatrices(*dataset, xmlTree.Dataset(), textTree.Dataset(), binaryTree.Dataset()); // Also ensure that the other parents are null too. BOOST_REQUIRE_EQUAL(xmlTree.Parent(), (TreeType*) NULL); BOOST_REQUIRE_EQUAL(textTree.Parent(), (TreeType*) NULL); BOOST_REQUIRE_EQUAL(binaryTree.Parent(), (TreeType*) NULL); } // Make sure the number of children is the same. 
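// The checks below compare one level of the tree at a time; CheckTrees()
// recurses into each child at the bottom of this function, so the whole
// tree ends up being compared.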
BOOST_REQUIRE_EQUAL(tree.NumChildren(), xmlTree.NumChildren()); BOOST_REQUIRE_EQUAL(tree.NumChildren(), textTree.NumChildren()); BOOST_REQUIRE_EQUAL(tree.NumChildren(), binaryTree.NumChildren()); // Make sure the number of descendants is the same. BOOST_REQUIRE_EQUAL(tree.NumDescendants(), xmlTree.NumDescendants()); BOOST_REQUIRE_EQUAL(tree.NumDescendants(), textTree.NumDescendants()); BOOST_REQUIRE_EQUAL(tree.NumDescendants(), binaryTree.NumDescendants()); // Make sure the number of points is the same. BOOST_REQUIRE_EQUAL(tree.NumPoints(), xmlTree.NumPoints()); BOOST_REQUIRE_EQUAL(tree.NumPoints(), textTree.NumPoints()); BOOST_REQUIRE_EQUAL(tree.NumPoints(), binaryTree.NumPoints()); // Check that each point is the same. for (size_t i = 0; i < tree.NumPoints(); ++i) { BOOST_REQUIRE_EQUAL(tree.Point(i), xmlTree.Point(i)); BOOST_REQUIRE_EQUAL(tree.Point(i), textTree.Point(i)); BOOST_REQUIRE_EQUAL(tree.Point(i), binaryTree.Point(i)); } // Check that the parent distance is the same. BOOST_REQUIRE_CLOSE(tree.ParentDistance(), xmlTree.ParentDistance(), 1e-8); BOOST_REQUIRE_CLOSE(tree.ParentDistance(), textTree.ParentDistance(), 1e-8); BOOST_REQUIRE_CLOSE(tree.ParentDistance(), binaryTree.ParentDistance(), 1e-8); // Check that the furthest descendant distance is the same. BOOST_REQUIRE_CLOSE(tree.FurthestDescendantDistance(), xmlTree.FurthestDescendantDistance(), 1e-8); BOOST_REQUIRE_CLOSE(tree.FurthestDescendantDistance(), textTree.FurthestDescendantDistance(), 1e-8); BOOST_REQUIRE_CLOSE(tree.FurthestDescendantDistance(), binaryTree.FurthestDescendantDistance(), 1e-8); // Check that the minimum bound distance is the same. BOOST_REQUIRE_CLOSE(tree.MinimumBoundDistance(), xmlTree.MinimumBoundDistance(), 1e-8); BOOST_REQUIRE_CLOSE(tree.MinimumBoundDistance(), textTree.MinimumBoundDistance(), 1e-8); BOOST_REQUIRE_CLOSE(tree.MinimumBoundDistance(), binaryTree.MinimumBoundDistance(), 1e-8); // Recurse into the children. for (size_t i = 0; i < tree.NumChildren(); ++i) { // Check that the child dataset is the same. BOOST_REQUIRE_EQUAL(&xmlTree.Dataset(), &xmlTree.Child(i).Dataset()); BOOST_REQUIRE_EQUAL(&textTree.Dataset(), &textTree.Child(i).Dataset()); BOOST_REQUIRE_EQUAL(&binaryTree.Dataset(), &binaryTree.Child(i).Dataset()); // Make sure the parent link is right. 
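// (Each child must point back at the newly deserialized parent object, not
// at the tree the archive was originally written from.)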
BOOST_REQUIRE_EQUAL(xmlTree.Child(i).Parent(), &xmlTree); BOOST_REQUIRE_EQUAL(textTree.Child(i).Parent(), &textTree); BOOST_REQUIRE_EQUAL(binaryTree.Child(i).Parent(), &binaryTree); CheckTrees(tree.Child(i), xmlTree.Child(i), textTree.Child(i), binaryTree.Child(i)); } } BOOST_AUTO_TEST_CASE(BinarySpaceTreeTest) { arma::mat data; data.randu(3, 100); typedef KDTree TreeType; TreeType tree(data); TreeType* xmlTree; TreeType* textTree; TreeType* binaryTree; SerializePointerObjectAll(&tree, xmlTree, textTree, binaryTree); CheckTrees(tree, *xmlTree, *textTree, *binaryTree); delete xmlTree; delete textTree; delete binaryTree; } BOOST_AUTO_TEST_CASE(BinarySpaceTreeOverwriteTest) { arma::mat data; data.randu(3, 100); typedef KDTree TreeType; TreeType tree(data); arma::mat otherData; otherData.randu(5, 50); TreeType xmlTree(otherData); TreeType textTree(xmlTree); TreeType binaryTree(xmlTree); SerializeObjectAll(tree, xmlTree, textTree, binaryTree); CheckTrees(tree, xmlTree, textTree, binaryTree); } BOOST_AUTO_TEST_CASE(CoverTreeTest) { arma::mat data; data.randu(3, 100); typedef StandardCoverTree TreeType; TreeType tree(data); TreeType* xmlTree; TreeType* textTree; TreeType* binaryTree; SerializePointerObjectAll(&tree, xmlTree, textTree, binaryTree); CheckTrees(tree, *xmlTree, *textTree, *binaryTree); // Also check a few other things. std::stack stack, xmlStack, textStack, binaryStack; stack.push(&tree); xmlStack.push(xmlTree); textStack.push(textTree); binaryStack.push(binaryTree); while (!stack.empty()) { TreeType* node = stack.top(); TreeType* xmlNode = xmlStack.top(); TreeType* textNode = textStack.top(); TreeType* binaryNode = binaryStack.top(); stack.pop(); xmlStack.pop(); textStack.pop(); binaryStack.pop(); BOOST_REQUIRE_EQUAL(node->Scale(), xmlNode->Scale()); BOOST_REQUIRE_EQUAL(node->Scale(), textNode->Scale()); BOOST_REQUIRE_EQUAL(node->Scale(), binaryNode->Scale()); BOOST_REQUIRE_CLOSE(node->Base(), xmlNode->Base(), 1e-5); BOOST_REQUIRE_CLOSE(node->Base(), textNode->Base(), 1e-5); BOOST_REQUIRE_CLOSE(node->Base(), binaryNode->Base(), 1e-5); for (size_t i = 0; i < node->NumChildren(); ++i) { stack.push(&node->Child(i)); xmlStack.push(&xmlNode->Child(i)); textStack.push(&textNode->Child(i)); binaryStack.push(&binaryNode->Child(i)); } } delete xmlTree; delete textTree; delete binaryTree; } BOOST_AUTO_TEST_CASE(CoverTreeOverwriteTest) { arma::mat data; data.randu(3, 100); typedef StandardCoverTree TreeType; TreeType tree(data); arma::mat otherData; otherData.randu(5, 50); TreeType xmlTree(otherData); TreeType textTree(xmlTree); TreeType binaryTree(xmlTree); SerializeObjectAll(tree, xmlTree, textTree, binaryTree); CheckTrees(tree, xmlTree, textTree, binaryTree); // Also check a few other things. 
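// CheckTrees() knows nothing about cover-tree-specific members, so walk the
// trees in lock-step with explicit stacks (an iterative depth-first
// traversal) and compare Scale() and Base() at every node.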
std::stack stack, xmlStack, textStack, binaryStack; stack.push(&tree); xmlStack.push(&xmlTree); textStack.push(&textTree); binaryStack.push(&binaryTree); while (!stack.empty()) { TreeType* node = stack.top(); TreeType* xmlNode = xmlStack.top(); TreeType* textNode = textStack.top(); TreeType* binaryNode = binaryStack.top(); stack.pop(); xmlStack.pop(); textStack.pop(); binaryStack.pop(); BOOST_REQUIRE_EQUAL(node->Scale(), xmlNode->Scale()); BOOST_REQUIRE_EQUAL(node->Scale(), textNode->Scale()); BOOST_REQUIRE_EQUAL(node->Scale(), binaryNode->Scale()); BOOST_REQUIRE_CLOSE(node->Base(), xmlNode->Base(), 1e-5); BOOST_REQUIRE_CLOSE(node->Base(), textNode->Base(), 1e-5); BOOST_REQUIRE_CLOSE(node->Base(), binaryNode->Base(), 1e-5); for (size_t i = 0; i < node->NumChildren(); ++i) { stack.push(&node->Child(i)); xmlStack.push(&xmlNode->Child(i)); textStack.push(&textNode->Child(i)); binaryStack.push(&binaryNode->Child(i)); } } } BOOST_AUTO_TEST_CASE(RectangleTreeTest) { arma::mat data; data.randu(3, 1000); typedef RTree TreeType; TreeType tree(data); TreeType* xmlTree; TreeType* textTree; TreeType* binaryTree; SerializePointerObjectAll(&tree, xmlTree, textTree, binaryTree); CheckTrees(tree, *xmlTree, *textTree, *binaryTree); // Check a few other things too. std::stack stack, xmlStack, textStack, binaryStack; stack.push(&tree); xmlStack.push(xmlTree); textStack.push(textTree); binaryStack.push(binaryTree); while (!stack.empty()) { // Check more things... TreeType* node = stack.top(); TreeType* xmlNode = xmlStack.top(); TreeType* textNode = textStack.top(); TreeType* binaryNode = binaryStack.top(); stack.pop(); xmlStack.pop(); textStack.pop(); binaryStack.pop(); BOOST_REQUIRE_EQUAL(node->MaxLeafSize(), xmlNode->MaxLeafSize()); BOOST_REQUIRE_EQUAL(node->MaxLeafSize(), textNode->MaxLeafSize()); BOOST_REQUIRE_EQUAL(node->MaxLeafSize(), binaryNode->MaxLeafSize()); BOOST_REQUIRE_EQUAL(node->MinLeafSize(), xmlNode->MinLeafSize()); BOOST_REQUIRE_EQUAL(node->MinLeafSize(), textNode->MinLeafSize()); BOOST_REQUIRE_EQUAL(node->MinLeafSize(), binaryNode->MinLeafSize()); BOOST_REQUIRE_EQUAL(node->MaxNumChildren(), xmlNode->MaxNumChildren()); BOOST_REQUIRE_EQUAL(node->MaxNumChildren(), textNode->MaxNumChildren()); BOOST_REQUIRE_EQUAL(node->MaxNumChildren(), binaryNode->MaxNumChildren()); BOOST_REQUIRE_EQUAL(node->MinNumChildren(), xmlNode->MinNumChildren()); BOOST_REQUIRE_EQUAL(node->MinNumChildren(), textNode->MinNumChildren()); BOOST_REQUIRE_EQUAL(node->MinNumChildren(), binaryNode->MinNumChildren()); } delete xmlTree; delete textTree; delete binaryTree; } BOOST_AUTO_TEST_CASE(RectangleTreeOverwriteTest) { arma::mat data; data.randu(3, 1000); typedef RTree TreeType; TreeType tree(data); arma::mat otherData; otherData.randu(5, 50); TreeType xmlTree(otherData); TreeType textTree(otherData); TreeType binaryTree(textTree); SerializeObjectAll(tree, xmlTree, textTree, binaryTree); CheckTrees(tree, xmlTree, textTree, binaryTree); // Check a few other things too. std::stack stack, xmlStack, textStack, binaryStack; stack.push(&tree); xmlStack.push(&xmlTree); textStack.push(&textTree); binaryStack.push(&binaryTree); while (!stack.empty()) { // Check more things... 
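// The leaf-size and children-count bounds govern how a rectangle tree
// splits, so they must round-trip exactly for later insertions and
// deletions to behave the same way.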
TreeType* node = stack.top(); TreeType* xmlNode = xmlStack.top(); TreeType* textNode = textStack.top(); TreeType* binaryNode = binaryStack.top(); stack.pop(); xmlStack.pop(); textStack.pop(); binaryStack.pop(); BOOST_REQUIRE_EQUAL(node->MaxLeafSize(), xmlNode->MaxLeafSize()); BOOST_REQUIRE_EQUAL(node->MaxLeafSize(), textNode->MaxLeafSize()); BOOST_REQUIRE_EQUAL(node->MaxLeafSize(), binaryNode->MaxLeafSize()); BOOST_REQUIRE_EQUAL(node->MinLeafSize(), xmlNode->MinLeafSize()); BOOST_REQUIRE_EQUAL(node->MinLeafSize(), textNode->MinLeafSize()); BOOST_REQUIRE_EQUAL(node->MinLeafSize(), binaryNode->MinLeafSize()); BOOST_REQUIRE_EQUAL(node->MaxNumChildren(), xmlNode->MaxNumChildren()); BOOST_REQUIRE_EQUAL(node->MaxNumChildren(), textNode->MaxNumChildren()); BOOST_REQUIRE_EQUAL(node->MaxNumChildren(), binaryNode->MaxNumChildren()); BOOST_REQUIRE_EQUAL(node->MinNumChildren(), xmlNode->MinNumChildren()); BOOST_REQUIRE_EQUAL(node->MinNumChildren(), textNode->MinNumChildren()); BOOST_REQUIRE_EQUAL(node->MinNumChildren(), binaryNode->MinNumChildren()); } } BOOST_AUTO_TEST_CASE(PerceptronTest) { // Create a perceptron. Train it randomly. Then check that it hasn't // changed. arma::mat data; data.randu(3, 100); arma::Row labels(100); for (size_t i = 0; i < labels.n_elem; ++i) { if (data(1, i) > 0.5) labels[i] = 0; else labels[i] = 1; } Perceptron<> p(data, labels, 2, 15); Perceptron<> pXml(2, 3), pText(2, 3), pBinary(2, 3); SerializeObjectAll(p, pXml, pText, pBinary); // Now check that things are the same. CheckMatrices(p.Weights(), pXml.Weights(), pText.Weights(), pBinary.Weights()); CheckMatrices(p.Biases(), pXml.Biases(), pText.Biases(), pBinary.Biases()); BOOST_REQUIRE_EQUAL(p.MaxIterations(), pXml.MaxIterations()); BOOST_REQUIRE_EQUAL(p.MaxIterations(), pText.MaxIterations()); BOOST_REQUIRE_EQUAL(p.MaxIterations(), pBinary.MaxIterations()); } BOOST_AUTO_TEST_CASE(LogisticRegressionTest) { arma::mat data; data.randu(3, 100); arma::Row responses; responses.randu(100); LogisticRegression<> lr(data, responses, 0.5); LogisticRegression<> lrXml(data, responses + 3, 0.3); LogisticRegression<> lrText(data, responses + 1); LogisticRegression<> lrBinary(3, 0.0); SerializeObjectAll(lr, lrXml, lrText, lrBinary); CheckMatrices(lr.Parameters(), lrXml.Parameters(), lrText.Parameters(), lrBinary.Parameters()); BOOST_REQUIRE_CLOSE(lr.Lambda(), lrXml.Lambda(), 1e-5); BOOST_REQUIRE_CLOSE(lr.Lambda(), lrText.Lambda(), 1e-5); BOOST_REQUIRE_CLOSE(lr.Lambda(), lrBinary.Lambda(), 1e-5); } BOOST_AUTO_TEST_CASE(KNNTest) { using neighbor::KNN; arma::mat dataset = arma::randu(5, 2000); KNN knn(dataset, DUAL_TREE_MODE); KNN knnXml, knnText, knnBinary; SerializeObjectAll(knn, knnXml, knnText, knnBinary); // Now run nearest neighbor and make sure the results are the same. 
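// Identical neighbors and distances on a fresh random query set are strong
// evidence that the serialized model (tree and dataset) matches the
// original one node for node.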
arma::mat querySet = arma::randu(5, 1000); arma::mat distances, xmlDistances, textDistances, binaryDistances; arma::Mat neighbors, xmlNeighbors, textNeighbors, binaryNeighbors; knn.Search(querySet, 5, neighbors, distances); knnXml.Search(querySet, 5, xmlNeighbors, xmlDistances); knnText.Search(querySet, 5, textNeighbors, textDistances); knnBinary.Search(querySet, 5, binaryNeighbors, binaryDistances); CheckMatrices(distances, xmlDistances, textDistances, binaryDistances); CheckMatrices(neighbors, xmlNeighbors, textNeighbors, binaryNeighbors); } BOOST_AUTO_TEST_CASE(SoftmaxRegressionTest) { using regression::SoftmaxRegression; arma::mat dataset = arma::randu(5, 1000); arma::Row labels(1000); for (size_t i = 0; i < 500; ++i) labels[i] = 0; for (size_t i = 500; i < 1000; ++i) labels[i] = 1; SoftmaxRegression<> sr(dataset, labels, 2); SoftmaxRegression<> srXml(dataset.n_rows, 2); SoftmaxRegression<> srText(dataset.n_rows, 2); SoftmaxRegression<> srBinary(dataset.n_rows, 2); SerializeObjectAll(sr, srXml, srText, srBinary); CheckMatrices(sr.Parameters(), srXml.Parameters(), srText.Parameters(), srBinary.Parameters()); } BOOST_AUTO_TEST_CASE(DETTest) { using det::DTree; // Create a density estimation tree on a random dataset. arma::mat dataset = arma::randu(25, 5000); DTree tree(dataset); arma::mat otherDataset = arma::randu(5, 100); DTree xmlTree, binaryTree, textTree(otherDataset); SerializeObjectAll(tree, xmlTree, binaryTree, textTree); std::stack stack, xmlStack, binaryStack, textStack; stack.push(&tree); xmlStack.push(&xmlTree); binaryStack.push(&binaryTree); textStack.push(&textTree); while (!stack.empty()) { // Get the top node from the stack. DTree* node = stack.top(); DTree* xmlNode = xmlStack.top(); DTree* binaryNode = binaryStack.top(); DTree* textNode = textStack.top(); stack.pop(); xmlStack.pop(); binaryStack.pop(); textStack.pop(); // Check that all the members are the same. 
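// DTree exposes its internals (split bookkeeping, log error values, and
// bounding box extents) through accessors, so every member is compared
// directly, with the usual absolute-tolerance branch for values near zero.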
BOOST_REQUIRE_EQUAL(node->Start(), xmlNode->Start()); BOOST_REQUIRE_EQUAL(node->Start(), binaryNode->Start()); BOOST_REQUIRE_EQUAL(node->Start(), textNode->Start()); BOOST_REQUIRE_EQUAL(node->End(), xmlNode->End()); BOOST_REQUIRE_EQUAL(node->End(), binaryNode->End()); BOOST_REQUIRE_EQUAL(node->End(), textNode->End()); BOOST_REQUIRE_EQUAL(node->SplitDim(), xmlNode->SplitDim()); BOOST_REQUIRE_EQUAL(node->SplitDim(), binaryNode->SplitDim()); BOOST_REQUIRE_EQUAL(node->SplitDim(), textNode->SplitDim()); if (std::abs(node->SplitValue()) < 1e-5) { BOOST_REQUIRE_SMALL(xmlNode->SplitValue(), 1e-5); BOOST_REQUIRE_SMALL(binaryNode->SplitValue(), 1e-5); BOOST_REQUIRE_SMALL(textNode->SplitValue(), 1e-5); } else { BOOST_REQUIRE_CLOSE(node->SplitValue(), xmlNode->SplitValue(), 1e-5); BOOST_REQUIRE_CLOSE(node->SplitValue(), binaryNode->SplitValue(), 1e-5); BOOST_REQUIRE_CLOSE(node->SplitValue(), textNode->SplitValue(), 1e-5); } if (std::abs(node->LogNegError()) < 1e-5) { BOOST_REQUIRE_SMALL(xmlNode->LogNegError(), 1e-5); BOOST_REQUIRE_SMALL(binaryNode->LogNegError(), 1e-5); BOOST_REQUIRE_SMALL(textNode->LogNegError(), 1e-5); } else { BOOST_REQUIRE_CLOSE(node->LogNegError(), xmlNode->LogNegError(), 1e-5); BOOST_REQUIRE_CLOSE(node->LogNegError(), binaryNode->LogNegError(), 1e-5); BOOST_REQUIRE_CLOSE(node->LogNegError(), textNode->LogNegError(), 1e-5); } if (std::abs(node->SubtreeLeavesLogNegError()) < 1e-5) { BOOST_REQUIRE_SMALL(xmlNode->SubtreeLeavesLogNegError(), 1e-5); BOOST_REQUIRE_SMALL(binaryNode->SubtreeLeavesLogNegError(), 1e-5); BOOST_REQUIRE_SMALL(textNode->SubtreeLeavesLogNegError(), 1e-5); } else { BOOST_REQUIRE_CLOSE(node->SubtreeLeavesLogNegError(), xmlNode->SubtreeLeavesLogNegError(), 1e-5); BOOST_REQUIRE_CLOSE(node->SubtreeLeavesLogNegError(), binaryNode->SubtreeLeavesLogNegError(), 1e-5); BOOST_REQUIRE_CLOSE(node->SubtreeLeavesLogNegError(), textNode->SubtreeLeavesLogNegError(), 1e-5); } BOOST_REQUIRE_EQUAL(node->SubtreeLeaves(), xmlNode->SubtreeLeaves()); BOOST_REQUIRE_EQUAL(node->SubtreeLeaves(), binaryNode->SubtreeLeaves()); BOOST_REQUIRE_EQUAL(node->SubtreeLeaves(), textNode->SubtreeLeaves()); if (std::abs(node->Ratio()) < 1e-5) { BOOST_REQUIRE_SMALL(xmlNode->Ratio(), 1e-5); BOOST_REQUIRE_SMALL(binaryNode->Ratio(), 1e-5); BOOST_REQUIRE_SMALL(textNode->Ratio(), 1e-5); } else { BOOST_REQUIRE_CLOSE(node->Ratio(), xmlNode->Ratio(), 1e-5); BOOST_REQUIRE_CLOSE(node->Ratio(), binaryNode->Ratio(), 1e-5); BOOST_REQUIRE_CLOSE(node->Ratio(), textNode->Ratio(), 1e-5); } if (std::abs(node->LogVolume()) < 1e-5) { BOOST_REQUIRE_SMALL(xmlNode->LogVolume(), 1e-5); BOOST_REQUIRE_SMALL(binaryNode->LogVolume(), 1e-5); BOOST_REQUIRE_SMALL(textNode->LogVolume(), 1e-5); } else { BOOST_REQUIRE_CLOSE(node->LogVolume(), xmlNode->LogVolume(), 1e-5); BOOST_REQUIRE_CLOSE(node->LogVolume(), binaryNode->LogVolume(), 1e-5); BOOST_REQUIRE_CLOSE(node->LogVolume(), textNode->LogVolume(), 1e-5); } if (node->Left() == NULL) { BOOST_REQUIRE(xmlNode->Left() == NULL); BOOST_REQUIRE(binaryNode->Left() == NULL); BOOST_REQUIRE(textNode->Left() == NULL); } else { BOOST_REQUIRE(xmlNode->Left() != NULL); BOOST_REQUIRE(binaryNode->Left() != NULL); BOOST_REQUIRE(textNode->Left() != NULL); // Push children onto stack. 
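// All four stacks are pushed in the same order, so they stay in lock-step
// and the four top() nodes always refer to the same position in each tree.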
stack.push(node->Left()); xmlStack.push(xmlNode->Left()); binaryStack.push(binaryNode->Left()); textStack.push(textNode->Left()); } if (node->Right() == NULL) { BOOST_REQUIRE(xmlNode->Right() == NULL); BOOST_REQUIRE(binaryNode->Right() == NULL); BOOST_REQUIRE(textNode->Right() == NULL); } else { BOOST_REQUIRE(xmlNode->Right() != NULL); BOOST_REQUIRE(binaryNode->Right() != NULL); BOOST_REQUIRE(textNode->Right() != NULL); // Push children onto stack. stack.push(node->Right()); xmlStack.push(xmlNode->Right()); binaryStack.push(binaryNode->Right()); textStack.push(textNode->Right()); } BOOST_REQUIRE_EQUAL(node->Root(), xmlNode->Root()); BOOST_REQUIRE_EQUAL(node->Root(), binaryNode->Root()); BOOST_REQUIRE_EQUAL(node->Root(), textNode->Root()); if (std::abs(node->AlphaUpper()) < 1e-5) { BOOST_REQUIRE_SMALL(xmlNode->AlphaUpper(), 1e-5); BOOST_REQUIRE_SMALL(binaryNode->AlphaUpper(), 1e-5); BOOST_REQUIRE_SMALL(textNode->AlphaUpper(), 1e-5); } else { BOOST_REQUIRE_CLOSE(node->AlphaUpper(), xmlNode->AlphaUpper(), 1e-5); BOOST_REQUIRE_CLOSE(node->AlphaUpper(), binaryNode->AlphaUpper(), 1e-5); BOOST_REQUIRE_CLOSE(node->AlphaUpper(), textNode->AlphaUpper(), 1e-5); } BOOST_REQUIRE_EQUAL(node->MaxVals().n_elem, xmlNode->MaxVals().n_elem); BOOST_REQUIRE_EQUAL(node->MaxVals().n_elem, binaryNode->MaxVals().n_elem); BOOST_REQUIRE_EQUAL(node->MaxVals().n_elem, textNode->MaxVals().n_elem); for (size_t i = 0; i < node->MaxVals().n_elem; ++i) { if (std::abs(node->MaxVals()[i]) < 1e-5) { BOOST_REQUIRE_SMALL(xmlNode->MaxVals()[i], 1e-5); BOOST_REQUIRE_SMALL(binaryNode->MaxVals()[i], 1e-5); BOOST_REQUIRE_SMALL(textNode->MaxVals()[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(node->MaxVals()[i], xmlNode->MaxVals()[i], 1e-5); BOOST_REQUIRE_CLOSE(node->MaxVals()[i], binaryNode->MaxVals()[i], 1e-5); BOOST_REQUIRE_CLOSE(node->MaxVals()[i], textNode->MaxVals()[i], 1e-5); } } BOOST_REQUIRE_EQUAL(node->MinVals().n_elem, xmlNode->MinVals().n_elem); BOOST_REQUIRE_EQUAL(node->MinVals().n_elem, binaryNode->MinVals().n_elem); BOOST_REQUIRE_EQUAL(node->MinVals().n_elem, textNode->MinVals().n_elem); for (size_t i = 0; i < node->MinVals().n_elem; ++i) { if (std::abs(node->MinVals()[i]) < 1e-5) { BOOST_REQUIRE_SMALL(xmlNode->MinVals()[i], 1e-5); BOOST_REQUIRE_SMALL(binaryNode->MinVals()[i], 1e-5); BOOST_REQUIRE_SMALL(textNode->MinVals()[i], 1e-5); } else { BOOST_REQUIRE_CLOSE(node->MinVals()[i], xmlNode->MinVals()[i], 1e-5); BOOST_REQUIRE_CLOSE(node->MinVals()[i], binaryNode->MinVals()[i], 1e-5); BOOST_REQUIRE_CLOSE(node->MinVals()[i], textNode->MinVals()[i], 1e-5); } } } } BOOST_AUTO_TEST_CASE(NaiveBayesSerializationTest) { // Train NBC randomly. Make sure the model is the same after serializing and // re-loading. arma::mat dataset; dataset.randu(10, 500); arma::Row labels(500); for (size_t i = 0; i < 500; ++i) { if (dataset(0, i) > 0.5) labels[i] = 0; else labels[i] = 1; } NaiveBayesClassifier<> nbc(dataset, labels, 2); // Initialize some empty Naive Bayes classifiers. 
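// NaiveBayesClassifier<>(0, 0) constructs a model with zero dimensions and
// zero classes; loading must resize the means, variances, and prior
// probabilities to match the trained model.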
NaiveBayesClassifier<> xmlNbc(0, 0), textNbc(0, 0), binaryNbc(0, 0); SerializeObjectAll(nbc, xmlNbc, textNbc, binaryNbc); BOOST_REQUIRE_EQUAL(nbc.Means().n_elem, xmlNbc.Means().n_elem); BOOST_REQUIRE_EQUAL(nbc.Means().n_elem, textNbc.Means().n_elem); BOOST_REQUIRE_EQUAL(nbc.Means().n_elem, binaryNbc.Means().n_elem); for (size_t i = 0; i < nbc.Means().n_elem; ++i) { BOOST_REQUIRE_CLOSE(nbc.Means()[i], xmlNbc.Means()[i], 1e-5); BOOST_REQUIRE_CLOSE(nbc.Means()[i], textNbc.Means()[i], 1e-5); BOOST_REQUIRE_CLOSE(nbc.Means()[i], binaryNbc.Means()[i], 1e-5); } BOOST_REQUIRE_EQUAL(nbc.Variances().n_elem, xmlNbc.Variances().n_elem); BOOST_REQUIRE_EQUAL(nbc.Variances().n_elem, textNbc.Variances().n_elem); BOOST_REQUIRE_EQUAL(nbc.Variances().n_elem, binaryNbc.Variances().n_elem); for (size_t i = 0; i < nbc.Variances().n_elem; ++i) { BOOST_REQUIRE_CLOSE(nbc.Variances()[i], xmlNbc.Variances()[i], 1e-5); BOOST_REQUIRE_CLOSE(nbc.Variances()[i], textNbc.Variances()[i], 1e-5); BOOST_REQUIRE_CLOSE(nbc.Variances()[i], binaryNbc.Variances()[i], 1e-5); } BOOST_REQUIRE_EQUAL(nbc.Probabilities().n_elem, xmlNbc.Probabilities().n_elem); BOOST_REQUIRE_EQUAL(nbc.Probabilities().n_elem, textNbc.Probabilities().n_elem); BOOST_REQUIRE_EQUAL(nbc.Probabilities().n_elem, binaryNbc.Probabilities().n_elem); for (size_t i = 0; i < nbc.Probabilities().n_elem; ++i) { BOOST_REQUIRE_CLOSE(nbc.Probabilities()[i], xmlNbc.Probabilities()[i], 1e-5); BOOST_REQUIRE_CLOSE(nbc.Probabilities()[i], textNbc.Probabilities()[i], 1e-5); BOOST_REQUIRE_CLOSE(nbc.Probabilities()[i], binaryNbc.Probabilities()[i], 1e-5); } } BOOST_AUTO_TEST_CASE(RASearchTest) { using neighbor::AllkRANN; using neighbor::KNN; arma::mat dataset = arma::randu(5, 200); arma::mat otherDataset = arma::randu(5, 100); // Find nearest neighbors in the top 10, with accuracy 0.95. So 95% of the // results we get (at least) should fall into the top 10 of the true nearest // neighbors. AllkRANN allkrann(dataset, false, false, 5, 0.95); AllkRANN krannXml(otherDataset, false, false); AllkRANN krannText(otherDataset, true, false); AllkRANN krannBinary(otherDataset, true, true); SerializeObjectAll(allkrann, krannXml, krannText, krannBinary); // Now run nearest neighbor and make sure the results are the same. arma::mat querySet = arma::randu(5, 100); arma::mat distances, xmlDistances, textDistances, binaryDistances; arma::Mat neighbors, xmlNeighbors, textNeighbors, binaryNeighbors; KNN knn(dataset); // Exact search. knn.Search(querySet, 10, neighbors, distances); krannXml.Search(querySet, 5, xmlNeighbors, xmlDistances); krannText.Search(querySet, 5, textNeighbors, textDistances); krannBinary.Search(querySet, 5, binaryNeighbors, binaryDistances); BOOST_REQUIRE_EQUAL(xmlNeighbors.n_rows, 5); BOOST_REQUIRE_EQUAL(xmlNeighbors.n_cols, 100); BOOST_REQUIRE_EQUAL(textNeighbors.n_rows, 5); BOOST_REQUIRE_EQUAL(textNeighbors.n_cols, 100); BOOST_REQUIRE_EQUAL(binaryNeighbors.n_rows, 5); BOOST_REQUIRE_EQUAL(binaryNeighbors.n_cols, 100); size_t xmlCorrect = 0; size_t textCorrect = 0; size_t binaryCorrect = 0; for (size_t i = 0; i < xmlNeighbors.n_cols; ++i) { // See how many are in the top 10. for (size_t j = 0; j < xmlNeighbors.n_rows; ++j) { for (size_t k = 0; k < neighbors.n_rows; ++k) { if (neighbors(k, i) == xmlNeighbors(j, i)) xmlCorrect++; if (neighbors(k, i) == textNeighbors(j, i)) textCorrect++; if (neighbors(k, i) == binaryNeighbors(j, i)) binaryCorrect++; } } } // We need 95% of these to be correct. 
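// 100 query points with 5 reported neighbors each gives 500 results, and
// 95% of 500 is 475 = 95 * 5, which is the threshold used below.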
BOOST_REQUIRE_GT(xmlCorrect, 95 * 5); BOOST_REQUIRE_GT(binaryCorrect, 95 * 5); BOOST_REQUIRE_GT(textCorrect, 95 * 5); } /** * Test that an LSH model can be serialized and deserialized. */ BOOST_AUTO_TEST_CASE(LSHTest) { // Since we still don't have good tests for LSH, basically what we're going to // do is serialize an LSH model, and make sure we can deserialize it and that // we still get results when we call Search(). arma::mat referenceData = arma::randu(10, 100); LSHSearch<> lsh(referenceData, 5, 10); // Arbitrary chosen parameters. LSHSearch<> xmlLsh; arma::mat textData = arma::randu(5, 50); LSHSearch<> textLsh(textData, 4, 5); LSHSearch<> binaryLsh(referenceData, 15, 2); // Now serialize. SerializeObjectAll(lsh, xmlLsh, textLsh, binaryLsh); // Check what we can about the serialized objects. BOOST_REQUIRE_EQUAL(lsh.NumProjections(), xmlLsh.NumProjections()); BOOST_REQUIRE_EQUAL(lsh.NumProjections(), textLsh.NumProjections()); BOOST_REQUIRE_EQUAL(lsh.NumProjections(), binaryLsh.NumProjections()); for (size_t i = 0; i < lsh.NumProjections(); ++i) { CheckMatrices(lsh.Projections().slice(i), xmlLsh.Projections().slice(i), textLsh.Projections().slice(i), binaryLsh.Projections().slice(i)); } CheckMatrices(lsh.ReferenceSet(), xmlLsh.ReferenceSet(), textLsh.ReferenceSet(), binaryLsh.ReferenceSet()); CheckMatrices(lsh.Offsets(), xmlLsh.Offsets(), textLsh.Offsets(), binaryLsh.Offsets()); CheckMatrices(lsh.SecondHashWeights(), xmlLsh.SecondHashWeights(), textLsh.SecondHashWeights(), binaryLsh.SecondHashWeights()); BOOST_REQUIRE_EQUAL(lsh.BucketSize(), xmlLsh.BucketSize()); BOOST_REQUIRE_EQUAL(lsh.BucketSize(), textLsh.BucketSize()); BOOST_REQUIRE_EQUAL(lsh.BucketSize(), binaryLsh.BucketSize()); BOOST_REQUIRE_EQUAL(lsh.SecondHashTable().size(), xmlLsh.SecondHashTable().size()); BOOST_REQUIRE_EQUAL(lsh.SecondHashTable().size(), textLsh.SecondHashTable().size()); BOOST_REQUIRE_EQUAL(lsh.SecondHashTable().size(), binaryLsh.SecondHashTable().size()); for (size_t i = 0; i < lsh.SecondHashTable().size(); ++i) CheckMatrices(lsh.SecondHashTable()[i], xmlLsh.SecondHashTable()[i], textLsh.SecondHashTable()[i], binaryLsh.SecondHashTable()[i]); } // Make sure serialization works for the decision stump. BOOST_AUTO_TEST_CASE(DecisionStumpTest) { // Generate dataset. arma::mat trainingData = arma::randu(4, 100); arma::Row labels(100); for (size_t i = 0; i < 25; ++i) labels[i] = 0; for (size_t i = 25; i < 50; ++i) labels[i] = 3; for (size_t i = 50; i < 75; ++i) labels[i] = 1; for (size_t i = 75; i < 100; ++i) labels[i] = 2; DecisionStump<> ds(trainingData, labels, 4, 3); arma::mat otherData = arma::randu(3, 100); arma::Row otherLabels = arma::randu>(100); DecisionStump<> xmlDs(otherData, otherLabels, 2, 3); DecisionStump<> textDs; DecisionStump<> binaryDs(trainingData, labels, 4, 10); SerializeObjectAll(ds, xmlDs, textDs, binaryDs); // Make sure that everything is the same about the new decision stumps. BOOST_REQUIRE_EQUAL(ds.SplitDimension(), xmlDs.SplitDimension()); BOOST_REQUIRE_EQUAL(ds.SplitDimension(), textDs.SplitDimension()); BOOST_REQUIRE_EQUAL(ds.SplitDimension(), binaryDs.SplitDimension()); CheckMatrices(ds.Split(), xmlDs.Split(), textDs.Split(), binaryDs.Split()); CheckMatrices(ds.BinLabels(), xmlDs.BinLabels(), textDs.BinLabels(), binaryDs.BinLabels()); } // Make sure serialization works for LARS. BOOST_AUTO_TEST_CASE(LARSTest) { using namespace mlpack::regression; // Create a dataset. 
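// X is 75 dimensions by 250 points, and the responses are generated as
// y = X^T * beta from a random "true" beta, so an exact linear model exists
// for LARS to recover.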
arma::mat X = arma::randn(75, 250); arma::vec beta = arma::randn(75, 1); arma::vec y = trans(X) * beta; LARS lars(true, 0.1, 0.1); arma::vec betaOpt; lars.Train(X, y, betaOpt); // Now, serialize. LARS xmlLars(false, 0.5, 0.0), binaryLars(true, 1.0, 0.0), textLars(false, 0.1, 0.1); // Train textLars. arma::mat textX = arma::randn(25, 150); arma::vec textBeta = arma::randn(25, 1); arma::vec textY = trans(textX) * textBeta; arma::vec textBetaOpt; textLars.Train(textX, textY, textBetaOpt); SerializeObjectAll(lars, xmlLars, binaryLars, textLars); // Now, check that predictions are the same. arma::vec pred, xmlPred, textPred, binaryPred; lars.Predict(X, pred); xmlLars.Predict(X, xmlPred); textLars.Predict(X, textPred); binaryLars.Predict(X, binaryPred); CheckMatrices(pred, xmlPred, textPred, binaryPred); } /** * Test serialization of the HoeffdingNumericSplit object after binning has * occurred. */ BOOST_AUTO_TEST_CASE(HoeffdingNumericSplitTest) { using namespace mlpack::tree; HoeffdingNumericSplit split(3); // Train until it bins. for (size_t i = 0; i < 200; ++i) split.Train(mlpack::math::Random(), mlpack::math::RandInt(3)); HoeffdingNumericSplit xmlSplit(5); HoeffdingNumericSplit textSplit(7); for (size_t i = 0; i < 200; ++i) textSplit.Train(mlpack::math::Random() + 3, 0); HoeffdingNumericSplit binarySplit(2); SerializeObjectAll(split, xmlSplit, textSplit, binarySplit); // Ensure that everything is the same. BOOST_REQUIRE_EQUAL(split.Bins(), xmlSplit.Bins()); BOOST_REQUIRE_EQUAL(split.Bins(), textSplit.Bins()); BOOST_REQUIRE_EQUAL(split.Bins(), binarySplit.Bins()); double bestSplit, secondBestSplit; double baseBestSplit, baseSecondBestSplit; split.EvaluateFitnessFunction(baseBestSplit, baseSecondBestSplit); xmlSplit.EvaluateFitnessFunction(bestSplit, secondBestSplit); BOOST_REQUIRE_CLOSE(bestSplit, baseBestSplit, 1e-5); BOOST_REQUIRE_SMALL(secondBestSplit, 1e-10); textSplit.EvaluateFitnessFunction(bestSplit, secondBestSplit); BOOST_REQUIRE_CLOSE(bestSplit, baseBestSplit, 1e-5); BOOST_REQUIRE_SMALL(secondBestSplit, 1e-10); binarySplit.EvaluateFitnessFunction(bestSplit, secondBestSplit); BOOST_REQUIRE_CLOSE(bestSplit, baseBestSplit, 1e-5); BOOST_REQUIRE_SMALL(secondBestSplit, 1e-10); arma::Col children, xmlChildren, textChildren, binaryChildren; NumericSplitInfo splitInfo, xmlSplitInfo, textSplitInfo, binarySplitInfo; split.Split(children, splitInfo); xmlSplit.Split(xmlChildren, xmlSplitInfo); binarySplit.Split(binaryChildren, binarySplitInfo); textSplit.Split(textChildren, textSplitInfo); BOOST_REQUIRE_EQUAL(children.size(), xmlChildren.size()); BOOST_REQUIRE_EQUAL(children.size(), textChildren.size()); BOOST_REQUIRE_EQUAL(children.size(), binaryChildren.size()); for (size_t i = 0; i < children.size(); ++i) { BOOST_REQUIRE_EQUAL(children[i], xmlChildren[i]); BOOST_REQUIRE_EQUAL(children[i], textChildren[i]); BOOST_REQUIRE_EQUAL(children[i], binaryChildren[i]); } // Random checks. for (size_t i = 0; i < 200; ++i) { const double random = mlpack::math::Random() * 1.5; BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(random), xmlSplitInfo.CalculateDirection(random)); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(random), textSplitInfo.CalculateDirection(random)); BOOST_REQUIRE_EQUAL(splitInfo.CalculateDirection(random), binarySplitInfo.CalculateDirection(random)); } } /** * Make sure serialization of the HoeffdingNumericSplit object before binning * occurs is successful.
*/ BOOST_AUTO_TEST_CASE(HoeffdingNumericSplitBeforeBinningTest) { using namespace mlpack::tree; HoeffdingNumericSplit split(3); // Train but not until it bins. for (size_t i = 0; i < 50; ++i) split.Train(mlpack::math::Random(), mlpack::math::RandInt(3)); HoeffdingNumericSplit xmlSplit(5); HoeffdingNumericSplit textSplit(7); for (size_t i = 0; i < 200; ++i) textSplit.Train(mlpack::math::Random() + 3, 0); HoeffdingNumericSplit binarySplit(2); SerializeObjectAll(split, xmlSplit, textSplit, binarySplit); // Ensure that everything is the same. BOOST_REQUIRE_EQUAL(split.Bins(), xmlSplit.Bins()); BOOST_REQUIRE_EQUAL(split.Bins(), textSplit.Bins()); BOOST_REQUIRE_EQUAL(split.Bins(), binarySplit.Bins()); double baseBestSplit, baseSecondBestSplit; double bestSplit, secondBestSplit; split.EvaluateFitnessFunction(baseBestSplit, baseSecondBestSplit); textSplit.EvaluateFitnessFunction(bestSplit, secondBestSplit); BOOST_REQUIRE_SMALL(baseBestSplit, 1e-5); BOOST_REQUIRE_SMALL(baseSecondBestSplit, 1e-5); BOOST_REQUIRE_SMALL(bestSplit, 1e-5); BOOST_REQUIRE_SMALL(secondBestSplit, 1e-5); xmlSplit.EvaluateFitnessFunction(bestSplit, secondBestSplit); BOOST_REQUIRE_SMALL(bestSplit, 1e-5); BOOST_REQUIRE_SMALL(secondBestSplit, 1e-5); binarySplit.EvaluateFitnessFunction(bestSplit, secondBestSplit); BOOST_REQUIRE_SMALL(bestSplit, 1e-5); BOOST_REQUIRE_SMALL(secondBestSplit, 1e-5); } /** * Make sure the HoeffdingCategoricalSplit object serializes correctly. */ BOOST_AUTO_TEST_CASE(HoeffdingCategoricalSplitTest) { using namespace mlpack::tree; HoeffdingCategoricalSplit split(10, 3); for (size_t i = 0; i < 50; ++i) split.Train(mlpack::math::RandInt(10), mlpack::math::RandInt(3)); HoeffdingCategoricalSplit xmlSplit(3, 7); HoeffdingCategoricalSplit binarySplit(4, 11); HoeffdingCategoricalSplit textSplit(2, 2); for (size_t i = 0; i < 10; ++i) textSplit.Train(mlpack::math::RandInt(2), mlpack::math::RandInt(2)); SerializeObjectAll(split, xmlSplit, textSplit, binarySplit); BOOST_REQUIRE_EQUAL(split.MajorityClass(), xmlSplit.MajorityClass()); BOOST_REQUIRE_EQUAL(split.MajorityClass(), textSplit.MajorityClass()); BOOST_REQUIRE_EQUAL(split.MajorityClass(), binarySplit.MajorityClass()); double bestSplit, secondBestSplit; double baseBestSplit, baseSecondBestSplit; split.EvaluateFitnessFunction(baseBestSplit, baseSecondBestSplit); xmlSplit.EvaluateFitnessFunction(bestSplit, secondBestSplit); BOOST_REQUIRE_CLOSE(bestSplit, baseBestSplit, 1e-5); BOOST_REQUIRE_SMALL(secondBestSplit, 1e-10); textSplit.EvaluateFitnessFunction(bestSplit, secondBestSplit); BOOST_REQUIRE_CLOSE(bestSplit, baseBestSplit, 1e-5); BOOST_REQUIRE_SMALL(secondBestSplit, 1e-10); binarySplit.EvaluateFitnessFunction(bestSplit, secondBestSplit); BOOST_REQUIRE_CLOSE(bestSplit, baseBestSplit, 1e-5); BOOST_REQUIRE_SMALL(secondBestSplit, 1e-10); arma::Col children, xmlChildren, textChildren, binaryChildren; CategoricalSplitInfo splitInfo(1); // I don't care about this. 
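// Each Split() call below overwrites splitInfo; only the children vectors
// are compared afterwards, so the initial value really is irrelevant.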
split.Split(children, splitInfo); xmlSplit.Split(xmlChildren, splitInfo); binarySplit.Split(binaryChildren, splitInfo); textSplit.Split(textChildren, splitInfo); BOOST_REQUIRE_EQUAL(children.size(), xmlChildren.size()); BOOST_REQUIRE_EQUAL(children.size(), textChildren.size()); BOOST_REQUIRE_EQUAL(children.size(), binaryChildren.size()); for (size_t i = 0; i < children.size(); ++i) { BOOST_REQUIRE_EQUAL(children[i], xmlChildren[i]); BOOST_REQUIRE_EQUAL(children[i], textChildren[i]); BOOST_REQUIRE_EQUAL(children[i], binaryChildren[i]); } } /** * Make sure the HoeffdingTree object serializes correctly before a split has * occurred. */ BOOST_AUTO_TEST_CASE(HoeffdingTreeBeforeSplitTest) { data::DatasetInfo info(5); info.MapString("0", 2); // Dimension 2 is categorical. info.MapString("1", 2); HoeffdingTree<> split(info, 2, 0.99, 15000, 1); // Train for 2 samples. split.Train(arma::vec("0.3 0.4 1 0.6 0.7"), 0); split.Train(arma::vec("-0.3 0.0 0 0.7 0.8"), 1); data::DatasetInfo wrongInfo(3); wrongInfo.MapString("1", 1); HoeffdingTree<> xmlSplit(wrongInfo, 7, 0.1, 10, 1); // Force the binarySplit to split. data::DatasetInfo binaryInfo(2); binaryInfo.MapString("cat0", 0); binaryInfo.MapString("cat1", 0); binaryInfo.MapString("cat0", 1); HoeffdingTree<> binarySplit(binaryInfo, 2, 0.95, 5000, 1); // Feed samples from each class. for (size_t i = 0; i < 500; ++i) { binarySplit.Train(arma::Col("0 0"), 0); binarySplit.Train(arma::Col("1 0"), 1); } HoeffdingTree<> textSplit(wrongInfo, 11, 0.75, 1000, 1); SerializeObjectAll(split, xmlSplit, textSplit, binarySplit); BOOST_REQUIRE_EQUAL(split.SplitDimension(), xmlSplit.SplitDimension()); BOOST_REQUIRE_EQUAL(split.SplitDimension(), binarySplit.SplitDimension()); BOOST_REQUIRE_EQUAL(split.SplitDimension(), textSplit.SplitDimension()); BOOST_REQUIRE_EQUAL(split.MajorityClass(), xmlSplit.MajorityClass()); BOOST_REQUIRE_EQUAL(split.MajorityClass(), binarySplit.MajorityClass()); BOOST_REQUIRE_EQUAL(split.MajorityClass(), textSplit.MajorityClass()); BOOST_REQUIRE_EQUAL(split.SplitCheck(), xmlSplit.SplitCheck()); BOOST_REQUIRE_EQUAL(split.SplitCheck(), binarySplit.SplitCheck()); BOOST_REQUIRE_EQUAL(split.SplitCheck(), textSplit.SplitCheck()); } /** * Make sure the HoeffdingTree object serializes correctly after a split has * occurred. */ BOOST_AUTO_TEST_CASE(HoeffdingTreeAfterSplitTest) { // Force the split to split. data::DatasetInfo info(2); info.MapString("cat0", 0); info.MapString("cat1", 0); info.MapString("cat0", 1); HoeffdingTree<> split(info, 2, 0.95, 5000, 1); // Feed samples from each class. for (size_t i = 0; i < 500; ++i) { split.Train(arma::Col("0 0"), 0); split.Train(arma::Col("1 0"), 1); } // Ensure a split has happened. BOOST_REQUIRE_NE(split.SplitDimension(), size_t(-1)); data::DatasetInfo wrongInfo(3); wrongInfo.MapString("1", 1); HoeffdingTree<> xmlSplit(wrongInfo, 7, 0.1, 10, 1); data::DatasetInfo binaryInfo(5); binaryInfo.MapString("0", 2); // Dimension 2 is categorical. binaryInfo.MapString("1", 2); HoeffdingTree<> binarySplit(binaryInfo, 2, 0.99, 15000, 1); // Train for 2 samples.
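// The third entry of each 5-dimensional sample is the categorical dimension
// 2 mapped just above; the remaining entries are numeric.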
binarySplit.Train(arma::vec("0.3 0.4 1 0.6 0.7"), 0); binarySplit.Train(arma::vec("-0.3 0.0 0 0.7 0.8"), 1); HoeffdingTree<> textSplit(wrongInfo, 11, 0.75, 1000, 1); SerializeObjectAll(split, xmlSplit, textSplit, binarySplit); BOOST_REQUIRE_EQUAL(split.SplitDimension(), xmlSplit.SplitDimension()); BOOST_REQUIRE_EQUAL(split.SplitDimension(), binarySplit.SplitDimension()); BOOST_REQUIRE_EQUAL(split.SplitDimension(), textSplit.SplitDimension()); // If splitting has already happened, then SplitCheck() should return 0. BOOST_REQUIRE_EQUAL(split.SplitCheck(), 0); BOOST_REQUIRE_EQUAL(split.SplitCheck(), xmlSplit.SplitCheck()); BOOST_REQUIRE_EQUAL(split.SplitCheck(), binarySplit.SplitCheck()); BOOST_REQUIRE_EQUAL(split.SplitCheck(), textSplit.SplitCheck()); BOOST_REQUIRE_EQUAL(split.MajorityClass(), xmlSplit.MajorityClass()); BOOST_REQUIRE_EQUAL(split.MajorityClass(), binarySplit.MajorityClass()); BOOST_REQUIRE_EQUAL(split.MajorityClass(), textSplit.MajorityClass()); BOOST_REQUIRE_EQUAL(split.CalculateDirection(arma::vec("0.3 0.4 1 0.6 0.7")), xmlSplit.CalculateDirection(arma::vec("0.3 0.4 1 0.6 0.7"))); BOOST_REQUIRE_EQUAL(split.CalculateDirection(arma::vec("0.3 0.4 1 0.6 0.7")), binarySplit.CalculateDirection(arma::vec("0.3 0.4 1 0.6 0.7"))); BOOST_REQUIRE_EQUAL(split.CalculateDirection(arma::vec("0.3 0.4 1 0.6 0.7")), textSplit.CalculateDirection(arma::vec("0.3 0.4 1 0.6 0.7"))); } BOOST_AUTO_TEST_CASE(EmptyHoeffdingTreeTest) { using namespace mlpack::tree; data::DatasetInfo info(6); HoeffdingTree<> tree(info, 2); HoeffdingTree<> xmlTree(info, 3); HoeffdingTree<> binaryTree(info, 4); HoeffdingTree<> textTree(info, 5); SerializeObjectAll(tree, xmlTree, binaryTree, textTree); BOOST_REQUIRE_EQUAL(tree.NumChildren(), 0); BOOST_REQUIRE_EQUAL(xmlTree.NumChildren(), 0); BOOST_REQUIRE_EQUAL(binaryTree.NumChildren(), 0); BOOST_REQUIRE_EQUAL(textTree.NumChildren(), 0); } /** * Build a Hoeffding tree, then save it and make sure other trees can classify * as effectively. */ BOOST_AUTO_TEST_CASE(HoeffdingTreeTest) { using namespace mlpack::tree; arma::mat dataset(2, 400); arma::Row labels(400); for (size_t i = 0; i < 200; ++i) { dataset(0, 2 * i) = mlpack::math::RandInt(4); dataset(1, 2 * i) = mlpack::math::RandInt(2); dataset(0, 2 * i + 1) = mlpack::math::RandInt(4); dataset(1, 2 * i + 1) = mlpack::math::RandInt(2) + 2; labels[2 * i] = 0; labels[2 * i + 1] = 1; } // Make the features categorical. 
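// Each MapString() call registers one more category for the given
// dimension, so both dimensions end up with four categories ("a" through
// "d"), matching the integer values 0..3 generated above.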
  data::DatasetInfo info(2);
  info.MapString("a", 0);
  info.MapString("b", 0);
  info.MapString("c", 0);
  info.MapString("d", 0);
  info.MapString("a", 1);
  info.MapString("b", 1);
  info.MapString("c", 1);
  info.MapString("d", 1);

  HoeffdingTree<> tree(dataset, info, labels, 2, false /* no batch mode */);

  data::DatasetInfo xmlInfo(1);
  HoeffdingTree<> xmlTree(xmlInfo, 1);
  data::DatasetInfo binaryInfo(5);
  HoeffdingTree<> binaryTree(binaryInfo, 6);
  data::DatasetInfo textInfo(7);
  HoeffdingTree<> textTree(textInfo, 100);

  SerializeObjectAll(tree, xmlTree, textTree, binaryTree);

  BOOST_REQUIRE_EQUAL(tree.NumChildren(), xmlTree.NumChildren());
  BOOST_REQUIRE_EQUAL(tree.NumChildren(), textTree.NumChildren());
  BOOST_REQUIRE_EQUAL(tree.NumChildren(), binaryTree.NumChildren());
  BOOST_REQUIRE_EQUAL(tree.SplitDimension(), xmlTree.SplitDimension());
  BOOST_REQUIRE_EQUAL(tree.SplitDimension(), textTree.SplitDimension());
  BOOST_REQUIRE_EQUAL(tree.SplitDimension(), binaryTree.SplitDimension());

  for (size_t i = 0; i < tree.NumChildren(); ++i)
  {
    BOOST_REQUIRE_EQUAL(tree.Child(i).NumChildren(), 0);
    BOOST_REQUIRE_EQUAL(xmlTree.Child(i).NumChildren(), 0);
    BOOST_REQUIRE_EQUAL(binaryTree.Child(i).NumChildren(), 0);
    BOOST_REQUIRE_EQUAL(textTree.Child(i).NumChildren(), 0);

    BOOST_REQUIRE_EQUAL(tree.Child(i).SplitDimension(),
        xmlTree.Child(i).SplitDimension());
    BOOST_REQUIRE_EQUAL(tree.Child(i).SplitDimension(),
        textTree.Child(i).SplitDimension());
    BOOST_REQUIRE_EQUAL(tree.Child(i).SplitDimension(),
        binaryTree.Child(i).SplitDimension());
  }
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/sgd_test.cpp

/**
 * @file sgd_test.cpp
 * @author Ryan Curtin
 *
 * Test file for SGD (stochastic gradient descent).
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/sgd/sgd.hpp>
#include <mlpack/core/optimizers/sgd/test_function.hpp>
#include <mlpack/core/optimizers/lbfgs/test_functions.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace std;
using namespace arma;
using namespace mlpack;
using namespace mlpack::optimization;
using namespace mlpack::optimization::test;

BOOST_AUTO_TEST_SUITE(SGDTest);

BOOST_AUTO_TEST_CASE(SimpleSGDTestFunction)
{
  SGDTestFunction f;
  SGD<SGDTestFunction> s(f, 0.0003, 5000000, 1e-9, true);

  arma::mat coordinates = f.GetInitialPoint();
  double result = s.Optimize(coordinates);

  BOOST_REQUIRE_CLOSE(result, -1.0, 0.05);
  BOOST_REQUIRE_SMALL(coordinates[0], 1e-3);
  BOOST_REQUIRE_SMALL(coordinates[1], 1e-7);
  BOOST_REQUIRE_SMALL(coordinates[2], 1e-7);
}

BOOST_AUTO_TEST_CASE(GeneralizedRosenbrockTest)
{
  // Loop over several variants.
  for (size_t i = 10; i < 50; i += 5)
  {
    // Create the generalized Rosenbrock function.
    GeneralizedRosenbrockFunction f(i);
    SGD<GeneralizedRosenbrockFunction> s(f, 0.001, 0, 1e-15, true);

    arma::mat coordinates = f.GetInitialPoint();
    double result = s.Optimize(coordinates);

    BOOST_REQUIRE_SMALL(result, 1e-10);
    for (size_t j = 0; j < i; ++j)
      BOOST_REQUIRE_CLOSE(coordinates[j], (double) 1.0, 1e-3);
  }
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/softmax_regression_test.cpp

/**
 * @file softmax_regression_test.cpp
 * @author Siddharth Agrawal
 *
 * Test the SoftmaxRegression class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/softmax_regression/softmax_regression.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::regression;
using namespace mlpack::distribution;
using namespace mlpack::optimization;

BOOST_AUTO_TEST_SUITE(SoftmaxRegressionTest);

BOOST_AUTO_TEST_CASE(SoftmaxRegressionFunctionEvaluate)
{
  const size_t points = 1000;
  const size_t trials = 50;
  const size_t inputSize = 10;
  const size_t numClasses = 5;

  // Initialize a random dataset.
  arma::mat data;
  data.randu(inputSize, points);

  // Create random class labels.
  arma::Row<size_t> labels(points);
  for (size_t i = 0; i < points; i++)
    labels(i) = math::RandInt(0, numClasses);

  // Create a SoftmaxRegressionFunction. Regularization term ignored.
  SoftmaxRegressionFunction srf(data, labels, numClasses, 0);

  // Run a number of trials.
  for (size_t i = 0; i < trials; i++)
  {
    // Create a random set of parameters.
    arma::mat parameters;
    parameters.randu(numClasses, inputSize);

    double logLikelihood = 0;

    // Compute error for each training example.
    for (size_t j = 0; j < points; j++)
    {
      arma::mat hypothesis, probabilities;

      hypothesis = arma::exp(parameters * data.col(j));
      probabilities = hypothesis / arma::accu(hypothesis);

      logLikelihood += log(probabilities(labels(j), 0));
    }
    logLikelihood /= points;

    // Compare with the value returned by the function.
    BOOST_REQUIRE_CLOSE(srf.Evaluate(parameters), -logLikelihood, 1e-5);
  }
}

BOOST_AUTO_TEST_CASE(SoftmaxRegressionFunctionRegularizationEvaluate)
{
  const size_t points = 1000;
  const size_t trials = 50;
  const size_t inputSize = 10;
  const size_t numClasses = 5;

  // Initialize a random dataset.
  arma::mat data;
  data.randu(inputSize, points);

  // Create random class labels.
  arma::Row<size_t> labels(points);
  for (size_t i = 0; i < points; i++)
    labels(i) = math::RandInt(0, numClasses);

  // 3 objects for comparing regularization costs.
  SoftmaxRegressionFunction srfNoReg(data, labels, numClasses, 0);
  SoftmaxRegressionFunction srfSmallReg(data, labels, numClasses, 1);
  SoftmaxRegressionFunction srfBigReg(data, labels, numClasses, 20);

  // Run a number of trials.
  for (size_t i = 0; i < trials; i++)
  {
    // Create a random set of parameters.
    arma::mat parameters;
    parameters.randu(numClasses, inputSize);

    double wL2SquaredNorm;
    wL2SquaredNorm = arma::accu(parameters % parameters);

    // Calculate regularization terms.
    const double smallRegTerm = 0.5 * wL2SquaredNorm;
    const double bigRegTerm = 10 * wL2SquaredNorm;

    BOOST_REQUIRE_CLOSE(srfNoReg.Evaluate(parameters) + smallRegTerm,
        srfSmallReg.Evaluate(parameters), 1e-5);
    BOOST_REQUIRE_CLOSE(srfNoReg.Evaluate(parameters) + bigRegTerm,
        srfBigReg.Evaluate(parameters), 1e-5);
  }
}

BOOST_AUTO_TEST_CASE(SoftmaxRegressionFunctionGradient)
{
  const size_t points = 1000;
  const size_t inputSize = 10;
  const size_t numClasses = 5;

  // Initialize a random dataset.
  arma::mat data;
  data.randu(inputSize, points);

  // Create random class labels.
  arma::Row<size_t> labels(points);
  for (size_t i = 0; i < points; i++)
    labels(i) = math::RandInt(0, numClasses);

  // 2 objects for the 2 terms in the cost function. Each term contributes
  // towards the gradient and thus needs to be checked independently.
  SoftmaxRegressionFunction srf1(data, labels, numClasses, 0);
  SoftmaxRegressionFunction srf2(data, labels, numClasses, 20);

  // Create a random set of parameters.
  arma::mat parameters;
  parameters.randu(numClasses, inputSize);

  // Get gradients for the current parameters.
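  // Each analytic gradient entry is checked below against a central-difference
  // approximation of the form (f(x + eps) - f(x - eps)) / (2 * eps).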
  arma::mat gradient1, gradient2;
  srf1.Gradient(parameters, gradient1);
  srf2.Gradient(parameters, gradient2);

  // Perturbation constant.
  const double epsilon = 0.0001;
  double costPlus1, costMinus1, numGradient1;
  double costPlus2, costMinus2, numGradient2;

  // For each parameter.
  for (size_t i = 0; i < numClasses; i++)
  {
    for (size_t j = 0; j < inputSize; j++)
    {
      // Perturb parameter with a positive constant and get costs.
      parameters(i, j) += epsilon;
      costPlus1 = srf1.Evaluate(parameters);
      costPlus2 = srf2.Evaluate(parameters);

      // Perturb parameter with a negative constant and get costs.
      parameters(i, j) -= 2 * epsilon;
      costMinus1 = srf1.Evaluate(parameters);
      costMinus2 = srf2.Evaluate(parameters);

      // Compute numerical gradients using the costs calculated above.
      numGradient1 = (costPlus1 - costMinus1) / (2 * epsilon);
      numGradient2 = (costPlus2 - costMinus2) / (2 * epsilon);

      // Restore the parameter value.
      parameters(i, j) += epsilon;

      // Compare numerical and backpropagation gradient values.
      BOOST_REQUIRE_CLOSE(numGradient1, gradient1(i, j), 1e-2);
      BOOST_REQUIRE_CLOSE(numGradient2, gradient2(i, j), 1e-2);
    }
  }
}

BOOST_AUTO_TEST_CASE(SoftmaxRegressionTwoClasses)
{
  const size_t points = 1000;
  const size_t inputSize = 3;
  const size_t numClasses = 2;
  const double lambda = 0.5;

  // Generate two-Gaussian dataset.
  GaussianDistribution g1(arma::vec("1.0 9.0 1.0"), arma::eye(3, 3));
  GaussianDistribution g2(arma::vec("4.0 3.0 4.0"), arma::eye(3, 3));

  arma::mat data(inputSize, points);
  arma::Row<size_t> labels(points);

  for (size_t i = 0; i < points / 2; i++)
  {
    data.col(i) = g1.Random();
    labels(i) = 0;
  }
  for (size_t i = points / 2; i < points; i++)
  {
    data.col(i) = g2.Random();
    labels(i) = 1;
  }

  // Train softmax regression object.
  SoftmaxRegression<> sr(data, labels, numClasses, lambda);

  // Compare training accuracy to 100.
  const double acc = sr.ComputeAccuracy(data, labels);
  BOOST_REQUIRE_CLOSE(acc, 100.0, 0.5);

  // Create test dataset.
  for (size_t i = 0; i < points / 2; i++)
  {
    data.col(i) = g1.Random();
    labels(i) = 0;
  }
  for (size_t i = points / 2; i < points; i++)
  {
    data.col(i) = g2.Random();
    labels(i) = 1;
  }

  // Compare test accuracy to 100.
  const double testAcc = sr.ComputeAccuracy(data, labels);
  BOOST_REQUIRE_CLOSE(testAcc, 100.0, 0.6);
}

BOOST_AUTO_TEST_CASE(SoftmaxRegressionFitIntercept)
{
  // Generate a two-Gaussian dataset,
  // which can't be separated without adding the intercept term.
  GaussianDistribution g1(arma::vec("1.0 1.0 1.0"), arma::eye(3, 3));
  GaussianDistribution g2(arma::vec("9.0 9.0 9.0"), arma::eye(3, 3));

  arma::mat data(3, 1000);
  arma::Row<size_t> responses(1000);
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  // Now train a softmax regression object on it.
  SoftmaxRegression<> lr(data, responses, 2, 0.01, true);

  // Ensure that the error is close to zero.
  const double acc = lr.ComputeAccuracy(data, responses);
  BOOST_REQUIRE_CLOSE(acc, 100.0, 2.0);

  // Create a test set.
  for (size_t i = 0; i < 500; ++i)
  {
    data.col(i) = g1.Random();
    responses[i] = 0;
  }
  for (size_t i = 500; i < 1000; ++i)
  {
    data.col(i) = g2.Random();
    responses[i] = 1;
  }

  // Ensure that the error is close to zero.
  const double testAcc = lr.ComputeAccuracy(data, responses);
  BOOST_REQUIRE_CLOSE(testAcc, 100.0, 2.0);
}

BOOST_AUTO_TEST_CASE(SoftmaxRegressionMultipleClasses)
{
  const size_t points = 5000;
  const size_t inputSize = 5;
  const size_t numClasses = 5;
  const double lambda = 0.5;

  // Generate five-Gaussian dataset.
  arma::mat identity = arma::eye(5, 5);
  GaussianDistribution g1(arma::vec("1.0 9.0 1.0 2.0 2.0"), identity);
  GaussianDistribution g2(arma::vec("4.0 3.0 4.0 2.0 2.0"), identity);
  GaussianDistribution g3(arma::vec("3.0 2.0 7.0 0.0 5.0"), identity);
  GaussianDistribution g4(arma::vec("4.0 1.0 1.0 2.0 7.0"), identity);
  GaussianDistribution g5(arma::vec("1.0 0.0 1.0 8.0 3.0"), identity);

  arma::mat data(inputSize, points);
  arma::Row<size_t> labels(points);

  for (size_t i = 0; i < points / 5; i++)
  {
    data.col(i) = g1.Random();
    labels(i) = 0;
  }
  for (size_t i = points / 5; i < (2 * points) / 5; i++)
  {
    data.col(i) = g2.Random();
    labels(i) = 1;
  }
  for (size_t i = (2 * points) / 5; i < (3 * points) / 5; i++)
  {
    data.col(i) = g3.Random();
    labels(i) = 2;
  }
  for (size_t i = (3 * points) / 5; i < (4 * points) / 5; i++)
  {
    data.col(i) = g4.Random();
    labels(i) = 3;
  }
  for (size_t i = (4 * points) / 5; i < points; i++)
  {
    data.col(i) = g5.Random();
    labels(i) = 4;
  }

  // Train softmax regression object.
  SoftmaxRegression<> sr(data, labels, numClasses, lambda);

  // Compare training accuracy to 100.
  const double acc = sr.ComputeAccuracy(data, labels);
  BOOST_REQUIRE_CLOSE(acc, 100.0, 2.0);

  // Create test dataset.
  for (size_t i = 0; i < points / 5; i++)
  {
    data.col(i) = g1.Random();
    labels(i) = 0;
  }
  for (size_t i = points / 5; i < (2 * points) / 5; i++)
  {
    data.col(i) = g2.Random();
    labels(i) = 1;
  }
  for (size_t i = (2 * points) / 5; i < (3 * points) / 5; i++)
  {
    data.col(i) = g3.Random();
    labels(i) = 2;
  }
  for (size_t i = (3 * points) / 5; i < (4 * points) / 5; i++)
  {
    data.col(i) = g4.Random();
    labels(i) = 3;
  }
  for (size_t i = (4 * points) / 5; i < points; i++)
  {
    data.col(i) = g5.Random();
    labels(i) = 4;
  }

  // Compare test accuracy to 100.
  const double testAcc = sr.ComputeAccuracy(data, labels);
  BOOST_REQUIRE_CLOSE(testAcc, 100.0, 2.0);
}

BOOST_AUTO_TEST_CASE(SoftmaxRegressionTrainTest)
{
  // Make sure a SoftmaxRegression object trained with Train() operates the
  // same as a SoftmaxRegression object trained in the constructor.
  arma::mat dataset = arma::randu(5, 1000);
  arma::Row<size_t> labels(1000);
  for (size_t i = 0; i < 500; ++i)
    labels[i] = size_t(0.0);
  for (size_t i = 500; i < 1000; ++i)
    labels[i] = size_t(1.0);

  // This should be the same as the default parameters given by
  // SoftmaxRegression.
  SoftmaxRegressionFunction srf(dataset, labels, 2, 0.0001, false);
  L_BFGS<SoftmaxRegressionFunction> lbfgs(srf);
  SoftmaxRegression<> sr(lbfgs);

  SoftmaxRegression<> sr2(dataset.n_rows, 2);
  sr2.Parameters() = srf.GetInitialPoint(); // Start from the same place.
  sr2.Train(dataset, labels, 2);

  // Ensure that the parameters are the same.
  BOOST_REQUIRE_EQUAL(sr.Parameters().n_rows, sr2.Parameters().n_rows);
  BOOST_REQUIRE_EQUAL(sr.Parameters().n_cols, sr2.Parameters().n_cols);
  for (size_t i = 0; i < sr.Parameters().n_elem; ++i)
  {
    if (std::abs(sr.Parameters()[i]) < 1e-4)
      BOOST_REQUIRE_SMALL(sr2.Parameters()[i], 1e-4);
    else
      BOOST_REQUIRE_CLOSE(sr.Parameters()[i], sr2.Parameters()[i], 1e-4);
  }
}

BOOST_AUTO_TEST_CASE(SoftmaxRegressionOptimizerTrainTest)
{
  // The same as the previous test, just passing in an instantiated optimizer.
  arma::mat dataset = arma::randu(5, 1000);
  arma::Row<size_t> labels(1000);
  for (size_t i = 0; i < 500; ++i)
    labels[i] = size_t(0.0);
  for (size_t i = 500; i < 1000; ++i)
    labels[i] = size_t(1.0);

  SoftmaxRegressionFunction srf(dataset, labels, 2, 0.01, true);
  L_BFGS<SoftmaxRegressionFunction> lbfgs(srf);
  SoftmaxRegression<> sr(lbfgs);

  SoftmaxRegression<> sr2(dataset.n_rows, 2, true);
  L_BFGS<SoftmaxRegressionFunction> lbfgs2(srf);
  sr2.Parameters() = srf.GetInitialPoint();
  sr2.Train(lbfgs2);

  // Ensure that the parameters are the same.
  BOOST_REQUIRE_EQUAL(sr.Parameters().n_rows, sr2.Parameters().n_rows);
  BOOST_REQUIRE_EQUAL(sr.Parameters().n_cols, sr2.Parameters().n_cols);
  for (size_t i = 0; i < sr.Parameters().n_elem; ++i)
  {
    if (std::abs(sr.Parameters()[i]) < 1e-5)
      BOOST_REQUIRE_SMALL(sr2.Parameters()[i], 1e-5);
    else
      BOOST_REQUIRE_CLOSE(sr.Parameters()[i], sr2.Parameters()[i], 1e-5);
  }
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/sort_policy_test.cpp

/**
 * @file sort_policy_test.cpp
 * @author Ryan Curtin
 *
 * Tests for each of the implementations of the SortPolicy class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/tree/binary_space_tree.hpp>

// Classes to test.
#include <mlpack/methods/neighbor_search/sort_policies/nearest_neighbor_sort.hpp>
#include <mlpack/methods/neighbor_search/sort_policies/furthest_neighbor_sort.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::neighbor;
using namespace mlpack::bound;
using namespace mlpack::tree;
using namespace mlpack::metric;

BOOST_AUTO_TEST_SUITE(SortPolicyTest);

// Tests for NearestNeighborSort

/**
 * Ensure the best distance for nearest neighbors is 0.
 */
BOOST_AUTO_TEST_CASE(NnsBestDistance)
{
  BOOST_REQUIRE(NearestNeighborSort::BestDistance() == 0);
}

/**
 * Ensure the worst distance for nearest neighbors is DBL_MAX.
 */
BOOST_AUTO_TEST_CASE(NnsWorstDistance)
{
  BOOST_REQUIRE(NearestNeighborSort::WorstDistance() == DBL_MAX);
}

/**
 * Make sure the comparison works for values strictly less than the reference.
 */
BOOST_AUTO_TEST_CASE(NnsIsBetterStrict)
{
  BOOST_REQUIRE(NearestNeighborSort::IsBetter(5.0, 6.0) == true);
}

/**
 * Warn in case the comparison is not strict.
 */
BOOST_AUTO_TEST_CASE(NnsIsBetterNotStrict)
{
  BOOST_WARN(NearestNeighborSort::IsBetter(6.0, 6.0) == true);
}

/**
 * Very simple sanity check to ensure that bounds are working alright. We will
 * use a one-dimensional bound for simplicity.
 */
BOOST_AUTO_TEST_CASE(NnsNodeToNodeDistance)
{
  // Well, there's no easy way to make HRectBounds the way we want, so we have
  // to make them and then expand the region to include new points.
  arma::mat dataset("1");
  typedef KDTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  TreeType nodeOne(dataset);
  arma::vec utility(1);
  utility[0] = 0;

  nodeOne.Bound() = HRectBound<EuclideanDistance>(1);
  nodeOne.Bound() |= utility;
  utility[0] = 1;
  nodeOne.Bound() |= utility;

  TreeType nodeTwo(dataset);
  nodeTwo.Bound() = HRectBound<EuclideanDistance>(1);

  utility[0] = 5;
  nodeTwo.Bound() |= utility;
  utility[0] = 6;
  nodeTwo.Bound() |= utility;

  // This should use the L2 distance.
  BOOST_REQUIRE_CLOSE(NearestNeighborSort::BestNodeToNodeDistance(&nodeOne,
      &nodeTwo), 4.0, 1e-5);

  // And another just to be sure, from the other side.
  nodeTwo.Bound().Clear();
  utility[0] = -2;
  nodeTwo.Bound() |= utility;
  utility[0] = -1;
  nodeTwo.Bound() |= utility;

  // Again, the distance is the L2 distance.
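  // nodeOne spans [0, 1] and nodeTwo now spans [-2, -1], so the closest pair
  // of points is 0 and -1, at distance 1.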
  BOOST_REQUIRE_CLOSE(NearestNeighborSort::BestNodeToNodeDistance(&nodeOne,
      &nodeTwo), 1.0, 1e-5);

  // Now, when the bounds overlap.
  nodeTwo.Bound().Clear();
  utility[0] = -0.5;
  nodeTwo.Bound() |= utility;
  utility[0] = 0.5;
  nodeTwo.Bound() |= utility;

  BOOST_REQUIRE_SMALL(NearestNeighborSort::BestNodeToNodeDistance(&nodeOne,
      &nodeTwo), 1e-5);
}

/**
 * Another very simple sanity check for the point-to-node case, again in one
 * dimension.
 */
BOOST_AUTO_TEST_CASE(NnsPointToNodeDistance)
{
  // Well, there's no easy way to make HRectBounds the way we want, so we have
  // to make them and then expand the region to include new points.
  arma::vec utility(1);
  utility[0] = 0;

  arma::mat dataset("1");
  typedef KDTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  TreeType node(dataset);
  node.Bound() = HRectBound<EuclideanDistance>(1);
  node.Bound() |= utility;
  utility[0] = 1;
  node.Bound() |= utility;

  arma::vec point(1);
  point[0] = -0.5;

  // The distance is the L2 distance.
  BOOST_REQUIRE_CLOSE(NearestNeighborSort::BestPointToNodeDistance(point,
      &node), 0.5, 1e-5);

  // Now from the other side of the bound.
  point[0] = 1.5;

  BOOST_REQUIRE_CLOSE(NearestNeighborSort::BestPointToNodeDistance(point,
      &node), 0.5, 1e-5);

  // And now when the point is inside the bound.
  point[0] = 0.5;

  BOOST_REQUIRE_SMALL(NearestNeighborSort::BestPointToNodeDistance(point,
      &node), 1e-5);
}

// Tests for FurthestNeighborSort

/**
 * Ensure the best distance for furthest neighbors is DBL_MAX.
 */
BOOST_AUTO_TEST_CASE(FnsBestDistance)
{
  BOOST_REQUIRE(FurthestNeighborSort::BestDistance() == DBL_MAX);
}

/**
 * Ensure the worst distance for furthest neighbors is 0.
 */
BOOST_AUTO_TEST_CASE(FnsWorstDistance)
{
  BOOST_REQUIRE(FurthestNeighborSort::WorstDistance() == 0);
}

/**
 * Make sure the comparison works for values strictly greater than the
 * reference.
 */
BOOST_AUTO_TEST_CASE(FnsIsBetterStrict)
{
  BOOST_REQUIRE(FurthestNeighborSort::IsBetter(5.0, 4.0) == true);
}

/**
 * Warn in case the comparison is not strict.
 */
BOOST_AUTO_TEST_CASE(FnsIsBetterNotStrict)
{
  BOOST_WARN(FurthestNeighborSort::IsBetter(6.0, 6.0) == true);
}

/**
 * Very simple sanity check to ensure that bounds are working alright. We will
 * use a one-dimensional bound for simplicity.
 */
BOOST_AUTO_TEST_CASE(FnsNodeToNodeDistance)
{
  // Well, there's no easy way to make HRectBounds the way we want, so we have
  // to make them and then expand the region to include new points.
  arma::vec utility(1);
  utility[0] = 0;

  arma::mat dataset("1");
  typedef KDTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  TreeType nodeOne(dataset);
  nodeOne.Bound() = HRectBound<EuclideanDistance>(1);
  nodeOne.Bound() |= utility;
  utility[0] = 1;
  nodeOne.Bound() |= utility;

  TreeType nodeTwo(dataset);
  nodeTwo.Bound() = HRectBound<EuclideanDistance>(1);
  utility[0] = 5;
  nodeTwo.Bound() |= utility;
  utility[0] = 6;
  nodeTwo.Bound() |= utility;

  // This should use the L2 distance.
  BOOST_REQUIRE_CLOSE(FurthestNeighborSort::BestNodeToNodeDistance(&nodeOne,
      &nodeTwo), 6.0, 1e-5);

  // And another just to be sure, from the other side.
  nodeTwo.Bound().Clear();
  utility[0] = -2;
  nodeTwo.Bound() |= utility;
  utility[0] = -1;
  nodeTwo.Bound() |= utility;

  // Again, the distance is the L2 distance.
  BOOST_REQUIRE_CLOSE(FurthestNeighborSort::BestNodeToNodeDistance(&nodeOne,
      &nodeTwo), 3.0, 1e-5);

  // Now, when the bounds overlap.
  nodeTwo.Bound().Clear();
  utility[0] = -0.5;
  nodeTwo.Bound() |= utility;
  utility[0] = 0.5;
  nodeTwo.Bound() |= utility;

  BOOST_REQUIRE_CLOSE(FurthestNeighborSort::BestNodeToNodeDistance(&nodeOne,
      &nodeTwo), 1.5, 1e-5);
}

/**
 * Another very simple sanity check for the point-to-node case, again in one
 * dimension.
 */
BOOST_AUTO_TEST_CASE(FnsPointToNodeDistance)
{
  // Well, there's no easy way to make HRectBounds the way we want, so we have
  // to make them and then expand the region to include new points.
  arma::vec utility(1);
  utility[0] = 0;

  arma::mat dataset("1");
  typedef KDTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  TreeType node(dataset);
  node.Bound() = HRectBound<EuclideanDistance>(1);
  node.Bound() |= utility;
  utility[0] = 1;
  node.Bound() |= utility;

  arma::vec point(1);
  point[0] = -0.5;

  // The distance is the L2 distance.
  BOOST_REQUIRE_CLOSE(FurthestNeighborSort::BestPointToNodeDistance(point,
      &node), 1.5, 1e-5);

  // Now from the other side of the bound.
  point[0] = 1.5;

  BOOST_REQUIRE_CLOSE(FurthestNeighborSort::BestPointToNodeDistance(point,
      &node), 1.5, 1e-5);

  // And now when the point is inside the bound.
  point[0] = 0.5;

  BOOST_REQUIRE_CLOSE(FurthestNeighborSort::BestPointToNodeDistance(point,
      &node), 0.5, 1e-5);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/sparse_autoencoder_test.cpp

/**
 * @file sparse_autoencoder_test.cpp
 * @author Siddharth Agrawal
 *
 * Test the SparseAutoencoder class.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/sparse_autoencoder/sparse_autoencoder.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::nn;

BOOST_AUTO_TEST_SUITE(SparseAutoencoderTest);

BOOST_AUTO_TEST_CASE(SparseAutoencoderFunctionEvaluate)
{
  const size_t vSize = 5;
  const size_t hSize = 3;
  const size_t r = 2 * hSize + 1;
  const size_t c = vSize + 1;

  // Simple fake dataset.
  arma::mat data1("0.1 0.2 0.3 0.4 0.5;"
                  "0.1 0.2 0.3 0.4 0.5;"
                  "0.1 0.2 0.3 0.4 0.5;"
                  "0.1 0.2 0.3 0.4 0.5;"
                  "0.1 0.2 0.3 0.4 0.5");
  // Transpose of the above dataset.
  arma::mat data2 = data1.t();

  // Create a SparseAutoencoderFunction. Regularization and KL divergence terms
  // ignored.
  SparseAutoencoderFunction saf1(data1, vSize, hSize, 0, 0);

  // Test using first dataset. Values were calculated using Octave.
  BOOST_REQUIRE_CLOSE(saf1.Evaluate(arma::ones(r, c)), 1.190472606540, 1e-5);
  BOOST_REQUIRE_CLOSE(saf1.Evaluate(arma::zeros(r, c)), 0.150000000000, 1e-5);
  BOOST_REQUIRE_CLOSE(saf1.Evaluate(-arma::ones(r, c)), 0.048800332266, 1e-5);

  // Create a SparseAutoencoderFunction. Regularization and KL divergence terms
  // ignored.
  SparseAutoencoderFunction saf2(data2, vSize, hSize, 0, 0);

  // Test using second dataset. Values were calculated using Octave.
  BOOST_REQUIRE_CLOSE(saf2.Evaluate(arma::ones(r, c)), 1.197585812647, 1e-5);
  BOOST_REQUIRE_CLOSE(saf2.Evaluate(arma::zeros(r, c)), 0.150000000000, 1e-5);
  BOOST_REQUIRE_CLOSE(saf2.Evaluate(-arma::ones(r, c)), 0.063466617408, 1e-5);
}

BOOST_AUTO_TEST_CASE(SparseAutoencoderFunctionRandomEvaluate)
{
  const size_t points = 1000;
  const size_t trials = 50;
  const size_t vSize = 20;
  const size_t hSize = 10;
  const size_t l1 = hSize;
  const size_t l2 = vSize;
  const size_t l3 = 2 * hSize;

  // Initialize a random dataset.
  arma::mat data;
  data.randu(vSize, points);

  // Create a SparseAutoencoderFunction. Regularization and KL divergence terms
  // ignored.
  SparseAutoencoderFunction saf(data, vSize, hSize, 0, 0);

  // Run a number of trials.
  for (size_t i = 0; i < trials; i++)
  {
    // Create a random set of parameters.
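    // The (l3 + 1) x (l2 + 1) parameter matrix packs all weights and biases:
    // rows 0..l1-1 hold W1 (with b1 in the last column), rows l1..l3-1 hold
    // W2 (stored transposed), and row l3 holds b2.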
    arma::mat parameters;
    parameters.randu(l3 + 1, l2 + 1);

    double reconstructionError = 0;

    // Compute error for each training example.
    for (size_t j = 0; j < points; j++)
    {
      arma::mat hiddenLayer, outputLayer, diff;

      hiddenLayer = 1.0 /
          (1 + arma::exp(-(parameters.submat(0, 0, l1 - 1, l2 - 1) *
          data.col(j) + parameters.submat(0, l2, l1 - 1, l2))));
      outputLayer = 1.0 /
          (1 + arma::exp(-(parameters.submat(l1, 0, l3 - 1, l2 - 1).t() *
          hiddenLayer + parameters.submat(l3, 0, l3, l2 - 1).t())));
      diff = outputLayer - data.col(j);

      reconstructionError += 0.5 * arma::sum(arma::sum(diff % diff));
    }
    reconstructionError /= points;

    // Compare with the value returned by the function.
    BOOST_REQUIRE_CLOSE(saf.Evaluate(parameters), reconstructionError, 1e-5);
  }
}

BOOST_AUTO_TEST_CASE(SparseAutoencoderFunctionRegularizationEvaluate)
{
  const size_t points = 1000;
  const size_t trials = 50;
  const size_t vSize = 20;
  const size_t hSize = 10;
  const size_t l2 = vSize;
  const size_t l3 = 2 * hSize;

  // Initialize a random dataset.
  arma::mat data;
  data.randu(vSize, points);

  // 3 objects for comparing regularization costs.
  SparseAutoencoderFunction safNoReg(data, vSize, hSize, 0, 0);
  SparseAutoencoderFunction safSmallReg(data, vSize, hSize, 0.5, 0);
  SparseAutoencoderFunction safBigReg(data, vSize, hSize, 20, 0);

  // Run a number of trials.
  for (size_t i = 0; i < trials; i++)
  {
    // Create a random set of parameters.
    arma::mat parameters;
    parameters.randu(l3 + 1, l2 + 1);

    double wL2SquaredNorm;
    wL2SquaredNorm = arma::accu(parameters.submat(0, 0, l3 - 1, l2 - 1) %
        parameters.submat(0, 0, l3 - 1, l2 - 1));

    // Calculate regularization terms.
    const double smallRegTerm = 0.25 * wL2SquaredNorm;
    const double bigRegTerm = 10 * wL2SquaredNorm;

    BOOST_REQUIRE_CLOSE(safNoReg.Evaluate(parameters) + smallRegTerm,
        safSmallReg.Evaluate(parameters), 1e-5);
    BOOST_REQUIRE_CLOSE(safNoReg.Evaluate(parameters) + bigRegTerm,
        safBigReg.Evaluate(parameters), 1e-5);
  }
}

BOOST_AUTO_TEST_CASE(SparseAutoencoderFunctionKLDivergenceEvaluate)
{
  const size_t points = 1000;
  const size_t trials = 50;
  const size_t vSize = 20;
  const size_t hSize = 10;
  const size_t l1 = hSize;
  const size_t l2 = vSize;
  const size_t l3 = 2 * hSize;

  const double rho = 0.01;

  // Initialize a random dataset.
  arma::mat data;
  data.randu(vSize, points);

  // 3 objects for comparing divergence costs.
  SparseAutoencoderFunction safNoDiv(data, vSize, hSize, 0, 0, rho);
  SparseAutoencoderFunction safSmallDiv(data, vSize, hSize, 0, 5, rho);
  SparseAutoencoderFunction safBigDiv(data, vSize, hSize, 0, 20, rho);

  // Run a number of trials.
  for (size_t i = 0; i < trials; i++)
  {
    // Create a random set of parameters.
    arma::mat parameters;
    parameters.randu(l3 + 1, l2 + 1);

    arma::mat rhoCap;
    rhoCap.zeros(hSize, 1);

    // Compute hidden layer activations for each example.
    for (size_t j = 0; j < points; j++)
    {
      arma::mat hiddenLayer;

      hiddenLayer = 1.0 /
          (1 + arma::exp(-(parameters.submat(0, 0, l1 - 1, l2 - 1) *
          data.col(j) + parameters.submat(0, l2, l1 - 1, l2))));
      rhoCap += hiddenLayer;
    }
    rhoCap /= points;

    // Calculate divergence terms.
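    // Each term applies the weighted KL divergence between the target mean
    // activation rho and the observed mean activation rhoCap:
    //   KL(rho || rhoCap) = rho * log(rho / rhoCap)
    //                     + (1 - rho) * log((1 - rho) / (1 - rhoCap)).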
    const double smallDivTerm = 5 * arma::accu(rho * arma::log(rho / rhoCap) +
        (1 - rho) * arma::log((1 - rho) / (1 - rhoCap)));
    const double bigDivTerm = 20 * arma::accu(rho * arma::log(rho / rhoCap) +
        (1 - rho) * arma::log((1 - rho) / (1 - rhoCap)));

    BOOST_REQUIRE_CLOSE(safNoDiv.Evaluate(parameters) + smallDivTerm,
        safSmallDiv.Evaluate(parameters), 1e-5);
    BOOST_REQUIRE_CLOSE(safNoDiv.Evaluate(parameters) + bigDivTerm,
        safBigDiv.Evaluate(parameters), 1e-5);
  }
}

BOOST_AUTO_TEST_CASE(SparseAutoencoderFunctionGradient)
{
  const size_t points = 1000;
  const size_t vSize = 20;
  const size_t hSize = 10;
  const size_t l2 = vSize;
  const size_t l3 = 2 * hSize;

  // Initialize a random dataset.
  arma::mat data;
  data.randu(vSize, points);

  // 3 objects for the 3 terms in the cost function. Each term contributes
  // towards the gradient and thus needs to be checked independently.
  SparseAutoencoderFunction saf1(data, vSize, hSize, 0, 0);
  SparseAutoencoderFunction saf2(data, vSize, hSize, 20, 0);
  SparseAutoencoderFunction saf3(data, vSize, hSize, 20, 20);

  // Create a random set of parameters.
  arma::mat parameters;
  parameters.randu(l3 + 1, l2 + 1);

  // Get gradients for the current parameters.
  arma::mat gradient1, gradient2, gradient3;
  saf1.Gradient(parameters, gradient1);
  saf2.Gradient(parameters, gradient2);
  saf3.Gradient(parameters, gradient3);

  // Perturbation constant.
  const double epsilon = 0.0001;
  double costPlus1, costMinus1, numGradient1;
  double costPlus2, costMinus2, numGradient2;
  double costPlus3, costMinus3, numGradient3;

  // For each parameter.
  for (size_t i = 0; i <= l3; i++)
  {
    for (size_t j = 0; j <= l2; j++)
    {
      // Perturb parameter with a positive constant and get costs.
      parameters(i, j) += epsilon;
      costPlus1 = saf1.Evaluate(parameters);
      costPlus2 = saf2.Evaluate(parameters);
      costPlus3 = saf3.Evaluate(parameters);

      // Perturb parameter with a negative constant and get costs.
      parameters(i, j) -= 2 * epsilon;
      costMinus1 = saf1.Evaluate(parameters);
      costMinus2 = saf2.Evaluate(parameters);
      costMinus3 = saf3.Evaluate(parameters);

      // Compute numerical gradients using the costs calculated above.
      numGradient1 = (costPlus1 - costMinus1) / (2 * epsilon);
      numGradient2 = (costPlus2 - costMinus2) / (2 * epsilon);
      numGradient3 = (costPlus3 - costMinus3) / (2 * epsilon);

      // Restore the parameter value.
      parameters(i, j) += epsilon;

      // Compare numerical and backpropagation gradient values.
      BOOST_REQUIRE_CLOSE(numGradient1, gradient1(i, j), 1e-2);
      BOOST_REQUIRE_CLOSE(numGradient2, gradient2(i, j), 1e-2);
      BOOST_REQUIRE_CLOSE(numGradient3, gradient3(i, j), 1e-2);
    }
  }
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/sparse_coding_test.cpp

/**
 * @file sparse_coding_test.cpp
 *
 * Test for Sparse Coding.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */

// Note: We don't use BOOST_REQUIRE_CLOSE in the code below because we need
// to use FPC_WEAK, and it's not at all intuitive how to do that.
#include <mlpack/core.hpp>
#include <mlpack/methods/sparse_coding/sparse_coding.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"
#include "serialization.hpp"

using namespace arma;
using namespace mlpack;
using namespace mlpack::regression;
using namespace mlpack::sparse_coding;

BOOST_AUTO_TEST_SUITE(SparseCodingTest);

void SCVerifyCorrectness(vec beta, vec errCorr, double lambda)
{
  const double tol = 1e-12;
  size_t nDims = beta.n_elem;
  for (size_t j = 0; j < nDims; j++)
  {
    if (beta(j) == 0)
    {
      // Make sure that errCorr(j) <= lambda.
      BOOST_REQUIRE_SMALL(std::max(fabs(errCorr(j)) - lambda, 0.0), tol);
    }
    else if (beta(j) < 0)
    {
      // Make sure that errCorr(j) == lambda.
      BOOST_REQUIRE_SMALL(errCorr(j) - lambda, tol);
    }
    else // beta(j) > 0.
    {
      // Make sure that errCorr(j) == -lambda.
      BOOST_REQUIRE_SMALL(errCorr(j) + lambda, tol);
    }
  }
}

BOOST_AUTO_TEST_CASE(SparseCodingTestCodingStepLasso)
{
  double lambda1 = 0.1;
  uword nAtoms = 25;

  mat X;
  X.load("mnist_first250_training_4s_and_9s.arm");
  uword nPoints = X.n_cols;

  // Normalize each point since these are images.
  for (uword i = 0; i < nPoints; ++i)
  {
    X.col(i) /= norm(X.col(i), 2);
  }

  SparseCoding sc(nAtoms, lambda1);
  mat Z;
  DataDependentRandomInitializer::Initialize(X, 25, sc.Dictionary());
  sc.Encode(X, Z);

  mat D = sc.Dictionary();

  for (uword i = 0; i < nPoints; ++i)
  {
    vec errCorr = trans(D) * (D * Z.unsafe_col(i) - X.unsafe_col(i));
    SCVerifyCorrectness(Z.unsafe_col(i), errCorr, lambda1);
  }
}

BOOST_AUTO_TEST_CASE(SparseCodingTestCodingStepElasticNet)
{
  double lambda1 = 0.1;
  double lambda2 = 0.2;
  uword nAtoms = 25;

  mat X;
  X.load("mnist_first250_training_4s_and_9s.arm");
  uword nPoints = X.n_cols;

  // Normalize each point since these are images.
  for (uword i = 0; i < nPoints; ++i)
    X.col(i) /= norm(X.col(i), 2);

  SparseCoding sc(nAtoms, lambda1, lambda2);
  mat Z;
  DataDependentRandomInitializer::Initialize(X, 25, sc.Dictionary());
  sc.Encode(X, Z);

  mat D = sc.Dictionary();

  for (uword i = 0; i < nPoints; ++i)
  {
    vec errCorr = (trans(D) * D + lambda2 * eye(nAtoms, nAtoms)) *
        Z.unsafe_col(i) - trans(D) * X.unsafe_col(i);
    SCVerifyCorrectness(Z.unsafe_col(i), errCorr, lambda1);
  }
}

BOOST_AUTO_TEST_CASE(SparseCodingTestDictionaryStep)
{
  const double tol = 1e-6;

  double lambda1 = 0.1;
  uword nAtoms = 25;

  mat X;
  X.load("mnist_first250_training_4s_and_9s.arm");
  uword nPoints = X.n_cols;

  // Normalize each point since these are images.
  for (uword i = 0; i < nPoints; ++i)
    X.col(i) /= norm(X.col(i), 2);

  SparseCoding sc(nAtoms, lambda1, 0.0, 0, 0.01, tol);
  mat Z;
  DataDependentRandomInitializer::Initialize(X, 25, sc.Dictionary());
  sc.Encode(X, Z);

  mat D = sc.Dictionary();

  uvec adjacencies = find(Z);
  double normGradient = sc.OptimizeDictionary(X, Z, adjacencies);

  BOOST_REQUIRE_SMALL(normGradient, tol);
}

BOOST_AUTO_TEST_CASE(SerializationTest)
{
  mat X = randu(100, 100);
  size_t nAtoms = 25;

  SparseCoding sc(nAtoms, 0.05, 0.1);
  sc.Train(X);

  mat Y = randu(100, 200);
  mat codes;
  sc.Encode(Y, codes);

  SparseCoding scXml(50, 0.01), scText(nAtoms, 0.05), scBinary(0, 0.0);
  SerializeObjectAll(sc, scXml, scText, scBinary);

  CheckMatrices(sc.Dictionary(), scXml.Dictionary(), scText.Dictionary(),
      scBinary.Dictionary());

  mat xmlCodes, textCodes, binaryCodes;
  scXml.Encode(Y, xmlCodes);
  scText.Encode(Y, textCodes);
  scBinary.Encode(Y, binaryCodes);

  CheckMatrices(codes, xmlCodes, textCodes, binaryCodes);

  // Check the parameters, too.
  BOOST_REQUIRE_EQUAL(sc.Atoms(), scXml.Atoms());
  BOOST_REQUIRE_EQUAL(sc.Atoms(), scText.Atoms());
  BOOST_REQUIRE_EQUAL(sc.Atoms(), scBinary.Atoms());

  BOOST_REQUIRE_CLOSE(sc.Lambda1(), scXml.Lambda1(), 1e-5);
  BOOST_REQUIRE_CLOSE(sc.Lambda1(), scText.Lambda1(), 1e-5);
  BOOST_REQUIRE_CLOSE(sc.Lambda1(), scBinary.Lambda1(), 1e-5);

  BOOST_REQUIRE_CLOSE(sc.Lambda2(), scXml.Lambda2(), 1e-5);
  BOOST_REQUIRE_CLOSE(sc.Lambda2(), scText.Lambda2(), 1e-5);
  BOOST_REQUIRE_CLOSE(sc.Lambda2(), scBinary.Lambda2(), 1e-5);

  BOOST_REQUIRE_EQUAL(sc.MaxIterations(), scXml.MaxIterations());
  BOOST_REQUIRE_EQUAL(sc.MaxIterations(), scText.MaxIterations());
  BOOST_REQUIRE_EQUAL(sc.MaxIterations(), scBinary.MaxIterations());

  BOOST_REQUIRE_CLOSE(sc.ObjTolerance(), scXml.ObjTolerance(), 1e-5);
  BOOST_REQUIRE_CLOSE(sc.ObjTolerance(), scText.ObjTolerance(), 1e-5);
  BOOST_REQUIRE_CLOSE(sc.ObjTolerance(), scBinary.ObjTolerance(), 1e-5);

  BOOST_REQUIRE_CLOSE(sc.NewtonTolerance(), scXml.NewtonTolerance(), 1e-5);
  BOOST_REQUIRE_CLOSE(sc.NewtonTolerance(), scText.NewtonTolerance(), 1e-5);
  BOOST_REQUIRE_CLOSE(sc.NewtonTolerance(), scBinary.NewtonTolerance(), 1e-5);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/spill_tree_test.cpp

/**
 * @file spill_tree_test.cpp
 * @author Marcos Pividori
 *
 * Tests for the SpillTree class. This should ensure that the class works
 * correctly and that subsequent changes don't break anything.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/tree/spill_tree.hpp>
#include <stack>

#include <boost/test/unit_test.hpp>

using namespace mlpack;
using namespace mlpack::tree;
using namespace mlpack::metric;

BOOST_AUTO_TEST_SUITE(SpillTreeTest);

/**
 * Test to make sure the tree contains the correct number of points after
 * it is constructed. Also, it checks some invariants in the relation between
 * parent and child nodes.
 */
BOOST_AUTO_TEST_CASE(SpillTreeConstructionCountTest)
{
  arma::mat dataset;
  dataset.randu(3, 1000); // 1000 points in 3 dimensions.

  typedef SPTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  // When the overlapping buffer is 0, there shouldn't be repeated points.
  TreeType tree1(dataset, 0);
  TreeType tree2 = tree1;

  BOOST_REQUIRE_EQUAL(tree1.NumDescendants(), 1000);
  BOOST_REQUIRE_EQUAL(tree2.NumDescendants(), 1000);

  // When the overlapping buffer is greater than 0, it is possible to have
  // repeated points. So, let's check, node by node, that the number of
  // descendants equals the node's own point count plus the descendants of its
  // children.
  TreeType tree3(dataset, 0.5);

  std::stack<TreeType*> nodes;
  nodes.push(&tree3);
  while (!nodes.empty())
  {
    TreeType* node = nodes.top();
    nodes.pop();

    size_t numDesc = node->NumPoints();

    if (node->Left())
    {
      nodes.push(node->Left());
      numDesc += node->Left()->NumDescendants();
    }

    if (node->Right())
    {
      nodes.push(node->Right());
      numDesc += node->Right()->NumDescendants();
    }

    if (node->IsLeaf())
      BOOST_REQUIRE_EQUAL(node->NumPoints(), node->NumDescendants());
    else
      BOOST_REQUIRE_EQUAL(node->NumPoints(), 0);

    BOOST_REQUIRE_EQUAL(node->NumDescendants(), numDesc);
  }
}

/**
 * Test to check that parents and children are set correctly.
 */
BOOST_AUTO_TEST_CASE(SpillTreeConstructionParentTest)
{
  arma::mat dataset;
  dataset.randu(3, 1000); // 1000 points in 3 dimensions.
  typedef SPTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  TreeType tree(dataset, 0.5);

  std::stack<TreeType*> nodes;
  nodes.push(&tree);
  while (!nodes.empty())
  {
    TreeType* node = nodes.top();
    nodes.pop();

    if (node->Left())
    {
      nodes.push(node->Left());
      BOOST_REQUIRE_EQUAL(node, node->Left()->Parent());
    }

    if (node->Right())
    {
      nodes.push(node->Right());
      BOOST_REQUIRE_EQUAL(node, node->Right()->Parent());
    }
  }
}

/**
 * Auxiliary function to execute the same test for different flavours of Spill
 * Trees.
 */
template<typename SpillType>
void SpillTreeHyperplaneTestAux()
{
  arma::mat dataset;
  dataset.randu(3, 1000); // 1000 points in 3 dimensions.

  for (size_t cases = 0; cases < 3; cases++)
  {
    double tau = cases * 0.05;

    // Let's check, node by node, that points in the left child are considered
    // to the left by the splitting hyperplane, and the same for points in the
    // right child.
    SpillType tree(dataset, tau);

    std::stack<SpillType*> nodes;
    nodes.push(&tree);
    while (!nodes.empty())
    {
      SpillType* node = nodes.top();
      nodes.pop();

      if (node->Overlap())
      {
        // We have an overlapping node.
        if (node->Left())
        {
          // Let's check that points in the left child are projected to values
          // in the range: (-inf, tau]
          size_t numDesc = node->Left()->NumDescendants();
          for (size_t i = 0; i < numDesc; i++)
          {
            size_t descIndex = node->Left()->Descendant(i);
            BOOST_REQUIRE_LE(
                node->Hyperplane().Project(node->Dataset().col(descIndex)),
                tau);
          }
        }
        if (node->Right())
        {
          // Let's check that points in the right child are projected to values
          // in the range: (-tau, inf)
          size_t numDesc = node->Right()->NumDescendants();
          for (size_t i = 0; i < numDesc; i++)
          {
            size_t descIndex = node->Right()->Descendant(i);
            BOOST_REQUIRE_GT(
                node->Hyperplane().Project(node->Dataset().col(descIndex)),
                -tau);
          }
        }
      }
      else
      {
        // We have a non-overlapping node.
        if (node->Left())
        {
          // Let's check that points in the left child are considered to the
          // left by the splitting hyperplane.
          size_t numDesc = node->Left()->NumDescendants();
          for (size_t i = 0; i < numDesc; i++)
          {
            size_t descIndex = node->Left()->Descendant(i);
            BOOST_REQUIRE(
                node->Hyperplane().Left(node->Dataset().col(descIndex)));
          }
        }
        if (node->Right())
        {
          // Let's check that points in the right child are considered to the
          // right by the splitting hyperplane.
          size_t numDesc = node->Right()->NumDescendants();
          for (size_t i = 0; i < numDesc; i++)
          {
            size_t descIndex = node->Right()->Descendant(i);
            BOOST_REQUIRE(
                node->Hyperplane().Right(node->Dataset().col(descIndex)));
          }
        }
      }

      if (node->Left())
        nodes.push(node->Left());
      if (node->Right())
        nodes.push(node->Right());
    }
  }
}

/**
 * Test to make sure that the points in the left child are considered to the
 * left by the node's splitting hyperplane, and the same for points in the
 * right child.
 */
BOOST_AUTO_TEST_CASE(SpillTreeHyperplaneTest)
{
  typedef SPTree<EuclideanDistance, EmptyStatistic, arma::mat> SpillType1;
  typedef NonOrtSPTree<EuclideanDistance, EmptyStatistic, arma::mat>
      SpillType2;
  typedef MeanSPTree<EuclideanDistance, EmptyStatistic, arma::mat> SpillType3;
  typedef NonOrtMeanSPTree<EuclideanDistance, EmptyStatistic, arma::mat>
      SpillType4;

  SpillTreeHyperplaneTestAux<SpillType1>();
  SpillTreeHyperplaneTestAux<SpillType2>();
  SpillTreeHyperplaneTestAux<SpillType3>();
  SpillTreeHyperplaneTestAux<SpillType4>();
}

/**
 * Simple test for the move constructor.
 */
BOOST_AUTO_TEST_CASE(SpillTreeMoveConstructorTest)
{
  arma::mat dataset = arma::randu(3, 1000);
  typedef SPTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  TreeType tree(dataset);
  TreeType* left = tree.Left();
  TreeType* right = tree.Right();
  size_t numDesc = tree.NumDescendants();

  TreeType newTree(std::move(tree));

  BOOST_REQUIRE(tree.Left() == NULL);
  BOOST_REQUIRE(tree.Right() == NULL);
  BOOST_REQUIRE_EQUAL(tree.NumDescendants(), 0);

  BOOST_REQUIRE_EQUAL(newTree.Left(), left);
  BOOST_REQUIRE_EQUAL(newTree.Right(), right);
  BOOST_REQUIRE_EQUAL(newTree.NumDescendants(), numDesc);

  if (left)
  {
    BOOST_REQUIRE(newTree.Left() != NULL);
    BOOST_REQUIRE_EQUAL(newTree.Left()->Parent(), &newTree);
  }
  if (right)
  {
    BOOST_REQUIRE(newTree.Right() != NULL);
    BOOST_REQUIRE_EQUAL(newTree.Right()->Parent(), &newTree);
  }
}

/**
 * Simple test for the copy constructor.
 */
BOOST_AUTO_TEST_CASE(SpillTreeCopyConstructorTest)
{
  arma::mat dataset = arma::randu(3, 1000);
  typedef SPTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  TreeType* tree = new TreeType(dataset);
  TreeType* left = tree->Left();
  TreeType* right = tree->Right();
  size_t numDesc = tree->NumDescendants();

  // Copy the tree.
  TreeType newTree(*tree);
  delete tree;

  BOOST_REQUIRE_EQUAL(newTree.Dataset().n_rows, 3);
  BOOST_REQUIRE_EQUAL(newTree.Dataset().n_cols, 1000);
  BOOST_REQUIRE_EQUAL(newTree.NumDescendants(), numDesc);

  if (left)
  {
    BOOST_REQUIRE(newTree.Left() != left);
    BOOST_REQUIRE(newTree.Left() != NULL);
    BOOST_REQUIRE_EQUAL(newTree.Left()->Parent(), &newTree);
  }
  if (right)
  {
    BOOST_REQUIRE(newTree.Right() != right);
    BOOST_REQUIRE(newTree.Right() != NULL);
    BOOST_REQUIRE_EQUAL(newTree.Right()->Parent(), &newTree);
  }
}

/**
 * Simple test for the constructor that takes an rvalue reference to the
 * dataset.
 */
BOOST_AUTO_TEST_CASE(SpillTreeMoveDatasetTest)
{
  arma::mat dataset = arma::randu(3, 1000);
  typedef SPTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  TreeType tree(std::move(dataset));

  BOOST_REQUIRE_EQUAL(dataset.n_elem, 0);
  BOOST_REQUIRE_EQUAL(tree.Dataset().n_rows, 3);
  BOOST_REQUIRE_EQUAL(tree.Dataset().n_cols, 1000);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/split_data_test.cpp

/**
 * @file split_data_test.cpp
 * @author Tham Ngap Wei
 *
 * Test the SplitData method.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/data/split_data.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace arma;
using namespace mlpack::data;

BOOST_AUTO_TEST_SUITE(SplitDataTest);

/**
 * Compare the data after the train/test split. This assumes that the labels
 * correspond to each column, so that we can easily check each point against
 * its original.
 *
 * @param inputData The original data set before the split.
 * @param compareData The data to compare with the inputData;
 *    it could be train data or test data.
 * @param inputLabel The labels of each point in compareData.
 */
void CompareData(const mat& inputData,
                 const mat& compareData,
                 const Row<size_t>& inputLabel)
{
  for (size_t i = 0; i != compareData.n_cols; ++i)
  {
    const mat& lhsCol = inputData.col(inputLabel(i));
    const mat& rhsCol = compareData.col(i);
    for (size_t j = 0; j != lhsCol.n_rows; ++j)
    {
      if (std::abs(rhsCol(j)) < 1e-5)
        BOOST_REQUIRE_SMALL(lhsCol(j), 1e-5);
      else
        BOOST_REQUIRE_CLOSE(lhsCol(j), rhsCol(j), 1e-5);
    }
  }
}

void CheckMatEqual(const mat& inputData, const mat& compareData)
{
  const mat& sortedInput = arma::sort(inputData, "ascend", 1);
  const mat& sortedCompare = arma::sort(compareData, "ascend", 1);
  for (size_t i = 0; i < sortedInput.n_cols; ++i)
  {
    const mat& lhsCol = sortedInput.col(i);
    const mat& rhsCol = sortedCompare.col(i);
    for (size_t j = 0; j < lhsCol.n_rows; ++j)
    {
      if (std::abs(rhsCol(j)) < 1e-5)
        BOOST_REQUIRE_SMALL(lhsCol(j), 1e-5);
      else
        BOOST_REQUIRE_CLOSE(lhsCol(j), rhsCol(j), 1e-5);
    }
  }
}

/**
 * Check that no labels have been duplicated.
 */
void CheckDuplication(const Row<size_t>& trainLabels,
                      const Row<size_t>& testLabels)
{
  // Assemble a vector that will hold the counts of each element.
  Row<size_t> counts(trainLabels.n_elem + testLabels.n_elem);
  counts.zeros();

  for (size_t i = 0; i < trainLabels.n_elem; ++i)
  {
    BOOST_REQUIRE_LT(trainLabels[i], counts.n_elem);
    counts[trainLabels[i]]++;
  }
  for (size_t i = 0; i < testLabels.n_elem; ++i)
  {
    BOOST_REQUIRE_LT(testLabels[i], counts.n_elem);
    counts[testLabels[i]]++;
  }

  // Now make sure each point has been used once.
  for (size_t i = 0; i < counts.n_elem; ++i)
    BOOST_REQUIRE_EQUAL(counts[i], 1);
}

BOOST_AUTO_TEST_CASE(SplitDataResultMat)
{
  mat input(2, 10);
  size_t count = 0; // Counter for filling the matrix with unique values.
  input.imbue([&count] () { return ++count; });

  const auto value = Split(input, 0.2);
  BOOST_REQUIRE_EQUAL(std::get<0>(value).n_cols, 8); // Train data.
  BOOST_REQUIRE_EQUAL(std::get<1>(value).n_cols, 2); // Test data.

  mat concat = arma::join_rows(std::get<0>(value), std::get<1>(value));
  CheckMatEqual(input, concat);
}

BOOST_AUTO_TEST_CASE(SplitLabeledDataResultMat)
{
  mat input(2, 10);
  input.randu();

  // Set the labels to the column ID, so that CompareData can compare the data
  // after Split is called.
  const Row<size_t> labels = arma::linspace<Row<size_t>>(0, input.n_cols - 1,
      input.n_cols);

  const auto value = Split(input, labels, 0.2);
  BOOST_REQUIRE_EQUAL(std::get<0>(value).n_cols, 8);
  BOOST_REQUIRE_EQUAL(std::get<1>(value).n_cols, 2);
  BOOST_REQUIRE_EQUAL(std::get<2>(value).n_cols, 8);
  BOOST_REQUIRE_EQUAL(std::get<3>(value).n_cols, 2);

  CompareData(input, std::get<0>(value), std::get<2>(value));
  CompareData(input, std::get<1>(value), std::get<3>(value));

  // The last thing to check is that we aren't duplicating any points in the
  // train or test labels.
  CheckDuplication(std::get<2>(value), std::get<3>(value));
}

/**
 * The same test as above, but on a larger dataset.
 */
BOOST_AUTO_TEST_CASE(SplitDataLargerTest)
{
  size_t count = 0;
  mat input(10, 497);
  input.imbue([&count] () { return ++count; });

  const auto value = Split(input, 0.3);
  BOOST_REQUIRE_EQUAL(std::get<0>(value).n_cols, 497 - size_t(0.3 * 497));
  BOOST_REQUIRE_EQUAL(std::get<1>(value).n_cols, size_t(0.3 * 497));

  mat concat = arma::join_rows(std::get<0>(value), std::get<1>(value));
  CheckMatEqual(input, concat);
}

BOOST_AUTO_TEST_CASE(SplitLabeledDataLargerTest)
{
  mat input(10, 497);
  input.randu();

  // Set the labels to the column ID.
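  // Label i is simply i, so after splitting, each returned label identifies
  // the original column and CompareData() can match the points back up.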
  const Row<size_t> labels = arma::linspace<Row<size_t>>(0, input.n_cols - 1,
      input.n_cols);

  const auto value = Split(input, labels, 0.3);
  BOOST_REQUIRE_EQUAL(std::get<0>(value).n_cols, 497 - size_t(0.3 * 497));
  BOOST_REQUIRE_EQUAL(std::get<1>(value).n_cols, size_t(0.3 * 497));
  BOOST_REQUIRE_EQUAL(std::get<2>(value).n_cols, 497 - size_t(0.3 * 497));
  BOOST_REQUIRE_EQUAL(std::get<3>(value).n_cols, size_t(0.3 * 497));

  CompareData(input, std::get<0>(value), std::get<2>(value));
  CompareData(input, std::get<1>(value), std::get<3>(value));

  CheckDuplication(std::get<2>(value), std::get<3>(value));
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/svd_batch_test.cpp

#include <mlpack/core.hpp>
#include <mlpack/methods/amf/amf.hpp>
#include <mlpack/methods/amf/update_rules/svd_batch_learning.hpp>
#include <mlpack/methods/amf/init_rules/random_init.hpp>
#include <mlpack/methods/amf/init_rules/average_init.hpp>
#include <mlpack/methods/amf/termination_policies/simple_tolerance_termination.hpp>
#include <mlpack/methods/amf/termination_policies/validation_rmse_termination.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

BOOST_AUTO_TEST_SUITE(SVDBatchTest);

using namespace std;
using namespace mlpack;
using namespace mlpack::amf;
using namespace arma;

/**
 * Make sure SVD batch learning is converging.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
BOOST_AUTO_TEST_CASE(SVDBatchConvergenceElementTest)
{
  sp_mat data;
  data.sprandn(1000, 1000, 0.2);
  AMF<SimpleToleranceTermination<sp_mat>,
      AverageInitialization,
      SVDBatchLearning> amf;
  mat m1, m2;
  amf.Apply(data, 2, m1, m2);

  BOOST_REQUIRE_NE(amf.TerminationPolicy().Iteration(),
      amf.TerminationPolicy().MaxIterations());
}

//! This is used to ensure we start from the same initial point.
class SpecificRandomInitialization
{
 public:
  SpecificRandomInitialization(const size_t n, const size_t r, const size_t m) :
      W(arma::randu(n, r)), H(arma::randu(r, m)) { }

  template<typename MatType>
  inline void Initialize(const MatType& /* V */,
                         const size_t /* r */,
                         arma::mat& W,
                         arma::mat& H)
  {
    W = this->W;
    H = this->H;
  }

 private:
  arma::mat W;
  arma::mat H;
};

/**
 * Make sure the momentum is working okay.
 */
BOOST_AUTO_TEST_CASE(SVDBatchMomentumTest)
{
  mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Generate list of locations for batch insert constructor for sparse
  // matrices.
  arma::umat locations(2, dataset.n_cols);
  arma::vec values(dataset.n_cols);
  for (size_t i = 0; i < dataset.n_cols; ++i)
  {
    // We have to transpose it because items are rows, and users are columns.
    locations(0, i) = ((arma::uword) dataset(0, i));
    locations(1, i) = ((arma::uword) dataset(1, i));
    values(i) = dataset(2, i);
  }

  // Find maximum user and item IDs.
  const size_t maxUserID = (size_t) max(locations.row(0)) + 1;
  const size_t maxItemID = (size_t) max(locations.row(1)) + 1;

  // Fill sparse matrix.
  sp_mat cleanedData = arma::sp_mat(locations, values, maxUserID, maxItemID);

  // Create the initial matrices.
  SpecificRandomInitialization sri(cleanedData.n_rows, 2, cleanedData.n_cols);

  ValidationRMSETermination<sp_mat> vrt(cleanedData, 2000);
  AMF<ValidationRMSETermination<sp_mat>,
      SpecificRandomInitialization,
      SVDBatchLearning> amf1(vrt, sri, SVDBatchLearning(0.0009, 0, 0, 0));

  mat m1, m2;
  const double regularRMSE = amf1.Apply(cleanedData, 2, m1, m2);

  AMF<ValidationRMSETermination<sp_mat>,
      SpecificRandomInitialization,
      SVDBatchLearning> amf2(vrt, sri, SVDBatchLearning(0.0009, 0, 0, 0.8));

  const double momentumRMSE = amf2.Apply(cleanedData, 2, m1, m2);

  BOOST_REQUIRE_LE(momentumRMSE, regularRMSE + 0.05);
}

/**
 * Make sure the regularization is working okay.
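 * The regularized run should end up with a held-out RMSE within a small
 * margin of the unregularized run.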
 */
BOOST_AUTO_TEST_CASE(SVDBatchRegularizationTest)
{
  mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Generate list of locations for batch insert constructor for sparse
  // matrices.
  arma::umat locations(2, dataset.n_cols);
  arma::vec values(dataset.n_cols);
  for (size_t i = 0; i < dataset.n_cols; ++i)
  {
    // We have to transpose it because items are rows, and users are columns.
    locations(0, i) = ((arma::uword) dataset(0, i));
    locations(1, i) = ((arma::uword) dataset(1, i));
    values(i) = dataset(2, i);
  }

  // Find maximum user and item IDs.
  const size_t maxUserID = (size_t) max(locations.row(0)) + 1;
  const size_t maxItemID = (size_t) max(locations.row(1)) + 1;

  // Fill sparse matrix.
  sp_mat cleanedData = arma::sp_mat(locations, values, maxUserID, maxItemID);

  // Create the initial matrices.
  SpecificRandomInitialization sri(cleanedData.n_rows, 2, cleanedData.n_cols);

  ValidationRMSETermination<sp_mat> vrt(cleanedData, 2000);
  AMF<ValidationRMSETermination<sp_mat>,
      SpecificRandomInitialization,
      SVDBatchLearning> amf1(vrt, sri, SVDBatchLearning(0.0009, 0, 0, 0));

  mat m1, m2;
  double regularRMSE = amf1.Apply(cleanedData, 2, m1, m2);

  AMF<ValidationRMSETermination<sp_mat>,
      SpecificRandomInitialization,
      SVDBatchLearning> amf2(vrt, sri,
      SVDBatchLearning(0.0009, 0.5, 0.5, 0.8));

  double momentumRMSE = amf2.Apply(cleanedData, 2, m1, m2);

  BOOST_REQUIRE_LE(momentumRMSE, regularRMSE + 0.05);
}

/**
 * Make sure the SVD can factorize matrices with negative entries.
 */
BOOST_AUTO_TEST_CASE(SVDBatchNegativeElementTest)
{
  // Create a random 5x3 matrix and a random 3x5 matrix, both shifted so their
  // elements can be negative; we should be able to recover their product.
  mat testLeft;
  testLeft.randu(5, 3);
  testLeft -= 0.5; // Shift so elements are negative.

  mat testRight;
  testRight.randu(3, 5);
  testRight -= 0.5; // Shift so elements are negative.

  // Assemble a rank-3 matrix that is 5x5.
  mat test = testLeft * testRight;

  AMF<SimpleToleranceTermination<mat>,
      RandomInitialization,
      SVDBatchLearning> amf(SimpleToleranceTermination<mat>(),
      RandomInitialization(), SVDBatchLearning(0.1, 0.001, 0.001, 0));
  mat m1, m2;
  amf.Apply(test, 3, m1, m2);

  arma::mat result = m1 * m2;

  // 5% tolerance on the norm.
  BOOST_REQUIRE_CLOSE(arma::norm(test, "fro"), arma::norm(result, "fro"), 5.0);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/svd_incremental_test.cpp

#include <mlpack/core.hpp>
#include <mlpack/methods/amf/amf.hpp>
#include <mlpack/methods/amf/update_rules/svd_incomplete_incremental_learning.hpp>
#include <mlpack/methods/amf/update_rules/svd_complete_incremental_learning.hpp>
#include <mlpack/methods/amf/init_rules/random_init.hpp>
#include <mlpack/methods/amf/termination_policies/incomplete_incremental_termination.hpp>
#include <mlpack/methods/amf/termination_policies/complete_incremental_termination.hpp>
#include <mlpack/methods/amf/termination_policies/simple_tolerance_termination.hpp>
#include <mlpack/methods/amf/termination_policies/validation_rmse_termination.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

BOOST_AUTO_TEST_SUITE(SVDIncrementalTest);

using namespace std;
using namespace mlpack;
using namespace mlpack::amf;
using namespace arma;

/**
 * Test for convergence of incomplete incremental learning.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
BOOST_AUTO_TEST_CASE(SVDIncompleteIncrementalConvergenceTest)
{
  sp_mat data;
  data.sprandn(1000, 1000, 0.2);

  SVDIncompleteIncrementalLearning svd(0.01);
  IncompleteIncrementalTermination<SimpleToleranceTermination<sp_mat> > iit;

  AMF<IncompleteIncrementalTermination<SimpleToleranceTermination<sp_mat> >,
      RandomInitialization,
      SVDIncompleteIncrementalLearning> amf(iit, RandomInitialization(), svd);

  mat m1, m2;
  amf.Apply(data, 2, m1, m2);

  BOOST_REQUIRE_NE(amf.TerminationPolicy().Iteration(),
      amf.TerminationPolicy().MaxIterations());
}

/**
 * Test for convergence of complete incremental learning.
 */
BOOST_AUTO_TEST_CASE(SVDCompleteIncrementalConvergenceTest)
{
  sp_mat data;
  data.sprandn(1000, 1000, 0.2);

  SVDCompleteIncrementalLearning<sp_mat> svd(0.01);
  CompleteIncrementalTermination<SimpleToleranceTermination<sp_mat> > iit;

  AMF<CompleteIncrementalTermination<SimpleToleranceTermination<sp_mat> >,
      RandomInitialization,
      SVDCompleteIncrementalLearning<sp_mat> > amf(iit,
      RandomInitialization(), svd);

  mat m1, m2;
  amf.Apply(data, 2, m1, m2);

  BOOST_REQUIRE_NE(amf.TerminationPolicy().Iteration(),
      amf.TerminationPolicy().MaxIterations());
}

//! This is used to ensure we start from the same initial point.
class SpecificRandomInitialization
{
 public:
  SpecificRandomInitialization(const size_t n, const size_t r, const size_t m) :
      W(arma::randu(n, r)), H(arma::randu(r, m)) { }

  template<typename MatType>
  inline void Initialize(const MatType& /* V */,
                         const size_t /* r */,
                         arma::mat& W,
                         arma::mat& H)
  {
    W = this->W;
    H = this->H;
  }

 private:
  arma::mat W;
  arma::mat H;
};

BOOST_AUTO_TEST_CASE(SVDIncompleteIncrementalRegularizationTest)
{
  mat dataset;
  data::Load("GroupLens100k.csv", dataset);

  // Generate list of locations for batch insert constructor for sparse
  // matrices.
  arma::umat locations(2, dataset.n_cols);
  arma::vec values(dataset.n_cols);
  for (size_t i = 0; i < dataset.n_cols; ++i)
  {
    // We have to transpose it because items are rows, and users are columns.
    locations(0, i) = ((arma::uword) dataset(0, i));
    locations(1, i) = ((arma::uword) dataset(1, i));
    values(i) = dataset(2, i);
  }

  // Find maximum user and item IDs.
  const size_t maxUserID = (size_t) max(locations.row(0)) + 1;
  const size_t maxItemID = (size_t) max(locations.row(1)) + 1;

  // Fill sparse matrix.
  sp_mat cleanedData = arma::sp_mat(locations, values, maxUserID, maxItemID);
  sp_mat cleanedData2 = cleanedData;

  SpecificRandomInitialization sri(cleanedData.n_rows, 2, cleanedData.n_cols);

  ValidationRMSETermination<sp_mat> vrt(cleanedData, 2000);
  AMF<IncompleteIncrementalTermination<ValidationRMSETermination<sp_mat> >,
      SpecificRandomInitialization,
      SVDIncompleteIncrementalLearning> amf1(vrt, sri,
      SVDIncompleteIncrementalLearning(0.001, 0, 0));

  mat m1, m2;
  double regularRMSE = amf1.Apply(cleanedData, 2, m1, m2);

  ValidationRMSETermination<sp_mat> vrt2(cleanedData2, 2000);
  AMF<IncompleteIncrementalTermination<ValidationRMSETermination<sp_mat> >,
      SpecificRandomInitialization,
      SVDIncompleteIncrementalLearning> amf2(vrt2, sri,
      SVDIncompleteIncrementalLearning(0.001, 0.01, 0.01));

  mat m3, m4;
  double regularizedRMSE = amf2.Apply(cleanedData2, 2, m3, m4);

  BOOST_REQUIRE_LT(regularizedRMSE, regularRMSE + 0.075);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/termination_policy_test.cpp

/**
 * @file termination_policy_test.cpp
 * @author Ryan Curtin
 *
 * Tests for AMF termination policies.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/amf/amf.hpp>
#include <mlpack/methods/amf/update_rules/nmf_mult_dist.hpp>
#include <mlpack/methods/amf/termination_policies/max_iteration_termination.hpp>

#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

BOOST_AUTO_TEST_SUITE(TerminationPolicyTest);

using namespace std;
using namespace arma;
using namespace mlpack;
using namespace mlpack::amf;

/**
 * Simple test -- make sure termination happens after the right number of
 * iterations.
 */
BOOST_AUTO_TEST_CASE(MaxIterationTerminationTest)
{
  MaxIterationTermination mit(500);

  arma::mat x; // Just an argument to pass.
  for (size_t i = 0; i < 499; ++i)
    BOOST_REQUIRE_EQUAL(mit.IsConverged(x, x), false);

  // Should keep returning true once maximum iterations are reached.
  BOOST_REQUIRE_EQUAL(mit.IsConverged(x, x), true);
  BOOST_REQUIRE_EQUAL(mit.Iteration(), 500);
  BOOST_REQUIRE_EQUAL(mit.IsConverged(x, x), true);
  BOOST_REQUIRE_EQUAL(mit.IsConverged(x, x), true);
}

/**
 * Make sure that AMF properly terminates.
 */
BOOST_AUTO_TEST_CASE(AMFMaxIterationTerminationTest)
{
  mat w = randu(20, 12);
  mat h = randu(12, 20);
  mat v = w * h;
  size_t r = 12;

  MaxIterationTermination mit(10); // Only 10 iterations.
  AMF<MaxIterationTermination> nmf(mit);
  nmf.Apply(v, r, w, h);

  BOOST_REQUIRE_EQUAL(nmf.TerminationPolicy().Iteration(), 10);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/test_tools.hpp

/**
 * @file test_tools.hpp
 * @author Ryan Curtin
 *
 * This file includes some useful macros for tests.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license. You should have received a copy of the
 * 3-clause BSD license along with mlpack. If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#ifndef MLPACK_TESTS_TEST_TOOLS_HPP
#define MLPACK_TESTS_TEST_TOOLS_HPP

#include <boost/version.hpp>

// This is only necessary for pre-1.36 Boost.Test.
#if BOOST_VERSION < 103600
#include <boost/test/floating_point_comparison.hpp>
#include <boost/test/auto_unit_test.hpp>

// This depends on other macros. Probably not a great idea... but it works, and
// we only need it for ancient Boost versions.
#define BOOST_REQUIRE_GE( L, R ) \
    BOOST_REQUIRE_EQUAL( (L >= R), true )

#define BOOST_REQUIRE_NE( L, R ) \
    BOOST_REQUIRE_EQUAL( (L != R), true )

#define BOOST_REQUIRE_LE( L, R ) \
    BOOST_REQUIRE_EQUAL( (L <= R), true )

#define BOOST_REQUIRE_LT( L, R ) \
    BOOST_REQUIRE_EQUAL( (L < R), true )

#define BOOST_REQUIRE_GT( L, R ) \
    BOOST_REQUIRE_EQUAL( (L > R), true )

#endif

// Require the approximation L to be within a relative error of E with respect
// to the actual value R.
#define REQUIRE_RELATIVE_ERR( L, R, E ) \
    BOOST_REQUIRE_LE( std::abs((R) - (L)), (E) * std::abs(R))

#include <mlpack/core.hpp>

// Check the values of two matrices.
inline void CheckMatrices(const arma::mat& a,
                          const arma::mat& b,
                          double tolerance = 1e-5)
{
  BOOST_REQUIRE_EQUAL(a.n_rows, b.n_rows);
  BOOST_REQUIRE_EQUAL(a.n_cols, b.n_cols);

  for (size_t i = 0; i < a.n_elem; ++i)
  {
    if (std::abs(a[i]) < tolerance / 2)
      BOOST_REQUIRE_SMALL(b[i], tolerance / 2);
    else
      BOOST_REQUIRE_CLOSE(a[i], b[i], tolerance);
  }
}

// Check the values of two unsigned matrices.
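// (Unlike the tolerance-based floating-point overload above, these are
// compared for exact equality, since the matrices hold integer values.)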
inline void CheckMatrices(const arma::Mat& a, const arma::Mat& b) { BOOST_REQUIRE_EQUAL(a.n_rows, b.n_rows); BOOST_REQUIRE_EQUAL(a.n_cols, b.n_cols); for (size_t i = 0; i < a.n_elem; ++i) BOOST_REQUIRE_EQUAL(a[i], b[i]); } #endif mlpack-2.2.5/src/mlpack/tests/timer_test.cpp000066400000000000000000000036471315013601400210560ustar00rootroot00000000000000/** * @file timer_test.cpp * @author Matthew Amidon, Ryan Curtin * * Test for the timer class * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #ifndef _WIN32 #include #endif // For Sleep(). #ifdef _WIN32 #include #endif #include #include #include "test_tools.hpp" using namespace mlpack; BOOST_AUTO_TEST_SUITE(TimerTest); /** * We should be able to start and then stop a timer multiple times and it should * save the value. */ BOOST_AUTO_TEST_CASE(MultiRunTimerTest) { Timer::Start("test_timer"); // On Windows (or, at least, in Windows not using VS2010) we cannot use // usleep() because it is not provided. Instead we will use Sleep() for a // number of milliseconds. #ifdef _WIN32 Sleep(10); #else usleep(10000); #endif Timer::Stop("test_timer"); BOOST_REQUIRE_GE(Timer::Get("test_timer").count(), 10000); // Restart it. Timer::Start("test_timer"); #ifdef _WIN32 Sleep(10); #else usleep(10000); #endif Timer::Stop("test_timer"); BOOST_REQUIRE_GE(Timer::Get("test_timer").count(), 20000); // Just one more time, for good measure... Timer::Start("test_timer"); #ifdef _WIN32 Sleep(20); #else usleep(20000); #endif Timer::Stop("test_timer"); BOOST_REQUIRE_GE(Timer::Get("test_timer").count(), 40000); } BOOST_AUTO_TEST_CASE(TwiceStopTimerTest) { Timer::Start("test_timer"); Timer::Stop("test_timer"); BOOST_REQUIRE_THROW(Timer::Stop("test_timer"), std::runtime_error); } BOOST_AUTO_TEST_CASE(TwiceStartTimerTest) { Timer::Start("test_timer"); BOOST_REQUIRE_THROW(Timer::Start("test_timer"), std::runtime_error); } BOOST_AUTO_TEST_SUITE_END(); mlpack-2.2.5/src/mlpack/tests/tree_test.cpp000066400000000000000000002072271315013601400206750ustar00rootroot00000000000000/** * @file tree_test.cpp * * Tests for tree-building methods. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include #include #include #include #include #include #include #include #include #include "test_tools.hpp" using namespace mlpack; using namespace mlpack::math; using namespace mlpack::tree; using namespace mlpack::metric; using namespace mlpack::bound; BOOST_AUTO_TEST_SUITE(TreeTest); /** * Ensure that a bound, by default, is empty and has no dimensionality. */ BOOST_AUTO_TEST_CASE(HRectBoundEmptyConstructor) { HRectBound b; BOOST_REQUIRE_EQUAL((int) b.Dim(), 0); BOOST_REQUIRE_EQUAL(b.MinWidth(), 0.0); } /** * Ensure that when we specify the dimensionality in the constructor, it is * correct, and the bounds are all the empty set. */ BOOST_AUTO_TEST_CASE(HRectBoundDimConstructor) { HRectBound b(2); // We'll do this with 2 and 5 dimensions. 
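// (A freshly constructed bound should hold an empty Range in every dimension,
// which is why each Width() checked below is expected to be zero.)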
BOOST_REQUIRE_EQUAL(b.Dim(), 2); BOOST_REQUIRE_SMALL(b[0].Width(), 1e-5); BOOST_REQUIRE_SMALL(b[1].Width(), 1e-5); b = HRectBound(5); BOOST_REQUIRE_EQUAL(b.Dim(), 5); BOOST_REQUIRE_SMALL(b[0].Width(), 1e-5); BOOST_REQUIRE_SMALL(b[1].Width(), 1e-5); BOOST_REQUIRE_SMALL(b[2].Width(), 1e-5); BOOST_REQUIRE_SMALL(b[3].Width(), 1e-5); BOOST_REQUIRE_SMALL(b[4].Width(), 1e-5); BOOST_REQUIRE_EQUAL(b.MinWidth(), 0.0); } /** * Test the copy constructor. */ BOOST_AUTO_TEST_CASE(HRectBoundCopyConstructor) { HRectBound b(2); b[0] = Range(0.0, 2.0); b[1] = Range(2.0, 3.0); b.MinWidth() = 0.5; HRectBound c(b); BOOST_REQUIRE_EQUAL(c.Dim(), 2); BOOST_REQUIRE_SMALL(c[0].Lo(), 1e-5); BOOST_REQUIRE_CLOSE(c[0].Hi(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(c[1].Lo(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(c[1].Hi(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(c.MinWidth(), 0.5, 1e-5); } /** * Test the assignment operator. */ BOOST_AUTO_TEST_CASE(HRectBoundAssignmentOperator) { HRectBound b(2); b[0] = Range(0.0, 2.0); b[1] = Range(2.0, 3.0); b.MinWidth() = 0.5; HRectBound c(4); c = b; BOOST_REQUIRE_EQUAL(c.Dim(), 2); BOOST_REQUIRE_SMALL(c[0].Lo(), 1e-5); BOOST_REQUIRE_CLOSE(c[0].Hi(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(c[1].Lo(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(c[1].Hi(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(c.MinWidth(), 0.5, 1e-5); } /** * Test that clearing the dimensions resets the bound to empty. */ BOOST_AUTO_TEST_CASE(HRectBoundClear) { HRectBound b(2); // We'll do this with two dimensions only. b[0] = Range(0.0, 2.0); b[1] = Range(2.0, 4.0); b.MinWidth() = 1.0; // Now we just need to make sure that we clear the range. b.Clear(); BOOST_REQUIRE_SMALL(b[0].Width(), 1e-5); BOOST_REQUIRE_SMALL(b[1].Width(), 1e-5); BOOST_REQUIRE_SMALL(b.MinWidth(), 1e-5); } BOOST_AUTO_TEST_CASE(HRectBoundMoveConstructor) { HRectBound b(2); b[0] = Range(0.0, 2.0); b[1] = Range(2.0, 4.0); b.MinWidth() = 1.0; HRectBound b2(std::move(b)); BOOST_REQUIRE_EQUAL(b.Dim(), 0); BOOST_REQUIRE_EQUAL(b2.Dim(), 2); BOOST_REQUIRE_EQUAL(b.MinWidth(), 0.0); BOOST_REQUIRE_EQUAL(b2.MinWidth(), 1.0); BOOST_REQUIRE_SMALL(b2[0].Lo(), 1e-5); BOOST_REQUIRE_CLOSE(b2[0].Hi(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(b2[1].Lo(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(b2[1].Hi(), 4.0, 1e-5); } /** * Ensure that we get the correct center for our bound. */ BOOST_AUTO_TEST_CASE(HRectBoundCenter) { // Create a simple 3-dimensional bound. HRectBound b(3); b[0] = Range(0.0, 5.0); b[1] = Range(-2.0, -1.0); b[2] = Range(-10.0, 50.0); arma::vec center; b.Center(center); BOOST_REQUIRE_EQUAL(center.n_elem, 3); BOOST_REQUIRE_CLOSE(center[0], 2.5, 1e-5); BOOST_REQUIRE_CLOSE(center[1], -1.5, 1e-5); BOOST_REQUIRE_CLOSE(center[2], 20.0, 1e-5); } /** * Ensure the volume calculation is correct. */ BOOST_AUTO_TEST_CASE(HRectBoundVolume) { // Create a simple 3-dimensional bound. HRectBound b(3); b[0] = Range(0.0, 5.0); b[1] = Range(-2.0, -1.0); b[2] = Range(-10.0, 50.0); BOOST_REQUIRE_CLOSE(b.Volume(), 300.0, 1e-5); } /** * Ensure that we calculate the correct minimum distance between a point and a * bound. */ BOOST_AUTO_TEST_CASE(HRectBoundMinDistancePoint) { // We'll do the calculation in five dimensions, and we'll use three cases for // the point: point is outside the bound; point is on the edge of the bound; // point is inside the bound. In the latter two cases, the distance should be // zero. HRectBound b(5); b[0] = Range(0.0, 2.0); b[1] = Range(1.0, 5.0); b[2] = Range(-2.0, 2.0); b[3] = Range(-5.0, -2.0); b[4] = Range(1.0, 2.0); arma::vec point = "-2.0 0.0 10.0 3.0 3.0"; // This will be the Euclidean distance. 
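// (Worked out per dimension, the gaps from the point to the bound are 2, 1,
// 8, 5, and 1, so the squared distance is 4 + 1 + 64 + 25 + 1 = 95.)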
BOOST_REQUIRE_CLOSE(b.MinDistance(point), sqrt(95.0), 1e-5); point = "2.0 5.0 2.0 -5.0 1.0"; BOOST_REQUIRE_SMALL(b.MinDistance(point), 1e-5); point = "1.0 2.0 0.0 -2.0 1.5"; BOOST_REQUIRE_SMALL(b.MinDistance(point), 1e-5); } /** * Ensure that we calculate the correct minimum distance between a bound and * another bound. */ BOOST_AUTO_TEST_CASE(HRectBoundMinDistanceBound) { // We'll do the calculation in five dimensions, and we can use six cases. // The other bound is completely outside the bound; the other bound is on the // edge of the bound; the other bound partially overlaps the bound; the other // bound fully overlaps the bound; the other bound is entirely inside the // bound; the other bound entirely envelops the bound. HRectBound b(5); b[0] = Range(0.0, 2.0); b[1] = Range(1.0, 5.0); b[2] = Range(-2.0, 2.0); b[3] = Range(-5.0, -2.0); b[4] = Range(1.0, 2.0); HRectBound c(5); // The other bound is completely outside the bound. c[0] = Range(-5.0, -2.0); c[1] = Range(6.0, 7.0); c[2] = Range(-2.0, 2.0); c[3] = Range(2.0, 5.0); c[4] = Range(3.0, 4.0); BOOST_REQUIRE_CLOSE(b.MinDistance(c), sqrt(22.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MinDistance(b), sqrt(22.0), 1e-5); // The other bound is on the edge of the bound. c[0] = Range(-2.0, 0.0); c[1] = Range(0.0, 1.0); c[2] = Range(-3.0, -2.0); c[3] = Range(-10.0, -5.0); c[4] = Range(2.0, 3.0); BOOST_REQUIRE_SMALL(b.MinDistance(c), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(b), 1e-5); // The other bound partially overlaps the bound. c[0] = Range(-2.0, 1.0); c[1] = Range(0.0, 2.0); c[2] = Range(-2.0, 2.0); c[3] = Range(-8.0, -4.0); c[4] = Range(0.0, 4.0); BOOST_REQUIRE_SMALL(b.MinDistance(c), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(b), 1e-5); // The other bound fully overlaps the bound. BOOST_REQUIRE_SMALL(b.MinDistance(b), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(c), 1e-5); // The other bound is entirely inside the bound / the other bound entirely // envelops the bound. c[0] = Range(-1.0, 3.0); c[1] = Range(0.0, 6.0); c[2] = Range(-3.0, 3.0); c[3] = Range(-7.0, 0.0); c[4] = Range(0.0, 5.0); BOOST_REQUIRE_SMALL(b.MinDistance(c), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(b), 1e-5); // Now we must be sure that the minimum distance to itself is 0. BOOST_REQUIRE_SMALL(b.MinDistance(b), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(c), 1e-5); } /** * Ensure that we calculate the correct maximum distance between a bound and a * point. This uses the same test cases as the MinDistance test. */ BOOST_AUTO_TEST_CASE(HRectBoundMaxDistancePoint) { // We'll do the calculation in five dimensions, and we'll use three cases for // the point: point is outside the bound; point is on the edge of the bound; // point is inside the bound. In the latter two cases, the distance should be // zero. HRectBound b(5); b[0] = Range(0.0, 2.0); b[1] = Range(1.0, 5.0); b[2] = Range(-2.0, 2.0); b[3] = Range(-5.0, -2.0); b[4] = Range(1.0, 2.0); arma::vec point = "-2.0 0.0 10.0 3.0 3.0"; // This will be the Euclidean distance. BOOST_REQUIRE_CLOSE(b.MaxDistance(point), sqrt(253.0), 1e-5); point = "2.0 5.0 2.0 -5.0 1.0"; BOOST_REQUIRE_CLOSE(b.MaxDistance(point), sqrt(46.0), 1e-5); point = "1.0 2.0 0.0 -2.0 1.5"; BOOST_REQUIRE_CLOSE(b.MaxDistance(point), sqrt(23.25), 1e-5); } /** * Ensure that we calculate the correct maximum distance between a bound and * another bound. This uses the same test cases as the MinDistance test. */ BOOST_AUTO_TEST_CASE(HRectBoundMaxDistanceBound) { // We'll do the calculation in five dimensions, and we can use six cases. 
// The other bound is completely outside the bound; the other bound is on the // edge of the bound; the other bound partially overlaps the bound; the other // bound fully overlaps the bound; the other bound is entirely inside the // bound; the other bound entirely envelops the bound. HRectBound b(5); b[0] = Range(0.0, 2.0); b[1] = Range(1.0, 5.0); b[2] = Range(-2.0, 2.0); b[3] = Range(-5.0, -2.0); b[4] = Range(1.0, 2.0); HRectBound c(5); // The other bound is completely outside the bound. c[0] = Range(-5.0, -2.0); c[1] = Range(6.0, 7.0); c[2] = Range(-2.0, 2.0); c[3] = Range(2.0, 5.0); c[4] = Range(3.0, 4.0); BOOST_REQUIRE_CLOSE(b.MaxDistance(c), sqrt(210.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(b), sqrt(210.0), 1e-5); // The other bound is on the edge of the bound. c[0] = Range(-2.0, 0.0); c[1] = Range(0.0, 1.0); c[2] = Range(-3.0, -2.0); c[3] = Range(-10.0, -5.0); c[4] = Range(2.0, 3.0); BOOST_REQUIRE_CLOSE(b.MaxDistance(c), sqrt(134.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(b), sqrt(134.0), 1e-5); // The other bound partially overlaps the bound. c[0] = Range(-2.0, 1.0); c[1] = Range(0.0, 2.0); c[2] = Range(-2.0, 2.0); c[3] = Range(-8.0, -4.0); c[4] = Range(0.0, 4.0); BOOST_REQUIRE_CLOSE(b.MaxDistance(c), sqrt(102.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(b), sqrt(102.0), 1e-5); // The other bound fully overlaps the bound. BOOST_REQUIRE_CLOSE(b.MaxDistance(b), sqrt(46.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(c), sqrt(61.0), 1e-5); // The other bound is entirely inside the bound / the other bound entirely // envelops the bound. c[0] = Range(-1.0, 3.0); c[1] = Range(0.0, 6.0); c[2] = Range(-3.0, 3.0); c[3] = Range(-7.0, 0.0); c[4] = Range(0.0, 5.0); BOOST_REQUIRE_CLOSE(b.MaxDistance(c), sqrt(100.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(b), sqrt(100.0), 1e-5); // Identical bounds. This will be the sum of the squared widths in each // dimension. BOOST_REQUIRE_CLOSE(b.MaxDistance(b), sqrt(46.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(c), sqrt(162.0), 1e-5); // One last additional case. If the bound encloses only one point, the // maximum distance between it and itself is 0. HRectBound d(2); d[0] = Range(2.0, 2.0); d[1] = Range(3.0, 3.0); BOOST_REQUIRE_SMALL(d.MaxDistance(d), 1e-5); } /** * Ensure that the ranges returned by RangeDistance() are equal to the minimum * and maximum distance. We will perform this test by creating random bounds * and comparing the behavior to MinDistance() and MaxDistance() -- so this test * is assuming that those passed and operate correctly. */ BOOST_AUTO_TEST_CASE(HRectBoundRangeDistanceBound) { for (int i = 0; i < 50; i++) { size_t dim = math::RandInt(20); HRectBound a(dim); HRectBound b(dim); // We will set the low randomly and the width randomly for each dimension of // each bound. arma::vec loA(dim); arma::vec widthA(dim); loA.randu(); widthA.randu(); arma::vec lo_b(dim); arma::vec width_b(dim); lo_b.randu(); width_b.randu(); for (size_t j = 0; j < dim; j++) { a[j] = Range(loA[j], loA[j] + widthA[j]); b[j] = Range(lo_b[j], lo_b[j] + width_b[j]); } // Now ensure that MinDistance and MaxDistance report the same. 
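// (RangeDistance() should return a Range [lo, hi]; by symmetry of the metric
// the two call orders must agree, and the endpoints must match what
// MinDistance() and MaxDistance() compute separately.)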
Range r = a.RangeDistance(b); Range s = b.RangeDistance(a); BOOST_REQUIRE_CLOSE(r.Lo(), s.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(r.Hi(), s.Hi(), 1e-5); BOOST_REQUIRE_CLOSE(r.Lo(), a.MinDistance(b), 1e-5); BOOST_REQUIRE_CLOSE(r.Hi(), a.MaxDistance(b), 1e-5); BOOST_REQUIRE_CLOSE(s.Lo(), b.MinDistance(a), 1e-5); BOOST_REQUIRE_CLOSE(s.Hi(), b.MaxDistance(a), 1e-5); } } /** * Ensure that the ranges returned by RangeDistance() are equal to the minimum * and maximum distance. We will perform this test by creating random bounds * and comparing the bheavior to MinDistance() and MaxDistance() -- so this test * is assuming that those passed and operate correctly. This is for the * bound-to-point case. */ BOOST_AUTO_TEST_CASE(HRectBoundRangeDistancePoint) { for (int i = 0; i < 20; i++) { size_t dim = math::RandInt(20); HRectBound a(dim); // We will set the low randomly and the width randomly for each dimension of // each bound. arma::vec loA(dim); arma::vec widthA(dim); loA.randu(); widthA.randu(); for (size_t j = 0; j < dim; j++) a[j] = Range(loA[j], loA[j] + widthA[j]); // Now run the test on a few points. for (int j = 0; j < 10; j++) { arma::vec point(dim); point.randu(); Range r = a.RangeDistance(point); BOOST_REQUIRE_CLOSE(r.Lo(), a.MinDistance(point), 1e-5); BOOST_REQUIRE_CLOSE(r.Hi(), a.MaxDistance(point), 1e-5); } } } /** * Test that we can expand the bound to include a new point. */ BOOST_AUTO_TEST_CASE(HRectBoundOrOperatorPoint) { // Because this should be independent in each dimension, we can essentially // run five test cases at once. HRectBound b(5); b[0] = Range(1.0, 3.0); b[1] = Range(2.0, 4.0); b[2] = Range(-2.0, -1.0); b[3] = Range(0.0, 0.0); b[4] = Range(); // Empty range. b.MinWidth() = 0.0; arma::vec point = "2.0 4.0 2.0 -1.0 6.0"; b |= point; BOOST_REQUIRE_CLOSE(b[0].Lo(), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(b[0].Hi(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(b[1].Lo(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(b[1].Hi(), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(b[2].Lo(), -2.0, 1e-5); BOOST_REQUIRE_CLOSE(b[2].Hi(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(b[3].Lo(), -1.0, 1e-5); BOOST_REQUIRE_SMALL(b[3].Hi(), 1e-5); BOOST_REQUIRE_CLOSE(b[4].Lo(), 6.0, 1e-5); BOOST_REQUIRE_CLOSE(b[4].Hi(), 6.0, 1e-5); BOOST_REQUIRE_SMALL(b.MinWidth(), 1e-5); } /** * Test that we can expand the bound to include another bound. */ BOOST_AUTO_TEST_CASE(HRectBoundOrOperatorBound) { // Because this should be independent in each dimension, we can run many tests // at once. HRectBound b(8); b[0] = Range(1.0, 3.0); b[1] = Range(2.0, 4.0); b[2] = Range(-2.0, -1.0); b[3] = Range(4.0, 5.0); b[4] = Range(2.0, 4.0); b[5] = Range(0.0, 0.0); b[6] = Range(); b[7] = Range(1.0, 3.0); HRectBound c(8); c[0] = Range(-3.0, -1.0); // Entirely less than the other bound. c[1] = Range(0.0, 2.0); // Touching edges. c[2] = Range(-3.0, -1.5); // Partially overlapping. c[3] = Range(4.0, 5.0); // Identical. c[4] = Range(1.0, 5.0); // Entirely enclosing. c[5] = Range(2.0, 2.0); // A single point. c[6] = Range(1.0, 3.0); c[7] = Range(); // Empty set. 
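// In each dimension, operator|= should grow the range to the hull of both
// operands -- the new low is the smaller Lo() and the new high is the larger
// Hi() -- and the union with an empty Range should simply yield the other
// operand (dimensions 6 and 7 below).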
HRectBound d = c; b |= c; d |= b; BOOST_REQUIRE_CLOSE(b[0].Lo(), -3.0, 1e-5); BOOST_REQUIRE_CLOSE(b[0].Hi(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(d[0].Lo(), -3.0, 1e-5); BOOST_REQUIRE_CLOSE(d[0].Hi(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(b[1].Lo(), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(b[1].Hi(), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(d[1].Lo(), 0.0, 1e-5); BOOST_REQUIRE_CLOSE(d[1].Hi(), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(b[2].Lo(), -3.0, 1e-5); BOOST_REQUIRE_CLOSE(b[2].Hi(), -1.0, 1e-5); BOOST_REQUIRE_CLOSE(d[2].Lo(), -3.0, 1e-5); BOOST_REQUIRE_CLOSE(d[2].Hi(), -1.0, 1e-5); BOOST_REQUIRE_CLOSE(b[3].Lo(), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(b[3].Hi(), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(d[3].Lo(), 4.0, 1e-5); BOOST_REQUIRE_CLOSE(d[3].Hi(), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(b[4].Lo(), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(b[4].Hi(), 5.0, 1e-5); BOOST_REQUIRE_CLOSE(d[4].Lo(), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(d[4].Hi(), 5.0, 1e-5); BOOST_REQUIRE_SMALL(b[5].Lo(), 1e-5); BOOST_REQUIRE_CLOSE(b[5].Hi(), 2.0, 1e-5); BOOST_REQUIRE_SMALL(d[5].Lo(), 1e-5); BOOST_REQUIRE_CLOSE(d[5].Hi(), 2.0, 1e-5); BOOST_REQUIRE_CLOSE(b[6].Lo(), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(b[6].Hi(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(d[6].Lo(), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(d[6].Hi(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(b[7].Lo(), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(b[7].Hi(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(d[7].Lo(), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(d[7].Hi(), 3.0, 1e-5); BOOST_REQUIRE_CLOSE(b.MinWidth(), 1.0, 1e-5); BOOST_REQUIRE_CLOSE(d.MinWidth(), 1.0, 1e-5); } /** * Test that the Contains() function correctly figures out whether or not a * point is in a bound. */ BOOST_AUTO_TEST_CASE(HRectBoundContains) { // We can test a couple different points: completely outside the bound, // adjacent in one dimension to the bound, adjacent in all dimensions to the // bound, and inside the bound. HRectBound b(3); b[0] = Range(0.0, 2.0); b[1] = Range(0.0, 2.0); b[2] = Range(0.0, 2.0); // Completely outside the range. arma::vec point = "-1.0 4.0 4.0"; BOOST_REQUIRE(!b.Contains(point)); // Completely outside, but one dimension is in the range. point = "-1.0 4.0 1.0"; BOOST_REQUIRE(!b.Contains(point)); // Outside, but one dimension is on the edge. point = "-1.0 0.0 3.0"; BOOST_REQUIRE(!b.Contains(point)); // Two dimensions are on the edge, but one is outside. point = "0.0 0.0 3.0"; BOOST_REQUIRE(!b.Contains(point)); // Completely on the edge (should be contained). point = "0.0 0.0 0.0"; BOOST_REQUIRE(b.Contains(point)); // Inside the range. point = "0.3 1.0 0.4"; BOOST_REQUIRE(b.Contains(point)); } BOOST_AUTO_TEST_CASE(TestBallBound) { BallBound<> b1; BallBound<> b2; // Create two balls with a center distance of 1 from each other. // Give the first one a radius of 0.3 and the second a radius of 0.4. 
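// For disjoint balls, the distance range should be [d - r1 - r2, d + r1 + r2]
// where d is the distance between the centers; here that is
// [1 - 0.7, 1 + 0.7] = [0.3, 1.7], which the checks below rely on.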
b1.Center().set_size(3); b1.Center()[0] = 1; b1.Center()[1] = 2; b1.Center()[2] = 3; b1.Radius() = 0.3; b2.Center().set_size(3); b2.Center()[0] = 1; b2.Center()[1] = 2; b2.Center()[2] = 4; b2.Radius() = 0.4; BOOST_REQUIRE_CLOSE(b1.MinDistance(b2), 1-0.3-0.4, 1e-5); BOOST_REQUIRE_CLOSE(b1.RangeDistance(b2).Hi(), 1+0.3+0.4, 1e-5); BOOST_REQUIRE_CLOSE(b1.RangeDistance(b2).Lo(), 1-0.3-0.4, 1e-5); BOOST_REQUIRE_CLOSE(b1.RangeDistance(b2).Hi(), 1+0.3+0.4, 1e-5); BOOST_REQUIRE_CLOSE(b1.RangeDistance(b2).Lo(), 1-0.3-0.4, 1e-5); BOOST_REQUIRE_CLOSE(b2.MinDistance(b1), 1-0.3-0.4, 1e-5); BOOST_REQUIRE_CLOSE(b2.MaxDistance(b1), 1+0.3+0.4, 1e-5); BOOST_REQUIRE_CLOSE(b2.RangeDistance(b1).Hi(), 1+0.3+0.4, 1e-5); BOOST_REQUIRE_CLOSE(b2.RangeDistance(b1).Lo(), 1-0.3-0.4, 1e-5); BOOST_REQUIRE(b1.Contains(b1.Center())); BOOST_REQUIRE(!b1.Contains(b2.Center())); BOOST_REQUIRE(!b2.Contains(b1.Center())); BOOST_REQUIRE(b2.Contains(b2.Center())); arma::vec b2point(3); // A point that's within the radius but not the center. b2point[0] = 1.1; b2point[1] = 2.1; b2point[2] = 4.1; BOOST_REQUIRE(b2.Contains(b2point)); BOOST_REQUIRE_SMALL(b1.MinDistance(b1.Center()), 1e-5); BOOST_REQUIRE_CLOSE(b1.MinDistance(b2.Center()), 1 - 0.3, 1e-5); BOOST_REQUIRE_CLOSE(b2.MinDistance(b1.Center()), 1 - 0.4, 1e-5); BOOST_REQUIRE_CLOSE(b2.MaxDistance(b1.Center()), 1 + 0.4, 1e-5); BOOST_REQUIRE_CLOSE(b1.MaxDistance(b2.Center()), 1 + 0.3, 1e-5); } BOOST_AUTO_TEST_CASE(BallBoundMoveConstructor) { BallBound<> b1(2.0, arma::vec("2 1 1")); BallBound<> b2(std::move(b1)); BOOST_REQUIRE_EQUAL(b2.Dim(), 3); BOOST_REQUIRE_EQUAL(b1.Dim(), 0); BOOST_REQUIRE_CLOSE(b2.Center()[0], 2.0, 1e-5); BOOST_REQUIRE_CLOSE(b2.Center()[1], 1.0, 1e-5); BOOST_REQUIRE_CLOSE(b2.Center()[2], 1.0, 1e-5); BOOST_REQUIRE_CLOSE(b2.MinWidth(), 4.0, 1e-5); BOOST_REQUIRE_SMALL(b1.MinWidth(), 1e-5); } /** * Ensure that we calculate the correct minimum distance between a point and a * bound. */ BOOST_AUTO_TEST_CASE(HRectBoundRootMinDistancePoint) { // We'll do the calculation in five dimensions, and we'll use three cases for // the point: point is outside the bound; point is on the edge of the bound; // point is inside the bound. In the latter two cases, the distance should be // zero. HRectBound b(5); b[0] = Range(0.0, 2.0); b[1] = Range(1.0, 5.0); b[2] = Range(-2.0, 2.0); b[3] = Range(-5.0, -2.0); b[4] = Range(1.0, 2.0); arma::vec point = "-2.0 0.0 10.0 3.0 3.0"; // This will be the Euclidean distance. BOOST_REQUIRE_CLOSE(b.MinDistance(point), sqrt(95.0), 1e-5); point = "2.0 5.0 2.0 -5.0 1.0"; BOOST_REQUIRE_SMALL(b.MinDistance(point), 1e-5); point = "1.0 2.0 0.0 -2.0 1.5"; BOOST_REQUIRE_SMALL(b.MinDistance(point), 1e-5); } /** * Ensure that we calculate the correct minimum distance between a bound and * another bound. */ BOOST_AUTO_TEST_CASE(HRectBoundRootMinDistanceBound) { // We'll do the calculation in five dimensions, and we can use six cases. // The other bound is completely outside the bound; the other bound is on the // edge of the bound; the other bound partially overlaps the bound; the other // bound fully overlaps the bound; the other bound is entirely inside the // bound; the other bound entirely envelops the bound. HRectBound b(5); b[0] = Range(0.0, 2.0); b[1] = Range(1.0, 5.0); b[2] = Range(-2.0, 2.0); b[3] = Range(-5.0, -2.0); b[4] = Range(1.0, 2.0); HRectBound c(5); // The other bound is completely outside the bound. 
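// (The per-dimension gaps between b and this c are 2, 1, 0, 4, and 1, giving
// a squared minimum distance of 4 + 1 + 0 + 16 + 1 = 22 -- hence sqrt(22.0).)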
c[0] = Range(-5.0, -2.0); c[1] = Range(6.0, 7.0); c[2] = Range(-2.0, 2.0); c[3] = Range(2.0, 5.0); c[4] = Range(3.0, 4.0); BOOST_REQUIRE_CLOSE(b.MinDistance(c), sqrt(22.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MinDistance(b), sqrt(22.0), 1e-5); // The other bound is on the edge of the bound. c[0] = Range(-2.0, 0.0); c[1] = Range(0.0, 1.0); c[2] = Range(-3.0, -2.0); c[3] = Range(-10.0, -5.0); c[4] = Range(2.0, 3.0); BOOST_REQUIRE_SMALL(b.MinDistance(c), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(b), 1e-5); // The other bound partially overlaps the bound. c[0] = Range(-2.0, 1.0); c[1] = Range(0.0, 2.0); c[2] = Range(-2.0, 2.0); c[3] = Range(-8.0, -4.0); c[4] = Range(0.0, 4.0); BOOST_REQUIRE_SMALL(b.MinDistance(c), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(b), 1e-5); // The other bound fully overlaps the bound. BOOST_REQUIRE_SMALL(b.MinDistance(b), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(c), 1e-5); // The other bound is entirely inside the bound / the other bound entirely // envelops the bound. c[0] = Range(-1.0, 3.0); c[1] = Range(0.0, 6.0); c[2] = Range(-3.0, 3.0); c[3] = Range(-7.0, 0.0); c[4] = Range(0.0, 5.0); BOOST_REQUIRE_SMALL(b.MinDistance(c), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(b), 1e-5); // Now we must be sure that the minimum distance to itself is 0. BOOST_REQUIRE_SMALL(b.MinDistance(b), 1e-5); BOOST_REQUIRE_SMALL(c.MinDistance(c), 1e-5); } /** * Ensure that we calculate the correct maximum distance between a bound and a * point. This uses the same test cases as the MinDistance test. */ BOOST_AUTO_TEST_CASE(HRectBoundRootMaxDistancePoint) { // We'll do the calculation in five dimensions, and we'll use three cases for // the point: point is outside the bound; point is on the edge of the bound; // point is inside the bound. In the latter two cases, the distance should be // zero. HRectBound b(5); b[0] = Range(0.0, 2.0); b[1] = Range(1.0, 5.0); b[2] = Range(-2.0, 2.0); b[3] = Range(-5.0, -2.0); b[4] = Range(1.0, 2.0); arma::vec point = "-2.0 0.0 10.0 3.0 3.0"; // This will be the Euclidean distance. BOOST_REQUIRE_CLOSE(b.MaxDistance(point), sqrt(253.0), 1e-5); point = "2.0 5.0 2.0 -5.0 1.0"; BOOST_REQUIRE_CLOSE(b.MaxDistance(point), sqrt(46.0), 1e-5); point = "1.0 2.0 0.0 -2.0 1.5"; BOOST_REQUIRE_CLOSE(b.MaxDistance(point), sqrt(23.25), 1e-5); } /** * Ensure that we calculate the correct maximum distance between a bound and * another bound. This uses the same test cases as the MinDistance test. */ BOOST_AUTO_TEST_CASE(HRectBoundRootMaxDistanceBound) { // We'll do the calculation in five dimensions, and we can use six cases. // The other bound is completely outside the bound; the other bound is on the // edge of the bound; the other bound partially overlaps the bound; the other // bound fully overlaps the bound; the other bound is entirely inside the // bound; the other bound entirely envelops the bound. HRectBound b(5); b[0] = Range(0.0, 2.0); b[1] = Range(1.0, 5.0); b[2] = Range(-2.0, 2.0); b[3] = Range(-5.0, -2.0); b[4] = Range(1.0, 2.0); HRectBound c(5); // The other bound is completely outside the bound. c[0] = Range(-5.0, -2.0); c[1] = Range(6.0, 7.0); c[2] = Range(-2.0, 2.0); c[3] = Range(2.0, 5.0); c[4] = Range(3.0, 4.0); BOOST_REQUIRE_CLOSE(b.MaxDistance(c), sqrt(210.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(b), sqrt(210.0), 1e-5); // The other bound is on the edge of the bound. 
c[0] = Range(-2.0, 0.0); c[1] = Range(0.0, 1.0); c[2] = Range(-3.0, -2.0); c[3] = Range(-10.0, -5.0); c[4] = Range(2.0, 3.0); BOOST_REQUIRE_CLOSE(b.MaxDistance(c), sqrt(134.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(b), sqrt(134.0), 1e-5); // The other bound partially overlaps the bound. c[0] = Range(-2.0, 1.0); c[1] = Range(0.0, 2.0); c[2] = Range(-2.0, 2.0); c[3] = Range(-8.0, -4.0); c[4] = Range(0.0, 4.0); BOOST_REQUIRE_CLOSE(b.MaxDistance(c), sqrt(102.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(b), sqrt(102.0), 1e-5); // The other bound fully overlaps the bound. BOOST_REQUIRE_CLOSE(b.MaxDistance(b), sqrt(46.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(c), sqrt(61.0), 1e-5); // The other bound is entirely inside the bound / the other bound entirely // envelops the bound. c[0] = Range(-1.0, 3.0); c[1] = Range(0.0, 6.0); c[2] = Range(-3.0, 3.0); c[3] = Range(-7.0, 0.0); c[4] = Range(0.0, 5.0); BOOST_REQUIRE_CLOSE(b.MaxDistance(c), sqrt(100.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(b), sqrt(100.0), 1e-5); // Identical bounds. This will be the sum of the squared widths in each // dimension. BOOST_REQUIRE_CLOSE(b.MaxDistance(b), sqrt(46.0), 1e-5); BOOST_REQUIRE_CLOSE(c.MaxDistance(c), sqrt(162.0), 1e-5); // One last additional case. If the bound encloses only one point, the // maximum distance between it and itself is 0. HRectBound d(2); d[0] = Range(2.0, 2.0); d[1] = Range(3.0, 3.0); BOOST_REQUIRE_SMALL(d.MaxDistance(d), 1e-5); } /** * Ensure that the ranges returned by RangeDistance() are equal to the minimum * and maximum distance. We will perform this test by creating random bounds * and comparing the behavior to MinDistance() and MaxDistance() -- so this test * is assuming that those passed and operate correctly. */ BOOST_AUTO_TEST_CASE(HRectBoundRootRangeDistanceBound) { for (int i = 0; i < 50; i++) { size_t dim = math::RandInt(20); HRectBound a(dim); HRectBound b(dim); // We will set the low randomly and the width randomly for each dimension of // each bound. arma::vec loA(dim); arma::vec widthA(dim); loA.randu(); widthA.randu(); arma::vec lo_b(dim); arma::vec width_b(dim); lo_b.randu(); width_b.randu(); for (size_t j = 0; j < dim; j++) { a[j] = Range(loA[j], loA[j] + widthA[j]); b[j] = Range(lo_b[j], lo_b[j] + width_b[j]); } // Now ensure that MinDistance and MaxDistance report the same. Range r = a.RangeDistance(b); Range s = b.RangeDistance(a); BOOST_REQUIRE_CLOSE(r.Lo(), s.Lo(), 1e-5); BOOST_REQUIRE_CLOSE(r.Hi(), s.Hi(), 1e-5); BOOST_REQUIRE_CLOSE(r.Lo(), a.MinDistance(b), 1e-5); BOOST_REQUIRE_CLOSE(r.Hi(), a.MaxDistance(b), 1e-5); BOOST_REQUIRE_CLOSE(s.Lo(), b.MinDistance(a), 1e-5); BOOST_REQUIRE_CLOSE(s.Hi(), b.MaxDistance(a), 1e-5); } } /** * Ensure that the ranges returned by RangeDistance() are equal to the minimum * and maximum distance. We will perform this test by creating random bounds * and comparing the bheavior to MinDistance() and MaxDistance() -- so this test * is assuming that those passed and operate correctly. This is for the * bound-to-point case. */ BOOST_AUTO_TEST_CASE(HRectBoundRootRangeDistancePoint) { for (int i = 0; i < 20; i++) { size_t dim = math::RandInt(20); HRectBound a(dim); // We will set the low randomly and the width randomly for each dimension of // each bound. arma::vec loA(dim); arma::vec widthA(dim); loA.randu(); widthA.randu(); for (size_t j = 0; j < dim; j++) a[j] = Range(loA[j], loA[j] + widthA[j]); // Now run the test on a few points. 
for (int j = 0; j < 10; j++) { arma::vec point(dim); point.randu(); Range r = a.RangeDistance(point); BOOST_REQUIRE_CLOSE(r.Lo(), a.MinDistance(point), 1e-5); BOOST_REQUIRE_CLOSE(r.Hi(), a.MaxDistance(point), 1e-5); } } } /** * Ensure that HRectBound::Diameter() works properly. */ BOOST_AUTO_TEST_CASE(HRectBoundDiameter) { HRectBound> b(4); b[0] = math::Range(0.0, 1.0); b[1] = math::Range(-1.0, 0.0); b[2] = math::Range(2.0, 3.0); b[3] = math::Range(7.0, 7.0); BOOST_REQUIRE_CLOSE(b.Diameter(), std::pow(3.0, 1.0 / 3.0), 1e-5); HRectBound> c(4); c[0] = math::Range(0.0, 1.0); c[1] = math::Range(-1.0, 0.0); c[2] = math::Range(2.0, 3.0); c[3] = math::Range(0.0, 0.0); BOOST_REQUIRE_CLOSE(c.Diameter(), 3.0, 1e-5); HRectBound> d(2); d[0] = math::Range(2.2, 2.2); d[1] = math::Range(1.0, 1.0); BOOST_REQUIRE_SMALL(d.Diameter(), 1e-5); } /** * It seems as though Bill has stumbled across a bug where * BinarySpaceTree<>::count() returns something different than * BinarySpaceTree<>::count_. So, let's build a simple tree and make sure they * are the same. */ BOOST_AUTO_TEST_CASE(TreeCountMismatch) { arma::mat dataset = "2.0 5.0 9.0 4.0 8.0 7.0;" "3.0 4.0 6.0 7.0 1.0 2.0 "; // Leaf size of 1. KDTree rootNode(dataset, 1); BOOST_REQUIRE(rootNode.Count() == 6); BOOST_REQUIRE(rootNode.Left()->Count() == 3); BOOST_REQUIRE(rootNode.Left()->Left()->Count() == 2); BOOST_REQUIRE(rootNode.Left()->Left()->Left()->Count() == 1); BOOST_REQUIRE(rootNode.Left()->Left()->Right()->Count() == 1); BOOST_REQUIRE(rootNode.Left()->Right()->Count() == 1); BOOST_REQUIRE(rootNode.Right()->Count() == 3); BOOST_REQUIRE(rootNode.Right()->Left()->Count() == 2); BOOST_REQUIRE(rootNode.Right()->Left()->Left()->Count() == 1); BOOST_REQUIRE(rootNode.Right()->Left()->Right()->Count() == 1); BOOST_REQUIRE(rootNode.Right()->Right()->Count() == 1); } BOOST_AUTO_TEST_CASE(CheckParents) { arma::mat dataset = "2.0 5.0 9.0 4.0 8.0 7.0;" "3.0 4.0 6.0 7.0 1.0 2.0 "; // Leaf size of 1. KDTree rootNode(dataset, 1); BOOST_REQUIRE_EQUAL(rootNode.Parent(), (KDTree*) NULL); BOOST_REQUIRE_EQUAL(&rootNode, rootNode.Left()->Parent()); BOOST_REQUIRE_EQUAL(&rootNode, rootNode.Right()->Parent()); BOOST_REQUIRE_EQUAL(rootNode.Left(), rootNode.Left()->Left()->Parent()); BOOST_REQUIRE_EQUAL(rootNode.Left(), rootNode.Left()->Right()->Parent()); BOOST_REQUIRE_EQUAL(rootNode.Left()->Left(), rootNode.Left()->Left()->Left()->Parent()); BOOST_REQUIRE_EQUAL(rootNode.Left()->Left(), rootNode.Left()->Left()->Right()->Parent()); BOOST_REQUIRE_EQUAL(rootNode.Right(), rootNode.Right()->Left()->Parent()); BOOST_REQUIRE_EQUAL(rootNode.Right(), rootNode.Right()->Right()->Parent()); BOOST_REQUIRE_EQUAL(rootNode.Right()->Left(), rootNode.Right()->Left()->Left()->Parent()); BOOST_REQUIRE_EQUAL(rootNode.Right()->Left(), rootNode.Right()->Left()->Right()->Parent()); } BOOST_AUTO_TEST_CASE(CheckDataset) { arma::mat dataset = "2.0 5.0 9.0 4.0 8.0 7.0;" "3.0 4.0 6.0 7.0 1.0 2.0 "; // Leaf size of 1. 
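// (Every node of a BinarySpaceTree should reference the root's rearranged
// copy of the data, so all of the Dataset() addresses checked below must be
// identical.)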
KDTree rootNode(dataset, 1); arma::mat* rootDataset = &rootNode.Dataset(); BOOST_REQUIRE_EQUAL(&rootNode.Left()->Dataset(), rootDataset); BOOST_REQUIRE_EQUAL(&rootNode.Right()->Dataset(), rootDataset); BOOST_REQUIRE_EQUAL(&rootNode.Left()->Left()->Dataset(), rootDataset); BOOST_REQUIRE_EQUAL(&rootNode.Left()->Right()->Dataset(), rootDataset); BOOST_REQUIRE_EQUAL(&rootNode.Right()->Left()->Dataset(), rootDataset); BOOST_REQUIRE_EQUAL(&rootNode.Right()->Right()->Dataset(), rootDataset); BOOST_REQUIRE_EQUAL(&rootNode.Left()->Left()->Left()->Dataset(), rootDataset); BOOST_REQUIRE_EQUAL(&rootNode.Left()->Left()->Right()->Dataset(), rootDataset); BOOST_REQUIRE_EQUAL(&rootNode.Right()->Left()->Left()->Dataset(), rootDataset); BOOST_REQUIRE_EQUAL(&rootNode.Right()->Left()->Right()->Dataset(), rootDataset); } // Ensure FurthestDescendantDistance() works. BOOST_AUTO_TEST_CASE(FurthestDescendantDistanceTest) { arma::mat dataset = "1; 3"; // One point. KDTree rootNode(dataset, 1); BOOST_REQUIRE_SMALL(rootNode.FurthestDescendantDistance(), 1e-5); dataset = "1 -1; 1 -1"; // Square of size [2, 2]. // Both points are contained in the one node. KDTree twoPoint(dataset); BOOST_REQUIRE_CLOSE(twoPoint.FurthestDescendantDistance(), sqrt(2.0), 1e-5); } // Ensure that FurthestPointDistance() works. BOOST_AUTO_TEST_CASE(FurthestPointDistanceTest) { arma::mat dataset; dataset.randu(5, 100); typedef KDTree TreeType; TreeType tree(dataset); // Now, check each node. std::queue nodeQueue; nodeQueue.push(&tree); while (!nodeQueue.empty()) { TreeType* node = nodeQueue.front(); nodeQueue.pop(); if (node->NumChildren() != 0) BOOST_REQUIRE_EQUAL(node->FurthestPointDistance(), 0.0); else { // Get center. arma::vec center; node->Center(center); double maxDist = 0.0; for (size_t i = 0; i < node->NumPoints(); ++i) { const double dist = metric::EuclideanDistance::Evaluate(center, dataset.col(node->Point(i))); if (dist > maxDist) maxDist = dist; } // We don't require an exact value because FurthestPointDistance() can // just bound the value instead of returning the exact value. BOOST_REQUIRE_LE(maxDist, node->FurthestPointDistance()); if (node->Left()) nodeQueue.push(node->Left()); if (node->Right()) nodeQueue.push(node->Right()); } } } BOOST_AUTO_TEST_CASE(ParentDistanceTest) { arma::mat dataset; dataset.randu(5, 500); typedef KDTree TreeType; TreeType tree(dataset); // The root's parent distance should be 0 (although maybe it doesn't actually // matter; I just want to be sure it's not an uninitialized value, which this // test *sort* of checks). BOOST_REQUIRE_EQUAL(tree.ParentDistance(), 0.0); // Do a depth-first traversal and make sure the parent distance is the same as // we calculate. std::stack nodeStack; nodeStack.push(&tree); while (!nodeStack.empty()) { TreeType* node = nodeStack.top(); nodeStack.pop(); // If it's a leaf, nothing to check. 
if (node->NumChildren() == 0) continue; arma::vec center, leftCenter, rightCenter; node->Center(center); node->Left()->Center(leftCenter); node->Right()->Center(rightCenter); const double leftDistance = LMetric<2>::Evaluate(center, leftCenter); const double rightDistance = LMetric<2>::Evaluate(center, rightCenter); BOOST_REQUIRE_CLOSE(leftDistance, node->Left()->ParentDistance(), 1e-5); BOOST_REQUIRE_CLOSE(rightDistance, node->Right()->ParentDistance(), 1e-5); nodeStack.push(node->Left()); nodeStack.push(node->Right()); } } BOOST_AUTO_TEST_CASE(ParentDistanceTestWithMapping) { arma::mat dataset; dataset.randu(5, 500); std::vector oldFromNew; typedef KDTree TreeType; TreeType tree(dataset, oldFromNew); // The root's parent distance should be 0 (although maybe it doesn't actually // matter; I just want to be sure it's not an uninitialized value, which this // test *sort* of checks). BOOST_REQUIRE_EQUAL(tree.ParentDistance(), 0.0); // Do a depth-first traversal and make sure the parent distance is the same as // we calculate. std::stack nodeStack; nodeStack.push(&tree); while (!nodeStack.empty()) { TreeType* node = nodeStack.top(); nodeStack.pop(); // If it's a leaf, nothing to check. if (node->NumChildren() == 0) continue; arma::vec center, leftCenter, rightCenter; node->Center(center); node->Left()->Center(leftCenter); node->Right()->Center(rightCenter); const double leftDistance = LMetric<2>::Evaluate(center, leftCenter); const double rightDistance = LMetric<2>::Evaluate(center, rightCenter); BOOST_REQUIRE_CLOSE(leftDistance, node->Left()->ParentDistance(), 1e-5); BOOST_REQUIRE_CLOSE(rightDistance, node->Right()->ParentDistance(), 1e-5); nodeStack.push(node->Left()); nodeStack.push(node->Right()); } } // Forward declaration of methods we need for the next test. template bool CheckPointBounds(TreeType& node); template void GenerateVectorOfTree(TreeType* node, size_t depth, std::vector& v); /** * Exhaustive kd-tree test based on #125. * * - Generate a random dataset of a random size. * - Build a tree on that dataset. * - Ensure all the permutation indices map back to the correct points. * - Verify that each point is contained inside all of the bounds of its parent * nodes. * - Verify that each bound at a particular level of the tree does not overlap * with any other bounds at that level. * * Then, we do that whole process a handful of times. */ BOOST_AUTO_TEST_CASE(KdTreeTest) { typedef KDTree TreeType; size_t maxRuns = 10; // Ten total tests. size_t pointIncrements = 1000; // Range is from 2000 points to 11000. // We use the default leaf size of 20. for (size_t run = 0; run < maxRuns; run++) { size_t dimensions = run + 2; size_t maxPoints = (run + 1) * pointIncrements; size_t size = maxPoints; arma::mat dataset = arma::mat(dimensions, size); // Mappings for post-sort verification of data. std::vector newToOld; std::vector oldToNew; // Generate data. dataset.randu(); // Build the tree itself. TreeType root(dataset, newToOld, oldToNew); const arma::mat& treeset = root.Dataset(); // Ensure the size of the tree is correct. BOOST_REQUIRE_EQUAL(root.Count(), size); // Check the forward and backward mappings for correctness. for (size_t i = 0; i < size; i++) { for (size_t j = 0; j < dimensions; j++) { BOOST_REQUIRE_EQUAL(treeset(j, i), dataset(j, newToOld[i])); BOOST_REQUIRE_EQUAL(treeset(j, oldToNew[i]), dataset(j, i)); } } // Now check that each point is contained inside of all bounds above it. CheckPointBounds(root); // Now check that no peers overlap. 
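// (GenerateVectorOfTree() lays the tree out heap-style: the node stored at
// index i has its children at indices 2i and 2i + 1, so one level of the
// tree occupies the index range [depth, 2 * depth), which the loop below
// scans.)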
std::vector v; GenerateVectorOfTree(&root, 1, v); // Start with the first pair. size_t depth = 2; // Compare each peer against every other peer. while (depth < v.size()) { for (size_t i = depth; i < 2 * depth && i < v.size(); i++) for (size_t j = i + 1; j < 2 * depth && j < v.size(); j++) if (v[i] != NULL && v[j] != NULL) BOOST_REQUIRE(!v[i]->Bound().Contains(v[j]->Bound())); depth *= 2; } } arma::mat dataset(25, 1000); for (size_t col = 0; col < dataset.n_cols; ++col) for (size_t row = 0; row < dataset.n_rows; ++row) dataset(row, col) = row + col; TreeType root(dataset); } BOOST_AUTO_TEST_CASE(MaxRPTreeTest) { typedef MaxRPTree TreeType; size_t maxRuns = 10; // Ten total tests. size_t pointIncrements = 1000; // Range is from 2000 points to 11000. // We use the default leaf size of 20. for (size_t run = 0; run < maxRuns; run++) { size_t dimensions = run + 2; size_t maxPoints = (run + 1) * pointIncrements; size_t size = maxPoints; arma::mat dataset = arma::mat(dimensions, size); // Mappings for post-sort verification of data. std::vector newToOld; std::vector oldToNew; // Generate data. dataset.randu(); // Build the tree itself. TreeType root(dataset, newToOld, oldToNew); const arma::mat& treeset = root.Dataset(); // Ensure the size of the tree is correct. BOOST_REQUIRE_EQUAL(root.Count(), size); // Check the forward and backward mappings for correctness. for (size_t i = 0; i < size; i++) { for (size_t j = 0; j < dimensions; j++) { BOOST_REQUIRE_EQUAL(treeset(j, i), dataset(j, newToOld[i])); BOOST_REQUIRE_EQUAL(treeset(j, oldToNew[i]), dataset(j, i)); } } } } template bool CheckHyperplaneSplit(const TreeType& tree) { typedef typename TreeType::ElemType ElemType; const typename TreeType::Mat& dataset = tree.Dataset(); arma::Mat mat(dataset.n_rows + 1, tree.Left()->NumDescendants() + tree.Right()->NumDescendants()); // We will try to find a hyperplane that splits the node. // The hyperplane may be represented as // a_1 * x_1 + ... + a_n * x_n + a_{n + 1} = 0. // We have to solve the system of inequalities (mat^t) * x <= 0, // where x[0], ... , x[dataset.n_rows-1] are the components of the normal // to the hyperplane and x[dataset.n_rows] is the position of the hyperplane // i.e. x = (a_1, ... , a_{n + 1}). // Each column of the matrix consists of a point and 1. // In such a way, the inner product of a column and x is equal to the value // of the hyperplane expression. // The hyperplane splits the node if the expression takes on opposite // values on node's children. for (size_t i = 0; i < tree.Left()->NumDescendants(); i++) { for (size_t k = 0; k < dataset.n_rows; k++) mat(k, i) = - dataset(k, tree.Left()->Descendant(i)); mat(dataset.n_rows, i) = -1; } for (size_t i = 0; i < tree.Right()->NumDescendants(); i++) { for (size_t k = 0; k < dataset.n_rows; k++) mat(k, i + tree.Left()->NumDescendants()) = dataset(k, tree.Right()->Descendant(i)); mat(dataset.n_rows, i + tree.Left()->NumDescendants()) = 1; } arma::Col x(dataset.n_rows + 1); x.zeros(); // Define an initial value. x[0] = 1.0; x[1] = -arma::mean( dataset.cols(tree.Begin(), tree.Begin() + tree.Count() - 1).row(0)); const size_t numIters = 1000000; const ElemType delta = 1e-4; // We will solve the system using a simple gradient method. bool success = false; for (size_t it = 0; it < numIters; it++) { success = true; for (size_t k = 0; k < tree.Count(); k++) { ElemType result = arma::dot(mat.col(k), x); if (result > 0) { x -= mat.col(k) * delta; success = false; } } // The norm of the direction shouldn't be equal to zero. 
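// (If the gradient updates cancel the normal vector out, x no longer
// describes a hyperplane at all, so we restart from a randomly chosen
// coordinate direction.)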
if (arma::norm(x.rows(0, dataset.n_rows-1)) < 1e-8) { x[math::RandInt(0, dataset.n_rows)] = 1.0; success = false; } if (success) break; } return success; } template void CheckMaxRPTreeSplit(const TreeType& tree) { if (tree.IsLeaf()) return; BOOST_REQUIRE_EQUAL(CheckHyperplaneSplit(tree), true); CheckMaxRPTreeSplit(*tree.Left()); CheckMaxRPTreeSplit(*tree.Right()); } BOOST_AUTO_TEST_CASE(MaxRPTreeSplitTest) { typedef MaxRPTree TreeType; arma::mat dataset; dataset.randu(8, 1000); TreeType root(dataset); CheckMaxRPTreeSplit(root); } BOOST_AUTO_TEST_CASE(RPTreeTest) { typedef RPTree TreeType; size_t maxRuns = 10; // Ten total tests. size_t pointIncrements = 1000; // Range is from 2000 points to 11000. // We use the default leaf size of 20. for (size_t run = 0; run < maxRuns; run++) { size_t dimensions = run + 2; size_t maxPoints = (run + 1) * pointIncrements; size_t size = maxPoints; arma::mat dataset = arma::mat(dimensions, size); // Mappings for post-sort verification of data. std::vector newToOld; std::vector oldToNew; // Generate data. dataset.randu(); // Build the tree itself. TreeType root(dataset, newToOld, oldToNew); const arma::mat& treeset = root.Dataset(); // Ensure the size of the tree is correct. BOOST_REQUIRE_EQUAL(root.Count(), size); // Check the forward and backward mappings for correctness. for (size_t i = 0; i < size; i++) { for (size_t j = 0; j < dimensions; j++) { BOOST_REQUIRE_EQUAL(treeset(j, i), dataset(j, newToOld[i])); BOOST_REQUIRE_EQUAL(treeset(j, oldToNew[i]), dataset(j, i)); } } } } template void CheckRPTreeSplit(const TreeType& tree) { typedef typename TreeType::ElemType ElemType; if (tree.IsLeaf()) return; if (!CheckHyperplaneSplit(tree)) { // Check if that was mean split. arma::Col center; tree.Left()->Bound().Center(center); ElemType maxDist = 0; for (size_t k =0; k < tree.Left()->NumDescendants(); k++) { ElemType dist = MetricType::Evaluate(center, tree.Dataset().col(tree.Left()->Descendant(k))); if (dist > maxDist) maxDist = dist; } for (size_t k =0; k < tree.Right()->NumDescendants(); k++) { ElemType dist = MetricType::Evaluate(center, tree.Dataset().col(tree.Right()->Descendant(k))); BOOST_REQUIRE_LE(maxDist, dist * (1.0 + 10.0 * std::numeric_limits::epsilon())); } } CheckRPTreeSplit(*tree.Left()); CheckRPTreeSplit(*tree.Right()); } BOOST_AUTO_TEST_CASE(RPTreeSplitTest) { typedef RPTree TreeType; arma::mat dataset; dataset.randu(8, 1000); TreeType root(dataset); CheckRPTreeSplit(root); } // Recursively checks that each node contains all points that it claims to have. template bool CheckPointBounds(TreeType& node) { // Check that each point which this tree claims is actually inside the tree. for (size_t index = 0; index < node.NumDescendants(); index++) if (!node.Bound().Contains(node.Dataset().col(node.Descendant(index)))) return false; bool result = true; for (size_t child = 0; child < node.NumChildren(); ++child) result &= CheckPointBounds(node.Child(child)); return result; } /** * Exhaustive ball tree test based on #125. * * - Generate a random dataset of a random size. * - Build a tree on that dataset. * - Ensure all the permutation indices map back to the correct points. * - Verify that each point is contained inside all of the bounds of its parent * nodes. * * Then, we do that whole process a handful of times. */ BOOST_AUTO_TEST_CASE(BallTreeTest) { typedef BallTree TreeType; size_t maxRuns = 10; // Ten total tests. size_t pointIncrements = 1000; // Range is from 2000 points to 11000. // We use the default leaf size of 20. 
for (size_t run = 0; run < maxRuns; run++) { size_t dimensions = run + 2; size_t maxPoints = (run + 1) * pointIncrements; size_t size = maxPoints; arma::mat dataset = arma::mat(dimensions, size); arma::mat datacopy; // Used to test mappings. // Mappings for post-sort verification of data. std::vector newToOld; std::vector oldToNew; // Generate data. dataset.randu(); // Build the tree itself. TreeType root(dataset, newToOld, oldToNew); const arma::mat& treeset = root.Dataset(); // Ensure the size of the tree is correct. BOOST_REQUIRE_EQUAL(root.NumDescendants(), size); // Check the forward and backward mappings for correctness. for(size_t i = 0; i < size; i++) { for(size_t j = 0; j < dimensions; j++) { BOOST_REQUIRE_EQUAL(treeset(j, i), dataset(j, newToOld[i])); BOOST_REQUIRE_EQUAL(treeset(j, oldToNew[i]), dataset(j, i)); } } // Now check that each point is contained inside of all bounds above it. CheckPointBounds(root); } } template void GenerateVectorOfTree(TreeType* node, size_t depth, std::vector& v) { if (node == NULL) return; if (depth >= v.size()) v.resize(2 * depth + 1, NULL); // Resize to right size; fill with NULL. v[depth] = node; // Recurse to the left and right children. GenerateVectorOfTree(node->Left(), depth * 2, v); GenerateVectorOfTree(node->Right(), depth * 2 + 1, v); return; } /** * Exhaustive sparse kd-tree test based on #125. * * - Generate a random dataset of a random size. * - Build a tree on that dataset. * - Ensure all the permutation indices map back to the correct points. * - Verify that each point is contained inside all of the bounds of its parent * nodes. * - Verify that each bound at a particular level of the tree does not overlap * with any other bounds at that level. * * Then, we do that whole process a handful of times. */ BOOST_AUTO_TEST_CASE(ExhaustiveSparseKDTreeTest) { typedef KDTree> TreeType; size_t maxRuns = 2; // Two total tests. size_t pointIncrements = 200; // Range is from 200 points to 400. // We use the default leaf size of 20. for (size_t run = 0; run < maxRuns; run++) { size_t dimensions = run + 2; size_t maxPoints = (run + 1) * pointIncrements; size_t size = maxPoints; arma::SpMat dataset = arma::SpMat(dimensions, size); arma::SpMat datacopy; // Used to test mappings. // Mappings for post-sort verification of data. std::vector newToOld; std::vector oldToNew; // Generate data. dataset.sprandu(dimensions, size, 0.1); datacopy = dataset; // Save a copy. // Build the tree itself. TreeType root(dataset, newToOld, oldToNew); const arma::sp_mat& treeset = root.Dataset(); // Ensure the size of the tree is correct. BOOST_REQUIRE_EQUAL(root.Count(), size); // Check the forward and backward mappings for correctness. for (size_t i = 0; i < size; i++) { for (size_t j = 0; j < dimensions; j++) { BOOST_REQUIRE_EQUAL(treeset(j, i), dataset(j, newToOld[i])); BOOST_REQUIRE_EQUAL(treeset(j, oldToNew[i]), dataset(j, i)); } } // Now check that each point is contained inside of all bounds above it. CheckPointBounds(root); // Now check that no peers overlap. std::vector v; GenerateVectorOfTree(&root, 1, v); // Start with the first pair. size_t depth = 2; // Compare each peer against every other peer. 
while (depth < v.size()) { for (size_t i = depth; i < 2 * depth && i < v.size(); i++) for (size_t j = i + 1; j < 2 * depth && j < v.size(); j++) if (v[i] != NULL && v[j] != NULL) BOOST_REQUIRE(!v[i]->Bound().Contains(v[j]->Bound())); depth *= 2; } } arma::SpMat dataset(25, 1000); for (size_t col = 0; col < dataset.n_cols; ++col) for (size_t row = 0; row < dataset.n_rows; ++row) dataset(row, col) = row + col; TreeType root(dataset); } BOOST_AUTO_TEST_CASE(BinarySpaceTreeMoveConstructorTest) { arma::mat dataset(5, 1000); dataset.randu(); BinarySpaceTree tree(dataset); BinarySpaceTree tree2(std::move(tree)); BOOST_REQUIRE_EQUAL(tree.NumChildren(), 0); BOOST_REQUIRE_EQUAL(tree2.NumChildren(), 2); } template void RecurseTreeCountLeaves(const TreeType& node, arma::vec& counts) { for (size_t i = 0; i < node.NumChildren(); ++i) { if (node.Child(i).NumChildren() == 0) counts[node.Child(i).Point()]++; else RecurseTreeCountLeaves(node.Child(i), counts); } } template void CheckSelfChild(const TreeType& node) { if (node.NumChildren() == 0) return; // No self-child applicable here. bool found = false; for (size_t i = 0; i < node.NumChildren(); ++i) { if (node.Child(i).Point() == node.Point()) found = true; // Recursively check the children. CheckSelfChild(node.Child(i)); } // Ensure this has its own self-child. BOOST_REQUIRE_EQUAL(found, true); } template void CheckCovering(const TreeType& node) { // Return if a leaf. No checking necessary. if (node.NumChildren() == 0) return; const typename TreeType::Mat& dataset = node.Dataset(); const size_t nodePoint = node.Point(); // To ensure that this node satisfies the covering principle, we must ensure // that the distance to each child is less than pow(base, scale). double maxDistance = pow(node.Base(), node.Scale()); for (size_t i = 0; i < node.NumChildren(); ++i) { const size_t childPoint = node.Child(i).Point(); double distance = MetricType::Evaluate(dataset.col(nodePoint), dataset.col(childPoint)); BOOST_REQUIRE_LE(distance, maxDistance); // Check the child. CheckCovering(node.Child(i)); } } /** * Create a simple cover tree and then make sure it is valid. */ BOOST_AUTO_TEST_CASE(SimpleCoverTreeConstructionTest) { // 20-point dataset. arma::mat data = arma::trans(arma::mat("0.0 0.0;" "1.0 0.0;" "0.5 0.5;" "2.0 2.0;" "-1.0 2.0;" "3.0 0.0;" "1.5 5.5;" "-2.0 -2.0;" "-1.5 1.5;" "0.0 4.0;" "2.0 1.0;" "2.0 1.2;" "-3.0 -2.5;" "-5.0 -5.0;" "3.5 1.5;" "2.0 2.5;" "-1.0 -1.0;" "-3.5 1.5;" "3.5 -1.5;" "2.0 1.0;")); // The root point will be the first point, (0, 0). typedef StandardCoverTree TreeType; TreeType tree(data); // Expansion constant of 2.0. // The furthest point from the root will be (-5, -5), with a distance of // of sqrt(50). This means the scale of the root node should be 3 (because // 2^3 = 8). BOOST_REQUIRE_EQUAL(tree.Scale(), 3); // Now loop through the tree and ensure that each leaf is only created once. arma::vec counts; counts.zeros(20); RecurseTreeCountLeaves(tree, counts); // Each point should only have one leaf node representing it. for (size_t i = 0; i < 20; ++i) BOOST_REQUIRE_EQUAL(counts[i], 1); // Each non-leaf should have a self-child. CheckSelfChild(tree); // Each node must satisfy the covering principle (its children must be less // than or equal to a certain distance apart). CheckCovering>(tree); // There's no need to check the separation invariant because that is relaxed // in our implementation. } /** * Create a large cover tree and make sure it's accurate. 
*/ BOOST_AUTO_TEST_CASE(CoverTreeConstructionTest) { arma::mat dataset; // 50-dimensional, 1000 point. dataset.randu(50, 1000); typedef StandardCoverTree TreeType; TreeType tree(dataset); // Ensure each leaf is only created once. arma::vec counts; counts.zeros(1000); RecurseTreeCountLeaves(tree, counts); for (size_t i = 0; i < 1000; ++i) BOOST_REQUIRE_EQUAL(counts[i], 1); // Each non-leaf should have a self-child. CheckSelfChild(tree); // Each node must satisfy the covering principle (its children must be less // than or equal to a certain distance apart). CheckCovering >(tree); // There's no need to check the separation because that is relaxed in our // implementation. } /** * Create a cover tree on sparse data and make sure it's accurate. */ BOOST_AUTO_TEST_CASE(SparseCoverTreeConstructionTest) { arma::sp_mat dataset; // 50-dimensional, 1000 point. dataset.sprandu(50, 1000, 0.3); typedef StandardCoverTree TreeType; TreeType tree(dataset); // Ensure each leaf is only created once. arma::vec counts; counts.zeros(1000); RecurseTreeCountLeaves(tree, counts); for (size_t i = 0; i < 1000; ++i) BOOST_REQUIRE_EQUAL(counts[i], 1); // Each non-leaf should have a self-child. CheckSelfChild(tree); // Each node must satisfy the covering principle (its children must be less // than or equal to a certain distance apart). CheckCovering >(tree); // There's no need to check the separation invariant because that is relaxed // in our implementation. } /** * Test the manual constructor. */ BOOST_AUTO_TEST_CASE(CoverTreeManualConstructorTest) { arma::mat dataset; dataset.zeros(10, 10); typedef StandardCoverTree TreeType; TreeType node(dataset, 1.3, 3, 2, NULL, 1.5, 2.75); BOOST_REQUIRE_EQUAL(&node.Dataset(), &dataset); BOOST_REQUIRE_EQUAL(node.Base(), 1.3); BOOST_REQUIRE_EQUAL(node.Point(), 3); BOOST_REQUIRE_EQUAL(node.Scale(), 2); BOOST_REQUIRE_EQUAL(node.Parent(), (CoverTree<>*) NULL); BOOST_REQUIRE_EQUAL(node.ParentDistance(), 1.5); BOOST_REQUIRE_EQUAL(node.FurthestDescendantDistance(), 2.75); } /** * Make sure cover trees work in different metric spaces. */ BOOST_AUTO_TEST_CASE(CoverTreeAlternateMetricTest) { arma::mat dataset; // 5-dimensional, 300-point dataset. dataset.randu(5, 300); typedef StandardCoverTree TreeType; TreeType tree(dataset); // Ensure each leaf is only created once. arma::vec counts; counts.zeros(300); RecurseTreeCountLeaves(tree, counts); for (size_t i = 0; i < 300; ++i) BOOST_REQUIRE_EQUAL(counts[i], 1); // Each non-leaf should have a self-child. CheckSelfChild(tree); // Each node must satisfy the covering principle (its children must be less // than or equal to a certain distance apart). CheckCovering(tree); // There's no need to check the separation invariant because that is relaxed // in our implementation. } /** * Make sure copy constructor works for the cover tree. */ BOOST_AUTO_TEST_CASE(CoverTreeCopyConstructor) { arma::mat dataset; dataset.randu(10, 10); // dataset is irrelevant. typedef StandardCoverTree TreeType; TreeType c(dataset, 1.3, 0, 5, NULL, 1.45, 5.2); // Random parameters. c.Children().push_back(new TreeType(dataset, 1.3, 1, 4, &c, 1.3, 2.45)); c.Children().push_back(new TreeType(dataset, 1.5, 2, 3, &c, 1.2, 5.67)); TreeType d = c; // Check that everything is the same. // As the tree being copied doesn't own the dataset, they must share the same // pointer. 
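// (memptr() exposes the underlying Armadillo buffer, so equal pointers mean
// both trees alias exactly the same matrix memory.)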
BOOST_REQUIRE_EQUAL(c.Dataset().memptr(), d.Dataset().memptr()); BOOST_REQUIRE_CLOSE(c.Base(), d.Base(), 1e-50); BOOST_REQUIRE_EQUAL(c.Point(), d.Point()); BOOST_REQUIRE_EQUAL(c.Scale(), d.Scale()); BOOST_REQUIRE_EQUAL(c.Parent(), d.Parent()); BOOST_REQUIRE_EQUAL(c.ParentDistance(), d.ParentDistance()); BOOST_REQUIRE_EQUAL(c.FurthestDescendantDistance(), d.FurthestDescendantDistance()); BOOST_REQUIRE_EQUAL(c.NumChildren(), d.NumChildren()); BOOST_REQUIRE_NE(&c.Child(0), &d.Child(0)); BOOST_REQUIRE_NE(&c.Child(1), &d.Child(1)); BOOST_REQUIRE_EQUAL(c.Child(0).Parent(), &c); BOOST_REQUIRE_EQUAL(c.Child(1).Parent(), &c); BOOST_REQUIRE_EQUAL(d.Child(0).Parent(), &d); BOOST_REQUIRE_EQUAL(d.Child(1).Parent(), &d); // Check that the children are okay. BOOST_REQUIRE_EQUAL(c.Child(0).Dataset().memptr(), c.Dataset().memptr()); BOOST_REQUIRE_CLOSE(c.Child(0).Base(), d.Child(0).Base(), 1e-50); BOOST_REQUIRE_EQUAL(c.Child(0).Point(), d.Child(0).Point()); BOOST_REQUIRE_EQUAL(c.Child(0).Scale(), d.Child(0).Scale()); BOOST_REQUIRE_EQUAL(c.Child(0).ParentDistance(), d.Child(0).ParentDistance()); BOOST_REQUIRE_EQUAL(c.Child(0).FurthestDescendantDistance(), d.Child(0).FurthestDescendantDistance()); BOOST_REQUIRE_EQUAL(c.Child(0).NumChildren(), d.Child(0).NumChildren()); BOOST_REQUIRE_EQUAL(c.Child(1).Dataset().memptr(), c.Dataset().memptr()); BOOST_REQUIRE_CLOSE(c.Child(1).Base(), d.Child(1).Base(), 1e-50); BOOST_REQUIRE_EQUAL(c.Child(1).Point(), d.Child(1).Point()); BOOST_REQUIRE_EQUAL(c.Child(1).Scale(), d.Child(1).Scale()); BOOST_REQUIRE_EQUAL(c.Child(1).ParentDistance(), d.Child(1).ParentDistance()); BOOST_REQUIRE_EQUAL(c.Child(1).FurthestDescendantDistance(), d.Child(1).FurthestDescendantDistance()); BOOST_REQUIRE_EQUAL(c.Child(1).NumChildren(), d.Child(1).NumChildren()); // Check copy constructor when the tree being copied owns the dataset. TreeType e(std::move(dataset), 1.3); TreeType f = e; // As the tree being copied owns the dataset, they must have different // instances. BOOST_REQUIRE_NE(e.Dataset().memptr(), f.Dataset().memptr()); } BOOST_AUTO_TEST_CASE(CoverTreeMoveDatasetTest) { arma::mat dataset = arma::randu(3, 1000); typedef StandardCoverTree TreeType; TreeType t(std::move(dataset)); BOOST_REQUIRE_EQUAL(dataset.n_elem, 0); BOOST_REQUIRE_EQUAL(t.Dataset().n_rows, 3); BOOST_REQUIRE_EQUAL(t.Dataset().n_cols, 1000); EuclideanDistance ed; // Test the other constructor. dataset = arma::randu(3, 1000); TreeType t2(std::move(dataset), ed); BOOST_REQUIRE_EQUAL(dataset.n_elem, 0); BOOST_REQUIRE_EQUAL(t2.Dataset().n_rows, 3); BOOST_REQUIRE_EQUAL(t2.Dataset().n_cols, 1000); } /** * Make sure copy constructor works right for the binary space tree. */ BOOST_AUTO_TEST_CASE(BinarySpaceTreeCopyConstructor) { arma::mat data("1"); typedef KDTree TreeType; TreeType b(data); b.Begin() = 10; b.Count() = 50; b.Left() = new TreeType(data); b.Left()->Begin() = 10; b.Left()->Count() = 30; b.Right() = new TreeType(data); b.Right()->Begin() = 40; b.Right()->Count() = 20; // Copy the tree. TreeType c(b); // Ensure everything copied correctly. BOOST_REQUIRE_EQUAL(b.Begin(), c.Begin()); BOOST_REQUIRE_EQUAL(b.Count(), c.Count()); BOOST_REQUIRE_NE(b.Left(), c.Left()); BOOST_REQUIRE_NE(b.Right(), c.Right()); // Check the children. 
/**
 * Make sure the copy constructor works right for the binary space tree.
 */
BOOST_AUTO_TEST_CASE(BinarySpaceTreeCopyConstructor)
{
  arma::mat data("1");
  typedef KDTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;
  TreeType b(data);
  b.Begin() = 10;
  b.Count() = 50;
  b.Left() = new TreeType(data);
  b.Left()->Begin() = 10;
  b.Left()->Count() = 30;
  b.Right() = new TreeType(data);
  b.Right()->Begin() = 40;
  b.Right()->Count() = 20;

  // Copy the tree.
  TreeType c(b);

  // Ensure everything copied correctly.
  BOOST_REQUIRE_EQUAL(b.Begin(), c.Begin());
  BOOST_REQUIRE_EQUAL(b.Count(), c.Count());
  BOOST_REQUIRE_NE(b.Left(), c.Left());
  BOOST_REQUIRE_NE(b.Right(), c.Right());

  // Check the children.
  BOOST_REQUIRE_EQUAL(b.Left()->Begin(), c.Left()->Begin());
  BOOST_REQUIRE_EQUAL(b.Left()->Count(), c.Left()->Count());
  BOOST_REQUIRE_EQUAL(b.Left()->Left(), (TreeType*) NULL);
  BOOST_REQUIRE_EQUAL(b.Left()->Left(), c.Left()->Left());
  BOOST_REQUIRE_EQUAL(b.Left()->Right(), (TreeType*) NULL);
  BOOST_REQUIRE_EQUAL(b.Left()->Right(), c.Left()->Right());
  BOOST_REQUIRE_EQUAL(b.Right()->Begin(), c.Right()->Begin());
  BOOST_REQUIRE_EQUAL(b.Right()->Count(), c.Right()->Count());
  BOOST_REQUIRE_EQUAL(b.Right()->Left(), (TreeType*) NULL);
  BOOST_REQUIRE_EQUAL(b.Right()->Left(), c.Right()->Left());
  BOOST_REQUIRE_EQUAL(b.Right()->Right(), (TreeType*) NULL);
  BOOST_REQUIRE_EQUAL(b.Right()->Right(), c.Right()->Right());
}

//! Count the number of leaves under this node.
template<typename TreeType>
size_t NumLeaves(TreeType* node)
{
  if (node->NumChildren() == 0)
    return 1;

  size_t count = 0;
  for (size_t i = 0; i < node->NumChildren(); ++i)
    count += NumLeaves(&node->Child(i));

  return count;
}

//! Returns true if the index is contained somewhere under this node.
template<typename TreeType>
bool FindIndex(TreeType* node, const size_t index)
{
  for (size_t i = 0; i < node->NumPoints(); ++i)
    if (node->Point(i) == index)
      return true;

  for (size_t i = 0; i < node->NumChildren(); ++i)
    if (FindIndex(&node->Child(i), index))
      return true;

  return false;
}

//! Check that the points in the given node are accessible through the
//! Descendant() function of the root node.
template<typename TreeType>
bool CheckAccessibility(TreeType* childNode, TreeType* rootNode)
{
  for (size_t i = 0; i < childNode->NumPoints(); ++i)
  {
    bool found = false;
    for (size_t j = 0; j < rootNode->NumDescendants(); ++j)
    {
      if (childNode->Point(i) == rootNode->Descendant(j))
      {
        found = true;
        break;
      }
    }

    if (!found)
    {
      Log::Debug << "Did not find descendant " << childNode->Point(i) << ".\n";
      return false;
    }
  }

  // Now check the children.
  for (size_t i = 0; i < childNode->NumChildren(); ++i)
    if (!CheckAccessibility(&childNode->Child(i), rootNode))
      return false;

  return true;
}

//! Check that Descendant() and NumDescendants() are right for this node.
template<typename TreeType>
void CheckDescendants(TreeType* node)
{
  // In a cover tree, the number of leaves should be the number of descendant
  // points.
  const size_t numLeaves = NumLeaves(node);
  BOOST_REQUIRE_EQUAL(numLeaves, node->NumDescendants());

  // Now check that each descendant is somewhere in the tree.
  for (size_t i = 0; i < node->NumDescendants(); ++i)
  {
    Log::Debug << "Check for descendant " << node->Descendant(i) << " (i "
        << i << ").\n";
    BOOST_REQUIRE_EQUAL(FindIndex(node, node->Descendant(i)), true);
  }

  // Now check that every actual descendant is accessible through the
  // Descendant() function.
  BOOST_REQUIRE_EQUAL(CheckAccessibility(node, node), true);

  // Now check that there are no duplicates in the list of descendants.
  std::vector<size_t> descendants;
  descendants.resize(node->NumDescendants());
  for (size_t i = 0; i < node->NumDescendants(); ++i)
    descendants[i] = node->Descendant(i);

  // Sort the list.
  std::sort(descendants.begin(), descendants.end());

  // Check that there are no duplicates (this is easy because it's sorted).
  for (size_t i = 1; i < descendants.size(); ++i)
    BOOST_REQUIRE_NE(descendants[i], descendants[i - 1]);

  // Now perform these same checks for the children.
  for (size_t i = 0; i < node->NumChildren(); ++i)
    CheckDescendants(&node->Child(i));
}
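// The duplicate check above uses the classic sort-then-compare-neighbors
// idiom.  A hedged one-liner alternative using std::adjacent_find (the helper
// name is hypothetical):
#include <algorithm>
#include <vector>

inline bool HasDuplicates(std::vector<size_t> v) // By value: sorted locally.
{
  std::sort(v.begin(), v.end());
  // After sorting, any duplicate pair must occupy adjacent slots.
  return std::adjacent_find(v.begin(), v.end()) != v.end();
}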
/**
 * Make sure Descendant() and NumDescendants() work properly for the cover
 * tree.
 */
BOOST_AUTO_TEST_CASE(CoverTreeDescendantTest)
{
  arma::mat dataset;
  dataset.randu(3, 100);

  StandardCoverTree<EuclideanDistance, EmptyStatistic, arma::mat>
      tree(dataset);

  // Now check that the NumDescendants() count and each Descendant() is right
  // using the recursive function above.
  CheckDescendants(&tree);
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/tree_traits_test.cpp

/**
 * @file tree_traits_test.cpp
 * @author Ryan Curtin
 *
 * Tests for the TreeTraits class.  These could all be known at compile-time,
 * but realistically the function is to be sure that nobody changes tree
 * traits without breaking something.  Thus, people must be certain when they
 * make a change like that (because they have to change the test too).  That's
 * the hope, at least...
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/metrics/lmetric.hpp>
#include <mlpack/core/tree/tree_traits.hpp>
#include <mlpack/core/tree/binary_space_tree.hpp>
#include <mlpack/core/tree/cover_tree.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::tree;
using namespace mlpack::metric;

BOOST_AUTO_TEST_SUITE(TreeTraitsTest);

// Be careful!  When writing new tests, always get the boolean value of each
// trait and store it in a temporary, because the Boost unit test macros do
// weird things and will cause bizarre problems.

// Test the defaults.
BOOST_AUTO_TEST_CASE(DefaultsTraitsTest)
{
  // An irrelevant non-tree type is used here so that the default
  // implementation of TreeTraits is chosen.
  bool b = TreeTraits<int>::HasOverlappingChildren;
  BOOST_REQUIRE_EQUAL(b, true);
  b = TreeTraits<int>::HasSelfChildren;
  BOOST_REQUIRE_EQUAL(b, false);
  b = TreeTraits<int>::FirstPointIsCentroid;
  BOOST_REQUIRE_EQUAL(b, false);
  b = TreeTraits<int>::RearrangesDataset;
  BOOST_REQUIRE_EQUAL(b, false);
  b = TreeTraits<int>::BinaryTree;
  BOOST_REQUIRE_EQUAL(b, false);
}

// Test the binary space tree traits.
BOOST_AUTO_TEST_CASE(BinarySpaceTreeTraitsTest)
{
  typedef BinarySpaceTree<LMetric<2, false>> TreeType;

  // Children are non-overlapping.
  bool b = TreeTraits<TreeType>::HasOverlappingChildren;
  BOOST_REQUIRE_EQUAL(b, false);

  // Points are not contained at multiple levels.
  b = TreeTraits<TreeType>::HasSelfChildren;
  BOOST_REQUIRE_EQUAL(b, false);

  // The first point is not the centroid.
  b = TreeTraits<TreeType>::FirstPointIsCentroid;
  BOOST_REQUIRE_EQUAL(b, false);

  // The dataset gets rearranged at build time.
  b = TreeTraits<TreeType>::RearrangesDataset;
  BOOST_REQUIRE_EQUAL(b, true);

  // It is a binary tree.
  b = TreeTraits<TreeType>::BinaryTree;
  BOOST_REQUIRE_EQUAL(b, true);
}

// Test the cover tree traits.
BOOST_AUTO_TEST_CASE(CoverTreeTraitsTest)
{
  // Children may be overlapping.
  bool b = TreeTraits<CoverTree<>>::HasOverlappingChildren;
  BOOST_REQUIRE_EQUAL(b, true);

  // The cover tree has self-children.
  b = TreeTraits<CoverTree<>>::HasSelfChildren;
  BOOST_REQUIRE_EQUAL(b, true);

  // The first point is the center of the node.
  b = TreeTraits<CoverTree<>>::FirstPointIsCentroid;
  BOOST_REQUIRE_EQUAL(b, true);

  b = TreeTraits<CoverTree<>>::RearrangesDataset;
  BOOST_REQUIRE_EQUAL(b, false);

  b = TreeTraits<CoverTree<>>::BinaryTree;
  BOOST_REQUIRE_EQUAL(b, false); // Not necessarily binary.
}

BOOST_AUTO_TEST_SUITE_END();
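// TreeTraits is meant to be read at compile time: callers branch on a trait
// rather than on the concrete tree type.  A minimal hedged sketch of that
// usage pattern (the helper name is hypothetical; only traits exercised in
// the tests above are used):
template<typename TreeType>
bool NeedsIndexRemapping()
{
  // True for trees like the KD-tree, which permute the dataset they build on,
  // so query results must be mapped back to the caller's original indices.
  return mlpack::tree::TreeTraits<TreeType>::RearrangesDataset;
}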
mlpack-2.2.5/src/mlpack/tests/ub_tree_test.cpp

/**
 * @file ub_tree_test.cpp
 * @author Mikhail Lozhnikov
 *
 * Tests for the UB tree.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/metrics/lmetric.hpp>
#include <mlpack/core/tree/binary_space_tree.hpp>
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>
#include <boost/test/unit_test.hpp>

using namespace mlpack;
using namespace mlpack::math;
using namespace mlpack::tree;
using namespace mlpack::metric;
using namespace mlpack::bound;
using namespace mlpack::neighbor;

BOOST_AUTO_TEST_SUITE(UBTreeTest);

BOOST_AUTO_TEST_CASE(AddressTest)
{
  typedef double ElemType;
  // The address is an unsigned integer wide enough to hold the bit pattern of
  // the element type.
  typedef typename std::conditional<sizeof(ElemType) == sizeof(uint64_t),
      uint64_t, uint32_t>::type AddressElemType;

  arma::Mat<ElemType> dataset(8, 1000);

  dataset.randu();
  dataset -= 0.5;

  arma::Col<AddressElemType> address(dataset.n_rows);
  arma::Col<ElemType> point(dataset.n_rows);

  // Ensure that this is a one-to-one transform.
  for (size_t i = 0; i < dataset.n_cols; i++)
  {
    addr::PointToAddress(address, dataset.col(i));
    addr::AddressToPoint(point, address);

    for (size_t k = 0; k < dataset.n_rows; k++)
      BOOST_REQUIRE_CLOSE(dataset(k, i), point[k], 1e-13);
  }
}

template<typename TreeType>
void CheckSplit(const TreeType& tree)
{
  typedef typename TreeType::ElemType ElemType;
  typedef typename std::conditional<sizeof(ElemType) == sizeof(uint64_t),
      uint64_t, uint32_t>::type AddressElemType;

  if (tree.IsLeaf())
    return;

  arma::Col<AddressElemType> lo(tree.Bound().Dim());
  arma::Col<AddressElemType> hi(tree.Bound().Dim());

  lo.fill(std::numeric_limits<AddressElemType>::max());
  hi.fill(0);

  arma::Col<AddressElemType> address(tree.Bound().Dim());

  // Find the highest address of the left node.
  for (size_t i = 0; i < tree.Left()->NumDescendants(); i++)
  {
    addr::PointToAddress(address,
        tree.Dataset().col(tree.Left()->Descendant(i)));

    if (addr::CompareAddresses(address, hi) > 0)
      hi = address;
  }

  // Find the lowest address of the right node.
  for (size_t i = 0; i < tree.Right()->NumDescendants(); i++)
  {
    addr::PointToAddress(address,
        tree.Dataset().col(tree.Right()->Descendant(i)));

    if (addr::CompareAddresses(address, lo) < 0)
      lo = address;
  }

  // Addresses in the left node should be less than addresses in the right
  // node.
  BOOST_REQUIRE_LE(addr::CompareAddresses(hi, lo), 0);

  CheckSplit(*tree.Left());
  CheckSplit(*tree.Right());
}

BOOST_AUTO_TEST_CASE(UBTreeSplitTest)
{
  typedef UBTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  arma::mat dataset(8, 1000);
  dataset.randu();

  TreeType tree(dataset);
  CheckSplit(tree);
}

template<typename TreeType>
void CheckBound(const TreeType& tree)
{
  typedef typename TreeType::ElemType ElemType;
  for (size_t i = 0; i < tree.NumDescendants(); i++)
  {
    arma::Col<ElemType> point = tree.Dataset().col(tree.Descendant(i));

    // Check that the point is contained in the bound.
    BOOST_REQUIRE_EQUAL(true, tree.Bound().Contains(point));

    const arma::Mat<ElemType>& loBound = tree.Bound().LoBound();
    const arma::Mat<ElemType>& hiBound = tree.Bound().HiBound();

    // Ensure that there is a hyperrectangle that contains the point.
    bool success = false;
    for (size_t j = 0; j < tree.Bound().NumBounds(); j++)
    {
      success = true;
      for (size_t k = 0; k < loBound.n_rows; k++)
      {
        if (point[k] < loBound(k, j) - 1e-14 * std::fabs(loBound(k, j)) ||
            point[k] > hiBound(k, j) + 1e-14 * std::fabs(hiBound(k, j)))
        {
          success = false;
          break;
        }
      }
      if (success)
        break;
    }

    BOOST_REQUIRE_EQUAL(success, true);
  }

  if (!tree.IsLeaf())
  {
    CheckBound(*tree.Left());
    CheckBound(*tree.Right());
  }
}

BOOST_AUTO_TEST_CASE(UBTreeBoundTest)
{
  typedef UBTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  arma::mat dataset(8, 1000);
  dataset.randu();

  TreeType tree(dataset);
  CheckBound(tree);
}
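// UB trees order points by address: each coordinate is mapped to a fixed-
// point integer and the bits of all coordinates are interleaved, so comparing
// addresses lexicographically walks a Z-order (Morton) curve.  A hedged,
// standalone sketch of the idea in two dimensions follows; ToFixedPoint and
// InterleaveBits are hypothetical helpers, not mlpack's addr:: routines.
#include <cstdint>

// Map a scalar in [0, 1) to a 16-bit fixed-point value.
inline uint16_t ToFixedPoint(const double x)
{
  return static_cast<uint16_t>(x * 65536.0);
}

// Interleave two 16-bit coordinates into one 32-bit Morton key.
inline uint32_t InterleaveBits(const uint16_t a, const uint16_t b)
{
  uint32_t key = 0;
  for (int bit = 15; bit >= 0; --bit)
  {
    key = (key << 1) | ((a >> bit) & 1u); // One bit from the first coordinate.
    key = (key << 1) | ((b >> bit) & 1u); // One bit from the second.
  }
  return key;
}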
// Ensure that MinDistance() and MaxDistance() work correctly.
template<typename MetricType, typename TreeType>
void CheckDistance(TreeType& tree, TreeType* node = NULL)
{
  typedef typename TreeType::ElemType ElemType;

  if (node == NULL)
  {
    node = &tree;

    while (node->Parent() != NULL)
      node = node->Parent();

    CheckDistance<MetricType>(tree, node);

    for (size_t j = 0; j < tree.Dataset().n_cols; j++)
    {
      const arma::Col<ElemType>& point = tree.Dataset().col(j);
      ElemType maxDist = 0;
      ElemType minDist = std::numeric_limits<ElemType>::max();
      for (size_t i = 0; i < tree.NumDescendants(); i++)
      {
        ElemType dist = MetricType::Evaluate(
            tree.Dataset().col(tree.Descendant(i)),
            tree.Dataset().col(j));

        if (dist > maxDist)
          maxDist = dist;
        if (dist < minDist)
          minDist = dist;
      }

      BOOST_REQUIRE_LE(tree.Bound().MinDistance(point), minDist *
          (1.0 + 10 * std::numeric_limits<ElemType>::epsilon()));
      BOOST_REQUIRE_LE(maxDist, tree.Bound().MaxDistance(point) *
          (1.0 + 10 * std::numeric_limits<ElemType>::epsilon()));

      math::RangeType<ElemType> r = tree.Bound().RangeDistance(point);

      BOOST_REQUIRE_LE(r.Lo(), minDist *
          (1.0 + 10 * std::numeric_limits<ElemType>::epsilon()));
      BOOST_REQUIRE_LE(maxDist, r.Hi() *
          (1.0 + 10 * std::numeric_limits<ElemType>::epsilon()));
    }

    if (!tree.IsLeaf())
    {
      CheckDistance<MetricType>(*tree.Left());
      CheckDistance<MetricType>(*tree.Right());
    }
  }
  else
  {
    if (&tree != node)
    {
      ElemType maxDist = 0;
      ElemType minDist = std::numeric_limits<ElemType>::max();
      for (size_t i = 0; i < tree.NumDescendants(); i++)
        for (size_t j = 0; j < node->NumDescendants(); j++)
        {
          ElemType dist = MetricType::Evaluate(
              tree.Dataset().col(tree.Descendant(i)),
              node->Dataset().col(node->Descendant(j)));

          if (dist > maxDist)
            maxDist = dist;
          if (dist < minDist)
            minDist = dist;
        }

      BOOST_REQUIRE_LE(tree.Bound().MinDistance(node->Bound()), minDist *
          (1.0 + 10 * std::numeric_limits<ElemType>::epsilon()));
      BOOST_REQUIRE_LE(maxDist, tree.Bound().MaxDistance(node->Bound()) *
          (1.0 + 10 * std::numeric_limits<ElemType>::epsilon()));

      math::RangeType<ElemType> r = tree.Bound().RangeDistance(node->Bound());

      BOOST_REQUIRE_LE(r.Lo(), minDist *
          (1.0 + 10 * std::numeric_limits<ElemType>::epsilon()));
      BOOST_REQUIRE_LE(maxDist, r.Hi() *
          (1.0 + 10 * std::numeric_limits<ElemType>::epsilon()));
    }

    if (!node->IsLeaf())
    {
      CheckDistance<MetricType>(tree, node->Left());
      CheckDistance<MetricType>(tree, node->Right());
    }
  }
}

BOOST_AUTO_TEST_CASE(UBTreeDistanceTest)
{
  typedef UBTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  arma::mat dataset(8, 1000);
  dataset.randu();

  TreeType tree(dataset);
  CheckDistance<EuclideanDistance>(tree);
}

BOOST_AUTO_TEST_CASE(UBTreeTest)
{
  typedef UBTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  size_t maxRuns = 10; // Ten total tests.
  size_t pointIncrements = 1000; // Range is from 1000 points to 10000.

  // We use the default leaf size of 20.
  for (size_t run = 0; run < maxRuns; run++)
  {
    size_t dimensions = run + 2;
    size_t maxPoints = (run + 1) * pointIncrements;

    size_t size = maxPoints;
    arma::mat dataset = arma::mat(dimensions, size);
    arma::mat datacopy; // Used to test mappings.

    // Mappings for post-sort verification of data.
    std::vector<size_t> newToOld;
    std::vector<size_t> oldToNew;

    // Generate data.
    dataset.randu();

    // Build the tree itself.
    TreeType root(dataset, newToOld, oldToNew);
    const arma::mat& treeset = root.Dataset();

    // Ensure the size of the tree is correct.
    BOOST_REQUIRE_EQUAL(root.NumDescendants(), size);

    // Check the forward and backward mappings for correctness.
    for (size_t i = 0; i < size; i++)
    {
      for (size_t j = 0; j < dimensions; j++)
      {
        BOOST_REQUIRE_EQUAL(treeset(j, i), dataset(j, newToOld[i]));
        BOOST_REQUIRE_EQUAL(treeset(j, oldToNew[i]), dataset(j, i));
      }
    }
  }
}
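// The checks above never compare floating-point bounds exactly: both the
// bound computation and the brute-force loop accumulate rounding error, so
// one side is inflated by a ~10-ulp relative slack.  The same tolerance as a
// hedged standalone helper (the name is hypothetical):
#include <limits>

template<typename T>
inline bool LessOrAlmostEqual(const T lhs, const T rhs)
{
  // Allow rhs to grow by ten units of relative rounding error before failing.
  return lhs <= rhs * (1.0 + 10 * std::numeric_limits<T>::epsilon());
}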
BOOST_AUTO_TEST_CASE(SingleTreeTraverserTest)
{
  arma::mat dataset;
  dataset.randu(8, 1000); // 1000 points in 8 dimensions.

  arma::Mat<size_t> neighbors1;
  arma::mat distances1;
  arma::Mat<size_t> neighbors2;
  arma::mat distances2;

  // Nearest neighbor search with the UB tree.
  NeighborSearch<NearestNeighborSort, LMetric<2, true>, arma::mat, UBTree>
      knn1(dataset, SINGLE_TREE_MODE);

  knn1.Search(5, neighbors1, distances1);

  // Nearest neighbor search the naive way.
  KNN knn2(dataset, NAIVE_MODE);

  knn2.Search(5, neighbors2, distances2);

  for (size_t i = 0; i < neighbors1.size(); i++)
  {
    BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]);
    BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]);
  }
}

BOOST_AUTO_TEST_CASE(DualTreeTraverserTest)
{
  arma::mat dataset;
  dataset.randu(8, 1000); // 1000 points in 8 dimensions.

  arma::Mat<size_t> neighbors1;
  arma::mat distances1;
  arma::Mat<size_t> neighbors2;
  arma::mat distances2;

  // Nearest neighbor search with the UB tree.
  NeighborSearch<NearestNeighborSort, LMetric<2, true>, arma::mat, UBTree>
      knn1(dataset, DUAL_TREE_MODE);

  knn1.Search(5, neighbors1, distances1);

  // Nearest neighbor search the naive way.
  KNN knn2(dataset, NAIVE_MODE);

  knn2.Search(5, neighbors2, distances2);

  for (size_t i = 0; i < neighbors1.size(); i++)
  {
    BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]);
    BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]);
  }
}

BOOST_AUTO_TEST_SUITE_END();

mlpack-2.2.5/src/mlpack/tests/union_find_test.cpp

/**
 * @file union_find_test.cpp
 * @author Bill March (march@gatech.edu)
 *
 * Unit tests for the Union-Find data structure.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/methods/emst/union_find.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::emst;

BOOST_AUTO_TEST_SUITE(UnionFindTest);

BOOST_AUTO_TEST_CASE(TestFind)
{
  static const size_t testSize = 10;
  UnionFind testUnionFind(testSize);

  for (size_t i = 0; i < testSize; i++)
    BOOST_REQUIRE(testUnionFind.Find(i) == i);

  testUnionFind.Union(0, 1);
  testUnionFind.Union(1, 2);

  BOOST_REQUIRE(testUnionFind.Find(2) == testUnionFind.Find(0));
}

BOOST_AUTO_TEST_CASE(TestUnion)
{
  static const size_t testSize = 10;
  UnionFind testUnionFind(testSize);

  testUnionFind.Union(0, 1);
  testUnionFind.Union(2, 3);
  testUnionFind.Union(0, 2);
  testUnionFind.Union(5, 0);
  testUnionFind.Union(0, 6);

  BOOST_REQUIRE(testUnionFind.Find(0) == testUnionFind.Find(1));
  BOOST_REQUIRE(testUnionFind.Find(2) == testUnionFind.Find(3));
  BOOST_REQUIRE(testUnionFind.Find(1) == testUnionFind.Find(5));
  BOOST_REQUIRE(testUnionFind.Find(6) == testUnionFind.Find(3));
}

BOOST_AUTO_TEST_SUITE_END();
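// The structure exercised above maintains disjoint sets under Union() and
// answers set-membership queries through Find().  A hedged, self-contained
// sketch of the classic implementation with path compression and union by
// rank; SimpleUnionFind is hypothetical and is not mlpack's UnionFind class.
#include <cstddef>
#include <numeric>
#include <utility>
#include <vector>

class SimpleUnionFind
{
 public:
  explicit SimpleUnionFind(const std::size_t n) : parent(n), rank(n, 0)
  {
    std::iota(parent.begin(), parent.end(), 0); // Each element is its own set.
  }

  std::size_t Find(const std::size_t x)
  {
    if (parent[x] != x)
      parent[x] = Find(parent[x]); // Path compression: point at the root.
    return parent[x];
  }

  void Union(std::size_t a, std::size_t b)
  {
    a = Find(a);
    b = Find(b);
    if (a == b)
      return;
    if (rank[a] < rank[b])
      std::swap(a, b);
    parent[b] = a; // Attach the shallower tree under the deeper one.
    if (rank[a] == rank[b])
      ++rank[a];
  }

 private:
  std::vector<std::size_t> parent;
  std::vector<std::size_t> rank;
};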
mlpack-2.2.5/src/mlpack/tests/vantage_point_tree_test.cpp

/**
 * @file vantage_point_tree_test.cpp
 *
 * Tests for tree-building methods.
 *
 * mlpack is free software; you may redistribute it and/or modify it under the
 * terms of the 3-clause BSD license.  You should have received a copy of the
 * 3-clause BSD license along with mlpack.  If not, see
 * http://www.opensource.org/licenses/BSD-3-Clause for more information.
 */
#include <mlpack/core.hpp>
#include <mlpack/core/metrics/lmetric.hpp>
#include <mlpack/core/tree/bounds.hpp>
#include <mlpack/core/tree/binary_space_tree.hpp>
#include <mlpack/methods/neighbor_search/neighbor_search.hpp>
#include <boost/test/unit_test.hpp>
#include "test_tools.hpp"

using namespace mlpack;
using namespace mlpack::math;
using namespace mlpack::tree;
using namespace mlpack::neighbor;
using namespace mlpack::metric;
using namespace mlpack::bound;

BOOST_AUTO_TEST_SUITE(VantagePointTreeTest);

BOOST_AUTO_TEST_CASE(VPTreeTraitsTest)
{
  typedef VPTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  bool b = TreeTraits<TreeType>::HasOverlappingChildren;
  BOOST_REQUIRE_EQUAL(b, true);
  b = TreeTraits<TreeType>::FirstPointIsCentroid;
  BOOST_REQUIRE_EQUAL(b, false);
  b = TreeTraits<TreeType>::HasSelfChildren;
  BOOST_REQUIRE_EQUAL(b, false);
  b = TreeTraits<TreeType>::RearrangesDataset;
  BOOST_REQUIRE_EQUAL(b, true);
  b = TreeTraits<TreeType>::BinaryTree;
  BOOST_REQUIRE_EQUAL(b, true);
}

BOOST_AUTO_TEST_CASE(HollowBallBoundTest)
{
  HollowBallBound<EuclideanDistance> b(2, 4,
      arma::vec("1.0 2.0 3.0 4.0 5.0"));

  BOOST_REQUIRE_EQUAL(b.Contains(arma::vec("1.0 2.0 3.0 7.0 5.0")), true);
  BOOST_REQUIRE_EQUAL(b.Contains(arma::vec("1.0 2.0 3.0 9.0 5.0")), false);
  BOOST_REQUIRE_EQUAL(b.Contains(arma::vec("1.0 2.0 3.0 5.0 5.0")), false);

  HollowBallBound<EuclideanDistance> b2(0.5, 1,
      arma::vec("1.0 2.0 3.0 7.0 5.0"));
  BOOST_REQUIRE_EQUAL(b.Contains(b2), true);

  b2 = HollowBallBound<EuclideanDistance>(2.5, 3.5,
      arma::vec("1.0 2.0 3.0 4.5 5.0"));
  BOOST_REQUIRE_EQUAL(b.Contains(b2), true);

  b2 = HollowBallBound<EuclideanDistance>(2.0, 3.5,
      arma::vec("1.0 2.0 3.0 4.5 5.0"));
  BOOST_REQUIRE_EQUAL(b.Contains(b2), false);

  BOOST_REQUIRE_CLOSE(b.MinDistance(arma::vec("1.0 2.0 8.0 4.0 5.0")),
      1.0, 1e-5);
  BOOST_REQUIRE_CLOSE(b.MinDistance(arma::vec("1.0 2.0 4.0 4.0 5.0")),
      1.0, 1e-5);
  BOOST_REQUIRE_CLOSE(b.MinDistance(arma::vec("1.0 2.0 3.0 4.0 5.0")),
      2.0, 1e-5);
  BOOST_REQUIRE_CLOSE(b.MinDistance(arma::vec("1.0 2.0 5.0 4.0 5.0")),
      0.0, 1e-5);
  BOOST_REQUIRE_CLOSE(b.MinDistance(arma::vec("5.0 2.0 3.0 4.0 5.0")),
      0.0, 1e-5);
  BOOST_REQUIRE_CLOSE(b.MinDistance(arma::vec("3.0 2.0 3.0 4.0 5.0")),
      0.0, 1e-5);

  BOOST_REQUIRE_CLOSE(b.MaxDistance(arma::vec("1.0 2.0 4.0 4.0 5.0")),
      5.0, 1e-5);
  BOOST_REQUIRE_CLOSE(b.MaxDistance(arma::vec("1.0 2.0 8.0 4.0 5.0")),
      9.0, 1e-5);
  BOOST_REQUIRE_CLOSE(b.MaxDistance(arma::vec("1.0 2.0 3.0 4.0 5.0")),
      4.0, 1e-5);

  b2 = HollowBallBound<EuclideanDistance>(3, 4,
      arma::vec("1.0 2.0 3.0 5.0 5.0"));
  BOOST_REQUIRE_CLOSE(b.MinDistance(b2), 0.0, 1e-5);

  b2 = HollowBallBound<EuclideanDistance>(1, 2,
      arma::vec("1.0 2.0 3.0 4.0 5.0"));
  BOOST_REQUIRE_CLOSE(b.MinDistance(b2), 0.0, 1e-5);

  b2 = HollowBallBound<EuclideanDistance>(0.5, 1.0,
      arma::vec("1.0 2.5 3.0 4.0 5.0"));
  BOOST_REQUIRE_CLOSE(b.MinDistance(b2), 0.5, 1e-5);

  b2 = HollowBallBound<EuclideanDistance>(0.5, 1.0,
      arma::vec("1.0 8.0 3.0 4.0 5.0"));
  BOOST_REQUIRE_CLOSE(b.MinDistance(b2), 1.0, 1e-5);

  b2 = HollowBallBound<EuclideanDistance>(0.5, 2.0,
      arma::vec("1.0 8.0 3.0 4.0 5.0"));
  BOOST_REQUIRE_CLOSE(b.MinDistance(b2), 0.0, 1e-5);

  b2 = HollowBallBound<EuclideanDistance>(0.5, 2.0,
      arma::vec("1.0 8.0 3.0 4.0 5.0"));
  BOOST_REQUIRE_CLOSE(b.MaxDistance(b2), 12.0, 1e-5);

  b2 = HollowBallBound<EuclideanDistance>(0.5, 2.0,
      arma::vec("1.0 3.0 3.0 4.0 5.0"));
  BOOST_REQUIRE_CLOSE(b.MaxDistance(b2), 7.0, 1e-5);

  HollowBallBound<EuclideanDistance> b1 = b;
  b2 = HollowBallBound<EuclideanDistance>(1.0, 2.0,
      arma::vec("1.0 2.5 3.0 4.0 5.0"));
  b1 |= b2;
  BOOST_REQUIRE_CLOSE(b1.InnerRadius(), 0.5, 1e-5);

  b1 = b;
  b2 = HollowBallBound<EuclideanDistance>(0.5, 2.0,
      arma::vec("1.0 3.0 3.0 4.0 5.0"));
  b1 |= b2;
  BOOST_REQUIRE_CLOSE(b1.InnerRadius(), 0.0, 1e-5);

  b1 = b;
  b2 = HollowBallBound<EuclideanDistance>(0.5, 4.0,
      arma::vec("1.0 3.0 3.0 4.0 5.0"));
  b1 |= b2;
  BOOST_REQUIRE_CLOSE(b1.OuterRadius(), 5.0, 1e-5);
}
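// The geometry behind the assertions above: for a hollow ball (spherical
// shell) with inner radius rInner and outer radius rOuter, the minimum
// distance from a point is zero inside the shell and otherwise the distance
// to the nearer boundary, while the farthest shell point lies diametrically
// opposite.  A hedged sketch assuming the inner and outer balls share a
// center (the helper names are hypothetical):
#include <algorithm>

// distToCenter = |p - c| under the bound's metric.
inline double ShellMinDistance(const double distToCenter,
                               const double rInner,
                               const double rOuter)
{
  return std::max({rInner - distToCenter, distToCenter - rOuter, 0.0});
}

inline double ShellMaxDistance(const double distToCenter, const double rOuter)
{
  return distToCenter + rOuter;
}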
template<typename TreeType>
void CheckBound(TreeType& tree)
{
  typedef typename TreeType::ElemType ElemType;
  if (tree.IsLeaf())
  {
    // Ensure that the bound contains all descendant points.
    for (size_t i = 0; i < tree.NumPoints(); i++)
    {
      ElemType dist = tree.Bound().Metric().Evaluate(tree.Bound().Center(),
          tree.Dataset().col(tree.Point(i)));

      ElemType hollowDist = tree.Bound().Metric().Evaluate(
          tree.Bound().HollowCenter(),
          tree.Dataset().col(tree.Point(i)));

      BOOST_REQUIRE_LE(tree.Bound().InnerRadius(), hollowDist *
          (1.0 + 10.0 * std::numeric_limits<ElemType>::epsilon()));

      BOOST_REQUIRE_LE(dist, tree.Bound().OuterRadius() *
          (1.0 + 10.0 * std::numeric_limits<ElemType>::epsilon()));
    }
  }
  else
  {
    // Ensure that the bound contains all descendant points.
    for (size_t i = 0; i < tree.NumDescendants(); i++)
    {
      ElemType dist = tree.Bound().Metric().Evaluate(tree.Bound().Center(),
          tree.Dataset().col(tree.Descendant(i)));

      ElemType hollowDist = tree.Bound().Metric().Evaluate(
          tree.Bound().HollowCenter(),
          tree.Dataset().col(tree.Descendant(i)));

      BOOST_REQUIRE_LE(tree.Bound().InnerRadius(), hollowDist *
          (1.0 + 10.0 * std::numeric_limits<ElemType>::epsilon()));

      BOOST_REQUIRE_LE(dist, tree.Bound().OuterRadius() *
          (1.0 + 10.0 * std::numeric_limits<ElemType>::epsilon()));
    }

    CheckBound(*tree.Left());
    CheckBound(*tree.Right());
  }
}

BOOST_AUTO_TEST_CASE(VPTreeBoundTest)
{
  typedef VPTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  arma::mat dataset(8, 1000);
  dataset.randu();

  TreeType tree(dataset);
  CheckBound(tree);
}

BOOST_AUTO_TEST_CASE(VPTreeTest)
{
  typedef VPTree<EuclideanDistance, EmptyStatistic, arma::mat> TreeType;

  size_t maxRuns = 10; // Ten total tests.
  size_t pointIncrements = 1000; // Range is from 1000 points to 10000.

  // We use the default leaf size of 20.
  for (size_t run = 0; run < maxRuns; run++)
  {
    size_t dimensions = run + 2;
    size_t maxPoints = (run + 1) * pointIncrements;

    size_t size = maxPoints;
    arma::mat dataset = arma::mat(dimensions, size);
    arma::mat datacopy; // Used to test mappings.

    // Mappings for post-sort verification of data.
    std::vector<size_t> newToOld;
    std::vector<size_t> oldToNew;

    // Generate data.
    dataset.randu();

    // Build the tree itself.
    TreeType root(dataset, newToOld, oldToNew);
    const arma::mat& treeset = root.Dataset();

    // Ensure the size of the tree is correct.
    BOOST_REQUIRE_EQUAL(root.NumDescendants(), size);

    // Check the forward and backward mappings for correctness.
    for (size_t i = 0; i < size; i++)
    {
      for (size_t j = 0; j < dimensions; j++)
      {
        BOOST_REQUIRE_EQUAL(treeset(j, i), dataset(j, newToOld[i]));
        BOOST_REQUIRE_EQUAL(treeset(j, oldToNew[i]), dataset(j, i));
      }
    }
  }
}

BOOST_AUTO_TEST_CASE(SingleTreeTraverserTest)
{
  arma::mat dataset;
  dataset.randu(8, 1000); // 1000 points in 8 dimensions.

  arma::Mat<size_t> neighbors1;
  arma::mat distances1;
  arma::Mat<size_t> neighbors2;
  arma::mat distances2;

  // Nearest neighbor search with the VP tree.
  NeighborSearch<NearestNeighborSort, LMetric<2, true>, arma::mat, VPTree>
      knn1(dataset, SINGLE_TREE_MODE);

  knn1.Search(5, neighbors1, distances1);

  // Nearest neighbor search the naive way.
  KNN knn2(dataset, NAIVE_MODE);

  knn2.Search(5, neighbors2, distances2);

  for (size_t i = 0; i < neighbors1.size(); i++)
  {
    BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]);
    BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]);
  }
}
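// The newToOld/oldToNew mappings verified in VPTreeTest are mutual inverses:
// treeset column i holds dataset column newToOld[i].  A hedged sketch of the
// round-trip property (CheckMappingRoundTrip is hypothetical):
#include <cassert>
#include <vector>

inline void CheckMappingRoundTrip(const std::vector<size_t>& newToOld,
                                  const std::vector<size_t>& oldToNew)
{
  // Mapping an index back and forth must return to the starting index.
  for (size_t i = 0; i < newToOld.size(); ++i)
    assert(oldToNew[newToOld[i]] == i);
}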
BOOST_AUTO_TEST_CASE(DualTreeTraverserTest)
{
  arma::mat dataset;
  dataset.randu(8, 1000); // 1000 points in 8 dimensions.

  arma::Mat<size_t> neighbors1;
  arma::mat distances1;
  arma::Mat<size_t> neighbors2;
  arma::mat distances2;

  // Nearest neighbor search with the VP tree.
  NeighborSearch<NearestNeighborSort, LMetric<2, true>, arma::mat, VPTree>
      knn1(dataset, DUAL_TREE_MODE);

  knn1.Search(5, neighbors1, distances1);

  // Nearest neighbor search the naive way.
  KNN knn2(dataset, NAIVE_MODE);

  knn2.Search(5, neighbors2, distances2);

  for (size_t i = 0; i < neighbors1.size(); i++)
  {
    BOOST_REQUIRE_EQUAL(neighbors1[i], neighbors2[i]);
    BOOST_REQUIRE_EQUAL(distances1[i], distances2[i]);
  }
}

BOOST_AUTO_TEST_SUITE_END();
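// The traverser tests above all share one pattern: run an exact tree-based
// search in some mode, then demand bit-for-bit agreement with brute force.
// A hedged sketch of that pattern factored into one helper; the helper name
// is hypothetical, and only the NeighborSearch API already exercised in this
// file (SINGLE_TREE_MODE, DUAL_TREE_MODE, NAIVE_MODE) is assumed.
template<template<typename TreeMetricType,
                  typename TreeStatType,
                  typename TreeMatType> class TreeType>
bool TreeAgreesWithNaive(const arma::mat& dataset,
                         const NeighborSearchMode mode)
{
  // Exact search with the requested tree type and traversal mode.
  NeighborSearch<NearestNeighborSort, LMetric<2, true>, arma::mat, TreeType>
      treeSearch(dataset, mode);
  // Brute-force baseline.
  KNN naiveSearch(dataset, NAIVE_MODE);

  arma::Mat<size_t> treeNeighbors, naiveNeighbors;
  arma::mat treeDistances, naiveDistances;

  treeSearch.Search(5, treeNeighbors, treeDistances);
  naiveSearch.Search(5, naiveNeighbors, naiveDistances);

  // Exact search must agree with brute force on the returned indices.
  return arma::all(arma::vectorise(treeNeighbors == naiveNeighbors));
}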